From e510ba699f873fe313784af1215dbbefe8532012 Mon Sep 17 00:00:00 2001
From: Jean-Marc Valin
Date: Sun, 24 Mar 2019 03:48:26 -0400
Subject: [PATCH] Making it easier to adapt (or not) a model

---
 src/train_lpcnet.py | 20 +++++++++++++++++---
 1 file changed, 17 insertions(+), 3 deletions(-)

diff --git a/src/train_lpcnet.py b/src/train_lpcnet.py
index 8c82f19..0b5e0ba 100755
--- a/src/train_lpcnet.py
+++ b/src/train_lpcnet.py
@@ -105,6 +105,20 @@
 # dump models to disk as we go
 checkpoint = ModelCheckpoint('lpcnet24g_384_10_G16_{epoch:02d}.h5')
 
-model.load_weights('lpcnet24c_384_10_G16_120.h5')
-model.compile(optimizer=Adam(0.0001, amsgrad=True), loss='sparse_categorical_crossentropy')
-model.fit([in_data, features, periods], out_exc, batch_size=batch_size, epochs=nb_epochs, validation_split=0.0, callbacks=[checkpoint, lpcnet.Sparsify(0, 0, 1, (0.05, 0.05, 0.2))])
+#Set this to True to adapt an existing model (e.g. on new data)
+adaptation = False
+
+if adaptation:
+    #Adapting from an existing model
+    model.load_weights('lpcnet24c_384_10_G16_120.h5')
+    sparsify = lpcnet.Sparsify(0, 0, 1, (0.05, 0.05, 0.2))
+    lr = 0.0001
+    decay = 0
+else:
+    #Training from scratch
+    sparsify = lpcnet.Sparsify(2000, 40000, 400, (0.05, 0.05, 0.2))
+    lr = 0.001
+    decay = 5e-5
+
+model.compile(optimizer=Adam(lr, amsgrad=True, decay=decay), loss='sparse_categorical_crossentropy')
+model.fit([in_data, features, periods], out_exc, batch_size=batch_size, epochs=nb_epochs, validation_split=0.0, callbacks=[checkpoint, sparsify])
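
Note (not part of the patch): the two branches differ in the sparsification schedule and the optimizer settings. The adaptation branch loads weights that are already sparse, and its Sparsify(0, 0, 1, ...) configuration appears to apply the target densities from the first batch, whereas the from-scratch branch ramps sparsity in gradually. For the optimizer, the decay argument of the pre-TF2 Keras Adam applies time-based decay of the form lr_t = lr / (1 + decay * t), where t counts parameter updates. The sketch below only illustrates the resulting learning-rate schedules; effective_lr() is a hypothetical helper and is not part of the LPCNet sources.

    # Illustrative sketch only -- effective_lr() does not exist in LPCNet.
    # Legacy Keras Adam with a non-zero `decay` uses: lr_t = lr / (1 + decay * t),
    # where t is the number of updates performed so far.
    def effective_lr(base_lr, decay, t):
        return base_lr / (1.0 + decay * t)

    # Training from scratch (lr=0.001, decay=5e-5): the rate shrinks over time.
    print(effective_lr(0.001, 5e-5, 0))       # 0.001
    print(effective_lr(0.001, 5e-5, 40000))   # ~0.00033
    print(effective_lr(0.001, 5e-5, 120000))  # ~0.00014

    # Adaptation (lr=0.0001, decay=0): the rate stays constant for the whole run.
    print(effective_lr(0.0001, 0, 120000))    # 0.0001

In other words, adaptation fine-tunes with a small, constant learning rate, while training from scratch starts an order of magnitude higher and decays as training progresses.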