def testMetaLearnerCanPredictWithConvolutionalModel(self):
    """Meta-learner wrapping a convolutional model predicts with shape (1, 1, 2, 1)."""
    model = self.create_convolutional_model()
    wrapper = create_model_wrapper(model)
    # 'sequences' input mode — presumably required for conv models; matches
    # the overfit test for the same model type.
    meta = create_meta_learner(wrapper, input_type='sequences')
    meta.compile(loss=model.loss, optimizer='adam')
    batch = next(self.generator(return_sequences=True))
    # assertEqual, not the deprecated assertEquals alias (removed in Python 3.12).
    self.assertEqual((1, 1, 2, 1), meta.predict(batch[0]).shape)
def testMetaLearnerCanPredict(self):
    """Meta-learner over a sparse-wrapped model predicts with shape (1, 2, 1)."""
    model = self.create_model()
    # Sparse wrapper restricted to 4 trainable parameters.
    wrapper = create_model_wrapper(model, sparse=True, num_sparse_params=4)
    meta = create_meta_learner(wrapper)
    meta.compile(loss=model.loss, optimizer='adam')
    batch = next(self.generator())
    # assertEqual, not the deprecated assertEquals alias (removed in Python 3.12).
    self.assertEqual((1, 2, 1), meta.predict(batch[0]).shape)
def testMetaLearnerCanOverfitWithConvolutionalModel(self):
    """Training the conv-model meta-learner reduces loss and ends below 0.05."""
    np.random.seed(0)  # fix RNG so the overfit threshold is reproducible
    model = self.create_convolutional_model()
    wrapper = create_model_wrapper(model)
    meta = create_meta_learner(wrapper, input_type='sequences')
    meta.compile(loss=model.loss, optimizer='adam')
    generator = self.generator(return_sequences=True)
    history = meta.fit_generator(generator, steps_per_epoch=100, epochs=5)
    loss = history.history["loss"]
    # assertGreater/assertLess report the actual loss values on failure,
    # unlike assertTrue(a > b) which only reports False.
    self.assertGreater(loss[0], loss[-1])
    self.assertLess(loss[-1], 0.05)
def testMetaLearnerCanOverfit(self):
    """Training the meta-learner reduces loss and ends below 0.05."""
    np.random.seed(0)  # fix RNG so the overfit threshold is reproducible
    model = self.create_model()
    wrapper = create_model_wrapper(model)
    meta = create_meta_learner(wrapper)
    meta.compile(loss=model.loss, optimizer='adam')
    generator = self.generator()
    history = meta.fit_generator(generator, steps_per_epoch=100, epochs=5)
    loss = history.history["loss"]
    # assertGreater/assertLess report the actual loss values on failure,
    # unlike assertTrue(a > b) which only reports False.
    self.assertGreater(loss[0], loss[-1])
    self.assertLess(loss[-1], 0.05)
# Optional positional args 9-11 configure frame subsampling and splicing
# context; argv index 11 requires len(sys.argv) >= 12. Defaults: no
# subsampling, no context.
if len(sys.argv) >= 12:
    subsampling_factor = int(sys.argv[9])
    left_context = int(sys.argv[10])
    right_context = int(sys.argv[11])
else:
    subsampling_factor = 1
    left_context = 0
    right_context = 0

# NOTE(review): model_path, adaptation_type, input_type, feats, utt2spk,
# adapt_pdfs, test_pdfs and return_sequences are presumably parsed from
# earlier sys.argv entries outside this view — confirm.
model = load_acoustic_model(model_path, adaptation_type)
model.compile(loss='sparse_categorical_crossentropy', optimizer='adam')
model.summary()

# Wrap the acoustic model and build the meta-learner that drives it.
wrapper = create_model_wrapper(model)
meta = create_meta_learner(wrapper, units=20, input_type=input_type)
meta.compile(
    loss=model.loss,
    optimizer=Adam(),
    metrics=['accuracy']
)
meta.summary()

# Current model weights are passed to the data loader along with the
# feature/label sources and splicing configuration.
params = get_model_weights(model)
num_train_batches, train_generator, num_val_batches, val_generator = load_data(
    params, feats, utt2spk, adapt_pdfs, test_pdfs,
    subsampling_factor=subsampling_factor,
    left_context=left_context,
    right_context=right_context,
    return_sequences=return_sequences)
# Positional args 9-12: splicing context, chunk length in frames, and the
# meta-learner mode string.
left_context = int(sys.argv[9])
right_context = int(sys.argv[10])
num_frames = int(sys.argv[11])
meta_learner_mode = sys.argv[12]

num_epochs = 20
batch_size = 4

# NOTE(review): model_path, adaptation_type, adapt_pdfs, test_pdfs and
# train_feats are presumably parsed from earlier sys.argv entries outside
# this view — confirm.
model = load_model(model_path, adaptation_type)
model.compile(loss='sparse_categorical_crossentropy', optimizer='adam', metrics=['accuracy'])

# Per-layer learning-rate meta-learner wrapped around the acoustic model.
wrapper = create_model_wrapper(model, batch_size=batch_size)
meta = create_meta_learner(wrapper, meta_learner_type='lr_per_layer', mode=meta_learner_mode)
meta.compile(loss=model.loss, optimizer=Adam(), metrics=['accuracy'])
meta.summary()

model_params = get_model_weights(model)
utt_to_adapt_pdfs = load_utt_to_pdfs(adapt_pdfs)
utt_to_test_pdfs = load_utt_to_pdfs(test_pdfs)

# Batch the flattened model parameters so each MAML task batch gets a copy.
params_dataset = load_params_generator(model_params)
params_dataset = params_dataset.batch(batch_size, drop_remainder=True)

# NOTE(review): this call continues beyond the visible portion of the file.
train_dataset = load_dataset_for_maml(train_feats, utt_to_adapt_pdfs,
                                      utt_to_test_pdfs,
                                      left_context=left_context,