def scheduler(epoch):
    # Step-decay learning-rate schedule: 0.05 until epoch 35,
    # halved until epoch 50, then halved again (0.0125) thereafter.
    if epoch < 35:
        return 0.05
    elif epoch < 50:
        return 0.05 / 2
    else:
        return 0.05 / 2 / 2

# Applies scheduler() at the start of every epoch; this overrides the
# optimizer's own learning rate, so the 0.1 passed to SGD below is
# presumably never used after epoch 0 — NOTE(review): confirm intended.
lr_callback = tf.keras.callbacks.LearningRateScheduler(scheduler)

#####################################################
# models
#####################################################
# Build the TDNN classifier; 1211 is the number of output classes
# (presumably VoxCeleb1 speaker count — TODO confirm against tdnn_config).
config = tdnn_config(args.model_size)
model = make_tdnn_model(config, 1211, input_shape)
model.summary()
model.compile(optimizer=tf.keras.optimizers.SGD(0.1, momentum=0.9),
              loss='sparse_categorical_crossentropy',
              metrics=['sparse_categorical_accuracy'])

#####################################################
# fit model
#####################################################
# Save the untrained weights as the epoch-0 checkpoint so the initial
# state can be restored/compared later.
model.save_weights(checkpoint_path.format(epoch=0))
# NOTE(review): this fit(...) call is truncated in this chunk — the
# argument list is cut off after `callbacks=[...]`; remaining arguments
# (and the closing paren) are outside the visible source.
model.fit(train_ds,
          epochs=n_epochs,
          steps_per_epoch=steps_per_epoch,
          callbacks=[cp_callback, lr_callback],
    # NOTE(review): this chunk begins mid-definition — only the final
    # `else` branch of a learning-rate scheduler function is visible here;
    # the `def` line and earlier branches are outside the visible source.
    else:
        return 0.0005 / 2

# Applies the (partially visible) scheduler at the start of every epoch.
lr_callback = tf.keras.callbacks.LearningRateScheduler(scheduler)

####################################################
# train & models
####################################################
# TF1-style setup: a dedicated Graph + Session registered with the Keras
# backend so the quantization-aware model builds into this graph.
train_graph = tf.Graph()
train_sess = tf.Session(graph=train_graph)
tf.keras.backend.set_session(train_sess)
with train_graph.as_default():
    # NOTE(review): original indentation was lost; the statements below are
    # placed inside the `with` block since graph construction must happen
    # under train_graph — confirm against the original file.
    config = tdnn_config(model_size)
    # Despite the `_mnist` name, n_labels=1211 suggests a speaker-ID label
    # space — presumably an MNIST-shaped smoke test; verify with the author.
    train_model = make_quant_tdnn_model_mnist(config, n_labels=1211,
                                              n_frames=n_frames)
    # Input pipeline: generator yields (28, 28, 1) float features and
    # scalar int labels.
    train_ds = tf.data.Dataset.from_generator(train_generator,
                                              output_types=(tf.float32, tf.int32),
                                              output_shapes=((28, 28, 1), ()))
    # Shuffle buffer spans the whole training set (perfect shuffle, at the
    # cost of holding len(train_x) elements in memory).
    train_ds = train_ds.shuffle(buffer_size=len(train_x))
    train_ds = train_ds.repeat()
    # NOTE(review): prefetch is applied BEFORE batch here, so unbatched
    # elements are prefetched; the conventional order is batch-then-prefetch
    # — confirm whether this ordering is intentional.
    train_ds = train_ds.prefetch(buffer_size=AUTOTUNE)
    train_ds = train_ds.batch(batch_size)
    # TF1 iterator API: materialize one (features, labels) tensor pair.
    train_iterator = train_ds.make_one_shot_iterator()
    train_feat, train_label = train_iterator.get_next()
    # Simulate input quantization for quantization-aware training.
    # NOTE(review): this call is truncated in this chunk — its min/max (and
    # closing paren) are outside the visible source.
    train_feat = tf.quantization.fake_quant_with_min_max_args(train_feat,