def main(test_desc_file, train_desc_file, load_dir):
    import os
    import tensorflow as tf
    from keras.backend.tensorflow_backend import set_session

    os.environ["CUDA_VISIBLE_DEVICES"] = "3"
    # Limit this process to 90% of the GPU's memory
    config = tf.ConfigProto()
    config.gpu_options.per_process_gpu_memory_fraction = 0.9
    # config.gpu_options.allow_growth = True  # allocate GPU memory on demand instead of all at once
    set_session(tf.Session(config=config))

    # Prepare the data generator
    datagen = DataGenerator()
    # Load the JSON files that describe the dataset
    datagen.load_test_data(test_desc_file)
    datagen.load_train_data(train_desc_file)
    # Use a few samples from the dataset to calculate the mean and variance
    # of the features, so that we can center our inputs to the network
    # datagen.fit_train_test(load_dir)
    datagen.fit_train(100)

    # Load the trained recurrent network (1 1D convolution layer, GRU units
    # and 1 fully connected layer)
    model = load_model(load_dir)
    # Compile the CTC decode function
    decode_fn = decode_ctc_fn(model)
    # Compile the testing function
    test_fn = compile_test_fn(model)
    # Test the model
    test_loss = test(model, test_fn, decode_fn, datagen)
    print("Test loss: {}".format(test_loss))
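# The decode_ctc_fn used above is defined elsewhere in this repo. As a rough
# illustration only, a greedy CTC decode function could be built from the
# Keras TensorFlow backend like the sketch below; the function and variable
# names here are assumptions, not the repo's actual implementation.
import keras.backend as K

def decode_ctc_fn_sketch(model):
    """Return a callable mapping (acoustic_input, input_lengths, phase)
    to greedily decoded label sequences via K.ctc_decode."""
    acoustic_input = model.inputs[0]      # (batch, time, feature_dim)
    softmax_output = model.outputs[0]     # (batch, time, num_labels + 1)
    input_lengths = K.placeholder(ndim=1, dtype='int32')
    decoded, _ = K.ctc_decode(softmax_output, input_lengths, greedy=True)
    return K.function([acoustic_input, input_lengths, K.learning_phase()],
                      decoded)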
def main(train_desc_file, val_desc_file, epochs, save_dir, sortagrad):
    if not os.path.exists(save_dir):
        os.makedirs(save_dir)

    # Configure logging
    configure_logging(file_log_path=os.path.join(save_dir, 'train_log.txt'))

    # Prepare the data generator
    datagen = DataGenerator()
    # Load the JSON files that describe the dataset
    datagen.load_train_data(train_desc_file)
    datagen.load_validation_data(val_desc_file)
    # Use a few samples from the dataset to calculate the mean and variance
    # of the features, so that we can center our inputs to the network
    datagen.fit_train(100)

    # Compile a recurrent network with 1 1D convolution layer, GRU units
    # and 1 fully connected layer
    model = compile_gru_model(recur_layers=3, nodes=1000, batch_norm=True)
    # Compile the CTC training function
    train_fn = compile_train_fn(model)
    # Compile the validation function
    val_fn = compile_test_fn(model)
    # Train the model
    train(model, train_fn, val_fn, datagen, save_dir, epochs=epochs,
          do_sortagrad=sortagrad)
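# The do_sortagrad flag presumably enables SortaGrad-style curriculum ordering
# (as in Deep Speech 2): during the first epoch, utterances are fed
# shortest-first so CTC training starts on easier examples, and later epochs
# are shuffled as usual. A minimal sketch of that idea, with hypothetical
# names (the repo's actual ordering logic lives in its data generator):
import random

def order_utterances(durations, texts, audio_paths, epoch, do_sortagrad):
    examples = list(zip(durations, texts, audio_paths))
    if do_sortagrad and epoch == 0:
        examples.sort(key=lambda x: x[0])   # shortest utterances first
    else:
        random.shuffle(examples)            # normal shuffled epoch
    return examples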
def main(test_desc_file, train_desc_file, load_dir, result_file='test_result'):
    # Prepare the data generator and load the dataset descriptions
    datagen = DataGenerator()
    datagen.load_test_data(test_desc_file)
    datagen.load_train_data(train_desc_file)
    datagen.fit_train(100)

    # Load the trained model and compile the testing function
    model = load_model(load_dir)
    test_fn = compile_test_fn(model)

    # Run the test and report the elapsed time
    start = time.time()
    test(model, test_fn, datagen, result_file)
    print('elapsed: %s' % (time.time() - start))
def main(train_desc_file, val_desc_file, epochs, save_dir, sortagrad):
    if not os.path.exists(save_dir):
        os.makedirs(save_dir)

    # Configure logging
    configure_logging(file_log_path=os.path.join(save_dir, 'train_log.txt'))

    # Prepare the data generator
    datagen = DataGenerator()
    # Load the JSON files that describe the dataset
    datagen.load_train_data(train_desc_file)
    datagen.load_validation_data(val_desc_file)
    # Use a few samples from the dataset to calculate the mean and variance
    # of the features, so that we can center our inputs to the network,
    # and save the statistics so they can be reused at inference time
    feats_mean, feats_std = datagen.fit_train(100)
    np.savetxt(os.path.join(save_dir, 'feats_mean.txt'), feats_mean, fmt='%0.8f')
    np.savetxt(os.path.join(save_dir, 'feats_std.txt'), feats_std, fmt='%0.8f')

    # Compile a recurrent network with 1 1D convolution layer, GRU units
    # and 1 fully connected layer
    model = compile_gru_model(recur_layers=3, batch_norm=True)

    # Log the model architecture
    logger.info('*' * 20)
    model_summary = model.summary()
    logger.info(model_summary)
    logger.info('*' * 20)

    # Resume from previously trained weights
    model.load_weights("model_0621_zhongjin_0624/model_46000_weights.h5")

    # Compile the CTC training function
    train_fn = compile_train_fn(model)
    # Compile the validation function
    val_fn = compile_test_fn(model)
    # Compile the CTC decode function
    decode_fn = decode_ctc_fn(model)
    # Train the model
    train(model, train_fn, val_fn, decode_fn, datagen, save_dir,
          epochs=epochs, do_sortagrad=sortagrad)
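# fit_train(100) estimates per-dimension feature statistics from a sample of
# utterances; the saved feats_mean.txt / feats_std.txt can then be reloaded
# later to normalize test-time inputs identically. A simplified sketch of the
# idea (the actual DataGenerator.fit_train in this repo may differ):
import numpy as np

def estimate_feature_stats(feature_arrays):
    """feature_arrays: list of (time, feat_dim) spectrogram arrays."""
    stacked = np.vstack(feature_arrays)   # pool frames from all sampled utterances
    return np.mean(stacked, axis=0), np.std(stacked, axis=0)

def normalize(features, feats_mean, feats_std, eps=1e-14):
    """Center and scale a (time, feat_dim) array with the saved statistics."""
    return (features - feats_mean) / (feats_std + eps)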
def main(test_desc_file, train_desc_file, load_dir):
    # Prepare the data generator
    datagen = DataGenerator()
    # Load the JSON files that describe the dataset
    datagen.load_test_data(test_desc_file)
    datagen.load_train_data(train_desc_file)
    # Use a few samples from the dataset to calculate the mean and variance
    # of the features, so that we can center our inputs to the network
    datagen.fit_train(100)

    # Load the trained recurrent network (1 1D convolution layer, GRU units
    # and 1 fully connected layer)
    model = load_model(load_dir)
    # Compile the testing function
    test_fn = compile_test_fn(model)
    # Test the model
    test_loss = test(model, test_fn, datagen)
    print("Test loss: {}".format(test_loss))
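# compile_test_fn presumably wires the model's softmax output to a CTC loss so
# that test() can report an average loss over the dataset. Purely as an
# illustration, such a function could be assembled with the Keras backend's
# ctc_batch_cost as below; the names and placeholder shapes are assumptions,
# not the repo's actual code.
import keras.backend as K

def compile_test_fn_sketch(model):
    acoustic_input = model.inputs[0]                       # (batch, time, feats)
    softmax_output = model.outputs[0]                      # (batch, time, labels + 1)
    labels = K.placeholder(ndim=2, dtype='int32')          # padded label batch
    input_lengths = K.placeholder(ndim=2, dtype='int32')   # (batch, 1)
    label_lengths = K.placeholder(ndim=2, dtype='int32')   # (batch, 1)
    ctc_loss = K.ctc_batch_cost(labels, softmax_output,
                                input_lengths, label_lengths)
    return K.function([acoustic_input, labels, input_lengths, label_lengths,
                       K.learning_phase()],
                      [softmax_output, K.mean(ctc_loss)])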