# Variable initialization: start from random weights only when neither a
# Kaldi checkpoint nor a TensorFlow checkpoint is being restored.
# NOTE: `is None` (identity) instead of `== None` — PEP 8, and `==` may be
# overloaded on framework objects.
if (kaldi_model is None) and (tf_model is None):
    sess.run(tf.global_variables_initializer())

start_time = time.time()
# time.clock() was removed in Python 3.8; time.process_time() is the
# documented replacement (CPU time of the current process).
start_clock = time.process_time()
log.info(
    "Starting training at: " +
    time.strftime("%a, %d %b %Y %H:%M:%S +0000", time.gmtime(start_time)))

# Run the training loop; dev-set checking uses the per-class loss/accuracy
# criterion, with early stopping (patience / patience_2), periodic LR halving,
# and checkpointing via save_func under model_prefix.
train_nn(n_epoch, n_batch, lr_first, lr_last, train_batch_func,
         check_dev=check_dev_class_loss_acc,
         get_para_func=get_para, set_para_func=set_para,
         model_save_file=model_prefix,
         patience=patience, save_func=save_func, patience_2=patience_2,
         half_every_N_epochs=half_every_N_epochs,
         save_every_epoch=False)

end_time = time.time()
end_clock = time.process_time()
log.info(
    " Started training at: " +
    time.strftime("%a, %d %b %Y %H:%M:%S +0000", time.gmtime(start_time)))
log.info(
    " Ended training at: " +
    time.strftime("%a, %d %b %Y %H:%M:%S +0000", time.gmtime(end_time)))
# Variable initialization: start from random weights only when neither a
# Kaldi text-format checkpoint nor a TensorFlow checkpoint is being restored.
# NOTE: `is None` (identity) instead of `== None` — PEP 8, and `==` may be
# overloaded on framework objects.
if (kaldi_txt_model is None) and (tf_model is None):
    sess.run(tf.global_variables_initializer())

start_time = time.time()
# time.clock() was removed in Python 3.8; time.process_time() is the
# documented replacement (CPU time of the current process).
start_clock = time.process_time()
log.info(
    "Starting training at: " +
    time.strftime("%a, %d %b %Y %H:%M:%S +0000", time.gmtime(start_time)))

# Run the training loop; dev-set checking uses the multi-task loss/accuracy
# criterion, with early stopping (patience / patience_2) and checkpointing
# via save_func under model_prefix.
train_nn(n_epoch, n_batch, lr_first, lr_last, train_batch_func,
         check_dev=check_dev_multi_loss_acc,
         get_para_func=get_para, set_para_func=set_para,
         model_save_file=model_prefix,
         patience=patience, save_func=save_func, patience_2=patience_2)

end_time = time.time()
end_clock = time.process_time()
log.info(
    " Started training at: " +
    time.strftime("%a, %d %b %Y %H:%M:%S +0000", time.gmtime(start_time)))
log.info(
    " Ended training at: " +
    time.strftime("%a, %d %b %Y %H:%M:%S +0000", time.gmtime(end_time)))
log.info(" CPU time used: " + str(end_clock - start_clock) + " s.")