def train(params):
    """Train the model with a plain TF1 session loop and save a checkpoint.

    Args:
        params: hyper-parameter object providing ``num_epochs``,
            ``batch_size`` and ``model_file``.

    Side effects: prints per-epoch metrics, writes summaries via the
    returned ``train_writer`` and saves a ``tf.train.Saver`` checkpoint
    to ``params.model_file`` in the current working directory.
    """
    # model_fn builds the graph and returns the placeholders/ops we feed and run.
    x, labels, sess, train_op, accuracy, loss, merged, train_writer, gst = \
        model_fn(params, mode='train')
    xdata, labeldata = train_input_fn()
    print('Training begins')
    start_time = time.time()
    for epoch in range(params.num_epochs):
        print("EPOCH NUMBER: ", epoch + 1)
        total_acc = 0.0
        total_loss = 0.0
        for k in range(0, len(xdata), params.batch_size):
            batch_x = xdata[k:k + params.batch_size]
            batch_labels = labeldata[k:k + params.batch_size]
            _, acc, lss, merg, step = sess.run(
                [train_op, accuracy, loss, merged, gst],
                feed_dict={x: batch_x, labels: batch_labels})
            # BUGFIX: weight each batch by its actual length; the original
            # multiplied the running mean by params.batch_size, which
            # mis-reports the epoch averages whenever len(xdata) is not a
            # multiple of the batch size (short final batch).
            total_acc += acc * len(batch_x)
            total_loss += lss * len(batch_x)
            train_writer.add_summary(merg, global_step=step)
        print("Average Training Accuracy= ", total_acc / len(xdata))
        print("Average Training Loss= ", total_loss / len(xdata))
        print("xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx")
    train_time = time.time() - start_time
    # Checkpoint the trained weights next to the working directory.
    saver = tf.train.Saver()
    saver.save(sess, os.path.join(os.getcwd(), params.model_file))
    print("Total training time= ", train_time, "seconds")
# Anchor indices for the triplet loss: 0 = "tweet", 1 = "image", 2 = "user".
ANCHORS = [0, 1, 2]

if __name__ == '__main__':
    tf.reset_default_graph()
    # tf.logging.set_verbosity(tf.logging.INFO)

    # Read hyper-parameters from the experiment directory's params.json.
    args = parser.parse_args()
    json_path = os.path.join(args.model_dir, 'params.json')
    assert os.path.isfile(json_path), \
        "No json configuration file found at {}".format(json_path)
    params = Params(json_path)

    # Build the input pipeline; the iterator yields one batch per get_next().
    dataset_iter = train_input_fn(args.data_dir, params, args.embed_dir)
    dataset_next = dataset_iter.get_next()

    # Instantiate the triplet-loss model.
    model = TripletLoss(params)

    # NOTE(review): dataset size is hard-coded — presumably the training-set
    # row count; confirm against the data and consider deriving it instead.
    num_train = 5031
    num_train_steps = int(num_train / params.batch_size) * params.num_epochs

    with tf.Session() as sess:
        sess.run(tf.global_variables_initializer())
        sess.run(tf.local_variables_initializer())
        """ Training Module """
        for step in tqdm(range(num_train_steps)):
            # NOTE(review): re-initializing the iterator on every step resets
            # the dataset each iteration; the rest of this loop body is not
            # visible in this chunk, so behavior is preserved as-is — verify
            # against the full file.
            sess.run(dataset_iter.initializer)
# Load the hyper-parameters from the experiment directory's params.json.
args = parser.parse_args()
json_path = os.path.join(args.model_dir, 'params.json')
assert os.path.isfile(json_path), \
    "No json configuration file found at {}".format(json_path)
params = Params(json_path)

# Define the model via the Estimator API; fixed seed for reproducibility.
tf.logging.info("Creating the model...")
config = tf.estimator.RunConfig(tf_random_seed=230,
                                model_dir=args.model_dir,
                                save_summary_steps=params.save_summary_steps)
estimator = tf.estimator.Estimator(model_fn, params=params, config=config)

# Train the model.
tf.logging.info("Starting training for {} epoch(s).".format(params.num_epochs))
estimator.train(lambda: train_input_fn(args.data_dir, params))

# BUGFIX: the export code below was disabled with an unterminated ''' block,
# which is a syntax error as written. Preserved here as line comments so the
# file stays syntactically valid; re-enable by uncommenting.
# path = args.model_save
# try:
#     os.makedirs(path)
# except OSError:
#     print("Creation of the directory %s failed" % path)
# else:
#     print("Successfully created the directory %s " % path)
# estimator.export_saved_model(path, serving_input_receiver_fn())
# print("Model is SAVED !!")
# Load the hyper-parameters from the experiment directory's params.json.
args = parser.parse_args()
json_path = os.path.join(args.model_dir, 'params.json')
assert os.path.isfile(
    json_path), "No json configuration file found at {}".format(json_path)
params = Params(json_path)

# Define the model via the Estimator API; fixed seed for reproducibility.
tf.logging.info("Creating the model...")
config = tf.estimator.RunConfig(
    tf_random_seed=230,
    model_dir=args.model_dir,
    save_summary_steps=params.save_summary_steps)
estimator = tf.estimator.Estimator(model_fn, params=params, config=config)

# Train the model.
tf.logging.info("Starting training for {} epoch(s).".format(
    params.num_epochs))
# BUGFIX: time.clock() was deprecated in Python 3.3 and removed in 3.8;
# time.perf_counter() is the documented replacement for wall-clock timing.
start = time.perf_counter()
estimator.train(
    lambda: train_input_fn(args.data_dir, params, args.model_dir))
tf.logging.info("Training took %s seconds." % (time.perf_counter() - start))

# Evaluate the model on the validation set.
tf.logging.info("Evaluation on validation set.")
start = time.perf_counter()
res = estimator.evaluate(
    lambda: val_input_fn(args.data_dir, params, args.model_dir))
tf.logging.info("Evaluation took %s seconds." % (time.perf_counter() - start))
for key in res:
    print("{}: {}".format(key, res[key]))
import tensorflow as tf
from model.model import create_model
from model.input_fn import train_input_fn
from model.loss import composite_loss
from model.utils import Vocabulary
import os
import json

# Static settings come from config.json; runtime settings are computed here.
ROOT_PATH = os.path.dirname(os.path.abspath(__file__))
# BUGFIX: use a context manager so the config file handle is closed
# (the original open() was never closed).
with open('config.json', 'r') as config_file:
    STATIC_CONFIG = dict(json.load(config_file))
# BUGFIX: the original printed the builtin `staticmethod` (a typo),
# not the loaded configuration.
print(STATIC_CONFIG)
RUNTIME_CONFIG = {"root_path": ROOT_PATH}
print(RUNTIME_CONFIG)
# Runtime values take precedence over the static file on key collisions.
CONFIG = {**STATIC_CONFIG, **RUNTIME_CONFIG}

vocabulary = Vocabulary()
next_training_batch = train_input_fn(vocabulary, CONFIG)
# BUGFIX: the original read `modeloutput - create_model(...)`, which is a
# subtraction of an undefined name (NameError), not an assignment.
model_output = create_model(next_training_batch, CONFIG, True)