def create_model(session, targetSpaceSize, vocabsize, forward_only):
    """Build an SSE model from FLAGS-driven hyperparameters.

    Saves the configuration to FLAGS.model_dir, constructs the model, then
    either restores the latest checkpoint found in FLAGS.model_dir or (when
    training from scratch) initializes fresh variables in ``session``.

    NOTE(review): a second ``create_model`` defined later in this file
    shadows this one at import time — confirm which definition is the one
    actually intended to be used.

    Args:
        session: TensorFlow session to restore/initialize variables in.
        targetSpaceSize: size of the target label space.
        vocabsize: source vocabulary size.
        forward_only: True for inference; a missing checkpoint is then fatal.

    Returns:
        The constructed (and restored or initialized) SSEModel instance.
    """
    hyper_params = {
        'max_seq_length': FLAGS.max_seq_length,
        'vocab_size': vocabsize,
        'embedding_size': FLAGS.embedding_size,
        'encoding_size': FLAGS.encoding_size,
        'learning_rate': FLAGS.learning_rate,
        'learning_rate_decay_factor': FLAGS.learning_rate_decay_factor,
        'src_cell_size': FLAGS.src_cell_size,
        'tgt_cell_size': FLAGS.tgt_cell_size,
        'network_mode': FLAGS.network_mode,
        'predict_nbest': FLAGS.predict_nbest,
        'targetSpaceSize': targetSpaceSize,
        'forward_only': forward_only,
    }
    # Persist the configuration next to the checkpoints so a later run can
    # reconstruct the same model.
    data_utils.save_model_configs(FLAGS.model_dir, hyper_params)

    model = sse_model.SSEModel(hyper_params)

    checkpoint = tf.train.get_checkpoint_state(FLAGS.model_dir)
    if checkpoint:
        print("Reading model parameters from %s" % checkpoint.model_checkpoint_path)
        model.saver.restore(session, checkpoint.model_checkpoint_path)
    elif forward_only:
        # Inference requires trained weights; abort when none are available.
        print('Error!!!Could not load any model from specified folder: %s' % FLAGS.model_dir )
        exit(-1)
    else:
        print("Created model with fresh parameters.")
        session.run(tf.global_variables_initializer())
    return model
def create_model(session, targetSpaceSize, vocabsize, forward_only):
    """Build an SSE model from FLAGS-driven hyperparameters (positional form).

    Saves the configuration tuple to FLAGS.model_dir, constructs the model
    with positional/keyword arguments, then either restores the latest
    checkpoint found in FLAGS.model_dir or (when training from scratch)
    initializes fresh variables in ``session``.

    NOTE(review): this definition shadows an earlier ``create_model`` in the
    same file — confirm which one is intended to survive.

    Args:
        session: TensorFlow session to restore/initialize variables in.
        targetSpaceSize: size of the target label space.
        vocabsize: source vocabulary size.
        forward_only: True for inference; a missing checkpoint is then fatal.

    Returns:
        The constructed (and restored or initialized) SSEModel instance.
    """
    config_tuple = (
        FLAGS.max_seq_length, FLAGS.max_gradient_norm, vocabsize,
        FLAGS.embedding_size, FLAGS.encoding_size, FLAGS.src_cell_size,
        FLAGS.tgt_cell_size, FLAGS.num_layers, FLAGS.learning_rate,
        FLAGS.learning_rate_decay_factor, targetSpaceSize,
        FLAGS.network_mode, FLAGS.predict_nbest, FLAGS.alpha,
        FLAGS.neg_samples,
    )
    # Persist the configuration next to the checkpoints so a later run can
    # reconstruct the same model.
    data_utils.save_model_configs(FLAGS.model_dir, config_tuple)

    model = sse_model.SSEModel(
        FLAGS.max_seq_length, FLAGS.max_gradient_norm, vocabsize,
        FLAGS.embedding_size, FLAGS.encoding_size, FLAGS.src_cell_size,
        FLAGS.tgt_cell_size, FLAGS.num_layers, FLAGS.learning_rate,
        FLAGS.learning_rate_decay_factor, targetSpaceSize,
        network_mode=FLAGS.network_mode,
        forward_only=forward_only,
        TOP_N=FLAGS.predict_nbest,
        alpha=FLAGS.alpha,
        neg_samples=FLAGS.neg_samples,
    )

    checkpoint = tf.train.get_checkpoint_state(FLAGS.model_dir)
    if checkpoint:
        print("Reading model parameters from %s" % checkpoint.model_checkpoint_path)
        model.saver.restore(session, checkpoint.model_checkpoint_path)
    elif forward_only:
        # Inference requires trained weights; abort when none are available.
        print('Error!!!Could not load any model from specified folder: %s' % FLAGS.model_dir )
        exit(-1)
    else:
        print("Created model with fresh parameters.")
        session.run(tf.global_variables_initializer())
    return model