def __init__(self, data_dir, disable_cuda=True):
    """Load the ASBC word-segmentation model from *data_dir*.

    Builds a fixed Bi-LSTM configuration, loads the character embedding
    table, constructs the TF graph/session, and restores the trained
    checkpoint. The loaded model is stored on ``self.model``.

    Parameters:
        data_dir: directory containing ``embedding_character`` and the
            ``model_ws`` checkpoint files.
        disable_cuda: when True (default), hide all GPUs from TensorFlow
            for the duration of model construction by temporarily setting
            ``CUDA_VISIBLE_DEVICES=""``; the previous value (or absence)
            is restored afterwards.
    """
    config = model_ws.Config()
    config.name = "model_asbc_Att-0_BiLSTM-cross-2-500_batch128-run1"
    config.attention_heads = 0
    config.is_cross_bilstm = True
    config.layers = 2
    config.hidden_d = 500
    config.w_token_to_vector, config.w_embedding_d = _load_embedding(
        os.path.join(data_dir, "embedding_character")
    )

    # Snapshot the current value (None means "was not set at all") so we
    # can restore the environment exactly as we found it.
    env_backup = None
    if disable_cuda:
        env_backup = os.environ.get("CUDA_VISIBLE_DEVICES")
        os.environ["CUDA_VISIBLE_DEVICES"] = ""

    try:
        with tf.Graph().as_default():
            model = model_ws.Model(config)
            model.sess = tf.Session()
            model.sess.run(tf.global_variables_initializer())
            saver = tf.train.Saver()
            saver.restore(model.sess, os.path.join(data_dir, "model_ws", config.name))
    finally:
        # Bug fixes vs. the original:
        # - restore even when loading raises (try/finally);
        # - if the variable was unset before, delete it instead of leaving
        #   the "" override behind;
        # - restore an empty-string backup too (original's truthiness test
        #   `if disable_cuda and env_backup` skipped that case).
        if disable_cuda:
            if env_backup is None:
                del os.environ["CUDA_VISIBLE_DEVICES"]
            else:
                os.environ["CUDA_VISIBLE_DEVICES"] = env_backup

    self.model = model
def __init__(self, data_dir):
    """Load the ASBC word-segmentation model from *data_dir*.

    Builds a fixed Bi-LSTM configuration, loads the character embedding
    table, constructs the TF graph/session, restores the trained
    checkpoint, and stores the resulting model on ``self.model``.

    Parameters:
        data_dir: directory containing ``embedding_character`` and the
            ``model_ws`` checkpoint files.
    """
    config = model_ws.Config()
    config.name = "model_asbc_Att-0_BiLSTM-cross-2-500_batch128-run1"
    config.attention_heads = 0
    config.is_cross_bilstm = True
    config.layers = 2
    config.hidden_d = 500

    embedding_path = os.path.join(data_dir, "embedding_character")
    config.w_token_to_vector, config.w_embedding_d = _load_embedding(embedding_path)

    # Build the model in its own graph so it doesn't collide with any
    # default-graph state elsewhere in the process.
    graph = tf.Graph()
    with graph.as_default():
        model = model_ws.Model(config)
        session = tf.Session()
        model.sess = session
        session.run(tf.global_variables_initializer())
        checkpoint_path = os.path.join(data_dir, "model_ws", config.name)
        tf.train.Saver().restore(session, checkpoint_path)

    self.model = model