Example #1
0
 def __init__(self, data_dir, disable_cuda=True):
     """Build the POS-tagging model and restore its checkpoint.

     Args:
         data_dir: Directory holding the "embedding_character" and
             "embedding_word" embedding files and the "model_pos"
             subdirectory (label list + checkpoint).
         disable_cuda: When True, temporarily hide all CUDA devices via
             CUDA_VISIBLE_DEVICES while the graph/session is built, so
             the model loads on CPU.
     """
     config = model_pos.Config()
     config.name = "model_asbc_Att-0_BiLSTM-2-500_batch256-run1"
     config.attention_heads = 0
     config.layers = 2
     config.hidden_d = 500
     config.c_token_to_vector, config.c_embedding_d = _load_embedding(os.path.join(data_dir, "embedding_character"))
     config.w_token_to_vector, config.w_embedding_d = _load_embedding(os.path.join(data_dir, "embedding_word"))
     config.label_list, config.label_to_index = _read_pos_list(os.path.join(data_dir, "model_pos", "label_list.txt"))
     config.output_d = len(config.label_list)

     # Remember the previous value (None when the variable was unset) so
     # the environment can be restored exactly afterwards.
     env_backup = os.environ.get("CUDA_VISIBLE_DEVICES")
     if disable_cuda:
         os.environ["CUDA_VISIBLE_DEVICES"] = ""

     try:
         with tf.Graph().as_default():
             model = model_pos.Model(config)
             model.sess = tf.Session()
             model.sess.run(tf.global_variables_initializer())
             saver = tf.train.Saver()
             saver.restore(model.sess, os.path.join(data_dir, "model_pos", config.name))
     finally:
         # Restore CUDA_VISIBLE_DEVICES even if model construction or
         # checkpoint restore raised. If it was originally unset, remove
         # it entirely (the original code left it exported as "").
         if disable_cuda:
             if env_backup is None:
                 os.environ.pop("CUDA_VISIBLE_DEVICES", None)
             else:
                 os.environ["CUDA_VISIBLE_DEVICES"] = env_backup

     self.model = model
     return
Example #2
0
 def __init__(self, data_dir):
     """Load the POS-tagging model and its checkpoint from *data_dir*."""
     config = model_pos.Config()
     config.name = "model_asbc_Att-0_BiLSTM-2-500_batch256-run1"
     config.attention_heads = 0
     config.layers = 2
     config.hidden_d = 500

     # Character/word embeddings and the POS label inventory.
     character_path = os.path.join(data_dir, "embedding_character")
     word_path = os.path.join(data_dir, "embedding_word")
     config.c_token_to_vector, config.c_embedding_d = _load_embedding(character_path)
     config.w_token_to_vector, config.w_embedding_d = _load_embedding(word_path)

     label_path = os.path.join(data_dir, "model_pos", "label_list.txt")
     config.label_list, config.label_to_index = _read_pos_list(label_path)
     config.output_d = len(config.label_list)

     # Build the graph in isolation, then restore the trained weights.
     with tf.Graph().as_default():
         model = model_pos.Model(config)
         model.sess = tf.Session()
         model.sess.run(tf.global_variables_initializer())
         checkpoint_path = os.path.join(data_dir, "model_pos", config.name)
         tf.train.Saver().restore(model.sess, checkpoint_path)

     self.model = model