Example #1
 def _init_ner_model(self, session, ckpt_path):
     """Create ner Tagger model and initialize or load parameters in session."""
     # initialize config
     config = ner_model.get_config(self.name)
     if config is None:
         print("WARNING: Input model name %s has no configuration..." %
               self.name)
         return  # bail out early; the settings below require a valid config
     config.batch_size = 1
     config.num_steps = 1  # iterate one token at a time
     model_var_scope = get_model_var_scope(self.var_scope, self.name)
     print("NOTICE: Input NER Model Var Scope Name '%s'" % model_var_scope)
     # Check if self.model already exists
     if self.model is None:
         with tf.variable_scope(model_var_scope, reuse=tf.AUTO_REUSE):
             self.model = ner_model.NERTagger(
                 is_training=True,
                 config=config)  # save object after is_training
     #else:   # Model Graph Def already exist
     #    print ("DEBUG: Model Def already exists")
     # update model parameters
     if len(glob.glob(ckpt_path + '.data*')) > 0:  # checkpoint files exist with pattern 'ner.ckpt.data*'
         print("NOTICE: Loading model parameters from %s" % ckpt_path)
         all_vars = tf.global_variables()
         model_vars = [
             k for k in all_vars if model_var_scope in k.name.split("/")
         ]  # e.g. ner_var_scope_zh
         tf.train.Saver(model_vars).restore(session, ckpt_path)
     else:
         print(
             "NOTICE: Model not found, try running deepnlp.download(module='ner', name='%s')"
             % self.name)
         print("NOTICE: Created with fresh parameters.")
         session.run(tf.global_variables_initializer())
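
The snippet above assumes module-level `import glob` and `import tensorflow as tf` (TF 1.x APIs such as tf.variable_scope and tf.train.Saver), plus deepnlp's ner_model module. A minimal, hedged sketch of how the method might be driven from the outside; the wrapper function, tagger instance, and checkpoint layout are hypothetical:

import os
import tensorflow as tf

def load_ner_session(tagger, ckpt_dir):
    """Open a TF 1.x session and let the tagger restore or re-initialize itself.

    `tagger` is assumed to be an instance of the class that defines
    _init_ner_model above; `ckpt_dir` is a hypothetical checkpoint directory.
    """
    ckpt_path = os.path.join(ckpt_dir, "ner.ckpt")  # prefix of the .data*/.index files
    session = tf.Session()
    tagger._init_ner_model(session, ckpt_path)      # restores variables or runs the initializer
    return session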
Example #2
 def _init_ner_model(self, session, ckpt_path):
     """Create ner Tagger model and initialize or load parameters in session."""
     # initialize config
     config = ner_model.get_config(self.lang)
     config.batch_size = 1
     config.num_steps = 1  # iterate one token at a time
     
     with tf.variable_scope("ner_var_scope"):
         model = ner_model.NERTagger(is_training=True, config=config) # save object after is_training
     
     if len(glob.glob(ckpt_path + '.data*')) > 0:  # checkpoint files exist with pattern 'ner.ckpt.data*'
         print("Loading model parameters from %s" % ckpt_path)
         all_vars = tf.global_variables()
         model_vars = [k for k in all_vars if k.name.startswith("ner_var_scope")]
         tf.train.Saver(model_vars).restore(session, ckpt_path)
     else:
         print("Model not found, created with fresh parameters.")
         session.run(tf.global_variables_initializer())
     return model
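
The common pattern in both examples is restoring only the variables that live under the model's variable scope, so other models sharing the same graph and session are left untouched. Below is a minimal, self-contained sketch of that scope-filtered restore in TF 1.x; the scope name and checkpoint prefix are hypothetical:

import glob
import tensorflow as tf

SCOPE = "ner_var_scope"       # hypothetical scope name
CKPT = "./ner_demo.ckpt"      # hypothetical checkpoint prefix (shards: ner_demo.ckpt.data*)

# Build a tiny graph inside the scope so its variables carry the scope prefix.
with tf.variable_scope(SCOPE):
    weights = tf.get_variable("weights", shape=[4, 4])

# Keep only variables whose names start with the scope, mirroring the examples above.
scope_vars = [v for v in tf.global_variables() if v.name.startswith(SCOPE + "/")]
saver = tf.train.Saver(scope_vars)

with tf.Session() as sess:
    if glob.glob(CKPT + ".data*"):                    # checkpoint shards already on disk
        saver.restore(sess, CKPT)                     # restore only the scoped variables
    else:
        sess.run(tf.global_variables_initializer())   # fall back to fresh parameters
        saver.save(sess, CKPT)                        # write a checkpoint for the next run

Note that Example #1's check, model_var_scope in k.name.split("/"), is the stricter variant: it matches the scope as a whole path component, whereas Example #2's startswith test also matches any scope name that merely begins with the same prefix.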