if model_tag == 'cnn':
    from model_graph_cnn import build_graph
elif model_tag == 'csm':
    from model_graph_csm import build_graph
elif model_tag == 'rnn':
    from model_graph_rnn import build_graph
elif model_tag == 'mlp':
    from model_graph_mlp import build_graph
else:
    assert False, "NOT supported model_tag"
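#
# Equivalent dynamic dispatch (a sketch, not in the original script): the graph
# modules follow the "model_graph_<tag>" naming pattern, so the chain above can
# also be collapsed with importlib; this rebinds build_graph to the same function.
import importlib
build_graph = importlib.import_module("model_graph_" + model_tag).build_graph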

#
# data
dataset = Dataset()
dataset.load_vocab_tokens_and_emb()
#

#
config = ModelSettings()
config.vocab = dataset.vocab
config.model_tag = model_tag
config.model_graph = build_graph
config.is_train = False
config.check_settings()
#
model = ModelWrapper(config)
model.prepare_for_prediction()
#
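# With is_train = False, prepare_for_prediction() presumably builds the chosen
# graph and restores the trained weights, so the wrapper is ready for inference
# on the raw text below.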

text_raw = ["这本书不错"]   # sample input; the Chinese sentence means "This book is pretty good."
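# Optional input path, disabled by wrapping it in the string literal below: read
# the raw texts from the first column of the first sheet of an Excel workbook
# with xlrd, instead of using the hard-coded example above.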
"""
work_book = xlrd.open_workbook(file_raw)
data_sheet = work_book.sheets()[0]
text_raw = data_sheet.col_values(0)
                                    global_step = total_batch)
                #
            #
            if flag_stop: break # for epoch
            #
        #
        str_info = "training ended after total epochs: %d" % (epoch + 1)
        self._log_info(str_info)
        self._log_info("")
        # print(str_info)
        # print()
        #
            
if __name__ == '__main__':
    
    sett = ModelSettings('vocab_placeholder', False)
    
    sett.model_tag = 'cnn'
    
    sett.check_settings()
    
    #print(dir(sett))    
    #l = [i for i in dir(sett) if inspect.isbuiltin(getattr(sett, i))]
    #l = [i for i in dir(sett) if inspect.isfunction(getattr(sett, i))]
    #l = [i for i in dir(sett) if not callable(getattr(sett, i))]
    
    print(sett.__dict__.keys())
    print()
    
    model = ModelWrapper(sett)
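    # A small sketch (not part of the original) of the introspection hinted at in
    # the commented-out lines above: keep only plain data attributes of the
    # settings object, skipping dunder names and anything callable.
    data_attrs = [name for name in dir(sett)
                  if not name.startswith('_') and not callable(getattr(sett, name))]
    print(data_attrs)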
    
Example 3
                        help='restart')
    #
    return parser.parse_args()

#
if __name__ == '__main__':
    """
    """
    args = parse_args()
    #
    os.environ["CUDA_DEVICE_ORDER"] = "PCI_BUS_ID"
    os.environ["CUDA_VISIBLE_DEVICES"] = args.gpu
    #

    # settings
    settings = ModelSettings()
    settings.gpu_available = args.gpu

    # data
    if args.debug == 1:
        train_files = demo_data["train"]
        dev_files = demo_data["dev"]
        test_files = demo_data["test"]
        #
        settings.tokens_file = os.path.join(dir_vocab_demo, "vocab_tokens.txt")
        settings.base_dir = "../task_mrc_demo"
        #
        assign_paras_from_dict(settings, debug_paras)
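        # assign_paras_from_dict is not shown in this snippet; presumably it just
        # copies each key/value pair of debug_paras onto settings (e.g. via setattr).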
        #
    else:
        train_files = data_all["train"]
Example 4
#
if model_tag.startswith('cnn'):
    from model_graph_cnn import ModelGraph
elif model_tag.startswith('rnn'):
    from model_graph_rnn import ModelGraph
elif model_tag.startswith('rnf'):
    from model_graph_rnf import ModelGraph
elif model_tag.startswith('msa'):
    from model_graph_msa import ModelGraph
elif model_tag.startswith('cap'):
    from model_graph_cap import ModelGraph
else:
    assert False, "NOT supported model_tag"
#
# settings
settings = ModelSettings()
settings.gpu_available = args.gpu
settings.model_tag = model_tag
#
if run_mode == 'predict':
    settings.is_train = False
else:
    settings.is_train = True
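# equivalently: settings.is_train = (run_mode != 'predict')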
#
settings.check_settings()
settings.create_or_reset_log_file()
settings.logger.info('running with args : {}'.format(args))
settings.logger.info(settings.trans_info_to_dict())
settings.save_to_json_file("./temp_settings.json")
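# save_to_json_file dumps the resolved settings, so the exact configuration of
# this run can be inspected or reloaded later.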
#
# vocab