Example #1
import time

import torch

# SeqModel and evaluate are defined elsewhere in the project.


def load_model_decode(model_dir, data, name, gpu, seg=True):
    data.HP_gpu = gpu
    print("Load Model from file: ", model_dir)
    model = SeqModel(data)
    ## Loading must handle a checkpoint trained on GPU being restored on CPU, and vice versa.
    # if not gpu:
    #     model.load_state_dict(torch.load(model_dir), map_location=lambda storage, loc: storage)
    #     # model = torch.load(model_dir, map_location=lambda storage, loc: storage)
    # else:
    model.load_state_dict(torch.load(model_dir))
    # model = torch.load(model_dir)

    print("Decode %s data ..." % (name))
    start_time = time.time()
    speed, acc, p, r, f, pred_results = evaluate(data, model, name)
    end_time = time.time()
    time_cost = end_time - start_time
    if seg:
        print(
            "%s: time:%.2fs, speed:%.2fst/s; acc: %.4f, p: %.4f, r: %.4f, f: %.4f"
            % (name, time_cost, speed, acc, p, r, f))
    else:
        print("%s: time:%.2fs, speed:%.2fst/s; acc: %.4f" %
              (name, time_cost, speed, acc))
    return pred_results
Example #2
def load_model(model_dir, data, gpu):
    data.HP_gpu = gpu
    print("Load Model from file: ", model_dir)
    model = SeqModel(data)
    ## Loading must handle a checkpoint trained on GPU being restored on CPU, and vice versa.
    if not gpu:
        model.load_state_dict(
            torch.load(model_dir, map_location=lambda storage, loc: storage))
        # model = torch.load(model_dir, map_location=lambda storage, loc: storage)
    else:
        model.load_state_dict(torch.load(model_dir))
    # model = torch.load(model_dir)
    return model
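
The map_location branch above is the portable way to restore a GPU-trained checkpoint on a CPU-only machine. Below is a minimal, self-contained sketch of the same pattern; the nn.Linear stand-in and the checkpoint.pt filename are illustrative, not part of the project.

import torch
import torch.nn as nn

# Stand-in network; the examples on this page use SeqModel(data) instead.
model = nn.Linear(4, 2)
torch.save(model.state_dict(), "checkpoint.pt")

# map_location="cpu" remaps any CUDA tensors stored in the file onto the CPU;
# without it, loading a GPU-saved checkpoint on a CPU-only machine raises an error.
state = torch.load("checkpoint.pt", map_location="cpu")
model.load_state_dict(state)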
Example #3
class Tagger:
    def __init__(self, model_dir, dset_dir, gpu, seg):
        self.model_dir = model_dir
        self.dset_dir = dset_dir
        # load_data_setting (defined in the surrounding project) restores the
        # saved data configuration from the .dset file.
        self.data = load_data_setting(dset_dir)
        self.data.HP_gpu = gpu
        self.model = SeqModel(self.data)
        self.model.load_state_dict(torch.load(self.model_dir))

    def change_inlines(self, text):
        self.data.inference_single_with_gaz(text)

    def load_model_inference(self, seg=True):
        #self.model = SeqModel(self.data)
        return inference(self.data, self.model)
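
A hypothetical call sequence for this class; the file paths and input string are placeholders, and load_data_setting and inference must come from the surrounding project.

# Hypothetical usage; the paths below are placeholders, not files shipped with the repo.
tagger = Tagger("save/model.state", "save/data.dset", gpu=False, seg=True)
tagger.change_inlines("raw sentence to tag")  # stage the text in self.data
pred = tagger.load_model_inference()          # run the project's inference()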
Example #4
def load_model_decode(model_dir, data, name, gpu, seg=True):
    data.HP_gpu = gpu
    print ("Load Model from file: ", model_dir)
    model = SeqModel(data)
    ## load model need consider if the model trained in GPU and load in CPU, or vice versa
    # if not gpu:
    #     model.load_state_dict(torch.load(model_dir), map_location=lambda storage, loc: storage)
    #     # model = torch.load(model_dir, map_location=lambda storage, loc: storage)
    # else:
    model.load_state_dict(torch.load(model_dir))
    # model = torch.load(model_dir)
    
    print("Decode %s data ..."%(name))
    start_time = time.time()
    speed, acc, p, r, f, pred_results = evaluate(data, model, name)
    end_time = time.time()
    time_cost = end_time - start_time
    if seg:
        print("%s: time:%.2fs, speed:%.2fst/s; acc: %.4f, p: %.4f, r: %.4f, f: %.4f"%(name, time_cost, speed, acc, p, r, f))
    else:
        print("%s: time:%.2fs, speed:%.2fst/s; acc: %.4f"%(name, time_cost, speed, acc))
    return pred_results
Example #5
def load_model_inference(model_dir, data, name, gpu, seg=True):
    data.HP_gpu = gpu
    model = SeqModel(data)
    model.load_state_dict(torch.load(model_dir))
    return inference(data, model)
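
One caveat when adapting these snippets: PyTorch modules should be switched to evaluation mode before decoding, otherwise dropout and batch-norm layers behave stochastically. The project's evaluate() and inference() may already do this internally; if not, the call is a one-liner, shown here with a placeholder network rather than the real SeqModel.

import torch
import torch.nn as nn

# Placeholder network standing in for a loaded SeqModel.
model = nn.Sequential(nn.Linear(8, 8), nn.Dropout(0.5), nn.Linear(8, 3))
model.eval()                       # freeze dropout / batch-norm behavior
with torch.no_grad():              # no gradients needed at decode time
    scores = model(torch.randn(1, 8))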
Example #6
# -*- coding: utf-8 -*-