Example #1
import pickle
from os.path import join

# `cur_dir` and `additional_validate` are assumed to be defined in the surrounding module.
def batch_valid_with_data(opt, round_index, file_index, valid_filename, constrain_=True):
    model_filename = join(cur_dir, '{}_{}_model.pkl'.format(round_index, file_index))
    data_agent_filename = join(cur_dir, '{}_{}_data_agent.pkl'.format(round_index, file_index))
    with open(model_filename, 'rb') as f:
        model = pickle.load(f)
    with open(data_agent_filename, 'rb') as f:
        data_agent = pickle.load(f)
    # Validate on the given file without computing hits metrics and return the performance.
    perf = additional_validate(opt, model, data_agent, valid_filename, constrain_=constrain_, no_hits=True)
    return perf
Example #2
import os
import pickle
from os.path import join

# `cur_dir` and `additional_validate` are assumed to be defined in the surrounding module.
def batch_valid(opt, round_index, constrain_=True):
    perfs = []
    # Walk consecutive file indices for this round until no more saved models are found.
    for i in range(100000):
        model_filename = join(cur_dir, '{}_{}_model.pkl'.format(round_index, i))
        if not os.path.exists(model_filename):
            break
        data_agent_filename = join(cur_dir, '{}_{}_data_agent.pkl'.format(round_index, i))
        valid_filename = join(cur_dir, '{}_{}_valid.pkl'.format(round_index, i))
        with open(model_filename, 'rb') as f:
            model = pickle.load(f)
        with open(data_agent_filename, 'rb') as f:
            data_agent = pickle.load(f)
        # Validate the model for this index and collect its performance.
        perf = additional_validate(opt, model, data_agent, valid_filename, constrain_=constrain_)
        print('batch_valid {} {}'.format(round_index, i))
        perfs.append(perf)
    return perfs