Example #1
N = 20
figsize = (10, 8)
axis_off = True
# ***************************************************************************

model_name_norm = 'FT' if model_name != 'SIMPLE' else model_name
path_man = PathManager(dataset=dataset_name,
                       d_type=dataset_subtype,
                       model_name=model_name_norm,
                       version=version)
################################################
#----------------------Load model parameters------------------
################################################

if model_name == 'SIMPLE':
    model_cfg = TrainingConfigManager(path_man.Doc() + 'config.json')
else:
    model_cfg = TrainingConfigManager('../run/runConfig.json')

modelParams = model_cfg.modelParams()

dataset = SeqFileDataset(path_man.FileData(), path_man.FileSeqLen(), N=N)
dataloader = DataLoader(dataset,
                        batch_size=N,
                        collate_fn=batchSequenceWithoutPad)
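
# A minimal sketch of what a collate_fn such as batchSequenceWithoutPad might do
# (hypothetical -- the real helper belongs to this repository and is not shown here).
# A collate_fn receives the raw list of samples fetched for one batch and assembles
# the batch itself, which lets variable-length sequences be grouped without padding:
def _collate_without_pad_sketch(samples):
    # assume each sample is a (sequence_tensor, length, label) triple
    seqs, lengths, labels = zip(*samples)
    return list(seqs), t.LongTensor(lengths), t.LongTensor(labels)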

if model_name != 'Random':
    state_dict = t.load(path_man.Model() + '_v%s.0' % version)
    word_matrix = state_dict['Embedding.weight']
else:
    word_matrix = t.Tensor(np.load(path_man.WordEmbedMatrix(), allow_pickle=True))
Example #2
    train_task = ImpEpisodeTask(k, qk, n, N, train_dataset,
                                cuda=True, expand=expand,
                                parallel=modelParams['data_parallel_devices'])
    val_task = ImpEpisodeTask(k, qk, n, N, val_dataset,
                              cuda=True, expand=expand,
                              parallel=modelParams['data_parallel_devices'])

else:
    train_task = ProtoEpisodeTask(k, qk, n, N, train_dataset,
                                  cuda=True, expand=expand,
                                  parallel=modelParams['data_parallel_devices'])
    val_task = ProtoEpisodeTask(k, qk, n, N, val_dataset,
                                cuda=True, expand=expand,
                                parallel=modelParams['data_parallel_devices'])

stat = TrainStatManager(model_save_path=train_path_manager.Model(),
                        stat_save_path=train_path_manager.Doc(),
                        train_report_iter=ValCycle,
                        criteria=criteria)

if RecordGradient:
    types.append('line')
    titles.append('gradient')
    xlabels.append('iterations')
    ylabels.append('gradient norm')
    legends.append(['Encoder Gradient'])
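
# If gradients are recorded, the 'gradient norm' curve presumably tracks how large the
# encoder's gradients are during training. A standard way to compute a global L2
# gradient norm in PyTorch looks like this (a generic sketch, not this repo's exact
# logging code):
def _global_grad_norm_sketch(module):
    total = 0.0
    for p in module.parameters():
        if p.grad is not None:
            total += p.grad.norm(2).item() ** 2
    return total ** 0.5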

if UseVisdom:
    plot = VisdomPlot(env_title='train monitoring',
                      types=types,
                      titles=titles,
                      xlabels=xlabels,
                      ylabels=ylabels,
                      legends=legends)
Example #3
################################################

printState('init model...')
word_matrix = t.Tensor(np.load(test_path_manager.WordEmbedMatrix(), allow_pickle=True))

loss = t.nn.NLLLoss().cuda() if loss_func_name == 'nll' else t.nn.MSELoss().cuda()
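
# NLLLoss expects log-probabilities (e.g. log_softmax output) paired with integer class
# indices, while MSELoss compares two tensors of the same shape element-wise. A quick
# CPU-only illustration of the two input conventions (generic PyTorch, not repo code):
_log_probs = t.log_softmax(t.randn(3, 5), dim=1)              # batch of 3, 5 classes
_nll_demo = t.nn.NLLLoss()(_log_probs, t.tensor([0, 2, 4]))   # integer class targets
_mse_demo = t.nn.MSELoss()(t.randn(3, 5), t.randn(3, 5))      # same-shaped targets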

model = FT(n=n, loss_fn=loss,
           pretrained_matrix=word_matrix, **modelParams)

# model.load_state_dict(state_dict)
model = model.cuda()

statParamNumber(model)
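
# statParamNumber presumably reports how many parameters the model holds; the usual
# PyTorch idiom for that kind of count is a sketch like this (not the repo's helper):
def _count_parameters_sketch(m):
    total = sum(p.numel() for p in m.parameters())
    trainable = sum(p.numel() for p in m.parameters() if p.requires_grad)
    return total, trainable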

if os.path.exists(test_path_manager.Doc()):
    deleteDir(test_path_manager.Doc())
os.mkdir(test_path_manager.Doc())
shutil.copy('../models/FT.py', test_path_manager.Doc()+"FT.py")
shutil.copy('./ftConfig.json', test_path_manager.Doc()+"config.json")
print('doc path:', test_path_manager.Doc())

stat.startTimer()

################################################
#--------------------Start testing------------------
################################################

metrics = np.zeros(4,)
with t.autograd.set_detect_anomaly(False):
    for epoch in range(TestingEpoch):
Example #4
################################################
#----------------------Define data------------------
################################################

printState('init managers...')
test_path_manager = PathManager(dataset=data_folder,
                                d_type=USED_SUB_DATASET,
                                model_name=model_name,
                                version=version)

################################################
#----------------------Load model parameters------------------
################################################

param_config_path = 'runConfig.json' if MODEL_RANDOM_STATE \
    else test_path_manager.Doc() + 'config.json'
model_cfg = TrainingConfigManager(param_config_path)

modelParams = model_cfg.modelParams()

LRDecayIters, LRDecayGamma, optimizer_type,\
weight_decay, loss_func, default_lr, lrs, \
taskBatchSize, criteria = model_cfg.trainingParams()

test_dataset = SeqFileDataset(test_path_manager.FileData(),
                              test_path_manager.FileSeqLen(), N)

expand = (loss_func == 'mse')
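
# The exact role of 'expand' is defined by the episode-task classes in this repository;
# one common reason an MSE loss needs it is that integer class labels have to be
# expanded into one-hot float targets before an element-wise comparison makes sense.
# A generic illustration of that conversion (not repo code):
_one_hot_demo = t.nn.functional.one_hot(t.tensor([1, 0, 3]), num_classes=4).float()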

if model_type in ADAPTED_MODELS:
    test_task = AdaptEpisodeTask(k, qk, n, N, test_dataset,
                                 cuda=True, expand=expand,
                                 parallel=modelParams['data_parallel_devices'])