Example #1
0
def main(exp_const,data_const,model_const):
    """Set up and run a training experiment.

    Creates the experiment directories, saves the constants for
    reproducibility, builds the encoder-decoder model on the GPU,
    checkpoints the untrained weights, and launches training on the
    visual-features dataset.
    """
    # Directory setup and logger configuration.
    io.mkdir_if_not_exists(exp_const.exp_dir, recursive=True)
    io.mkdir_if_not_exists(exp_const.log_dir)
    io.mkdir_if_not_exists(exp_const.model_dir)
    configure(exp_const.log_dir)

    # Persist every constant group alongside the experiment outputs.
    constants = {'exp': exp_const, 'data': data_const, 'model': model_const}
    save_constants(constants, exp_const.exp_dir)

    print('Creating network ...')
    model = Model()
    model.const = model_const
    model.encoder = Encoder(model.const.encoder).cuda()
    model.decoder = Decoder(model.const.decoder).cuda()

    # Snapshot the untrained weights; the "-1" suffix marks the
    # pre-training checkpoint (later checkpoints presumably use the
    # training step number — confirm against train_model).
    for tag, module in (('encoder', model.encoder),
                        ('decoder', model.decoder)):
        ckpt_path = os.path.join(exp_const.model_dir, f'{tag}_{-1}')
        torch.save(module.state_dict(), ckpt_path)

    print('Creating dataloader ...')
    dataloader = DataLoader(
        VisualFeaturesDataset(data_const),
        batch_size=exp_const.batch_size,
        shuffle=True)

    train_model(model,dataloader,exp_const)
Example #2
0
def main(exp_const, data_const, model_const):
    """Extract visual features with a trained encoder and save them.

    Restores the encoder checkpoint selected by ``model_const.model_num``,
    runs it over the visual-features dataset, and writes the features
    (plus their per-dimension mean) to an h5py file and the word-to-index
    mapping to a JSON file in the experiment directory.
    """
    print('Creating network ...')
    model = Model()
    model.const = model_const
    model.encoder = Encoder(model.const.encoder).cuda()
    # Restore the encoder weights chosen by model_num.
    encoder_path = os.path.join(
        exp_const.model_dir,
        f'encoder_{model.const.model_num}')
    model.encoder.load_state_dict(torch.load(encoder_path))

    print('Creating dataloader ...')
    dataloader = DataLoader(
        VisualFeaturesDataset(data_const),
        batch_size=exp_const.batch_size,
        shuffle=True)

    print('Get features ...')
    features = get_visual_features(model, dataloader, exp_const)

    print('Save features h5py ...')
    # One row per chunk keeps single-word feature lookups cheap.
    features_path = os.path.join(exp_const.exp_dir, 'word_features.h5py')
    with h5py.File(features_path, 'w') as word_features_h5py:
        word_features_h5py.create_dataset(
            'features',
            data=features,
            chunks=(1, features.shape[1]))
        word_features_h5py.create_dataset(
            'mean',
            data=np.mean(features, axis=0))

    print('Save features word idx json ...')
    word_to_idx_json = os.path.join(exp_const.exp_dir, 'word_to_idx.json')
    io.dump_json_object(dataloader.dataset.word_to_idx, word_to_idx_json)