Example #1
# Project helpers referenced below (load_config, process_config, corpora,
# data_loaders, load_model, process_data_feed, deflatten_laed_features, ...)
# are assumed to be imported by the surrounding module.
import os
import pickle
from functools import reduce


def main(config):
    laed_config = load_config(config.model)
    laed_config.use_gpu = config.use_gpu
    laed_config = process_config(laed_config)

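    # Forward data-processing options from the wrapper config onto the LAED config.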
    setattr(laed_config, 'black_domains', config.black_domains)
    setattr(laed_config, 'black_ratio', config.black_ratio)
    setattr(laed_config, 'include_domain', True)
    setattr(laed_config, 'include_example', False)
    setattr(laed_config, 'include_state', True)
    setattr(laed_config, 'entities_file', 'NeuralDialog-ZSDG/data/stanford/kvret_entities.json')
    setattr(laed_config, 'action_match', True)
    setattr(laed_config, 'batch_size', config.batch_size)
    setattr(laed_config, 'data_dir', config.data_dir)
    setattr(laed_config, 'include_eod', False) # for StED model
    setattr(laed_config, 'domain_description', config.domain_description)

    if config.process_seed_data:
        assert config.corpus_client[:3] == 'Zsl', 'Incompatible corpus_client for --process_seed_data flag'
    corpus_client = getattr(corpora, config.corpus_client)(laed_config)
    if config.vocab:
        corpus_client.vocab, corpus_client.rev_vocab, corpus_client.unk_token = load_vocab(config.vocab)
    prepare_dirs_loggers(config, os.path.basename(__file__))

    dial_corpus = corpus_client.get_corpus()
    # train_dial, valid_dial, test_dial = dial_corpus['train'], dial_corpus['valid'], dial_corpus['test']
    # all_dial = train_dial + valid_dial + test_dial
    # all_utts = reduce(lambda x, y: x + y, all_dial, [])

    model = load_model(config.model, config.model_name, config.model_type, corpus_client=corpus_client)

    if config.use_gpu:
        model.cuda()

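    # Encode each data split with the pre-trained model and dump the features to disk.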
    for dataset_name in ['train', 'valid', 'test']:
        dataset = dial_corpus[dataset_name]
        feed_data = dataset if config.model_type == 'dialog' else reduce(lambda x, y: x + y, dataset, [])

        # create the data loader that feeds the deep model
        main_feed = getattr(data_loaders, config.data_loader)("Test", feed_data, laed_config)

        features = process_data_feed(model, main_feed, laed_config)
        if config.data_loader == 'SMDDialogSkipLoader':
            pad_mode = 'start_end'
        elif config.data_loader == 'SMDDataLoader':
            pad_mode = 'start'
        else:
            pad_mode = None
        features = deflatten_laed_features(features, dataset, pad_mode=pad_mode)
        assert sum(map(len, dataset)) == sum(map(lambda x: x.shape[0], features))

        if not os.path.exists(config.out_folder):
            os.makedirs(config.out_folder)
        with open(os.path.join(config.out_folder, 'dialogs_{}.pkl'.format(dataset_name)), 'wb') as result_out:
            pickle.dump(features, result_out)

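    # Optionally encode and dump the seed responses as well.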
    if config.process_seed_data:
        seed_utts = corpus_client.get_seed_responses(utt_cnt=len(corpus_client.domain_descriptions))
        seed_feed = data_loaders.PTBDataLoader("Seed", seed_utts, laed_config)
        seed_features = process_data_feed(model, seed_feed, laed_config)
        with open(os.path.join(config.out_folder, 'seed_utts.pkl'), 'wb') as result_out:
            pickle.dump(seed_features, result_out)
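
The per-split feature files written above can be read back for downstream use. A minimal sketch (the helper name is illustrative and not part of the original code), assuming the files were produced by the pickle.dump calls in main():

import os
import pickle


def load_laed_features(out_folder):
    # Read the per-split feature dumps written by main() above.
    features = {}
    for split in ('train', 'valid', 'test'):
        path = os.path.join(out_folder, 'dialogs_{}.pkl'.format(split))
        with open(path, 'rb') as result_in:
            features[split] = pickle.load(result_in)
    # Seed utterances are only written when --process_seed_data is set.
    seed_path = os.path.join(out_folder, 'seed_utts.pkl')
    if os.path.exists(seed_path):
        with open(seed_path, 'rb') as result_in:
            features['seed'] = pickle.load(result_in)
    return features
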
Example #2
# Project helpers referenced below (corpora, data_loaders, evaluators, models,
# engine, utt_utils, get_time, prepare_dirs_loggers) are assumed to be imported
# by the surrounding module.
import os

import torch


def main(config):
    prepare_dirs_loggers(config, os.path.basename(__file__))

    corpus_client = corpora.PTBCorpus(config)

    dial_corpus = corpus_client.get_corpus()
    train_dial, valid_dial, test_dial = dial_corpus['train'],\
                                        dial_corpus['valid'],\
                                        dial_corpus['test']

    evaluator = evaluators.BleuEvaluator("CornellMovie")

    # create the data loaders that feed the deep models
    train_feed = data_loaders.PTBDataLoader("Train", train_dial, config)
    valid_feed = data_loaders.PTBDataLoader("Valid", valid_dial, config)
    test_feed = data_loaders.PTBDataLoader("Test", test_dial, config)
    #model = sent_models.DiVAE(corpus_client, config)
    model = models.DirVAE(corpus_client, config)
    model.apply(lambda m: [
        torch.nn.init.uniform_(p.data, -1.2 * config.init_w, 1.2 * config.init_w)
        for p in m.parameters()
    ])
    model.logvar_bn.weight.fill_(1)
    model.mean_bn.weight.fill_(1)
    model.decoder_bn.weight.fill_(1)
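    # Choose output paths: reuse an existing session directory when only running inference.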
    if config.forward_only:
        test_file = os.path.join(
            config.log_dir, config.load_sess,
            "{}-test-{}.txt".format(get_time(), config.gen_type))
        dump_file = os.path.join(config.log_dir, config.load_sess,
                                 "{}-z.pkl".format(get_time()))
        model_file = os.path.join(config.log_dir, config.load_sess, "model")
    else:
        test_file = os.path.join(
            config.session_dir,
            "{}-test-{}.txt".format(get_time(), config.gen_type))
        dump_file = os.path.join(config.session_dir,
                                 "{}-z.pkl".format(get_time()))
        model_file = os.path.join(config.session_dir, "model")

    if config.use_gpu:
        model.cuda()

    if not config.forward_only:
        try:
            engine.train(model,
                         train_feed,
                         valid_feed,
                         test_feed,
                         config,
                         evaluator,
                         gen=utt_utils.generate)
        except KeyboardInterrupt:
            print("Training stopped by keyboard.")

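    # Evaluation: reload the saved model and decode the test set with a fixed batch size.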
    config.batch_size = 50
    model.load_state_dict(torch.load(model_file))

    engine.validate(model, test_feed, config)
    utt_utils.sweep(model, test_feed, config, num_batch=50)

    with open(dump_file, "wb") as f:
        print("Dumping test to {}".format(dump_file))
        utt_utils.dump_latent(model, test_feed, config, f, num_batch=None)

    with open(test_file, "wb") as f:
        print("Saving test to {}".format(test_file))
        utt_utils.generate(model,
                           test_feed,
                           config,
                           evaluator,
                           num_batch=None,
                           dest_f=f)
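
The latent codes dumped to dump_file can later be inspected. A minimal sketch (hypothetical helper, not part of the original code), assuming utt_utils.dump_latent pickles a single object into the open file handle; the exact payload is model-specific:

import pickle


def load_latent_dump(path):
    # Load whatever object utt_utils.dump_latent wrote (assumes a single pickled object).
    with open(path, 'rb') as f:
        return pickle.load(f)

# Example: z = load_latent_dump('logs/<load_sess>/<time>-z.pkl')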