Example #1
0
            dim_embedding=args.embedding_dim,
            vocab_size=train_data_engine.tokenizer.get_vocab_size(),
            attr_vocab_size=attr_vocab_size,
            n_layers=args.n_layers,
            bidirectional=args.bidirectional,
            model_dir=args.model_dir,
            log_dir=args.log_dir,
            is_load=args.is_load,
            replace_model=args.replace_model,
            device=device,
            dir_name=args.dir_name,
            with_intent=args.with_intent)

# Optionally load a pretrained language model; stays None when no
# --lm_model_dir was supplied on the command line.
lm = None
if args.lm_model_dir:
    # NOTE(review): LM.load_pretrained presumably restores weights from the
    # given directory using both data engines' vocabularies — confirm in LM.
    lm = LM.load_pretrained(args.lm_model_dir, train_data_engine,
                            test_data_engine, device)

made = None
if args.made_model_dir:
    # made = Marginal.load_pretrained(
    #     args.made_model_dir,
    #     device=device
    # )
    made = Marginal(
        batch_size=args.batch_size,
        optimizer=args.optimizer,
        learning_rate=args.learning_rate,
        train_data_engine=train_data_engine,
        test_data_engine=test_data_engine,
        dim_hidden=args.hidden_size,
        dim_embedding=args.embedding_dim,
Example #2
0
File: run_lm.py — Project: Fengsss/DuaLUG
    regen=args.regen,
    train=False)

# Load the vocabulary tables produced during preprocessing. The pickle
# holds four objects: word vocab / reverse vocab and attribute-token
# vocab / reverse token vocab.
# NOTE(review): pickle.load executes arbitrary code for untrusted input —
# only load vocab files generated by this project's own preprocessing.
# Fix: the original used a bare open() inside pickle.load, leaking the
# file handle; a with-block guarantees it is closed.
with open(args.vocab_file, 'rb') as vocab_file:
    vocab, rev_vocab, token_vocab, rev_token_vocab = pickle.load(vocab_file)
attr_vocab_size = len(token_vocab)
# +4 accounts for special tokens (presumably PAD/BOS/EOS/UNK — confirm
# against the preprocessing/tokenizer code).
vocab_size = args.vocab_size + 4

# Construct the language model from the CLI arguments. Note vocab_size is
# the value computed above (args.vocab_size + 4), not a tokenizer-derived
# size as in the Example #1 fragment.
model = LM(batch_size=args.batch_size,
           optimizer=args.optimizer,
           learning_rate=args.learning_rate,
           train_data_engine=train_data_engine,
           test_data_engine=test_data_engine,
           dim_hidden=args.hidden_size,
           dim_embedding=args.embedding_dim,
           vocab_size=vocab_size,
           n_layers=args.n_layers,
           model_dir=args.model_dir,
           log_dir=args.log_dir,
           is_load=args.is_load,  # when True, LM presumably restores saved weights — confirm in LM
           replace_model=args.replace_model,
           device=device,
           dir_name=args.dir_name)

# Record the model configuration (one "name: value" line per CLI argument)
# next to the model so the run can be reproduced later. Skipped when
# resuming from a saved model, whose config file already exists.
# Fix: removed the redundant f.close() — the with-block already closes
# the file on exit.
if not args.is_load:
    with open(os.path.join(model.model_dir, "lm_config"), "w+") as f:
        for arg in vars(args):
            f.write("{}: {}\n".format(arg, str(getattr(args, arg))))