def override_params(params, args):
    params.parse(args.parameters)

    src_vocab, src_w2idx, src_idx2w = data.load_vocabulary(args.vocabulary[0])
    tgt_vocab, tgt_w2idx, tgt_idx2w = data.load_vocabulary(args.vocabulary[1])

    params.vocabulary = {"source": src_vocab, "target": tgt_vocab}
    params.lookup = {"source": src_w2idx, "target": tgt_w2idx}
    params.mapping = {"source": src_idx2w, "target": tgt_idx2w}

    return params


def override_params(params, args):
    params.model = args.model or params.model
    params.input = args.input or params.input
    params.output = args.output or params.output
    params.vocab = args.vocabulary or params.vocab
    params.parse(args.parameters)

    src_vocab, src_w2idx, src_idx2w = data.load_vocabulary(params.vocab[0])
    tgt_vocab, tgt_w2idx, tgt_idx2w = data.load_vocabulary(params.vocab[1])

    params.vocabulary = {"source": src_vocab, "target": tgt_vocab}
    params.lookup = {"source": src_w2idx, "target": tgt_w2idx}
    params.mapping = {"source": src_idx2w, "target": tgt_idx2w}

    return params
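

# Hypothetical usage sketch (not in the original source): override_params is
# normally fed the argparse namespace built by the script's own parse_args().
# The flag names and defaults below are assumptions for illustration only.
def _example_override_usage():
    import argparse

    parser = argparse.ArgumentParser()
    parser.add_argument("--model", type=str, default=None)
    parser.add_argument("--input", type=str, default=None)
    parser.add_argument("--output", type=str, default=None)
    parser.add_argument("--vocabulary", type=str, nargs=2, default=None)
    parser.add_argument("--parameters", type=str, default="")
    args = parser.parse_args()

    # default_params() is assumed to be the same helper used by get_model below.
    params = default_params()
    return override_params(params, args)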


def get_model(args):
    model_cls = models.get_model(args.model)
    params = default_params()
    params = merge_params(params, model_cls.default_params())
    params = merge_params(params, predictor.default_params())
    params = import_params(args.dir, args.model, params)
    params.decode_batch_size = 1

    src_vocab, src_w2idx, src_idx2w = data.load_vocabulary(params.vocab[0])
    tgt_vocab, tgt_w2idx, tgt_idx2w = data.load_vocabulary(params.vocab[1])

    params.vocabulary = {"source": src_vocab, "target": tgt_vocab}
    params.lookup = {"source": src_w2idx, "target": tgt_w2idx}
    params.mapping = {"source": src_idx2w, "target": tgt_idx2w}

    torch.cuda.set_device(0)
    torch.set_default_tensor_type(torch.cuda.FloatTensor)

    # Create model
    model = model_cls(params).cuda()

    return model, params
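

# Hypothetical usage sketch (not in the original source): get_model only needs
# an object exposing `model` (the registered model name) and `dir` (the
# checkpoint directory); the values below are placeholders. Note that the
# function pins GPU 0 and switches the default tensor type to CUDA, so a CUDA
# device is assumed to be available.
def _example_get_model_usage():
    import argparse

    args = argparse.Namespace(model="transformer", dir="path/to/checkpoint")
    model, params = get_model(args)
    model.eval()  # assumed inference use, consistent with decode_batch_size = 1
    return model, params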