def create_model(model_class, dicts, gen_out_size):
    """Build a model and its optimizer for the configured ``opt.data_type``.

    Args:
        model_class: callable that assembles the final model from the
            encoder(s), decoder, generator and ``opt``.
        dicts: mapping with "src" and "tgt" vocabulary dictionaries.
        gen_out_size: output size of the generator's linear projection.

    Returns:
        Tuple ``(model, optim)`` — the initialized model and its optimizer.

    Raises:
        ValueError: if ``opt.data_type`` is not 'code', 'text' or 'hybrid'.
    """
    if opt.data_type == 'code':
        encoder = lib.TreeEncoder(opt, dicts["src"])
        decoder = lib.TreeDecoder(opt, dicts["tgt"])
    elif opt.data_type == 'text':
        encoder = lib.Encoder(opt, dicts["src"])
        decoder = lib.TreeDecoder(opt, dicts["tgt"])
    elif opt.data_type == 'hybrid':
        # Hybrid uses two encoders (tree + sequence) over the same source dict.
        code_encoder = lib.TreeEncoder(opt, dicts["src"])
        text_encoder = lib.Encoder(opt, dicts["src"])
        decoder = lib.HybridDecoder(opt, dicts["tgt"])
    else:
        # Fail fast with a clear message instead of a later NameError on
        # the unbound encoder/decoder variables.
        raise ValueError("unsupported opt.data_type: %r" % (opt.data_type,))

    # Use memory efficient generator when output size is large and
    # max_generator_batches is smaller than batch_size.
    if opt.max_generator_batches < opt.batch_size and gen_out_size > 1:
        generator = lib.MemEfficientGenerator(nn.Linear(opt.rnn_size, gen_out_size), opt)
    else:
        generator = lib.BaseGenerator(nn.Linear(opt.rnn_size, gen_out_size), opt)

    if opt.data_type in ('code', 'text'):
        model = model_class(encoder, decoder, generator, opt)
    else:
        # 'hybrid' — the only remaining value after the validation above.
        model = model_class(code_encoder, text_encoder, decoder, generator, opt)

    init(model)
    optim = create_optim(model)
    return model, optim
def create_model(model_class, dicts, gen_out_size):
    """Assemble a plain encoder/decoder model and create its optimizer.

    Args:
        model_class: callable that builds the model from encoder, decoder,
            generator and ``opt``.
        dicts: mapping with "src" and "tgt" vocabulary dictionaries.
        gen_out_size: output size of the generator's linear projection.

    Returns:
        Tuple ``(model, optim)``.
    """
    src_encoder = lib.Encoder(opt, dicts["src"])
    tgt_decoder = lib.Decoder(opt, dicts["tgt"])
    projection = nn.Linear(opt.rnn_size, gen_out_size)
    generator = lib.BaseGenerator(projection, opt)

    model = model_class(src_encoder, tgt_decoder, generator, opt)
    init(model)
    return model, create_optim(model)
def create_model(model_class, dicts, gen_out_size):
    """Build an encoder/decoder model, picking a generator by memory budget.

    Args:
        model_class: callable that builds the model from encoder, decoder,
            generator and ``opt``.
        dicts: mapping with "src" and "tgt" vocabulary dictionaries.
        gen_out_size: output size of the generator's linear projection.

    Returns:
        Tuple ``(model, optim)``.
    """
    encoder = lib.Encoder(opt, dicts["src"])
    decoder = lib.Decoder(opt, dicts["tgt"])
    output_layer = nn.Linear(opt.rnn_size, gen_out_size)

    # Prefer the memory-efficient generator when the output size is large
    # and max_generator_batches is smaller than batch_size.
    use_mem_efficient = (
        opt.max_generator_batches < opt.batch_size and gen_out_size > 1
    )
    if use_mem_efficient:
        generator = lib.MemEfficientGenerator(output_layer, opt)
    else:
        generator = lib.BaseGenerator(output_layer, opt)

    model = model_class(encoder, decoder, generator, opt)
    init(model)
    optim = create_optim(model)
    return model, optim