def train(args):
    input_lang, output_lang, pairs = prepareData(args)
    print(random.choice(pairs))

    # Record the hyperparameters and vocabularies alongside the weights so
    # the checkpoint is self-describing.
    model = {}
    model['hidden_size'] = 1000
    model['dropout'] = 0.1
    model['input_lang'] = input_lang
    model['output_lang'] = output_lang
    model['max_length'] = max(input_lang.max_length, output_lang.max_length) + 2
    print('Max length: {}'.format(model['max_length']))

    encoder1 = EncoderRNN(input_lang.n_words, model['hidden_size']).to(getDevice())
    encoder1.train()
    attn_decoder1 = AttnDecoderRNN(model['hidden_size'], output_lang.n_words,
                                   dropout_p=model['dropout'],
                                   max_length=model['max_length']).to(getDevice())
    attn_decoder1.train()

    # Sample n_iters training pairs (with replacement) and convert them to tensors.
    n_iters = 30000
    training_pairs = [
        tensorsFromPair(input_lang, output_lang, random.choice(pairs))
        for _ in range(n_iters)
    ]
    trainIters(training_pairs, encoder1, attn_decoder1, n_iters,
               print_every=1000, optim=args.optim,
               learning_rate=args.learning_rate,
               max_length=model['max_length'])

    print('saving models...')
    model['encoder_state'] = encoder1.state_dict()
    model['decoder_state'] = attn_decoder1.state_dict()
    torch.save(model,
               "data/{}_model_checkpoint.pth".format(args.phase.split('_')[-1]))
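
######################################################################
# For reference, a minimal sketch of reloading the checkpoint written by
# train() above. `load_checkpoint` is a hypothetical helper, not part of
# the original code; it assumes the same EncoderRNN, AttnDecoderRNN, and
# getDevice() helpers, and the `model` dict keys saved above.

def load_checkpoint(path):
    """Rebuild encoder/decoder in eval mode from a train() checkpoint (sketch)."""
    checkpoint = torch.load(path, map_location=getDevice())
    encoder = EncoderRNN(checkpoint['input_lang'].n_words,
                         checkpoint['hidden_size']).to(getDevice())
    encoder.load_state_dict(checkpoint['encoder_state'])
    encoder.eval()  # eval mode disables dropout for inference
    decoder = AttnDecoderRNN(checkpoint['hidden_size'],
                             checkpoint['output_lang'].n_words,
                             dropout_p=checkpoint['dropout'],
                             max_length=checkpoint['max_length']).to(getDevice())
    decoder.load_state_dict(checkpoint['decoder_state'])
    decoder.eval()
    return checkpoint, encoder, decoder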
embedding.load_state_dict(embedding_sd)
# Initialize the encoder and decoder models
encoder = EncoderRNN(hidden_size, embedding, encoder_n_layers, dropout)
decoder = LuongAttnDecoderRNN(attn_model, embedding, hidden_size,
                              voc.num_words, decoder_n_layers, dropout)
if loadFilename:
    encoder.load_state_dict(encoder_sd)
    decoder.load_state_dict(decoder_sd)
# Move the models to the appropriate device
encoder = encoder.to(device)
decoder = decoder.to(device)
print('Models built and ready to go!')

######################################################################
# Put the models in training mode, which enables dropout
encoder.train()
decoder.train()

# Initialize the optimizers
print('Building optimizers ...')
encoder_optimizer = optim.Adam(encoder.parameters(), lr=learning_rate)
decoder_optimizer = optim.Adam(decoder.parameters(),
                               lr=learning_rate * decoder_learning_ratio)
if loadFilename:
    encoder_optimizer.load_state_dict(encoder_optimizer_sd)
    decoder_optimizer.load_state_dict(decoder_optimizer_sd)

# Begin training
print("Starting Training!")
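
######################################################################
# Caveat when resuming from loadFilename: optimizer.load_state_dict can
# leave Adam's internal state tensors on the CPU (e.g. when the checkpoint
# was loaded with map_location='cpu'), which breaks the first optimizer
# step on a GPU. A small defensive sketch, assuming the same `device`
# used for the models above:

if loadFilename:
    for optimizer in (encoder_optimizer, decoder_optimizer):
        for state in optimizer.state.values():
            for k, v in state.items():
                if isinstance(v, torch.Tensor):
                    state[k] = v.to(device)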