# NOTE(review): this chunk is a whitespace-mangled notebook paste; statements have
# been reflowed onto separate lines with all code tokens unchanged. Indentation of
# the `if load_model` body is reconstructed (lost in the paste) — confirm against
# the original notebook.
# Tail of a model-constructor call whose opening line is outside this chunk
# (final positional args appear to be the device and the activation name 'gelu').
device, 'gelu' )

# Load the Swedish SentencePiece model into the (module-level) `sp` processor.
sp.Load(swedish_model)

# Cross-entropy loss that skips padding positions (pad id taken from SentencePiece).
criterion = nn.CrossEntropyLoss(ignore_index=sp.pad_id())
optimizer = optim.Adam(model.parameters(), lr=learning_rate)
# Multiplicative LR decay: every 1 epoch-step, lr <- lr * 0.95.
scheduler = torch.optim.lr_scheduler.StepLR(optimizer, 1.0, gamma=0.95)

# Optionally restore model/optimizer state from a checkpoint.
# Hard-coded to False here, so the branch below is currently dead.
load_model = False
if load_model == True:
    # map_location='cpu' so the checkpoint loads regardless of where it was saved.
    checkpoint = torch.load(model_path, map_location='cpu')
    model.load_state_dict(checkpoint['state_dict'])
    optimizer.load_state_dict(checkpoint['optimizer'])
# Move the model to the target device unconditionally.
# NOTE(review): placement outside the `if` is assumed (indentation was lost) —
# it must run even when no checkpoint is loaded, so this reading is the sensible one.
model.to(device)

# Fixed Northern Sami sentences used as qualitative translation probes during training.
sentence = "Sámediggi lea sámiid álbmotválljen orgána Norggas."
sentence2 = "Deaŧalaš lea gozihit álgoálbmotoli nationála ja riikkaidgaskasaš forain."

# Per-epoch metric accumulators: BLEU-style scores, train/val losses, train/val perplexities.
scores = []
e_losses = []
e_val_losses = []
e_ppl = []
e_val_ppl = []
# presumably early-stopping patience and evaluation interval — verify against the training loop
threshold = 5
step = 5
# NOTE(review): whitespace-mangled notebook paste, reflowed with all code tokens
# unchanged; `# [NN] START/END` are the original notebook-cell boundary markers.
# The `if load_model` body indentation is reconstructed — confirm against the notebook.
# Tail of the constructor call building `model_synth_swe` (head is outside this chunk;
# final positional args appear to be max sequence length, device, activation 'gelu').
max_len, device, 'gelu')
# [30] END

# [31] START
# Load the Swedish SentencePiece model into the (module-level) `sp` processor.
sp.Load(swedish_model)
# Cross-entropy loss ignoring padding positions; Adam + stepwise LR decay (x0.95),
# mirroring the setup used for the other model in this file.
criterion = nn.CrossEntropyLoss(ignore_index=sp.pad_id())
optimizer = optim.Adam(model_synth_swe.parameters(), lr=learning_rate)
scheduler = torch.optim.lr_scheduler.StepLR(optimizer, 1.0, gamma=0.95)
# [31] END

# [32] START
# Optionally restore model/optimizer state from a checkpoint.
# `load_model` is defined in an earlier cell — its value is not visible here.
if load_model == True:
    # map_location='cpu' so the checkpoint loads regardless of where it was saved.
    checkpoint = torch.load(model_path, map_location='cpu')
    model_synth_swe.load_state_dict(checkpoint['state_dict'])
    optimizer.load_state_dict(checkpoint['optimizer'])
# Move the model to the target device unconditionally.
# NOTE(review): placement outside the `if` is assumed (indentation was lost) —
# it must run even when no checkpoint is loaded.
model_synth_swe.to(device)
# [32] END

# Qualitative smoke tests: translate three probe sentences (`sent1..sent3`, defined
# elsewhere) with the Sami/Swedish SentencePiece models.
# [33] START
translate_sentence(model_synth_swe, sent1, device, sami_model, swedish_model)
# [33] END
# [34] START
translate_sentence(model_synth_swe, sent2, device, sami_model, swedish_model)
# [34] END
# [35] START
translate_sentence(model_synth_swe, sent3, device, sami_model, swedish_model)
# [35] END
# [36] START