from utils.data_reader import Personas
from model.transformer import Transformer
from model.common_layer import evaluate
from utils import config
from tqdm import tqdm
import numpy as np
import sys


def count_parameters(model):
    """Return the number of trainable (requires_grad) parameters in *model*."""
    return sum(p.numel() for p in model.parameters() if p.requires_grad)


# Load the dataset splits; batch size comes from the global experiment config.
p = Personas()
data_loader_tr, data_loader_val, data_loader_test = \
    p.get_all_data(batch_size=config.batch_size)

if config.test:
    # Evaluation-only mode: restore the saved checkpoint and score the test
    # split, then stop — no training happens in this mode.
    print("Test model", config.model)
    model = Transformer(p.vocab, model_file_path=config.save_path, is_eval=True)
    evaluate(model, data_loader_test, model_name=config.model, ty='test')
    sys.exit(0)

model = Transformer(p.vocab)
print("MODEL USED", config.model)
print("TRAINABLE PARAMETERS", count_parameters(model))

# Sentinel upper bound for best validation perplexity; presumably overwritten
# by the first measured validation PPL later in the file — TODO confirm.
best_ppl = 1000
cnt = 0
from model.common_layer import evaluate
from utils import config
import torch
import torch.nn as nn
import torch.nn.functional as F
from tqdm import tqdm
import os
import time
import numpy as np
import sys


def count_parameters(model):
    """Return the number of trainable (requires_grad) parameters in *model*."""
    return sum(p.numel() for p in model.parameters() if p.requires_grad)


# NOTE(review): `Personas` and `Transformer` are not imported in this span;
# they appear to rely on imports earlier in this file — confirm.
p = Personas()
data_loader_tr, data_loader_val, data_loader_test = p.get_all_data(batch_size=config.batch_size)

if config.test:
    # Evaluation-only mode: restore the saved checkpoint and score the test
    # split, then stop — no training happens in this mode.
    print("Test model", config.model)
    model = Transformer(p.vocab, model_file_path=config.save_path, is_eval=True)
    evaluate(model, data_loader_test, model_name=config.model, ty='test')
    sys.exit(0)

model = Transformer(p.vocab)
print("MODEL USED", config.model)
print("TRAINABLE PARAMETERS", count_parameters(model))

# Sentinel upper bound for best validation perplexity; presumably overwritten
# by the first measured validation PPL inside the epoch loop — TODO confirm.
best_ppl = 1000
cnt = 0

# NOTE(review): the epoch loop body appears to continue beyond this chunk;
# only the leading print is visible here.
for e in range(config.epochs):
    print("Epoch", e)