Example #1
import torch
from torch_geometric.data import Data, InMemoryDataset

class CustomDataset(InMemoryDataset):
    def __init__(self, H, dataset, transform=None):
        super().__init__('.', transform, None, None)
        # Derive a fixed edge_index, shared by every sample, from H.
        adj = H.to_sparse()
        edge_index = adj.indices()
        data_list = []

        # Consecutive pairs in `dataset` form (input, target) samples.
        for i in range(0, len(dataset), 2):
            data = Data(edge_index=edge_index)
            data.x = dataset[i].t()
            data.y = dataset[i + 1].t()
            data_list.append(data)

        self.data, self.slices = self.collate(data_list)
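This pattern, one fixed graph paired with per-sample features inside an InMemoryDataset, might be driven as sketched below. The toy graph, the shapes, and the variable names are illustrative assumptions rather than the original repo's code, and a reasonably recent torch_geometric is assumed.

    import torch
    from torch_geometric.loader import DataLoader

    H = torch.eye(4)  # toy 4-node adjacency (self-loops only), an assumption
    samples = [torch.rand(1, 4) for _ in range(6)]  # 3 (input, target) pairs
    ds = CustomDataset(H, samples)
    loader = DataLoader(ds, batch_size=2)
    for batch in loader:
        print(batch.x.shape, batch.y.shape)  # node features stacked per batch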
Example #2
import torch
from torch_geometric.data import Data, InMemoryDataset

class CustomDataset(InMemoryDataset):
    def __init__(self, H, dataset, x, fac_num, transform=None):
        super().__init__('.', transform, None, None)
        # Derive a fixed edge_index, shared by every sample, from H.
        adj = H.to_sparse()
        edge_index = adj.indices()
        data_list = []

        for row in dataset:
            data = Data(edge_index=edge_index)
            # Pad each sample with `fac_num` zero rows so the feature
            # matrix also covers the extra nodes in H.
            row = torch.cat([row.unsqueeze(0).t(),
                             torch.zeros(fac_num, 1)],
                            dim=0)
            data.x = row
            data.y = x.t()  # every sample shares the same target x
            data_list.append(data)

        self.data, self.slices = self.collate(data_list)
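Compared with Example #1, each sample here carries a single feature column padded with `fac_num` zero rows (presumably placeholders for factor-node features, judging by the name), and every sample shares the same target `x` instead of pairing each input with its own target.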
Example #3
    # Assumes the surrounding scope provides `logger`, `config`, `ckpt_dir`,
    # `log_path`, `device`, `Dataset`, and `init_model`, plus the imports
    # `import os, logging`, `import torch.nn as nn`, `import torch.optim as optim`.
    best_ckpt_path = os.path.join(ckpt_dir, 'best.ckpt')
    last_ckpt_path = os.path.join(ckpt_dir, 'last.ckpt')

    logger.setLevel(logging.INFO)
    handler = logging.FileHandler(log_path, 'w')
    handler.setLevel(logging.INFO)
    formatter = logging.Formatter('%(asctime)s: %(message)s',
                                  datefmt='%Y/%m/%d %H:%M:%S')
    handler.setFormatter(formatter)
    logger.addHandler(handler)

    # getfloat + int() tolerates values written as e.g. "1e4" in the config.
    epochs = int(config.getfloat("hyperparameters", "max_epochs"))
    learning_rate = config.getfloat("hyperparameters", "learning_rate")
    weight_decay = config.getfloat("hyperparameters", "weight_decay")

    dataset = Dataset(config)

    model = init_model(config, device)
    logger.info(model)
    # Materialize the trainable parameters as a list: a bare `filter` object
    # would be exhausted by the numel() sum below, leaving the optimizer empty.
    parameters = [p for p in model.parameters() if p.requires_grad]
    total_parameters = sum(p.numel() for p in parameters)
    logger.info("| there are totally {} trainable parameters".format(total_parameters))

    # Log the parameter names for debugging.
    for name, param in model.named_parameters():
        logger.info(name)

    loss_func = nn.MSELoss()
    # weight_decay is read from the config above, so pass it through to Adam.
    optimizer = optim.Adam(parameters, lr=learning_rate, weight_decay=weight_decay)

    least_loss = float('inf')  # best (lowest) loss seen so far
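Example #3 cuts off right after the setup. A minimal sketch of the epoch loop this prepares for is below; the way `dataset` is iterated and the model call signature are assumptions for illustration, not the original repo's code.

    for epoch in range(epochs):  # also requires `import torch` for torch.save
        model.train()
        total_loss = 0.0
        for inputs, targets in dataset:  # assumed (input, target) batches
            optimizer.zero_grad()
            loss = loss_func(model(inputs), targets)
            loss.backward()
            optimizer.step()
            total_loss += loss.item()
        logger.info('epoch %d: loss %.6f', epoch, total_loss)
        torch.save(model.state_dict(), last_ckpt_path)
        if total_loss < least_loss:  # keep the best checkpoint separately
            least_loss = total_loss
            torch.save(model.state_dict(), best_ckpt_path)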
Example #4
    # Assumes the same surrounding scope as Example #3: `logger`, `config`,
    # `ckpt_dir`, `device`, `Dataset`, `init_model`, and the os/logging/nn/optim imports.
    log_path = os.path.join(ckpt_dir, 'train.log')
    ckpt_path = os.path.join(ckpt_dir, 'best.ckpt')

    logger.setLevel(logging.INFO)
    handler = logging.FileHandler(log_path, 'w')
    handler.setLevel(logging.INFO)
    formatter = logging.Formatter('%(asctime)s: %(message)s',
                                  datefmt='%Y/%m/%d %H:%M:%S')
    handler.setFormatter(formatter)
    logger.addHandler(handler)

    epochs = int(config.getfloat("hyperparameters", "max_epochs"))
    learning_rate = config.getfloat("hyperparameters", "learning_rate")
    weight_decay = config.getfloat("hyperparameters", "weight_decay")

    dataset = Dataset(config)

    model = init_model(config, dataset.wordvec_weights, device)
    dataset.wordvec_weights = None  # free the embeddings once the model holds them
    logger.info(model)
    parameters = [p for p in model.parameters() if p.requires_grad]

    loss_func = nn.MSELoss()
    # weight_decay is read from the config above, so pass it through to Adam.
    optimizer = optim.Adam(parameters, lr=learning_rate, weight_decay=weight_decay)

    least_loss = float('inf')  # best (lowest) loss seen so far

    model.to(device)
    loss_func.to(device)

    for epoch in range(epochs):