Example #1
    def process(self):
        data_list = []

        indices_train = []
        indices_val = []
        indices_test = []

        infile = open("test_al_50.index", "r")
        for line in infile:
            indices_test = line.split(",")
            indices_test = [int(i) for i in indices_test]

        infile = open("val_al_50.index", "r")
        for line in infile:
            indices_val = line.split(",")
            indices_val = [int(i) for i in indices_val]

        infile = open("train_al_50.index", "r")
        for line in infile:
            indices_train = line.split(",")
            indices_train = [int(i) for i in indices_train]

        targets = dp.get_dataset("alchemy_full", multigregression=True)
        tmp_1 = targets[indices_train].tolist()
        tmp_2 = targets[indices_val].tolist()
        tmp_3 = targets[indices_test].tolist()
        targets = tmp_1
        targets.extend(tmp_2)
        targets.extend(tmp_3)
        node_labels = pre.get_all_node_labels_allchem(True, True,
                                                      indices_train,
                                                      indices_val,
                                                      indices_test)

        matrices = pre.get_all_matrices("alchemy_full", indices_train)
        matrices.extend(pre.get_all_matrices("alchemy_full", indices_val))
        matrices.extend(pre.get_all_matrices("alchemy_full", indices_test))

        for i, m in enumerate(matrices):
            print(i)
            # Edge lists come as (source, target) pairs; transpose to PyG's (2, num_edges) layout.
            edge_index_1 = torch.tensor(m[0]).t().contiguous()
            edge_index_2 = torch.tensor(m[1]).t().contiguous()

            data = Data()
            data.edge_index_1 = edge_index_1
            data.edge_index_2 = edge_index_2

            # One-hot encode the node labels (83 label classes for alchemy_full).
            one_hot = np.eye(83)[node_labels[i]]
            data.x = torch.from_numpy(one_hot).to(torch.float)
            data.y = torch.from_numpy(np.array([targets[i]])).to(torch.float)

            data_list.append(data)

        data, slices = self.collate(data_list)
        torch.save((data, slices), self.processed_paths[0])
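This `process()` method (and the ones below) is assumed to be the processing hook of a `torch_geometric.data.InMemoryDataset` subclass: `self.collate` and `self.processed_paths` come from that base class, while `dp` and `pre` are the repository's own dataset-parsing and preprocessing helpers. A minimal sketch of the surrounding class could look as follows; the class name and the processed file name are hypothetical.

import torch
from torch_geometric.data import InMemoryDataset


class Alchemy50(InMemoryDataset):
    def __init__(self, root, transform=None, pre_transform=None):
        super().__init__(root, transform, pre_transform)
        # Load the (data, slices) pair that process() wrote to disk.
        self.data, self.slices = torch.load(self.processed_paths[0])

    @property
    def raw_file_names(self):
        return []

    @property
    def processed_file_names(self):
        return ["alchemy_50.pt"]

    def download(self):
        pass

    def process(self):
        ...  # body as in the example above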
Example #2
    def process(self):
        data_list = []

        indices_train = []
        indices_val = []
        indices_test = []

        infile = open("test.index.txt", "r")
        for line in infile:
            indices_test = line.split(",")
            indices_test = [int(i) for i in indices_test]

        infile = open("val.index.txt", "r")
        for line in infile:
            indices_val = line.split(",")
            indices_val = [int(i) for i in indices_val]

        infile = open("train.index.txt", "r")
        for line in infile:
            indices_train = line.split(",")
            indices_train = [int(i) for i in indices_train]

        dp.get_dataset("ZINC_train")
        dp.get_dataset("ZINC_test")
        dp.get_dataset("ZINC_val")
        node_labels = pre.get_all_node_labels_ZINC(True, True, indices_train,
                                                   indices_val, indices_test)

        targets = pre.read_targets("ZINC_train", indices_train)
        targets.extend(pre.read_targets("ZINC_val", indices_val))
        targets.extend(pre.read_targets("ZINC_test", indices_test))

        matrices = pre.get_all_matrices("ZINC_train", indices_train)
        matrices.extend(pre.get_all_matrices("ZINC_val", indices_val))
        matrices.extend(pre.get_all_matrices("ZINC_test", indices_test))

        for i, m in enumerate(matrices):
            edge_index_1 = torch.tensor(m[0]).t().contiguous()
            edge_index_2 = torch.tensor(m[1]).t().contiguous()

            data = Data()
            data.edge_index_1 = edge_index_1
            data.edge_index_2 = edge_index_2

            # One-hot encode the node labels (445 label classes for ZINC).
            one_hot = np.eye(445)[node_labels[i]]
            data.x = torch.from_numpy(one_hot).to(torch.float)
            data.y = torch.from_numpy(np.array([targets[i]])).to(torch.float)

            data_list.append(data)

        data, slices = self.collate(data_list)
        torch.save((data, slices), self.processed_paths[0])
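Two small idioms recur in every loop above: node labels are one-hot encoded by indexing into an identity matrix, and edge lists are transposed into PyG's (2, num_edges) layout. A tiny standalone illustration with made-up values:

import numpy as np
import torch

# One-hot encoding via identity-matrix indexing: row k of np.eye(num_classes)
# is the one-hot vector for label k.
node_labels = [0, 2, 1]            # hypothetical labels of a 3-node graph
one_hot = np.eye(4)[node_labels]   # shape (3, 4)

# Edge lists arrive as (source, target) pairs; PyG expects shape (2, num_edges).
edges = [[0, 1], [1, 2], [2, 0]]
edge_index = torch.tensor(edges).t().contiguous()  # shape (2, 3)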
Example #3
    def process(self):
        data_list = []

        targets = dp.get_dataset("alchemy_full",
                                 multigregression=True).tolist()
        node_labels = pre.get_all_node_labels("alchemy_full", True, True)
        matrices = pre.get_all_matrices("alchemy_full", list(range(202579)))

        for i, m in enumerate(matrices):
            edge_index_1 = torch.tensor(m[0]).t().contiguous()
            edge_index_2 = torch.tensor(m[1]).t().contiguous()

            data = Data()
            data.edge_index_1 = edge_index_1
            data.edge_index_2 = edge_index_2

            # One-hot encode the node labels (83 label classes for alchemy_full).
            one_hot = np.eye(83)[node_labels[i]]
            data.x = torch.from_numpy(one_hot).to(torch.float)
            data.y = torch.from_numpy(np.array([targets[i]])).to(torch.float)

            data_list.append(data)

        data, slices = self.collate(data_list)
        torch.save((data, slices), self.processed_paths[0])
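Once `process()` has been run (PyG calls it automatically the first time the dataset is instantiated and caches the result under root/processed/), the dataset can be consumed like any other PyG dataset. A usage sketch, reusing the hypothetical Alchemy50 class from the earlier sketch and assuming a torch_geometric version that provides torch_geometric.loader.DataLoader:

from torch_geometric.loader import DataLoader

# Hypothetical usage of the Alchemy50 sketch above.
dataset = Alchemy50(root="data/alchemy")
loader = DataLoader(dataset, batch_size=64, shuffle=True)

for batch in loader:
    print(batch.x.shape, batch.y.shape)  # one-hot node features, regression targets
    break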
Example #4
    def process(self):
        data_list = []
        targets = dp.get_dataset("QM9", multigregression=True).tolist()
        attributes = pre.get_all_attributes("QM9")

        node_labels = pre.get_all_node_labels("QM9", False, False)
        matrices = pre.get_all_matrices("QM9", list(range(129433)))

        for i, m in enumerate(matrices):
            edge_index_1 = torch.tensor(m[0]).t().contiguous()
            edge_index_2 = torch.tensor(m[1]).t().contiguous()

            data = Data()
            data.edge_index_1 = edge_index_1
            data.edge_index_2 = edge_index_2

            # One-hot encode the node labels (3 classes).
            one_hot = np.eye(3)[node_labels[i]]
            data.x = torch.from_numpy(one_hot).to(torch.float)

            # Continuous information: the first 13 columns are node attributes,
            # the remaining columns are coordinates.
            data.first = torch.from_numpy(np.array(
                attributes[i][0])[:, 0:13]).to(torch.float)
            data.first_coord = torch.from_numpy(
                np.array(attributes[i][0])[:, 13:]).to(torch.float)

            data.second = torch.from_numpy(
                np.array(attributes[i][1])[:, 0:13]).to(torch.float)
            data.second_coord = torch.from_numpy(
                np.array(attributes[i][1])[:, 13:]).to(torch.float)

            # Euclidean distance between the paired coordinates.
            data.dist = torch.norm(data.first_coord - data.second_coord,
                                   p=2,
                                   dim=-1).view(-1, 1)
            data.edge_attr = torch.from_numpy(np.array(attributes[i][2])).to(
                torch.float)
            data.y = torch.from_numpy(np.array([targets[i]])).to(torch.float)

            data_list.append(data)

        data, slices = self.collate(data_list)
        torch.save((data, slices), self.processed_paths[0])
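The data.dist attribute in this last example stores pairwise Euclidean distances between matching rows of the two coordinate tensors. A minimal, self-contained illustration of that computation with made-up coordinates:

import torch

# Per-row Euclidean distances, as computed for data.dist above: the norm of the
# difference between matching rows of the two coordinate tensors.
first_coord = torch.tensor([[0.0, 0.0, 0.0], [1.0, 0.0, 0.0]])   # made-up coordinates
second_coord = torch.tensor([[0.0, 3.0, 4.0], [1.0, 0.0, 2.0]])
dist = torch.norm(first_coord - second_coord, p=2, dim=-1).view(-1, 1)
print(dist)  # tensor([[5.], [2.]])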