# Example 1 (score: 0)
    def __init__(self, H, dataset, transform=None):
        """Build an in-memory dataset from consecutive (input, target) pairs.

        Args:
            H: dense adjacency/incidence matrix; converted to sparse once to
               extract the edge_index shared by every sample.
            dataset: flat sequence alternating input and target tensors —
                     even indices are inputs, the following odd index is the
                     matching target.  Assumes an even length — TODO confirm.
            transform: optional per-sample transform forwarded to the base
                       class constructor.
        """
        super(CustomDataset, self).__init__('.', transform, None, None)
        adj = H.to_sparse()
        # to_sparse() returns a coalesced tensor, so the public .indices()
        # is valid here and preferred over the private ._indices().
        edge_index = adj.indices()
        data_list = []

        # Step by 2 to pair each input (i) with its target (i + 1).
        for i in range(0, len(dataset), 2):
            data = Data(edge_index=edge_index)
            data.x = dataset[i].t()
            data.y = dataset[i + 1].t()
            data_list.append(data)

        self.data, self.slices = self.collate(data_list)
# Example 2 (score: 0)
    def __init__(self, H, dataset, x, fac_num, transform=None):
        """Build an in-memory dataset with one sample per row of ``dataset``.

        Each row becomes a column vector zero-padded with ``fac_num`` extra
        rows; every sample shares the same target ``x.t()`` and the same
        graph connectivity derived from ``H``.

        Args:
            H: dense adjacency/incidence matrix, sparsified to obtain the
               shared edge_index.
            dataset: iterable of 1-D tensors, one per sample.
            x: common target tensor (transposed once, reused for all samples).
            fac_num: number of zero rows appended to each sample's features.
            transform: optional per-sample transform forwarded to the base
                       class constructor.
        """
        super(CustomDataset, self).__init__('.', transform, None, None)
        adj = H.to_sparse()
        # to_sparse() yields a coalesced tensor, so the public .indices()
        # is equivalent to (and preferred over) the private ._indices().
        edge_index = adj.indices()

        # Hoist loop-invariant tensors out of the per-row loop: the zero
        # padding and the transposed target never change between samples.
        padding = torch.zeros(fac_num, 1)
        target = x.t()

        data_list = []
        for row in dataset:
            data = Data(edge_index=edge_index)
            # Column vector of the row followed by fac_num zero rows.
            data.x = torch.cat([row.unsqueeze(0).t(), padding], dim=0)
            data.y = target
            data_list.append(data)

        self.data, self.slices = self.collate(data_list)
def sendData(client, inputData, startLayer, endLayer):
    """Pickle a Data message and send it over ``client``, length-prefixed.

    Wire format: a 6-byte big-endian length header, followed by the pickled
    payload bytes (matching the receiver implied by the sibling sendData).
    """
    data = Data(inputData, startLayer, endLayer)
    payload = pickle.dumps(data)
    # Bug fix: the original sent len(data) — the length of the un-pickled
    # Data object — and then the object itself rather than bytes, which
    # would fail on a real socket.  Send the pickled bytes and *their*
    # length, as the corrected sibling function does.
    client.send(len(payload).to_bytes(length=6, byteorder='big'))
    client.send(payload)
def sendData(server, inputData, startLayer, endLayer):
    """Serialize a Data message and ship it to ``server``.

    The message is a 6-byte big-endian length header immediately followed
    by the pickled payload bytes.
    """
    # 'payload' avoids shadowing the builtin name used by the original.
    payload = pickle.dumps(Data(inputData, startLayer, endLayer))
    header = len(payload).to_bytes(length=6, byteorder='big')
    server.send(header)
    server.send(payload)
        return len(self.images)

    def __getitem__(self, index):
        """Load the ``index``-th image and derive its label from the filename.

        Returns:
            dict with keys ``'image'`` (the loaded image array) and
            ``'label'`` (the filename prefix before the first dot, e.g.
            ``cat`` from ``cat.0.jpg``), with ``self.transform`` applied
            when one is set.
        """
        image_index = self.images[index]
        img_path = os.path.join(self.root_dir, image_index)
        img = io.imread(img_path)
        # Bug fix: splitting on a hard-coded '\\' only works on Windows,
        # while the path above is built with the platform separator.
        # os.path.basename behaves identically on Windows and also works
        # on POSIX.
        label = os.path.basename(img_path).split('.')[0]
        sample = {'image': img, 'label': label}

        if self.transform:
            sample = self.transform(sample)
        return sample


if __name__ == '__main__':
    # Smoke-test the Data dataset: iterate the training split and print
    # normalized batches.
    train_data = Data('./write/train', transform=None)
    train_loader = DataLoader(train_data, batch_size=BATCH_SIZE, shuffle=True)
    for i_batch, batch_data in enumerate(train_loader):
        print(i_batch)
        print(batch_data['image'] / 255)

    test_data = Data('./write/test', transform=None)
    test_loader = DataLoader(test_data, batch_size=BATCH_SIZE, shuffle=True)
    # Bug fix: this loop iterated train_loader again; it should walk the
    # freshly built test_loader.
    for i_batch, batch_data in enumerate(test_loader):
        print(i_batch)
        # batch_data['image'] corresponds to test_data.data
        print(batch_data['image'].size())
    # First 2000 test images: add a channel dimension and scale to [0, 1].
    test_x = torch.unsqueeze(test_data.data, dim=1).type(
        torch.FloatTensor)[:2000] / 255.
    test_y = test_data.test_labels[:2000]