Example no. 1
    def initialize(self, source, target, batch_size1, batch_size2, scale=32):
        transform = transforms.Compose([
            transforms.Resize(scale),  # Scale was renamed to Resize in torchvision
            transforms.ToTensor(),
            transforms.Normalize((0.5, 0.5, 0.5), (0.5, 0.5, 0.5))
        ])
        dataset_source = Dataset(source['imgs'],
                                 source['labels'],
                                 transform=transform)
        dataset_target = Dataset(target['imgs'],
                                 target['labels'],
                                 transform=transform)
        # dataset_source = tnt.dataset.TensorDataset([source['imgs'], source['labels']])
        # dataset_target = tnt.dataset.TensorDataset([target['imgs'], target['labels']])
        data_loader_s = torch.utils.data.DataLoader(dataset_source,
                                                    batch_size=batch_size1,
                                                    shuffle=True,
                                                    num_workers=4)

        data_loader_t = torch.utils.data.DataLoader(dataset_target,
                                                    batch_size=batch_size2,
                                                    shuffle=True,
                                                    num_workers=4)
        self.dataset_s = dataset_source
        self.dataset_t = dataset_target
        self.paired_data = PairedData(data_loader_s, data_loader_t,
                                      float("inf"))
Example no. 2
    def test_check_hashes(self):
        d = Dataset('fake_dataset', data_store='', data_root=self.data_root,
                    local_dir='/Users/fpbatta/local_store')
        temp_dir = d.make_local_copy()
        self.temp_dirs.append(temp_dir)
        d.create_file_hashes()
        self.assertTrue(d.check_file_hashes())
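Examples no. 2 and no. 6 exercise create_file_hashes() and check_file_hashes(), whose bodies are not shown here. Example no. 6 below implies that d.hashes maps a path relative to the dataset root (e.g. 'the_subdir/sfd1.dat') to the MD5 hex digest of that file's contents. A minimal sketch consistent with that behaviour, written as standalone functions; the names mirror the methods, but the implementation is an assumption:

import hashlib
import os

def _md5_of_file(path):
    # Hash the file contents in chunks so large files stay out of memory.
    h = hashlib.md5()
    with open(path, 'rb') as f:
        for chunk in iter(lambda: f.read(8192), b''):
            h.update(chunk)
    return h.hexdigest()

def create_file_hashes(root):
    # Map each file's root-relative path to its MD5 hex digest, e.g.
    # {'fd1.dat': '...', 'the_subdir/sfd1.dat': '...'}.
    hashes = {}
    for dirpath, _, filenames in os.walk(root):
        for name in filenames:
            full = os.path.join(dirpath, name)
            rel = os.path.relpath(full, root).replace(os.sep, '/')
            hashes[rel] = _md5_of_file(full)
    return hashes

def check_file_hashes(root, hashes):
    # Recompute and compare; True only if no file changed, appeared, or vanished.
    return create_file_hashes(root) == hashes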
Example no. 3
    def test_subdirs(self):
        d = Dataset('fake_dataset', data_store='', data_root=self.data_root,
                    local_dir='/Users/fpbatta/local_store', subdirs_as_datasets=True)
        self.assertTrue(d.children)
        temp_dir = d.make_local_copy()
        self.temp_dirs.append(temp_dir)
        # sorted() guards against os.listdir's unspecified ordering
        self.assertEqual(sorted(os.listdir(temp_dir)),
                         ['a1.avi', 'fd1.dat', 'fd2.dat', 'fd3.dat'])
Example no. 4
    def test_temp_copy(self):
        d = Dataset('fake_dataset', data_store='', data_root=self.data_root,
                    local_dir='/Users/fpbatta/local_store')
        temp_dir = d.make_local_copy()
        self.temp_dirs.append(temp_dir)
        # sorted() guards against os.listdir's unspecified ordering
        self.assertEqual(sorted(os.listdir(temp_dir)),
                         ['a1.avi', 'fd1.dat', 'fd2.dat', 'fd3.dat', 'the_subdir'])
Example no. 5
    def test_subdirs_temp_copy(self):
        d = Dataset('fake_dataset', data_store='', data_root=self.data_root,
                    local_dir='/Users/fpbatta/local_store')
        temp_dir = d.make_local_copy()
        self.temp_dirs.append(temp_dir)
        # sorted() guards against os.listdir's unspecified ordering
        self.assertEqual(sorted(os.listdir(temp_dir)),
                         ['a1.avi', 'fd1.dat', 'fd2.dat', 'fd3.dat', 'the_subdir'])
        self.assertTrue(os.path.isdir(os.path.join(temp_dir, 'the_subdir')))
        logging.debug(os.listdir(os.path.join(temp_dir, 'the_subdir')))
Example no. 6
    def test_hashes(self):
        d = Dataset('fake_dataset', data_store='', data_root=self.data_root,
                    local_dir='/Users/fpbatta/local_store')
        temp_dir = d.make_local_copy()
        self.temp_dirs.append(temp_dir)
        d.create_file_hashes()
        self.assertTrue(d.hashes)
        # fd1_dat and sfd1_dat are fixture strings holding the expected file contents
        h = hashlib.md5()
        h.update(fd1_dat.encode('utf-8'))
        self.assertEqual(h.hexdigest(), d.hashes['fd1.dat'])

        h = hashlib.md5()
        h.update(sfd1_dat.encode('utf-8'))
        self.assertEqual(h.hexdigest(), d.hashes['the_subdir/sfd1.dat'])

        logging.debug("hashes: " + str(d.hashes))
    def initialize(self, source, target, batch_size1, batch_size2, scale=32):
        #         transform = transforms.Compose([
        #             transforms.Scale(scale),
        #             transforms.ToTensor(),
        #             transforms.Normalize((0.5, 0.5, 0.5), (0.5, 0.5, 0.5))
        #         ])
        dataset_source = Dataset(source['imgs'], source['labels'])
        dataset_target = Dataset(target['imgs'], target['labels'])

        data_loader_s = torch.utils.data.DataLoader(dataset_source,
                                                    batch_size=batch_size1,
                                                    shuffle=True,
                                                    num_workers=0)

        data_loader_t = torch.utils.data.DataLoader(dataset_target,
                                                    batch_size=batch_size2,
                                                    shuffle=True,
                                                    num_workers=0)

        # print('Source shape: {}, target shape: {}'.format(len(data_loader_s), len(data_loader_t)))
        self.dataset_s = dataset_source
        self.dataset_t = dataset_target
        self.paired_data = PairedData(data_loader_s, data_loader_t,
                                      float("inf"))
Example no. 8

if __name__ == "__main__":
    from config import config
    from datasets.datasets import Dataset
    import matplotlib.patches as patches
    import matplotlib.pyplot as plt
    import torch
    import torchvision.transforms as transforms

    train_list = r'/home/wei/Deep_learning_pytorch/Data/UCAS/ucas_train.txt'
    device = torch.device('cuda' if torch.cuda.is_available() else 'cpu')
    transform = transforms.Compose([
        transforms.ToTensor(),
    ])

    img_size = 256
    da = Dataset(train_list, transform=transform, img_size=img_size, train=False)
    dataloader = torch.utils.data.DataLoader(da, batch_size=1, shuffle=False)
    #x = torch.randn(1,3,128,128)
    f = FCOS(config)
    #checkpoint = torch.load('./checkpoint/ckpt.pth')
    #f.load_state_dict(checkpoint['weights'])

    for batch_i, (_, imgs, targets) in enumerate(dataloader):
        images = imgs
        #loss = f(images, targets)
        detections = f(images)

        break
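
The matplotlib.patches and pyplot imports above go unused, so a plotting step was presumably trimmed from this snippet. A minimal sketch of how the detections might be visualized, assuming detections[0] is an (N, 5) tensor of (x1, y1, x2, y2, score) boxes in pixel coordinates; that layout is an assumption, not confirmed by the source:

# Hypothetical plotting of the first image's detections; the
# (x1, y1, x2, y2, score) box layout is an assumption.
img = images[0].permute(1, 2, 0).cpu().numpy()  # CHW -> HWC for imshow
fig, ax = plt.subplots()
ax.imshow(img)
for x1, y1, x2, y2, score in detections[0]:
    ax.add_patch(patches.Rectangle((x1, y1), x2 - x1, y2 - y1,
                                   fill=False, edgecolor='red'))
plt.show()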