# Example 1 (0 votes)
 def create_datasets(self):
     """Build the rotation-task training dispenser and per-domain val loaders.

     Side effects: sets ``self.train_dispenser`` (alternating joint
     source/target batches) and ``self.val_loaders`` (dict keyed by domain).
     """
     # Random-crop to self.crop_size, then apply the backbone's own
     # input preprocessing.
     transform = transforms.Compose([
         transforms.RandomCrop(self.crop_size),
         self.net.transform,
     ])
     loaders = []
     for dataset in [self.source_dataset, self.target_dataset]:
         # RotationWrapper is project-local; presumably yields rotated
         # crops plus rotation-class labels — verify against its definition.
         rot_dataset = RotationWrapper(dataset, transform=transform)
         # Half the batch from each domain, so a joint batch totals
         # self.batch_size.
         loader = DataLoader(rot_dataset,
                             batch_size=self.batch_size // 2,
                             shuffle=True,
                             num_workers=4)
         loaders.append(loader)
     self.train_dispenser = JointDispenser(*loaders)
     val_loaders = []
     for dataset in [self.source_val_dataset, self.target_val_dataset]:
         rot_dataset = RotationWrapper(dataset, transform=transform)
         # Validation runs one sample at a time.
         loader = DataLoader(rot_dataset,
                             batch_size=1,
                             shuffle=True,
                             num_workers=4)
         val_loaders.append(loader)
     self.val_loaders = {
         'source': val_loaders[0],
         'target': val_loaders[1],
     }
# Example 2 (0 votes)
 def create_datasets(self):
     """Build the grid-regression training dispenser and per-domain val loaders.

     Side effects: sets ``self.train_dispenser`` and ``self.val_loaders``.
     """
     # Resize the image (short side to 1024) before grid extraction; the
     # backbone's own preprocessing is applied per crop.
     transform = transforms.Resize(1024)
     crop_transform = self.net.transform
     loaders = []
     for dataset in [self.source_dataset, self.target_dataset]:
         # GridRegressionWrapper is project-local; presumably emits grid
         # crops with their cell coordinates as targets — verify against
         # its definition.
         grid_dataset = GridRegressionWrapper(dataset,
                                              stride=self.stride,
                                              transform=transform,
                                              crop_transform=crop_transform)
         # Half the batch per domain so a joint batch is self.batch_size.
         loader = DataLoader(grid_dataset,
                             batch_size=self.batch_size // 2,
                             shuffle=True,
                             num_workers=4)
         loaders.append(loader)
     val_loaders = []
     for dataset in [self.source_val_dataset, self.target_val_dataset]:
         grid_dataset = GridRegressionWrapper(dataset,
                                              stride=self.stride,
                                              transform=transform,
                                              crop_transform=crop_transform)
         # Validation runs one sample at a time.
         loader = DataLoader(grid_dataset,
                             batch_size=1,
                             shuffle=True,
                             num_workers=4)
         val_loaders.append(loader)
     self.train_dispenser = JointDispenser(*loaders)
     self.val_loaders = {
         'source': val_loaders[0],
         'target': val_loaders[1],
     }
    def create_datasets(self):
        """Build augmented segmentation-style train loaders and full-size
        val loaders for both domains.

        Side effects: sets ``self.train_dispenser`` and ``self.val_loaders``.
        """
        transform = transforms.Compose([
            self.net.transform,
        ])
        # to_tensor_raw is project-local; presumably converts label maps to
        # tensors without normalization — verify against its definition.
        target_transform = transforms.Compose([
            to_tensor_raw,
        ])
        # Random crop + horizontal flip applied jointly to image and target
        # at collate time.
        collate_fn = AugmentCollate(crop=self.crop_size, flip=True)
        train_loaders = []
        for train_dataset in [self.source_dataset, self.target_dataset]:
            dataset = TransformWrapper(train_dataset,
                                       transform=transform,
                                       target_transform=target_transform)
            # Half the batch per domain; drop_last keeps batches full-sized.
            train_loader = DataLoader(dataset,
                                      batch_size=self.batch_size // 2,
                                      shuffle=True,
                                      num_workers=8,
                                      collate_fn=collate_fn,
                                      drop_last=True)
            train_loaders.append(train_loader)
        self.train_dispenser = JointDispenser(*train_loaders)

        # Validation uses the fixed full 1024x2048 resolution; NEAREST keeps
        # label values intact when resizing the target map.
        val_transform = transforms.Compose([
            transforms.Resize((1024, 2048)),
            self.net.transform,
        ])
        val_target_transform = transforms.Compose([
            transforms.Resize((1024, 2048), interpolation=Image.NEAREST),
            to_tensor_raw,
        ])
        val_loaders = []
        for dataset in [self.source_val_dataset, self.target_val_dataset]:
            dataset = TransformWrapper(
                dataset,
                transform=val_transform,
                target_transform=val_target_transform,
            )
            # One image per GPU during validation.
            loader = DataLoader(
                dataset,
                batch_size=torch.cuda.device_count(),
                num_workers=4,
            )
            val_loaders.append(loader)
        self.val_loaders = {
            'source': val_loaders[0],
            'target': val_loaders[1],
        }
 def create_datasets(self):
     """Build the continuous-grid-regression training dispenser and
     per-domain val loaders.

     Side effects: sets ``self.train_dispenser`` and ``self.val_loaders``.
     """
     crop_transform = self.net.transform
     loaders = []
     for dataset in [self.source_dataset, self.target_dataset]:
         # ContinuousGridRegressionWrapper is project-local; presumably
         # samples crops at continuous positions within the
         # (top, left, bottom, right) region and labels them with their
         # (optionally normalized) location — verify against its definition.
         grid_dataset = ContinuousGridRegressionWrapper(
             dataset,
             crop_size=self.crop_size,
             top=self.top,
             left=self.left,
             bottom=self.bottom,
             right=self.right,
             normalize=self.normalize,
             crop_transform=crop_transform
         )
         # Half the batch per domain; drop_last keeps batches full-sized.
         loader = DataLoader(
             grid_dataset,
             batch_size=self.batch_size // 2,
             shuffle=True,
             num_workers=4,
             drop_last=True
         )
         loaders.append(loader)
     self.train_dispenser = JointDispenser(*loaders)
     val_loaders = []
     for dataset in [self.source_val_dataset, self.target_val_dataset]:
         grid_dataset = ContinuousGridRegressionWrapper(
             dataset,
             crop_size=self.crop_size,
             top=self.top,
             left=self.left,
             bottom=self.bottom,
             right=self.right,
             normalize=self.normalize,
             crop_transform=crop_transform
         )
         loader = DataLoader(
             grid_dataset,
             batch_size=4,
             shuffle=True,
             num_workers=4
         )
         val_loaders.append(loader)
     self.val_loaders = {
         'source': val_loaders[0],
         'target': val_loaders[1],
     }
# Example 5 (0 votes)
class Rotation:
    """Self-supervised rotation-prediction auxiliary task over two domains.

    Random crops from both the source and the target dataset are rotated and
    the shared network (through a task-specific head) must classify the
    rotation among four classes. Trained with NLL loss; evaluated by
    classification accuracy per domain.
    """

    def __init__(self,
                 net,
                 source_dataset,
                 target_dataset,
                 source_val_dataset,
                 target_val_dataset,
                 *,
                 batch_size,
                 name='rotation',
                 crop_size=400):
        self.net = net
        self.source_dataset = source_dataset
        self.target_dataset = target_dataset
        self.source_val_dataset = source_val_dataset
        self.target_val_dataset = target_val_dataset
        self.batch_size = batch_size
        self.name = name
        self.crop_size = crop_size

        # NLL loss: the head is expected to emit log-probabilities.
        self.loss_fn = nn.NLLLoss()
        self.create_datasets()
        self.create_head()

    def create_datasets(self):
        """Create the joint training dispenser and per-domain val loaders."""
        # Random crop first, then the backbone's own input preprocessing.
        transform = transforms.Compose([
            transforms.RandomCrop(self.crop_size),
            self.net.transform,
        ])

        def build_loader(base, per_loader_batch):
            # Wrap with rotation labels, then batch.
            wrapped = RotationWrapper(base, transform=transform)
            return DataLoader(wrapped,
                              batch_size=per_loader_batch,
                              shuffle=True,
                              num_workers=4)

        # Half the batch from each domain so a joint batch is batch_size.
        train_loaders = [
            build_loader(ds, self.batch_size // 2)
            for ds in (self.source_dataset, self.target_dataset)
        ]
        self.train_dispenser = JointDispenser(*train_loaders)
        # Validation runs one sample at a time, keyed by domain.
        self.val_loaders = {
            domain: build_loader(ds, 1)
            for domain, ds in (('source', self.source_val_dataset),
                               ('target', self.target_val_dataset))
        }

    def create_head(self):
        """Attach a 4-way rotation-classification head to the network."""
        self.head = RotationHead(self.net.out_dim)
        self.net.attach_head(self.name, self.head)

    def _predict_batch(self, im):
        """Run the rotation head on a (batch, rotations, C, H, W) tensor.

        Returns logits of shape (batch * rotations, 4).
        """
        # Fold the per-image rotation dimension into the batch dimension.
        batch, rots, chans, height, width = im.size()
        flat = im.view(batch * rots, chans, height, width).cuda()
        logits = self.net(flat, task=self.name)
        return logits.view(batch * rots, 4)

    def step(self):
        """One training step; returns the NLL loss on a joint batch."""
        im, label = self.train_dispenser.next_batch()
        targets = label.view(-1).cuda()
        return self.loss_fn(self._predict_batch(im), targets)

    def eval(self):
        """Return rotation accuracy for each domain's validation loader."""
        self.net.eval()
        results = {}
        for domain, loader in self.val_loaders.items():
            hits = 0
            seen = 0
            for im, label in loader:
                with torch.no_grad():
                    targets = label.view(-1).cuda()
                    predicted = self._predict_batch(im).max(dim=1)[1]
                    hits += predicted.eq(targets).sum().item()
                    seen += targets.numel()
            accuracy = hits / seen
            logging.info(f'    {self.name}.{domain}: {accuracy}')
            results[f'{self.name}.{domain}'] = accuracy
        self.net.train()
        return results
class AdversarialAdaptation:
    """Domain-adversarial feature alignment between source and target.

    A discriminator (``self.adv_dis``) learns to tell which domain a
    feature map came from; ``step_generator`` returns the loss that trains
    the shared network to fool it (labels inverted).
    """

    def __init__(self,
                 net,
                 source_dataset,
                 target_dataset,
                 source_val_dataset,
                 target_val_dataset,
                 *,
                 batch_size,
                 crop_size,
                 name='adversarial_adapt'):
        self.name = name
        self.net = net
        self.source_dataset = source_dataset
        self.target_dataset = target_dataset
        self.source_val_dataset = source_val_dataset
        self.target_val_dataset = target_val_dataset

        # NOTE: the previous `self.adv_opt = torch.optim` bound the whole
        # torch.optim *module* here; it was dead code, immediately replaced
        # by the real SGD optimizer built in create_head(). Removed.
        self.batch_size = batch_size
        self.crop_size = crop_size
        self.loss_fn = nn.CrossEntropyLoss()
        self.create_datasets()
        # Builds self.adv_dis and self.adv_opt.
        self.create_head()

    def create_datasets(self):
        """Create the joint training dispenser and per-domain val loaders.

        Side effects: sets ``self.train_dispenser`` and ``self.val_loaders``.
        """
        transform = transforms.Compose([
            self.net.transform,
        ])
        # to_tensor_raw is project-local; presumably converts label maps
        # without normalization — verify against its definition.
        target_transform = transforms.Compose([
            to_tensor_raw,
        ])
        # Joint random crop + horizontal flip applied at collate time.
        collate_fn = AugmentCollate(crop=self.crop_size, flip=True)
        train_loaders = []
        for train_dataset in [self.source_dataset, self.target_dataset]:
            dataset = TransformWrapper(train_dataset,
                                       transform=transform,
                                       target_transform=target_transform)
            # Half the batch per domain; drop_last keeps joint batches even,
            # which generate_label() relies on.
            train_loader = DataLoader(dataset,
                                      batch_size=self.batch_size // 2,
                                      shuffle=True,
                                      num_workers=8,
                                      collate_fn=collate_fn,
                                      drop_last=True)
            train_loaders.append(train_loader)
        self.train_dispenser = JointDispenser(*train_loaders)

        # Validation at a fixed 1024x2048 resolution; NEAREST interpolation
        # keeps label values intact in the target map.
        val_transform = transforms.Compose([
            transforms.Resize((1024, 2048)),
            self.net.transform,
        ])
        val_target_transform = transforms.Compose([
            transforms.Resize((1024, 2048), interpolation=Image.NEAREST),
            to_tensor_raw,
        ])
        val_loaders = []
        for dataset in [self.source_val_dataset, self.target_val_dataset]:
            dataset = TransformWrapper(
                dataset,
                transform=val_transform,
                target_transform=val_target_transform,
            )
            # One image per GPU during validation.
            loader = DataLoader(
                dataset,
                batch_size=torch.cuda.device_count(),
                num_workers=4,
            )
            val_loaders.append(loader)
        self.val_loaders = {
            'source': val_loaders[0],
            'target': val_loaders[1],
        }

    def create_head(self):
        """Build the domain discriminator and its own SGD optimizer."""
        self.adv_dis = AdversarialDiscriminator(self.net.out_dim).cuda()
        self.adv_opt = torch.optim.SGD(self.adv_dis.parameters(),
                                       lr=0.007,
                                       momentum=0.9)

    def _predict_batch(self, im):
        """Run the network on a batch under this task's name."""
        im = im.cuda()
        preds = self.net(im, task=self.name)
        return preds

    def step(self):
        """One adversarial round: update discriminator, return generator loss."""
        self.step_discriminator()
        return self.step_generator()

    def step_discriminator(self):
        """Train the discriminator on frozen features (true domain labels)."""
        im, _ = self.train_dispenser.next_batch()
        im = im.cuda()
        label = self.generate_label(im)
        self.adv_opt.zero_grad()
        # no_grad: only the discriminator is updated in this step.
        with torch.no_grad():
            _, fts = self.net(im, task=None)
        preds = self.adv_dis(fts)
        loss = self.loss_fn(preds, label)
        loss.backward()
        self.adv_opt.step()

    def step_generator(self):
        """Return the loss that trains the network to fool the discriminator.

        Labels are inverted; the caller is expected to backprop this loss
        through the network's optimizer.
        """
        im, _ = self.train_dispenser.next_batch()
        im = im.cuda()
        label = self.generate_label(im, invert=True)
        _, fts = self.net(im, task=None)
        preds = self.adv_dis(fts)
        loss = self.loss_fn(preds, label)
        return loss

    def generate_label(self, im, invert=False):
        """Return per-sample domain labels for a joint batch.

        Assumes the first half of the batch is source (label 0) and the
        second half target (label 1), and that the batch size is even —
        guaranteed by drop_last=True on the train loaders. With
        ``invert=True`` the halves are swapped (generator objective).
        """
        zeros = torch.zeros(im.size(0) // 2,
                            device=im.device,
                            dtype=torch.long)
        ones = torch.ones(im.size(0) // 2, device=im.device, dtype=torch.long)
        if invert:
            return torch.cat([ones, zeros], dim=0)
        return torch.cat([zeros, ones], dim=0)

    def eval(self):
        """No intrinsic validation metric for this task."""
        return {}
# Example 7 (0 votes)
class GridRegression:
    """Self-supervised grid-location regression task over two domains.

    Crops taken on a regular grid from resized images must be mapped back to
    their 2-D grid coordinates by a task-specific head. Trained with MSE;
    evaluated by exact-match accuracy of the rounded predictions.
    """

    def __init__(self,
                 net,
                 source_dataset,
                 target_dataset,
                 source_val_dataset,
                 target_val_dataset,
                 *,
                 batch_size,
                 stride=256,
                 name='gridregression'):
        self.net = net
        self.source_dataset = source_dataset
        self.target_dataset = target_dataset
        self.source_val_dataset = source_val_dataset
        self.target_val_dataset = target_val_dataset
        self.batch_size = batch_size
        self.stride = stride
        self.name = name

        self.loss_fn = nn.MSELoss()
        self.create_datasets()
        self.create_head()

    def create_datasets(self):
        """Create the joint training dispenser and per-domain val loaders."""
        # Resize before grid extraction; the backbone's own preprocessing
        # is applied per crop.
        resize = transforms.Resize(1024)
        crop_transform = self.net.transform

        def build_loader(base, per_loader_batch):
            wrapped = GridRegressionWrapper(base,
                                            stride=self.stride,
                                            transform=resize,
                                            crop_transform=crop_transform)
            return DataLoader(wrapped,
                              batch_size=per_loader_batch,
                              shuffle=True,
                              num_workers=4)

        # Half the batch from each domain so a joint batch is batch_size.
        train_loaders = [
            build_loader(ds, self.batch_size // 2)
            for ds in (self.source_dataset, self.target_dataset)
        ]
        self.train_dispenser = JointDispenser(*train_loaders)
        # Validation runs one sample at a time, keyed by domain.
        self.val_loaders = {
            domain: build_loader(ds, 1)
            for domain, ds in (('source', self.source_val_dataset),
                               ('target', self.target_val_dataset))
        }

    def create_head(self):
        """Attach a 2-D coordinate regression head to the network."""
        self.head = GridRegressionHead(self.net.out_dim)
        self.net.attach_head(self.name, self.head)

    def _predict_batch(self, im):
        """Run the grid head on a (batch, cells, C, H, W) tensor.

        Returns predictions of shape (batch * cells, 2).
        """
        # Fold the per-image grid dimension into the batch dimension.
        batch, cells, chans, height, width = im.size()
        flat = im.view(batch * cells, chans, height, width).cuda()
        preds = self.net(flat, task=self.name)
        return preds.view(batch * cells, 2)

    def step(self):
        """One training step; returns the MSE on a joint batch."""
        im, label = self.train_dispenser.next_batch()
        targets = label.view(-1, 2).cuda()
        return self.loss_fn(self._predict_batch(im), targets)

    def eval(self):
        """Return per-domain accuracy of rounded grid-cell predictions."""
        self.net.eval()
        results = {}
        for domain, loader in self.val_loaders.items():
            hits = 0
            seen = 0
            for im, label in loader:
                with torch.no_grad():
                    targets = label.view(-1, 2).cuda()
                    rounded = self._predict_batch(im).round()
                    # Both coordinates must match for a crop to count.
                    hits += rounded.eq(targets).all(dim=1).sum().item()
                    seen += targets.size(0)
            accuracy = hits / seen
            logging.info(f'    {self.name}.{domain}: {accuracy}')
            results[f'{self.name}.{domain}'] = accuracy
        self.net.train()
        return results
class ContinuousGridRegression:
    """Continuous crop-location regression task over two domains.

    Crops are sampled at continuous positions inside a configurable region
    and the head must regress each crop's (optionally normalized) location.
    Trained with MSE; evaluated by mean validation MSE per domain.
    """

    def __init__(self,
                 net,
                 source_dataset,
                 target_dataset,
                 source_val_dataset,
                 target_val_dataset,
                 *,
                 batch_size,
                 crop_size=256,
                 top=0,
                 left=0,
                 bottom=1024,
                 right=2048,
                 normalize=True,
                 name='gridregression-cont'):
        self.net = net
        self.source_dataset = source_dataset
        self.target_dataset = target_dataset
        self.source_val_dataset = source_val_dataset
        self.target_val_dataset = target_val_dataset
        self.batch_size = batch_size
        self.crop_size = crop_size
        self.top = top
        self.left = left
        self.bottom = bottom
        self.right = right
        self.name = name
        self.normalize = normalize

        self.loss_fn = nn.MSELoss()
        self.create_datasets()
        self.create_head()

    def create_datasets(self):
        """Create the joint training dispenser and per-domain val loaders."""
        crop_transform = self.net.transform

        def wrap(base):
            # Samples crops inside the (top, left, bottom, right) region and
            # labels each with its location.
            return ContinuousGridRegressionWrapper(
                base,
                crop_size=self.crop_size,
                top=self.top,
                left=self.left,
                bottom=self.bottom,
                right=self.right,
                normalize=self.normalize,
                crop_transform=crop_transform
            )

        # Half the batch from each domain; drop_last keeps batches full.
        train_loaders = [
            DataLoader(wrap(ds),
                       batch_size=self.batch_size // 2,
                       shuffle=True,
                       num_workers=4,
                       drop_last=True)
            for ds in (self.source_dataset, self.target_dataset)
        ]
        self.train_dispenser = JointDispenser(*train_loaders)
        self.val_loaders = {
            domain: DataLoader(wrap(ds),
                               batch_size=4,
                               shuffle=True,
                               num_workers=4)
            for domain, ds in (('source', self.source_val_dataset),
                               ('target', self.target_val_dataset))
        }

    def create_head(self):
        """Attach a 2-D coordinate regression head to the network."""
        self.head = GridRegressionHead(self.net.out_dim)
        self.net.attach_head(self.name, self.head)

    def _predict_batch(self, im):
        """Run the regression head; returns predictions of shape (N, 2)."""
        features = im.cuda()
        preds = self.net(features, task=self.name)
        return preds.view(-1, 2)

    def step(self):
        """One training step; returns the MSE on a joint batch."""
        im, label = self.train_dispenser.next_batch()
        targets = label.cuda()
        return self.loss_fn(self._predict_batch(im), targets)

    def eval(self):
        """Return the mean validation MSE for each domain."""
        self.net.eval()
        results = {}
        for domain, loader in self.val_loaders.items():
            batch_errors = []
            for im, label in loader:
                with torch.no_grad():
                    targets = label.cuda()
                    err = self.loss_fn(self._predict_batch(im), targets)
                    batch_errors.append(err.item())
            avg_err = np.mean(batch_errors)
            logging.info(f'    {self.name}.{domain}: {avg_err}')
            results[f'{self.name}.{domain}'] = avg_err
        self.net.train()
        return results