def __init__(self, model_name: str):
        super(ImagenetModel, self).__init__()

        self.device = torch.device(
            "cuda" if torch.cuda.is_available() else "cpu")

        # Load the model
        self.model = pretrainedmodels.__dict__[model_name](
            num_classes=1000, pretrained="imagenet")
        self.model = self.model.eval()
        self.model = self.model.to(self.device)

        # Explicitly defining the transformation w/ differentiable transforms from kornia
        self.transform = nn.Sequential(
            geo.Resize(self.model.input_size[-2:]),
            color.Normalize(torch.tensor(self.model.mean),
                            torch.tensor(self.model.std)),
        )

        # Some pretrained models expect BGR input; append a channel swap if needed
        if self.model.input_space == "BGR":
            self.transform = nn.Sequential(self.transform, color.RgbToBgr())

        self.softmax = nn.Softmax(dim=1)
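
# Usage sketch for the constructor above (an assumption, not part of the original snippet):
# the class is taken to be an nn.Module named ImagenetModel whose forward pass is not shown,
# so the attributes set up in __init__ are used directly. "resnet18" is any valid
# `pretrainedmodels` name; the 256x256 input is arbitrary and gets resized by the transform.
import torch

clf = ImagenetModel("resnet18")
x = torch.rand(1, 3, 256, 256, device=clf.device)   # unnormalized RGB batch in [0, 1]
with torch.no_grad():
    probs = clf.softmax(clf.model(clf.transform(x)))  # resize + normalize, classify, softmax
print(probs.argmax(dim=1))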
    def __init__(self, net, image_size, hidden_layer=-2, projection_size=256, projection_hidden_size=4096, augment_fn=None, moving_average_decay=0.99):
        super().__init__()

        # default SimCLR augmentation

        DEFAULT_AUG = nn.Sequential(
            RandomApply(augs.ColorJitter(0.8, 0.8, 0.8, 0.2), p=0.8),
            augs.RandomGrayscale(p=0.2),
            augs.RandomHorizontalFlip(),
            RandomApply(filters.GaussianBlur2d((3, 3), (1.5, 1.5)), p=0.1),
            augs.RandomResizedCrop((image_size, image_size)),
            color.Normalize(mean=torch.tensor([0.485, 0.456, 0.406]),
                            std=torch.tensor([0.229, 0.224, 0.225]))
        )

        self.augment = default(augment_fn, DEFAULT_AUG)

        self.online_encoder = NetWrapper(net, projection_size, projection_hidden_size, layer=hidden_layer)
        self.target_encoder = None
        self.target_ema_updater = EMA(moving_average_decay)

        self.online_predictor = MultiLayerPerceptron(projection_size, projection_size, projection_hidden_size)

        # send a mock image tensor to instantiate singleton parameters
        self.forward(torch.randn(2, 3, image_size, image_size))
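
# Hedged sketch of the exponential moving average presumably implemented by
# EMA(moving_average_decay) above: after each optimizer step, the target encoder's
# parameters track the online encoder via
#     target = decay * target + (1 - decay) * online
# The helper below is illustrative, not the repository's EMA class.
import copy
import torch

@torch.no_grad()
def ema_update(target_net, online_net, decay=0.99):
    for t, o in zip(target_net.parameters(), online_net.parameters()):
        t.mul_(decay).add_(o, alpha=1.0 - decay)

online = torch.nn.Linear(8, 8)
target = copy.deepcopy(online)          # the target starts as a copy of the online network
ema_update(target, online, decay=0.99)  # called once per training step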
Example #3
    def test_gradcheck(self):

        # prepare input data
        data = torch.ones(2, 3, 1, 1)
        data += 2
        mean = torch.tensor([0.5, 1.0, 2.0]).double()
        std = torch.tensor([2., 2., 2.]).double()

        data = utils.tensor_to_gradcheck_var(data)  # to var

        assert gradcheck(color.Normalize(mean, std), (data, ),
                         raise_exception=True)
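
# Hedged equivalent of the gradcheck test above without kornia's test utilities, assuming
# `tensor_to_gradcheck_var` converts the input to double precision with requires_grad=True
# (gradcheck needs both). Uses the same kornia version as the snippets, where Normalize
# lives under kornia.color.
import torch
from torch.autograd import gradcheck
from kornia import color

data = (torch.ones(2, 3, 1, 1).double() + 2.0).requires_grad_(True)
mean = torch.tensor([0.5, 1.0, 2.0]).double()
std = torch.tensor([2.0, 2.0, 2.0]).double()
assert gradcheck(color.Normalize(mean, std), (data,), raise_exception=True)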
Example #4
    def test_normalize(self):

        # prepare input data
        data = torch.ones(1, 2, 2)
        mean = torch.tensor([0.5])
        std = torch.tensor([2.0])

        # expected output
        expected = torch.tensor([0.25]).repeat(1, 2, 2).view_as(data)

        f = color.Normalize(mean, std)
        assert_allclose(f(data), expected)
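
# The expected value above is just the normalization formula (x - mean) / std applied
# element-wise: (1 - 0.5) / 2.0 = 0.25. A plain-torch check of the same arithmetic,
# independent of kornia:
import torch

data = torch.ones(1, 2, 2)
manual = (data - torch.tensor([0.5]).view(-1, 1, 1)) / torch.tensor([2.0]).view(-1, 1, 1)
assert torch.allclose(manual, torch.full_like(data, 0.25))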
Example #5
    def test_batch_normalize(self):

        # prepare input data
        data = torch.ones(2, 3, 1, 1)
        data += 2

        mean = torch.tensor([0.5, 1.0, 2.0]).repeat(2, 1)
        std = torch.tensor([2.0, 2.0, 2.0]).repeat(2, 1)

        # expected output
        expected = torch.tensor([1.25, 1, 0.5]).repeat(2, 1, 1).view_as(data)

        f = color.Normalize(mean, std)
        assert_allclose(f(data), expected)
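
# In the batched case above, mean and std have shape (B, C) = (2, 3), i.e. one set of
# statistics per sample. Reshaping them to (B, C, 1, 1) reproduces the expected output
# by ordinary broadcasting: (3 - [0.5, 1.0, 2.0]) / 2.0 = [1.25, 1.0, 0.5].
import torch

data = torch.full((2, 3, 1, 1), 3.0)
mean = torch.tensor([0.5, 1.0, 2.0]).repeat(2, 1)
std = torch.tensor([2.0, 2.0, 2.0]).repeat(2, 1)
manual = (data - mean.view(2, 3, 1, 1)) / std.view(2, 3, 1, 1)
assert torch.allclose(manual, torch.tensor([1.25, 1.0, 0.5]).repeat(2, 1, 1).view_as(data))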
Example #6
    def __init__(self, model, imageSize, embeddingLayer=-2, projectionDim=256, projectionHiddenDim=4096, emaDecay=0.99):
        super(BYOL, self).__init__()

        # Default SimCLR augmentations
        self.augment = nn.Sequential(
            RandomApply(augmentation.ColorJitter(0.8, 0.8, 0.8, 0.2), p=0.8),
            augmentation.RandomGrayscale(p=0.2),
            augmentation.RandomHorizontalFlip(),
            RandomApply(filters.GaussianBlur2d((3, 3), (1.5, 1.5)), p=0.1),
            augmentation.RandomResizedCrop((imageSize, imageSize)),
            color.Normalize(mean=torch.tensor([0.485, 0.456, 0.406]), std=torch.tensor([0.229, 0.224, 0.225]))
        )

        # Initialize models, predictors and EMA
        self.onlineEncoder = ModelWrapper(model, projectionDim, projectionHiddenDim, embeddingLayer)
        self.onlinePredictor = MLP(projectionDim, projectionDim, projectionHiddenDim)
        self.targetEncoder = copy.deepcopy(self.onlineEncoder)
        self.targetEMA = EMA(emaDecay)
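
# Usage sketch for the BYOL constructor above (hedged: ModelWrapper, MLP and EMA come from
# the surrounding repository and are not shown here). A torchvision backbone is assumed for
# illustration; the augmentation pipeline is applied twice to get two random views per batch.
import torch
import torchvision

backbone = torchvision.models.resnet50()
learner = BYOL(backbone, imageSize=224)
images = torch.rand(4, 3, 224, 224)     # unnormalized RGB batch in [0, 1]
view_one = learner.augment(images)      # two independent random augmentations
view_two = learner.augment(images)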
Example #7
    def test_smoke(self):
        mean = [0.5]
        std = [0.1]
        repr = 'Normalize(mean=[0.5], std=[0.1])'
        assert str(color.Normalize(mean, std)) == repr