Example no. 1
    def _init_encoders(self) -> Tuple[MultiCropWrapper, MultiCropWrapper]:
        # create the encoders
        head = self.head
        needs_head = self.head is None
        if isinstance(self.backbone, ResNetArch):
            student_backbone = self.backbone.value(num_classes=self.out_dim)
            self.embed_dim = student_backbone.fc.weight.shape[1]
            if needs_head:
                head = student_backbone.fc
            student_backbone.fc = nn.Identity()
        else:
            student_backbone = self.backbone
            # Resort to computing embed_dim via the brute-force approach of passing in a dummy input.
            embed_dim_t = student_backbone(
                torch.zeros(1, *self.datamodule.size)).squeeze(0)
            # If the backbone does not produce a 1-dimensional embedding, add a flattening layer.
            if embed_dim_t.ndim > 1:
                student_backbone = nn.Sequential(student_backbone,
                                                 nn.Flatten())
            self.embed_dim = embed_dim_t.numel()
            if needs_head:
                head = nn.Linear(self.embed_dim, self.out_dim)
        if needs_head and self.use_mlp:
            head = nn.Sequential(nn.Linear(self.embed_dim, self.embed_dim),
                                 nn.ReLU(), head)  # type: ignore

        student = MultiCropWrapper(backbone=student_backbone, head=head)
        teacher = gcopy(student, deep=True)

        return student, teacher
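
The dummy-forward trick above (inferring the embedding dimension by pushing a zero tensor through the backbone) generalises to any feature extractor. A minimal, self-contained sketch of the idea; the backbone and input size here are purely illustrative and not part of the original code:

import torch
from torch import nn

# Toy backbone and input size, chosen only for illustration.
backbone = nn.Sequential(nn.Conv2d(3, 8, kernel_size=3), nn.AdaptiveAvgPool2d(1))
dummy_out = backbone(torch.zeros(1, 3, 32, 32)).squeeze(0)
if dummy_out.ndim > 1:
    # Non-vector output: append a flattening layer, as _init_encoders does.
    backbone = nn.Sequential(backbone, nn.Flatten())
embed_dim = dummy_out.numel()  # 8 for this toy backbone
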
Example no. 2
    def __add__(self, other: Self) -> Self:
        if type(self.x) != type(other.x) or (
            isinstance(self.x, list) and type(self.x[0]) != type(cast(List, other.x)[0])
        ):
            raise AttributeError(
                f"Only {self.__class__.__name__} instances with 'x' attributes of "
                "the same type can be concatenated (added) together."
            )
        copy = gcopy(self, deep=False)
        if isinstance(self.x, (Tensor, np.ndarray)):
            other.x = cast(Union[Tensor, np.ndarray], other.x)
            if self.x.shape != other.x.shape:
                raise AttributeError(
                    f"Only {self.__class__.__name__} instances with 'x' attributes of "
                    "the same shape can be concatenated (added) together: the lhs variable has "
                    f"'x' of shape '{self.x.shape}', the rhs variable 'x' of shape "
                    f"'{other.x.shape}.'"
                )
        if isinstance(copy.x, Tensor):
            other.x = cast(Tensor, other.x)
            copy.x = torch.cat([copy.x, other.x], dim=0)
        elif isinstance(copy.x, np.ndarray):
            other.x = cast(np.ndarray, other.x)
            copy.x = np.concatenate([copy.x, other.x], axis=0)
        elif isinstance(copy.x, Image.Image):
            other.x = cast(Image.Image, other.x)
            copy.x = [copy.x, other.x]
        else:
            copy.x = copy.x + other.x  # type: ignore

        return copy
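
A rough usage sketch of the concatenation behaviour above. The `_Sample` dataclass is a hypothetical stand-in for the real sample class, reduced to the Tensor branch only:

from dataclasses import dataclass

import torch
from torch import Tensor


@dataclass
class _Sample:
    # Hypothetical stand-in for the real sample class, for illustration only.
    x: Tensor

    def __add__(self, other: "_Sample") -> "_Sample":
        # Mirror the Tensor branch above: concatenate along the batch dimension.
        return _Sample(x=torch.cat([self.x, other.x], dim=0))


merged = _Sample(x=torch.zeros(4, 3)) + _Sample(x=torch.ones(4, 3))
assert merged.x.shape == (8, 3)
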
Example no. 3
 @property
 def ft_trainer(self) -> pl.Trainer:
     if self._finetuner is None:
         self._finetuner = gcopy(self.trainer,
                                 deep=True,
                                 num_sanity_val_batches=0)
         self._finetuner.fit_loop.max_epochs = self.eval_epochs
         self._finetuner.fit_loop.max_steps = None  # type: ignore
         self._finetuner.logger = None  # type: ignore
         bar = PostHocProgressBar()
         bar._trainer = self._finetuner
         self._finetuner.callbacks = [bar]
     return self._finetuner
Example no. 4
 def build(self,
           datamodule: CdtDataModule,
           *,
           trainer: pl.Trainer,
           copy: bool = False) -> None:
     if copy:
         datamodule = gcopy(datamodule, deep=True)
         trainer = gcopy(trainer, deep=True)
     self._datamodule = datamodule
     self._trainer = trainer
     self._build()
     # Retrieve all child models (attributes inheriting from CdtModel)
     children = cast(
         List[Tuple[str, CdtModel]],
         inspect.getmembers(self, lambda m: isinstance(m, CdtModel)),
     )
     # Build all child models
     for _, child in children:
         child.build(datamodule=self.datamodule,
                     trainer=self.trainer,
                     copy=False)
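
The child-discovery step above relies on `inspect.getmembers` with a predicate, which collects every attribute that is an instance of a given base class. A minimal illustration with made-up class names (not part of the original code):

import inspect


class Model:
    # Made-up base class standing in for CdtModel.
    def __init__(self, name: str) -> None:
        self.name = name


class Composite:
    def __init__(self) -> None:
        self.encoder = Model("encoder")
        self.head = Model("head")
        self.lr = 1e-3  # not a Model, so the predicate filters it out


children = inspect.getmembers(Composite(), lambda m: isinstance(m, Model))
print([name for name, _ in children])  # ['encoder', 'head']
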
Example no. 5
def make_subset(
    dataset: Union[PCD, Subset[PCD]],
    *,
    indices: Optional[Union[List[int], npt.NDArray[np.uint64], Tensor, slice]],
    deep: bool = False,
) -> PCD:
    """Create a subset of the dataset from the given indices.

    :param indices: The sample-indices from which to create the subset.
    In the case of being a numpy array or tensor, said array or tensor
    must be 0- or 1-dimensional.

    :param deep: Whether to create a copy of the underlying dataset as
    a basis for the subset. If False then the data of the subset will be
    a view of original dataset's data.

    :returns: A subset of the dataset from the given indices.
    """
    if isinstance(indices, (np.ndarray, Tensor)):
        if indices.ndim > 1:
            raise ValueError("If 'indices' is an array it must be a 0- or 1-dimensional.")
        indices = cast(List[int], indices.tolist())

    current_indices = None
    if isinstance(dataset, Subset):
        base_dataset, current_indices = extract_base_dataset(dataset, return_subset_indices=True)
        if not isinstance(base_dataset, CdtDataset):
            raise TypeError(
                f"Subsets can only be created from {CdtDataset.__name__} instances or PyTorch "
                "Subsets of them."
            )
        base_dataset = cast(PCD, base_dataset)

        if isinstance(current_indices, Tensor):
            current_indices = current_indices.tolist()
    else:
        base_dataset = dataset
    subset = gcopy(base_dataset, deep=deep)

    def _subset_from_indices(_dataset: PCD, _indices: Union[List[int], slice]) -> PCD:
        _dataset.x = _dataset.x[_indices]
        if _dataset.y is not None:
            _dataset.y = _dataset.y[_indices]
        if _dataset.s is not None:
            _dataset.s = _dataset.s[_indices]
        return _dataset

    if current_indices is not None:
        subset = _subset_from_indices(_dataset=subset, _indices=current_indices)
    if indices is not None:
        subset = _subset_from_indices(_dataset=subset, _indices=indices)

    return subset
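
The core of `make_subset` is the indexing step in `_subset_from_indices`: `x`, `y` and `s` are all indexed with the same list or slice. A toy illustration of that step, using a hypothetical stand-in for CdtDataset rather than the real class:

import torch


class _TinyDataset:
    # Hypothetical stand-in for CdtDataset, for illustration only.
    def __init__(self) -> None:
        self.x = torch.arange(10, dtype=torch.float32).view(10, 1)
        self.y = torch.arange(10)
        self.s = None


ds = _TinyDataset()
keep = [0, 2, 4]
ds.x, ds.y = ds.x[keep], ds.y[keep]  # s is None here, so it is left untouched
print(ds.x.squeeze(1).tolist())  # [0.0, 2.0, 4.0]
print(ds.y.tolist())             # [0, 2, 4]
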
Example no. 6
    def _finetune(self) -> None:
        dm_cp = gcopy(
            self.datamodule,
            deep=False,
            stratified_sampling=False,
            training_mode=TrainingMode.epoch,
        )
        if isinstance(dm_cp, CdtVisionDataModule):
            dm_cp.train_transforms = self._ft_transform
        if self.eval_batch_size is not None:
            dm_cp.train_batch_size = self.eval_batch_size

        self.ft_trainer.fit(
            self.ft_clf,
            train_dataloaders=dm_cp.train_dataloader(),
        )
 def __add__(self, other: Self) -> Self:
     copy = gcopy(self, deep=False)
     copy.global_crops = copy.global_crops + other.global_crops
     copy.local_crops = copy.local_crops + other.local_crops
     return copy
 def __truediv__(self, value: float) -> Self:
     copy = gcopy(self, deep=True)
     copy *= 1 / value
     return copy
 def __mul__(self, value: float) -> Self:
     copy = gcopy(self, deep=True)
     copy *= value
     return copy
 def astuple(self, deep: bool = False) -> Tuple[X, ...]:
     tuple_ = tuple(iter(self))
     if deep:
         tuple_ = gcopy(tuple_, deep=True)
     return tuple_
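
All of the snippets above lean on `gcopy` from the ranzen library to clone an object while overriding selected attributes on the copy. For readers without the library, a rough approximation of that behaviour (a sketch, not ranzen's actual implementation) looks like this:

import copy
from typing import Any, TypeVar

T = TypeVar("T")


def gcopy_sketch(obj: T, *, deep: bool = True, **overrides: Any) -> T:
    # Illustrative approximation only; see the ranzen docs for the real gcopy.
    new = copy.deepcopy(obj) if deep else copy.copy(obj)
    for name, value in overrides.items():
        # e.g. gcopy_sketch(trainer, deep=True, num_sanity_val_batches=0)
        setattr(new, name, value)
    return new
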