Example 1
    def __init__(
        self,
        model: MODEL_TYPE = None,
        loss_fn: LOSS_FN_TYPE = None,
        learning_rate: Optional[float] = None,
        optimizer: OPTIMIZER_TYPE = "Adam",
        lr_scheduler: LR_SCHEDULER_TYPE = None,
        metrics: METRICS_TYPE = None,
        output_transform: OUTPUT_TRANSFORM_TYPE = None,
    ):
        super().__init__()
        if model is not None:
            self.model = model
        self.loss_fn = {} if loss_fn is None else get_callable_dict(loss_fn)
        self.optimizer = optimizer
        self.lr_scheduler = lr_scheduler

        self.train_metrics = nn.ModuleDict(
            {} if metrics is None else get_callable_dict(metrics))
        self.val_metrics = nn.ModuleDict(
            {} if metrics is None else get_callable_dict(deepcopy(metrics)))
        self.test_metrics = nn.ModuleDict(
            {} if metrics is None else get_callable_dict(deepcopy(metrics)))
        self.learning_rate = learning_rate
        # TODO: should we save more? Saving metrics triggers a YAML serialization bug on some setups
        self.save_hyperparameters(
            "learning_rate",
            "optimizer",
            ignore=["model", "backbone", "head", "adapter"])

        self._output_transform: Optional[OutputTransform] = output_transform
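
`get_callable_dict` appears throughout these snippets, and the tests in Examples 2 and 3 below pin down its contract. The following is an assumed reconstruction for readability, not the library's actual source:

from collections.abc import Mapping, Sequence
from typing import Callable, Dict, Union


def _name_of(fn: Callable) -> str:
    # Plain functions expose __name__; class instances fall back to their
    # lowercased class name, so an instance of A is keyed as "a".
    return getattr(fn, "__name__", type(fn).__name__).lower()


def get_callable_dict(fn: Union[Callable, Mapping, Sequence]) -> Dict[str, Callable]:
    # Normalize a single callable, a sequence of callables, or a mapping
    # into a {name: callable} dict.
    if isinstance(fn, Mapping):
        return dict(fn)
    if isinstance(fn, Sequence):
        return {_name_of(f): f for f in fn}
    return {_name_of(fn): fn}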
Example 2
def test_get_callable_dict():
    d = get_callable_dict(A())
    assert type(d["a"]) is A

    d = get_callable_dict([A(), b])
    assert type(d["a"]) is A
    assert d["b"] == b

    d = get_callable_dict({"one": A(), "two": b})
    assert type(d["one"]) is A
    assert d["two"] == b
Example 3
def test_get_callable_dict():
    d = get_callable_dict(A())
    assert type(d["a"]) == A

    d = get_callable_dict([A(), b])
    assert type(d["a"]) == A
    assert d["b"] == b

    d = get_callable_dict({"one": A(), "two": b, "three": c})
    assert type(d["one"]) == A
    assert d["two"] == b
    assert d["three"] == c
Example 4
    def __init__(
        self,
        model: Optional[nn.Module] = None,
        loss_fn: Optional[Union[Callable, Mapping, Sequence]] = None,
        optimizer: Union[Type[torch.optim.Optimizer],
                         torch.optim.Optimizer] = torch.optim.Adam,
        optimizer_kwargs: Optional[Dict[str, Any]] = None,
        scheduler: Optional[Union[Type[_LRScheduler], str,
                                  _LRScheduler]] = None,
        scheduler_kwargs: Optional[Dict[str, Any]] = None,
        metrics: Union[torchmetrics.Metric, Mapping, Sequence, None] = None,
        learning_rate: float = 5e-5,
        deserializer: Optional[Union[Deserializer,
                                     Mapping[str, Deserializer]]] = None,
        preprocess: Optional[Preprocess] = None,
        postprocess: Optional[Postprocess] = None,
        serializer: Optional[Union[Serializer, Mapping[str,
                                                       Serializer]]] = None,
    ):
        super().__init__()
        if model is not None:
            self.model = model
        self.loss_fn = {} if loss_fn is None else get_callable_dict(loss_fn)
        self.optimizer = optimizer
        self.scheduler = scheduler
        self.optimizer_kwargs = optimizer_kwargs or {}
        self.scheduler_kwargs = scheduler_kwargs or {}

        self.metrics = nn.ModuleDict(
            {} if metrics is None else get_callable_dict(metrics))
        self.learning_rate = learning_rate
        # TODO: should we save more? Saving metrics triggers a YAML serialization bug on some setups
        self.save_hyperparameters("learning_rate", "optimizer")

        self._deserializer: Optional[Deserializer] = None
        self._preprocess: Optional[Preprocess] = preprocess
        self._postprocess: Optional[Postprocess] = postprocess
        self._serializer: Optional[Serializer] = None

        # TODO: create enum values to define what are the exact states
        self._data_pipeline_state: Optional[DataPipelineState] = None

        # Explicitly set the serializer to call the setter
        self.deserializer = deserializer
        self.serializer = serializer
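
Example 4 stores `optimizer`, `optimizer_kwargs`, `scheduler`, and `scheduler_kwargs` but never shows where they are consumed. Below is a minimal sketch of a `configure_optimizers` that would use them; it is an assumption for illustration (the real Lightning Flash implementation also resolves the `str` scheduler form through a registry, omitted here), and it assumes `import torch` at module level:

    def configure_optimizers(self):
        optimizer = self.optimizer
        if isinstance(optimizer, type) and issubclass(optimizer, torch.optim.Optimizer):
            # An optimizer class was passed: instantiate it over the
            # trainable parameters with the stored kwargs.
            optimizer = optimizer(
                (p for p in self.parameters() if p.requires_grad),
                lr=self.learning_rate,
                **self.optimizer_kwargs,
            )
        if self.scheduler is None:
            return optimizer
        scheduler = self.scheduler
        if isinstance(scheduler, type):
            # A scheduler class was passed: bind it to the optimizer.
            # (The str form would need a registry lookup, omitted here.)
            scheduler = scheduler(optimizer, **self.scheduler_kwargs)
        return [optimizer], [scheduler]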
Example 5
    def __init__(
        self,
        num_classes: int,
        backbone: Union[str, Tuple[nn.Module, int]] = "pointpillars_kitti",
        backbone_kwargs: Optional[Dict] = None,
        head: Optional[nn.Module] = None,
        loss_fn: Optional[Callable] = None,
        optimizer: Union[Type[torch.optim.Optimizer],
                         torch.optim.Optimizer] = torch.optim.Adam,
        optimizer_kwargs: Optional[Dict[str, Any]] = None,
        scheduler: Optional[Union[Type[_LRScheduler], str,
                                  _LRScheduler]] = None,
        scheduler_kwargs: Optional[Dict[str, Any]] = None,
        metrics: Union[torchmetrics.Metric, Mapping, Sequence, None] = None,
        learning_rate: float = 1e-2,
        serializer: Optional[Union[Serializer, Mapping[
            str, Serializer]]] = PointCloudObjectDetectorSerializer(),
        lambda_loss_cls: float = 1.0,
        lambda_loss_bbox: float = 1.0,
        lambda_loss_dir: float = 1.0,
    ):

        super().__init__(
            model=None,
            loss_fn=loss_fn,
            optimizer=optimizer,
            optimizer_kwargs=optimizer_kwargs,
            scheduler=scheduler,
            scheduler_kwargs=scheduler_kwargs,
            metrics=metrics,
            learning_rate=learning_rate,
            serializer=serializer,
        )

        self.save_hyperparameters()

        if backbone_kwargs is None:
            backbone_kwargs = {}

        if isinstance(backbone, tuple):
            self.backbone, out_features = backbone
        else:
            self.model, out_features, collate_fn = self.backbones.get(
                backbone)(**backbone_kwargs)
            self.backbone = self.model.backbone
            self.neck = self.model.neck
            self.set_state(CollateFn(collate_fn))
            self.loss_fn = get_callable_dict(self.model.loss)

        if __FILE_EXAMPLE__ not in sys.argv[0]:
            self.model.bbox_head.conv_cls = self.head = nn.Conv2d(
                out_features, num_classes, kernel_size=(1, 1), stride=(1, 1))
Example 6
    def __init__(
        self,
        num_classes: int,
        backbone: Union[str, Tuple[nn.Module, int]] = "pointpillars_kitti",
        backbone_kwargs: Optional[Dict] = None,
        loss_fn: LOSS_FN_TYPE = None,
        optimizer: OPTIMIZER_TYPE = "Adam",
        lr_scheduler: LR_SCHEDULER_TYPE = None,
        metrics: METRICS_TYPE = None,
        learning_rate: Optional[float] = None,
        lambda_loss_cls: float = 1.0,
        lambda_loss_bbox: float = 1.0,
        lambda_loss_dir: float = 1.0,
    ):

        super().__init__(
            model=None,
            loss_fn=loss_fn,
            optimizer=optimizer,
            lr_scheduler=lr_scheduler,
            metrics=metrics,
            learning_rate=learning_rate,
        )

        self.save_hyperparameters()

        if backbone_kwargs is None:
            backbone_kwargs = {}

        if isinstance(backbone, tuple):
            self.backbone, out_features = backbone
        else:
            self.model, out_features, collate_fn = self.backbones.get(
                backbone)(**backbone_kwargs)
            self.collate_fn = wrap_collate(collate_fn)
            self.backbone = self.model.backbone
            self.neck = self.model.neck
            self.loss_fn = get_callable_dict(self.model.loss)

        if __FILE_EXAMPLE__ not in sys.argv[0]:
            self.model.bbox_head.conv_cls = self.head = nn.Conv2d(
                out_features, num_classes, kernel_size=(1, 1), stride=(1, 1))
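
For orientation, a hedged usage sketch of the newer constructor from Example 6, assuming the "pointpillars_kitti" backbone is registered and its dependencies are installed:

model = PointCloudObjectDetector(
    num_classes=3,                  # e.g. car / pedestrian / cyclist on KITTI
    backbone="pointpillars_kitti",  # resolved through the backbones registry
    optimizer="Adam",
    learning_rate=1e-3,
)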