Example #1
    def __init__(
        self,
        name: str,
        window_size: int = 50,
        show_local_avg: bool = True,
        fmt: str = "{:f}",
        show_avg: Optional[bool] = None,
        use_running_avg: Optional[bool] = None,
    ):
        if show_avg is not None:
            msg = build_deprecation_message(
                "The parameter show_avg",
                "0.4.0",
                info="The average is now always shown.",
            )
            if show_avg:
                warnings.warn(msg)
            else:
                raise RuntimeError(msg)

        if use_running_avg is not None:
            msg = build_deprecation_message(
                "The parameter use_running_avg",
                "0.4.0",
                info="It was renamed to show_local_avg.",
            )
            warnings.warn(msg)
            show_local_avg = use_running_avg

        super().__init__(name=name, window_size=window_size)
        self.show_local_avg = show_local_avg
        self.fmt = fmt
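
Every example in this listing relies on build_deprecation_message, which is defined elsewhere in pystiche and not shown here. Judging only from how it is called (a description of the deprecated feature, a version string, and an optional info hint about the replacement), a minimal sketch of such a helper could look like the following; the exact wording and signature of the real pystiche helper may differ.

from typing import Optional


def build_deprecation_message(
    description: str, version: str, info: Optional[str] = None
) -> str:
    # Hypothetical sketch only; the real pystiche helper may word the message
    # differently or take additional arguments.
    msg = (
        f"{description} is deprecated since version {version} "
        "and will be removed in a future release."
    )
    if info is not None:
        msg += f" {info}"
    return msg
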
Example #2
def batch_gram_matrix(x: torch.Tensor,
                      normalize: bool = False) -> torch.Tensor:
    msg = build_deprecation_message("The function batch_gram_matrix",
                                    "0.4.0",
                                    info="It was renamed to gram_matrix")
    warnings.warn(msg, UserWarning)
    return gram_matrix(x, normalize=normalize)
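
The shim above only warns and then delegates, so existing call sites keep working. A small, self-contained sketch (independent of pystiche) of how a caller or a test suite could escalate such a deprecation warning into a hard error to find remaining usages:

import warnings

with warnings.catch_warnings():
    # Turn UserWarning into an exception for the duration of this block,
    # e.g. while running a test suite.
    warnings.simplefilter("error", UserWarning)
    try:
        warnings.warn("batch_gram_matrix was renamed to gram_matrix", UserWarning)
    except UserWarning as warning:
        print(f"caught: {warning}")
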
Example #3
def _pil_resize(
    image: Image.Image,
    size: Union[int, Tuple[int, int]],
    interpolation_mode: str,
    **kwargs: Any,
) -> Image.Image:
    if is_image_size(size):
        size = cast(Tuple[int, int], size)
        height, width = size
    elif is_edge_size(size):
        size = cast(int, size)
        height, width = edge_to_image_size(
            size, calculate_aspect_ratio((image.height, image.width)))
    else:
        raise RuntimeError

    if kwargs:
        msg = build_deprecation_message(
            "Passing additional parameters via **resize_kwargs",
            "0.4.0",
            info="The keyword arguments are ignored.",
        )
        warnings.warn(msg)

    return image.resize((width, height),
                        resample=_PIL_RESAMPLE_MAP[interpolation_mode])
Example #4
    def __init__(self, *args: Any, **kwargs: Any) -> None:
        msg = build_deprecation_message(
            "The class GuidedPerceptualLoss",
            "1.0",
            info="pystiche.loss.PerceptualLoss now also handles guided inputs and targets",
        )
        warnings.warn(msg)
        super().__init__(*args, **kwargs)
Example #5
    def clear_cache(self) -> None:
        msg = build_deprecation_message(
            "The method clear_cache()",
            "0.4.0",
            info="It was renamed to empty_storage().",
        )
        warnings.warn(msg)
        self.empty_storage()
Example #6
    def __init__(self, *args: Any, **kwargs: Any) -> None:
        msg = build_deprecation_message(
            "The class MSEEncodingOperator",
            "0.4.0",
            info="It was renamed to FeatureReconstructionOperator.",
        )
        warnings.warn(msg)
        super().__init__(*args, **kwargs)
Example #7
    def __init__(self, *args: Any, **kwargs: Any) -> None:
        msg = build_deprecation_message(
            "The class AlexNetEncoder",
            "0.4.0",
            info="It was replaced by MultiLayerAlexNetEncoder.",
        )
        warnings.warn(msg)
        super().__init__(*args, **kwargs)
Example #8
def _vgg_encoder(arch: str, **kwargs: Any) -> VGGMultiLayerEncoder:
    msg = build_deprecation_message(
        f"The function {arch}_encoder",
        "0.4.0",
        info=f"It was replaced by {arch}_multi_layer_encoder.",
    )
    warnings.warn(msg)
    return VGGMultiLayerEncoder(arch, **kwargs)
Example #9
def get_preprocessor(framework: str) -> nn.Module:
    msg = build_deprecation_message(
        "The function 'get_preprocessor'",
        "1.0",
        info="It was renamed to 'preprocessing'.",
    )
    warnings.warn(msg)
    return preprocessing(framework)
Example #10
def alexnet_encoder(*args: Any, **kwargs: Any) -> MultiLayerAlexNetEncoder:
    msg = build_deprecation_message(
        "The function alexnet_encoder",
        "0.4.0",
        info="It was replaced by alexnet_multi_layer_encoder.",
    )
    warnings.warn(msg)
    return alexnet_multi_layer_encoder(*args, **kwargs)
Example #11
def default_epoch_header_fn(*args: Any, **kwargs: Any) -> str:
    msg = build_deprecation_message(
        "The function default_epoch_header_fn",
        "0.4.0",
        info="It was renamed to default_epoch_header",
    )
    warnings.warn(msg)
    return default_epoch_header(*args, **kwargs)
Example #12
    def __init__(self, weights: Optional[str] = None, **kwargs: Any) -> None:
        if weights is not None:
            msg = build_deprecation_message("The parameter weights",
                                            "0.6.0",
                                            info="It was renamed to framework")
            warnings.warn(msg, UserWarning)
            kwargs["framework"] = weights

        super().__init__(**kwargs)
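
The same renamed-parameter shim appears again in Example #28. A condensed, runnable illustration of the pattern with a dummy class (Encoder here is a stand-in for illustration, not a pystiche class):

import warnings
from typing import Optional


class Encoder:
    # Stand-in class that only demonstrates the weights -> framework shim.
    def __init__(
        self, weights: Optional[str] = None, framework: str = "torch"
    ) -> None:
        if weights is not None:
            warnings.warn("The parameter weights was renamed to framework")
            framework = weights
        self.framework = framework


print(Encoder(weights="caffe").framework)  # prints "caffe", but warns
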
Example #13
    def __getitem__(self, layer: str) -> "SingleLayerEncoder":
        msg = build_deprecation_message(
            "Extracting an Encoder with bracket indexing",
            "0.4.0",
            info=("To extract a single layer encoder, use "
                  "MultiLayerEncoder.extract_encoder() instead."),
        )
        warnings.warn(msg)
        return self.extract_encoder(layer)
Example #14
    def __delitem__(self, item: Union[str, int]) -> None:
        msg = build_deprecation_message(
            "Deleting modules via bracket indexing", "0.4.0")
        warnings.warn(msg)
        if isinstance(item, str):
            del self._modules[item]
        elif isinstance(item, int):
            del self[self._get_child_name_by_idx(item)]
        else:
            raise TypeError
Example #15
    def encode(self, input_image: torch.Tensor) -> None:
        msg = build_deprecation_message(
            "The method 'encode'",
            "1.0",
            info=(
                "It is no longer needed to pre-encode the input. "
                "See https://github.com/pmeier/pystiche/issues/435 for details"
            ),
        )
        warnings.warn(msg)
Example #16
    def __init__(self, criterion: nn.Module) -> None:
        msg = build_deprecation_message("The class MLEHandler", "1.0")
        warnings.warn(msg)
        self.multi_layer_encoders = {
            loss.encoder.multi_layer_encoder
            for loss in criterion.modules()
            if isinstance(loss, Loss)
            and not isinstance(loss, LossContainer)
            and isinstance(loss.encoder, enc.SingleLayerEncoder)
        }
Example #17
    def __init__(
        self, named_ops: Sequence[Tuple[str, Loss]], trim: bool = True
    ) -> None:
        msg = build_deprecation_message("The class MultiOperatorLoss", "1.0")
        warnings.warn(msg)
        super().__init__(named_children=named_ops)
        self._mle_handler = MLEHandler(self)

        if trim:
            self._mle_handler.trim()
Example #18
    def __init__(self,
                 name: str = "time",
                 fmt: str = "{:3.1f}",
                 **kwargs: Any) -> None:
        msg = build_deprecation_message(
            "The class TimeMeter",
            "0.4.0",
            info="Please use AverageMeter instead.")
        warnings.warn(msg)
        super().__init__(name, fmt=fmt, **kwargs)
Example #19
def patch_matching_loss(
    input: torch.Tensor,
    target: torch.Tensor,
    eps: float = 1e-8,
    reduction: str = "mean",
) -> torch.Tensor:
    msg = build_deprecation_message(
        "The function patch_matching_loss", "0.4.0", info="It was renamed to mrf_loss"
    )
    warnings.warn(msg, UserWarning)
    return mrf_loss(input, target, eps=eps, reduction=reduction)
Example #20
    def __init__(
        self,
        content_loss: MSEEncodingOperator,
        style_loss: GatysEtAl2017StyleLoss,
    ):
        msg = build_deprecation_message(
            "The class GatysEtAl2017GuidedPerceptualLoss",
            "0.4.0",
            info="It can be replaced by pystiche.loss.PerceptualLoss.",
        )
        warnings.warn(msg)
        super().__init__(content_loss, style_loss)
Example #21
    def __init__(self,
                 *named_ops: Sequence[Tuple[str, Operator]],
                 trim: bool = True) -> None:
        info = (
            "Please construct a MultiOperatorLoss with a sequence of named operators."
        )
        named_children: Optional[Sequence[Tuple[str, Operator]]]
        indexed_children: Optional[Sequence[nn.Module]]
        if len(named_ops) == 1:
            dict_or_seq = named_ops[0]
            if isinstance(dict_or_seq, dict):
                named_children = tuple(
                    cast(Dict[str, Operator], dict_or_seq).items())
                msg = build_deprecation_message(
                    "Passing named_ops as dictionary",
                    "0.4.0",
                    info=info,
                )
                warnings.warn(msg)
            else:
                named_children = named_ops[0]
            indexed_children = None
        else:
            msg = build_deprecation_message(
                "Passing a variable number of unnamed operators via *args",
                "0.4.0",
                info=info,
            )
            warnings.warn(msg)
            named_children = None
            indexed_children = cast(Tuple[nn.Module, ...], named_ops)

        super().__init__(named_children=named_children,
                         indexed_children=indexed_children)

        self._multi_layer_encoders = self._collect_multi_layer_encoders()

        if trim:
            for encoder in self._multi_layer_encoders:
                encoder.trim()
Example #22
    def __init__(
        self,
        encoder: Encoder,
        patch_size: Union[int, Sequence[int]],
        stride: Union[int, Sequence[int]] = 1,
        target_transforms: Optional[Iterable[Transform]] = None,
        score_weight: float = 1.0,
        num_scale_steps: Optional[int] = None,
        scale_step_width: Optional[float] = None,
        num_rotation_steps: Optional[int] = None,
        rotation_step_width: Optional[float] = None,
    ):
        if any(
            [
                arg is not None
                for arg in (
                    num_scale_steps,
                    scale_step_width,
                    num_rotation_steps,
                    rotation_step_width,
                )
            ]
        ):
            msg = build_deprecation_message(
                (
                    "Parametrizing target transformations with any of "
                    "num_scale_steps, scale_step_width, num_rotation_steps, or "
                    "rotation_step_width through the constructor of MRFOperator"
                ),
                "0.4.0",
                info=(
                    "Please provide an iterable of transformations via the parameter "
                    "target_transforms. You can retain the old functionality with "
                    "MRFOperator.rotate_and_scale_transforms()."
                ),
            )
            warnings.warn(msg, UserWarning)
            target_transforms = self.scale_and_rotate_transforms(
                num_scale_steps=0 if num_scale_steps is None else num_scale_steps,
                scale_step_width=5e-2 if scale_step_width is None else scale_step_width,
                num_rotate_steps=0
                if num_rotation_steps is None
                else num_rotation_steps,
                rotate_step_width=10.0
                if rotation_step_width is None
                else rotation_step_width,
            )

        super().__init__(encoder, score_weight=score_weight)
        self.patch_size = to_2d_arg(patch_size)
        self.stride = to_2d_arg(stride)
        self.target_transforms = target_transforms
Example #23
def deprecation(fn: Callable) -> Callable:
    name = f"{fn.__name__}()"
    msg = build_deprecation_message(
        f"The function pystiche.{name}",
        "0.4.0",
        info=f"It was moved to pystiche.meta.{name}.",
    )

    def wrapper(*args: Any, **kwargs: Any) -> Any:
        warnings.warn(msg)
        return fn(*args, **kwargs)

    return wrapper
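
A runnable sketch of how such a decorator is applied; the names below are dummies, not pystiche code. Note that, unlike the wrapper in Example #29, the decorator above does not use functools.wraps, so the wrapped function's __name__ and docstring are not preserved:

import warnings
from typing import Any, Callable


def deprecated_alias(fn: Callable) -> Callable:
    # Hypothetical decorator following the same pattern as above.
    def wrapper(*args: Any, **kwargs: Any) -> Any:
        warnings.warn(f"{fn.__name__}() is deprecated")
        return fn(*args, **kwargs)

    return wrapper


@deprecated_alias
def old_helper() -> int:
    return 42


print(old_helper())         # warns, then prints 42
print(old_helper.__name__)  # prints "wrapper": functools.wraps is missing
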
Example #24
    def __init__(self, *args: Any, **kwargs: Any) -> None:
        msg = build_deprecation_message(
            "The class 'Unsupervised'",
            "0.7",
            info="See https://github.com/pmeier/pystiche/issues/458 for details",
        )
        warnings.warn(msg)
        if not isinstance(self, VisionDataset):
            raise RuntimeError
        # This is meant to be used via multiple inheritance together with a
        # torchvision.datasets.VisionDataset.
        super().__init__(*args, **kwargs)  # type: ignore[call-arg]
Example #25
    def add_named_modules(
        self, modules: Sequence[Tuple[str, nn.Module]]
    ) -> None:
        if isinstance(modules, dict):
            msg = build_deprecation_message(
                "Adding named_modules from a dictionary",
                "0.4",
                info=("To achieve the same behavior you can pass "
                      "tuple(modules.items()) instead."),
            )
            warnings.warn(msg)
            modules = tuple(modules.items())
        for name, module in modules:
            self.add_module(name, module)
Example #26
    def __init__(
        self,
        content_loss: MSEEncodingOperator,
        style_loss: MultiLayerEncodingOperator,
        regularization: TotalVariationOperator,
    ) -> None:
        msg = build_deprecation_message(
            "The class JohnsonAlahiLi2016PerceptualLoss",
            "0.4.0",
            info="It can be replaced by pystiche.loss.PerceptualLoss.",
        )
        warnings.warn(msg)
        super().__init__(content_loss, style_loss, regularization=regularization)
Example #27
    def __getitem__(self, item: Union[str, int]) -> nn.Module:
        msg = build_deprecation_message(
            "Dynamic access to the modules via bracket indexing",
            "0.4.0",
            info="If you need dynamic access to the operators, use getattr() instead.",
        )
        warnings.warn(msg)
        if isinstance(item, str):
            return self._modules[item]
        elif isinstance(item, int):
            return self[self._get_child_name_by_idx(item)]
        else:
            raise TypeError
Example #28
def alexnet_multi_layer_encoder(
    weights: str = "torch",
    preprocessing: Optional[bool] = None,
    internal_preprocessing: bool = True,
    allow_inplace: bool = False,
) -> MultiLayerAlexNetEncoder:
    if preprocessing is not None:
        msg = build_deprecation_message(
            "The parameter preprocessing",
            "0.4.0",
            info="It was replaced by internal_preprocessing.",
        )
        warnings.warn(msg)
        internal_preprocessing = preprocessing
    return MultiLayerAlexNetEncoder(weights, internal_preprocessing,
                                    allow_inplace)
Example #29
def _deprecate(fn: Callable) -> Callable:
    name = fn.__name__
    msg = build_deprecation_message(
        f"The function ops.functional.{name}",
        "1.0",
        info=(
            f"It was moved to loss.functional.{name}. "
            f"See https://github.com/pystiche/pystiche/issues/436 for details"
        ),
    )

    @functools.wraps(fn)
    def wrapper(*args, **kwargs):
        warnings.warn(msg)
        return fn(*args, **kwargs)

    return wrapper
Example #30
def __op_init__(self, *args: Any, __old_name__: str, __new_name__: str,
                **kwargs: Any) -> None:
    msg = build_deprecation_message(
        f"The class pystiche.ops.{__old_name__}",
        "1.0",
        info=(
            f"It was renamed and moved to pystiche.loss.{__new_name__}. "
            f"See https://github.com/pystiche/pystiche/issues/436 for details"
        ),
    )
    warnings.warn(msg)
    for super_cls in type(self).__mro__:
        if _PATTERN.match(str(super_cls)):
            break
    else:
        raise RuntimeError
    super_cls.__init__(self, *args, **kwargs)
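
This listing shows only the shared __init__; how the deprecated alias classes are generated and bound to it is not included. One hypothetical way to wire such an __init__ into generated alias classes with functools.partialmethod, sketched with stand-in names (FeatureReconstructionLoss and _PATTERN below are placeholders, not the real pystiche objects):

import functools
import re
import warnings
from typing import Any


class FeatureReconstructionLoss:
    # Stand-in for a current loss class.
    def __init__(self, score_weight: float = 1.0) -> None:
        self.score_weight = score_weight


# Placeholder pattern that identifies the "new" base class in the MRO.
_PATTERN = re.compile(r"FeatureReconstructionLoss")


def __op_init__(
    self: Any, *args: Any, __old_name__: str, __new_name__: str, **kwargs: Any
) -> None:
    warnings.warn(f"{__old_name__} was renamed to {__new_name__}")
    for super_cls in type(self).__mro__:
        if _PATTERN.match(super_cls.__name__):
            break
    else:
        raise RuntimeError
    super_cls.__init__(self, *args, **kwargs)


# Generate the deprecated alias class and bind __op_init__ to it.
FeatureReconstructionOperator = type(
    "FeatureReconstructionOperator",
    (FeatureReconstructionLoss,),
    {
        "__init__": functools.partialmethod(
            __op_init__,
            __old_name__="FeatureReconstructionOperator",
            __new_name__="FeatureReconstructionLoss",
        )
    },
)

op = FeatureReconstructionOperator(score_weight=2.0)  # warns about the rename
print(op.score_weight)
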