Example #1
    def __init__(
        self,
        num_angular_weights: int,
        radial_base_kernel: Kernel,
        eps: Optional[float] = 1e-6,
        angular_weights_prior: Optional[Prior] = None,
        angular_weights_constraint: Optional[Interval] = None,
        alpha_prior: Optional[Prior] = None,
        alpha_constraint: Optional[Interval] = None,
        beta_prior: Optional[Prior] = None,
        beta_constraint: Optional[Interval] = None,
        **kwargs,
    ):
        if angular_weights_constraint is None:
            angular_weights_constraint = Positive()

        if alpha_constraint is None:
            alpha_constraint = Positive()

        if beta_constraint is None:
            beta_constraint = Positive()

        super().__init__(**kwargs)
        self.num_angular_weights = num_angular_weights
        self.radial_base_kernel = radial_base_kernel
        self.eps = eps

        self.register_parameter(
            name="raw_angular_weights",
            parameter=torch.nn.Parameter(
                torch.zeros(*self.batch_shape, num_angular_weights)),
        )
        self.register_constraint("raw_angular_weights",
                                 angular_weights_constraint)
        self.register_parameter(name="raw_alpha",
                                parameter=torch.nn.Parameter(
                                    torch.zeros(*self.batch_shape, 1)))
        self.register_constraint("raw_alpha", alpha_constraint)
        self.register_parameter(name="raw_beta",
                                parameter=torch.nn.Parameter(
                                    torch.zeros(*self.batch_shape, 1)))
        self.register_constraint("raw_beta", beta_constraint)

        if angular_weights_prior is not None:
            self.register_prior(
                "angular_weights_prior",
                angular_weights_prior,
                lambda: self.angular_weights,
                lambda v: self._set_angular_weights(v),
            )
        if alpha_prior is not None:
            self.register_prior("alpha_prior", alpha_prior, lambda: self.alpha,
                                lambda v: self._set_alpha(v))
        if beta_prior is not None:
            self.register_prior("beta_prior", beta_prior, lambda: self.beta,
                                lambda v: self._set_beta(v))
Example #2
    def __init__(
        self,
        num_mixtures=None,
        ard_num_dims=1,
        batch_shape=torch.Size([]),
        mixture_scales_prior=None,
        mixture_scales_constraint=None,
        mixture_means_prior=None,
        mixture_means_constraint=None,
        mixture_weights_prior=None,
        mixture_weights_constraint=None,
        **kwargs,
    ):
        if num_mixtures is None:
            raise RuntimeError("num_mixtures is a required argument")
        if mixture_means_prior is not None or mixture_scales_prior is not None or mixture_weights_prior is not None:
            logger.warning("Priors not implemented for SpectralMixtureKernel")

        # This kernel does not use the default lengthscale
        super(SpectralMixtureKernel, self).__init__(ard_num_dims=ard_num_dims,
                                                    batch_shape=batch_shape,
                                                    **kwargs)
        self.num_mixtures = num_mixtures

        if mixture_scales_constraint is None:
            mixture_scales_constraint = Positive()

        if mixture_means_constraint is None:
            mixture_means_constraint = Positive()

        if mixture_weights_constraint is None:
            mixture_weights_constraint = Positive()

        self.register_parameter(name="raw_mixture_weights",
                                parameter=torch.nn.Parameter(
                                    torch.zeros(*self.batch_shape,
                                                self.num_mixtures)))
        ms_shape = torch.Size(
            [*self.batch_shape, self.num_mixtures, 1, self.ard_num_dims])
        self.register_parameter(name="raw_mixture_means",
                                parameter=torch.nn.Parameter(
                                    torch.zeros(ms_shape)))
        self.register_parameter(name="raw_mixture_scales",
                                parameter=torch.nn.Parameter(
                                    torch.zeros(ms_shape)))

        self.register_constraint("raw_mixture_scales",
                                 mixture_scales_constraint)
        self.register_constraint("raw_mixture_means", mixture_means_constraint)
        self.register_constraint("raw_mixture_weights",
                                 mixture_weights_constraint)
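A minimal usage sketch for the constructor above, assuming it is gpytorch.kernels.SpectralMixtureKernel from a recent GPyTorch release; the data and the num_mixtures value are illustrative:

    import torch
    from gpytorch.kernels import SpectralMixtureKernel

    train_x = torch.linspace(0, 1, 10).unsqueeze(-1)   # 10 points, 1 input dimension
    train_y = torch.sin(train_x * 6).squeeze(-1)
    kernel = SpectralMixtureKernel(num_mixtures=4, ard_num_dims=1)
    kernel.initialize_from_data(train_x, train_y)      # heuristic initialization of means, scales, weights
    covar = kernel(train_x, train_x)                   # lazily evaluated 10 x 10 covariance
    print(covar.shape)                                 # torch.Size([10, 10])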
Example #3
    def __init__(
        self,
        batch_shape: torch.Size = torch.Size([]),
        deg_free_prior=None,
        deg_free_constraint=None,
        noise_prior=None,
        noise_constraint=None,
    ):
        super().__init__()

        if deg_free_constraint is None:
            deg_free_constraint = GreaterThan(2)

        if noise_constraint is None:
            noise_constraint = Positive()

        self.raw_deg_free = torch.nn.Parameter(torch.zeros(*batch_shape, 1))
        self.raw_noise = torch.nn.Parameter(torch.zeros(*batch_shape, 1))

        if noise_prior is not None:
            self.register_prior("noise_prior", noise_prior, lambda: self.noise,
                                lambda v: self._set_noise(v))

        self.register_constraint("raw_noise", noise_constraint)

        if deg_free_prior is not None:
            self.register_prior("deg_free_prior", deg_free_prior,
                                lambda: self.deg_free,
                                lambda v: self._set_deg_free(v))

        self.register_constraint("raw_deg_free", deg_free_constraint)

        # Rough initialization
        self.initialize(deg_free=7)
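This constructor matches the signature of GPyTorch's StudentTLikelihood; assuming that class, a minimal sketch of the initial state it produces:

    from gpytorch.likelihoods import StudentTLikelihood

    likelihood = StudentTLikelihood()
    print(likelihood.deg_free)   # approximately 7, set by initialize(deg_free=7) above
    print(likelihood.noise)      # small positive value implied by the Positive() constraint on raw_noise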
Example #4
    def __init__(self,
                 num_tasks,
                 rank=1,
                 prior=None,
                 var_constraint=None,
                 **kwargs):
        if rank > num_tasks:
            raise RuntimeError(
                "Cannot create a task covariance matrix larger than the number of tasks"
            )
        super().__init__(**kwargs)

        if var_constraint is None:
            var_constraint = Positive()

        self.register_parameter(name="covar_factor",
                                parameter=torch.nn.Parameter(
                                    torch.randn(*self.batch_shape, num_tasks,
                                                rank)))
        self.register_parameter(name="raw_var",
                                parameter=torch.nn.Parameter(
                                    torch.randn(*self.batch_shape, num_tasks)))
        if prior is not None:
            self.register_prior("IndexKernelPrior", prior,
                                self._eval_covar_matrix)

        self.register_constraint("raw_var", var_constraint)
Example #5
    def __init__(self,
                 base_kernel,
                 num_dims,
                 max_degree=None,
                 active_dims=None,
                 **kwargs):
        """Create an Additive Kernel a la https://arxiv.org/abs/1112.4394 using Newton-Girard Formulae

        :param base_kernel: a base 1-dimensional kernel. NOTE: put ard_num_dims=d in the base kernel...
        :param max_degree: the maximum numbers of kernel degrees to compute
        :param active_dims:
        :param kwargs:
        """
        super(NewtonGirardAdditiveKernel,
              self).__init__(active_dims=active_dims, **kwargs)

        self.base_kernel = base_kernel
        self.num_dims = num_dims
        if max_degree is None:
            self.max_degree = self.num_dims
        elif max_degree > self.num_dims:  # force cap on max_degree (silently)
            self.max_degree = self.num_dims
        else:
            self.max_degree = max_degree

        self.register_parameter(name="raw_outputscale",
                                parameter=torch.nn.Parameter(
                                    torch.zeros(*self.batch_shape,
                                                self.max_degree)))
        outputscale_constraint = Positive()
        self.register_constraint("raw_outputscale", outputscale_constraint)
        self.outputscale_constraint = outputscale_constraint
        self.outputscale = [
            1 / self.max_degree for _ in range(self.max_degree)
        ]
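A minimal usage sketch, assuming gpytorch.kernels.NewtonGirardAdditiveKernel and RBFKernel from a recent GPyTorch release; the dimension and degree are illustrative:

    import torch
    from gpytorch.kernels import NewtonGirardAdditiveKernel, RBFKernel

    d = 4
    base = RBFKernel(ard_num_dims=d)    # per the docstring, ard_num_dims=d in the base kernel
    kernel = NewtonGirardAdditiveKernel(base, num_dims=d, max_degree=2)
    x = torch.randn(8, d)
    covar = kernel(x, x)                # lazily evaluated 8 x 8 additive covariance
    print(covar.shape)                  # torch.Size([8, 8])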
Example #6
    def __init__(self,
                 power: int,
                 offset_prior: Optional[Prior] = None,
                 offset_constraint: Optional[Interval] = None,
                 **kwargs):
        super().__init__(**kwargs)
        if offset_constraint is None:
            offset_constraint = Positive()

        self.register_parameter(name="raw_offset",
                                parameter=torch.nn.Parameter(
                                    torch.zeros(*self.batch_shape, 1)))

        # We want the power to be a float so we don't have to worry about its device / dtype.
        if torch.is_tensor(power):
            if power.numel() > 1:
                raise RuntimeError(
                    "Cant create a Polynomial kernel with more than one power")
            else:
                power = power.item()

        self.power = power

        if offset_prior is not None:
            self.register_prior("offset_prior", offset_prior,
                                lambda: self.offset,
                                lambda v: self._set_offset(v))

        self.register_constraint("raw_offset", offset_constraint)
Example #7
    def __init__(
        self,
        base_kernel,
        delta_func: Optional[Callable] = None,
        angle_prior: Optional[Prior] = None,
        radius_prior: Optional[Prior] = None,
        **kwargs,
    ):
        super(ArcKernel, self).__init__(has_lengthscale=True, **kwargs)

        if self.ard_num_dims is None:
            self.last_dim = 1
        else:
            self.last_dim = self.ard_num_dims

        if delta_func is None:
            self.delta_func = self.default_delta_func
        else:
            self.delta_func = delta_func

        # TODO: check the errors given by interval
        angle_constraint = Interval(0.1, 0.9)
        self.register_parameter(
            name="raw_angle",
            parameter=torch.nn.Parameter(
                torch.zeros(*self.batch_shape, 1, self.last_dim)),
        )
        if angle_prior is not None:
            self.register_prior(
                "angle_prior",
                angle_prior,
                lambda: self.angle,
                lambda v: self._set_angle(v),
            )

        self.register_constraint("raw_angle", angle_constraint)

        self.register_parameter(
            name="raw_radius",
            parameter=torch.nn.Parameter(
                torch.zeros(*self.batch_shape, 1, self.last_dim)),
        )

        if radius_prior is not None:
            self.register_prior(
                "radius_prior",
                radius_prior,
                lambda: self.radius,
                lambda v: self._set_radius(v),
            )

        radius_constraint = Positive()
        self.register_constraint("raw_radius", radius_constraint)

        self.base_kernel = base_kernel
        if self.base_kernel.has_lengthscale:
            self.base_kernel.lengthscale = 1
            self.base_kernel.raw_lengthscale.requires_grad_(False)
Example #8
    def __init__(self, alpha_constraint=None, **kwargs):
        super(RQKernel, self).__init__(**kwargs)
        self.register_parameter(name="raw_alpha",
                                parameter=torch.nn.Parameter(
                                    torch.zeros(*self.batch_shape, 1)))
        if alpha_constraint is None:
            alpha_constraint = Positive()

        self.register_constraint("raw_alpha", alpha_constraint)
Example #9
    def __init__(self,
                 batch_shape=torch.Size([]),
                 scale_prior=None,
                 scale_constraint=None):
        super().__init__()

        if scale_constraint is None:
            scale_constraint = Positive()

        self.raw_scale = torch.nn.Parameter(torch.ones(*batch_shape, 1))
        if scale_prior is not None:
            self.register_prior("scale_prior", scale_prior, lambda: self.scale,
                                lambda v: self._set_scale(v))

        self.register_constraint("raw_scale", scale_constraint)
Example #10
    def __init__(
        self,
        ard_num_dims=None,
        batch_shape=torch.Size([]),
        active_dims=None,
        lengthscale_prior=None,
        lengthscale_constraint=None,
        eps=1e-6,
        **kwargs,
    ):
        super(Kernel, self).__init__()
        self._batch_shape = batch_shape
        if active_dims is not None and not torch.is_tensor(active_dims):
            active_dims = torch.tensor(active_dims, dtype=torch.long)
        self.register_buffer("active_dims", active_dims)
        self.ard_num_dims = ard_num_dims

        self.eps = eps

        param_transform = kwargs.get("param_transform")

        if lengthscale_constraint is None:
            lengthscale_constraint = Positive()

        if param_transform is not None:
            warnings.warn(
                "The 'param_transform' argument is now deprecated. If you want to use a different "
                "transformation, specify a different 'lengthscale_constraint' instead.",
                DeprecationWarning,
            )

        if self.has_lengthscale:
            lengthscale_num_dims = 1 if ard_num_dims is None else ard_num_dims
            self.register_parameter(
                name="raw_lengthscale",
                parameter=torch.nn.Parameter(
                    torch.zeros(*self.batch_shape, 1, lengthscale_num_dims)),
            )
            if lengthscale_prior is not None:
                self.register_prior("lengthscale_prior", lengthscale_prior,
                                    lambda: self.lengthscale,
                                    lambda v: self._set_lengthscale(v))

            self.register_constraint("raw_lengthscale", lengthscale_constraint)

        self.distance_module = None
        # TODO: Remove this on next official PyTorch release.
        self.__pdist_supports_batch = True
Example #11
    def __init__(self,
                 batch_shape=torch.Size([]),
                 noise_prior=None,
                 noise_constraint=None):
        super().__init__()

        if noise_constraint is None:
            noise_constraint = Positive()

        self.raw_noise = torch.nn.Parameter(torch.zeros(*batch_shape, 1))

        if noise_prior is not None:
            self.register_prior("noise_prior", noise_prior, lambda: self.noise,
                                lambda v: self._set_noise(v))

        self.register_constraint("raw_noise", noise_constraint)
Example #12
    def __init__(self,
                 num_dims,
                 num_deltas=128,
                 Z_constraint=None,
                 batch_shape=torch.Size([]),
                 **kwargs):
        Kernel.__init__(self,
                        has_lengthscale=True,
                        batch_shape=batch_shape,
                        **kwargs)

        self.raw_Z = torch.nn.Parameter(
            torch.rand(*batch_shape, num_deltas, num_dims))

        if Z_constraint:
            self.register_constraint("raw_Z", Z_constraint)
        else:
            self.register_constraint("raw_Z", Positive())

        self.num_dims = num_dims
Example #13
    def __init__(self,
                 base_kernel,
                 outputscale_prior=None,
                 outputscale_constraint=None,
                 **kwargs):
        if base_kernel.active_dims is not None:
            kwargs["active_dims"] = base_kernel.active_dims
        super(ScaleKernel, self).__init__(**kwargs)
        if outputscale_constraint is None:
            outputscale_constraint = Positive()

        self.base_kernel = base_kernel
        outputscale = torch.zeros(
            *self.batch_shape) if len(self.batch_shape) else torch.tensor(0.0)
        self.register_parameter(name="raw_outputscale",
                                parameter=torch.nn.Parameter(outputscale))
        if outputscale_prior is not None:
            self.register_prior("outputscale_prior", outputscale_prior,
                                lambda: self.outputscale,
                                lambda v: self._set_outputscale(v))

        self.register_constraint("raw_outputscale", outputscale_constraint)
Example #14
    def __init__(self,
                 period_length_prior=None,
                 period_length_constraint=None,
                 **kwargs):
        super(CosineKernel, self).__init__(**kwargs)

        self.register_parameter(name="raw_period_length",
                                parameter=torch.nn.Parameter(
                                    torch.zeros(*self.batch_shape, 1, 1)))

        if period_length_constraint is None:
            period_length_constraint = Positive()

        if period_length_prior is not None:
            self.register_prior(
                "period_length_prior",
                period_length_prior,
                lambda: self.period_length,
                lambda v: self._set_period_length(v),
            )

        self.register_constraint("raw_period_length", period_length_constraint)