Example #1
 def __init__(self, _lambda=1, _sigma=1, normalize=True, active_dims=None):
     super(SubsetTreeKernel, self).__init__(1, active_dims, 'sstk')
     self._lambda = Param('Lambda', _lambda, Logexp())
     self._sigma = Param('Sigma', _sigma, Logexp())
     self.link_parameters(self._lambda, self._sigma)
     self.normalize = normalize
     self.kernel = wrapper_raw_SubsetTreeKernel(_lambda, _sigma, normalize)
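Every constructor on this page follows the same paramz pattern: wrap a raw value in Param (optionally with a Logexp transform, which reparameterizes through log/exp so the optimizer can only produce positive values), then register it with link_parameter/link_parameters so gradient computation and optimization pick it up. A minimal self-contained sketch of that pattern, assuming GPy is installed (ToyKernel and its values are illustrative only):

from GPy.kern import Kern
from GPy.core.parameterization import Param
from paramz.transformations import Logexp

class ToyKernel(Kern):
    """Illustrative kernel showing the Param / link_parameter pattern."""
    def __init__(self, input_dim=1, variance=1., active_dims=None):
        super(ToyKernel, self).__init__(input_dim, active_dims, 'toy')
        # Logexp keeps the value positive throughout optimization
        self.variance = Param('variance', variance, Logexp())
        self.link_parameter(self.variance)  # register with paramz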
Example #2
    def __init__(self,
                 input_dim: int,
                 variance: float = 1.,
                 period: float = 2. * np.pi,
                 lengthscale: float = 2. * np.pi,
                 active_dims: int = None,
                 name: str = 'pure_std_periodic') -> None:
        super(PureStdPeriodicKernel, self).__init__(input_dim, active_dims,
                                                    name)

        self.name = name

        if period is not None:
            period = np.asarray(period)
            assert period.size == input_dim, "bad number of periods"
        else:
            period = 2. * np.pi * np.ones(input_dim)
        if lengthscale is not None:
            lengthscale = np.asarray(lengthscale)
            assert lengthscale.size == input_dim, "bad number of lengthscales"
        else:
            lengthscale = 2. * np.pi * np.ones(input_dim)

        self.variance = Param('variance', variance, Logexp())
        assert self.variance.size == 1, "Variance size must be one"
        self.period = Param('period', period, Logexp())
        self.lengthscale = Param('lengthscale', lengthscale, Logexp())

        self.link_parameters(self.variance, self.period, self.lengthscale)
Example #3
    def __init__(self,
                 input_dim,
                 variances=1.0,
                 lengthscale=1.0,
                 ARD=False,
                 active_dims=None,
                 lengthscalefun=None,
                 name='nonstatRBF'):
        super(NonstationaryRBF, self).__init__(input_dim, active_dims, name)

        if lengthscale is None:
            lengthscale = np.ones(1)
        else:
            lengthscale = np.asarray(lengthscale)

        if lengthscalefun is None:
            lengthscalefun = lambda x: lengthscale

        self.lengthscalefun = lengthscalefun
        self.lengthscale = Param(
            'lengthscale', lengthscale,
            Logexp())  #Logexp - transforms to allow positive only values...
        self.variances = Param('variances', variances, Logexp())  #and here.
        self.link_parameters(
            self.variances, self.lengthscale
        )  #this just takes a list of parameters we need to optimise.
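The lengthscalefun argument above replaces a constant lengthscale with a function of input position. An illustrative stand-in (the signature simply mirrors the default `lambda x: lengthscale` in the constructor; the functional form is made up):

import numpy as np

# lengthscale is short near the origin and grows with |x|
lengthscalefun = lambda x: 0.1 + np.abs(x)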
Example #4
    def __init__(self,
                 input_dim,
                 input_space_dim=None,
                 active_dims=None,
                 kernel=None,
                 name='shapeintegral',
                 Nperunit=100,
                 lengthscale=[1.0],
                 variance=1.0):
        """
        NOTE: Added input_space_dim as the number of columns in X isn't the dimensionality of the space. I.e. for pentagons there
        will be 10 columns in X, while only 2 dimensions of input space.
        
        The lengthscale, variance, etc are ideally set by specifying the kernel we'll use
        
        input_dim = number of actual columns in data
        input_space_dim = number of dimensions in the domain
        active_dims = potential list of dimensions we'll use
        kernel = latent function kernel
        Nperunit = resolution of approximation
        
        The last column of X should specify if it's the latent function or the integral that the Y refers to.
        if it's the latent function then we just use the first d-columns, and the rest can be NaN, e.g.
        X                               Y
        0,0,1,0,0,1,0,1,1,0,1,1,0     2
        1,1,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,1     3
        is a 1x1 square with an integral of 2, and a single point in the [1,1] corner of the square with a value of 3.
        
        """
        super(ShapeIntegral, self).__init__(input_dim, active_dims, name)

        assert (
            (kernel is not None) or (input_space_dim is not None)
        ), "Need either the input space dimensionality defining or the latent kernel defining (to infer input space)"
        if kernel is None:
            kernel = RBF(input_space_dim)
        else:
            input_space_dim = kernel.input_dim
        assert kernel.input_dim == input_space_dim, "Latent kernel (dim=%d) should have same input dimensionality as specified in input_space_dim (dim=%d)" % (
            kernel.input_dim, input_space_dim)

        #assert len(kern.lengthscale)==input_space_dim, "Lengthscale of length %d, but input space has %d dimensions" % (len(lengthscale),input_space_dim)

        #self.lengthscale = Param('lengthscale', kernel.lengthscale, Logexp()) #Logexp - transforms to allow positive only values...
        #self.variance = Param('variance', kernel.variance, Logexp()) #and here.
        #self.link_parameters(self.variance, self.lengthscale) #this just takes a list of parameters we need to optimise.

        self.kernel = kernel
        self.Nperunit = Nperunit
        self.input_space_dim = input_space_dim
        self.cached_points = {
        }  #this is important, not only is it a speed up - we also get the same points for each shape, which makes our covariances more stable

        self.lengthscale = Param(
            'lengthscale', lengthscale,
            Logexp())  #Logexp - transforms to allow positive only values...
        self.variance = Param('variance', variance, Logexp())  #and here.
        self.link_parameters(
            self.variance, self.lengthscale
        )  #this just takes a list of parameters we need to optimise.
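Following the docstring's layout, here is a hedged sketch of assembling one integral observation and one latent-function observation for a 2-D input space (values copied from the docstring example above; the NaN padding and the trailing flag column are the only structural requirements):

import numpy as np

nan = np.nan
X = np.array([
    # unit square as vertex coordinates, flag 0 -> Y is an integral
    [0, 0, 1, 0, 0, 1, 0, 1, 1, 0, 1, 1, 0],
    # single point at (1, 1) padded with NaN, flag 1 -> Y is the latent function
    [1, 1, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, 1],
])
Y = np.array([[2.], [3.]])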
Example #5
    def __init__(self,
                 gap_decay=1.0,
                 match_decay=2.0,
                 order_coefs=[1.0],
                 alphabet=[],
                 maxlen=0,
                 active_dims=None,
                 normalize=True,
                 batch_size=1000):
        super(StringKernel, self).__init__(1, active_dims, 'sk')
        self._name = "sk"
        self.gap_decay = Param('Gap_decay', gap_decay, Logexp())
        self.match_decay = Param('Match_decay', match_decay, Logexp())
        self.order_coefs = Param('Order_coefs', order_coefs, Logexp())
        self.link_parameters(self.gap_decay, self.match_decay,
                             self.order_coefs)

        self.alphabet = alphabet
        self.maxlen = maxlen
        self.normalize = normalize

        self.kernel = NPStringKernel(_gap_decay=gap_decay,
                                     _match_decay=match_decay,
                                     _order_coefs=list(order_coefs),
                                     alphabet=self.alphabet,
                                     maxlen=maxlen,
                                     normalize=normalize)
Example #6
    def __init__(self,
                 first,
                 second,
                 sigmoidal,
                 sigmoidal_indicator,
                 location: float = 0.,
                 slope: float = 0.5,
                 width=1.,
                 name='change_window_shifted_sides_base',
                 fixed_slope=False):
        _newkerns = [kern.copy() for kern in (first, second)]
        super(ChangeWindowShiftedSidesBase, self).__init__(_newkerns, name)
        self.first = first
        self.second = second

        self._fixed_slope = fixed_slope  # Note: here to be used by subclasses, and changing it from the outside does not link the parameter
        if self._fixed_slope: self.slope = slope
        else:
            self.slope = Param('slope', np.array(slope), Logexp())
            self.link_parameter(self.slope)

        self.sigmoidal = sigmoidal(1, False, 1., location, slope)
        self.sigmoidal_reverse = sigmoidal(1, True, 1., location, slope)
        self.sigmoidal_indicator = sigmoidal_indicator(1, False, 1., location,
                                                       slope, width)
        # self.shift = _Gk.Bias(1)
        self.location = Param('location', np.array(location))
        self.width = Param('width', np.array(width), Logexp())
        # self.shift_variance = Param('shift_variance', self.shift.variance.values, Logexp())
        self.shift_variance = Param('shift_variance', np.array(0), Logexp())
        self.link_parameters(self.location, self.width, self.shift_variance)
Example #7
    def __init__(self,
                 first,
                 second,
                 sigmoidal,
                 location: float = 0.,
                 slope: float = 0.5,
                 name='change_base',
                 fixed_slope=False):
        _newkerns = [kern.copy() for kern in (first, second)]
        super(ChangeKernelBase, self).__init__(_newkerns, name)
        self.first = first
        self.second = second

        self._fixed_slope = fixed_slope  # Note: here to be used by subclasses, and changing it from the outside does not link the parameter
        if self._fixed_slope: self.slope = slope
        else:
            self.slope = Param('slope', slope, Logexp())
            self.link_parameter(self.slope)

        if isinstance(location, tuple):
            self.sigmoidal = sigmoidal(1, False, 1., location[0], location[1],
                                       slope)
            self.sigmoidal_reverse = sigmoidal(1, True, 1., location[0],
                                               location[1], slope)
            self.location = Param('location', location[0])
            self.stop_location = Param('stop_location', location[1])
            self.link_parameters(self.location, self.stop_location)
        else:
            self.sigmoidal = sigmoidal(1, False, 1., location, slope)
            self.sigmoidal_reverse = sigmoidal(1, True, 1., location, slope)
            self.location = Param('location', location)
            self.link_parameter(self.location)
Example #8
    def __init__(self, input_dim, variances=None, lengthscale=None, ARD=False, active_dims=None, name='integral'):
        super(Integral_Output_Observed, self).__init__(input_dim, active_dims, name)

        if lengthscale is None:
            lengthscale = np.ones(1)
        else:
            lengthscale = np.asarray(lengthscale)
            
        assert len(lengthscale)==input_dim/2            

        self.lengthscale = Param('lengthscale', lengthscale, Logexp()) #Logexp - transforms to allow positive only values...
        self.variances = Param('variances', variances, Logexp()) #and here.
        self.link_parameters(self.variances, self.lengthscale) #this just takes a list of parameters we need to optimise.
Example #9
    def __init__(self,n_terms=3):
        """n_terms specifies the number of tanh terms to be used"""
        self.n_terms = n_terms
        self.num_parameters = 3 * self.n_terms + 1
        self.psi = np.ones((self.n_terms, 3))

        super(TanhWarpingFunction_d, self).__init__(name='warp_tanh')
        self.psi = Param('psi', self.psi)
        self.psi[:, :2].constrain_positive()

        self.d = Param('d', 1.0, Logexp())
        self.link_parameter(self.psi)
        self.link_parameter(self.d)
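The count num_parameters = 3 * n_terms + 1 reflects one (a, b, c) triple per tanh term, stored row-wise in psi, plus the single scale d. Assuming the usual tanh warp f(y) = y*d + sum_i a_i * tanh(b_i * (y + c_i)) (an assumption; check the class's f() for the exact form), a standalone numpy sketch:

import numpy as np

def tanh_warp(y, psi, d):
    # psi[i] = (a_i, b_i, c_i); y is a 1-D array of targets
    a, b, c = psi[:, 0], psi[:, 1], psi[:, 2]
    return y * d + np.sum(a * np.tanh(b * (y[:, None] + c)), axis=-1)

print(tanh_warp(np.array([0.5]), np.ones((3, 3)), d=1.0))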
Example #10
    def __init__(self,
                 gap_decay=1.0,
                 match_decay=2.0,
                 order_coefs=[1.0],
                 alphabet=[],
                 maxlen=0,
                 num_splits=1,
                 normalize=True):
        super(SplitStringKernel, self).__init__(1, None, "sk")
        self._name = "sk"
        self.num_splits = num_splits
        self.gap_decay = Param('Gap_decay', gap_decay, Logexp())
        self.match_decay = Param('Match_decay', match_decay, Logexp())
        self.order_coefs = Param('Order_coefs', order_coefs, Logexp())
        self.link_parameters(self.gap_decay, self.match_decay,
                             self.order_coefs)

        self.alphabet = alphabet
        self.maxlen = maxlen
        self.normalize = normalize

        # make new kernels for each section
        self.kernels = []
        for i in range(0, num_splits - 1):
            self.kernels.append(
                StringKernel(gap_decay=gap_decay,
                             match_decay=match_decay,
                             order_coefs=order_coefs,
                             alphabet=alphabet,
                             maxlen=int((self.maxlen / self.num_splits)),
                             normalize=normalize))
        # the final kernel may operate on a slightly longer string if maxlen % num_splits != 0
        self.kernels.append(
            StringKernel(gap_decay=gap_decay,
                         match_decay=match_decay,
                         order_coefs=order_coefs,
                         alphabet=alphabet,
                         maxlen=int((self.maxlen / self.num_splits)) +
                         self.maxlen - self.num_splits * int(
                             (self.maxlen / self.num_splits)),
                         normalize=normalize))
        #tie the params across the kernels
        for kern in self.kernels:
            kern.unlink_parameter(kern.gap_decay)
            kern.gap_decay = self.gap_decay
            kern.unlink_parameter(kern.match_decay)
            kern.match_decay = self.match_decay
            kern.unlink_parameter(kern.order_coefs)
            kern.order_coefs = self.order_coefs
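The maxlen bookkeeping above is just integer division with the remainder pushed onto the final kernel. Worked through with illustrative numbers:

maxlen, num_splits = 10, 3
base = int(maxlen / num_splits)           # 3, used by the first num_splits - 1 kernels
last = base + maxlen - num_splits * base  # 3 + 10 - 9 = 4, the final kernel
assert (num_splits - 1) * base + last == maxlen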
Example #11
 def __init__(self, input_dim, input_space_dim=None, active_dims=None, name='shapeintegralhc',lengthscale=None, variances=None,Nrecs=10,step=0.025,Ntrials=10,dims=2):
     super(ShapeIntegralHC, self).__init__(input_dim, active_dims, name)
     assert ((input_space_dim is not None)), "Need the input space dimensionality defining"
     kernel = Integral(input_dim=input_space_dim*2,lengthscale=lengthscale,variances=variances)
     self.lengthscale = Param('lengthscale', kernel.lengthscale, Logexp())
     self.variances = Param('variances', kernel.variances, Logexp()) 
     self.link_parameters(self.variances, self.lengthscale) #this just takes a list of parameters we need to optimise.
     
     
     self.kernel = kernel
     self.input_space_dim = input_space_dim
     self.rectangle_cache = {} #this is important, not only is it a speed up - we also get the same points for each shape, which makes our covariances more stable        
     
     self.Nrecs=Nrecs
     self.step=step
     self.Ntrials=Ntrials
Example #12
    def __init__(self,
                 Y_metadata,
                 gp_link=None,
                 noise_mult=1.,
                 known_variances=1.,
                 name='Scaled_het_Gauss'):
        if gp_link is None:
            gp_link = link_functions.Identity()

        if not isinstance(gp_link, link_functions.Identity):
            print(
                "Warning: exact inference is not implemented for non-identity "
                "link functions; if you are not already, ensure the Laplace "
                "inference_method is used")

        # note: the known_variances are fixed, not parameters
        self.known_variances = known_variances
        self.noise_mult = Param('noise_mult', noise_mult,
                                Logexp())  # Logexp ensures its positive
        # this is a parameter, so it gets optimized, gradients calculated etc.

        #super(ScaledHeteroscedasticGaussian, self).__init__(gp_link, variance=1.0, name=name)
        super(Gaussian, self).__init__(gp_link, name=name)
        # note: we're inheriting from Likelihood here, not Gaussian, so as to avoid problems with the Gaussian variance.

        #add a new parameter by linking it (see just above in GPy.likelihoods.gaussian.Gaussian).
        self.link_parameter(self.noise_mult)

        if isinstance(gp_link, link_functions.Identity):
            self.log_concave = True
Example #13
	def __init__(self, input_dim,variance=1,active_dims=[0],name="categorical", inverse=False,useGPU=False):
		super(Categorical, self).__init__(input_dim, active_dims, name,useGPU=useGPU)

		self.inverse = inverse

		self.variance = Param('variance',variance,Logexp())
		self.link_parameter(self.variance)
Example #14
 def __init__(self,
              input_dim,
              input_type,
              variance=1.,
              lengthscale=1.,
              active_dims=None):
     super(CustomMatern52, self).__init__(input_dim, active_dims,
                                          'matern52')
     self.variance = Param('variance', variance)
     self.lengthscale = Param('lengthscale', lengthscale)
     self.link_parameters(self.variance, self.lengthscale)
     assert isinstance(
         input_type,
         (InputY, InputX,
          InputPsi)), "The type of input_object is not supported"
     self.input_type = input_type
Example #15
    def __init__(self, input_dim: int, reverse: bool = False, variance: float = 1., location: float = 0., slope: float = 0.2,
                 active_dims: int = None, name: str = 'sigmoidal_kernel_base', fixed_slope = False) -> None:
        self.reverse = reverse
        super(SigmoidalKernelBase, self).__init__(input_dim, variance, active_dims, False, name)
        # TO REMOVE VARIANCE: comment line above; uncomment below; remove self.variance factors from subclass methods
        # super(BasisFuncKernel, self).__init__(input_dim, active_dims, name)
        # assert self.input_dim == 1, "Basis Function Kernel only implemented for one dimension. Use one kernel per dimension (and add them together) for more dimensions"
        # self.ARD = False
        # self.variance = 1
        self.location = Param('location', location)
        self.link_parameter(self.location)

        self._fixed_slope = fixed_slope # Note: here to be used by subclasses, and changing it from the outside does not link the parameter
        if self._fixed_slope: self.slope = slope
        else:
            self.slope = Param('slope', slope, Logexp()) # This +ve constraint makes non-reverse sigmoids only fit (+ve or -ve) curves going away from 0; similarly for other kernels
            self.link_parameter(self.slope)
Example #16
    def __init__(self,
                 input_dim: int,
                 variance: float = 1.,
                 offset: float = 0.,
                 active_dims: int = None,
                 name: str = 'linear_with_offset') -> None:
        super(LinearWithOffset, self).__init__(input_dim, active_dims, name)
        if variance is not None:
            variance = np.asarray(variance)
            assert variance.size == 1
        else:
            variance = np.ones(1)

        self.variance = Param('variance', variance, Logexp())
        self.offset = Param('offset', offset)

        self.link_parameters(self.variance, self.offset)
Example #17
 def set_l(self, l, safe=False):
     assert safe
     assert l.shape == (self.active_dim,)
     l = np.maximum(
         1.e-3,
         l
     )
     self.inner_kernel.lengthscale = Param('lengthscale', l)
Example #18
    def __init__(self, gp_link=None, r=1.0):
        if gp_link is None:
            #Parameterised not as link_f but as f
            #gp_link = Identity()
            gp_link = Log()

        super(LogLogistic, self).__init__(gp_link, name='LogLogistic')
        self.r = Param('r_shape', float(r), Logexp())
        self.link_parameter(self.r)
Example #19
    def __init__(self,gp_link=None, deg_free=5, sigma2=2):
        if gp_link is None:
            gp_link = link_functions.Identity()

        super(HetStudentT, self).__init__(gp_link, name='Hetro_Student_T')
        self.v = Param('deg_free', float(deg_free), Logexp())
        self.link_parameter(self.v)
        self.v.constrain_fixed()

        self.log_concave = False
Example #20
 def __init__(self,
              input_dim,
              variances=None,
              ARD=False,
              active_dims=None,
              name='mix_integral_linear'):
     super(Mix_Integral_Linear, self).__init__(input_dim, active_dims, name)
     self.variances = Param('variances', variances, Logexp())  #and here.
     self.link_parameters(
         self.variances
     )  #this just takes a list of parameters we need to optimise.
Example #21
    def update_parameter_bounds(self, X):
        if self.data_range is None:
            self.data_range = (X.min(), X.max())
            self.location = Param('location', self.location,
                                  Logistic(*self.data_range))
            self.sigmoidal_indicator.location = Param(
                'location', self.location, Logistic(*self.data_range))
            # self.sigmoidal_reverse.location = Param('location', self.location, Logistic(*self.data_range))
            # self.sigmoidal.location = Param('location', self.location + self.width, Logistic(*self.data_range))
            # self.location.constrain_bounded(*self.data_range)
            # self.sigmoidal_indicator.location.constrain_bounded(*self.data_range)
            # # self.sigmoidal_reverse.location.constrain_bounded(*self.data_range)
            # # self.sigmoidal.location.constrain_bounded(*self.data_range)

        max_width = self.data_range[1] - self.location
        max_width = max_width if max_width > 0 else self.data_range[
            1] - self.data_range[0]
        self.width = Param('width', self.width, Logistic(0, max_width))
        self.sigmoidal_indicator.width = Param('width', self.width,
                                               Logistic(0, max_width))
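Unlike Logexp, which only enforces positivity, Logistic(lo, hi) constrains a Param to an open interval; that is why it is used above to keep location inside the observed data range and width inside (0, max_width). A minimal sketch, assuming GPy/paramz (the values are illustrative):

from GPy.core.parameterization import Param
from paramz.transformations import Logistic

location = Param('location', 0.5, Logistic(0., 1.))  # the optimizer can never leave (0, 1)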
Example #22
    def __init__(self,
                 first,
                 second,
                 sigmoidal,
                 sigmoidal_indicator,
                 third=None,
                 location: float = 0.,
                 slope: float = 0.5,
                 width=1.,
                 name='change_window_independent_base',
                 fixed_slope=False):
        third = deepcopy(first) if third is None else third
        _newkerns = [kern.copy() for kern in (first, second, third)]
        super(ChangeWindowIndependentBase, self).__init__(_newkerns, name)
        self.first = first
        self.second = second
        self.third = third

        self._fixed_slope = fixed_slope  # Note: here to be used by subclasses, and changing it from the outside does not link the parameter
        if self._fixed_slope: self.slope = slope
        else:
            self.slope = Param('slope', np.array(slope), Logexp())
            self.link_parameter(self.slope)

        self.sigmoidal = sigmoidal(1, False, 1., location, slope)
        self.sigmoidal_reverse = sigmoidal(1, True, 1., location, slope)
        self.sigmoidal_indicator = sigmoidal_indicator(1, False, 1., location,
                                                       slope, width)
        self.location = Param('location', np.array(location))
        self.width = Param('width', np.array(width), Logexp())
        self.link_parameters(self.location, self.width)

        self.data_range = None
        self.one_off_bounds_set = False
        self.last_parameter_values = {
            'location': np.array(location),
            'slope': np.array(slope),
            'width': np.array(width)
        }
Example #23
    def __init__(self,
                 input_dim,
                 input_space_dim=None,
                 active_dims=None,
                 kernel=None,
                 name='shapeintegral',
                 Nperunit=100,
                 lengthscale=None,
                 variance=None):
        """
        NOTE: Added input_space_dim as the number of columns in X isn't the dimensionality of the space. I.e. for pentagons there
        will be 10 columns in X, while only 2 dimensions of input space.
        """
        super(ShapeIntegral, self).__init__(input_dim, active_dims, name)

        assert (
            (kernel is not None) or (input_space_dim is not None)
        ), "Need either the input space dimensionality defining or the latent kernel defining (to infer input space)"
        if kernel is None:
            kernel = RBF(input_space_dim, lengthscale=lengthscale)
        else:
            input_space_dim = kernel.input_dim
        assert kernel.input_dim == input_space_dim, "Latent kernel (dim=%d) should have same input dimensionality as specified in input_space_dim (dim=%d)" % (
            kernel.input_dim, input_space_dim)

        #assert len(kern.lengthscale)==input_space_dim, "Lengthscale of length %d, but input space has %d dimensions" % (len(lengthscale),input_space_dim)

        self.lengthscale = Param(
            'lengthscale', kernel.lengthscale,
            Logexp())  #Logexp - transforms to allow positive only values...
        self.variance = Param('variance', kernel.variance,
                              Logexp())  #and here.
        self.link_parameters(
            self.variance, self.lengthscale
        )  #this just takes a list of parameters we need to optimise.

        self.kernel = kernel
        self.Nperunit = Nperunit
        self.input_space_dim = input_space_dim
Example #24
 def __init__(self,
              first,
              second,
              location: float = 0.,
              slope: float = 0.5,
              width: float = 1.,
              name='change_window',
              fixed_slope=False):
     super(ChangeWindowKernel,
           self).__init__(first, second, SigmoidalIndicatorKernel, location,
                          slope, name, fixed_slope)
     self.width = Param('width', width, Logexp())
     self.link_parameter(self.width)
Example #25
 def __init__(self,
              input_dim,
              variance,
              lengthscale,
              ARD,
              active_dims,
              name,
              useGPU=False):
     super(Stationary, self).__init__(input_dim,
                                      active_dims,
                                      name,
                                      useGPU=useGPU)
     self.ARD = ARD
     if not ARD:
         if lengthscale is None:
             lengthscale = np.ones(1)
         else:
             lengthscale = np.asarray(lengthscale)
             assert lengthscale.size == 1, "Only 1 lengthscale needed for non-ARD kernel"
     else:
         if lengthscale is not None:
             lengthscale = np.asarray(lengthscale)
             assert lengthscale.size in [1, input_dim
                                         ], "Bad number of lengthscales"
             if lengthscale.size != input_dim:
                 lengthscale = np.ones(input_dim) * lengthscale
         else:
             lengthscale = np.ones(self.input_dim)
             # lengthscale = np.ones(2)
             # n = A(1)
             # t = A(1)
             # lengthscale = np.array([n, n, n, n, n, n, t, t, t])
     self.lengthscale = Param('lengthscale', lengthscale, Logexp())
     # self.lengthscale = self.lengthscale.repeat(6)[:self.input_dim]
     # print(self.lengthscale)
     self.variance = Param('variance', variance, Logexp())
     assert self.variance.size == 1
     self.link_parameters(self.variance, self.lengthscale)
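The ARD branch above accepts either one shared lengthscale or one per input dimension, broadcasting a scalar when needed. The same logic in isolation, with illustrative values:

import numpy as np

input_dim, ARD = 3, True
lengthscale = np.asarray(2.0)                        # a single supplied value
if ARD and lengthscale.size != input_dim:
    lengthscale = np.ones(input_dim) * lengthscale   # broadcast to one per dimension
print(lengthscale)                                   # [2. 2. 2.]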
Example #26
    def __init__(self,
                 k1,
                 k2=None,
                 kc=1.,
                 xc=np.array([[0]]),
                 cpDim=0,
                 changepointParameter=False):
        """
        arguments:
            k1, k2: GPy.kern.Kernel
            kc: float, covariance at the changepoint
            xc: np.array, position of changepoint(s)
            cpDim: int, dimension that changepoint exists on
            changepointParameter: bool, whether xc should be linked as a parameter

        """
        if k2 is None:
            super(Changepoint, self).__init__([k1], "changepoint")
            k2 = k1
        else:
            super(Changepoint, self).__init__([k1, k2], "changepoint")

        self.k1 = k1
        self.k2 = k2

        self.kc = Param('kc', kc, Logexp())
        self.link_parameter(self.kc)

        self.changepointParameter = changepointParameter
        self.xc = np.array(xc)
        if self.changepointParameter:
            self.xc = Param('xc', self.xc)
            self.link_parameter(self.xc)
            self.xc.gradient = [[0]]

        self.cpDim = cpDim
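Hypothetical usage of the class above, assuming GPy's stock RBF kernel for the two regimes (the hyperparameter values are made up):

import numpy as np
import GPy

k1 = GPy.kern.RBF(1, lengthscale=0.5)   # short-lengthscale regime before the changepoint
k2 = GPy.kern.RBF(1, lengthscale=2.0)   # smoother regime after it
cp = Changepoint(k1, k2, kc=1.0, xc=np.array([[3.0]]), cpDim=0)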
Example #27
 def __init__(self,k1,k2,kc,xc,cpDim):
     if k2 is None:
         super(Changepoint,self).__init__([k1],"changepoint")
         k2 = k1
     else:
         super(Changepoint,self).__init__([k1,k2],"changepoint")
     
     self.k1 = k1
     self.k2 = k2
     
     self.kc = Param('kc', kc, Logexp())
     self.link_parameter(self.kc)
     
     self.xc = np.array(xc)
     self.cpDim = cpDim
Example #28
 def __init__(self,
              input_dim,
              basis,
              variance=None,
              ARD=False,
              active_dims=None,
              name='mean',
              useGP=False):
     """
     Initialize the object.
     """
     super(MeanFunction, self).__init__(input_dim,
                                        active_dims,
                                        name,
                                        useGP=useGP)
     self.input_dim = int(input_dim)
     self._ARD = ARD
      if not hasattr(basis, '__call__'):
          raise TypeError('The basis functions must implement the '
                          '\'__call__()\' method. This method should '
                          'evaluate the basis functions given a 2D numpy '
                          'array of \'num_points x input_dim\' dimensions.')
     if not hasattr(basis, 'num_output'):
         raise TypeError('The basis functions must have an attribute '
                         ' \'num_output\' which should store the number of'
                         ' basis functions it contains.')
     self._basis = basis
     self._num_params = basis.num_output
     if not ARD:
         if variance is None:
             variance = np.ones(1)
         else:
             variance = np.asarray(variance)
             assert variance.size == 1, 'Only 1 variance needed for a non-ARD kernel'
     else:
         if variance is not None:
             variance = np.asarray(variance)
             assert variance.size in [1, self.num_params
                                      ], 'Bad number of variances'
             if variance.size != self.num_params:
                 variance = np.ones(self.num_params) * variance
         else:
             variance = np.ones(self.num_params)
     self.variance = Param('variance', variance, Logexp())
     self.link_parameters(self.variance)
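A minimal basis object satisfying the two checks above (callable, with a num_output attribute); PolynomialBasis is illustrative, not part of the library:

import numpy as np

class PolynomialBasis(object):
    def __init__(self, degree):
        self.num_output = degree + 1  # number of basis functions
    def __call__(self, X):
        # maps a num_points x input_dim array to a num_points x num_output design matrix
        return np.hstack([X[:, :1] ** p for p in range(self.num_output)])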
Example #29
    def __init__(self, warping_indices, hidden_dims, out_dim, warped_indices,
                 name):
        super(NNwarpingFunction, self).__init__(name='nn_warping_' + name)
        self.warping_indices = warping_indices
        self.warped_indices = warped_indices
        self.nnwarping = NNwarping(len(warping_indices), hidden_dims, out_dim)
        self.params_name = list(self.nnwarping.state_dict().keys())
        self.params_value = [
            _.numpy() for _ in list(self.nnwarping.state_dict().values())
        ]
        self.params = [
            Param(self.params_name[_], self.params_value[_])
            for _ in range(len(self.params_value))
        ]
        for param in self.params:
            self.link_parameter(param)

        # training statistics
        self.params_updated_num = 0
Example #30
    def __init__(self, kernels):
        """
        This kernel is used for multi-fidelity problems.

        Args:
            kernels - List of GPy kernels to use for each fidelity from low
                      to high fidelity

        Reference:

        Predicting the output from a complex computer code when fast
        approximations are available. M. C. KENNEDY AND A. O'HAGAN (2000)

        Any number of fidelities are supported.

        Fidelity s is modelled as:
        f_s(x) = p_t * f_t(x) + d_s(x)

        where:
        s is the fidelity
        t is the previous fidelity
        f_s(x) is the function modelling fidelity s
        d_s(x) models the difference between fidelity s-1 and s
        p_t is a scaling parameter between fidelities t and s
        """

        self.kernels = kernels
        self.n_fidelities = len(kernels)

        super(LinearMultiFidelityKernel, self).__init__(kernels=self.kernels,
                                                        name='multifidelity',
                                                        extra_dims=[-1])
        self.scaling_param = Param('scale', np.ones(self.n_fidelities - 1))

        # Link parameters so paramz knows about them
        self.link_parameters(self.scaling_param)
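A hedged numerical sketch of the docstring's recursion f_s(x) = p_t * f_t(x) + d_s(x): each fidelity rescales the previous one and adds an independent discrepancy. Random draws stand in for GP samples, and the scales and noise level are made up:

import numpy as np

rng = np.random.default_rng(0)
n, scales = 5, [0.8, 1.2]                     # p_t between consecutive fidelities
f = rng.standard_normal(n)                    # lowest-fidelity values
fidelities = [f]
for p in scales:
    f = p * f + 0.1 * rng.standard_normal(n)  # add the discrepancy d_s(x)
    fidelities.append(f)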