def __init__(self, likelihoods_list, gp_link=None, name='heterogeneous_likelihood'):
    """Container likelihood built from several per-output likelihoods.

    Parameters
    ----------
    likelihoods_list : list
        Likelihood objects, one per output.
    gp_link : link function, optional
        Defaults to the identity link when not supplied.
    name : str
        Name forwarded to the parent likelihood.
    """
    gp_link = link_functions.Identity() if gp_link is None else gp_link
    super(HetLikelihood, self).__init__(gp_link=gp_link, name=name)
    self.likelihoods_list = likelihoods_list
def __init__(self, Y_metadata, gp_link=None, noise_mult=1., known_variances=1., name='Scaled_het_Gauss'):
    """Scaled heteroscedastic Gaussian: fixed per-point variances times one learned multiplier.

    Parameters
    ----------
    Y_metadata : dict
        Output metadata (kept for interface compatibility; not used here).
    gp_link : link function, optional
        Defaults to the identity link. Exact inference only supports identity.
    noise_mult : float
        Initial value of the positive noise multiplier parameter.
    known_variances : array-like or float
        Fixed (non-optimized) per-point noise variances.
    name : str
        Name forwarded to the parent likelihood.
    """
    if gp_link is None:
        gp_link = link_functions.Identity()
    if not isinstance(gp_link, link_functions.Identity):
        # Fixed the original message: "implemeted" typo, and a backslash
        # line-continuation inside the literal that leaked source indentation
        # into the printed text.
        print("Warning, Exact inference is not implemented for non-identity link functions, "
              "if you are not already, ensure Laplace inference_method is used")
    # The known variances are fixed data, not parameters: never optimized.
    self.known_variances = known_variances
    # Logexp keeps the multiplier positive; as a Param it is optimized and
    # gradients are computed for it.
    self.noise_mult = Param('noise_mult', noise_mult, Logexp())
    # Deliberately skip Gaussian.__init__ and call its parent (Likelihood)
    # instead, to avoid clashing with the Gaussian `variance` parameter.
    super(Gaussian, self).__init__(gp_link, name=name)
    # Register the multiplier with the parameterization framework.
    self.link_parameter(self.noise_mult)
    if isinstance(gp_link, link_functions.Identity):
        self.log_concave = True
def __init__(self, sigma=None, gp_link=None):
    """Gaussian likelihood with a fixed noise scale.

    sigma defaults to 0.5 when not given. It is stored as a plain
    attribute, not as an optimized parameter.
    """
    if gp_link is None:
        gp_link = link_functions.Identity()
    self.sigma = 0.5 if sigma is None else sigma
    super(Gaussian, self).__init__(gp_link, name='Gaussian')
def __init__(self, gp_link=None, deg_free=5, sigma2=2):
    """Heteroscedastic Student-t likelihood.

    deg_free is registered as a positive, fixed parameter. sigma2 is
    accepted for signature compatibility but not used by this constructor.
    """
    gp_link = link_functions.Identity() if gp_link is None else gp_link
    super(HetStudentT, self).__init__(gp_link, name='Hetro_Student_T')
    # Positive-constrained (Logexp) and then fixed: the degrees of freedom
    # are registered with the model but not optimized.
    self.v = Param('deg_free', float(deg_free), Logexp())
    self.link_parameter(self.v)
    self.v.constrain_fixed()
    # The Student-t log-likelihood is not log-concave.
    self.log_concave = False
def __init__(self, Y, gp_link=None):
    """Multi-class likelihood; infers the number of classes K from Y.

    When the product of the unique labels is not strictly positive
    (e.g. label 0 is present), one label is excluded from the count, so
    K = #unique - 1; otherwise K = #unique.

    Parameters
    ----------
    Y : array-like
        Observed class labels.
    gp_link : link function, optional
        Defaults to the identity link.
    """
    if gp_link is None:
        gp_link = link_functions.Identity()
    self.Y = Y
    # Compute the unique labels once (original called np.unique three times
    # and duplicated both branch bodies).
    classes = np.unique(self.Y)
    # NOTE(review): np.prod(classes) <= 0 also triggers for negative labels
    # (e.g. {-1, 1}), not only when a literal 0 class is present — confirm
    # this is the intended rule.
    self.K = len(classes) if np.prod(classes) > 0 else len(classes) - 1
    super(MultiClassMAA, self).__init__(gp_link, name='MultiClassMAA')
def __init__(self, K, gp_link=None):
    """Categorical likelihood over K classes.

    Parameters
    ----------
    K : int
        Number of categories.
    gp_link : link function, optional
        Defaults to the identity link.
    """
    gp_link = link_functions.Identity() if gp_link is None else gp_link
    super(Categorical, self).__init__(gp_link, name='Categorical')
    self.K = K
def __init__(self, gp_link=None):
    """Beta likelihood; uses the identity link by default."""
    gp_link = link_functions.Identity() if gp_link is None else gp_link
    super(Beta, self).__init__(gp_link, name='Beta')
def __init__(self, gp_link=None):
    """Poisson likelihood; uses the identity link by default."""
    gp_link = link_functions.Identity() if gp_link is None else gp_link
    super(Poisson, self).__init__(gp_link, name='Poisson')
def __init__(self, gp_link=None):
    """Exponential likelihood; uses the identity link by default."""
    gp_link = link_functions.Identity() if gp_link is None else gp_link
    super(Exponential, self).__init__(gp_link, name='Exponential')
def __init__(self, gp_link=None):
    """Heteroscedastic Gaussian (MA variant); identity link by default."""
    gp_link = link_functions.Identity() if gp_link is None else gp_link
    super(HetGaussianMA, self).__init__(gp_link, name='HetGaussianMA')
def __init__(self, gp_link=None):
    """Student likelihood; uses the identity link by default."""
    gp_link = link_functions.Identity() if gp_link is None else gp_link
    super(Student, self).__init__(gp_link, name='Student')
def __init__(self, likelihood_fns=None, name='Mixed_noise'):
    """Mixed-noise container holding several likelihood functions.

    Parameters
    ----------
    likelihood_fns : list, optional
        Per-output likelihood objects; defaults to a fresh empty list.
    name : str
        Name forwarded to the parent likelihood.
    """
    # TODO Why do we need to specify a link function?
    super(Mixed, self).__init__(name=name, gp_link=link_functions.Identity())
    # Fix for the original mutable-default-argument bug: the shared `[]`
    # default was aliased across every instance built without this argument,
    # so appending to one instance's list mutated all of them.
    self.likelihood_fns = [] if likelihood_fns is None else likelihood_fns
def __init__(self, gp_link=None):
    """Heteroscedastic GP likelihood; only the built-in link pair is supported.

    Raises
    ------
    NotImplementedError
        If a custom gp_link is supplied (this likelihood hard-codes its
        own pair of link functions).
    """
    if gp_link is not None:
        # Fixed the Python-2-only `raise Exc, "msg"` statement form,
        # which is a SyntaxError under Python 3.
        raise NotImplementedError("this likelihood assumes a complicated pair of link functions...")
    super(HetGP, self).__init__(link_functions.Identity(), 'HetGP')
def __init__(self, gp_link=None):
    """Binary classification likelihood (MA variant); identity link by default."""
    gp_link = link_functions.Identity() if gp_link is None else gp_link
    super(BinaryClassMA, self).__init__(gp_link, name='BinaryClassMA')
def __init__(self, gp_link=None):
    """Dirichlet likelihood; uses the identity link by default."""
    gp_link = link_functions.Identity() if gp_link is None else gp_link
    super(Dirichlet, self).__init__(gp_link, name='Dirichlet')
def __init__(self, gp_link=None, deg_free=5, sigma2=2):
    """Heteroscedastic Beta likelihood.

    deg_free and sigma2 are accepted for signature compatibility but are
    not used by this constructor.
    """
    gp_link = link_functions.Identity() if gp_link is None else gp_link
    # NOTE(review): 'Hetra_beta' looks like a typo ('Hetro_beta'?), but the
    # name string is runtime behavior and is kept verbatim — callers may
    # look the likelihood up by name.
    super(HetBeta, self).__init__(gp_link, name='Hetra_beta')
    # The Beta log-likelihood is not log-concave.
    self.log_concave = False