Example 1
    def __init__(self, gp_link=None):
        if gp_link is None:
            gp_link = link_functions.Log()

        super(HetLogLogistic, self).__init__(gp_link, name='Het_loglogistic')
        self.log_concave = False
        self.run_already = False
        self.T = 10  # number of points to use in quadrature
Example 2
    def __init__(self, gp_link=None):
        if gp_link is None:
            gp_link = link_functions.Log()

        super(MultiPoisson, self).__init__(gp_link, name='Multi_poisson')
        self.log_concave = False
        self.run_already = False
        self.T = 15  # number of points to use in quadrature
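
Both constructors above follow the same pattern: pick a default Log link, mark the likelihood as non-log-concave, and set the number of quadrature points. As a minimal usage sketch (not taken from the original source; the built-in Poisson likelihood stands in for the custom HetLogLogistic/MultiPoisson classes), such a likelihood is typically plugged into a GP together with Laplace inference:

import numpy as np
import GPy

X = np.linspace(0, 10, 50)[:, None]
Y = np.random.poisson(np.exp(np.sin(X))).astype(float)  # integer-valued targets

likelihood = GPy.likelihoods.Poisson()                   # default gp_link is also Log()
laplace = GPy.inference.latent_function_inference.Laplace()

m = GPy.core.GP(X, Y, kernel=GPy.kern.RBF(1),
                likelihood=likelihood,
                inference_method=laplace)
m.optimize()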
Example 3
    def setUp(self):
        np.random.seed(fixed_seed)
        self.N = 15
        self.D = 3
        self.X = np.random.rand(self.N, self.D) * 10

        self.real_std = 0.1
        noise = np.random.randn(*self.X[:, 0].shape) * self.real_std
        self.Y = (np.sin(self.X[:, 0] * 2 * np.pi) + noise)[:, None]
        self.f = np.random.rand(self.N, 1)
        self.binary_Y = np.asarray(np.random.rand(self.N) > 0.5,
                                   dtype=int)[:, None]
        self.binary_Y[self.binary_Y == 0.0] = -1.0
        self.positive_Y = np.exp(self.Y.copy())
        tmp = np.round(self.X[:, 0] * 3 - 3)[:, None] + np.random.randint(
            0, 3, self.X.shape[0])[:, None]
        self.integer_Y = np.where(tmp > 0, tmp, 0)
        self.ns = np.random.poisson(50, size=self.N)[:, None]
        p = np.abs(
            np.cos(2 * np.pi * self.X +
                   np.random.normal(scale=.2, size=(self.N, self.D)))).mean(1)
        self.binomial_Y = np.array([
            np.random.binomial(int(self.ns[i]), p[i])
            for i in range(p.shape[0])
        ])[:, None]

        self.var = 0.2
        self.deg_free = 4.0

        # Use a bigger step as the lower bound can be quite curved
        self.step = 1e-4
        """
        Dictionary where we nest models we would like to check
            Name: {
                "model": model_instance,
                "grad_params": {
                    "names": [names_of_params_we_want, to_grad_check],
                    "vals": [values_of_params, to_start_at],
                    "constrain": [constraint_wrappers, listed_here]
                    },
                "laplace": boolean_of_whether_model_should_work_for_laplace,
                "ep": boolean_of_whether_model_should_work_for_laplace,
                "link_f_constraints": [constraint_wrappers, listed_here]
                }
        """
        self.noise_models = {
            "Student_t_default": {
                "model":
                GPy.likelihoods.StudentT(deg_free=self.deg_free,
                                         sigma2=self.var),
                "grad_params": {
                    "names": [".*t_scale2"],
                    "vals": [self.var],
                    "constraints": [(".*t_scale2", self.constrain_positive),
                                    (".*deg_free", self.constrain_fixed)]
                },
                "laplace":
                True
            },
            #"Student_t_deg_free": {
            #"model": GPy.likelihoods.StudentT(deg_free=self.deg_free, sigma2=self.var),
            #"grad_params": {
            #"names": [".*deg_free"],
            #"vals": [self.deg_free],
            #"constraints": [(".*t_scale2", self.constrain_fixed), (".*deg_free", self.constrain_positive)]
            #},
            #"laplace": True
            #},
            "Student_t_1_var": {
                "model":
                GPy.likelihoods.StudentT(deg_free=self.deg_free,
                                         sigma2=self.var),
                "grad_params": {
                    "names": [".*t_scale2"],
                    "vals": [1.0],
                    "constraints": [(".*t_scale2", self.constrain_positive),
                                    (".*deg_free", self.constrain_fixed)]
                },
                "laplace":
                True
            },
            # FIXME: This is a known failure point: when the degrees of freedom
            # are very small and the variance is relatively small, the
            # likelihood is not log-concave and problems occur
            # "Student_t_small_deg_free": {
            # "model": GPy.likelihoods.StudentT(deg_free=1.5, sigma2=self.var),
            # "grad_params": {
            # "names": [".*t_scale2"],
            # "vals": [self.var],
            # "constraints": [(".*t_scale2", self.constrain_positive), (".*deg_free", self.constrain_fixed)]
            # },
            # "laplace": True
            # },
            "Student_t_small_var": {
                "model":
                GPy.likelihoods.StudentT(deg_free=self.deg_free,
                                         sigma2=self.var),
                "grad_params": {
                    "names": [".*t_scale2"],
                    "vals": [0.001],
                    "constraints": [(".*t_scale2", self.constrain_positive),
                                    (".*deg_free", self.constrain_fixed)]
                },
                "laplace":
                True
            },
            "Student_t_large_var": {
                "model":
                GPy.likelihoods.StudentT(deg_free=self.deg_free,
                                         sigma2=self.var),
                "grad_params": {
                    "names": [".*t_scale2"],
                    "vals": [10.0],
                    "constraints": [(".*t_scale2", self.constrain_positive),
                                    (".*deg_free", self.constrain_fixed)]
                },
                "laplace":
                True
            },
            "Student_t_approx_gauss": {
                "model": GPy.likelihoods.StudentT(deg_free=1000,
                                                  sigma2=self.var),
                "grad_params": {
                    "names": [".*t_scale2"],
                    "vals": [self.var],
                    "constraints": [(".*t_scale2", self.constrain_positive),
                                    (".*deg_free", self.constrain_fixed)]
                },
                "laplace": True
            },
            "Gaussian_default": {
                "model": GPy.likelihoods.Gaussian(variance=self.var),
                "grad_params": {
                    "names": [".*variance"],
                    "vals": [self.var],
                    "constraints": [(".*variance", self.constrain_positive)]
                },
                "laplace": True,
                "ep":
                False,  # FIXME: Should be True when we have it working again
                "variational_expectations": True,
            },
            "Gaussian_log": {
                "model":
                GPy.likelihoods.Gaussian(gp_link=link_functions.Log(),
                                         variance=self.var),
                "grad_params": {
                    "names": [".*variance"],
                    "vals": [self.var],
                    "constraints": [(".*variance", self.constrain_positive)]
                },
                "laplace":
                True,
                "variational_expectations":
                True
            },
            #"Gaussian_probit": {
            #"model": GPy.likelihoods.gaussian(gp_link=link_functions.Probit(), variance=self.var, D=self.D, N=self.N),
            #"grad_params": {
            #"names": ["noise_model_variance"],
            #"vals": [self.var],
            #"constraints": [constrain_positive]
            #},
            #"laplace": True
            #},
            #"Gaussian_log_ex": {
            #"model": GPy.likelihoods.gaussian(gp_link=link_functions.Log_ex_1(), variance=self.var, D=self.D, N=self.N),
            #"grad_params": {
            #"names": ["noise_model_variance"],
            #"vals": [self.var],
            #"constraints": [constrain_positive]
            #},
            #"laplace": True
            #},
            "Bernoulli_default": {
                "model":
                GPy.likelihoods.Bernoulli(),
                "link_f_constraints":
                [partial(self.constrain_bounded, lower=0, upper=1)],
                "laplace":
                True,
                "Y":
                self.binary_Y,
                "ep":
                True,  # FIXME: Should be True when we have it working again
                "variational_expectations":
                True
            },
            "Exponential_default": {
                "model": GPy.likelihoods.Exponential(),
                "link_f_constraints": [self.constrain_positive],
                "Y": self.positive_Y,
                "laplace": True,
            },
            "Poisson_default": {
                "model": GPy.likelihoods.Poisson(),
                "link_f_constraints": [self.constrain_positive],
                "Y": self.integer_Y,
                "laplace": True,
                "ep": False  #Should work though...
            },
            "Binomial_default": {
                "model":
                GPy.likelihoods.Binomial(),
                "link_f_constraints":
                [partial(self.constrain_bounded, lower=0, upper=1)],
                "Y":
                self.binomial_Y,
                "Y_metadata": {
                    'trials': self.ns
                },
                "laplace":
                True,
            },
            # GAMMA needs some work!
            #"Gamma_default": {
            #"model": GPy.likelihoods.Gamma(),
            #"link_f_constraints": [constrain_positive],
            #"Y": self.positive_Y,
            #"laplace": True
            #}
        }
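
For context (this snippet is not part of the test itself), a single entry of the dictionary above can be exercised end to end by wrapping its likelihood in a GP with Laplace inference and running GPy's gradient checker; self.X, self.Y and self.noise_models come from the fixture built in setUp, and GPy is assumed to be imported as in the test module:

# Illustrative sketch only: grad-check one configured likelihood.
entry = self.noise_models["Gaussian_default"]
laplace_inf = GPy.inference.latent_function_inference.Laplace()
m = GPy.core.GP(self.X, self.Y,
                kernel=GPy.kern.RBF(self.D),
                likelihood=entry["model"],
                inference_method=laplace_inf)
print(m.checkgrad(verbose=True))  # prints a per-parameter gradient check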
Example 4
    def __init__(self, binsize=1., gp_link=None):
        self.binsize = binsize
        self.logbinsize = np.log(self.binsize)
        if gp_link is None:
            gp_link = link_functions.Log()
        super(BinnedPoisson, self).__init__(gp_link, name='Poisson')
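
The binsize stored above rescales the Poisson rate: the expected count in a bin is the link output multiplied by the bin width, which is why the constructor also caches log(binsize). A hypothetical sketch of the corresponding log-pmf (not the original class body) would be:

import numpy as np
from scipy.special import gammaln

def binned_poisson_logpdf(link_f, y, binsize=1.0):
    # log p(y | f) = y*(log(link_f) + log(binsize)) - link_f*binsize - log(y!)
    rate = link_f * binsize
    return y * np.log(rate) - rate - gammaln(y + 1.0)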
Example 5
    def test_scale2_models(self):
        self.setUp()

        ####################################################
        # Constraint wrappers so we can just list them off #
        ####################################################
        def constrain_fixed(regex, model):
            model[regex].constrain_fixed()

        def constrain_negative(regex, model):
            model[regex].constrain_negative()

        def constrain_positive(regex, model):
            model[regex].constrain_positive()

        def constrain_bounded(regex, model, lower, upper):
            """
            Used like: partial(constrain_bounded, lower=0, upper=1)
            """
            model[regex].constrain_bounded(lower, upper)
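        # Example usage (illustrative, not part of the original test): each
        # wrapper pairs a parameter-name regex with a GPy model and is applied
        # just before grad-checking, e.g.
        #     constrain_positive(".*t_scale2", model)
        #     constrain_fixed(".*deg_free", model)
        #     partial(constrain_bounded, lower=0, upper=1)(".*variance", model)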

        """
        Dictionary where we nest models we would like to check
            Name: {
                "model": model_instance,
                "grad_params": {
                    "names": [names_of_params_we_want, to_grad_check],
                    "vals": [values_of_params, to_start_at],
                    "constrain": [constraint_wrappers, listed_here]
                    },
                "laplace": boolean_of_whether_model_should_work_for_laplace,
                "ep": boolean_of_whether_model_should_work_for_laplace,
                "link_f_constraints": [constraint_wrappers, listed_here]
                }
        """
        noise_models = {
            "Student_t_default": {
                "model": GPy.likelihoods.StudentT(deg_free=5, sigma2=self.var),
                "grad_params": {
                    "names": [".*t_scale2"],
                    "vals": [self.var],
                    "constraints": [(".*t_scale2", constrain_positive),
                                    (".*deg_free", constrain_fixed)]
                    #"constraints": [("t_scale2", constrain_positive), ("deg_free", partial(constrain_fixed, value=5))]
                },
                "laplace": True
            },
            "Student_t_1_var": {
                "model": GPy.likelihoods.StudentT(deg_free=5, sigma2=self.var),
                "grad_params": {
                    "names": [".*t_scale2"],
                    "vals": [1.0],
                    "constraints": [(".*t_scale2", constrain_positive),
                                    (".*deg_free", constrain_fixed)]
                },
                "laplace": True
            },
            "Student_t_small_deg_free": {
                "model": GPy.likelihoods.StudentT(deg_free=1.5,
                                                  sigma2=self.var),
                "grad_params": {
                    "names": [".*t_scale2"],
                    "vals": [self.var],
                    "constraints": [(".*t_scale2", constrain_positive),
                                    (".*deg_free", constrain_fixed)]
                },
                "laplace": True
            },
            "Student_t_small_var": {
                "model": GPy.likelihoods.StudentT(deg_free=5, sigma2=self.var),
                "grad_params": {
                    "names": [".*t_scale2"],
                    "vals": [0.001],
                    "constraints": [(".*t_scale2", constrain_positive),
                                    (".*deg_free", constrain_fixed)]
                },
                "laplace": True
            },
            "Student_t_large_var": {
                "model": GPy.likelihoods.StudentT(deg_free=5, sigma2=self.var),
                "grad_params": {
                    "names": [".*t_scale2"],
                    "vals": [10.0],
                    "constraints": [(".*t_scale2", constrain_positive),
                                    (".*deg_free", constrain_fixed)]
                },
                "laplace": True
            },
            "Student_t_approx_gauss": {
                "model": GPy.likelihoods.StudentT(deg_free=1000,
                                                  sigma2=self.var),
                "grad_params": {
                    "names": [".*t_scale2"],
                    "vals": [self.var],
                    "constraints": [(".*t_scale2", constrain_positive),
                                    (".*deg_free", constrain_fixed)]
                },
                "laplace": True
            },
            "Student_t_log": {
                "model":
                GPy.likelihoods.StudentT(gp_link=link_functions.Log(),
                                         deg_free=5,
                                         sigma2=self.var),
                "grad_params": {
                    "names": [".*t_scale2"],
                    "vals": [self.var],
                    "constraints": [(".*t_scale2", constrain_positive),
                                    (".*deg_free", constrain_fixed)]
                },
                "laplace":
                True
            },
            "Gaussian_default": {
                "model": GPy.likelihoods.Gaussian(variance=self.var),
                "grad_params": {
                    "names": [".*variance"],
                    "vals": [self.var],
                    "constraints": [(".*variance", constrain_positive)]
                },
                "laplace": True,
                "ep":
                False  # FIXME: Should be True when we have it working again
            },
            #"Gaussian_log": {
            #"model": GPy.likelihoods.gaussian(gp_link=link_functions.Log(), variance=self.var, D=self.D, N=self.N),
            #"grad_params": {
            #"names": ["noise_model_variance"],
            #"vals": [self.var],
            #"constraints": [constrain_positive]
            #},
            #"laplace": True
            #},
            #"Gaussian_probit": {
            #"model": GPy.likelihoods.gaussian(gp_link=link_functions.Probit(), variance=self.var, D=self.D, N=self.N),
            #"grad_params": {
            #"names": ["noise_model_variance"],
            #"vals": [self.var],
            #"constraints": [constrain_positive]
            #},
            #"laplace": True
            #},
            #"Gaussian_log_ex": {
            #"model": GPy.likelihoods.gaussian(gp_link=link_functions.Log_ex_1(), variance=self.var, D=self.D, N=self.N),
            #"grad_params": {
            #"names": ["noise_model_variance"],
            #"vals": [self.var],
            #"constraints": [constrain_positive]
            #},
            #"laplace": True
            #},
            "Bernoulli_default": {
                "model":
                GPy.likelihoods.Bernoulli(),
                "link_f_constraints":
                [partial(constrain_bounded, lower=0, upper=1)],
                "laplace":
                True,
                "Y":
                self.binary_Y,
                "ep":
                False  # FIXME: Should be True when we have it working again
            },
            "Exponential_default": {
                "model": GPy.likelihoods.Exponential(),
                "link_f_constraints": [constrain_positive],
                "Y": self.positive_Y,
                "laplace": True,
            },
            "Poisson_default": {
                "model": GPy.likelihoods.Poisson(),
                "link_f_constraints": [constrain_positive],
                "Y": self.integer_Y,
                "laplace": True,
                "ep": False  #Should work though...
            },
            # GAMMA needs some work!
            #"Gamma_default": {
            #"model": GPy.likelihoods.Gamma(),
            #"link_f_constraints": [constrain_positive],
            #"Y": self.positive_Y,
            #"laplace": True
            #}
        }

        for name, attributes in noise_models.items():
            model = attributes["model"]
            if "grad_params" in attributes:
                params = attributes["grad_params"]
                param_vals = params["vals"]
                param_names = params["names"]
                param_constraints = params["constraints"]
            else:
                params = []
                param_vals = []
                param_names = []
                param_constraints = []  # no likelihood parameters to grad-check
            if "link_f_constraints" in attributes:
                link_f_constraints = attributes["link_f_constraints"]
            else:
                link_f_constraints = []
            if "Y" in attributes:
                Y = attributes["Y"].copy()
            else:
                Y = self.Y.copy()
            if "f" in attributes:
                f = attributes["f"].copy()
            else:
                f = self.f.copy()
            if "laplace" in attributes:
                laplace = attributes["laplace"]
            else:
                laplace = False
            if "ep" in attributes:
                ep = attributes["ep"]
            else:
                ep = False

            #if len(param_vals) > 1:
            #raise NotImplementedError("Cannot support multiple params in likelihood yet!")

            #Required by all
            #Normal derivatives
            yield self.t_logpdf, model, Y, f
            yield self.t_dlogpdf_df, model, Y, f
            yield self.t_d2logpdf_df2, model, Y, f
            #Link derivatives
            yield self.t_dlogpdf_dlink, model, Y, f, link_f_constraints
            yield self.t_d2logpdf_dlink2, model, Y, f, link_f_constraints
            if laplace:
                #Laplace only derivatives
                yield self.t_d3logpdf_df3, model, Y, f
                yield self.t_d3logpdf_dlink3, model, Y, f, link_f_constraints
                #Params
                yield self.t_dlogpdf_dparams, model, Y, f, param_vals, param_names, param_constraints
                yield self.t_dlogpdf_df_dparams, model, Y, f, param_vals, param_names, param_constraints
                yield self.t_d2logpdf2_df2_dparams, model, Y, f, param_vals, param_names, param_constraints
                #Link params
                yield self.t_dlogpdf_link_dparams, model, Y, f, param_vals, param_names, param_constraints
                yield self.t_dlogpdf_dlink_dparams, model, Y, f, param_vals, param_names, param_constraints
                yield self.t_d2logpdf2_dlink2_dparams, model, Y, f, param_vals, param_names, param_constraints

                #laplace likelihood gradcheck
                yield self.t_laplace_fit_rbf_white, model, self.X, Y, f, self.step, param_vals, param_names, param_constraints
            if ep:
                #ep likelihood gradcheck
                yield self.t_ep_fit_rbf_white, model, self.X, Y, f, self.step, param_vals, param_names, param_constraints

        self.tearDown()
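
The yield statements above follow the nose test-generator convention: each yielded tuple is a check function followed by its arguments. Under a plain runner they could be driven by hand roughly as follows (TestNoiseModels is a hypothetical name, since the enclosing class is not shown in this excerpt):

suite = TestNoiseModels()          # hypothetical name for the test class
for check, *args in suite.test_scale2_models():
    check(*args)                   # setUp/tearDown are handled inside the generator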