Code Example #1
    def _test_qX(self, kernel, psi2n=False):
        def f(p):
            self.qX.param_array[:] = p
            self.qX._trigger_params_changed()
            psi0 = kernel.psi0(self.Z, self.qX)
            psi1 = kernel.psi1(self.Z, self.qX)
            if not psi2n:
                psi2 = kernel.psi2(self.Z, self.qX)
                return (self.w1 * psi0).sum() + (self.w2 * psi1).sum() + (
                    self.w3 * psi2).sum()
            else:
                psi2 = kernel.psi2n(self.Z, self.qX)
                return (self.w1 * psi0).sum() + (self.w2 * psi1).sum() + (
                    self.w3n * psi2).sum()

        def df(p):
            self.qX.param_array[:] = p
            self.qX._trigger_params_changed()
            grad = kernel.gradients_qX_expectations(
                self.w1, self.w2, self.w3 if not psi2n else self.w3n, self.Z,
                self.qX)
            self.qX.set_gradients(grad)
            return self.qX.gradient.copy()

        from GPy.models import GradientChecker
        m = GradientChecker(f, df, self.qX.param_array.copy())
        self.assertTrue(m.checkgrad())
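For reference, every snippet on this page follows the same pattern: GradientChecker is built from an objective f, its claimed analytic gradient df, and a starting point, and checkgrad() compares df against a finite-difference approximation of f. A minimal self-contained sketch (the quadratic objective is illustrative, not taken from any project above):

import numpy as np
from GPy.models import GradientChecker

f = lambda x: np.square(x).sum()    # objective, f: R^N -> R
df = lambda x: 2.0 * x              # analytic gradient, df: R^N -> R^N
g = GradientChecker(f, df, np.random.randn(4), 'x')
assert g.checkgrad(verbose=1)       # True when analytic and numerical gradients agree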
Code Example #2
File: testUS_LW.py  Project: rongrong1314/gpsearch-1
def main():

    np.random.seed(2)

    M, Q = 15, 3
    X = np.random.rand(M,Q)
    Y = np.random.rand(M,1)

    ker = RBF(input_dim=Q, ARD=True, variance=1.34, 
              lengthscale=np.random.rand(1,Q))
    model = GPy.models.GPRegression(X=X, Y=Y, kernel=ker,
                                    normalizer=True)

    inputs = UniformInputs([[0,1]]*Q)
    likelihood = Likelihood(model, inputs)

    x_new = np.random.rand(2, Q)
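    # LCB_LW: likelihood-weighted lower-confidence-bound acquisition criterion (name from gpsearch)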
    qcrit = LCB_LW(model, inputs, likelihood=likelihood)

    g = GradientChecker(lambda x: qcrit.evaluate(x),
                        lambda x: qcrit.jacobian(x),
                        x_new, 'x')
    assert(g.checkgrad())

    # Batch evaluation should match row-by-row evaluation
    a = qcrit.evaluate(x_new)
    for i in range(x_new.shape[0]):
        print(qcrit.evaluate(x_new[i,:]))
    print(a)

    # Same consistency check for the Jacobian
    a = qcrit.jacobian(x_new)
    for i in range(x_new.shape[0]):
        print(qcrit.jacobian(x_new[i,:]))
    print(a)
Code Example #3
    def _test_kernel_param(self, kernel, psi2n=False):
        def f(p):
            kernel.param_array[:] = p
            psi0 = kernel.psi0(self.Z, self.qX)
            psi1 = kernel.psi1(self.Z, self.qX)
            if not psi2n:
                psi2 = kernel.psi2(self.Z, self.qX)
                return (self.w1 * psi0).sum() + (self.w2 * psi1).sum() + (
                    self.w3 * psi2).sum()
            else:
                psi2 = kernel.psi2n(self.Z, self.qX)
                return (self.w1 * psi0).sum() + (self.w2 * psi1).sum() + (
                    self.w3n * psi2).sum()

        def df(p):
            kernel.param_array[:] = p
            kernel.update_gradients_expectations(
                self.w1, self.w2, self.w3 if not psi2n else self.w3n, self.Z,
                self.qX)
            return kernel.gradient.copy()

        from GPy.models import GradientChecker
        m = GradientChecker(f, df, kernel.param_array.copy())
        m.checkgrad(verbose=1)
        self.assertTrue(m.checkgrad())
Code Example #4
File: kernel_tests.py  Project: pxlong/GPy
    def _test_qX(self, kernel, psi2n=False):
        def f(p):
            self.qX.param_array[:] = p
            self.qX._trigger_params_changed()
            psi0 = kernel.psi0(self.Z, self.qX)
            psi1 = kernel.psi1(self.Z, self.qX)
            if not psi2n:
                psi2 = kernel.psi2(self.Z, self.qX)
                return (self.w1 * psi0).sum() + (self.w2 * psi1).sum() + (self.w3 * psi2).sum()
            else:
                psi2 = kernel.psi2n(self.Z, self.qX)
                return (self.w1 * psi0).sum() + (self.w2 * psi1).sum() + (self.w3n * psi2).sum()

        def df(p):
            self.qX.param_array[:] = p
            self.qX._trigger_params_changed()
            grad = kernel.gradients_qX_expectations(
                self.w1, self.w2, self.w3 if not psi2n else self.w3n, self.Z, self.qX
            )
            self.qX.set_gradients(grad)
            return self.qX.gradient.copy()

        from GPy.models import GradientChecker

        m = GradientChecker(f, df, self.qX.param_array.copy())
        self.assertTrue(m.checkgrad())
Code Example #5
    def t_dexp_dvar(self, model, Y, Y_metadata):
        print("\n{}".format(inspect.stack()[0][3]))
        #Make mu and var (marginal means and variances of q(f)) draws from a GP
        k = GPy.kern.RBF(1).K(np.linspace(0, 1, Y.shape[0])[:, None])
        L = GPy.util.linalg.jitchol(k)
        mu = L.dot(np.random.randn(*Y.shape))
        #Variance must be positive
        var = np.abs(L.dot(np.random.randn(*Y.shape))) + 0.01
        expectation = functools.partial(model.variational_expectations,
                                        Y=Y,
                                        m=mu,
                                        gh_points=None,
                                        Y_metadata=Y_metadata)

        #Function to get the nth returned value
        def F(var):
            return expectation(v=var)[0]

        def dvar(var):
            return expectation(v=var)[2]

        grad = GradientChecker(F, dvar, var.copy(), 'v')

        self.constrain_positive('v', grad)
        #grad.randomize()
        print(grad)
        print(model)
        assert grad.checkgrad(verbose=1)
Code Example #6
File: likelihood_tests.py  Project: pxlong/GPy
    def t_dexp_dvar(self, model, Y, Y_metadata):
        print("\n{}".format(inspect.stack()[0][3]))
        # Make mu and var (marginal means and variances of q(f)) draws from a GP
        k = GPy.kern.RBF(1).K(np.linspace(0, 1, Y.shape[0])[:, None])
        L = GPy.util.linalg.jitchol(k)
        mu = L.dot(np.random.randn(*Y.shape))
        # Variance must be positive
        var = np.abs(L.dot(np.random.randn(*Y.shape))) + 0.01
        expectation = functools.partial(
            model.variational_expectations, Y=Y, m=mu, gh_points=None, Y_metadata=Y_metadata
        )

        # Function to get the nth returned value
        def F(var):
            return expectation(v=var)[0]

        def dvar(var):
            return expectation(v=var)[2]

        grad = GradientChecker(F, dvar, var.copy(), "v")

        self.constrain_positive("v", grad)
        # grad.randomize()
        print(grad)
        print(model)
        assert grad.checkgrad(verbose=1)
Code Example #7
File: likelihood_tests.py  Project: Arthurkorn/GPy
 def t_d3logpdf_df3(self, model, Y, f):
     print "\n{}".format(inspect.stack()[0][3])
     d2logpdf_df2 = functools.partial(model.d2logpdf_df2, y=Y)
     d3logpdf_df3 = functools.partial(model.d3logpdf_df3, y=Y)
     grad = GradientChecker(d2logpdf_df2, d3logpdf_df3, f.copy(), 'g')
     grad.randomize()
     print(model)
     assert grad.checkgrad(verbose=1)
Code Example #8
File: likelihood_tests.py  Project: pxlong/GPy
 def t_d3logpdf_df3(self, model, Y, f, Y_metadata):
     print("\n{}".format(inspect.stack()[0][3]))
     d2logpdf_df2 = functools.partial(model.d2logpdf_df2, y=Y, Y_metadata=Y_metadata)
     d3logpdf_df3 = functools.partial(model.d3logpdf_df3, y=Y, Y_metadata=Y_metadata)
     grad = GradientChecker(d2logpdf_df2, d3logpdf_df3, f.copy(), "g")
     grad.randomize()
     print(model)
     assert grad.checkgrad(verbose=1)
Code Example #9
 def t_d3logpdf_df3(self, model, Y, f):
     print "\n{}".format(inspect.stack()[0][3])
     d2logpdf_df2 = functools.partial(model.d2logpdf_df2, y=Y)
     d3logpdf_df3 = functools.partial(model.d3logpdf_df3, y=Y)
     grad = GradientChecker(d2logpdf_df2, d3logpdf_df3, f.copy(), 'g')
     grad.randomize()
     print(model)
     assert grad.checkgrad(verbose=1)
Code Example #10
 def t_dlogpdf_df(self, model, Y, f):
     print "\n{}".format(inspect.stack()[0][3])
     self.description = "\n{}".format(inspect.stack()[0][3])
     logpdf = functools.partial(model.logpdf, y=Y)
     dlogpdf_df = functools.partial(model.dlogpdf_df, y=Y)
     grad = GradientChecker(logpdf, dlogpdf_df, f.copy(), 'g')
     grad.randomize()
     print(model)
     assert grad.checkgrad(verbose=1)
Code Example #11
File: likelihood_tests.py  Project: pxlong/GPy
 def t_dlogpdf_df(self, model, Y, f, Y_metadata):
     print("\n{}".format(inspect.stack()[0][3]))
     self.description = "\n{}".format(inspect.stack()[0][3])
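     # NOTE: np.sum applied to the bound method is effectively a no-op (it returns model.logpdf itself), so the per-point logpdf is what gets checked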
     logpdf = functools.partial(np.sum(model.logpdf), y=Y, Y_metadata=Y_metadata)
     dlogpdf_df = functools.partial(model.dlogpdf_df, y=Y, Y_metadata=Y_metadata)
     grad = GradientChecker(logpdf, dlogpdf_df, f.copy(), "g")
     grad.randomize()
     print(model)
     assert grad.checkgrad(verbose=1)
Code Example #12
File: likelihood_tests.py  Project: Arthurkorn/GPy
 def t_dlogpdf_df(self, model, Y, f):
     print "\n{}".format(inspect.stack()[0][3])
     self.description = "\n{}".format(inspect.stack()[0][3])
     logpdf = functools.partial(model.logpdf, y=Y)
     dlogpdf_df = functools.partial(model.dlogpdf_df, y=Y)
     grad = GradientChecker(logpdf, dlogpdf_df, f.copy(), 'g')
     grad.randomize()
     print(model)
     assert grad.checkgrad(verbose=1)
Code Example #13
 def t_d2logpdf_df2(self, model, Y, f, Y_metadata):
     print("\n{}".format(inspect.stack()[0][3]))
     dlogpdf_df = functools.partial(model.dlogpdf_df,
                                    y=Y,
                                    Y_metadata=Y_metadata)
     d2logpdf_df2 = functools.partial(model.d2logpdf_df2,
                                      y=Y,
                                      Y_metadata=Y_metadata)
     grad = GradientChecker(dlogpdf_df, d2logpdf_df2, f.copy(), 'g')
     grad.randomize()
     print(model)
     assert grad.checkgrad(verbose=1)
Code Example #14
File: testGMM.py  Project: ablancha/gpsearch
def main():

    ndim = 2
    np.random.seed(3)

    tf = 25
    nsteps = 1000
    u_init = [0, 0]
    noise = Noise([0, tf])
    oscil = Oscillator(noise, tf, nsteps, u_init)
    myMap = BlackBox(map_def, args=(oscil,))

    lam = noise.get_eigenvalues(ndim)
    mean = np.zeros(ndim)
    cov = np.diag(lam)

    domain = [ [-a, a] for a in 6.0*np.sqrt(np.diag(cov)) ] 
    inputs = GaussianInputs(domain, mean, cov)
    #inputs = UniformInputs(domain)

    kwargs_gmm = dict(n_components=4, covariance_type="spherical")

    X = inputs.draw_samples(100, "lhs")
    Y = myMap.evaluate(X, parallel=True)
    o = OptimalDesign(X, Y, myMap, inputs, normalize_Y=True)
    likelihood = Likelihood(o.model, o.inputs, "nominal", kwargs_gmm=kwargs_gmm)

    x_new = np.atleast_2d([1.0,2.0])
    gmm_y = likelihood.evaluate(x_new)
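    # Sanity check: compare the finite-difference Jacobian (jacobian_fdiff) with the analytic one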
    print(jacobian_fdiff(likelihood, x_new))
    print(likelihood.jacobian(x_new))

    from GPy.models import GradientChecker
    gm = GradientChecker(lambda x: likelihood.evaluate(x),
                         lambda x: likelihood.jacobian(x), 
                         x_new, 'x')
    assert(gm.checkgrad())

    pts = inputs.draw_samples(n_samples=100, sample_method="grd")
    gmm_y = likelihood.evaluate(pts).flatten()
    pix = likelihood._evaluate_raw(pts).flatten()

    fig = plt.figure(figsize=(12,6))
    plt.subplot(1,2,1)
    sc = plt.scatter(pts[:,0], pts[:,1], c=pix)
    plt.colorbar(sc)
    plt.title(r"$f_x/f_y$")
    plt.subplot(1,2,2)
    sc = plt.scatter(pts[:,0], pts[:,1], c=gmm_y)
    plt.colorbar(sc)
    plt.title("GMM fit")
    plt.show()
Code Example #15
 def t_dlogpdf_df(self, model, Y, f, Y_metadata):
     print("\n{}".format(inspect.stack()[0][3]))
     self.description = "\n{}".format(inspect.stack()[0][3])
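     # NOTE: np.sum applied to the bound method is effectively a no-op (it returns model.logpdf itself), so the per-point logpdf is what gets checked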
     logpdf = functools.partial(np.sum(model.logpdf),
                                y=Y,
                                Y_metadata=Y_metadata)
     dlogpdf_df = functools.partial(model.dlogpdf_df,
                                    y=Y,
                                    Y_metadata=Y_metadata)
     grad = GradientChecker(logpdf, dlogpdf_df, f.copy(), 'g')
     grad.randomize()
     print(model)
     assert grad.checkgrad(verbose=1)
Code Example #16
File: test_likelihood.py  Project: ablancha/gpsearch
def test_likelihood_gradients_importance():
    X = inputs.draw_samples(100, "lhs")
    Y = myMap.evaluate(X, parallel=True)
    o = OptimalDesign(X, Y, myMap, inputs, normalize_Y=True)
    x_new = np.atleast_2d([1.0, 2.0])
    kwargs_gmm = dict(n_components=4, covariance_type="full")
    likelihood = Likelihood(o.model,
                            o.inputs,
                            "importance",
                            kwargs_gmm=kwargs_gmm)
    gm = GradientChecker(lambda x: likelihood.evaluate(x),
                         lambda x: likelihood.jacobian(x), x_new, 'x')
    assert (gm.checkgrad())
Code Example #17
def dparam_checkgrad(func,
                     dfunc,
                     params,
                     params_names,
                     args,
                     constraints=None,
                     randomize=False,
                     verbose=False):
    """
    checkgrad expects f: R^N -> R^1 and df: R^N -> R^N.
    However, if we are holding other parameters fixed and moving something else,
    we need to check the gradient of each of the fixed parameters
    (f and y, for example) separately, whilst moving another parameter.
    Otherwise f gives back R^N and df gives back R^NxM, where M is
    the number of parameters and N is the number of data points.
    We therefore need to take a slice out of f and a slice out of df.
    """
    print "\n{} likelihood: {} vs {}".format(func.im_self.__class__.__name__,
                                             func.__name__, dfunc.__name__)
    partial_f = dparam_partial(func, *args)
    partial_df = dparam_partial(dfunc, *args)
    gradchecking = True
    zipped_params = zip(params, params_names)
    for param_ind, (param_val, param_name) in enumerate(zipped_params):
        #Check one parameter at a time, make sure it is 2d (as some gradients only return arrays) then strip out the parameter
        fnum = np.atleast_2d(partial_f(param_val,
                                       param_name))[:, param_ind].shape[0]
        dfnum = np.atleast_2d(partial_df(param_val,
                                         param_name))[:, param_ind].shape[0]
        for fixed_val in range(dfnum):
            #dlik and dlik_dvar gives back 1 value for each
            f_ind = min(fnum, fixed_val + 1) - 1
            print "fnum: {} dfnum: {} f_ind: {} fixed_val: {}".format(
                fnum, dfnum, f_ind, fixed_val)
            #Make grad checker with this param moving, note that set_params is NOT being called
            #The parameter is being set directly with __setattr__
            #Check only the parameter and function value we wish to check at a time
            grad = GradientChecker(
                lambda p_val: np.atleast_2d(partial_f(p_val, param_name))[
                    f_ind, param_ind], lambda p_val: np.atleast_2d(
                        partial_df(p_val, param_name))[fixed_val, param_ind],
                param_val, [param_name])

            if constraints is not None:
                for constrain_param, constraint in constraints:
                    if grad.grep_param_names(constrain_param):
                        constraint(constrain_param, grad)
                    else:
                        print "parameter didn't exist"
                    print constrain_param, " ", constraint
            if randomize:
                grad.randomize()
            if verbose:
                print(grad)
                grad.checkgrad(verbose=1)
            if not grad.checkgrad(verbose=True):
                gradchecking = False

    return gradchecking
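The slicing strategy described in the docstring above can be exercised in isolation: when f returns a vector, each output component is paired with the matching component of df and checked as its own scalar problem. A minimal sketch of that idea (toy functions, not GPy likelihood methods):

import numpy as np
from GPy.models import GradientChecker

f_vec = lambda p: np.array([p[0]**2, 3.0 * p[0]])   # vector-valued f: R^1 -> R^2
df_vec = lambda p: np.array([2.0 * p[0], 3.0])      # one derivative per output
for i in range(2):
    # check output component i against derivative component i
    grad = GradientChecker(lambda p, i=i: f_vec(p)[i],
                           lambda p, i=i: df_vec(p)[i],
                           np.array([0.7]), 'p')
    assert grad.checkgrad(verbose=1)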
Code Example #18
File: likelihood_tests.py  Project: pxlong/GPy
    def t_d3logpdf_dlink3(self, model, Y, f, Y_metadata, link_f_constraints):
        print("\n{}".format(inspect.stack()[0][3]))
        d2logpdf_dlink2 = functools.partial(model.d2logpdf_dlink2, y=Y, Y_metadata=Y_metadata)
        d3logpdf_dlink3 = functools.partial(model.d3logpdf_dlink3, y=Y, Y_metadata=Y_metadata)
        grad = GradientChecker(d2logpdf_dlink2, d3logpdf_dlink3, f.copy(), "g")

        # Apply constraints to link_f values
        for constraint in link_f_constraints:
            constraint("g", grad)

        grad.randomize()
        print(grad)
        print(model)
        assert grad.checkgrad(verbose=1)
Code Example #19
File: likelihood_tests.py  Project: Arthurkorn/GPy
    def t_d3logpdf_dlink3(self, model, Y, f, link_f_constraints):
        print "\n{}".format(inspect.stack()[0][3])
        d2logpdf_dlink2 = functools.partial(model.d2logpdf_dlink2, y=Y)
        d3logpdf_dlink3 = functools.partial(model.d3logpdf_dlink3, y=Y)
        grad = GradientChecker(d2logpdf_dlink2, d3logpdf_dlink3, f.copy(), 'g')

        #Apply constraints to link_f values
        for constraint in link_f_constraints:
            constraint('g', grad)

        grad.randomize()
        print(grad)
        print(model)
        assert grad.checkgrad(verbose=1)
Code Example #20
    def t_d3logpdf_dlink3(self, model, Y, f, link_f_constraints):
        print "\n{}".format(inspect.stack()[0][3])
        d2logpdf_dlink2 = functools.partial(model.d2logpdf_dlink2, y=Y)
        d3logpdf_dlink3 = functools.partial(model.d3logpdf_dlink3, y=Y)
        grad = GradientChecker(d2logpdf_dlink2, d3logpdf_dlink3, f.copy(), 'g')

        #Apply constraints to link_f values
        for constraint in link_f_constraints:
            constraint('g', grad)

        grad.randomize()
        print(grad)
        print(model)
        assert grad.checkgrad(verbose=1)
Code Example #21
 def t_dtransf_df(self, transformation, f):
     print("\n{}".format(inspect.stack()[0][3]))
     grad = GradientChecker(transformation.transf,
                            transformation.dtransf_df, f, 'f')
     grad.randomize()
     grad.checkgrad(verbose=1)
     assert grad.checkgrad()
Code Example #22
 def t_d3transf_df3(self, transformation, f):
     print "\n{}".format(inspect.stack()[0][3])
     grad = GradientChecker(transformation.d2transf_df2,
                            transformation.d3transf_df3, f, 'f')
     grad.randomize()
     grad.checkgrad(verbose=1)
     assert grad.checkgrad()
Code Example #23
    def t_d2logpdf_dlink2(self, model, Y, f, Y_metadata, link_f_constraints):
        print("\n{}".format(inspect.stack()[0][3]))
        dlogpdf_dlink = functools.partial(model.dlogpdf_dlink,
                                          y=Y,
                                          Y_metadata=Y_metadata)
        d2logpdf_dlink2 = functools.partial(model.d2logpdf_dlink2,
                                            y=Y,
                                            Y_metadata=Y_metadata)
        grad = GradientChecker(dlogpdf_dlink, d2logpdf_dlink2, f.copy(), 'g')

        #Apply constraints to link_f values
        for constraint in link_f_constraints:
            constraint('g', grad)

        grad.randomize()
        print(grad)
        print(model)
        assert grad.checkgrad(verbose=1)
Code Example #24
    def test_gaussian_d2logpdf_df2_2(self):
        print("\n{}".format(inspect.stack()[0][3]))
        self.Y = None

        self.N = 2
        self.D = 1
        self.X = np.linspace(0, self.D, self.N)[:, None]
        self.real_std = 0.2
        noise = np.random.randn(*self.X.shape) * self.real_std
        self.Y = np.sin(self.X * 2 * np.pi) + noise
        self.f = np.random.rand(self.N, 1)

        dlogpdf_df = functools.partial(self.gauss.dlogpdf_df, y=self.Y)
        d2logpdf_df2 = functools.partial(self.gauss.d2logpdf_df2, y=self.Y)
        grad = GradientChecker(dlogpdf_df, d2logpdf_df2, self.f.copy(), 'g')
        grad.randomize()

        self.assertTrue(grad.checkgrad(verbose=1))
Code Example #25
File: likelihood_tests.py  Project: pxlong/GPy
    def test_gaussian_d2logpdf_df2_2(self):
        print("\n{}".format(inspect.stack()[0][3]))
        self.Y = None

        self.N = 2
        self.D = 1
        self.X = np.linspace(0, self.D, self.N)[:, None]
        self.real_std = 0.2
        noise = np.random.randn(*self.X.shape) * self.real_std
        self.Y = np.sin(self.X * 2 * np.pi) + noise
        self.f = np.random.rand(self.N, 1)

        dlogpdf_df = functools.partial(self.gauss.dlogpdf_df, y=self.Y)
        d2logpdf_df2 = functools.partial(self.gauss.d2logpdf_df2, y=self.Y)
        grad = GradientChecker(dlogpdf_df, d2logpdf_df2, self.f.copy(), "g")
        grad.randomize()

        self.assertTrue(grad.checkgrad(verbose=1))
Code Example #26
File: kernel_tests.py  Project: gehbiszumeis/GPy
    def _test_Z(self, kernel, psi2n=False):

        def f(p):
            psi0 = kernel.psi0(p, self.qX)
            psi1 = kernel.psi1(p, self.qX)
            if not psi2n:
                psi2 = kernel.psi2(p, self.qX)
                return (self.w1*psi0).sum() + (self.w2*psi1).sum() + (self.w3*psi2).sum()
            else:
                psi2 = kernel.psi2n(p, self.qX)
                return (self.w1*psi0).sum() + (self.w2*psi1).sum() + (self.w3n*psi2).sum()

        def df(p):
            return kernel.gradients_Z_expectations(self.w1, self.w2, self.w3 if not psi2n else self.w3n, p, self.qX)

        from GPy.models import GradientChecker
        m = GradientChecker(f, df, self.Z.copy())
        self.assertTrue(m.checkgrad())
Code Example #27
File: kernel_tests.py  Project: pxlong/GPy
    def _test_Z(self, kernel, psi2n=False):
        def f(p):
            psi0 = kernel.psi0(p, self.qX)
            psi1 = kernel.psi1(p, self.qX)
            if not psi2n:
                psi2 = kernel.psi2(p, self.qX)
                return (self.w1 * psi0).sum() + (self.w2 * psi1).sum() + (self.w3 * psi2).sum()
            else:
                psi2 = kernel.psi2n(p, self.qX)
                return (self.w1 * psi0).sum() + (self.w2 * psi1).sum() + (self.w3n * psi2).sum()

        def df(p):
            return kernel.gradients_Z_expectations(self.w1, self.w2, self.w3 if not psi2n else self.w3n, p, self.qX)

        from GPy.models import GradientChecker

        m = GradientChecker(f, df, self.Z.copy())
        self.assertTrue(m.checkgrad())
Code Example #28
File: model_tests.py  Project: Imdrail/GPy
    def check_jacobian(self):
        try:
            import autograd.numpy as np, autograd as ag, GPy, matplotlib.pyplot as plt
            from GPy.models import GradientChecker, GPRegression
        except ImportError:
            raise self.skipTest("autograd not available to check gradients")
        def k(X, X2, alpha=1., lengthscale=None):
            if lengthscale is None:
                lengthscale = np.ones(X.shape[1])
            exp = 0.
            for q in range(X.shape[1]):
                exp += ((X[:, [q]] - X2[:, [q]].T)/lengthscale[q])**2
            #exp = np.sqrt(exp)
            return alpha * np.exp(-.5*exp)
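        # NOTE: dk and dkdk below capture ke by late binding; they are only evaluated after ke is created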
        dk = ag.elementwise_grad(lambda x, x2: k(x, x2, alpha=ke.variance.values, lengthscale=ke.lengthscale.values))
        dkdk = ag.elementwise_grad(dk, argnum=1)

        ke = GPy.kern.RBF(1, ARD=True)
        #ke.randomize()
        ke.variance = .2  # .randomize()
        ke.lengthscale[:] = .5
        ke.randomize()
        X = np.linspace(-1, 1, 1000)[:,None]
        X2 = np.array([[0.]]).T
        np.testing.assert_allclose(ke.gradients_X([[1.]], X, X), dk(X, X))
        np.testing.assert_allclose(ke.gradients_XX([[1.]], X, X).sum(0), dkdk(X, X))
        np.testing.assert_allclose(ke.gradients_X([[1.]], X, X2), dk(X, X2))
        np.testing.assert_allclose(ke.gradients_XX([[1.]], X, X2).sum(0), dkdk(X, X2))

        m = GPRegression(self.X, self.Y)
        def f(x):
            m.X[:] = x
            return m.log_likelihood()
        def df(x):
            m.X[:] = x
            # gradients are taken at the model's current inputs m.X, not the plotting grid X above
            return m.kern.gradients_X(m.grad_dict['dL_dK'], m.X)
        def ddf(x):
            m.X[:] = x
            return m.kern.gradients_XX(m.grad_dict['dL_dK'], m.X).sum(0)
        gc = GradientChecker(f, df, self.X)
        gc2 = GradientChecker(df, ddf, self.X)
        assert(gc.checkgrad())
        assert(gc2.checkgrad())
Code Example #29
File: kernel_tests.py  Project: pxlong/GPy
    def _test_kernel_param(self, kernel, psi2n=False):
        def f(p):
            kernel.param_array[:] = p
            psi0 = kernel.psi0(self.Z, self.qX)
            psi1 = kernel.psi1(self.Z, self.qX)
            if not psi2n:
                psi2 = kernel.psi2(self.Z, self.qX)
                return (self.w1 * psi0).sum() + (self.w2 * psi1).sum() + (self.w3 * psi2).sum()
            else:
                psi2 = kernel.psi2n(self.Z, self.qX)
                return (self.w1 * psi0).sum() + (self.w2 * psi1).sum() + (self.w3n * psi2).sum()

        def df(p):
            kernel.param_array[:] = p
            kernel.update_gradients_expectations(self.w1, self.w2, self.w3 if not psi2n else self.w3n, self.Z, self.qX)
            return kernel.gradient.copy()

        from GPy.models import GradientChecker

        m = GradientChecker(f, df, kernel.param_array.copy())
        self.assertTrue(m.checkgrad())
Code Example #30
def main():

    np.random.seed(2)

    M, Q = 15, 5
    X = np.random.rand(M, Q)
    Y = np.random.rand(M, 1)

    mu = np.random.rand(Q)
    cov = np.random.rand(Q)**2

    lb = np.abs(np.random.randn(Q, 1))
    ub = lb + np.abs(np.random.randn(Q, 1))
    domain = np.hstack((lb, ub)).tolist()
    inputs = UniformInputs(domain)
    #inputs = GaussianInputs(domain, mu, cov)
    #inputs = LogNormalInputs(domain, mu, cov)

    x_new = np.random.rand(3, Q)

    g = GradientChecker(lambda x: inputs.pdf(x), lambda x: inputs.pdf_jac(x),
                        x_new, 'x')
    assert (g.checkgrad())
Code Example #31
    def t_dlogpdf_dlink(self, model, Y, f, link_f_constraints):
        print "\n{}".format(inspect.stack()[0][3])
        logpdf = functools.partial(model.logpdf_link, y=Y)
        dlogpdf_dlink = functools.partial(model.dlogpdf_dlink, y=Y)
        grad = GradientChecker(logpdf, dlogpdf_dlink, f.copy(), 'g')

        #Apply constraints to link_f values
        for constraint in link_f_constraints:
            constraint('g', grad)

        grad.randomize()
        print(grad)
        grad.checkgrad(verbose=1)
        assert grad.checkgrad()
Code Example #32
    def check_jacobian(self):
        try:
            import autograd.numpy as np, autograd as ag, GPy, matplotlib.pyplot as plt
            from GPy.models import GradientChecker, GPRegression
        except ImportError:
            raise self.skipTest("autograd not available to check gradients")

        def k(X, X2, alpha=1., lengthscale=None):
            if lengthscale is None:
                lengthscale = np.ones(X.shape[1])
            exp = 0.
            for q in range(X.shape[1]):
                exp += ((X[:, [q]] - X2[:, [q]].T) / lengthscale[q])**2
            #exp = np.sqrt(exp)
            return alpha * np.exp(-.5 * exp)

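        # NOTE: dk and dkdk below capture ke by late binding; they are only evaluated after ke is created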
        dk = ag.elementwise_grad(lambda x, x2: k(
            x, x2, alpha=ke.variance.values, lengthscale=ke.lengthscale.values)
                                 )
        dkdk = ag.elementwise_grad(dk, argnum=1)

        ke = GPy.kern.RBF(1, ARD=True)
        #ke.randomize()
        ke.variance = .2  #.randomize()
        ke.lengthscale[:] = .5
        ke.randomize()
        X = np.linspace(-1, 1, 1000)[:, None]
        X2 = np.array([[0.]]).T
        np.testing.assert_allclose(ke.gradients_X([[1.]], X, X), dk(X, X))
        np.testing.assert_allclose(
            ke.gradients_XX([[1.]], X, X).sum(0), dkdk(X, X))
        np.testing.assert_allclose(ke.gradients_X([[1.]], X, X2), dk(X, X2))
        np.testing.assert_allclose(
            ke.gradients_XX([[1.]], X, X2).sum(0), dkdk(X, X2))

        m = GPRegression(self.X, self.Y)

        def f(x):
            m.X[:] = x
            return m.log_likelihood()

        def df(x):
            m.X[:] = x
            # gradients are taken at the model's current inputs m.X, not the plotting grid X above
            return m.kern.gradients_X(m.grad_dict['dL_dK'], m.X)

        def ddf(x):
            m.X[:] = x
            return m.kern.gradients_XX(m.grad_dict['dL_dK'], m.X).sum(0)

        gc = GradientChecker(f, df, self.X)
        gc2 = GradientChecker(df, ddf, self.X)
        assert (gc.checkgrad())
        assert (gc2.checkgrad())
Code Example #33
File: likelihood_tests.py  Project: Arthurkorn/GPy
def dparam_checkgrad(func, dfunc, params, params_names, args, constraints=None, randomize=False, verbose=False):
    """
    checkgrad expects f: R^N -> R^1 and df: R^N -> R^N.
    However, if we are holding other parameters fixed and moving something else,
    we need to check the gradient of each of the fixed parameters
    (f and y, for example) separately, whilst moving another parameter.
    Otherwise f gives back R^N and df gives back R^NxM, where M is
    the number of parameters and N is the number of data points.
    We therefore need to take a slice out of f and a slice out of df.
    """
    print "\n{} likelihood: {} vs {}".format(func.im_self.__class__.__name__,
                                           func.__name__, dfunc.__name__)
    partial_f = dparam_partial(func, *args)
    partial_df = dparam_partial(dfunc, *args)
    gradchecking = True
    zipped_params = zip(params, params_names)
    for param_ind, (param_val, param_name) in enumerate(zipped_params):
        #Check one parameter at a time, make sure it is 2d (as some gradients only return arrays) then strip out the parameter
        fnum = np.atleast_2d(partial_f(param_val, param_name))[:, param_ind].shape[0]
        dfnum = np.atleast_2d(partial_df(param_val, param_name))[:, param_ind].shape[0]
        for fixed_val in range(dfnum):
            #dlik and dlik_dvar gives back 1 value for each
            f_ind = min(fnum, fixed_val+1) - 1
            print "fnum: {} dfnum: {} f_ind: {} fixed_val: {}".format(fnum, dfnum, f_ind, fixed_val)
            #Make grad checker with this param moving, note that set_params is NOT being called
            #The parameter is being set directly with __setattr__
            #Check only the parameter and function value we wish to check at a time
            grad = GradientChecker(lambda p_val: np.atleast_2d(partial_f(p_val, param_name))[f_ind, param_ind],
                                   lambda p_val: np.atleast_2d(partial_df(p_val, param_name))[fixed_val, param_ind],
                                   param_val, [param_name])

            if constraints is not None:
                for constrain_param, constraint in constraints:
                    if grad.grep_param_names(constrain_param):
                        constraint(constrain_param, grad)
                    else:
                        print "parameter didn't exist"
                    print constrain_param, " ", constraint
            if randomize:
                grad.randomize()
            if verbose:
                print(grad)
                grad.checkgrad(verbose=1)
            if not grad.checkgrad(verbose=True):
                gradchecking = False

    return gradchecking
Code Example #34
def dparam_checkgrad(func, dfunc, params, args, constraints=None, randomize=False, verbose=False):
    """
    checkgrad expects f: R^N -> R^1 and df: R^N -> R^N.
    However, if we are holding other parameters fixed and moving something else,
    we need to check the gradient of each of the fixed parameters
    (f and y, for example) separately, whilst moving another parameter.
    Otherwise f gives back R^N and df gives back R^NxM, where M is
    the number of parameters and N is the number of data points.
    We therefore need to take a slice out of f and a slice out of df.
    """
    #print "\n{} likelihood: {} vs {}".format(func.im_self.__class__.__name__,
                                           #func.__name__, dfunc.__name__)
    partial_f = dparam_partial(func, *args)
    partial_df = dparam_partial(dfunc, *args)
    gradchecking = True
    for param in params:
        fnum = np.atleast_1d(partial_f(param)).shape[0]
        dfnum = np.atleast_1d(partial_df(param)).shape[0]
        for fixed_val in range(dfnum):
            #dlik and dlik_dvar gives back 1 value for each
            f_ind = min(fnum, fixed_val+1) - 1
            print "fnum: {} dfnum: {} f_ind: {} fixed_val: {}".format(fnum, dfnum, f_ind, fixed_val)
            #Make grad checker with this param moving, note that set_params is NOT being called
            #The parameter is being set directly with __setattr__
            grad = GradientChecker(lambda x: np.atleast_1d(partial_f(x))[f_ind],
                                   lambda x : np.atleast_1d(partial_df(x))[fixed_val],
                                   param, 'p')
            #This is not general for more than one param...
            if constraints is not None:
                for constraint in constraints:
                    constraint('p', grad)
            if randomize:
                grad.randomize()
            if verbose:
                print(grad)
                grad.checkgrad(verbose=1)
            if not grad.checkgrad():
                gradchecking = False

    return gradchecking
Code Example #35
def test_predictive_gradients_with_normalizer():
    """
    Check that model.predictive_gradients returns the gradients of
    model.predict when normalizer=True 
    """
    N, M, Q = 10, 15, 3
    X = np.random.rand(M,Q)
    Y = np.random.rand(M,1)
    x = np.random.rand(N,Q)
    model = GPy.models.GPRegression(X=X, Y=Y, normalizer=True)
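    # predictive_gradients returns (dmu/dX, dvar/dX); check each against the mean and variance from predict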
    gm = GradientChecker(lambda x: model.predict(x)[0],
                         lambda x: model.predictive_gradients(x)[0],
                         x, 'x')
    gc = GradientChecker(lambda x: model.predict(x)[1],
                         lambda x: model.predictive_gradients(x)[1],
                         x, 'x')
    assert(gm.checkgrad())
    assert(gc.checkgrad())
Code Example #36
    def check_gradient(self, link_func, lim_of_inf, test_lim=False):
        grad = GradientChecker(link_func.transf,
                               link_func.dtransf_df,
                               x0=self.mid_f)
        self.assertTrue(grad.checkgrad(verbose=True))
        grad2 = GradientChecker(link_func.dtransf_df,
                                link_func.d2transf_df2,
                                x0=self.mid_f)
        self.assertTrue(grad2.checkgrad(verbose=True))
        grad3 = GradientChecker(link_func.d2transf_df2,
                                link_func.d3transf_df3,
                                x0=self.mid_f)
        self.assertTrue(grad3.checkgrad(verbose=True))

        grad = GradientChecker(link_func.transf,
                               link_func.dtransf_df,
                               x0=self.small_f)
        self.assertTrue(grad.checkgrad(verbose=True))
        grad2 = GradientChecker(link_func.dtransf_df,
                                link_func.d2transf_df2,
                                x0=self.small_f)
        self.assertTrue(grad2.checkgrad(verbose=True))
        grad3 = GradientChecker(link_func.d2transf_df2,
                                link_func.d3transf_df3,
                                x0=self.small_f)
        self.assertTrue(grad3.checkgrad(verbose=True))

        grad = GradientChecker(link_func.transf,
                               link_func.dtransf_df,
                               x0=self.zero_f)
        self.assertTrue(grad.checkgrad(verbose=True))
        grad2 = GradientChecker(link_func.dtransf_df,
                                link_func.d2transf_df2,
                                x0=self.zero_f)
        self.assertTrue(grad2.checkgrad(verbose=True))
        grad3 = GradientChecker(link_func.d2transf_df2,
                                link_func.d3transf_df3,
                                x0=self.zero_f)
        self.assertTrue(grad3.checkgrad(verbose=True))

        #Clip the large f value so it stays below the limit of infinity
        large_f = np.clip(self.large_f, -np.inf, lim_of_inf - 1e-3)
        grad = GradientChecker(link_func.transf,
                               link_func.dtransf_df,
                               x0=large_f)
        self.assertTrue(grad.checkgrad(verbose=True))
        grad2 = GradientChecker(link_func.dtransf_df,
                                link_func.d2transf_df2,
                                x0=large_f)
        self.assertTrue(grad2.checkgrad(verbose=True))
        grad3 = GradientChecker(link_func.d2transf_df2,
                                link_func.d3transf_df3,
                                x0=large_f)
        self.assertTrue(grad3.checkgrad(verbose=True))

        if test_lim:
            print("Testing limits")
            #Remove some otherwise we are too close to the limit for gradcheck to work effectively
            lim_of_inf = lim_of_inf - 1e-4
            grad = GradientChecker(link_func.transf,
                                   link_func.dtransf_df,
                                   x0=lim_of_inf)
            self.assertTrue(grad.checkgrad(verbose=True))
            grad2 = GradientChecker(link_func.dtransf_df,
                                    link_func.d2transf_df2,
                                    x0=lim_of_inf)
            self.assertTrue(grad2.checkgrad(verbose=True))
            grad3 = GradientChecker(link_func.d2transf_df2,
                                    link_func.d3transf_df3,
                                    x0=lim_of_inf)
            self.assertTrue(grad3.checkgrad(verbose=True))
Code Example #37
 def t_d3transf_df3(self, transformation, f):
     print "\n{}".format(inspect.stack()[0][3])
     grad = GradientChecker(transformation.d2transf_df2, transformation.d3transf_df3, f, 'f')
     grad.randomize()
     grad.checkgrad(verbose=1)
     assert grad.checkgrad()
Code Example #38
File: plt_fig1.py  Project: rongrong1314/gpsearch-1
def plot_likelihood_ratio(function, n_GMM, filename):

    my_map, inputs = function.my_map, function.inputs
    mu = np.random.randn(inputs.input_dim)
    cov = 4*np.random.randn(inputs.input_dim)**2
    inputs = GaussianInputs(inputs.domain, mu=mu, cov=cov)

    ngrid = 10
    pts = inputs.draw_samples(n_samples=ngrid, sample_method="grd")
    ndim = pts.shape[-1]
    grd = pts.reshape( (ngrid,)*ndim + (ndim,) ).T
    X, Y = grd[0], grd[1]

    # Compute map
    yy = my_map.evaluate(pts)
    # Compute GPy model
    model = GPy.models.GPRegression(pts, yy, normalizer=True)

    likelihood = Likelihood(model, inputs)
    x_new = np.random.rand(2, inputs.input_dim) 
    print(likelihood._evaluate_raw(x_new))
    print(likelihood._jacobian_raw(x_new))

    g = GradientChecker(lambda x: likelihood._evaluate_gmm(x),
                        lambda x: likelihood._jacobian_gmm(x),
                        x_new, 'x')
    assert(g.checkgrad())


    yy = model.predict(pts)[0].flatten()
    dyy_dx, _ = model.predictive_gradients(pts)
    dyy_dx = dyy_dx[:,:,0]

    ZZ = yy.reshape( (ngrid,)*ndim ).T
        
    # Compute likelihood ratio
    x, y = custom_KDE(yy, weights=inputs.pdf(pts)).evaluate()
    fnTn = scipy.interpolate.interp1d(x, y)
    fx = inputs.pdf(pts).flatten()
    fy = fnTn(yy).flatten()
    w = fx/fy
    ZL = w.reshape( (ngrid,)*ndim ).T

    # Compute gradient of likelihood ratio
    dy_dx = np.gradient(y,x)
    fnTn_dx = scipy.interpolate.interp1d(x, dy_dx)
    tmp = -fx / fy**2 * fnTn_dx(yy) 
    dw_dx = tmp[:,None] * dyy_dx

    plt.figure()
    plt.plot(x,y)
    plt.plot(x,fnTn(x), '-.')
    from scipy.interpolate import InterpolatedUnivariateSpline
    spl = InterpolatedUnivariateSpline(x, y)
    plt.plot(x, spl(x), '--')

    plt.figure()
    plt.semilogy(x,dy_dx)
    plt.semilogy(x,fnTn_dx(x), '-.')
    plt.semilogy(x, spl.derivative()(x), '--')
    plt.show()
    exit()
Code Example #39
def dparam_checkgrad(func,
                     dfunc,
                     params,
                     params_names,
                     args,
                     constraints=None,
                     randomize=False,
                     verbose=False):
    """
    checkgrad expects f: R^N -> R^1 and df: R^N -> R^N.
    However, if we are holding other parameters fixed and moving something else,
    we need to check the gradient of each of the fixed parameters
    (f and y, for example) separately, whilst moving another parameter.
    Otherwise f gives back R^N and df gives back R^NxM, where M is
    the number of parameters and N is the number of data points.
    We therefore need to take a slice out of f and a slice out of df.
    """
    print("\n{} likelihood: {} vs {}".format(func.__self__.__class__.__name__,
                                             func.__name__, dfunc.__name__))
    partial_f = dparam_partial(func, *args)
    partial_df = dparam_partial(dfunc, *args)
    gradchecking = True
    zipped_params = zip(params, params_names)
    for param_ind, (param_val, param_name) in enumerate(zipped_params):
        #Check one parameter at a time, make sure it is 2d (as some gradients only return arrays) then strip out the parameter
        f_ = partial_f(param_val, param_name)
        df_ = partial_df(param_val, param_name)
        #Reshape into a 3d matrix just in case: we want it (?, N, D) regardless of whether ? is num_params or not
        #(capture the shape before reshaping, since f_ is modified before df_)
        f_shape = f_.shape
        f_ = f_.reshape(-1, f_shape[0], f_shape[1])
        df_ = df_.reshape(-1, f_shape[0], f_shape[1])

        #Get the number of f and number of dimensions
        fnum = f_.shape[-2]
        fdim = f_.shape[-1]
        dfnum = df_.shape[-2]

        for fixed_val in range(dfnum):
            #dlik and dlik_dvar gives back 1 value for each
            f_ind = min(fnum, fixed_val + 1) - 1
            print("fnum: {} dfnum: {} f_ind: {} fixed_val: {}".format(
                fnum, dfnum, f_ind, fixed_val))
            #Make grad checker with this param moving, note that set_params is NOT being called
            #The parameter is being set directly with __setattr__
            #Check only the parameter and function value we wish to check at a time
            #func = lambda p_val, fnum, fdim, param_ind, f_ind, param_ind: partial_f(p_val, param_name).reshape(-1, fnum, fdim)[param_ind, f_ind, :]
            #dfunc_dparam = lambda d_val, fnum, fdim, param_ind, fixed_val: partial_df(d_val, param_name).reshape(-1, fnum, fdim)[param_ind, fixed_val, :]

            #First we reshape the output such that it is (num_params, N, D), then we pull out the relevant parameter f-index and checkgrad just this index at a time
            func = lambda p_val: partial_f(p_val, param_name).reshape(
                -1, fnum, fdim)[param_ind, f_ind, :]
            dfunc_dparam = lambda d_val: partial_df(d_val, param_name).reshape(
                -1, fnum, fdim)[param_ind, fixed_val, :]
            grad = GradientChecker(func, dfunc_dparam, param_val, [param_name])

            if constraints is not None:
                for constrain_param, constraint in constraints:
                    if grad.grep_param_names(constrain_param):
                        constraint(constrain_param, grad)
                    else:
                        print("parameter didn't exist")
                    print(constrain_param, " ", constraint)
            if randomize:
                grad.randomize()
            if verbose:
                print(grad)
                grad.checkgrad(verbose=1)
            if not grad.checkgrad(verbose=True):
                gradchecking = False

    return gradchecking
Code Example #40
File: link_function_tests.py  Project: Imdrail/GPy
    def check_gradient(self, link_func, lim_of_inf, test_lim=False):
        grad = GradientChecker(link_func.transf, link_func.dtransf_df, x0=self.mid_f)
        self.assertTrue(grad.checkgrad(verbose=True))
        grad2 = GradientChecker(link_func.dtransf_df, link_func.d2transf_df2, x0=self.mid_f)
        self.assertTrue(grad2.checkgrad(verbose=True))
        grad3 = GradientChecker(link_func.d2transf_df2, link_func.d3transf_df3, x0=self.mid_f)
        self.assertTrue(grad3.checkgrad(verbose=True))

        grad = GradientChecker(link_func.transf, link_func.dtransf_df, x0=self.small_f)
        self.assertTrue(grad.checkgrad(verbose=True))
        grad2 = GradientChecker(link_func.dtransf_df, link_func.d2transf_df2, x0=self.small_f)
        self.assertTrue(grad2.checkgrad(verbose=True))
        grad3 = GradientChecker(link_func.d2transf_df2, link_func.d3transf_df3, x0=self.small_f)
        self.assertTrue(grad3.checkgrad(verbose=True))

        grad = GradientChecker(link_func.transf, link_func.dtransf_df, x0=self.zero_f)
        self.assertTrue(grad.checkgrad(verbose=True))
        grad2 = GradientChecker(link_func.dtransf_df, link_func.d2transf_df2, x0=self.zero_f)
        self.assertTrue(grad2.checkgrad(verbose=True))
        grad3 = GradientChecker(link_func.d2transf_df2, link_func.d3transf_df3, x0=self.zero_f)
        self.assertTrue(grad3.checkgrad(verbose=True))

        #Clip the large f value so it stays below the limit of infinity
        large_f = np.clip(self.large_f, -np.inf, lim_of_inf-1e-3)
        grad = GradientChecker(link_func.transf, link_func.dtransf_df, x0=large_f)
        self.assertTrue(grad.checkgrad(verbose=True))
        grad2 = GradientChecker(link_func.dtransf_df, link_func.d2transf_df2, x0=large_f)
        self.assertTrue(grad2.checkgrad(verbose=True))
        grad3 = GradientChecker(link_func.d2transf_df2, link_func.d3transf_df3, x0=large_f)
        self.assertTrue(grad3.checkgrad(verbose=True))

        if test_lim:
            print("Testing limits")
            #Remove some otherwise we are too close to the limit for gradcheck to work effectively
            lim_of_inf = lim_of_inf - 1e-4
            grad = GradientChecker(link_func.transf, link_func.dtransf_df, x0=lim_of_inf)
            self.assertTrue(grad.checkgrad(verbose=True))
            grad2 = GradientChecker(link_func.dtransf_df, link_func.d2transf_df2, x0=lim_of_inf)
            self.assertTrue(grad2.checkgrad(verbose=True))
            grad3 = GradientChecker(link_func.d2transf_df2, link_func.d3transf_df3, x0=lim_of_inf)
            self.assertTrue(grad3.checkgrad(verbose=True))
Code Example #41
File: likelihood_tests.py  Project: pxlong/GPy
def dparam_checkgrad(func, dfunc, params, params_names, args, constraints=None, randomize=False, verbose=False):
    """
    checkgrad expects f: R^N -> R^1 and df: R^N -> R^N.
    However, if we are holding other parameters fixed and moving something else,
    we need to check the gradient of each of the fixed parameters
    (f and y, for example) separately, whilst moving another parameter.
    Otherwise f gives back R^N and df gives back R^NxM, where M is
    the number of parameters and N is the number of data points.
    We therefore need to take a slice out of f and a slice out of df.
    """
    print("\n{} likelihood: {} vs {}".format(func.__self__.__class__.__name__, func.__name__, dfunc.__name__))
    partial_f = dparam_partial(func, *args)
    partial_df = dparam_partial(dfunc, *args)
    gradchecking = True
    zipped_params = zip(params, params_names)
    for param_ind, (param_val, param_name) in enumerate(zipped_params):
        # Check one parameter at a time, make sure it is 2d (as some gradients only return arrays) then strip out the parameter
        f_ = partial_f(param_val, param_name)
        df_ = partial_df(param_val, param_name)
        # Reshape into a 3d matrix just in case: we want it (?, N, D) regardless of whether ? is num_params or not
        # (capture the shape before reshaping, since f_ is modified before df_)
        f_shape = f_.shape
        f_ = f_.reshape(-1, f_shape[0], f_shape[1])
        df_ = df_.reshape(-1, f_shape[0], f_shape[1])

        # Get the number of f and number of dimensions
        fnum = f_.shape[-2]
        fdim = f_.shape[-1]
        dfnum = df_.shape[-2]

        for fixed_val in range(dfnum):
            # dlik and dlik_dvar gives back 1 value for each
            f_ind = min(fnum, fixed_val + 1) - 1
            print("fnum: {} dfnum: {} f_ind: {} fixed_val: {}".format(fnum, dfnum, f_ind, fixed_val))
            # Make grad checker with this param moving, note that set_params is NOT being called
            # The parameter is being set directly with __setattr__
            # Check only the parameter and function value we wish to check at a time
            # func = lambda p_val, fnum, fdim, param_ind, f_ind, param_ind: partial_f(p_val, param_name).reshape(-1, fnum, fdim)[param_ind, f_ind, :]
            # dfunc_dparam = lambda d_val, fnum, fdim, param_ind, fixed_val: partial_df(d_val, param_name).reshape(-1, fnum, fdim)[param_ind, fixed_val, :]

            # First we reshape the output such that it is (num_params, N, D), then we pull out the relevant parameter f-index and checkgrad just this index at a time
            func = lambda p_val: partial_f(p_val, param_name).reshape(-1, fnum, fdim)[param_ind, f_ind, :]
            dfunc_dparam = lambda d_val: partial_df(d_val, param_name).reshape(-1, fnum, fdim)[param_ind, fixed_val, :]
            grad = GradientChecker(func, dfunc_dparam, param_val, [param_name])

            if constraints is not None:
                for constrain_param, constraint in constraints:
                    if grad.grep_param_names(constrain_param):
                        constraint(constrain_param, grad)
                    else:
                        print("parameter didn't exist")
                    print(constrain_param, " ", constraint)
            if randomize:
                grad.randomize()
            if verbose:
                print(grad)
                grad.checkgrad(verbose=1)
            if not grad.checkgrad(verbose=True):
                gradchecking = False

    return gradchecking
Code Example #42
    Y = np.random.randint(2, size=N)
    Y[Y == 0] = -1
    probit = Probit(Y)
    mu = np.random.randn(N)
    sigma2 = np.random.rand(N)

    #gradcheck for Z wrt mu
    def f(mu):
        probit.set_cavity(mu, sigma2)
        return probit.Z

    def df(mu):
        probit.set_cavity(mu, sigma2)
        return probit.dZ_dmu

    m = GradientChecker(f, df, np.random.randn(N))
    m.checkgrad(verbose=1)

    #gradcheck for Z wrt sigma2
    def f(sigma2):
        probit.set_cavity(mu, sigma2)
        return probit.Z

    def df(sigma2):
        probit.set_cavity(mu, sigma2)
        return probit.dZ_dsigma2

    m = GradientChecker(f, df, np.random.rand(N))
    m.checkgrad(verbose=1)

    #gradcheck for mean wrt mu
Code Example #43
File: tilted.py  Project: SheffieldML/TVB
if __name__=='__main__':
    N = 4
    Y = np.random.randint(2,size=N)
    Y[Y==0] = -1
    probit = Probit(Y)
    mu = np.random.randn(N)
    sigma2 = np.random.rand(N)
    #gradcheck for Z wrt mu
    def f(mu):
        probit.set_cavity(mu, sigma2)
        return probit.Z
    def df(mu):
        probit.set_cavity(mu, sigma2)
        return probit.dZ_dmu
    m = GradientChecker(f,df,np.random.randn(N))
    m.checkgrad(verbose=1)

    #gradcheck for Z wrt sigma2
    def f(sigma2):
        probit.set_cavity(mu, sigma2)
        return probit.Z
    def df(sigma2):
        probit.set_cavity(mu, sigma2)
        return probit.dZ_dsigma2
    m = GradientChecker(f,df,np.random.rand(N))
    m.checkgrad(verbose=1)

    #gradcheck for mean wrt mu
    def f(mu):
        probit.set_cavity(mu, sigma2)