Example #1
    def t_dexp_dmu(self, model, Y, Y_metadata):
        print("\n{}".format(inspect.stack()[0][3]))
        #Make mu and var (marginal means and variances of q(f)) draws from a GP
        k = GPy.kern.RBF(1).K(np.linspace(0, 1, Y.shape[0])[:, None])
        L = GPy.util.linalg.jitchol(k)
        mu = L.dot(np.random.randn(*Y.shape))
        #Variance must be positive
        var = np.abs(L.dot(np.random.randn(*Y.shape))) + 0.01
        expectation = functools.partial(model.variational_expectations,
                                        Y=Y,
                                        v=var,
                                        gh_points=None,
                                        Y_metadata=Y_metadata)

        #Function to get the nth returned value
        def F(mu):
            return expectation(m=mu)[0]

        def dmu(mu):
            return expectation(m=mu)[1]

        grad = GradientChecker(F, dmu, mu.copy(), 'm')

        grad.randomize()
        print(grad)
        print(model)
        assert grad.checkgrad(verbose=1)
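All of the checks in these examples follow one pattern: pair a quantity with its claimed derivative and compare the latter against finite differences. A minimal standalone sketch of that idea (finite_diff_check is a hypothetical helper, not GPy's GradientChecker, which additionally handles named model parameters, constraints and randomize()):

import numpy as np

def finite_diff_check(F, dF, x, eps=1e-6, tol=1e-4):
    # Compare the analytic gradient dF(x) against a central-difference
    # estimate of the gradient of sum(F(x)), one element of x at a time.
    x = np.asarray(x, dtype=float)
    analytic = np.asarray(dF(x)).ravel()
    numeric = np.empty_like(analytic)
    for i in range(x.size):
        step = np.zeros(x.size)
        step[i] = eps
        step = step.reshape(x.shape)
        numeric[i] = (np.sum(F(x + step)) - np.sum(F(x - step))) / (2 * eps)
    return np.allclose(analytic, numeric, atol=tol)

# d/dx sin(x) = cos(x), checked elementwise
assert finite_diff_check(np.sin, np.cos, np.random.randn(5))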
Example #2
 def t_d3transf_df3(self, transformation, f):
     print("\n{}".format(inspect.stack()[0][3]))
     grad = GradientChecker(transformation.d2transf_df2,
                            transformation.d3transf_df3, f, 'f')
     grad.randomize()
     grad.checkgrad(verbose=1)
     assert grad.checkgrad()
Example #3
def dparam_checkgrad(func,
                     dfunc,
                     params,
                     params_names,
                     args,
                     constraints=None,
                     randomize=False,
                     verbose=False):
    """
    checkgrad expects an f: R^N -> R^1 and a df: R^N -> R^N.
    However, if we are holding other parameters fixed and moving something else,
    we need to check the gradient of each of the fixed parameters
    (f and y, for example) separately, whilst moving another parameter.
    Otherwise f gives back R^N and
              df gives back R^NxM, where M is
    the number of parameters and N is the number of data points,
    so we need to take a slice out of f and a slice out of df.
    """
    print "\n{} likelihood: {} vs {}".format(func.im_self.__class__.__name__,
                                             func.__name__, dfunc.__name__)
    partial_f = dparam_partial(func, *args)
    partial_df = dparam_partial(dfunc, *args)
    gradchecking = True
    zipped_params = zip(params, params_names)
    for param_ind, (param_val, param_name) in enumerate(zipped_params):
        #Check one parameter at a time, make sure it is 2d (as some gradients only return arrays) then strip out the parameter
        fnum = np.atleast_2d(partial_f(param_val,
                                       param_name))[:, param_ind].shape[0]
        dfnum = np.atleast_2d(partial_df(param_val,
                                         param_name))[:, param_ind].shape[0]
        for fixed_val in range(dfnum):
            #dlik and dlik_dvar give back 1 value for each
            f_ind = min(fnum, fixed_val + 1) - 1
            print("fnum: {} dfnum: {} f_ind: {} fixed_val: {}".format(
                fnum, dfnum, f_ind, fixed_val))
            #Make grad checker with this param moving, note that set_params is NOT being called
            #The parameter is being set directly with __setattr__
            #Check only the parameter and function value we wish to check at a time
            grad = GradientChecker(
                lambda p_val: np.atleast_2d(partial_f(p_val, param_name))[
                    f_ind, param_ind], lambda p_val: np.atleast_2d(
                        partial_df(p_val, param_name))[fixed_val, param_ind],
                param_val, [param_name])

            if constraints is not None:
                for constrain_param, constraint in constraints:
                    if grad.grep_param_names(constrain_param):
                        constraint(constrain_param, grad)
                    else:
                        print "parameter didn't exist"
                    print constrain_param, " ", constraint
            if randomize:
                grad.randomize()
            if verbose:
                print(grad)
                grad.checkgrad(verbose=1)
            if not grad.checkgrad(verbose=True):
                gradchecking = False

    return gradchecking
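The slicing described in the docstring can be seen in miniature: move one scalar parameter, hold the data fixed, and compare each output index of f with the matching entry of df as its own scalar problem. A hedged toy sketch (the per-datum quadratic below is made up purely for illustration):

import numpy as np

theta = 1.5
y = np.array([0.5, 1.0, 2.0])             # data held fixed

f = lambda t: -0.5 * (y - t) ** 2         # per-datum value, R^1 -> R^3
df = lambda t: y - t                      # d f_i / d theta, R^1 -> R^3

eps = 1e-6
for i in range(y.size):                   # one fixed output index at a time
    numeric = (f(theta + eps)[i] - f(theta - eps)[i]) / (2 * eps)
    assert np.isclose(df(theta)[i], numeric)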
Example #4
    def t_dexp_dmu(self, model, Y, Y_metadata):
        print("\n{}".format(inspect.stack()[0][3]))
        # Make mu and var (marginal means and variances of q(f)) draws from a GP
        k = GPy.kern.RBF(1).K(np.linspace(0, 1, Y.shape[0])[:, None])
        L = GPy.util.linalg.jitchol(k)
        mu = L.dot(np.random.randn(*Y.shape))
        # Variance must be positive
        var = np.abs(L.dot(np.random.randn(*Y.shape))) + 0.01
        expectation = functools.partial(
            model.variational_expectations, Y=Y, v=var, gh_points=None, Y_metadata=Y_metadata
        )

        # Function to get the nth returned value
        def F(mu):
            return expectation(m=mu)[0]

        def dmu(mu):
            return expectation(m=mu)[1]

        grad = GradientChecker(F, dmu, mu.copy(), "m")

        grad.randomize()
        print(grad)
        print(model)
        assert grad.checkgrad(verbose=1)
Example #5
 def t_dtransf_df(self, transformation, f):
     print("\n{}".format(inspect.stack()[0][3]))
     grad = GradientChecker(transformation.transf,
                            transformation.dtransf_df, f, 'f')
     grad.randomize()
     grad.checkgrad(verbose=1)
     assert grad.checkgrad()
Example #6
 def t_d3logpdf_df3(self, model, Y, f, Y_metadata):
     print("\n{}".format(inspect.stack()[0][3]))
     d2logpdf_df2 = functools.partial(model.d2logpdf_df2, y=Y, Y_metadata=Y_metadata)
     d3logpdf_df3 = functools.partial(model.d3logpdf_df3, y=Y, Y_metadata=Y_metadata)
     grad = GradientChecker(d2logpdf_df2, d3logpdf_df3, f.copy(), "g")
     grad.randomize()
     print(model)
     assert grad.checkgrad(verbose=1)
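Note the pattern here: the n-th derivative is verified by treating the (n-1)-th derivative as the function under test. A scalar sketch of the same pairing, with the derivatives of a unit-variance Gaussian logpdf written out by hand:

import numpy as np

f0 = np.random.randn(3, 1)
d2logpdf_df2 = lambda f: -np.ones_like(f)   # second derivative of log N(y|f, 1) in f
d3logpdf_df3 = lambda f: np.zeros_like(f)   # third derivative

eps = 1e-6
numeric = (d2logpdf_df2(f0 + eps) - d2logpdf_df2(f0 - eps)) / (2 * eps)
assert np.allclose(numeric, d3logpdf_df3(f0), atol=1e-4)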
Example #7
 def t_d3logpdf_df3(self, model, Y, f):
     print "\n{}".format(inspect.stack()[0][3])
     d2logpdf_df2 = functools.partial(model.d2logpdf_df2, y=Y)
     d3logpdf_df3 = functools.partial(model.d3logpdf_df3, y=Y)
     grad = GradientChecker(d2logpdf_df2, d3logpdf_df3, f.copy(), 'g')
     grad.randomize()
     print(model)
     assert grad.checkgrad(verbose=1)
Example #8
 def t_d3logpdf_df3(self, model, Y, f):
     print "\n{}".format(inspect.stack()[0][3])
     d2logpdf_df2 = functools.partial(model.d2logpdf_df2, y=Y)
     d3logpdf_df3 = functools.partial(model.d3logpdf_df3, y=Y)
     grad = GradientChecker(d2logpdf_df2, d3logpdf_df3, f.copy(), 'g')
     grad.randomize()
     print(model)
     assert grad.checkgrad(verbose=1)
Example #9
 def t_dlogpdf_df(self, model, Y, f):
     print "\n{}".format(inspect.stack()[0][3])
     self.description = "\n{}".format(inspect.stack()[0][3])
     logpdf = functools.partial(model.logpdf, y=Y)
     dlogpdf_df = functools.partial(model.dlogpdf_df, y=Y)
     grad = GradientChecker(logpdf, dlogpdf_df, f.copy(), 'g')
     grad.randomize()
     print(model)
     assert grad.checkgrad(verbose=1)
Example #10
 def t_dlogpdf_df(self, model, Y, f):
     print "\n{}".format(inspect.stack()[0][3])
     self.description = "\n{}".format(inspect.stack()[0][3])
     logpdf = functools.partial(model.logpdf, y=Y)
     dlogpdf_df = functools.partial(model.dlogpdf_df, y=Y)
     grad = GradientChecker(logpdf, dlogpdf_df, f.copy(), 'g')
     grad.randomize()
     print(model)
     assert grad.checkgrad(verbose=1)
Example #11
 def t_dlogpdf_df(self, model, Y, f, Y_metadata):
     print("\n{}".format(inspect.stack()[0][3]))
     self.description = "\n{}".format(inspect.stack()[0][3])
     # Sum the logpdf output so checkgrad sees a scalar objective;
     # np.sum(model.logpdf) would apply the sum to the bound method, not its output.
     logpdf = functools.partial(lambda x, **kw: np.sum(model.logpdf(x, **kw)), y=Y, Y_metadata=Y_metadata)
     dlogpdf_df = functools.partial(model.dlogpdf_df, y=Y, Y_metadata=Y_metadata)
     grad = GradientChecker(logpdf, dlogpdf_df, f.copy(), "g")
     grad.randomize()
     print(model)
     assert grad.checkgrad(verbose=1)
Example #12
 def t_d2logpdf_df2(self, model, Y, f, Y_metadata):
     print("\n{}".format(inspect.stack()[0][3]))
     dlogpdf_df = functools.partial(model.dlogpdf_df,
                                    y=Y,
                                    Y_metadata=Y_metadata)
     d2logpdf_df2 = functools.partial(model.d2logpdf_df2,
                                      y=Y,
                                      Y_metadata=Y_metadata)
     grad = GradientChecker(dlogpdf_df, d2logpdf_df2, f.copy(), 'g')
     grad.randomize()
     print(model)
     assert grad.checkgrad(verbose=1)
Example #13
 def t_dlogpdf_df(self, model, Y, f, Y_metadata):
     print("\n{}".format(inspect.stack()[0][3]))
     self.description = "\n{}".format(inspect.stack()[0][3])
     # Sum the logpdf output so checkgrad sees a scalar objective;
     # np.sum(model.logpdf) would apply the sum to the bound method, not its output.
     logpdf = functools.partial(lambda x, **kw: np.sum(model.logpdf(x, **kw)),
                                y=Y,
                                Y_metadata=Y_metadata)
     dlogpdf_df = functools.partial(model.dlogpdf_df,
                                    y=Y,
                                    Y_metadata=Y_metadata)
     grad = GradientChecker(logpdf, dlogpdf_df, f.copy(), 'g')
     grad.randomize()
     print(model)
     assert grad.checkgrad(verbose=1)
Example #14
    def t_dlogpdf_dlink(self, model, Y, f, link_f_constraints):
        print "\n{}".format(inspect.stack()[0][3])
        logpdf = functools.partial(model.logpdf_link, y=Y)
        dlogpdf_dlink = functools.partial(model.dlogpdf_dlink, y=Y)
        grad = GradientChecker(logpdf, dlogpdf_dlink, f.copy(), 'g')

        #Apply constraints to link_f values
        for constraint in link_f_constraints:
            constraint('g', grad)

        grad.randomize()
        print(grad)
        grad.checkgrad(verbose=1)
        assert grad.checkgrad()
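Each entry of link_f_constraints above is a callable invoked as constraint('g', grad). Since GradientChecker behaves like a GPy model, a plausible constraint (an assumption about the calling tests, not shown in these snippets) simply constrains the named parameter:

def constrain_positive(param_name, model):
    # Keep the checked link_f values inside the likelihood's support,
    # e.g. a strictly positive rate for a Poisson likelihood.
    model[param_name].constrain_positive()

link_f_constraints = [constrain_positive]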
Example #15
    def t_d3logpdf_dlink3(self, model, Y, f, link_f_constraints):
        print "\n{}".format(inspect.stack()[0][3])
        d2logpdf_dlink2 = functools.partial(model.d2logpdf_dlink2, y=Y)
        d3logpdf_dlink3 = functools.partial(model.d3logpdf_dlink3, y=Y)
        grad = GradientChecker(d2logpdf_dlink2, d3logpdf_dlink3, f.copy(), 'g')

        #Apply constraints to link_f values
        for constraint in link_f_constraints:
            constraint('g', grad)

        grad.randomize()
        grad.checkgrad(verbose=1)
        print(grad)
        assert grad.checkgrad()
Example #16
    def t_d3logpdf_dlink3(self, model, Y, f, Y_metadata, link_f_constraints):
        print("\n{}".format(inspect.stack()[0][3]))
        d2logpdf_dlink2 = functools.partial(model.d2logpdf_dlink2, y=Y, Y_metadata=Y_metadata)
        d3logpdf_dlink3 = functools.partial(model.d3logpdf_dlink3, y=Y, Y_metadata=Y_metadata)
        grad = GradientChecker(d2logpdf_dlink2, d3logpdf_dlink3, f.copy(), "g")

        # Apply constraints to link_f values
        for constraint in link_f_constraints:
            constraint("g", grad)

        grad.randomize()
        print(grad)
        print(model)
        assert grad.checkgrad(verbose=1)
Example #17
def dparam_checkgrad(func, dfunc, params, params_names, args, constraints=None, randomize=False, verbose=False):
    """
    checkgrad expects an f: R^N -> R^1 and a df: R^N -> R^N.
    However, if we are holding other parameters fixed and moving something else,
    we need to check the gradient of each of the fixed parameters
    (f and y, for example) separately, whilst moving another parameter.
    Otherwise f gives back R^N and
              df gives back R^NxM, where M is
    the number of parameters and N is the number of data points,
    so we need to take a slice out of f and a slice out of df.
    """
    print "\n{} likelihood: {} vs {}".format(func.im_self.__class__.__name__,
                                           func.__name__, dfunc.__name__)
    partial_f = dparam_partial(func, *args)
    partial_df = dparam_partial(dfunc, *args)
    gradchecking = True
    zipped_params = zip(params, params_names)
    for param_ind, (param_val, param_name) in enumerate(zipped_params):
        #Check one parameter at a time, make sure it is 2d (as some gradients only return arrays) then strip out the parameter
        fnum = np.atleast_2d(partial_f(param_val, param_name))[:, param_ind].shape[0]
        dfnum = np.atleast_2d(partial_df(param_val, param_name))[:, param_ind].shape[0]
        for fixed_val in range(dfnum):
            #dlik and dlik_dvar give back 1 value for each
            f_ind = min(fnum, fixed_val+1) - 1
            print("fnum: {} dfnum: {} f_ind: {} fixed_val: {}".format(fnum, dfnum, f_ind, fixed_val))
            #Make grad checker with this param moving, note that set_params is NOT being called
            #The parameter is being set directly with __setattr__
            #Check only the parameter and function value we wish to check at a time
            grad = GradientChecker(lambda p_val: np.atleast_2d(partial_f(p_val, param_name))[f_ind, param_ind],
                                   lambda p_val: np.atleast_2d(partial_df(p_val, param_name))[fixed_val, param_ind],
                                   param_val, [param_name])

            if constraints is not None:
                for constrain_param, constraint in constraints:
                    if grad.grep_param_names(constrain_param):
                        constraint(constrain_param, grad)
                    else:
                        print "parameter didn't exist"
                    print constrain_param, " ", constraint
            if randomize:
                grad.randomize()
            if verbose:
                print(grad)
                grad.checkgrad(verbose=1)
            if not grad.checkgrad(verbose=True):
                gradchecking = False

    return gradchecking
Example #18
    def test_gaussian_d2logpdf_df2_2(self):
        print("\n{}".format(inspect.stack()[0][3]))
        self.Y = None

        self.N = 2
        self.D = 1
        self.X = np.linspace(0, self.D, self.N)[:, None]
        self.real_std = 0.2
        noise = np.random.randn(*self.X.shape) * self.real_std
        self.Y = np.sin(self.X * 2 * np.pi) + noise
        self.f = np.random.rand(self.N, 1)

        dlogpdf_df = functools.partial(self.gauss.dlogpdf_df, y=self.Y)
        d2logpdf_df2 = functools.partial(self.gauss.d2logpdf_df2, y=self.Y)
        grad = GradientChecker(dlogpdf_df, d2logpdf_df2, self.f.copy(), 'g')
        grad.randomize()

        self.assertTrue(grad.checkgrad(verbose=1))
Example #19
    def test_gaussian_d2logpdf_df2_2(self):
        print("\n{}".format(inspect.stack()[0][3]))
        self.Y = None

        self.N = 2
        self.D = 1
        self.X = np.linspace(0, self.D, self.N)[:, None]
        self.real_std = 0.2
        noise = np.random.randn(*self.X.shape) * self.real_std
        self.Y = np.sin(self.X * 2 * np.pi) + noise
        self.f = np.random.rand(self.N, 1)

        dlogpdf_df = functools.partial(self.gauss.dlogpdf_df, y=self.Y)
        d2logpdf_df2 = functools.partial(self.gauss.d2logpdf_df2, y=self.Y)
        grad = GradientChecker(dlogpdf_df, d2logpdf_df2, self.f.copy(), "g")
        grad.randomize()

        self.assertTrue(grad.checkgrad(verbose=1))
Example #20
    def t_d2logpdf_dlink2(self, model, Y, f, Y_metadata, link_f_constraints):
        print("\n{}".format(inspect.stack()[0][3]))
        dlogpdf_dlink = functools.partial(model.dlogpdf_dlink,
                                          y=Y,
                                          Y_metadata=Y_metadata)
        d2logpdf_dlink2 = functools.partial(model.d2logpdf_dlink2,
                                            y=Y,
                                            Y_metadata=Y_metadata)
        grad = GradientChecker(dlogpdf_dlink, d2logpdf_dlink2, f.copy(), 'g')

        #Apply constraints to link_f values
        for constraint in link_f_constraints:
            constraint('g', grad)

        grad.randomize()
        print(grad)
        print(model)
        assert grad.checkgrad(verbose=1)
Example #21
def dparam_checkgrad(func, dfunc, params, args, constraints=None, randomize=False, verbose=False):
    """
    checkgrad expects an f: R^N -> R^1 and a df: R^N -> R^N.
    However, if we are holding other parameters fixed and moving something else,
    we need to check the gradient of each of the fixed parameters
    (f and y, for example) separately, whilst moving another parameter.
    Otherwise f gives back R^N and
              df gives back R^NxM, where M is
    the number of parameters and N is the number of data points,
    so we need to take a slice out of f and a slice out of df.
    """
    #print "\n{} likelihood: {} vs {}".format(func.im_self.__class__.__name__,
                                           #func.__name__, dfunc.__name__)
    partial_f = dparam_partial(func, *args)
    partial_df = dparam_partial(dfunc, *args)
    gradchecking = True
    for param in params:
        fnum = np.atleast_1d(partial_f(param)).shape[0]
        dfnum = np.atleast_1d(partial_df(param)).shape[0]
        for fixed_val in range(dfnum):
            #dlik and dlik_dvar give back 1 value for each
            f_ind = min(fnum, fixed_val+1) - 1
            print("fnum: {} dfnum: {} f_ind: {} fixed_val: {}".format(fnum, dfnum, f_ind, fixed_val))
            #Make grad checker with this param moving, note that set_params is NOT being called
            #The parameter is being set directly with __setattr__
            grad = GradientChecker(lambda x: np.atleast_1d(partial_f(x))[f_ind],
                                   lambda x: np.atleast_1d(partial_df(x))[fixed_val],
                                   param, 'p')
            #This is not general for more than one param...
            if constraints is not None:
                for constraint in constraints:
                    constraint('p', grad)
            if randomize:
                grad.randomize()
            if verbose:
                print(grad)
                grad.checkgrad(verbose=1)
            if not grad.checkgrad():
                gradchecking = False

    return gradchecking
Example #22
def dparam_checkgrad(func, dfunc, params, params_names, args, constraints=None, randomize=False, verbose=False):
    """
    checkgrad expects an f: R^N -> R^1 and a df: R^N -> R^N.
    However, if we are holding other parameters fixed and moving something else,
    we need to check the gradient of each of the fixed parameters
    (f and y, for example) separately, whilst moving another parameter.
    Otherwise f gives back R^N and
              df gives back R^NxM, where M is
    the number of parameters and N is the number of data points,
    so we need to take a slice out of f and a slice out of df.
    """
    print("\n{} likelihood: {} vs {}".format(func.__self__.__class__.__name__, func.__name__, dfunc.__name__))
    partial_f = dparam_partial(func, *args)
    partial_df = dparam_partial(dfunc, *args)
    gradchecking = True
    zipped_params = zip(params, params_names)
    for param_ind, (param_val, param_name) in enumerate(zipped_params):
        # Check one parameter at a time, make sure it is 2d (as some gradients only return arrays) then strip out the parameter
        f_ = partial_f(param_val, param_name)
        df_ = partial_df(param_val, param_name)
        # Reshape so we have a 3d matrix, i.e. (?, N, D), regardless of whether ? is num_params or not.
        # Capture N and D before reshaping f_; reusing f_.shape afterwards would reshape df_ with the wrong axes.
        N_, D_ = f_.shape
        f_ = f_.reshape(-1, N_, D_)
        df_ = df_.reshape(-1, N_, D_)

        # Get the number of f and number of dimensions
        fnum = f_.shape[-2]
        fdim = f_.shape[-1]
        dfnum = df_.shape[-2]

        for fixed_val in range(dfnum):
            # dlik and dlik_dvar give back 1 value for each
            f_ind = min(fnum, fixed_val + 1) - 1
            print("fnum: {} dfnum: {} f_ind: {} fixed_val: {}".format(fnum, dfnum, f_ind, fixed_val))
            # Make grad checker with this param moving, note that set_params is NOT being called
            # The parameter is being set directly with __setattr__
            # Check only the parameter and function value we wish to check at a time
            # func = lambda p_val, fnum, fdim, param_ind, f_ind, param_ind: partial_f(p_val, param_name).reshape(-1, fnum, fdim)[param_ind, f_ind, :]
            # dfunc_dparam = lambda d_val, fnum, fdim, param_ind, fixed_val: partial_df(d_val, param_name).reshape(-1, fnum, fdim)[param_ind, fixed_val, :]

            # First we reshape the output so that it is (num_params, N, D), then we pull out the relevant parameter and f-index and checkgrad just this index at a time
            func = lambda p_val: partial_f(p_val, param_name).reshape(-1, fnum, fdim)[param_ind, f_ind, :]
            dfunc_dparam = lambda d_val: partial_df(d_val, param_name).reshape(-1, fnum, fdim)[param_ind, fixed_val, :]
            grad = GradientChecker(func, dfunc_dparam, param_val, [param_name])

            if constraints is not None:
                for constrain_param, constraint in constraints:
                    if grad.grep_param_names(constrain_param):
                        constraint(constrain_param, grad)
                    else:
                        print("parameter didn't exist")
                    print(constrain_param, " ", constraint)
            if randomize:
                grad.randomize()
            if verbose:
                print(grad)
                grad.checkgrad(verbose=1)
            if not grad.checkgrad(verbose=True):
                gradchecking = False

    return gradchecking
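The reshape trick above is easier to see on a plain array: gradients for several parameters come back flattened, get stacked on a leading axis as (num_params, N, D), and a single (param_ind, f_ind) slice is then checked at a time. A small numpy illustration with arbitrary shapes:

import numpy as np

num_params, N, D = 3, 4, 2
flat = np.arange(num_params * N * D, dtype=float)  # stand-in for a gradient output
blocks = flat.reshape(-1, N, D)                    # -> (num_params, N, D)
assert blocks.shape == (num_params, N, D)

one_check = blocks[1, 0, :]   # gradient slice for f_0 w.r.t. parameter 1
assert one_check.shape == (D,)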
Example #23
 def t_d3transf_df3(self, transformation, f):
     print "\n{}".format(inspect.stack()[0][3])
     grad = GradientChecker(transformation.d2transf_df2, transformation.d3transf_df3, f, 'f')
     grad.randomize()
     grad.checkgrad(verbose=1)
     assert grad.checkgrad()
Example #24
def dparam_checkgrad(func,
                     dfunc,
                     params,
                     params_names,
                     args,
                     constraints=None,
                     randomize=False,
                     verbose=False):
    """
    checkgrad expects an f: R^N -> R^1 and a df: R^N -> R^N.
    However, if we are holding other parameters fixed and moving something else,
    we need to check the gradient of each of the fixed parameters
    (f and y, for example) separately, whilst moving another parameter.
    Otherwise f gives back R^N and
              df gives back R^NxM, where M is
    the number of parameters and N is the number of data points,
    so we need to take a slice out of f and a slice out of df.
    """
    print("\n{} likelihood: {} vs {}".format(func.__self__.__class__.__name__,
                                             func.__name__, dfunc.__name__))
    partial_f = dparam_partial(func, *args)
    partial_df = dparam_partial(dfunc, *args)
    gradchecking = True
    zipped_params = zip(params, params_names)
    for param_ind, (param_val, param_name) in enumerate(zipped_params):
        #Check one parameter at a time, make sure it is 2d (as some gradients only return arrays) then strip out the parameter
        f_ = partial_f(param_val, param_name)
        df_ = partial_df(param_val, param_name)
        #Reshape so we have a 3d matrix, i.e. (?, N, D), regardless of whether ? is num_params or not.
        #Capture N and D before reshaping f_; reusing f_.shape afterwards would reshape df_ with the wrong axes.
        N_, D_ = f_.shape
        f_ = f_.reshape(-1, N_, D_)
        df_ = df_.reshape(-1, N_, D_)

        #Get the number of f and number of dimensions
        fnum = f_.shape[-2]
        fdim = f_.shape[-1]
        dfnum = df_.shape[-2]

        for fixed_val in range(dfnum):
            #dlik and dlik_dvar give back 1 value for each
            f_ind = min(fnum, fixed_val + 1) - 1
            print("fnum: {} dfnum: {} f_ind: {} fixed_val: {}".format(
                fnum, dfnum, f_ind, fixed_val))
            #Make grad checker with this param moving, note that set_params is NOT being called
            #The parameter is being set directly with __setattr__
            #Check only the parameter and function value we wish to check at a time
            #func = lambda p_val, fnum, fdim, param_ind, f_ind, param_ind: partial_f(p_val, param_name).reshape(-1, fnum, fdim)[param_ind, f_ind, :]
            #dfunc_dparam = lambda d_val, fnum, fdim, param_ind, fixed_val: partial_df(d_val, param_name).reshape(-1, fnum, fdim)[param_ind, fixed_val, :]

            #First we reshape the output so that it is (num_params, N, D), then we pull out the relevant parameter and f-index and checkgrad just this index at a time
            func = lambda p_val: partial_f(p_val, param_name).reshape(
                -1, fnum, fdim)[param_ind, f_ind, :]
            dfunc_dparam = lambda d_val: partial_df(d_val, param_name).reshape(
                -1, fnum, fdim)[param_ind, fixed_val, :]
            grad = GradientChecker(func, dfunc_dparam, param_val, [param_name])

            if constraints is not None:
                for constrain_param, constraint in constraints:
                    if grad.grep_param_names(constrain_param):
                        constraint(constrain_param, grad)
                    else:
                        print("parameter didn't exist")
                    print(constrain_param, " ", constraint)
            if randomize:
                grad.randomize()
            if verbose:
                print(grad)
                grad.checkgrad(verbose=1)
            if not grad.checkgrad(verbose=True):
                gradchecking = False

    return gradchecking