def test_recursion_limit(self):
    """Linking many identically-named children must not exhaust recursion.

    The auto-renaming of clashing sibling names used to recurse once per
    sibling; with 20 kernels all called 'rbf' and a recursion limit of 100
    this blows up.  Individually named kernels link fine under the normal
    limit.
    """
    # Recursion limit reached for unnamed (identically named) kernels:
    def max_recursion():
        kerns = [
            P('rbf',
              lengthscale=Param('lengthscale', 1),
              variance=Param('variance', 1))
            for i in range(20)
        ]
        p = Parameterized('add')
        p.link_parameters(*kerns)

    import sys
    sys.setrecursionlimit(100)
    try:
        # Python >= 3.5 raises RecursionError; older versions RuntimeError.
        from builtins import RecursionError as RE
    except ImportError:  # was a bare except: — catch only the missing name
        RE = RuntimeError
    # assertRaisesRegex: the *Regexp alias is deprecated and removed in 3.12.
    self.assertRaisesRegex(RE, "aximum recursion depth", max_recursion)

    # Recursion limit not reached if kernels are named individually:
    sys.setrecursionlimit(1000)
    p = Parameterized('add')
    kerns = [
        P('rbf_{}'.format(i),
          lengthscale=Param('lengthscale', 1),
          variance=Param('variance', 1))
        for i in range(10)
    ]
    p.link_parameters(*kerns)
def __init__(self, name=None, parameters=(), *a, **kw):
    """Test container exercising fix/unfix interplay with row slices.

    :param name: passed through to the superclass.
    :param parameters: accepted for signature compatibility but unused;
        the default was the shared mutable ``[]`` (classic mutable-default
        pitfall) and is now an immutable empty tuple — behavior unchanged
        since the argument is never read.
    """
    super(Test, self).__init__(name=name)
    # 3x4 parameter created fully fixed via the __fixed__ transformation.
    self.x = Param('x', np.random.uniform(0, 1, (3, 4)), transformations.__fixed__)
    # Constrain only the first row, then link into the hierarchy.
    self.x[0].constrain_bounded(0, 1)
    self.link_parameter(self.x)
    # Unfix the whole parameter, then re-fix just the second row.
    self.x.unfix()
    self.x[1].fix()
def max_recursion():
    """Link 20 identically-named kernels under one parent.

    Every child is called 'rbf', so linking forces the hierarchy's
    name-deduplication machinery to run for each sibling.
    """
    children = []
    for _ in range(20):
        child = P('rbf',
                  lengthscale=Param('lengthscale', 1),
                  variance=Param('variance', 1))
        children.append(child)
    parent = Parameterized('add')
    parent.link_parameters(*children)
def test_names_already_exist(self):
    """Clashing sibling names get a numeric suffix appended automatically.

    Whether the clash comes from linking a new parameter or from renaming
    an existing one, the hierarchy renames one party so that names stay
    unique among siblings.
    """
    self.test1.kern.name = 'newname'
    # Linking a second child called 'newname' auto-renames it to 'newname_1'.
    self.test1.p = Param('newname', 1.22345)
    self.test1.link_parameter(self.test1.p)
    self.assertSequenceEqual(self.test1.kern.name, 'newname')
    self.assertSequenceEqual(self.test1.p.name, 'newname_1')
    # ...and a third clash gets the next free suffix.
    self.test1.p2 = Param('newname', 1.22345)
    self.test1.link_parameter(self.test1.p2)
    self.assertSequenceEqual(self.test1.p2.name, 'newname_2')
    # Renaming lengthscale to an already-taken sibling name ('variance')
    # bumps it to 'variance_1'; further collisions chase the next suffix.
    self.test1.kern.rbf.lengthscale.name = 'variance'
    self.assertSequenceEqual(self.test1.kern.rbf.lengthscale.name, 'variance_1')
    self.test1.kern.rbf.variance.name = 'variance_1'
    self.assertSequenceEqual(self.test1.kern.rbf.lengthscale.name, 'variance_2')
    # Renaming variance back to 'variance' leaves lengthscale at 'variance_2'.
    self.test1.kern.rbf.variance.name = 'variance'
    self.assertSequenceEqual(self.test1.kern.rbf.lengthscale.name, 'variance_2')
    self.assertSequenceEqual(self.test1.kern.rbf.variance.name, 'variance')
def test_hierarchy_error(self):
    """Illegal link/unlink operations raise HierarchyError."""
    # Re-linking a parameter that is already part of this model fails.
    self.assertRaises(HierarchyError, self.testmodel.link_parameter,
                      self.testmodel.parameters[0])
    # Linking a child (p2.variance) whose parent is outside the model is
    # allowed and keeps gradients consistent.
    p2 = P('Gaussian_noise',
           variance=Param('variance', np.random.uniform(0.1, 0.5),
                          transformations.Logexp()))
    self.testmodel.link_parameter(p2.variance)
    self.assertTrue(self.testmodel.checkgrad())
    # Unlinking an object that was never linked (p2 itself) fails, as does
    # unlinking something that is not a parameter at all.
    self.assertRaises(HierarchyError, self.testmodel.unlink_parameter, p2)
    self.assertRaises(HierarchyError, self.testmodel.unlink_parameter,
                      'not a parameter')
def setUp(self):
    """Assemble the fixture model: an rbf kernel plus Gaussian noise.

    Statement order (including the np.random draws) matches the previous
    fixture exactly, so seeded runs see identical values.
    """
    self.testmodel = M('testmodel')
    self.testmodel.kern = P('rbf')
    self.testmodel.likelihood = P(
        'Gaussian_noise',
        variance=Param('variance', np.random.uniform(0.1, 0.5),
                       transformations.Logexp()))
    for child in (self.testmodel.kern, self.testmodel.likelihood):
        self.testmodel.link_parameter(child)
    # Give the kernel its own positively-constrained parameters.
    var = Param('variance', np.random.uniform(0.1, 0.5),
                transformations.Logexp())
    ls = Param('lengthscale', np.random.uniform(.1, 1, 1),
               transformations.Logexp())
    self.testmodel.kern.variance = var
    self.testmodel.kern.lengthscale = ls
    self.testmodel.kern.link_parameter(ls)
    self.testmodel.kern.link_parameter(var)
    self.testmodel.trigger_update()
def setUp(self):
    """Build a quadratic test model using delayed (initialize=False) setup.

    M is a minimal Model whose objective is sum(theta**2) with gradient
    2*theta, giving a known optimum at zero for optimizer/checkgrad tests.
    """
    class M(Model):
        def __init__(self, name, **kwargs):
            super(M, self).__init__(name=name)
            # Attach and link every keyword argument as a parameter.
            for k, val in kwargs.items():
                self.__setattr__(k, val)
                self.link_parameter(self.__getattribute__(k))

        def objective_function(self):
            return self._obj

        def log_likelihood(self):
            return -self.objective_function()

        def parameters_changed(self):
            # Objective: squared L2 norm of the full parameter array.
            self._obj = (self.param_array**2).sum()
            self.gradient[:] = 2 * self.param_array

    import warnings
    with warnings.catch_warnings():
        # initialize=False defers updates and can emit "not initialized"
        # warnings during construction; silence them for the fixture.
        warnings.simplefilter("ignore")
        self.testmodel = M('testmodel', initialize=False)
        self.testmodel.kern = Parameterized('rbf', initialize=False)
        self.testmodel.likelihood = P(
            'Gaussian_noise',
            variance=Param('variance', np.random.uniform(0.1, 0.5),
                           transformations.Logexp()),
            initialize=False)
        self.testmodel.link_parameter(self.testmodel.kern)
        self.testmodel.link_parameter(self.testmodel.likelihood)
        variance = Param('variance', np.random.uniform(0.1, 0.5),
                         transformations.Logexp())
        lengthscale = Param('lengthscale', np.random.uniform(.1, 1, 1),
                            transformations.Logexp())
        self.testmodel.kern.variance = variance
        self.testmodel.kern.lengthscale = lengthscale
        self.testmodel.kern.link_parameter(lengthscale)
        self.testmodel.kern.link_parameter(variance)
def setUp(self):
    """Build the nested fixture hierarchy.

    test_parameterized
      ├── param (10x5, Logistic(0, 1))
      └── add
            ├── rbf   (variance, lengthscale — both Logexp)
            └── white (variance — Logexp)
    """
    self.rbf = Parameterized('rbf')
    self.rbf.lengthscale = Param('lengthscale', np.random.uniform(.1, 1),
                                 transformations.Logexp())
    self.rbf.variance = Param('variance', np.random.uniform(0.1, 0.5),
                              transformations.Logexp())
    self.rbf.link_parameters(self.rbf.variance, self.rbf.lengthscale)
    self.white = P('white',
                   variance=Param('variance', np.random.uniform(0.1, 0.5),
                                  transformations.Logexp()))
    # 10x5 parameter constrained to (0, 1) via a logistic transform.
    self.param = Param('param', np.random.uniform(0, 1, (10, 5)),
                       transformations.Logistic(0, 1))
    self.test1 = Parameterized('test_parameterized')
    self.test1.param = self.param
    self.test1.kern = Parameterized('add')
    self.test1.kern.link_parameters(self.rbf, self.white)
    self.test1.link_parameter(self.test1.kern)
    # Insert param at index 0, i.e. before the kernel in the hierarchy.
    self.test1.link_parameter(self.param, 0)
def test_add_parameter_in_hierarchy(self):
    """Inserting a parameter mid-hierarchy shifts constraint index ranges."""
    # Insert a 2-element 'NEW' parameter into rbf at position 1.
    self.test1.kern.rbf.link_parameter(
        Param("NEW", np.random.rand(2), transformations.NegativeLogexp()), 1)
    # NEW occupies the two indices just past the 50 'param' entries.
    self.assertListEqual(
        self.test1.constraints[transformations.NegativeLogexp()].tolist(),
        list(range(self.param.size + 1, self.param.size + 1 + 2)))
    # The logistic-constrained block (param itself) is untouched.
    self.assertListEqual(
        self.test1.constraints[transformations.Logistic(0, 1)].tolist(),
        list(range(self.param.size)))
    # NOTE(review): Logexp is normally constructed without arguments; the
    # (0, 1) here looks suspicious — confirm upstream accepts/ignores them.
    self.assertListEqual(
        self.test1.constraints[transformations.Logexp(0, 1)].tolist(),
        np.r_[50, 53:55].tolist())
def __init__(
    self,
    input_dim,
    strings,
    d=4,
    variance=1.0,
    active_dims=None,
    name="weighted degree",
):
    """Weighted-degree string kernel wrapper.

    :param input_dim: dimensionality of the input space.
    :param strings: array-like of strings the kernel is evaluated over.
    :param d: maximum substring degree for the WD kernel.
    :param variance: initial value of the (positive) variance parameter.
    :param active_dims: dimensions this kernel acts on (passed to the base).
    :param name: kernel name.
    """
    super(WeightedDegree, self).__init__(input_dim, active_dims, name)
    # BUG FIX: Param takes (name, value, transform); the old call passed
    # active_dims as the value and variance as the transform.
    self.variance = Param("variance", variance, Logexp())
    self.link_parameters(self.variance)
    self.strings = strings
    self.string_kernel = WD_K(self.strings.tolist(), d=d)
def __init__(self, param1=2., param2=3., param3=None):
    """Likelihood fixture exercising fix/constrain state transitions.

    :param param1: initial value of p1 (ends up positive-constrained, unfixed).
    :param param2: initial value of p2 (ends up fixed AND positive-constrained).
    :param param3: unused; previously defaulted to ``np.random.uniform(...)``
        evaluated once at class-definition time (shared across instances and
        drawn at import).  Now drawn per-instance when not supplied.
    """
    if param3 is None:
        param3 = np.random.uniform(size=(2, 2, 2))
    super(TestLikelihood, self).__init__("TestLike")
    self.p1 = Param('param1', param1)
    self.p2 = Param('param2', param2)
    self.link_parameter(self.p1)
    self.link_parameter(self.p2)
    # Exercise fix -> unfix round trip on p1.
    self.p1.fix()
    self.p1.unfix()
    # Constrain both via regex indexing, then flip p2 negative/positive
    # around fix() calls to exercise constraint bookkeeping while fixed.
    self['.*param'].constrain_positive()
    self.p2.constrain_negative()
    self.p1.fix()
    self.p2.constrain_positive()
    self.p2.fix()
    self.p2.constrain_positive()
    # Drop the positive constraint from p1 only.
    self['.*param1'].unconstrain(transformations.Logexp())
def test_empty_parameterized(self):
    """Regex indexing returns Param, Parameterized, or a ParamConcatenation
    depending on how many objects the pattern matches."""
    self.testmodel.name = 'anothername'
    # An empty Parameterized can be linked without error.
    self.testmodel.link_parameter(Parameterized('empty'))
    hmm = Parameterized('test')
    self.testmodel.kern.test = hmm
    self.testmodel.kern.link_parameter(hmm)
    self.testmodel.kern.test.link_parameter(Param('test1', 1))
    # Unique matches come back as the object itself...
    self.assertIsInstance(self.testmodel['.*test1$'], Param)
    self.assertIsInstance(self.testmodel['.*test$'], Parameterized)
    self.assertIsInstance(self.testmodel['.*empty'], Parameterized)
    # ...multiple matches get concatenated.
    self.assertIsInstance(self.testmodel['.*test'], ParamConcatenation)
    self.assertIsInstance(self.testmodel['.*rbf$'], Parameterized)
    # Dotted-path lookup returns the identical objects, not copies.
    self.assertIs(self.testmodel['rbf.variance'], self.testmodel.rbf.variance)
    self.assertIs(self.testmodel['rbf$'], self.testmodel.rbf)
def test_optimize_error(self):
    """optimize_restarts: robust=True swallows failures, robust=False raises.

    M's objective/likelihood always raise, so every restart fails; with
    _allowed_failures = 1 the robust path tolerates it, the non-robust
    path propagates the ValueError.
    """
    class M(Model):
        def __init__(self, name, **kwargs):
            super(M, self).__init__(name=name)
            # Attach and link every keyword argument as a parameter.
            for k, val in kwargs.items():
                self.__setattr__(k, val)
                self.link_parameter(self.__getattribute__(k))
            # Allow exactly one failed restart before giving up entirely.
            self._allowed_failures = 1

        def objective_function(self):
            raise ValueError('Some error occured')

        def log_likelihood(self):
            raise ValueError('Some error occured')

        def parameters_changed(self):
            #self._obj = (self.param_array**2).sum()
            self.gradient[:] = 2 * self.param_array

    testmodel = M("test", var=Param('test', np.random.normal(0, 1, (20))))
    # robust=True: each failure is caught and the next restart is attempted.
    testmodel.optimize_restarts(2, messages=0, optimizer='org-bfgs',
                                xtol=0, ftol=0, gtol=1e-6, robust=True)
    # robust=False: the underlying ValueError propagates to the caller.
    self.assertRaises(ValueError, testmodel.optimize_restarts, 1,
                      messages=0, optimizer='org-bfgs', xtol=0, ftol=0,
                      gtol=1e-6, robust=False)