def test_distributions_uniform_random_sample():
    """Sampling with a fixed random_state is reproducible; without one it is not."""
    # NOTE(review): an identical definition of this test appears again below and
    # shadows this one at import time — confirm one copy should be removed.
    d = UniformDistribution(0, 10)
    expected = numpy.array([2.21993171, 8.70732306, 2.06719155,
                            9.18610908, 4.88411189])
    # Seeded draw must match the recorded values exactly.
    assert_array_almost_equal(d.sample(5, random_state=5), expected)
    # An unseeded draw should (almost surely) differ from the seeded one.
    assert_raises(AssertionError, assert_array_almost_equal, d.sample(5), expected)
def test_distributions_uniform_random_sample():
    """Check that a fixed random_state reproduces known samples, and an
    unseeded call does not reproduce them."""
    # NOTE(review): this is a duplicate of the identically-named test above;
    # only this later definition survives module import — verify intent.
    dist = UniformDistribution(0, 10)
    known = numpy.array([2.21993171, 8.70732306, 2.06719155,
                         9.18610908, 4.88411189])
    seeded = dist.sample(5, random_state=5)
    assert_array_almost_equal(seeded, known)
    assert_raises(AssertionError, assert_array_almost_equal, dist.sample(5), known)
def test_uniform():
    """Exercise UniformDistribution: log-probabilities, fitting (with inertia),
    summarize/from_summaries, freeze/thaw, and JSON/pickle round-trips."""
    # NOTE(review): an identical definition of this test appears again below
    # and shadows this one at import time — confirm one copy should be removed.
    d = UniformDistribution(0, 10)

    # Inside [0, 10] the density is constant: log(1/10).
    assert_almost_equal(d.log_probability(2.34), -2.3025850929940455, 8)
    assert_equal(d.log_probability(2), d.log_probability(8))
    assert_equal(d.log_probability(10), d.log_probability(3.4))
    assert_equal(d.log_probability(1.7), d.log_probability(9.7))
    # Outside the support the log-probability is -inf.
    assert_equal(d.log_probability(10.0001), float("-inf"))
    assert_equal(d.log_probability(-0.0001), float("-inf"))

    # Fitting snaps the bounds to the data extremes; samples stay in range.
    for _ in range(10):
        data = numpy.random.randn(100) * 100
        d.fit(data)
        assert_equal(d.parameters[0], data.min())
        assert_equal(d.parameters[1], data.max())

        lo, hi = data.min(), data.max()
        for _ in range(100):
            drawn = d.sample()
            assert_less_equal(lo, drawn)
            assert_less_equal(drawn, hi)

    # inertia=0.5 blends old and new bounds equally.
    d = UniformDistribution(0, 10)
    d.fit([-5, 20], inertia=0.5)
    assert_equal(d.parameters[0], -2.5)
    assert_equal(d.parameters[1], 15)

    # inertia=1.0 keeps the old parameters untouched.
    d.fit([-100, 100], inertia=1.0)
    assert_equal(d.parameters[0], -2.5)
    assert_equal(d.parameters[1], 15)

    # Two summarize calls pool their extremes before from_summaries.
    d.summarize([0, 50, 2, 24, 28])
    d.summarize([-20, 7, 8, 4])
    d.from_summaries(inertia=0.75)
    assert_equal(d.parameters[0], -6.875)
    assert_equal(d.parameters[1], 23.75)

    d.summarize([0, 100])
    d.summarize([100, 200])
    d.from_summaries()
    assert_equal(d.parameters[0], 0)
    assert_equal(d.parameters[1], 200)

    # A frozen distribution ignores fit; thawing re-enables it.
    d.freeze()
    d.fit([0, 1, 6, 7, 8, 3, 4, 5, 2])
    assert_equal(d.parameters, [0, 200])

    d.thaw()
    d.fit([0, 1, 6, 7, 8, 3, 4, 5, 2])
    assert_equal(d.parameters, [0, 8])

    # Serialization round-trips preserve the distribution.
    json_clone = Distribution.from_json(d.to_json())
    assert_equal(json_clone.name, "UniformDistribution")
    assert_equal(json_clone.parameters, [0, 8])

    pickle_clone = pickle.loads(pickle.dumps(json_clone))
    assert_equal(pickle_clone.name, "UniformDistribution")
    assert_equal(pickle_clone.parameters, [0, 8])
def test_uniform():
    """Full behavioral check of UniformDistribution: density, fitting,
    inertia blending, summaries, freeze/thaw, and serialization."""
    # NOTE(review): duplicate of the identically-named test above; only this
    # later definition survives module import — verify intent.
    uniform = UniformDistribution(0, 10)

    # Constant density log(1/10) inside the support, -inf outside.
    assert_almost_equal(uniform.log_probability(2.34), -2.3025850929940455, 8)
    assert_equal(uniform.log_probability(2), uniform.log_probability(8))
    assert_equal(uniform.log_probability(10), uniform.log_probability(3.4))
    assert_equal(uniform.log_probability(1.7), uniform.log_probability(9.7))
    assert_equal(uniform.log_probability(10.0001), float("-inf"))
    assert_equal(uniform.log_probability(-0.0001), float("-inf"))

    # fit() sets bounds to the observed min/max, and samples respect them.
    for _ in range(10):
        observations = numpy.random.randn(100) * 100
        uniform.fit(observations)
        low, high = observations.min(), observations.max()
        assert_equal(uniform.parameters[0], low)
        assert_equal(uniform.parameters[1], high)
        for _ in range(100):
            value = uniform.sample()
            assert_less_equal(low, value)
            assert_less_equal(value, high)

    # Inertia of 0.5 averages old and new bounds; 1.0 freezes them.
    uniform = UniformDistribution(0, 10)
    uniform.fit([-5, 20], inertia=0.5)
    assert_equal(uniform.parameters[0], -2.5)
    assert_equal(uniform.parameters[1], 15)
    uniform.fit([-100, 100], inertia=1.0)
    assert_equal(uniform.parameters[0], -2.5)
    assert_equal(uniform.parameters[1], 15)

    # Pooled summaries, then an inertia-weighted update.
    uniform.summarize([0, 50, 2, 24, 28])
    uniform.summarize([-20, 7, 8, 4])
    uniform.from_summaries(inertia=0.75)
    assert_equal(uniform.parameters[0], -6.875)
    assert_equal(uniform.parameters[1], 23.75)

    uniform.summarize([0, 100])
    uniform.summarize([100, 200])
    uniform.from_summaries()
    assert_equal(uniform.parameters[0], 0)
    assert_equal(uniform.parameters[1], 200)

    # freeze() makes fit a no-op; thaw() restores normal fitting.
    uniform.freeze()
    uniform.fit([0, 1, 6, 7, 8, 3, 4, 5, 2])
    assert_equal(uniform.parameters, [0, 200])
    uniform.thaw()
    uniform.fit([0, 1, 6, 7, 8, 3, 4, 5, 2])
    assert_equal(uniform.parameters, [0, 8])

    # JSON and pickle round-trips preserve name and parameters.
    restored = Distribution.from_json(uniform.to_json())
    assert_equal(restored.name, "UniformDistribution")
    assert_equal(restored.parameters, [0, 8])
    repickled = pickle.loads(pickle.dumps(restored))
    assert_equal(repickled.name, "UniformDistribution")
    assert_equal(repickled.parameters, [0, 8])
def __init__(self, dim , seed=None):
    """Build a pair of K=9-component Gaussian mixture models in `dim` dimensions.

    Both mixtures share one Dirichlet-sampled weight vector and, for matching
    component indices, the same means (drawn uniformly in [0, 1)^dim when the
    first mixture is built, then reused for the second). Covariances are drawn
    independently per component from an inverse-Wishart, with a scale that
    shrinks by component index (j % 3 selects Psi, .01*Psi, or .0001*Psi).

    Parameters
    ----------
    dim : int
        Dimensionality of each Gaussian component.
    seed : int or None
        Seed for numpy's global RNG during construction; the pre-existing
        global RNG state is saved and restored afterwards.

    Side effects: prints each built mixture to stdout (debug output), and
    stores theta0/dim/dists/rstate on the instance.
    """
    # Number of mixture components per model.
    K = 9
    # Mixing proportions between the two models (stored but not used here —
    # presumably consumed by other methods of this class; verify).
    theta0=[.5,.5]
    # Symmetric Dirichlet concentration for the component weights.
    beta=np.ones(K)
    # Base inverse-Wishart scale matrix.
    Psi = .1*np.diag(np.ones(dim))
    #mu0= np.zeros(dim)
    #lambd=.1,
    # Inverse-Wishart degrees of freedom (> dim + 1, so the mean exists).
    nu=dim+2.
    # Save the global numpy RNG state so construction is side-effect-free
    # for callers; restored at the end of this method.
    rstate = np.random.get_state()
    np.random.seed(seed)
    unif_dist = UniformDistribution(0.,1.)
    self.theta0 = theta0
    beta_dist = DirichletDistribution(beta)
    self.dim = Psi.shape[0]
    self.dists = []
    #same weights for both
    weights = beta_dist.sample()
    # Means created for model 0, reused (by index) for model 1.
    mus = []
    for i,_ in enumerate(theta0):
        #weights = beta_dist.sample()
        #print(weights)
        mix = []
        for j,_ in enumerate(weights):
            # Cycle covariance scale: every third component is progressively
            # tighter (Psi, .01*Psi, .0001*Psi).
            if j%3==0:
                Sigma = invwishart.rvs(df=nu, scale=Psi)
            elif j%3==1:
                Sigma = invwishart.rvs(df=nu, scale=.01*Psi)
            else:
                Sigma = invwishart.rvs(df=nu, scale=.0001*Psi)
            if i==0:
                # First model: draw a fresh mean and remember it.
                mu = unif_dist.sample(self.dim)
                #mu =MultivariateGaussianDistribution(mu0,Sigma/lambd).sample()
                mus.append(mu)
            else:
                # Second model: reuse the first model's mean at this index.
                mu = mus[j]
            mix.append( MultivariateGaussianDistribution(mu, Sigma) )
        model = GeneralMixtureModel(mix, weights=weights)
        self.dists.append(model)
    # Debug output of the constructed mixtures.
    for d in self.dists:
        print(d)
    # Keep the RNG state reached by this construction (so sampling can be
    # resumed later — TODO confirm consumers), then restore the caller's.
    self.rstate = np.random.get_state()
    np.random.set_state(rstate)