def testProductCopula(self):
    # product of two independent U(0, 1) marginals:
    # E[XY] = 1/2 * 1/2 = 0.25, Var(XY) = E[X^2]E[Y^2] - E[XY]^2 = 1/9 - 1/16
    U = dists.J([dists.Uniform(0, 1), dists.Uniform(0, 1)])
    self.assertEqual(U.mean(), 0.25)
    self.assertEqual(U.var(), 1. / 9 - 1. / 16)

    # a single marginal: the joint's moments equal the marginal's
    U = dists.J([dists.TNormal(0, 2, -5, 5)])
    self.assertEqual(U.mean(), 0)
    self.assertEqual(U.var(), 4)

    # product of two independent marginals, using E[X^2] = Var X + (E X)^2:
    # mean = 1 * 2, var = (4 + 1)(9 + 4) - (1 * 2)^2
    U = dists.J([dists.TNormal(1, 2, -4, 6), dists.TNormal(2, 3, -3, 7)])
    self.assertEqual(U.mean(), 2)
    self.assertEqual(U.var(), 5. * 13. - 1. * 4.)
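# Where the expected moments above come from: for independent X and Y,
#   E[XY]   = E[X] * E[Y]
#   Var(XY) = E[X^2] * E[Y^2] - (E[X] * E[Y])^2
#           = (Var X + (E X)^2)(Var Y + (E Y)^2) - (E[X] * E[Y])^2.
# A minimal Monte-Carlo sanity check of the uniform case (standalone sketch,
# not part of the test suite; assumes only numpy):
def _mcProductMomentsSketch(n=100000, seed=0):
    import numpy as np
    rng = np.random.RandomState(seed)
    x, y = rng.uniform(0, 1, n), rng.uniform(0, 1, n)
    # empirical mean ~ 0.25, empirical variance ~ 1/9 - 1/16 ~ 0.0486
    return np.mean(x * y), np.var(x * y)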
def testDiscretization(self):
    # a product of uniforms is constant, so a level-1 grid with boundary
    # points reproduces it up to machine precision
    epsilon = 1e-14
    U = dists.J([dists.Uniform(-1, 2), dists.Uniform(0, 3)])
    _, error = U.discretize(level=1, hasBorder=True)
    assert error < epsilon

    # smooth, non-polynomial densities are only approximated; the error
    # shrinks with increasing grid level
    epsilon = 1e-3
    U = dists.J([dists.TNormal(0.5, 0.1, 0, 1)])
    _, error = U.discretize(level=10)
    assert error < epsilon

    # in two dimensions the same level yields a coarser approximation
    epsilon = 1e-1
    U = dists.J([dists.TNormal(0.5, 0.1, 0, 1), dists.Beta(5, 10)])
    _, error = U.discretize(level=10)
    assert error < epsilon
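# The error returned by J.discretize above quantifies how well the sparse-grid
# surrogate matches the analytic density. A hand-rolled, density-weighted
# variant of such a measure (illustrative sketch only; `analytic` and `approx`
# are hypothetical stand-ins for U and the discretized density, and the
# rvs/pdf call conventions are assumed to match the Dist interface used above):
def _mcPdfMismatchSketch(analytic, approx, n=10000):
    import numpy as np
    # draw from the analytic density and compare pdf values pointwise;
    # this estimates sqrt(E_U[(u(x) - u_hat(x))^2])
    samples = analytic.rvs(n)
    diff = np.array([analytic.pdf(x) - approx.pdf(x) for x in samples])
    return np.sqrt(np.mean(diff ** 2))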
def test1DNormalDist(self):
    # prepare data: train and test samples from a truncated normal
    U = dists.TNormal(0.5, .2, -1, 2)
    np.random.seed(1234567)
    trainSamples = np.array([U.rvs(1000)]).T
    testSamples = np.array([U.rvs(1000)]).T

    # build the SGDE density estimate from the training samples
    dist = SGDEdist.byLearnerSGDEConfig(
        trainSamples,
        config={"grid_level": 6,
                "grid_type": "modlinear",
                "grid_maxDegree": 3,
                "refinement_numSteps": 0,
                "refinement_numPoints": 10,
                "solver_threshold": 1e-10,
                "solver_verbose": True,
                "regularization_type": "Laplace",
                "crossValidation_enable": True,
                "crossValidation_kfold": 5,
                "crossValidation_silent": False,
                "sgde_makePositive": False,
                "sgde_makePositive_candidateSearchAlgorithm": "fullGrid",
                "sgde_makePositive_interpolationAlgorithm": "setToZero",
                "sgde_makePositive_verbose": True,
                "sgde_unitIntegrand": False},
        bounds=np.array([U.getBounds()]))

    # compare the analytic and the estimated density visually ...
    plt.figure()
    plotDensity1d(U, label="analytic")
    plotDensity1d(dist, label="sgde")
    plt.legend()

    # ... and report moment errors and distance measures
    print("1d: mean = %g ~ %g (err=%g)" % (np.mean(trainSamples),
                                           dist.mean(),
                                           np.abs(np.mean(trainSamples) - dist.mean()) / np.mean(trainSamples)))
    print("1d: var = %g ~ %g (err=%g)" % (np.var(trainSamples),
                                          dist.var(),
                                          np.abs(np.var(trainSamples) - dist.var()) / np.var(trainSamples)))
    print("KL = %g" % U.klDivergence(dist, testSamples, testSamples))
    print("CE = %g" % dist.crossEntropy(testSamples))
    print("MSE = %g" % dist.l2error(U, testSamples, testSamples))
    plt.show()
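# The KL and CE figures printed above are sample-based estimates. The standard
# Monte-Carlo estimators look as follows (sketch only; the exact semantics of
# klDivergence/crossEntropy in this library may differ in detail, e.g. in how
# non-positive pdf values are handled):
def _mcCrossEntropySketch(q, samples):
    import numpy as np
    # CE(p, q) ~ -1/n * sum_i log q(x_i), with x_i drawn from p
    return -np.mean([np.log(max(q.pdf(x), 1e-300)) for x in samples])

def _mcKLDivergenceSketch(p, q, samples):
    import numpy as np
    # KL(p || q) ~ 1/n * sum_i log(p(x_i) / q(x_i)), with x_i drawn from p
    return np.mean([np.log(max(p.pdf(x), 1e-300) / max(q.pdf(x), 1e-300))
                    for x in samples])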