def test2DNormalDist_kde(self):
    """KDE density estimate of a 2D truncated-normal product distribution.

    Renamed from ``test2DNormalDist`` (with a ``_kde`` suffix): a second
    method of the same name is defined later in this class, which shadowed
    this one so it never ran.  Prints integral, moment, KL, cross-entropy
    and L2-error diagnostics comparing the KDE against the analytic density.
    """
    # Reference density: product of two truncated normals on [0,2] x [0,1].
    U = dists.J([dists.Normal(1.0, .1, 0, 2),
                 dists.Normal(0.5, .01, 0, 1)])
    trainSamples = U.rvs(300)
    testSamples = U.rvs(1000)

    # Fit the kernel density estimator on the training samples, restricted
    # to the support of U.
    dist = dists.KDEDist(trainSamples, bounds=U.getBounds())
    samples = dist.rvs(1000, shuffle=True)

    # The double integral over the support should be ~1 for a density.
    print("quad = %s" % (dblquad(lambda x, y: dist.pdf([x, y]),
                                 0, 2, lambda _: 0, lambda _: 1), ))
    print("mean = %g ~ %g" % (U.mean(), dist.mean()))
    print("var = %g ~ %g" % (U.var(), dist.var()))
    print("KL = %g" % U.klDivergence(dist, testSamples, testSamples))
    print("CE = %g" % dist.crossEntropy(testSamples))
    print("MSE = %g" % dist.l2error(U, testSamples, testSamples))
    plt.show()
def test2DNormalDist(self):
    """SGDE density estimate of a 2D truncated-normal product distribution.

    Fits a sparse-grid density estimator (modlinear grid, Laplace
    regularization) to samples of the analytic density, plots both, and
    prints moment, KL, cross-entropy and L2-error diagnostics.
    """
    # Reference density: product of two truncated normals on [-1,2] x [-1,2].
    # (A previous, immediately-overwritten assignment of U was removed.)
    U = dists.J([dists.Normal(0.5, .5, -1, 2),
                 dists.Normal(0.5, .4, -1, 2)])

    # Fixed seed so the sampled training/test sets are reproducible.
    np.random.seed(1234567)
    trainSamples = U.rvs(300)
    testSamples = U.rvs(1000)

    # Fit the sparse-grid density estimator on the training samples.
    dist = SGDEdist.byLearnerSGDEConfig(
        trainSamples,
        config={"grid_level": 5,
                "grid_type": "modlinear",
                "refinement_numSteps": 0,
                "refinement_numPoints": 10,
                "regularization_type": "Laplace",
                "crossValidation_lambda": 0.000562341,
                "crossValidation_enable": False,
                "crossValidation_kfold": 5,
                "crossValidation_silent": False,
                "sgde_makePositive": False,
                "sgde_makePositive_candidateSearchAlgorithm": "joined",
                "sgde_makePositive_interpolationAlgorithm": "setToZero",
                "sgde_makePositive_generateConsistentGrid": False,
                "sgde_makePositive_verbose": True,
                "sgde_unitIntegrand": True},
        bounds=U.getBounds())

    # Visual comparison: analytic density vs. sparse-grid surrogate.
    fig = plt.figure()
    plotDensity2d(U)
    fig.show()

    fig = plt.figure()
    plotSG2d(dist.grid, dist.alpha, addContour=True,
             show_negative=True, show_grid_points=True)
    fig.show()

    print("2d: mean = %g ~ %g" % (U.mean(), dist.mean()))
    print("2d: var = %g ~ %g" % (U.var(), dist.var()))
    plt.show()

    print("KL = %g" % U.klDivergence(dist, testSamples, testSamples))
    print("CE = %g" % dist.crossEntropy(testSamples))
    print("MSE = %g" % dist.l2error(U, testSamples, testSamples))
def test_2DNormalDist_variance(self):
    """Check SGDE moments via direct interpolation of an analytic 2D pdf.

    Instead of learning from samples, the pdf of ``U`` is evaluated at
    the sparse-grid points, hierarchized, and wrapped into an SGDEdist;
    its mean/variance are then compared against the analytic values.
    """
    # Analytic reference: product of two truncated normals.
    U = dists.J([dists.Normal(2.0, .5, -1, 4),
                 dists.Normal(1.0, .5, -1, 3)])

    # Linear map from the unit hypercube to the probabilistic domain.
    trans = JointTransformation()
    for lower, upper in U.getBounds():
        trans.add(LinearTransformation(lower, upper))

    # Regular sparse grid (polynomial basis, degree 10, level 5).
    grid = Grid.createPolyGrid(U.getDim(), 10)
    grid.getGenerator().regular(5)
    storage = grid.getStorage()

    # Evaluate the pdf at every grid point (in probabilistic coordinates).
    point = DataVector(storage.getDimension())
    node_values = np.ndarray(storage.getSize())
    for idx in range(storage.getSize()):
        storage.getPoint(idx).getStandardCoordinates(point)
        node_values[idx] = U.pdf(trans.unitToProbabilistic(point.array()))

    # Turn nodal values into hierarchical surpluses and build the density.
    alpha = hierarchize(grid, node_values)
    dist = SGDEdist(grid, alpha, bounds=U.getBounds())

    # Visual comparison of the analytic and interpolated densities.
    fig = plt.figure()
    plotDensity2d(U)
    fig.show()

    fig = plt.figure()
    plotSG2d(dist.grid, dist.alpha, addContour=True,
             show_negative=True, show_grid_points=True)
    fig.show()

    print("2d: mean = %g ~ %g" % (U.mean(), dist.mean()))
    print("2d: var = %g ~ %g" % (U.var(), dist.var()))
    plt.show()
def test2DNormalMoments_kde(self):
    """KDE moments of a 2D truncated normal match the analytic moments.

    Renamed from ``test2DNormalMoments`` (with a ``_kde`` suffix): a second
    method of the same name is defined later in this class, which shadowed
    this one so it never ran.
    """
    mean = 0
    var = 0.5
    U = dists.J([dists.Normal(mean, var, -2, 2),
                 dists.Normal(mean, var, -2, 2)])
    trainSamples = U.rvs(10000)

    dist = KDEDist(trainSamples)
    # -----------------------------------------------
    # KDE mean/variance should agree with the analytic ones to ~1e-2.
    self.assertTrue(np.abs(U.mean() - dist.mean()) < 1e-2,
                    "KDE mean wrong")
    self.assertTrue(np.abs(U.var() - dist.var()) < 1e-2,
                    "KDE variance wrong")
def test1DNormalDist(self):
    """KDE density estimate of a 1D truncated normal on [0, 3].

    Fits a Gaussian-kernel KDE with maximum-likelihood bandwidth
    optimization and prints integral, moment, KL, cross-entropy and
    L2-error diagnostics against the analytic density.
    """
    # Analytic reference density and sample sets (as column vectors).
    U = dists.Normal(1.85, .3, 0, 3)
    train_data = np.array([U.rvs(500)]).T
    test_data = np.array([U.rvs(1000)]).T

    # Kernel density estimator restricted to the support of U.
    dist = KDEDist(train_data,
                   kernelType=KernelType_GAUSSIAN,
                   bandwidthOptimizationType=BandwidthOptimizationType_MAXIMUMLIKELIHOOD,
                   bounds=U.getBounds())

    # The integral over [0, 3] should be ~1 for a proper density.
    integral = quad(lambda x: dist.pdf([x]), 0, 3)
    print("quad = %s" % (integral, ))
    print("mean = %g ~ %g" % (U.mean(), dist.mean()))
    print("var = %g ~ %g" % (U.var(), dist.var()))
    print("KL = %g" % U.klDivergence(dist, test_data, test_data))
    print("CE = %g" % dist.crossEntropy(test_data))
    print("MSE = %g" % dist.l2error(U, test_data, test_data))
    plt.show()
def test_1DNormalDist_variance(self):
    """Check SGDE moments via direct interpolation of an analytic 1D pdf.

    The pdf of ``U`` is evaluated at the sparse-grid points, hierarchized,
    and wrapped into an SGDEdist; mean/variance are printed next to the
    analytic values.
    """
    # Analytic reference density on [-8, 8].
    U = dists.Normal(1, 2, -8, 8)
    # U = dists.Normal(0.5, .2, 0, 1)

    # Linear map from the unit interval to the probabilistic domain.
    trans = JointTransformation()
    a, b = U.getBounds()
    trans.add(LinearTransformation(a, b))

    # Regular sparse grid (polynomial basis, degree 10, level 5).
    grid = Grid.createPolyGrid(U.getDim(), 10)
    grid.getGenerator().regular(5)
    gs = grid.getStorage()

    # Evaluate the pdf at every grid point.
    p = DataVector(gs.getDimension())
    nodalValues = np.ndarray(gs.getSize())
    for i in range(gs.getSize()):
        gs.getPoint(i).getStandardCoordinates(p)
        nodalValues[i] = U.pdf(trans.unitToProbabilistic(p.array()))

    # Turn nodal values into hierarchical surpluses and build the density.
    alpha = hierarchize(grid, nodalValues)
    dist = SGDEdist(grid, alpha, bounds=U.getBounds())

    # Raw strings fix the label bug: "$\alpha..." contained "\a", which
    # Python interprets as a BEL control character, and "\m" was an
    # invalid escape sequence.
    fig = plt.figure()
    plotDensity1d(U, alpha_value=0.1,
                  mean_label=r"$\mathbb{E}",
                  interval_label=r"$\alpha=0.1$")
    fig.show()

    fig = plt.figure()
    plotDensity1d(dist, alpha_value=0.1,
                  mean_label=r"$\mathbb{E}",
                  interval_label=r"$\alpha=0.1$")
    fig.show()

    print("1d: mean = %g ~ %g" % (U.mean(), dist.mean()))
    print("1d: var = %g ~ %g" % (U.var(), dist.var()))
    plt.show()
def test2DNormalMoments(self):
    """SGDE moments of a 2D truncated normal match the analytic moments.

    Learns a sparse-grid density from samples, asserts its mean/variance
    against the analytic values, and compares it with a KDE fit both
    numerically (moments, cross-entropy, covariance, marginals) and
    visually (2D contour plots with the drawn samples overlaid).
    """
    # Analytic reference: product of two truncated normals on [-2, 2]^2.
    mean = 0
    var = 0.5
    U = dists.J([dists.Normal(mean, var, -2, 2),
                 dists.Normal(mean, var, -2, 2)])

    # Fixed seed so the sampled training set is reproducible.
    np.random.seed(1234567)
    train_samples = U.rvs(1000)

    # Sparse-grid density estimate (linear grid, Laplace regularization,
    # positivity enforced).
    sgde = SGDEdist.byLearnerSGDEConfig(
        train_samples,
        config={"grid_level": 5,
                "grid_type": "linear",
                "refinement_numSteps": 0,
                "refinement_numPoints": 10,
                "regularization_type": "Laplace",
                "crossValidation_lambda": 0.000562341,
                "crossValidation_enable": False,
                "crossValidation_kfold": 5,
                "crossValidation_silent": True,
                "sgde_makePositive": True},
        bounds=U.getBounds())
    sgde_samples = sgde.rvs(1000, shuffle=True)

    # KDE fit on the same training data, for comparison.
    kde = KDEDist(train_samples)
    kde_samples = kde.rvs(1000, shuffle=True)

    # -----------------------------------------------
    # SGDE moments must agree with the analytic ones.
    self.assertTrue(np.abs(U.mean() - sgde.mean()) < 1e-2,
                    "SGDE mean wrong")
    self.assertTrue(np.abs(U.var() - sgde.var()) < 4e-2,
                    "SGDE variance wrong")

    # -----------------------------------------------
    # print the results
    print("E(x) ~ %g ~ %g" % (kde.mean(), sgde.mean()))
    print("V(x) ~ %g ~ %g" % (kde.var(), sgde.var()))
    print("log ~ %g ~ %g" % (kde.crossEntropy(train_samples),
                             sgde.crossEntropy(train_samples)))
    print("-" * 60)
    print(sgde.cov())
    print(kde.cov())

    # First marginals of both estimates vs. the analytic marginal.
    sgde_marg0 = sgde.marginalizeToDimX(0)
    kde_marg0 = kde.marginalizeToDimX(0)

    plt.figure()
    plotDensity1d(U.getDistributions()[0], label="analytic")
    plotDensity1d(sgde_marg0, label="sgde")
    plotDensity1d(kde_marg0, label="kde")
    plt.title("mean: sgde=%g, kde=%g; var: sgde=%g, kde=%g" %
              (sgde_marg0.mean(), kde_marg0.mean(),
               sgde_marg0.var(), kde_marg0.var()))
    plt.legend()

    # 2D contour plots: analytic, KDE (with samples), SGDE (with samples).
    plt.figure()
    plotDensity2d(U, addContour=True)
    plt.title("analytic")

    plt.figure()
    plotDensity2d(kde, addContour=True)
    plt.scatter(kde_samples[:, 0], kde_samples[:, 1])
    plt.title("kde")

    plt.figure()
    plotDensity2d(sgde, addContour=True)
    plt.scatter(sgde_samples[:, 0], sgde_samples[:, 1])
    plt.title("sgde (I(f) = %g)" %
              (np.prod(U.getBounds()) * doQuadrature(sgde.grid, sgde.alpha), ))
    plt.show()