Example #1
    def testExp2d(self):
        trainSamples = np.loadtxt("exp_2d.csv").T
        # build parameter set
        dist = SGDEdist.byLearnerSGDEConfig(
            trainSamples,
            config={
                "grid_level": 7,
                "grid_type": "linear",
                "grid_maxDegree": 1,
                "refinement_numSteps": 0,
                "refinement_numPoints": 10,
                "solver_threshold": 1e-10,
                "solver_verbose": False,
                "regularization_type": "Laplace",
                "crossValidation_lambda": 0.000562341,
                "crossValidation_enable": False,
                "crossValidation_kfold": 5,
                "crossValidation_silent": False,
                "sgde_makePositive": True,
                "sgde_makePositive_candidateSearchAlgorithm": "joined",
                "sgde_makePositive_interpolationAlgorithm":
                "interpolateBoundaries1d",
                "sgde_unitIntegrand": True
            })

        fig, ax, _ = plotDensity3d(dist)
        ax.scatter(trainSamples[:, 0], trainSamples[:, 1],
                   np.zeros(trainSamples.shape[0]))
        ax.set_title("vol=%.12f" % dist.vol)
        fig.show()
        plt.show()
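The snippets in this section omit their imports. A minimal set they appear to rely on is sketched below; the pysgpp module paths are an assumption based on the SG++ Python extensions and may differ between versions.

import numpy as np
import matplotlib.pyplot as plt

# assumed locations within the SG++ (pysgpp) datadriven extensions
from pysgpp.extensions.datadriven.uq.dists import SGDEdist
from pysgpp.extensions.datadriven.uq.plot.plot3d import plotDensity3d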
Example #2
    def testExpPoly2d(self):
        trainSamples = np.loadtxt("exp_2d.csv").T
        # build parameter set
        dist_sgde = SGDEdist.byLearnerSGDEConfig(
            trainSamples,
            config={
                "grid_level": 4,
                "grid_type": "modpoly",
                "grid_maxDegree": 6,
                "refinement_numSteps": 0,
                "refinement_numPoints": 10,
                "solver_threshold": 1e-10,
                "solver_verbose": True,
                "regularization_type": "Laplace",
                "crossValidation_lambda": 0.000562341,
                "crossValidation_enable": False,
                "crossValidation_kfold": 5,
                "crossValidation_silent": True,
                "sgde_makePositive": False,
                "sgde_makePositive_candidateSearchAlgorithm": "joined",
                "sgde_makePositive_interpolationAlgorithm": "setToZero",
                "sgde_makePositive_verbose": True,
                "sgde_unitIntegrand": True
            })

        # build kernel density estimate for comparison
        dist_kde = dists.KDEDist(
            trainSamples,
            kernelType=KernelType_GAUSSIAN,
            bandwidthOptimizationType=BandwidthOptimizationType_SILVERMANSRULE)

        # fig = plt.figure()
        # plotSG2d(dist.grid, dist.alpha, show_grid_points=True)
        # plt.scatter(trainSamples[:, 0], trainSamples[:, 1], np.zeros(trainSamples.shape[0]))
        # plt.title("%.12f" % dist.vol)

        fig, _, _ = plotDensity3d(dist_sgde)
        plt.title("SGDE: vol=%g" % dist_sgde.vol)

        fig, _, _ = plotDensity3d(dist_kde)
        plt.title("KDE: vol=1.0")
        plt.show()
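Beyond the visual comparison, the two estimators could also be scored on the same data. This is only an illustrative sketch: it assumes KDEDist exposes the same crossEntropy method that SGDEdist uses in Example #3, and it reuses the training samples because the excerpt defines no separate test set.

# illustrative: lower cross entropy indicates a better fit to the samples
print("SGDE cross entropy = %g" % dist_sgde.crossEntropy(trainSamples))
print("KDE cross entropy = %g" % dist_kde.crossEntropy(trainSamples))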
Example #3
samples = U.rvs(1000)
testSamples = U.rvs(1000)
# ---------- using SGDE from SG++ ------------------------
dist = SGDEdist.byLearnerSGDEConfig(samples,
                                    config={"grid_level": 6,
                                            "grid_type": "Linear",
                                            "refinement_numSteps": 0,
                                            "refinement_numPoints": 3,
                                            "regularization_type": "Laplace",
                                            "crossValidation_lambda": 0.000562341,
                                            "crossValidation_enable": False,
                                            "crossValidation_kfold": 5,
                                            "crossValidation_silent": False},
                                    bounds=U.getBounds())

fig, ax = plotDensity3d(U)
ax.set_title("true density")
fig.show()
fig, ax, _ = plotSG3d(dist.grid, dist.alpha)
ax.set_title("estimated density")
fig.show()

print("mean = %g ~ %g" % (m.prod(), dist.mean()))
print("var = %g ~ %g" % (np.var(testSamples), dist.var()))
print("KL-divergence = %g" % U.klDivergence(dist, testSamples, testSamples))
print("cross entropy = %g" % dist.crossEntropy(testSamples))
print("MSE = %g" % dist.l2error(U, testSamples, testSamples))

# sampling
uniform_samples = np.random.random((1000, 2))
samples = dist.ppf(uniform_samples)
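dist.ppf maps points that are uniform on the unit square back to the estimated distribution (an inverse-CDF-style transformation), so the resulting samples should follow the estimated density. A quick visual check might look like the following illustrative addition:

# illustrative: the transformed samples should cluster where the density is high
plt.scatter(samples[:, 0], samples[:, 1], s=5)
plt.title("samples drawn via dist.ppf")
plt.show()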
Example #4
# dist = MultivariateNormal(mu, cov, 0, 1)  # problems in 3d/l2
# dist = J([Beta(5, 4, 0, 1)] * numDims)  # problems in 5d/l3
# dist = J([Lognormal(0.2, 0.7, 0, 1)] * numDims)  # problems in 5d/l3

trainSamples = dist.rvs(1000)
testSamples = dist.rvs(1000)

# plot analytic density
if numDims == 2 and plot:
    fig = plt.figure()
    plotDensity2d(dist)
    plt.title("analytic, kldivergence = %g" %
              dist.klDivergence(dist, testSamples))
    fig.show()

    fig, ax = plotDensity3d(dist)
    ax.set_xlabel(r"$x_1$")
    ax.set_ylabel(r"$x_2$")
    ax.set_zlabel(r"$f(x_1, x_2)$")
    fig.show()

# get a sparse grid approximation
grid = Grid.createGrid(gridConfig)
gridGen = grid.getGenerator()
gridGen.regular(level)
gs = grid.getStorage()

# now refine adaptively 5 times
p = DataVector(gs.getDimension())
alpha = DataVector(gs.getSize())
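The excerpt ends before the refinement loop announced in the comment above. Below is a minimal sketch of such a loop, assuming the usual pysgpp surplus-based refinement interface (SurplusRefinementFunctor together with the generator's refine method); the coefficient vector alpha would have to be recomputed by the density estimator after every step, which is not shown here.

from pysgpp import SurplusRefinementFunctor  # assumed top-level binding

# hypothetical sketch: refine the grid points with the largest absolute
# surpluses, five times in a row
for _ in range(5):
    gridGen.refine(SurplusRefinementFunctor(alpha, 5))
    # grow the coefficient vector to match the enlarged grid
    alpha.resizeZero(gs.getSize())
    # ... re-estimate the density on the refined grid to update alpha ...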