Beispiel #1
0
 def test_normal_mixture_hard(self):
     """Seven-component normal mixture with random sigmas: after 5000
     adversarial iterations the generator's parameters must match the
     true model's to within an absolute tolerance of 0.2."""
     np.random.seed(0)  # deterministic sigmas and training trajectory
     size_batch = 1000
     competition = AdversarialCompetition(
         size_batch=size_batch,
         true_model=GenerativeNormalMixtureModel(
             np.arange(-3, 4),
             np.random.uniform(1, 2, 7).round(2)),
         discriminative=pipeline.make_pipeline(
             preprocessing.PolynomialFeatures(4),
             linear_model.LogisticRegression()),
         generative=GenerativeNormalMixtureModel(np.arange(-3, 4) * 0.1,
                                                 np.ones(7),
                                                 updates=["mu", "sigma"]),
         gradient_descent=GradientDescent(np.array([0.3, 0.1, 0.3]).reshape(
             (-1, 1)),
                                          inertia=0.9,
                                          annealing=2000,
                                          last_learning_rate=0.001),
     )
     for i in range(5000):
         competition.iteration()
     params = competition.generatives[-1]._params
     # Fixed: was Python 2 `print params.shape`, a SyntaxError on Python 3.
     print(params.shape)
     true_params = competition.true_model._params
     # rtol=0: the comparison uses only the absolute tolerance 0.2.
     np.testing.assert_allclose(params, true_params, 0, 0.2)
Beispiel #2
0
 def test_normal_1000(self):
     """Fit a N(1, 2) target starting from N(0, 1) in 200 iterations,
     requiring the parameters to agree within abs tolerance 0.02."""
     np.random.seed(0)
     size_batch = 1000
     game = AdversarialCompetition(
         size_batch=size_batch,
         true_model=GenerativeNormalModel(1, 2),
         discriminative=pipeline.make_pipeline(
             preprocessing.PolynomialFeatures(4),
             linear_model.LogisticRegression()),
         generative=GenerativeNormalModel(0, 1, updates=["mu", "sigma"]),
         gradient_descent=GradientDescent(0.03, 0.9),
     )
     for _ in range(200):
         game.iteration()
     np.testing.assert_allclose(game.generatives[-1]._params,
                                game.true_model._params, 0, 0.02)
 def test_normal_1000(self):
     """Adversarially pull a unit normal toward N(mu=1, sigma=2)."""
     np.random.seed(0)
     size_batch = 1000
     # Degree-4 polynomial features feeding a logistic discriminator.
     discriminator = pipeline.make_pipeline(
         preprocessing.PolynomialFeatures(4),
         linear_model.LogisticRegression())
     competition = AdversarialCompetition(
         size_batch=size_batch,
         true_model=GenerativeNormalModel(1, 2),
         discriminative=discriminator,
         generative=GenerativeNormalModel(0, 1, updates=["mu", "sigma"]),
         gradient_descent=GradientDescent(0.03, 0.9),
     )
     for _ in range(200):
         competition.iteration()
     fitted = competition.generatives[-1]._params
     target = competition.true_model._params
     np.testing.assert_allclose(fitted, target, 0, 0.02)
 def test_normal_mixture(self):
     """Two-component mixture at -3/+3 recovered from a start at -1/+1
     within an absolute tolerance of 0.1 after 2000 iterations."""
     np.random.seed(0)
     size_batch = 1000
     contest = AdversarialCompetition(
         size_batch=size_batch,
         true_model=GenerativeNormalMixtureModel([-3, 3], [1, 1]),
         discriminative=pipeline.make_pipeline(
             preprocessing.PolynomialFeatures(4),
             linear_model.LogisticRegression()),
         generative=GenerativeNormalMixtureModel(
             [-1, 1], [1, 1], updates=["mu", "sigma"]),
         gradient_descent=GradientDescent(
             0.1, inertia=0.9, annealing=1000, last_learning_rate=0.01),
     )
     for _ in range(2000):
         contest.iteration()
     np.testing.assert_allclose(contest.generatives[-1]._params,
                                contest.true_model._params, 0, 0.1)
Beispiel #5
0
 def test_normal_mixture(self):
     """Recover a symmetric two-peak normal mixture within abs tol 0.1."""
     np.random.seed(0)
     size_batch = 1000
     # Annealed momentum descent for the generator's parameters.
     descent = GradientDescent(0.1,
                               inertia=0.9,
                               annealing=1000,
                               last_learning_rate=0.01)
     competition = AdversarialCompetition(
         size_batch=size_batch,
         true_model=GenerativeNormalMixtureModel([-3, 3], [1, 1]),
         discriminative=pipeline.make_pipeline(
             preprocessing.PolynomialFeatures(4),
             linear_model.LogisticRegression()),
         generative=GenerativeNormalMixtureModel([-1, 1], [1, 1],
                                                 updates=["mu", "sigma"]),
         gradient_descent=descent,
     )
     for _ in range(2000):
         competition.iteration()
     learned = competition.generatives[-1]._params
     expected = competition.true_model._params
     np.testing.assert_allclose(learned, expected, 0, 0.1)
 def test_normal_mixture_hard(self):
     """Seven-component mixture with random sigmas and per-parameter
     learning rates; means/sigmas must match the truth within 0.2."""
     np.random.seed(0)  # deterministic sigmas and training trajectory
     size_batch = 1000
     competition = AdversarialCompetition(
         size_batch=size_batch,
         true_model=GenerativeNormalMixtureModel(
             np.arange(-3, 4), np.random.uniform(1, 2, 7).round(2)),
         discriminative=pipeline.make_pipeline(
             preprocessing.PolynomialFeatures(4),
             linear_model.LogisticRegression()),
         generative=GenerativeNormalMixtureModel(
             np.arange(-3, 4) * 0.1, np.ones(7), updates=["mu", "sigma"]),
         gradient_descent=GradientDescent(
             np.array([0.3, 0.1, 0.3]).reshape((-1, 1)), inertia=0.9,
             annealing=2000, last_learning_rate=0.001),
     )
     for i in range(5000):
         competition.iteration()
     params = competition.generatives[-1]._params
     # Fixed: was Python 2 `print params.shape`, a SyntaxError on Python 3.
     print(params.shape)
     true_params = competition.true_model._params
     # rtol=0: the comparison uses only the absolute tolerance 0.2.
     np.testing.assert_allclose(params, true_params, 0, 0.2)
Beispiel #7
0
# Demo: adversarially fit a two-component normal mixture, plotting the
# fit every 200 iterations and the parameter/AUC history at the end.
np.random.seed(0)  # reproducible run
size_batch = 1000

competition = AdversarialCompetition(
    size_batch=size_batch,
    true_model=GenerativeNormalMixtureModel([-3, 3], [1, 1]),
    discriminative=pipeline.make_pipeline(preprocessing.PolynomialFeatures(4),
                                          linear_model.LogisticRegression()),
    generative=GenerativeNormalMixtureModel([-1, 1], [1, 1],
                                            updates=["mu", "sigma"]),
    gradient_descent=GradientDescent(0.01, 0.5),
)

print(competition)

for i in range(1000):
    # Visualize progress every 200 iterations.
    if i % 200 == 0:
        competition.plot()
        plt.show()
        # (removed a dead `pass` statement left after the plot calls)
    competition.iteration()

print("final model %s" % competition.generatives[-1])

competition.plot_params()
plt.show()

competition.plot_auc()
plt.show()
# Demo: fit a single normal N(1, 2) from a N(0, 1) start, plotting the
# fit every 50 iterations and the parameter/AUC history at the end.
size_batch = 1000

competition = AdversarialCompetition(
    size_batch=size_batch,
    true_model=GenerativeNormalModel(1, 2),
    discriminative=pipeline.make_pipeline(
        preprocessing.PolynomialFeatures(4),
        linear_model.LogisticRegression()),
    generative=GenerativeNormalModel(
        0, 1, updates=["mu", "sigma"]),
    gradient_descent=GradientDescent(
        0.03, inertia=0.0, annealing=100),
)

print(competition)

for i in range(200):
    # Visualize progress every 50 iterations.
    if i % 50 == 0:
        competition.plot()
        plt.show()
        # (removed a dead `pass` statement left after the plot calls)
    competition.iteration()

print("final model %s" % competition.generatives[-1])

competition.plot_params()
plt.show()

competition.plot_auc()
plt.show()
Beispiel #9
0
# NOTE(review): mid-script fragment -- `competition`, `competition1`,
# `competition2`, `mu_param` and `sigma_param` must be defined earlier
# in the full script; confirm before running this chunk standalone.
print(competition)

# Run one pooled and two separated competitions in lockstep, snapshotting
# their fits to PNG files every 50 iterations.
for i in range(1001):
    if i % 50 == 0:
        plt.figure()
        competition.plot()
        # NOTE(review): mixes the `plt` and `pyplot` aliases -- presumably
        # both name matplotlib.pyplot; verify the script's imports.
        pyplot.savefig('Pooling.png')
        pyplot.close()
        plt.figure()
        competition1.plot()
        competition2.plot()
        pyplot.savefig('Separating.png')
        pyplot.close()
        pass
    competition.iteration()
    competition1.iteration()
    competition2.iteration()

print("final model pooling %s" % competition.generatives[-1])
print("final model separating %s" % competition1.generatives[-1], competition2.generatives[-1])

# Build standalone models from the final fitted parameters for comparison.
#separated = GenerativeNormalMixtureModel([competition1.generatives[-1].params["mu"], competition2.generatives[-1].params["mu"]], [competition1.generatives[-1].params["sigma"], competition2.generatives[-1].params["sigma"]])
separated1 = GenerativeNormalModel(competition1.generatives[-1].params["mu"], competition1.generatives[-1].params["sigma"])
separated2 = GenerativeNormalModel(competition2.generatives[-1].params["mu"], competition2.generatives[-1].params["sigma"])
pooled = GenerativeNormalQuasiMixtureModel(competition.generatives[-1].params["mu"], competition.generatives[-1].params["sigma"])
#true_mixture_model = GenerativeNormalMixtureModel([mu_param, -mu_param], [sigma_param, sigma_param])
true_mixture_model2 = GenerativeNormalQuasiMixtureModel(mu_param,sigma_param)

# Grid of x values (column vector) for density plotting below.
plt.figure()
xplot = np.arange(-10, 10, 0.1).reshape((-1, 1))