Example #1
    def test_linear_component(self):
        vars_to_create = {
            "sigma", "sigma_interval__", "y_obs", "lm_x0", "lm_Intercept"
        }
        with Model() as model:
            lm = LinearComponent(self.data_linear["x"],
                                 self.data_linear["y"],
                                 name="lm")  # yields lm_x0, lm_Intercept
            sigma = Uniform("sigma", 0, 20)  # yields sigma_interval__
            Normal("y_obs", mu=lm.y_est, sigma=sigma,
                   observed=self.y_linear)  # yields y_obs
            start = find_MAP(vars=[sigma])
            step = Slice(model.vars)
            trace = sample(500,
                           tune=0,
                           step=step,
                           start=start,
                           progressbar=False,
                           random_seed=self.random_seed)

            assert round(abs(np.mean(trace["lm_Intercept"]) - self.intercept),
                         1) == 0
            assert round(abs(np.mean(trace["lm_x0"]) - self.slope), 1) == 0
            assert round(abs(np.mean(trace["sigma"]) - self.sd), 1) == 0
        assert vars_to_create == set(model.named_vars.keys())
Example #2
    def test_linear_component(self):
        vars_to_create = {
            'sigma', 'sigma_interval__', 'y_obs', 'lm_x0', 'lm_Intercept'
        }
        with Model() as model:
            lm = LinearComponent(self.data_linear['x'],
                                 self.data_linear['y'],
                                 name='lm')  # yields lm_x0, lm_Intercept
            sigma = Uniform('sigma', 0, 20)  # yields sigma_interval__
            Normal('y_obs', mu=lm.y_est, sigma=sigma,
                   observed=self.y_linear)  # yields y_obs
            start = find_MAP(vars=[sigma])
            step = Slice(model.vars)
            trace = sample(500,
                           tune=0,
                           step=step,
                           start=start,
                           progressbar=False,
                           random_seed=self.random_seed)

            assert round(abs(np.mean(trace['lm_Intercept']) - self.intercept),
                         1) == 0
            assert round(abs(np.mean(trace['lm_x0']) - self.slope), 1) == 0
            assert round(abs(np.mean(trace['sigma']) - self.sd), 1) == 0
        assert vars_to_create == set(model.named_vars.keys())
Example #3
    def test_glm_offset(self):
        offset = 1.0
        with Model() as model:
            GLM.from_formula("y ~ x", self.data_linear, offset=offset)
            step = Slice(model.vars)
            trace = sample(500, step=step, tune=0, progressbar=False, random_seed=self.random_seed)

            assert round(abs(np.mean(trace["Intercept"]) - self.intercept + offset), 1) == 0
Example #4
    def test_glm_link_func(self):
        with Model() as model:
            GLM.from_formula('y ~ x', self.data_logistic,
                    family=families.Binomial(link=families.logit))
            step = Slice(model.vars)
            trace = sample(1000, step, progressbar=False, random_seed=self.random_seed)

            assert round(abs(np.mean(trace['Intercept'])-self.intercept), 1) == 0
            assert round(abs(np.mean(trace['x'])-self.slope), 1) == 0
Example #5
    def test_glm(self):
        with Model() as model:
            GLM.from_formula('y ~ x', self.data_linear)
            step = Slice(model.vars)
            trace = sample(500, step, progressbar=False, random_seed=self.random_seed)

            assert round(abs(np.mean(trace['Intercept'])-self.intercept), 1) == 0
            assert round(abs(np.mean(trace['x'])-self.slope), 1) == 0
            assert round(abs(np.mean(trace['sd'])-self.sd), 1) == 0
Example #6
    def test_glm(self):
        with Model() as model:
            GLM.from_formula("y ~ x", self.data_linear)
            step = Slice(model.vars)
            trace = sample(500, step=step, tune=0, progressbar=False, random_seed=self.random_seed)

            assert round(abs(np.mean(trace["Intercept"]) - self.intercept), 1) == 0
            assert round(abs(np.mean(trace["x"]) - self.slope), 1) == 0
            assert round(abs(np.mean(trace["sd"]) - self.sd), 1) == 0
Example #7
    def test_glm_from_formula(self):
        with Model() as model:
            NAME = 'glm'
            GLM.from_formula('y ~ x', self.data_linear, name=NAME)
            start = find_MAP()
            step = Slice(model.vars)
            trace = sample(500, step=step, start=start, progressbar=False, random_seed=self.random_seed)

            self.assertAlmostEqual(np.mean(trace['%s_Intercept' % NAME]), self.intercept, 1)
            self.assertAlmostEqual(np.mean(trace['%s_x' % NAME]), self.slope, 1)
            self.assertAlmostEqual(np.mean(trace['%s_sd' % NAME]), self.sd, 1)
Example #8
    def test_linear_component_from_formula(self):
        with Model() as model:
            lm = LinearComponent.from_formula('y ~ x', self.data_linear)
            sigma = Uniform('sigma', 0, 20)
            Normal('y_obs', mu=lm.y_est, sd=sigma, observed=self.y_linear)
            start = find_MAP(vars=[sigma])
            step = Slice(model.vars)
            trace = sample(500, step=step, start=start, progressbar=False, random_seed=self.random_seed)

            self.assertAlmostEqual(np.mean(trace['Intercept']), self.intercept, 1)
            self.assertAlmostEqual(np.mean(trace['x']), self.slope, 1)
            self.assertAlmostEqual(np.mean(trace['sigma']), self.sd, 1)
Example #9
    def test_glm(self):
        with Model() as model:
            vars_to_create = {"glm_sd", "glm_sd_log__", "glm_y", "glm_x0", "glm_Intercept"}
            GLM(self.data_linear["x"], self.data_linear["y"], name="glm")
            start = find_MAP()
            step = Slice(model.vars)
            trace = sample(
                500, tune=0, step=step, start=start, progressbar=False, random_seed=self.random_seed
            )
            assert round(abs(np.mean(trace["glm_Intercept"]) - self.intercept), 1) == 0
            assert round(abs(np.mean(trace["glm_x0"]) - self.slope), 1) == 0
            assert round(abs(np.mean(trace["glm_sd"]) - self.sigma), 1) == 0
            assert vars_to_create == set(model.named_vars.keys())
Example #10
    def test_glm(self):
        with Model() as model:
            glm.glm('y ~ x', self.data_linear)
            step = Slice(model.vars)
            trace = sample(500,
                           step,
                           progressbar=False,
                           random_seed=self.random_seed)

            self.assertAlmostEqual(np.mean(trace['Intercept']), self.intercept,
                                   1)
            self.assertAlmostEqual(np.mean(trace['x']), self.slope, 1)
            self.assertAlmostEqual(np.mean(trace['sd']), self.sd, 1)
Example #11
    def test_glm_from_formula(self):
        with Model() as model:
            NAME = "glm"
            GLM.from_formula("y ~ x", self.data_linear, name=NAME)
            start = find_MAP()
            step = Slice(model.vars)
            trace = sample(
                500, tune=0, step=step, start=start, progressbar=False, random_seed=self.random_seed
            )

            assert round(abs(np.mean(trace["%s_Intercept" % NAME]) - self.intercept), 1) == 0
            assert round(abs(np.mean(trace["%s_x" % NAME]) - self.slope), 1) == 0
            assert round(abs(np.mean(trace["%s_sd" % NAME]) - self.sigma), 1) == 0
Example #12
def test_multichain_plots():

    from pymc3 import Metropolis, Slice, sample, forestplot, autocorrplot
    from pymc3.examples import disaster_model as dm

    with dm.model as model:
        # Run sampler
        step1 = Slice([dm.early_mean, dm.late_mean])
        step2 = Metropolis([dm.switchpoint])
        start = {'early_mean': 2., 'late_mean': 3., 'switchpoint': 50}
        ptrace = sample(1000, [step1, step2], start, njobs=2)

    forestplot(ptrace, varnames=['early_mean', 'late_mean'])

    autocorrplot(ptrace, varnames=['switchpoint'])
Example #13
    def test_linear_component(self):
        with Model() as model:
            lm = LinearComponent.from_formula("y ~ x", self.data_linear)
            sigma = Uniform("sigma", 0, 20)
            Normal("y_obs", mu=lm.y_est, sigma=sigma, observed=self.y_linear)
            start = find_MAP(vars=[sigma])
            step = Slice(model.vars)
            trace = sample(
                500, tune=0, step=step, start=start, progressbar=False, random_seed=self.random_seed
            )

            assert round(abs(np.mean(trace["Intercept"]) - self.intercept), 1) == 0
            assert round(abs(np.mean(trace["x"]) - self.slope), 1) == 0
            assert round(abs(np.mean(trace["sigma"]) - self.sd), 1) == 0
Example #14
    def test_glm_link_func(self):
        with Model() as model:
            glm.glm('y ~ x',
                    self.data_logistic,
                    family=glm.families.Binomial(link=glm.families.logit))
            step = Slice(model.vars)
            trace = sample(1000,
                           step,
                           progressbar=False,
                           random_seed=self.random_seed)

            self.assertAlmostEqual(np.mean(trace['Intercept']), self.intercept,
                                   1)
            self.assertAlmostEqual(np.mean(trace['x']), self.slope, 1)
Example #15
    def test_glm(self):
        with Model() as model:
            vars_to_create = {
                'glm_sd_log__', 'glm_y', 'glm_x0', 'glm_Intercept'
            }
            GLM(self.data_linear['x'], self.data_linear['y'], name='glm')
            start = find_MAP()
            step = Slice(model.vars)
            trace = sample(500,
                           step=step,
                           start=start,
                           progressbar=False,
                           random_seed=self.random_seed)
            assert round(abs(np.mean(trace['glm_Intercept']) - self.intercept),
                         1) == 0
            assert round(abs(np.mean(trace['glm_x0']) - self.slope), 1) == 0
            assert round(abs(np.mean(trace['glm_sd']) - self.sd), 1) == 0
            assert vars_to_create == set(model.named_vars.keys())
Example #16
    def test_glm(self):
        with Model() as model:
            vars_to_create = {
                'glm_sd_log_', 'glm_y', 'glm_x0', 'glm_Intercept'
            }
            Glm(self.data_linear['x'], self.data_linear['y'], name='glm')
            start = find_MAP()
            step = Slice(model.vars)
            trace = sample(500,
                           step,
                           start,
                           progressbar=False,
                           random_seed=self.random_seed)
            self.assertAlmostEqual(np.mean(trace['glm_Intercept']),
                                   self.intercept, 1)
            self.assertAlmostEqual(np.mean(trace['glm_x0']), self.slope, 1)
            self.assertAlmostEqual(np.mean(trace['glm_sd']), self.sd, 1)
            self.assertSetEqual(vars_to_create, set(model.named_vars.keys()))
Example #17
basic_model = Model()
with basic_model:
    p = Uniform("freq_cheating", 0, 1)
    true_answers = Bernoulli("truths", p)
    first_coin_flips = Bernoulli("first_flips", 0.5)
    second_coin_flips = Bernoulli("second_flips", 0.5)

    determin_val1 = Deterministic(
        'determin_val1', first_coin_flips * true_answers +
        (1 - first_coin_flips) * second_coin_flips)
    determin_val = determin_val1.sum() / float(N)

    start = find_MAP(fmin=optimize.fmin_powell)

    # instantiate sampler
    step = Slice(vars=[true_answers])
    # draw 100 posterior samples
    trace = sample(100, step=step, start=start)

    step = Slice(vars=[first_coin_flips])
    # draw 100 posterior samples
    trace = sample(100, step=step, start=start)

    step = Slice(vars=[second_coin_flips])
    # draw 100 posterior samples
    trace = sample(100, step=step, start=start)

    #print(first_coin_flips.getattr_value())
    print(determin_val)

map_estimate = find_MAP(model=basic_model)
Example #18
from pymc3 import find_MAP

map_estimate = find_MAP(model=basic_model)

print(map_estimate)

from pymc3 import sample
from pymc3 import Slice
from scipy import optimize

with basic_model:

    # obtain starting values via MAP
    start = find_MAP(fmin=optimize.fmin_powell)

    # instantiate sampler
    step = Slice(vars=[sigma])

    # draw 5000 posterior samples
    trace = sample(5000, step=step, start=start)

from pymc3 import traceplot, summary

print(trace['vel'][-5:])  # show the last five posterior samples of 'vel'
traceplot(trace)
summary(trace)
plt.show()

print(np.mean(Y), np.std(Y))
Example #19
aa = dict(N0=10000,
          I2=I[1],
          I3=I[2],
          I4=I[3],
          I5=I[4],
          I6=I[5],
          I7=I[6],
          I8=I[7],
          I9=I[8],
          I10=I[9],
          beta=0.0005,
          reporting=0.7,
          effprop=0.1)
print(aa)
with basic_model:

    # obtain starting values via MAP
    # start = find_MAP(fmin=optimize.fmin_powell)
    start = aa

    # instantiate sampler
    step = Slice(vars=[
        N0, I2, I3, I4, I5, I6, I7, I8, I9, I10, effprop, reporting, beta
    ])

    # draw 1000 posterior samples
    trace = sample(1000, step=step, start=start)

summary(trace)
Example #20
print("Search of parameters using Slice/Metropolis.")

basic_model = Model()

with basic_model:

    # Priors for unknown model parameters
    rule_firing = HalfNormal('rule_firing', sd=2)
    lf = HalfNormal('lf', sd=2)

    sigma = HalfNormal('sigma', sd=1)

    # Deterministic value computed by the model function
    mu = model(rule_firing, lf)

    # Likelihood (sampling distribution) of observations
    Normal('Y_obs', mu=mu, sd=sigma, observed=Y)

    # Slice is the appropriate sampler for continuous variables, but it can
    # occasionally get stuck. Metropolis also works; however, it is normally
    # used for discrete variables, so its estimates might be off.
    # step = Metropolis(basic_model.vars, .5)

    step = Slice(basic_model.vars)

    trace = sample(500, step, njobs=1, init='MAP')

    summary(trace)

print(
    "Of course, many more things can be explored this way: more parameters "
    "could be studied, their priors could be adjusted further, etc."
)