def test_more_than_one_glm_is_ok(self):
    """Two differently-named GLMs can be declared in the same model context."""
    with Model():
        # Building a second GLM must not collide with the first as long as
        # each gets a distinct `name` prefix.
        for submodel_name in ('glm1', 'glm2'):
            glm.glm('y ~ x', self.data_logistic,
                    family=glm.families.Binomial(link=glm.families.logit),
                    name=submodel_name)
def test_glm_link_func(self):
    """Posterior means of a logit-link binomial GLM recover the true parameters."""
    binomial_family = glm.families.Binomial(link=glm.families.logit)
    with Model() as model:
        glm.glm('y ~ x', self.data_logistic, family=binomial_family)
        trace = sample(1000, Slice(model.vars), progressbar=False,
                       random_seed=self.random_seed)
        # Compare to 1 decimal place: MCMC means are only approximately
        # equal to the simulated intercept/slope.
        self.assertAlmostEqual(np.mean(trace['Intercept']), self.intercept, 1)
        self.assertAlmostEqual(np.mean(trace['x']), self.slope, 1)
def test_glm(self):
    """Default (normal) GLM recovers intercept, slope and noise sd."""
    with Model() as model:
        glm.glm('y ~ x', self.data_linear)
        trace = sample(500, Slice(model.vars), progressbar=False,
                       random_seed=self.random_seed)
        # Loose 1-decimal check: sampler output only approximates the
        # parameters used to simulate the data.
        for varname, expected in (('Intercept', self.intercept),
                                  ('x', self.slope),
                                  ('sd', self.sd)):
            self.assertAlmostEqual(np.mean(trace[varname]), expected, 1)
import numpy as np
import scipy.optimize as opt

# BUG FIX: `np` was used below without being imported, and `Slice` is called
# in run() but was missing from the pymc3 import list (NameError).
from pymc3 import Model, Slice, find_MAP, glm, sample

# Generate synthetic linear data: y = intercept + slope*x + Gaussian noise.
size = 50
true_intercept = 1
true_slope = 2

x = np.linspace(0, 1, size)
y = true_intercept + x * true_slope + np.random.normal(scale=.5, size=size)
data = dict(x=x, y=y)

# Declare the GLM once at module level so run() can reuse the model context.
with Model() as model:
    glm.glm('y ~ x', data)


def run(n=2000):
    """Sample the GLM posterior and plot posterior-predictive lines.

    Parameters
    ----------
    n : int or str
        Number of MCMC samples; the sentinel string "short" maps to a
        quick 50-sample run (used by the example test harness).
    """
    if n == "short":
        n = 50
    # Imported lazily so merely importing this module needs no display backend.
    import matplotlib.pyplot as plt
    with model:
        # Start the sampler from the MAP estimate found with Powell's method.
        start = find_MAP(fmin=opt.fmin_powell)
        trace = sample(n, Slice(), start=start)
    plt.plot(x, y, 'x')
    glm.plot_posterior_predictive(trace)
    # plt.show()
import numpy as np
import scipy.optimize as opt

# BUG FIX: `np` was used below without being imported, and `Slice` is called
# in run() but was missing from the pymc3 import list (NameError).
from pymc3 import Model, Slice, find_MAP, glm, sample

# Generate synthetic linear data: y = intercept + slope*x + Gaussian noise.
size = 50
true_intercept = 1
true_slope = 2

x = np.linspace(0, 1, size)
y = true_intercept + x * true_slope + np.random.normal(scale=.5, size=size)
data = dict(x=x, y=y)

# Declare the GLM once at module level so run() can reuse the model context.
with Model() as model:
    glm.glm('y ~ x', data)


def run(n=2000):
    """Sample the GLM posterior and plot posterior-predictive lines.

    Parameters
    ----------
    n : int or str
        Number of MCMC samples; the sentinel string "short" maps to a
        quick 50-sample run (used by the example test harness).
    """
    if n == "short":
        n = 50
    # Imported lazily so merely importing this module needs no display backend.
    import matplotlib.pyplot as plt
    with model:
        # Start the sampler from the MAP estimate found with Powell's method.
        start = find_MAP(fmin=opt.fmin_powell)
        trace = sample(n, Slice(), start=start)
    plt.plot(x, y, 'x')
    glm.plot_posterior_predictive(trace)
    # plt.show()
# BUG FIX: the script used np, Model, sample, summary and traceplot without
# importing them (only `glm` was imported) — it failed with NameError.
import numpy as np
import pandas
import pylab as plt
from scipy.stats import norm, uniform

from pymc3 import Model, sample, summary, traceplot
from pymc3.glm import glm

# Data
np.random.seed(1056)          # set seed to replicate example
nobs = 250                    # number of obs in model
x1 = uniform.rvs(size=nobs)   # random uniform variable

beta0 = 2.0                   # intercept
beta1 = 3.0                   # angular coefficient

xb = beta0 + beta1 * x1                       # linear predictor, xb
y = norm.rvs(loc=xb, scale=1.0, size=nobs)    # create y as adjusted

# Fit
df = pandas.DataFrame({'x1': x1, 'y': y})     # re-write data

with Model() as model_glm:
    glm('y ~ x1', df)
    trace = sample(5000)

# Output
summary(trace)

# show graphical output
traceplot(trace)
plt.show()
import matplotlib.pyplot as plt
# BUG FIX: `np` is used throughout but numpy was never imported (NameError).
import numpy as np
import pandas as pd
import scipy

from pymc3 import *
from pymc3.glm import glm

# Initialize random number generator
np.random.seed(123)

# True parameter values
alpha, sigma = 1, 1
beta = [1, 2.5]

# Size of dataset
size = 100

# Predictor variables
X1 = np.random.randn(size)
X2 = np.random.randn(size) * 0.2

# Simulate outcome variable: linear combination of predictors plus noise.
Y = alpha + beta[0] * X1 + beta[1] * X2 + np.random.randn(size) * sigma

df = pd.DataFrame({'x1': X1, 'x2': X2, 'y': Y})

with Model() as model_glm:
    glm('y ~ x1 + x2', df)
    trace = sample(5000)

traceplot(trace)
# BUG FIX: `np.random.seed` is called below but numpy was never imported.
import numpy as np
import pandas
import pylab as plt
from scipy.stats import norm, uniform

from pymc3 import Model, sample, summary, traceplot
from pymc3.glm import glm

# Data
np.random.seed(1056)          # set seed to replicate example
nobs = 250                    # number of obs in model
x1 = uniform.rvs(size=nobs)   # random uniform variable

beta0 = 2.0                   # intercept
beta1 = 3.0                   # angular coefficient

xb = beta0 + beta1 * x1                       # linear predictor, xb
y = norm.rvs(loc=xb, scale=1.0, size=nobs)    # create y as adjusted

# Fit
df = pandas.DataFrame({'x1': x1, 'y': y})     # re-write data

with Model() as model_glm:
    glm('y ~ x1', df)
    trace = sample(5000)

# Output
summary(trace)

# show graphical output
traceplot(trace)
plt.show()