Example #1
import numpy as np
import pymc


def CauchyModel(size, snratio, g, g_true):

    ### shear calibration parameters: multiplicative bias m and additive bias c

    shearcal_m = pymc.Uniform('shearcal_m', -1, 1)
    shearcal_c = pymc.Uniform('shearcal_c', -0.05, 0.05)

    @pymc.deterministic
    def alpha(m=shearcal_m, c=shearcal_c):
        return (1 + m) * g_true + c

    ### Cauchy scale parameter beta, sampled on a log scale

    logbeta = pymc.Uniform('logbeta', -7, 2.5)

    @pymc.deterministic
    def beta(logbeta=logbeta):
        return np.exp(logbeta)

    ### Cauchy likelihood for the observed shear estimates g

    data = pymc.Cauchy('data', alpha=alpha, beta=beta, value=g, observed=True)

    ###

    return locals()
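
A minimal sampling sketch for this model; the data arrays, iteration counts, and the placeholder arguments for the unused `size`/`snratio` parameters are illustrative assumptions, not part of the original example:

g_true = np.random.normal(0.0, 0.2, size=1000)        # hypothetical true shears
g = g_true + 0.05 * np.random.standard_cauchy(1000)   # hypothetical noisy estimates

M = pymc.MCMC(CauchyModel(g.size, None, g, g_true))
M.sample(iter=20000, burn=2000)
print(M.trace('shearcal_m')[:].mean(), M.trace('shearcal_c')[:].mean())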
Example #2
import pymc


def lighthouse(data):
    # Uniform priors
    x = pymc.Uniform('x', lower=0, upper=10)
    y = pymc.Uniform('y', lower=0, upper=10)

    # Likelihood
    L = pymc.Cauchy('L', alpha=x, beta=y, value=data, observed=True)

    return pymc.Model([x, y, L])
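
A minimal sketch of running this model; the synthetic data array and sampler settings are assumptions for illustration:

import numpy as np

data = 5.0 + 2.0 * np.random.standard_cauchy(200)   # hypothetical flash positions
M = pymc.MCMC(lighthouse(data))
M.sample(iter=20000, burn=5000)
print(M.trace('x')[:].mean(), M.trace('y')[:].mean())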
Example #3
import numpy as np
import pymc

# TransProbMatrix, HMMStateSeq, HMMLinearCombination and NormalHMMInitialParams
# are custom classes defined elsewhere in this project.


def make_normal_baseline_hmm(y_data, X_data, baseline_end, initial_params):
    """ Construct a PyMC2 scalar normal-emmisions HMM with a
    stochastic reporting period start time parameter and baseline, reporting
    parameters for all other stochastics/estimated terms in the model.
    The reporting period start time parameter is given a discrete uniform
    distribution starting from the first observation after the baseline to the
    end of the series.

    Parameters
    ==========
    y_data: pandas.DataFrame
        Usage/response observations.
    X_data: list of pandas.DataFrame
        List of design matrices for each state.  Each must
        span the entire length of observations (i.e. `y_data`).
    baseline_end: pandas.tslib.Timestamp
        End of baseline period (inclusive), beginning of reporting period.
    initial_params: NormalHMMInitialParams
        An object providing the initial parameter fields/members used below
        (alpha_trans, trans_mat, states, p0, betas, Ws, Vs).

    Returns
    =======
    A pymc.Model object used for sampling.
    """

    N_states = len(X_data)
    N_obs = X_data[0].shape[0]

    alpha_trans = initial_params.alpha_trans

    # TODO: If we wanted a distribution over the time
    # when a renovation becomes effective...
    baseline_idx = X_data[0].index.get_loc(baseline_end)
    reporting_start = pymc.DiscreteUniform("reporting_start",
                                           baseline_idx + 1,
                                           N_obs,
                                           value=baseline_idx + 1)

    trans_mat_baseline = TransProbMatrix("trans_mat_baseline",
                                         alpha_trans,
                                         value=initial_params.trans_mat)
    trans_mat_reporting = TransProbMatrix("trans_mat_reporting",
                                          alpha_trans,
                                          value=initial_params.trans_mat)

    @pymc.deterministic(trace=True, plot=False)
    def N_baseline(rs_=reporting_start):
        return rs_ - 1

    states_baseline_0 = initial_params.states[slice(0, baseline_idx)]
    states_baseline = HMMStateSeq("states_baseline",
                                  trans_mat_baseline,
                                  N_baseline,
                                  p0=initial_params.p0,
                                  value=states_baseline_0)

    @pymc.deterministic(trace=True, plot=False)
    def N_reporting(rs_=reporting_start):
        return N_obs - rs_

    states_reporting_0 = initial_params.states[slice(baseline_idx, N_obs)]
    # TODO, FIXME: p0 should depend on states_baseline and trans_mat_baseline,
    # no?
    states_reporting = HMMStateSeq("states_reporting",
                                   trans_mat_reporting,
                                   N_reporting,
                                   p0=initial_params.p0,
                                   value=states_reporting_0)

    @pymc.deterministic(trace=True, plot=False)
    def states(sb_=states_baseline, sr_=states_reporting):
        return np.concatenate([sb_, sr_])

    Ws = initial_params.Ws
    betas = [[], []]
    for s in range(N_states):
        size_s = len(initial_params.betas[s])
        baseline_beta_s = pymc.Cauchy('base-beta-{}'.format(s),
                                      initial_params.betas[s],
                                      Ws[s],
                                      value=initial_params.betas[s],
                                      size=size_s if size_s > 1 else None)
        betas[0] += [baseline_beta_s]

        reporting_beta_s = pymc.Cauchy('rep-beta-{}'.format(s),
                                       initial_params.betas[s],
                                       Ws[s],
                                       value=initial_params.betas[s],
                                       size=size_s if size_s > 1 else None)
        betas[1] += [reporting_beta_s]

    del s, baseline_beta_s, reporting_beta_s, size_s

    Vs = initial_params.Vs

    mu = HMMLinearCombination('mu', X_data, betas, states)

    @pymc.deterministic(trace=False, plot=False)
    def V(states_=states, V_=Vs):
        return V_[states_]

    if y_data is not None:
        y_data = np.ma.masked_invalid(y_data).astype(object)
        y_data.set_fill_value(None)

    y_rv = pymc.Normal('y',
                       mu,
                       1. / V,
                       value=y_data,
                       observed=True if y_data is not None else False)

    del initial_params

    return pymc.Model(locals())
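
A hedged sketch of how this constructor might be driven; the data frames, baseline date and NormalHMMInitialParams instance are placeholders that would come from the surrounding project:

# hypothetical inputs; the real objects are built elsewhere in the project
# y_data:        pandas.DataFrame of observations
# X_data:        list of per-state design matrices spanning the same index
# baseline_end:  Timestamp marking the end of the baseline period
# initial_params: a NormalHMMInitialParams instance
hmm_model = make_normal_baseline_hmm(y_data, X_data, baseline_end, initial_params)

M = pymc.MCMC(hmm_model)
M.sample(iter=10000, burn=2000)          # iteration counts are illustrative
reporting_start_trace = M.trace('reporting_start')[:]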
Example #4
import numpy as np
import pymc

#----------------------------------------------------------------------
# Perform MCMC:

# set up our Stochastic variables, mu and gamma
mu = pymc.Uniform('mu', -5, 5)
log_gamma = pymc.Uniform('log_gamma', -10, 10, value=0)


@pymc.deterministic
def gamma(log_gamma=log_gamma):
    return np.exp(log_gamma)


# set up our observed variable x (xi is the array of observed samples,
# assumed to be defined earlier in the script)
x = pymc.Cauchy('x', mu, gamma, observed=True, value=xi)

# set up our model dictionary
model = dict(mu=mu, log_gamma=log_gamma, gamma=gamma, x=x)

# perform the MCMC
S = pymc.MCMC(model)
S.sample(iter=50000, burn=5000)

# extract the traces we're interested in
trace_mu = S.trace('mu')[:]
trace_gamma = S.trace('gamma')[:]

# compute histogram of results to plot below
L_MCMC, mu_bins, gamma_bins = np.histogram2d(trace_mu, trace_gamma,
                                             bins=50)  # bin count assumed; the original call is truncated here
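
A minimal plotting sketch for the binned posterior; the matplotlib usage and labels are assumptions, since the original figure code is not shown:

import matplotlib.pyplot as plt

mu_centers = 0.5 * (mu_bins[:-1] + mu_bins[1:])
gamma_centers = 0.5 * (gamma_bins[:-1] + gamma_bins[1:])

# histogram2d returns counts indexed [mu, gamma], so transpose for contour
plt.contour(mu_centers, gamma_centers, L_MCMC.T)
plt.xlabel('mu')
plt.ylabel('gamma')
plt.show()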
Example #5
# Import relevant modules
import pymc
import numpy as np


def hit(n, a, b):
    # The tangent of a uniformly distributed angle follows a standard Cauchy
    # distribution, so this returns n draws from a Cauchy with location a and
    # scale b.
    h = np.tan(np.random.uniform(low=np.pi / 2., high=3 * np.pi / 2.,
                                 size=n)) * b + a
    return h


# Some data
n = 300
a = 1.
b = 1.5

# Priors on unknown parameters
alpha = pymc.Uniform('pa', lower=0.0, upper=2.0)
#alpha = pymc.Normal('pa', mu=1.0, tau=1/.3**2)

beta = pymc.Uniform('pb', lower=0.0, upper=2.0)
#beta = pymc.Normal('pb', mu=1.0, tau=1/.3**2)

h = hit(n, a, b)

# Cauchy likelihood for the data
hh = pymc.Cauchy('hits', alpha=alpha, beta=beta, value=h, observed=True)
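
A minimal sampling sketch for this model; the sampler settings are illustrative assumptions:

M = pymc.MCMC([alpha, beta, hh])
M.sample(iter=20000, burn=2000)
print(M.trace('pa')[:].mean(), M.trace('pb')[:].mean())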