    # Assumed module-level imports: numpy as np, pymc3 as pm, and
    # theano.tensor as tt (used for tt.diag below).
    def __init__(self,
                 X,
                 a_alpha=1e-3,
                 b_alpha=1e-3,
                 a_tau=1e-3,
                 b_tau=1e-3,
                 beta=1e-3):
        # data: N samples of dimension d; latent dimension q = d - 1
        self.X = X
        self.d = self.X.shape[1]
        self.N = self.X.shape[0]
        self.q = self.d - 1

        # hyperparameters
        self.a_alpha = a_alpha
        self.b_alpha = b_alpha
        self.a_tau = a_tau
        self.b_tau = b_tau
        self.beta = beta

        with pm.Model() as model:
            # latent coordinates: z_n ~ N(0, I_q) for each of the N samples
            z = pm.MvNormal('z',
                            mu=np.zeros(self.q),
                            cov=np.eye(self.q),
                            shape=(self.N, self.q))
            mu = pm.MvNormal('mu',
                             mu=np.zeros(self.d),
                             cov=np.eye(self.d) / self.beta,
                             shape=self.d)
            alpha = pm.Gamma('alpha',
                             alpha=self.a_alpha,
                             beta=self.b_alpha,
                             shape=self.q)
            # ARD prior: column j of w is shrunk by precision alpha[j]
            w = pm.MatrixNormal('w',
                                mu=np.zeros((self.d, self.q)),
                                rowcov=np.eye(self.d),
                                colcov=tt.diag(1 / alpha),
                                shape=(self.d, self.q))
            tau = pm.Gamma('tau', alpha=self.a_tau, beta=self.b_tau)
            # likelihood mean: X = Z W^T + mu, with isotropic noise precision tau
            x = pm.math.dot(z, w.T) + mu
            obs_x = pm.MatrixNormal('obs_x',
                                    mu=x,
                                    rowcov=np.eye(self.N),
                                    colcov=np.eye(self.d) / tau,
                                    shape=(self.N, self.d),
                                    observed=self.X)

        self.model = model
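# Hedged usage sketch: assumes the __init__ above belongs to a class named
# BayesianPCA (the class name is hypothetical) and X is an (N, d) array.
import numpy as np
import pymc3 as pm

X = np.random.randn(100, 5)
bpca = BayesianPCA(X)
with bpca.model:
    trace = pm.sample(500, tune=500, chains=1)  # posterior over z, w, mu, alpha, tau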
Example #2
    @classmethod
    def _sample_pymc3(cls, dist, size, seed):
        """Sample from PyMC3."""

        import pymc3
        # matrix2numpy (from sympy) converts a SymPy Matrix to a NumPy array
        pymc3_rv_map = {
            'MatrixNormalDistribution':
            lambda dist: pymc3.MatrixNormal(
                'X',
                mu=matrix2numpy(dist.location_matrix, float),
                rowcov=matrix2numpy(dist.scale_matrix_1, float),
                colcov=matrix2numpy(dist.scale_matrix_2, float),
                shape=dist.location_matrix.shape),
            'WishartDistribution':
            lambda dist: pymc3.WishartBartlett(
                'X', nu=int(dist.n), S=matrix2numpy(dist.scale_matrix, float))
        }

        dist_list = pymc3_rv_map.keys()

        if dist.__class__.__name__ not in dist_list:
            return None

        with pymc3.Model():
            pymc3_rv_map[dist.__class__.__name__](dist)
            return pymc3.sample(draws=size, chains=1, progressbar=False,
                                random_seed=seed)['X']
Example #3
# Assumed imports for this snippet: numpy as nmp, pymc3 as pmc,
# theano.tensor as tns, and a project-local helper module `aux`.
def get_model(x, r, R, vaf0, K=10):
    nsamples = r.shape[1]
    # broadcast read counts r, coverage R, and baseline VAF to (nmut, nsamples, 1)
    r, R, vaf0 = r[:, :, None], R[:, :, None], vaf0[:, :, None]
    idxs = aux.corr_vector_to_matrix_indices(K)
    with pmc.Model() as model:
        w = pmc.Dirichlet('w', nmp.ones(K))
        lw = tns.log(w)

        # alpha = pmc.Gamma('alpha', 1.0, 1.0)
        # u = pmc.Beta('u', 1.0, alpha, shape=K-1)
        # lw = aux.stick_breaking_log(u)

        rho = pmc.Gamma('rho', 1.0, 1.0)
        # column correlations across the K clusters (LKJ prior), diagonal set to 1
        Cc = tns.fill_diagonal(pmc.LKJCorr('C', eta=2.0, n=K)[idxs], 1.0)
        # row covariance across samples: squared-exponential kernel on x
        Cr = aux.cov_quad_exp(x, 1.0, rho)
        mu_psi = pmc.MatrixNormal('mu_psi',
                                  mu=nmp.zeros((nsamples, K)),
                                  rowcov=Cr,
                                  colcov=Cc,
                                  shape=(nsamples, K))
        psi = pmc.Normal('psi', mu=mu_psi, sd=0.1, shape=(nsamples, K))
        phi = pmc.Deterministic('phi', pmc.invlogit(psi))

        # psi = pmc.MvNormal('psi', mu=nmp.zeros(K), tau=nmp.eye(K), shape=(nsamples, K))
        # phi = pmc.Deterministic('phi', pmc.invlogit(psi))

        theta = pmc.Deterministic('theta', vaf0 * phi[None, :, :])
        pmc.DensityDist('r', aux.binmixND_logp_fcn(R, theta, lw), observed=r)
    return model
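# Hedged usage sketch: r and R are (n_mutations, n_samples) count arrays,
# vaf0 the matching baseline VAFs, and x the per-sample inputs expected by
# aux.cov_quad_exp.
with get_model(x, r, R, vaf0, K=10):
    trace = pmc.sample(1000, tune=1000, chains=1)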
Example #4
    def marginal_likelihood(self, name, X, y, colchol, noise, matrix_shape,
                            is_observed=True, **kwargs):
        R"""
        Returns the marginal likelihood distribution, given the input
        locations `X` and the data `y`.

        This is the integral over the product of the GP prior and a normal
        likelihood.

        .. math::

           y \mid X,\theta \sim \int p(y \mid f,\, X,\, \theta) \, p(f \mid X,\, \theta) \, df

        Parameters
        ----------
        name: string
            Name of the random variable
        X: array-like
            Function input values.  If one-dimensional, must be a column
            vector with shape `(n, 1)`.
        y: array-like
            Data that is the sum of the function with the GP prior and Gaussian
            noise.  Must have shape `(n, )`.
        colchol: array-like
            Cholesky factor of the column covariance, passed to `MatrixNormal`
            when `is_observed=True`.
        noise: scalar, Variable, or Covariance
            Standard deviation of the Gaussian noise.  Can also be a Covariance
            for non-white noise.
        matrix_shape: tuple of int
            Shape `(rows, cols)` of the observed matrix `y`.
        is_observed: bool
            Whether to set `y` as an `observed` variable in the `model`.
            Default is `True`.
        **kwargs
            Extra keyword arguments that are passed to the `MatrixNormal` (or
            `MvNormal`) distribution constructor.
        """

        if not isinstance(noise, Covariance):
            noise = pm.gp.cov.WhiteNoise(noise)
        mu, cov = self._build_marginal_likelihood(X, y, noise)
        self.X = X
        self.y = y
        self.noise = noise

        # Note: the shape of the observed y is fixed by matrix_shape

        if is_observed:
            return pm.MatrixNormal(name,
                                   mu=mu,
                                   colchol=colchol,
                                   rowcov=cov,
                                   observed=y,
                                   shape=(matrix_shape[0], matrix_shape[1]),
                                   **kwargs)
        else:
            shape = infer_shape(X, kwargs.pop("shape", None))
            return pm.MvNormal(name, mu=mu, cov=cov, shape=shape, **kwargs)
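# Hedged usage sketch: assumes the marginal_likelihood above lives on a
# Marginal-style GP class, called MatrixMarginal here (hypothetical name).
import numpy as np
import pymc3 as pm

X = np.linspace(0, 1, 20)[:, None]   # (n, 1) input locations
Y = np.random.randn(20, 3)           # (n, p) observed matrix
Ccol = np.eye(3)                     # column covariance
with pm.Model():
    gp = MatrixMarginal(cov_func=pm.gp.cov.ExpQuad(1, ls=1.0))
    y_ = gp.marginal_likelihood('y', X=X, y=Y,
                                colchol=np.linalg.cholesky(Ccol),
                                noise=0.1, matrix_shape=Y.shape)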
Example #5
    @classmethod
    def _sample_pymc3(cls, dist, size, seed):
        """Sample from PyMC3."""

        import pymc3
        pymc3_rv_map = {
            'MatrixNormalDistribution':
            lambda dist: pymc3.MatrixNormal(
                'X',
                mu=matrix2numpy(dist.location_matrix, float),
                rowcov=matrix2numpy(dist.scale_matrix_1, float),
                colcov=matrix2numpy(dist.scale_matrix_2, float),
                shape=dist.location_matrix.shape),
            'WishartDistribution':
            lambda dist: pymc3.WishartBartlett(
                'X', nu=int(dist.n), S=matrix2numpy(dist.scale_matrix, float))
        }

        sample_shape = {
            'WishartDistribution': lambda dist: dist.scale_matrix.shape,
            'MatrixNormalDistribution': lambda dist: dist.location_matrix.shape
        }

        dist_list = pymc3_rv_map.keys()

        if dist.__class__.__name__ not in dist_list:
            return None
        # silence pymc3's sampler logging during draws
        import logging
        logging.getLogger("pymc3").setLevel(logging.ERROR)
        with pymc3.Model():
            pymc3_rv_map[dist.__class__.__name__](dist)
            # prod(size) gives the total number of draws (e.g. numpy.prod);
            # the flat samples are reshaped back to `size` below
            samps = pymc3.sample(draws=prod(size),
                                 chains=1,
                                 progressbar=False,
                                 random_seed=seed,
                                 return_inferencedata=False,
                                 compute_convergence_checks=False)['X']
        return samps.reshape(size +
                             sample_shape[dist.__class__.__name__](dist))
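# Hedged usage sketch: in SymPy this classmethod is reached through
# sympy.stats.sample(..., library='pymc3'); sample()'s exact return type has
# varied across SymPy releases, so verify against your installed version.
from sympy import eye, zeros
from sympy.stats import MatrixNormal, sample

M = MatrixNormal('M', zeros(2, 2), eye(2), eye(2))
draws = sample(M, size=(5,), library='pymc3', seed=0)  # array of shape (5, 2, 2)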
Example #6
    # (snippet begins mid-model: M, R2, T2, M_T1, n, and M_obs are defined
    # earlier in the enclosing function; tt is theano.tensor)
    M_R2_T2 = tt.dot(M, R2) + T2

    # define symbolic variable of estimated M in the model so it is accessible afterwards
    M_estimation = pm.Deterministic('M_estimation', M)

    ### 5. prior over gaussian noise E_i
    # sample parameters for multivariate distribution
    sv = pm.HalfNormal("sv", sd=1)

    U = sv * tt.eye(n)  # see Theobalt 3.2 end (U is the first argument)
    V = tt.eye(3)  # see Theobalt 3.2 end (V is the second argument)

    # X1
    M_E1_T1 = pm.MatrixNormal("M_E1_T1",
                              mu=M_T1,
                              rowcov=U,
                              colcov=V,
                              shape=(n, 3),
                              observed=M_obs)

    # X2
    M_E2_T2 = pm.MatrixNormal("M_E2_T2",
                              mu=M_R2_T2,
                              rowcov=U,
                              colcov=V,
                              shape=(n, 3),
                              observed=M_obs)

with pm.Model() as outer_model:
    with inner_model:
        # draw one sample; pm.sample returns a trace data structure
        print("###### Sampler is called ######")
        trace = pm.sample(1, chains=1)
Example #7
###########
# Look at this tutorial -
# https://docs.pymc.io/notebooks/getting_started.html
###########

# Assumes: import numpy as np; import pymc3 as pm; `social_distance` is an
# (n_data, 4) covariate array and `death_data` the observed counts.
model = pm.Model()
n_data = social_distance.shape[0]
t = np.arange(n_data)

with model:
    # Weakly informative priors on the parameters
    gamma = pm.Normal('gamma', mu=0, sigma=10, shape=(4, 3))
    mu = pm.Normal('mu', mu=0, sigma=10, shape=3)
    # NB: a MatrixNormal draw is not guaranteed to be positive definite, so
    # using it as the covariance of an MvNormal can fail at the Cholesky
    # step; an LKJ prior is the usual choice (see the sketch after this
    # example).
    sigma = pm.MatrixNormal('sigma',
                            mu=np.eye(3),
                            rowcov=np.eye(3) * 10,
                            colcov=np.eye(3) * 10,
                            shape=(3, 3))

    # Define relationships amongst the params
    eta = pm.MvNormal('eta', mu=np.zeros(3), cov=sigma, shape=3)

    mat = social_distance @ gamma

    beta_0 = mu[0] + mat[:, 0] + eta[0]
    beta_1 = mu[1] + mat[:, 1] + eta[1]
    beta_2 = mu[2] + mat[:, 2] + eta[2]

    # use pm.math.exp, not np.exp: the beta_* terms are Theano tensors
    lamb = pm.math.exp(beta_0 + beta_1 * t + beta_2 * (t**2))

    Y_obs = pm.NegativeBinomial('Y_obs', mu=lamb, alpha=1, observed=death_data)
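# Hedged alternative sketch for the covariance prior flagged above: an LKJ
# Cholesky prior always yields a valid covariance, unlike a raw MatrixNormal
# draw (pm.LKJCholeskyCov with compute_corr requires PyMC3 >= 3.9).
with pm.Model() as alt_model:
    chol, corr, stds = pm.LKJCholeskyCov('chol_cov', n=3, eta=2.0,
                                         sd_dist=pm.HalfNormal.dist(10.0),
                                         compute_corr=True)
    eta = pm.MvNormal('eta', mu=np.zeros(3), chol=chol, shape=3)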