Example #1
def mk_node(species, node_name, node, parent_idx, effects, path, has_siblings=False):
    # Recursively builds the hierarchy along the nested fixed-effects structure.
    # Relies on module-level objects defined elsewhere in the script: theta, sigma,
    # data_list, data, num_traits, node_means and node_matrices.
    paths = reduce(lambda x, y: "{}__{}".format(x, y).replace(' ', '_'), path)

    if has_siblings:
        theta.append(pm.MvNormalCov('theta_{}'.format(paths),
                                    mu=theta[parent_idx],
                                    C=400*np.eye(num_traits),
                                    value=node_means[species]))
        sigma.append(pm.WishartCov('sigma_{}'.format(paths),
                                   n=num_traits+1,
                                   C=sigma[parent_idx],
                                   value=node_matrices[species]))

        parent_idx = len(theta) - 1

    if not node.items():
        # Leaf node: select the rows of `data` for this species that match the
        # current combination of fixed effects, keeping the first num_traits columns.
        obs_data = np.array(data.ix[(data['species'] == str(species)) &
                                    reduce(operator.iand,
                                           map(lambda s: data[s[0]] == s[1],
                                               zip(effects, path[1:]))), 0:num_traits])

        data_list.append(pm.MvNormalCov('data_{}'.format(paths),
                                            mu=theta[parent_idx],
                                            C=sigma[parent_idx],
                                            value=obs_data,
                                            observed=True))


        # Slow method to simulate n populations from posterior
        #ds = []
        #for i in xrange(0,obs_data.shape[0]):
        #    ds.append(pm.MvNormalCov('data_{}_{}'.format(paths, i),
        #                                    mu=theta[parent_idx],
        #                                    C=sigma[parent_idx]
        #                                    ))

        #data_sim_list.append(ds)

        return

    has_siblings = len(node.keys()) > 1
    for k in node.keys():
        mk_node(species, k, node[k], parent_idx, effects, path + [k], has_siblings)
Example #2
import numpy as np
import pymc as pm

def getModel():
    D = pm.Dirichlet('1-Dirichlet', theta=[3, 2, 4])
    C1 = pm.Categorical('2-Cat', D)
    C2 = pm.Categorical('10-Cat', D)
    C3 = pm.Categorical('11-Cat', D)
    W0_0 = pm.WishartCov('4-Wishart0_1', n=5, C=np.eye(2))
    N0_1 = pm.MvNormalCov('5-Norm0_1', mu=[-20, -20], C=np.eye(2))
    N0_2 = pm.MvNormalCov('6-Norm0_2', mu=[0, 0], C=np.eye(2))
    N0_3 = pm.MvNormalCov('7-Norm0_3', mu=[20, 20], C=np.eye(2))
    aMu = [N0_1.value, N0_2.value, N0_3.value]
    # Deterministic mappings: pick the component mean selected by each Categorical.
    fL1 = lambda n=C1: np.select([n == 0, n == 1, n == 2], aMu)
    fL2 = lambda n=C2: np.select([n == 0, n == 1, n == 2], aMu)
    fL3 = lambda n=C3: np.select([n == 0, n == 1, n == 2], aMu)
    p_N1 = pm.Lambda('p_Norm1', fL1, doc='Pr[Norm|Cat]')
    p_N2 = pm.Lambda('p_Norm2', fL2, doc='Pr[Norm|Cat]')
    p_N3 = pm.Lambda('p_Norm3', fL3, doc='Pr[Norm|Cat]')
    N = pm.MvNormalCov('3-Norm', mu=p_N1, C=W0_0)
    obsN1 = pm.MvNormalCov('8-Norm', mu=p_N2, C=W0_0, observed=True, value=[-20, -20])
    obsN2 = pm.MvNormalCov('9-Norm', mu=p_N3, C=W0_0, observed=True, value=[20, 20])
    return pm.Model([D, C1, C2, C3, N, W0_0, N0_1, N0_2, N0_3, obsN1, obsN2])
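
# A minimal usage sketch, assuming PyMC2's standard MCMC machinery;
# the chain length and burn-in below are arbitrary.
mcmc = pm.MCMC(getModel())
mcmc.sample(iter=20000, burn=10000)

# Posterior samples of the latent bivariate normal '3-Norm'.
print(mcmc.trace('3-Norm')[:].mean(axis=0))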
Example #3
theta = {
    node_name(root):
    pm.MvNormalCov(
        'theta_0',
        mu=np.array(data.ix[:, 0:num_traits].mean()),
        #value=node_means[str(root)],
        value=np.zeros(num_traits),
        #mu=np.zeros(num_traits),
        C=np.eye(num_traits) * 100.)
}

sigma = {
    node_name(root):
    pm.WishartCov(
        'sigma_0',
        value=node_matrices[node_name(root)],
        #value=np.eye(num_traits),
        n=num_traits + 1,
        C=node_matrices[node_name(root)])
        #C=np.eye(num_traits)*100.)
}

#var_factors = {}
betas = {}
delta_z = {}

for n in t.nodes()[1:]:
    parent_idx = node_name(n.parent_node)

    #var_factors[str(i)] = pm.Uniform('var_factor_{}'.format(str(i)), lower=0, upper=1000)

    betas[node_name(n)] = pm.MvNormalCov('betas_{}'.format(node_name(n)),
Example #4
t = dendropy.Tree.get_from_string("(B, ((C, E),(A,D)))", "newick")
num_leafs = len(t.leaf_nodes())
num_traits = 4

root = t.seed_node

theta = [
    pm.MvNormalCov('theta_0',
                   mu=np.array(data.ix[:, 0:num_traits].mean()),
                   C=np.eye(num_traits) * 10.,
                   value=np.zeros(num_traits))
]

sigma = [
    pm.WishartCov('sigma_0',
                  n=num_traits + 1,
                  C=np.eye(num_traits) * 10.,
                  value=np.eye(num_traits))
]

tree_idx = {str(root): 0}

i = 1
for n in t.nodes()[1:]:
    parent_idx = tree_idx[str(n.parent_node)]

    theta.append(
        pm.MvNormalCov('theta_{}'.format(str(i)),
                       mu=theta[parent_idx],
                       C=sigma[parent_idx],
                       value=np.zeros(num_traits)))
Example #5
for n in t.postorder_node_iter():
    if str(n) not in node_matrices:
        node_matrices[str(n)], node_sample_size[str(n)], node_means[str(n)] = matrix_mean(n.child_nodes())

# Now the PyMC part begins

root = t.seed_node

theta = [pm.MvNormalCov('theta_0',
                        #mu=np.array(data.ix[:, 0:num_traits].mean()),
                        mu=np.zeros(num_traits),
                        C=np.eye(num_traits)*10.,
                        value=node_means[str(root)])]

sigma = [pm.WishartCov('sigma_0',
                       n=num_traits+1,
                       C=np.eye(num_traits)*10.,
                       value=node_matrices[str(root)])]

tree_idx = {str(root): 0}

i = 1
for n in t.nodes()[1:]:
    parent_idx = tree_idx[str(n.parent_node)]

    theta.append(pm.MvNormalCov('theta_{}'.format(str(i)),
                                mu=theta[parent_idx],
                                C=sigma[parent_idx],
                                value=node_means[str(n)]))

    sigma.append(pm.WishartCov('sigma_{}'.format(str(i)),
                               n=num_traits+1,
Example #6
root = t.seed_node

theta = [
    pm.MvNormalCov(
        'theta_0',
        #mu=np.array(data.ix[:, 0:num_traits].mean()),
        #value=node_means[str(root)],
        value=np.zeros(num_traits),
        mu=np.zeros(num_traits),
        C=np.eye(num_traits) * 100.)
]

sigma = [
    pm.WishartCov(
        'sigma_0',
        #value=node_matrices[str(root)],
        value=np.eye(num_traits),
        n=num_traits + 1,
        C=np.eye(num_traits) * 100.)
]

tree_idx = {str(root): 0}
var_factors = {}
betas = {}

i = 1
for n in t.nodes()[1:]:
    parent_idx = tree_idx[str(n.parent_node)]

    var_factors[str(i)] = pm.Uniform('var_factor_{}'.format(str(i)),
                                     lower=0,
                                     upper=1000)
Example #7
import pymc as pm
import numpy as np

# Creating the known parameters
original_sigma = np.array([[1, 0.5], [0.5, 1]])
original_theta = [1, -1]

# Simulating data with mean theta and covariance sigma
data = np.random.multivariate_normal(original_theta, original_sigma, 100)

# Defining priors: a Wishart for sigma and a normal for theta
sigma = pm.WishartCov('sigma', n=3, C=np.eye(2), value=np.eye(2))

theta = pm.MvNormalCov('theta', mu=[0.,0.], C=np.eye(2), value=[0.,0.])

# Gaussian likelihood with mean theta and covariance sigma
x = pm.MvNormalCov('x', theta, sigma, value=data, observed=True)
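
# A minimal sampling sketch, assuming PyMC2's standard MCMC machinery;
# chain length and burn-in below are arbitrary.
mcmc = pm.MCMC([theta, sigma, x])
mcmc.sample(iter=20000, burn=10000)

# Posterior means should approach original_theta and original_sigma.
print(mcmc.trace('theta')[:].mean(axis=0))
print(mcmc.trace('sigma')[:].mean(axis=0))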
Example #8
import pandas as pd
import numpy as np
import pymc as pm

dados = pd.read_csv("../dados/dados5sp.csv")

# Phylogeny: (B, ((C, E),(A, D)))

# Hyperparameters for the (B, (CEAD)) node

theta_B_CEAD = pm.MvNormalCov('theta_B_CEAD',
                              mu=np.zeros(4),
                              C=np.eye(4),
                              value=np.zeros(4))

sigma_B_CEAD = pm.WishartCov('sigma_B_CEAD', n=5, C=np.eye(4), value=np.eye(4))

# Branches of the (B, (CEAD)) node

theta_B = pm.MvNormalCov('theta_B',
                         mu=theta_B_CEAD,
                         C=sigma_B_CEAD,
                         value=np.zeros(4))

sigma_B = pm.WishartCov('sigma_B', n=5, C=sigma_B_CEAD, value=np.eye(4))

theta_CEAD = pm.MvNormalCov('theta_CEAD',
                            mu=theta_B_CEAD,
                            C=sigma_B_CEAD,
                            value=np.zeros(4))
Example #9
original_theta = [1, -1]

# Simulating data with mean theta and covariance sigma
original_theta_1 = np.random.multivariate_normal(original_theta,
                                                 original_sigma)
original_theta_2 = np.random.multivariate_normal(original_theta,
                                                 original_sigma)

#original_sigma_1 = pm.distributions.wishart_cov_like(original_sigma,
#C=original_sigma,
#n=3)
#original_sigma_2 = pm.distributions.WishartCov(original_sigma,
#C=original_sigma,
#n=3)

data_1 = np.random.multivariate_normal(original_theta_1, original_sigma, 100)
data_2 = np.random.multivariate_normal(original_theta_2, original_sigma, 100)

# Defining priors: a Wishart for sigma and a normal for theta
sigma = pm.WishartCov('sigma', n=3, C=np.eye(2), value=np.eye(2))
sigma_1 = pm.WishartCov('sigma_1', n=3, C=sigma, value=np.eye(2))
sigma_2 = pm.WishartCov('sigma_2', n=3, C=sigma, value=np.eye(2))

theta = pm.MvNormalCov('theta', mu=[0., 0.], C=np.eye(2), value=[0., 0.])
theta_1 = pm.MvNormalCov('theta_1', mu=theta, C=sigma, value=[0., 0.])
theta_2 = pm.MvNormalCov('theta_2', mu=theta, C=sigma, value=[0., 0.])

# Gaussian likelihood with mean theta and covariance sigma
x_1 = pm.MvNormalCov('x_1', theta_1, sigma_1, value=data_1, observed=True)
x_2 = pm.MvNormalCov('x_2', theta_2, sigma_2, value=data_2, observed=True)
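
# A minimal sampling sketch along the same lines (chain lengths arbitrary):
# the posteriors of theta_1 and theta_2 should track the simulated
# original_theta_1 and original_theta_2.
mcmc = pm.MCMC([theta, theta_1, theta_2, sigma, sigma_1, sigma_2, x_1, x_2])
mcmc.sample(iter=50000, burn=25000)

print(mcmc.trace('theta_1')[:].mean(axis=0))
print(mcmc.trace('theta_2')[:].mean(axis=0))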