Ejemplo n.º 1
0
# Load the data table; the column -> field mapping is given by the dict below.
raw = np.loadtxt("gfl_new_data.txt")
data = {
    "x": raw[:, 0],
    "y": raw[:, 1],
    "r": raw[:, 2],
    "theta": raw[:, 3],
    "v": raw[:, 4],
    "sig_v": raw[:, 5],
    "N": raw.shape[0]  # number of rows (data points)
}

# Create the model
model = bd.Model()

# Constant velocity dispersion
model.add_node(bd.Node("velocity_dispersion", bd.Uniform(0.0, 50)))

# Constant velocity offset
model.add_node(bd.Node("v_systematic", bd.Uniform(-10.0, 10.0)))

# Rotation amplitude
model.add_node(bd.Node("A", bd.Uniform(0.0, 50.0)))

# Rotation angle
model.add_node(bd.Node("phi", bd.Uniform(0.0, 2.0 * np.pi)))

# p(data | parameters)
for i in range(0, data["N"]):

    # Mean velocity for point i: systemic offset plus a sinusoidal
    # rotation term in the position angle theta.
    # NOTE(review): the excerpt ends here; the observed v{i} node that
    # should consume this mean string is not visible.
    mean = "v_systematic + A*sin(theta{index} - phi)".format(index=i)
Ejemplo n.º 2
0
# Load the data table; the column -> field mapping is given by the dict below.
raw = np.loadtxt("gfl_new_data.txt")
data = {
    "x": raw[:, 0],
    "y": raw[:, 1],
    "r": raw[:, 2],
    "theta": raw[:, 3],
    "v": raw[:, 4],
    "sig_v": raw[:, 5],
    "N": raw.shape[0]  # number of rows (data points)
}

# Create the model
model = bd.Model()

# Constant velocity dispersion, parameterised on a log10 scale.
# (bd.T is presumably a t-distribution prior — TODO confirm its signature.)
model.add_node(bd.Node("log10_velocity_dispersion", bd.T(1.0, 0.5, 4.0)))
model.add_node(
    bd.Node("velocity_dispersion",
            bd.Delta("pow(10.0, log10_velocity_dispersion)")))

# Constant velocity offset, expressed as a multiple of the dispersion
model.add_node(bd.Node("c_v_systematic", bd.T(0.0, 0.1, 1.0)))
model.add_node(
    bd.Node("v_systematic", bd.Delta("c_v_systematic*velocity_dispersion")))

# Rotation amplitude, via a transformed t-distributed variable.
# NOTE(review): the Delta formula string below is cut off mid-expression;
# the remainder of this statement is not visible in this excerpt.
model.add_node(bd.Node("t_A", bd.T(0.0, 2.0, 2.0)))
model.add_node(
    bd.Node(
        "A",
        bd.Delta("pow(10.0, 1.0-\
Ejemplo n.º 3
0
# Data
# Ntz original data (kept for reference; matches the log(Z) notes below):
# data = {"y": np.array([554, 701, 749, 868, 516, 573, 978, 399]),
#         "n": np.array([1183, 1510, 1597, 1924, 1178, 1324, 2173, 845]),
#         "N": 8}
# Kobe Bryant's whole field goal record from NBA records:
successes = np.array([176, 391, 362, 554, 701, 749, 868, 516, 573, 978,
                      813, 775, 800, 716, 740])
attempts = np.array([422, 913, 779, 1183, 1510, 1597, 1924, 1178, 1324,
                     2173, 1757, 1690, 1712, 1569, 1639])
data = {"y": successes, "n": attempts, "N": 15}

# Build the graphical model
model = bd.Model()

# Prior on the single shared success probability
model.add_node(bd.Node("p", bd.Uniform(0, 1)))

# Placeholder Binomial likelihood node per season: y_i ~ Bin(n_i, p)
for season in range(data["N"]):
    model.add_node(bd.Node(f"y{season}",
                           bd.Binomial(f"n{season}", "p"),
                           observed=True))

# Emit the C++ implementation of the model
bd.generate_h(model, data)
bd.generate_cpp(model, data)

# take1      log(Z) = -39.2245367341 with Ntz original data
# take2      log(Z) = -39.223614467  with Ntz original data
# Analytical log(Z) = -39.2308
Ejemplo n.º 4
0
import math
import numpy as np
import dnest4.builder as bd

# Dummy dataset: the builder requires a data dict, but the real (egg-box)
# likelihood will be typed into the generated C++ by hand.
data = {"D": np.array([10.0]), "u": np.array([0.01]), "v": np.array([0.1])}

# Build the model
model = bd.Model()

# Prior on the single parameter
model.add_node(bd.Node("th", bd.Uniform(-0.5, 0.5)))

# Placeholder observed node; its log-likelihood is edited manually in C++.
model.add_node(bd.Node("D0", bd.Normal("mu", 1), observed=True))

# Emit the C++ sources
bd.generate_h(model, data)
bd.generate_cpp(model, data)

# Run result: log(Z)                 = 0.684022078176
# Mathematica result:                = log(2) = 0.693147
Ejemplo n.º 5
0
import math
import numpy as np
import dnest4.builder as bd

# Summary statistics only; no array data is needed for this model.
data = {
    "Xb": 1.517116,
    "sCS": 0.0004726074,
    "Yb": 1.517456,
    "sSP": 0.001038594
}

# Build the model
model = bd.Model()

# Prior on the shared mean of both measurement methods
model.add_node(bd.Node("mucssp", bd.Normal(1.5, 0.5)))

# Both observations share the mean, each with its own known sd
for observed_name, sd_name in (("Xb", "sCS"), ("Yb", "sSP")):
    model.add_node(bd.Node(observed_name,
                           bd.Normal("mucssp", sd_name),
                           observed=True))

# Emit the C++ sources
bd.generate_h(model, data)
bd.generate_cpp(model, data)

# Run result: log(Z) = XXXXX
Ejemplo n.º 6
0
import numpy as np
import dnest4.builder as bd

# Toy straight-line dataset
data = {
    "x": np.array([1.0, 2.0, 3.0, 4.0, 5.0]),
    "y": np.array([1.0, 2.0, 3.0, 3.9, 5.1]),
    "N": 5
}

# Build the model
model = bd.Model()

# Straight-line parameters: slope m and intercept b
model.add_node(bd.Node("m", bd.Uniform(-100.0, 100.0)))
model.add_node(bd.Node("b", bd.Uniform(-100.0, 100.0)))

# Noise standard deviation, via a uniform prior on its log
model.add_node(bd.Node("log_sigma", bd.Uniform(-10.0, 10.0)))
model.add_node(bd.Node("sigma", bd.Delta("exp(log_sigma)")))

# One observed Normal node per data point: y_j ~ N(m*x_j + b, sigma)
for j in range(data["N"]):
    model.add_node(bd.Node(f"y{j}",
                           bd.Normal(f"m*x{j} + b", "sigma"),
                           observed=True))

# Emit the C++ sources
bd.generate_h(model, data)
bd.generate_cpp(model, data)

# Compile the C++ code so it's ready to go
Ejemplo n.º 7
0
import math
import numpy as np
import dnest4.builder as bd

# Dummy data: the real likelihood is entered by hand in the generated C++.
data = {"D": np.array([1.0])}

# Build the model
model = bd.Model()

# Spherical-coordinate priors: radius, polar angle, azimuthal angle
model.add_node(bd.Node("r", bd.Uniform(0.0, 20.0)))
model.add_node(bd.Node("th", bd.Uniform(0.0, math.pi)))
model.add_node(bd.Node("ph", bd.Uniform(0.0, 2.0 * math.pi)))

# Placeholder observed node; replace its likelihood manually in C++.
model.add_node(bd.Node("D0", bd.Normal("mu", 1), observed=True))

# Emit the C++ sources
bd.generate_h(model, data)
bd.generate_cpp(model, data)

# Run result: log(Z)                 = about -6.0....
Ejemplo n.º 8
0
# NOTE(review): `data` is created before this excerpt begins; only the
# fields appended below are visible here.
data["N"] = 40
data["N_groups"] = 4
# Group index (0-3) for each of the 40 observations, 10 per group.
data["group"] = np.array([0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\
                          1, 1, 1, 1, 1, 1, 1, 1, 1, 1,\
                          2, 2, 2, 2, 2, 2, 2, 2, 2, 2,\
                          3, 3, 3, 3, 3, 3, 3, 3, 3, 3], dtype="int64")
# Response value for each observation.
data["y"] = np.array([78, 88, 87, 88, 83, 82, 81, 80, 80, 89, 78, 78, 83,\
                      81, 78, 81, 81, 82, 76, 76, 79, 73, 79, 75, 77, 78,\
                      80, 78, 83, 84, 77, 69, 75, 70, 74, 83, 80, 75, 76,\
                      75], dtype="float64")

# A model
model = bd.Model()

# Hyperparameters: overall mean and between-group spread
model.add_node(bd.Node("grand_mean", bd.Normal(0.0, 1000.0)))
model.add_node(bd.Node("diversity", bd.LogUniform(1E-3, 1E3)))

# Group mean parameters, non-centred: mu_i = grand_mean + diversity*n_i
for i in range(0, data["N_groups"]):
    model.add_node(bd.Node("n{i}".format(i=i), bd.Normal(0.0, 1.0)))
    model.add_node(\
                    bd.Node("mu{i}".format(i=i),\
                         bd.Delta("grand_mean + diversity*n{i}".format(i=i))\
                        )\
                  )

# Noise sd
model.add_node(bd.Node("sigma", bd.LogUniform(1E-3, 1E3)))

# Data nodes
# NOTE(review): excerpt ends here; the observed y{i} nodes are not visible.
Ejemplo n.º 9
0
import math
import numpy as np
import dnest4.builder as bd

# Daniel Azevedo GC-Ethanol calibration data
area_ratio = np.array([0.1716393, 0.2905149, 0.5521852,
                       0.8684159, 1.046752, 1.279638])
concentration = np.array([0.05, 0.1, 0.2, 0.3, 0.4, 0.5])
data = {"x": area_ratio, "y": concentration, "N": 6}


# Build the model
model = bd.Model()

# Priors. NOTE: the generated C++ needs hand-editing for a Half-Cauchy on beta1.
model.add_node(bd.Node("beta0", bd.Cauchy(0, 1)))
model.add_node(bd.Node("beta1", bd.Cauchy(0, 5)))  # should be Half-Cauchy
model.add_node(bd.Node("epsilon", bd.Normal(0, 1)))

# Straight-line likelihood, one observed node per calibration point
for k in range(data["N"]):
    model.add_node(bd.Node(f"y{k}",
                           bd.Normal(f"beta1*x{k} + beta0", "epsilon"),
                           observed=True))

# Emit the C++ sources
bd.generate_h(model, data)
bd.generate_cpp(model, data)

# Run result: log(Z)                 = 2.24468713205
# Mathematica result:                = XXXX
Ejemplo n.º 10
0
import math
import numpy as np
import dnest4.builder as bd

# Dummy data: the egg-box likelihood is entered by hand in the generated C++.
data = {"D": np.array([1.0])}

# Build the model
model = bd.Model()

# Uniform priors over the egg-box domain [0, 10*pi]^2
model.add_node(bd.Node("a", bd.Uniform(0, 10 * math.pi)))
model.add_node(bd.Node("b", bd.Uniform(0, 10 * math.pi)))

# Placeholder observed node; replace its likelihood manually in C++.
model.add_node(bd.Node("D0", bd.Normal("mu", 1), observed=True))

# Emit the C++ sources
bd.generate_h(model, data)
bd.generate_cpp(model, data)

# Run result: log(Z)                 = 235.85129744
# Feroz numerical integration result = 235.856
# Feroz Multinest result             = 235.835 and 235.839
# Mathematica result:                = 231.944
Ejemplo n.º 11
0
        -0.32019000, 1.59480235, -0.23412293, -0.08350555, -1.46057870,
        -0.62142475, 0.84171547, 1.25053406, 1.22901729, -1.28844456,
        -0.79355889, 0.64806456, 2.43395630, -1.18086072, 0.36834657,
        0.51896395, 0.99233285, -0.19108939, -1.15934395, 0.64806456,
        0.82019870, -0.90114273, 0.75564840, -0.64294152, -1.80484699,
        0.56199749, 0.30379627
    ]),
    # NOTE(review): this dict literal starts before the excerpt; the key
    # for the array above (and the matching "x" entry) is not visible.
    "N":
    42
}

# Create the model
model = bd.Model()

# Prior p(m)
model.add_node(bd.Node("a", bd.Normal(0, 1000)))  # intercept
model.add_node(bd.Node("b", bd.Normal(0, 100)))   # slope
# Need to modify C++ for Half-Cauchy on sigma_1
#model.add_node(bd.Node("sig1", bd.Cauchy(0, 5)))
model.add_node(bd.Node("sig1", bd.Uniform(0, 1e8)))  # noise sd (flat prior)

# Likelihood p(data | parameters): straight line with Gaussian noise
for i in range(0, data["N"]):
    name = "y{index}".format(index=i)
    mean = "b*x{index} + a".format(index=i)
    model.add_node(bd.Node(name, bd.Normal(mean, "sig1"), observed=True))

# Create the C++ code
bd.generate_h(model, data)
bd.generate_cpp(model, data)
import numpy as np
import dnest4.builder as bd
import pandas as pd

# Load the coronary-heart-disease dataset.
# NOTE(review): integer-coded columns CHD, Age, AgeGrp are assumed from
# the usage below — confirm against CHD.csv.
CHD = pd.read_csv('CHD.csv')

data = {"CHD": np.array(CHD["CHD"]).astype("int64"),\
        "Age": np.array(CHD["Age"]).astype("int64"),\
        "AgeGrp": np.array(CHD["AgeGrp"]).astype("int64")}
data["N"] = len(data["CHD"])

# Create the model
model = bd.Model()

# Logistic-regression coefficients: intercept and Age slope
model.add_node(bd.Node("beta_0", bd.Normal(0, 10)))
model.add_node(bd.Node("beta_1", bd.Normal(0, 10)))

# Sampling distribution: Bernoulli (Binomial with 1 trial) with logistic
# link p = exp(eta)/(1 + exp(eta)) where eta = beta_0 + beta_1*Age.
for i in range(0, data["N"]):
    name = "CHD{i}".format(i=i)
    #prob = "exp(beta_0 + beta_1 * Age{i})/(1.0 + exp(beta_0 + beta_1 * Age{i}))"
    prob = "exp(beta_0 + beta_1 * Age{i})"
    prob += "/(1.0 + exp(beta_0 + beta_1 * Age{i}))"
    prob = prob.format(i=i)
    distribution = bd.Binomial(1, prob)
    node = bd.Node(name, distribution, observed=True)
    model.add_node(node)

# Extra node for prediction
# NOTE(review): excerpt ends here; the CHDnew node construction that
# consumes `name` is not visible.
name = "CHDnew"
Ejemplo n.º 13
0
import numpy as np
import dnest4.builder as bd

# Summary statistics only; no array data is needed for this model.
data = {
    "Xb": 1.517116,
    "sCS": 0.0004726074,
    "Yb": 1.517456,
    "sSP": 0.001038594
}

# Build the model
model = bd.Model()

# Prior: a single common mean (hypothesis Hp)
model.add_node(bd.Node("mucssp", bd.Normal(1.51, 0.01)))  # Hp
# Alternative Hd parameterisation with separate means (disabled):
# model.add_node(bd.Node("mucs", bd.Normal(1.51, 0.01)))  # Hd
# model.add_node(bd.Node("musp", bd.Normal(1.51, 0.01)))  # Hd

# Likelihood: both measurements share the mean under Hp
model.add_node(bd.Node("Xb", bd.Normal("mucssp", "sCS"), observed=True))  # Hp
model.add_node(bd.Node("Yb", bd.Normal("mucssp", "sSP"), observed=True))  # Hp
# Hd likelihood (disabled):
# model.add_node(bd.Node("Xb", bd.Normal("mucs", "sCS"), observed=True))  # Hd
# model.add_node(bd.Node("Yb", bd.Normal("musp", "sSP"), observed=True))  # Hd

# Emit the C++ sources
bd.generate_h(model, data)
bd.generate_cpp(model, data)

# Run result: log(Z) = XXXXX
# Hp: log(Z) = -4.29510326831  # Super Wide prior, mu = 0, sigma = 10000
Ejemplo n.º 14
0
import math
import numpy as np
import dnest4.builder as bd

# Data: centres of the two likelihood shells
data = {"cx": np.array([3.5, -3.5]),
        "cy": np.array([0.0, 0.0])}

# Build the model
model = bd.Model()

# Uniform priors over the 2D parameter square
model.add_node(bd.Node("m1", bd.Uniform(-6.0, 6.0)))
model.add_node(bd.Node("m2", bd.Uniform(-6.0, 6.0)))

# Placeholder observed node; the shell likelihood is entered by hand in C++.
model.add_node(bd.Node("D0", bd.Normal("mu", 1), observed=True))

# Emit the C++ sources
bd.generate_h(model, data)
bd.generate_cpp(model, data)

# Run result: log(Z)                  = -1.72298793735, -1.74864668478, -1.75171455818
# Feroz analytical integration result = −1.75
# Feroz Multinest result              = -1.61, -1.71, -1.72, -1.69 depending on control params
# Mathematica result:                 = 
Ejemplo n.º 15
0
                      # NOTE(review): this excerpt starts inside the
                      # data["t"] array literal (years ending at 1962).
                      1949, 1950, 1951, 1952, 1953, 1954, 1955,
                      1956, 1957, 1958, 1959, 1960, 1961, 1962])\
                                                       .astype("float64")
# Annual event counts, one per year in data["t"].
data["y"] = np.array([4, 5, 4, 1, 0, 4, 3, 4, 0, 6, 3, 3, 4, 0,
                      2, 6, 3, 3, 5, 4, 5, 3, 1, 4, 4, 1, 5, 5,
                      3, 4, 2, 5, 2, 2, 3, 4, 2, 1, 3, 2, 2, 1,
                      1, 1, 1, 3, 0, 0, 1, 0, 1, 1, 0, 0, 3, 1,
                      0, 3, 2, 2, 0, 1, 1, 1, 0, 1, 0, 1, 0, 0,
                      0, 2, 1, 0, 0, 0, 1, 1, 0, 2, 3, 3, 1, 1,
                      2, 1, 1, 1, 1, 2, 4, 2, 0, 0, 0, 1, 4, 0,
                      0, 0, 1, 0, 0, 0, 0, 0, 1, 0, 0, 1, 0, 1])\
                                                       .astype("float64")
data["N"] = int(len(data["t"]))  # number of years

# Smooth change-point model: the Poisson rate moves from mu1 to mu2
# around change_year, with transition timescale L (logistic in time).
model = bd.Model()
model.add_node(bd.Node("log_mu1", bd.Cauchy(0.0, 5.0)))
model.add_node(bd.Node("diff", bd.Cauchy(0.0, 1.0)))
model.add_node(bd.Node("log_mu2", bd.Delta("log_mu1 + diff")))
model.add_node(bd.Node("mu1", bd.Delta("exp(log_mu1)")))
model.add_node(bd.Node("mu2", bd.Delta("exp(log_mu2)")))
model.add_node(bd.Node("change_year", bd.Uniform(1851.0, 1962.0)))
model.add_node(bd.Node("L", bd.LogUniform(1E-2, 1E2)))

# Data nodes: Poisson counts with the logistic-in-time mean
for i in range(0, data["N"]):
    name = "y{i}".format(i=i)
    mean = "mu1 + (mu2 - mu1)/(1.0 + exp(-(t{i} - change_year)/L))"\
                                        .format(i=i)
    model.add_node(bd.Node(name, bd.Poisson(mean), observed=True))

# Create the C++ code
# NOTE(review): excerpt ends here; the generate calls are not visible.
Ejemplo n.º 16
0
import numpy as np
import dnest4.builder as bd

# A single datum
data = {"D": np.array([90.0])}

# Build the model
model = bd.Model()

# Cauchy prior on the location parameter
model.add_node(bd.Node("mu", bd.Cauchy(0.3, 10)))

# Likelihood p(D|mu): start from a Normal node, then edit the
# log-likelihood that gets written into MyModel.cpp.
model.add_node(bd.Node("D0", bd.Normal("mu", 0.5), observed=True))

# Emit the C++ sources
bd.generate_h(model, data)
bd.generate_cpp(model, data)
Ejemplo n.º 17
0
import numpy as np  # FIX: np.log/np.array are used below but numpy was never imported
import dnest4.builder as bd
import pandas as pd

# The data, as a dictionary of log-transformed columns from Clocks.csv.
# NOTE(review): expects columns age, num_bidders, y — confirm against the CSV.
# (A dead `data = {}` that was immediately overwritten has been removed.)
Clocks = pd.read_csv('Clocks.csv')
data = {'log_age': np.log(np.array(Clocks['age']).astype('float64')),\
        'log_num_bidders': np.log(np.array(Clocks['num_bidders'])).astype('float64'),\
        'log_y': np.log(np.array(Clocks['y']).astype('float64'))}
data["N"] = int(len(data["log_y"]))

# Model
model = bd.Model()

# Slopes and Intercept
model.add_node(bd.Node("beta0", bd.Normal(0, 100)))
model.add_node(bd.Node("beta1", bd.Normal(0, 100)))
model.add_node(bd.Node("beta2", bd.Normal(0, 100)))
model.add_node(bd.Node("beta3", bd.Normal(0, 100)))

# Noise standard deviation
model.add_node(bd.Node("sigma", bd.LogUniform(0.001, 100)))

# Sampling distribution: linear model in log_age and log_num_bidders,
# including their interaction (beta3 term).
for i in range(0, data["N"]):
    name = "log_y{i}".format(i=i)
    mean = "beta0 + beta1 * log_age{i}  + beta2 * log_num_bidders{i} +"
    mean += "beta3 * log_age{i} * log_num_bidders{i}"
    mean = mean.format(i=i)
    model.add_node(bd.Node(name, bd.Normal(mean, "sigma"), observed=True))
Ejemplo n.º 18
0
        # NOTE(review): the data dict and this response array begin before
        # this excerpt; only the tail of the literal is visible here.
        237, 257, 261, 248, 219, 244, 283, 293, 312, 272, 280, 298, 275, 297,
        350, 260, 313, 273, 289, 326, 281, 288, 280, 283, 307, 286, 298, 267,
        272, 285, 286, 303, 295, 289, 258, 286, 320, 354, 328, 297, 323, 331,
        305, 338, 376, 296, 352, 314, 325, 358, 312, 324, 316, 317, 336, 321,
        334, 302, 302, 323, 331, 345, 333, 316, 291, 324
    ]).astype("float64"),
    # Five common measurement points shared by all subjects
    "x":
    np.array([8.0, 15.0, 22.0, 29.0, 36.0])
}
data["x_bar"] = float(data["x"].mean())  # Convert from numpy.float64 to float

# A model
model = bd.Model()

# Priors: population-level means and precisions (tau, i.e. 1/sigma^2)
model.add_node(bd.Node("alpha_mu", bd.Normal(0.0, 100.0)))
model.add_node(bd.Node("beta_mu", bd.Normal(0.0, 100.0)))
model.add_node(bd.Node("tau_c", bd.Gamma(1E-3, 1E3)))
model.add_node(bd.Node("alpha_tau", bd.Gamma(1E-3, 1E3)))
model.add_node(bd.Node("beta_tau", bd.Gamma(1E-3, 1E3)))

# Transformations: precisions -> standard deviations, and the intercept
# at x = 0 (a0) recovered from the centred parameterisation around x_bar.
model.add_node(bd.Node("alpha_sigma", bd.Delta("1.0/sqrt(alpha_tau)")))
model.add_node(bd.Node("beta_sigma", bd.Delta("1.0/sqrt(beta_tau)")))
model.add_node(bd.Node("sigma_c", bd.Delta("1.0/sqrt(tau_c)")))
model.add_node(bd.Node("a0", bd.Delta("alpha_mu - beta_mu*x_bar")))

# Sampling distribution: per-subject standardised coefficients
# (non-centred parameterisation)
for i in range(0, data["N"]):
    model.add_node(bd.Node("n_alpha{i}".format(i=i), bd.Normal(0.0, 1.0)))
    model.add_node(bd.Node("n_beta{i}".format(i=i), bd.Normal(0.0, 1.0)))
    # NOTE(review): excerpt ends mid-loop; the remaining nodes are cut off.
Ejemplo n.º 19
0
# NOTE(review): `data` (including "Species" and the per-row inputs used in
# the formulas below: SL, SW, PL, PW) is loaded before this excerpt begins.
data["N_Species"] = len(np.unique(data["Species"]))
data["N"] = len(data["Species"])
data["N_Input"] = 6  # coefficients per species: intercept, 4 inputs, 1 interaction

# Add Some Ones to the Data for the One Trick
data["ones"] = np.ones(data["N"], dtype="int64")

# Create the model
model = bd.Model()

# Coefficients beta_{species}_{input}, one Normal prior each
for j in range(0, data["N_Species"]):
    for k in range(0, data["N_Input"]):
        name = "beta_{j}_{k}".format(j=j, k=k)
        prior = bd.Normal(0.0, 10.0)
        node = bd.Node(name, prior)
        model.add_node(node)

# Sampling distribution
for i in range(0, data["N"]):

    # Probability Distribution over Categories:
    # linear predictor per species j, with an SW*PL interaction term.
    for j in range(0, data["N_Species"]):
        name = "p_{j}_{i}".format(i=i, j=j)
        formula = ""
        formula += "beta_{j}_0 +"
        formula += "beta_{j}_1 * SL{i} + beta_{j}_2 * SW{i} + "
        formula += "beta_{j}_3 * PL{i} + beta_{j}_4 * PW{i} + "
        formula += "beta_{j}_5 * SW{i} * PL{i}"
        formula = formula.format(i=i, j=j)
        model.add_node(bd.Node(name, bd.Delta(formula)))
        # NOTE(review): excerpt is cut off here.
Ejemplo n.º 20
0
    "n":
    np.array([
        422, 913, 779, 1183, 1510, 1597, 1924, 1178, 1324, 2173, 1757, 1690,
        1712, 1569, 1639
    ]),
    "N":
    15
}

# Create the model
model = bd.Model()

# Prior p(m)
for i in range(0, data["N"]):
    ppi = "ppi{index}".format(index=i)
    model.add_node(bd.Node(ppi, bd.Uniform(0, 1)))

# Likelihood p(D|n,p) place holder.
for i in range(0, data["N"]):
    suc = "y{index}".format(index=i)
    atp = "n{index}".format(index=i)
    ppi = "ppi{index}".format(index=i)
    model.add_node(bd.Node(suc, bd.Binomial(atp, ppi), observed=True))

# Create the C++ code
bd.generate_h(model, data)
bd.generate_cpp(model, data)

# take1      log(Z) = -58.0551362482 with Ntz original data
# take2      log(Z) = -57.9893885883  with Ntz original data
# take3      log(Z) = -57.9826848935  with Ntz original data
Ejemplo n.º 21
0
# Plot the data
# NOTE(review): `data`, `plt` and `bd` are set up before this excerpt begins.
plt.plot(data["t"], data["adult"], "ko-", label="Adult")
plt.plot(data["t"], data["youth"], "go-", label="Youth")
plt.xlabel("Time (quarters)")
plt.ylabel("Unemployment rate (logit)")
plt.legend()
plt.xlim([data["t"].min() - 0.5, data["t"].max() + 0.5])
plt.ylim([-4.0, 0.0])
plt.show()

# A model (of the prior information!)
model = bd.Model()

# AR(1) parameters for the adult unemployment rate:
# mu1 = long-run mean, L1 = correlation length, beta1 = innovation sd;
# alpha1 and sigma1 are the implied lag-1 autocorrelation and stationary sd.
model.add_node(bd.Node("mu1", bd.Uniform(-10.0, 0.0)))
model.add_node(bd.Node("L1", bd.LogUniform(1.0, 1E4)))
model.add_node(bd.Node("beta1", bd.LogUniform(1E-3, 1E3)))
model.add_node(bd.Node("alpha1", bd.Delta("exp(-1.0/L1)")))
model.add_node(bd.Node("sigma1", bd.Delta("beta1/sqrt(1.0 - alpha1*alpha1)")))

# Sampling distribution for adult data: first point from the stationary
# distribution, subsequent points conditional on their predecessor.
model.add_node(bd.Node("adult0",\
                    bd.Normal("mu1", "sigma1"), observed=True))
for i in range(1, data["N"]):
    name = "adult{i}".format(i=i)
    dist = bd.Normal("mu1 + alpha1*(adult{k} - mu1)".format(k=(i-1)), "beta1")
    model.add_node(bd.Node(name, dist, observed=True))

# Parameters relating to youth data
# NOTE(review): the excerpt is cut off after this node.
model.add_node(bd.Node("offset", bd.Normal(0.0, 1.0)))
# NOTE(review): an unrelated CHD logistic-regression script appears to start
# here; its imports (pd, np, bd) are outside this excerpt.
CHD = pd.read_csv('CHD.csv')

data = {"CHD": np.array(CHD["CHD"]).astype("int64"),\
        "Age": np.array(CHD["Age"]).astype("int64")}
data["N"] = len(data["CHD"])
data["N_Input"] = 2
data["N_CHD"] = 2

# Add Some Ones to the Data for the One Trick
data["ones"] = np.ones(data["N"], dtype="int64")

# Create the model
model = bd.Model()

# Coefficients beta_{category}_{input}: intercept and Age slope per category
model.add_node(bd.Node("beta_0_0", bd.Normal(0, 10)))
model.add_node(bd.Node("beta_0_1", bd.Normal(0, 10)))
model.add_node(bd.Node("beta_1_0", bd.Normal(0, 10)))
model.add_node(bd.Node("beta_1_1", bd.Normal(0, 10)))

# Sampling distribution
for i in range(0, data["N"]):
    # Linear predictor p_{j}_{i} for each CHD category j of observation i
    for j in range(0, data["N_CHD"]):
        name = "p_{j}_{i}".format(i=i, j=j)
        formula = ""
        formula += "beta_{j}_0 + beta_{j}_1 * Age{i} "
        formula = formula.format(i=i, j=j)
        model.add_node(bd.Node(name, bd.Delta(formula)))
    # Probability Distribution over Categories
    # NOTE(review): excerpt ends here, mid-construction.
    #name = "p_0_{i}".format(i=i)
    #formula = ""