Example No. 1
import numpy as np
from bayespy import nodes  # import path is an assumption; older BayesPy releases used bayespy.inference.vmp.nodes


def pca_model(M, N, D):
    # Construct the probabilistic PCA model with ARD priors

    # ARD
    alpha = nodes.Gamma(1e-2, 1e-2, plates=(D, ), name='alpha')

    # Loadings (the ARD precisions from alpha prune unneeded components)
    W = nodes.Gaussian(np.zeros(D),
                       alpha.as_diagonal_wishart(),
                       name="W",
                       plates=(M, 1))

    # States
    X = nodes.Gaussian(np.zeros(D), np.identity(D), name="X", plates=(1, N))

    # Dot products over the broadcast plates give the (M, N) low-rank reconstruction
    WX = nodes.Dot(W, X, name="WX")

    # Noise
    tau = nodes.Gamma(1e-2, 1e-2, name="tau", plates=())

    # Noisy observations
    Y = nodes.GaussianARD(WX, tau, name="Y", plates=(M, N))

    return (Y, WX, W, X, tau, alpha)
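A minimal usage sketch for this model follows. The VB engine import (bayespy.inference.VB) and the observe/update calls reflect BayesPy's variational inference API, but treat the exact paths as assumptions since this example targets an older release; the data is synthetic and purely illustrative.

from bayespy.inference import VB  # assumed import path; version-dependent

M, N, D = 10, 100, 3
Y, WX, W, X, tau, alpha = pca_model(M, N, D)

# Observe synthetic (illustrative) data, then run VB message passing
Y.observe(np.random.randn(M, N))
Q = VB(Y, W, X, tau, alpha)
Q.update(repeat=100)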
Example No. 2
import numpy as np
from bayespy import nodes  # import path is an assumption; older BayesPy releases used bayespy.inference.vmp.nodes


def gaussianmix_model(N, K, D):
    # N = number of data vectors
    # K = number of clusters
    # D = dimensionality

    # Construct the Gaussian mixture model

    # K prior weights (for components)
    alpha = nodes.Dirichlet(1 * np.ones(K), name='alpha')
    # N K-dimensional cluster assignments (for data)
    z = nodes.Categorical(alpha, plates=(N, ), name='z')
    # K D-dimensional component means
    X = nodes.Gaussian(np.zeros(D),
                       0.01 * np.identity(D),
                       plates=(K, ),
                       name='X')
    # K DxD component precision matrices (Wishart prior; the Gaussian node is
    # parameterized by mean and precision, not covariance)
    Lambda = nodes.Wishart(D,
                           0.01 * np.identity(D),
                           plates=(K, ),
                           name='Lambda')
    # N D-dimensional observation vectors
    Y = nodes.Mixture(nodes.Gaussian)(z, X, Lambda, plates=(N, ), name='Y')
    # TODO: Plates should be inferred automatically when not given (the
    # smallest shape broadcast from the shapes of the parents)

    return (Y, X, Lambda, z, alpha)
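As with the PCA example, here is a minimal usage sketch. The VB engine import and the observe/update calls are assumptions about BayesPy's inference API, and the data is synthetic:

from bayespy.inference import VB  # assumed import path; version-dependent

N, K, D = 200, 3, 2
Y, X, Lambda, z, alpha = gaussianmix_model(N, K, D)

# Observe synthetic (illustrative) data, then run VB message passing
Y.observe(np.random.randn(N, D))
Q = VB(Y, X, Lambda, z, alpha)
Q.update(repeat=100)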