Example #1
def demo(seed=None):
    """
    Fit a weakly sparse
    :return:
    """
    if seed is None:
        seed = np.random.randint(2**32)

    print "Setting seed to ", seed
    np.random.seed(seed)


    N = 27          # Number of neurons
    T = 60000       # Number of time bins
    dt = 1.0        # Time bin width
    dt_max = 10.0   # Max time of synaptic influence
    B = 2           # Number of basis functions for the weights

    #   Bias hyperparameters
    bias_hypers = {"mu_0": -2.0, "sigma_0": 0.25}

    p = 0.1                 # Probability of connection for each pair of clusters
    mu = -2 * np.ones((B,))     # Mean weight for each pair of clusters
    sigma = 0.1 * np.eye(B) # Covariance of weight for each pair of clusters

    # Define the true network model for the GLM
    true_network = FactorizedNetworkDistribution(
        N,
        BernoulliAdjacencyDistribution, {"p": p},
        FixedGaussianWeightDistribution, {"B": B, "mu": mu, "sigma": sigma})

    true_model = NegativeBinomialPopulation(N=N, dt=dt, dt_max=dt_max, B=B,
                       bias_hypers=bias_hypers,
                       network=true_network)

    ###########################################################
    # Create a test spike-and-slab model
    ###########################################################
    # Create another copy of the model with the true network model
    test_network = FactorizedNetworkDistribution(
        N,
        BernoulliAdjacencyDistribution, {"p": p},
        FixedGaussianWeightDistribution, {"B": B, "mu": mu, "sigma": sigma})

    observation_hypers = {"xi": 10., "alpha_xi": 1.0, "beta_xi": 1.0}

    test_model = NegativeBinomialPopulation(N=N, dt=dt, dt_max=dt_max, B=B,
                            bias_hypers=bias_hypers,
                            network=test_network,
                            observation_hypers=observation_hypers)


    # Sample some synthetic data from the true model
    S = true_model.generate(T=T, keep=True, verbose=False)

    # Add training data in chunks
    chunksz = 1024
    for offset in range(0, T, chunksz):
        test_model.add_data(S[offset:min(offset+chunksz,T)])

    ###########################################################
    # Fit the test model with Gibbs sampling
    ###########################################################
    N_samples = 3
    samples = []
    lps = []
    for itr in range(N_samples):
        lps.append(test_model.log_probability())
        samples.append(test_model.copy_sample())

        print()
        print("Gibbs iteration", itr)
        print("LP:", lps[-1])

        test_model.collapsed_resample_model()

    with open("gibbs_profile.txt", "w") as f:
        show_line_stats(f)
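
# Minimal entry point for this example (not part of the original snippet);
# it assumes numpy and the model classes used above are imported at module level.
if __name__ == "__main__":
    demo(seed=1234)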
Example #2
dt = 1.0  # Time bin width
dt_max = 10.0  # Max time of synaptic influence
B = 2  # Number of basis functions for the weights

#   Bias hyperparameters
bias_hypers = {"mu_0": -1.0, "sigma_0": 0.25}

p = 0.5  # Probability of connection for each pair of clusters
mu = np.zeros((B, ))  # Mean weight for each pair of clusters
sigma = 1.0 * np.eye(B)  # Covariance of weight for each pair of clusters

# Define the true network model for the GLM
true_network = FactorizedNetworkDistribution(N, BernoulliAdjacencyDistribution,
                                             {"p": p},
                                             FixedGaussianWeightDistribution, {
                                                 "B": B,
                                                 "mu": mu,
                                                 "sigma": sigma
                                             })

true_model = Population(N=N,
                        dt=dt,
                        dt_max=dt_max,
                        B=B,
                        bias_hypers=bias_hypers,
                        network=true_network)

# Create another copy of the model with the true network model
test_network = FactorizedNetworkDistribution(N, BernoulliAdjacencyDistribution,
                                             {"p": p},
                                             FixedGaussianWeightDistribution,
                                             {"B": B, "mu": mu, "sigma": sigma})
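
# Sketch of the (truncated) continuation, assuming it mirrors Example #1:
# wrap the test network in a Population with the same hyperparameters.
# N is assumed to be defined earlier in the original file.
test_model = Population(N=N, dt=dt, dt_max=dt_max, B=B,
                        bias_hypers=bias_hypers,
                        network=test_network)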
Example #5
# The original import statement is truncated here; the module paths below are assumed.
from graphistician.adjacency import LatentDistanceAdjacencyDistribution, SBMAdjacencyDistribution
from graphistician.weights import NIWGaussianWeightDistribution, SBMGaussianWeightDistribution

from graphistician.networks import FactorizedNetworkDistribution

seed = 1234
# seed = np.random.randint(2**32)

# Create a latent distance model with N nodes and D-dimensional locations
N = 30  # Number of nodes
N_test = 1  # Number of nodes to hold out for testing
B = 1  # Dimensionality of the weights
D = 2  # Dimensionality of the feature space

true_model = FactorizedNetworkDistribution(
    N + N_test, LatentDistanceAdjacencyDistribution, {},
    NIWGaussianWeightDistribution, {})

# Sample a graph from the true model
Afull, Wfull = true_model.rvs()
Atrain = Afull[:N, :N]
Wtrain = Wfull[:N, :N, :]
Atest_row = Afull[N:, :].ravel()
Wtest_row = Wfull[N:, :, :].reshape((N + N_test, 1))
Atest_col = Afull[:, N:].ravel()
Wtest_col = Wfull[:, N:, :].reshape((N + N_test, 1))

# Make a figure to plot the true and inferred network
adj_models = [
    LatentDistanceAdjacencyDistribution,
    SBMAdjacencyDistribution,
]   # NOTE: the original list is truncated here; only two entries are visible
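
# Sketch (assumption): how each candidate adjacency model might be fit to the
# training graph, reusing the resample()/log_probability() pattern from the
# other examples. The helper name and n_samples are illustrative only.
def fit_adjacency_model(adj_class, A, W, n_samples=100):
    model = FactorizedNetworkDistribution(
        N, adj_class, {},
        NIWGaussianWeightDistribution, {})
    lps = [model.log_probability((A, W))]
    for _ in range(n_samples):
        model.resample((A, W))
        lps.append(model.log_probability((A, W)))
    return model, lps

# e.g. fitted, lps = fit_adjacency_model(LatentDistanceAdjacencyDistribution,
#                                        Atrain, Wtrain)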
Example #6
def demo(seed=None):
    if seed is None:
        seed = np.random.randint(2**32)

    print "Setting seed to ", seed
    np.random.seed(seed)

    # Create a latent distance weight model with N nodes and D-dimensional locations
    N = 30  # Number of nodes
    D = 2  # Dimensionality of the feature space

    true_model = \
        FactorizedNetworkDistribution(N,
            CompleteAdjacencyDistribution, {},
            LatentDistanceGaussianWeightDistribution, dict(dim=D, b=-12.))

    # Set the true locations to be on a grid
    # w = 4
    # s = 0.8
    # x = s * (np.arange(N) % w)
    # y = s * (np.arange(N) // w)
    # L = np.hstack((x[:,None], y[:,None]))
    # true_model.adjacency.L = L

    # Set the true locations to be on a circle
    r = 1.5 + np.arange(N) // (N / 2.)
    th = np.linspace(0, 4 * np.pi, N, endpoint=False)
    x = r * np.cos(th)
    y = r * np.sin(th)
    L = np.hstack((x[:, None], y[:, None]))
    true_model.weights.L = L

    # Sample a graph from the true model
    A, W = true_model.rvs()

    # Make a figure to plot the true and inferred network
    plt.ion()
    fig = plt.figure()
    ax_true = fig.add_subplot(1, 2, 1, aspect="equal")
    ax_test = fig.add_subplot(1, 2, 2, aspect="equal")
    true_model.weights.plot(A, W, ax=ax_true)

    test_model = \
        FactorizedNetworkDistribution(N,
            CompleteAdjacencyDistribution, {},
            LatentDistanceGaussianWeightDistribution, dict(dim=D, b=-12.))

    test_model.weights.plot(A, W, ax=ax_test, L_true=true_model.weights.L)
    plt.pause(0.001)

    # Fit with Gibbs sampling
    N_samples = 1000
    lps = [test_model.log_probability((A, W))]
    for smpl in range(N_samples):
        print("Iteration", smpl)
        test_model.resample((A, W))
        lps.append(test_model.log_probability((A, W)))
        print("LP:", lps[-1])
        print()

        # Update the test plot (smpl % 1 is always 0, so this redraws every
        # iteration; increase the modulus to redraw less often)
        if smpl % 1 == 0:
            ax_test.cla()
            test_model.weights.plot(A,
                                    W,
                                    ax=ax_test,
                                    L_true=true_model.weights.L)
            plt.pause(0.001)

    plt.ioff()
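
    # Sketch (not in the original snippet): plot the collected log-probability
    # trace to check that the sampler is mixing.
    plt.figure()
    plt.plot(lps)
    plt.xlabel("Iteration")
    plt.ylabel("Log probability")
    plt.show()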