# Imports assumed by the examples below. The pyhawkes and pybasicbayes module
# paths are assumptions based on those packages' layouts and may need adjusting;
# helper functions such as initialize_plots, update_plots, and analyze_samples
# are defined elsewhere in the original example scripts.
import os
import gzip
import pickle

import numpy as np
import matplotlib.pyplot as plt
from scipy.stats import gamma, beta, betaprime

from pyhawkes.models import (DiscreteTimeNetworkHawkesModelSpikeAndSlab,
                             DiscreteTimeStandardHawkesModel)
from pyhawkes.internals.network import (ErdosRenyiFixedSparsity,
                                        StochasticBlockModel)
from pybasicbayes.util.text import progprint_xrange


def demo(K=3, T=1000, dt_max=20, p=0.25):
    """

    :param K:       Number of nodes
    :param T:       Number of time bins to simulate
    :param dt_max:  Number of future time bins an event can influence
    :param p:       Sparsity of network
    :return:
    """
    ###########################################################
    # Generate synthetic data
    ###########################################################
    network_hypers = {"p": p, "allow_self_connections": False}
    true_model = DiscreteTimeNetworkHawkesModelSpikeAndSlab(
        K=K, dt_max=dt_max,
        network_hypers=network_hypers)
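    # Make sure the weights sampled from the prior define a stable
    # (non-explosive) process before simulating from it.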
    assert true_model.check_stability()

    # Sample from the true model
    S,R = true_model.generate(T=T, keep=True, print_interval=50)

    plt.ion()
    true_figure, _ = true_model.plot(color="#377eb8", T_slice=(0,100))

    ###########################################################
    # Create a test spike and slab model
    ###########################################################
    test_model = DiscreteTimeNetworkHawkesModelSpikeAndSlab(
        K=K, dt_max=dt_max,
        network_hypers=network_hypers)

    test_model.add_data(S)

    # Initialize plots
    test_figure, test_handles = test_model.plot(color="#e41a1c", T_slice=(0,100))

    ###########################################################
    # Fit the test model with Gibbs sampling
    ###########################################################
    N_samples = 100
    samples = []
    lps = []
    for itr in range(N_samples):
        print("Gibbs iteration ", itr)
        test_model.resample_model()
        lps.append(test_model.log_probability())
        samples.append(test_model.copy_sample())

        # Update plots
        test_model.plot(handles=test_handles)

    ###########################################################
    # Analyze the samples
    ###########################################################
    analyze_samples(true_model, samples, lps)
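
analyze_samples is not defined in this listing; it lives elsewhere in the original example script. A minimal sketch of what such a helper might do, assuming it only reports the posterior mean network and plots the log-probability trace (the signature matches the call above; the body is illustrative, not the original):

def analyze_samples(true_model, samples, lps):
    # Treat the second half of the chain as rough post-burn-in samples.
    offset = len(samples) // 2
    A_mean = np.array([s.weight_model.A for s in samples[offset:]]).mean(axis=0)
    W_mean = np.array([s.weight_model.W for s in samples[offset:]]).mean(axis=0)

    print("True adjacency:\n", true_model.weight_model.A)
    print("Posterior mean adjacency:\n", A_mean)
    print("Posterior mean weights:\n", W_mean)

    # Trace of the joint log probability over the Gibbs iterations.
    plt.figure()
    plt.plot(lps)
    plt.xlabel("Iteration")
    plt.ylabel("Log probability")
    plt.show()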
Example #2
def demo(K=3, T=1000, dt_max=20, p=0.25):
    """

    :param K:       Number of nodes
    :param T:       Number of time bins to simulate
    :param dt_max:  Number of future time bins an event can influence
    :param p:       Sparsity of network
    :return:
    """
    ###########################################################
    # Generate synthetic data
    ###########################################################
    network_hypers = {"p": p, "allow_self_connections": False}
    true_model = DiscreteTimeNetworkHawkesModelSpikeAndSlab(
        K=K, dt_max=dt_max,
        network_hypers=network_hypers)
    assert true_model.check_stability()

    # Sample from the true model
    S,R = true_model.generate(T=T, keep=True, print_interval=50)

    plt.ion()
    true_figure, _ = true_model.plot(color="#377eb8", T_slice=(0,100))

    ###########################################################
    # Create a test spike and slab model
    ###########################################################
    test_model = DiscreteTimeNetworkHawkesModelSpikeAndSlab(
        K=K, dt_max=dt_max,
        network_hypers=network_hypers)

    test_model.add_data(S)

    # Initialize plots
    test_figure, test_handles = test_model.plot(color="#e41a1c", T_slice=(0,100))

    ###########################################################
    # Fit the test model with Gibbs sampling
    ###########################################################
    N_samples = 100
    samples = []
    lps = []
    for itr in range(N_samples):
        print("Gibbs iteration ", itr)
        test_model.resample_model()
        lps.append(test_model.log_probability())
        samples.append(test_model.copy_sample())

        # Update plots
        test_model.plot(handles=test_handles)

    ###########################################################
    # Analyze the samples
    ###########################################################
    analyze_samples(true_model, samples, lps)
Example #3
def demo(K=3, T=1000, dt_max=20, p=0.25):
    """

    :param K:       Number of nodes
    :param T:       Number of time bins to simulate
    :param dt_max:  Number of future time bins an event can influence
    :param p:       Sparsity of network
    :return:
    """
    ###########################################################
    # Generate synthetic data
    ###########################################################
    network = ErdosRenyiFixedSparsity(K, p, v=1., allow_self_connections=False)
    bkgd_hypers = {"alpha": 1.0, "beta": 20.0}
    true_model = DiscreteTimeNetworkHawkesModelSpikeAndSlab(
        K=K, dt_max=dt_max, bkgd_hypers=bkgd_hypers, network=network)
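    # Hard-code the ground-truth network: node 0 excites nodes 1 and 2 with
    # unit weight, and node 0's background rate is set to 0.2.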
    A_true = np.zeros((K, K))
    A_true[0, 1] = A_true[0, 2] = 1
    W_true = np.zeros((K, K))
    W_true[0, 1] = W_true[0, 2] = 1.0
    true_model.weight_model.A = A_true
    true_model.weight_model.W = W_true
    true_model.bias_model.lambda0[0] = 0.2
    assert true_model.check_stability()

    # Sample from the true model
    S, R = true_model.generate(T=T, keep=True, print_interval=50)

    plt.ion()
    true_figure, _ = true_model.plot(color="#377eb8", T_slice=(0, 100))

    # Save the true figure
    true_figure.savefig("gifs/true.gif")

    ###########################################################
    # Create a test spike and slab model
    ###########################################################
    test_model = DiscreteTimeNetworkHawkesModelSpikeAndSlab(K=K,
                                                            dt_max=dt_max,
                                                            network=network)

    test_model.add_data(S)

    # Initialize plots
    test_figure, test_handles = test_model.plot(color="#e41a1c",
                                                T_slice=(0, 100))
    test_figure.savefig("gifs/test0.gif")

    ###########################################################
    # Fit the test model with Gibbs sampling
    ###########################################################
    N_samples = 100
    samples = []
    lps = []
    for itr in range(N_samples):
        print("Gibbs iteration ", itr)
        test_model.resample_model()
        lps.append(test_model.log_probability())
        samples.append(test_model.copy_sample())

        # Update plots
        test_model.plot(handles=test_handles)
        test_figure.savefig("gifs/test%d.gif" % (itr + 1))
Example #4
def demo(seed=None):
    """
    Create a discrete time Hawkes model and generate from it.

    :return:
    """
    if seed is None:
        seed = np.random.randint(2**32)

    print("Setting seed to ", seed)
    np.random.seed(seed)

    ###########################################################
    # Load some example data.
    # See data/synthetic/generate.py to create more.
    ###########################################################
    data_path = os.path.join("data", "synthetic",
                             "synthetic_K20_C4_T10000.pkl.gz")
    with gzip.open(data_path, 'r') as f:
        S, true_model = pickle.load(f)

    T = S.shape[0]
    K = true_model.K
    B = true_model.B
    dt = true_model.dt
    dt_max = true_model.dt_max

    ###########################################################
    # Initialize with MAP estimation on a standard Hawkes model
    ###########################################################
    init_with_map = True
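    # MAP-fit a standard (non-network) Hawkes model first; its parameters
    # warm-start the spike-and-slab model's Gibbs sampler below.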
    if init_with_map:
        init_len = T
        print("Initializing with BFGS on first ", init_len, " time bins.")
        init_model = DiscreteTimeStandardHawkesModel(K=K,
                                                     dt=dt,
                                                     dt_max=dt_max,
                                                     B=B,
                                                     alpha=1.0,
                                                     beta=1.0)
        init_model.add_data(S[:init_len, :])

        init_model.initialize_to_background_rate()
        init_model.fit_with_bfgs()
    else:
        init_model = None

    ###########################################################
    # Create a test spike and slab model
    ###########################################################

    # Copy the network hypers.
    # Give the test model p, but not c, v, or m
    network_hypers = true_model.network_hypers.copy()
    network_hypers['c'] = None
    network_hypers['v'] = None
    network_hypers['m'] = None
    test_network = StochasticBlockModel(K=K, **network_hypers)
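    # With c, v, and m set to None, the SBM infers the block assignments,
    # per-block weight scales, and block membership probabilities during sampling.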
    test_model = DiscreteTimeNetworkHawkesModelSpikeAndSlab(
        K=K,
        dt=dt,
        dt_max=dt_max,
        B=B,
        basis_hypers=true_model.basis_hypers,
        bkgd_hypers=true_model.bkgd_hypers,
        impulse_hypers=true_model.impulse_hypers,
        weight_hypers=true_model.weight_hypers,
        network=test_network)
    test_model.add_data(S)
    # F_test = test_model.basis.convolve_with_basis(S_test)

    # Initialize with the standard model parameters
    if init_model is not None:
        test_model.initialize_with_standard_model(init_model)

    # Initialize plots
    ln, im_net, im_clus = initialize_plots(true_model, test_model, S)

    ###########################################################
    # Fit the test model with Gibbs sampling
    ###########################################################
    N_samples = 50
    samples = []
    lps = []
    # plls = []
    for itr in range(N_samples):
        lps.append(test_model.log_probability())
        # plls.append(test_model.heldout_log_likelihood(S_test, F=F_test))
        samples.append(test_model.copy_sample())

        print("")
        print("Gibbs iteration ", itr)
        print("LP: ", lps[-1])

        test_model.resample_model()

        # Update plot
        if itr % 1 == 0:
            update_plots(itr, test_model, S, ln, im_clus, im_net)

    ###########################################################
    # Analyze the samples
    ###########################################################
    analyze_samples(true_model, init_model, samples, lps)
Example #5
def geweke_test():
    """
    Create a discrete time Hawkes model and generate from it.

    :return:
    """
    T = 50
    dt = 1.0
    dt_max = 3.0
    network_hypers = {
        'C': 1,
        'p': 0.5,
        'kappa': 3.0,
        'alpha': 3.0,
        'beta': 1.0 / 20.0
    }
    model = DiscreteTimeNetworkHawkesModelSpikeAndSlab(
        K=1, dt=dt, dt_max=dt_max, network_hypers=network_hypers)
    model.generate(T=T)

    # Gibbs sample and then generate new data
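    # Geweke validation: alternately resample the parameters given the data
    # (Gibbs) and the data given the parameters (forward simulation). If the
    # sampler is correct, the resulting parameter samples are draws from the
    # prior, so the histograms below should match the prior densities.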
    N_samples = 10000
    samples = []
    lps = []
    for itr in range(N_samples):
        if itr % 10 == 0:
            print("Iteration: ", itr)
        # Resample the model
        model.resample_model()
        samples.append(model.copy_sample())
        lps.append(model.log_probability())

        # Geweke step
        model.data_list.pop()
        model.generate(T=T)

    # Compute sample statistics (offset controls how many initial samples to discard)
    A_samples = np.array([s.weight_model.A for s in samples])
    W_samples = np.array([s.weight_model.W for s in samples])
    g_samples = np.array([s.impulse_model.g for s in samples])
    lambda0_samples = np.array([s.bias_model.lambda0 for s in samples])
    c_samples = np.array([s.network.c for s in samples])
    p_samples = np.array([s.network.p for s in samples])
    v_samples = np.array([s.network.v for s in samples])
    lps = np.array(lps)

    offset = 0
    A_mean = A_samples[offset:, ...].mean(axis=0)
    W_mean = W_samples[offset:, ...].mean(axis=0)
    g_mean = g_samples[offset:, ...].mean(axis=0)
    lambda0_mean = lambda0_samples[offset:, ...].mean(axis=0)

    print "A mean:        ", A_mean
    print "W mean:        ", W_mean
    print "g mean:        ", g_mean
    print "lambda0 mean:  ", lambda0_mean

    # Plot the log probability over iterations
    plt.figure()
    plt.plot(np.arange(N_samples), lps)
    plt.xlabel("Iteration")
    plt.ylabel("Log probability")

    # Plot the histogram of bias samples
    plt.figure()
    p_lmbda0 = gamma(model.bias_model.alpha, scale=1. / model.bias_model.beta)
    _, bins, _ = plt.hist(lambda0_samples[:, 0],
                          bins=20,
                          alpha=0.5,
                          density=True)
    bincenters = 0.5 * (bins[1:] + bins[:-1])
    plt.plot(bincenters, p_lmbda0.pdf(bincenters), 'r--', linewidth=1)
    plt.xlabel('lam0')
    plt.ylabel('p(lam0)')

    print "Expected p(A):  ", model.network.P
    print "Empirical p(A): ", A_samples.mean(axis=0)

    # Plot the histogram of weight samples
    plt.figure()
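    # Compare only the weight samples for which the connection is present (A = 1).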
    Aeq1 = A_samples[:, 0, 0] == 1
    # p_W1 = gamma(model.network.kappa, scale=1./model.network.v[0,0])

    # The marginal distribution of W under a gamma prior on the scale
    # is a beta prime distribution
    p_W1 = betaprime(model.network.kappa,
                     model.network.alpha,
                     scale=model.network.beta)

    _, bins, _ = plt.hist(W_samples[Aeq1, 0, 0],
                          bins=20,
                          alpha=0.5,
                          density=True)
    bincenters = 0.5 * (bins[1:] + bins[:-1])
    plt.plot(bincenters, p_W1.pdf(bincenters), 'r--', linewidth=1)
    plt.xlabel('W')
    plt.ylabel('p(W | A=1)')

    # Plot the histogram of impulse samples
    plt.figure()
    for b in range(model.B):
        plt.subplot(1, model.B, b + 1)
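        # Under the prior, each impulse coefficient g_b has marginal
        # Beta(gamma_b, sum(gamma) - gamma_b) (the Dirichlet marginal).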
        alpha_b = model.impulse_model.gamma[b]
        beta_b = model.impulse_model.gamma.sum() - alpha_b
        p_beta11b = beta(alpha_b, beta_b)

        _, bins, _ = plt.hist(g_samples[:, 0, 0, b],
                              bins=20,
                              alpha=0.5,
                              density=True)
        bincenters = 0.5 * (bins[1:] + bins[:-1])
        plt.plot(bincenters, p_beta11b.pdf(bincenters), 'r--', linewidth=1)
        plt.xlabel('g_%d' % b)
        plt.ylabel('p(g_%d)' % b)

    # Plot the histogram of weight scale
    plt.figure()
    for c1 in range(model.C):
        for c2 in range(model.C):
            plt.subplot(model.C, model.C, 1 + c1 * model.C + c2)
            p_v = gamma(model.network.alpha, scale=1. / model.network.beta)

            _, bins, _ = plt.hist(v_samples[:, c1, c2],
                                  bins=20,
                                  alpha=0.5,
                                  density=True)
            bincenters = 0.5 * (bins[1:] + bins[:-1])
            plt.plot(bincenters, p_v.pdf(bincenters), 'r--', linewidth=1)
            plt.xlabel('v_{%d,%d}' % (c1, c2))
            plt.ylabel('p(v)')

    plt.show()
Example #6
    # network_hypers = {'C': 1, 'p': 0.5, 'kappa': 3.0, 'alpha': 3.0, 'beta': 1.0/20.0}
    network_hypers = {'c': np.zeros(K, dtype=int), 'p': 0.5, 'kappa': 10.0, 'v': 10*3.0}
    bkgd_hypers = {"alpha": 1., "beta": 10.}
    model = DiscreteTimeNetworkHawkesModelSpikeAndSlab(K=K, dt=dt, dt_max=dt_max,
                                                       weight_hypers={"parallel_resampling": False},
                                                       network_hypers=network_hypers)
    model.generate(T=T)

    # Gibbs sample and then generate new data
    N_samples = 10000
    samples = []
    lps = []
    for itr in progprint_xrange(N_samples, perline=50):
        # Resample the model
        model.resample_model()
        samples.append(model.copy_sample())
        lps.append(model.log_likelihood())

        # Geweke step
        model.data_list.pop()
        model.generate(T=T)


    # Compute sample statistics for second half of samples
    A_samples       = np.array([s.weight_model.A     for s in samples])
    W_samples       = np.array([s.weight_model.W     for s in samples])
    g_samples       = np.array([s.impulse_model.g    for s in samples])
    lambda0_samples = np.array([s.bias_model.lambda0 for s in samples])
    lps             = np.array(lps)

Example #7
def demo(seed=None):
    """
    Create a discrete time Hawkes model and generate from it.

    :return:
    """
    if seed is None:
        seed = np.random.randint(2**32)

    print "Setting seed to ", seed
    np.random.seed(seed)

    ###########################################################
    # Load some example data.
    # See data/synthetic/generate.py to create more.
    ###########################################################
    data_path = os.path.join("data", "synthetic", "synthetic_K20_C4_T10000.pkl.gz")
    with gzip.open(data_path, 'r') as f:
        S, true_model = pickle.load(f)

    T      = S.shape[0]
    K      = true_model.K
    B      = true_model.B
    dt     = true_model.dt
    dt_max = true_model.dt_max

    ###########################################################
    # Initialize with MAP estimation on a standard Hawkes model
    ###########################################################
    init_with_map = True
    if init_with_map:
        init_len   = T
        print "Initializing with BFGS on first ", init_len, " time bins."
        init_model = DiscreteTimeStandardHawkesModel(K=K, dt=dt, dt_max=dt_max, B=B,
                                                     alpha=1.0, beta=1.0)
        init_model.add_data(S[:init_len, :])

        init_model.initialize_to_background_rate()
        init_model.fit_with_bfgs()
    else:
        init_model = None

    ###########################################################
    # Create a test spike and slab model
    ###########################################################

    # Copy the network hypers.
    # Give the test model p, but not c, v, or m
    network_hypers = true_model.network_hypers.copy()
    network_hypers['c'] = None
    network_hypers['v'] = None
    network_hypers['m'] = None
    test_network = StochasticBlockModel(K=K, **network_hypers)
    test_model = DiscreteTimeNetworkHawkesModelSpikeAndSlab(K=K, dt=dt, dt_max=dt_max, B=B,
                                                            basis_hypers=true_model.basis_hypers,
                                                            bkgd_hypers=true_model.bkgd_hypers,
                                                            impulse_hypers=true_model.impulse_hypers,
                                                            weight_hypers=true_model.weight_hypers,
                                                            network=test_network)
    test_model.add_data(S)
    # F_test = test_model.basis.convolve_with_basis(S_test)

    # Initialize with the standard model parameters
    if init_model is not None:
        test_model.initialize_with_standard_model(init_model)

    # Initialize plots
    ln, im_net, im_clus = initialize_plots(true_model, test_model, S)

    ###########################################################
    # Fit the test model with Gibbs sampling
    ###########################################################
    N_samples = 50
    samples = []
    lps = []
    # plls = []
    for itr in range(N_samples):
        lps.append(test_model.log_probability())
        # plls.append(test_model.heldout_log_likelihood(S_test, F=F_test))
        samples.append(test_model.copy_sample())

        print ""
        print "Gibbs iteration ", itr
        print "LP: ", lps[-1]

        test_model.resample_model()

        # Update plot
        if itr % 1 == 0:
            update_plots(itr, test_model, S, ln, im_clus, im_net)

    ###########################################################
    # Analyze the samples
    ###########################################################
    analyze_samples(true_model, init_model, samples, lps)
Example #8
def demo(K=3, T=1000, dt_max=20, p=0.25):
    """

    :param K:       Number of nodes
    :param T:       Number of time bins to simulate
    :param dt_max:  Number of future time bins an event can influence
    :param p:       Sparsity of network
    :return:
    """
    ###########################################################
    # Generate synthetic data
    ###########################################################
    network = ErdosRenyiFixedSparsity(K, p, v=1., allow_self_connections=False)
    bkgd_hypers = {"alpha": 1.0, "beta": 20.0}
    true_model = DiscreteTimeNetworkHawkesModelSpikeAndSlab(
        K=K, dt_max=dt_max, bkgd_hypers=bkgd_hypers, network=network)
    A_true = np.zeros((K,K))
    A_true[0,1] = A_true[0,2] = 1
    W_true = np.zeros((K,K))
    W_true[0,1] = W_true[0,2] = 1.0
    true_model.weight_model.A = A_true
    true_model.weight_model.W = W_true
    true_model.bias_model.lambda0[0] = 0.2
    assert true_model.check_stability()

    # Sample from the true model
    S,R = true_model.generate(T=T, keep=True, print_interval=50)

    plt.ion()
    true_figure, _ = true_model.plot(color="#377eb8", T_slice=(0,100))

    # Save the true figure
    true_figure.savefig("gifs/true.gif")

    ###########################################################
    # Create a test spike and slab model
    ###########################################################
    test_model = DiscreteTimeNetworkHawkesModelSpikeAndSlab(
        K=K, dt_max=dt_max, network=network)

    test_model.add_data(S)

    # Initialize plots
    test_figure, test_handles = test_model.plot(color="#e41a1c", T_slice=(0,100))
    test_figure.savefig("gifs/test0.gif")

    ###########################################################
    # Fit the test model with Gibbs sampling
    ###########################################################
    N_samples = 100
    samples = []
    lps = []
    for itr in range(N_samples):
        print("Gibbs iteration ", itr)
        test_model.resample_model()
        lps.append(test_model.log_probability())
        samples.append(test_model.copy_sample())

        # Update plots
        test_model.plot(handles=test_handles)
        test_figure.savefig("gifs/test%d.gif" % (itr+1))
Example #9
    model = DiscreteTimeNetworkHawkesModelSpikeAndSlab(
        K=K,
        dt=dt,
        dt_max=dt_max,
        weight_hypers={"parallel_resampling": False},
        network_hypers=network_hypers)
    model.generate(T=T)

    # Gibbs sample and then generate new data
    N_samples = 10000
    samples = []
    lps = []
    for itr in progprint_xrange(N_samples, perline=50):
        # Resample the model
        model.resample_model()
        samples.append(model.copy_sample())
        lps.append(model.log_likelihood())

        # Geweke step
        model.data_list.pop()
        model.generate(T=T)

    # Compute sample statistics (offset controls how many initial samples to discard)
    A_samples = np.array([s.weight_model.A for s in samples])
    W_samples = np.array([s.weight_model.W for s in samples])
    g_samples = np.array([s.impulse_model.g for s in samples])
    lambda0_samples = np.array([s.bias_model.lambda0 for s in samples])
    lps = np.array(lps)

    offset = 0
    A_mean = A_samples[offset:, ...].mean(axis=0)