Пример #1
0
def post_process(name, date, dt_max, burn=500, sample_path=None):
    """Compute posterior point estimates and a stability eigenvalue from MCMC samples.

    Loads posterior samples for a (name, date) pair, takes per-parameter
    medians, and checks the stability of the resulting NetworkPoisson model.

    Args:
        name: Identifier of the series (e.g. ticker) the samples belong to.
        date: Date key of the sample group.
        dt_max: Maximum event interaction window of the model.
        burn: Number of initial samples discarded as burn-in.
        sample_path: Optional path to the HDF5 sample archive; defaults to
            the large-2007 archive for this dt_max (previous hard-coded path).

    Returns:
        A tuple (estimates, eigenvalue) where estimates is
        [name, date, *flattened medians] and eigenvalue is
        [name, date, maxeig], or None when no samples are available.
    """
    N = 12  # number of event types; fixed for this dataset
    model = NetworkPoisson(N, dt_max)
    if sample_path is None:
        sample_path = '/Volumes/datasets/ITCH/samples/large2007_dt_max={}.hdf5'.format(
            dt_max)

    # Import samples
    samples = import_samples(sample_path, name, date, burn)
    if samples is None:
        # Nothing to post-process for this (name, date); make the implicit
        # None return explicit.
        return None

    # Unpack samples
    lambda0, W, mu, tau = samples
    # Compute point estimates: posterior medians over the sample axis.
    lambda0 = np.median(lambda0, axis=1)
    W = np.median(W, axis=2).reshape(N * N)  # row major
    mu = np.median(mu, axis=2).reshape(N * N)  # row major
    tau = np.median(tau, axis=2).reshape(N * N)  # row major
    estimates = [name, date] + list(np.concatenate([lambda0, W, mu, tau]))
    # Check stability: load the medians back into the model as matrices.
    model.lamb = lambda0
    model.W = W.reshape((N, N))
    model.mu = mu.reshape((N, N))
    model.tau = tau.reshape((N, N))
    _, maxeig = model.check_stability(return_value=True)
    eigenvalue = [name, date, maxeig]
    return estimates, eigenvalue
Пример #2
0
def check_stability(lambda0, W, mu, tau, dt_max):
    """Build a NetworkPoisson model from parameter estimates and test stability.

    Returns whatever NetworkPoisson.check_stability(return_value=True)
    returns for the assembled model.
    """
    model = NetworkPoisson(N=W.shape[0], dt_max=dt_max)
    for attr, value in (('lamb', lambda0), ('W', W), ('mu', mu), ('tau', tau)):
        setattr(model, attr, value)
    return model.check_stability(return_value=True)
Пример #3
0
# Sweep sparsification thresholds and compare network-model vs homogeneous
# likelihoods on random event subsamples. NOTE(review): this chunk appears to
# be mid-script (the with-block may continue past what is visible here), and
# several names (name, event_date, M, norm, log, L, model_net, model_hom, df,
# mcmc_date) are defined elsewhere.
read_path = '/Volumes/datasets/ITCH/samples/large2007_dt_max=60.hdf5'
write_path = '/Users/colinswaney/Desktop/threshold_name={}_date={}_M={}_norm={}.txt'.format(
    name, event_date, M, norm)
with h5.File(read_path, 'r') as hdf:
    start = time.time()
    try:
        # Posterior samples for this (name, mcmc_date) pair.
        lambda0, W, mu, tau = import_samples(read_path, name, mcmc_date)
    except:
        # NOTE(review): bare except swallows every error and execution falls
        # through — lambda0/W/mu/tau would be unbound below, raising
        # NameError despite the "skipping" message. Should catch a specific
        # exception and actually skip.
        print('Unable to import samples; skipping')
    for threshold in np.arange(-8, 1, .5):
        # Thresholded (sparsified) point estimates of the parameters.
        lambda0_, W_, mu_, tau_ = get_estimates((lambda0, W, mu, tau),
                                                threshold=threshold,
                                                log=log,
                                                norm=norm)
        events, T = import_events(name, event_date)
        # Load the thresholded estimates into the network model.
        model_net.lamb = lambda0_
        model_net.W = W_
        model_net.mu = mu_
        model_net.tau = tau_
        # Baseline: homogeneous model rate set to the posterior median of
        # 2500 samples fitted on the same events.
        model_hom.lambda0 = np.median(model_hom.sample(events, T, size=2500),
                                      axis=0)
        print(
            'Computing likelihood on random subsamples (threshold={}, M={})...'
            .format(threshold, M))
        start_sub = time.time()
        # L random subsamples of M events each; record per-event log-likelihood
        # advantage of the network model over the homogeneous baseline.
        for i in np.arange(L):
            sample, T = sample_events(events, size=M)
            ll_net, _ = model_net.compute_likelihood(sample, T)
            ll_hom, _ = model_hom.compute_likelihood(sample, T)
            bits = (ll_net - ll_hom) / M  # per-event log-likelihood gap
            df.append([name, threshold, ll_net, ll_hom, bits, T])