Example #1

import tacoma as tc
from tacoma.drawing import draw_edges
from tacoma.analysis import temporal_network_group_analysis
import matplotlib.pyplot as pl

from tacoma.model_conversions import estimate_ZSBB_args
from tacoma.model_conversions import estimate_flockwork_P_args
from tacoma.model_conversions import estimate_dynamic_RGG_args

import time
import numpy as np

# ======== get original network =============
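# load the DTU 1-week contact data and bin it into 300 s (5 min) intervals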
socio = tc.load_json_taco("~/.tacoma/dtu_1_weeks.taco")
socio_binned = tc.bin(socio, dt=300)

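# group-size/duration analysis; the result also carries the weighted aggregated network used below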
socio_result = tc.measure_group_sizes_and_durations(socio)

# ============== generate surrogate network from flockwork_P model ==============
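# estimate Flockwork-P rate parameters in hourly (3600 s) windows and simulate a
# surrogate that preserves neighbor affinity via the aggregated network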
fwP_params = estimate_flockwork_P_args(
    socio_binned, dt=3600., aggregated_network=socio_result.aggregated_network)
fwP = tc.flockwork_P_varying_rates_neighbor_affinity(**fwP_params)

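# alternatively: estimate without the aggregated network and simulate the plain
# varying-rates model (this overwrites the surrogate above)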
fwP_params = estimate_flockwork_P_args(socio_binned, dt=3600.)
fwP = tc.flockwork_P_varying_rates(**fwP_params)

fwP_binned = tc.bin(fwP, dt=300)

N = fwP.N

R0 = 2.0

Example #2

import matplotlib.pyplot as pl

import tacoma as tc
from tacoma.drawing import draw_edges
from tacoma.drawing import get_edge_order

import numpy as np

#tn = tc.load_sociopatterns_hypertext_2009()
tn = tc.load_json_taco('~/.tacoma/dtu_1_weeks.taco')
tn = tc.convert(tn)

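# one trajectory (list of activity intervals) per edge, plus pairwise edge similarities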
edge_traj = tc.get_edge_trajectories(tn, return_edge_similarities=True)

print(edge_traj.edge_similarities[:10])

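# compute an edge ordering from the trajectory similarities for a tidier plot (threshold given in seconds)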
edge_order = get_edge_order(edge_traj, threshold=3600.)

print(edge_order)
print(np.all(edge_order == np.sort(edge_order)))

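# draw edge activity once with the computed ordering and once with the default ordering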
draw_edges(edge_traj.trajectories, edge_order=edge_order)
draw_edges(edge_traj.trajectories)

pl.show()
Example #3

    # ========= start server ============
    thread = threading.Thread(None, server.run)
    thread.start()

    webbrowser.open("http://localhost:" + str(port) + "/?data=" + subdir)

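    # keep the main thread alive; on Ctrl-C, stop the server and wait for its thread to finish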
    try:
        while True:
            time.sleep(2)
    except KeyboardInterrupt:
        # thread.join()
        print('stopping server ...')
        server.stop_this()
        thread.join()

    # time.sleep(1)

    print('changing directory back to', cwd)

    os.chdir(cwd)


if __name__ == "__main__":
    # download_d3()
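    # load the HT09 dataset and launch the visualization, rescaling times from seconds to hours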
    a = tc.load_json_taco("~/.tacoma/ht09.taco")
    visualize(a,
              frame_dt=20,
              titles='HT09',
              time_unit='h',
              time_normalization_factor=1. / 3600.)
Example #4

import tacoma as tc

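# export edge-trajectory coordinates for the interactive visualization, keeping all edges (no duration filter)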
a = tc.load_json_taco("~/.tacoma/ht09.taco")
tc.write_edge_trajectory_coordinates(
    a,
    "~/Sites/tacoma-interactive/ht09_edge_trajectories.json",
    filter_for_duration=0)

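# for the longer DTU recording, filter out edges below a 1200 s (20 min) duration threshold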
a = tc.load_json_taco("~/.tacoma/dtu_1_weeks.taco")
tc.write_edge_trajectory_coordinates(
    a,
    "~/Sites/tacoma-interactive/dtu_1_weeks_edge_trajectories.json",
    filter_for_duration=1200.)
Example #5

        #if group_size == 1:
        #    print('\n',alpha,'\n')
        x_groups.append(x)
        y_groups.append(y)

    xs = [x_k, [], x_contact] + x_groups
    ys = [y_k, grp_sizes, y_contact] + y_groups

    return xs, ys


if __name__ == "__main__":

    import matplotlib.pyplot as pl

    orig = tc.load_json_taco('~/.tacoma/ht09.taco')
    orig_binned = tc.bin(orig, 20.)
    result = tc.measure_group_sizes_and_durations(orig_binned)

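    # logarithmically spaced bins for the group-duration distribution (durations in hours)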
    n_bins = 100

    durations = np.array(result.group_durations[1]) / 3600.

    bins = np.append([0.], np.logspace(log10(durations.min()) - 1, log10(durations.max()), n_bins))

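    # empirical CCDF of the durations, resampled on the logarithmic bins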
    x, y = get_ccdf(durations)
    y_sampled = tc.sample_a_function(x, y, bins)

    print("====== HEAD ======")

    print("original", x[:4], y[:4])

Example #6

import tacoma as tc

orig = tc.convert(tc.load_json_taco('~/.tacoma/ht09.taco'))
#orig = tc.convert( tc.load_json_taco('~/.tacoma/hs13.taco') )
#orig = tc.load_json_taco('~/.tacoma/dtu_1_weeks.taco')
dt_for_inference = 120.
dt_binning = 20.

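# either estimate the mean-degree scaling factor by gradient descent (slow) or fall back to a fixed value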
fetch_k_scaling = True

if fetch_k_scaling:
    k_scaling = tc.estimate_k_scaling_gradient_descent(
        orig,
        dt_for_inference=dt_for_inference,
        dt_for_binning=dt_binning,
        measurements_per_configuration=20,
        learning_rate=0.5,
        relative_error=1e-2,
    )
else:
    k_scaling = 5

from tacoma.model_conversions import estimate_flockwork_P_args
import matplotlib.pyplot as pl
import numpy as np

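# mean degree of the binned original network, plotted in both panels for later comparison with surrogates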
t_orig, k_orig = tc.mean_degree(tc.bin(orig, dt_binning))

fig, ax = pl.subplots(1, 2, sharex=True, sharey=True)
ax[0].plot(t_orig, k_orig, label='original')
ax[1].plot(t_orig, k_orig, label='original')
Example #7

import tacoma as tc
from tacoma.model_conversions import estimate_flockwork_P_args


def get_prepared_network(tn, dt):

    tn_b = tc.bin(tn, dt=dt)  # rebin the network
    tn_b.t = [t / 3600. for t in tn_b.t]  # rescale the network's time
    tn_b.tmax /= 3600.
    tn_b.time_unit = 'h'  # set time unit

    return tn_b


# ============ HT 09 ==============

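# bin the HT09 data to 20 s, rescale times to hours, and export it for the browser visualization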
tn = tc.load_json_taco("~/.tacoma/ht09.taco")
tn_b = get_prepared_network(tn, dt=20)

tc.write_edge_trajectory_coordinates(
    tn_b,
    "~/Sites/tacoma/data/ht09_edge_trajectories.json",
    filter_for_duration=0)
tc.write_json_taco(tn_b, "~/Sites/tacoma/data/ht09_binned.taco")

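# estimate Flockwork-P parameters with a rescaled mean degree, using the aggregated
# network and starting from an empty network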
aggregated_network = tc.measure_group_sizes_and_durations(
    tn).aggregated_network
fw_params = estimate_flockwork_P_args(tn,
                                      dt=120,
                                      k_over_k_real_scaling=2.05,
                                      aggregated_network=aggregated_network,
                                      ensure_empty_network=True,