Example #1
def run(p, conf=.95, dist=160, ilen=3, c=None, filename="template"):
    path = p
    confidence = conf
    distance = dist
    interaction_len = ilen
    cpus = c

    # build the list of file packages
    files = get_files(path)

    # how many CPUs: default to three quarters of the available cores
    if cpus is None:
        max_num_cpu = multiprocessing.cpu_count()
        cpus = max_num_cpu - (max_num_cpu // 4)

    pool = multiprocessing.Pool(cpus)

    tasks = []

    for e, f in enumerate(files):
        tasks.append((e, f, confidence, distance, interaction_len))

    results = [pool.apply_async(generate_network, t) for t in tasks]

    # assemble the results
    edges = []

    for result in results:
        edges.append(result.get())

    G = prep.create_graph2(pd.concat(edges))
    nx.write_graphml(G, "{}_{}conf_{}dist_{}ilen.graphml".format(
        filename, confidence, distance, interaction_len))
    print(nx.info(G))
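
A minimal sketch of how this helper might be invoked; the data path and the generate_network worker it dispatches to are assumptions, not shown in this example:

# hypothetical call; "/path/to/frame/data" is a placeholder, and a worker
# generate_network(index, file, confidence, distance, interaction_len)
# returning a pandas DataFrame of edges must exist in the same module
run("/path/to/frame/data", conf=.9, dist=150, ilen=6, c=4, filename="hive")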
Example #2
def run(path,
        start_ts,
        network_size,
        confidence=.95,
        distance=160,
        interaction_len=3,
        numCPUs=None,
        filename="template",
        year=2016,
        gap=2):

    pool = multiprocessing.Pool(numCPUs)

    # slice length in seconds (five minutes per slice), so slices can be processed in parallel
    slice_len = 5 * 60  # TODO make parameter

    # network_size is given in minutes; convert to seconds
    size = network_size * 60

    begin_ts = start_ts

    parts = int(size / slice_len)

    print("#Parts: {}".format(parts))

    tasks = []

    for i in range(parts):
        b = begin_ts + (i * slice_len)
        # end a fraction of a second early so slices do not overlap
        e = b + slice_len - 0.000001
        tasks.append((i, path, b, e, confidence, distance, interaction_len,
                      year, gap))

    results = [pool.apply_async(generate_network, t) for t in tasks]

    edges = []

    for result in results:
        res = result.get()

        if res.empty:
            print("Not Appended.")
        else:
            edges.append(res)
            print("Appended Result.")

    G = prep.create_graph2(pd.concat(edges))

    nx.write_graphml(
        G, "{}_{}conf_{}dist_{}ilen_{}gap_{}minutes_{}.graphml".format(
            filename, int(confidence * 100), distance,
            interaction_len, gap, network_size,
            datetime.datetime.fromtimestamp(start_ts, tz=pytz.UTC)))

    print(nx.info(G))
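
A worked example of the slice arithmetic above, with a hypothetical start timestamp: a 60-minute network cut into 5-minute slices yields 12 (begin, end) pairs that tile the hour without overlap.

start_ts = 1451606400                   # hypothetical epoch timestamp
slice_len = 5 * 60
parts = (60 * 60) // slice_len          # 12 slices for a 60-minute network
bounds = [(start_ts + i * slice_len,
           start_ts + (i + 1) * slice_len - 0.000001)
          for i in range(parts)]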
Example #3
def generate_networks(index,
                      file_list,
                      confidence=.95,
                      distance=160,
                      ilen=3,
                      window_size=256):
    print("process {} - start".format(index))

    xmax = 3000

    # list of networks
    network_list = []

    # one df per cam
    dataframes = np.empty(4, dtype=object)

    for i in range(4):
        fc = load_frame_container(file_list[i])
        df = prep.get_dataframe2(fc)
        df = prep.calcIds(df, confidence)

        camIdx = int(file_list[i].split("/")[-1].split("_")[1])
        dataframes[camIdx] = df

    # shift cam 0 and cam 1 to the right
    dataframes[0].xpos = dataframes[0].xpos + xmax
    dataframes[1].xpos = dataframes[1].xpos + xmax

    # join the two sides
    side0 = pd.concat([dataframes[3], dataframes[0]])
    side1 = pd.concat([dataframes[2], dataframes[1]])

    close1 = prep.get_close_bees_ckd(side0, distance)
    close2 = prep.get_close_bees_ckd(side1, distance)

    close = pd.concat([close1, close2])

    p = prep.bee_pairs_to_timeseries(close)

    for w in range(1024 // window_size):
        part = p.iloc[:, window_size * w:window_size * (w + 1)]
        edges = prep.extract_interactions(part, ilen)
        g = prep.create_graph2(edges)
        network_list.append(((index * 1024) + (w * window_size), g))

    print("process {} - end - {}".format(index, len(network_list)))
    return network_list
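
A worked example of the windowing above: with the default window_size of 256, the 1024-column pair timeseries splits into four windows, and each network is keyed by a global column offset derived from the process index (the index value below is hypothetical).

window_size = 256
index = 2                                    # hypothetical process index
offsets = [(index * 1024) + (w * window_size)
           for w in range(1024 // window_size)]
# offsets == [2048, 2304, 2560, 2816]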
Example #4
def run(p, conf=.95, c=None, filename="template", camid=0):
    path = p
    confidence = conf
    cpus = c

    # build the list of file packages
    files = get_files(path, camid)

    # how many CPUs: default to three quarters of the available cores
    if cpus is None:
        max_num_cpu = multiprocessing.cpu_count()
        cpus = max_num_cpu - (max_num_cpu // 4)

    pool = multiprocessing.Pool(cpus)

    ilens = [3, 6, 9, 12, 15, 18, 21]
    distances = [100, 110, 120, 130, 140, 150, 160]
    combinations = list(itertools.product(ilens, distances))

    tasks = []

    for e, f in enumerate(files):
        tasks.append((e, f, confidence, ilens, distances))

    results = [pool.apply_async(generate_network, t) for t in tasks]

    # assemble the results
    edges = {item: [] for item in combinations}

    for result in results:
        # each result is a list of many (ilen, distance, edges) tuples
        reslist = result.get()

        for i, d, r in reslist:
            edges[(i, d)].append(r)

    for i, d in edges:
        G = prep.create_graph2(pd.concat(edges[(i, d)]))
        nx.write_graphml(
            G, "{}_{}conf_{}dist_{}ilen_cam{}.graphml".format(
                filename, str(confidence), str(d), str(i), str(camid)))

        print(nx.info(G))
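
For scale: the sweep above crosses seven interaction lengths with seven distances, so one run writes 49 .graphml files, one per (ilen, distance) pair.

import itertools
ilens = [3, 6, 9, 12, 15, 18, 21]
distances = [100, 110, 120, 130, 140, 150, 160]
print(len(list(itertools.product(ilens, distances))))  # 49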
Example #5
    # excerpt from inside a loop over file packages: file_list, dataframes,
    # xmax, interactions and the CONFIDENCE / DISTANCE / LENGTH constants
    # come from the enclosing (elided) scope
    for i in range(4):
        fc = load_frame_container(file_list[i])
        df = prep.get_dataframe(fc)
        df = prep.calcIds(df, CONFIDENCE)

        camIdx = int(file_list[i].split("/")[-1].split("_")[1])
        dataframes[camIdx] = df

    # shift cam 0 and cam 1 to the right
    dataframes[0].xpos = dataframes[0].xpos + xmax
    dataframes[1].xpos = dataframes[1].xpos + xmax

    # join the two sides
    side0 = pd.concat([dataframes[3], dataframes[0]])
    side1 = pd.concat([dataframes[2], dataframes[1]])

    close1 = prep.get_close_bees(side0, DISTANCE)
    close2 = prep.get_close_bees(side1, DISTANCE)

    close = pd.concat([close1, close2])

    p = prep.bee_pairs_to_timeseries(close)

    edges = prep.extract_interactions(p, LENGTH)

    interactions = pd.concat([interactions, edges])

G = prep.create_graph2(interactions)
print(nx.info(G))

nx.write_graphml(G, filename + ".graphml")
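
A hedged reconstruction of the scaffolding this excerpt assumes; file_packages and the constant values are hypothetical stand-ins for the elided context:

CONFIDENCE, DISTANCE, LENGTH = .95, 160, 3   # hypothetical values
xmax = 3000                                  # as in Example #3
interactions = pd.DataFrame()
dataframes = np.empty(4, dtype=object)
# for file_list in file_packages:            # hypothetical iterable of
#     ...loop body shown above...            # 4-file groups, one per cam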
Example #6
fc0 = prep.get_fc(path, 0)
fc1 = prep.get_fc(path, 1)
fc2 = prep.get_fc(path, 2)
fc3 = prep.get_fc(path, 3)

df0 = prep.get_dataframe(fc0)
df0 = prep.calcIds(df0, CONFIDENCE)
df1 = prep.get_dataframe(fc1)
df1 = prep.calcIds(df1, CONFIDENCE)

df2 = prep.get_dataframe(fc2)
df2 = prep.calcIds(df2, CONFIDENCE)
df3 = prep.get_dataframe(fc3)
df3 = prep.calcIds(df3, CONFIDENCE)

# shift cam 0 and cam 1 to the right
df0.xpos = df0.xpos + xmax
df1.xpos = df1.xpos + xmax

side0 = pd.concat([df3, df0])
side1 = pd.concat([df2, df1])

close1 = prep.get_close_bees(side0, DISTANCE)
close2 = prep.get_close_bees(side1, DISTANCE)

close = pd.concat([close1, close2])

p = prep.bee_pairs_to_timeseries(close)

i = prep.extract_interactions(p, LENGTH)

G = prep.create_graph2(i)

nx.write_graphml(G, filename + ".graphml")
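
The xpos shift in the last two examples stitches the four camera views into two hive sides: cams 2 and 3 keep their coordinates, while cams 0 and 1 are moved right by xmax. With xmax = 3000 (the value used in Example #3), a detection at x = 120 on cam 0 lands at x = 3120 on the stitched side:

xmax = 3000           # canvas width of one camera view, as in Example #3
xpos_cam0 = 120       # hypothetical detection position on cam 0
stitched_x = xpos_cam0 + xmax   # 3120 on the combined side0 canvas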