Example #1
File: pipeline.py Project: aschle/bees-sna
def generate_network(index, file_list, confidence, distance, ilen):

    xmax = 3000

    # one df per cam
    dataframes = np.empty(4, dtype=object)

    for i in range(4):
        fc = load_frame_container(file_list[i])
        df = prep.get_dataframe2(fc)
        df = prep.calcIds(df, confidence)

        camIdx = int(file_list[i].split("/")[-1].split("_")[1])
        dataframes[camIdx] = df

    # shift cam 0 and cam 1 to the right
    dataframes[0].xpos = dataframes[0].xpos + xmax
    dataframes[1].xpos = dataframes[1].xpos + xmax

    # merge the sides
    side0 = pd.concat([dataframes[3], dataframes[0]])
    side1 = pd.concat([dataframes[2], dataframes[1]])

    close1 = prep.get_close_bees_ckd(side0, distance)
    close2 = prep.get_close_bees_ckd(side1, distance)

    close = pd.concat([close1, close2])

    p = prep.bee_pairs_to_timeseries(close)

    return prep.extract_interactions(p, ilen)
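A minimal invocation sketch, assuming four per-camera files whose names follow the Cam_<idx> pattern that the function parses out of the path; the file paths are hypothetical, and the parameter values mirror the defaults in Example #2:

files = [
    "data/Cam_0_2015-08-21.fc",
    "data/Cam_1_2015-08-21.fc",
    "data/Cam_2_2015-08-21.fc",
    "data/Cam_3_2015-08-21.fc",
]
edges = generate_network(0, files, confidence=0.95, distance=160, ilen=3)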
Example #2
def generate_networks(index,
                      file_list,
                      confidence=.95,
                      distance=160,
                      ilen=3,
                      window_size=256):
    print("process {} - start".format(index))

    xmax = 3000

    # list of networks
    network_list = []

    # one df per cam
    dataframes = np.empty(4, dtype=object)

    for i in range(4):
        fc = load_frame_container(file_list[i])
        df = prep.get_dataframe2(fc)
        df = prep.calcIds(df, confidence)

        camIdx = int(file_list[i].split("/")[-1].split("_")[1])
        dataframes[camIdx] = df

    # shift cam 0 and cam 1 to the right
    dataframes[0].xpos = dataframes[0].xpos + xmax
    dataframes[1].xpos = dataframes[1].xpos + xmax

    # merge the sides
    side0 = pd.concat([dataframes[3], dataframes[0]])
    side1 = pd.concat([dataframes[2], dataframes[1]])

    close1 = prep.get_close_bees_ckd(side0, distance)
    close2 = prep.get_close_bees_ckd(side1, distance)

    close = pd.concat([close1, close2])

    p = prep.bee_pairs_to_timeseries(close)

    for w in range(1024 // window_size):
        # take the w-th block of window_size columns (positional slice)
        part = p.iloc[:, window_size * w:window_size * (w + 1)]
        edges = prep.extract_interactions(part, ilen)
        g = prep.create_graph2(edges)
        network_list.append(((index * 1024) + (w * window_size), g))

    print("process {} - end - {}".format(index, len(network_list)))
    return network_list
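The loop above splits the 1024-column pair time series into 1024 / window_size consecutive windows and tags each resulting graph with a global frame offset. A worked example of that offset arithmetic, using only plain Python:

index, window_size = 2, 256
for w in range(1024 // window_size):
    start = window_size * w
    print((index * 1024) + start, "covers columns", start, "to", start + window_size - 1)
# 2048 covers columns 0 to 255
# 2304 covers columns 256 to 511
# 2560 covers columns 512 to 767
# 2816 covers columns 768 to 1023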
Example #3
def generate_network(index, file, confidence, ilens, distances):

    fc = load_frame_container(file)
    df = prep.get_dataframe2(fc)
    df = prep.calcIds(df, confidence)

    result = []

    # sweep the parameter grid: one proximity pass per distance,
    # one interaction extraction per interaction length
    for distance in distances:
        close = prep.get_close_bees_ckd(df, distance)
        p = prep.bee_pairs_to_timeseries(close)

        for ilen in ilens:
            r = prep.extract_interactions(p, ilen)
            result.append((ilen, distance, r))

    return result
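A hedged sketch of consuming the (ilen, distance, r) tuples this variant returns; the file path and both parameter grids are assumptions, and len() presumes extract_interactions returns a sized collection:

result = generate_network(0, "data/Cam_0_2015-08-21.fc", 0.95,
                          ilens=[3, 6, 12], distances=[120, 160, 200])
for ilen, distance, edges in result:
    print("ilen={}, distance={}: {} interactions".format(ilen, distance, len(edges)))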
Example #4
def generate_network(enu, path, b, e, confidence, distance, ilen, year, gap):

    xmax = 3000
    offset = 2 * distance

    parts = np.empty(4, dtype=object)

    abbrechen = False

    stat = []

    # one df per camera
    for i in range(4):

        df = prep.getDF(path, b, e, i)

        numframes = 0
        if df.shape[0] != 0:
            numframes = df.groupby(by='frame_idx').size().shape[0]

        stat.append(numframes)

        df = prep.calcIds(df, confidence, year)

        # abort if any camera's DF is empty
        if df.shape[0] == 0:
            abbrechen = True

        parts[i] = df

    if abbrechen:
        print("#{}: From {} to {} - {}".format(
            enu, datetime.datetime.fromtimestamp(b, tz=pytz.UTC),
            datetime.datetime.fromtimestamp(e, tz=pytz.UTC), stat))
        return pd.Series()

    if year == 2015:
        # shift cam 0 and cam 1 to the right
        parts[0].xpos = parts[0].xpos + xmax + offset
        parts[1].xpos = parts[1].xpos + xmax + offset

        # merge the sides
        side0 = pd.concat([parts[3], parts[0]])
        side1 = pd.concat([parts[2], parts[1]])

    if year == 2016:
        # shift cam 1 and cam 3 to the right
        parts[1].xpos = parts[1].xpos + xmax + offset
        parts[3].xpos = parts[3].xpos + xmax + offset

        # synchronize the cameras on each side
        parts[0], parts[1] = prep.mapping(parts[0], parts[1])
        parts[2], parts[3] = prep.mapping(parts[2], parts[3])

        d0 = len(parts[0].frame_idx.unique())
        d1 = len(parts[1].frame_idx.unique())
        d2 = len(parts[2].frame_idx.unique())
        d3 = len(parts[3].frame_idx.unique())

        print("#{}: From {} to {} - {} - {} {} {} {}".format(
            enu, datetime.datetime.fromtimestamp(b, tz=pytz.UTC),
            datetime.datetime.fromtimestamp(e, tz=pytz.UTC), stat, d0, d1, d2,
            d3))

        # merge the sides
        side0 = pd.concat([parts[0], parts[1]])
        side1 = pd.concat([parts[2], parts[3]])

    dt = datetime.datetime.fromtimestamp(b, tz=pytz.UTC)
    # discard detections whose IDs were detected only very rarely overall
    side0 = prep.removeDetectionsList(side0, dt.strftime("%Y-%m-%d"))
    side1 = prep.removeDetectionsList(side1, dt.strftime("%Y-%m-%d"))

    close1 = prep.get_close_bees_ckd(side0, distance)
    close2 = prep.get_close_bees_ckd(side1, distance)

    close = pd.concat([close1, close2])

    # build a time series for each pair
    p = prep.bee_pairs_to_timeseries(close)

    # correct the pair time series by filling short gaps
    p_corrected = p.apply(prep.fill_gaps, axis=1, args=[gap])

    return prep.extract_interactions(p_corrected, ilen)
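A hypothetical call covering one hour of 2016 data; the path, timestamps, and parameter values are assumptions rather than values from the repository (b and e are UTC epoch seconds, matching the fromtimestamp(..., tz=pytz.UTC) calls above):

import calendar
import datetime

b = calendar.timegm(datetime.datetime(2016, 8, 1, 12, 0).timetuple())
e = b + 3600  # one hour later
edges = generate_network(0, "/path/to/data", b, e,
                         confidence=0.95, distance=160, ilen=3,
                         year=2016, gap=2)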
Example #5
    for i in range(4):
        fc = load_frame_container(file_list[i])
        df = prep.get_dataframe(fc)
        df = prep.calcIds(df, CONFIDENCE)

        camIdx = int(file_list[i].split("/")[-1].split("_")[1])
        dataframes[camIdx] = df

    # shift cam 0 and cam 1 to the right
    dataframes[0].xpos = dataframes[0].xpos + xmax
    dataframes[1].xpos = dataframes[1].xpos + xmax

    # merge the sides
    side0 = pd.concat([dataframes[3], dataframes[0]])
    side1 = pd.concat([dataframes[2], dataframes[1]])

    close1 = prep.get_close_bees(side0, DISTANCE)
    close2 = prep.get_close_bees(side1, DISTANCE)

    close = pd.concat([close1, close2])

    p = prep.bee_pairs_to_timeseries(close)

    edges = prep.extract_interactions(p, LENGTH)

    interactions = pd.concat([interactions, edges])

G = prep.create_graph2(interactions)
print(nx.info(G))

nx.write_graphml(G, filename + ".graphml")
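The exported file can be loaded back with networkx's standard GraphML reader for later analysis:

G2 = nx.read_graphml(filename + ".graphml")
print(nx.info(G2))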