Beispiel #1
0
def setup(filelist, prestim_frames=400, **kwargs):
    """Collect an imgstore and a pre-stimulus start frame for each video.

    For every directory in filelist, loads the per-frame stats, syncs them
    with the stimulus log, and records the FrameNumber located
    prestim_frames rows before the first stimulus start.
    Returns (filelist, stores, framenums).
    """
    stores = []
    framenums = []
    for vid in filelist:
        perframe = pd.read_pickle(vid + '/track/perframe_stats.pickle')
        logfile = stims.get_logfile(vid)
        store = imgstore.new_for_filename(vid + '/metadata.yaml')
        stores.append(store)
        _, synced = stims.synch_coherence_with_rotation(
            perframe, logfile, store)  #sync_reversals
        aligned = sync_by_stimStart(synced)
        # Row prestim_frames before the first stimStart marks the window start.
        start_row = aligned['stimStart'].idxmax() - prestim_frames
        framenums.append(aligned.loc[start_row, 'FrameNumber'])
    return filelist, stores, framenums
Beispiel #2
0
def run(MAIN_DIR, RESUME=True):
    """Build (or reload) per-frame statistics for one experiment directory.

    Reuses an existing perframe_stats pickle only if it postdates the
    2019-12-18 pipeline fix; otherwise loads or regenerates the
    frame-by-frame data, sanity-checks it, computes per-frame stats and
    writes stimulus-vs-time overview plots. Relies on the module-level
    ``args`` namespace for ``maxthreads``. Returns None.
    """
    print("processing: ", MAIN_DIR)
    #getColumnNames('_'.join(MAIN_DIR.split('/')[-1]..split('.')[0].split('_')[-2:]))
    trackdir = slashdir(MAIN_DIR) + 'track/'
    PF_DONE = False
    if os.path.exists(trackdir + 'perframe_stats.pickle'):
        # Only trust cached stats written after the 20191218 fix.
        if datetime.datetime.fromtimestamp(
                os.path.getmtime(trackdir + 'perframe_stats.pickle')
        ) > getTimeFromTimeString('20191218_000000'):
            PF_DONE = True
            perframe_stats = pd.read_pickle(trackdir + 'perframe_stats.pickle')
            # A 1-D result means the file was written with joblib, not pandas.
            if len(perframe_stats.shape) == 1:
                perframe_stats = joblib.load(trackdir +
                                             'perframe_stats.pickle')
            if len(perframe_stats.shape) == 1:
                PF_DONE = False
    if not PF_DONE:
        if os.path.exists(trackdir + 'frameByFrameData.pickle'):
            try:
                fbf = pd.read_pickle(trackdir + 'frameByFrameData.pickle')
            except Exception:
                fbf = joblib.load(trackdir + 'frameByFrameData.pickle')
            if len(fbf.shape) == 1:
                # Unreadable/legacy file: rebuild from raw tracking output.
                fbf = getFrameByFrameData(trackdir, RESUME, args.maxthreads)
                #print("CORRUPTED FILE. DELETING frameByFrameData:", trackdir)
                #os.remove(trackdir + 'frameByFrameData.pickle')
                #return
        else:
            fbf = getFrameByFrameData(trackdir, RESUME, args.maxthreads)

        if 'VX#smooth#wcentroid' not in fbf.columns:
            # Missing velocity columns: discard tracking products so the
            # pipeline regenerates them on the next pass.
            print('VX#smooth#wcentroid', "not found in columns.", MAIN_DIR)
            print(fbf.columns)
            os.remove(trackdir + 'frameByFrameData.pickle')
            shutil.rmtree(trackdir + 'fishdata')
            if os.path.exists(trackdir + 'frameByFrame_complete'):
                os.remove(trackdir + 'frameByFrame_complete')
            return

        if len(set(fbf.frame)) < 501:
            print("FOUND INCOMPLETE TRACKING DATA. DELETING TRACKDIR")
            shutil.rmtree(trackdir)
            return
        perframe_stats = calculate_perframe_stats(fbf, trackdir,
                                                  args.maxthreads)

        store = imgstore.new_for_filename(slashdir(MAIN_DIR) + 'metadata.yaml')
        log = stim_handling.get_logfile(MAIN_DIR)
        if 'reversals' in MAIN_DIR:
            #ret, perframe_stats = stim_handling.sync_reversals(perframe_stats, log, store)
            plot_perframe_vs_time(slashdir(MAIN_DIR), [
                'dir', 'median_polarization', 'median_dRotation_cMass',
                'median_dRotation_cArea', 'median_swimSpeed', 'entropy_Ra'
            ], [
                'Direction', 'Pol. Order', 'Rot. Order (CofM)',
                'Rot. Order (Area)', 'Median Speed', 'Entropy'
            ], perframe_stats, '_median')
        elif 'coherence' in MAIN_DIR:
            #ret, perframe_stats = stim_handling.synch_coherence_with_rotation(perframe_stats, log, store)
            plot_perframe_vs_time(slashdir(MAIN_DIR), [
                'coherence', 'median_polarization', 'median_dRotation_cMass',
                'median_dRotation_cArea', 'median_swimSpeed', 'entropy_Ra'
            ], [
                'Coherence', 'Pol. Order', 'Rot. Order (CofM)',
                'Rot. Order (Area)', 'Median Speed', 'Entropy'
            ], perframe_stats, '_median')
        elif 'cogs' in MAIN_DIR:

            pass  #FIXME
Beispiel #3
0
def calculate_perframe_stats(fbf, TRACK_DIR, nCores=8):
    """Compute group-level per-frame statistics from frame-by-frame data.

    Splits the data into nCores chunks by frame number, processes them in
    parallel with process_chunk, pickles the per-frame rotation orders to
    TRACK_DIR, and returns the stimulus-synced per-frame stats DataFrame
    (also written to TRACK_DIR/perframe_stats.pickle).
    """

    # SETUP PARALLEL PROCESSING

    ppe = ProcessPoolExecutor(nCores)
    futures = []
    statResults = []
    rotMResults = []
    rotAResults = []
    fishR = []

    # PREPARE DATAFRAME
    #fbf = fbf.loc[fbf['frame'] < 20100, :] #large files cause memory error on this old machine
    fbf = fbf.loc[fbf[XPOS].notnull(), :]
    fbf = fbf.loc[fbf[YPOS].notnull(), :]
    # Unit velocity components.
    fbf.loc[:, 'uVX'] = fbf.loc[:, XVEL] / fbf.loc[:, SPEED]
    fbf.loc[:, 'uVY'] = fbf.loc[:, YVEL] / fbf.loc[:, SPEED]
    if 'header' in fbf.columns:
        fbf = fbf.drop(columns=['header'])
    fbf['coreGroup'] = fbf[
        'frame'] % nCores  #divide into chunks labelled range(nCores)
    fbf.reset_index(inplace=True, drop=True)
    # INITIATE PARALLEL PROCESSES
    for n in range(nCores):
        p = ppe.submit(process_chunk, fbf.loc[fbf['coreGroup'] == n, :])
        futures.append(p)

    # COLLECT PROCESSED DATA AS IT IS FINISHED
    for future in as_completed(futures):
        stats, rotM, rotA, fish = future.result()
        statResults.append(stats)
        rotMResults.append(rotM)
        rotAResults.append(rotA)
        fishR.append(fish)

    #CONCATENATE RESULTS
    perframe_stats = pd.concat(statResults)
    rotationDf = pd.concat(fishR)

    rotationOrders_cMass = {}
    for r in rotMResults:
        rotationOrders_cMass.update(r)
    # Context managers guarantee the pickle files are closed even on error.
    with open(TRACK_DIR + '/rotationOrders_cMass.pickle', "wb") as pick:
        pickle.dump(rotationOrders_cMass, pick)

    rotationOrders_cArea = {}
    for r in rotAResults:
        rotationOrders_cArea.update(r)
    with open(TRACK_DIR + '/rotationOrders_cArea.pickle', "wb") as pick:
        pickle.dump(rotationOrders_cArea, pick)

    ARENA_WIDTH = get_arena_width(TRACK_DIR.split('/track')[0])
    #perframe_stats.loc[:,'centroidRotation'] = get_centroid_rotation(perframe_stats, TRACK_DIR,  ARENA_WIDTH)
    perframe_stats['frame'] = perframe_stats.index
    log = stim_handling.get_logfile(TRACK_DIR.rsplit('/', 2)[0])
    store = imgstore.new_for_filename(
        TRACK_DIR.rsplit('/', 2)[0] + '/metadata.yaml')
    ret, perframe_stats = stim_handling.sync_data(perframe_stats, log, store)
    perframe_stats.to_pickle(TRACK_DIR + '/perframe_stats.pickle')
    try:
        rotationDf.to_pickle(TRACK_DIR + '/frameByFrameData.pickle')
    except Exception:
        # Oversized frames can break pandas pickling; fall back to joblib.
        # NOTE: the fallback is written with a .jl extension, not .pickle.
        import joblib
        joblib.dump(rotationDf, TRACK_DIR + '/frameByFrameData.jl')
    return perframe_stats
    import os
    import pandas as pd
    import numpy as np

    MAIN_DIR = '/media/recnodes/recnode_2mfish/reversals3m_64_dotbot_20181024_153201.stitched/'

    DATA_COL = 'EigenCen'
    STIM_COL = 'dir'
    SKIP_FRAMES = 3

    SAVE_DIR = MAIN_DIR + 'track/eigencentricity_overlay/'

    nfbf = joblib.load(MAIN_DIR + 'track/network_FBF.pickle')
    nfbf.index = nfbf.frame
    store = imgstore.new_for_filename(MAIN_DIR + 'metadata.yaml')
    log = stims.get_logfile(MAIN_DIR)
    ret, pf = stims.sync_data(
        pd.read_pickle(MAIN_DIR + 'track/perframe_stats.pickle'), log, store)

    ret, nfbf = stims.sync_data(nfbf, log, store)

    frames = pd.DataFrame(store.get_frame_metadata())
    frames.columns = ['FrameNumber', 'Timestamp']

    _MIN = nfbf[DATA_COL].min()
    _MAX = nfbf[DATA_COL].max()

    g = nfbf.groupby('frame')

    for i in np.arange(0, len(frames), SKIP_FRAMES):
        if os.path.exists(SAVE_DIR + '%06d.png' % i):
Beispiel #5
0
def get_Tseries(expFileName):
    """Extract per-trial time series from one experiment and save as .npz.

    Loads frame-by-frame and per-frame data, syncs them to stimulus onset,
    then writes one npz per trial containing per-fish rotation orders,
    pre-stimulus summary stats, and the stimulus channels. Returns 1 on
    success, 0 on any failure. Relies on a module-level ``area`` table
    (presumably indexed by trialID — TODO confirm) for group metadata.
    """
    expID = expFileName.split('/')[-1].split('_', 3)[-1].split('.')[0]
    if 'reversal' in expFileName:
        REVERSAL = True
        prestim_frames = 400
        poststim_frames = 2400
        file_prefix = 'REV_'
        COLUMN = 'dir'
    else:
        REVERSAL = False
        prestim_frames = 1200
        poststim_frames = 16000
        file_prefix = 'COH_'
        COLUMN = 'speed'
    try:
        fbf = pd.read_pickle(expFileName + '/track/frameByFrameData.pickle')
        if len(fbf.shape) == 1:
            fbf = joblib.load(expFileName + '/track/frameByFrameData.pickle')
        pf = pd.read_pickle(
            expFileName + '/track/perframe_stats.pickle'
        )  #FIXME these may have the wrong stim direction because of sync_data vs sync_coherence (if made before 20191218)....
        if len(pf.shape) == 1:
            pf = joblib.load(expFileName + '/track/perframe_stats.pickle')
        if 'frame' not in pf.columns:
            # BUGFIX: previously assigned pf.index to fbf['frame'], mixing two
            # tables of different lengths; pf itself needs the frame column
            # (matches the pattern used elsewhere in this file).
            pf['frame'] = pf.index
        if 'FrameNumber' not in pf.columns:
            try:
                ret, pf = stims.sync_data(
                    pf, stims.get_logfile(expFileName),
                    imgstore.new_for_filename(expFileName + '/metadata.yaml'))
            except Exception:
                print("oppala")
                return 0
        ret, sy = sync_by_stimStart(pf, expID, col=COLUMN, REVERSALS=REVERSAL)
        if ret == 0:
            return 0

    except Exception as e:
        print(e)
        return 0
    for ID, data in sy.groupby('trialID'):

        # Frame closest to stimulus onset defines time zero for this trial.
        frame_0 = data.loc[data['syncTime'] == abs(data.syncTime).min(),
                           'frame'].values[0]
        md = data.loc[frame_0 - prestim_frames:frame_0 + poststim_frames]
        prestim = data.loc[data['frame'] < frame_0, :]
        # Pre-stimulus means of all float columns become trial metadata.
        psMeta = dict(prestim[prestim.columns[prestim.dtypes ==
                                              'float64']].fillna(0).mean())

        a = area[area.index == ID]
        if len(a) > 0:
            for col in [
                    'Area', 'Area_rank', 'Density', 'Density_rank',
                    'groupsize', 'trialID'
            ]:
                psMeta[col] = a[col][0]
        else:
            psMeta['trialID'] = ID
        d = fbf[fbf['frame'].between(frame_0 - prestim_frames,
                                     frame_0 + poststim_frames)]
        # Per-fish rotation order: one row per frame, one column per track.
        rotA = d.groupby(['frame',
                          'trackid'])['rotation_cArea'].mean().unstack()
        rotM = d.groupby(['frame',
                          'trackid'])['rotation_cMass'].mean().unstack()
        stimdir = md['dir'].fillna(0)
        stimcoh = md['coh'].fillna(0)
        stimspeed = md['speed'].fillna(0)
        meta = {}

        COH = str(np.around(pf.coh.median(), 1))
        GS = expFileName.split('/')[-1].split('_')[1]
        #ID = expFileName.split('/')[-1].split('_',3)[-1].split('.')[0]
        FN = '/media/recnodes/Dan_storage/Jake_TS/' + file_prefix + GS + '_' + COH + '_' + ID + '.npz'

        #FN = '/media/recnodes/Dan_storage/Jake_TS/'+ expFileName.split('/')[-1].rsplit('_',2)[0] + '_' + ID + '.npz'

        np.savez(FN,
                 rA=rotA,
                 rM=rotM,
                 prestim=psMeta,
                 meta=meta,
                 direction=stimdir,
                 coherence=stimcoh,
                 speed=stimspeed)
        print('completed:', FN)

    return 1
Beispiel #6
0
def save_rotation_data(expFileName):
    """Save per-trial rotation-order arrays for one experiment as .npy.

    Syncs per-frame stats to stimulus onset, then for each trial extracts
    the rotation orders within XLIM seconds after onset, flips the sign so
    rotation with the stimulus is positive, and writes the array (reversal
    experiments only; the coherence branch is disabled). Returns 1 on
    success, 0 on failure.
    """

    if 'reversal' in expFileName:
        REVERSAL = True
        XLIM = (30, 60)
        COLUMN = 'dir'
    else:
        REVERSAL = False
        XLIM = (30, 300)
        COLUMN = 'speed'
    try:
        fbf = pd.read_pickle(expFileName + '/track/perframe_stats.pickle')
        if len(fbf) == 0:
            print("FAILED TO READ PICKLE. TRYING JOBLIB")
            fbf = joblib.load(expFileName + '/track/perframe_stats.pickle')
        rot = pd.read_pickle(expFileName +
                             '/track/rotationOrders_cArea.pickle')
        if 'frame' not in fbf.columns:
            fbf['frame'] = fbf.index
        if 'FrameNumber' not in fbf.columns:
            ret, fbf = stims.sync_data(
                fbf, stims.get_logfile(expFileName),
                imgstore.new_for_filename(expFileName + '/metadata.yaml'))
        ID = expFileName.split('/')[-1].split('_', 3)[-1].split('.')[0]
        ret, synced = sync_by_stimStart(fbf,
                                        ID,
                                        col=COLUMN,
                                        REVERSALS=REVERSAL)
        if ret == 0:
            return 0
        for IDx in list(set(synced['trialID'])):
            chunk = synced.loc[synced['trialID'] == IDx, :]
            # Frames inside the post-stimulus analysis window.
            ix = chunk[chunk.syncTime.between(np.timedelta64(XLIM[0], 's'),
                                              np.timedelta64(XLIM[1],
                                                             's'))].index
            DIR = np.sign(chunk.loc[ix, 'dir'].mean())
            if DIR == 0:
                return 0
            data = np.concatenate([
                rot[x] for x in range(ix.min(), ix.max())
            ]) * DIR * -1.0  #FLIP TO MAKE POSITIVE, JAKE
            COH = str(np.around(fbf.coh.mean(), 1))
            GS = expFileName.split('/')[-1].split('_')[1]
            if REVERSAL:
                np.save(
                    '/media/recnodes/Dan_storage/20200121_reversal_rotation_data/'
                    + GS + '_' + COH + '_' + IDx + '.npy', data)
                print(
                    '/media/recnodes/Dan_storage/20200121_reversal_rotation_data/'
                    + GS + '_' + COH + '_' + IDx + '.npy')
            else:
                continue  #FIXME danno's too chicken to try this cuz it'll surely break
                # BUGFIX (dead code): filename used the whole-experiment ID
                # instead of per-trial IDx; now consistent with the branch above.
                np.save(
                    '/media/recnodes/Dan_storage/191205_rotation_data/' + GS +
                    '_' + COH + '_' + IDx + '.npy', data)
                print('/media/recnodes/Dan_storage/191205_rotation_data/' +
                      GS + '_' + COH + '_' + IDx + '.npy')
        return 1
    except Exception as e:
        print(e)
        return 0
Beispiel #7
0
        if not os.path.exists(MAIN_DIR + '/track/localData_FBF.pickle'):
            continue

        expID = MAIN_DIR.split('/')[-1].split('.')[0]
        exp, groupsize, _, trialID = expID.split('_', 3)
        #don't repeat if already done
        if trialID in results.index:
            continue
        try:

            store = imgstore.new_for_filename(MAIN_DIR + '/metadata.yaml')
            fbf = pd.read_pickle(MAIN_DIR + '/track/frameByFrameData.pickle')
            if len(fbf) < 1000:
                fbf = joblib.load(MAIN_DIR + '/track/frameByFrameData.pickle')

            ret, fbf = stims.sync_data(fbf, stims.get_logfile(MAIN_DIR), store)

            fbf = drop_bad_points(fbf)
            if 'coherence' in expID:
                synced = sync_by_stimStart(fbf, trialID)
            elif 'reversal' in expID:
                synced = sync_by_reversal(fbf, trialID)

            prestim = synced[synced.syncTime.between(np.timedelta64(-30, 's'),
                                                     np.timedelta64(0, 's'))]

            for group, data in prestim.groupby('trialID'):
                df = get_prestim_area(data)
                df['exp'] = exp
                df['groupsize'] = groupsize
                s = pd.Series(df.median(), name=group)
Beispiel #8
0
    lastStim = df.loc[df['Time'] > df['Time'].median(), 'speed'].idxmin()
    df.loc[lastStim, 'stimEnd'] = 1
    return df


allData = pd.DataFrame()
for fn in glob.glob(
        '/media/recnodes/recnode_2mfish/coherencetestangular3m_*_dotbot_*/track/perframe_stats.pickle'
):
    expID, groupsize, _, trialID = fn.split('/')[4].split('.')[0].split('_', 3)
    if fn.split('/track/perframe_stats')[0] in blacklist:
        print "excluding", fn
        continue
    print fn
    ret, pf = stims.sync_data(
        pd.read_pickle(fn), stims.get_logfile(fn.rsplit('/', 2)[0]),
        imgstore.new_for_filename(fn.rsplit('/', 2)[0] + '/metadata.yaml'))
    pf['dir'] = pd.to_numeric(pf['dir'], errors='coerce')
    pf['coh'] = pf['coh'].fillna(method='pad').fillna(method='backfill')
    try:
        pf = sync_by_stimStart(pf)
        pf = align_by_stim(pf, trialID)

        #slope = pd.Series(np.gradient(pf['median_dRotation_cArea'].values), pf['Timestamp'], name='slope')
        s = splrep(pf.Timestamp, pf.median_dRotation_cArea, k=5, s=17)
        newdf = pd.DataFrame({
            'syncTime': pf['syncTime'],
            'Orotation': pf['median_dRotation_cArea'],
            'smoothedOrotation': splev(pf.Timestamp, s),
            'dO_by_dt': splev(pf.Timestamp, s, der=1),
            'dO_by_dt2': splev(pf.Timestamp, s, der=2)
def get_trial_data(MAIN_DIR):
    """Assemble per-trial, 250 ms-resampled local statistics for one experiment.

    Merges the frame-by-frame and local-properties tables, syncs them to the
    stimulus log, derives each individual's rotation, detects reversal
    events, and for each reversal returns per-bin medians plus Spearman
    correlations between rotation and the local metrics. Returns a
    DataFrame with one row-block per trial.
    """

    try:
        fbf = pd.read_pickle(MAIN_DIR + 'track/frameByFrameData.pickle')
        assert len(fbf.shape) > 1
    except Exception:
        # 1-D pandas result (or read failure) means a joblib-written file.
        fbf = joblib.load(MAIN_DIR + 'track/frameByFrameData.pickle')
        assert len(fbf.shape) > 1
    fbf['trackid'] = fbf['trackid'].astype(int)
    try:
        l = pd.read_pickle(MAIN_DIR + 'track/localData_FBF.pickle')
        assert len(l.shape) > 1
    except Exception:
        l = joblib.load(MAIN_DIR + 'track/localData_FBF.pickle')
        assert len(l.shape) > 1
    l['trackid'] = l['trackid'].astype(int)
    l['frame'] = l['frame'].astype(int)

    df = pd.merge(fbf,
                  l,
                  how='left',
                  left_on=['frame', 'trackid'],
                  right_on=['frame', 'trackid'])
    store = imgstore.new_for_filename(MAIN_DIR + 'metadata.yaml')
    log = stims.get_logfile(MAIN_DIR)

    ret, synced = stims.sync_data(df, log, store)
    #calculate individual rotation data:
    # Centre coordinates on (160, 167), then rotation is the cross product
    # of the unit position and unit velocity vectors.
    synced['CX'] = synced[XPOS] - 160
    synced['CY'] = synced[YPOS] - 167
    synced['radius'] = np.sqrt(synced['CX']**2 + synced['CY']**2)
    synced['uCX'] = synced['CX'] / synced['radius']
    synced['uCY'] = synced['CY'] / synced['radius']
    synced['uVX'] = synced[XVEL] / synced[SPEED]
    synced['uVY'] = synced[YVEL] / synced[SPEED]
    synced['Rotation'] = np.cross(synced[['uCX', 'uCY']],
                                  synced[['uVX', 'uVY']])

    synced = synced.sort_values('Timestamp').reset_index()
    synced = synced[:-10]
    synced['reversal'] = 0
    # A direction jump of magnitude 2 (e.g. -1 -> +1) marks a reversal.
    reversals = synced[abs(synced['dir'] - synced.shift()['dir']) == 2].index
    synced.loc[reversals, 'reversal'] = 1
    synced.loc[synced['Time'] > 300,
               'reversal'] = 0  #FIXME this is a hack solution to sort out ends
    synced['firstStim'] = 0
    firstStim = synced[synced['Time'] < synced['Time'].median()]
    firstStim = firstStim[abs(firstStim['dir'] -
                              firstStim.shift()['dir']) == 1].index
    synced.loc[firstStim, 'firstStim'] = 1
    synced['lastStim'] = 0
    lastStim = synced[synced['Time'] > synced['Time'].median()]
    lastStim = lastStim[abs(lastStim['dir'] -
                            lastStim.shift()['dir']) == 1].index
    synced.loc[lastStim, 'lastStim'] = 1

    alignPoints = list(synced[synced['reversal'] == 1]['Timestamp'].values)
    synced = synced[synced['dir'].notnull()]
    trials = pd.DataFrame()
    fileID = MAIN_DIR.split('/')[-2].split('.')[0].split('_', 3)[-1]
    trialID = 0
    for i in alignPoints:
        # Window: 10 s before to 60 s after each reversal.
        data = synced.loc[synced['Timestamp'].between(i - 10.0, i + 60.0), [
            'Timestamp', 'speed', 'dir', 'coh', 'frame', 'neighbourDist',
            'localArea', 'localPackingFraction', 'localMedianRotation',
            'localRscore', 'localPolarization', 'localPDcor',
            'localSpeedScore', 'radius', 'Rotation'
        ]]
        data['syncTime'] = pd.to_timedelta(data['Timestamp'] - i, 's')
        data['localMedianRotation'] = data['localMedianRotation'] * np.sign(
            data['dir'].median()) * -1.0  #make congruent and positive
        data['Rotation'] = data['Rotation'] * np.sign(
            data['dir'].median()) * -1.0  #make congruent and positive
        data.index = data['syncTime']
        data['syncTime'] = data.index.copy()
        GBF = data.resample('250ms')
        median = GBF.median()

        # Spearman correlation of rotation vs each local metric, per 250 ms
        # bin (replaces eight copy-pasted list comprehensions).
        for col in [
                'neighbourDist', 'localArea', 'localPackingFraction',
                'localMedianRotation', 'localPolarization', 'localPDcor',
                'localSpeedScore', 'radius'
        ]:
            median['Spearman_Rotation_' + col] = [
                spearmanr(x['Rotation'], x[col])[0] for _, x in GBF
            ]

        median['trialID'] = fileID + '_' + str(trialID)
        median['date'] = fileID.split('_')[0]
        trialID += 1
        trials = pd.concat([trials, median], axis=0)

    return trials
Beispiel #10
0
# Experimental conditions to aggregate over.
groupsizes = [64,128,256,512,1024]
coherences = [0,0.2,0.4,0.6,0.8,1]

# Accumulators filled by the per-recording loops below.
allData = pd.DataFrame()
prestim_meanvals = pd.DataFrame()
meanvals = pd.DataFrame()

# Aggregate coherence-test recordings per group size (Python 2 script).
for groupsize in groupsizes:
    print groupsize
    groupData = pd.DataFrame()
    # One perframe_stats pickle per recording of this group size.
    for fn in glob.glob('/media/recnodes/recnode_2mfish/coherencetestangular3m_' + str(groupsize) + '_dotbot_*/track/perframe_stats.pickle'):
        if fn.split('/track/perframe_stats')[0] in blacklist:
            print "excluding", fn
            continue
        print fn
        # Attach stimulus log / imgstore metadata to the per-frame stats.
        ret, pf = stims.sync_data(pd.read_pickle(fn), stims.get_logfile(fn.rsplit('/',2)[0]), imgstore.new_for_filename(fn.rsplit('/',2)[0] + '/metadata.yaml'))
        pf['dir'] = pd.to_numeric(pf['dir'], errors='coerce')
        pf['coh'] = pf['coh'].fillna(method='pad').fillna(method='backfill')
        # Baseline: mean over all frames before the stimulus first moves.
        prestim_mean = pf.loc[pf['Time'] < pf[pf['speed'] !=0]['Time'].min(), :].mean()
        prestim_mean['ID'] = fn.split('/')[-3].split('_',3)[-1].split('.')[0]
        prestim_mean['groupsize'] = groupsize
        prestim_meanvals = pd.concat([prestim_meanvals, prestim_mean], axis=1)
        _mean = pf.mean()
        _mean['ID'] = fn.split('/')[-3].split('_',3)[-1].split('.')[0]
        _mean['groupsize'] = groupsize
        meanvals = pd.concat([meanvals, _mean], axis=1)

        # Align each recording to its stimulus onset for cross-trial averaging.
        pf = sync_by_stimStart(pf)
        fileID = fn.split('/')[-3].split('.')[0].split('_',3)[-1]
        aligned = align_by_stim(pf, fileID)
        aligned['groupsize'] = groupsize
Beispiel #11
0
# Aggregate reversal recordings per group size (Python 2 script).
for groupsize in groupsizes:
    print groupsize
    groupData = pd.DataFrame()
    for fn in glob.glob('/media/recnodes/recnode_2mfish/reversals3m_' +
                        str(groupsize) +
                        '_dotbot_*/track/perframe_stats.pickle'):
        if '20181026' in fn:  #
            continue
        print fn
        try:
            pf = pd.read_pickle(fn)
            # Older pickles lack stimulus sync; attach it from the log.
            if not 'FrameNumber' in pf.columns:
                if not 'frame' in pf.columns:
                    pf['frame'] = pf.index.copy()
                ret, pf = stims.sync_reversals(
                    pf, stims.get_logfile(fn.rsplit('/', 2)[0]),
                    imgstore.new_for_filename(
                        fn.rsplit('/', 2)[0] + '/metadata.yaml'))
        except Exception as e:
            print "failed for ", fn
            print e
            continue
        pf['dir'] = pd.to_numeric(pf['dir'], errors='coerce')

        # Align each recording to its reversal events.
        pf = sync_by_reversal(pf)
        fileID = fn.split('/')[-3].split('.')[0].split('_', 3)[-1]
        reversals = align_by_stim(pf, fileID)
        if len(reversals) == 0:
            continue
        reversals['groupsize'] = groupsize
        """
Beispiel #12
0
def doit(MAIN_DIR, saveas="Not_defined", nCores=16, window='undefined'):

    # SETUP PARALLEL PROCESSING

    ppe = ProcessPoolExecutor(nCores)
    futures = []
    Results = []

    MAIN_DIR = slashdir(
        MAIN_DIR)  #sometimes people call dirs without a trailing slash
    print "Processing: ", MAIN_DIR
    if saveas == 'Not_defined':
        saveas = MAIN_DIR + 'voronoi_overlay'
    if not os.path.exists(saveas):
        os.makedirs(saveas)
    fbf = joblib.load(MAIN_DIR + 'track/frameByFrameData.pickle')
    fbf = fbf.replace(to_replace=np.inf, value=np.nan)

    # PREPARE DATAFRAME
    fbf = fbf.loc[fbf[XPOS].notnull(), :]
    fbf = fbf.loc[fbf[YPOS].notnull(), :]
    fbf.loc[:, 'uVX'] = fbf.loc[:, XVEL] / fbf.loc[:, SPEED]
    fbf.loc[:, 'uVY'] = fbf.loc[:, YVEL] / fbf.loc[:, SPEED]

    #filter out tracked reflections by removing tracks that are always near the border
    foo = fbf.groupby('trackid').max()['BORDER_DISTANCE#wcentroid']
    fbf = fbf[~(fbf.trackid.isin(foo[foo < 50].index))]
    arena_centre = (160.0, 160.0)
    fbf['radius'] = np.sqrt((fbf[XPOS] - arena_centre[0])**2 +
                            (fbf[YPOS] - arena_centre[1])**2)
    fbf = fbf.loc[fbf['radius'] < 215, :]
    fbf = fbf.loc[fbf[XPOS] < 315, :]  #FIXME hardcoding bad
    if 'header' in fbf.columns:
        fbf = fbf.drop(columns=['header'])
    fbf['coreGroup'] = fbf[
        'frame'] % nCores  #divide into chunks labelled range(nCores)
    store = imgstore.new_for_filename(MAIN_DIR + 'metadata.yaml')

    if not 'FrameNumber' in fbf.columns:
        ret, fbf = stim_handling.sync_data(fbf,
                                           stim_handling.get_logfile(MAIN_DIR),
                                           store)
    fbf = sync_by_stimStart(fbf, MAIN_DIR.split('/')[-2].split('_', 3)[-1])

    if not window == 'undefined':
        fbf = fbf.loc[fbf.syncTime.between(np.timedelta64(window[0], 's'),
                                           np.timedelta64(window[1], 's')), :]

    # INITIATE PARALLEL PROCESSES
    for n in range(nCores):
        p = ppe.submit(process_chunk, fbf[fbf['coreGroup'] == n])
        futures.append(p)

    # COLLECT PROCESSED DATA AS IT IS FINISHED
    for future in as_completed(futures):
        stats = future.result()
        Results.append(stats)
    NN = pd.concat(Results)
    if not window == 'undefined':
        joblib.dump(
            NN, MAIN_DIR + 'track/nearest_neighbours_FBF_' + str(window[0]) +
            '-' + str(window[1]) + '.pickle')
    else:
        joblib.dump(NN, MAIN_DIR + 'track/nearest_neighbours_FBF.pickle')

    return
Beispiel #13
0
    ]

    return df.merge(foo, left_on='community', right_on='community'), edge2cid


from overlay_data_on_position import plot_data_on_video
from mpl_toolkits.axes_grid1 import make_axes_locatable

#MAIN_DIR = '/media/recnodes/recnode_2mfish/reversals3m_128_dotbot_20181211_151201.stitched/'
MAIN_DIR = '/media/recnodes/recnode_2mfish/reversals3m_128_dotbot_20181106_145202.stitched/'
store = imgstore.new_for_filename(MAIN_DIR + 'metadata.yaml')

from local_properties import sync_rotation
import stim_handling as stims
# Per-individual local metrics, synced to the stimulus log.
local = pd.read_pickle(MAIN_DIR + 'track/localData_FBF.pickle')
local = sync_rotation(local, stims.get_logfile(MAIN_DIR), store)
local['trackid'] = local['trackid'].astype(float).astype(int)
local['Time'] = local.loc[:, 'Timestamp'] - local.iloc[0]['Timestamp']

# Columns to render on the video overlay, with per-column colour limits.
cols = [
    'objIDcol', 'EigenCen', 'comm_EigenCen', 'localArea', 'comm_R', 'R',
    'localMedianRotation', 'localRscore', 'localPackingFraction',
    'localPolarization', 'localPScore', 'localSpeedScore'
]

# NOTE(review): cols has 12 entries but cMin has 13 and cMax has 12 — the
# lists appear misaligned by one; verify before trusting the colour limits.
cMin = [0, 0, 0.0, 0, 0, -1.0, -1.0, -1.0, 0, 0, 0, 0, 0]

cMax = [12, 0.2, 0.2, 6000, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0]

CT = CentroidTracker()
FBF = pd.DataFrame()