import os

import joblib
import numpy as np
import pandas as pd

import imgstore
import stims  # project-local module for stimulus log handling

MAIN_DIR = '/media/recnodes/recnode_2mfish/reversals3m_64_dotbot_20181024_153201.stitched/'

DATA_COL = 'EigenCen'
STIM_COL = 'dir'
SKIP_FRAMES = 3

SAVE_DIR = MAIN_DIR + 'track/eigencentricity_overlay/'

nfbf = joblib.load(MAIN_DIR + 'track/network_FBF.pickle')
nfbf.index = nfbf.frame
store = imgstore.new_for_filename(MAIN_DIR + 'metadata.yaml')
log = stims.get_logfile(MAIN_DIR)
ret, pf = stims.sync_data(
    pd.read_pickle(MAIN_DIR + 'track/perframe_stats.pickle'), log, store)

ret, nfbf = stims.sync_data(nfbf, log, store)

frames = pd.DataFrame(store.get_frame_metadata())
frames.columns = ['FrameNumber', 'Timestamp']

_MIN = nfbf[DATA_COL].min()
_MAX = nfbf[DATA_COL].max()

g = nfbf.groupby('frame')

for i in np.arange(0, len(frames), SKIP_FRAMES):
    if os.path.exists(SAVE_DIR + '%06d.png' % i):
        continue
    print(i)
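
# The snippet above is cut off inside the render loop. A minimal sketch of how
# such an overlay frame could be produced with matplotlib follows; the 'X'/'Y'
# position columns and the frame-number mapping are assumptions, not the
# original implementation.
import matplotlib
matplotlib.use('Agg')  # render off-screen
import matplotlib.pyplot as plt

for i in np.arange(0, len(frames), SKIP_FRAMES):
    if os.path.exists(SAVE_DIR + '%06d.png' % i):
        continue
    img, (frame_number, timestamp) = store.get_image(frames.loc[i, 'FrameNumber'])
    points = g.get_group(i)  # assumes track frames align with store frames
    fig, ax = plt.subplots(figsize=(8, 8))
    ax.imshow(img, cmap='gray')
    ax.scatter(points['X'], points['Y'],  # hypothetical position columns
               c=points[DATA_COL], vmin=_MIN, vmax=_MAX, s=4, cmap='viridis')
    ax.set_axis_off()
    fig.savefig(SAVE_DIR + '%06d.png' % i, dpi=150)
    plt.close(fig)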
Example #2
def calculate_perframe_stats(fbf, TRACK_DIR, nCores=8):

    # SETUP PARALLEL PROCESSING

    ppe = ProcessPoolExecutor(nCores)
    futures = []
    statResults = []
    rotMResults = []
    rotAResults = []
    fishR = []

    # PREPARE DATAFRAME
    #fbf = fbf.loc[fbf['frame'] < 20100, :] #large files cause memory error on this old machine
    fbf = fbf.loc[fbf[XPOS].notnull(), :]  # XPOS/YPOS/XVEL/YVEL/SPEED are module-level column-name constants (not shown)
    fbf = fbf.loc[fbf[YPOS].notnull(), :]
    fbf.loc[:, 'uVX'] = fbf.loc[:, XVEL] / fbf.loc[:, SPEED]
    fbf.loc[:, 'uVY'] = fbf.loc[:, YVEL] / fbf.loc[:, SPEED]
    if 'header' in fbf.columns:
        fbf = fbf.drop(columns=['header'])
    fbf['coreGroup'] = fbf[
        'frame'] % nCores  #divide into chunks labelled range(nCores)
    fbf.reset_index(inplace=True, drop=True)
    # INITIATE PARALLEL PROCESSES
    for n in range(nCores):
        p = ppe.submit(process_chunk, fbf.loc[fbf['coreGroup'] == n, :])
        futures.append(p)

    # COLLECT PROCESSED DATA AS IT IS FINISHED
    for future in as_completed(futures):
        stats, rotM, rotA, fish = future.result()
        statResults.append(stats)
        rotMResults.append(rotM)
        rotAResults.append(rotA)
        fishR.append(fish)

    #CONCATENATE RESULTS
    perframe_stats = pd.concat(statResults)
    rotationDf = pd.concat(fishR)

    rotationOrders_cMass = {}
    for r in rotMResults:
        rotationOrders_cMass.update(r)
    with open(TRACK_DIR + '/rotationOrders_cMass.pickle', 'wb') as pick:
        pickle.dump(rotationOrders_cMass, pick)

    rotationOrders_cArea = {}
    for r in rotAResults:
        rotationOrders_cArea.update(r)
    with open(TRACK_DIR + '/rotationOrders_cArea.pickle', 'wb') as pick:
        pickle.dump(rotationOrders_cArea, pick)

    ARENA_WIDTH = get_arena_width(TRACK_DIR.split('/track')[0])
    #perframe_stats.loc[:,'centroidRotation'] = get_centroid_rotation(perframe_stats, TRACK_DIR,  ARENA_WIDTH)
    perframe_stats['frame'] = perframe_stats.index
    log = stim_handling.get_logfile(TRACK_DIR.rsplit('/', 2)[0])
    store = imgstore.new_for_filename(
        TRACK_DIR.rsplit('/', 2)[0] + '/metadata.yaml')
    ret, perframe_stats = stim_handling.sync_data(perframe_stats, log, store)
    perframe_stats.to_pickle(TRACK_DIR + '/perframe_stats.pickle')
    try:
        rotationDf.to_pickle(TRACK_DIR + '/frameByFrameData.pickle')
    except:
        import joblib
        joblib.dump(rotationDf, TRACK_DIR + '/frameByFrameData.jl')
    return perframe_stats
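
# process_chunk is not shown in these examples. From how its results are
# consumed above (a stats frame, two dicts keyed by frame number, and the
# per-fish rows), its contract looks like this sketch; the metrics in the
# body are placeholders, not the project's actual statistics.
def process_chunk(chunk):
    stats = chunk.groupby('frame').agg(
        polarization=('uVX', 'mean'),      # placeholder metric
        groupsize=('trackid', 'nunique'))  # placeholder metric
    rotM = {f: sub[['uVX', 'uVY']].values for f, sub in chunk.groupby('frame')}
    rotA = {f: sub[['uVX', 'uVY']].values for f, sub in chunk.groupby('frame')}
    return stats, rotM, rotA, chunk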
Example #3
def save_rotation_data(expFileName):

    if 'reversal' in expFileName:
        REVERSAL = True
        XLIM = (30, 60)
        COLUMN = 'dir'
    else:
        REVERSAL = False
        XLIM = (30, 300)
        COLUMN = 'speed'
    try:
        fbf = pd.read_pickle(expFileName + '/track/perframe_stats.pickle')
        if len(fbf) == 0:
            print("FAILED TO READ PICKLE. TRYING JOBLIB")
            fbf = joblib.load(expFileName + '/track/perframe_stats.pickle')
        rot = pd.read_pickle(expFileName +
                             '/track/rotationOrders_cArea.pickle')
        if 'frame' not in fbf.columns:
            fbf['frame'] = fbf.index
        if 'FrameNumber' not in fbf.columns:
            ret, fbf = stims.sync_data(
                fbf, stims.get_logfile(expFileName),
                imgstore.new_for_filename(expFileName + '/metadata.yaml'))
        ID = expFileName.split('/')[-1].split('_', 3)[-1].split('.')[0]
        ret, synced = sync_by_stimStart(fbf,
                                        ID,
                                        col=COLUMN,
                                        REVERSALS=REVERSAL)
        if ret == 0:
            return 0
        for IDx in list(set(synced['trialID'])):
            chunk = synced.loc[synced['trialID'] == IDx, :]
            ix = chunk[chunk.syncTime.between(np.timedelta64(XLIM[0], 's'),
                                              np.timedelta64(XLIM[1],
                                                             's'))].index
            DIR = np.sign(chunk.loc[ix, 'dir'].mean())
            if DIR == 0:
                return 0
            data = np.concatenate([
                rot[x] for x in range(ix.min(), ix.max())
            ]) * DIR * -1.0  #FLIP TO MAKE POSITIVE, JAKE
            COH = str(np.around(fbf.coh.mean(), 1))
            GS = expFileName.split('/')[-1].split('_')[1]
            if REVERSAL:
                np.save(
                    '/media/recnodes/Dan_storage/20200121_reversal_rotation_data/'
                    + GS + '_' + COH + '_' + IDx + '.npy', data)
                print(
                    '/media/recnodes/Dan_storage/20200121_reversal_rotation_data/'
                    + GS + '_' + COH + '_' + IDx + '.npy')
            else:
                continue  #FIXME danno's too chicken to try this cuz it'll surely break
                np.save(
                    '/media/recnodes/Dan_storage/191205_rotation_data/' + GS +
                    '_' + COH + '_' + ID + '.npy', data)
                print('/media/recnodes/Dan_storage/191205_rotation_data/' +
                      GS + '_' + COH + '_' + ID + '.npy')
        return 1
    except Exception as e:
        print(e)
        return 0
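
# sync_by_stimStart is also not among these snippets. Its call sites here and
# in the next example imply it returns a success flag plus a copy of the input
# tagged with 'trialID' and a 'syncTime' timedelta measured from stimulus
# onset. A rough single-trial sketch under those assumptions:
def sync_by_stimStart(df, expID, col='speed', REVERSALS=False):
    df = df.copy()
    onsets = df.index[df[col].fillna(0).diff().abs() > 0]
    if len(onsets) == 0:
        return 0, df  # ret == 0 tells callers to bail out
    t0 = df.loc[onsets[0], 'Timestamp']
    df['trialID'] = expID + '_0'  # the real version numbers multiple trials
    df['syncTime'] = pd.to_timedelta(df['Timestamp'] - t0, unit='s')
    return 1, df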
Example #4
def get_Tseries(expFileName):
    expID = expFileName.split('/')[-1].split('_', 3)[-1].split('.')[0]
    if 'reversal' in expFileName:
        REVERSAL = True
        prestim_frames = 400
        poststim_frames = 2400
        file_prefix = 'REV_'
        COLUMN = 'dir'
    else:
        REVERSAL = False
        prestim_frames = 1200
        poststim_frames = 16000
        file_prefix = 'COH_'
        COLUMN = 'speed'
    try:
        fbf = pd.read_pickle(expFileName + '/track/frameByFrameData.pickle')
        if len(fbf.shape) == 1:
            fbf = joblib.load(expFileName + '/track/frameByFrameData.pickle')
        pf = pd.read_pickle(
            expFileName + '/track/perframe_stats.pickle'
        )  #FIXME these may have the wrong stim direction because of sync_data vs sync_coherence (if made before 20191218)....
        if len(pf.shape) == 1:
            pf = joblib.load(expFileName + '/track/perframe_stats.pickle')
        if 'frame' not in pf.columns:
            pf['frame'] = pf.index
        if 'FrameNumber' not in pf.columns:
            try:
                ret, pf = stims.sync_data(
                    pf, stims.get_logfile(expFileName),
                    imgstore.new_for_filename(expFileName + '/metadata.yaml'))
            except:
                print("oppala")
                return 0
        ret, sy = sync_by_stimStart(pf, expID, col=COLUMN, REVERSALS=REVERSAL)
        if ret == 0:
            return 0

    except Exception as e:
        print(e)
        return 0
    for ID, data in sy.groupby('trialID'):

        frame_0 = data.loc[data['syncTime'] == abs(data.syncTime).min(),
                           'frame'].values[0]
        md = data.loc[frame_0 - prestim_frames:frame_0 + poststim_frames]
        prestim = data.loc[data['frame'] < frame_0, :]
        psMeta = dict(prestim[prestim.columns[prestim.dtypes ==
                                              'float64']].fillna(0).mean())

        a = area[area.index == ID]  # 'area' is a module-level table of prestim group measures (defined outside this snippet)
        if len(a) > 0:
            for col in [
                    'Area', 'Area_rank', 'Density', 'Density_rank',
                    'groupsize', 'trialID'
            ]:
                psMeta[col] = a[col][0]
        else:
            psMeta['trialID'] = ID
        d = fbf[fbf['frame'].between(frame_0 - prestim_frames,
                                     frame_0 + poststim_frames)]
        rotA = d.groupby(['frame',
                          'trackid'])['rotation_cArea'].mean().unstack()
        rotM = d.groupby(['frame',
                          'trackid'])['rotation_cMass'].mean().unstack()
        stimdir = md['dir'].fillna(0)
        stimcoh = md['coh'].fillna(0)
        stimspeed = md['speed'].fillna(0)
        meta = {}

        COH = str(np.around(pf.coh.median(), 1))
        GS = expFileName.split('/')[-1].split('_')[1]
        #ID = expFileName.split('/')[-1].split('_',3)[-1].split('.')[0]
        FN = '/media/recnodes/Dan_storage/Jake_TS/' + file_prefix + GS + '_' + COH + '_' + ID + '.npz'

        #FN = '/media/recnodes/Dan_storage/Jake_TS/'+ expFileName.split('/')[-1].rsplit('_',2)[0] + '_' + ID + '.npz'

        np.savez(FN,
                 rA=rotA,
                 rM=rotM,
                 prestim=psMeta,
                 meta=meta,
                 direction=stimdir,
                 coherence=stimcoh,
                 speed=stimspeed)
        print('completed:', FN)

    return 1
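
# For reference, the bundles written above can be read back with np.load. The
# dict payloads ('prestim', 'meta') come back as 0-d object arrays, so
# allow_pickle=True and .item() are needed. The filename is illustrative only.
ts = np.load('/media/recnodes/Dan_storage/Jake_TS/REV_64_1.0_example_0.npz',
             allow_pickle=True)
rotA = ts['rA']                      # frames x fish rotation (centre of area)
prestim_meta = ts['prestim'].item()  # recover the dict payload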
Example #5
        if not os.path.exists(MAIN_DIR + '/track/localData_FBF.pickle'):
            continue

        expID = MAIN_DIR.split('/')[-1].split('.')[0]
        exp, groupsize, _, trialID = expID.split('_', 3)
        #don't repeat if already done
        if trialID in results.index:  # 'results' accumulates across the enclosing loop (not shown)
            continue
        try:

            store = imgstore.new_for_filename(MAIN_DIR + '/metadata.yaml')
            fbf = pd.read_pickle(MAIN_DIR + '/track/frameByFrameData.pickle')
            if len(fbf) < 1000:
                fbf = joblib.load(MAIN_DIR + '/track/frameByFrameData.pickle')

            ret, fbf = stims.sync_data(fbf, stims.get_logfile(MAIN_DIR), store)

            fbf = drop_bad_points(fbf)
            if 'coherence' in expID:
                synced = sync_by_stimStart(fbf, trialID)
            elif 'reversal' in expID:
                synced = sync_by_reversal(fbf, trialID)

            prestim = synced[synced.syncTime.between(np.timedelta64(-30, 's'),
                                                     np.timedelta64(0, 's'))]

            for group, data in prestim.groupby('trialID'):
                df = get_prestim_area(data)
                df['exp'] = exp
                df['groupsize'] = groupsize
                s = pd.Series(df.median(), name=group)
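
# get_prestim_area is not defined in these snippets. Given how its output is
# used (medianed per trial, with 'Area' and 'Density' columns read back in
# Example #4), a plausible sketch computes per-frame convex-hull area and
# density; the position column names are assumptions.
from scipy.spatial import ConvexHull

def get_prestim_area(data, xcol='X#wcentroid', ycol='Y#wcentroid'):
    rows = []
    for frame, pts in data.groupby('frame'):
        xy = pts[[xcol, ycol]].dropna().values
        if len(xy) < 3:
            continue  # a hull is undefined for fewer than 3 points
        hull = ConvexHull(xy)
        rows.append({'frame': frame,
                     'Area': hull.volume,  # in 2-D, ConvexHull.volume is the area
                     'Density': len(xy) / hull.volume})
    return pd.DataFrame(rows)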
Example #6
    lastStim = df.loc[df['Time'] > df['Time'].median(), 'speed'].idxmin()
    df.loc[lastStim, 'stimEnd'] = 1
    return df


allData = pd.DataFrame()
for fn in glob.glob(
        '/media/recnodes/recnode_2mfish/coherencetestangular3m_*_dotbot_*/track/perframe_stats.pickle'
):
    expID, groupsize, _, trialID = fn.split('/')[4].split('.')[0].split('_', 3)
    if fn.split('/track/perframe_stats')[0] in blacklist:
        print "excluding", fn
        continue
    print fn
    ret, pf = stims.sync_data(
        pd.read_pickle(fn), stims.get_logfile(fn.rsplit('/', 2)[0]),
        imgstore.new_for_filename(fn.rsplit('/', 2)[0] + '/metadata.yaml'))
    pf['dir'] = pd.to_numeric(pf['dir'], errors='coerce')
    pf['coh'] = pf['coh'].ffill().bfill()
    try:
        pf = sync_by_stimStart(pf)
        pf = align_by_stim(pf, trialID)

        #slope = pd.Series(np.gradient(pf['median_dRotation_cArea'].values), pf['Timestamp'], name='slope')
        s = splrep(pf.Timestamp, pf.median_dRotation_cArea, k=5, s=17)
        newdf = pd.DataFrame({
            'syncTime': pf['syncTime'],
            'Orotation': pf['median_dRotation_cArea'],
            'smoothedOrotation': splev(pf.Timestamp, s),
            'dO_by_dt': splev(pf.Timestamp, s, der=1),
        'dO_by_dt2': splev(pf.Timestamp, s, der=2)
    })
Example #7
def get_trial_data(MAIN_DIR):

    try:
        fbf = pd.read_pickle(MAIN_DIR + 'track/frameByFrameData.pickle')
        assert len(fbf.shape) > 1
    except:
        fbf = joblib.load(MAIN_DIR + 'track/frameByFrameData.pickle')
        assert len(fbf.shape) > 1
    fbf['trackid'] = fbf['trackid'].astype(int)
    try:
        l = pd.read_pickle(MAIN_DIR + 'track/localData_FBF.pickle')
        assert len(l.shape) > 1
    except:
        l = joblib.load(MAIN_DIR + 'track/localData_FBF.pickle')
        assert len(l.shape) > 1
    l['trackid'] = l['trackid'].astype(int)
    l['frame'] = l['frame'].astype(int)

    df = pd.merge(fbf,
                  l,
                  how='left',
                  left_on=['frame', 'trackid'],
                  right_on=['frame', 'trackid'])
    store = imgstore.new_for_filename(MAIN_DIR + 'metadata.yaml')
    log = stims.get_logfile(MAIN_DIR)

    ret, synced = stims.sync_data(df, log, store)
    #calculate individual rotation data:
    synced['CX'] = synced[XPOS] - 160
    synced['CY'] = synced[YPOS] - 167
    synced['radius'] = np.sqrt(synced['CX']**2 + synced['CY']**2)
    synced['uCX'] = synced['CX'] / synced['radius']
    synced['uCY'] = synced['CY'] / synced['radius']
    synced['uVX'] = synced[XVEL] / synced[SPEED]
    synced['uVY'] = synced[YVEL] / synced[SPEED]
    synced['Rotation'] = np.cross(synced[['uCX', 'uCY']],
                                  synced[['uVX', 'uVY']])

    synced = synced.sort_values('Timestamp').reset_index()
    synced = synced[:-10]
    synced['reversal'] = 0
    reversals = synced[abs(synced['dir'] - synced.shift()['dir']) == 2].index
    synced.loc[reversals, 'reversal'] = 1
    synced.loc[synced['Time'] > 300,
               'reversal'] = 0  #FIXME this is a hack solution to sort out ends
    synced['firstStim'] = 0
    firstStim = synced[synced['Time'] < synced['Time'].median()]
    firstStim = firstStim[abs(firstStim['dir'] -
                              firstStim.shift()['dir']) == 1].index
    synced.loc[firstStim, 'firstStim'] = 1
    synced['lastStim'] = 0
    lastStim = synced[synced['Time'] > synced['Time'].median()]
    lastStim = lastStim[abs(lastStim['dir'] -
                            lastStim.shift()['dir']) == 1].index
    synced.loc[lastStim, 'lastStim'] = 1

    alignPoints = list(synced[synced['reversal'] == 1]['Timestamp'].values)
    synced = synced[synced['dir'].isnull() == False]
    trials = pd.DataFrame()
    fileID = MAIN_DIR.split('/')[-2].split('.')[0].split('_', 3)[-1]
    trialID = 0
    for i in alignPoints:
        data = synced.loc[synced['Timestamp'].between(i - 10.0, i + 60.0), [
            'Timestamp', 'speed', 'dir', 'coh', 'frame', 'neighbourDist',
            'localArea', 'localPackingFraction', 'localMedianRotation',
            'localRscore', 'localPolarization', 'localPDcor',
            'localSpeedScore', 'radius', 'Rotation'
        ]]
        data['syncTime'] = pd.to_timedelta(data['Timestamp'] - i, 's')
        data['localMedianRotation'] = data['localMedianRotation'] * np.sign(
            data['dir'].median()) * -1.0  #make congruent and positive
        data['Rotation'] = data['Rotation'] * np.sign(
            data['dir'].median()) * -1.0  #make congruent and positive
        data.index = data['syncTime']
        data['syncTime'] = data.index.copy()
        GBF = data.resample('250ms')
        median = GBF.median()

        # Spearman rank correlation of rotation against each local measure,
        # computed per 250 ms resampling bin
        for col in ['neighbourDist', 'localArea', 'localPackingFraction',
                    'localMedianRotation', 'localPolarization', 'localPDcor',
                    'localSpeedScore', 'radius']:
            median['Spearman_Rotation_' + col] = [
                spearmanr(x['Rotation'], x[col])[0] for _, x in GBF
            ]

        median['trialID'] = fileID + '_' + str(trialID)
        median['date'] = fileID.split('_')[0]
        trialID += 1
        trials = pd.concat([trials, median], axis=0)

    return trials
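
# A hypothetical driver for get_trial_data, aggregating trials across all
# matching experiment directories; the glob pattern follows the MAIN_DIR
# layout in Example #1 and the output path is an assumption.
import glob

allTrials = pd.concat(
    get_trial_data(d + '/')
    for d in glob.glob(
        '/media/recnodes/recnode_2mfish/reversals3m_*_dotbot_*.stitched'))
allTrials.to_pickle('/media/recnodes/Dan_storage/reversal_trials.pickle')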
Example #8
def doit(MAIN_DIR, saveas="Not_defined", nCores=16, window='undefined'):

    # SETUP PARALLEL PROCESSING

    ppe = ProcessPoolExecutor(nCores)
    futures = []
    Results = []

    MAIN_DIR = slashdir(
        MAIN_DIR)  #sometimes people call dirs without a trailing slash
    print "Processing: ", MAIN_DIR
    if saveas == 'Not_defined':
        saveas = MAIN_DIR + 'voronoi_overlay'
    if not os.path.exists(saveas):
        os.makedirs(saveas)
    fbf = joblib.load(MAIN_DIR + 'track/frameByFrameData.pickle')
    fbf = fbf.replace(to_replace=np.inf, value=np.nan)

    # PREPARE DATAFRAME
    fbf = fbf.loc[fbf[XPOS].notnull(), :]
    fbf = fbf.loc[fbf[YPOS].notnull(), :]
    fbf.loc[:, 'uVX'] = fbf.loc[:, XVEL] / fbf.loc[:, SPEED]
    fbf.loc[:, 'uVY'] = fbf.loc[:, YVEL] / fbf.loc[:, SPEED]

    #filter out tracked reflections by removing tracks that are always near the border
    foo = fbf.groupby('trackid').max()['BORDER_DISTANCE#wcentroid']
    fbf = fbf[~(fbf.trackid.isin(foo[foo < 50].index))]
    arena_centre = (160.0, 160.0)
    fbf['radius'] = np.sqrt((fbf[XPOS] - arena_centre[0])**2 +
                            (fbf[YPOS] - arena_centre[1])**2)
    fbf = fbf.loc[fbf['radius'] < 215, :]
    fbf = fbf.loc[fbf[XPOS] < 315, :]  #FIXME hardcoding bad
    if 'header' in fbf.columns:
        fbf = fbf.drop(columns=['header'])
    fbf['coreGroup'] = fbf[
        'frame'] % nCores  #divide into chunks labelled range(nCores)
    store = imgstore.new_for_filename(MAIN_DIR + 'metadata.yaml')

    if 'FrameNumber' not in fbf.columns:
        ret, fbf = stim_handling.sync_data(fbf,
                                           stim_handling.get_logfile(MAIN_DIR),
                                           store)
    fbf = sync_by_stimStart(fbf, MAIN_DIR.split('/')[-2].split('_', 3)[-1])

    if window != 'undefined':
        fbf = fbf.loc[fbf.syncTime.between(np.timedelta64(window[0], 's'),
                                           np.timedelta64(window[1], 's')), :]

    # INITIATE PARALLEL PROCESSES
    for n in range(nCores):
        p = ppe.submit(process_chunk, fbf[fbf['coreGroup'] == n])
        futures.append(p)

    # COLLECT PROCESSED DATA AS IT IS FINISHED
    for future in as_completed(futures):
        stats = future.result()
        Results.append(stats)
    NN = pd.concat(Results)
    if window != 'undefined':
        joblib.dump(
            NN, MAIN_DIR + 'track/nearest_neighbours_FBF_' + str(window[0]) +
            '-' + str(window[1]) + '.pickle')
    else:
        joblib.dump(NN, MAIN_DIR + 'track/nearest_neighbours_FBF.pickle')

    return
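
# slashdir (used here and in Example #9) is not defined in these snippets; per
# the comment at its call site it just guarantees a trailing slash. An assumed
# one-line equivalent:
def slashdir(path):
    return os.path.join(path, '')  # appends os.sep only when missing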
Example #9
    MAIN_DIR = slashdir(args.dir)

    store = imgstore.new_for_filename(MAIN_DIR + 'metadata.yaml')
    log = stims.get_logfile(MAIN_DIR)

    print('loading pf')
    pf = pd.read_pickle(MAIN_DIR + 'track/perframe_stats.pickle')

    if len(pf.shape) == 1:
        import joblib
        pf = joblib.load(MAIN_DIR + 'track/perframe_stats.pickle')
    if 'frame' not in pf.columns:
        pf['frame'] = pf.index
    if 'FrameNumber' not in pf.columns:
        ret, pf = stims.sync_data(pf, log, store)

    # find the reversal (|Δdir| == 2) whose time is closest to args.time
    reversal_times = pf.loc[abs(pf.dir - pf.dir.shift()) == 2, 'Time']
    T0 = pf.loc[abs(reversal_times - args.time).idxmin(), 'Time']

    print('loading fbf')
    fbf = pd.read_pickle(MAIN_DIR + 'track/frameByFrameData.pickle')
    if len(fbf.shape) == 1:
        import joblib
        fbf = joblib.load(MAIN_DIR + 'track/frameByFrameData.pickle')
    if 'frame' not in fbf.columns:
        fbf['frame'] = fbf.index
    if 'FrameNumber' not in fbf.columns:
        print('merging framenumber')
        fbf = fbf.merge(pf[['FrameNumber', 'frame']],