Example #1
def sync_data(r,log,store):
    if '_201909' in store.filename:
        checkname = store.filename.rsplit('.',1)[0] + '.21990449/metadata.yaml'
        check9 = imgstore.new_for_filename(checkname)
        deltaT = check9.get_frame_metadata()['frame_time'][0] - store.get_frame_metadata()['frame_time'][0]
        if abs(deltaT) > 1.0:
            print("**********TIMESTAMP MISMATCH ", str(deltaT), "sec. REPAIRING STORE.********")
            correct_timestamps(store.filename)
            print("*** repair successful ***")
            store = imgstore.new_for_filename(store.filename + '/metadata.yaml')
    foo = get_frame_metadata(r.reset_index(drop=True), store)
    bar = foo.merge(log, how='outer')
    bar = bar.sort_values('Timestamp')  # integrate log data with tracking data
    bar = bar.ffill()  # forward-fill log values onto tracking rows
    bar = bar.loc[foo.index]  # drop rows that exist only in the log
    bar = bar.sort_values('Timestamp')  # re-sort after restoring the tracking index
    if 'coh' in bar.columns:
        bar['coh'] = bar['coh'].ffill()
        bar['coherence'] = bar['coh'].copy()
    if 'speed' in bar.columns:
        bar['speed'] = bar['speed'].fillna(0)
    if 'dir' in bar.columns:
        bar['dir'] = bar['dir'].fillna(0)
    bar.loc[:,'Time'] = (bar.loc[:,'Timestamp'] - bar.loc[0,'Timestamp'])
    if bar.iloc[5].Timestamp - bar.iloc[0].Timestamp > 10: #check if log overlaps with track data
        return 0, bar
    else:    
        return 1, bar  
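A typical call site, mirroring how sync_data is invoked in Examples #21, #24, and #28 below; the directory path and the get_logfile helper are placeholders for this sketch:

import pandas as pd
import imgstore

MAIN_DIR = '/path/to/recording.stitched'  # placeholder experiment directory
pf = pd.read_pickle(MAIN_DIR + '/track/perframe_stats.pickle')
log = get_logfile(MAIN_DIR)  # hypothetical here; Examples #21/#24 use stims.get_logfile
store = imgstore.new_for_filename(MAIN_DIR + '/metadata.yaml')
ret, pf = sync_data(pf, log, store)  # ret == 1 when the log overlaps the tracking data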
Example #2
def synch_reversals(MAIN_DIR, r=None):
    MAIN_DIR = slashdir(MAIN_DIR)
    TRACK_DIR = MAIN_DIR + 'track/'
    LOG_FN = '/media/recnodes/Dan_storage/dotbot_logs/dotbotLog_' + MAIN_DIR.split('/')[-2] + '.txt'
    
    if r is None:
        r = pd.read_pickle(TRACK_DIR + 'frame_means_rotation_polarization.pickle')
    store = imgstore.new_for_filename(MAIN_DIR + 'metadata.yaml')

    framelist = pd.DataFrame(store.get_frame_metadata())
    framelist.columns = ['FrameNumber', 'Timestamp']

    foo = r.merge(framelist, left_index=True, right_index=True)

    log = pd.read_table(LOG_FN)
    log.loc[:, 'Timestamp'] /= 1000.0

    bar = foo.merge(log, how='outer')
    bar = bar.sort_values('Timestamp')
    bar['speed'] = bar['speed'].ffill().fillna(0)
    bar['dir'] = bar['dir'].ffill().fillna(0)
    bar.loc[:,'Time'] = (bar.loc[:,'Timestamp'] - bar.loc[0,'Timestamp'])
    if bar.iloc[5].Timestamp - bar.iloc[0].Timestamp > 10: #check if log overlaps with track data
        return 0, bar
    else:    
        return 1, bar  
Example #3
def doit(DIR, HANDLE):
    for x in glob.glob(slashdir(DIR) + '*' + HANDLE + '*/metadata.yaml'):
        inStore = imgstore.new_for_filename(x)
        UND = Undistort(inStore)
        img, (frame_number, frame_timestamp) = inStore.get_next_image()

        newdir = x.rsplit('/', 1)[0] + '_undistorted'
        if os.path.exists(newdir):
            shutil.rmtree(newdir)
        os.mkdir(newdir)
        outStore = imgstore.new_for_format('avc1/mp4',
                                           mode='w',
                                           basedir=newdir,
                                           imgshape=img.shape,
                                           imgdtype=img.dtype,
                                           chunksize=500)

        for i in range(inStore.frame_count):  # was frame_count - 1, which dropped the last frame
            try:
                outStore.add_image(UND.undistort(img), frame_number,
                                   frame_timestamp)
                if i < inStore.frame_count - 1:
                    img, (frame_number, frame_timestamp) = inStore.get_next_image()
            except Exception:
                print("failed at frame: ", i, "of", inStore.frame_count, inStore.frame_max)

        outStore.close()
        open(x.rsplit('/', 1)[0] + '/undistortion_complete', 'a').close()

    return
Example #4
def Xsynch_coherence_with_rotation(MAIN_DIR):
    #MAIN_DIR = slashdir('/media/recnodes/kn-crec06/juvenilesfwdmix_3264_dotbot_20171205_101600')
    MAIN_DIR = slashdir(MAIN_DIR)
    TRACK_DIR = MAIN_DIR + 'track/'
    LOG_FN = '/media/recnodes/recnode_jolle2/dotbot_logs/dotbotLog_' + MAIN_DIR.split('/')[-2] + '.txt'

    r = pd.read_pickle(TRACK_DIR + 'frame_means_rotation_polarization.pickle')
    store = imgstore.new_for_filename(MAIN_DIR + 'metadata.yaml')


    log = process_logfile(LOG_FN)
    coherence = get_coherence(log)
    coherence['speed'] = get_speed(log)['speed']  
    coherence['dir'] = get_direction(log)['dir'] 
    framelist = pd.DataFrame(store.get_frame_metadata())
    framelist.columns = ['FrameNumber', 'Timestamp']

    foo = r.merge(framelist, left_index=True, right_index=True)

    bar = foo.merge(coherence, how='outer')
    bar = bar.sort_values('Timestamp')
    bar['coherence'] = log['coh'].mean()  # was: bar['coherence'].fillna(method='ffill')
    bar['speed'] = bar['speed'].ffill().fillna(0)
    bar['dir'] = bar['dir'].ffill().fillna(0)
    bar.loc[:,'Time'] = (bar.loc[:,'Timestamp'] - bar.loc[0,'Timestamp'])

    return bar  # .fillna(np.inf)
Example #5
def fixTimestamps(fn, camserial='21990449'):
    """
    fixes timestamps of "0 0 0 ...", which sometimes occurs during stitching. Pass the filename of a stitched video (/full/path/to/video_XXXXXX_XXXXXX.stitch'), and this function takes timestamps from one of the constituent videos (camserial) and assigns them to the stitched video. It also removes converted.pv.
    """
    cornervid = imgstore.new_for_filename(
        fn.split('.')[0] + '.' + camserial + '/metadata.yaml')

    timestamps = cornervid.get_frame_metadata()['frame_time']

    fileList = sorted(glob.glob(fn + '/*.npz'))
    cumsum = 0

    for f in fileList:
        d = np.load(f)
        nf = {name: d[name] for name in d.files}
        nFrames = len(d['frame_time'])
        nf['frame_time'] = timestamps[cumsum:cumsum + nFrames]
        np.savez(f, **nf)
        cumsum += nFrames

    #need to generate a new converted.pv with timestamps.
    if os.path.exists(slashdir(fn) + 'track/converted.pv'):
        os.remove(slashdir(fn) + 'track/converted.pv')

    return
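A minimal usage sketch for fixTimestamps, with a placeholder path in the layout the docstring describes:

# placeholder stitched-store path; '21990449' is the default master camera serial
fixTimestamps('/full/path/to/video_20190901_120000.stitch')
# track/converted.pv was removed, so it will be regenerated with the repaired timestamps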
Example #6
    def __init__(self, video_file):
        import imgstore

        self.vid = imgstore.new_for_filename(video_file)
        
        self.first_frame = self.vid.frame_min
        self.frame_max = self.vid.frame_max
        
        img, (frame_number, frame_timestamp) = self.vid.get_next_image()
        self.height = img.shape[0]
        self.width = img.shape[1]
        self.dtype = img.dtype
        
        self.vid.close()
        self.vid = imgstore.new_for_filename(video_file)
        self.frames_read = []
Example #7
    def __init__(self, filename):
        try:
            self.store = imgstore.new_for_filename(str(filename))
        except Exception:
            self.store = None
        self.filename = filename
        print(self.filename)
        self.ext = '.extra_data.json'
        self.extra_data = None
Example #8
    def __init__(self, directory):
        self.DIR = directory
        self.u_raw = np.load(directory + '/piv_u.npy')
        self.v_raw = np.load(directory + '/piv_v.npy')
        self.x = np.load(directory + '/piv_x.npy')
        self.y = np.load(directory + '/piv_y.npy')
        self.radius = self.get_radius_matrix()
        self.vort_raw = np.load(directory + '/piv_vort.npy')
        self.shape = self.x.shape

        self.store = imgstore.new_for_filename(directory + '/metadata.yaml')
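get_radius_matrix is referenced above but not shown; a plausible reading, given the PIV grid coordinates loaded from piv_x.npy and piv_y.npy, is the distance of each grid point from the grid centre. This is an assumption, not the original implementation:

import numpy as np

def get_radius_matrix(self):
    # assumed: radial distance of each PIV grid point from the mean grid position
    cx, cy = self.x.mean(), self.y.mean()
    return np.sqrt((self.x - cx) ** 2 + (self.y - cy) ** 2)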
Example #9
def create_stitched_video_from_scratch(fnString, pos):

    stores = (fnString + '.21990443', fnString + '.21990445',
              fnString + '.21990447', fnString + '.21990449')

    cams_stores = {}
    for fn in stores:
        store = new_for_filename(op.join(BASE_DATA, fn))
        print(store.full_path)
        nFrames = store.frame_count
        cams_stores[get_store_camera_serial(store)] = store

    sorted_stores = [cams_stores[i] for i in best_order]

    undistortions = [Undistort(i) for i in sorted_stores]

    aligned = StoreAligner(*sorted_stores)
    aligned.extract_common_frames(StoreAligner.MISSING_POLICY_DROP)

    if args.truncate:
        suffix = '.partial_stitch'
        aligned._fns = aligned._fns[
            2000:5000]  # THIS IS HOW WE TRUNCATE. #FIXME hardcoded cut
    else:
        suffix = '.stitched'

    if not os.path.exists(fnString + suffix):
        os.mkdir(fnString + suffix)

    out = imgstore.new_for_format('avc1/mp4',
                                  mode='w',
                                  basedir=fnString + suffix,
                                  imgshape=s.panorama_shape,
                                  imgdtype='uint8',
                                  chunksize=500)
    for n, (imgs, (fn, ts)) in enumerate(aligned.iter_imgs(return_times=True)):

        _imgs = [und.undistort(img) for und, img in zip(undistortions, imgs)]

        #with silence_stdout():
        ok, img = s.stitch_images(*[ensure_color(i) for i in _imgs])
        assert ok

        out.add_image(img, fn, ts)

        printProgressBar(n,
                         nFrames,
                         prefix='Stitching progress:',
                         stdoutpos=pos)
    out.close()

    return
Example #10
    def __init__(self, src_file):
        self.frame_save_interval = 1
        self.is_light_background = 1
        self.video_data = None
        self.groupnames = ['None']

        self.src_file = src_file

        self.video_data = imgstore.new_for_filename(str(src_file))
        self._ini_frame = self.video_data.frame_min

        img, (frame_number, frame_timestamp) = self.video_data.get_next_image()
        self.height, self.width = img.shape
Example #11
    def open(self):
        """
        Open the image store if it isn't already open.

        Returns:
            None
        """
        if not self._store_:
            # Open the imgstore
            self._store_ = imgstore.new_for_filename(self.filename)

            # Read a frame so we can compute shape and such
            self._img_, (frame_number, frame_timestamp) = self._store_.get_next_image()
Example #12
def setup(filelist, prestim_frames=400, **kwargs):
    stores = []
    framenums = []
    for vid in filelist:
        pf = pd.read_pickle(vid + '/track/perframe_stats.pickle')
        log = stims.get_logfile(vid)
        store = imgstore.new_for_filename(vid + '/metadata.yaml')
        stores.append(store)
        ret, pf2 = stims.synch_coherence_with_rotation(pf, log,
                                                       store)  #sync_reversals

        pf3 = sync_by_stimStart(pf2)
        startframe = pf3.loc[pf3['stimStart'].idxmax() - prestim_frames,
                             'FrameNumber']

        framenums.append(startframe)
    return filelist, stores, framenums
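A minimal driver for setup(), assuming a list of experiment directories like the ones globbed in Example #21 (the glob pattern is a placeholder):

import glob

filelist = sorted(glob.glob('/media/recnodes/recnode_2mfish/*_dotbot_*'))  # placeholder pattern
filelist, stores, framenums = setup(filelist, prestim_frames=400)
for vid, store, f0 in zip(filelist, stores, framenums):
    print(vid, 'starts at frame', f0)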
Example #13
def create_stitched_video_from_scratch(path):

    DIRECTORY, fnString = path.rsplit('/', 1)
    DIRECTORY = DIRECTORY + '/'
    stores = (fnString + '.21990443', fnString + '.21990445',
              fnString + '.21990447', fnString + '.21990449')

    cams_stores = {}
    for fn in stores:
        store = new_for_filename(op.join(BASE_DATA, fn))
        print(store.full_path)
        cams_stores[get_store_camera_serial(store)] = store

    sorted_stores = [cams_stores[i] for i in best_order]

    undistortions = [Undistort(i) for i in sorted_stores]

    aligned = StoreAligner(*sorted_stores)
    aligned.extract_common_frames(StoreAligner.MISSING_POLICY_DROP)
    if not os.path.exists(DIRECTORY + fnString + '.stitched'):
        os.mkdir(DIRECTORY + fnString + '.stitched')
    """
    out = new_for_format('avc1/mp4', DIRECTORY + fnString + '.stitched/metadata.yaml',
                         imgshape=s.panorama_shape,
                         imgdtype=np.uint8)
    """
    out = imgstore.new_for_format('avc1/mp4',
                                  mode='w',
                                  basedir=DIRECTORY + fnString + '.stitched',
                                  imgshape=s.panorama_shape,
                                  imgdtype='uint8',
                                  chunksize=500)
    for n, (imgs, (fn, ts)) in enumerate(aligned.iter_imgs(return_times=True)):

        _imgs = [und.undistort(img) for und, img in zip(undistortions, imgs)]

        ok, img = s.stitch_images(*[ensure_color(i) for i in _imgs])
        assert ok

        out.add_image(img, fn, ts)

    out.close()

    return
Example #14
def read_images_loopbio(file_name, batch_size, queue):

    store = imgstore.new_for_filename(str(file_name))
    bn = Path(file_name).parent.name

    batch = []
    for frame_number in tqdm.trange(store.frame_count, desc=bn):
        img = store.get_next_image()[0]
        batch.append((frame_number, img))
        if len(batch) >= batch_size:
            frames, X = _prepare_batch(batch)
            batch = []
            queue.put((frames, X))

    if len(batch) > 0:
        frames, X = _prepare_batch(batch)
        queue.put((frames, X))

    queue.put(None)

    while not queue.empty():
        # wait until the consumer empties the queue before destroying the process.
        time.sleep(1)
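A sketch of the consuming side, assuming read_images_loopbio runs as a separate process feeding a multiprocessing.Queue and that None is the end-of-stream sentinel, as the producer above implies; process_batch is a placeholder:

import multiprocessing as mp

def consume(file_name, batch_size=32):
    queue = mp.Queue(maxsize=4)  # bounded so the reader cannot race far ahead
    reader = mp.Process(target=read_images_loopbio,
                        args=(file_name, batch_size, queue))
    reader.start()
    while True:
        item = queue.get()
        if item is None:  # sentinel: the producer has exhausted the store
            break
        frames, X = item
        process_batch(frames, X)  # placeholder consumer
    reader.join()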
Example #15
def get_stim_data(MAIN_DIR):
       
    LOG_FN = '/media/recnodes/recnode_jolle2/dotbot_logs/dotbotLog_' + MAIN_DIR.split('/')[-2].rsplit('.',1)[0] + '.txt'
    log = pd.read_table(LOG_FN)
    log['Timestamp'] = log['Timestamp'] / 1000.0
    
    store = imgstore.new_for_filename(MAIN_DIR + 'metadata.yaml')
    framelist = pd.DataFrame(store.get_frame_metadata())
    framelist.columns = ['FrameNumber', 'Timestamp']

    g = log.groupby('comment')
    directions = {}
    for _name in g.groups.keys():
        g1 = g.get_group(_name)
        bar = framelist.merge(g1, how='outer')
        bar = bar.sort_values('Timestamp')
        bar['CX'] = bar['CX'].interpolate()
        bar['CY'] = bar['CY'].interpolate()
        framelist[_name + '_X'] = bar['CX']
        framelist[_name + '_Y'] = bar['CY']
        directions[_name] = g1.dir.mode()[0]
    
    return framelist, directions
Example #16
def create_stitched_video_from_undistorted(fnString):

    stores = (fnString + '.21990443_undistorted',
              fnString + '.21990445_undistorted',
              fnString + '.21990447_undistorted',
              fnString + '.21990449_undistorted')

    cams_stores = {}
    for fn in stores:
        store = new_for_filename(op.join(BASE_DATA, fn))
        print(store.full_path)
        cams_stores[get_store_camera_serial(store)] = store

    sorted_stores = [cams_stores[i] for i in best_order]

    aligned = StoreAligner(*sorted_stores)
    aligned.extract_common_frames(StoreAligner.MISSING_POLICY_DROP)
    if not os.path.exists(BASE_DATA + fnString + '.stitched'):
        os.mkdir(BASE_DATA + fnString + '.stitched')

    out = imgstore.new_for_format('avc1/mp4',
                                  mode='w',
                                  basedir=BASE_DATA + fnString + '.stitched',
                                  imgshape=s.panorama_shape,
                                  imgdtype='uint8',
                                  chunksize=500)
    for n, (fn, imgs) in enumerate(aligned.iter_imgs()):

        ok, img = s.stitch_images(*[ensure_color(i) for i in imgs])
        assert ok

        out.add_image(img, fn, 0)

        print(n)
    out.close()

    return
Example #17
def collectProcessingStats(d):
    d = slashdir(d)

    list_of_files = glob.glob(d + '*.mp4')
    latest_file = max(list_of_files, key=os.path.getctime)

    DATA = {
        'framecount':
        imgstore.new_for_filename(d + 'metadata.yaml').frame_count,
        'recorded':
        getTimeFromTimeString(
            d.split('/')[-2].split('_', 3)[-1].split('.')[0]),
        'stitchBegin':
        checkDate(d + 'metadata.yaml'),
        'stitchEnd':
        checkDate(latest_file),
        'converted':
        checkDate(d + 'track/converted.pv'),
        'tracked':
        checkDate(d + 'track/converted.results'),
        'pickled':
        checkDate(d + 'track/frameByFrameData.pickle')
    }
    return DATA
Example #18
def correct_timestamps(stitched_store_fn):
    """
    gets timestamps from master machine and transfers them to the stitched store. this is to correct timestamps of videos taken in september 2019 when the slave clock was fast by 338 seconds.
    """
    master_store = imgstore.new_for_filename(stitched_store_fn.rsplit('.',1)[0] + '.21990449/metadata.yaml')
    
    md = pd.DataFrame(master_store.get_frame_metadata())
    
    try:
        for z in glob.glob(stitched_store_fn + '/*.npz'):
            f = np.load(z)
            nf = {name: f[name] for name in f.files}
            frames = nf['frame_number']
            nf['frame_time'] = [md.loc[(md['frame_number'] - i).abs().idxmin(), 'frame_time']
                                for i in frames]

            np.savez(z.split('.npz')[0], **nf)

        return 1
    except Exception:
        print("FAILED FOR:", stitched_store_fn)
        return 0
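correct_timestamps is driven from sync_data (Example #1) when the first frame times of the stitched store and the master store disagree by more than a second; the same check as a standalone sketch, with a placeholder path:

import imgstore

fn = '/path/to/video_20190901_120000.stitched'  # placeholder stitched-store path
master = imgstore.new_for_filename(fn.rsplit('.', 1)[0] + '.21990449/metadata.yaml')
stitched = imgstore.new_for_filename(fn + '/metadata.yaml')
deltaT = master.get_frame_metadata()['frame_time'][0] - stitched.get_frame_metadata()['frame_time'][0]
if abs(deltaT) > 1.0 and correct_timestamps(fn):
    stitched = imgstore.new_for_filename(fn + '/metadata.yaml')  # reload the repaired store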
Example #19
    #Camera orientations before renovation (august 2018):
    [tl, bl, tr, br] = ['21990445',
                        '21990447',
                        '21990449',
                        '21990443']

    #Camera orientations after renovation (August 2018):
    """
    #Current settings as of 180827
    [tl, bl, tr, br] = ['21990447', '21990449', '21990443', '21990445']
    """

    for x in glob.glob(SEARCH_FILES):
        ID = x.split('/')[-2].split('.')[-1]
        print(ID)
        if br in ID:
            BOTTOM_RIGHT = imgstore.new_for_filename(x)
        elif tl in ID:
            TOP_LEFT = imgstore.new_for_filename(x)
        elif bl in ID:
            BOTTOM_LEFT = imgstore.new_for_filename(x)
        elif tr in ID:
            TOP_RIGHT = imgstore.new_for_filename(x)

    if args.saveas == 'notAssigned':
        SAVEAS = TOP_LEFT.filename.rsplit('.', 1)[0] + '_stitched'
    else:
        SAVEAS = slashdir(args.dir) + args.saveas

    VIDEOS = [TOP_LEFT, BOTTOM_LEFT, TOP_RIGHT, BOTTOM_RIGHT]

    VIDEO_TIME = '_'.join(
Example #20
def convert(_main_dir, _make_bkg, NEW_ONLY, fishnum):
    MAIN_DIR = _main_dir
    if MAIN_DIR[-1] != '/':
        MAIN_DIR += '/'
    track_dir = MAIN_DIR + 'track'
    if not os.path.exists(track_dir):
        os.makedirs(track_dir)

    if (NEW_ONLY) and (os.path.exists(track_dir + '/converted.results')):
        return

    if _make_bkg:
        replace_background(MAIN_DIR)

    #copy default settings files and make fishdata dir

    if not os.path.exists(track_dir + '/fishTracker.settings'):
        if '_jwj_' in MAIN_DIR:
            shutil.copyfile(
                os.path.expanduser(
                    '~/fishMAD/tristrack_defaults/fishTracker_stickleback.settings'
                ), track_dir + '/fishTracker.settings')
        else:
            shutil.copyfile(
                os.path.expanduser(
                    '~/fishMAD/tristrack_defaults/fishTracker.settings'),
                track_dir + '/fishTracker.settings')
    if not os.path.exists(track_dir + '/conversion.settings'):
        if '_jwj_' in MAIN_DIR:
            shutil.copyfile(
                os.path.expanduser(
                    '~/fishMAD/tristrack_defaults/conversion_stickleback.settings'
                ), track_dir + '/conversion.settings')
        else:
            shutil.copyfile(
                os.path.expanduser(
                    '~/fishMAD/tristrack_defaults/conversion.settings'),
                track_dir + '/conversion.settings')
    if not os.path.exists(track_dir + '/fishdata'):
        os.makedirs(track_dir + '/fishdata')

    print "set up tristrack environment..."
    #Get metadata from videos
    store = imgstore.new_for_filename(MAIN_DIR + 'metadata.yaml')
    FPS = store.user_metadata['acquisitionframerate']
    videoSize = store.image_shape

    if (fishnum == 0) or (fishnum is None):
        #Get number of fish from MAIN_DIR filename.
        fishnum = int(MAIN_DIR.split('/')[-2].split('_')[1])
    if fishnum <= 5:
        fishnum += 2
    elif fishnum <= 15:
        fishnum += 4
    elif fishnum <= 55:
        fishnum += 6
    elif fishnum <= 100:
        fishnum += 10
    elif fishnum <= 200:
        fishnum += 15
    else:
        fishnum = fishnum * 1.1

    #  customize conversion.settings
    openFile = open(track_dir + '/conversion.settings', 'r+')
    SETTINGS = openFile.readlines()
    for item in SETTINGS:
        if item.find('frame_rate') != -1:
            framerate_loc = SETTINGS.index(item)
        if item.find('cam_framerate') != -1:
            camFrameRate_loc = SETTINGS.index(item)
        if item.find('cam_resolution') != -1:
            vidSize_loc = SETTINGS.index(item)
    SETTINGS[framerate_loc] = 'frame_rate = ' + str(FPS) + '\n'
    SETTINGS[camFrameRate_loc] = 'cam_framerate = ' + str(FPS) + '\n'
    SETTINGS[vidSize_loc] = 'cam_resolution = ' + str(
        [videoSize[0], videoSize[1]]) + '\n'
    # return pointer to top of file so we can re-write the content with replaced string
    openFile.seek(0)
    # clear file content
    openFile.truncate()
    # re-write the content with the updated content
    openFile.write(''.join(SETTINGS))
    openFile.close()

    print "wrote new conversion file..."
    # Customize fishTracker.settings

    #   get and read tracker settings
    openFile = open(track_dir + '/fishTracker.settings', 'r+')
    SETTINGS = openFile.readlines()
    for item in SETTINGS:
        if item.find('frame_rate') != -1:
            framerate_loc = SETTINGS.index(item)
        if item.find('number_fish') != -1:
            fishNum_loc = SETTINGS.index(item)
        if item.find('output_dir') != -1:
            outDir_loc = SETTINGS.index(item)
        if item.find('fish_minmax_size') != -1:
            fishSize_loc = SETTINGS.index(item)
    SETTINGS[framerate_loc] = 'frame_rate = ' + str(FPS) + '\n'
    SETTINGS[fishNum_loc] = 'number_fish = ' + str(fishnum) + '\n'
    SETTINGS[outDir_loc] = 'output_dir = "' + track_dir + '"\n'
    if 'juvenile' in MAIN_DIR:
        SETTINGS[fishSize_loc] = "fish_minmax_size = [0.5,7]"
    # return pointer to top of file so we can re-write the content with replaced string
    openFile.seek(0)
    # clear file content
    openFile.truncate()
    # re-write the content with the updated content
    openFile.write(''.join(SETTINGS))
    openFile.close()

    print "wrote new settings file..."

    FNULL = open(os.devnull, 'w')

    # Launch conversion
    vidSet = MAIN_DIR + '%6d.mp4'
    launch_conversion = "~/setup_debian/FishTracker/Application/build/framegrabber -d '" + track_dir + "' -i '" + vidSet + "' -o converted.pv -settings conversion -nowindow"
    if not (os.path.exists(track_dir + '/converted.pv')):
        if os.path.exists(
                os.path.expanduser(
                    '~/setup_debian/FishTracker/Application/build/video_average.png'
                )):
            os.remove(
                os.path.expanduser(
                    '~/setup_debian/FishTracker/Application/build/video_average.png'
                ))
        print(datetime.datetime.now().strftime("%Y-%m-%d %H:%M:%S"), '\t',
              "Running conversion on file: ", track_dir)
        try:
            subprocess.check_call([launch_conversion],
                                  stdout=FNULL,
                                  stderr=subprocess.STDOUT,
                                  shell=True)
        except Exception as e:
            errorLog = open(
                os.path.expanduser(
                    '~/setup_debian/FishTracker/Application/build/batchlog.txt'
                ), 'w')
            errorLog.write(
                datetime.datetime.now().strftime("%Y-%m-%d %H:%M:%S") + '\t')
            errorLog.write(track_dir + '\t')
            errorLog.write('error during conversion step' + '\n')
            errorLog.write(str(e) + '\n\n\n')
            errorLog.close()
            FNULL.close()
            print(datetime.datetime.now().strftime("%Y-%m-%d %H:%M:%S"), '\t',
                  "ERROR converting file: ", track_dir)
            return
Example #21
    df.loc[lastStim, 'stimEnd'] = 1
    return df


groupData = pd.DataFrame()
for fn in glob.glob(
        '/media/recnodes/recnode_2mfish/coherencetestangular3m_*_dotbot_*/track/perframe_stats.pickle'
):
    expID, groupsize, _, trialID = fn.split('/')[4].split('.')[0].split('_', 3)
    if fn.split('/track/perframe_stats')[0] in blacklist:
        print("excluding", fn)
        continue
    print(fn)
    ret, pf = stims.sync_data(
        pd.read_pickle(fn), stims.get_logfile(fn.rsplit('/', 2)[0]),
        imgstore.new_for_filename(fn.rsplit('/', 2)[0] + '/metadata.yaml'))
    pf['dir'] = pd.to_numeric(pf['dir'], errors='coerce')
    pf['coh'] = pf['coh'].ffill().bfill()
    #try:
    pf = sync_by_stimStart(pf)
    pf = align_by_stim(pf, trialID)

    #slope = pd.Series(np.gradient(pf['median_dRotation_cArea'].values), pf['Timestamp'], name='slope')
    s = splrep(pf.Timestamp, pf.median_swimSpeed, k=5, s=17)
    newdf = pd.DataFrame({
        'syncTime': pf['syncTime'],
        'swimSpeed': pf['median_swimSpeed'],
        'smoothedSpeed': splev(pf.Timestamp, s),
        'dS_by_dt': splev(pf.Timestamp, s, der=1),
        'dS_by_dt2': splev(pf.Timestamp, s, der=2)
    })
Example #22
                        '21990443']
    """
    #Camera orientations after renovation (August 2018):
     #Current settings as of 180827
    [tr, br, tl, bl] = ['21990447',
                        '21990449',
                        '21990443',
                        '21990445']

    
    
    for x in glob.glob(SEARCH_FILES):
        ID = x.split('.')[-1]
        print(ID)
        if br in ID:
            BOTTOM_RIGHT = imgstore.new_for_filename(x+'/metadata.yaml')
        elif tl in ID:
            TOP_LEFT = imgstore.new_for_filename(x+'/metadata.yaml')
        elif bl in ID:
            BOTTOM_LEFT = imgstore.new_for_filename(x+'/metadata.yaml')
        elif tr in ID:
            TOP_RIGHT = imgstore.new_for_filename(x+'/metadata.yaml')

            
    VIDEOS = [TOP_LEFT, BOTTOM_LEFT, TOP_RIGHT, BOTTOM_RIGHT]


    if args.saveas == 'notAssigned':
        ts = '_'.join(TOP_LEFT.filename.split('/')[-1].split('.')[0].split('_')[-2:])
        SAVEAS = '/home/dan/videoStitch/calibrations/homography/homography_' + ts
    else:
Example #23
    args = parser.parse_args()

    DIRECTORY = args.v
    if DIRECTORY[-1] != '/':
        DIRECTORY += '/'

    LOG = args.log

    stimData = pd.read_table(LOG,
                             sep='\t',
                             header=None,
                             names=['Timestamp', 'nDots', 'C', 'vel',
                                    'dir']).fillna(0)
    stimData['frame_time'] = stimData['Timestamp'] / 1000.0

    store = imgstore.new_for_filename(DIRECTORY + 'metadata.yaml')
    FPS = 30
    videoSize = store.image_shape

    frameDF = pd.DataFrame(store.get_frame_metadata())
    merged = frameDF.merge(stimData, how='outer', on='frame_time')
    merged = merged.sort_values('frame_time').reset_index(drop=True)
    merged['frame_number'] = merged['frame_number'].ffill()

    merged['frame_number'] = merged['frame_number'].fillna(0)

    merged[['nDots', 'C', 'vel',
            'dir']] = merged[['nDots', 'C', 'vel', 'dir']].ffill()

    merged['date_and_time'] = pd.to_datetime(merged['frame_time'], unit='s')
Example #24
def calculate_perframe_stats(fbf, TRACK_DIR, nCores=8):

    # SETUP PARALLEL PROCESSING

    ppe = ProcessPoolExecutor(nCores)
    futures = []
    statResults = []
    rotMResults = []
    rotAResults = []
    fishR = []

    # PREPARE DATAFRAME
    #fbf = fbf.loc[fbf['frame'] < 20100, :] #large files cause memory error on this old machine
    fbf = fbf.loc[fbf[XPOS].notnull(), :]
    fbf = fbf.loc[fbf[YPOS].notnull(), :]
    fbf.loc[:, 'uVX'] = fbf.loc[:, XVEL] / fbf.loc[:, SPEED]
    fbf.loc[:, 'uVY'] = fbf.loc[:, YVEL] / fbf.loc[:, SPEED]
    if 'header' in fbf.columns:
        fbf = fbf.drop(columns=['header'])
    fbf['coreGroup'] = fbf['frame'] % nCores  # divide into chunks labelled range(nCores)
    fbf.reset_index(inplace=True, drop=True)
    # INITIATE PARALLEL PROCESSES
    for n in range(nCores):
        p = ppe.submit(process_chunk, fbf.loc[fbf['coreGroup'] == n, :])
        futures.append(p)

    # COLLECT PROCESSED DATA AS IT IS FINISHED
    for future in as_completed(futures):
        stats, rotM, rotA, fish = future.result()
        statResults.append(stats)
        rotMResults.append(rotM)
        rotAResults.append(rotA)
        fishR.append(fish)

    #CONCATENATE RESULTS
    perframe_stats = pd.concat(statResults)
    rotationDf = pd.concat(fishR)

    rotationOrders_cMass = {}
    for r in rotMResults:
        rotationOrders_cMass.update(r)
    with open(TRACK_DIR + '/rotationOrders_cMass.pickle', "wb") as pick:
        pickle.dump(rotationOrders_cMass, pick)

    rotationOrders_cArea = {}
    for r in rotAResults:
        rotationOrders_cArea.update(r)
    with open(TRACK_DIR + '/rotationOrders_cArea.pickle', "wb") as pick:
        pickle.dump(rotationOrders_cArea, pick)

    ARENA_WIDTH = get_arena_width(TRACK_DIR.split('/track')[0])
    #perframe_stats.loc[:,'centroidRotation'] = get_centroid_rotation(perframe_stats, TRACK_DIR,  ARENA_WIDTH)
    perframe_stats['frame'] = perframe_stats.index
    log = stim_handling.get_logfile(TRACK_DIR.rsplit('/', 2)[0])
    store = imgstore.new_for_filename(
        TRACK_DIR.rsplit('/', 2)[0] + '/metadata.yaml')
    ret, perframe_stats = stim_handling.sync_data(perframe_stats, log, store)
    perframe_stats.to_pickle(TRACK_DIR + '/perframe_stats.pickle')
    try:
        rotationDf.to_pickle(TRACK_DIR + '/frameByFrameData.pickle')
    except Exception:
        import joblib
        joblib.dump(rotationDf, TRACK_DIR + '/frameByFrameData.jl')
    return perframe_stats
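The fan-out pattern above (hash frames into nCores chunks with a modulo, submit each chunk, concatenate results as futures complete) reduces to a short generic helper; `work` stands in for process_chunk and must be a picklable top-level function:

from concurrent.futures import ProcessPoolExecutor, as_completed
import pandas as pd

def parallel_by_frame(fbf, work, nCores=8):
    # label each row with a chunk id so whole frames stay together
    fbf = fbf.copy()
    fbf['coreGroup'] = fbf['frame'] % nCores
    with ProcessPoolExecutor(nCores) as ppe:
        futures = [ppe.submit(work, fbf.loc[fbf['coreGroup'] == n, :])
                   for n in range(nCores)]
        # collect in completion order, as calculate_perframe_stats does
        return pd.concat([f.result() for f in as_completed(futures)])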
Example #25
def save_rotation_data(expFileName):

    if 'reversal' in expFileName:
        REVERSAL = True
        XLIM = (30, 60)
        COLUMN = 'dir'
    else:
        REVERSAL = False
        XLIM = (30, 300)
        COLUMN = 'speed'
    try:
        fbf = pd.read_pickle(expFileName + '/track/perframe_stats.pickle')
        if len(fbf) == 0:
            print("FAILED TO READ PICKLE. TRYING JOBLIB")
            fbf = joblib.load(expFileName + '/track/perframe_stats.pickle')
        rot = pd.read_pickle(expFileName +
                             '/track/rotationOrders_cArea.pickle')
        if 'frame' not in fbf.columns:
            fbf['frame'] = fbf.index
        if 'FrameNumber' not in fbf.columns:
            ret, fbf = stims.sync_data(
                fbf, stims.get_logfile(expFileName),
                imgstore.new_for_filename(expFileName + '/metadata.yaml'))
        ID = expFileName.split('/')[-1].split('_', 3)[-1].split('.')[0]
        ret, synced = sync_by_stimStart(fbf,
                                        ID,
                                        col=COLUMN,
                                        REVERSALS=REVERSAL)
        if ret == 0:
            return 0
        for IDx in list(set(synced['trialID'])):
            chunk = synced.loc[synced['trialID'] == IDx, :]
            ix = chunk[chunk.syncTime.between(np.timedelta64(XLIM[0], 's'),
                                              np.timedelta64(XLIM[1],
                                                             's'))].index
            DIR = np.sign(chunk.loc[ix, 'dir'].mean())
            if DIR == 0:
                return 0
            data = np.concatenate([
                rot[x] for x in range(ix.min(), ix.max())
            ]) * DIR * -1.0  #FLIP TO MAKE POSITIVE, JAKE
            COH = str(np.around(fbf.coh.mean(), 1))
            GS = expFileName.split('/')[-1].split('_')[1]
            if REVERSAL:
                np.save(
                    '/media/recnodes/Dan_storage/20200121_reversal_rotation_data/'
                    + GS + '_' + COH + '_' + IDx + '.npy', data)
                print(
                    '/media/recnodes/Dan_storage/20200121_reversal_rotation_data/'
                    + GS + '_' + COH + '_' + IDx + '.npy')
            else:
                continue  #FIXME: untested branch below; intentionally skipped
                np.save(
                    '/media/recnodes/Dan_storage/191205_rotation_data/' + GS +
                    '_' + COH + '_' + ID + '.npy', data)
                print('/media/recnodes/Dan_storage/191205_rotation_data/' +
                      GS + '_' + COH + '_' + ID + '.npy')
        return 1
    except Exception as e:
        print(e)
        return 0
Example #26
        calibs = ('stitch_dry_26c_20190624_091343.21990443_undistorted',
                  'stitch_dry_26c_20190624_091343.21990445_undistorted',
                  'stitch_dry_26c_20190624_091343.21990447_undistorted',
                  'stitch_dry_26c_20190624_091343.21990449_undistorted')
    else:
        calibs = ('stitch09_20180910_165817.21990443_undistorted',
                  'stitch09_20180910_165817.21990447_undistorted',
                  'stitch09_20180910_165817.21990445_undistorted',
                  'stitch09_20180910_165817.21990449_undistorted')

    cams_imgs = {}

    # load the first frame for the calibration
    for fn in calibs:
        with new_for_filename(op.join(BASE_CALIB, fn)) as store:
            camera_serial = get_store_camera_serial(store)
            img, _ = store.get_image(frame_number=None,
                                     exact_only=True,
                                     frame_index=0)
            cams_imgs[camera_serial] = ensure_color(img)

    sorted_imgs = [cams_imgs[i] for i in best_order]

    s = Stitcher(use_gpu=True,
                 estimator_type="homography",
                 matcher_type="affine",
                 warp_type="plane")

    # s.enable_exposure_compensation('gain_blocks')
    # s.enable_seam_finding('gc_color')
Example #27
def get_Tseries(expFileName):
    expID = expFileName.split('/')[-1].split('_', 3)[-1].split('.')[0]
    if 'reversal' in expFileName:
        REVERSAL = True
        prestim_frames = 400
        poststim_frames = 2400
        file_prefix = 'REV_'
        COLUMN = 'dir'
    else:
        REVERSAL = False
        prestim_frames = 1200
        poststim_frames = 16000
        file_prefix = 'COH_'
        COLUMN = 'speed'
    try:
        fbf = pd.read_pickle(expFileName + '/track/frameByFrameData.pickle')
        if len(fbf.shape) == 1:
            fbf = joblib.load(expFileName + '/track/frameByFrameData.pickle')
        pf = pd.read_pickle(
            expFileName + '/track/perframe_stats.pickle'
        )  #FIXME these may have the wrong stim direction because of sync_data vs sync_coherence (if made before 20191218)....
        if len(pf.shape) == 1:
            pf = joblib.load(expFileName + '/track/perframe_stats.pickle')
        if 'frame' not in pf.columns:
            pf['frame'] = pf.index
        if not 'FrameNumber' in pf.columns:
            try:
                ret, pf = stims.sync_data(
                    pf, stims.get_logfile(expFileName),
                    imgstore.new_for_filename(expFileName + '/metadata.yaml'))
            except Exception:
                print("failed to sync:", expFileName)
                return 0
        ret, sy = sync_by_stimStart(pf, expID, col=COLUMN, REVERSALS=REVERSAL)
        if ret == 0:
            return 0

    except Exception as e:
        print(e)
        return 0
    for ID, data in sy.groupby('trialID'):

        frame_0 = data.loc[data['syncTime'] == abs(data.syncTime).min(),
                           'frame'].values[0]
        md = data.loc[frame_0 - prestim_frames:frame_0 + poststim_frames]
        prestim = data.loc[data['frame'] < frame_0, :]
        psMeta = dict(prestim[prestim.columns[prestim.dtypes ==
                                              'float64']].fillna(0).mean())

        a = area[area.index == ID]
        if len(a) > 0:
            for col in [
                    'Area', 'Area_rank', 'Density', 'Density_rank',
                    'groupsize', 'trialID'
            ]:
                psMeta[col] = a[col].values[0]
        else:
            psMeta['trialID'] = ID
        d = fbf[fbf['frame'].between(frame_0 - prestim_frames,
                                     frame_0 + poststim_frames)]
        rotA = d.groupby(['frame',
                          'trackid'])['rotation_cArea'].mean().unstack()
        rotM = d.groupby(['frame',
                          'trackid'])['rotation_cMass'].mean().unstack()
        stimdir = md['dir'].fillna(0)
        stimcoh = md['coh'].fillna(0)
        stimspeed = md['speed'].fillna(0)
        meta = {}

        COH = str(np.around(pf.coh.median(), 1))
        GS = expFileName.split('/')[-1].split('_')[1]
        #ID = expFileName.split('/')[-1].split('_',3)[-1].split('.')[0]
        FN = '/media/recnodes/Dan_storage/Jake_TS/' + file_prefix + GS + '_' + COH + '_' + ID + '.npz'

        #FN = '/media/recnodes/Dan_storage/Jake_TS/'+ expFileName.split('/')[-1].rsplit('_',2)[0] + '_' + ID + '.npz'

        np.savez(FN,
                 rA=rotA,
                 rM=rotM,
                 prestim=psMeta,
                 meta=meta,
                 direction=stimdir,
                 coherence=stimcoh,
                 speed=stimspeed)
        print('completed:', FN)

    return 1
Example #28
    import imgstore
    import os
    import pandas as pd
    import numpy as np
    import joblib
    import stim_handling as stims  # assumed import; matches usage below and in Example #24

    MAIN_DIR = '/media/recnodes/recnode_2mfish/reversals3m_64_dotbot_20181024_153201.stitched/'

    DATA_COL = 'EigenCen'
    STIM_COL = 'dir'
    SKIP_FRAMES = 3

    SAVE_DIR = MAIN_DIR + 'track/eigencentricity_overlay/'

    nfbf = joblib.load(MAIN_DIR + 'track/network_FBF.pickle')
    nfbf.index = nfbf.frame
    store = imgstore.new_for_filename(MAIN_DIR + 'metadata.yaml')
    log = stims.get_logfile(MAIN_DIR)
    ret, pf = stims.sync_data(
        pd.read_pickle(MAIN_DIR + 'track/perframe_stats.pickle'), log, store)

    ret, nfbf = stims.sync_data(nfbf, log, store)

    frames = pd.DataFrame(store.get_frame_metadata())
    frames.columns = ['FrameNumber', 'Timestamp']

    _MIN = nfbf[DATA_COL].min()
    _MAX = nfbf[DATA_COL].max()

    g = nfbf.groupby('frame')

    for i in np.arange(0, len(frames), SKIP_FRAMES):
Example #29
def run(MAIN_DIR, RESUME=True):
    print("processing: ", MAIN_DIR)
    #getColumnNames('_'.join(MAIN_DIR.split('/')[-1].split('.')[0].split('_')[-2:]))
    trackdir = slashdir(MAIN_DIR) + 'track/'
    PF_DONE = False
    if os.path.exists(trackdir + 'perframe_stats.pickle'):
        if datetime.datetime.fromtimestamp(
                os.path.getmtime(trackdir + 'perframe_stats.pickle')
        ) > getTimeFromTimeString('20191218_000000'):
            PF_DONE = True
            perframe_stats = pd.read_pickle(trackdir + 'perframe_stats.pickle')
            if len(perframe_stats.shape) == 1:
                perframe_stats = joblib.load(trackdir +
                                             'perframe_stats.pickle')
            if len(perframe_stats.shape) == 1:
                PF_DONE = False
    if not PF_DONE:
        if os.path.exists(trackdir + 'frameByFrameData.pickle'):
            try:
                fbf = pd.read_pickle(trackdir + 'frameByFrameData.pickle')
            except Exception:
                fbf = joblib.load(trackdir + 'frameByFrameData.pickle')
            if len(fbf.shape) == 1:
                fbf = getFrameByFrameData(trackdir, RESUME, args.maxthreads)
                #print("CORRUPTED FILE. DELETING frameByFrameData:", trackdir)
                #os.remove(trackdir + 'frameByFrameData.pickle')
                #return
        else:
            fbf = getFrameByFrameData(trackdir, RESUME, args.maxthreads)

        if 'VX#smooth#wcentroid' not in fbf.columns:
            print('VX#smooth#wcentroid', "not found in columns.", MAIN_DIR)
            print(fbf.columns)
            os.remove(trackdir + 'frameByFrameData.pickle')
            shutil.rmtree(trackdir + 'fishdata')
            if os.path.exists(trackdir + 'frameByFrame_complete'):
                os.remove(trackdir + 'frameByFrame_complete')
            return

        if len(set(fbf.frame)) < 501:
            print("FOUND INCOMPLETE TRACKING DATA. DELETING TRACKDIR")
            shutil.rmtree(trackdir)
            return
        perframe_stats = calculate_perframe_stats(fbf, trackdir,
                                                  args.maxthreads)

        store = imgstore.new_for_filename(slashdir(MAIN_DIR) + 'metadata.yaml')
        log = stim_handling.get_logfile(MAIN_DIR)
        if 'reversals' in MAIN_DIR:
            #ret, perframe_stats = stim_handling.sync_reversals(perframe_stats, log, store)
            plot_perframe_vs_time(slashdir(MAIN_DIR), [
                'dir', 'median_polarization', 'median_dRotation_cMass',
                'median_dRotation_cArea', 'median_swimSpeed', 'entropy_Ra'
            ], [
                'Direction', 'Pol. Order', 'Rot. Order (CofM)',
                'Rot. Order (Area)', 'Median Speed', 'Entropy'
            ], perframe_stats, '_median')
        elif 'coherence' in MAIN_DIR:
            #ret, perframe_stats = stim_handling.synch_coherence_with_rotation(perframe_stats, log, store)
            plot_perframe_vs_time(slashdir(MAIN_DIR), [
                'coherence', 'median_polarization', 'median_dRotation_cMass',
                'median_dRotation_cArea', 'median_swimSpeed', 'entropy_Ra'
            ], [
                'Coherence', 'Pol. Order', 'Rot. Order (CofM)',
                'Rot. Order (Area)', 'Median Speed', 'Entropy'
            ], perframe_stats, '_median')
        elif 'cogs' in MAIN_DIR:

            pass  #FIXME
Example #30
        type=str,
        required=False,
        default='notDefined',
        help=
        'name for calibration, including date time string, ex: 20180404_123456'
    )

    args = parser.parse_args()

    CHECKERSIZE = tuple([int(k) for k in args.checkersize.split('x')])

    for vid in glob.glob(
            slashdir(args.dir) + '*' + args.handle + '*/metadata.yaml'):
        if "undistorted" in vid:  #skip already processed videos
            continue
        inStore = imgstore.new_for_filename(vid)

        if args.saveas == 'notDefined':
            DATETIME = '_'.join(
                vid.split('/')[-2].split('.')[0].rsplit('_', 2)[1:])
            SERIAL = inStore.user_metadata['camera_serial']
            SAVE_AS = '_'.join([DATETIME, SERIAL])
        else:
            SAVE_AS = '_'.join(
                [args.saveas, inStore.user_metadata['camera_serial']])
        print(SAVE_AS)
        calibrate(
            inStore, CHECKERSIZE,
            os.path.expanduser('~/videoStitch/calibrations/distortion/' +
                               SAVE_AS + '.yaml'))