def load_and_process(self):
    # load particle position data and the ring boundary for this prefix
    self.pdata = helpy.load_data(self.prefix, 'p')
    self.x0, self.y0, self.R = ring_motion.boundary(self.prefix)
    self.side_len = self.R * self.real_particle / 4.0  # 4 inches in the experiment
    self.max_dist = self.side_len / 1.25
    self.frames = self.pdata['f']
    self.boundary_shape = Point(self.y0, 1024 - self.x0).buffer(self.R)

def load_and_process(self):
    # load particle and orientation data together
    self.pdata, self.odata = helpy.load_data(self.prefix, 'p o')
    self.x0, self.y0, self.R = ring_motion.boundary(self.prefix)
    self.side_len = self.R * self.real_particle / 4.0
    self.max_dist = self.side_len / 1.25
    # shift by pi and wrap orientations into [0, 2*pi)
    self.odata['orient'] = (self.odata['orient'] + np.pi) % (2 * np.pi)
    self.frames = self.pdata['f']

def convol_load_and_process(self):
    self.pdata, self.odata = helpy.load_data(self.prefix, 'p o')
    self.x0, self.y0, self.R = ring_motion.boundary(self.prefix)
    self.side_len = self.R * self.real_particle / 4.0
    self.max_dist = self.side_len / 1.25
    self.odata['orient'] = (self.odata['orient'] + np.pi) % (2 * np.pi)
    self.frames = self.pdata['f']
    self.boundary_shape = Point(self.y0, 1024 - self.x0).buffer(self.R)
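Note: the orientation step above shifts raw angles by pi and wraps them into [0, 2*pi). A quick standalone check of that identity (my own illustration, not project code):

import numpy as np

raw = np.array([-np.pi, -0.5, 0.0, 0.5])
wrapped = (raw + np.pi) % (2 * np.pi)
print(wrapped)   # -> [0.         2.64159265 3.14159265 3.64159265]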
Example 4
def load_process(self):
    self.x0, self.y0, self.R = ring_motion.boundary(self.prefix)
    data = helpy.load_data(self.prefix)
    # shift by pi and wrap orientations into [0, 2*pi)
    data['o'] = (data['o'] + np.pi) % (2 * np.pi)
    max_frame = data['f'].max()
    # keep only tracks spanning at least half the movie, interpolating gaps
    tracksets = helpy.load_tracksets(data, run_track_orient=True,
                                     min_length=max_frame // 2,
                                     run_repair='interp')
    track_prefix = {self.prefix: tracksets}
    v_data = velocity.compile_noise(track_prefix, width=(0.525,), cat=False,
                                    side=self.side, fps=self.fps, ring=True,
                                    x0=self.x0, y0=self.y0, skip=5,
                                    grad=True, start=0)
    self.v_data = v_data[self.prefix]
Example 5
def find_data(args):
    suf = '_TRACKS.npz'
    if '*' in args.prefix or '?' in args.prefix:
        # the prefix is already a glob pattern; match tracks files directly
        fs = iglob(args.prefix + suf)
    else:
        # otherwise search subdirectories whose names start with the prefix
        dirname, prefix = os.path.split(args.prefix)
        dirm = (dirname or '*') + (prefix + '*/')
        basm = prefix.strip('/._')
        fs = iglob(dirm + basm + '*' + suf)
    prefixes = [s[:-len(suf)] for s in fs] or [args.prefix]
    if args.verbose:
        print 'prefixes:',
        print '\n          '.join(prefixes)

    return {prefix: helpy.load_data(prefix, 'tracks') for prefix in prefixes}
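Note: a minimal usage sketch for find_data (my own, not from the source). It only needs an argparse-style object with prefix and verbose attributes, so it can be driven from the same module without a full command line; the glob pattern below is hypothetical.

import argparse

args = argparse.Namespace(prefix='experiments/run_*', verbose=True)  # hypothetical pattern
datasets = find_data(args)   # maps each matched prefix to its loaded 'tracks' data
for prefix, tracks in datasets.items():
    print('%s: %d rows' % (prefix, len(tracks)))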
Example 7
def compile_noise(prefixes, vs, width=3, side=1, fps=1, cat=True,
                  do_orientation=True, do_translation=True, subtract=True,
                  minlen=10, torient=True, interp=True, dupes=False, **ignored):
    if np.isscalar(prefixes):
        prefixes = [prefixes]
    for prefix in prefixes:
        if args.verbose:
            print "Loading data for", prefix
        data = helpy.load_data(prefix, 'tracks')
        if dupes:
            data['t'] = tracks.remove_duplicates(data['t'], data)
        tracksets = helpy.load_tracksets(data, min_length=minlen,
                run_track_orient=torient, run_fill_gaps=interp)
        for track in tracksets:
            tdata = tracksets[track]
            # per-track velocity noise components (orientation and translation)
            velocities = noise_derivatives(tdata, width=width,
                    side=side, fps=fps, do_orientation=do_orientation,
                    do_translation=do_translation, subtract=subtract)
            for v in velocities:
                # accumulate each component into the caller-supplied dict of lists
                vs[v].append(velocities[v])
    if cat:
        for v in vs:
            vs[v] = np.concatenate(vs[v], -1)
    return len(tracksets)
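Note: this compile_noise variant appends each velocity component into a caller-supplied dict of lists. A minimal usage sketch (my own, with a hypothetical prefix and made-up side/fps values, relying on the snippet's module-level args, helpy, and noise_derivatives):

from collections import defaultdict

vs = defaultdict(list)
ntracks = compile_noise('path/to/dataset', vs, width=3, side=17, fps=120, cat=True)
# vs now maps each component name to a single array concatenated over all tracks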
Example 8
    meta = helpy.load_meta(absprefix)
    if args.load:
        # '_CORNER' is included only when args.corner is truthy
        datapath = absprefix + '_CORNER'*args.corner + '_POSITIONS.txt'
        helpy.txt_to_npz(datapath, verbose=True, compress=True)
        if args.orient or args.track:
            print 'NOTICE: not tracking, only converting file from txt to npz'
            print '        please run again without `-l` to track/orient'
        sys.exit()

    if args.track or args.orient:
        from scipy.spatial import cKDTree as KDTree
        if args.track != args.orient and helpy.bool_input("Would you like to "
                "simultaneously track and find orientations? (It's faster)\n"):
            args.track = args.orient = True
        if args.orient:
            pdata, cdata = helpy.load_data(absprefix, 'position corner')
        else:
            pdata = helpy.load_data(absprefix, 'position')
        pfsets = helpy.splitter(pdata, ret_dict=True)
        pftrees = {f: KDTree(np.column_stack([pfset['x'], pfset['y']]),
                             leafsize=50)
                   for f, pfset in pfsets.iteritems()}
    if args.track:
        meta.update(track_sidelength=args.side, track_maxdist=args.maxdist,
                    track_maxtime=args.giveup, track_stub=args.stub,
                    track_cut=args.cut)
        trackids = find_tracks(pdata, maxdist=args.maxdist, giveup=args.giveup,
                               n=args.number, cut=args.cut, stub=args.stub)
        trackids = remove_duplicates(trackids, data=pdata)
    else:
        trackids = None
    if args.orient:
Example 9
    arg('-v', '--verbose', action='count', help='Be verbose, may repeat: -vv')

    args = parser.parse_args()
    if not args.verbose:
        from warnings import filterwarnings
        filterwarnings('ignore', category=RuntimeWarning,
                       module='numpy|scipy|matplot')

    if '*' in args.prefix or '?' in args.prefix:
        prefix_pattern = args.prefix
        args.prefix = helpy.replace_all(args.prefix, '*?', '') + '_MRG'
        helpy.save_log_entry(args.prefix, 'argv')
        # p[:-9] strips the 9-character '_MELT.npz' suffix to recover each prefix
        prefixes = [p[:-9] for p in glob.iglob(
            helpy.with_suffix(prefix_pattern, '_MELT.npz'))]
        metas, mdatas = zip(*[(helpy.load_meta(prefix),
                               helpy.load_data(prefix, 'm'))
                              for prefix in prefixes])
        # re-base frame numbers so each dataset starts at frame 0 before merging
        for meta, mdata in zip(metas, mdatas):
            mdata['f'] = mdata['f'].astype(int) - int(meta['start_frame'])
        mdata = np.concatenate(mdatas)
        meta = helpy.merge_meta(metas, excl={'start_frame'},
                                excl_start=('center', 'corner'))
        if args.save:
            np.savez_compressed(args.prefix+'_MELT', data=mdata)
            helpy.save_meta(args.prefix, meta, merged=prefixes)
            print 'merged sets', prefixes, 'saved to', args.prefix
    else:
        helpy.save_log_entry(args.prefix, 'argv')
        meta = helpy.load_meta(args.prefix)

    helpy.sync_args_meta(
Example 10
    if not args.verbose:
        from warnings import filterwarnings
        filterwarnings('ignore',
                       category=RuntimeWarning,
                       module='numpy|scipy|matplot')

    if '*' in args.prefix or '?' in args.prefix:
        prefix_pattern = args.prefix
        args.prefix = helpy.replace_all(args.prefix, '*?', '') + '_MRG'
        helpy.save_log_entry(args.prefix, 'argv')
        prefixes = [
            p[:-9]
            for p in glob.iglob(helpy.with_suffix(prefix_pattern, '_MELT.npz'))
        ]
        metas, mdatas = zip(*[(helpy.load_meta(prefix),
                               helpy.load_data(prefix, 'm'))
                              for prefix in prefixes])
        for meta, mdata in zip(metas, mdatas):
            mdata['f'] = mdata['f'].astype(int) - int(meta['start_frame'])
        mdata = np.concatenate(mdatas)
        meta = helpy.merge_meta(metas,
                                excl={'start_frame'},
                                excl_start=('center', 'corner'))
        if args.save:
            np.savez_compressed(args.prefix + '_MELT', data=mdata)
            helpy.save_meta(args.prefix, meta, merged=prefixes)
            print 'merged sets', prefixes, 'saved to', args.prefix
    else:
        helpy.save_log_entry(args.prefix, 'argv')
        meta = helpy.load_meta(args.prefix)
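Note: the merge step above re-bases each dataset's frame counter before concatenating. A standalone sketch of the same idea with toy structured arrays (my own illustration, not the project's actual data layout):

import numpy as np

dtype = [('f', int), ('x', float)]                 # toy fields
mdatas = [np.array([(100, 1.0), (101, 1.1)], dtype=dtype),
          np.array([(200, 2.0), (201, 2.1)], dtype=dtype)]
start_frames = [100, 200]                          # per-dataset start_frame values
for mdata, start in zip(mdatas, start_frames):
    mdata['f'] = mdata['f'] - start                # re-base to frame 0
merged = np.concatenate(mdatas)
print(merged['f'])                                 # -> [0 1 0 1]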
Example 11
            try:
                bgimage = pl.imread(locdir + prefix + '_001.tif')
            except IOError:
                bgimage = None
        if args.singletracks:
            mask = np.in1d(trackids, args.singletracks)
        else:
            mask = None
        plot_tracks(data, trackids, bgimage, mask=mask,
                    save=args.save, show=args.show)

if __name__ == '__main__' and args.nn:
    # Calculate the <nn> correlation for all the tracks in a given dataset
    # TODO: fix this to combine multiple datasets (more than one prefix)

    data, trackids, odata, omask = helpy.load_data(prefix, True, False)
    tracksets, otracksets = helpy.load_tracksets(data, trackids, odata, omask,
                                                 min_length=args.stub)

    coscorrs = [corr.autocorr(np.cos(otrackset), cumulant=False, norm=False)
                for otrackset in otracksets.values()]
    sincorrs = [corr.autocorr(np.sin(otrackset), cumulant=False, norm=False)
                for otrackset in otracksets.values()]

    # Gather all the track correlations and average them
    allcorr = coscorrs + sincorrs
    allcorr = helpy.pad_uneven(allcorr, np.nan)
    tcorr = np.arange(allcorr.shape[1]) / fps
    meancorr = np.nanmean(allcorr, 0)
    added = np.sum(np.isfinite(allcorr), 0)   # number of tracks contributing at each lag
    errcorr = np.nanstd(allcorr, 0) / np.sqrt(added - 1)   # standard error of the mean
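Note: helpy.pad_uneven pads the unequal-length track correlations with NaN so they can be averaged lag by lag. A self-contained sketch of that pattern in plain numpy (my own illustration, not helpy's implementation):

import numpy as np

corrs = [np.array([1.0, 0.8, 0.5]), np.array([1.0, 0.7])]    # toy per-track correlations
longest = max(len(c) for c in corrs)
padded = np.full((len(corrs), longest), np.nan)              # NaN-pad to equal length
for i, c in enumerate(corrs):
    padded[i, :len(c)] = c
meancorr = np.nanmean(padded, 0)                             # per-lag mean over tracks
nfinite = np.sum(np.isfinite(padded), 0)                     # tracks reaching each lag
errcorr = np.nanstd(padded, 0) / np.sqrt(np.maximum(nfinite - 1, 1))
print(meancorr)                                              # -> [1.   0.75 0.5 ]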
Example 12
        try:
            bgimage = Im.open(extdir + prefix + '_0001.tif')
        except IOError:
            try:
                bgimage = Im.open(locdir + prefix + '_001.tif')
            except IOError:
                bgimage = None
        if singletracks:
            mask = np.in1d(trackids, singletracks)
        else:
            mask = None   # avoid a NameError when no tracks are singled out
        plot_tracks(data, trackids, bgimage, mask=mask)

if __name__ == '__main__' and args.nn:
    # Calculate the <nn> correlation for all the tracks in a given dataset
    # TODO: fix this to combine multiple datasets (more than one prefix)

    data, trackids, odata, omask = helpy.load_data(prefix, True, False)
    tracksets, otracksets = helpy.load_tracksets(data, trackids, odata, omask,
                                                 min_length=args.stub)

    coscorrs = [corr.autocorr(np.cos(otrackset), cumulant=False, norm=False)
                for otrackset in otracksets.values()]
    sincorrs = [corr.autocorr(np.sin(otrackset), cumulant=False, norm=False)
                for otrackset in otracksets.values()]

    # Gather all the track correlations and average them
    allcorr = coscorrs + sincorrs
    allcorr = helpy.pad_uneven(allcorr, np.nan)
    tcorr = np.arange(allcorr.shape[1]) / fps
    meancorr = np.nanmean(allcorr, 0)
    errcorr = np.nanstd(allcorr, 0) / np.sqrt(len(allcorr))
    if verbose: