def link_trajectories(self, search_range, memory, filename='data.h5',
                      progress_listener=lambda *args: None):
    self.log.info('Linking particles in file "' + filename + '"')
    with trackpy.PandasHDFStore(filename) as store:
        i = 0
        for linked in trackpy.link_df_iter(store, search_range, memory=memory):
            store.put(linked)
            progress_listener(i)
            i += 1
    return self

def locate_particles(self, frames, diameter, minmass, filename='data.h5',
                     progress_listener=lambda *args: None):
    self.log.info('Starting particle tracking to file: ' + filename)
    with trackpy.PandasHDFStore(filename, t_column='frame', mode='w') as s:
        for i, image in enumerate(frames):
            features = self.locate_on_frame(image, diameter, minmass)
            features['frame'] = i
            s.put(features)
            progress_listener(i)
    self.log.info('Particles located')
    return self
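A hypothetical usage sketch for the two methods above. The owning class is not shown in this snippet, so the class name ParticleTracker, the frames source and the numeric parameters are assumptions for illustration; since both methods return self, the calls could also be chained.

tracker = ParticleTracker()  # hypothetical owner of the methods above
tracker.locate_particles(frames, diameter=11, minmass=200, filename='data.h5')
tracker.link_trajectories(search_range=5, memory=3, filename='data.h5')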
Example #3
def compute_drift(path, smoothing=0, pos_columns=None):
    """Return the ensemble drift, xy(t).

    Parameters
    ----------
    path : string
        Path to the HDF5 file which contains DataFrames(['x', 'y', 'particle']).
    smoothing : integer
        Smooth the drift using a forward-looking rolling mean over
        this many frames.
    pos_columns : list of strings, optional
        Names of the position columns; defaults to ['x', 'y'].

    Returns
    -------
    drift : DataFrame([x, y], index=frame)
    """

    if pos_columns is None:
        pos_columns = ['x', 'y']

    # Drift calculation
    print('Drift calc')
    with tp.PandasHDFStore(path) as traj:
        Nframe = traj.max_frame
        # Initialize the per-frame displacement DataFrame.
        dx = pd.DataFrame(data=np.zeros((Nframe + 1, len(pos_columns))),
                          columns=pos_columns)

        frameA = None
        for f, frameB in enumerate(traj):  # loop over frames
            print('Frame:', f)
            if frameA is not None:
                # Displacements of the particles present in both frames,
                # aligned on the particle label.
                delta = (frameB.set_index('particle')[pos_columns]
                         - frameA.set_index('particle')[pos_columns])
                # Ensemble drift for this frame, ignoring particles missing
                # from one of the two frames.
                dx.loc[f, pos_columns] = np.nanmean(delta.values, axis=0)
            # Remember the current frame.
            frameA = frameB

        if smoothing > 0:
            # pd.rolling_mean was removed from pandas; use the rolling API.
            dx = dx.rolling(smoothing, min_periods=0).mean()
        x = np.cumsum(dx)
    return x
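A hypothetical usage sketch for compute_drift above; the file name 'data.h5' and the smoothing window are assumptions, and tp.subtract_drift is trackpy's helper for removing a measured drift from linked trajectories.

drift = compute_drift('data.h5', smoothing=5)   # per-frame cumulative drift
drift.plot()                                    # inspect the drift curves

with tp.PandasHDFStore('data.h5') as store:
    trajectories = pd.concat(iter(store))
corrected = tp.subtract_drift(trajectories.copy(), drift)  # drift-corrected trajectories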
Example #4

def batch_locate_particles(self, frames, diameter, minmass, filename='data.h5',
                           progress_listener=lambda *args: None):
    self.log.info('Starting particle tracking to file: ' + filename)
    self.log.info('Diameter: ' + str(diameter) + '; Minmass: ' + str(minmass))

    def batches_gen(iterator, size):
        batch = []
        i = 0
        for item in iterator:
            batch.append(item)
            i += 1

            if i == size:
                yield batch
                batch = []
                i = 0

        # Yield the last, possibly partial, batch so that no frames are dropped.
        if batch:
            yield batch

    with trackpy.PandasHDFStore(filename, t_column='frame') as s:
        for batch in batches_gen(frames, 5):
            trackpy.batch(batch, diameter, minmass=minmass, invert=True,
                          output=s, engine='numba')
    self.log.info('Particles located')
    return self
Example #5
def __init__(self, filename='data.h5', particle_id=None):
    self.log = logging.getLogger("Analytics")
    self.plt_lock = threading.Lock()
    with tp.PandasHDFStore(filename) as store:
        trajectories = pd.concat(iter(store))
        self.chart_data = ChartData(trajectories, particle_id)
Example #6
def trajectory_length(filename, particle_id):
    with tp.PandasHDFStore(filename) as store:
        df = pd.concat(iter(store))
        track = df.loc[df['particle'] == particle_id]
        return len(track.index)
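A hypothetical call, followed by an equivalent one-pass variant that returns the length of every trajectory at once instead of re-reading the store per particle; the file name and particle id are placeholders.

n_frames = trajectory_length('data.h5', 42)

with tp.PandasHDFStore('data.h5') as store:
    lengths = pd.concat(iter(store)).groupby('particle').size()
    # lengths[42] is then the same count as n_frames above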
Example #7
lg = 1  # frame lag between the two frames compared in the loop below

# Coarse-graining grid and per-cell accumulator arrays.
grid = RegularGrid([110, 200], [30, 30], [10, 10])
Mtot = np.zeros(grid.shape + (3,))
Ntot = np.zeros(grid.shape, np.int64)
Ctot = np.zeros(grid.shape + (2, 2))
Ttot = np.zeros_like(Mtot)
Na_tot = np.zeros_like(Ntot)
Nd_tot = np.zeros_like(Ntot)
Nc_tot = np.zeros_like(Ntot)
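
# The helper voro_edges used in the loop below is not defined in this snippet.
# A minimal sketch of what it is assumed to do (list the pairs of Voronoi
# neighbours, i.e. the Delaunay edges, of a set of 2D positions), using
# scipy.spatial:
from scipy.spatial import Delaunay

def voro_edges(pos):
    """Return an (n_edges, 2) array of index pairs that are Voronoi neighbours."""
    tri = Delaunay(pos)
    edges = set()
    for simplex in tri.simplices:
        for k in range(3):
            a, b = simplex[k], simplex[(k + 1) % 3]
            edges.add((min(a, b), max(a, b)))
    return np.array(sorted(edges))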


for f in f0:
    
    # get two frames
    with tp.PandasHDFStore(path_traj.format(act)) as s:
        frame0 = s.get(f)
        frame1 = s.get(f + lg)
    
    # work only on particles that exist in both frames
    ## Gather the two times in one DataFrame so that a single row holds the
    ## information about the particle at both time steps.
    joined = frame0.join(frame1.set_index('particle'), on='particle',
                         how='inner', lsuffix='0', rsuffix='1')

    # Extract only the coordinates at each time step. The join above ensures
    # that the rows of pos0 and pos1 refer to the same particles, in the
    # same order.
    pos0 = joined[['x0', 'y0']].to_numpy()
    pos1 = joined[['x1', 'y1']].to_numpy()
    
    # Voronoi neighbors
    ## You may want to refine this with a distance criterion or use another definition of the bonds
    edges0 = voro_edges(pos0)
    edges1 = voro_edges(pos1)
#     ## Optionally, label the axes.
#     ax.set(xlabel='size', ylabel='count');
#  
#     fig, ax = plt.subplots()
#     ax.hist(f['ecc'], bins=20)
#     ## Optionally, label the axes.
#     ax.set(xlabel='ecc', ylabel='count');
#        
#     ## Subpixel accuracy
#     plt.figure()
#     tp.subpx_bias(f);
# =============================================================================
    
    
    # Collect all detected positions GB
    with tp.PandasHDFStore(savepath_position.format(act)) as s:
        tp.batch(frames[fmin[ind]:fmax[ind]], size,
                 minmass=mm,
                 smoothing_size=smoothing_size,
                 separation=separation,
                 threshold=threshold,
                 percentile=percentile,
                 characterize=True,
                 output=s)
 
            


# =============================================================================
#     with tp.PandasHDFStoreSingleNode(savepath_positionh5.format(act)) as s:
#         with tp.PandasHDFStoreSingleNode(savepath_temp.format(act)) as temp:
Example #9
    plt.legend(frameon=False)
    plt.text(901, 30, "Average x displacement: %.4f px/fm" % (x_velocity))
    plt.text(901, 30.5, "Average y displacement: %.4f px/fm" % (y_velocity))
    # Inset axes for the histogram of squared displacements.
    plt.axes([0.2, 0.2, .3, .3])

    # Squared perpendicular distance of each point from the fitted line
    # y_approx (slope a), normalised by the number of points.
    squared_displacements = []
    a = coeff[0]
    for i in range(len(x)):
        disp = (1 / (1 + a**2)) * ((y[i] - y_approx[i])**2) / len(x)
        squared_displacements.append(disp)
    plt.title('MSD')
    plt.hist(squared_displacements, 100)
    plt.show()
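
# The names x, y, coeff, y_approx, x_velocity and y_velocity used above are
# defined outside this fragment. A plausible sketch of those definitions,
# assuming a simple linear fit of the trajectory with numpy (an assumption,
# not the original code), would be:
#
#     coeff = np.polyfit(x, y, 1)        # slope and intercept of y(x)
#     y_approx = np.polyval(coeff, x)    # fitted line evaluated at each x
#     x_velocity = np.mean(np.diff(x))   # mean x displacement per frame
#     y_velocity = np.mean(np.diff(y))   # mean y displacement per frame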


with tp.PandasHDFStore('data.h5') as store:
    trajectories = pd.concat(iter(store))
    #filtered = tp.filter_stubs(trajectories)
    filtered = trajectories
    drift = tp.compute_drift(filtered)

    im = tp.imsd(filtered, 1, 1)
    plt.plot(im.index, im, 'k-', alpha=0.1)
    plt.xscale('log')
    plt.yscale('log')
    plt.title("Mean squared displacement for each particle")
    plt.show()

    disp_x = []
    disp_y = []
    for i in range(1, len(drift.x.values)):