### For each (first frame, number of frames) window: load the localization
### file, keep only localizations inside the window, shift frames to start at
### zero, and save under a window-tagged filename.
for NewFirstFrame, NewNoFrames in zip(NewFirstFrame_list, NewNoFrames_list):
    try:
        ### Load
        locs, info = io.load_locs(path)

        ### Modify: keep frames in [NewFirstFrame, NewFirstFrame + NewNoFrames)
        istrue = locs.frame >= NewFirstFrame
        istrue = istrue & (locs.frame < (NewFirstFrame + NewNoFrames))
        # .copy() so the frame-column assignment below acts on an owned frame,
        # not a view of `locs` (avoids pandas SettingWithCopy issues)
        locs_filter = locs[istrue].copy()
        locs_filter.frame = locs_filter.frame - NewFirstFrame  # Set first frame to zero

        ### Save
        info_filter = info.copy()
        info_filter[0]['Frames'] = NewNoFrames
        extension = '_f%i-%i' % (NewFirstFrame, NewFirstFrame + NewNoFrames)
        io.save_locs(
            path.replace('.hdf5', extension + '.hdf5'),
            locs_filter,
            info_filter + [{
                'NewNoFrames': NewNoFrames,
                'NewFirstFrame': NewFirstFrame,
                'extension': extension,
            }],
            mode='picasso_compatible',
        )
    except Exception:
        # Narrowed from a bare except; report the error instead of hiding it,
        # matching the error handling used by the other loops in this file.
        traceback.print_exc()
        failed_path.extend([path])

print()
print('Failed attempts: %i' % (len(failed_path)))
#%% ############################################ Main loop
### Split trajectories of every input file into sub-trajectories of length
### subN and save the result next to the original file.
failed_path = []
for dir_name, file_name in zip(dir_names, file_names):
    ### Create path
    path = os.path.join(dir_name, file_name)

    ### Run main function
    try:
        ### Load
        locs, info = addon_io.load_locs(path)
        ### Split
        locs_split = special.split_trajectories(locs, subN)
        ### Save (keep `path` untouched so a failure records the real file path)
        base = os.path.splitext(path)[0]
        info_split = info.copy() + [{'subN': subN}]
        addon_io.save_locs(base + '_split%i.hdf5' % subN,
                           locs_split,
                           info_split,
                           mode='picasso_compatible')
    except Exception:
        traceback.print_exc()
        failed_path.extend([path])

print()
print('Failed attempts: %i' % (len(failed_path)))
### Run main function: apply the jump-multiplication transform and save the
### result with a parameter-encoded filename suffix.
try:
    ### Load
    locs, info = addon_io.load_locs(path)

    ### Apply multiplied jumps
    locs_double = special.apply_multiply_jumps(locs, factor, segment, ratio)

    ### Save: factor as a zero-padded 3-digit percentage string (e.g. 0.5 -> '050')
    factor_str = ('%i' % (factor * 100)).zfill(3)
    path = os.path.splitext(path)[0]
    info_double = info.copy() + [{'segment': segment, 'ratio': ratio}]
    addon_io.save_locs(path + '_f%ss%ir%i.hdf5' % (factor_str, segment, ratio),
                       locs_double,
                       info_double,
                       mode='picasso_compatible')
except Exception:
    traceback.print_exc()
    failed_path.extend([path])

print()
print('Failed attempts: %i' % (len(failed_path)))
def main(locs, info, path, **params):
    '''
    Get immobile properties for each group in _picked.hdf5 file (see `picasso.addon`_) and filter.

    Args:
        locs(pandas.DataFrame):  Grouped localization list, i.e. _picked.hdf5 as in `picasso.addon`_
        info(list):              Info _picked.yaml to _picked.hdf5 localizations as list of dictionaries.
        path(str):               Path to _picked.hdf5 file.

    Keyword Args:
        ignore(int=1):           Maximum interruption (frames) allowed to be regarded as one bright time.
        parallel(bool=True):     Apply parallel computing using DASK? Local cluster should be started before according to cluster_setup_howto()
        filter(string='th'):     Which filter to use, either None, 'th' or 'sd' or 'none'

    Returns:
        list:
        - [0](dict): Dict of keyword arguments passed to function.
        - [1](pandas.DataFrame): Immobile properties of each group in ``locs`` as calculated by apply_props()
    '''
    ### Path of file that is processed and number of frames
    path = os.path.splitext(path)[0]
    NoFrames = info[0]['Frames']

    ### Define standard
    standard_params = {
        'ignore': 1,
        'parallel': True,
        'filter': 'th',
    }

    ### Fall back to the standard value for any missing or None-valued key
    ### (replaces the previous try/except with `== None` comparison)
    for key, value in standard_params.items():
        if params.get(key) is None:
            params[key] = value

    ### Remove keys in params that are not needed
    for key in [k for k in params if k not in standard_params]:
        del params[key]

    ### Processing marks: extension & generatedby
    params['generatedby'] = 'spt.immobile_props.main()'

    ##################################### Calculate kinetic properties
    print('Calculating kinetic information ...')
    if params['parallel'] == True:
        print('... in parallel')
        locs_props = apply_props_dask(
            locs,
            ignore=params['ignore'],
        )
    else:
        locs_props = apply_props(
            locs,
            ignore=params['ignore'],
        )

    ##################################### Filtering
    print('Filtering ..(%s)' % (params['filter']))
    params['NoGroups_nofilter'] = len(locs_props)  # Number of groups before filter
    locs_props = filter_(locs_props, NoFrames, params['filter'])  # Apply filter
    params['NoGroups_filter'] = len(locs_props)  # Number of groups after filter

    ##################################### Saving
    print('Saving _tprops ...')
    locs_props.reset_index(inplace=True)  # Write group index into separate column
    info_props = info.copy() + [params]
    addon_io.save_locs(path + '_tprops.hdf5',
                       locs_props,
                       info_props,
                       mode='picasso_compatible')

    return [params, locs_props]
def main(locs, info, path, **params):
    '''
    Get mobile properties for each group in trajectories list (_pickedxxxx.hdf5) file as obtained by linklocs.main().

    Args:
        locs(pandas.DataFrame):  Trajectories list (_pickedxxxx.hdf5) as obtained by linklocs.main()
        info(list):              Info _pickedxxxx.yaml to _pickedxxxx.hdf5 trajectories as list of dictionaries.
        path(str):               Path to _pickedxxxx.hdf5 file.

    Keyword Args:
        parallel(bool=True):     Apply parallel computing using DASK? Local cluster should be started before according to cluster_setup_howto()

    Returns:
        list:
        - [0](dict): Dict of keyword arguments passed to function.
        - [1](pandas.DataFrame): Mobile properties of each group in ``locs`` as calculated by apply_props()
    '''
    ##################################### Params and file handling

    ### Path of file that is processed
    path = os.path.splitext(path)[0]

    ### Define standard
    standard_params = {
        'parallel': True,
    }

    ### Fall back to the standard value for any missing or None-valued key
    ### (replaces the previous try/except with `== None` comparison)
    for key, value in standard_params.items():
        if params.get(key) is None:
            params[key] = value

    ### Remove keys in params that are not needed
    for key in [k for k in params if k not in standard_params]:
        del params[key]

    ### Processing marks
    params['generatedby'] = 'spt.mob_props.main()'

    ##################################### Calculate kinetic properties
    print('Calculating kinetic information ...')
    if params['parallel'] == True:
        print('... in parallel')
        locs_props = apply_props_dask(locs)
    else:
        locs_props = apply_props(locs)

    ##################################### Saving
    print('Saving _tmobprops ...')
    locs_props = locs_props.assign(group=locs_props.index.values)  # Write group index into separate column
    info_props = info.copy() + [params]
    addon_io.save_locs(path + '_tmobprops.hdf5',
                       locs_props,
                       info_props,
                       mode='picasso_compatible')

    return [params, locs_props]
### Select groups for ensemble analysis based on _props criteria, restrict the
### picked localizations to those groups, and save the result.
X = locs.copy()
Y = props.copy()

##############################################################################
'''
Define criteria for ensemble selection based on _props. Rewrite this section as needed ...
'''
### Only get center FOV
istrue = np.sqrt((Y.x - 350)**2 + (Y.y - 350)**2) <= 200
### Number of localizations threshold
istrue = istrue & (Y.n_locs >= 100) & (Y.n_locs <= 200)

##############################################################################
'''
Query both _props and _picked for positive groups (istrue!) and save picked
'''
### Query
Y = Y[istrue]
groups = Y.group.unique()  # Positives
X = X.query('group in @groups')

### Only first localizations...
istrue = X.frame <= 100
X = X[istrue]  # Fix: this mask was previously computed but never applied

#%%
### Save
info_query = info.copy()
addon_io.save_locs(path[1].replace('.hdf5', '_query.hdf5'),
                   X,
                   info_query,
                   mode='picasso_compatible')