def cumulate(fn, opt: PreProcOpts):
    """Apply cumulative summation to a data set comprising movie frame stacks.

    Frames from ``opt.cum_first_frame`` onward are replaced by their running
    (cumulative) sum along the frame axis; earlier frames are left untouched.
    The summed frame stacks are required to have the same shape as the raw
    data. Results are written to new files whose names are derived from the
    inputs via ``opt.cum_file_suffix``.

    Arguments:
        fn {str or list} -- input file name(s) making up the data set
        opt {PreProcOpts} -- options object; uses cum_first_frame, idfields,
            cum_stacks, cum_file_suffix

    Raises:
        Exception -- any error raised while writing stack data is re-raised
            after logging; stacks are closed in either case.

    Returns:
        list -- file names of the written (cumulated) data set
    """
    if isinstance(fn, list) and len(fn) == 1:
        fn = fn[0]

    def log(*args):
        if isinstance(fn, list):
            dispfn = os.path.basename(fn[0]) + ' etc.'
        else:
            dispfn = os.path.basename(fn)
        idstring = '[{} - {} - cumulate] '.format(
            datetime.datetime.now().time(), dispfn)
        print(idstring, *args)

    dssel = Dataset().from_list(fn)
    log('Cumulating from frame', opt.cum_first_frame)
    dssel.open_stacks(readonly=False)

    # chunks for aggregation: one dask chunk per aggregation group, so that
    # each cumulative sum stays within a single block
    chunks = tuple(
        dssel.shots.groupby(opt.idfields).count()['selected'].values)
    for k, stk in dssel.stacks.items():
        if stk.chunks[0] != chunks:
            if k == 'index':
                # the index stack is deliberately left with its own chunking
                continue
            log(k, 'needs rechunking...')
            dssel.add_stack(k, stk.rechunk({0: chunks}), overwrite=True)
    dssel._zchunks = chunks

    def cumfunc(movie):
        # Work on a copy: map_blocks callbacks must not mutate their input
        # block in place (the original aliased and mutated `movie`, which
        # dask documents as unsafe for the backing data).
        movie_out = movie.copy()
        movie_out[opt.cum_first_frame:, ...] = np.cumsum(
            movie[opt.cum_first_frame:, ...], axis=0)
        return movie_out

    for k in opt.cum_stacks:
        dssel.stacks[k] = dssel.stacks[k].map_blocks(
            cumfunc, dtype=dssel.stacks[k].dtype)

    dssel.change_filenames(opt.cum_file_suffix)
    dssel.init_files(overwrite=True, keep_features=False)
    log('File initialized, writing tables...')
    dssel.store_tables(shots=True, features=True)

    try:
        dssel.open_stacks(readonly=False)
        log('Writing stack data...')
        dssel.store_stacks(overwrite=True, progress_bar=False)
    except Exception:
        log('Cumulative processing failed.')
        # bare raise preserves the original traceback
        raise
    finally:
        dssel.close_stacks()

    log('Cumulation done.')
    return dssel.files
def subtract_bg(fn, opt: PreProcOpts):
    """Subtract the background of diffraction patterns by azimuthal
    integration, excluding the Bragg peaks.

    Peak positions are taken from the data set's existing peak stacks, or
    re-computed with ``find_peaks`` if ``opt.rerun_peak_finder`` is set. The
    peaks are masked out before ``proc2d.remove_background`` removes the
    radially-averaged background from the ``centered`` stack. Results are
    written to new files whose names are derived from the inputs via
    ``opt.nobg_file_suffix``.

    Arguments:
        fn {str or list} -- input file name(s) making up the data set
        opt {PreProcOpts} -- options object; uses verbose, rerun_peak_finder,
            peak_radius, filter_len, float (output dtype flag),
            nobg_file_suffix

    Returns:
        [type] -- file names of the written (background-subtracted) data set
    """
    if isinstance(fn, list) and len(fn) == 1:
        fn = fn[0]

    def log(*args):
        # Print only when verbose, or when any argument is an exception.
        # (Fix: the original tested the undefined outer name `err` instead of
        # the loop variable `e`, raising NameError on ordinary log calls.)
        if not (opt.verbose or any(isinstance(e, Exception) for e in args)):
            return
        if isinstance(fn, list):
            dispfn = os.path.basename(fn[0]) + ' etc.'
        else:
            dispfn = os.path.basename(fn)
        idstring = '[{} - {} - subtract_bg] '.format(
            datetime.datetime.now().time(), dispfn)
        print(idstring, *args)

    ds = Dataset().from_list(fn)
    ds.open_stacks(readonly=False)

    if opt.rerun_peak_finder:
        pks = find_peaks(ds, opt=opt)
        # broadcastable (N, 1, 1) / (N, P, 1) arrays, chunked like the data
        nPeaks = da.from_array(pks['nPeaks'][:, np.newaxis, np.newaxis],
                               chunks=(ds.centered.chunks[0], 1, 1))
        peakX = da.from_array(pks['peakXPosRaw'][:, :, np.newaxis],
                              chunks=(ds.centered.chunks[0], -1, 1))
        peakY = da.from_array(pks['peakYPosRaw'][:, :, np.newaxis],
                              chunks=(ds.centered.chunks[0], -1, 1))
    else:
        nPeaks = ds.nPeaks[:, np.newaxis, np.newaxis]
        peakX = ds.peakXPosRaw[:, :, np.newaxis]
        peakY = ds.peakYPosRaw[:, :, np.newaxis]

    original = ds.centered
    # geometric image center (x, y) passed as the integration origin
    bg_corrected = da.map_blocks(
        proc2d.remove_background, original,
        original.shape[2] / 2, original.shape[1] / 2,
        nPeaks, peakX, peakY,
        peak_radius=opt.peak_radius, filter_len=opt.filter_len,
        dtype=np.float32 if opt.float else np.int32,
        chunks=original.chunks)

    ds.add_stack('centered', bg_corrected, overwrite=True)
    ds.change_filenames(opt.nobg_file_suffix)
    ds.init_files(keep_features=False, overwrite=True)
    ds.store_tables(shots=True, features=True)
    ds.open_stacks(readonly=False)

    try:
        ds.store_stacks(overwrite=True, progress_bar=False)
    except Exception as err:
        log('Error during background correction:', err)
        # bare raise preserves the original traceback
        raise
    finally:
        ds.close_stacks()

    return ds.files