def analyze_pixel(ibw_file, param_file):
    '''
    Analyzes a single pixel

    Parameters
    ----------
    ibw_file : str
        Path to \*.ibw file
    param_file : str
        Path to parameters.cfg file

    Returns
    -------
    pixel : Pixel
        The pixel object, read and analyzed
    '''
    signal_array = signal(ibw_file)
    n_pixels, params = configuration(param_file)

    pixel = Pixel(signal_array, params=params)
    pixel.analyze()
    pixel.plot()
    plt.xlabel('Time Step')
    plt.ylabel('Freq Shift (Hz)')

    print('tFP is', pixel.tfp, 's')

    return pixel
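# A minimal usage sketch for analyze_pixel(). The file paths below are
# hypothetical placeholders; substitute the .ibw file and parameters.cfg
# from your own FF-trEFM data folder.
def _example_analyze_pixel():
    ibw_file = r'C:\Data\FF_Example\FFtrEFM_0001.ibw'  # hypothetical path
    param_file = r'C:\Data\FF_Example\parameters.cfg'  # hypothetical path

    # Returns the analyzed Pixel object; tfp is the time to first peak
    pixel = analyze_pixel(ibw_file, param_file)
    print('Time to first peak:', pixel.tfp, 's')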
def params_from_experiment(can_params_file, params_cfg):
    '''
    Generates a simulation-compatible configuration given a cantilever
    parameters file (typically acquired in the experiment) and a
    parameters.cfg file saved with FF-trEFM data

    Parameters
    ----------
    can_params_file : str
        Path to cantilever parameters file (from Force Calibration tab)
    params_cfg : str or dict
        Path to parameters.cfg file (from FF-trEFM experiment, in the data
        folder), or a dictionary of those parameters directly

    Returns
    -------
    can_params, force_params, sim_params : dict
        Simulation-ready parameter dictionaries
    can, par : dict
        The cantilever and experiment parameters as loaded
    '''
    can = cantilever_params(can_params_file)

    if isinstance(params_cfg, dict):
        par = params_cfg
    else:
        _, par = configuration(params_cfg)

    can_params = {}
    can_params['amp_invols'] = can['Initial']['AMPINVOLS']
    can_params['def_invols'] = can['Initial']['DEFINVOLS']
    can_params['soft_amp'] = 0.3
    can_params['drive_freq'] = par['drive_freq']
    can_params['res_freq'] = par['drive_freq']
    can_params['k'] = can['Initial']['SpringConstant']
    can_params['q_factor'] = can['Initial']['Q']

    force_params = {}
    force_params['es_force'] = can['Differential']['ElectroForce']
    force_params['ac_force'] = can['Initial']['DrivingForce']
    force_params['dc_force'] = 0  # only nonzero for GKPFM
    force_params['delta_freq'] = can['Differential']['ResFrequency']
    force_params['tau'] = 1e-5
    force_params['dFdz'] = can['Differential']['dFdZ']
    force_params['lift_height'] = can['Initial']['LiftHeight']

    sim_params = {}
    sim_params['trigger'] = par['trigger']
    sim_params['total_time'] = par['total_time']
    sim_params['sampling_rate'] = par['sampling_rate']

    return can_params, force_params, sim_params, can, par
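# A minimal sketch showing how the dictionaries returned by
# params_from_experiment() plug into a simulation. The file paths are
# hypothetical placeholders; the MechanicalDrive call pattern mirrors
# the one used in cal_curve() below.
def _example_simulate_from_experiment():
    can_file = r'C:\Data\FF_Example\cantilever_parameters.txt'  # hypothetical
    cfg_file = r'C:\Data\FF_Example\parameters.cfg'  # hypothetical

    can_params, force_params, sim_params, _, _ = params_from_experiment(
        can_file, cfg_file)

    # Simulate a single cantilever deflection trace with a 1 us decay
    force_params['tau'] = 1e-6
    cant = MechanicalDrive(can_params, force_params, sim_params)
    Z, _ = cant.simulate()
    return Z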
def cal_curve(can_path, param_cfg, taus_range=[-7, -3], plot=True, **kwargs):
    '''
    Generates a calibration curve for a given cantilever given some particular
    parameters. Ideally you would have a tip parameters file as well.

    Usage:
    ------
    >>> param_cfg = 'path'
    >>> can_params = 'path'
    >>> taus, tfps, spl = cal_curve(can_params, param_cfg)
    >>> from matplotlib import pyplot as plt
    >>> plt.plot(tfps, taus, 'bX-')

    If you want to change the fit parameters per tau:

    >>> taus, tfps, spl = cal_curve(can_params, param_cfg, roi=0.001, n_taps=199)

    :param can_path: Path to cantilever parameters.txt file
    :type can_path: str
    :param param_cfg: Path to parameters.cfg file (from FF-trEFM experiment,
        in the data folder)
    :type param_cfg: str
    :param taus_range: Range of time constants to simulate, taken as
        [low, high]. Negative values are interpreted as log10 exponents.
    :type taus_range: ndarray (2-index array), optional
    :param plot: Plots the final taus vs tfps for verification
    :type plot: bool, optional
    :param kwargs: Additional fit parameters passed to the pixel analysis
        (e.g. roi, n_taps)
    :type kwargs: dict, optional

    :returns: tuple (taus, tfps, spl)
        WHERE
        ndarray taus is the single-exponential taus that were simulated
        ndarray tfps is the measured times to first peak
        UnivariateSpline spl is the spline object of the calibration curve.
        To scale an image, type spl(x)
    '''
    if isinstance(can_path, str):
        can_params, force_params, sim_params, _, parms = load_parm(can_path,
                                                                   param_cfg)
    elif isinstance(can_path, tuple):
        can_params, force_params, sim_params = load_sim_config(can_path)
        _, parms = configuration(param_cfg)
        can_params['drive_freq'] = parms['drive_freq']
        can_params['res_freq'] = parms['drive_freq']
        sim_params['trigger'] = parms['trigger']
        sim_params['total_time'] = parms['total_time']
        sim_params['sampling_rate'] = parms['sampling_rate']

    if len(taus_range) != 2 or taus_range[1] <= taus_range[0]:
        raise ValueError('taus_range must be ascending and contain 2 items')

    # Check if the range is given as log10 exponents or as actual values
    if taus_range[0] < 0 or taus_range[1] < 0:
        _rlo = taus_range[0]
        _rhi = taus_range[1]
    else:
        _rlo = np.log10(taus_range[0])
        _rhi = np.log10(taus_range[1])

    taus = np.logspace(_rlo, _rhi, 50)
    tfps = []

    for t in taus:
        force_params['tau'] = t
        cant = MechanicalDrive(can_params, force_params, sim_params)
        Z, _ = cant.simulate()
        try:
            pix = cant.analyze(plot=False, **kwargs)
            tfps.append(pix.tfp)
        except Exception:
            print('Error at tau =', t)

    # Sort the arrays by tfp
    taus = taus[np.argsort(tfps)]
    tfps = np.sort(tfps)

    # Splines work better on shorter lengthscales, so fit in log-space
    taus = np.log(taus)
    tfps = np.log(tfps)

    # Error corrections
    # Remove negative x-steps (must be monotonic for the spline)
    dtfp = np.diff(tfps)
    tfps = np.array(tfps)
    taus = np.array(taus)
    tfps = np.delete(tfps, np.where(dtfp < 0)[0])
    taus = np.delete(taus, np.where(dtfp < 0)[0])

    # Remove "hot" pixels in the calibration curve
    hotpixels = np.abs(taus - medfilt(taus))
    taus = np.delete(taus, np.where(hotpixels > 0))
    tfps = np.delete(tfps, np.where(hotpixels > 0))

    # Remove negative slopes
    neg_slope = np.diff(taus) / np.diff(tfps)
    while any(np.where(neg_slope < 0)[0]):
        tfps = np.delete(tfps, np.where(neg_slope < 0)[0])
        taus = np.delete(taus, np.where(neg_slope < 0)[0])
        neg_slope = np.diff(taus) / np.diff(tfps)

    # Remove infinite slopes (tfp saturation at long taus)
    while any(np.where(neg_slope == np.inf)[0]):
        tfps = np.delete(tfps, np.where(neg_slope == np.inf)[0])
        taus = np.delete(taus, np.where(neg_slope == np.inf)[0])
        neg_slope = np.diff(taus) / np.diff(tfps)

    try:
        spl = ius(tfps, taus, k=4)
    except Exception:
        print('=== Error generating cal-curve. Check manually ===')
        spl = None
        print(taus)
        print(tfps)

    if plot:
        pix.plot()
        fig, ax = plt.subplots(facecolor='white')
        ax.loglog(np.exp(tfps), np.exp(taus), 'bX-')
        try:
            ax.loglog(np.exp(tfps), np.exp(spl(tfps)), 'r--')
        except Exception:
            pass
        ax.set_xlabel('$t_{fp}$ (s)')
        ax.set_ylabel(r'$\tau$ (s)')
        ax.set_title('Calibration curve')

    # Save the calibration curve
    df = pd.DataFrame(index=taus, data=tfps)
    df = df.rename(columns={0: 'tfps'})
    df.index.name = 'taus'
    df.to_csv('Calibration_Curve.csv')

    print('Do not forget that the spline is in log-space')

    return taus, tfps, spl
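# A minimal sketch of applying the calibration spline to a measured tfp
# image. As cal_curve() warns, the spline is fit in log-space, so inputs
# and outputs must be wrapped in np.log()/np.exp(). 'tfp_image.csv' is a
# hypothetical file name for an array of tfp values in seconds.
def _example_apply_cal_curve(spl):
    tfp = np.loadtxt('tfp_image.csv', delimiter=',')  # hypothetical file
    tau = np.exp(spl(np.log(tfp)))  # convert each tfp (s) to tau (s)
    return tau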
def main(argv=None):
    """Main function of the executable file."""
    logging.basicConfig(filename='error.log', level=logging.INFO)

    # Get the CPU count to display in help.
    cpu_count = multiprocessing.cpu_count()

    if argv is None:
        argv = sys.argv[1:]

    # Parse arguments from the command line, and print out help.
    parser = ap.ArgumentParser(description='Analysis software for FF-trEFM')
    parser.add_argument('path', nargs='?', default=os.getcwd(),
                        help='path to directory')
    parser.add_argument('-p', help='parallel computing option should be '
                                   'followed by the number of CPUs.',
                        type=int, choices=range(2, cpu_count + 1))
    parser.add_argument('-v', action='version',
                        version='FFtr-EFM 2.0 Release Candidate')
    args = parser.parse_args(argv)

    # Scan the path for .ibw and .cfg files.
    path = args.path
    filelist = os.listdir(path)

    data_files = [os.path.join(path, name)
                  for name in filelist if name[-3:] == 'ibw']

    config_file = [os.path.join(path, name)
                   for name in filelist if name[-3:] == 'cfg'][0]

    # Load parameters from .cfg file.
    n_pixels, parameters = load.configuration(config_file)

    print('Recombination: ', parameters['recombination'])
    if 'phase_fitting' in parameters:
        print('Phase fitting: ', parameters['phase_fitting'])

    print('ROI: ', parameters['roi'])

    if not args.p:
        # Initialize arrays.
        tfp = np.zeros((len(data_files), n_pixels))
        shift = np.zeros((len(data_files), n_pixels))

        # Initialize plotting.
        plt.ion()

        fig = plt.figure(figsize=(12, 6), tight_layout=True)
        grid = gs.GridSpec(1, 2)
        tfp_ax = plt.subplot(grid[0, 0])
        shift_ax = plt.subplot(grid[0, 1])

        plt.setp(tfp_ax.get_xticklabels(), visible=False)
        plt.setp(tfp_ax.get_yticklabels(), visible=False)
        plt.setp(shift_ax.get_xticklabels(), visible=False)
        plt.setp(shift_ax.get_yticklabels(), visible=False)

        tfp_ax.set_title('tFP Image')
        shift_ax.set_title('Shift Image')

        kwargs = {'origin': 'lower', 'aspect': 'equal'}

        tfp_image = tfp_ax.imshow(tfp * 1e6, cmap='afmhot', **kwargs)
        shift_image = shift_ax.imshow(shift, cmap='cubehelix', **kwargs)
        text = plt.figtext(0.4, 0.1, '')
        plt.show()

        # Load every file in the file list one by one.
        for i, data_file in enumerate(data_files):
            signal_array = load.signal(data_file)
            line_inst = line.Line(signal_array, parameters, n_pixels)
            tfp[i, :], shift[i, :], _ = line_inst.analyze()
            # line_inst = line.Line(signal_array, parameters, n_pixels,
            #                       fitphase=True)
            # tfpphase[i, :], _, _ = line_inst.analyze()

            tfp_image = tfp_ax.imshow(tfp * 1e6, cmap='inferno', **kwargs)
            shift_image = shift_ax.imshow(shift, cmap='cubehelix', **kwargs)

            tfp_sc = tfp[tfp.nonzero()] * 1e6
            tfp_image.set_clim(vmin=tfp_sc.min(), vmax=tfp_sc.max())

            shift_sc = shift[shift.nonzero()]
            shift_image.set_clim(vmin=shift_sc.min(), vmax=shift_sc.max())

            tfpmean = 1e6 * tfp[i, :].mean()
            tfpstd = 1e6 * tfp[i, :].std()

            string = ("Line {0:.0f}, average tFP (us) = "
                      "{1:.2f} +/- {2:.2f}".format(i + 1, tfpmean, tfpstd))

            text.remove()
            text = plt.figtext(0.35, 0.1, string)

            plt.draw()
            plt.pause(0.0001)

            del line_inst  # Delete the instance to free up memory.

    elif args.p:
        print('Starting parallel processing, using '
              '{0:1d} CPUs.'.format(args.p))
        start_time = time.time()  # Keep track of when processing started.

        # Create a pool of workers.
        pool = multiprocessing.Pool(processes=args.p)

        # Create the iterable and map onto the function.
        n_files = len(data_files)
        iterable = zip(data_files, [parameters] * n_files,
                       [n_pixels] * n_files)
        result = pool.map(process_line, iterable)

        # Do not forget to close spawned processes.
        pool.close()
        pool.join()

        # Unzip the result.
        tfp_list, shift_list = zip(*result)

        # Initialize arrays.
        tfp = np.zeros((n_files, n_pixels))
        shift = np.zeros((n_files, n_pixels))

        # Convert the list of arrays to a 2D array.
        for i in range(n_files):
            tfp[i, :] = tfp_list[i]
            shift[i, :] = shift_list[i]

        elapsed_time = time.time() - start_time
        print('It took {0:.1f} seconds.'.format(elapsed_time))

    # Filter bad pixels.
    tfp_fixed, _ = badpixels.fix_array(tfp, threshold=2)
    tfp_fixed = np.array(tfp_fixed)

    # Save csv files.
    os.chdir(path)
    np.savetxt('tfp.csv', np.fliplr(tfp).T, delimiter=',')
    np.savetxt('shift.csv', np.fliplr(shift).T, delimiter=',')
    np.savetxt('tfp_fixed.csv', np.fliplr(tfp_fixed).T, delimiter=',')

    return
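# A minimal sketch of invoking main() programmatically rather than from the
# shell. The directory path is a hypothetical placeholder, and '-p 4'
# assumes the machine has at least 4 CPUs (the parser restricts -p to
# range(2, cpu_count + 1)).
def _example_run_main():
    # Serial processing of the current working directory:
    main([])
    # Parallel processing of a specific folder with 4 workers:
    main(['-p', '4', r'C:\Data\FF_Example'])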
def load_folder(folder_path='', xy_scansize=[0, 0], file_name='FF_H5',
                textload=False, verbose=False):
    """
    Sets up loading the HDF5 files. Parses the data file list and creates
    the .H5 file path

    :param folder_path: Path to folder you want to process
    :type folder_path: str
    :param xy_scansize: Width by height in meters (e.g. [8e-6, 4e-6]),
        if not in the parameters file
    :type xy_scansize: 2-float array
    :param file_name: Desired file name, otherwise is auto-generated
    :type file_name: str
    :param textload: If you have a folder of .txt instead of .ibw
        (older files, some synthetic data)
    :type textload: bool, optional
    :param verbose: Whether to output the datasets being processed
    :type verbose: bool, optional

    :returns: tuple (h5_path, data_files, parm_dict)
        WHERE
        str h5_path is the file path to the H5 file created
        list data_files is the list of \*.ibw files in the folder to be processed
        dict parm_dict is the dictionary of relevant scan parameters
    """
    if any(xy_scansize) and len(xy_scansize) != 2:
        raise Exception('XY Scan Size must be either empty (in .cfg) or length-2')

    if not any(folder_path):
        folder_path = sidpy.io.interface_utils.openfile_dialog(
            caption='Select Config File in FF-trEFM folder',
            file_types='Config Files (*.cfg)')
        folder_path = '/'.join(folder_path.split('/')[:-1])

    print(folder_path, 'folder path')
    filelist = sorted(os.listdir(folder_path))

    if not textload:
        data_files = [os.path.join(folder_path, name)
                      for name in filelist
                      if (name[-3:] == 'ibw' and 'FFtrEFM' in name)]
    else:
        data_files = [os.path.join(folder_path, name)
                      for name in filelist if name[-3:] == 'txt']

    if not data_files:
        raise OSError('No data files found! Are these text files?')

    config_file = [os.path.join(folder_path, name)
                   for name in filelist if name[-3:] == 'cfg'][0]
    n_pixels, parm_dict = load.configuration(config_file)
    parm_dict['num_rows'] = len(data_files)
    parm_dict['num_cols'] = n_pixels

    # Add dimensions if not in the config file
    if 'FastScanSize' not in parm_dict.keys():
        if not any(xy_scansize):
            raise Exception('Need XY Scan Size! Save "Width" and "Height" in '
                            'config or pass xy_scansize')

        [width, height] = xy_scansize
        if width > 1e-3:  # if entered in microns
            width = width * 1e-6
            height = height * 1e-6

        parm_dict['FastScanSize'] = width
        parm_dict['SlowScanSize'] = height

    # Some config files use width/height keys instead
    if 'width' in parm_dict.keys():
        parm_dict['FastScanSize'] = parm_dict['width']
        parm_dict['SlowScanSize'] = parm_dict['height']

    # Check the aspect ratio is correct
    ratio = np.round(parm_dict['FastScanSize'] * 1e6, 4) / \
        np.round(parm_dict['SlowScanSize'] * 1e6, 4)
    if n_pixels / len(data_files) != ratio:
        print(ratio)
        print(parm_dict['FastScanSize'], parm_dict['SlowScanSize'],
              n_pixels / len(data_files), len(data_files))
        raise Exception('X-Y dimensions do not match the file list. Add '
                        'manually to the config file. Check n_pixels.')

    # Add associated dimension info.
    #
    # e.g. for a 16000-point signal with 2000 averages per line and 10 pixels
    # (10 MHz sampling of a 1.6 ms long signal = 16000 points, 200 averages
    # per pixel):
    #   parm_dict['pnts_per_pixel'] = 200 (# signals at each pixel)
    #   parm_dict['pnts_per_avg'] = 16000 (# points per signal, called an "average")
    #   parm_dict['pnts_per_line'] = 2000 (# signals in each line)
    if 'pnts_per_pixel' not in parm_dict.keys():
        print('Loading first signal')
        # Use the first data set to determine parameters
        line_file = load.signal(data_files[0])
        parm_dict['pnts_per_avg'] = int(line_file.shape[0])

        try:
            # For 1 average per pixel, this will fail
            parm_dict['pnts_per_pixel'] = int(line_file.shape[1]
                                              / parm_dict['num_cols'])
            parm_dict['pnts_per_line'] = int(line_file.shape[1])
        except IndexError:
            parm_dict['pnts_per_pixel'] = 1
            parm_dict['pnts_per_line'] = 1

    folder_path = folder_path.replace('/', '\\')
    if not os.path.exists(file_name):
        h5_path = os.path.join(folder_path, file_name) + '.h5'
    else:
        h5_path = file_name

    return h5_path, data_files, parm_dict
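# A minimal usage sketch for load_folder(). The folder path is a
# hypothetical placeholder; passing xy_scansize is only needed when the
# scan dimensions are missing from parameters.cfg.
def _example_load_folder():
    h5_path, data_files, parm_dict = load_folder(
        folder_path=r'C:\Data\FF_Example',  # hypothetical folder of .ibw files
        xy_scansize=[8e-6, 4e-6])  # width x height in meters, if not in .cfg

    print('Will write to', h5_path)
    print('Found', len(data_files), 'data files,',
          parm_dict['num_cols'], 'pixels per line')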