def plot_xy_orbit(dirname, allfiles=True, user_filind=0, filter=True, fdrive=41.0):
    """Plot position spectra and the x-y orbit for data files in a directory.

    For each selected file, draw the amplitude spectra of the x and y
    position channels on a log-log axis, then show a scatter plot of the
    raw x-y samples (the "orbit") in a separate figure.

    INPUTS:  dirname,     directory searched for data files
             allfiles,    if True, process every file found; otherwise
                          process only the file at index user_filind
             user_filind, index of the single file to analyze when
                          allfiles is False
             filter,      unused here; kept for interface compatibility
             fdrive,      unused here; kept for interface compatibility

    OUTPUTS: nothing, displays matplotlib figures
    """
    print('Analyzing: ', dirname, ' ...')

    file_list, _ = bu.find_all_fnames(dirname)
    nfile = len(file_list)

    for ind, fname in enumerate(file_list):
        ### Optionally restrict the analysis to one user-selected file
        if (not allfiles) and (ind != user_filind):
            continue

        bu.progress_bar(ind, nfile)

        obj = bu.DataFile()
        obj.load(fname)

        ### One-sided FFT frequency axis for the position data
        fft_freqs = np.fft.rfftfreq(obj.nsamp, d=1.0/obj.fsamp)

        plt.loglog(fft_freqs, np.abs(np.fft.rfft(obj.pos_data[0])))
        plt.loglog(fft_freqs, np.abs(np.fft.rfft(obj.pos_data[1])))

        ### Separate figure for the x-y orbit
        plt.figure()
        plt.scatter(obj.pos_data[0], obj.pos_data[1])
        plt.show()
def proc_dir(dir, data_column=0, plot=False):
    """Build beam/force profiles for every file in a directory, grouped
    by cantilever height.

    Each file is reduced by profile() to (bins, values, errors, height).
    Files sharing a height are accumulated into a single File_prof
    object; afterwards each profile is sorted in its bin coordinate and
    its sigma^2 figure of merit is computed.

    NOTE(review): relies on module-level globals `nbins`,
    `sigmasq_cutoff`, `baseline_fit` and `baseline_edge` — confirm they
    are defined before this is called.

    INPUTS:  dir,         directory containing the profile data files
             data_column, data column passed through to profile()
             plot,        passed through to profile() for debug plotting

    OUTPUTS: file_profs,  list of File_prof objects, one per height
             hs,          array of cantilever heights, one per profile
             sigmasqs,    array of sigma^2 values, one per profile
    """
    files, lengths = bu.find_all_fnames(dir)
    file_profs = []
    hs = []
    for fi in files:
        b, y, e, h = profile(fi, nbins=nbins, plot=plot, data_column=data_column)
        #print h
        if h not in hs:
            #if new height then create new profile object
            hs.append(h)
            f = File_prof(b, y, e, h)
            ### Date taken as the first run of >=8 digits in the path,
            ### i.e. the YYYYMMDD directory component
            f.date = re.search(r"\d{8,}", dir)[0]
            file_profs.append(f)
        else:
            #if height repeated then append data to object for that height
            for fp in file_profs:
                if fp.cant_height == h:
                    fp.bins = np.append(fp.bins, b)
                    fp.y = np.append(fp.y, y)
                    fp.errors = np.append(fp.errors, e)
    #now rebin all profiles: sort each accumulated profile by bin position
    for fp in file_profs:
        sorter = np.argsort(fp.bins)
        fp.bins = fp.bins[sorter]
        fp.y = fp.y[sorter]
        fp.errors = fp.errors[sorter]
        ### Bin widths for later integration; 0-padded on the right so
        ### dxs has the same length as bins (trapezoid rule convention)
        fp.dxs = np.append(np.diff(fp.bins), 0)
        # if len(fp.bins) > nbins:
        #     b, y, e = bu.rebin(fp.bins, fp.y, nbins=nbins, correlated_errs=True)
        #     fp.bins = b
        #     fp.y = y
        #     fp.errors = e
        #     fp.dxs = np.append(np.diff(b), 0)  #0 pad left trapezoid rule
    sigmasqs = []
    hs = []
    for f in file_profs:
        f.sigsq(cutoff=sigmasq_cutoff)
        sigmasqs.append(f.sigmasq)
        hs.append(f.cant_height)
        if baseline_fit:
            ### Subtract a flat baseline estimated from the profile wings
            ### (|bin| beyond baseline_edge)
            fit_inds = np.abs(f.bins) > baseline_edge
            baseline = np.mean((f.y)[fit_inds])
            f.y -= baseline
    return file_profs, np.array(hs), np.array(sigmasqs)
def proc_dir(dir):
    """Process every file in a directory into a profile object and
    collect the distribution mean of each.

    INPUTS:  dir, directory containing the profile data files

    OUTPUTS: cents,      list of distribution means, one per file
             file_profs, list of File_prof objects, one per file
    """
    ### bu.find_all_fnames returns a (file_list, lengths) tuple elsewhere
    ### in this codebase; unpack it so the loop iterates over filenames
    ### rather than over the 2-tuple itself.
    files, _ = bu.find_all_fnames(dir)
    file_profs = []
    cents = []
    for fi in files:
        b, y, e = profile(fi)
        f = File_prof(b, y, e)
        ### Date assumed at characters 8-16 of the path, i.e. a layout
        ### like '/data/<YYYYMMDD>/...'. NOTE(review): a regex search for
        ### r"\d{8,}" (as used in the other proc_dir) would be less
        ### fragile — confirm the expected path layout before changing.
        f.date = dir[8:16]
        file_profs.append(f)
        f.dist_mean()
        cents.append(f.mean)
    return cents, file_profs
def make_file_objs(datadir, hpf=False, hpf_freq=1.0, \
                   detrend=False, diag=False):
    """Load every data file in a directory into calibrated DataFile objects.

    Each file is loaded, its stage position calibrated, and optionally
    high-pass filtered and/or polynomial-detrended.

    INPUTS:  datadir,  directory searched for data files
             hpf,      if True, apply a high-pass filter to each file
             hpf_freq, corner frequency [Hz] for the high-pass filter
             detrend,  if True, detrend each file with a polynomial fit
             diag,     unused here; kept for interface compatibility

    OUTPUTS: objs, list of processed bu.DataFile objects
    """
    found = bu.find_all_fnames(datadir)

    file_objs = []
    for fname in found:
        obj = bu.DataFile()
        obj.load(fname)
        obj.calibrate_stage_position()

        ### Optional per-file conditioning steps
        if hpf:
            obj.high_pass_filter(fc=hpf_freq)
        if detrend:
            obj.detrend_poly()

        file_objs.append(obj)

    return file_objs
limitlab2 = 'With Decca 2' ################################################################## ################################################################## ################################################################## if not plot_just_current: grav_funcs = gu.build_mod_grav_funcs(theory_data_dir) gfuncs = grav_funcs['gfuncs'] yukfuncs = grav_funcs['yukfuncs'] lambdas = grav_funcs['lambdas'] lims = grav_funcs['lims'] grav_funcs.clear() print("Loaded grav sim data") datafiles = bu.find_all_fnames(data_dir, ext=config.extensions['data']) datafiles = datafiles[file_inds[0]:file_inds[1]] print("Processing %i files..." % len(datafiles)) if len(datafiles) == 0: print("Found no files in: ", data_dir) quit() if not load_alphadat: if not load_fildat: fildat = gu.get_data_at_harms(datafiles, p0_bead=p0_bead, ax_disc=0.5, plotfilt=plotfilt, \ cantind=0, ax1='x', ax2='z', diag=diag, plottf=False, \ nharmonics=nharmonics, harms=harms, \ ext_cant_drive=True, ext_cant_ind=1, \ max_file_per_pos=max_file_per_pos, userlims=userlims, \
import numpy as np import matplotlib.pyplot as plt import matplotlib import bead_util as bu from scipy.optimize import curve_fit import dill as pickle cpath = "/data/20180927/bead1/spinning/1k_valve_of" filesc = bu.find_all_fnames(cpath) freqs = np.fft.rfftfreq(50000, d=1. / 5000) def get_dir_data(files, drive_range=[1., 1500.], drive_amp=5000.): nf = len(files) ns = 50000 nfreq = len(freqs) d_ave = np.zeros((nf, nfreq), dtype=complex) pb = [] pc = [] pp = [] t = [] xs = np.zeros((nf, nfreq), dtype=complex) ys = np.zeros((nf, nfreq), dtype=complex) zs = np.zeros((nf, nfreq), dtype=complex) for i, f in enumerate(files): df = bu.DataFile() df.load(f) df.load_other_data() drive = df.other_data[2] dbool = (np.abs(np.fft.rfft(drive)) > drive_amp) * (
import dill as pickle
import scipy.interpolate as interp
import numpy as np
import matplotlib.pyplot as plt
import matplotlib.mlab as mlab
import bead_util as bu
import calib_util as cu
import configuration as config
import time

### Directory of power-calibration files, processed in time order
dirname = '/data/old_trap/20201202/power/init'
files, _ = bu.find_all_fnames(dirname, sort_time=True)

### For each file, record the mean z feedback setpoint and the mean
### measured optical power (magnitude)
fb_set = []
power = []
for fname in files:
    obj = bu.DataFile()
    obj.load(fname)

    fb_set.append(np.mean(obj.pos_fb[2]))
    power.append(np.abs(np.mean(obj.power)))

### Power as a function of feedback setpoint
plt.plot(fb_set, power)
plt.show()
if len(dirlengths) != 0: oldlength = 0 for dirind, length in enumerate(dirlengths): oldlength += length tlength = oldlength - np.sum(bad_inds < oldlength) if tlength < plot_ind: continue if dirind+2 in label_keys: ax[0].axvline(x=times2[tlength]*(1.0/3600), lw=2, label=dirmarkers[dirind+2][0], \ color=dirmarkers[dirind+2][1], ls=dirmarkers[dirind+2][2]) ax[0].legend() plt.tight_layout() pickle.dump(computed_freq_dict, open(computed_freq_path, 'wb')) plt.show() allfiles, lengths = bu.find_all_fnames(dirs) fit_monochromatic_line(allfiles, minfreq=minfreq, maxfreq=maxfreq, \ file_inds=file_inds, diag=diag, dirlengths=lengths)
fits['x'] = (xdat, diag_xdat) else: fits['x'] = (xdat) if fit_zdat: if diag: fits['z'] = (zdat, diag_zdat) else: fits['z'] = (zdat) if diag: return force_curves, diag_force_curves, fits else: return force_curves, fits datafiles, lengths = bu.find_all_fnames(dir1, ext=config.extensions['data']) datafiles = datafiles[start_file:start_file + maxfiles] force_dic, diag_force_dic, fits= \ get_force_curve_dictionary(datafiles, ax1=ax1_lab, diag=diag, \ fit_xdat=fit_xdat, fit_zdat=fit_zdat, plottf=False) if fit_xdat: xdat = fits['x'][0] if diag: diag_xdat = fits['x'][1] if fit_zdat: zdat = fits['z'][0] if diag: diag_zdat = fits['z'][1]
from bead_util import find_all_fnames import sem_util as su img_dir = '/Users/manifestation/Stanford/beads/photos/sem/20200624_gbeads-7_5um/' max_file = 1000 substr = '7_5um_5000x_uc' substr = '7_5um_calibration_5000x_uc' devlist = [] with_info = True show_after = True filenames, _ = find_all_fnames(img_dir, ext='.tif', substr=substr) # filenames.sort(key = su.get_devnum) if len(devlist): bad_inds = [] for fileind, filename in enumerate(filenames): found = False for dev in devlist: if dev in filename: found = True break if not found: bad_inds.append(fileind) for ind in bad_inds[::-1]:
import dill as pickle import scipy.interpolate as interp import numpy as np import matplotlib.pyplot as plt import matplotlib.mlab as mlab import bead_util as bu import configuration as config import time dirname = '/data/20180618/bead1/discharge/fine3/' files = bu.find_all_fnames(dirname) #files = ['/data/20180618/bead1/discharge/fine3/turbombar_xyzcool_elec3_10000mV41Hz0mVdc_56.h5'] print(files[:5]) for filname in files[:1000]: df = bu.DataFile() df.load(filname, plot_sync=True) print(filname) posdat_range = np.max(df.pos_data[0]) - np.min(df.pos_data[0]) cantdat_range = np.max(df.electrode_data[3]) - np.min(df.electrode_data[3]) fac = cantdat_range / posdat_range
fsig = params['drive_freq'] p0 = params['p0'] fsamp = params['fsamp'] beta_rot = pressure * np.sqrt(m0) / kappa gamma = beta_rot / Ibead pressures.append(pressure) amps.append(drive_amp) if hdf5: ext = '.h5' else: ext = '.npy' datfiles, lengths = bu.find_all_fnames(cdir, ext=ext) nfiles = lengths[0] gammas = [] longdat = [] nsamp = 0 lib_freqs.append([]) lib_calc = np.sqrt(drive_amp * p0 / Ibead) / (2.0 * np.pi) for fileind, file in enumerate(datfiles[::-1]): if fileind > 5: break bu.progress_bar(fileind, nfiles,
date = '20200322' date = '20200924' # fig_base = '/home/cblakemore/plots/20190626/' savefig = True fig_base = '/home/cblakemore/plots/{:s}/spinning/'.format(date) #fig_base = '/home/cblakemore/plots/spinsim/' suffix = '' # suffix = '_less-yrange' #suffix = '_3_5e-6mbar_110kHz_real-noise' #dirname = '/data/old_trap_processed/spinning/ringdown/20190626/' dirname = '/data/old_trap_processed/spinning/ringdown/{:s}/'.format(date) #dirname = '/data/old_trap_processed/spinning/ringdown_manual/{:s}/'.format(date) paths, lengths = bu.find_all_fnames(dirname, ext='.p') newpaths = paths # # for 20190626: # newpaths = [paths[1], paths[2]] # labels = ['Initial', 'Later'] # mbead = 85.0e-15 # convert picograms to kg # mbead_err = 1.6e-15 priors = False manual_priors = False fix_fterm = False fit_end_time = 3000.0 exp_fit_end_time = 3000.0
freqs = np.fft.rfftfreq(df.nsamp, d=1.0 / df.fsamp) fft = np.fft.rfft(dat1) fft_fb = np.fft.rfft(df.pos_fb[2]) #plt.loglog(freqs, np.abs(fft)) #plt.loglog(freqs, np.abs(fft_fb)) #plt.show() times = (df.daqmx_time - df.daqmx_time[0]) * 1e-9 plt.plot(times, dat1 - np.mean(dat1), label='Phase Measurement, Naive Calibration') plt.plot(times, dat2 - np.mean(dat2), label='Cantilever Monitor', ls='--') plt.xlabel('Time [s]', fontsize=14) plt.ylabel('Amplitude [$\mu$m]', fontsize=14) plt.legend(loc=1) plt.tight_layout() plt.show() allfiles = [] for dir in dirs: allfiles += bu.find_all_fnames(dir) check_backscatter(allfiles)
import scipy.interpolate as interp import scipy.constants as constants import scipy.signal as signal import sys, time, os import bead_util as bu fac = 425 * constants.elementary_charge base_path = '/processed_data/comsol_data/patch_potentials/' fnames = bu.find_all_fnames(base_path, ext='') names = [] for filname in fnames: parts = filname.split('/') name = parts[-1].split('.')[0] if name not in names: names.append(name) names = [#'patch_pot_2um_1Vrms_50um-deep-patches', \ #'patch_pot_2um_1Vrms_150um-deep-patches', \ #'patch_pot_2um_1Vrms_250um-deep-patches', \ #'patch_pot_2um_1Vrms_150um-deep-patches_4mmBC', \ # 'patch_pot_2um_1Vrms_150um-deep-patches_4mmBC_seed0', \
import copy from scipy.optimize import curve_fit import re path0 = "/data/20181025/bead1/spinning/sudden_turn_on_600Hz_good" #path1 = "/data/20180927/bead1/spinning/amp_ramp_20181014_unlocked_good" bw = 0.5 Ns = 250000 Fs = 5000. k = 1e-13*(370*2.*np.pi)**2 axis = 0 freqs = np.fft.rfftfreq(Ns, 1./Fs) nwind = int(np.floor(bw/(freqs[1]-freqs[0]))) files0 = bu.find_all_fnames(path0) #files1 = bu.find_all_fnames(path1) def get_dfreq(fname, darr, min_ind = 10): fft = np.fft.rfft(darr) fft[np.arange(len(fft))<min_ind] = 0 #ig = float(re.findall('\d+Hz', fname)[0][:-2]) #ind_ig = np.argmin(np.abs(freqs-ig)) #fbool = np.abs(np.arange(len(fft))-ind_ig)>nwind*s_fac #fft[fbool] = 0. return freqs[np.argmax(np.abs(fft))] def get_sigma(fft, dfreq, nn = 36): fb = np.abs(freqs-dfreq)<bw/2. rolls = np.arange(-nn/2, nn/2)
import peakdetect as pdet from numba import jit from joblib import Parallel, delayed nfiles = 5 datadir = '/data/old_trap/20191017/bead1/spinning/ringdown/110kHz_start_6/' bw = 10.0 mon_fac = 100.0 volt_to_efield = np.abs(bu.trap_efield([0, 0, 0, 1, 0, 0, 0, 0], nsamp=1)[0]) files, lengths = bu.find_all_fnames(datadir, ext='.h5', sort_time=True) files = files[:nfiles] real_drive = {} for fileind, file in enumerate(files): obj = hs.hsDat(file) fsamp = obj.attribs['fsamp'] nsamp = obj.attribs['nsamp'] freqs = np.fft.rfftfreq(nsamp, d=1.0 / fsamp) sig_filt = obj.dat[:, 1] for i in range(10): notch_freq = 60.0 * (2.0 * i + 1) Q = 0.05 * notch_freq
for gas in other_gases[pathind]: other_dict[gas] = {} for key in keys: other_dict[gas][key] = [] other_outdat.append(other_dict) fig_ex, ax_ex = plt.subplots(1,1,figsize=(6,3),dpi=200) temp_colors = bu.get_color_map(len(example_gases), cmap='plasma')[::-1] example_colors = {} for gasind, gas in enumerate(example_gases): example_colors[gas] = temp_colors[gasind] outgassing_dir = '/data/old_trap_processed/spinning/pramp_data/20190626/outgassing/' files, lengths = bu.find_all_fnames(outgassing_dir, ext='.txt') rates = [] for filename in files: file_obj = open(filename, 'rb') lines = file_obj.readlines() file_obj.close() rate = float(lines[2]) rates.append(rate) outgassing_rate = np.mean(rates) # outgassing_rate = 0.0 def get_delta_phi(fname): delta_phi = np.load(fname + "_phi.npy") return delta_phi
import bead_util as bu import os import glob import matplotlib.mlab as ml import sys import imp sys.path.append("../microgravity") from scipy.optimize import minimize_scalar as ms import alex_limit_v2 as al2 from scipy.optimize import curve_fit import matplotlib imp.reload(al2) #################################################################################testing code dat_dir = "/data/20180404/bead2/scatt_light_tests_20180419/pinhole_lens_tube_initial_freq_sweep2" files = bu.sort_files_by_timestamp(bu.find_all_fnames(dat_dir)) def proc_dir(files, T=10., G=15., tuning=.14): T = 10. gf = al2.GravFile() gf.load(files[0]) amps = np.zeros((len(files), 3, gf.num_harmonics)) delta_f = np.zeros(len(files)) phis = np.zeros((len(files), 3, gf.num_harmonics)) sig_amps = np.zeros((len(files), 3, gf.num_harmonics)) sig_phis = np.zeros((len(files), 3, gf.num_harmonics)) ps = np.zeros(len(files)) n = len(files) for i, f in enumerate(files[:-1]):
daxarr[axind, 1].set_xlabel('t [s]', fontsize=10) else: daxarr[axind].plot(t, data * fac, color=color) daxarr[axind].grid(alpha=0.5) daxarr[axind].set_ylabel('[N]', fontsize=10) if ax == data_axes[-1]: daxarr[axind].set_xlabel('t [s]', fontsize=10) if len(cant_axes): for axind, ax in enumerate(cant_axes): t = np.arange(len(df.cant_data[ax])) * (1.0 / df.fsamp) caxarr[axind].plot(t, df.cant_data[ax], color=color) if len(elec_axes): for axind, ax in enumerate(elec_axes): t = np.arange(len(df.electrode_data[ax])) * (1.0 / df.fsamp) eaxarr[axind].plot(t, df.electrode_data[ax], color=color) #daxarr[0].set_xlim(0.5, 25000) #daxarr[0].set_ylim(1e-21, 1e-14) plt.tight_layout() plt.show() allfiles = bu.find_all_fnames(dir1) plot_vs_time(allfiles, data_axes=data_axes, file_inds=file_inds, diag=diag)
tabor_mon_fac = 100 #tabor_mon_fac = 100 * (1.0 / 0.95) ######################### ### Plotting behavior ### ######################### output_band = (3, 2000) ### Full spectra plot limits xlim = (0.5, 5000) ylim = (3e-4, 5e0) date = re.search(r"\d{8,}", dir_name)[0] files, _ = bu.find_all_fnames(dir_name, ext='.h5', sort_time=False, skip_subdirectories=False) files = files[file_inds[0]:file_inds[1]:file_step] nfiles = len(files) Ibead = bu.get_Ibead(date=date) fobj = bu.hsDat(files[0], load=False, load_attribs=True) nsamp = fobj.nsamp fsamp = fobj.fsamp fac = bu.fft_norm(nsamp, fsamp) time_vec = np.arange(nsamp) * (1.0 / fsamp) full_freqs = np.fft.rfftfreq(nsamp, 1.0 / fsamp)
import numpy as np
import bead_util as bu
import matplotlib.pyplot as plt
import os, re
import scipy.signal as signal
from scipy.special import erf
from scipy.optimize import curve_fit

### Directory of beam-shaking measurements to profile
data_dir = '/data/new_trap/20200320/Bead1/Shaking/'

### Separate file lists for the X- and Y-direction scans, selected by
### the axis substring embedded in each filename
xfiles, _ = bu.find_all_fnames(data_dir, ext='.h5', substr='_X_', \
                               skip_subdirectories=True)
yfiles, _ = bu.find_all_fnames(data_dir, ext='.h5', substr='_Y_', \
                               skip_subdirectories=True)

### Analysis options
nbins = 300
plot_raw_data = False
log_profs = True
gauss_fit = True
use_quad_sum = True


def gauss_wconst(x, A, x0, w0, C):
    """Gaussian beam-intensity profile with a constant background.

    I(x) = A * exp( -2 (x - x0)^2 / w0^2 ) + C, where w0 plays the role
    of a 1/e^2 beam radius and C is a constant offset.
    """
    exponent = -2 * (x - x0)**2 / (w0**2)
    return A * np.exp(exponent) + C
plt.annotate(ann_str, xy=(0.2, 0.9), xycoords='axes fraction') plt.tight_layout() plt.grid() plt.show() for ddir in data_dirs: paths = gu.build_paths(ddir, opt_ext=opt_ext) datafiles = bu.find_all_fnames(ddir) p0_bead = p0_bead_dict[paths['date']] if load_files: agg_dat = gu.AggregateData(datafiles, p0_bead=p0_bead, harms=harms, \ elec_drive=False, elec_ind=0, plot_harm_extraction=False, \ tfdate=tfdate) agg_dat.save(paths['agg_path']) else: agg_dat = gu.AggregateData([], p0_bead=p0_bead, harms=harms) agg_dat.load(paths['agg_path']) agg_dat.p0_bead = p0_bead agg_dat.bin_rough_stage_positions(ax_disc=1.0)
decca_path = "/home/arider/limit_data/just_decca.csv" pre_decca_path = "/home/arider/limit_data/pre_decca.csv" calculate_sps = False recalculate = True calculate_limit = True save_name = "binned_force_data.npy" save_limit_data = "limit_data.npy" dat_dir = "/data/20180625/bead1/nobead/grav_data/shield/X50-75um_Z15-25um_17Hz_elec-term" increment = 1 plt_file = 10 plt_increment = 100 ah5 = lambda fname: fname + '.h5' files = bu.find_all_fnames(dat_dir) if calculate_sps: sps = np.array(list(map(iu.getNanoStage, list(map(ah5, files))))) np.save("sps.npy", sps) else: sps = np.load("sps.npy") ba0 = sps[:, 0] > 74. ba1 = sps[:, 2] > 24. files = np.array(files) files = files[ba0 * ba1] #files = files[:100] lam25umind = np.argmin((yf.lambdas - 25E-6)**2) n_file = len(files) #files = map(ah5, files) p0 = [20., 0., 25.] force_data = [] #np.zeros((n_file, 3, 2, 100))
Ibead = bu.get_Ibead(date=date, verbose=True) def sqrt(x, A, x0, b): return A * np.sqrt(x-x0) + b for gas in gases: fig, ax = plt.subplots(1,1) paths = path_dict[gas] for pathind, path in enumerate(paths): dipole_filename = path[:-1] + '.dipole' color = 'C' + str(pathind) files, lengths = bu.find_all_fnames(path, ext='.npy') if one_path: colors = bu.get_color_map(len(files), cmap='inferno') popt_arr = [] pcov_arr = [] max_field = 0 A_arr = [] A_sterr_arr = [] A_syserr_arr = [] x0_arr = [] x0_err_arr = [] for fileind, file in enumerate(files):
import numpy as np import matplotlib.pyplot as plt import flywheel as fw import bead_util as bu import re path = "/data/20180927/bead1/spinning/amp_ramp_10s" files = bu.find_all_fnames(path) freqs = np.fft.rfftfreq(50000, 1. / 5000.) bw = 1. axis = 0 d_freqs = [] r_amps = [] sr_amps = [] r_phis = [] sr_phis = [] times = [] d_amps = [] df = bu.DataFile() for i, f in enumerate(files): df.load(f) df.load_other_data() ig = float(re.findall('\d+Hz', f)[0][:-2]) ig_ind = np.argmin(np.abs(freqs - ig)) di, d_amp = fw.get_drive_ind(df.other_data[2], ig_ind) dfreq = freqs[di] filt = np.abs(freqs - dfreq) < bw respft = np.fft.rfft(df.pos_data[axis]) respft[np.logical_not(filt)] = 0.
print(' {:s}'.format(agg_path)) print('----------------------------------') print('Will save plots to:') print(' {:s}'.format(plot_dir)) print('----------------------------------') print() if save: bu.make_all_pardirs(agg_path) if reprocess: datafiles, lengths = bu.find_all_fnames(ddir, ext=config.extensions['data'], \ substr=substr, sort_by_index=True, \ sort_time=False) datafiles = datafiles[:Nfiles] agg_dat = gu.AggregateData(datafiles, p0_bead=p0_bead, harms=harms, \ plot_harm_extraction=plot_harms, new_trap=new_trap, \ step_cal_drive_freq=71.0, ncore=ncore, noisebins=10, \ aux_data=aux_data, suppress_off_diag=suppress_off_diag, \ fake_attractor_data=fake_attractor_data, \ fake_attractor_data_amp=fake_attractor_data_amp, \ fake_attractor_data_dc=fake_attractor_data_dc, \ fake_attractor_data_freq=fake_attractor_data_freq, \ fake_attractor_data_axis=fake_attractor_data_axis) agg_dat.load_grav_funcs(theory_data_dir)
if new_trap: charge_path = charge_path.replace('old_trap', 'new_trap') bu.make_all_pardirs(charge_path) if new_trap: savepath = savepath.replace('old_trap', 'new_trap') bu.make_all_pardirs(savepath) use_origin_timestamp = False # if new_trap: # use_origin_timestamp = True # Find all the relevant files step_cal_files, lengths = bu.find_all_fnames(step_cal_dir, sort_by_index=sort_by_index, \ sort_time=sort_time, \ use_origin_timestamp=use_origin_timestamp, \ skip_subdirectories=skip_subdirectories) # for name in step_cal_files: # print(name) # input() for filind in files_to_pop[::-1]: step_cal_files.pop(filind) # for i in range(5): # step_cal_files.pop(559) #print len(step_cal_files) # for 20180827, uncomment this #step_cal_files.pop(53)
out_f = save_base + dirname bu.make_all_pardirs(out_f) if load: outdict[out_f] = pickle.load(open(out_f + '_all.p', 'rb')) all_time = outdict[out_f]['all_time'] all_freq = outdict[out_f]['all_freq'] all_freq_err = outdict[out_f]['all_freq_err'] plt.errorbar(all_time.flatten(), all_freq.flatten(), yerr=all_freq_err.flatten()) plt.show() continue files, lengths = bu.find_all_fnames(path, sort_time=True) files = files[:1000] fobj = hsDat(files[0]) nsamp = fobj.attribs["nsamp"] fsamp = fobj.attribs["fsamp"] t0 = fobj.attribs["time"] time_vec = np.arange(nsamp) * (1.0 / fsamp) freqs = np.fft.rfftfreq(nsamp, 1.0 / fsamp) # upper1 = (2.0 / fsamp) * (fc + 0.5 * bandwidth) # lower1 = (2.0 / fsamp) * (fc - 0.5 * bandwidth) upper1 = (2.0 / fsamp) * (fc + 5 * bandwidth) lower1 = (2.0 / fsamp) * (fc - 5 * bandwidth)
fbfig.subplots_adjust(top=0.91) if savefigs: plt.savefig(title_pre + '.png') daxarr[0].set_xlim(2000, 25000) plt.tight_layout() plt.savefig(title_pre + '_zoomhf.png') daxarr[0].set_xlim(1, 80) plt.tight_layout() plt.savefig(title_pre + '_zoomlf.png') daxarr[0].set_xlim(0.5, 25000) if not savefigs: plt.show() if use_dir: allfiles, lengths = bu.find_all_fnames(dir1, sort_time=True) allfiles = allfiles[:maxfiles] #allfiles = bu.sort plot_many_spectra(allfiles, file_inds=file_inds, diag=diag, \ data_axes=data_axes, other_axes=other_axes, \ fb_axes=fb_axes, cant_axes=cant_axes, \ plot_power=plot_power, colormap=cmap)