def plot_dT_comparison(dats: List[DatHDF], plot=True):
    plotter = OneD(dats=dats)
    fig = plotter.figure(xlabel='Fridge Temp /mK', ylabel='% Difference between calculated dTs',
                         title=f'Dats{dats[0].datnum}-{dats[-1].datnum}: Difference between DC bias calculated dT and '
                               f'Square Entropy Calculated dT')
    hover_infos = [
        HoverInfo(name='Dat', func=lambda dat: dat.datnum, precision='d', units=''),
        HoverInfo(name='Temperature', func=lambda dat: dat.Logs.temps.mc * 1000, precision='.1f', units='mK'),
        HoverInfo(name='Bias', func=lambda dat: dat.AWG.max(0) / 10, precision='.1f', units='nA'),
    ]
    funcs, template = _additional_data_dict_converter(hover_infos)
    for bias in sorted(list(set([dat.AWG.max(0) for dat in dats]))):
        ds = [dat for dat in dats if dat.AWG.max(0) == bias]
        diffs = [compare_dTs(dat, verbose=False) for dat in ds]
        hover_data = [[func(dat) for func in funcs] for dat in ds]
        fig.add_trace(plotter.trace(data=diffs, x=[dat.Logs.temps.mc * 1000 for dat in ds],
                                    name=f'Bias={bias / 10:.0f}nA',
                                    mode='markers+lines',
                                    trace_kwargs={'customdata': hover_data, 'hovertemplate': template}))
    if plot:
        fig.show(renderer='browser')
    return fig
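# Illustrative usage sketch (not part of the original script): the datnum range below is a
# hypothetical placeholder and `get_dats` is assumed to be importable as in the other scripts here.
def _example_plot_dT_comparison():
    example_dats = get_dats((5000, 5020))  # hypothetical DC-bias + square-entropy dats
    return plot_dT_comparison(example_dats, plot=True)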
def entropy_vs_gate_fig(dats: Optional[List[DatHDF]] = None, x_gate: str = None):
    plotter = OneD(dats=dats)
    if dats is not None:
        title_pre = f'Dats{dats[0].datnum}->{dats[-1].datnum}: '
    else:
        title_pre = ''
    title = title_pre + f'Entropy vs {x_gate}'
    fig = plotter.figure(xlabel=f'{x_gate}/mV', ylabel="Entropy /kB", title=title)
    return fig
def avg_data(self) -> go.Figure:
    """Transition figure"""
    if not self._correct_call_args():
        logger.warning(f'Bad call args to GraphCallback')
        return go.Figure()
    dat = self.dat
    plotter = OneD(dat=dat)
    fig = plotter.figure(title=f'Dat{dat.datnum}: Transition')
    return fig
def theta_slope_in_weakly_coupled(dats: List[DatHDF], show_intermediate=False, fit_name: str = 'simple') -> go.Figure:
    """Calculates and plots slope of theta in weakly coupled for all temperatures
    (option to show the linear fits for each temp)
    """
    plotter = OneD(dats=dats)
    fig = plotter.figure(xlabel='Fridge Temp /mK', ylabel=f'Slope ({DELTA}{THETA}/{DELTA}ESC)',
                         title=f'Dats{dats[0].datnum}-{dats[-1].datnum}:'
                               f' Slope of thetas in weakly coupled')
    dats_sorted_by_temp = sort_by_temps(dats)
    line = lm.models.LinearModel()
    slopes = []
    temps = []
    for temp, dats in sorted(dats_sorted_by_temp.items()):
        if len(dats) > 0:
            dats = [dat for dat in dats if dat.Logs.fds['ESC'] < -235]
            escs = np.array([dat.Logs.fds['ESC'] for dat in dats])
            thetas = np.array([dat.Transition.get_fit(name=fit_name).best_values.theta for dat in dats])
            pars = line.guess(thetas, x=escs)
            fit = calculate_fit(x=escs, data=thetas, params=pars, func=line.func)

            slopes.append(fit.best_values.slope)
            temps.append(temp)

            p = OneD(dats=dats)
            f = p.figure(xlabel='ESC /mV', ylabel='Theta /mV',
                         title=f'Dats{dats[0].datnum}-{dats[-1].datnum}: Temp = {temp}mK, Fit of theta slope')
            f.add_trace(p.trace(x=escs, data=thetas, name='Data'))
            f.add_trace(p.trace(x=escs, data=fit.eval_fit(x=escs), name='Fit', mode='lines'))
            if show_intermediate:
                f.show()
            print(f'{temp}mK:\nSlope = {fit.best_values.slope:.3g}{PM}{U.sig_fig(fit.params["slope"].stderr, 2):.2g}\n'
                  f'Reduced Chi Square = {fit.reduced_chi_sq:.3g}\n')
    fig.add_trace(plotter.trace(data=slopes, x=temps, mode='markers+lines'))
    return fig
def plot_avg_thetas(dats: Iterable[DatHDF]) -> go.Figure:
    dats = list(dats)
    thetas = [dat.Transition.avg_fit.best_values.theta for dat in dats]
    x = [dat.Logs.fds['ESS'] for dat in dats]

    plotter = OneD(dats=dats)
    fig = plotter.figure(xlabel='ESS /mV', ylabel='Theta /mV',
                         title=f'Dats{dats[0].datnum}-{dats[-1].datnum}: Avg Theta')
    fig.add_trace(plotter.trace(data=thetas, x=x, mode='markers', name='Avg Fit Theta'))
    return fig
def plot_gamma_dcbias(datnums: List[int], save_name: str, show_each_data=True):
    """
    Makes a figure for Theta vs DCbias with option to show the data which is being used to obtain thetas

    Args:
        datnums (): Datnums that form DCbias measurement (i.e. repeats at fixed Biases)
        save_name (): Name of fits etc to be loaded (must already exist)
        show_each_data (): Whether to show the fit for each dataset (i.e. to check everything looks good)

    Returns:
        go.Figure: A plotly figure of Theta vs DCbias
    """
    # if calculate:
    #     with ProcessPoolExecutor() as pool:
    #         list(pool.map(partial(do_transition_only_calc, save_name=save_name, theta=theta, gamma=None, width=600,
    #                               t_func_name='i_sense_digamma', overwrite=False), GAMMA_DCbias))
    dats = get_dats(datnums)
    plotter = OneD(dats=dats)
    # fig = plotter.figure(ylabel='Current /nA',
    #                      title=f'Dats{dats[0].datnum}-{dats[-1].datnum}: DCbias in Gamma broadened')
    # dat_pairs = np.array(dats).reshape((-1, 2))
    # line = lm.models.LinearModel()
    # params = line.make_params()
    # for ds in dat_pairs:
    #     for dat, color in zip(ds, ['blue', 'red']):
    #         params['slope'].value = dat.Transition.avg_fit.best_values.lin
    #         params['intercept'].value = dat.Transition.avg_fit.best_values.const
    #         fig.add_trace(plotter.trace(x=dat.Transition.avg_x,
    #                                     data=dat.Transition.avg_data - line.eval(params=params, x=dat.Transition.avg_x),
    #                                     name=f'Dat{dat.datnum}: Bias={dat.Logs.fds["HO1/10M"] / 10:.1f}nA',
    #                                     mode='lines',
    #                                     trace_kwargs=dict(line=dict(color=color)),
    #                                     ))
    fig = plotter.figure(ylabel='Current /nA',
                         title=f'Dats{dats[0].datnum}-{dats[-1].datnum}: DCbias in Gamma broadened')
    line = lm.models.LinearModel()
    params = line.make_params()
    for dat in dats[1::2]:
        params['slope'].value = dat.Transition.avg_fit.best_values.lin
        params['intercept'].value = dat.Transition.avg_fit.best_values.const
        fig.add_trace(plotter.trace(x=dat.Transition.avg_x,
                                    data=dat.Transition.avg_data - line.eval(params=params, x=dat.Transition.avg_x),
                                    name=f'Dat{dat.datnum}: Bias={dat.Logs.fds["HO1/10M"] / 10:.1f}nA',
                                    mode='lines',
                                    ))
    fig.show()
    return fig  # Docstring promises a go.Figure
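# Illustrative usage sketch (not part of the original script): the datnums and save_name below are
# hypothetical placeholders; fits with that save_name must already exist, as the docstring notes.
def _example_plot_gamma_dcbias():
    example_datnums = list(range(6000, 6010))  # hypothetical DCbias repeat measurements
    return plot_gamma_dcbias(example_datnums, save_name='existing_fit_name', show_each_data=False)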
def plot_csq_trace(dat: DatHDF, cutoff: Optional[float] = None) -> Data1D:
    plotter = OneD(dat=dat)
    plotter.TEMPLATE = 'simple_white'
    fig = plotter.figure(xlabel='CSQ Gate (mV)', ylabel='Current (nA)', title='CS current vs CSQ gate')
    x = dat.Data.x
    data = dat.Data.i_sense
    if cutoff:
        upper_lim = U.get_data_index(x, cutoff)
        x, data = x[:upper_lim], data[:upper_lim]
    fig.add_trace(plotter.trace(data=data, x=x))
    fig.show()
    return Data1D(x=x, data=data)
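# Illustrative usage sketch (not part of the original script): datnum 2197 is the CSQ dat used
# elsewhere in this repo; the cutoff value is a hypothetical example in CSQ gate mV.
def _example_plot_csq_trace():
    example_dat = get_dat(2197)
    return plot_csq_trace(example_dat, cutoff=-100)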
def check_centers():
    """Assuming 'simple' exists as a Transition fit name, will just plot centers and print any that
    deviate far from 0
    """
    global all_dats
    for dat in all_dats:
        fit = dat.Transition.get_fit(name='simple')
        if fit.best_values.mid > 5 or fit.best_values.mid < -5:
            print(f'Dat{dat.datnum}: mid = {fit.best_values.mid:.1f}mV')

    plotter = OneD(dats=all_dats)
    fig = plotter.figure(xlabel='Datnum', ylabel='Center /mV',
                         title=f'Dats{all_dats[0].datnum}-{all_dats[-1].datnum}: Centers')
    fig.add_trace(plotter.trace(data=[dat.Transition.get_fit(name='simple').best_values.mid for dat in all_dats],
                                x=[dat.datnum for dat in all_dats],
                                mode='markers'))
    fig.show()
def check_rough_broadening_vs_temp() -> go.Figure:
    global all_dats, dats_by_temp
    dats = all_dats
    plotter = OneD(dats=dats)
    fig = plotter.figure(xlabel='ESC /mV', ylabel='Theta /mV',
                         title=f'Dats{dats[0].datnum}-{dats[-1].datnum}:'
                               f' Rough idea of broadening')
    for temp, dats in dats_by_temp.items():
        if len(dats) != 0:
            dat = dats[0]
            dats = U.order_list(dats, [dat.Logs.fds['ESC'] for dat in dats])
            x = [dat.Logs.fds['ESC'] for dat in dats]
            thetas = [dat.Transition.get_fit(name='simple').best_values.theta for dat in dats]
            fig.add_trace(plotter.trace(x=x, data=thetas, mode='markers+lines', name=f'{temp}mK'))
    return fig
def plot_param(dat: DatHDF, param: str):
    if param == 'theta':
        name = 'Theta'
        units = '/mV'
    elif param == 'mid':
        name = 'Center'
        units = '/mV'
    else:
        raise NotImplementedError(f'{param} not recognized')
    fits = dat.Transition.get_row_fits(name='default', check_exists=False)
    plotter = OneD(dat=dat)
    fig = plotter.figure(xlabel=dat.Logs.ylabel, ylabel=f'{name} {units}',
                         title=f'Dat{dat.datnum}: {name} vs {dat.Logs.ylabel}')
    trace = _get_param_trace(fits, param, dat.Data.get_data('y'))
    fig.add_trace(trace)
    return fig
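# Illustrative usage sketch (not part of the original script): the datnum is a placeholder borrowed
# from other scripts in this repo; only 'theta' and 'mid' are currently handled by plot_param.
def _example_plot_param():
    example_dat = get_dat(2164)
    return plot_param(example_dat, param='theta')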
def one_d_data_vs_n(self):
    if self.datnum:
        self.which = ['i_sense_cold', 'dndt', 'occupation']
        try:
            x, data_dndt = _get_x_and_data(self.datnum, self.experiment_name, 'dndt')
        except NotFoundInHdfError:
            logger.warning(f'Dat{self.datnum}: dndt data not found, probably a transition only dat')
            return go.Figure()
        nrg_func = NRG_func_generator(which='dndt')
        nrg_dndt = nrg_func(x, self.mid, self.g, self.theta, self.amp, self.lin, self.const, self.occ_lin)
        nrg_func = NRG_func_generator(which='occupation')
        occupation = nrg_func(x, self.mid, self.g, self.theta, self.amp, self.lin, self.const, self.occ_lin)

        # Rescale dN/dTs to have a peak at 1
        nrg_dndt = nrg_dndt * (1 / np.nanmax(nrg_dndt))
        x_max = x[get_data_index(data_dndt, np.nanmax(data_dndt))]
        x_range = abs(x[-1] - x[0])
        indexs = get_data_index(x, [x_max - x_range / 50, x_max + x_range / 50])
        avg_peak = np.nanmean(data_dndt[indexs[0]:indexs[1]])
        # avg_peak = np.nanmean(data_dndt[np.nanargmax(data_dndt) - round(x.shape[0] / 50):
        #                                 np.nanargmax(data_dndt) + round(x.shape[0] / 50)])
        data_dndt = data_dndt * (1 / avg_peak)
        if (new_max := np.nanmax(np.abs([np.nanmax(data_dndt), np.nanmin(data_dndt)]))) > 5:  # If very noisy
            data_dndt = data_dndt / (new_max / 5)  # Reduce to +-5ish

        interp_range = np.where(np.logical_and(occupation < 0.99, occupation > 0.01))
        if len(interp_range[0]) > 5:  # If enough data to actually plot something
            interp_data = occupation[interp_range]
            interp_x = x[interp_range]
            interper = interp1d(x=interp_x, y=interp_data, assume_sorted=True, bounds_error=False)
            occ_x = interper(x)

            plotter = OneD(dat=None)
            fig = plotter.figure(xlabel='Occupation', ylabel='Arbitrary',
                                 title=f'dN/dT vs N: G={self.g:.2f}mV, '
                                       f'{THETA}={self.theta:.2f}mV, '
                                       f'{THETA}/G={self.theta / self.g:.2f}'
                                       f' -- Dat{self.datnum}')
            fig.add_trace(plotter.trace(x=occ_x, data=data_dndt, name='Data dN/dT', mode='lines+markers'))
            fig.add_trace(plotter.trace(x=occ_x, data=nrg_dndt, name='NRG dN/dT', mode='lines'))
            return fig
    return go.Figure()  # Fallback when there is no datnum or not enough data to interpolate
def one_d_data_subtract_fit(self):
    if self.datnum:
        dat = get_dat(self.datnum, exp2hdf=self.experiment_name)
        plotter = OneD(dat=dat)
        xlabel = 'Sweepgate /mV'
        fig = plotter.figure(xlabel=xlabel, ylabel='Current /nA',
                             title=f'Data Subtract Fit: G={self.g:.2f}mV, '
                                   f'{THETA}={self.theta:.2f}mV, '
                                   f'{THETA}/G={self.theta / self.g:.2f}')
        for i, which in enumerate(self.which):
            if 'i_sense' in which:
                x, data = _get_x_and_data(self.datnum, self.experiment_name, which)
                nrg_func = NRG_func_generator(which='i_sense')
                nrg_data = nrg_func(x, self.mid, self.g, self.theta, self.amp, self.lin, self.const, self.occ_lin)
                data_sub_nrg = data - nrg_data
                fig.add_trace(plotter.trace(x=x, data=data_sub_nrg, name=f'{which} subtract NRG', mode='lines'))
        return fig
    return go.Figure()
def plot_entropy_vs_temp(dats: List[DatHDF], integrated=False, plot=True):
    fit_name = 'SPS.0045'
    plotter = OneD(dats=dats)
    _tname = 'Integrated' if integrated else 'Fit'
    fig = plotter.figure(title=f'Dats{dats[0].datnum}-{dats[-1].datnum}: {_tname} Entropy',
                         xlabel='Heating Bias /nA', ylabel='Entropy /kB')
    temps = list(range(0, 300, 10))
    hover_infos = [
        HoverInfo(name='Dat', func=lambda dat: dat.datnum, precision='d', units=''),
        HoverInfo(name='Temperature', func=lambda dat: dat.Logs.temps.mc * 1000, precision='.1f', units='mK'),
        HoverInfo(name='Bias', func=lambda dat: dat.AWG.max(0) / 10, precision='.1f', units='nA'),
    ]
    funcs, template = _additional_data_dict_converter(hover_infos)
    for temp in temps:
        ds = [dat for dat in dats if np.isclose(dat.Logs.temps.mc * 1000, temp, atol=5)]
        if len(ds) > 0:
            x = [dat.AWG.max(0) / 10 for dat in ds]
            hover_data = [[func(dat) for func in funcs] for dat in ds]
            if integrated is False:
                entropies = [dat.Entropy.get_fit(name=fit_name).best_values.dS for dat in ds]
                # Standard error of dS estimated from the row fits of each dat
                entropy_errs = [np.nanstd([f.best_values.dS if f.best_values.dS is not None else np.nan
                                           for f in dat.Entropy.get_row_fits(name=fit_name)])
                                / np.sqrt(dat.Data.y_array.shape[0])
                                for dat in ds]
                fig.add_trace(plotter.trace(data=entropies, data_err=entropy_errs, x=x,
                                            name=f'{temp:.0f}mK',
                                            mode='markers+lines',
                                            trace_kwargs={'customdata': hover_data, 'hovertemplate': template}))
            else:
                integrated_entropies = [np.nanmean(dat.Entropy.integrated_entropy[-10:]) for dat in ds]
                fig.add_trace(plotter.trace(data=integrated_entropies, x=x,
                                            name=f'{temp:.0f}mK',
                                            mode='markers+lines',
                                            trace_kwargs={'customdata': hover_data, 'hovertemplate': template}))
    if plot:
        fig.show(renderer='browser')
    return fig
def get_integrated_trace(dats: List[DatHDF], x_func: Callable,
                         trace_name: str,
                         int_info_name: Optional[str] = None, SE_output_name: Optional[str] = None,
                         ) -> go.Scatter:
    if int_info_name is None:
        int_info_name = 'default'
    if SE_output_name is None:
        SE_output_name = 'default'

    plotter = OneD(dats=dats)
    x = [x_func(dat) for dat in dats]
    integrated_entropies = [np.nanmean(
        dat.Entropy.get_integrated_entropy(
            name=int_info_name,
            data=dat.SquareEntropy.get_Outputs(name=SE_output_name).average_entropy_signal
        )[-10:]) for dat in dats]
    trace = plotter.trace(data=integrated_entropies, x=x, name=trace_name, mode='markers+lines')
    return trace
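# Illustrative usage sketch (not part of the original script): shows how this trace might be combined
# with entropy_vs_gate_fig defined above; the gate name and fit/output names are hypothetical examples.
def _example_integrated_entropy_vs_gate(dats: List[DatHDF]):
    fig = entropy_vs_gate_fig(dats=dats, x_gate='LCT')
    fig.add_trace(get_integrated_trace(dats, x_func=lambda dat: dat.Logs.fds['LCT'],
                                       trace_name='Integrated Entropy',
                                       int_info_name='default', SE_output_name='default'))
    return fig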
def fit_single_esc_varying_width(show_figs=True) -> List[go.Figure]:
    """Fits transition with varying width based on temperature at a single ESC"""
    global dats_by_coupling_gate
    dats = dats_by_coupling_gate[-260]
    dats = U.order_list(dats, [dat.Logs.temps.mc for dat in dats])
    figs = []
    for dat, w in progressbar(zip(dats, [20, 30, 50, 100, 200])):
        save_name = 'varying_width'
        do_transition_only_calc(dat.datnum, save_name=save_name, theta=None, gamma=0, width=w, t_func_name='i_sense')
        plotter = OneD(dat=dat)
        fig = plotter.figure(title=f'Dat{dat.datnum}: I_sense at {dat.Logs.temps.mc * 1000:.0f}mK',
                             ylabel='Current /nA')
        fig.add_trace(plotter.trace(data=dat.Transition.avg_data, x=dat.Transition.avg_x, mode='lines', name='Data'))
        fig.add_trace(plotter.trace(data=dat.Transition.get_fit(name=save_name).eval_fit(x=dat.Transition.avg_x),
                                    x=dat.Transition.avg_x, mode='lines', name='Fit'))
        [plotter.add_line(fig, value=xx, mode='vertical', color='black', linetype='dash') for xx in [w, -w]]
        if show_figs:
            fig.show()
        figs.append(fig)
        w_theta = dat.Transition.get_fit(name="simple").best_values.theta
        n_theta = dat.Transition.get_fit(name=save_name).best_values.theta
        print(f'Temp {dat.Logs.temps.mc * 1000:.0f}mK:\n'
              f'Wide Theta: {w_theta:.2f}mV\n'
              f'Narrow Theta: {n_theta:.2f}mV\n'
              f'Change: {(n_theta - w_theta) / w_theta * 100:.2f}%\n')
    return figs
def plotting_center_shift():
    nrg_func = NRG_func_generator('occupation')
    params = lm.Parameters()
    params.add_many(
        ('mid', 0, True, -200, 200, None, 0.001),
        ('theta', 3.9, False, 1, 500, None, 0.001),
        ('amp', 1, True, 0, 3, None, 0.001),
        ('lin', 0, True, 0, 0.005, None, 0.00001),
        ('occ_lin', 0, True, -0.0003, 0.0003, None, 0.000001),
        ('const', 0, True, -2, 10, None, 0.001),
        ('g', 1, True, 0.2, 2000, None, 0.01),
    )
    model = lm.Model(nrg_func)
    x = np.linspace(-10, 5000, 10000)
    gs = np.linspace(0, 200, 201)
    thetas = np.logspace(0.1, 2, 20)
    # thetas = np.linspace(1, 500, 10)
    # thetas = [1, 2, 5, 10, 20]
    all_mids = []
    for theta in thetas:
        params['theta'].value = theta
        mids = []
        for g in gs:
            params['g'].value = g
            occs = model.eval(x=x, params=params)
            mids.append(x[U.get_data_index(occs, 0.5, is_sorted=True)])
        all_mids.append(mids)

    plotter = OneD(dat=None)
    fig = plotter.figure(xlabel='Gamma /mV', ylabel='Shift of 0.5 OCC',
                         title='Shift of 0.5 Occupation vs Theta and G')
    fig.update_layout(legend=dict(title='Theta /mV'))
    for mids, theta in zip(all_mids, thetas):
        fig.add_trace(plotter.trace(data=mids, x=gs, name=f'{theta:.1f}', mode='lines'))
    fig.show()
    return fig
def entropy_vs_gate_trace(dats: List[DatHDF], x_gate, y_gate=None):
    fit_name = "SPS.0045"
    plotter = OneD(dats=dats)
    entropy = [dat.Entropy.get_fit(which='avg', name=fit_name).best_values.dS for dat in dats]
    entropy_errs = [np.nanstd([f.best_values.dS if f.best_values.dS is not None else np.nan
                               for f in dat.Entropy.get_row_fits(name=fit_name)]) for dat in dats]
    x = [dat.Logs.fds[x_gate] for dat in dats]
    trace = plotter.trace(data=entropy, data_err=entropy_errs, x=x, mode='markers+lines',
                          name=f'Dats{dats[0].datnum}->{dats[-1].datnum}')
    hover_infos = [
        HoverInfo(name='Dat', func=lambda dat: dat.datnum, precision='d', units=''),
        HoverInfo(name=x_gate, func=lambda dat: dat.Logs.fds[x_gate], precision='.1f', units='mV'),
        # HoverInfo(name='Time', func=lambda dat: dat.Logs.time_completed.strftime('%H:%M'), precision='', units=''),
    ]
    if y_gate:
        hover_infos.append(HoverInfo(name=y_gate, func=lambda dat: dat.Logs.fds[y_gate], precision='.2f', units='mV'))
    funcs, hover_template = _additional_data_dict_converter(info=hover_infos)
    hover_data = [[f(dat) for f in funcs] for dat in dats]
    trace.update(hovertemplate=hover_template, customdata=hover_data)
    return trace
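# Illustrative usage sketch (not part of the original script): pairs entropy_vs_gate_trace with
# entropy_vs_gate_fig defined above; the gate names below are hypothetical examples.
def _example_entropy_vs_gate(dats: List[DatHDF]):
    fig = entropy_vs_gate_fig(dats=dats, x_gate='ESC')
    fig.add_trace(entropy_vs_gate_trace(dats, x_gate='ESC', y_gate='ESS'))
    return fig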
if transition_only:
    fit = do_transition_only_calc(dat.datnum, save_name=fit_name, theta=theta, gamma=gamma, width=fit_width,
                                  t_func_name=fit_func, csq_mapped=csq_map, overwrite=False)
else:
    raise NotImplementedError
    # do_narrow_fits([dat], theta=3.9756, gamma=None, width=fit_width, output_name='SPS.005', overwrite=True,
    #                fit_func=fit_func, fit_name=fit_name, transition_only=True)
plotter = OneD(dat=dat)
fig_fit = plotter.figure(title=f'Dat{dat.datnum}: Transition Data with Fit (width={fit_width})' + title_row_2,
                         ylabel=y_label)
fig_fit.add_trace(single_transition_trace(dat, label='Data', fit_name=fit_name,
                                          transition_only=transition_only,
                                          csq_mapped=csq_map, se_output_name=save_name))
fig_fit.add_trace(single_transition_trace(dat, label='Fit',
# plotter.plot_integrated_entropy_avg().show(renderer='browser')

# dats_100 = get_dats((8516, 8534+1), overwrite=False, exp2hdf=Sep20.SepExp2HDF)
# dats_50 = get_dats((8600, 8626+1), overwrite=False, exp2hdf=Sep20.SepExp2HDF)
dats_100 = get_dats((8796, 8816+1), overwrite=False, exp2hdf=Sep20.SepExp2HDF)
dats_50 = get_dats((8710, 8729+1), overwrite=False, exp2hdf=Sep20.SepExp2HDF)
for all_dats, temp in zip([dats_100, dats_50], [100, 50]):
    dc_info = dc_bias_infos[temp]
    for dat in all_dats:
        try:
            info = dat.Entropy.integration_info
        except NotFoundInHdfError:
            set_integration_info(dc_info, dat)

plotter = OneD(dats=dats_100)
# fig = plotter.figure(xlabel='LCB /mV', ylabel='Entropy /kB', title=None)
fig = plotter.figure(xlabel='LCT /mV', ylabel='Entropy /kB', title=None)
for all_dats in [dats_100, dats_50]:
    plotter = OneD(dats=all_dats)
    fits = [entropy_fit_sp_start(dat, 50) for dat in all_dats]
    data = np.array([dat.Entropy.avg_fit.best_values.dS for dat in all_dats])
    data_50 = np.array([fit.best_values.dS for fit in fits])
    # x = np.array([dat.Logs.fds['LCB'] for dat in dats])
    x = np.array([dat.Logs.fds['LCT'] for dat in all_dats])
    fig.add_trace(plotter.trace(data=data, x=x, mode='markers', name='sp_0'))
    fig.add_trace(plotter.trace(data=data_50, x=x, mode='markers', name='sp_50'))
fig.show(renderer='browser')
import plotly.graph_objects as go
from typing import Optional, TYPE_CHECKING
from progressbar import progressbar

from dat_analysis.analysis_tools.general_fitting import FitInfo
from dat_analysis.dat_analysis.characters import DELTA
from dat_analysis.plotting.plotly.dat_plotting import OneD, TwoD
from dat_analysis.core_util import Data1D
from dat_analysis.analysis_tools.nrg import NRGParams, NrgUtil
import dat_analysis.useful_functions as U
from temp import get_avg_entropy_data, get_avg_i_sense_data, get_linear_theta, get_initial_params, get_2d_data

if TYPE_CHECKING:
    from dat_analysis.dat_object.make_dat import DatHDF

p1d = OneD(dat=None)
p2d = TwoD(dat=None)
p1d.TEMPLATE = 'simple_white'
p2d.TEMPLATE = 'simple_white'

NRG_OCC_FIT_NAME = 'csq_forced_theta'
CSQ_DATNUM = 2197


class StrongGammaNoise:
    def __init__(self, dat: DatHDF):
        self.dat = dat

        # The averaged data which does show some signal, but quite small given the number of repeats and duration
        self.avg_data = get_avg_entropy_data(self.dat, lambda _: False,
def testing_fit_methods():
    # Weakly coupled entropy dat
    # dat = get_dat(2164)
    # dat = get_dat(2167)
    dat = get_dat(2170)
    out = dat.SquareEntropy.get_Outputs(name='default')
    x = out.x
    data = np.nanmean(out.averaged[(0, 2), :], axis=0)

    plotter = OneD(dat=dat)
    fig = plotter.figure(ylabel='Current /nA', title=f'Dat{dat.datnum}: Fitting Weakly coupled to NRG')
    fig.add_trace(plotter.trace(x=x, data=data, name='Data', mode='lines'))

    print(dat.SquareEntropy.get_fit(fit_name='default').best_values)

    params = lm.Parameters()
    params.add_many(
        # ('mid', 2.2, True, None, None, None, None),
        ('mid', 0, True, -200, 200, None, 0.001),
        # ('mid', 1, True, -100, 100, None, 0.001),
        ('theta', 3.9, False, 1, 6, None, 0.001),
        ('amp', 0.94, True, 0, 3, None, 0.001),
        # ('lin', 0.0015, True, 0, 0.005, None, None),
        # ('lin', 0.0, True, 0, 0.005, None, 0.00001),
        ('lin', 0.01, True, 0, 0.005, None, 0.00001),
        ('occ_lin', 0, True, -0.0003, 0.0003, None, 0.000001),
        # ('const', 7.2, True, None, None, None, None),
        ('const', 7, True, -2, 10, None, 0.001),
        # ('g', 0.2371, True, 0.2, 200, None, 0.01),
        ('g', 1, True, 0.2, 200, None, 0.01),
    )

    dfs = []
    for method in [
        # 'leastsq',
        'least_squares',
        'differential_evolution',
        # 'brute',
        # 'basinhopping',
        # 'ampgo',
        'nelder',
        # 'lbfgsb',
        'powell',
        # 'cg',
        # 'newton',
        'cobyla',
        # 'bfgs',
        # 'tnc',
        # 'trust-ncg',
        # 'trust-exact',
        # 'trust-krylov',
        # 'trust-constr',
        # 'dogleg',
        # 'slsqp',
        # 'emcee',
        # 'shgo',
        'dual_annealing',
    ]:
        try:
            t1 = time.time()
            fit = calculate_fit(x, data, params=params, func=NRG_func_generator(which='i_sense'), method=method)
            total_time = time.time() - t1
            # fig.add_trace((plotter.trace(x=x, data=fit.eval_init(x=x), name='Initial Fit', mode='lines')))
            fig.add_trace((plotter.trace(x=x, data=fit.eval_fit(x=x), name=f'{method} Fit', mode='lines')))
            df = fit.to_df()
            df['name'] = method
            df['duration'] = total_time
            df['reduced chi sq'] = fit.fit_result.redchi
            dfs.append(df)
        except Exception as e:
            print(f'Failed for {method} with error: {e}')
    df = pd.concat(dfs)
    df.index = df.name
    df.pop('name')
    print(df.to_string())
    fig.show()
from typing import Tuple, Optional, List
from scipy.interpolate import interp1d
import lmfit as lm

import dat_analysis.analysis_tools.nrg
from dat_analysis.dat_analysis.characters import DELTA
from dat_analysis.plotting.plotly.dat_plotting import OneD, TwoD
from dat_analysis.core_util import Data1D, Data2D
from dat_analysis.analysis_tools.nrg import NRG_func_generator
from dat_analysis.analysis_tools.nrg import NRGParams, NrgUtil, get_x_of_half_occ
import dat_analysis.useful_functions as U
from temp import get_avg_entropy_data, get_avg_i_sense_data, _center_func

kb = 0.08617  # Boltzmann constant in meV/K

p1d = OneD(dat=None)
p2d = TwoD(dat=None)
p1d.TEMPLATE = 'simple_white'
p2d.TEMPLATE = 'simple_white'

NRG_OCC_FIT_NAME = 'forced_theta'
# NRG_OCC_FIT_NAME = 'csq_forced_theta'
CSQ_DATNUM = None
# CSQ_DATNUM = 2197

GAMMA_EXPECTED_THETA_PARAMS = NRGParams(
    gamma=23.4352,
    theta=4.5,
    center=78.4,
    amp=0.675,
# fig = plot_per_row_of_transition_param(dats, param_name=param,
#                                        x=[dat.Logs.fds['ESS'] for dat in dats], xlabel='ESS /mV',
#                                        stdev_only=False)
# fig.show(renderer='browser')

# fig = plot_stdev_of_avg(dats[0])
# for dat in dats[1:]:
#     fig.add_trace(trace_stdev_of_avg(dat))
# fig = waterfall_stdev_of_avg(dats)
# fig.show(renderer='browser')

# datnums = [702, 703, 707, 708]
datnums = [7436, 7435]
all_dats = get_dats(datnums)

plotter = OneD(dats=all_dats)
fig = plotter.figure(xlabel='Time /s', ylabel='Current /Arbitrary',
                     title=f'Dats{all_dats[0].datnum}-{all_dats[-1].datnum}: Transition ReadVsTime<br>'
                           f'Decimated to 10Hz')
for dat, name, bias in zip(all_dats,
                           ['On Transition 300uV', 'Off Transition 300uV',
                            'On Transition 500uV', 'Off Transition 500uV'],
                           [300, 300, 500, 500]):
    data = dat.Data.get_data('i_sense')
    numpts = data.shape[-1]
    time_elapsed = numpts / dat.Logs.measure_freq
    x = np.linspace(0, time_elapsed, numpts)
def plot_stacked_square_heated(datnums: List[int], save_name: str, plot=True):
    dats = get_dats(datnums)

    # Plot Integrated
    integrated_plot_info = PlotInfo(
        title_append='Integrated Entropy',
        ylabel='Entropy /kB',
        data_func=lambda dat: dat.Entropy.get_integrated_entropy(
            name=save_name,
            data=dat.SquareEntropy.get_Outputs(name=save_name, check_exists=True).average_entropy_signal),
        x_func=lambda dat: dat.SquareEntropy.get_Outputs(name=save_name, check_exists=True).x,
        trace_name=lambda dat: f'Dat{dat.datnum}'
    )
    fit_plot_info = PlotInfo(
        title_append='Fit Entropy',
        ylabel='Entropy /kB',
        data_func=lambda dat: dat.SquareEntropy.get_Outputs(name=save_name, check_exists=True).average_entropy_signal,
        x_func=lambda dat: dat.SquareEntropy.get_Outputs(name=save_name, check_exists=True).x,
        trace_name=lambda dat: f'Dat{dat.datnum}'
    )

    figs = []
    for plot_info in [integrated_plot_info]:
        plotter = OneD(dats=dats)
        dat = dats[0]
        fig = plotter.figure(xlabel=dat.Logs.xlabel, ylabel=plot_info.ylabel,
                             title=f'Dats{dats[0].datnum}-{dats[-1].datnum}: {plot_info.title_append}')
        for dat in dats:
            data = plot_info.data_func(dat)
            x = plot_info.x_func(dat)

            hover_infos = [
                HoverInfo(name='Datnum', func=lambda dat: dat.datnum, precision='d', units=''),
                HoverInfo(name=dat.Logs.xlabel, func=lambda dat: plot_info.x_func(dat), precision='.2f', units='/mV'),
                HoverInfo(name=plot_info.ylabel, func=lambda dat: dat.datnum, precision='d', units=''),
            ]
            hover_funcs, template = _additional_data_dict_converter(hover_infos)

            hover_data = []
            for func in hover_funcs:
                v = func(dat)
                if not hasattr(v, '__len__') or len(v) == 1:  # Make sure a hover info for each x_coord
                    v = [v] * len(x)
                hover_data.append(v)

            fig.add_trace(plotter.trace(x=x, data=data, name=plot_info.trace_name(dat),
                                        hover_data=hover_data, hover_template=template,
                                        mode='lines'))
        if plot:
            fig.show()
        figs.append(fig)
    return figs
def _get_param_trace(fits: List, param: str, y_array: np.ndarray):
    plotter = OneD(dat=None)
    trace = plotter.trace(x=y_array, data=[getattr(fit.best_values, param) for fit in fits], mode='markers+lines')
    return trace
def one_d(self, invert_fit_on_data=False) -> go.Figure:
    """
    Args:
        invert_fit_on_data (): False to modify NRG to fit data, True to modify Data to fit NRG

    Returns:

    """
    plotter = OneD(dat=None)
    title_prepend = f'NRG fit to Data' if not invert_fit_on_data else 'Data fit to NRG'
    title_append = f' -- Dat{self.datnum}' if self.datnum else ''
    xlabel = 'Sweepgate /mV' if not invert_fit_on_data else 'Ens*1000'
    ylabel = 'Current /nA' if not invert_fit_on_data else '1-Occupation'
    fig = plotter.figure(xlabel=xlabel, ylabel=ylabel,
                         title=f'{title_prepend}: G={self.g:.2f}mV, '
                               f'{THETA}={self.theta:.2f}mV, '
                               f'{THETA}/G={self.theta / self.g:.2f}'
                               f'{title_append}')
    min_, max_ = 0, 1
    if self.datnum:
        x_for_nrg = None
        for i, which in enumerate(self.which):
            x, data = _get_x_and_data(self.datnum, self.experiment_name, which)
            x_for_nrg = x
            if invert_fit_on_data is True:
                x, data = invert_nrg_fit_params(x, data, gamma=self.g, theta=self.theta, mid=self.mid,
                                                amp=self.amp, lin=self.lin, const=self.const,
                                                occ_lin=self.occ_lin, data_type=which)
            if i == 0 and data is not None:
                min_, max_ = np.nanmin(data), np.nanmax(data)
                fig.add_trace(plotter.trace(x=x, data=data, name=f'Data - {which}', mode='lines'))
            else:
                if data is not None:
                    scaled = scale_data(data, min_, max_)
                    fig.add_trace(plotter.trace(x=x, data=scaled.scaled_data, name=f'Scaled Data - {which}',
                                                mode='lines'))
                    if min_ - (max_ - min_) / 10 < scaled.new_zero < max_ + (max_ - min_) / 10:
                        plotter.add_line(fig, scaled.new_zero, mode='horizontal', color='black', linetype='dot',
                                         linewidth=1)
    else:
        x_for_nrg = np.linspace(-100, 100, 1001)

    for i, which in enumerate(self.which):
        if which == 'i_sense_cold':
            which = 'i_sense'
        elif which == 'i_sense_hot':
            if 'i_sense_cold' in self.which:
                continue
            which = 'i_sense'
        nrg_func = NRG_func_generator(which=which)
        if invert_fit_on_data:
            # nrg_func(x, mid, gamma, theta, amp, lin, const, occ_lin)
            nrg_data = nrg_func(x_for_nrg, self.mid, self.g, self.theta, 1, 0, 0, 0)
            if which == 'i_sense':
                nrg_data += 0.5  # 0.5 because that still gets subtracted otherwise
            # x = (x_for_nrg - self.mid - self.g*(-1.76567) - self.theta*(-1)) / self.g
            x = (x_for_nrg - self.mid) / self.g
        else:
            x = x_for_nrg
            nrg_data = nrg_func(x, self.mid, self.g, self.theta, self.amp, self.lin, self.const, self.occ_lin)
        cmin, cmax = np.nanmin(nrg_data), np.nanmax(nrg_data)
        if i == 0 and min_ == 0 and max_ == 1:
            fig.add_trace(plotter.trace(x=x, data=nrg_data, name=f'NRG {which}', mode='lines'))
            min_, max_ = cmin, cmax
        else:
            scaled = scale_data(nrg_data, min_, max_)
            fig.add_trace(plotter.trace(x=x, data=scaled.scaled_data, name=f'Scaled NRG {which}', mode='lines'))
            if min_ - (max_ - min_) / 10 < scaled.new_zero < max_ + (max_ - min_) / 10:
                plotter.add_line(fig, scaled.new_zero, mode='horizontal', color='black', linetype='dot', linewidth=1)
    return fig
# single_fit = linear_fit_thetas(dats=transition_dats, fit_name='forced_gamma_zero',
#                                filter_func=lambda dat: True if (-282 < dat.logs.fds['esc'] < -265) or (-255 < dat.logs.fds['esc'] < -235) else False,
#                                show_plots=False)
from dat_analysis.analysis_tools.general_fitting import calculate_fit

tdats = get_dats(VS_GAMMA_Tonly)
linear_fit_thetas(dats=tdats, fit_name='forced_gamma_zero',
                  filter_func=lambda dat: True if dat.Logs.dacs["ESC"] < -285 else False,
                  show_plots=True, sweep_gate_divider=100)
# print('done')

p1d = OneD(dat=None)
fig = p1d.figure(xlabel='ESC /mV', ylabel='dT')

dats = get_dats(VS_GAMMA)
escs = [dat.Logs.dacs['ESC'] for dat in dats]
# dts = [dat.SquareEntropy.get_fit(fit_name='forced_gamma_zero_non_csq_hot').best_values.theta -
#        dat.SquareEntropy.get_fit(fit_name='forced_gamma_zero_non_csq_cold').best_values.theta for dat in dats]
dts = [dat.Entropy.get_integration_info(name='forced_theta_linear_non_csq').dT for dat in dats]

line = lm.models.LinearModel()
fit = calculate_fit(x=np.array(escs[:11]), data=np.array(dts[:11]),
data_dndt = all_data['Scaled Data - dndt']
nrg_dndt = all_data['Scaled NRG dndt']
occupation = all_data['Scaled NRG occupation']
print(x.shape, data_dndt.shape, nrg_dndt.shape, occupation.shape)

interp_range = np.where(np.logical_and(occupation < 0.99, occupation > 0.01))
interp_data = occupation[interp_range]
interp_x = x[interp_range]
interper = interp1d(x=interp_x, y=interp_data, assume_sorted=True)
occ_x = interper(x)

plotter = OneD(dat=None)
fig = plotter.figure(xlabel='Occupation', ylabel='Arbitrary',
                     title='dN/dT vs Occupation at Temp/Gamma = 0.04 (Temp = 4e-5 in NRG)')
fig = plotter.figure(xlabel='Occupation', ylabel='Arbitrary',
                     title='dN/dT vs Occupation at Temp/Gamma = 0.19 (Temp = 1.9e-4 in NRG)')
fig.add_trace(plotter.trace(x=occ_x, data=(data_dndt - 0.2) * 1.2, name='Data', mode='lines+markers'))
from dataclasses import dataclass  # needed for the Params dataclass below
from typing import Optional  # needed for Optional[int] in Params

import lmfit as lm
from deprecation import deprecated

import dat_analysis.useful_functions as U
from dat_analysis.dat_analysis.characters import DELTA
from dat_analysis.dat_object.make_dat import get_dat
from dat_analysis.plotting.plotly.dat_plotting import OneD, TwoD
from dat_analysis.analysis_tools.general_fitting import calculate_fit
from dat_analysis.analysis_tools.nrg import NRG_func_generator
from OLD.new_dash.pages import invert_nrg_fit_params
import plotly.io as pio

pio.renderers.default = 'browser'

p1d = OneD(dat=None)
p2d = TwoD(dat=None)


@dataclass
class Params:
    gamma: float
    theta: float
    center: float
    amp: float
    lin: float
    const: float
    lin_occ: float
    vary_theta: bool = False
    vary_gamma: bool = False
    datnum: Optional[int] = None
if __name__ == '__main__':
    # dat = get_dat(2214)
    # x = dat.Data.get_data('x')
    # data = dat.Data.get_data('i_sense')[63]
    # all_data = dat.Transition.data

    # dat = get_dat(2216)
    dat = get_dat(2164)
    out = dat.SquareEntropy.get_row_only_output(name='default')
    x = out.x
    all_data = np.nanmean(np.array(out.cycled[:, (0, 2), :]), axis=1)
    single_row = 10
    data = all_data[single_row]

    plotter = OneD(dat=dat)
    plotter.MAX_POINTS = 100000
    fig = plotter.figure(ylabel='Current /nA', title=f'Dat{dat.datnum}: Checking Accuracy of Center from fit')

    # Whole row of data
    fig.add_trace(plotter.trace(x=x, data=data, name=f'All data of row{single_row}', mode='lines'))

    # Fits
    reports = []
    fits = []