def hanger_func_complex_SI_Guess(data, f, fit_window=None):
    """Build an lmfit-style guess dict for a complex SI hanger model.

    Parameters
    ----------
    data : array-like
        Measured transmission data; only its magnitude ``np.abs(data)``
        is used for the guesses.
    f : array-like
        Frequency axis corresponding to ``data`` (Hz; f0 bounds are
        converted to GHz via the 1e-9 factors below).
    fit_window : unused
        Kept for interface compatibility with the sibling guess
        functions in this module.

    Returns
    -------
    dict
        Parameter-hint dict with keys ``f0``, ``A``, ``Qe``, ``Ql``,
        ``theta``, ``alpha``, ``phi_0``, ``phi_v``.
    """
    # NOTE(review): the original author flagged this as a crude
    # placeholder guess ("just to get some return value").
    xvals = f
    abs_data = np.abs(data)

    # Locate the resonance: prefer a dip, then a peak, otherwise fall
    # back to the centre of the sweep range.
    peaks = a_tools.peak_finder(xvals, abs_data)
    if peaks['dip'] is not None:
        f0 = peaks['dip']
    elif peaks['peak'] is not None:
        f0 = peaks['peak']
    else:
        f0 = np.median(xvals)

    # Frequencies of the magnitude extrema; their separation sets the
    # linewidth estimate used for Q below.
    min_frequency = xvals[np.argmin(abs_data)]
    max_frequency = xvals[np.argmax(abs_data)]

    # Reject outliers once and reuse the result (the original called
    # dm_tools.reject_outliers three times on the same data).
    clean_abs = dm_tools.reject_outliers(abs_data)
    amplitude_guess = max(clean_abs)
    S21min = min(clean_abs) / max(clean_abs)

    Q = f0 / abs(min_frequency - max_frequency)
    Qe = abs(Q / abs(1 - S21min))

    guess_dict = {'f0': {'value': f0*1e-9,
                         'min': min(xvals)*1e-9,
                         'max': max(xvals)*1e-9},
                  'A': {'value': amplitude_guess},
                  'Qe': {'value': Qe, 'min': 1, 'max': 50e6},
                  'Ql': {'value': Q, 'min': 1, 'max': 50e6},
                  'theta': {'value': 0, 'min': -np.pi/2, 'max': np.pi/2},
                  'alpha': {'value': 0, 'vary': True},
                  'phi_0': {'value': 0, 'vary': True},
                  'phi_v': {'value': 0, 'vary': True}}
    return guess_dict
def get_peaks(dac_vector, f_vector, z_vector):
    """Locate the dominant spectral feature for each DAC setting.

    For every trace ``z_vector[i]`` the data is smoothed and
    baseline-subtracted, then the peak finder is run on the interior
    samples and either the dip or the peak position is recorded,
    depending on which side of the mean the trace extends further.

    Parameters
    ----------
    dac_vector : sequence
        DAC values; only its length is used to iterate the traces.
    f_vector : sequence of array-like
        Per-trace frequency axes.
    z_vector : sequence of array-like
        Per-trace signal data.

    Returns
    -------
    numpy.ndarray
        Feature frequency (dip or peak) for each trace.
    """
    # Smooth each trace and subtract a baseline estimated from its
    # first few samples.  (The original also built an unused
    # ``int_interval = np.arange(20)``; removed.)
    plot_z = [a_tools.smooth(z_vector[i][:], 11) - np.mean(z_vector[i][1:10])
              for i in range(len(dac_vector))]

    peaks = np.zeros(len(dac_vector))
    for i in range(len(dac_vector)):
        # Trim the edge samples before searching, matching the slice
        # used for the dip-vs-peak decision below.
        trace = plot_z[i][1:-2]
        p_dict = a_tools.peak_finder(f_vector[i][1:-2], trace)
        # If the trace drops further below its mean than it rises above
        # it, the dominant feature is a dip; otherwise take the peak.
        if (np.mean(trace) - np.min(trace)) > (np.max(trace) - np.mean(trace)):
            peaks[i] = p_dict['dip']
        else:
            peaks[i] = p_dict['peak']
    return peaks
def SlopedHangerFuncAmplitudeGuess(data, f, fit_window=None):
    """Build an lmfit-style guess dict for a sloped hanger amplitude model.

    Parameters
    ----------
    data : array-like
        Measured transmission magnitude.
    f : array-like
        Frequency axis corresponding to ``data`` (Hz; f0 bounds are
        converted to GHz via the 1e-9 factors below).
    fit_window : unused
        Kept for interface compatibility with the sibling guess
        functions in this module.

    Returns
    -------
    dict
        Parameter-hint dict with keys ``f0``, ``A``, ``Q``, ``Qe``,
        ``Qi``, ``Qc``, ``theta``, ``slope``.  ``Qi`` and ``Qc`` are
        expression parameters derived from ``Q``, ``Qe`` and ``theta``.
    """
    xvals = f

    # Locate the resonance: prefer a dip, then a peak, otherwise fall
    # back to the centre of the sweep range.
    peaks = a_tools.peak_finder(xvals, data)
    if peaks['dip'] is not None:
        f0 = peaks['dip']
    elif peaks['peak'] is not None:
        f0 = peaks['peak']
    else:
        f0 = np.median(xvals)

    # Frequencies of the extrema; their separation sets the linewidth
    # estimate used for Q below.
    min_frequency = xvals[np.argmin(data)]
    max_frequency = xvals[np.argmax(data)]

    # Reject outliers once and reuse the result (the original called
    # dm_tools.reject_outliers three times on the same data).
    clean_data = dm_tools.reject_outliers(data)
    amplitude_guess = max(clean_data)
    S21min = min(clean_data) / max(clean_data)

    Q = f0 / abs(min_frequency - max_frequency)
    Qe = abs(Q / abs(1 - S21min))

    guess_dict = {'f0': {'value': f0*1e-9,
                         'min': min(xvals)*1e-9,
                         'max': max(xvals)*1e-9},
                  'A': {'value': amplitude_guess},
                  'Q': {'value': Q, 'min': 1, 'max': 50e6},
                  'Qe': {'value': Qe, 'min': 1, 'max': 50e6},
                  'Qi': {'expr': 'abs(1./(1./Q-1./Qe*cos(theta)))',
                         'vary': False},
                  'Qc': {'expr': 'Qe/cos(theta)', 'vary': False},
                  'theta': {'value': 0, 'min': -np.pi/2, 'max': np.pi/2},
                  'slope': {'value': 0, 'vary': True}}
    return guess_dict
def qubit_fit(self, sweep_points, linecut_mag, **kw):
    """
    This is basically a modified copy of the 'fit_data' function of
    the Qubit_Spectroscopy_Analysis method.
    Does not support 2nd peak fitting, as it does not seem necessary.
    """
    # Optional tuning knobs; 'optimize' is popped so it is not passed on
    # through any later **kw forwarding.
    frequency_guess = kw.get('frequency_guess', None)
    percentile = kw.get('percentile', 20)
    num_sigma_threshold = kw.get('num_sigma_threshold', 5)
    window_len_filter = kw.get('window_len_filter', 3)
    optimize = kw.pop('optimize', True)
    verbose = kw.get('verbose', False)

    self.data_dist = linecut_mag
    # Smooth the distance data before peak finding to suppress noise.
    data_dist_smooth = a_tools.smooth(self.data_dist,
                                      window_len=window_len_filter)

    self.peaks = a_tools.peak_finder(sweep_points,
                                     data_dist_smooth,
                                     percentile=percentile,
                                     num_sigma_threshold=num_sigma_threshold,
                                     optimize=optimize,
                                     window_len=0)

    # extract highest peak -> ge transition
    # Branch ladder choosing the initial frequency (f0) and linewidth
    # (kappa_guess): an explicit user guess wins; otherwise pick the
    # taller of dip/peak on the smoothed data; otherwise fall back to
    # the sweep midpoint with a fixed-width guess.
    # NOTE(review): below this point the code reads self.sweep_points
    # rather than the sweep_points argument used for peak finding —
    # presumably they are the same data; confirm against the caller.
    if frequency_guess is not None:
        f0 = frequency_guess
        # Width guess: 1/20 of the full sweep span.
        kappa_guess = (max(self.sweep_points) - min(self.sweep_points)) / 20
        key = 'peak'
    elif self.peaks['dip'] is None:
        f0 = self.peaks['peak']
        kappa_guess = self.peaks['peak_width'] / 4
        key = 'peak'
    elif self.peaks['peak'] is None:
        f0 = self.peaks['dip']
        kappa_guess = self.peaks['dip_width'] / 4
        key = 'dip'
    # elif self.peaks['dip'] < self.peaks['peak']:
    elif np.abs(data_dist_smooth[self.peaks['dip_idx']]) < \
            np.abs(data_dist_smooth[self.peaks['peak_idx']]):
        f0 = self.peaks['peak']
        kappa_guess = self.peaks['peak_width'] / 4
        key = 'peak'
    # elif self.peaks['peak'] < self.peaks['dip']:
    elif np.abs(data_dist_smooth[self.peaks['dip_idx']]) > \
            np.abs(data_dist_smooth[self.peaks['peak_idx']]):
        f0 = self.peaks['dip']
        kappa_guess = self.peaks['dip_width'] / 4
        key = 'dip'
    else:  # Otherwise take center of range and raise warning
        f0 = np.median(self.sweep_points)
        kappa_guess = 0.005 * 1e9
        logging.warning('No peaks or dips have been found. Initial '
                        'frequency guess taken '
                        'as median of sweep points (f_guess={}), '
                        'initial linewidth '
                        'guess was taken as kappa_guess={}'.format(
                            f0, kappa_guess))
        key = 'peak'

    tallest_peak = f0  # the ge freq
    if verbose:
        print('Largest ' + key + ' is at ', tallest_peak)
    # NOTE(review): tallest_peak_idx is only bound when f0 matches the
    # found peak/dip (i.e. not for a user-supplied frequency_guess or
    # the median fallback); it is only used for the verbose print here.
    if f0 == self.peaks[key]:
        tallest_peak_idx = self.peaks[key + '_idx']
        if verbose:
            print('Largest ' + key + ' idx is ', tallest_peak_idx)

    # Lorentzian area guess from the linewidth and data range; a dip is
    # modelled with negative amplitude.
    amplitude_guess = np.pi * kappa_guess * \
        abs(max(self.data_dist) - min(self.data_dist))
    if key == 'dip':
        amplitude_guess = -amplitude_guess

    # Configure parameter hints on the shared model object, then fit.
    # NOTE(review): fit_mods.LorentzianModel appears to be a module-level
    # model instance, so these hints persist across calls — verify.
    LorentzianModel = fit_mods.LorentzianModel
    LorentzianModel.set_param_hint('f0',
                                   min=min(self.sweep_points),
                                   max=max(self.sweep_points),
                                   value=f0)
    LorentzianModel.set_param_hint('A',
                                   value=amplitude_guess)
    LorentzianModel.set_param_hint('offset',
                                   value=np.mean(self.data_dist),
                                   vary=True)
    LorentzianModel.set_param_hint('kappa',
                                   value=kappa_guess,
                                   min=1,
                                   vary=True)
    # Q is a derived (expression) parameter, not varied directly.
    LorentzianModel.set_param_hint('Q',
                                   expr='f0/kappa',
                                   vary=False)
    self.params = LorentzianModel.make_params()

    fit_res = LorentzianModel.fit(data=self.data_dist,
                                  f=self.sweep_points,
                                  params=self.params)
    return fit_res