def _regress(self, plot, scatter, line):
    """Fit the scatter's data and push the prediction onto ``line``.

    The fit spec on ``scatter`` is converted via ``convert_fit``; the
    result selects the regressor kind (polynomial degree 1-4,
    'exponential', a least-squares tuple, a custom BaseRegressor, or a
    mean fallback). The regressor then predicts over the plot's visible
    x-range (optionally clipped by ``line.regression_bounds``) and the
    prediction and error envelope are written into the line plot.

    :param plot: chaco plot supplying ``index_range`` for the x bounds
    :param scatter: scatter plot carrying ``fit`` and the data to regress
    :param line: companion line plot to update (may be None)
    :return: the regressor used, or None when the fit cannot be converted
    """
    fit, err = convert_fit(scatter.fit)
    if fit is None:
        print('fit is none, {}'.format(scatter.fit))
        return

    # Reuse an existing regressor attached to the line when available,
    # so incremental updates don't rebuild state from scratch.
    r = None
    if line and hasattr(line, 'regressor'):
        r = line.regressor

    if fit in [1, 2, 3, 4]:
        r = self._poly_regress(scatter, r, fit)
    elif fit == 'exponential':
        r = self._exponential_regress(scatter, r, fit)
    elif isinstance(fit, tuple):
        r = self._least_square_regress(scatter, r, fit)
    elif isinstance(fit, BaseRegressor):
        r = self._custom_regress(scatter, r, fit)
    else:
        r = self._mean_regress(scatter, r, fit)

    if r:
        r.error_calc_type = err
        if line:
            plow = plot.index_range._low_value
            phigh = plot.index_range._high_value
            if hasattr(line, 'regression_bounds') and line.regression_bounds:
                # Bounds are (low, high, pin_first, pin_last); the pinned
                # side may still be extended to cover the visible range.
                low, high, first, last = line.regression_bounds
                if first:
                    low = min(low, plow)
                elif last:
                    high = max(high, phigh)
            else:
                low, high = plow, phigh

            fx = linspace(low, high, 100)
            fy = r.predict(fx)
            line.regressor = r
            try:
                line.index.set_data(fx)
                line.value.set_data(fy)
            except BaseException as e:
                # BUGFIX: message previously read 'Regerssion Exception'
                print('Regression Exception, {}'.format(e))
                return

            if hasattr(line, 'error_envelope'):
                ci = r.calculate_error_envelope(fx, fy)
                if ci is not None:
                    ly, uy = ci
                else:
                    # Degenerate envelope: collapse onto the prediction.
                    ly, uy = fy, fy
                line.error_envelope.lower = ly
                line.error_envelope.upper = uy
                line.error_envelope.invalidate()
    return r
def _regress(self, plot, scatter, line):
    """Build (or refresh) the regressor for ``scatter`` and update ``line``.

    Skips entirely when the scatter opts out via ``no_regression`` or
    when its fit spec cannot be converted. Otherwise the appropriate
    regressor is created/reused, its prediction over the visible x-range
    is pushed onto the line plot, and the confidence band is written to
    the line's error envelope when it has one.

    :return: the regressor, or None when regression was skipped
    """
    if scatter.no_regression:
        return

    fit, err = convert_fit(scatter.fit)
    if fit is None:
        return

    # Reuse the regressor already attached to the line, if any.
    reg = line.regressor if line and hasattr(line, 'regressor') else None

    if fit in (1, 2, 3):
        reg = self._poly_regress(scatter, reg, fit)
    elif isinstance(fit, tuple):
        reg = self._least_square_regress(scatter, reg, fit)
    elif isinstance(fit, BaseRegressor):
        reg = self._custom_regress(scatter, reg, fit)
    else:
        reg = self._mean_regress(scatter, reg, fit)

    if reg:
        reg.error_calc_type = err
        if line:
            view_lo = plot.index_range.low
            view_hi = plot.index_range.high

            bounds = getattr(line, 'regression_bounds', None)
            if bounds:
                # (low, high, pin_first, pin_last): a pinned edge may be
                # stretched to cover the currently visible range.
                low, high, first, last = bounds
                if first:
                    low = min(low, view_lo)
                elif last:
                    high = max(high, view_hi)
            else:
                low, high = view_lo, view_hi

            xs = linspace(low, high, 100)
            ys = reg.predict(xs)

            line.regressor = reg
            line.index.set_data(xs)
            line.value.set_data(ys)

            if hasattr(line, 'error_envelope'):
                band = reg.calculate_ci(xs, ys)
                # Fall back to a zero-width band on the prediction itself.
                lower, upper = band if band is not None else (ys, ys)
                line.error_envelope.lower = lower
                line.error_envelope.upper = upper
                line.error_envelope.invalidate()
    return reg
def _regress(self, plot, scatter, line):
    """Regress the scatter data and update the companion line plot.

    Returns the regressor, or None when ``scatter.no_regression`` is set
    or the fit spec cannot be converted.
    """
    if scatter.no_regression:
        return
    fit, err = convert_fit(scatter.fit)
    if fit is None:
        return

    # Reuse the regressor already attached to the line, if any.
    r = None
    if line and hasattr(line, 'regressor'):
        r = line.regressor

    # Dispatch on the converted fit: polynomial degree, least-squares
    # tuple, user-supplied regressor, or the mean fallback.
    if fit in [1, 2, 3]:
        r = self._poly_regress(scatter, r, fit)
    elif isinstance(fit, tuple):
        r = self._least_square_regress(scatter, r, fit)
    elif isinstance(fit, BaseRegressor):
        r = self._custom_regress(scatter, r, fit)
    else:
        r = self._mean_regress(scatter, r, fit)

    if r:
        r.error_calc_type = err
        if line:
            plow = plot.index_range.low
            phigh = plot.index_range.high
            if hasattr(line, 'regression_bounds') and line.regression_bounds:
                # (low, high, pin_first, pin_last): a pinned edge may be
                # extended to cover the visible plot range.
                low, high, first, last = line.regression_bounds
                if first:
                    low = min(low, plow)
                elif last:
                    high = max(high, phigh)
            else:
                low, high = plow, phigh

            fx = linspace(low, high, 100)
            fy = r.predict(fx)
            line.regressor = r
            line.index.set_data(fx)
            line.value.set_data(fy)

            if hasattr(line, 'error_envelope'):
                ci = r.calculate_ci(fx, fy)
                # print ci
                if ci is not None:
                    ly, uy = ci
                else:
                    # No confidence band available: collapse the envelope
                    # onto the prediction.
                    ly, uy = fy, fy
                line.error_envelope.lower = ly
                line.error_envelope.upper = uy
                line.error_envelope.invalidate()
    return r
def _save_db_fit(self, unk, meas_analysis, fit_hist, name, fit, filter_dict):
    """Persist a changed fit (and its isotope result) to the database.

    A trailing 'bs' on ``name`` selects the baseline channel, otherwise
    the signal channel. When the requested ``fit`` differs from the one
    stored in the db, the isotope's fit is updated and a fit record plus
    isotope result are written under ``fit_hist`` (created on demand).

    :param unk: analysis object exposing ``get_db_fit`` and ``isotopes``
    :param meas_analysis: db measurement-analysis record
    :param fit_hist: existing fit-history record or None to create one
    :param name: isotope name, optionally suffixed with 'bs' for baseline
    :param fit: fit spec (e.g. 'linear'); converted via ``convert_fit``
    :param filter_dict: outlier-filter settings ('use', 'n', 'std_devs')
        or falsy to leave filtering untouched
    :return: the (possibly newly created) fit history, or None on failure
    """
    db = self.processor.db
    if name.endswith('bs'):
        name = name[:-2]
        dbfit = unk.get_db_fit(name, meas_analysis, 'baseline')
        kind = 'baseline'
        iso = unk.isotopes[name].baseline
    else:
        dbfit = unk.get_db_fit(name, meas_analysis, 'signal')
        kind = 'signal'
        iso = unk.isotopes[name]

    if filter_dict:
        iso.filter_outliers = filter_dict['use']
        iso.filter_outlier_iterations = filter_dict['n']
        iso.filter_outlier_std_devs = filter_dict['std_devs']

    # Only write when the stored fit actually changed.
    if dbfit != fit:
        v = iso.uvalue
        f, e = convert_fit(fit)
        iso.fit = f
        # iso.fit = convert_fit(fit)
        if fit_hist is None:
            fit_hist = db.add_fit_history(meas_analysis,
                                          user=db.save_username)

        # BUGFIX: loop variable renamed from 'iso' to 'i' so it no longer
        # shadows the isotope object used below (consistent with the
        # sibling _save_db_fit variants).
        dbiso = next(
            (i for i in meas_analysis.isotopes
             if i.molecular_weight.name == name and i.kind == kind),
            None)
        if fit_hist is None:
            self.warning('Failed added fit history for {}'.format(
                unk.record_id))
            return

        db.add_fit(fit_hist, dbiso, fit=fit,
                   filter_outliers=iso.filter_outliers,
                   filter_outlier_iterations=iso.filter_outlier_iterations,
                   filter_outlier_std_devs=iso.filter_outlier_std_devs)
        # update isotoperesults
        v, e = float(v.nominal_value), float(v.std_dev)
        db.add_isotope_result(dbiso, fit_hist, signal_=v, signal_err=e)
        # self.debug('adding {} fit {} - {}'.format(kind, name, fit))
    return fit_hist
def _exponential_regress(self, scatter, r, fit):
    """Fit the scatter data with an ExponentialRegressor.

    Reuses ``r`` when it is already an ExponentialRegressor, otherwise
    creates a fresh one. If the exponential fit fails (FitError), falls
    back to a polynomial regression using the scatter's original fit
    (``scatter.ofit``).

    :param scatter: scatter plot supplying the data and ``ofit`` fallback
    :param r: previously used regressor, reused when compatible
    :param fit: fit spec (unused here; kept for dispatch-signature parity)
    :return: the calculated regressor
    """
    # Imported locally to avoid a module-level dependency cycle.
    from pychron.core.regression.least_squares_regressor import ExponentialRegressor, FitError

    if r is None or not isinstance(r, ExponentialRegressor):
        r = ExponentialRegressor()

    self._set_regressor(scatter, r)
    try:
        r.calculate()
        self._set_excluded(scatter, r)
    except FitError:
        # Exponential fit did not converge: fall back to the original
        # (polynomial) fit. The error type from convert_fit is unused.
        f, _ = convert_fit(scatter.ofit)
        r = self._poly_regress(scatter, r, f)
    return r
def _save_db_fit(self, unk, meas_analysis, fit_hist, name, fit, et,
                 filter_dict, include_baseline_error, time_zero_offset):
    """Apply fit settings to an isotope and persist them to the database.

    A trailing 'bs' on ``name`` selects the baseline channel, otherwise
    the signal channel. The isotope's fit, error type, baseline-error
    inclusion and time-zero offset are updated, then a fit record and
    isotope result are written under ``fit_hist`` (created on demand).

    :param et: explicit error type; falls back to the one derived from
        ``convert_fit(fit)`` when falsy
    :param filter_dict: outlier-filter settings passed to
        ``iso.set_filtering`` when truthy
    :return: the (possibly newly created) fit history, or None on failure
    """
    db = self.processor.db
    if name.endswith('bs'):
        name = name[:-2]
        # dbfit = unk.get_db_fit(meas_analysis, name, 'baseline')
        kind = 'baseline'
        iso = unk.isotopes[name].baseline
    else:
        # dbfit = unk.get_db_fit(meas_analysis, name, 'signal')
        kind = 'signal'
        iso = unk.isotopes[name]

    f, e = convert_fit(fit)
    iso.fit = f
    # Explicit error type wins; otherwise use the converted default.
    iso.error_type = et or e
    iso.include_baseline_error = bool(include_baseline_error)
    iso.time_zero_offset = time_zero_offset
    if filter_dict:
        iso.set_filtering(filter_dict)

    if fit_hist is None:
        fit_hist = db.add_fit_history(meas_analysis, user=db.save_username)

    # Locate the db isotope row matching this name and channel kind.
    dbiso = next((i for i in meas_analysis.isotopes
                  if i.molecular_weight.name == name and i.kind == kind),
                 None)
    if fit_hist is None:
        self.warning('Failed added fit history for {}'.format(
            unk.record_id))
        return

    fod = iso.filter_outliers_dict
    db.add_fit(fit_hist, dbiso, fit=fit,
               error_type=iso.error_type,
               filter_outliers=fod['filter_outliers'],
               filter_outlier_iterations=fod['iterations'],
               filter_outlier_std_devs=fod['std_devs'],
               include_baseline_error=include_baseline_error,
               time_zero_offset=time_zero_offset)

    # update isotoperesults
    v, e = float(iso.value), float(iso.error)
    db.add_isotope_result(dbiso, fit_hist, signal_=v, signal_err=e)
    return fit_hist
def _save_db_fit(self, unk, meas_analysis, fit_hist, name, fit, filter_dict):
    """Persist a changed fit (and its isotope result) to the database.

    A trailing 'bs' on ``name`` selects the baseline channel, otherwise
    the signal channel. When the requested ``fit`` differs from the db's
    stored fit, the isotope's fit is updated and a fit record plus
    isotope result are written under ``fit_hist`` (created on demand).

    :param fit_hist: existing fit-history record or None to create one
    :param filter_dict: outlier-filter settings ('use', 'n', 'std_devs')
        or falsy to leave filtering untouched
    :return: the (possibly newly created) fit history, or None on failure
    """
    db = self.processor.db
    # print name
    if name.endswith('bs'):
        name = name[:-2]
        dbfit = unk.get_db_fit(name, meas_analysis, 'baseline')
        kind = 'baseline'
        iso = unk.isotopes[name].baseline
    else:
        dbfit = unk.get_db_fit(name, meas_analysis, 'signal')
        kind = 'signal'
        iso = unk.isotopes[name]

    if filter_dict:
        iso.filter_outliers = filter_dict['use']
        iso.filter_outlier_iterations = filter_dict['n']
        iso.filter_outlier_std_devs = filter_dict['std_devs']

    # Only write when the stored fit actually changed.
    if dbfit != fit:
        v = iso.uvalue
        f, e = convert_fit(fit)
        iso.fit = f
        # iso.fit = convert_fit(fit)
        if fit_hist is None:
            fit_hist = db.add_fit_history(meas_analysis,
                                          user=db.save_username)

        # BUGFIX: loop variable renamed from 'iso' to 'i' so it no longer
        # shadows the isotope object used below (consistent with the
        # sibling _save_db_fit variants).
        dbiso = next((i for i in meas_analysis.isotopes
                      if i.molecular_weight.name == name and i.kind == kind),
                     None)
        if fit_hist is None:
            self.warning('Failed added fit history for {}'.format(unk.record_id))
            return

        db.add_fit(fit_hist, dbiso, fit=fit,
                   filter_outliers=iso.filter_outliers,
                   filter_outlier_iterations=iso.filter_outlier_iterations,
                   filter_outlier_std_devs=iso.filter_outlier_std_devs)
        # update isotoperesults
        v, e = float(v.nominal_value), float(v.std_dev)
        db.add_isotope_result(dbiso, fit_hist, signal_=v, signal_err=e)
        # self.debug('adding {} fit {} - {}'.format(kind, name, fit))
    return fit_hist
def _save_db_fit(self, unk, meas_analysis, fit_hist, name, fit, et,
                 filter_dict, include_baseline_error, time_zero_offset):
    """Apply fit settings to an isotope and persist them to the database.

    A trailing 'bs' on ``name`` selects the baseline channel, otherwise
    the signal channel. The isotope's fit, error type, baseline-error
    inclusion and time-zero offset are updated, then a fit record and
    isotope result are written under ``fit_hist`` (created on demand).

    :param et: explicit error type; falls back to the one derived from
        ``convert_fit(fit)`` when falsy
    :param filter_dict: outlier-filter settings passed to
        ``iso.set_filtering`` when truthy
    :return: the (possibly newly created) fit history, or None on failure
    """
    db = self.processor.db
    if name.endswith('bs'):
        name = name[:-2]
        # dbfit = unk.get_db_fit(meas_analysis, name, 'baseline')
        kind = 'baseline'
        iso = unk.isotopes[name].baseline
    else:
        # dbfit = unk.get_db_fit(meas_analysis, name, 'signal')
        kind = 'signal'
        iso = unk.isotopes[name]

    f, e = convert_fit(fit)
    iso.fit = f
    # Explicit error type wins; otherwise use the converted default.
    iso.error_type = et or e
    iso.include_baseline_error = bool(include_baseline_error)
    iso.time_zero_offset = time_zero_offset
    if filter_dict:
        iso.set_filtering(filter_dict)

    if fit_hist is None:
        fit_hist = db.add_fit_history(meas_analysis, user=db.save_username)

    # Locate the db isotope row matching this name and channel kind.
    dbiso = next((i for i in meas_analysis.isotopes
                  if i.molecular_weight.name == name and i.kind == kind),
                 None)
    if fit_hist is None:
        self.warning('Failed added fit history for {}'.format(unk.record_id))
        return

    fod = iso.filter_outliers_dict
    db.add_fit(fit_hist, dbiso, fit=fit,
               error_type=iso.error_type,
               filter_outliers=fod['filter_outliers'],
               filter_outlier_iterations=fod['iterations'],
               filter_outlier_std_devs=fod['std_devs'],
               include_baseline_error=include_baseline_error,
               time_zero_offset=time_zero_offset)

    # update isotoperesults
    v, e = float(iso.value), float(iso.error)
    db.add_isotope_result(dbiso, fit_hist, signal_=v, signal_err=e)
    return fit_hist
def test_convert_parabolic_werr(self):
    """'parabolic_SD' converts to degree 2 with the 'SD' error type."""
    ofit, err = convert_fit('parabolic_SD')
    self.assertEqual(ofit, 2)
    self.assertEqual(err, 'SD')
def show_evolutions_factory(record_id, isotopes, show_evo=True,
                            show_equilibration=False, show_baseline=False,
                            show_statistics=False, ncols=1,
                            scale_to_equilibration=False):
    """Build a stacked regression graph of isotope evolutions.

    One plot is created per isotope; equilibration (sniff), evolution and
    baseline series are added per the show_* flags, and shared x/y limits
    are accumulated across series. With ``ncols > 1`` the isotopes are
    laid out column-major in a grid; otherwise a single vertical stack.

    :param record_id: identifier used in the window title
    :param isotopes: isotope objects to plot
    :param scale_to_equilibration: when True, y-limits ignore the raw
        evolution/baseline data and are instead derived from the fit
        evaluated at the range edges
    :return: the configured graph, or None when too many windows are open
    """
    if WINDOW_CNT > 20:
        information(
            None,
            'You have too many Isotope Evolution windows open. Close some before proceeding'
        )
        return

    # Seed x-limits: with the evolution shown, time starts at 0.
    if not show_evo:
        xmi = Inf
        xma = -Inf
    else:
        xmi, xma = 0, -Inf

    if ncols > 1:
        isotopes = sort_isotopes(isotopes, reverse=True,
                                 key=attrgetter('name'))

        def reorder(l, n):
            # Re-sequence a flat list into column-major order for an
            # n-row grid: chunk into rows, then interleave.
            l = [l[i:i + n] for i in range(0, len(l), n)]
            nl = []
            for ri in range(len(l[0])):
                for col in l:
                    try:
                        nl.append(col[ri])
                    except IndexError:
                        # Ragged last chunk: skip missing cells.
                        pass
            return nl

        nrows = ceil(len(isotopes) / ncols)
        isotopes = reorder(isotopes, nrows)
        g = ColumnStackedRegressionGraph(resizable=True, ncols=ncols,
                                         nrows=nrows,
                                         container_dict={
                                             'padding_top': 15 * nrows,
                                             'spacing': (0, 15),
                                             'padding_bottom': 40
                                         })
        resizable = 'hv'
    else:
        resizable = 'h'
        isotopes = sort_isotopes(isotopes, reverse=False,
                                 key=attrgetter('name'))
        g = StackedRegressionGraph(resizable=True,
                                   container_dict={'spacing': 15})
        # g.plotcontainer.spacing = 10

    # Cascade new windows so they don't stack exactly on top of each other.
    g.window_height = min(275 * len(isotopes), 800)
    g.window_x = OX + XOFFSET * WINDOW_CNT
    g.window_y = OY + YOFFSET * WINDOW_CNT

    for i, iso in enumerate(isotopes):
        # y-limits are tracked per plot; x-limits are shared across plots.
        ymi, yma = Inf, -Inf
        p = g.new_plot(padding=[80, 10, 10, 40], resizable=resizable)
        g.add_limit_tool(p, 'x')
        g.add_limit_tool(p, 'y')
        g.add_axis_tool(p, p.x_axis)
        g.add_axis_tool(p, p.y_axis)
        if show_statistics:
            g.add_statistics(i)

        p.y_axis.title_spacing = 50
        if show_equilibration:
            sniff = iso.sniff
            # Only plot the sniff when it actually has data points.
            if sniff.xs.shape[0]:
                g.new_series(sniff.offset_xs, sniff.ys, type='scatter',
                             fit=None, color='red')
                ymi, yma = min_max(ymi, yma, sniff.ys)
                xmi, xma = min_max(xmi, xma, sniff.offset_xs)

        if show_evo:
            if iso.fit is None:
                # Default to a linear fit when none was recorded.
                iso.fit = 'linear'
            g.new_series(iso.offset_xs, iso.ys, fit=iso.efit,
                         truncate=iso.truncate,
                         filter_outliers_dict=iso.filter_outliers_dict,
                         color='black')
            g.set_regressor(iso.regressor, i)
            xmi, xma = min_max(xmi, xma, iso.offset_xs)
            if not scale_to_equilibration:
                ymi, yma = min_max(ymi, yma, iso.ys)

        if show_baseline:
            baseline = iso.baseline
            g.new_series(baseline.offset_xs, baseline.ys, type='scatter',
                         fit=baseline.efit,
                         filter_outliers_dict=baseline.filter_outliers_dict,
                         color='blue')
            xmi, xma = min_max(xmi, xma, baseline.offset_xs)
            if not scale_to_equilibration:
                ymi, yma = min_max(ymi, yma, baseline.ys)

        xpad = '0.025,0.05'
        ypad = '0.05'
        if scale_to_equilibration:
            # Derive y-limits from the fit itself rather than the data:
            # evaluate the fitted polynomial at t=0 and at the right edge.
            ypad = None
            r = (yma - ymi) * 0.02
            # ymi = yma - r
            fit = iso.fit
            if fit != 'average':
                fit, _ = convert_fit(iso.fit)
                fy = polyval(polyfit(iso.offset_xs, iso.ys, fit), 0)
                if ymi > fy:
                    ymi = fy - r
                fy = polyval(polyfit(iso.offset_xs, iso.ys, fit), xma)
                if fy > yma:
                    yma = fy
                elif fy < ymi:
                    ymi = fy - r
                # yma += r

        g.set_x_limits(min_=xmi, max_=xma, pad=xpad)
        g.set_y_limits(min_=ymi, max_=yma, pad=ypad, plotid=i)
        g.set_x_title('Time (s)', plotid=i)
        g.set_y_title('{} ({})'.format(iso.name, iso.units), plotid=i)

    g.refresh()
    g.window_title = '{} {}'.format(
        record_id, ','.join([i.name for i in reversed(isotopes)]))
    return g
def _regress(self, plot, scatter, line):
    """Fit the scatter's data and push the prediction onto ``line``.

    Selects a regressor from the converted fit (polynomial degree 1-4,
    'exponential', least-squares tuple, custom BaseRegressor, or mean
    fallback), predicts over the visible x-range (optionally clipped by
    ``line.regression_bounds``) and writes the prediction, error
    envelope and filter bounds onto the line plot when it supports them.

    :param plot: chaco plot supplying ``index_range`` for the x bounds
    :param scatter: scatter plot carrying ``fit`` and the data to regress
    :param line: companion line plot to update (may be None)
    :return: the regressor used, or None when the fit cannot be converted
    """
    fit, err = convert_fit(scatter.fit)
    if fit is None:
        return

    # Reuse an existing regressor attached to the line when available.
    r = None
    if line and hasattr(line, 'regressor'):
        r = line.regressor

    if fit in [1, 2, 3, 4]:
        r = self._poly_regress(scatter, r, fit)
    elif fit == 'exponential':
        r = self._exponential_regress(scatter, r, fit)
    elif isinstance(fit, tuple):
        r = self._least_square_regress(scatter, r, fit)
    elif isinstance(fit, BaseRegressor):
        r = self._custom_regress(scatter, r, fit)
    else:
        r = self._mean_regress(scatter, r, fit)

    if r:
        r.error_calc_type = err
        if line:
            plow = plot.index_range._low_value
            phigh = plot.index_range._high_value
            if hasattr(line, 'regression_bounds') and line.regression_bounds:
                # Bounds are (low, high, pin_first, pin_last); a pinned
                # edge may still be stretched to cover the visible range.
                low, high, first, last = line.regression_bounds
                if first:
                    low = min(low, plow)
                elif last:
                    high = max(high, phigh)
            else:
                low, high = plow, phigh

            fx = linspace(low, high, 100)
            fy = r.predict(fx)
            line.regressor = r
            try:
                line.index.set_data(fx)
                line.value.set_data(fy)
            except BaseException as e:
                # BUGFIX: message previously read 'Regerssion Exception'
                print('Regression Exception, {}'.format(e))
                return

            if hasattr(line, 'error_envelope'):
                ci = r.calculate_error_envelope(fx, fy)
                if ci is not None:
                    ly, uy = ci
                else:
                    # Degenerate envelope: collapse onto the prediction.
                    ly, uy = fy, fy
                line.error_envelope.lower = ly
                line.error_envelope.upper = uy
                line.error_envelope.invalidate()

            if hasattr(line, 'filter_bounds'):
                ci = r.calculate_filter_bounds(fy)
                if ci is not None:
                    ly, uy = ci
                else:
                    ly, uy = fy, fy
                line.filter_bounds.lower = ly
                line.filter_bounds.upper = uy
                line.filter_bounds.invalidate()
    return r
def test_convert_linear(self):
    """'linear' converts to degree 1 with no error type."""
    ofit, err = convert_fit('linear')
    self.assertEqual(ofit, 1)
    self.assertEqual(err, None)
def test_convert_parabolic(self):
    """'parabolic_CI' converts to degree 2 with the 'CI' error type."""
    ofit, err = convert_fit('parabolic_CI')
    self.assertEqual(ofit, 2)
    self.assertEqual(err, 'CI')
def test_convert_average(self):
    """'average' passes through with a default 'SD' error type."""
    ofit, err = convert_fit('average')
    self.assertEqual(ofit, 'average')
    self.assertEqual(err, 'SD')
def test_convert_linear_fail(self):
    """A misspelled fit name ('linaer') converts to (None, None)."""
    ofit, err = convert_fit('linaer')
    self.assertIsNone(ofit)
    self.assertIsNone(err)
def test_convert_average_werr(self):
    """'linear_SEM' converts to degree 1 with the 'SEM' error type."""
    ofit, err = convert_fit('linear_SEM')
    self.assertEqual(ofit, 1)
    self.assertEqual(err, 'SEM')