def baserate_uncertainty(database, label, general_label):
    """Compute BaseRate_Hz_error for each run/AD and store it in accidental_subtraction."""
    with common.get_db(database) as conn:
        cursor = conn.cursor()
        cursor.execute(
            '''
            SELECT
                Hall,
                DetNo,
                RunNo,
                acc.Label,
                singles.Rate_Hz,
                singles.IsolatedEventCount,
                singles.IsolatedEventRate_Hz,
                acc.BaseRate_Hz
            FROM
                singles_rates AS singles
            INNER JOIN
                accidental_subtraction AS acc
            USING (
                RunNo,
                DetNo
            )
            NATURAL JOIN
                runs
            WHERE
                acc.Label = ?
                AND singles.Label = ?
            ''',
            (label, general_label),
        )
        inputs = np.array(cursor.fetchall())
    # Annoying casts because of numpy's homogeneous array types
    halls = inputs[:, 0].astype(int)
    dets = inputs[:, 1].astype(int)
    runs = inputs[:, 2].astype(int)
    labels = inputs[:, 3].astype(str)
    singles_rate = inputs[:, 4].astype(float)
    n_1fold = inputs[:, 5].astype(float)
    r_1fold = inputs[:, 6].astype(float)
    r_ss = inputs[:, 7].astype(float)
    base_rate_error = (
        r_ss * (ERROR_COEFFICIENT * r_1fold / singles_rate + 1) / np.sqrt(n_1fold)
    )
    result_rows = np.vstack((base_rate_error, runs, dets, labels)).T
    with common.get_db(database) as conn:
        cursor = conn.cursor()
        cursor.executemany(
            '''
            UPDATE
                accidental_subtraction
            SET
                BaseRate_Hz_error = ?
            WHERE
                RunNo = ?
                AND DetNo = ?
                AND Label = ?
            ''',
            result_rows,
        )


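# The assignment above implements
#     delta_R_ss = R_ss * (ERROR_COEFFICIENT * r_1fold / R_s + 1) / sqrt(N_1fold).
# A minimal numeric sketch of that formula with hypothetical inputs (the
# coefficient and rates below are placeholders, not values from the analysis):
def _example_baserate_error():
    error_coefficient = 1.0  # stand-in for the module-level ERROR_COEFFICIENT
    singles_rate = 20.0      # R_s, underlying singles rate [Hz]
    r_1fold = 18.0           # isolated-event (1-fold) rate [Hz]
    n_1fold = 1.2e6          # isolated-event count
    r_ss = 0.05              # accidental base rate R_ss [Hz]
    return r_ss * (error_coefficient * r_1fold / singles_rate + 1) / np.sqrt(n_1fold)

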
def daq_livetime_s(database, label):
    """Return an array of DAQ livetimes ordered from EH1-AD1 to EH3-AD4."""
    with common.get_db(database) as conn:
        cursor = conn.cursor()
        # Total DAQ Livetime
        cursor.execute(
            '''
            SELECT
                SUM(Livetime_ns/Efficiency/1e9)
            FROM
                muon_rates
            NATURAL JOIN
                runs
            WHERE
                Label = ?
            GROUP BY
                Hall, DetNo
            ORDER BY
                Hall, DetNo
            ''',
            (label,),
        )
        livetimes_s = np.array(cursor.fetchall()).reshape(-1)
    return livetimes_s


def singles_rate_Hz(database, label):
    """Return an array of singles rates from EH1-AD1 to EH3-AD4."""
    with common.get_db(database) as conn:
        cursor = conn.cursor()
        cursor.execute(
            '''
            SELECT
                SUM(singles.Rate_Hz * Livetime_ns/Efficiency)
                    / SUM(Livetime_ns/Efficiency)
            FROM
                singles_rates AS singles
            NATURAL JOIN
                runs
            INNER JOIN
                muon_rates
            USING (
                RunNo, DetNo, Label
            )
            WHERE
                Label = ?
            GROUP BY
                Hall, DetNo
            ORDER BY
                Hall, DetNo
            ''',
            (label,),
        )
        singles_rates = np.array(cursor.fetchall()).reshape(-1)
    return singles_rates


def counts(database, label, general_label):
    """Return an array of predicted accidentals counts."""
    with common.get_db(database) as conn:
        cursor = conn.cursor()
        cursor.execute(
            '''
            SELECT
                SUM(acc.BaseRate_Hz * Total_Acc_Eff * Livetime_ns/1e9)
            FROM
                accidental_subtraction AS acc
            NATURAL JOIN
                runs
            INNER JOIN
                muon_rates AS mu
            USING (
                RunNo, DetNo
            )
            WHERE
                acc.Label = ?
                AND mu.Label = ?
            GROUP BY
                Hall, DetNo
            ORDER BY
                Hall, DetNo
            ''',
            (label, general_label),
        )
        counts = np.array(cursor.fetchall()).reshape(-1)
    return counts


def target_protons(database, label):
    """Return a 2D array of target protons (x1e25) and uncertainties
    from EH1-AD1 to EH3-AD4."""
    with common.get_db(database) as conn:
        cursor = conn.cursor()
        cursor.execute(
            '''
            SELECT
                GdLS_kg, GdLS_err_kg,
                LS_kg, LS_err_kg,
                Acrylic_kg, Acrylic_err_kg
            FROM
                target_mass
            ORDER BY
                Hall, DetNo
            '''
        )
        masses = np.array(cursor.fetchall())
        cursor.execute(
            '''
            SELECT
                GdLS_density, GdLS_err,
                LS_density, LS_err,
                Acrylic_density, Acrylic_err
            FROM
                proton_densities
            WHERE
                Source = ?
            ''',
            (label,),
        )
        densities = np.array(cursor.fetchall()).reshape(-1)
    num_protons_GdLS = masses[:, 0] * densities[0]
    num_protons_LS = masses[:, 2] * densities[2]
    num_protons_acrylic = masses[:, 4] * densities[4]
    num_protons_total = num_protons_GdLS + num_protons_LS + num_protons_acrylic
    err_GdLS = num_protons_GdLS * np.sqrt(
        (masses[:, 1]/masses[:, 0])**2 + (densities[1]/densities[0])**2
    )
    err_LS = num_protons_LS * np.sqrt(
        (masses[:, 3]/masses[:, 2])**2 + (densities[3]/densities[2])**2
    )
    err_acrylic = num_protons_acrylic * np.sqrt(
        (masses[:, 5]/masses[:, 4])**2 + (densities[5]/densities[4])**2
    )
    err_total = np.sqrt(err_GdLS**2 + err_LS**2 + err_acrylic**2)
    return np.stack((num_protons_total, err_total), axis=-1)


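# Each material's proton count is a product N = mass * density, so its relative
# uncertainty is the quadrature sum of the two relative uncertainties, and the
# per-material absolute errors are then combined in quadrature. A sketch with
# hypothetical numbers (all values below are placeholders):
def _example_target_proton_error():
    mass, mass_err = 20000.0, 50.0      # kg
    density, density_err = 7.0, 0.03    # protons (x1e25) per kg, hypothetical
    n_protons = mass * density
    err = n_protons * np.sqrt((mass_err / mass)**2 + (density_err / density)**2)
    return n_protons, err

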
def count_errors(database, label, general_label):
    """Return the statistical error on the number of accidentals in each AD.

    Return value is an array sorted from EH1-AD1 to EH3-AD4.
    """
    with common.get_db(database) as conn:
        cursor = conn.cursor()
        cursor.execute(
            '''
            SELECT
                -- easier to compute variance in SQL then take sqrt later
                SUM(
                    -- add in quadrature
                    BaseRate_Hz * Total_Acc_Eff * Livetime_ns/1e9 *  -- num acc squared
                    BaseRate_Hz * Total_Acc_Eff * Livetime_ns/1e9 * (
                        -- Add relative errors of R_ss and eps_total in quadrature
                        -- (R_ss = BaseRate_Hz, eps_total = Total_Acc_Eff)
                        BaseRate_Hz_error * BaseRate_Hz_error
                            / (BaseRate_Hz * BaseRate_Hz)
                        + Total_Acc_Eff_err * Total_Acc_Eff_err
                            / (Total_Acc_Eff * Total_Acc_Eff)
                    )
                )
            FROM
                accidental_subtraction AS acc
            INNER JOIN
                muon_rates AS mu
            USING (
                RunNo, DetNo
            )
            NATURAL JOIN
                runs
            WHERE
                acc.Label = ?
                AND mu.Label = ?
            GROUP BY
                Hall, DetNo
            ORDER BY
                Hall, DetNo
            ''',
            (label, general_label),
        )
        result = np.sqrt(np.array(cursor.fetchall()).reshape(-1))
    return result


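# The SQL above accumulates, per run, (N_acc)^2 * [(dR_ss/R_ss)^2 + (d_eps/eps)^2],
# i.e. the variance of each run's predicted accidental count, and the Python code
# takes the square root of the per-AD sum. An equivalent numpy sketch using
# hypothetical per-run arrays (all values below are placeholders):
def _example_count_error():
    base_rate = np.array([0.05, 0.06])            # R_ss per run [Hz]
    base_rate_err = np.array([0.001, 0.001])
    total_acc_eff = np.array([0.97, 0.96])        # eps_total per run
    total_acc_eff_err = np.array([0.005, 0.005])
    livetime_s = np.array([60000.0, 55000.0])
    n_acc = base_rate * total_acc_eff * livetime_s
    rel_var = (base_rate_err / base_rate)**2 + (total_acc_eff_err / total_acc_eff)**2
    return np.sqrt(np.sum(n_acc**2 * rel_var))

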
def load_acc_counts_to_db(read_database, label, general_label, bg_database, bg_label):
    """Compute the counts and errors and load them into the specified db."""
    acc_counts = counts(read_database, label, general_label)
    acc_errors = count_errors(read_database, label, general_label)
    with common.get_db(bg_database) as conn:
        cursor = conn.cursor()
        for halldet, count, error in zip(common.all_ads, acc_counts, acc_errors):
            row = (bg_label, halldet[0], halldet[1], 'accidental', count, error)
            cursor.execute(
                '''
                INSERT INTO
                    bg_counts
                VALUES
                    (?, ?, ?, ?, ?, ?)
                ''',
                row,
            )
    return


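# Hypothetical end-to-end usage of the accidentals chain. The database paths
# and labels below are placeholders, not names from the real analysis:
def _example_load_accidentals():
    read_database = '/path/to/parsed.db'
    bg_database = '/path/to/backgrounds.db'
    # First populate BaseRate_Hz_error, then compute counts/errors and store them.
    baserate_uncertainty(read_database, 'acc_label', 'singles_label')
    load_acc_counts_to_db(
        read_database,
        'acc_label',    # accidental_subtraction Label
        'muon_label',   # muon_rates Label
        bg_database,
        'bg_label',     # Label written to bg_counts
    )

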
def muon_total_counts(database, label):
    """Return an array of muon counts from EH1-AD1 to EH3-AD4."""
    with common.get_db(database) as conn:
        cursor = conn.cursor()
        cursor.execute(
            '''
            SELECT
                SUM(`Count`)
            FROM
                muon_rates
            NATURAL JOIN
                runs
            WHERE
                Label = ?
            GROUP BY
                Hall, DetNo
            ORDER BY
                Hall, DetNo
            ''',
            (label,),
        )
        muon_counts = np.array(cursor.fetchall()).reshape(-1)
    return muon_counts


def muon_efficiency(database, label):
    """Return an array of muon efficiencies from EH1-AD1 to EH3-AD4."""
    with common.get_db(database) as conn:
        cursor = conn.cursor()
        cursor.execute(
            '''
            SELECT
                SUM(Livetime_ns)/SUM(Livetime_ns/Efficiency)
            FROM
                muon_rates
            NATURAL JOIN
                runs
            WHERE
                Label = ?
            GROUP BY
                Hall, DetNo
            ORDER BY
                Hall, DetNo
            ''',
            (label,),
        )
        muon_effs = np.array(cursor.fetchall()).reshape(-1)
    return muon_effs


def coincidences_counts(database, label):
    """Return an array of coincidence counts from EH1-AD1 to EH3-AD4."""
    with common.get_db(database) as conn:
        cursor = conn.cursor()
        cursor.execute(
            '''
            SELECT
                SUM(NumCoincidences)
            FROM
                num_coincidences_by_run
            NATURAL JOIN
                runs
            WHERE
                Label = ?
            GROUP BY
                Hall, DetNo
            ORDER BY
                Hall, DetNo
            ''',
            (label,),
        )
        counts = np.array(cursor.fetchall()).reshape(-1)
    return counts


def load_result(database, id_or_description):
    """Load saved results into (FitParams, dict of other attributes)."""
    use_id = isinstance(id_or_description, int)
    with common.get_db(database) as conn:
        conn.row_factory = sqlite3.Row
        cursor = conn.cursor()
        cursor.execute(
            f'''
            SELECT
                *
            FROM
                fits
            WHERE
                {"Id" if use_id else "Description"} = ?
            LIMIT 1
            ''',
            (id_or_description,),
        )
        fit_info = dict(cursor.fetchone())
        fit_info['IsRateOnly'] = bool(fit_info['IsRateOnly'])
        fit_info['IsAvgNear'] = bool(fit_info['IsAvgNear'])
        fit_info['Hierarchy'] = bool(fit_info['Hierarchy'])
        fit_id = fit_info['Id']
        fit_theta13 = 0.5 * np.arcsin(np.sqrt(fit_info['FitSinSqT13']))
        cursor.execute(
            '''
            SELECT
                ParamName, ParamIndex, ParamValue
            FROM
                pulls
            WHERE
                FitId = ?
            ORDER BY
                ParamName, ParamIndex
            ''',
            (fit_id,),
        )
        rows = cursor.fetchall()
        param_list = [None] * (2 + pred.FitParams.num_pulls)  # 2 for theta13, dm2
        index_map = pred.FitParams.index_map()
        param_list[index_map['theta13']] = fit_theta13
        param_list[index_map['m2_ee']] = fit_info['FitDM2ee']
        for row in rows:
            index = index_map[row['ParamName']]
            if isinstance(index, int):
                param_list[index] = row['ParamValue']
            else:
                offset = row['ParamIndex']
                real_index = index.start + offset
                param_list[real_index] = row['ParamValue']
        fit_params = pred.FitParams.from_list(param_list)
        cursor.execute(
            '''
            SELECT
                Contents
            FROM
                fit_configs
            WHERE
                Id = ?
            ''',
            (fit_info['FitConfigId'],),
        )
        fit_config_str = cursor.fetchone()[0]
    constants = pred.load_constants(fit_config_str, json_str=True)
    return fit_params, constants, fit_info


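# Hypothetical usage: results can be looked up either by the integer fits.Id
# or by the Description string (the path and description are placeholders):
def _example_load_result():
    fit_params, constants, fit_info = load_result('/path/to/fits.db', 'nominal fit')
    by_id = load_result('/path/to/fits.db', 1)  # integer -> lookup by Id
    return fit_params, constants, fit_info, by_id

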
def save_result(
    database,
    description,
    constants,
    fit_params,
    rate_only,
    avg_near,
    fit_config_filename,
    sin2_error_plus=None,
    sin2_error_minus=None,
    m2_ee_error_plus=None,
    m2_ee_error_minus=None,
):
    """Save the specified results to the database.

    The FitConstants object is not saved to the db.
    It is only used to compute the chi-squareds.
    The fit configuration used to generate the constants
    is saved via the fit config file.
    """
    chi2_poisson = chi_square(
        constants, fit_params, rate_only=rate_only, avg_near=avg_near
    )
    chi2_pearson = chi_square(
        constants, fit_params, rate_only=rate_only, avg_near=avg_near,
        variant='pearson'
    )
    fits_db_row = (
        description,
        int(rate_only),
        int(avg_near),
        fit_params.sin2_2theta13,
        fit_params.m2_ee,
        chi2_poisson,
        chi2_pearson,
        sin2_error_plus,
        sin2_error_minus,
        m2_ee_error_plus,
        m2_ee_error_minus,
        constants.input_osc_params.theta12,
        constants.input_osc_params.m2_21,
        int(constants.input_osc_params.hierarchy),
        constants.input_osc_params.m2_ee_conversion,
    )
    params_list = fit_params.to_list()
    params_rows = []
    for name, index in fit_params.index_map().items():
        if name in ('theta13', 'm2_ee'):
            # The best-fit values get saved separately
            continue
        if isinstance(index, int):
            # Only 1 value, not an array
            param_value = params_list[index]
            params_rows.append((name, None, param_value))
        else:
            # Multiple params in a list, index is a slice
            param_values = params_list[index]
            for param_index, param_value in enumerate(param_values):
                params_rows.append((name, param_index, param_value))
    with open(fit_config_filename, 'r') as fit_config_file:
        fit_config_contents = fit_config_file.read()
    with common.get_db(database) as conn:
        cursor = conn.cursor()
        cursor.execute(
            '''
            INSERT OR IGNORE INTO
                fit_configs (Contents)
            VALUES
                (?)
            ''',
            (fit_config_contents,),
        )
        cursor.execute(
            '''
            SELECT
                Id
            FROM
                fit_configs
            WHERE
                Contents = ?
            ''',
            (fit_config_contents,),
        )
        fit_config_id, = cursor.fetchone()
        cursor.execute(
            f'''
            INSERT INTO fits (
                Description,
                IsRateOnly,
                IsAvgNear,
                FitSinSqT13,
                FitDM2ee,
                ChiSquareFit,
                ChiSquareGof,
                SinSqT13_ErrorPlus,
                SinSqT13_ErrorMinus,
                DM2ee_ErrorPlus,
                DM2ee_ErrorMinus,
                Theta12,
                DM2_21,
                Hierarchy,
                DM2ee_conversion,
                FitConfigId
            )
            VALUES ({", ".join("?" * 16)});  -- 16 parameters
            ''',
            fits_db_row + (fit_config_id,),
        )
        fit_id = cursor.lastrowid
        # Update params_rows to include the fit_id
        real_params_rows = []
        for row in params_rows:
            real_params_rows.append((fit_id, *row))
        cursor.executemany(
            '''
            INSERT INTO pulls VALUES (?, ?, ?, ?)
            ''',
            real_params_rows,
        )
    return


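# Hypothetical round trip: results written with save_result can be read back
# with load_result by Description (all names below are placeholders):
def _example_save_and_reload(constants, fit_params):
    save_result(
        '/path/to/fits.db',
        'example fit',
        constants,
        fit_params,
        rate_only=False,
        avg_near=False,
        fit_config_filename='/path/to/fit_config.json',
    )
    return load_result('/path/to/fits.db', 'example fit')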