def get_effective_stats_grid(base_couplings, base_events_list, kv_val, k2v_val_range, kl_val_range):
    """Scan (k2v, kl) and compute the effective sample size of the combined weights.

    At each grid point the basis samples' per-event weights are scaled by the
    linear-combination coefficients, and the Kish effective statistics
    (sum w)^2 / sum(w^2) of the pooled weights is recorded.

    Args:
        base_couplings: coupling tuples defining the basis samples.
        base_events_list: per-sample event arrays; index 1 of each entry holds
            the per-event weights (index 0 presumably the kinematic values —
            confirm against caller).
        kv_val: fixed kappa_V value for the whole scan.
        k2v_val_range: 1D array of kappa_2V scan values (first grid axis).
        kl_val_range: 1D array of kappa_lambda scan values (second grid axis).

    Returns:
        2D numpy array indexed [k2v_i][kl_j] of effective statistics.
    """
    reweight_vector_function = get_amplitude_function(base_couplings, as_scalar=False)
    base_event_weights = [events[1] for events in base_events_list]
    # NOTE(review): the original also computed the SM-basis effective stats
    # for a normalization that was commented out; that dead code is removed.
    effective_stats_grid = numpy.zeros((len(k2v_val_range), len(kl_val_range)))
    for k2v_i, k2v_val in enumerate(k2v_val_range):
        for kl_j, kl_val in enumerate(kl_val_range):
            # Combination coefficients (one per basis sample) at this point.
            weight_vector = reweight_vector_function(k2v_val, kl_val, kv_val)[0]
            weighted_events = [
                events * w for events, w in zip(base_event_weights, weight_vector)
            ]
            events_sum_square = sum(events.sum() for events in weighted_events)**2
            square_events_sum = sum((events**2).sum() for events in weighted_events)
            effective_stats_grid[k2v_i][kl_j] = events_sum_square / square_events_sum
    return effective_stats_grid
def get_theory_effective_stats_map(couplings, kv_val, k2v_val_range, kl_val_range, grid=False, mask=None):
    """Vectorized effective-statistics scan using theory cross-sections.

    The per-basis theory cross-sections are scaled by the combination
    coefficients over a (k2v, kl) meshgrid, and the effective statistics
    (sum x)^2 / sum(x^2) is evaluated point-wise.

    Args:
        couplings: coupling tuples defining the basis samples.
        kv_val: fixed kappa_V value.
        k2v_val_range, kl_val_range: 1D arrays of scan values.
        grid: if True return the 2D grid; otherwise its area-weighted integral.
        mask: optional callable mask(k2v_grid, kl_grid) -> array multiplied
            onto the grid before returning/integrating.

    Returns:
        2D numpy array (grid=True) or a scalar integral (grid=False).
    """
    # NOTE(review): removed a debug numpy.set_printoptions(...) call that
    # mutated global print settings on every invocation.
    theory_xsec_function = get_theory_xsec_function()
    xsec_array = numpy.array([theory_xsec_function(c) for c in couplings])
    reweight_vector_function = get_amplitude_function(couplings, as_scalar=False)
    k2v_grid, kl_grid = numpy.meshgrid(k2v_val_range, kl_val_range)
    multiplier_grid_vector = reweight_vector_function(k2v_grid, kl_grid, kv_val)[0]
    # Broadcast each basis cross-section over its coefficient grid.
    scaled_xsecs = multiplier_grid_vector * xsec_array[:, None, None]
    effective_stats_grid = scaled_xsecs.sum(axis=0)**2 / (scaled_xsecs**2).sum(axis=0)
    if mask is not None:
        effective_stats_grid = effective_stats_grid * mask(k2v_grid, kl_grid)
    if grid:
        return effective_stats_grid
    # Integrate assuming uniform grid spacing in both axes.
    grid_pixel_area = (k2v_val_range[1] - k2v_val_range[0]) * (kl_val_range[1] - kl_val_range[0])
    return effective_stats_grid.sum() * grid_pixel_area
def get_reco_solidarity_map(couplings, weights, kv_val, k2v_val_range, kl_val_range, grid=False):
    """Compute the 'solidarity' metric over a (k2v, kl) scan from reco yields.

    Each basis sample's total yield is scaled by its combination coefficient;
    solidarity is the coefficient-weighted sum divided by the standard
    deviation of the absolute scaled yields, point-wise on the grid.

    Args:
        couplings: coupling tuples defining the basis samples.
        weights: per-sample histogram weight arrays (summed to total yields).
        kv_val: fixed kappa_V value.
        k2v_val_range, kl_val_range: 1D arrays of scan values.
        grid: if True return the 2D grid; otherwise its area-weighted integral.
    """
    per_basis_yields = [w.sum() for w in weights]
    amplitude_vector = get_amplitude_function(couplings, as_scalar=False)
    k2v_mesh, kl_mesh = numpy.meshgrid(k2v_val_range, kl_val_range)
    coefficient_grids = amplitude_vector(k2v_mesh, kl_mesh, kv_val)[0]
    scaled = numpy.array([
        coeff_grid * yield_total
        for coeff_grid, yield_total in zip(coefficient_grids, per_basis_yields)
    ])
    solidarity = scaled.sum(axis=0) / abs(scaled).std(axis=0)
    if grid:
        return solidarity
    pixel_area = (k2v_val_range[1] - k2v_val_range[0]) * (kl_val_range[1] - kl_val_range[0])
    return solidarity.sum() * pixel_area
def get_Nweight_sum(couplings, weights, kv_val, k2v_val_range, kl_val_range, grid=False, mask=None):
    """Count negatively-weighted histogram bins across a (k2v, kl) scan.

    The basis histograms are linearly combined at every grid point; bins where
    the combined weight goes negative are counted.

    Args:
        couplings: coupling tuples defining the basis samples.
        weights: per-sample histogram weight arrays (same binning).
        kv_val: fixed kappa_V value.
        k2v_val_range, kl_val_range: 1D arrays of scan values.
        grid: if True return per-point negative-bin counts; otherwise the
            area-weighted integral of the counts.
        mask: optional callable mask(k2v_grid, kl_grid) applied before counting.
    """
    reweight_vector_function = combination_utils.get_amplitude_function(couplings, as_scalar=False, base_equations=combination_utils.full_scan_terms)
    k2v_grid, kl_grid = numpy.meshgrid(k2v_val_range, kl_val_range)
    multiplier_grid_vector = reweight_vector_function(k2v_grid, kl_grid, kv_val)[0]
    # Broadcast each coefficient grid over the histogram-bin axis (last axis).
    combined_weights = sum(
        multiplier_grid[..., None] * w
        for multiplier_grid, w in zip(multiplier_grid_vector, weights)
    )
    negative_boolean_grid = combined_weights < 0
    if mask is not None:
        negative_boolean_grid = negative_boolean_grid * mask(k2v_grid, kl_grid)[..., None]
    if grid:
        # Number of negative bins at each scan point.
        return negative_boolean_grid.sum(axis=2)
    grid_pixel_area = (k2v_val_range[1] - k2v_val_range[0]) * (kl_val_range[1] - kl_val_range[0])
    return negative_boolean_grid.sum() * grid_pixel_area
def get_Nweight_sum1D(couplings, weights, k2v_vals, kl_vals, kv_vals, vector=False, base_equations=None, which_coupling=None):
    """1D-scan version of get_Nweight_sum: count negative combined-weight bins.

    Args:
        couplings: coupling tuples defining the basis samples.
        weights: per-sample histogram weight arrays (same binning).
        k2v_vals, kl_vals, kv_vals: scan values; the scanned coupling is an
            array, the held couplings are scalars.
        vector: if True return per-scan-point negative-bin counts.
        base_equations: passed through to get_amplitude_function.
        which_coupling: 'k2v' or 'kl'; selects which array sets the step size
            for the integral.

    Returns:
        1D array of counts (vector=True) or a step-weighted integral.
    """
    reweight_vector_function = combination_utils.get_amplitude_function(couplings, as_scalar=False, base_equations=base_equations)
    multiplier_array_vector = reweight_vector_function(k2v_vals, kl_vals, kv_vals)[0]
    combined_weights = sum(
        multiplier_array[..., None] * w
        for multiplier_array, w in zip(multiplier_array_vector, weights)
    )
    negative_boolean_vector = combined_weights < 0
    if vector:
        # BUGFIX: the original left debug print(...) and exit() calls here,
        # which killed the process before the return could execute.
        return negative_boolean_vector.sum(axis=1)
    if which_coupling == 'k2v':
        delta_variation = k2v_vals[1] - k2v_vals[0]
    else:
        delta_variation = kl_vals[1] - kl_vals[0]
    return negative_boolean_vector.sum() * delta_variation
def old_get_theory_effective_stats_map(couplings, kv_val, k2v_val_range, kl_val_range, mask=None):
    """Legacy loop-based effective-statistics scan (superseded by the
    vectorized get_theory_effective_stats_map; kept for reference).

    Args:
        couplings: coupling tuples defining the basis samples.
        kv_val: fixed kappa_V value.
        k2v_val_range, kl_val_range: 1D arrays of scan values.
        mask: optional callable mask(k2v, kl) -> scalar multiplied onto each
            grid point.

    Returns:
        2D numpy array indexed [k2v_i][kl_j] of effective statistics.
    """
    reweight_vector = get_amplitude_function(couplings, as_scalar=False)
    theory_xsec_function = get_theory_xsec_function()
    # Explicit array so the per-point coefficient product is elementwise.
    xsec_vector = numpy.array([theory_xsec_function(c) for c in couplings])
    weight_contribution_grid = numpy.zeros((len(k2v_val_range), len(kl_val_range)))
    for k2v_i, k2v_val in enumerate(k2v_val_range):
        for kl_j, kl_val in enumerate(kl_val_range):
            coupling_parameters = (k2v_val, kl_val, kv_val)
            vector = reweight_vector(*coupling_parameters)[0]
            weighted_xsec = xsec_vector * vector
            effective_stats = weighted_xsec.sum()**2 / (weighted_xsec**2).sum()
            if mask is not None:
                effective_stats *= mask(k2v_val, kl_val)
            weight_contribution_grid[k2v_i][kl_j] = effective_stats
    return weight_contribution_grid
def validate_reco_method(basis_parameters, verification_parameters, base_equations=combination_utils.full_scan_terms, name_suffix='', title_suffix=''):
    """Plot linearly-combined reco m_HH against directly-simulated samples.

    For every verification coupling point, the basis histograms are combined
    via the amplitude function and overlaid with the histogram built from that
    point's own events.

    Args:
        basis_parameters: coupling tuples of the combination basis.
        verification_parameters: coupling tuples to validate against.
        base_equations: equation set for the amplitude function.
        name_suffix, title_suffix: appended to the plot name/title.
    """
    mhh_edges = numpy.linspace(200, 2000, 55)
    amplitude_vector = get_amplitude_function(basis_parameters, as_scalar=False, base_equations=base_equations)
    couplings_to_files = fileio_utils.read_coupling_file()
    basis_events = fileio_utils.get_events(basis_parameters, couplings_to_files)
    check_events = fileio_utils.get_events(verification_parameters, couplings_to_files)
    histogram_pairs = [
        fileio_utils.retrieve_reco_weights(mhh_edges, events)
        for events in basis_events
    ]
    basis_weights, basis_errors = numpy.array(list(zip(*histogram_pairs)))
    for events, couplings in zip(check_events, verification_parameters):
        check_weights, check_errors = fileio_utils.retrieve_reco_weights(mhh_edges, events)
        lin_weights, lin_errors = reco_reweight(amplitude_vector, couplings, basis_weights, basis_errors)
        plot_histogram('reco_mHH' + name_suffix,
                       'NNT-Based Linear Combination:\n$m_{HH}$' + title_suffix,
                       mhh_edges, couplings,
                       lin_weights, lin_errors,
                       check_weights, check_errors,
                       xlabel='Reconstructed $m_{HH}$ (GeV)')
def get_variance_count_map(couplings, kv_val, k2v_val_range, kl_val_range):
    """Count, per (k2v, kl) point, basis coefficients more than one sample
    standard deviation away from the coefficient mean.

    Args:
        couplings: coupling tuples defining the basis samples.
        kv_val: fixed kappa_V value.
        k2v_val_range, kl_val_range: 1D arrays of scan values.

    Returns:
        2D numpy array indexed [k2v_i][kl_j] of outlier counts.
    """
    amplitude_vector = get_amplitude_function(couplings, as_scalar=False)
    outlier_grid = numpy.zeros((len(k2v_val_range), len(kl_val_range)))
    for i, k2v in enumerate(k2v_val_range):
        for j, kl in enumerate(kl_val_range):
            multipliers = amplitude_vector(k2v, kl, kv_val)[0]
            center = statistics.mean(multipliers)
            spread = statistics.stdev(multipliers)
            outlier_grid[i][j] = sum(1 for m in multipliers if abs(m - center) > spread)
    return outlier_grid
def get_test_map(couplings, kv_val, k2v_val_range, kl_val_range):
    """Experimental metric scan: for each (k2v, kl) point, sum the absolute
    pairwise differences of the coefficient-scaled theory cross-sections and
    normalize by their total.

    Args:
        couplings: coupling tuples defining the basis samples.
        kv_val: fixed kappa_V value.
        k2v_val_range, kl_val_range: 1D arrays of scan values.

    Returns:
        2D numpy array indexed [k2v_i][kl_j] of the metric.
    """
    amplitude_vector = get_amplitude_function(couplings, as_scalar=False)
    xsec_function = get_theory_xsec_function()
    basis_xsecs = [xsec_function(c) for c in couplings]
    metric_grid = numpy.zeros((len(k2v_val_range), len(kl_val_range)))
    for i, k2v in enumerate(k2v_val_range):
        for j, kl in enumerate(kl_val_range):
            multipliers = amplitude_vector(k2v, kl, kv_val)[0]
            # list * ndarray -> elementwise-scaled ndarray
            scaled = basis_xsecs * multipliers
            # All ordered pairs; diagonal terms contribute zero.
            pairwise_spread = sum(abs(a - b) for a in scaled for b in scaled)
            metric_grid[i][j] = pairwise_spread / scaled.sum()
    return metric_grid
def view_reco_method(basis_parameters, view_params):
    """Preview linearly-combined reco m_HH distributions at chosen couplings.

    Unlike validate_reco_method there is no direct-simulation overlay; only
    the combination is drawn for each requested coupling point.

    Args:
        basis_parameters: coupling tuples of the combination basis.
        view_params: coupling tuples at which to draw previews.
    """
    mhh_edges = numpy.linspace(200, 1200, 31)
    amplitude_vector = get_amplitude_function(basis_parameters, as_scalar=False)
    couplings_to_files = fileio_utils.read_coupling_file()
    basis_events = fileio_utils.get_events(basis_parameters, couplings_to_files)
    histogram_pairs = [
        fileio_utils.retrieve_reco_weights(mhh_edges, events)
        for events in basis_events
    ]
    basis_weights, basis_errors = numpy.array(list(zip(*histogram_pairs)))
    for couplings in view_params:
        print(couplings)
        lin_weights, lin_errors = reco_reweight(amplitude_vector, couplings, basis_weights, basis_errors)
        plot_histogram('preview_reco_mHH_new',
                       'NNT-Based Linear Combination:\n$m_{HH}$',
                       mhh_edges, couplings,
                       lin_weights, lin_errors,
                       xlabel='Reconstructed $m_{HH}$ (GeV)')
def optimize_reco(which_coupling):
    """Rank all valid basis combinations for a 1D coupling scan by their
    negative-weight integral, print the ranking, and draw/export the best.

    Args:
        which_coupling: 'k2v' or 'kl'; selects which coupling is scanned
            (the other coupling and kv are held at 1).
    """
    var_edges = numpy.linspace(200, 1200, 31)
    kv_val = 1.0
    num_kappa_bins = 100
    data_files = fileio_utils.read_coupling_file()
    if which_coupling == 'k2v':
        hold_index = 1
        base_equations = combination_utils.k2v_scan_terms
        k2v_vals = numpy.linspace(-2, 4, num_kappa_bins + 1)
        kl_vals = 1
    elif which_coupling == 'kl':
        hold_index = 0
        base_equations = combination_utils.kl_scan_terms
        k2v_vals = 1
        kl_vals = numpy.linspace(-14, 16, num_kappa_bins + 1)
    else:
        print("What are you doing??")
        exit(1)
    # Only samples with kv == 1 and the held coupling == 1 are eligible.
    which_variations = [
        variation for variation in data_files.keys()
        if variation[2] == 1 and variation[hold_index] == 1
    ]
    all_events = fileio_utils.get_events(which_variations, data_files)
    all_histograms = [
        fileio_utils.retrieve_reco_weights(var_edges, events)
        for events in all_events
    ]
    # Wrap all variations up together with their histograms so I can find combinations
    all_variations = list(zip(which_variations, all_histograms))
    print('Histograms loaded, proceeding to integrate Nweight grids...')
    valid_bases = []
    total = 0
    for basis_set in itertools.combinations(all_variations, len(base_equations)):
        # Unwrap each combination
        couplings, histograms = list(zip(*basis_set))
        # Every candidate basis must include the SM point and be invertible.
        if (1.0, 1.0, 1.0) not in couplings:
            continue
        if not combination_utils.is_valid_combination(couplings, base_equations=base_equations):
            continue
        weights, errors = numpy.array(list(zip(*histograms)))
        nWeight_integral = negative_weight_map.get_Nweight_sum1D(
            couplings, weights, k2v_vals, kl_vals, kv_val,
            base_equations=base_equations, which_coupling=which_coupling)
        valid_bases.append((nWeight_integral, couplings, weights))
        total += 1
        if total % 10 == 0:
            print(total)
    print('Integrals computed, sorting and printing...')
    # BUGFIX: sort on the integral only. A plain tuple sort falls back to
    # comparing the numpy weight arrays on ties, which raises
    # "truth value of an array ... is ambiguous".
    valid_bases.sort(key=lambda base: base[0])
    for rank, (integral, couplings, weight) in enumerate(valid_bases):
        print(rank, f'{integral:.9f}', couplings)
    ranks_to_draw = 0, 1, 2
    draw_rankings(ranks_to_draw, valid_bases, which_variations, var_edges,
                  k2v_vals, kl_vals, kv_val, which_coupling, base_equations)
    # Export LaTeX for the two best bases.
    combination_utils.get_amplitude_function(valid_bases[0][1], base_equations=base_equations, name='optimalR0_' + which_coupling, output='tex')
    combination_utils.get_amplitude_function(valid_bases[1][1], base_equations=base_equations, name='optimalR1_' + which_coupling, output='tex')
def generate_1D9S_pojection_scans(k2v_9S_basis_tuple, vmin, vmax):
    # NOTE(review): "pojection" is a typo for "projection", but callers
    # (compare1D3S9S_reco_method) use this name — do not rename casually.
    """Draw a 2D heatmap of combined m_HH across a kappa_2V scan built from a
    piecewise multi-basis (9-sample, 3-region) combination.

    k2v_9S_basis_tuple is (index_bounds, list_of_k2v_lists): index_bounds are
    the two k2v breakpoints between regions, and each k2v list defines the
    basis used within its region. The result is saved as a PDF under
    plots/scan_maps/.
    """
    var_edges = numpy.linspace(200, 1200, 31)
    #var_edges = numpy.linspace(200, 2000, 55)
    data_files = fileio_utils.read_coupling_file()
    # NOTE(review): mutates numpy's global print options as a side effect —
    # looks like a debug leftover; consider removing.
    numpy.set_printoptions(threshold=sys.maxsize, linewidth=230, precision=1, floatmode='fixed', suppress=True)
    num_kappa_bins = 100
    kl_fixed, kv_fixed = 1, 1
    k2v_vals = numpy.linspace(-2, 4, num_kappa_bins + 1)
    index_bounds = k2v_9S_basis_tuple[0]
    # Boolean masks selecting which scan points each region's basis covers.
    grid_bounds = [ k2v_vals <= index_bounds[0], numpy.logical_and(index_bounds[0] < k2v_vals, k2v_vals <= index_bounds[1]), index_bounds[1] < k2v_vals ]
    grid_list = []
    for k2v_list, boundary in zip(k2v_9S_basis_tuple[1], grid_bounds):
        basis_parameters = [(k2v, 1, 1) for k2v in k2v_list]
        base_events_list = fileio_utils.get_events(basis_parameters, data_files)
        base_histograms = [ fileio_utils.retrieve_reco_weights(var_edges, base_events) for base_events in base_events_list ]
        weights, errors = numpy.array(list(zip(*base_histograms)))
        reweight_vector_function = combination_utils.get_amplitude_function(
            basis_parameters, as_scalar=False,
            base_equations=combination_utils.k2v_scan_terms)
        multiplier_array_vector = reweight_vector_function(
            k2v_vals, kl_fixed, kv_fixed)[0]
        # Combine this basis over the full scan, bins on the last axis...
        partial_weight_grid = sum([ multiplier_array[..., None] * w for multiplier_array, w in zip(multiplier_array_vector, weights) ])
        # ...then zero everything outside this basis's region.
        bounded_weight_grid = (partial_weight_grid.transpose() * boundary).transpose()
        grid_list.append(bounded_weight_grid)
    # Regions are disjoint, so summing stitches the piecewise grids together.
    weight_grid = sum(grid_list)
    num_bins = len(k2v_vals) - 1
    ranges = k2v_vals[0], k2v_vals[-1], var_edges[0], var_edges[-1]
    title = 'Combined $m_{HH}$ Across ' r'$\kappa_{2V}$ Using Multi-Basis Combination'
    axis_title = r'$\kappa_{2V}$'
    #plottable_couplings = [ c[0] for c in couplings ]
    tick_vals = numpy.arange(ranges[0], ranges[1] + 1, 1)
    fig, ax = plt.subplots()
    im = ax.imshow(weight_grid.transpose(), cmap='viridis', extent=ranges, origin='lower', norm=matplotlib.colors.LogNorm(vmin, vmax))
    #im = ax.imshow(weight_grid.transpose(), extent=ranges, origin='lower', cmap='viridis')
    ax.set_xticks(ticks=tick_vals)
    ax.set_xlabel(axis_title)
    ax.set_ylabel('$m_{HH}$')
    ax.grid()
    #for x in plottable_couplings: ax.vlines(x, ymin=var_edges[0], ymax=var_edges[-1], color='red')
    ax.set_aspect('auto', 'box')
    fig.subplots_adjust(right=0.85)
    cbar_ax = fig.add_axes([0.87, 0.11, 0.03, 0.7])
    fig.colorbar(im, cax=cbar_ax, label='Bin Weight')
    #basis_table = '$\kappa_{2V}$ , $\kappa_{\lambda}$ , $\kappa_{V}$ '
    #for coupling in couplings: basis_table += '\n'+combination_utils.nice_coupling_string(coupling)
    #fig.text(.99, 1, basis_table, ha='right', va='top', fontsize='xx-small', family='monospace')
    fig.suptitle(title, fontsize=10, fontweight='bold')
    dpi = 500
    figname = 'c2v_9S_projection'
    #plt.savefig('plots/scan_maps/'+figname+'.png',dpi=dpi)
    plt.savefig('plots/scan_maps/' + figname + '.pdf', dpi=dpi)
def compare_bases_reco_method(basis_parameters_list, verification_parameters, base_equations=combination_utils.full_scan_terms, name_suffix='', title_suffix='', labels=('', ''), is_verification=True, truth_level=False, truth_data_files=None):
    """Overlay linear combinations from two different bases (optionally with a
    directly-simulated verification histogram) at each test coupling point.

    basis_parameters_list supplies the bases; the plot call below indexes
    combined_tuples[0] and [1], so exactly two bases are expected.
    truth_level switches between LHE truth inputs (truth_data_files maps
    coupling -> file) and NNT reco inputs. When is_verification is False,
    verification_parameters are bare coupling tuples with no reference
    histogram.
    """
    #var_edges = numpy.linspace(200, 1200, 31)
    #var_edges = numpy.arange(0, 2050, 50)
    var_edges = numpy.linspace(200, 2000, 55)
    basis_tuple_list = []
    for basis_parameters in basis_parameters_list:
        reweight_vector = get_amplitude_function(basis_parameters, as_scalar=False, base_equations=base_equations)
        if truth_level:
            data_files = fileio_utils.read_coupling_file(
                coupling_file='basis_files/truth_LHE_couplings_extended.dat')
            basis_files = [ truth_data_files[coupling] for coupling in basis_parameters ]
            truth_weights, truth_errors = fileio_utils.extract_lhe_truth_data(
                basis_files, var_edges)
            basis_tuple_list.append(
                (truth_weights, truth_errors, reweight_vector))
        else:
            data_files = fileio_utils.read_coupling_file()
            base_events_list = fileio_utils.get_events(basis_parameters, data_files)
            base_histograms = [ fileio_utils.retrieve_reco_weights(var_edges, base_events) for base_events in base_events_list ]
            base_weights, base_errors = numpy.array(list(
                zip(*base_histograms)))
            basis_tuple_list.append(
                (base_weights, base_errors, reweight_vector))
    # data_files from the last loop iteration is reused below — both branches
    # assign a compatible mapping, so this is order-dependent but consistent.
    testpoint_list = verification_parameters
    if is_verification:
        if truth_level:
            verification_files = [ data_files[key] for key in verification_parameters ]
            truth_verification_weights, truth_verification_errors = fileio_utils.extract_lhe_truth_data(
                verification_files, var_edges)
            testpoint_list = zip(verification_parameters, truth_verification_weights, truth_verification_errors)
        else:
            testpoint_list = []
            verification_events_list = fileio_utils.get_events(
                verification_parameters, data_files)
            for events, param in zip(verification_events_list, verification_parameters):
                verification_weights, verification_errors = fileio_utils.retrieve_reco_weights(
                    var_edges, events)
                testpoint_list.append(
                    (param, verification_weights, verification_errors))
    for testpoint in testpoint_list:
        verification_weights, verification_errors = None, None
        if is_verification:
            coupling_parameters, verification_weights, verification_errors = testpoint
        else:
            coupling_parameters = testpoint
        # One combined histogram per basis, in basis_parameters_list order.
        combined_tuples = []
        for base_weights, base_errors, reweight_vector in basis_tuple_list:
            combined_tuples.append(
                reco_reweight(reweight_vector, coupling_parameters, base_weights, base_errors))
        if truth_level:
            name = 'truth_mHH_compare' + name_suffix
            title = 'Truth LHE-Based Linear Combination:\nTruth $m_{HH}$' + title_suffix
            xlabel = 'Truth $m_{HH}$ (GeV)'
        else:
            name = 'reco_mHH_compare' + name_suffix
            title = 'NNT-Based Linear Combination:\n$m_{HH}$' + title_suffix
            xlabel = 'Reconstructed $m_{HH}$ (GeV)'
        plot_histogram(
            name,
            title,
            var_edges,
            coupling_parameters,
            combined_tuples[0][0],
            combined_tuples[0][1],
            verification_weights,
            verification_errors,
            alt_linearly_combined_weights=combined_tuples[1][0],
            alt_linearly_combined_errors=combined_tuples[1][1],
            generated_label=labels[0],
            alt_label=labels[1],
            xlabel=xlabel,
        )
def compare12_reco_method(basis_parameters, k2v_basis_parameters, kl_basis_parameters, verification_parameters, base_equations=combination_utils.full_scan_terms, name_suffix='', title_suffix=''):
    """Compare the full 3D-basis linear combination against the dedicated 1D
    basis (k2v-only or kl-only) at each verification point, with the directly
    simulated histogram as reference.

    Only points with kv == 1 and exactly one of (k2v, kl) varied are drawn;
    the SM point (1, 1, 1) is skipped.
    """
    reweight_vector = get_amplitude_function(basis_parameters, as_scalar=False, base_equations=base_equations)
    k2v_reweight_vector = get_amplitude_function(
        k2v_basis_parameters, as_scalar=False,
        base_equations=combination_utils.k2v_scan_terms)
    kl_reweight_vector = get_amplitude_function(
        kl_basis_parameters, as_scalar=False,
        base_equations=combination_utils.kl_scan_terms)
    #var_edges = numpy.linspace(200, 1200, 31)
    var_edges = numpy.linspace(200, 2000, 55)
    #var_edges = numpy.arange(0, 2050, 50)
    data_files = fileio_utils.read_coupling_file()
    base_events_list = fileio_utils.get_events(basis_parameters, data_files)
    k2v_base_events_list = fileio_utils.get_events(k2v_basis_parameters, data_files)
    kl_base_events_list = fileio_utils.get_events(kl_basis_parameters, data_files)
    verification_events_list = fileio_utils.get_events(verification_parameters, data_files)
    # Histograms for the 3D basis.
    base_histograms = [ fileio_utils.retrieve_reco_weights(var_edges, base_events) for base_events in base_events_list ]
    base_weights, base_errors = numpy.array(list(zip(*base_histograms)))
    # Histograms for the 1D k2v basis.
    k2v_base_histograms = [ fileio_utils.retrieve_reco_weights(var_edges, base_events) for base_events in k2v_base_events_list ]
    k2v_base_weights, k2v_base_errors = numpy.array(
        list(zip(*k2v_base_histograms)))
    # Histograms for the 1D kl basis.
    kl_base_histograms = [ fileio_utils.retrieve_reco_weights(var_edges, base_events) for base_events in kl_base_events_list ]
    kl_base_weights, kl_base_errors = numpy.array(
        list(zip(*kl_base_histograms)))
    for verification_events, coupling_parameters in zip(
            verification_events_list, verification_parameters):
        k2v, kl, kv = coupling_parameters
        # Restrict to pure 1D variations away from the SM point.
        if coupling_parameters == (1, 1, 1): continue
        if k2v != 1 and kl != 1: continue
        if kv != 1: continue
        alt_combined_weights, alt_combined_errors = None, None
        # Pick whichever dedicated 1D basis matches the varied coupling.
        if k2v != 1 and kl == 1:
            alt_combined_weights, alt_combined_errors = reco_reweight(
                k2v_reweight_vector, coupling_parameters, k2v_base_weights,
                k2v_base_errors)
        if k2v == 1 and kl != 1:
            alt_combined_weights, alt_combined_errors = reco_reweight(
                kl_reweight_vector, coupling_parameters, kl_base_weights,
                kl_base_errors)
        verification_weights, verification_errors = fileio_utils.retrieve_reco_weights(
            var_edges, verification_events)
        combined_weights, combined_errors = reco_reweight(
            reweight_vector, coupling_parameters, base_weights, base_errors)
        plot_histogram(
            'reco_mHH_1-2D_compare' + name_suffix,
            'NNT-Based Linear Combination:\n$m_{HH}$' + title_suffix,
            var_edges,
            coupling_parameters,
            combined_weights,
            combined_errors,
            verification_weights,
            verification_errors,
            alt_linearly_combined_weights=alt_combined_weights,
            alt_linearly_combined_errors=alt_combined_errors,
            generated_label='3D Combination',
            xlabel='Reconstructed $m_{HH}$ (GeV)',
        )
def compare1D3S9S_reco_method(k2v_3S_basis_parameters, k2v_9S_basis_tuple):
    """Compare a single 3-sample k2v basis against a piecewise 3x3-sample
    (9-sample) multi-basis across a kappa_2V scan.

    Draws the multi-basis projection heatmap, a single-basis heatmap, and
    per-point overlay histograms of the two combination strategies.
    k2v_9S_basis_tuple is (index_bounds, list_of_k2v_lists) as used by
    generate_1D9S_pojection_scans.
    """
    vmin, vmax = 1e-5, 5
    generate_1D9S_pojection_scans(k2v_9S_basis_tuple, vmin, vmax)
    #var_edges = numpy.linspace(200, 1200, 31)
    alt_var_edges = numpy.linspace(200, 1200, 31)
    var_edges = numpy.linspace(200, 2000, 55)
    #var_edges = numpy.arange(0, 2050, 50)
    num_kappa_bins = 10
    k2v_vals = numpy.linspace(-2, 4, num_kappa_bins + 1)
    k2v_vals_alt = numpy.linspace(-2, 4, 100 + 1)
    data_files = fileio_utils.read_coupling_file()
    k2v_3S_reweight_vector = get_amplitude_function(
        k2v_3S_basis_parameters, as_scalar=False,
        base_equations=combination_utils.k2v_scan_terms)
    k2v_3S_base_events_list = fileio_utils.get_events(k2v_3S_basis_parameters, data_files)
    k2v_3S_base_histograms = [ fileio_utils.retrieve_reco_weights(var_edges, base_events) for base_events in k2v_3S_base_events_list ]
    k2v_3S_base_weights, k2v_3S_base_errors = numpy.array(
        list(zip(*k2v_3S_base_histograms)))
    # Separate (coarser-range) binning for the single-basis heatmap.
    k2v_3S_base_histograms_alt = [ fileio_utils.retrieve_reco_weights(alt_var_edges, base_events) for base_events in k2v_3S_base_events_list ]
    k2v_3S_base_weights_alt, k2v_3S_base_errors_alt = numpy.array(
        list(zip(*k2v_3S_base_histograms_alt)))
    draw_1D_mhh_heatmap(k2v_3S_basis_parameters, k2v_3S_base_weights_alt, alt_var_edges, k2v_vals_alt, 1, 1,
                        base_equations=combination_utils.k2v_scan_terms, which_coupling='k2v',
                        filename='projectionscan_k2v_multicompare', title_suffix='Using Single Basis', vrange=(vmin, vmax))
    # One (weights, errors, reweight function) triple per k2v region.
    multibasis_list = []
    for k2v_list in k2v_9S_basis_tuple[1]:
        basis_parameters = [(k2v, 1, 1) for k2v in k2v_list]
        base_events_list = fileio_utils.get_events(basis_parameters, data_files)
        base_histograms = [ fileio_utils.retrieve_reco_weights(var_edges, base_events) for base_events in base_events_list ]
        weights, errors = numpy.array(list(zip(*base_histograms)))
        reweight_vector_function = combination_utils.get_amplitude_function(
            basis_parameters, as_scalar=False,
            base_equations=combination_utils.k2v_scan_terms)
        multibasis_list.append((weights, errors, reweight_vector_function))
    index_bounds = k2v_9S_basis_tuple[0]
    for k2v in k2v_vals:
        coupling_parameters = [k2v, 1, 1]
        k2v_combined_weights, k2v_combined_errors = reco_reweight(
            k2v_3S_reweight_vector, coupling_parameters, k2v_3S_base_weights,
            k2v_3S_base_errors)
        # Select the region-specific basis for this scan point.
        multibasis_index = None
        if k2v <= index_bounds[0]: multibasis_index = 0
        elif k2v <= index_bounds[1]: multibasis_index = 1
        else: multibasis_index = 2
        multibasis_weights, multibasis_errors, multibasis_reweight_vector_function = multibasis_list[
            multibasis_index]
        multicombined_weights, multicombined_errors = reco_reweight(
            multibasis_reweight_vector_function, coupling_parameters,
            multibasis_weights, multibasis_errors)
        view_linear_combination.plot_histogram(
            'preview_reco_mHH_multibasis',
            'NNT-Based Linear Combination:\n$m_{HH}$',
            var_edges,
            coupling_parameters,
            k2v_combined_weights,
            k2v_combined_errors,
            alt_linearly_combined_weights=multicombined_weights,
            alt_linearly_combined_errors=multicombined_errors,
            alt_label='3-Basis Set',
            generated_label='1-Basis Equation',
            xlabel='Reconstructed $m_{HH}$ (GeV)',
        )
def calculate_solidarity_results():
    """For every candidate sample variation, compute solidarity-metric
    integrals over all 6-sample bases formed from the SM point, the kl=2
    point, three pre-existing variations, and the candidate.

    Reads the module-level `_possible_variations` list of candidate coupling
    tuples.

    Returns:
        List of (new_variation, [solidarity integrals]) tuples, one entry per
        candidate, preserving candidate order.
    """
    kv_fixed = 1.0
    num_kappa_bins = 100
    k2v_val_range = numpy.linspace(-2, 4, num_kappa_bins + 1)
    kl_val_range = numpy.linspace(-14, 16, num_kappa_bins + 1)
    grid_pixel_area = (k2v_val_range[1] - k2v_val_range[0]) * (kl_val_range[1] - kl_val_range[0])
    k2v_grid, kl_grid = numpy.meshgrid(k2v_val_range, kl_val_range)
    pre_existing_variations = [
        #(1 , 1 , 1 ), # Ignore SM point b/c I hard-code it in further down to ease the combinatorics
        (0, 1, 1),
        (0.5, 1, 1),
        (1.5, 1, 1),
        (2, 1, 1),
        (3, 1, 1),
        (1, 0, 1),
        #(1 , 2 , 1 ),
        (1, 10, 1),
        (1, 1, 0.5),
        (1, 1, 1.5),
        (0, 0, 1)
    ]
    sm_coupling = (1, 1, 1)
    kl2_coupling = (1, 2, 1)
    theory_xsec_function = combination_utils.get_theory_xsec_function()
    # SM and kl=2 points are forced into every basis.
    sm_tuple = (sm_coupling, theory_xsec_function(sm_coupling))
    kl2_tuple = (kl2_coupling, theory_xsec_function(kl2_coupling))
    variation_result_list = []
    variations_computed = 0
    total_variations = len(_possible_variations)
    coupling_xsec_tuples = [(var, theory_xsec_function(var)) for var in pre_existing_variations]
    for new_variation in _possible_variations:
        print('------ Calculating ' + str(new_variation))
        new_tuple = (new_variation, theory_xsec_function(new_variation))
        solidarity_list = []
        total = 0
        for nonSM_coupling_tuples in itertools.combinations(coupling_xsec_tuples, 3):
            coupling_base_tuple = [sm_tuple, kl2_tuple, *nonSM_coupling_tuples, new_tuple]
            coupling_base, xsec_list = list(zip(*coupling_base_tuple))
            xsec_array = numpy.array(xsec_list)
            reweight_vector_function = combination_utils.get_amplitude_function(coupling_base, as_scalar=False)
            if reweight_vector_function is None:
                # Degenerate basis: no invertible amplitude matrix.
                continue
            multiplier_grid_vector = reweight_vector_function(k2v_grid, kl_grid, kv_fixed)[0]
            scaled_xsecs = multiplier_grid_vector * xsec_array[:, None, None]
            # Solidarity: combined cross-section over spread of contributions.
            solidarity_grid = scaled_xsecs.sum(axis=0) / abs(scaled_xsecs).std(axis=0)
            metric_integral = solidarity_grid.sum() * grid_pixel_area
            solidarity_list.append(metric_integral)
            total += 1
            if total % 100 == 0:
                print(total)
        variation_result_list.append((new_variation, solidarity_list))
        variations_computed += 1
        print(f'Completed variation {variations_computed} / {total_variations}\n')
    print(f'\n\nAll possible variations checked!')
    return variation_result_list
def draw_1D_mhh_heatmap(couplings, weights, var_edges, k2v_vals, kl_vals, kv_vals, base_equations=None, which_coupling=None, filename='test', title_suffix=None, vrange=None):
    """Render a heatmap of the combined m_HH distribution across a 1D kappa
    scan and save it as a PDF under plots/scan_maps/.

    Args:
        couplings: coupling tuples of the combination basis.
        weights: per-sample histogram weight arrays (same binning).
        var_edges: m_HH bin edges (vertical axis extent).
        k2v_vals, kl_vals, kv_vals: scan values; the scanned coupling is an
            array, the held couplings are scalars.
        base_equations: passed through to get_amplitude_function.
        which_coupling: 'k2v' or 'kl'; selects axis labels and tick layout.
        filename: output file stem.
        title_suffix: optional extra title line.
        vrange: optional (vmin, vmax) for the log color normalization.
    """
    reweight_vector_function = combination_utils.get_amplitude_function(
        couplings, as_scalar=False, base_equations=base_equations)
    multiplier_array_vector = reweight_vector_function(k2v_vals, kl_vals, kv_vals)[0]
    # Combined weights: scan points along axis 0, m_HH bins along axis 1.
    weight_grid = sum(
        multiplier_array[..., None] * w
        for multiplier_array, w in zip(multiplier_array_vector, weights)
    )
    if which_coupling == 'k2v':
        ranges = k2v_vals[0], k2v_vals[-1], var_edges[0], var_edges[-1]
        title = 'Combined $m_{HH}$ Across ' r'$\kappa_{2V}$'
        axis_title = r'$\kappa_{2V}$'
        plottable_couplings = [c[0] for c in couplings]
        tick_vals = numpy.arange(ranges[0], ranges[1] + 1, 1)
    elif which_coupling == 'kl':
        ranges = kl_vals[0], kl_vals[-1], var_edges[0], var_edges[-1]
        title = 'Combined $m_{HH}$ Across ' r'$\kappa_{\lambda}$'
        axis_title = r'$\kappa_{\lambda}$'
        plottable_couplings = [c[1] for c in couplings]
        tick_vals = numpy.linspace(ranges[0], ranges[1], 7)
    fig, ax = plt.subplots()
    if vrange is None:
        color_norm = matplotlib.colors.LogNorm()
    else:
        color_norm = matplotlib.colors.LogNorm(*vrange)
    im = ax.imshow(weight_grid.transpose(), cmap='viridis', extent=ranges,
                   origin='lower', norm=color_norm)
    ax.set_xticks(ticks=tick_vals)
    ax.set_xlabel(axis_title)
    ax.set_ylabel('$m_{HH}$')
    ax.grid()
    # Mark the basis samples' coupling values on the scan axis.
    for x in plottable_couplings:
        ax.vlines(x, ymin=var_edges[0], ymax=var_edges[-1], color='red')
    ax.set_aspect('auto', 'box')
    fig.subplots_adjust(right=0.85)
    cbar_ax = fig.add_axes([0.87, 0.11, 0.03, 0.7])
    fig.colorbar(im, cax=cbar_ax, label='Bin Weight')
    basis_table = '$\kappa_{2V}$ , $\kappa_{\lambda}$ , $\kappa_{V}$ '
    for coupling in couplings:
        basis_table += '\n' + combination_utils.nice_coupling_string(coupling)
    fig.text(.99, 1, basis_table, ha='right', va='top', fontsize='xx-small', family='monospace')
    if title_suffix is not None:
        title += '\n' + title_suffix
    fig.suptitle(title, fontsize=10, fontweight='bold')
    plt.savefig('plots/scan_maps/' + filename + '.pdf', dpi=500)