def __init__(self, data_path):
    """Open the datafile and set up analysis state.

    :param data_path: path to the PyTables datafile, opened in append
        ('a') mode so results can be written back.

    NOTE: relies on ``self.stations`` being defined on the class
    (a class attribute listing station numbers) -- set elsewhere.
    """
    # PyTables >= 2.4 API: ``open_file`` replaces the deprecated
    # camel-case ``openFile`` (removed in PyTables 3).  This also makes
    # the call consistent with the other ``__init__`` in this module.
    self.data = tables.open_file(data_path, 'a')
    self.station_groups = ['/s%d' % u for u in self.stations]
    self.cluster = clusters.ScienceParkCluster(self.stations)
    # trigger threshold (fraction), presumably used by trigger checks
    # elsewhere -- TODO confirm against callers.
    self.trig_threshold = .5
def __init__(self, data_path):
    """Open the datafile and prepare cluster bookkeeping.

    :param data_path: path to the PyTables datafile, opened in append
        ('a') mode.

    NOTE: reads ``self.stations``, which must be defined on the class.
    """
    self.data = tables.open_file(data_path, 'a')
    self.cluster = clusters.ScienceParkCluster(self.stations)
    self.station_groups = ['/s%d' % station for station in self.stations]
    # Offset lists start empty; filled in by later analysis steps.
    self.detector_offsets = []
    self.station_offsets = []
def plot_sciencepark_cluster():
    """Plot the Science Park station/detector layout and save the data.

    Scatters every detector position, then overlays lines connecting a
    three-station sub-cluster (501, 503, 506).  Detector and station
    coordinates are written out via ``utils`` and ``artist.utils``.
    """
    stations = range(501, 507)
    cluster = clusters.ScienceParkCluster(stations)

    figure()
    x_list, y_list = [], []
    x_stations, y_stations = [], []
    for station in cluster.stations:
        det_xs, det_ys = [], []
        for detector in station.detectors:
            dx, dy = detector.get_xy_coordinates()
            det_xs.append(dx)
            det_ys.append(dy)
            scatter(dx, dy, c='black', s=3)
        x_list.extend(det_xs)
        y_list.extend(det_ys)
        # Station position is taken as the centroid of its detectors.
        x_stations.append(mean(det_xs))
        y_stations.append(mean(det_ys))
    axis('equal')

    # Draw the baselines of a three-station sub-cluster.
    cluster = clusters.ScienceParkCluster([501, 503, 506])
    positions = []
    for station in cluster.stations:
        sx, sy, alpha = station.get_xyalpha_coordinates()
        positions.append((sx, sy))
    for (x0, y0), (x1, y1) in itertools.combinations(positions, 2):
        plot([x0, x1], [y0, y1], 'gray')

    utils.savedata([x_list, y_list])
    utils.saveplot()

    artist.utils.save_data([x_list, y_list], suffix='detectors',
                           dirname='plots')
    artist.utils.save_data([stations, x_stations, y_stations],
                           suffix='stations', dirname='plots')
def calc_theta_error_for_station_cluster(theta, station, cluster):
    """Estimate the zenith-angle error for one station inside a cluster.

    :param theta: zenith angle at which to evaluate the error.
    :param station: index of the station within the Science Park cluster.
    :param cluster: three station indices; the first is the reference.
    :return: mean total theta error, averaged over azimuth.
    """
    phis = linspace(-pi, pi, 50)
    rec = DirectionReconstruction
    sciencepark = clusters.ScienceParkCluster(range(501, 507))

    # Single-station geometry: detector pairs (1, 3) and (1, 4).
    st = sciencepark.stations[station]
    ra, pa = st.calc_r_and_phi_for_detectors(1, 3)
    rb, pb = st.calc_r_and_phi_for_detectors(1, 4)
    err_single = rec.rel_theta1_errorsq(theta, phis, pa, pb, ra, rb)

    # Cluster geometry: station pairs relative to the first station.
    ra, pa = sciencepark.calc_r_and_phi_for_stations(cluster[0], cluster[1])
    rb, pb = sciencepark.calc_r_and_phi_for_stations(cluster[0], cluster[2])
    err_cluster = rec.rel_theta1_errorsq(theta, phis, pa, pb, ra, rb)

    # The rel_*_errorsq terms are already squared, so only the timing
    # uncertainties get squared here before the square root.
    err_total = sqrt(STATION_TIMING_ERR ** 2 * err_single +
                     CLUSTER_TIMING_ERR ** 2 * err_cluster)
    return mean(err_total)
def calc_phi_error_for_station_station(theta, station1, station2):
    """Estimate the azimuth-angle error for a pair of stations.

    :param theta: zenith angle at which to evaluate the error.
    :param station1, station2: station indices within the cluster.
    :return: mean total phi error, averaged over azimuth.
    """
    phis = linspace(-pi, pi, 50)
    rec = DirectionReconstruction
    sciencepark = clusters.ScienceParkCluster(range(501, 507))

    # Per-station contribution from detector pairs (1, 3) and (1, 4).
    err_singles = []
    for idx in (station1, station2):
        st = sciencepark.stations[idx]
        ra, pa = st.calc_r_and_phi_for_detectors(1, 3)
        rb, pb = st.calc_r_and_phi_for_detectors(1, 4)
        err_singles.append(rec.rel_phi_errorsq(theta, phis, pa, pb, ra, rb))

    # The rel_phi_errorsq terms are already squared; combine both
    # stations' contributions in quadrature with the timing error.
    err_total = sqrt(STATION_TIMING_ERR ** 2 * err_singles[0] +
                     STATION_TIMING_ERR ** 2 * err_singles[1])
    return mean(err_total)
def do_energies_simulations(self):
    """Run simulations for showers at two fixed energies (22.5 deg zenith)."""
    cluster = clusters.ScienceParkCluster()
    shower_groups = ['/showers/E_%s/zenith_22_5' % energy
                     for energy in ['100TeV', '10PeV']]
    for shower_group in shower_groups:
        for shower in self.get_showers_in_group(shower_group):
            self.perform_simulation(cluster, shower)
def do_cluster_simulations(self):
    """Run a simulation for every shower at every available shower angle."""
    cluster = clusters.ScienceParkCluster()
    # Lazily flatten (angle -> showers) into a single shower stream,
    # preserving the original iteration order.
    showers = (shower
               for angle in self.get_shower_angles_from_shower_data()
               for shower in self.get_showers_in_group(angle))
    for shower in showers:
        self.perform_simulation(cluster, shower)
def plot_N_vs_R(data):
    """Plot coincidence counts versus inter-station distance and fit an
    expected-rate curve derived from the KASCADE lateral density function.

    :param data: PyTables file with ``/coincidences/c_index`` and
        ``/coincidences/observables`` nodes.

    Uses module-level globals ``c_x``/``c_y`` as a cache so the expensive
    coincidence counting is only done once per interpreter session.
    """
    stations = range(501, 507)
    station_ids = range(6)
    cluster = clusters.ScienceParkCluster(stations)

    c_index = data.root.coincidences.c_index
    observables = data.root.coincidences.observables

    figure()
    #clf()
    global c_x, c_y
    if 'c_x' in globals():
        # Cached from a previous call: skip the counting loop entirely.
        scatter(c_x, c_y)
    else:
        # For each coincidence, collect the station ids of its events.
        # NOTE: this rebinds the local ``stations`` (was the 501..506
        # range above) -- the range is no longer needed at this point.
        stations_in_coincidence = []
        for coincidence_events in c_index:
            stations = [observables[u]['station_id']
                        for u in coincidence_events]
            stations_in_coincidence.append(stations)

        # Count, for every station pair, how many coincidences contain
        # both stations, and plot that count against their separation.
        c_x = []
        c_y = []
        for station1, station2 in itertools.combinations(station_ids, 2):
            condition = [station1 in u and station2 in u
                         for u in stations_in_coincidence]
            N = sum(condition)
            R, phi = cluster.calc_r_and_phi_for_stations(station1, station2)
            scatter(R, N)
            c_x.append(R)
            c_y.append(N)
            print R, N, station1, station2

    # Expected relative coincidence rate vs distance: integrate a
    # power-law flux (E^-2.7) against the two-detector trigger
    # probability from the KASCADE LDF.
    ldf = KascadeLdf()
    R = linspace(100, 500)
    E = linspace(1e14, 1e19, 100)
    F = E ** -2.7
    N = []
    for r in R:
        x = []
        for f, e in zip(F, E):
            # Shower size from energy; 10**4.8 particles at 1 PeV --
            # presumably an empirical conversion, TODO confirm.
            Ne = e / 1e15 * 10 ** 4.8
            density = ldf.get_ldf_value_for_size(r, Ne)
            # Poisson probability of at least one particle in a
            # half-square-meter detector.
            prob = 1 - exp(-.5 * density)
            x.append(f * prob)
        N.append(mean(x))
    N = array(N)

    # Fit only the overall scale S of the predicted curve to the data.
    f = lambda x, S: S * interp(x, R, N)
    c_x = array(c_x)
    c_y = array(c_y)
    # WTF wrong with point at slightly less than 100 m? 501 / 502??
    sc_x = c_x.compress(c_x >= 100)
    sc_y = c_y.compress(c_x >= 100)
    popt, pcov = curve_fit(f, sc_x, sc_y, p0=(1e45))
    plot(R, f(R, popt[0]))
    #ylim(0, 150000)
    ylim(0, 500000)
    xlim(0, 500)
    xlabel("Distance [m]")
    ylabel("Number of coincidences")
    utils.saveplot()
    utils.savedata([sc_x, sc_y], suffix='data')
    utils.savedata([R, f(R, popt[0])], suffix='fit')