def determine_dt_for_pair(stations):
    """Determine and store dt for a pair of stations

    :param stations: tuple of two station numbers; the first is used as
                     the reference station.

    """
    path = DATA_PATH + 'dt_ref%d_%d.h5' % stations
    if os.path.exists(path):
        print 'dt data already exists for %d-%d' % stations
        return

    ref_station, station = stations
    try:
        with tables.open_file(PAIR_DATAPATH % tuple(sorted(stations)),
                              'r') as data:
            cq = CoincidenceQuery(data)
            ref_detector_offsets = Station(ref_station).detector_timing_offset
            detector_offsets = Station(station).detector_timing_offset
            for dt0, dt1 in monthrange((2004, 1), (2015, 9)):
                coins = cq.all(stations, start=dt0, stop=dt1, iterator=True)
                coin_events = cq.events_from_stations(coins, stations)
                ets, dt = determine_time_differences(
                    coin_events, ref_station, station, ref_detector_offsets,
                    detector_offsets)
                store_dt(ref_station, station, ets, dt)
    except Exception as e:
        print 'Failed for %d, %d' % stations
        print e
        return
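# A minimal driver sketch for determine_dt_for_pair, assuming DATA_PATH,
# PAIR_DATAPATH, and the helper functions used above are defined at module
# level and the paired coincidence files exist. The station pairs below are
# illustrative only.
if __name__ == '__main__':
    for pair in [(501, 510), (501, 502)]:
        determine_dt_for_pair(pair)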
def find_coincidence(self, date, session):
    """Find coincidences for the given cluster on the given date

    Store the found coincidences and events in the database.
    Then return the number of found coincidences.

    """
    stations = Station.objects.filter(
        cluster=self.cluster,
        pcs__is_test=False).distinct().values_list('number', flat=True)
    path = get_esd_data_path(date)

    if not os.path.isfile(path):
        # No data file, so no coincidences
        return 0

    number_of_coincidences = 0
    # Get all coincidences containing stations in the requested cluster
    with tables.open_file(path, 'r') as data:
        cq = CoincidenceQuery(data)
        all_coincidences = cq.any(stations)
        coincidences = cq.events_from_stations(all_coincidences, stations,
                                               n=3)
        for coincidence in coincidences:
            # TODO: Check for multiple events from the same station.
            self.save_coincidence(coincidence, session)
            number_of_coincidences += 1

    return number_of_coincidences
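# Hedged sketch of driving find_coincidence over a date range; `analysis`
# stands in for an instance of the class defining find_coincidence and
# `session` for the session object the coincidences are saved under. Both
# names are hypothetical, not from the original code.
import datetime

def process_date_range(analysis, session, start, end):
    """Run coincidence finding for each date in [start, end)."""
    current = start
    while current < end:
        n = analysis.find_coincidence(current, session)
        print 'Found %d coincidences on %s' % (n, current)
        current += datetime.timedelta(days=1)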
def analyse_reconstructions(data):
    cq = CoincidenceQuery(data)
    c_ids = data.root.coincidences.coincidences.read_where(
        's505 & (timestamp < 1366761600)', field='id')
    c_recs = cq.reconstructions.read_coordinates(c_ids)

    s_ids = data.root.hisparc.cluster_amsterdam.station_505.events.get_where_list(
        'timestamp < 1366761600')
    s_recs = data.root.hisparc.cluster_amsterdam.station_505.reconstructions.read_coordinates(s_ids)

    assert len(c_recs) == len(s_recs)

    zenc = c_recs['zenith']
    azic = c_recs['azimuth']
    zens = s_recs['zenith']
    azis = s_recs['azimuth']

    high_zenith = (zenc > .2) & (zens > .2)

    for minn in [1, 2, 4, 8, 16]:
        # Select on minimum particle density; avoid shadowing the builtin
        # name `filter`.
        sel = s_recs['min_n'] > minn

        length = len(azis.compress(high_zenith & sel))
        # Small random shifts to smear the discrete reconstructed angles.
        shifts = np.random.normal(0, .06, length)

        azicounts, x, y = np.histogram2d(
            azis.compress(high_zenith & sel) + shifts,
            azic.compress(high_zenith & sel),
            bins=np.linspace(-np.pi, np.pi, 73))
        plota = Plot()
        plota.histogram2d(azicounts, np.degrees(x), np.degrees(y),
                          type='reverse_bw', bitmap=True)
        # plota.set_title('Reconstructed azimuths for events in coincidence (zenith gt .2 rad)')
        plota.set_xlabel(r'$\phi_{505}$ [\si{\degree}]')
        plota.set_ylabel(r'$\phi_{Science Park}$ [\si{\degree}]')
        plota.set_xticks([-180, -90, 0, 90, 180])
        plota.set_yticks([-180, -90, 0, 90, 180])
        plota.save_as_pdf('azimuth_505_spa_minn%d' % minn)
def analyse(data, id):
    event_node = data.get_node('/station_99/events')
    print 'Total number of events: %d' % event_node.nrows

    cq = CoincidenceQuery(data)
    coincidences = cq.all(stations=[99])
    coincident_events = cq.events_from_stations(coincidences, [99], n=1)
    coincident_event_ids = [e[0][1]['event_id'] for e in coincident_events]
    event_ids = [i for i in range(event_node.nrows)
                 if i not in coincident_event_ids]
    events = event_node.read_coordinates(event_ids)
    coincident_events = event_node.read_coordinates(coincident_event_ids)
    print 'Total number of events not in coincidence: %d' % len(events)
    print 'Total number of events in coincidence: %d' % len(coincident_events)

    cph1 = coincident_events['pulseheights'][:, 0]
    cph2 = coincident_events['pulseheights'][:, 1]
    ph1 = events['pulseheights'][:, 0]
    ph2 = events['pulseheights'][:, 1]

    plot = Plot()
    bins = np.arange(0, 4000, 50)
    for ph, ls in [(cph1, 'black,dotted'), (cph2, 'red,dotted'),
                   (ph1, 'black'), (ph2, 'red')]:
        counts, bins = np.histogram(ph, bins=bins)
        plot.histogram(counts, bins, linestyle=ls)
    plot.set_xlimits(min=0, max=4000)
    plot.set_ylimits(min=.5)
    plot.set_ylabel('Counts')
    plot.set_xlabel('Pulseheight [ADC]')
    plot.save_as_pdf('muonlab_pulseheights_%d' % id)

    cdt = coincident_events['t2'] - coincident_events['t1']
    dt = events['t2'] - events['t1']

    plot = Plot()
    bins = np.arange(-100, 100, 2.5)
    for t, ls in [(dt, ''), (cdt, 'dotted')]:
        counts, bins = np.histogram(t, bins=bins)
        plot.histogram(counts, bins, linestyle=ls)
    plot.set_ylimits(min=0)
    plot.set_ylabel('Counts')
    plot.set_xlabel('Time difference [ns]')
    plot.save_as_pdf('muonlab_dt_%d' % id)
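# Possible driver for analyse(); the filename and id below are hypothetical
# examples, not taken from the original script.
if __name__ == '__main__':
    with tables.open_file('muonlab.h5', 'r') as data:
        analyse(data, 1)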
def get_coincidences_from_esd_in_range(start, end, stations, n):
    """Get coincidences from ESD in time range.

    :param start: start of datetime range
    :param end: end of datetime range
    :param stations: station numbers
    :param n: minimum number of events in coincidence
    :yield: id, station number and event

    """
    id = -1
    for t0, t1 in single_day_ranges(start, end):
        try:
            NetworkSummary.objects.get(date=t0)
        except NetworkSummary.DoesNotExist:
            continue
        with tables.open_file(esd.get_esd_data_path(t0)) as f:
            try:
                cq = CoincidenceQuery(f)
                ts0 = datetime_to_gps(t0)
                ts1 = datetime_to_gps(t1)
                if stations:
                    coincidences = cq.at_least(stations, n, start=ts0,
                                               stop=ts1)
                    events = cq.events_from_stations(coincidences, stations,
                                                     n)
                else:
                    coincidences = cq.timerange(start=ts0, stop=ts1)
                    events = cq.all_events(coincidences, n)
                for id, coin in enumerate(events, id + 1):
                    for number, event in coin:
                        yield id, number, event
            except (IOError, tables.NoSuchNodeError):
                continue
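# Illustrative consumer of the generator above; the date range and station
# numbers are examples only. Distinct coincidences are counted by their id,
# since each coincidence yields one row per participating station.
from datetime import datetime

def count_coincidences(start, end, stations, n=2):
    """Count distinct coincidences yielded in the given range."""
    ids = set()
    for id, number, event in get_coincidences_from_esd_in_range(start, end,
                                                                stations, n):
        ids.add(id)
    return len(ids)

# e.g. count_coincidences(datetime(2015, 1, 1), datetime(2015, 1, 8), [501, 510])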
def analyse_reconstructions(data):
    cq = CoincidenceQuery(data)
    total_count = cq.reconstructions.nrows
    successful_direction = sum(~isnan(cq.reconstructions.col('zenith')))
    successful_fraction = 100. * successful_direction / total_count
    print '%.2f%% successful out of %d coincidences' % (successful_fraction,
                                                        total_count)

    rec_d = cq.data.get_node('/coincidences', 'reconstructions_detectors')
    total_count_d = rec_d.nrows
    successful_direction_d = sum(~isnan(rec_d.col('zenith')))
    successful_fraction_d = 100. * successful_direction_d / total_count_d
    print '%.2f%% successful out of %d coincidences' % (successful_fraction_d,
                                                        total_count_d)
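# The two blocks above repeat the same calculation; a small helper (a
# sketch, assuming failed reconstructions store NaN in the 'zenith' column)
# would factor it out:
def successful_fraction(reconstructions):
    """Return the percentage of rows with a reconstructed direction."""
    total = reconstructions.nrows
    successful = sum(~isnan(reconstructions.col('zenith')))
    return 100. * successful / total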
def print_coincident_time_delta():
    cq = CoincidenceQuery(DATA, coincidence_group='/coincidences')
    coincidences = cq.coincidences
    events = [cq._get_events(c) for c in coincidences]

    cq_orig = CoincidenceQuery(DATA,
                               coincidence_group='/coincidences_original')
    coincidences_orig = cq_orig.coincidences
    events_orig = [cq_orig._get_events(c) for c in coincidences_orig]

    t3_501 = []
    t3_510 = []
    for event1, event2 in events:
        if event1[0] == 501:
            t3_501.append(event1[1]['t3'])
            t3_510.append(event2[1]['t3'])
        else:
            t3_501.append(event2[1]['t3'])
            t3_510.append(event1[1]['t3'])

    t3_501_orig = []
    t3_510_orig = []
    for event1, event2 in events_orig:
        if event1[0] == 501:
            t3_501_orig.append(event1[1]['t3'])
            t3_510_orig.append(event2[1]['t3'])
        else:
            t3_501_orig.append(event2[1]['t3'])
            t3_510_orig.append(event1[1]['t3'])

    t3_501 = array(t3_501)
    t3_510 = array(t3_510)
    t3_501_orig = array(t3_501_orig)
    t3_510_orig = array(t3_510_orig)

    # Keep only events with valid t3 values; avoid shadowing the builtin
    # name `filter`.
    valid = (t3_501_orig != -999) & (t3_510_orig != -999)

    dt3_501 = t3_501 - t3_501_orig
    dt3_510 = t3_510 - t3_510_orig
    dt = dt3_501 - dt3_510

    # Plot distribution
    plot = Plot()
    counts, bins = histogram(dt.compress(valid), bins=arange(-10.5, 11.5, 1))
    plot.histogram(counts, bins)
    plot.set_ylimits(min=0)
    plot.set_ylabel('counts')
    plot.set_xlabel(r'time delta [\si{\nano\second}]')
    plot.save_as_pdf('time_delta_501_510')
def plot_map(data):
    cluster = data.root.coincidences._v_attrs['cluster']
    map = make_map(cluster)
    cq = CoincidenceQuery(data)
    cq.reconstructions = cq.data.get_node('/coincidences', 'recs_curved')
    cq.reconstructed = True

    for i, coincidence in enumerate(cq.coincidences.read_where('N > 6')):
        if i > 50:
            break
        coincidence_events = next(cq.all_events([coincidence]))
        reconstruction = cq._get_reconstruction(coincidence)
        display_coincidences(cluster, coincidence_events, coincidence,
                             reconstruction, map)
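# Hypothetical driver for plot_map; COIN_DATA stands for the coincidence
# data path used by the other scripts in this collection.
if __name__ == '__main__':
    with tables.open_file(COIN_DATA, 'r') as data:
        plot_map(data)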
def plot_reconstructions():
    print 'Plotting . . .'
    plot = Plot()
    bins = linspace(0, 90, 30)  # Degrees
    plot.set_ylimits(min=0)
    plot.set_xlimits(0, 90)
    plot.set_ylabel('counts')
    plot.set_xlabel(r'Angle between [\si{\degree}]')
    colors = ['black', 'red', 'green', 'blue']

    for i, c_group in enumerate(['/coincidences',
                                 '/coincidences_original',
                                 '/coincidences_501_original',
                                 '/coincidences_510_original']):
        cq = CoincidenceQuery(DATA, coincidence_group=c_group)
        coincidences = cq.all([501, 510], iterator=True)
        reconstructions = [cq._get_reconstructions(c) for c in coincidences]
        cq.finish()

        azi501 = []
        zen501 = []
        azi510 = []
        zen510 = []

        for rec1, rec2 in reconstructions:
            if rec1[0] == 501:
                azi501.append(rec1[1]['azimuth'])
                zen501.append(rec1[1]['zenith'])
                azi510.append(rec2[1]['azimuth'])
                zen510.append(rec2[1]['zenith'])
            else:
                azi501.append(rec2[1]['azimuth'])
                zen501.append(rec2[1]['zenith'])
                azi510.append(rec1[1]['azimuth'])
                zen510.append(rec1[1]['zenith'])

        azi501 = array(azi501)
        zen501 = array(zen501)
        azi510 = array(azi510)
        zen510 = array(zen510)

        # Compare angles between old and new
        d_angle = angle_between(zen501, azi501, zen510, azi510)
        print c_group, r'67\%% within %.1f degrees' % degrees(
            percentile(d_angle[isfinite(d_angle)], 67))

        plot.histogram(*histogram(degrees(d_angle), bins=bins),
                       linestyle=colors[i])

    plot.save_as_pdf('angle_between_501_510')
    509: [-24.8369, -23.0218, -20.6011, -24.3757]}

COLORS = {501: 'black', 502: 'red!80!black', 503: 'blue!80!black',
          504: 'green!80!black', 505: 'orange!80!black',
          506: 'pink!80!black', 508: 'blue!40!black', 509: 'red!40!black'}


if __name__ == "__main__":
    with tables.open_file(COIN_DATA, 'r') as data:
        cq = CoincidenceQuery(data)
        coincidence = cq.coincidences[4323]
        coincidence_events = next(cq.events_from_stations([coincidence],
                                                          STATIONS))
        reconstruction = cq._get_reconstruction(coincidence)
        core_x = reconstruction['x']
        core_y = reconstruction['y']

        plot = Plot()

        ref_extts = coincidence_events[0][1]['ext_timestamp']

        # Expected shower front delay as function of core distance
        distances = arange(1, 370, 1)
        times = (2.43 * (1 + distances / 30.) ** 1.55) + 20
        plot.plot(distances, times, mark=None)
def plot_distance_vs_delay(data):
    colors = {501: 'black', 502: 'red!80!black', 503: 'blue!80!black',
              504: 'green!80!black', 505: 'orange!80!black',
              506: 'pink!80!black', 508: 'blue!40!black',
              509: 'red!40!black', 510: 'green!40!black',
              511: 'orange!40!black'}
    cq = CoincidenceQuery(data)
    cq.reconstructions = cq.data.get_node('/coincidences', 'recs_curved')
    cq.reconstructed = True
    cluster = data.root.coincidences._v_attrs['cluster']
    offsets = {s.number: [d.offset + s.gps_offset for d in s.detectors]
               for s in cluster.stations}
    front = CorsikaStationFront()
    front_r = np.arange(500)
    front_t = front.delay_at_r(front_r)

    for i, coincidence in enumerate(cq.coincidences.read_where('N > 6')):
        if i > 50:
            break
        coincidence_events = next(cq.all_events([coincidence]))
        reconstruction = cq._get_reconstruction(coincidence)
        core_x = coincidence['x']
        core_y = coincidence['y']

        plot = MultiPlot(2, 1)
        splot = plot.get_subplot_at(0, 0)
        rplot = plot.get_subplot_at(1, 0)

        splot.plot(front_r, front_t, mark=None)

        ref_extts = coincidence_events[0][1]['ext_timestamp']

        front_detect_r = []
        front_detect_t = []

        for station_number, event in coincidence_events:
            station = cluster.get_station(station_number)
            t = event_utils.relative_detector_arrival_times(
                event, ref_extts, offsets=offsets[station_number],
                detector_ids=DETECTOR_IDS)
            core_distances = []
            for j, detector in enumerate(station.detectors):
                x, y, z = detector.get_coordinates()
                core_distances.append(distance_between(core_x, core_y, x, y))
                # Correct this detector's arrival time for its altitude
                t[j] += z / c
            splot.scatter(core_distances, t, mark='o',
                          markstyle=colors[station_number])
            splot.scatter([np.mean(core_distances)], [np.nanmin(t)],
                          mark='*', markstyle=colors[station_number])
            rplot.scatter(
                [np.mean(core_distances)],
                [np.nanmin(t) - front.delay_at_r(np.mean(core_distances))],
                mark='*', markstyle=colors[station_number])

        splot.set_ylabel('Relative arrival time [ns]')
        rplot.set_ylabel('Residuals')
        rplot.set_axis_options(r'height=0.25\textwidth')
        splot.set_ylimits(-10, 150)
        plot.set_xlimits_for_all(None, 0, 400)
        plot.set_xlabel('Distance from core [m]')
        plot.show_xticklabels(1, 0)
        plot.show_yticklabels_for_all()
        plot.save_as_pdf('front_shape/distance_v_time_%d_core' %
                         coincidence['id'])
def analyse_reconstructions(data):
    cq = CoincidenceQuery(data)
    c_ids = data.root.coincidences.coincidences.read_where('s501',
                                                           field='id')
    c_recs = cq.reconstructions.read_coordinates(c_ids)

    s_recs = data.root.hisparc.cluster_amsterdam.station_501.reconstructions

    zenc = c_recs['zenith']
    azic = c_recs['azimuth']
    zens = s_recs.col('zenith')
    azis = s_recs.col('azimuth')

    high_zenith = (zenc > .2) & (zens > .2)

    for minn in [1, 2, 4, 8, 16]:
        # Select on minimum particle density; avoid shadowing the builtin
        # name `filter`.
        sel = s_recs.col('min_n') > minn

        length = len(azis.compress(high_zenith & sel))
        # Small random shifts to smear the discrete reconstructed angles.
        shifts501 = np.random.normal(0, .06, length)

        azicounts, x, y = np.histogram2d(
            azis.compress(high_zenith & sel) + shifts501,
            azic.compress(high_zenith & sel),
            bins=np.linspace(-np.pi, np.pi, 73))
        plota = Plot()
        plota.histogram2d(azicounts, np.degrees(x), np.degrees(y),
                          type='reverse_bw', bitmap=True)
        # plota.set_title('Reconstructed azimuths for events in coincidence (zenith gt .2 rad)')
        plota.set_xlabel(r'$\phi_{501}$ [\si{\degree}]')
        plota.set_ylabel(r'$\phi_{Science Park}$ [\si{\degree}]')
        plota.set_xticks([-180, -90, 0, 90, 180])
        plota.set_yticks([-180, -90, 0, 90, 180])
        plota.save_as_pdf('azimuth_501_spa_minn%d' % minn)

        length = sum(sel)
        shifts501 = np.random.normal(0, .04, length)

        zencounts, x, y = np.histogram2d(zens.compress(sel) + shifts501,
                                         zenc.compress(sel),
                                         bins=np.linspace(0, np.pi / 3., 41))
        plotz = Plot()
        plotz.histogram2d(zencounts, np.degrees(x), np.degrees(y),
                          type='reverse_bw', bitmap=True)
        # plotz.set_title('Reconstructed zeniths for station events in coincidence')
        plotz.set_xlabel(r'$\theta_{501}$ [\si{\degree}]')
        plotz.set_ylabel(r'$\theta_{Science Park}$ [\si{\degree}]')
        plotz.set_xticks([0, 15, 30, 45, 60])
        plotz.set_yticks([0, 15, 30, 45, 60])
        plotz.save_as_pdf('zenith_501_spa_minn%d' % minn)

        distances = angle_between(zens.compress(sel), azis.compress(sel),
                                  zenc.compress(sel), azic.compress(sel))
        counts, bins = np.histogram(distances, bins=np.linspace(0, np.pi, 91))
        plotd = Plot()
        plotd.histogram(counts, np.degrees(bins))
        sigma = np.degrees(np.percentile(distances[np.isfinite(distances)],
                                         67))
        plotd.set_label(r'67\%% within \SI{%.1f}{\degree}' % sigma)
        # plotd.set_title('Distance between reconstructed angles for station and cluster')
        plotd.set_xlabel(r'Angle between reconstructions [\si{\degree}]')
        plotd.set_ylabel('Counts')
        plotd.set_xlimits(min=0, max=90)
        plotd.set_ylimits(min=0)
        plotd.save_as_pdf('angle_between_501_spa_minn%d' % minn)
            # Normalize large traces to a common 100-unit scale
            if max(trace) <= 10:
                trace = array(trace)
            else:
                trace = array(trace) / float(max(trace)) * 100
            trace = insert(trace, 0, 0)
            trace = append(trace, 0)
            plot.plot(t, trace + (100 * j) + (500 * i), mark=None,
                      linestyle=COLORS[j])
        tick_labels.append(station_number)
        tick_positions.append(500 * i)

    plot.set_yticks(tick_positions)
    plot.set_ytick_labels(tick_labels)
    plot.set_xlimits(min=-250, max=1300)
    plot.set_xlabel(r't [\si{\nano\second}]')
    plot.set_ylabel('Signal strength')
    plot.save_as_pdf('traces_%d' % t0)


if __name__ == '__main__':
    map = make_map(CLUSTER)
    with tables.open_file(COIN_DATA, 'r') as data:
        cq = CoincidenceQuery(data)
        for coincidence in cq.coincidences:
            coincidence_events = next(cq.events_from_stations([coincidence],
                                                              STATIONS))
            display_coincidences(coincidence_events, coincidence['id'], map)
def determine_station_timing_offsets(d, data):
    # First determine detector offsets for each station
    offsets = {}
    for s in [501, 510]:
        station_group = data.get_node('/hisparc/cluster_amsterdam/station_%d'
                                      % s)
        offsets[s] = determine_detector_timing_offsets2(station_group.events)

    ref_station = 501
    ref_d_off = offsets[ref_station]
    station = 510
    d_off = offsets[station]

    cq = CoincidenceQuery(data, '/coincidences')
    dt = []
    stations = [ref_station, station]
    coincidences = cq.all(stations)
    c_events = cq.events_from_stations(coincidences, stations)
    for events in c_events:
        # Filter for possibility of same station twice in coincidence
        if len(events) != 2:
            continue
        if events[0][0] == ref_station:
            ref_event = events[0][1]
            event = events[1][1]
        else:
            ref_event = events[1][1]
            event = events[0][1]

        try:
            ref_t = min([ref_event['t%d' % (i + 1)] - ref_d_off[i]
                         for i in range(4)
                         if ref_event['t%d' % (i + 1)] not in ERR])
            t = min([event['t%d' % (i + 1)] - d_off[i]
                     for i in range(4)
                     if event['t%d' % (i + 1)] not in ERR])
        except ValueError:
            continue
        if ref_event['t_trigger'] in ERR or event['t_trigger'] in ERR:
            continue
        dt.append((int(event['ext_timestamp']) -
                   int(ref_event['ext_timestamp'])) -
                  (event['t_trigger'] - ref_event['t_trigger']) +
                  (t - ref_t))

    bins = linspace(-150, 150, 200)
    y, bins = histogram(dt, bins=bins)
    x = (bins[:-1] + bins[1:]) / 2
    try:
        popt, pcov = curve_fit(gauss, x, y, p0=(len(dt), 0., 50))
        station_offset = popt[1]
        print 'Station 501 - 510: %f (%f)' % (popt[1], popt[2])
    except RuntimeError:
        station_offset = 0.
    offsets[station] = [detector_offset + station_offset
                        for detector_offset in offsets[station]]

    graph = Plot()
    graph.histogram(y, bins)
    graph.set_title('Time difference between stations 501 and 510')
    graph.set_label('%s' % d.replace('_', ' '))
    graph.set_xlimits(-150, 150)
    graph.set_ylimits(min=0)
    graph.set_xlabel(r'$\Delta t$')
    graph.set_ylabel('Counts')
    graph.save_as_pdf('%s' % d)
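# The fit above relies on a `gauss` helper defined elsewhere; a definition
# consistent with the p0=(N, mu, sigma) ordering passed to curve_fit (and
# with sapphire.utils.gauss) would be:
from numpy import exp

def gauss(x, N, mu, sigma):
    """Gaussian with amplitude N, mean mu, and width sigma."""
    return N * exp(-(x - mu) ** 2 / (2. * sigma ** 2))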