def get_singles_from_esd_in_range(station, start, end):
    """Yield per-day singles arrays from the ESD for a station.

    :param station: Station object
    :param start: start of datetime range
    :param end: end of datetime range
    :yield: singles data array for each day that has data

    """
    for t0, t1 in single_day_ranges(start, end):
        # Skip days for which no singles data is registered.
        try:
            Summary.objects.get(station=station, date=t0, num_singles__isnull=False)
        except Summary.DoesNotExist:
            continue
        try:
            with tables.open_file(esd.get_esd_data_path(t0)) as data_file:
                station_node = esd.get_station_node(data_file, station)
                if (t1 - t0).days != 1:
                    # Partial day: select rows by GPS timestamp. ts0/ts1 look
                    # unused, but PyTables resolves them from the local scope
                    # inside the condition string (hence the noqa).
                    ts0 = datetime_to_gps(t0)  # noqa: F841
                    ts1 = datetime_to_gps(t1)  # noqa: F841
                    singles = station_node.singles.read_where('(ts0 <= timestamp) & (timestamp < ts1)')
                else:
                    # Full day: read the entire table.
                    singles = station_node.singles.read()
        except (IOError, tables.NoSuchNodeError):
            continue
        yield singles
def get_coincidences_from_esd_in_range(start, end, stations, n):
    """Get coincidences from ESD in time range.

    :param start: start of datetime range
    :param end: end of datetime range
    :param stations: station numbers; if empty/None, use all stations
    :param n: minimum number of events in coincidence
    :yield: id, station number and event

    """
    # Running coincidence id across all days: enumerate below starts at the
    # previous id + 1, so the first coincidence gets id 0.
    # (Renamed from `id`, which shadowed the builtin.)
    coincidence_id = -1
    for t0, t1 in single_day_ranges(start, end):
        # Skip days that have no network summary at all.
        try:
            NetworkSummary.objects.get(date=t0)
        except NetworkSummary.DoesNotExist:
            continue
        with tables.open_file(esd.get_esd_data_path(t0)) as f:
            try:
                cq = CoincidenceQuery(f)
                ts0 = datetime_to_gps(t0)
                ts1 = datetime_to_gps(t1)
                if stations:
                    coincidences = cq.at_least(stations, n, start=ts0, stop=ts1)
                    events = cq.events_from_stations(coincidences, stations, n)
                else:
                    coincidences = cq.timerange(start=ts0, stop=ts1)
                    events = cq.all_events(coincidences, n)
                for coincidence_id, coin in enumerate(events, coincidence_id + 1):
                    for number, event in coin:
                        yield coincidence_id, number, event
            except (IOError, tables.NoSuchNodeError):
                continue
def get_events(analyzed_coincidence):
    """Get events that belong to this coincidence"""
    events = []
    for event in analyzed_coincidence.coincidence.events.all():
        # Latest configuration for this station up to the event date,
        # ignoring configurations with an unset (0, 0) GPS position.
        configs = (Configuration.objects
                   .filter(summary__station=event.station,
                           summary__date__lte=event.date)
                   .exclude(gps_latitude=0, gps_longitude=0))
        try:
            config = configs.latest()
        except Configuration.DoesNotExist:
            continue
        event_dict = {
            'timestamp': datetime_to_gps(datetime.combine(event.date, event.time)),
            'nanoseconds': event.nanoseconds,
            'number': event.station.number,
            'latitude': config.gps_latitude,
            'longitude': config.gps_longitude,
            'altitude': config.gps_altitude,
            'status': 'on',
            'detectors': len(event.traces),
            'traces': event.traces,
            'pulseheights': event.pulseheights,
            'integrals': event.integrals,
            'mips': [ph / 200. if ph > 0 else ph for ph in event.pulseheights],
        }
        events.append(event_dict)
    return events
def get_events(analyzed_coincidence):
    """Get events that belong to this coincidence

    For each event of the coincidence, look up the most recent station
    configuration (up to the event date) to attach a GPS position;
    events whose station has no usable configuration are skipped.

    :param analyzed_coincidence: object with a ``.coincidence`` whose
        ``.events`` are Django model instances.
    :return: list of dicts with event data for display.

    """
    events = []
    for event in analyzed_coincidence.coincidence.events.all():
        try:
            # Exclude (0, 0) positions: these mark an unset GPS location.
            config = (Configuration.objects.filter(
                summary__station=event.station,
                summary__date__lte=event.date).exclude(
                gps_latitude=0, gps_longitude=0).latest())
        except Configuration.DoesNotExist:
            # No usable configuration for this station; skip the event.
            continue
        timestamp = datetime_to_gps(datetime.combine(event.date, event.time))
        event_dict = dict(
            timestamp=timestamp,
            nanoseconds=event.nanoseconds,
            number=event.station.number,
            latitude=config.gps_latitude,
            longitude=config.gps_longitude,
            altitude=config.gps_altitude,
            status='on',
            detectors=len(event.traces),
            traces=event.traces,
            pulseheights=event.pulseheights,
            integrals=event.integrals,
            # Rough MIP estimate: pulseheight / 200 ADC — presumably a
            # nominal MIP pulseheight; confirm against detector docs.
            mips=[ph / 200. if ph > 0 else ph for ph in event.pulseheights])
        events.append(event_dict)
    return events
def __init__(self, dt=None):
    """Initialize a fake event.

    :param dt: datetime of the event; defaults to the current time
        *at call time*.

    Bug fix: the original default ``dt=datetime.datetime.now()`` was
    evaluated once at import time, so every default-constructed instance
    shared the same timestamp. Using a ``None`` sentinel restores the
    intended "now" semantics.
    """
    if dt is None:
        dt = datetime.datetime.now()
    self.datetime = dt
    self.timestamp = datetime_to_gps(dt)
    # Random sub-second part in [0, 1e9) nanoseconds.
    self.nanoseconds = int(random.uniform(0, 1e9))
    self.ext_timestamp = int(self.timestamp) * int(1e9) + self.nanoseconds
    self.trigger_pattern = 1
    # Dummy traces of 10 samples each.
    self.trace_ch1 = np.arange(10)
    self.trace_ch2 = np.arange(10)
def get_lightning_in_range(lightning_type, start, end):
    """Yield KNMI lightning discharges of a given type in a time range.

    :param lightning_type: lighting of specific type
    :param start: start of datetime range
    :param end: end of datetime range
    :yield: discharge events

    """
    for day_start, day_end in single_day_ranges(start, end):
        try:
            with tables.open_file(knmi_lightning.data_path(day_start)) as data_file:
                gps_start = datetime_to_gps(day_start)
                gps_end = datetime_to_gps(day_end)
                discharges = knmi_lightning.discharges(data_file, gps_start, gps_end,
                                                       type=lightning_type)
                for discharge in discharges:
                    yield discharge
        except (IOError, tables.NoSuchNodeError):
            # Missing data file or node for this day: skip it.
            continue
def data_json(coincidence, events):
    """Construct json with data for jSparc to display"""
    coin = coincidence.coincidence
    timestamp = datetime_to_gps(datetime.combine(coin.date, coin.time))
    data = {'pk': coincidence.pk,
            'timestamp': timestamp,
            'nanoseconds': coin.nanoseconds,
            'events': events}
    response = HttpResponse(json.dumps(data), content_type='application/json')
    # Allow cross-origin requests from the jSparc frontend.
    response['Access-Control-Allow-Origin'] = '*'
    return response
def data_json(coincidence, events):
    """Construct json with data for jSparc to display

    :param coincidence: analyzed coincidence object wrapping a
        ``.coincidence`` with date, time and nanoseconds.
    :param events: list of event dicts to include in the payload.
    :return: HttpResponse with a JSON body and CORS header set.

    """
    timestamp = datetime_to_gps(
        datetime.combine(coincidence.coincidence.date, coincidence.coincidence.time))
    data = dict(pk=coincidence.pk,
                timestamp=timestamp,
                nanoseconds=coincidence.coincidence.nanoseconds,
                events=events)
    response = HttpResponse(json.dumps(data), content_type='application/json')
    # Allow cross-origin requests so the jSparc frontend can fetch the data.
    response['Access-Control-Allow-Origin'] = '*'
    return response
def get_events_from_esd_in_range(station, start, end):
    """Get events from ESD in time range.

    :param station: Station object
    :param start: start of datetime range
    :param end: end of datetime range
    :yield: (events, reconstructions) arrays for each day that has data

    """
    for t0, t1 in single_day_ranges(start, end):
        # Skip days for which no processed events are registered.
        try:
            Summary.objects.get(station=station, date=t0, num_events__isnull=False)
        except Summary.DoesNotExist:
            continue
        filepath = esd.get_esd_data_path(t0)
        try:
            with tables.open_file(filepath) as f:
                try:
                    station_node = esd.get_station_node(f, station)
                    events_table = station_node.events
                except tables.NoSuchNodeError:
                    # No data for this station in this day's file.
                    continue
                try:
                    reconstructions_table = station_node.reconstructions
                except tables.NoSuchNodeError:
                    # No reconstructions available: fall back to a stand-in
                    # table (presumably yields placeholder rows — defined
                    # elsewhere in this file).
                    reconstructions_table = FakeReconstructionsTable()
                if (t1 - t0).days == 1:
                    # Full day: read all events, then pick the matching
                    # reconstruction rows by event id.
                    events = events_table.read()
                    reconstructions = reconstructions_table[events['event_id']]
                else:
                    # Partial day: select rows by GPS timestamp. ts0/ts1 look
                    # unused, but PyTables resolves them from the local scope
                    # inside the condition string (hence the noqa).
                    ts0 = datetime_to_gps(t0)  # noqa: F841
                    ts1 = datetime_to_gps(t1)  # noqa: F841
                    event_ids = events_table.get_where_list('(ts0 <= timestamp) & (timestamp < ts1)')
                    events = events_table.read_coordinates(event_ids)
                    reconstructions = reconstructions_table[event_ids]
        except IOError:
            continue
        else:
            yield events, reconstructions
import datetime import json import re import os import tables from sapphire import (Network, Station, download_data, download_coincidences, datetime_to_gps) from sapphire.utils import pbar EVENT_DISPLAY_DIR = os.path.dirname(__file__) STATIONS = Network().station_numbers() START = datetime.datetime(2016, 2, 1, 11, 0) END = datetime.datetime(2016, 2, 1, 11, 20) LIMITS = [datetime_to_gps(START) * int(1e9), datetime_to_gps(END) * int(1e9)] re_station_number = re.compile(".*/station_([0-9]+)$") def download_coincidences_data(data): """Download coincidence data for each subcluster and for all stations""" for subcluster in Network().subclusters(): group = ('/coincidences_%s' % subcluster['name'].lower().replace(' ', '_')) if group in data: continue stations = Network().station_numbers(subcluster=subcluster['number']) if len(stations) < 2: continue
import datetime import json import re import os import tables from sapphire import (Network, Station, download_data, download_coincidences, datetime_to_gps) from sapphire.utils import pbar STATIONS = Network().station_numbers() START = datetime.datetime(2016, 2, 1, 11, 0) END = datetime.datetime(2016, 2, 1, 11, 20) LIMITS = [datetime_to_gps(START) * int(1e9), datetime_to_gps(END) * int(1e9)] re_station_number = re.compile(".*/station_([0-9]+)$") def download_coincidences_data(data): for subcluster in Network().subclusters(): group = ('/coincidences_%s' % subcluster['name'].lower().replace(' ', '_')) if group in data: continue stations = Network().station_numbers(subcluster=subcluster['number']) if len(stations) < 2: continue download_coincidences(data, group=group, stations=stations,
"""Check if the offsets remain consistent if calculated via other stations""" from datetime import datetime from functools import partial from itertools import combinations, permutations from numpy import array, histogram, histogram2d, isnan, nanmean from artist import Plot from sapphire import Station, datetime_to_gps START = datetime_to_gps(datetime(2011, 6, 1)) STOP = datetime_to_gps(datetime(2016, 2, 1)) STEP = int(86400 * 1) STATIONS = [501, 502, 503, 504, 505, 506, 508, 509, 510, 511] def get_offsets(): """Setup nested dictionary with station timing offsets The result looks like this: {reference1: {station1: func, station2: func}, reference2: {...}, ...} Get the offset (and error) between 501-502 on timestamp 1425168000 using: offsets[501][502](1425168000) # (6.1, 0.72) """