Example #1
def main(ts, ns):
    station = Station(501)
    traces = station.event_trace(ts, ns, True)

    dr = DataReduction()
    reduced_traces, o = dr.reduce_traces(array(traces).T, return_offset=True)
    reduced_traces = reduced_traces.T
    plot = Plot()

    t = arange(len(traces[0])) * 2.5
    for i, trace in enumerate(traces):
        plot.plot(t, trace, linestyle='%s, thin' % COLORS[i], mark=None)
    plot.draw_vertical_line(o * 2.5, 'gray')
    plot.draw_vertical_line((o + len(reduced_traces[0])) * 2.5, 'gray')

    plot.set_axis_options('line join=round')
    plot.set_xlabel(r'Event time [\si{\ns}]')
    plot.set_ylabel(r'Signal strength [ADC counts]')
    plot.set_xlimits(t[0], t[-1])

    plot.save_as_pdf('raw_traces_%d_%d' % (ts, ns))

    t = arange(o, o + len(reduced_traces[0])) * 2.5
    for i, trace in enumerate(reduced_traces):
        plot.plot(t, trace, linestyle='%s, thin' % COLORS[i], mark=None)
    plot.set_axis_options('line join=round')
    plot.set_xlabel(r'Event time [\si{\ns}]')
    plot.set_ylabel(r'Signal strength [ADC counts]')
    plot.set_xlimits(t[0], t[-1])

    plot.save_as_pdf('reduced_traces_%d_%d' % (ts, ns))
Example #2
def determine_dt_for_pair(stations):
    """Determine and store dt for a pair of stations

    :param ref_station: reference station number to use as refernece
    :param station: station number to determine the dt for

    """
    path = DATA_PATH + 'dt_ref%d_%d.h5' % stations
    if os.path.exists(path):
        print 'dt data already exists for %d-%d' % stations
        return

    ref_station, station = stations
    try:
        with tables.open_file(PAIR_DATAPATH % tuple(sorted(stations)),
                              'r') as data:
            cq = CoincidenceQuery(data)
            ref_detector_offsets = Station(ref_station).detector_timing_offset
            detector_offsets = Station(station).detector_timing_offset
            for dt0, dt1 in monthrange((2004, 1), (2015, 9)):
                coins = cq.all(stations, start=dt0, stop=dt1, iterator=True)
                coin_events = cq.events_from_stations(coins, stations)
                ets, dt = determine_time_differences(coin_events, ref_station,
                                                     station,
                                                     ref_detector_offsets,
                                                     detector_offsets)
                store_dt(ref_station, station, ets, dt)
    except Exception as e:
        print 'Failed for %d, %d' % stations
        print e
        return
Example #3
def get_zenith_distribution():
    station = Station(501)
    counts = []
    angles = []
    for j in range(1, 13):  # months of 2015
        for i in range(1, 29):  # first 28 days of each month
            try:
                zenith_hist = station.zenith(2015, j, i)
            except:
                continue
            total_counts = zenith_hist['counts'].sum()
            if total_counts:
                angles.extend(zenith_hist['angle'])
                # Normalized
                counts.extend(zenith_hist['counts'] / total_counts)
    angles = array(angles).astype('float64')
    counts = array(counts)

    angles += 1.5  # bin edges to bin centers
    angles_bins = arange(0, 91, 3)

    mean_counts = binned_statistic(angles,
                                   counts,
                                   statistic='mean',
                                   bins=angles_bins)[0]
    std_counts = binned_statistic(angles,
                                  counts,
                                  statistic=std,
                                  bins=angles_bins)[0]
    angle_centers = (angles_bins[1:] + angles_bins[:-1]) / 2.

    return angle_centers, mean_counts, std_counts
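
A minimal plotting sketch for the values returned above, assuming matplotlib (which other examples here also use):

import matplotlib.pyplot as plt

# Binned zenith distribution, with the spread per bin as error bars.
angle_centers, mean_counts, std_counts = get_zenith_distribution()
plt.errorbar(angle_centers, mean_counts, yerr=std_counts, fmt='o')
plt.xlabel('Zenith angle [degrees]')
plt.ylabel('Normalized counts')
plt.show()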
Example #4
def coincidences_stations(station_numbers,
                          group_name='Specific stations',
                          date=None):
    if date is None:
        date = datetime.date(2016, 2, 1)
    stations_with_data = []
    cluster_groups = []
    for station_id in station_numbers:
        try:
            info = Station(station_id)
        except:
            continue
        if info.has_data(year=date.year, month=date.month, day=date.day):
            cluster_groups.append(info.cluster().lower())
            stations_with_data.append(station_id)

    if len(stations_with_data) <= 1:
        return

    filepath = os.path.join(ESD_PATH, date.strftime('%Y/%-m/%Y_%-m_%-d.h5'))
    with tables.open_file(filepath, 'r') as data:
        coinc, event_tables = get_event_tables(data, cluster_groups,
                                               stations_with_data)
        windows, counts, n_events = find_n_coincidences(coinc, event_tables)
        n_stations = len(stations_with_data)
        plot_coinc_window(windows, counts, group_name, n_events, n_stations,
                          date)
    return windows, counts
Example #5
def get_all_configs():
    stats = {}
    for station in STATIONS:
        stats[station] = {}
        s = Station(station, force_stale=True)
        for field in FIELDS:
            timestamps = getattr(s, field)['timestamp']
            in_range = (timestamps >= START_TS) & (timestamps < END_TS)
            stats[station][field] = getattr(s, field).compress(in_range)
    return stats
Example #6
def get_latlon_coordinates(station_number):
    """Retrieve the GPS coordinates for a specific station

    An exception is raised if the station does not have valid coordinates.

    """
    station = Station(station_number)
    gps_location = station.gps_location(START)
    if gps_location['latitude'] == 0.:
        raise Exception
    return gps_location['latitude'], gps_location['longitude']
Example #8
def get_active_stations():
    """ return a list of all *active* station numbers in the HiSPAC Network"""
    stations = Network(force_fresh=True).station_numbers()
    for sn in stations:
        s = Station(sn)
        if s.info['active']:
            yield sn
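
A one-line usage sketch; the generator has to be consumed to trigger the API lookups:

active_station_numbers = list(get_active_stations())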
Example #9
def get_data(station_numbers):
    """Read the eventtime csv files for the given station numbers"""

    return {
        number: Station(number, force_stale=True).event_time()
        for number in station_numbers
    }
Example #10
def get_station_locations(country=None,
                          cluster=None,
                          subcluster=None,
                          station=None,
                          stations=None):
    latitudes = []
    longitudes = []

    if station is not None:
        station_numbers = [station]
    elif stations is not None:
        station_numbers = stations
    else:
        station_numbers = NETWORK.station_numbers(country=country,
                                                  cluster=cluster,
                                                  subcluster=subcluster)

    for station_number in station_numbers:
        location = Station(station_number, force_stale=True).gps_location()
        if location['latitude'] == 0 or location['longitude'] == 0:
            continue
        latitudes.append(location['latitude'])
        longitudes.append(location['longitude'])

    return latitudes, longitudes
Example #11
def find_overlaps():
    with tables.open_file(DATA, 'r') as data:
        events = data.root.s99.events
        s = Station(99)
        for i in range(events.nrows - 1):
            # Only consider consecutive events less than 10 us apart.
            dt = events[i + 1]['ext_timestamp'] - events[i]['ext_timestamp']
            if dt > 1e4:
                continue
            t1 = s.event_trace(events[i]['timestamp'],
                               events[i]['nanoseconds'], True)
            t2 = s.event_trace(events[i + 1]['timestamp'],
                               events[i + 1]['nanoseconds'], True)
            overlap = longest_overlap(t1[0], t2[0])
            if overlap is not None:
                print i, len(overlap) * 2.5, 'ns'
            else:
                print i, 'No overlap'
Example #12
def get_station_end_timestamp(station, data):
    """Read all eventtime data into a dictionary"""

    if Station(station, force_stale=True).info['active']:
        return None
    else:
        # End of the last hour with data
        return data[station]['timestamp'][-1] + 3600
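
A short usage sketch, building the expected data mapping inline (station 501 is only a placeholder):

data = {501: Station(501, force_stale=True).event_time()}
print get_station_end_timestamp(501, data)  # None while the station is active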
Example #13
def get_traces(station_number, ts, ns, raw=False):
    """Retrieve the traces

    :param station_number: number of the station to which the event belongs.
    :param ts: timestamp of the event in seconds.
    :param ns: subsecond part of the extended timestamp in nanoseconds.
    :param raw: whether to return the raw, unprocessed traces.
    :return: the traces.

    """
    traces = Station(station_number).event_trace(ts, ns, raw)
    return traces
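
A minimal usage sketch; the timestamp values below are placeholders, not a real event:

ts, ns = 1461542400, 123456789  # placeholder timestamp and nanoseconds
traces = get_traces(501, ts, ns, raw=True)
print len(traces), len(traces[0])  # number of channels, samples per trace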
Example #14
def get_weather_locations():
    latitudes = []
    longitudes = []

    station_numbers = [s['number'] for s in NETWORK.stations_with_weather()]

    for station_number in station_numbers:
        location = Station(station_number, force_stale=True).gps_location()
        latitudes.append(location['latitude'])
        longitudes.append(location['longitude'])

    return latitudes, longitudes
Example #15
def plot_station_offset_matrix():
    n = len(STATIONS)
    stations = {
        station: Station(station, force_stale=True)
        for station in STATIONS
    }

    for type in ['offset', 'error']:
        plot = MultiPlot(n, n, width=r'.08\textwidth', height=r'.08\textwidth')

        for i, station in enumerate(STATIONS):
            for j, ref_station in enumerate(STATIONS):
                splot = plot.get_subplot_at(i, j)
                if i == n - 1:
                    splot.show_xticklabels()
                if station == ref_station:
                    splot.set_empty()
                    splot.set_label(r'%d' % station, location='center')
                    continue
                offsets = stations[station].station_timing_offsets(ref_station)
                bins = list(offsets['timestamp'])
                bins += [bins[-1] + 86400]
                splot.histogram(offsets[type], bins, linestyle='very thin')
                splot.set_axis_options('line join=round')
                plot_cuts_vertically(splot, stations, station, ref_station)

        # Even rows/columns
        for row in range(0, n, 2):
            plot.set_yticklabels_position(row, n - 1, 'right')
            plot.set_xticklabels_position(n - 1, row, 'bottom')
            plot.show_yticklabels(row, n - 1)
            #plot.show_xticklabels(n - 1, row)

        for row in range(1, n, 2):
            plot.set_yticklabels_position(row, 0, 'left')
            plot.set_xticklabels_position(0, row, 'top')
            plot.show_yticklabels(row, 0)
            #plot.show_xticklabels(0, row)

        if type == 'offset':
            plot.set_ylimits_for_all(None, min=-70, max=70)
        else:
            plot.set_ylimits_for_all(None, min=0, max=10)
        plot.set_xlimits_for_all(None,
                                 min=datetime_to_gps(date(2011, 1, 1)),
                                 max=datetime_to_gps(date.today()))
        plot.set_xticks_for_all(None, YEARS_TICKS)
        # plot.set_xtick_labels_for_all(YEARS_LABELS)

        plot.set_xlabel(r'Date')
        plot.set_ylabel(r'Station offset [\si{\ns}]')

        plot.save_as_pdf('station_offsets_%s' % type)
Example #16
def main(station_number=501, date=datetime.date(2016, 2, 1)):
    filepath = os.path.join(ESD_PATH, date.strftime('%Y/%-m/%Y_%-m_%-d.h5'))
    with tables.open_file(filepath, 'r') as data:
        station = Station(station_number)
        events = data.get_node(
            '/hisparc/cluster_%s/station_%d' %
            (station.cluster().lower(), station_number), 'events')
        ext_timestamps = events.col('ext_timestamp')
    ext_timestamps.sort()
    difs = ext_timestamps[1:] - ext_timestamps[:-1]

    print('Minimum: %d. Maximum: %d. n(diff < 100 us): %d' %
          (min(difs), max(difs), len(numpy.where(difs < 1e5)[0])))

    bins = numpy.logspace(2, 11)
    plot = Plot('semilogx')
    plot.histogram(*numpy.histogram(difs, bins=bins))
    plot.set_xlabel(r'Time between subsequent triggers [\si{\ns}]')
    plot.set_ylabel('Occurrence')
    plot.set_ylimits(min=0)
    plot.set_xlimits(min(bins), max(bins))
    plot.save_as_pdf('time_between_triggers_%d' % station_number)
Example #17
def generate_json():
    """Get the API info data for each station"""

    station_numbers = Network().station_numbers()
    station_info = {}

    for number in pbar(station_numbers):
        try:
            station = Station(number)
            station_info[number] = station.info
        except:
            continue

    return station_info
Example #18
def reconstruct_events(data):

    # Station 510
    cluster = get_cluster()
    station = cluster.get_station(STATIONS[1])
    station_group = '/hisparc/cluster_amsterdam/station_%d' % station.number
    rec_510 = ReconstructESDEvents(data,
                                   station_group,
                                   station,
                                   overwrite=True,
                                   progress=True)
    rec_510.prepare_output()
    rec_510.offsets = Station(station.number)
    rec_510.reconstruct_directions()
    rec_510.store_reconstructions()

    rec_510.theta = array(rec_510.theta)
    rec_510.phi = array(rec_510.phi)

    # Station 507
    for order in itertools.permutations(range(4), 4):
        cluster = get_cluster()
        station = cluster.get_station(STATIONS[0])
        station_group = '/hisparc/cluster_amsterdam/station_%d' % station.number
        station._detectors = [station.detectors[id] for id in order]

        rec_507 = ReconstructESDEvents(data,
                                       station_group,
                                       station,
                                       overwrite=True,
                                       progress=True,
                                       destination=REC_PATH % order)
        rec_507.prepare_output()
        rec_507.offsets = Station(station.number)
        rec_507.reconstruct_directions(
            detector_ids=[order[0], order[1], order[3]])
        rec_507.store_reconstructions()
Example #19
def plot_traces(coincidence_events):
    plot = Plot()
    t0 = int(coincidence_events[0][1]['ext_timestamp'])
    tick_labels = []
    tick_positions = []

    for i, station_event in enumerate(coincidence_events):
        station_number, event = station_event
        station = Station(station_number)
        traces = station.event_trace(event['timestamp'], event['nanoseconds'])
        start_trace = (int(event['ext_timestamp']) - t0) - event['t_trigger']
        t = arange(start_trace, start_trace + (2.5 * len(traces[0])), 2.5)
        t = insert(t, 0, -20000)
        t = append(t, 20000)
        # trace = array(traces).sum(0)
        for j, trace in enumerate(traces):
            if max(trace) <= 10:
                trace = array(trace)
            else:
                trace = array(trace) / float(max(trace)) * 100
            trace = insert(trace, 0, 0)
            trace = append(trace, 0)
            plot.plot(t,
                      trace + (100 * j) + (500 * i),
                      mark=None,
                      linestyle=COLORS[j])
        tick_labels.append(station_number)
        tick_positions.append(500 * i)

    plot.set_yticks(tick_positions)
    plot.set_ytick_labels(tick_labels)
    plot.set_xlimits(min=-250, max=1300)
    plot.set_xlabel(r't [\si{\ns}]')
    plot.set_ylabel('Signal strength')

    plot.save_as_pdf('traces_%d' % t0)
Example #20
def plot_traces(event, station):
    s = Station(station)
    plot = Plot()
    traces = s.event_trace(event['timestamp'], event['nanoseconds'], raw=True)
    for j, trace in enumerate(traces):
        t = arange(0, (2.5 * len(traces[0])), 2.5)
        plot.plot(t, trace, mark=None, linestyle=COLORS[j])
    n_peaks = event['n_peaks']
    plot.set_title('%d - %d' % (station, event['ext_timestamp']))
    plot.set_label('%d ' * 4 % tuple(n_peak for n_peak in n_peaks))
    plot.set_xlabel(r't [\si{\ns}]')
    plot.set_ylabel('Signal strength')
    plot.set_xlimits(min=0, max=2.5 * len(traces[0]))
    plot.set_ylimits(min=150, max=500)  # max=2 ** 12
    plot.draw_horizontal_line(253, linestyle='gray')
    plot.draw_horizontal_line(323, linestyle='gray')
    plot.draw_horizontal_line(event['baseline'][0] + 20, linestyle='thin,gray')
    plot.draw_horizontal_line(event['baseline'][1] + 20,
                              linestyle='thin,red!40!black')
    plot.draw_horizontal_line(event['baseline'][2] + 20,
                              linestyle='thin,green!40!black')
    plot.draw_horizontal_line(event['baseline'][3] + 20,
                              linestyle='thin,blue!40!black')
    plot.save_as_pdf('traces_%d_%d' % (station, event['ext_timestamp']))
Example #21
def get_histogram_for_station_on_date(station_id, date, did):
    """Return a histogram of the spectrum of a station on a date.

    :return n, bins: histogram counts and bins, as obtained using
       ``numpy.histogram``.

    """
    data = Station(station_id).pulse_integral(date.year, date.month, date.day)

    bins = list(data['pulseintegral'])
    bins.append(bins[-1] + (bins[-1] - bins[-2]))
    bins = np.array(bins)

    n = data['pi%d' % (did + 1)]

    return n, bins
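
A short sketch of how the returned histogram can be drawn, mirroring Example #24 (matplotlib assumed; station, date and detector index are placeholders):

import datetime
import matplotlib.pyplot as plt

n, bins = get_histogram_for_station_on_date(501, datetime.date(2016, 2, 1), 0)
plt.plot((bins[:-1] + bins[1:]) / 2., n)
plt.yscale('log')
plt.show()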
Example #22
def normalize_event_rates(data, station_numbers):
    """Normalize event rates using the number of detectors

    Number per hour is divided by the expected number of events per hour for a
    station with a certain number of detectors.

    After scaling, a value of 1.3 means on average 30% more events per hour
    than expected for a station with that number of detectors.

    """
    scaled_data = data.copy()
    for i, s in enumerate(station_numbers):
        n = Station(s).n_detectors()
        if n == 2:
            scaled_data[i] /= 1200.
        elif n == 4:
            scaled_data[i] /= 2500.
    scaled_data = np.where(scaled_data > 2., 2., scaled_data)

    return scaled_data
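
A small worked sketch of the scaling; the station numbers are placeholders and the comment assumes the first has two detectors and the second four:

import numpy as np

rates = np.array([[1800.], [3250.]])  # events per hour, one row per station
scaled = normalize_event_rates(rates, [102, 501])
# Two detectors: 1800 / 1200 = 1.5; four detectors: 3250 / 2500 = 1.3
print scaled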
Example #23
def get_offsets():
    """Setup nested dictionary with station timing offsets

    The result looks like this:

        {reference1: {station1: func, station2: func}, reference2: {...}, ...}

    Get the offset (and error) between 501-502 on timestamp 1425168000 using:

        offsets[501][502](1425168000)
        # (6.1, 0.72)

    """
    offsets = {
        ref: {
            s: partial(Station(s, force_stale=True).station_timing_offset,
                       reference_station=ref)
            for s in STATIONS if not s == ref
        }
        for ref in STATIONS
    }
    return offsets
Example #24
def main():
    """Demo the MPV finder with actual data."""

    today = datetime.date.today()
    yesterday = today - datetime.timedelta(days=1)
    station_ids = get_station_ids_with_data(yesterday)

    for station in station_ids:
        plt.figure()
        for did in range(Station(station).n_detectors()):
            n, bins = get_histogram_for_station_on_date(station, yesterday,
                                                        did)
            find_mpv = FindMostProbableValueInSpectrum(n, bins)
            mpv, is_fitted = find_mpv.find_mpv()

            plt.plot((bins[:-1] + bins[1:]) / 2., n, c=COLORS[did])
            lines = ['dotted', 'solid']
            plt.axvline(mpv + did * (bins[1] - bins[0]) / 20.,
                        c=COLORS[did], ls=lines[is_fitted])
            plt.title(station)
            plt.xlim(0, bins[len(bins) // 2])
            plt.yscale('log')
Example #25
def get_latlon_coordinates(station_number):
    station = Station(station_number)
    gps_location = station.gps_location(START)
    if gps_location['latitude'] == 0.:
        raise Exception
    return gps_location['latitude'], gps_location['longitude']
Example #26
def get_offsets_dict():
    offsets = {
        s: Station(s).detector_timing_offset
        for s in Network().station_numbers()
    }
    return offsets
Example #27
def get_eventtime_data(station):
    """Get eventtime data"""
    return Station(station, force_stale=True).event_time()
Example #28
from pprint import PrettyPrinter
from random import sample

from sapphire import HiSPARCStations, Station
from sapphire.analysis.direction_reconstruction import CoincidenceDirectionReconstruction

# SN = [501, 502, 503, 504, 505, 506, 507, 508, 509, 510, 511]
SN = [501, 502, 503, 505, 506, 508, 510, 511]
cluster = HiSPARCStations(SN, force_stale=True)
crec = CoincidenceDirectionReconstruction(cluster)
offsets = {sn: Station(sn, force_stale=True) for sn in SN}

ref_sn = 501
station_number = 502

ts0 = 1461542400

print crec.determine_best_offsets(SN, ts0, offsets)
print offsets[station_number].station_timing_offset(ref_sn, ts0)
print offsets[station_number].detector_timing_offset(ts0)

offs = crec.determine_best_offsets(sample(SN, len(SN)), ts0, offsets)
reference = offs[ref_sn][1]
offs = {sn: round(off[1] - reference, 1) for sn, off in offs.items()}
pp = PrettyPrinter()
pp.pprint(offs)
Example #29
def get_station_locations(station):
    """All GPS locations for a single station"""

    locations = Station(station, force_stale=True).gps_locations
    return locations['latitude'], locations['longitude']
Example #30
File: main.py Project: 153957/topaz
        return

    if len(gps_locations) < 2:
        return

    for p1, p2 in itertools.combinations(gps_locations, 2):
        d = distance(p1, p2)
        if d > .25:
            print station.station, d
            break


def distance(s1, s2):
    """Return the great-circle distance between two GPS locations in km"""
    R = 6371  # radius of the Earth in km
    d_lat = numpy.radians(s2['latitude'] - s1['latitude'])
    d_lon = numpy.radians(s2['longitude'] - s1['longitude'])
    a = (numpy.sin(d_lat / 2)**2 + numpy.cos(numpy.radians(s1['latitude'])) *
         numpy.cos(numpy.radians(s2['latitude'])) * numpy.sin(d_lon / 2)**2)
    c = 2 * numpy.arctan2(numpy.sqrt(a), numpy.sqrt(1 - a))
    return R * c


if __name__ == "__main__":
    for sn in Network().station_numbers():
        station = Station(sn)
        detect_problems(station)
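
A quick usage sketch of the distance helper above, with approximate hand-picked coordinates for Amsterdam and Utrecht:

amsterdam = {'latitude': 52.37, 'longitude': 4.90}
utrecht = {'latitude': 52.09, 'longitude': 5.12}
print distance(amsterdam, utrecht), 'km'  # roughly 35 km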
Example #31
import datetime
import os

import tables

from numpy import array, histogram, histogram2d, linspace

from artist import MultiPlot

from sapphire import Station
from sapphire.analysis.process_traces import MeanFilter, TraceObservables
from sapphire.publicdb import download_data
from sapphire.utils import pbar

DATA_PATH = '/Users/arne/Datastore/intergral_filter/data.h5'
STATION = 505
GROUP = '/s%d' % STATION
API_STATION = Station(STATION)
COLORS = ['black', 'red', 'green', 'blue']


def get_data():
    """Ensure data is downloaded and available"""

    if not os.path.exists(DATA_PATH):
        with tables.open_file(DATA_PATH, 'w') as data:
            start = datetime.datetime(2014, 6, 10)
            end = datetime.datetime(2014, 6, 11)
            download_data(data, GROUP, STATION, start, end, get_blobs=True)


def get_traces_from_api(station, event):
    return retrieve_traces(station, event['timestamp'], event['nanoseconds'])
Example #32
    plot.draw_horizontal_line(baseline, linestyle='densely dashed, gray')
    plot.draw_horizontal_line(baseline + BASELINE_THRESHOLD,
                              linestyle='densely dotted, gray')
    plot.draw_horizontal_line(max(trace), linestyle='densely dashed, gray')

    #     plot.set_ylimits(0)
    plot.set_xlabel(r'Trace time [\si{\ns}]')
    plot.set_ylabel(r'Signal strength [ADC]')
    plot.save_as_pdf('integral')


def plot_filtered(filtered_trace):
    plot = Plot()

    time = arange(0, len(filtered_trace) * 2.5, 2.5)
    plot.plot(time, filtered_trace, mark=None, linestyle='const plot')

    plot.set_xlabel(r'Trace time [\si{\ns}]')
    plot.set_ylabel(r'Signal strength [ADC]')
    plot.save_as_pdf('mean_filter')


if __name__ == "__main__":
    if 'trace' not in globals():
        s = Station(510)
    plot_integral(trace, baseline)
    trace, baseline, integral, pulseheight, filtered_trace = get_trace(s)
    plot_filtered(filtered_trace)
    print '%d ADC, %d ADC.sample, %d ADC' % (baseline, integral, pulseheight)