Example #1
def determine_offsets_for_pair(stations):
    ref_station, station = stations
    path = DT_DATAPATH % (ref_station, station)
    with tables.open_file(path, 'r') as data:
        table = data.get_node('/s%d' % station)
        offsets = []
        start = datetime(2010, 1, 1)
        end = datetime(2015, 4, 1)
        for dt0 in (start + timedelta(days=x)
                    for x in xrange(0, (end - start).days, 10)):
            ts0 = datetime_to_gps(dt0)
            CLUSTER.set_timestamp(ts0)
            # dz is z - z_ref
            r, _, dz = CLUSTER.calc_rphiz_for_stations(
                CLUSTER.get_station(ref_station).station_id,
                CLUSTER.get_station(station).station_id)
            ts1 = datetime_to_gps(dt0 +
                                  timedelta(days=max(int(r**1.12 / DAYS), 7)))
            dt = table.read_where('(timestamp >= ts0) & (timestamp < ts1)',
                                  field='delta')
            if len(dt) < 100:
                s_off = nan
            else:
                s_off = determine_station_timing_offset(dt, dz)
            offsets.append((ts0, s_off))
        write_offets(station, ref_station, offsets)
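
The pair is passed as a single tuple, which suggests this function is meant to be mapped over many station pairs, for example with multiprocessing. A minimal driver sketch, assuming the module-level names used above (DT_DATAPATH, CLUSTER, DAYS, write_offets) are available; the station numbers are placeholders:

from itertools import combinations
from multiprocessing import Pool

if __name__ == '__main__':
    # Placeholder station numbers; the real list comes from the module.
    pairs = list(combinations([501, 502, 503], 2))
    pool = Pool()
    pool.map(determine_offsets_for_pair, pairs)
    pool.close()
    pool.join()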
Example #2
def plot_station_offset_matrix():
    n = len(STATIONS)
    stations = {
        station: Station(station, force_stale=True)
        for station in STATIONS
    }

    for type in ['offset', 'error']:
        plot = MultiPlot(n, n, width=r'.08\textwidth', height=r'.08\textwidth')

        for i, station in enumerate(STATIONS):
            for j, ref_station in enumerate(STATIONS):
                splot = plot.get_subplot_at(i, j)
                if i == n:
                    splot.show_xticklabels()
                if station == ref_station:
                    splot.set_empty()
                    splot.set_label(r'%d' % station, location='center')
                    continue
                offsets = stations[station].station_timing_offsets(ref_station)
                bins = list(offsets['timestamp'])
                bins += [bins[-1] + 86400]
                splot.histogram(offsets[type], bins, linestyle='very thin')
                splot.set_axis_options('line join=round')
                plot_cuts_vertically(splot, stations, station, ref_station)

        # Even/odd rows/columns
        for row in range(0, n, 2):
            plot.set_yticklabels_position(row, n - 1, 'right')
            plot.set_xticklabels_position(n - 1, row, 'bottom')
            plot.show_yticklabels(row, n - 1)
            #plot.show_xticklabels(n - 1, row)

        for row in range(1, n, 2):
            plot.set_yticklabels_position(row, 0, 'left')
            plot.set_xticklabels_position(0, row, 'top')
            plot.show_yticklabels(row, 0)
            #plot.show_xticklabels(0, row)

        if type == 'offset':
            plot.set_ylimits_for_all(None, min=-70, max=70)
        else:
            plot.set_ylimits_for_all(None, min=0, max=10)
        plot.set_xlimits_for_all(None,
                                 min=datetime_to_gps(date(2011, 1, 1)),
                                 max=datetime_to_gps(date.today()))
        plot.set_xticks_for_all(None, YEARS_TICKS)
        # plot.set_xtick_labels_for_all(YEARS_LABELS)

        plot.set_xlabel(r'Date')
        plot.set_ylabel(r'Station offset [\si{\ns}]')

        plot.save_as_pdf('station_offsets_%s' % type)
Example #3
    def test_get_cuts(self):
        gps_station = (datetime_to_gps(datetime(2014, 1, 1, 10, 3)),
                       datetime_to_gps(datetime(2014, 3, 1, 11, 32)))
        gps_ref_station = (datetime_to_gps(datetime(2014, 1, 5, 0, 1, 1)),
                           datetime_to_gps(datetime(2014, 3, 5, 3, 34, 4)))
        elec_station = (datetime_to_gps(datetime(2014, 1, 3, 3, 34, 3)),
                        datetime_to_gps(datetime(2014, 3, 5, 23, 59, 59)))
        elec_ref_station = (datetime_to_gps(datetime(2014, 1, 9, 0, 0, 0)),
                            datetime_to_gps(datetime(2014, 3, 15, 1, 2, 3)))
        gps_mock = Mock()
        elec_mock = Mock()

        gps_mock.side_effect = [array(gps_station), array(gps_ref_station)]
        elec_mock.side_effect = [array(elec_station), array(elec_ref_station)]

        self.off._get_electronics_timestamps = elec_mock
        self.off._get_gps_timestamps = gps_mock

        cuts = self.off._get_cuts(sentinel.station, sentinel.ref_station)

        elec_mock.assert_has_calls([call(sentinel.ref_station), call(sentinel.station)], any_order=True)
        gps_mock.assert_has_calls([call(sentinel.ref_station), call(sentinel.station)], any_order=True)

        self.assertEqual(len(cuts), 8)
        six.assertCountEqual(self, sorted(cuts), cuts)
        self.assertEqual(cuts[0], datetime(2014, 1, 1))
        today = datetime.now()
        self.assertEqual(cuts[-1], datetime(today.year, today.month, today.day))
Example #4
def plot_offset_timeline(ref_station, station):
    ref_s = Station(ref_station)
    s = Station(station)
    #         ref_gps = ref_s.gps_locations
    #         ref_voltages = ref_s.voltages
    #         ref_n = get_n_events(ref_station)
    #         gps = s.gps_locations
    #         voltages = s.voltages
    #         n = get_n_events(station)
    # Determine offsets for first day of each month
    #         d_off = s.detector_timing_offsets
    s_off = get_station_offsets(ref_station, station)
    graph = Plot(width=r'.6\textwidth')
    #         graph.scatter(ref_gps['timestamp'], [95] * len(ref_gps), mark='square', markstyle='purple,mark size=.5pt')
    #         graph.scatter(ref_voltages['timestamp'], [90] * len(ref_voltages), mark='triangle', markstyle='purple,mark size=.5pt')
    #         graph.scatter(gps['timestamp'], [85] * len(gps), mark='square', markstyle='gray,mark size=.5pt')
    #         graph.scatter(voltages['timestamp'], [80] * len(voltages), mark='triangle', markstyle='gray,mark size=.5pt')
    #         graph.shade_region(n['timestamp'], -ref_n['n'] / 1000, n['n'] / 1000, color='lightgray,const plot')
    #         graph.plot(d_off['timestamp'], d_off['d0'], markstyle='mark size=.5pt')
    #         graph.plot(d_off['timestamp'], d_off['d2'], markstyle='mark size=.5pt', linestyle='green')
    #         graph.plot(d_off['timestamp'], d_off['d3'], markstyle='mark size=.5pt', linestyle='blue')
    graph.plot(s_off['timestamp'],
               s_off['offset'],
               mark='*',
               markstyle='mark size=1.25pt',
               linestyle=None)
    graph.set_ylabel(r'$\Delta t$ [ns]')
    graph.set_xlabel('Date')
    graph.set_xticks(
        [datetime_to_gps(date(y, 1, 1)) for y in range(2010, 2016)])
    graph.set_xtick_labels(['%d' % y for y in range(2010, 2016)])
    graph.set_xlimits(1.25e9, 1.45e9)
    graph.set_ylimits(-150, 150)
    graph.save_as_pdf('plots/offsets/offsets_ref%d_%d' %
                      (ref_station, station))
Example #5
def get_detector_timing_offsets_source(request, station_number):
    data = get_detector_timing_offsets(station_number)
    if not len(data):
        raise Http404

    data = [
        next(rows) for _, rows in groupby(data, key=itemgetter(1, 2, 3, 4))
    ]

    data = [(clock.datetime_to_gps(r[0]), none_to_nan(r[1]), none_to_nan(r[2]),
             none_to_nan(r[3]), none_to_nan(r[4])) for r in data]

    buffer = StringIO()
    writer = csv.writer(buffer, delimiter='\t', lineterminator='\n')
    writer.writerows(data)
    tsvdata = buffer.getvalue().strip('\n')

    response = render(request,
                      'source/detector_timing_offsets.tsv', {
                          'tsvdata': tsvdata,
                          'station_number': station_number
                      },
                      content_type=MIME_TSV)
    response['Content-Disposition'] = (
        'attachment; filename=detector_timing_offsets-s%s.tsv' %
        station_number)
    return response
Example #6
def coin_rate_since(start_ts=None):
    """Get coincidence rate since a given timestamp"""

    if start_ts is None:
        start_ts = datetime_to_gps(date(2015, 1, 1))

    for s1, s2 in combinations([7001, 7002, 7003], 2):
        ts12 = get_timestamp_ranges([s1, s2])
        mts12 = modify_range(ts12, start_ts)
        tot12 = get_total_exposure(mts12)
        n_coin = get_coin_count(s1, s2, start_ts)
        print s1, s2, tot12, n_coin, n_coin / tot12, tot12 / n_coin
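
A small usage sketch: the function prints, for each pair of the hard-coded stations, the total shared exposure, the coincidence count, the coincidence rate, and the mean time between coincidences. The helpers it calls (get_timestamp_ranges, modify_range, get_total_exposure, get_coin_count) are assumed to come from the surrounding module.

# Report coincidence rates since 1 January 2016 instead of the default.
coin_rate_since(datetime_to_gps(date(2016, 1, 1)))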
Example #7
def monthrange(start, stop):
    """Generator for datetime month ranges

    This is a very specific generator for datetime ranges. Based on
    start and stop values, it generates one month intervals.

    :param start: a year, month tuple
    :param stop: a year, month tuple
    :return: generator for start and end timestamps of one month intervals

    The first of the stop month marks the end of the last interval; if start
    equals stop, a single full month is yielded.

    """
    startdt = datetime(start[0], start[1], 1)
    stopdt = datetime(stop[0], stop[1], 1)

    if stopdt < startdt:
        return

    if start == stop:
        # Single month; avoid an invalid month 13 when start is a December.
        if start[1] < 12:
            end_of_month = datetime(start[0], start[1] + 1, 1)
        else:
            end_of_month = datetime(start[0] + 1, 1, 1)
        yield (datetime_to_gps(startdt), datetime_to_gps(end_of_month))
        return
    else:
        current_year, current_month = start

        while (current_year, current_month) != stop:
            if current_month < 12:
                next_year = current_year
                next_month = current_month + 1
            else:
                next_year = current_year + 1
                next_month = 1
            yield (datetime_to_gps(datetime(current_year, current_month, 1)),
                   datetime_to_gps(datetime(next_year, next_month, 1)))

            current_year = next_year
            current_month = next_month
        return
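
A short usage sketch: for start (2014, 1) and stop (2014, 4) the generator yields one pair of GPS timestamps per month, the last interval ending on 1 April 2014.

# Iterate over the monthly GPS timestamp intervals for Jan, Feb and Mar 2014;
# the end of the final interval is the first of the stop month (1 April).
for begin_ts, end_ts in monthrange((2014, 1), (2014, 4)):
    print('%d %d' % (begin_ts, end_ts))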
Example #8
def plot_timing_offsets(station_number):
    """Create a plot object from station configs"""

    data = get_detector_timing_offsets(station_number)
    data = [[clock.datetime_to_gps(row[0]), row[1:]] for row in data]
    data = zip(*data)

    if not data:
        return None

    timestamps = data[0]
    values = zip(*data[1])

    x_label = 'Date (month/year)'
    y_label = 'Timing offset (ns)'

    plot_object = create_plot_object(timestamps, values, x_label, y_label)
    return plot_object
Example #10
def plot_luminosity(timestamp, aligned_data, aligned_data_all, i):

    n_active_aligned = (aligned_data != 0).sum(axis=0)
    cumsummed_data_all = aligned_data_all.sum(axis=0).cumsum()
    summed_data = aligned_data.sum(axis=0)
    cumsummed_data = summed_data.cumsum()

    plot = Plot(width=r'.5\textwidth')
    #     plot.plot([t / 1e9 for t in timestamp[::100]], cumsummed_data_all[::100],
    #               linestyle='black!50!green, thick', mark=None)
    plot.plot([t / 1e9 for t in timestamp[::100]],
              cumsummed_data[::100],
              linestyle='thick',
              mark=None)
    plot.set_xticks([datetime_to_gps(date(y, 1, 1)) / 1e9 for y in YEARS[::3]])
    plot.set_xtick_labels(['%d' % y for y in YEARS[::3]])
    plot.set_ylabel('Cumulative number of events')
    plot.set_xlabel('Date')
    plot.save_as_pdf('luminosity_%s' % ['network', 'spa'][i])
Example #11
def get_station_timing_offsets_source(request, ref_station_number,
                                      station_number):
    ref_station_number = int(ref_station_number)
    station_number = int(station_number)

    if ref_station_number >= station_number:
        raise Http404

    data = get_station_timing_offsets(ref_station_number, station_number)

    if not len(data):
        try:
            Station.objects.get(number=ref_station_number)
            Station.objects.get(number=station_number)
        except Station.DoesNotExist:
            raise Http404
        else:
            # For existing pair without offsets return (nan, nan),
            # to be handled by analysis software.
            data = [(FIRSTDATE, nan, nan)]

    data = [next(rows) for _, rows in groupby(data, key=itemgetter(1))]

    data = [(clock.datetime_to_gps(r[0]), none_to_nan(r[1]), none_to_nan(r[2]))
            for r in data]

    buffer = StringIO()
    writer = csv.writer(buffer, delimiter='\t', lineterminator='\n')
    writer.writerows(data)
    tsvdata = buffer.getvalue().strip('\n')

    response = render(request,
                      'source/station_timing_offsets.tsv', {
                          'tsvdata': tsvdata,
                          'ref_station_number': ref_station_number,
                          'station_number': station_number
                      },
                      content_type=MIME_TSV)
    response['Content-Disposition'] = (
        'attachment; filename=station_timing_offsets-s%d-s%d.tsv' %
        (ref_station_number, station_number))
    return response
Example #12
def get_station_timing_offsets_source(request, ref_station_number, station_number):
    ref_station_number = int(ref_station_number)
    station_number = int(station_number)

    if ref_station_number >= station_number:
        raise Http404

    data = get_station_timing_offsets(ref_station_number, station_number)

    if not len(data):
        try:
            Station.objects.get(number=ref_station_number)
            Station.objects.get(number=station_number)
        except Station.DoesNotExist:
            raise Http404
        else:
            # For existing pair without offsets return (nan, nan),
            # to be handled by analysis software.
            data = [(FIRSTDATE, nan, nan)]

    data = [next(rows) for _, rows in groupby(data, key=itemgetter(1))]

    data = [(clock.datetime_to_gps(r[0]), none_to_nan(r[1]), none_to_nan(r[2]))
            for r in data]

    buffer = StringIO()
    writer = csv.writer(buffer, delimiter='\t', lineterminator='\n')
    writer.writerows(data)
    tsvdata = buffer.getvalue().strip('\n')

    response = render(request, 'source/station_timing_offsets.tsv',
                      {'tsvdata': tsvdata,
                       'ref_station_number': ref_station_number,
                       'station_number': station_number},
                      content_type=MIME_TSV)
    response['Content-Disposition'] = (
        'attachment; filename=station_timing_offsets-s%d-s%d.tsv' %
        (ref_station_number, station_number))
    return response
Example #13
def get_eventtime_histogram_sources(station_number, start, end):
    histograms = get_list_or_404(
        DailyHistogram.objects.select_related('source'),
        source__station__number=station_number,
        source__date__gte=start, source__date__lt=end,
        type__slug='eventtime')
    bins = []
    values = []
    hours = arange(24) * 3600
    no_data = [0] * 24
    i = 0
    for date in daterange(start, end):
        ts = clock.datetime_to_gps(date)
        bins.extend(ts + hours)
        if histograms[i].source.date == date:
            values.extend(histograms[i].values)
            i += 1
            if i == len(histograms):
                break
        else:
            values.extend(no_data)
    return izip(bins, values)
Example #15
def get_detector_timing_offsets_source(request, station_number):
    data = get_detector_timing_offsets(station_number)
    if not len(data):
        raise Http404

    data = [next(rows) for _, rows in groupby(data, key=itemgetter(1, 2, 3, 4))]

    data = [(clock.datetime_to_gps(r[0]), none_to_nan(r[1]), none_to_nan(r[2]), none_to_nan(r[3]), none_to_nan(r[4]))
            for r in data]

    buffer = StringIO()
    writer = csv.writer(buffer, delimiter='\t', lineterminator='\n')
    writer.writerows(data)
    tsvdata = buffer.getvalue().strip('\n')

    response = render(request, 'source/detector_timing_offsets.tsv',
                      {'tsvdata': tsvdata,
                       'station_number': station_number},
                      content_type=MIME_TSV)
    response['Content-Disposition'] = (
        'attachment; filename=detector_timing_offsets-s%s.tsv' %
        station_number)
    return response
Example #16
import os

from datetime import date

from numpy import arange, array

from artist import MultiPlot, Plot

from sapphire.transformations.clock import datetime_to_gps
from sapphire.utils import pbar

from download_dataset import END, START, STATIONS

DATASTORE = "/Users/arne/Datastore/dataset"
DATA_PATH = os.path.join(DATASTORE,
                         'dataset_sciencepark_stations_110601_160201.h5')
COIN_PATH = os.path.join(DATASTORE, 'dataset_sciencepark_n2_110601_160201.h5')
TSV_PATH = os.path.join(DATASTORE, 'stats/s%d_%s.tsv')

# STATIONS = STATIONS[-2:]
START_TS = datetime_to_gps(date(*START, day=1))
END_TS = datetime_to_gps(date(*END, day=1))
BINS = arange(START_TS, END_TS + 1, 86400)
BIN_WIDTH = BINS[1] - BINS[0]
COLORS = ['black', 'red', 'green', 'blue']

YEARS = range(2011, date.today().year + 1)
YEARS_TICKS = array([datetime_to_gps(date(y, 1, 1)) for y in YEARS])
YEARS_LABELS = [str(y) for y in YEARS]

FIELDS = [
    'event_rate', 'mpv', 'integrals', 't_trigger', ('t1', 't2', 't3', 't4'),
    ('n1', 'n2', 'n3', 'n4')
]
FIELD_NAMES = [''.join(field) for field in FIELDS]
Example #17
    graph.save_as_pdf('detector_offset_drift_%s_%d' % (type, station))


if __name__ == '__main__':

    for station in pbar(STATIONS):
        # Determine offsets for first day of each month
        output = open('offsets_%d.tsv' % station, 'wb')
        csvwriter = csv.writer(output, delimiter='\t')
        offsets = []
        timestamps = []
        for y in range(2010, 2016):
            for m in range(1, 13):
                if y == 2015 and m >= 4:
                    continue
                timestamps.append(datetime_to_gps(date(y, m, 1)))
                path = os.path.join(DATA_PATH, str(y), str(m),
                                    '%d_%d_1.h5' % (y, m))
                with tables.open_file(path, 'r') as data:
                    offsets.append(determine_offset(data, station))
                csvwriter.writerow([timestamps[-1]] + offsets[-1])
        output.close()
        plot_detector_offsets(offsets, 'month')

        # Determine offsets for each day in one month
        offsets = []
        y = 2013
        m = 1
        for d in range(1, 32):
            path = os.path.join(DATA_PATH, str(y), str(m),
                                '%d_%d_%d.h5' % (y, m, d))
Example #18
def offsets_on_date(offsets, d, id):
    timestamp = datetime_to_gps(d)
    return [
        o(timestamp)[id] for o in offsets.values()
        if not isnan(o(timestamp)[1])
    ]
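
The helper expects offsets to map station numbers to callables that return the per-detector offsets for a GPS timestamp, with index 1 doubling as a validity check. A sketch of one way such a mapping might be built; pairing it with Station.detector_timing_offset from sapphire is an assumption, and the station numbers are placeholders:

from datetime import date

from sapphire import Station

# Hypothetical mapping: each value is a callable returning the four detector
# offsets of that station for a given GPS timestamp.
offsets = {s: Station(s, force_stale=True).detector_timing_offset
           for s in [501, 502, 503]}
# Offsets of detector 2 on 1 January 2014, skipping stations without data.
d2 = offsets_on_date(offsets, date(2014, 1, 1), 2)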
Example #19
    def test_datetime_to_gps(self):
        for date, timestamp, _ in self.combinations:
            dt = datetime.datetime.strptime(date, '%B %d, %Y')
            self.assertEqual(clock.datetime_to_gps(dt), timestamp)
Example #20
from datetime import date
from itertools import chain

from numpy import array

from artist import MultiPlot

from sapphire import Station
from sapphire.transformations.clock import datetime_to_gps

YEARS = range(2011, date.today().year + 1)
YEARS_TICKS = array([datetime_to_gps(date(y, 1, 1)) for y in YEARS])
YEARS_LABELS = [str(y) for y in YEARS]

STATIONS = [501, 502, 503, 504, 505, 506, 508, 509, 510, 511]


def plot_station_offset_matrix():
    n = len(STATIONS)
    stations = {
        station: Station(station, force_stale=True)
        for station in STATIONS
    }

    for type in ['offset', 'error']:
        plot = MultiPlot(n, n, width=r'.08\textwidth', height=r'.08\textwidth')

        for i, station in enumerate(STATIONS):
            for j, ref_station in enumerate(STATIONS):
                splot = plot.get_subplot_at(i, j)
                if i == n:
Example #21
import datetime

import numpy as np

from artist import Plot

from sapphire import Station
from sapphire.transformations.clock import datetime_to_gps, gps_to_datetime

from get_aligned_eventtimes import get_aligned, get_station_numbers

YEARS = range(2004, datetime.date.today().year + 1)
YEARS_TICKS = np.array(
    [datetime_to_gps(datetime.date(y, 1, 1)) for y in YEARS])
YEARS_LABELS = [str(y) for y in YEARS]


def normalize_event_rates(data, station_numbers):
    """Normalize event rates using the number of detectors

    Number per hour is divided by the expected number of events per hour for a
    station with a certain number of detectors.

    So after this a '1.3' would be on average 30% more events per hour than the
    expected number of events per hour for such a station.

    """
    scaled_data = data.copy()
    for i, s in enumerate(station_numbers):
        n = Station(s).n_detectors()
        if n == 2:
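
The snippet above breaks off inside the loop. A sketch of the normalization the docstring describes, dividing each station's hourly counts by an expected rate that depends on its number of detectors; the rate constants are placeholders, not the values from the original code:

EXPECTED_RATE = {2: 1200., 4: 2400.}  # hypothetical expected events per hour


def normalize_event_rates_sketch(data, station_numbers):
    scaled_data = data.copy()
    for i, s in enumerate(station_numbers):
        n = Station(s).n_detectors()
        # Divide measured events/hour by the expected rate for this station type.
        scaled_data[i] = scaled_data[i] / EXPECTED_RATE[n]
    return scaled_data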
Example #22
MONTH = 30 * DAY
YEAR = 365 * DAY

SPA_STAT = [501, 502, 503, 504, 505, 506, 508, 509, 510]
CLUSTER = HiSPARCStations(SPA_STAT)

DATA_PATH = '/Users/arne/Datastore/station_offsets/'


def get_station_dt(data, station):
    table = data.get_node('/s%d' % station)
    return table


if __name__ == '__main__':
    t_start = datetime_to_gps(datetime(2010, 1, 1))
    t_end = datetime_to_gps(datetime(2015, 4, 1))

    for i, station in enumerate(STATIONS, 1):
        with tables.open_file(DATA_PATH + 'dt_ref501_%d.h5' % station, 'r') as data:
            distance, _, _ = CLUSTER.calc_rphiz_for_stations(i, 0)
            max_dt = max(distance / .3, 100) * 1.5
            table = get_station_dt(data, station)
            graph = Plot()
            counts, x, y = histogram2d(table.col('timestamp'),
                                       table.col('delta'),
                                       bins=(arange(t_start, t_end, XWEEK),
                                             linspace(-max_dt, max_dt, 150)))
            graph.histogram2d(counts, x, y, bitmap=True, type='color')
            graph.set_ylabel(r'$\Delta t$ [ns]')
            graph.set_xlabel('Timestamp [s]')
Example #23
def plot_active_stations(timestamps, stations, aligned_data, data, i):

    first_ts = []
    last_ts = []
    stations_with_data = []

    assert aligned_data.shape[0] == len(stations)

    for n in range(aligned_data.shape[0]):
        prev_ts = 0
        for ts, has_data in zip(timestamps, aligned_data[n]):
            if has_data:
                if prev_ts > 30:
                    # Running for at least 30 hours.
                    first_ts.append(ts)
                    stations_with_data.append(stations[n])
                    break
                else:
                    prev_ts += 1
            else:
                prev_ts = 0

    for station in stations_with_data:
        end_ts = get_station_end_timestamp(station, data)
        if end_ts is not None:
            last_ts.append(end_ts)

    first_ts = sorted(first_ts)
    last_ts = sorted(last_ts)
    diff_stations = array([1] * len(first_ts) + [-1] * len(last_ts))
    idx = argsort(first_ts + last_ts)
    n_stations = diff_stations[idx].cumsum()

    # Get maximum number of simultaneously active stations per 7 days
    n_active_aligned = (aligned_data != 0).sum(axis=0)
    n_binned, t_binned, _ = binned_statistic(timestamps,
                                             n_active_aligned,
                                             npmax,
                                             bins=len(timestamps) / (7 * 24))
    # Get average number of detected events per 7 days
    # TODO: scale 2/4 detector stations
    summed_data = aligned_data.sum(axis=0)
    e_binned, t_binned, _ = binned_statistic(timestamps,
                                             summed_data,
                                             average,
                                             bins=len(timestamps) / (7 * 24))

    plot = Plot(width=r'.5\textwidth')
    plot.plot([t / 1e9 for t in sorted(first_ts + last_ts)],
              n_stations,
              linestyle='gray, thick',
              mark=None,
              use_steps=True)
    plot.histogram(n_binned, t_binned / 1e9, linestyle='thick')
    plot.histogram(e_binned * max(n_binned) / max(e_binned),
                   t_binned / 1e9,
                   linestyle='blue')
    plot.set_axis_options('line join=round')
    plot.set_ylabel('Number of stations')
    plot.set_xlabel('Date')
    plot.set_ylimits(min=0)
    plot.set_xticks([datetime_to_gps(date(y, 1, 1)) / 1e9 for y in YEARS[::3]])
    plot.set_xtick_labels(['%d' % y for y in YEARS[::3]])
    plot.save_as_pdf('active_stations_%s' % ['network', 'spa'][i])
Example #24
    def test_datetime_to_gps(self):
        for date, timestamp, _ in self.combinations:
            dt = datetime.datetime.strptime(date, '%B %d, %Y')
            self.assertEqual(clock.datetime_to_gps(dt), timestamp)
                                       (data[sn]['counts'] < 5000))
    return timestamps, extended_data


if __name__ == "__main__":
    timestamps, eventtime = get_aligned()
    summed_data = eventtime.sum(axis=0)
    plot = Plot()
    bins = arange(-.5, len(STATIONS) + 1.5)

    counts, bins = histogram(summed_data, bins=bins)
    counts_in_years = counts / 24. / 365.
    plot.histogram(counts_in_years, bins, linestyle='semitransparent')

    # Excluding data from before 26-09-2008
    start = argmax(timestamps > datetime_to_gps(date(2008, 9, 26)))
    counts, bins = histogram(summed_data[start:], bins=bins)
    counts_in_years = counts / 24. / 365.
    print[(i, sum(counts_in_years[i:])) for i in range(11)]
    plot.histogram(counts_in_years, bins)

    # Excluding data from before 08-03-2010
    start = argmax(timestamps > datetime_to_gps(date(2010, 3, 8)))
    counts, bins = histogram(summed_data[start:], bins=bins)
    counts_in_years = counts / 24. / 365.
    print[(i, sum(counts_in_years[i:])) for i in range(11)]
    plot.histogram(counts_in_years, bins, linestyle='blue')

    # Excluding data from before 01-07-2011
    start = argmax(timestamps > datetime_to_gps(date(2011, 7, 1)))
    counts, bins = histogram(summed_data[start:], bins=bins)