コード例 #1
0
    def test_download_data(self, mock_server, mock_retrieve, mock_store):
        """Check API calls and error handling of publicdb.download_data."""
        start = datetime(2010, 1, 1, 11)
        end = datetime(2010, 1, 1, 13)
        file = Mock()
        mock_get_data_url = mock_server.return_value.hisparc.get_data_url
        mock_get_data_url.return_value = sentinel.url
        mock_retrieve.return_value = (sentinel.tmpdata, sentinel.headers)

        # Successful download: the data URL is requested exactly once.
        publicdb.download_data(file, sentinel.group, sentinel.station_id,
                               start, end, get_blobs=sentinel.blobs)
        mock_get_data_url.assert_called_once_with(sentinel.station_id, start,
                                                  sentinel.blobs)

        # A 'No data' error is handled quietly by download_data.
        mock_get_data_url.side_effect = Exception("No data")
        publicdb.download_data(file, sentinel.group, sentinel.station_id,
                               start, end, get_blobs=sentinel.blobs)

        # Any other error must propagate to the caller.
        mock_get_data_url.side_effect = Exception("Unknown error")
        self.assertRaises(Exception, publicdb.download_data, file,
                          sentinel.group, sentinel.station_id, start, end,
                          get_blobs=sentinel.blobs)
コード例 #2
0
def get_data():
    """Ensure data is downloaded and available"""
    # Nothing to do when the datafile is already on disk.
    if os.path.exists(DATA_PATH):
        return
    with tables.open_file(DATA_PATH, 'w') as data:
        start = datetime.datetime(2014, 6, 10)
        end = datetime.datetime(2014, 6, 11)
        download_data(data, GROUP, STATION, start, end, get_blobs=True)
コード例 #3
0
    def download_data(self):
        start, end = self.datetimerange

        for station, group_path in zip(self.stations, self.station_groups):
            if not group_path in self.data:
                print "Downloading data for station", station
                download_data(self.data, group_path, station,
                              start, end, get_blobs=True)
コード例 #4
0
def download_tests():
    """Download the data for every configured test case into DATA."""
    with tables.open_file(DATA, 'w') as data:
        for i, start, end in TESTS:
            group = '/t%d' % i
            download_data(data, group, STATION, start, end, get_blobs=True)
コード例 #5
0
def download_and_store_station_data(f, station, date, get_blobs=True):
    """Download one full day of data for a station into datafile f."""
    midnight = datetime.time(0, 0, 0)
    start = datetime.datetime.combine(date, midnight)
    end = start + datetime.timedelta(days=1)

    group = get_or_create_station_group(f, station.cluster.main_cluster(),
                                        station.number)

    download_data(f, group, station.number, start, end, get_blobs=get_blobs)
コード例 #6
0
def download():
    """Download ten minutes of data for each station into DATA_PATH."""
    start = datetime(2015, 9, 16)
    end = datetime(2015, 9, 16, 0, 10)
    with tables.open_file(DATA_PATH, 'w') as data:
        for station in STATIONS:
            download_data(data, '/s%d' % station, station, start, end,
                          get_blobs=True)
コード例 #7
0
ファイル: master.py プロジェクト: rajeevyadav/sapphire
    def download_data(self):
        start, end = self.datetimerange

        for station, group_path in zip(self.stations, self.station_groups):
            if not group_path in self.data:
                print "Downloading data for station", station
                download_data(self.data,
                              group_path,
                              station,
                              start,
                              end,
                              get_blobs=True)
コード例 #8
0
def download_and_store_station_data(f, station, date, get_blobs=True):
    """Download one full day of data for a station into datafile f."""
    start = datetime.datetime.combine(date, datetime.time(0, 0, 0))
    end = start + datetime.timedelta(days=1)

    # The station group lives under its cluster's main cluster group.
    group = get_or_create_station_group(f, station.cluster.main_cluster(),
                                        station.number)

    download_data(f, group, station.number, start, end,
                  get_blobs=get_blobs)
コード例 #9
0
ファイル: test_publicdb.py プロジェクト: tomkooij/sapphire
    def test_download_data(self, mock_server, mock_retrieve, mock_store):
        """Check API calls and error handling of publicdb.download_data."""
        start = datetime(2010, 1, 1, 11)
        end = datetime(2010, 1, 1, 13)
        file = Mock()
        mock_get_data_url = mock_server.return_value.hisparc.get_data_url
        mock_get_data_url.return_value = sentinel.url
        mock_retrieve.return_value = (sentinel.tmpdata, sentinel.headers)

        # Successful download: the data URL is requested exactly once.
        publicdb.download_data(file,
                               sentinel.group,
                               sentinel.station_id,
                               start,
                               end,
                               get_blobs=sentinel.blobs)
        mock_get_data_url.assert_called_once_with(sentinel.station_id, start,
                                                  sentinel.blobs)

        # A 'No data' error is handled quietly by download_data.
        mock_get_data_url.side_effect = Exception("No data")
        publicdb.download_data(file,
                               sentinel.group,
                               sentinel.station_id,
                               start,
                               end,
                               get_blobs=sentinel.blobs)

        # Any other error must propagate to the caller.
        mock_get_data_url.side_effect = Exception("Unknown error")
        self.assertRaises(Exception,
                          publicdb.download_data,
                          file,
                          sentinel.group,
                          sentinel.station_id,
                          start,
                          end,
                          get_blobs=sentinel.blobs)
コード例 #10
0
#!/usr/bin/env python
"""Process HiSPARC events without traces

This script tests the ProcessEventsWithoutTraces class, as well as the
ProcessIndexedEventsWithoutTraces class.

"""
import datetime

import tables

from sapphire.analysis import process_events
from sapphire.publicdb import download_data

if __name__ == '__main__':
    data = tables.open_file('testdata.h5', 'a')
    if '/s501' not in data:
        download_data(data,
                      '/s501',
                      501,
                      datetime.datetime(2013, 1, 1),
                      datetime.datetime(2013, 1, 2),
                      get_blobs=False)
    process = process_events.ProcessEventsWithoutTraces(data, '/s501')
    process.process_and_store_results(overwrite=True)
    offsets = process.determine_detector_timing_offsets()
    print "Offsets:", offsets
コード例 #11
0
This script tests the process of searching for coincidences.

"""
import datetime

import tables

from sapphire.analysis import coincidences
from sapphire.publicdb import download_data

# Stations and the one-day period to search for coincidences.
STATIONS = [501, 503, 506]
START = datetime.datetime(2013, 1, 1)
END = datetime.datetime(2013, 1, 2)


if __name__ == '__main__':
    station_groups = ['/s%d' % number for number in STATIONS]

    data = tables.open_file('data.h5', 'w')
    for station, group in zip(STATIONS, station_groups):
        download_data(data, group, station, START, END)

    coincidences = coincidences.Coincidences(data, '/coincidences',
                                             station_groups)
    coincidences.search_and_store_coincidences()

    # Manual alternative to search_and_store_coincidences():
    #coincidences.search_coincidences()
    #coincidences.process_events(overwrite=True)
    #coincidences.store_coincidences()
コード例 #12
0
import datetime

import tables

from sapphire.publicdb import download_data

if __name__ == "__main__":
    # One (datafile path, group, station, start, end) entry per check.
    jobs = [
        ('/Users/arne/Datastore/check_trigger/data.h5', '/s501', 501,
         datetime.datetime(2015, 6, 3), datetime.datetime(2015, 6, 4)),
        ('/Users/arne/Datastore/check_trigger/data_502_1.h5', '/s502', 502,
         datetime.datetime(2012, 6, 10), datetime.datetime(2012, 6, 11)),
        ('/Users/arne/Datastore/check_trigger/data_502_2.h5', '/s502', 502,
         datetime.datetime(2012, 8, 1), datetime.datetime(2012, 8, 2)),
    ]
    for path, group, station, start, end in jobs:
        with tables.open_file(path, 'w') as data:
            download_data(data, group, station, start, end, get_blobs=True)
コード例 #13
0
#!/usr/bin/env python

"""Process HiSPARC events without traces

This script tests the ProcessEventsWithoutTraces class, as well as the
ProcessIndexedEventsWithoutTraces class.

"""
import datetime

import tables

from sapphire.publicdb import download_data
from sapphire.analysis import process_events


if __name__ == '__main__':
    data = tables.open_file('testdata.h5', 'a')
    if '/s501' not in data:
        download_data(data, '/s501', 501, datetime.datetime(2013, 1, 1),
                      datetime.datetime(2013, 1, 2), get_blobs=False)
    process = process_events.ProcessEventsWithoutTraces(data, '/s501')
    process.process_and_store_results(overwrite=True)
    offsets = process.determine_detector_timing_offsets()
    print "Offsets:", offsets
コード例 #14
0
"""
import datetime

import tables

from sapphire.publicdb import download_data
from sapphire.analysis import coincidences


# Stations and the one-day period to search for coincidences.
STATIONS = [501, 503, 506]
START = datetime.datetime(2013, 1, 1)
END = datetime.datetime(2013, 1, 2)


if __name__ == '__main__':
    station_groups = ['/s%d' % number for number in STATIONS]

    data = tables.open_file('data.h5', 'w')
    for station, group in zip(STATIONS, station_groups):
        download_data(data, group, station, START, END)

    coincidences = coincidences.Coincidences(data, '/coincidences',
                                             station_groups)
    coincidences.search_and_store_coincidences()

    # Manual alternative to search_and_store_coincidences():
    #coincidences.search_coincidences()
    #coincidences.process_events(overwrite=True)
    #coincidences.store_coincidences()