def test_download_data(self, mock_server, mock_retrieve, mock_store):
    """Exercise publicdb.download_data's three outcomes.

    A normal call requests the data URL once; a 'No data' error from the
    server is swallowed silently; any other error propagates.
    """
    begin = datetime(2010, 1, 1, 11)
    until = datetime(2010, 1, 1, 13)
    datafile = Mock()
    mock_get_data_url = mock_server.return_value.hisparc.get_data_url
    mock_get_data_url.return_value = sentinel.url
    mock_retrieve.return_value = (sentinel.tmpdata, sentinel.headers)

    # Successful download: the data URL is requested exactly once.
    publicdb.download_data(datafile, sentinel.group, sentinel.station_id,
                           begin, until, get_blobs=sentinel.blobs)
    mock_get_data_url.assert_called_once_with(sentinel.station_id, begin,
                                              sentinel.blobs)

    # A 'No data' failure must be handled quietly, without raising.
    mock_get_data_url.side_effect = Exception("No data")
    publicdb.download_data(datafile, sentinel.group, sentinel.station_id,
                           begin, until, get_blobs=sentinel.blobs)

    # Any other failure must reach the caller.
    mock_get_data_url.side_effect = Exception("Unknown error")
    self.assertRaises(Exception, publicdb.download_data, datafile,
                      sentinel.group, sentinel.station_id, begin, until,
                      get_blobs=sentinel.blobs)
def get_data():
    """Ensure data is downloaded and available"""
    # Nothing to do when the datafile already exists on disk.
    if os.path.exists(DATA_PATH):
        return
    with tables.open_file(DATA_PATH, 'w') as data:
        download_data(data, GROUP, STATION,
                      datetime.datetime(2014, 6, 10),
                      datetime.datetime(2014, 6, 11),
                      get_blobs=True)
def download_data(self): start, end = self.datetimerange for station, group_path in zip(self.stations, self.station_groups): if not group_path in self.data: print "Downloading data for station", station download_data(self.data, group_path, station, start, end, get_blobs=True)
def download_tests():
    """Fetch the event data for each configured test interval.

    Every test interval is stored in its own group (``/t0``, ``/t1``, ...)
    in a freshly created datafile.
    """
    with tables.open_file(DATA, 'w') as data:
        for test_id, start, end in TESTS:
            group = '/t%d' % test_id
            download_data(data, group, STATION, start, end, get_blobs=True)
def download_and_store_station_data(f, station, date, get_blobs=True):
    """Download one full day of data for *station* into datafile *f*.

    :param f: open datafile to store the data in.
    :param station: station object; its number and cluster determine the
        destination group.
    :param date: the day (a date object) to download.
    :param get_blobs: also download trace blobs when True.
    """
    # Cover the 24 hours from midnight to the next midnight.
    midnight = datetime.datetime.combine(date, datetime.time(0, 0, 0))
    next_midnight = midnight + datetime.timedelta(days=1)
    cluster = station.cluster.main_cluster()
    group = get_or_create_station_group(f, cluster, station.number)
    download_data(f, group, station.number, midnight, next_midnight,
                  get_blobs=get_blobs)
def download():
    """Store ten minutes of event data for every station in STATIONS."""
    start = datetime(2015, 9, 16)
    end = datetime(2015, 9, 16, 0, 10)
    with tables.open_file(DATA_PATH, 'w') as data:
        for station in STATIONS:
            group = '/s%d' % station
            download_data(data, group, station, start, end, get_blobs=True)
#!/usr/bin/env python """Process HiSPARC events without traces This script tests the ProcessEventsWithoutTraces class, as well as the ProcessIndexedEventsWithoutTraces class. """ import datetime import tables from sapphire.analysis import process_events from sapphire.publicdb import download_data if __name__ == '__main__': data = tables.open_file('testdata.h5', 'a') if '/s501' not in data: download_data(data, '/s501', 501, datetime.datetime(2013, 1, 1), datetime.datetime(2013, 1, 2), get_blobs=False) process = process_events.ProcessEventsWithoutTraces(data, '/s501') process.process_and_store_results(overwrite=True) offsets = process.determine_detector_timing_offsets() print "Offsets:", offsets
This script tests the process of searching for coincidences. """ import datetime import tables from sapphire.analysis import coincidences from sapphire.publicdb import download_data STATIONS = [501, 503, 506] START = datetime.datetime(2013, 1, 1) END = datetime.datetime(2013, 1, 2) if __name__ == '__main__': station_groups = ['/s%d' % u for u in STATIONS] data = tables.open_file('data.h5', 'w') for station, group in zip(STATIONS, station_groups): download_data(data, group, station, START, END) coincidences = coincidences.Coincidences(data, '/coincidences', station_groups) coincidences.search_and_store_coincidences() # This is the manual method #coincidences.search_coincidences() #coincidences.process_events(overwrite=True) #coincidences.store_coincidences()
import datetime

import tables

from sapphire.publicdb import download_data

if __name__ == "__main__":
    # (datafile path, group, station number, first day, last day)
    downloads = [
        ('/Users/arne/Datastore/check_trigger/data.h5', '/s501', 501,
         datetime.datetime(2015, 6, 3), datetime.datetime(2015, 6, 4)),
        ('/Users/arne/Datastore/check_trigger/data_502_1.h5', '/s502', 502,
         datetime.datetime(2012, 6, 10), datetime.datetime(2012, 6, 11)),
        ('/Users/arne/Datastore/check_trigger/data_502_2.h5', '/s502', 502,
         datetime.datetime(2012, 8, 1), datetime.datetime(2012, 8, 2)),
    ]
    # Download each day of station data into its own datafile.
    for path, group, station, start, end in downloads:
        with tables.open_file(path, 'w') as data:
            download_data(data, group, station, start, end, get_blobs=True)
#!/usr/bin/env python """Process HiSPARC events without traces This script tests the ProcessEventsWithoutTraces class, as well as the ProcessIndexedEventsWithoutTraces class. """ import datetime import tables from sapphire.publicdb import download_data from sapphire.analysis import process_events if __name__ == '__main__': data = tables.open_file('testdata.h5', 'a') if '/s501' not in data: download_data(data, '/s501', 501, datetime.datetime(2013, 1, 1), datetime.datetime(2013, 1, 2), get_blobs=False) process = process_events.ProcessEventsWithoutTraces(data, '/s501') process.process_and_store_results(overwrite=True) offsets = process.determine_detector_timing_offsets() print "Offsets:", offsets
""" import datetime import tables from sapphire.publicdb import download_data from sapphire.analysis import coincidences STATIONS = [501, 503, 506] START = datetime.datetime(2013, 1, 1) END = datetime.datetime(2013, 1, 2) if __name__ == '__main__': station_groups = ['/s%d' % u for u in STATIONS] data = tables.open_file('data.h5', 'w') for station, group in zip(STATIONS, station_groups): download_data(data, group, station, START, END) coincidences = coincidences.Coincidences(data, '/coincidences', station_groups) coincidences.search_and_store_coincidences() # This is the manual method #coincidences.search_coincidences() #coincidences.process_events(overwrite=True) #coincidences.store_coincidences()