def test_download_coincidences(self, mock_esd_api_network):
    """Fetch coincidence data from the ESD and compare it to the reference."""
    tmp_path = create_tempfile_path()
    perform_download_coincidences(tmp_path)
    validate_results(self, test_data_coincidences_path, tmp_path)
    os.remove(tmp_path)
def test_simulation_output(self):
    """Run the NKG LDF simulation and compare its output to the reference."""
    tmp_path = create_tempfile_path()
    perform_nkgldfsimulation(tmp_path)
    validate_results(self, test_data_nkg, tmp_path)
    os.remove(tmp_path)
def test_load_data_output(self):
    """Import TSV event data into HDF5 and compare with the reference file."""
    tmp_path = create_tempfile_path()
    perform_load_data(tmp_path)
    validate_results(self, test_data_path, tmp_path)
    os.remove(tmp_path)
def test_load_coincidences_output(self, mock_esd_api_network):
    """Import TSV coincidences into HDF5 and compare with the reference file."""
    tmp_path = create_tempfile_path()
    perform_load_coincidences(tmp_path)
    validate_results(self, test_data_coincidences_path, tmp_path)
    os.remove(tmp_path)
def test_coincidencesesd_output(self):
    """Search/store coincidences (event processing patched out) and validate."""
    patch_target = "sapphire.analysis.process_events.ProcessIndexedEventsWithoutTraces"
    with tables.open_file(self.data_path, "a") as data, patch(patch_target):
        finder = coincidences.Coincidences(
            data,
            "/coincidences",
            ["/station_501", "/station_502"],
            progress=False,
        )
        finder.search_and_store_coincidences()
    validate_results(self, self.get_testdata_path(), self.data_path)
def test_simulation_output(self):
    """Run the ground-particles gamma simulation and validate its output."""
    tmp_path = create_tempfile_path()
    perform_groundparticlesgammasimulation(tmp_path)
    validate_results(self, test_data_gamma, tmp_path)
    os.remove(tmp_path)
def test_download_data(self):
    """Download event data from the ESD and compare it to the reference."""
    tmp_path = create_tempfile_path()
    perform_esd_download_data(tmp_path)
    validate_results(self, test_data_path, tmp_path)
    os.remove(tmp_path)
def test_perform_update_tasks(self, mock_close):
    """Update the ESD for summaries in need of updates"""
    self.setup_station()
    summary = histograms_factories.SummaryFactory(
        station=self.station,
        date=date(2017, 1, 1),
        needs_update_events=True,
        num_events=168,
        needs_update_weather=True,
        num_weather=60,
        needs_update_config=True,
        num_config=None,
        needs_update_singles=True,
        num_singles=301,
        needs_update=True,
    )
    jobs.perform_update_tasks()

    # The generated ESD file must equal the reference file
    generated_path = join(settings.ESD_PATH, '2017/1/2017_1_1.h5')
    reference_path = join(dirname(abspath(__file__)), '../data/esd/2017/1/2017_1_1.h5')
    validate_results(self, generated_path, reference_path)
    rmtree(settings.ESD_PATH)

    # Processed configuration from data into database
    self.assertEqual(1, models.Configuration.objects.filter(summary=summary).count())

    # Calculated detector offsets
    offsets = models.DetectorTimingOffset.objects.get(summary=summary)
    self.assertEqual(-0.25, offsets.offset_1)
    self.assertEqual(0.0, offsets.offset_2)
    self.assertEqual(1.75, offsets.offset_3)
    self.assertEqual(0.5, offsets.offset_4)
def test_read_and_store_data(self):
    """Convert KASCADE data into HDF5 and compare with the reference file."""
    destination = self.destination_path
    # NOTE: the open file and the reader are kept as instance attributes,
    # matching the original behavior.
    with tables.open_file(destination, 'a') as self.destination_data:
        self.kascade = kascade.StoreKascadeData(
            self.destination_data, TEST_DATA_FILE, '/kascade', progress=False
        )
        self.kascade.read_and_store_data()
    validate_results(self, TEST_DATA_REF, self.destination_path)
def test_coincidencesesd_output(self):
    """Search and store ESD coincidences, then validate the datafile."""
    station_groups = ['/station_501', '/station_502']
    with tables.open_file(self.data_path, 'a') as data:
        finder = coincidences.CoincidencesESD(data, '/coincidences', station_groups, progress=False)
        finder.search_and_store_coincidences()
    validate_results(self, self.get_testdata_path(), self.data_path)
def test_coincidencesesd_output(self):
    """Storing with an incomplete station list fails; a complete one succeeds."""
    with tables.open_file(self.data_path, 'a') as data:
        finder = coincidences.CoincidencesESD(
            data, '/coincidences', ['/station_501', '/station_502'], progress=False
        )
        # A subset of the observed stations must be rejected
        self.assertRaises(RuntimeError, finder.search_and_store_coincidences, station_numbers=[501])
        finder.search_and_store_coincidences(station_numbers=[501, 502])
    validate_results(self, self.get_testdata_path(), self.data_path)
def test_store_data(self):
    """Storing refuses to clobber existing output unless overwrite is set."""
    # Without overwrite the destination must not be replaced
    with self.assertRaises(Exception):
        store_and_sort_corsika_data(self.source_path, self.destination_path, progress=True, thin=self.thin)
    # With overwrite the conversion proceeds
    store_and_sort_corsika_data(self.source_path, self.destination_path, overwrite=True, thin=self.thin)
    validate_results(self, self.expected_path, self.destination_path)
def test_coincidencesesd_output(self):
    """Search/store coincidences with event processing mocked, then validate."""
    with tables.open_file(self.data_path, 'a') as data:
        with patch('sapphire.analysis.process_events.ProcessIndexedEventsWithoutTraces'):
            groups = ['/station_501', '/station_502']
            finder = coincidences.Coincidences(data, '/coincidences', groups, progress=False)
            finder.search_and_store_coincidences()
    validate_results(self, self.get_testdata_path(), self.data_path)
def test__store_data_no_end(self):
    """Store data with no end date and compare against the source file."""
    # _store_data removes its source when done, so operate on a temp copy
    src_copy = create_tempfile_path()
    shutil.copy(test_data_src_path, src_copy)
    out_path = create_tempfile_path()
    first = datetime(2016, 4, 21)
    compression = tables.Filters(complevel=1)
    with tables.open_file(out_path, 'w', filters=compression) as datafile:
        publicdb._store_data(datafile, '/station_501', src_copy, first, None)
    validate_results(self, test_data_src_path, out_path)
    os.remove(out_path)
def test_store_data(self):
    """Command succeeds silently, fails on rerun, succeeds with --overwrite."""
    output = subprocess.check_output(self.command, shell=True)
    self.assertEqual(output, b'')
    # Rerunning over existing output must fail
    with self.assertRaises(subprocess.CalledProcessError):
        subprocess.check_output(self.command + ' --progress', stderr=subprocess.STDOUT, shell=True)
    output = subprocess.check_output(self.command + ' --overwrite', shell=True)
    self.assertEqual(output, b'')
    validate_results(self, self.expected_path, self.destination_path)
def test_perform_update_tasks(self, mock_close):
    """Update the ESD for summaries in need of updates"""
    self.setup_station()
    summary = histograms_factories.SummaryFactory(
        station=self.station,
        date=date(2017, 1, 1),
        needs_update_events=True,
        num_events=168,
        needs_update_weather=True,
        num_weather=60,
        needs_update_config=True,
        num_config=None,
        needs_update_singles=True,
        num_singles=301,
        needs_update=True,
    )
    jobs.perform_update_tasks()

    # Created data should equal the reference file
    test_data = join(settings.ESD_PATH, '2017/1/2017_1_1.h5')
    reference_path = join(dirname(abspath(__file__)), '../data/esd/2017/1/2017_1_1.h5')
    validate_results(self, test_data, reference_path)
    rmtree(settings.ESD_PATH)

    # Processed configuration from data into database
    n_configs = models.Configuration.objects.filter(summary=summary).count()
    self.assertEqual(1, n_configs)

    # Calculated detector offsets
    detector_offset = models.DetectorTimingOffset.objects.get(summary=summary)
    self.assertEqual(-0.25, detector_offset.offset_1)
    self.assertEqual(0.0, detector_offset.offset_2)
    self.assertEqual(1.75, detector_offset.offset_3)
    self.assertEqual(0.5, detector_offset.offset_4)
def test_coincidencesesd_output(self):
    """Search and store ESD coincidences and validate the resulting file."""
    with tables.open_file(self.data_path, "a") as data:
        esd_coincidences = coincidences.CoincidencesESD(
            data,
            "/coincidences",
            ["/station_501", "/station_502"],
            progress=False,
        )
        esd_coincidences.search_and_store_coincidences()
    validate_results(self, self.get_testdata_path(), self.data_path)
def test_store_data(self):
    """Run the store command and validate the file it produces."""
    result = subprocess.check_output(self.command, shell=True)
    # subprocess.check_output returns bytes, so compare against b'' —
    # comparing to the str '' always fails on Python 3 (the sibling
    # test of this command already asserts b'').
    self.assertEqual(result, b'')
    validate_results(self, self.expected_path, self.destination_path)
def test_store_data(self):
    """Generate a CORSIKA overview and compare it with the expected file."""
    generate_corsika_overview(
        source=self.source_path,
        destination=self.destination_path,
    )
    validate_results(self, self.expected_path, self.destination_path)