def test_auto_file_format_during_writing(self):
    """
    The file format is either determined by directly specifying the
    format or deduced from the filename. The former overwrites the
    latter.
    """
    # Get format name and name of the write function for every waveform
    # write plugin that ships with ObsPy itself.
    formats = [
        (key, value.module_name)
        for key, value in _get_default_eps(
            'obspy.plugin.waveform', 'writeFormat').items()
        # Only test plugins that are actually part of ObsPy.
        if value.dist.key == "obspy"]
    # Test for stream as well as for trace.
    stream_trace = [read(), read()[0]]
    # get mseed cache name and mseed function
    mseed_name = "obspy/obspy.plugin.waveform.MSEED/writeFormat"
    mseed_func = _ENTRY_POINT_CACHE.get(mseed_name, _write_mseed)
    for suffix, module_name in formats:
        # get a list of dist, group, name.
        entry_point_list = ["obspy",
                            "obspy.plugin.waveform.%s" % suffix,
                            "writeFormat"]
        # load entry point to make sure it is in the cache.
        buffered_load_entry_point(*entry_point_list)
        # get the cache name for monkey patching.
        entry_point_name = '/'.join(entry_point_list)
        # For stream and trace.
        for obj in stream_trace:
            # Various versions of the suffix -- format deduction from the
            # filename must be case-insensitive.
            for s in [suffix.capitalize(), suffix.lower(), suffix.upper()]:
                # create a mock function and patch the entry point cache.
                write_func = _ENTRY_POINT_CACHE[entry_point_name]
                mocked_func = mock.MagicMock(write_func)
                mock_dict = {entry_point_name: mocked_func}
                with mock.patch.dict(_ENTRY_POINT_CACHE, mock_dict):
                    obj.write("temp." + s)
                # Make sure the fct has actually been called.
                self.assertEqual(mocked_func.call_count, 1)
                # Specifying the format name should overwrite this.
                mocked_mseed_func = mock.MagicMock(mseed_func)
                mseed_mock_dict = {mseed_name: mocked_mseed_func}
                with mock.patch.dict(_ENTRY_POINT_CACHE, mseed_mock_dict):
                    obj.write("temp." + s, format="mseed")
                # The MiniSEED writer ran; the suffix-based writer was
                # NOT called a second time.
                self.assertEqual(mocked_mseed_func.call_count, 1)
                self.assertEqual(mocked_func.call_count, 1)
    # An unknown suffix should raise for BOTH stream and trace.
    # BUG FIX: the original nested the loop inside one assertRaises
    # context, so the context manager exited as soon as the first
    # object raised and the second object was never tested.
    for obj in stream_trace:
        with self.assertRaises(ValueError):
            obj.write("temp.random_suffix")
def test_get_availability_percentage(self):
    """
    ``get_availability_percentage`` turns the (mocked) output of
    ``get_availability`` into a ``(percentage, gap_count)`` tuple.
    """
    client = get_test_client()

    def _check(result, percentage, gap_count):
        # Compare one (percentage, gap_count) result against expectations.
        self.assertAlmostEqual(result[0], percentage)
        self.assertEqual(result[1], gap_count)
        self.assertIsInstance(result, tuple)

    # Two spans with a single gap in between.
    spans = [
        ("AK", "BAGL", "", "LCC",
         UTCDateTime(2018, 8, 10, 22, 0, 54),
         UTCDateTime(2018, 8, 12, 22, 20, 53)),
        ("AK", "BAGL", "", "LCC",
         UTCDateTime(2018, 8, 12, 23, 20, 53),
         UTCDateTime(2018, 9, 11, 0, 0, 0)),
    ]
    client.get_availability = mock.MagicMock(return_value=spans)
    _check(
        client.get_availability_percentage(
            "AK", "BAGL", "--", "LCC",
            UTCDateTime(2018, 8, 10, 22, 0, 54),
            UTCDateTime(2018, 9, 11, 0, 0, 0)),
        0.998659490472, 1)

    # Three one-day spans separated by two one-day gaps.
    spans = [
        ("AK", "BAGL", "", "LCC",
         UTCDateTime(2018, 1, 1), UTCDateTime(2018, 1, 2)),
        ("AK", "BAGL", "", "LCC",
         UTCDateTime(2018, 1, 3), UTCDateTime(2018, 1, 4)),
        ("AK", "BAGL", "", "LCC",
         UTCDateTime(2018, 1, 5), UTCDateTime(2018, 1, 6)),
    ]
    client.get_availability = mock.MagicMock(return_value=spans)
    _check(
        client.get_availability_percentage(
            "AK", "BAGL", "--", "LCC",
            UTCDateTime(2018, 1, 1), UTCDateTime(2018, 1, 6)),
        0.6, 2)

    # Request window extends beyond the available data on both sides,
    # adding two more "gaps" at the edges.
    _check(
        client.get_availability_percentage(
            "AK", "BAGL", "--", "LCC",
            UTCDateTime(2017, 12, 31), UTCDateTime(2018, 1, 7)),
        0.4285714, 4)
def test_image_comparison(self):
    """
    Tests the image comparison mechanism with an expected fail and an
    expected passing test. Also tests that temporary files are deleted
    after both passing and failing tests.
    """
    # Staged images shipped with the test suite.
    path = os.path.join(os.path.dirname(__file__), "images")
    img_basename = "image.png"
    img_ok = os.path.join(path, "image_ok.png")
    img_fail = os.path.join(path, "image_fail.png")
    # image comparison that should pass
    with ImageComparison(path, img_basename) as ic:
        shutil.copy(img_ok, ic.name)
        self.assertTrue(os.path.exists(ic.name))
    # check that temp file is deleted
    self.assertFalse(os.path.exists(ic.name))
    # image comparison that should raise
    # avoid uploading the staged test fail image
    # (after an estimate of 10000 uploads of it.. ;-))
    with mock.patch.object(ImageComparison, '_upload_images',
                           new=mock.MagicMock(return_value='')):
        self.assertRaises(ImageComparisonException,
                          image_comparison_in_function, path,
                          img_basename, img_fail)
        # check that temp file is deleted
        # NOTE(review): ``ic.name`` here still refers to the *passing*
        # comparison's temp file (already asserted deleted above); the
        # failing comparison's temp file name is created inside
        # ``image_comparison_in_function`` and is not visible here --
        # confirm this assertion checks what the comment claims.
        self.assertFalse(os.path.exists(ic.name))
def test_get_waveforms_bulk(self):
    """
    Bulk waveform download: the mocked routing response is split per
    datacenter, the bulk request is expanded via ``get_stations_bulk()``
    and the payloads are handed to ``_download_waveforms()``.
    """
    # Some mock routing response.
    # NOTE(review): the internal newlines of this payload were
    # reconstructed following the EIDAWS routing ``format=post``
    # convention (URL line, channel lines, blank line between
    # datacenters) -- confirm against the original file.
    content = """
http://example1.com/fdsnws/station/1/query
AA B1 -- DD 2017-01-01T00:00:00 2017-01-02T00:10:00

http://example2.com/fdsnws/station/1/query
AA B2 -- DD 2017-01-01T00:00:00 2017-01-02T00:10:00
"""
    if hasattr(content, "encode"):
        content = content.encode()
    # Patch out all network touching parts of the client.
    with mock.patch(self._cls + "._download") as p1, \
            mock.patch(self._cls + "._download_waveforms") as p2, \
            mock.patch(self._cls + ".get_stations_bulk") as p3:
        p1.return_value = _DummyResponse(content=content)
        p2.return_value = "1234"
        # For the underlying get_stations_bulk() call.
        _dummy_inv = mock.MagicMock()
        _dummy_inv.get_contents.return_value = {
            "channels": ["AA.BB.CC.DD", "AA.BB.CC.DD"]}
        p3.return_value = _dummy_inv
        st = self.client.get_waveforms_bulk(
            [["AA", "B*", "", "DD", obspy.UTCDateTime(2017, 1, 1),
              obspy.UTCDateTime(2017, 1, 2)]],
            longestonly=True, minimumlength=2)
    # The mocked _download_waveforms() return value is passed through.
    self.assertEqual(st, "1234")
    self.assertEqual(p1.call_count, 1)
    self.assertEqual(p1.call_args[0][0],
                     "http://www.orfeus-eu.org/eidaws/routing/1/query")
    # This has been modified by our mocked call to get_stations_bulk().
    self.assertEqual(p1.call_args[1]["data"], (
        b"service=dataselect\nformat=post\n"
        b"AA BB CC DD 2017-01-01T00:00:00.000000 "
        b"2017-01-02T00:00:00.000000"))
    # This is the final call to _download_waveforms() which is again
    # dependent on the dummy response to the _download() function.
    self.assertEqual(p2.call_args[0][0], {
        "http://example1.com":
            "AA B1 -- DD 2017-01-01T00:00:00 2017-01-02T00:10:00",
        "http://example2.com":
            "AA B2 -- DD 2017-01-01T00:00:00 2017-01-02T00:10:00"})
    self.assertEqual(p2.call_args[1],
                     {"longestonly": True, "minimumlength": 2})
    # Call to this only dependent on the original bulk request.
    self.assertEqual(p3.call_count, 1)
    self.assertEqual(
        p3.call_args[0][0][0],
        ["AA", "B*", "--", "DD",
         str(obspy.UTCDateTime(2017, 1, 1))[:-1],
         str(obspy.UTCDateTime(2017, 1, 2))[:-1]])
    # Everything should be passed on.
    self.assertEqual(p3.call_args[1], {
        "format": "text", "level": "channel",
        "longestonly": True, "minimumlength": 2})
def test_download_leap_seconds_file(self):
    """
    ``download_leap_seconds_file`` writes the leap second list to the
    requested target path next to the sqlite database.
    """
    data_dir = get_test_data_filepath()
    db_path = os.path.join(data_dir, 'timeseries.sqlite')
    indexer = Indexer(data_dir, database=db_path, loglevel="ERROR")
    # Stub out the actual HTTP fetch -- no internet connection in tests.
    indexer._download = mock.MagicMock(return_value=requests.Response())
    # Target an (empty) leap-seconds.list alongside the database file.
    target = os.path.join(os.path.dirname(db_path), "leap-seconds.list")
    written = indexer.download_leap_seconds_file(target)
    # The file must exist and must be exactly where we asked for it,
    # i.e. in the same location as the sqlite db.
    self.assertTrue(os.path.isfile(written))
    self.assertEqual(written, target)
    os.remove(target)
def test_get_nslc(self):
    """
    ``get_nslc`` returns the distinct
    (network, station, location, channel) tuples matching the query.
    """
    client = get_test_client()

    # First exercise the real sqlite3 test database.
    expected = [(u'CU', u'TGUH', u'00', u'BHZ')]
    self.assertListEqual(
        client.get_nslc("I*,CU", "ANMO,COL?,T*", "00,10", "BHZ",
                        "2018-01-01T00:00:00.000000",
                        "2018-01-01T00:00:00.019499"),
        expected)
    # Same result without an explicit time window / channel filter.
    self.assertListEqual(client.get_nslc("CU", "ANMO,COL?,T*"), expected)

    # Now mock client._get_summary_rows for more diverse data.
    NamedRow = namedtuple('NamedRow', [
        'network', 'station', 'location', 'channel', 'earliest', 'latest'])
    summary_rows = [
        NamedRow("AK", "ANM", "", "VM2",
                 "2018-08-10T21:52:50.000000", "2018-08-10T22:12:39.999991"),
        NamedRow("AK", "ANM", "", "VM3",
                 "2018-08-10T21:52:50.000000", "2018-08-10T22:12:39.999991"),
        NamedRow("AK", "ANM", "", "VM4",
                 "2018-08-10T21:52:50.000000", "2018-08-10T22:12:39.999991"),
        NamedRow("XX", "ANM", "", "VM5",
                 "2018-08-10T21:52:50.000000", "2018-08-10T22:12:39.999991"),
        NamedRow("N4", "H43A", "", "VM2",
                 "2018-08-10T21:09:39.000000", "2018-08-10T22:09:28.890415"),
        NamedRow("N4", "H43A", "", "VM3",
                 "2018-08-10T21:09:39.000000", "2018-08-10T22:09:28.890415"),
    ]
    client._get_summary_rows = mock.MagicMock(return_value=summary_rows)
    # Result is sorted by (network, station, location, channel).
    expected = [("AK", "ANM", "", "VM2"),
                ("AK", "ANM", "", "VM3"),
                ("AK", "ANM", "", "VM4"),
                ("N4", "H43A", "", "VM2"),
                ("N4", "H43A", "", "VM3"),
                ("XX", "ANM", "", "VM5")]
    self.assertEqual(
        client.get_nslc("AK,N4,XX", "ANM,H43A", "", "VM2,VM3,VM4,VM5",
                        "2018-08-10T21:09:39.000000",
                        "2018-08-10T22:09:28.890415"),
        expected)
def test_build_file_list(self):
    """
    ``Indexer.build_file_list`` collects files matching the filename
    pattern, as relative or absolute paths, and skips already-indexed
    files unless ``reindex=True``.
    """
    filepath = get_test_data_filepath()
    database = os.path.join(filepath, 'timeseries.sqlite')
    indexer = Indexer(filepath, database=database,
                      filename_pattern="*.mseed", loglevel="ERROR")
    # test for relative paths
    file_list = indexer.build_file_list(relative_paths=True, reindex=True)
    file_list.sort()
    self.assertEqual(len(file_list), 3)
    self.assertEqual(
        os.path.normpath(
            'CU/2018/001/CU.TGUH.00.BHZ.2018.001_first_minute.mseed'),
        file_list[0])
    self.assertEqual(
        os.path.normpath(
            'IU/2018/001/IU.ANMO.10.BHZ.2018.001_first_minute.mseed'),
        file_list[1])
    self.assertEqual(
        os.path.normpath(
            'IU/2018/001/IU.COLA.10.BHZ.2018.001_first_minute.mseed'),
        file_list[2])
    # case where the root path is outside of the absolute
    # data path, to assert that already indexed files are still skipped
    indexer = Indexer(tempfile.mkdtemp(),
                      database=TSIndexDatabaseHandler(database=database),
                      filename_pattern="*.mseed", loglevel="ERROR")
    self.assertRaisesRegex(OSError, "^No files matching filename.*$",
                           indexer.build_file_list, reindex=True)
    # test for absolute paths
    # this time pass a TSIndexDatabaseHandler instance as the database
    indexer = Indexer(filepath,
                      database=TSIndexDatabaseHandler(database=database),
                      filename_pattern="*.mseed",
                      leap_seconds_file=None, loglevel="ERROR")
    file_list = indexer.build_file_list(reindex=True)
    file_list.sort()
    self.assertEqual(len(file_list), 3)
    # Absolute paths: each entry ends with the relative path but is not
    # equal to it (it carries the root prefix).
    self.assertNotEqual(
        os.path.normpath(
            'CU/2018/001/CU.TGUH.00.BHZ.2018.001_first_minute.mseed'),
        file_list[0])
    self.assertIn(
        os.path.normpath(
            'CU/2018/001/CU.TGUH.00.BHZ.2018.001_first_minute.mseed'),
        file_list[0])
    self.assertNotEqual(
        os.path.normpath(
            'IU/2018/001/IU.ANMO.10.BHZ.2018.001_first_minute.mseed'),
        file_list[1])
    self.assertIn(
        os.path.normpath(
            'IU/2018/001/IU.ANMO.10.BHZ.2018.001_first_minute.mseed'),
        file_list[1])
    self.assertNotEqual(
        os.path.normpath(
            'IU/2018/001/IU.COLA.10.BHZ.2018.001_first_minute.mseed'),
        file_list[2])
    self.assertIn(
        os.path.normpath(
            'IU/2018/001/IU.COLA.10.BHZ.2018.001_first_minute.mseed'),
        file_list[2])
    # test that already indexed files (relative and absolute) get skipped.
    self.assertRaisesRegex(OSError,
                           "^No unindexed files matching filename.*$",
                           indexer.build_file_list,
                           reindex=False, relative_paths=False)
    self.assertRaisesRegex(OSError,
                           "^No unindexed files matching filename.*$",
                           indexer.build_file_list,
                           reindex=False, relative_paths=True)
    # for this test mock an unindexed file ('data.mseed') to ensure that
    # it gets added when reindex is True
    # NOTE(review): the assertion below actually calls
    # build_file_list(reindex=False) -- i.e. only the unindexed
    # 'data.mseed' survives the skip; confirm the comment's intent.
    mocked_files = [
        'CU/2018/001/'
        'CU.TGUH.00.BHZ.2018.001_first_minute.mseed',
        'IU/2018/001/'
        'IU.ANMO.10.BHZ.2018.001_first_minute.mseed',
        'IU/2018/001/'
        'IU.COLA.10.BHZ.2018.001_first_minute.mseed',
        'data.mseed'
    ]
    # Normalize separators so the comparison works on all platforms.
    for i in range(len(mocked_files)):
        mocked_files[i] = os.path.normpath(mocked_files[i])
    indexer._get_rootpath_files = mock.MagicMock(return_value=mocked_files)
    self.assertEqual(
        indexer.build_file_list(reindex=False, relative_paths=False),
        ['data.mseed'])
def test_get_availability(self):
    """
    ``get_availability`` returns (net, sta, loc, chan, start, end)
    tuples, optionally merging overlapping spans and appending the
    sample rate.
    """
    client = get_test_client()
    # test using actual sqlite test database
    expected_avail = [(u'IU', u'ANMO', u'10', u'BHZ',
                       UTCDateTime(2018, 1, 1, 0, 0, 0, 19500),
                       UTCDateTime(2018, 1, 1, 0, 0, 59, 994536)),
                      (u'IU', u'COLA', u'10', u'BHZ',
                       UTCDateTime(2018, 1, 1, 0, 0, 0, 19500),
                       UTCDateTime(2018, 1, 1, 0, 0, 59, 994538))]
    actual_avail = client.get_availability("IU", "ANMO,COLA", "10", "BHZ",
                                           "2018-01-01", "2018-12-31")
    self.assertListEqual(actual_avail, expected_avail)
    # Omitting the optional arguments must yield the same result.
    actual_avail = client.get_availability("IU", "ANMO,COLA")
    self.assertListEqual(actual_avail, expected_avail)
    # test using mocked client._get_summary_rows method for more diversity
    NamedRow = namedtuple('NamedRow', [
        'network', 'station', 'location', 'channel', 'samplerate',
        'starttime', 'endtime', 'timespans'
    ])
    # Each row contains one gap. The first rows latest time is adjacent
    # to the second rows earliest time. The third row overlaps with the
    # second row.
    # Timespans are "[epoch_start:epoch_end]" strings as stored in the
    # tsindex table.
    mocked_tsindex_rows = \
        [
            # 2018-08-10T22:00:54 to 2018-08-10T22:15:53
            # 2018-08-10T22:05:54 to 2018-08-10T22:20:53 (MERGE)
            NamedRow(
                network=u'AK', station=u'BAGL', location=u'',
                channel=u'LCC',
                starttime=u'2018-08-10T22:00:54.000000',
                endtime=u'2018-08-10T22:20:53.000000',
                samplerate=1.0,
                timespans=u'[1533938454.000000:1533939353.000000],'
                          u'[1533938754.000000:1533939653.000000]'),
            # 2018-08-10T22:20:53.999000 to 2018-08-12T22:20:53 (JOIN)
            # 2018-08-12T23:20:53 to 2018-09-01T23:20:53
            NamedRow(
                network=u'AK', station=u'BAGL', location=u'',
                channel=u'LCC',
                starttime=u'2018-08-10T22:20:53.999000',
                endtime=u'2018-09-01T23:20:53.000000',
                samplerate=1.0,
                timespans=u'[1533939653.999000:1534112453.000000],'
                          u'[1534116053.000000:1535844053.000000]'),
            # (MERGE IF INCL SAMPLE RATE IS TRUE)
            # 2018-08-27T00:00:00 to 2018-09-11T00:00:00
            NamedRow(network=u'AK', station=u'BAGL', location=u'',
                     channel=u'LCC',
                     starttime=u'2018-08-27T00:00:00.000000',
                     endtime=u'2018-09-11T00:00:00.000000',
                     samplerate=10.0,
                     timespans=u'[1535328000.0:1536624000.0]')
        ]
    client._get_tsindex_rows = \
        mock.MagicMock(return_value=mocked_tsindex_rows)
    expected_unmerged_avail = [
        ("AK", "BAGL", "", "LCC",
         UTCDateTime(2018, 8, 10, 22, 0, 54),
         UTCDateTime(2018, 8, 10, 22, 15, 53)),
        ("AK", "BAGL", "", "LCC",
         UTCDateTime(2018, 8, 10, 22, 5, 54),
         UTCDateTime(2018, 8, 12, 22, 20, 53)),
        ("AK", "BAGL", "", "LCC",
         UTCDateTime(2018, 8, 12, 23, 20, 53),
         UTCDateTime(2018, 9, 1, 23, 20, 53)),
        ("AK", "BAGL", "", "LCC",
         UTCDateTime(2018, 8, 27, 0, 0),
         UTCDateTime(2018, 9, 11, 0, 0, 0))
    ]
    # test default options
    self.assertListEqual(
        client.get_availability(
            "AK", "BAGL", "", "LCC",
            UTCDateTime(2018, 8, 10, 22, 0, 54),
            UTCDateTime(2018, 8, 10, 22, 9, 28, 890415)),
        expected_unmerged_avail)
    # test merge overlap false
    self.assertListEqual(
        client.get_availability("AK", "BAGL", "--", "LCC",
                                UTCDateTime(2018, 8, 10, 22, 0, 54),
                                UTCDateTime(2018, 8, 10, 22, 9, 28,
                                            890415),
                                merge_overlap=False),
        expected_unmerged_avail)
    # test merge overlap true -- overlapping/adjacent 1.0 Hz spans
    # collapse into two continuous spans.
    expected_merged_avail = [
        ("AK", "BAGL", "", "LCC",
         UTCDateTime(2018, 8, 10, 22, 0, 54),
         UTCDateTime(2018, 8, 12, 22, 20, 53)),
        ("AK", "BAGL", "", "LCC",
         UTCDateTime(2018, 8, 12, 23, 20, 53),
         UTCDateTime(2018, 9, 11, 0, 0, 0))
    ]
    self.assertListEqual(
        client.get_availability("AK", "BAGL", "--", "LCC",
                                UTCDateTime(2018, 8, 10, 22, 0, 54),
                                UTCDateTime(2018, 8, 10, 22, 9, 28,
                                            890415),
                                merge_overlap=True),
        expected_merged_avail)
    # test include_sample_rate true -- spans with different sample
    # rates stay separate and each tuple gains the rate.
    expected_incl_sr_avail = \
        [("AK", "BAGL", "", "LCC",
          UTCDateTime(2018, 8, 10, 22, 0, 54),
          UTCDateTime(2018, 8, 12, 22, 20, 53), 1.0),
         ("AK", "BAGL", "", "LCC",
          UTCDateTime(2018, 8, 12, 23, 20, 53),
          UTCDateTime(2018, 9, 1, 23, 20, 53), 1.0),
         ("AK", "BAGL", "", "LCC",
          UTCDateTime(2018, 8, 27, 0, 0),
          UTCDateTime(2018, 9, 11, 0, 0, 0), 10.0)]
    self.assertListEqual(
        client.get_availability("AK", "BAGL", "--", "LCC",
                                UTCDateTime(2018, 8, 10, 22, 0, 54),
                                UTCDateTime(2018, 8, 10, 22, 9, 28,
                                            890415),
                                merge_overlap=True,
                                include_sample_rate=True),
        expected_incl_sr_avail)
def test_get_availability_extent(self):
    """
    ``get_availability_extent`` returns one
    (net, sta, loc, chan, earliest, latest) tuple per channel.
    """
    client = get_test_client()
    # test using actual sqlite test database
    expected_nslc = [(u'IU', u'ANMO', u'10', u'BHZ',
                      UTCDateTime(2018, 1, 1, 0, 0, 0, 19500),
                      UTCDateTime(2018, 1, 1, 0, 0, 59, 994536)),
                     (u'IU', u'COLA', u'10', u'BHZ',
                      UTCDateTime(2018, 1, 1, 0, 0, 0, 19500),
                      UTCDateTime(2018, 1, 1, 0, 0, 59, 994538))]
    actual_avail_extents = client.get_availability_extent(
        "I*", "ANMO,COL?,T*", "00,10", "BHZ",
        "2018-01-01T00:00:00.000000", "2018-12-31T00:00:00.000000")
    self.assertListEqual(actual_avail_extents, expected_nslc)
    # Network-only query must give the same result.
    actual_avail_extents = client.get_availability_extent("I*")
    self.assertListEqual(actual_avail_extents, expected_nslc)
    # test using mocked client._get_summary_rows method for more diversity
    NamedRow = namedtuple('NamedRow', [
        'network', 'station', 'location', 'channel', 'earliest', 'latest'
    ])
    mocked_summary_rows = [
        NamedRow("AK", "ANM", "", "VM2",
                 "2018-08-10T21:52:50.000000",
                 "2018-08-10T22:12:39.999991"),
        NamedRow("AK", "ANM", "", "VM3",
                 "2018-08-10T21:52:50.000000",
                 "2018-08-10T22:12:39.999991"),
        NamedRow("AK", "ANM", "", "VM5",
                 "2018-08-10T21:52:50.000000",
                 "2018-08-10T22:15:39.999991"),
        NamedRow("N4", "H43A", "", "VM2",
                 "2018-08-10T21:09:39.000000",
                 "2018-08-10T22:09:28.890415"),
        NamedRow("N4", "H43A", "", "VM3",
                 "2018-08-10T21:09:39.000000",
                 "2018-08-10T22:09:28.890415"),
        NamedRow("XX", "ANM", "", "VM4",
                 "2018-08-10T21:52:50.000000",
                 "2018-08-10T22:12:39.999991")
    ]
    client._get_summary_rows = \
        mock.MagicMock(return_value=mocked_summary_rows)
    # String timestamps in the rows come back as UTCDateTime objects.
    expected_avail_extents = \
        [("AK", "ANM", "", "VM2",
          UTCDateTime("2018-08-10T21:52:50.000000"),
          UTCDateTime("2018-08-10T22:12:39.999991")),
         ("AK", "ANM", "", "VM3",
          UTCDateTime("2018-08-10T21:52:50.000000"),
          UTCDateTime("2018-08-10T22:12:39.999991")),
         ("AK", "ANM", "", "VM5",
          UTCDateTime("2018-08-10T21:52:50.000000"),
          UTCDateTime("2018-08-10T22:15:39.999991")),
         ("N4", "H43A", "", "VM2",
          UTCDateTime("2018-08-10T21:09:39.000000"),
          UTCDateTime("2018-08-10T22:09:28.890415")),
         ("N4", "H43A", "", "VM3",
          UTCDateTime("2018-08-10T21:09:39.000000"),
          UTCDateTime("2018-08-10T22:09:28.890415")),
         ("XX", "ANM", "", "VM4",
          UTCDateTime("2018-08-10T21:52:50.000000"),
          UTCDateTime("2018-08-10T22:12:39.999991"))]
    self.assertListEqual(
        client.get_availability_extent("AK,N4", "ANM,H43A", "",
                                       "VM2,VM3,VM4,VM5",
                                       "2018-08-10T21:09:39.000000",
                                       "2018-08-10T22:09:28.890415"),
        expected_avail_extents)