def test_issue165(self):
    """
    Test cases related to #165:
     - number of poles or zeros can be 0
     - an unsupported response information somewhere in the metadata
       should not automatically raise an Error, if the desired
       information can still be retrieved

    This test also tests if a warning is raised if no startime is given.
    """
    parser = Parser()
    file = os.path.join(self.path, "bug165.dataless")
    t = UTCDateTime("2010-01-01T00:00:00")
    # raises UserWarning
    with warnings.catch_warnings(record=True) as w:
        warnings.simplefilter("always")
        # Trigger a warning.
        parser.read(file)
        self.assertEqual(len(w), 1)
        self.assertTrue(issubclass(w[-1].category, UserWarning))
        # BUG FIX: the original `'date' and 'required' in msg` only
        # checked for 'required' ('date' is a truthy constant there);
        # assert both substrings explicitly.
        msg = str(w[-1].message).lower()
        self.assertTrue('date' in msg)
        self.assertTrue('required' in msg)
    # Triggers a warning.
    paz = parser.get_paz("NZ.DCZ.20.HNZ", t)
    result = {'digitizer_gain': 419430.0,
              'gain': 24595700000000.0,
              'poles': [(-981 + 1009j), (-981 - 1009j),
                        (-3290 + 1263j), (-3290 - 1263j)],
              'seismometer_gain': 1.01885,
              'sensitivity': 427336.0,
              'zeros': []}
    self.assertEqual(paz, result)
def test_issue_157(self):
    """
    Test case for issue #157: re-using parser object.
    """
    # Full coordinate record expected for BW.FURT..EHZ.
    expected = {
        'latitude': 48.162899,
        'elevation': 565.0,
        'longitude': 11.2752,
        'local_depth': 0.0,
        'azimuth': 0.0,
        'dip': -90.0,
    }
    furt_file = os.path.join(self.path, 'dataless.seed.BW_FURT')
    manz_file = os.path.join(self.path, 'dataless.seed.BW_MANZ')
    query_time = UTCDateTime("2010-07-01")
    parser = Parser()
    parser.read(manz_file)
    # A second read() on the same parser clears it first and emits a
    # UserWarning; escalate it to an error once, then ignore it.
    with warnings.catch_warnings(record=True):
        warnings.simplefilter("error", UserWarning)
        self.assertRaises(UserWarning, parser.read, furt_file)
        warnings.simplefilter("ignore", UserWarning)
        parser.read(furt_file)
        result = parser.get_coordinates("BW.FURT..EHZ", query_time)
    self.assertEqual(expected, result)
def test_issue165(self):
    """
    Test cases related to #165:
     - number of poles or zeros can be 0
     - an unsupported response information somewhere in the metadata
       should not automatically raise an Error, if the desired
       information can still be retrieved
    """
    parser = Parser(strict=True)
    dataless_path = os.path.join(self.path, "bug165.dataless")
    query_time = UTCDateTime("2010-01-01T00:00:00")
    parser.read(dataless_path)
    paz = parser.get_paz("NZ.DCZ.20.HNZ", query_time)
    # Note the empty 'zeros' list — a zero count of zeros must work.
    expected = {
        'digitizer_gain': 419430.0,
        'gain': 24595700000000.0,
        'poles': [(-981 + 1009j), (-981 - 1009j),
                  (-3290 + 1263j), (-3290 - 1263j)],
        'seismometer_gain': 1.01885,
        'sensitivity': 427336.0,
        'zeros': [],
    }
    self.assertEqual(paz, expected)
def test_issue_358(self):
    """
    Test case for issue #358.
    """
    filename = os.path.join(self.path, "CL.AIO.dataless")
    parser = Parser()
    parser.read(filename)
    dt = UTCDateTime("2012-01-01")
    # Use the snake_case get_paz API for consistency with the other
    # tests in this file (getPAZ is the deprecated camelCase alias).
    # The call must simply not raise.
    parser.get_paz("CL.AIO.00.EHZ", dt)
def test_issue_358(self):
    """
    Test case for issue #358.
    """
    parser = Parser()
    parser.read(os.path.join(self.path, 'CL.AIO.dataless'))
    # Fetching the response for this epoch must not raise.
    query_time = UTCDateTime('2012-01-01')
    parser.get_paz('CL.AIO.00.EHZ', query_time)
def test_issue358(self):
    """
    Test case for issue #358.
    """
    filename = os.path.join(self.path, 'CL.AIO.dataless')
    parser = Parser()
    parser.read(filename)
    dt = UTCDateTime('2012-01-01')
    # Use the snake_case get_paz API for consistency with the other
    # tests in this file (getPAZ is the deprecated camelCase alias).
    # The call must simply not raise.
    parser.get_paz('CL.AIO.00.EHZ', dt)
def is_valid_dataless(file_path):
    """
    Check if is a valid dataless file.

    :param file_path: The full file's path.
    :return: True if path is a valid dataless. False otherwise.
    """
    parser = Parser()
    try:
        parser.read(file_path)
    except Exception:
        # BUG FIX: the original caught only IOError, but Parser.read()
        # raises parser-level exceptions (e.g. SEEDParserException) on
        # malformed content — any failure means the file is not a valid
        # dataless volume, so return False instead of propagating.
        return False
    return True
def test_splitStationsDataless2XSEED(self):
    """
    Test case for writing dataless to XSEED with multiple entries.
    """
    filename = os.path.join(self.path, 'dataless.seed.BW_DHFO')
    parser = Parser()
    parser.read(filename)
    with NamedTemporaryFile() as fh:
        tempfile = fh.name
        # this will create two files due to two entries in dataless;
        # use the snake_case write_xseed API (write_XSEED is the
        # deprecated camelCase alias).
        parser.write_xseed(tempfile, split_stations=True)
        # the second file name is appended with the timestamp of start
        # period
        os.remove(tempfile + '.1301529600.0.xml')
def test_split_stations_dataless_to_xseed(self):
    """
    Test case for writing dataless to XSEED with multiple entries.
    """
    parser = Parser()
    parser.read(os.path.join(self.path, 'dataless.seed.BW_DHFO'))
    with NamedTemporaryFile() as fh:
        out_name = fh.name
        # Two station entries in the dataless produce two output files.
        parser.write_xseed(out_name, split_stations=True)
        # The second file carries the start-period timestamp in its
        # name and is not cleaned up by NamedTemporaryFile — remove it.
        os.remove(out_name + '.1301529600.0.xml')
def test_issue361(self):
    """
    Test case for issue #361.
    """
    filename = os.path.join(self.path, 'G.SPB.dataless')
    parser = Parser()
    parser.read(filename)
    # Use the snake_case get_paz API for consistency with the other
    # tests in this file (getPAZ is the deprecated camelCase alias).
    # 1 - G.SPB..BHZ - no Laplace transform - works
    parser.get_paz('G.SPB..BHZ')
    # 2 - G.SPB.00.BHZ - raises exception because of multiple results
    self.assertRaises(SEEDParserException, parser.get_paz, 'G.SPB.00.BHZ')
    # 3 - G.SPB.00.BHZ with datetime - no Laplace transform - works
    dt = UTCDateTime('2007-01-01')
    parser.get_paz('G.SPB.00.BHZ', dt)
    # 4 - G.SPB.00.BHZ with later datetime works
    dt = UTCDateTime('2012-01-01')
    parser.get_paz('G.SPB.00.BHZ', dt)
def test_multipleContinuedStationControlHeader(self):
    """
    Station control headers continued over several records (a blockette
    054 whose body spans multiple 256-byte records) must be reassembled
    and parsed correctly.
    """
    # create a valid blockette 010 with record length 256;
    # use the snake_case parse_seed/get_seed API for consistency with
    # the other tests in this file (parse_SEED/get_SEED are the
    # deprecated camelCase aliases).
    b010 = b"0100042 2.4082008,001~2038,001~2009,001~~~"
    blockette = Blockette010(strict=True, compact=True)
    blockette.parse_seed(b010)
    self.assertEqual(b010, blockette.get_seed())
    # create a valid blockette 054
    b054 = b"0540960A0400300300000039"
    nr = b""
    for i in range(0, 78):  # 960 chars
        nr = nr + ("+1.000%02dE-03" % i).encode('ascii', 'strict')
    blockette = Blockette054(strict=True, compact=True)
    blockette.parse_seed(b054 + nr)
    self.assertEqual(b054 + nr, blockette.get_seed())
    # create a blockette 051
    b051 = b'05100271999,123~~0001000000'
    blockette = Blockette051(strict=False)
    # ignore user warning
    with warnings.catch_warnings(record=True):
        warnings.simplefilter("ignore")
        blockette.parse_seed(b051)
    # combine data (each line equals 256 chars)
    data = b"000001V " + b010 + (b' ' * 206)
    data += b"000002S " + b054 + nr[0:224]  # 256-8-24 = 224
    data += b"000003S*" + nr[224:472]  # 256-8 = 248
    data += b"000004S*" + nr[472:720]
    data += b"000005S*" + nr[720:] + b051 + b' ' * 5  # 5 spaces left
    self.assertEqual(len(data), 256 * 5)
    data += b"000006S " + b054 + nr[0:224]  # 256-8-24 = 224
    data += b"000007S*" + nr[224:472]  # 256-8 = 248
    data += b"000008S*" + nr[472:720]
    data += b"000009S*" + nr[720:] + b' ' * 32  # 32 spaces left
    self.assertEqual(len(data), 256 * 9)
    # read records
    parser = Parser(strict=False)
    with warnings.catch_warnings():
        warnings.simplefilter("ignore")
        parser.read(data)
    # check results
    self.assertEqual(sorted(parser.blockettes.keys()), [10, 51, 54])
    self.assertEqual(len(parser.blockettes[10]), 1)
    self.assertEqual(len(parser.blockettes[51]), 1)
    self.assertEqual(len(parser.blockettes[54]), 2)
def test_issue_361(self):
    """
    Test case for issue #361.
    """
    parser = Parser()
    parser.read(os.path.join(self.path, 'G.SPB.dataless'))
    # Without a location code and no Laplace transform: works.
    parser.get_paz('G.SPB..BHZ')
    # Ambiguous query (multiple matching responses) must raise.
    self.assertRaises(SEEDParserException, parser.get_paz, 'G.SPB.00.BHZ')
    # Disambiguated by datetime: the early epoch resolves fine ...
    early = UTCDateTime('2007-01-01')
    parser.get_paz('G.SPB.00.BHZ', early)
    # ... and so does the later one.
    late = UTCDateTime('2012-01-01')
    parser.get_paz('G.SPB.00.BHZ', late)
def test_multiple_continued_station_control_header(self):
    """
    Station control headers continued over several records (a blockette
    054 whose body spans multiple 256-byte records) must be reassembled
    and parsed correctly.
    """
    # A valid blockette 010 declaring a record length of 256.
    b010 = b"0100042 2.4082008,001~2038,001~2009,001~~~"
    blk = Blockette010(strict=True, compact=True)
    blk.parse_seed(b010)
    self.assertEqual(b010, blk.get_seed())
    # A valid blockette 054 followed by 936 chars of coefficients.
    b054 = b"0540960A0400300300000039"
    payload = b"".join(
        ("+1.000%02dE-03" % i).encode('ascii', 'strict')
        for i in range(0, 78))  # 960 chars
    blk = Blockette054(strict=True, compact=True)
    blk.parse_seed(b054 + payload)
    self.assertEqual(b054 + payload, blk.get_seed())
    # A blockette 051; parsing it emits a user warning we suppress.
    b051 = b'05100271999,123~~0001000000'
    blk = Blockette051(strict=False)
    with warnings.catch_warnings(record=True):
        warnings.simplefilter("ignore")
        blk.parse_seed(b051)
    # Assemble the volume; every record is exactly 256 bytes.
    records = b"000001V " + b010 + (b' ' * 206)
    records += b"000002S " + b054 + payload[0:224]  # 256-8-24 = 224
    records += b"000003S*" + payload[224:472]  # 256-8 = 248
    records += b"000004S*" + payload[472:720]
    records += b"000005S*" + payload[720:] + b051 + b' ' * 5  # 5 left
    self.assertEqual(len(records), 256 * 5)
    records += b"000006S " + b054 + payload[0:224]  # 256-8-24 = 224
    records += b"000007S*" + payload[224:472]  # 256-8 = 248
    records += b"000008S*" + payload[472:720]
    records += b"000009S*" + payload[720:] + b' ' * 32  # 32 left
    self.assertEqual(len(records), 256 * 9)
    # Parse the assembled volume while silencing warnings.
    parser = Parser(strict=False)
    with warnings.catch_warnings():
        warnings.simplefilter("ignore")
        parser.read(records)
    # One blockette 010 and 051 each, and two blockettes 054 expected.
    self.assertEqual(sorted(parser.blockettes.keys()), [10, 51, 54])
    self.assertEqual(len(parser.blockettes[10]), 1)
    self.assertEqual(len(parser.blockettes[51]), 1)
    self.assertEqual(len(parser.blockettes[54]), 2)
def test_issue_157(self):
    """
    Test case for issue #157: re-using parser object.
    """
    # BUG FIX: get_coordinates() also reports 'azimuth' and 'dip'
    # (see the other test_issue_157 variant in this file), so the
    # expected dictionary must include them or assertEqual fails.
    expected = {"latitude": 48.162899, "elevation": 565.0,
                "longitude": 11.2752, "local_depth": 0.0,
                "azimuth": 0.0, "dip": -90.0}
    filename1 = os.path.join(self.path, "dataless.seed.BW_FURT")
    filename2 = os.path.join(self.path, "dataless.seed.BW_MANZ")
    t = UTCDateTime("2010-07-01")
    parser = Parser()
    parser.read(filename2)
    # parsing a second time will raise a UserWarning: Clearing parser
    # before every subsequent read()
    with warnings.catch_warnings(record=True):
        warnings.simplefilter("error", UserWarning)
        self.assertRaises(UserWarning, parser.read, filename1)
        warnings.simplefilter("ignore", UserWarning)
        parser.read(filename1)
        result = parser.get_coordinates("BW.FURT..EHZ", t)
    self.assertEqual(expected, result)
def test_issue165(self):
    """
    Test cases related to #165:
     - number of poles or zeros can be 0
     - an unsupported response information somewhere in the metadata
       should not automatically raise an Error, if the desired
       information can still be retrieved
    """
    strict_parser = Parser(strict=True)
    bug_file = os.path.join(self.path, "bug165.dataless")
    when = UTCDateTime("2010-01-01T00:00:00")
    strict_parser.read(bug_file)
    paz = strict_parser.get_paz("NZ.DCZ.20.HNZ", when)
    # An empty 'zeros' list is valid and must round-trip unchanged.
    expected = {'digitizer_gain': 419430.0,
                'gain': 24595700000000.0,
                'poles': [(-981 + 1009j), (-981 - 1009j),
                          (-3290 + 1263j), (-3290 - 1263j)],
                'seismometer_gain': 1.01885,
                'sensitivity': 427336.0,
                'zeros': []}
    self.assertEqual(paz, expected)
def test_blocketteStartsAfterRecord(self):
    """
    '... 058003504 1.00000E+00 0.00000E+0000 000006S*0543864 ... '
    ' 0543864' -> results in Blockette 005
    """
    # create a valid blockette 010 with record length 256;
    # use the snake_case parse_seed/get_seed API for consistency with
    # the other tests in this file (parse_SEED/get_SEED are the
    # deprecated camelCase aliases).
    b010 = b"0100042 2.4082008,001~2038,001~2009,001~~~"
    blockette = Blockette010(strict=True, compact=True)
    blockette.parse_seed(b010)
    self.assertEqual(b010, blockette.get_seed())
    # create a valid blockette 054
    b054 = b"0540240A0400300300000009" + (b"+1.58748E-03" * 18)
    blockette = Blockette054(strict=True, compact=True)
    blockette.parse_seed(b054)
    self.assertEqual(b054, blockette.get_seed())
    # combine data
    data = b"000001V " + b010 + (b' ' * 206)
    data += b"000002S " + b054 + (b' ' * 8)
    data += b"000003S*" + b054 + (b' ' * 8)
    # read records; must not raise even in strict mode
    parser = Parser(strict=True)
    parser.read(data)
def test_blockette_starts_after_record(self):
    """
    '... 058003504 1.00000E+00 0.00000E+0000 000006S*0543864 ... '
    ' 0543864' -> results in Blockette 005
    """
    # Blockette 010 declaring a record length of 256 bytes.
    b010 = b"0100042 2.4082008,001~2038,001~2009,001~~~"
    blk10 = Blockette010(strict=True, compact=True)
    blk10.parse_seed(b010)
    self.assertEqual(b010, blk10.get_seed())
    # Blockette 054 carrying 18 repeated coefficients.
    b054 = b"0540240A0400300300000009" + (b"+1.58748E-03" * 18)
    blk54 = Blockette054(strict=True, compact=True)
    blk54.parse_seed(b054)
    self.assertEqual(b054, blk54.get_seed())
    # Assemble three 256-byte records.
    volume = b"000001V " + b010 + (b' ' * 206)
    volume += b"000002S " + b054 + (b' ' * 8)
    volume += b"000003S*" + b054 + (b' ' * 8)
    # Parsing must succeed even with strict checking enabled.
    parser = Parser(strict=True)
    parser.read(volume)
def test_issue165(self):
    """
    Test cases related to #165:
     - number of poles or zeros can be 0
     - an unsupported response information somewhere in the metadata
       should not automatically raise an Error, if the desired
       information can still be retrieved

    This test also tests if a warning is raised if no startime is given.
    """
    parser = Parser()
    file = os.path.join(self.path, "bug165.dataless")
    t = UTCDateTime("2010-01-01T00:00:00")
    # raises UserWarning
    with warnings.catch_warnings(record=True) as w:
        warnings.simplefilter("always")
        # Trigger a warning.
        parser.read(file)
        self.assertEqual(len(w), 1)
        self.assertTrue(issubclass(w[-1].category, UserWarning))
        # BUG FIX: the original `'date' and 'required' in msg` only
        # checked for 'required' ('date' is a truthy constant there);
        # assert both substrings explicitly.
        msg = str(w[-1].message).lower()
        self.assertTrue('date' in msg)
        self.assertTrue('required' in msg)
    # Triggers a warning. Use the snake_case get_paz API for consistency
    # with the rest of this file (getPAZ is the deprecated alias).
    paz = parser.get_paz("NZ.DCZ.20.HNZ", t)
    result = {
        'digitizer_gain': 419430.0,
        'gain': 24595700000000.0,
        'poles': [(-981 + 1009j), (-981 - 1009j),
                  (-3290 + 1263j), (-3290 - 1263j)],
        'seismometer_gain': 1.01885,
        'sensitivity': 427336.0,
        'zeros': []
    }
    self.assertEqual(paz, result)