Exemplo n.º 1
0
    def test_write_catalog_shapefile_with_extra_field(self):
        """
        Check writing a catalog to shapefile with an additional custom
        database column, including error handling for invalid extra fields.
        """
        catalog = read_events('/path/to/mchedr.dat')
        catalog += read_events('/path/to/nlloc.qml')
        good_extra = [('Region', 'C', 50, None,
                       ['SOUTHEAST OF HONSHU, JAPAN', 'GERMANY'])]
        wrong_length = [('Region', 'C', 50, None, ['ABC'])]
        name_clash = [('Magnitude', 'C', 50, None, ['ABC'])]

        with TemporaryWorkingDirectory():
            with warnings.catch_warnings(record=True) as caught:
                warnings.filterwarnings('always')
                # bad call: value list length does not match catalog size
                with self.assertRaises(ValueError) as cm:
                    _write_shapefile(catalog, "catalog.shp",
                                     extra_fields=wrong_length)
                self.assertEqual(
                    str(cm.exception), "list of values for each item in "
                    "'extra_fields' must have same length as Catalog object")
                # bad call: extra field name collides with an existing field
                with self.assertRaises(ValueError) as cm:
                    _write_shapefile(catalog, "catalog.shp",
                                     extra_fields=name_clash)
                self.assertEqual(
                    str(cm.exception), "Conflict with existing field named "
                    "'Magnitude'.")
                # good call: should succeed
                _write_shapefile(catalog, "catalog.shp",
                                 extra_fields=good_extra)
            # at least one recorded warning must carry exactly this message
            expected_message = (
                'Encountered an event with origin uncertainty '
                'description of type "confidence ellipsoid". This is '
                'not yet implemented for output as shapefile. No '
                'origin uncertainty will be added to shapefile for '
                'such events.')
            if not any(str(w_.message) == expected_message for w_ in caught):
                raise Exception
            for suffix in SHAPEFILE_SUFFIXES:
                self.assertTrue(os.path.isfile("catalog" + suffix))
            with open("catalog.shp", "rb") as fh_shp, \
                    open("catalog.dbf", "rb") as fh_dbf, \
                    open("catalog.shx", "rb") as fh_shx:
                shp = shapefile.Reader(shp=fh_shp, shx=fh_shx, dbf=fh_dbf)
                # verify fields and records of the shapefile we just wrote
                _assert_records_and_fields(
                    got_fields=shp.fields,
                    got_records=shp.records(),
                    expected_fields=expected_catalog_fields_with_region,
                    expected_records=expected_catalog_records_with_region)
                self.assertEqual(shp.shapeType, shapefile.POINT)
                _close_shapefile_reader(shp)
            # For some reason, on windows the files are still in use when
            # TemporaryWorkingDirectory tries to remove the directory.
            self.assertTrue(fh_shp.closed)
            self.assertTrue(fh_dbf.closed)
            self.assertTrue(fh_shx.closed)
Exemplo n.º 2
0
 def test_read_write(self):
     """
     Round-trip an event through the nordic s-file writer and reader and
     verify that picks, origin, magnitudes, descriptions and amplitudes
     all survive.
     """
     # Build the reference event; magnitudes are sorted on writing, so
     # sort here as well to compare like-for-like.
     test_event = full_test_event()
     test_event.magnitudes.sort(key=lambda obj: obj['mag'], reverse=True)
     # Put the event into a catalogue usable for QuakeML testing.
     test_cat = Catalog()
     test_cat += test_event
     # Check the read-write s-file functionality.
     with TemporaryWorkingDirectory():
         sfile = _write_nordic(test_cat[0], filename=None, userid='TEST',
                               evtype='L', outdir='.', wavefiles='test',
                               explosion=True, overwrite=True)
         self.assertEqual(readwavename(sfile), ['test'])
         read_cat = Catalog()
         # raises "UserWarning: AIN in header, currently unsupported"
         with warnings.catch_warnings():
             warnings.simplefilter('ignore', UserWarning)
             read_cat += read_nordic(sfile)
     read_ev = read_cat[0]
     test_ev = test_cat[0]
     # Picks must match attribute by attribute; only the last character
     # of the channel code is preserved by the format.
     for read_pick, test_pick in zip(read_ev.picks, test_ev.picks):
         for attr in ('time', 'backazimuth', 'onset', 'phase_hint',
                      'polarity'):
             self.assertEqual(getattr(read_pick, attr),
                              getattr(test_pick, attr))
         self.assertEqual(read_pick.waveform_id.station_code,
                          test_pick.waveform_id.station_code)
         self.assertEqual(read_pick.waveform_id.channel_code[-1],
                          test_pick.waveform_id.channel_code[-1])
     # Origin round-trips; resource_ids get rewritten so they are not
     # compared, and time_residual_RMS is not a quakeML format.
     for attr in ('time', 'longitude', 'latitude', 'depth'):
         self.assertEqual(getattr(read_ev.origins[0], attr),
                          getattr(test_ev.origins[0], attr))
     # All three magnitudes round-trip.
     for i in range(3):
         for attr in ('mag', 'creation_info', 'magnitude_type'):
             self.assertEqual(getattr(read_ev.magnitudes[i], attr),
                              getattr(test_ev.magnitudes[i], attr))
     self.assertEqual(read_ev.event_descriptions,
                      test_ev.event_descriptions)
     # Amplitudes 0 and 2: period and snr round-trip; resource_ids get
     # overwritten because you can't have two the same in memory.
     for i in (0, 2):
         self.assertEqual(read_ev.amplitudes[i].period,
                          test_ev.amplitudes[i].period)
         self.assertEqual(read_ev.amplitudes[i].snr,
                          test_ev.amplitudes[i].snr)
     # Coda magnitude pick (amplitude 1); snr is not supported in s-file
     # and pick_id/resource_id are overwritten, so neither is compared.
     for attr in ('type', 'unit', 'generic_amplitude', 'magnitude_hint',
                  'category'):
         self.assertEqual(getattr(read_ev.amplitudes[1], attr),
                          getattr(test_ev.amplitudes[1], attr))
     self.assertEqual(read_ev.amplitudes[1].waveform_id.station_code,
                      test_ev.amplitudes[1].waveform_id.station_code)
     # Channel code collapses to first + last character on round-trip.
     self.assertEqual(
         read_ev.amplitudes[1].waveform_id.channel_code,
         test_ev.amplitudes[1].waveform_id.channel_code[0] +
         test_ev.amplitudes[1].waveform_id.channel_code[-1])
Exemplo n.º 3
0
    def test_sds_report(self):
        """
        Test command line script for generating SDS report html.

        Inherently that script uses many other routines like `_get_filenames`,
        `get_availability_percentage`, `_get_current_endtime`,
        `get_latency`, `has_data` and `get_all_stations`, so these should be
        sufficiently covered as well.
        """
        # dummy SDS with data roughly 2-3 hours behind current time
        start_time = UTCDateTime() - 2.5 * 3600
        with TemporarySDSDirectory(year=None, doy=None, time=start_time) \
                as temp_sds, TemporaryWorkingDirectory():
            # create the report
            basename = "sds_report"
            sds_report([
                "-r={}".format(temp_sds.tempdir),
                "-o={}".format(os.path.join(os.curdir, basename)),
                "-l=", "-l=00", "-l=10", "-c=HHZ", "-c=BHZ",
                "-i=AB.XYZ..BHE", "--check-quality-days=1",
            ])
            # do the testing
            abs_basename = os.path.abspath(
                os.path.join(os.curdir, basename))
            file_html = abs_basename + ".html"
            file_txt = abs_basename + ".txt"
            file_png = abs_basename + ".png"
            # all three output files must exist
            for path in (file_html, file_txt, file_png):
                self.assertTrue(os.path.isfile(path))
            # the image output must actually be a png file
            self.assertEqual(imghdr.what(file_png), "png")
            # stream info / data quality file must match line by line
            expected_lines = [
                b"AB,XYZ,,BHE,831[0-9].[0-9]*?,0.007292,2",
                b"AB,XYZ,,HHZ,831[0-9].[0-9]*?,0.007292,2",
                b"AB,XYZ,00,HHZ,831[0-9].[0-9]*?,0.007292,2",
                b"AB,ZZZ3,,HHZ,831[0-9].[0-9]*?,0.007292,2",
                b"AB,ZZZ3,00,HHZ,831[0-9].[0-9]*?,0.007292,2",
                b"CD,XYZ,,HHZ,831[0-9].[0-9]*?,0.007292,2",
                b"CD,XYZ,00,HHZ,831[0-9].[0-9]*?,0.007292,2",
                b"CD,ZZZ3,,HHZ,831[0-9].[0-9]*?,0.007292,2",
                b"CD,ZZZ3,00,HHZ,831[0-9].[0-9]*?,0.007292,2",
            ]
            with open(file_txt, "rb") as fh:
                got_lines = fh.readlines()
            for expected_line, got_line in zip(expected_lines, got_lines):
                self.assertIsNotNone(re.match(expected_line, got_line))
            # html report: each line must match its stored regex pattern
            with open(file_html, "rb") as fh:
                got_lines = fh.readlines()
            with open(os.path.join(self.data_dir, "sds_report.regex"),
                      "rb") as fh:
                regex_patterns = fh.readlines()
            failed = False  # XXX remove again
            for got, pattern in zip(got_lines, regex_patterns):
                # report every mismatching line before failing
                if re.match(pattern.strip(), got.strip()) is None:
                    failed = True
                    print(pattern.strip())
                    print(got.strip())
            if failed:
                raise Exception