Exemplo n.º 1
0
    def test_read_and_write_minimal_file(self):
        """
        A minimal StationXML document should survive a read/write round trip.
        """
        filename = os.path.join(self.data_dir, "minimal_station.xml")
        inv = obspy.station.read_inventory(filename)

        # Check the handful of values the file sets explicitly.
        self.assertEqual(inv.source, "OBS")
        self.assertEqual(inv.created, obspy.UTCDateTime(2013, 1, 1))
        self.assertEqual(len(inv.networks), 1)
        self.assertEqual(inv.networks[0].code, "PY")

        # Serialize again with validation enabled; the ObsPy module tags are
        # suppressed to simplify the comparison.
        written = compatibility.BytesIO()
        inv.write(written, format="StationXML", validate=True,
                  _suppress_module_tags=True)
        written.seek(0, 0)

        with open(filename, "rb") as fh:
            expected = compatibility.BytesIO(fh.read())
        expected.seek(0, 0)

        self._assert_station_xml_equality(written, expected)
Exemplo n.º 2
0
    def test_reading_and_writing_full_root_tag(self):
        """
        A StationXML root tag carrying sender and non-ObsPy module tags
        should survive a read/write round trip.
        """
        filename = os.path.join(
            self.data_dir,
            "minimal_with_non_obspy_module_and_sender_tags_station.xml")
        inv = obspy.station.read_inventory(filename)
        # Verify all root-level values the file sets explicitly.
        self.assertEqual(inv.source, "OBS")
        self.assertEqual(inv.created, obspy.UTCDateTime(2013, 1, 1))
        self.assertEqual(len(inv.networks), 1)
        self.assertEqual(inv.networks[0].code, "PY")
        self.assertEqual(inv.module, "Some Random Module")
        self.assertEqual(inv.module_uri, "http://www.some-random.site")
        self.assertEqual(inv.sender, "The ObsPy Team")

        # Serialize once more, leaving out the ObsPy module tags.
        written = compatibility.BytesIO()
        inv.write(written, format="StationXML", validate=True,
                  _suppress_module_tags=True)
        written.seek(0, 0)

        with open(filename, "rb") as fh:
            expected = compatibility.BytesIO(fh.read())
        expected.seek(0, 0)

        self._assert_station_xml_equality(written, expected)
Exemplo n.º 3
0
    def test_read_and_write_full_file(self):
        """
        A StationXML document using all possible tags should survive a
        read/write round trip.
        """
        filename = os.path.join(self.data_dir, "full_random_stationxml.xml")
        inv = obspy.station.read_inventory(filename)

        # XXX helper flag for debugging the full random file - set to True
        # XXX to dump the serialized document instead of validating it.
        write_debug_output = False

        # Serialize again (validating unless debugging) and suppress the
        # ObsPy module tags to ease the comparison.
        written = compatibility.BytesIO()
        inv.write(written, format="StationXML",
                  validate=(not write_debug_output),
                  _suppress_module_tags=True)
        written.seek(0, 0)

        if write_debug_output:
            with open("/tmp/debugout.xml", "wb") as fh:
                fh.write(written.read())
            written.seek(0, 0)

        with open(filename, "rb") as fh:
            expected = compatibility.BytesIO(fh.read())
        expected.seek(0, 0)

        self._assert_station_xml_equality(written, expected)
Exemplo n.º 4
0
 def test_readBytesIO(self):
     """
     Tests reading from BytesIO instances.
     """
     # (filename, expected number of samples in the first trace)
     cases = [
         ('example.y_first_trace', 500),
         ('ld0042_file_00018.sgy_first_trace', 2050),
         ('1.sgy_first_trace', 8000),
         ('00001034.sgy_first_trace', 2001),
         ('planes.segy_first_trace', 512),
     ]
     for name, npts in cases:
         with open(os.path.join(self.path, name), 'rb') as f:
             raw = f.read()
         st = readSEGY(compatibility.BytesIO(raw))
         self.assertEqual(len(st.traces[0].data), npts)
Exemplo n.º 5
0
 def parseSEED(self, data, expected_length=0):
     """
     If number of FIR coefficients are larger than maximal blockette size of
     9999 chars a follow up blockette with the same blockette id and
     response lookup key is expected - this is checked here.

     All matching follow-up blockettes are concatenated into one temporary
     buffer which is then handed to ``Blockette.parseSEED`` in a single
     pass.

     :param data: raw SEED bytes or a seekable file-like object positioned
         at the start of the blockette.
     :param expected_length: length of the first blockette part; derived
         automatically when ``data`` is given as bytes.
     """
     # convert to stream for test issues
     if isinstance(data, bytes):
         expected_length = len(data)
         data = compatibility.BytesIO(data)
     elif isinstance(data, (str, native_str)):
         raise TypeError("Data must be bytes, not string")
     # get current lookup key: skip blockette id (3 chars) plus length
     # (4 chars), then read the 4 char response lookup key (field widths
     # match the reads in the loop below).
     pos = data.tell()
     data.read(7)
     global_lookup_key = int(data.read(4))
     data.seek(pos)
     # read first blockette into a temporary buffer
     temp = compatibility.BytesIO()
     temp.write(data.read(expected_length))
     # check next blockettes for continuations of this response
     while True:
         # save position so the pointer can be restored if the next
         # blockette is not a continuation
         pos = data.tell()
         try:
             blockette_id = int(data.read(3))
         except ValueError:
             # no parsable blockette id -> end of data
             break
         if blockette_id != 41:
             # different blockette id -> break
             break
         blockette_length = int(data.read(4))
         lookup_key = int(data.read(4))
         if lookup_key != global_lookup_key:
             # different lookup key -> break
             break
         # ok follow up blockette found - skip some unneeded fields
         self.fields[1].read(data)
         self.fields[2].read(data)
         self.fields[3].read(data)
         self.fields[4].read(data)
         self.fields[5].read(data)
         # remaining length in current blockette
         length = pos - data.tell() + blockette_length
         # read follow up blockette and append it to temporary blockette
         temp.write(data.read(length))
     # reposition file pointer to the start of the first non-matching
     # blockette
     data.seek(pos)
     # parse new combined temporary blockette
     temp.seek(0, os.SEEK_END)
     _len = temp.tell()
     temp.seek(0)
     Blockette.parseSEED(self, temp, expected_length=_len)
Exemplo n.º 6
0
    def getEventDetail(self, uri, format=None):
        """
        Gets event detail information.

        :type uri: str
        :param uri: Event identifier as either a EMSC event unique identifier,
            e.g. ``"19990817_0000001"`` or a QuakeML-formatted event URI, e.g.
            ``"quakeml:eu.emsc/event#19990817_0000001"``.
        :type format: ``'list'``, ``'xml'`` or ``'catalog'``, optional
        :param format: Format of returned results. Defaults to ``'xml'``.
        :rtype: :class:`~obspy.core.event.Catalog`, list or str
        :return: Method will return either an ObsPy
            :class:`~obspy.core.event.Catalog` object, a list of event
            dictionaries or a QuakeML string depending on the ``format``
            keyword.

        .. seealso:: http://www.seismicportal.eu/services/event/detail/info/

        .. rubric:: Example

        >>> from obspy.neries import Client
        >>> client = Client()
        >>> result = client.getEventDetail("19990817_0000001", 'list')
        >>> len(result)  # Number of calculated origins
        12
        >>> result[0]  # Details about first calculated origin  #doctest: +SKIP
        {'author': u'EMSC', 'event_id': u'19990817_0000001',
         'origin_id': 1465935, 'longitude': 29.972,
         'datetime': UTCDateTime(1999, 8, 17, 0, 1, 35), 'depth': -10.0,
         'magnitude': 6.7, 'magnitude_type': u'mw', 'latitude': 40.749}
        """
        # deprecation warning if format is not set
        if format is None:
            msg = "The default setting format='xml' for obspy.neries." + \
                "Client.getEventDetail() will be changed in the future to " + \
                "format='catalog'. Please call this function with the " + \
                "format keyword in order to hide this deprecation warning."
            warnings.warn(msg, category=DeprecationWarning)
            format = "xml"
        # parse parameters: 'list' output is fetched as JSON, both 'xml'
        # and 'catalog' are fetched as QuakeML/XML
        kwargs = {}
        if format == 'list':
            kwargs['format'] = 'json'
        else:
            kwargs['format'] = 'xml'
        if str(uri).startswith('quakeml:'):
            # QuakeML-formatted event URI
            kwargs['uri'] = str(uri)
        else:
            # EMSC event unique identifier
            kwargs['unid'] = str(uri)
        # fetch data from the web service
        data = self._fetch("/services/event/detail", **kwargs)
        # format output: convert JSON to a list of dicts, parse QuakeML
        # into a Catalog, or hand back the raw payload
        if format == "list":
            return self._json2list(data.decode())
        elif format == "catalog":
            return readEvents(compatibility.BytesIO(data), 'QUAKEML')
        else:
            return data
Exemplo n.º 7
0
    def test_long_year_range(self):
        """
        Round-trips MiniSEED writing/reading for start times in 1901-2100.
        """
        trace = Trace(np.arange(5, dtype="float32"))

        # Year 2056 is non-deterministic for days 1, 256 and 257 (see the
        # libmseed documentation), so a fixed safe date is substituted for
        # that year. Stepping by five years keeps the runtime reasonable
        # while starting at 1901 still covers 2056.
        for year in range(1901, 2101, 5):
            for byteorder in ("<", ">"):
                memfile = compatibility.BytesIO()
                # Seed with year and byteorder for reproducible times.
                random.seed(year + ord(byteorder))
                trace.stats.starttime = UTCDateTime(
                    year,
                    julday=random.randrange(1, 365),
                    hour=random.randrange(0, 24),
                    minute=random.randrange(0, 60),
                    second=random.randrange(0, 60))
                if year == 2056:
                    trace.stats.starttime = UTCDateTime(2056, 2, 1)
                trace.write(memfile, format="mseed")
                stream = read(memfile)
                self.assertEqual(len(stream), 1)
                roundtripped = stream[0]
                # Drop the MiniSEED-specific header entries which obviously
                # differ from the original trace.
                del roundtripped.stats.mseed
                del roundtripped.stats._format
                self.assertEqual(trace, roundtripped)
Exemplo n.º 8
0
    def test_evalresp_file_like_object(self):
        """
        evalresp must give identical results for a RESP filename and a
        file-like object holding the same RESP data.
        """
        rawf = os.path.join(self.path, 'CRLZ.HHZ.10.NZ.SAC')
        respf = os.path.join(self.path, 'RESP.NZ.CRLZ.10.HHZ')

        tr1 = read(rawf)[0]
        tr2 = read(rawf)[0]

        seedresp = {
            'filename': respf,
            'date': UTCDateTime(2003, 11, 1, 0, 0, 0),
            'units': 'VEL',
            'network': 'NZ',
            'station': 'CRLZ',
            'location': '10',
            'channel': 'HHZ',
        }
        # First simulation: RESP data given by filename.
        tr1.data = seisSim(tr1.data, tr1.stats.sampling_rate,
                           seedresp=seedresp)

        # Second simulation: the same RESP data via a BytesIO object.
        with open(respf, 'rb') as fh:
            seedresp['filename'] = compatibility.BytesIO(fh.read())
        tr2.data = seisSim(tr2.data, tr2.stats.sampling_rate,
                           seedresp=seedresp)

        self.assertEqual(tr1, tr2)
Exemplo n.º 9
0
 def test_packAndUnpackIBMSpecialCases(self):
     """
     Packs and unpacks the first ten powers of 16 (and their negatives),
     which need separate handling in the IBM float algorithm.
     """
     values = []
     for exponent in range(10):
         values.append(16 ** exponent)
         values.append(-16 ** exponent)
     # Require float64 so double precision is available even if native
     # floats differ.
     values = np.require(np.array(values), 'float64')
     # Exercise both big and little endian code paths.
     for endian in ('>', '<'):
         buf = compatibility.BytesIO()
         DATA_SAMPLE_FORMAT_PACK_FUNCTIONS[1](buf, values, endian)
         # Rewind and unpack what was just written.
         buf.seek(0, 0)
         unpacked = DATA_SAMPLE_FORMAT_UNPACK_FUNCTIONS[1](
             buf, len(values), endian)
         buf.close()
         # The round trip must be exact for these values.
         np.testing.assert_array_equal(unpacked, values)
Exemplo n.º 10
0
 def test_readAndWriteTextualFileHeader(self):
     """
     Reading and writing should not change the textual file header.
     """
     for file, attribs in self.files.items():
         endian = attribs['endian']
         header_enc = attribs['textual_header_enc']
         path = os.path.join(self.path, file)
         with open(path, 'rb') as f:
             # Keep the raw 3200 byte header for the later comparison.
             org_header = f.read(3200)
             f.seek(0, 0)
             # Set up an empty SEGY object reading from the same file.
             segy = SEGYFile()
             segy.endian = endian
             segy.file = f
             segy.textual_header_encoding = None
             segy._readTextualHeader()
             # The encoding must have been detected correctly.
             self.assertEqual(segy.textual_header_encoding, header_enc)
         # Write the header back into an in-memory buffer.
         buf = compatibility.BytesIO()
         segy._writeTextualHeader(buf)
         buf.seek(0, 0)
         written = buf.read()
         # Exactly 3200 bytes and byte-identical to the original.
         self.assertEqual(len(written), 3200)
         self.assertEqual(org_header, written)
Exemplo n.º 11
0
 def test_packAndUnpackVerySmallIBMFloats(self):
     """
     The same test as test_packAndUnpackIBMFloat just for small numbers
     because they might suffer more from the inaccuracies.
     """
     # Some random seeds for reproducible data.
     for seed in (123, 1592, 4482, 601, 1, 783, 6849):
         # 50000 reproducible random floats in [0, 1E-5); float64 so
         # double precision is available regardless of native floats.
         np.random.seed(seed)
         values = np.require(1E-5 * np.random.ranf(50000), 'float64')
         # Exercise both endiannesses.
         for endian in ('<', '>'):
             buf = compatibility.BytesIO()
             DATA_SAMPLE_FORMAT_PACK_FUNCTIONS[1](buf, values, endian)
             # Rewind and unpack.
             buf.seek(0, 0)
             unpacked = DATA_SAMPLE_FORMAT_UNPACK_FUNCTIONS[1](
                 buf, len(values), endian)
             buf.close()
             # A relative tolerance of 1E-6 is considered good enough.
             self.assertEqual(True, rms(values, unpacked) < 1E-6)
Exemplo n.º 12
0
 def test_packAndUnpackIBMFloat(self):
     """
     Packing and unpacking IBM floating points might yield some inaccuracies
     due to floating point rounding errors.
     This test tests a large number of random floating point numbers.
     """
     # Some random seeds for reproducible data.
     for seed in (1234, 592, 459482, 6901, 0, 7083, 68349):
         # 50000 reproducible random floats in [-100000, 100000); float64
         # so double precision is available regardless of native floats.
         np.random.seed(seed)
         values = np.require(200000.0 * np.random.ranf(50000) - 100000.0,
                             'float64')
         # Exercise both endiannesses.
         for endian in ('<', '>'):
             buf = compatibility.BytesIO()
             DATA_SAMPLE_FORMAT_PACK_FUNCTIONS[1](buf, values, endian)
             # Rewind and unpack.
             buf.seek(0, 0)
             unpacked = DATA_SAMPLE_FORMAT_UNPACK_FUNCTIONS[1](
                 buf, len(values), endian)
             buf.close()
             # A relative tolerance of 1E-6 is considered good enough.
             self.assertEqual(True, rms(values, unpacked) < 1E-6)
Exemplo n.º 13
0
    def __init__(self, xml_doc, namespace=None):
        """
        Initializes a XMLParser object.

        :type xml_doc: str, filename, file-like object, parsed XML document
        :param xml_doc: XML document
        :type namespace: str, optional
        :param namespace: Document-wide default namespace. Defaults to ``''``.
        """
        if isinstance(xml_doc, bytes):
            # Raw bytes: wrap data that looks like an XML document in a
            # buffer so etree can parse it.
            head = xml_doc.strip()[:5].upper()
            if head.startswith(b'<?XML'):
                xml_doc = compatibility.BytesIO(xml_doc)
            self.xml_doc = etree.parse(xml_doc)
        elif isinstance(xml_doc, (str, native_str)):
            # A text string is interpreted as a filename.
            if not os.path.exists(xml_doc):
                raise IOError("filename %s does not exist" % xml_doc)
            self.xml_doc = etree.parse(xml_doc)
        elif hasattr(xml_doc, 'seek'):
            # File-like content: rewind before parsing.
            xml_doc.seek(0)
            self.xml_doc = etree.parse(xml_doc)
        else:
            # Assume an already parsed document.
            self.xml_doc = xml_doc
        self.xml_root = self.xml_doc.getroot()
        self.namespace = namespace or self._getRootNamespace()
Exemplo n.º 14
0
 def test_stringIO(self):
     """
     Checks that reading and writing work on in-memory BytesIO buffers.
     """
     gse2file = os.path.join(self.path, 'loc_RNON20040609200559.z')
     with open(gse2file, 'rb') as f:
         source = compatibility.BytesIO(f.read())
     header, data = libgse2.read(source)
     # Make sure something was actually read.
     self.assertEqual(12000, header['npts'])
     self.assertEqual(1, data[-1])
     # Round trip: write into a fresh buffer and parse it again.
     sink = compatibility.BytesIO()
     libgse2.write(header, data, sink)
     sink.seek(0)
     newheader, newdata = libgse2.read(sink)
     self.assertEqual(header, newheader)
     np.testing.assert_equal(data, newdata)
Exemplo n.º 15
0
 def test_readBytesIO(self):
     """
     Tests reading from BytesIO instances.
     """
     filename = os.path.join(self.path, '1.su_first_trace')
     with open(filename, 'rb') as fp:
         raw = fp.read()
     stream = readSU(compatibility.BytesIO(raw))
     # The single trace of this file holds 8000 samples.
     self.assertEqual(len(stream.traces[0].data), 8000)
Exemplo n.º 16
0
    def test_enforcing_reading_byteorder(self):
        """
        Tests if setting the byteorder of the header for reading is passed to
        the C functions.

        Quite simple. It just checks if reading with the correct byteorder
        works and reading with the wrong byteorder fails.
        """
        tr = Trace(data=np.arange(10, dtype="int32"))

        # Run the identical check for little endian first, then big endian.
        for good, bad in (("<", ">"), (">", "<")):
            memfile = compatibility.BytesIO()
            tr.write(memfile, format="mseed", byteorder=good)
            memfile.seek(0, 0)
            # Reading with the matching byteorder must work just fine.
            tr2 = read(memfile, header_byteorder=good)[0]
            memfile.seek(0, 0)
            self.assertEqual(tr2.stats.mseed.byteorder, good)
            # Remove the mseed specific header fields. These are obviously
            # not equal.
            del tr2.stats.mseed
            del tr2.stats._format
            self.assertEqual(tr, tr2)
            # The mismatching byteorder must raise.
            self.assertRaises(ValueError, read, memfile, header_byteorder=bad)
Exemplo n.º 17
0
    def loads(self, string):
        """
        Parses an mchedr byte string into an ObsPy catalog object.

        :type string: bytes
        :param string: mchedr data to parse.
        :rtype: :class:`~obspy.core.event.Catalog`
        :returns: ObsPy Catalog object.
        """
        # Wrap the data in a buffer so _deserialize can read it like a file.
        self.filename = None
        self.fh = compatibility.BytesIO(string)
        return self._deserialize()
Exemplo n.º 18
0
 def parseSEED(self, data, expected_length=0):
     """
     Parse given data for blockette fields and create attributes.

     :param data: raw SEED bytes or a seekable file-like object positioned
         at the start of the blockette.
     :param expected_length: number of bytes the blockette is expected to
         span; derived automatically when ``data`` is given as bytes and
         overridden by field 2 (the blockette length field) during parsing.
     :raises BlocketteLengthException: in strict mode, when the blockette
         ends before all expected fields were parsed or the parsed length
         disagrees with the expected length.
     """
     # convert to stream for test issues
     if isinstance(data, bytes):
         expected_length = len(data)
         data = compatibility.BytesIO(data)
     elif isinstance(data, (str, native_str)):
         raise TypeError("data must be bytes, not string")
     start_pos = data.tell()
     # debug: dump the raw blockette bytes, then rewind
     if self.debug:
         print((' DATA: %s' % (data.read(expected_length))))
         data.seek(-expected_length, 1)
     blockette_fields = self.default_fields + self.getFields()
     # loop over all blockette fields
     for field in blockette_fields:
         # if blockette length reached break with warning
         if data.tell() - start_pos >= expected_length:
             if not self.strict:
                 break
             # a trailing Loop field may legitimately be empty, even in
             # strict mode
             if isinstance(field, Loop):
                 break
             msg = "End of blockette " + self.blockette_id + " reached " + \
                   "without parsing all expected fields, here: " + \
                   str(field)
             # NOTE(review): self.strict is always True at this point (the
             # "if not self.strict: break" above), so the warn branch
             # appears unreachable - confirm before relying on it.
             if self.strict:
                 raise BlocketteLengthException(msg)
             else:
                 warnings.warn(msg, category=Warning)
             break
         field.parseSEED(self, data)
         # field 2 carries the actual blockette length - trust it over the
         # caller-supplied value from here on
         if field.id == 2:
             expected_length = field.data
     # strict tests
     if not self.strict:
         return
     # check length
     end_pos = data.tell()
     blockette_length = end_pos - start_pos
     if expected_length == blockette_length:
         return
     # wrong length
     msg = 'Wrong size of Blockette %s (%d of %d) in sequence %06d'
     msg = msg % (self.blockette_id, blockette_length, expected_length,
                  self.record_id or 0)
     if self.strict:
         raise BlocketteLengthException(msg)
     else:
         warnings.warn(msg, category=Warning)
Exemplo n.º 19
0
 def test_noDAT2NullPointer(self):
     """
     Checks that null pointers are returned correctly by read83 function
     of read. Error "decomp_6b: Neither DAT2 or DAT1 found!" is on
     purpose.
     """
     filename = os.path.join(self.path,
                             'loc_RJOB20050831023349_first100_dos.z')
     # Build a copy of the file with every DAT2 line stripped out.
     stripped = compatibility.BytesIO()
     with open(filename, 'rb') as fin:
         for line in fin:
             if not line.startswith(b'DAT2'):
                 stripped.write(line)
     stripped.seek(0)
     # Swallow the C library's console output while the error triggers.
     with CatchOutput():
         self.assertRaises(GSEUtiError, libgse2.read, stripped)
Exemplo n.º 20
0
    def test_createReadAssertAndWriteXSEED(self):
        """
        This test takes some SEED files, reads them to a Parser object
        and converts them back to SEED once. This is done to avoid any
        formating issues as seen in test_readAndWriteSEED.

        Therefore the reading and writing of SEED files is considered to be
        correct.

        Finally the resulting SEED gets converted to XSEED and back to SEED
        and the two SEED strings are then evaluated to be identical.

        This tests also checks for XML validity using a XML schema.
        """
        # Loop over all files and versions.
        for version in ['1.0', '1.1']:
            # Path to XML schema file.
            xsd_path = os.path.join(self.path, 'xml-seed-%s.xsd' % version)
            # Prepare validator. Use a context manager so the schema file is
            # closed even if etree.parse() raises.
            with open(xsd_path, 'rb') as f:
                xmlschema_doc = etree.parse(f)
            xmlschema = etree.XMLSchema(xmlschema_doc)
            for file in self.BW_SEED_files:
                # Parse the file.
                parser1 = Parser(file)
                # Convert to SEED once to avoid any issues seen in
                # test_readAndWriteSEED.
                original_seed = parser1.getSEED()
                del parser1
                # Now read the file, parse it, write XSEED, read XSEED and
                # write SEED again. The output should be totally identical.
                parser2 = Parser(original_seed)
                xseed_string = parser2.getXSEED(version=version)
                del parser2
                # Validate XSEED against the schema.
                doc = etree.parse(compatibility.BytesIO(xseed_string))
                self.assertTrue(xmlschema.validate(doc))
                del doc
                parser3 = Parser(xseed_string)
                new_seed = parser3.getSEED()
                self.assertEqual(original_seed, new_seed)
                del parser3, original_seed, new_seed
Exemplo n.º 21
0
    def test_writing_module_tags(self):
        """
        Tests the writing of ObsPy related tags.
        """
        net = obspy.station.Network(code="UL")
        inv = obspy.station.Inventory(networks=[net], source="BLU")

        buf = compatibility.BytesIO()
        inv.write(buf, format="StationXML", validate=True)
        buf.seek(0, 0)
        stripped = [line.strip()
                    for line in buf.read().decode().splitlines()]
        # The <Module> tag carries the ObsPy version, hence only a
        # wildcard comparison is possible.
        module_line = [line for line in stripped
                       if line.startswith("<Module>")][0]
        self.assertTrue(fnmatch.fnmatch(module_line,
                                        "<Module>ObsPy *</Module>"))
        uri_line = [line for line in stripped
                    if line.startswith("<ModuleURI>")][0]
        self.assertEqual(uri_line,
                         "<ModuleURI>http://www.obspy.org</ModuleURI>")
Exemplo n.º 22
0
 def test_readAndWriteTraceHeader(self):
     """
     Reading and writing should not change the trace header.
     """
     for file, attribs in self.files.items():
         endian = attribs['endian']
         path = os.path.join(self.path, file)
         # The 240 byte trace header sits directly behind the 3600 byte
         # file header.
         with open(path, 'rb') as f:
             f.seek(3600)
             org_header = f.read(240)
         header = SEGYTraceHeader(header=org_header, endian=endian)
         # Serialize the header into an in-memory buffer.
         buf = compatibility.BytesIO()
         header.write(buf)
         buf.seek(0, 0)
         written = buf.read()
         # Correct length and byte-identical content.
         self.assertEqual(len(written), 240)
         self.assertEqual(org_header, written)
Exemplo n.º 23
0
 def test_getRecordInformation(self):
     """
     Tests the util._getMSFileInfo method with known values.
     """
     filename = os.path.join(self.path, 'data',
                             'BW.BGLD.__.EHE.D.2008.001.first_10_records')

     # Plain filename.
     info = util.getRecordInformation(filename)
     self.assertEqual(info['filesize'], 5120)
     self.assertEqual(info['record_length'], 512)
     self.assertEqual(info['number_of_records'], 10)
     self.assertEqual(info['excess_bytes'], 0)

     # An open file: the result must be relative to the current position
     # of the file pointer and the pointer itself must stay untouched.
     with open(filename, 'rb') as open_file:
         open_file.seek(1234)
         info = util.getRecordInformation(open_file)
         self.assertEqual(info['filesize'], 5120 - 1234)
         self.assertEqual(info['record_length'], 512)
         self.assertEqual(info['number_of_records'], 7)
         self.assertEqual(info['excess_bytes'], 302)
         self.assertEqual(open_file.tell(), 1234)

     # A BytesIO holding the whole file, again with a moved position.
     with open(filename, 'rb') as open_file:
         buf = compatibility.BytesIO(open_file.read())
     buf.seek(111)
     info = util.getRecordInformation(buf)
     self.assertEqual(info['filesize'], 5120 - 111)
     self.assertEqual(info['record_length'], 512)
     self.assertEqual(info['number_of_records'], 9)
     self.assertEqual(info['excess_bytes'], 401)
     self.assertEqual(buf.tell(), 111)

     # One more file containing two records.
     filename = os.path.join(self.path, 'data', 'test.mseed')
     info = util.getRecordInformation(filename)
     self.assertEqual(info['filesize'], 8192)
     self.assertEqual(info['record_length'], 4096)
     self.assertEqual(info['number_of_records'], 2)
     self.assertEqual(info['excess_bytes'], 0)
Exemplo n.º 24
0
    def test_saveResponse(self):
        """
        Fetches and stores response information as Dataless SEED volume.
        """
        # First run: save to a temporary file on disk.
        client = Client(user='******')
        start = UTCDateTime(2008, 1, 1)
        end = start + 1
        with NamedTemporaryFile() as tf:
            tempfile = tf.name
            client.saveResponse(tempfile, 'BW', 'MANZ', '', 'EHZ', start, end)
            with open(tempfile, 'rb') as fp:
                # Dataless SEED volumes start with this control header.
                self.assertEqual(fp.read(8), b"000001V ")

        # Second run: save to a BytesIO instance instead.
        file_object = compatibility.BytesIO()
        client = Client(user='******')
        start = UTCDateTime(2008, 1, 1)
        end = start + 1
        client.saveResponse(file_object, 'BW', 'MANZ', '', 'EHZ', start, end)
        file_object.seek(0, 0)
        self.assertEqual(file_object.read(8), b"000001V ")
Exemplo n.º 25
0
 def test_init(self):
     """
     Tests the __init__ method of the XMLParser object.

     The parser must accept filenames, XML byte strings, file-like
     objects, BytesIO instances and already parsed XML documents (from
     both the stdlib ``xml`` module and ``lxml``).
     """
     # 1 - filenames
     XMLParser(self.iris_xml)
     # 2 - XML strings
     XMLParser(XML_DOC)
     # 3 - file like objects; use a context manager so the handle is
     # closed even if XMLParser raises (the original leaked it then).
     with open(self.iris_xml, 'rt') as fh:
         XMLParser(fh)
     # 4 - BytesIO
     XMLParser(compatibility.BytesIO(XML_DOC))
     # 5 - documents parsed with the stdlib xml module
     XMLParser(xml_etree.parse(self.iris_xml))
     # 6 - documents parsed with lxml
     XMLParser(lxml_etree.parse(self.iris_xml))
Exemplo n.º 26
0
 def parseSEED(self, data, length=0, *args, **kwargs):
     """
     Read Blockette 60.

     Parses the stage structure of blockette 60 and appends one list of
     response lookup keys per stage to ``self.stages``.

     :param data: raw SEED bytes or a seekable file-like object positioned
         at the start of the blockette.
     :param length: number of bytes to consume; derived automatically when
         ``data`` is given as bytes.
     """
     # convert to stream for test issues
     if isinstance(data, bytes):
         length = len(data)
         data = compatibility.BytesIO(data)
     elif isinstance(data, (str, native_str)):
         raise TypeError("data must be bytes, not string")
     new_data = data.read(length)
     # drop the 7 char blockette header (3 char id + 4 char length)
     new_data = new_data[7:]
     # first two chars: number of stages
     number_of_stages = int(new_data[0:2])
     # Loop over all stages.
     counter = 2
     for _i in range(number_of_stages):
         # chars 3-4 of each stage header give the number of responses;
         # the first two chars (presumably the stage sequence number -
         # confirm against the SEED manual) are skipped.
         number_of_responses = int(new_data[counter + 2:counter + 4])
         self.stages.append([])
         # Start inner loop over the 4 char response lookup keys.
         counter += 4
         for _j in range(number_of_responses):
             # Append to last list.
             self.stages[-1].append(int(new_data[counter:counter + 4]))
             counter += 4
Exemplo n.º 27
0
    def test_creating_minimal_QuakeML_with_MT(self):
        """
        Tests the creation of a minimal QuakeML containing origin, magnitude
        and moment tensor.

        An event is assembled programmatically, written to QuakeML in memory
        and read back; every round-tripped value must match the input.
        """
        # Rotate into physical domain
        lat, lon, depth, org_time = 10.0, -20.0, 12000, UTCDateTime(2012, 1, 1)
        mrr, mtt, mpp, mtr, mpr, mtp = 1E18, 2E18, 3E18, 3E18, 2E18, 1E18
        scalar_moment = math.sqrt(mrr**2 + mtt**2 + mpp**2 + mtr**2 + mpr**2 +
                                  mtp**2)
        moment_magnitude = 0.667 * (math.log10(scalar_moment) - 9.1)

        # Initialise event
        ev = Event(event_type="earthquake")

        ev_origin = Origin(time=org_time,
                           latitude=lat,
                           longitude=lon,
                           depth=depth,
                           resource_id=ResourceIdentifier())
        ev.origins.append(ev_origin)

        # populate event moment tensor
        ev_tensor = Tensor(m_rr=mrr,
                           m_tt=mtt,
                           m_pp=mpp,
                           m_rt=mtr,
                           m_rp=mpr,
                           m_tp=mtp)

        ev_momenttensor = MomentTensor(tensor=ev_tensor)
        ev_momenttensor.scalar_moment = scalar_moment
        ev_momenttensor.derived_origin_id = ev_origin.resource_id

        ev_focalmechanism = FocalMechanism(moment_tensor=ev_momenttensor)
        ev.focal_mechanisms.append(ev_focalmechanism)

        # populate event magnitude
        ev_magnitude = Magnitude()
        ev_magnitude.mag = moment_magnitude
        ev_magnitude.magnitude_type = 'Mw'
        ev_magnitude.evaluation_mode = 'automatic'
        ev.magnitudes.append(ev_magnitude)

        # write QuakeML file
        cat = Catalog(events=[ev])
        memfile = compatibility.BytesIO()
        cat.write(memfile, format="quakeml", validate=IS_RECENT_LXML)

        memfile.seek(0, 0)
        new_cat = readQuakeML(memfile)
        self.assertEqual(len(new_cat), 1)
        event = new_cat[0]
        self.assertEqual(len(event.origins), 1)
        self.assertEqual(len(event.magnitudes), 1)
        self.assertEqual(len(event.focal_mechanisms), 1)
        org = event.origins[0]
        mag = event.magnitudes[0]
        fm = event.focal_mechanisms[0]
        self.assertEqual(org.latitude, lat)
        self.assertEqual(org.longitude, lon)
        self.assertEqual(org.depth, depth)
        self.assertEqual(org.time, org_time)
        # Moment tensor.
        mt = fm.moment_tensor.tensor
        # Require a tiny *relative* error. The previous comparison used
        # ``< scalar_moment * 1E-10`` (~5e8) as the bound, which made the
        # assertion vacuous, and lacked abs(), so any negative deviation
        # also passed trivially.
        self.assertTrue(abs(fm.moment_tensor.scalar_moment - scalar_moment) /
                        scalar_moment < 1E-10)
        self.assertEqual(mt.m_rr, mrr)
        self.assertEqual(mt.m_pp, mpp)
        self.assertEqual(mt.m_tt, mtt)
        self.assertEqual(mt.m_rt, mtr)
        self.assertEqual(mt.m_rp, mpr)
        self.assertEqual(mt.m_tp, mtp)
        # Mag
        self.assertAlmostEqual(mag.mag, moment_magnitude)
        self.assertEqual(mag.magnitude_type, "Mw")
        self.assertEqual(mag.evaluation_mode, "automatic")
Exemplo n.º 28
0
 def test_getPAZ(self):
     """
     Test extracting poles and zeros information

     Covers three dataless volumes (arclink_full.seed,
     dataless.seed.BW_FURT and the gzip-compressed nied.dataless), the
     exception raised for undefined channels, the UserWarning for
     non-Laplacian transfer functions, and channel lookup by full SEED id
     including the deprecated ``channel_id`` keyword name.
     """
     filename = os.path.join(self.path, 'arclink_full.seed')
     sp = Parser(filename)
     paz = sp.getPAZ('BHE')
     self.assertEqual(paz['gain'], +6.00770e+07)
     self.assertEqual(paz['zeros'], [0j, 0j])
     self.assertEqual(paz['poles'], [(-3.70040e-02 + 3.70160e-02j),
                                     (-3.70040e-02 - 3.70160e-02j),
                                     (-2.51330e+02 + 0.00000e+00j),
                                     (-1.31040e+02 - 4.67290e+02j),
                                     (-1.31040e+02 + 4.67290e+02j)])
     self.assertEqual(paz['sensitivity'], +7.86576e+08)
     self.assertEqual(paz['seismometer_gain'], +1.50000E+03)
     # Raise exception for undefined channels
     self.assertRaises(SEEDParserException, sp.getPAZ, 'EHE')
     #
     # Do the same for another dataless file
     #
     filename = os.path.join(self.path, 'dataless.seed.BW_FURT')
     sp = Parser(filename)
     paz = sp.getPAZ('EHE')
     self.assertEqual(paz['gain'], +1.00000e+00)
     self.assertEqual(paz['zeros'], [0j, 0j, 0j])
     self.assertEqual(paz['poles'], [(-4.44400e+00 + 4.44400e+00j),
                                     (-4.44400e+00 - 4.44400e+00j),
                                     (-1.08300e+00 + 0.00000e+00j)])
     self.assertEqual(paz['sensitivity'], +6.71140E+08)
     self.assertEqual(paz['seismometer_gain'], 4.00000E+02)
     # Raise exception for undefined channels
     self.assertRaises(SEEDParserException, sp.getPAZ, 'BHE')
     # Raise UserWarning if not a Laplacian transfer function ('A').
     # Modify transfer_function_type on the fly
     for blk in sp.blockettes[53]:
         blk.transfer_function_types = 'X'
     with warnings.catch_warnings(record=True):
         # escalate the warning to an error so assertRaises can catch it
         warnings.simplefilter("error", UserWarning)
         self.assertRaises(UserWarning, sp.getPAZ, 'EHE')
     #
     # And the same for yet another dataless file
     #
     filename = os.path.join(self.path, 'nied.dataless.gz')
     # this volume is gzip-compressed; decompress into memory first
     f = compatibility.BytesIO(gzip.open(filename).read())
     sp = Parser(f)
     gain = [+3.94857E+03, +4.87393E+04, +3.94857E+03]
     zeros = [[+0.00000E+00 + 0.00000E+00j, +0.00000E+00 + 0.00000E+00j],
              [
                  +0.00000E+00 + 0.00000E+00j, +0.00000E+00 + 0.00000E+00j,
                  -6.32511E+02 + 0.00000E+00j
              ], [+0.00000E+00 + 0.00000E+00j, +0.00000E+00 + 0.00000E+00j]]
     poles = [[
         -1.23413E-02 + 1.23413E-02j, -1.23413E-02 - 1.23413E-02j,
         -3.91757E+01 + 4.91234E+01j, -3.91757E+01 - 4.91234E+01j
     ],
              [
                  -3.58123E-02 - 4.44766E-02j, -3.58123E-02 + 4.44766E-02j,
                  -5.13245E+02 + 0.00000E+00j, -6.14791E+04 + 0.00000E+00j
              ],
              [
                  -1.23413E-02 + 1.23413E-02j, -1.23413E-02 - 1.23413E-02j,
                  -3.91757E+01 + 4.91234E+01j, -3.91757E+01 - 4.91234E+01j
              ]]
     sensitivity = [+4.92360E+08, +2.20419E+06, +9.84720E+08]
     seismometer_gain = [+2.29145E+03, +1.02583E+01, +2.29145E+03]
     for i, channel in enumerate(['BHZ', 'BLZ', 'LHZ']):
         paz = sp.getPAZ(channel)
         self.assertEqual(paz['gain'], gain[i])
         self.assertEqual(paz['zeros'], zeros[i])
         self.assertEqual(paz['poles'], poles[i])
         self.assertEqual(paz['sensitivity'], sensitivity[i])
         self.assertEqual(paz['seismometer_gain'], seismometer_gain[i])
     # lookup by full SEED id plus datetime; the response differs between
     # the two epochs (instrument change), hence two expected result dicts
     sp = Parser(os.path.join(self.path, 'dataless.seed.BW_RJOB'))
     paz = sp.getPAZ("BW.RJOB..EHZ", UTCDateTime("2007-01-01"))
     result = {
         'gain': 1.0,
         'poles': [(-4.444 + 4.444j), (-4.444 - 4.444j), (-1.083 + 0j)],
         'seismometer_gain': 400.0,
         'sensitivity': 671140000.0,
         'zeros': [0j, 0j, 0j],
         'digitizer_gain': 1677850.0
     }
     self.assertEqual(paz, result)
     paz = sp.getPAZ("BW.RJOB..EHZ", UTCDateTime("2010-01-01"))
     result = {
         'gain':
         60077000.0,
         'poles': [(-0.037004000000000002 + 0.037016j),
                   (-0.037004000000000002 - 0.037016j),
                   (-251.33000000000001 + 0j),
                   (-131.03999999999999 - 467.29000000000002j),
                   (-131.03999999999999 + 467.29000000000002j)],
         'seismometer_gain':
         1500.0,
         'sensitivity':
         2516800000.0,
         'zeros': [0j, 0j],
         'digitizer_gain':
         1677850.0
     }
     self.assertEqual(sorted(paz.items()), sorted(result.items()))
     # last test again, check arg name changed in [3722]
     result = {
         'gain':
         60077000.0,
         'poles': [(-0.037004000000000002 + 0.037016j),
                   (-0.037004000000000002 - 0.037016j),
                   (-251.33000000000001 + 0j),
                   (-131.03999999999999 - 467.29000000000002j),
                   (-131.03999999999999 + 467.29000000000002j)],
         'seismometer_gain':
         1500.0,
         'sensitivity':
         2516800000.0,
         'zeros': [0j, 0j],
         'digitizer_gain':
         1677850.0
     }
     # the old keyword names must still work but emit a DeprecationWarning
     with warnings.catch_warnings(record=True) as w:
         warnings.resetwarnings()
         paz = sp.getPAZ(channel_id="BW.RJOB..EHZ",
                         datetime=UTCDateTime("2010-01-01"))
     self.assertEqual(len(w), 1)
     self.assertEqual(w[0].category, DeprecationWarning)
     self.assertEqual(sorted(paz.items()), sorted(result.items()))
     paz = sp.getPAZ(seed_id="BW.RJOB..EHZ",
                     datetime=UTCDateTime("2010-01-01"))
     self.assertEqual(sorted(paz.items()), sorted(result.items()))
Exemplo n.º 29
0
def Beachball(fm, linewidth=2, facecolor='b', bgcolor='w', edgecolor='k',
              alpha=1.0, xy=(0, 0), width=200, size=100, nofill=False,
              zorder=100, outfile=None, format=None, fig=None):
    """
    Draws a beach ball diagram of an earthquake focal mechanism.

    S1, D1, and R1, the strike, dip and rake of one of the focal planes, can
    be vectors of multiple focal mechanisms.

    :param fm: Focal mechanism that is either number of mechanisms (NM) by 3
        (strike, dip, and rake) or NM x 6 (M11, M22, M33, M12, M13, M23 - the
        six independent components of the moment tensor, where the coordinate
        system is 1,2,3 = Up,South,East which equals r,theta,phi). The strike
        is of the first plane, clockwise relative to north.
        The dip is of the first plane, defined clockwise and perpendicular to
        strike, relative to horizontal such that 0 is horizontal and 90 is
        vertical. The rake is of the first focal plane solution. 90 moves the
        hanging wall up-dip (thrust), 0 moves it in the strike direction
        (left-lateral), -90 moves it down-dip (normal), and 180 moves it
        opposite to strike (right-lateral).
    :param linewidth: Line width of the plotted planes and edges. Defaults
        to ``2``.
    :param facecolor: Color to use for quadrants of tension; can be a string,
        e.g. ``'r'``, ``'b'`` or three component color vector, [R G B].
        Defaults to ``'b'`` (blue).
    :param bgcolor: The background color. Defaults to ``'w'`` (white).
    :param edgecolor: Color of the edges. Defaults to ``'k'`` (black).
    :param alpha: The alpha level of the beach ball. Defaults to ``1.0``
        (opaque).
    :param xy: Origin position of the beach ball as tuple. Defaults to
        ``(0, 0)``.
    :type width: int
    :param width: Symbol size of beach ball. Defaults to ``200``.
    :param size: Controls the number of interpolation points for the
        curves. Minimum is automatically set to ``100``.
    :param nofill: Do not fill the beach ball, but only plot the planes.
    :param zorder: Set zorder. Artists with lower zorder values are drawn
        first.
    :param outfile: Output file string. Also used to automatically
        determine the output format. Supported file formats depend on your
        matplotlib backend. Most backends support png, pdf, ps, eps and
        svg. Defaults to ``None``.
    :param format: Format of the graph picture. If no format is given the
        outfile parameter will be used to try to automatically determine
        the output format. If no format is found it defaults to png output.
        If no outfile is specified but a format is, than a binary
        imagestring will be returned.
        Defaults to ``None``.
    :param fig: Give an existing figure instance to plot into. New Figure if
        set to ``None``.
    :returns: ``None`` if written to ``outfile``, a binary image string if
        only ``format`` is given, otherwise the matplotlib figure after
        showing it interactively.
    """
    # shrink slightly so the symbol is not clipped at the axes boundary
    plot_width = width * 0.95

    # plot the figure
    if not fig:
        fig = plt.figure(figsize=(3, 3), dpi=100)
        fig.subplots_adjust(left=0, bottom=0, right=1, top=1)
        fig.set_figheight(width // 100)
        fig.set_figwidth(width // 100)
    ax = fig.add_subplot(111, aspect='equal')

    # hide axes + ticks
    ax.axison = False

    # plot the collection
    collection = Beach(fm, linewidth=linewidth, facecolor=facecolor,
                       edgecolor=edgecolor, bgcolor=bgcolor,
                       alpha=alpha, nofill=nofill, xy=xy,
                       width=plot_width, size=size, zorder=zorder)
    ax.add_collection(collection)

    ax.autoscale_view(tight=False, scalex=True, scaley=True)
    # export
    if outfile:
        if format:
            fig.savefig(outfile, dpi=100, transparent=True, format=format)
        else:
            fig.savefig(outfile, dpi=100, transparent=True)
    elif format:
        # ``outfile`` is already known to be falsy in this branch, so the
        # previous ``format and not outfile`` test was redundant.
        imgdata = compatibility.BytesIO()
        fig.savefig(imgdata, format=format, dpi=100, transparent=True)
        imgdata.seek(0)
        return imgdata.read()
    else:
        plt.show()
        return fig
Exemplo n.º 30
0
    def test_reading_and_writing_full_station_tag(self):
        """
        Tests the reading and writing of a file with a more or less full
        station tag.

        Every field of the station element (comments, authors, site,
        equipment, operators, external references, ...) is asserted after
        reading, then the inventory is serialized again and compared
        against the original file.
        """
        filename = os.path.join(self.data_dir,
                                "full_station_field_station.xml")
        inv = obspy.station.read_inventory(filename)

        # Assert all the values...
        self.assertEqual(len(inv.networks), 1)
        self.assertEqual(inv.source, "OBS")
        self.assertEqual(inv.module, "Some Random Module")
        self.assertEqual(inv.module_uri, "http://www.some-random.site")
        self.assertEqual(inv.sender, "The ObsPy Team")
        self.assertEqual(inv.created, obspy.UTCDateTime(2013, 1, 1))
        self.assertEqual(len(inv.networks), 1)
        network = inv.networks[0]
        self.assertEqual(network.code, "PY")

        # Now assert the station specific values.
        self.assertEqual(len(network.stations), 1)
        station = network.stations[0]
        self.assertEqual(station.code, "PY")
        self.assertEqual(station.start_date, obspy.UTCDateTime(2011, 1, 1))
        self.assertEqual(station.end_date, obspy.UTCDateTime(2012, 1, 1))
        self.assertEqual(station.restricted_status, "open")
        self.assertEqual(station.alternate_code, "PYY")
        self.assertEqual(station.historical_code, "YYP")
        self.assertEqual(station.description, "Some Description...")
        self.assertEqual(len(station.comments), 2)
        comment_1 = station.comments[0]
        self.assertEqual(comment_1.value, "Comment number 1")
        self.assertEqual(comment_1.begin_effective_time,
                         obspy.UTCDateTime(1990, 5, 5))
        self.assertEqual(comment_1.end_effective_time,
                         obspy.UTCDateTime(2008, 2, 3))
        self.assertEqual(len(comment_1.authors), 1)
        authors = comment_1.authors[0]
        self.assertEqual(len(authors.names), 2)
        self.assertEqual(authors.names[0], "This person")
        self.assertEqual(authors.names[1], "has multiple names!")
        self.assertEqual(len(authors.agencies), 3)
        self.assertEqual(authors.agencies[0], "And also")
        self.assertEqual(authors.agencies[1], "many")
        self.assertEqual(authors.agencies[2], "many Agencies")
        self.assertEqual(len(authors.emails), 4)
        self.assertEqual(authors.emails[0], "*****@*****.**")
        self.assertEqual(authors.emails[1], "*****@*****.**")
        self.assertEqual(authors.emails[2], "*****@*****.**")
        self.assertEqual(authors.emails[3], "*****@*****.**")
        self.assertEqual(len(authors.phones), 2)
        self.assertEqual(authors.phones[0].description, "phone number 1")
        self.assertEqual(authors.phones[0].country_code, 49)
        self.assertEqual(authors.phones[0].area_code, 123)
        self.assertEqual(authors.phones[0].phone_number, "456-7890")
        self.assertEqual(authors.phones[1].description, "phone number 2")
        self.assertEqual(authors.phones[1].country_code, 34)
        self.assertEqual(authors.phones[1].area_code, 321)
        self.assertEqual(authors.phones[1].phone_number, "129-7890")
        comment_2 = station.comments[1]
        self.assertEqual(comment_2.value, "Comment number 2")
        self.assertEqual(comment_2.begin_effective_time,
                         obspy.UTCDateTime(1990, 5, 5))
        # Previously this line re-asserted ``comment_1.end_effective_time``
        # (a duplicate of the check above) — clearly a copy/paste slip;
        # comment_2's end time is what needs checking here.
        self.assertEqual(comment_2.end_effective_time,
                         obspy.UTCDateTime(2008, 2, 3))
        self.assertEqual(len(comment_2.authors), 3)
        for _i, author in enumerate(comment_2.authors):
            self.assertEqual(len(author.names), 1)
            self.assertEqual(author.names[0], "Person %i" % (_i + 1))
            self.assertEqual(len(author.agencies), 1)
            self.assertEqual(author.agencies[0], "Some agency")
            self.assertEqual(len(author.emails), 1)
            self.assertEqual(author.emails[0], "*****@*****.**")
            self.assertEqual(len(author.phones), 1)
            self.assertEqual(author.phones[0].description, None)
            self.assertEqual(author.phones[0].country_code, 49)
            self.assertEqual(author.phones[0].area_code, 123)
            self.assertEqual(author.phones[0].phone_number, "456-7890")

        # Coordinates and elevation.
        self.assertEqual(station.latitude, 10.0)
        self.assertEqual(station.longitude, 20.0)
        self.assertEqual(station.elevation, 100.0)

        # Site description.
        self.assertEqual(station.site.name, "Some site")
        self.assertEqual(station.site.description, "Some description")
        self.assertEqual(station.site.town, "Some town")
        self.assertEqual(station.site.county, "Some county")
        self.assertEqual(station.site.region, "Some region")
        self.assertEqual(station.site.country, "Some country")

        self.assertEqual(station.vault, "Some vault")
        self.assertEqual(station.geology, "Some geology")

        # Equipment entries.
        self.assertEqual(len(station.equipments), 2)
        self.assertEqual(station.equipments[0].resource_id, "some_id")
        self.assertEqual(station.equipments[0].type, "Some type")
        self.assertEqual(station.equipments[0].description, "Some description")
        self.assertEqual(station.equipments[0].manufacturer,
                         "Some manufacturer")
        self.assertEqual(station.equipments[0].vendor, "Some vendor")
        self.assertEqual(station.equipments[0].model, "Some model")
        self.assertEqual(station.equipments[0].serial_number, "12345-ABC")
        self.assertEqual(station.equipments[0].installation_date,
                         obspy.UTCDateTime(1990, 5, 5))
        self.assertEqual(station.equipments[0].removal_date,
                         obspy.UTCDateTime(1999, 5, 5))
        self.assertEqual(station.equipments[0].calibration_dates[0],
                         obspy.UTCDateTime(1990, 5, 5))
        self.assertEqual(station.equipments[0].calibration_dates[1],
                         obspy.UTCDateTime(1992, 5, 5))
        self.assertEqual(station.equipments[1].resource_id, "something_new")
        self.assertEqual(station.equipments[1].type, "Some type")
        self.assertEqual(station.equipments[1].description, "Some description")
        self.assertEqual(station.equipments[1].manufacturer,
                         "Some manufacturer")
        self.assertEqual(station.equipments[1].vendor, "Some vendor")
        self.assertEqual(station.equipments[1].model, "Some model")
        self.assertEqual(station.equipments[1].serial_number, "12345-ABC")
        self.assertEqual(station.equipments[1].installation_date,
                         obspy.UTCDateTime(1990, 5, 5))
        self.assertEqual(station.equipments[1].removal_date,
                         obspy.UTCDateTime(1999, 5, 5))
        self.assertEqual(station.equipments[1].calibration_dates[0],
                         obspy.UTCDateTime(1990, 5, 5))
        self.assertEqual(station.equipments[1].calibration_dates[1],
                         obspy.UTCDateTime(1992, 5, 5))

        # Operator entries.
        self.assertEqual(len(station.operators), 2)
        self.assertEqual(station.operators[0].agencies[0], "Agency 1")
        self.assertEqual(station.operators[0].agencies[1], "Agency 2")
        self.assertEqual(station.operators[0].contacts[0].names[0],
                         "This person")
        self.assertEqual(station.operators[0].contacts[0].names[1],
                         "has multiple names!")
        self.assertEqual(len(station.operators[0].contacts[0].agencies), 3)
        self.assertEqual(station.operators[0].contacts[0].agencies[0],
                         "And also")
        self.assertEqual(station.operators[0].contacts[0].agencies[1], "many")
        self.assertEqual(station.operators[0].contacts[0].agencies[2],
                         "many Agencies")
        self.assertEqual(len(station.operators[0].contacts[0].emails), 4)
        self.assertEqual(station.operators[0].contacts[0].emails[0],
                         "*****@*****.**")
        self.assertEqual(station.operators[0].contacts[0].emails[1],
                         "*****@*****.**")
        self.assertEqual(station.operators[0].contacts[0].emails[2],
                         "*****@*****.**")
        self.assertEqual(station.operators[0].contacts[0].emails[3],
                         "*****@*****.**")
        self.assertEqual(len(station.operators[0].contacts[0].phones), 2)
        self.assertEqual(
            station.operators[0].contacts[0].phones[0].description,
            "phone number 1")
        self.assertEqual(
            station.operators[0].contacts[0].phones[0].country_code, 49)
        self.assertEqual(
            station.operators[0].contacts[0].phones[0].area_code, 123)
        self.assertEqual(
            station.operators[0].contacts[0].phones[0].phone_number,
            "456-7890")
        self.assertEqual(
            station.operators[0].contacts[0].phones[1].description,
            "phone number 2")
        self.assertEqual(
            station.operators[0].contacts[0].phones[1].country_code, 34)
        self.assertEqual(station.operators[0].contacts[0].phones[1].area_code,
                         321)
        self.assertEqual(
            station.operators[0].contacts[0].phones[1].phone_number,
            "129-7890")
        self.assertEqual(station.operators[0].contacts[1].names[0], "Name")
        self.assertEqual(station.operators[0].contacts[1].agencies[0],
                         "Agency")
        self.assertEqual(station.operators[0].contacts[1].emails[0],
                         "*****@*****.**")
        self.assertEqual(
            station.operators[0].contacts[1].phones[0].description,
            "phone number 1")
        self.assertEqual(
            station.operators[0].contacts[1].phones[0].country_code, 49)
        self.assertEqual(
            station.operators[0].contacts[1].phones[0].area_code, 123)
        self.assertEqual(
            station.operators[0].contacts[1].phones[0].phone_number,
            "456-7890")
        self.assertEqual(station.operators[0].website, "http://www.web.site")

        self.assertEqual(station.operators[1].agencies[0], "Agency")
        self.assertEqual(station.operators[1].contacts[0].names[0], "New Name")
        self.assertEqual(station.operators[1].contacts[0].agencies[0],
                         "Agency")
        self.assertEqual(station.operators[1].contacts[0].emails[0],
                         "*****@*****.**")
        self.assertEqual(
            station.operators[1].contacts[0].phones[0].description,
            "phone number 1")
        self.assertEqual(
            station.operators[1].contacts[0].phones[0].country_code, 49)
        self.assertEqual(station.operators[1].contacts[0].phones[0].area_code,
                         123)
        self.assertEqual(
            station.operators[1].contacts[0].phones[0].phone_number,
            "456-7890")
        self.assertEqual(station.operators[1].website, "http://www.web.site")

        self.assertEqual(station.creation_date, obspy.UTCDateTime(1990, 5, 5))
        self.assertEqual(station.termination_date,
                         obspy.UTCDateTime(2009, 5, 5))
        self.assertEqual(station.total_number_of_channels, 100)
        self.assertEqual(station.selected_number_of_channels, 1)

        # External references.
        self.assertEqual(len(station.external_references), 2)
        self.assertEqual(station.external_references[0].uri,
                         "http://path.to/something")
        self.assertEqual(station.external_references[0].description,
                         "Some description")
        self.assertEqual(station.external_references[1].uri,
                         "http://path.to/something/else")
        self.assertEqual(station.external_references[1].description,
                         "Some other description")

        # Now write it again and compare to the original file.
        file_buffer = compatibility.BytesIO()
        inv.write(file_buffer, format="StationXML", validate=True,
                  _suppress_module_tags=True)
        file_buffer.seek(0, 0)

        with open(filename, "rb") as open_file:
            expected_xml_file_buffer = compatibility.BytesIO(open_file.read())
        expected_xml_file_buffer.seek(0, 0)

        self._assert_station_xml_equality(file_buffer,
                                          expected_xml_file_buffer)