def read_stations(fname):
    """
    Read station positions from a whitespace-delimited file.

    Example file:

    # station lat lon elev
    STN 10.0 -50.0 160
    """
    ret = AttribDict()
    with open(fname) as f:
        for line in f:
            # skip comment lines and blank lines
            line = line.strip()
            if not line or line.startswith('#'):
                continue
            vals = line.split()
            ret[vals[0]] = AttribDict()
            ret[vals[0]].latitude = float(vals[1])
            ret[vals[0]].longitude = float(vals[2])
            ret[vals[0]].elevation = float(vals[3])
    return ret
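# --- Illustrative usage sketch (not part of the original module) ---
# A minimal, hedged example of how read_stations() above might be used. It
# only assumes what the function itself assumes: a whitespace-delimited file
# with columns "station lat lon elev" and '#' comment lines. The file name
# and station code below are hypothetical.
def _example_read_stations():
    import os
    import tempfile
    with tempfile.TemporaryDirectory() as tmpdir:
        fname = os.path.join(tmpdir, 'stations.txt')
        with open(fname, 'w') as f:
            f.write('# station lat lon elev\n')
            f.write('STN 10.0 -50.0 160\n')
        stations = read_stations(fname)
        # Each entry is an AttribDict with latitude, longitude and elevation
        print(stations['STN'].latitude,
              stations['STN'].longitude,
              stations['STN'].elevation)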
def _read_y(filename, headonly=False, **kwargs):  # @UnusedVariable
    """
    Reads a Nanometrics Y file and returns an ObsPy Stream object.

    .. warning::
        This function should NOT be called directly, it registers via the
        ObsPy :func:`~obspy.core.stream.read` function, call this instead.

    :type filename: str
    :param filename: Nanometrics Y file to be read.
    :type headonly: bool, optional
    :param headonly: If set to True, read only the head. This is most useful
        for scanning available data in huge (temporary) data sets.
    :rtype: :class:`~obspy.core.stream.Stream`
    :return: An ObsPy Stream object.

    .. rubric:: Example

    >>> from obspy import read
    >>> st = read("/path/to/YAYT_BHZ_20021223.124800")
    >>> st  # doctest: +ELLIPSIS
    <obspy.core.stream.Stream object at 0x...>
    >>> print(st)  # doctest: +ELLIPSIS
    1 Trace(s) in Stream:
    .AYT..BHZ | 2002-12-23T12:48:00.000100Z - ... | 100.0 Hz, 18000 samples
    """
    # The first tag in a Y-file must be the TAG_Y_FILE (0) tag. This must be
    # followed by the following tags, in any order:
    #     TAG_STATION_INFO (1)
    #     TAG_STATION_LOCATION (2)
    #     TAG_STATION_PARAMETERS (3)
    #     TAG_STATION_DATABASE (4)
    #     TAG_SERIES_INFO (5)
    #     TAG_SERIES_DATABASE (6)
    # The following tag is optional:
    #     TAG_STATION_RESPONSE (26)
    # The last tag in the file must be a TAG_DATA_INT32 (7) tag. This tag must
    # be followed by an array of LONG's. The number of entries in the array
    # must agree with what was described in the TAG_SERIES_INFO data.
    with open(filename, "rb") as fh:
        trace = Trace()
        trace.stats.y = AttribDict()
        count = -1
        while True:
            endian, tag_type, next_tag, _next_same = __parse_tag(fh)
            if tag_type == 1:
                # TAG_STATION_INFO
                # UCHAR Update[8]
                #     This field is only used internally for administrative
                #     purposes. It should always be set to zeroes.
                # UCHAR Station[5] (BLANKPAD)
                #     Station is the five letter SEED format station
                #     identification.
                # UCHAR Location[2] (BLANKPAD)
                #     Location Location is the two letter SEED format location
                #     identification.
                # UCHAR Channel[3] (BLANKPAD)
                #     Channel Channel is the three letter SEED format channel
                #     identification.
                # UCHAR NetworkID[51] (ASCIIZ)
                #     This is some descriptive text identifying the network.
                # UCHAR SiteName[61] (ASCIIZ)
                #     SiteName is some text identifying the site.
                # UCHAR Comment[31] (ASCIIZ)
                #     Comment is any comment for this station.
                # UCHAR SensorType[51] (ASCIIZ)
                #     SensorType is some text describing the type of sensor
                #     used at the station.
                # UCHAR DataFormat[7] (ASCIIZ)
                #     DataFormat is some text describing the data format
                #     recorded at the station.
                data = fh.read(next_tag)
                parts = _unpack_with_asciiz_and_decode(
                    b"5s2s3s51z61z31z51z7z", data[8:])
                trace.stats.station = parts[0]
                trace.stats.location = parts[1]
                trace.stats.channel = parts[2]
                # extra
                params = AttribDict()
                params.network_id = parts[3]
                params.site_name = parts[4]
                params.comment = parts[5]
                params.sensor_type = parts[6]
                params.data_format = parts[7]
                trace.stats.y.tag_station_info = params
            elif tag_type == 2:
                # TAG_STATION_LOCATION
                # UCHAR Update[8]
                #     This field is only used internally for administrative
                #     purposes. It should always be set to zeroes.
                # FLOAT Latitude
                #     Latitude in degrees of the location of the station. The
                #     latitude should be between -90 (South) and +90 (North).
                # FLOAT Longitude
                #     Longitude in degrees of the location of the station. The
                #     longitude should be between -180 (West) and +180 (East).
                # FLOAT Elevation
                #     Elevation in meters above sea level of the station.
                # FLOAT Depth
                #     Depth is the depth in meters of the sensor.
                # FLOAT Azimuth
                #     Azimuth of the sensor in degrees clockwise.
                # FLOAT Dip
                #     Dip is the dip of the sensor. 90 degrees is defined as
                #     vertical right way up.
                data = fh.read(next_tag)
                parts = _unpack_with_asciiz_and_decode(
                    endian + b"ffffff", data[8:])
                params = AttribDict()
                params.latitude = parts[0]
                params.longitude = parts[1]
                params.elevation = parts[2]
                params.depth = parts[3]
                params.azimuth = parts[4]
                params.dip = parts[5]
                trace.stats.y.tag_station_location = params
            elif tag_type == 3:
                # TAG_STATION_PARAMETERS
                # UCHAR Update[16]
                #     This field is only used internally for administrative
                #     purposes. It should always be set to zeroes.
                # REALTIME StartValidTime
                #     Time that the information in these records became valid.
                # REALTIME EndValidTime
                #     Time that the information in these records became
                #     invalid.
                # FLOAT Sensitivity
                #     Sensitivity of the sensor in nanometers per bit.
                # FLOAT SensFreq
                #     Frequency at which the sensitivity was measured.
                # FLOAT SampleRate
                #     This is the number of samples per second. This value can
                #     be less than 1.0. (i.e. 0.1)
                # FLOAT MaxClkDrift
                #     Maximum drift rate of the clock in seconds per sample.
                # UCHAR SensUnits[24] (ASCIIZ)
                #     Some text indicating the units in which the sensitivity
                #     was measured.
                # UCHAR CalibUnits[24] (ASCIIZ)
                #     Some text indicating the units in which calibration
                #     input was measured.
                # UCHAR ChanFlags[27] (BLANKPAD)
                #     Text indicating the channel flags according to the SEED
                #     definition.
                # UCHAR UpdateFlag
                #     This flag must be “N” or “U” according to the SEED
                #     definition.
                # UCHAR Filler[4]
                #     Filler Pads out the record to satisfy the alignment
                #     restrictions for reading data on a SPARC processor.
                data = fh.read(next_tag)
                parts = _unpack_with_asciiz_and_decode(
                    endian + b"ddffff24z24z27sc4s", data[16:])
                trace.stats.sampling_rate = parts[4]
                # extra
                params = AttribDict()
                params.start_valid_time = parts[0]
                params.end_valid_time = parts[1]
                params.sensitivity = parts[2]
                params.sens_freq = parts[3]
                params.sample_rate = parts[4]
                params.max_clk_drift = parts[5]
                params.sens_units = parts[6]
                params.calib_units = parts[7]
                params.chan_flags = parts[8]
                params.update_flag = parts[9]
                trace.stats.y.tag_station_parameters = params
            elif tag_type == 4:
                # TAG_STATION_DATABASE
                # UCHAR Update[8]
                #     This field is only used internally for administrative
                #     purposes. It should always be set to zeroes.
                # REALTIME LoadDate
                #     Date the information was loaded into the database.
                # UCHAR Key[16]
                #     Unique key that identifies this record in the database.
                data = fh.read(next_tag)
                parts = _unpack_with_asciiz_and_decode(
                    endian + b"d16s", data[8:])
                params = AttribDict()
                params.load_date = parts[0]
                params.key = parts[1]
                trace.stats.y.tag_station_database = params
            elif tag_type == 5:
                # TAG_SERIES_INFO
                # UCHAR Update[16]
                #     This field is only used internally for administrative
                #     purposes. It should always be set to zeroes.
                # REALTIME StartTime
                #     This is start time of the data in this series.
                # REALTIME EndTime
                #     This is end time of the data in this series.
                # ULONG NumSamples
                #     This is the number of samples of data in this series.
                # LONG DCOffset
                #     DCOffset is the DC offset of the data.
                # LONG MaxAmplitude
                #     MaxAmplitude is the maximum amplitude of the data.
                # LONG MinAmplitude
                #     MinAmplitude is the minimum amplitude of the data.
                # UCHAR Format[8] (ASCIIZ)
                #     This is the format of the data. This should always be
                #     “YFILE”.
                # UCHAR FormatVersion[8] (ASCIIZ)
                #     FormatVersion is the version of the format of the data.
                #     This should always be “5.0”
                data = fh.read(next_tag)
                parts = _unpack_with_asciiz_and_decode(
                    endian + b"ddLlll8z8z", data[16:])
                trace.stats.starttime = UTCDateTime(parts[0])
                count = parts[2]
                # extra
                params = AttribDict()
                params.endtime = UTCDateTime(parts[1])
                params.num_samples = parts[2]
                params.dc_offset = parts[3]
                params.max_amplitude = parts[4]
                params.min_amplitude = parts[5]
                params.format = parts[6]
                params.format_version = parts[7]
                trace.stats.y.tag_series_info = params
            elif tag_type == 6:
                # TAG_SERIES_DATABASE
                # UCHAR Update[8]
                #     This field is only used internally for administrative
                #     purposes. It should always be set to zeroes.
                # REALTIME LoadDate
                #     Date the information was loaded into the database.
                # UCHAR Key[16]
                #     Unique key that identifies this record in the database.
                data = fh.read(next_tag)
                parts = _unpack_with_asciiz_and_decode(
                    endian + b"d16s", data[8:])
                params = AttribDict()
                params.load_date = parts[0]
                params.key = parts[1]
                trace.stats.y.tag_series_database = params
            elif tag_type == 26:
                # TAG_STATION_RESPONSE
                # UCHAR Update[8]
                #     This field is only used internally for administrative
                #     purposes. It should always be set to zeroes.
                # UCHAR PathName[260]
                #     PathName is the full name of the file which contains the
                #     response information for this station.
                data = fh.read(next_tag)
                parts = _unpack_with_asciiz_and_decode(b"260s", data[8:])
                params = AttribDict()
                params.path_name = parts[0]
                trace.stats.y.tag_station_response = params
            elif tag_type == 7:
                # TAG_DATA_INT32
                trace.data = from_buffer(
                    fh.read(np.dtype(np.int32).itemsize * count),
                    dtype=np.int32)
                # break loop as TAG_DATA_INT32 should be the last tag in file
                break
            else:
                fh.seek(next_tag, 1)
    return Stream([trace])
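# --- Illustrative usage sketch (not part of the original module) ---
# A hedged example of reading a Nanometrics Y file through the generic
# obspy.read() entry point (as the docstring above recommends) and then
# inspecting the extra per-tag metadata that _read_y() stores under
# trace.stats.y. The file path is hypothetical.
def _example_read_y_metadata():
    from obspy import read
    st = read('/path/to/YAYT_BHZ_20021223.124800')
    tr = st[0]
    # Standard trace header fields filled from TAG_STATION_INFO,
    # TAG_STATION_PARAMETERS and TAG_SERIES_INFO
    print(tr.id, tr.stats.sampling_rate, tr.stats.starttime)
    # Format-specific metadata kept under the 'y' AttribDict
    print(tr.stats.y.tag_station_location.latitude,
          tr.stats.y.tag_station_location.longitude)
    print(tr.stats.y.tag_series_info.num_samples)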
def _add_paz_and_coords(trace, dataless, paz_dict=None):
    """Attach poles-and-zeros (PAZ) and station coordinates to a trace."""
    trace.stats.paz = None
    trace.stats.coords = None
    traceid = trace.get_id()
    time = trace.stats.starttime
    # We first look into the dataless dictionary, if available
    if isinstance(dataless, dict):
        for sp in dataless.values():
            # Check first if our traceid is in the dataless file
            if traceid not in str(sp):
                continue
            try:
                paz = AttribDict(sp.get_paz(traceid, time))
                coords = AttribDict(sp.get_coordinates(traceid, time))
            except SEEDParserException as err:
                logger.error('%s time: %s' % (err, str(time)))
                pass
    elif isinstance(dataless, Inventory):
        try:
            with warnings.catch_warnings(record=True) as warns:
                # get_sacpz() can issue warnings on more than one PAZ found,
                # so let's catch those warnings and log them properly
                sacpz = dataless.get_response(traceid, time).get_sacpz()
                for w in warns:
                    message = str(w.message)
                    logger.warning('%s: %s' % (traceid, message))
            attach_paz(trace, io.StringIO(sacpz))
            paz = trace.stats.paz
            coords = AttribDict(dataless.get_coordinates(traceid, time))
        except Exception as err:
            logger.error(
                '%s traceid: %s time: %s' % (err, traceid, str(time)))
            pass
    try:
        trace.stats.paz = paz
        # elevation is in meters in the dataless
        coords.elevation /= 1000.
        trace.stats.coords = coords
    except Exception:
        pass
    # If we couldn't find any PAZ in the dataless dictionary,
    # we try to attach paz from the paz dictionary passed as argument
    if trace.stats.paz is None and paz_dict is not None:
        # Look for traceid or for a generic paz
        net, sta, loc, chan = trace.id.split('.')
        ids = [
            trace.id,
            '.'.join(('__', '__', '__', '__')),
            '.'.join((net, '__', '__', '__')),
            '.'.join((net, sta, '__', '__')),
            '.'.join((net, sta, loc, '__')),
            'default'
        ]
        for id in ids:
            try:
                paz = paz_dict[id]
                trace.stats.paz = paz
            except KeyError:
                pass
    # If we're still out of luck,
    # we try to build the sensitivity from the
    # user2 and user3 header fields (ISNet format)
    if trace.stats.paz is None and trace.stats.format == 'ISNet':
        try:
            # instrument constants
            u2 = trace.stats.sac.user2
            u3 = trace.stats.sac.user3
            paz = AttribDict()
            paz.sensitivity = u3 / u2
            paz.poles = []
            paz.zeros = []
            paz.gain = 1
            trace.stats.paz = paz
        except AttributeError:
            pass
    # Still no paz? Antilles or IPOC format!
    if (trace.stats.paz is None and
            (trace.stats.format == 'Antilles' or
             trace.stats.format == 'IPOC')):
        paz = AttribDict()
        paz.sensitivity = 1
        paz.poles = []
        paz.zeros = []
        paz.gain = 1
        trace.stats.paz = paz
    # If we still don't have trace coordinates,
    # we try to get them from SAC header
    if trace.stats.coords is None:
        try:
            stla = trace.stats.sac.stla
            stlo = trace.stats.sac.stlo
            try:
                stel = trace.stats.sac.stel
                # elevation is in meters in SAC header:
                stel /= 1000.
            except AttributeError:
                stel = 0.
            coords = AttribDict()
            coords.elevation = stel
            coords.latitude = stla
            coords.longitude = stlo
            trace.stats.coords = coords
        except AttributeError:
            pass
    # Still no coords? Raise an exception
    if trace.stats.coords is None:
        raise Exception(
            '%s: could not find coords for trace: skipping trace' % traceid)
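# --- Illustrative usage sketch (not part of the original module) ---
# A hedged example of the paz_dict fallback of _add_paz_and_coords() above,
# for the case where no dataless SEED or Inventory metadata is available.
# The trace header values, station code and instrument constants are made up
# for the example; 'default' is one of the generic ids the function looks up.
def _example_add_paz_from_dict():
    import numpy as np
    from obspy import Trace, UTCDateTime
    from obspy.core import AttribDict
    tr = Trace(data=np.zeros(100, dtype=np.int32))
    tr.stats.network = 'XX'
    tr.stats.station = 'STN'
    tr.stats.channel = 'HHZ'
    tr.stats.starttime = UTCDateTime(2020, 1, 1)
    # SAC-style header, used as the coordinate fallback
    tr.stats.sac = AttribDict(stla=10.0, stlo=-50.0, stel=160.0)
    paz_dict = {
        'default': AttribDict(sensitivity=1.5e9, poles=[], zeros=[], gain=1)
    }
    _add_paz_and_coords(tr, dataless=None, paz_dict=paz_dict)
    print(tr.stats.paz.sensitivity, tr.stats.coords)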
def _read_y(filename, headonly=False, **kwargs):  # @UnusedVariable
    """
    Reads a Nanometrics Y file and returns an ObsPy Stream object.

    .. warning::
        This function should NOT be called directly, it registers via the
        ObsPy :func:`~obspy.core.stream.read` function, call this instead.

    :type filename: str
    :param filename: Nanometrics Y file to be read.
    :type headonly: bool, optional
    :param headonly: If set to True, read only the head. This is most useful
        for scanning available data in huge (temporary) data sets.
    :rtype: :class:`~obspy.core.stream.Stream`
    :return: An ObsPy Stream object.

    .. rubric:: Example

    >>> from obspy import read
    >>> st = read("/path/to/YAYT_BHZ_20021223.124800")
    >>> st  # doctest: +ELLIPSIS
    <obspy.core.stream.Stream object at 0x...>
    >>> print(st)  # doctest: +ELLIPSIS
    1 Trace(s) in Stream:
    .AYT..BHZ | 2002-12-23T12:48:00.000100Z - ... | 100.0 Hz, 18000 samples
    """
    # The first tag in a Y-file must be the TAG_Y_FILE (0) tag. This must be
    # followed by the following tags, in any order:
    #     TAG_STATION_INFO (1)
    #     TAG_STATION_LOCATION (2)
    #     TAG_STATION_PARAMETERS (3)
    #     TAG_STATION_DATABASE (4)
    #     TAG_SERIES_INFO (5)
    #     TAG_SERIES_DATABASE (6)
    # The following tag is optional:
    #     TAG_STATION_RESPONSE (26)
    # The last tag in the file must be a TAG_DATA_INT32 (7) tag. This tag must
    # be followed by an array of LONG's. The number of entries in the array
    # must agree with what was described in the TAG_SERIES_INFO data.
    with open(filename, 'rb') as fh:
        trace = Trace()
        trace.stats.y = AttribDict()
        count = -1
        while True:
            endian, tag_type, next_tag, _next_same = _parse_tag(fh)
            if tag_type == 1:
                # TAG_STATION_INFO
                # UCHAR Update[8]
                #     This field is only used internally for administrative
                #     purposes. It should always be set to zeroes.
                # UCHAR Station[5] (BLANKPAD)
                #     Station is the five letter SEED format station
                #     identification.
                # UCHAR Location[2] (BLANKPAD)
                #     Location Location is the two letter SEED format location
                #     identification.
                # UCHAR Channel[3] (BLANKPAD)
                #     Channel Channel is the three letter SEED format channel
                #     identification.
                # UCHAR NetworkID[51] (ASCIIZ)
                #     This is some descriptive text identifying the network.
                # UCHAR SiteName[61] (ASCIIZ)
                #     SiteName is some text identifying the site.
                # UCHAR Comment[31] (ASCIIZ)
                #     Comment is any comment for this station.
                # UCHAR SensorType[51] (ASCIIZ)
                #     SensorType is some text describing the type of sensor
                #     used at the station.
                # UCHAR DataFormat[7] (ASCIIZ)
                #     DataFormat is some text describing the data format
                #     recorded at the station.
                data = fh.read(next_tag)
                parts = _unpack_with_asciiz_and_decode(
                    b'5s2s3s51z61z31z51z7z', data[8:])
                trace.stats.station = parts[0]
                trace.stats.location = parts[1]
                trace.stats.channel = parts[2]
                # extra
                params = AttribDict()
                params.network_id = parts[3]
                params.site_name = parts[4]
                params.comment = parts[5]
                params.sensor_type = parts[6]
                params.data_format = parts[7]
                trace.stats.y.tag_station_info = params
            elif tag_type == 2:
                # TAG_STATION_LOCATION
                # UCHAR Update[8]
                #     This field is only used internally for administrative
                #     purposes. It should always be set to zeroes.
                # FLOAT Latitude
                #     Latitude in degrees of the location of the station. The
                #     latitude should be between -90 (South) and +90 (North).
                # FLOAT Longitude
                #     Longitude in degrees of the location of the station. The
                #     longitude should be between -180 (West) and +180 (East).
                # FLOAT Elevation
                #     Elevation in meters above sea level of the station.
                # FLOAT Depth
                #     Depth is the depth in meters of the sensor.
                # FLOAT Azimuth
                #     Azimuth of the sensor in degrees clockwise.
                # FLOAT Dip
                #     Dip is the dip of the sensor. 90 degrees is defined as
                #     vertical right way up.
                data = fh.read(next_tag)
                parts = _unpack_with_asciiz_and_decode(
                    endian + b'ffffff', data[8:])
                params = AttribDict()
                params.latitude = parts[0]
                params.longitude = parts[1]
                params.elevation = parts[2]
                params.depth = parts[3]
                params.azimuth = parts[4]
                params.dip = parts[5]
                trace.stats.y.tag_station_location = params
            elif tag_type == 3:
                # TAG_STATION_PARAMETERS
                # UCHAR Update[16]
                #     This field is only used internally for administrative
                #     purposes. It should always be set to zeroes.
                # REALTIME StartValidTime
                #     Time that the information in these records became valid.
                # REALTIME EndValidTime
                #     Time that the information in these records became
                #     invalid.
                # FLOAT Sensitivity
                #     Sensitivity of the sensor in nanometers per bit.
                # FLOAT SensFreq
                #     Frequency at which the sensitivity was measured.
                # FLOAT SampleRate
                #     This is the number of samples per second. This value can
                #     be less than 1.0. (i.e. 0.1)
                # FLOAT MaxClkDrift
                #     Maximum drift rate of the clock in seconds per sample.
                # UCHAR SensUnits[24] (ASCIIZ)
                #     Some text indicating the units in which the sensitivity
                #     was measured.
                # UCHAR CalibUnits[24] (ASCIIZ)
                #     Some text indicating the units in which calibration
                #     input was measured.
                # UCHAR ChanFlags[27] (BLANKPAD)
                #     Text indicating the channel flags according to the SEED
                #     definition.
                # UCHAR UpdateFlag
                #     This flag must be “N” or “U” according to the SEED
                #     definition.
                # UCHAR Filler[4]
                #     Filler Pads out the record to satisfy the alignment
                #     restrictions for reading data on a SPARC processor.
                data = fh.read(next_tag)
                parts = _unpack_with_asciiz_and_decode(
                    endian + b'ddffff24z24z27sc4s', data[16:])
                trace.stats.sampling_rate = parts[4]
                # extra
                params = AttribDict()
                params.start_valid_time = parts[0]
                params.end_valid_time = parts[1]
                params.sensitivity = parts[2]
                params.sens_freq = parts[3]
                params.sample_rate = parts[4]
                params.max_clk_drift = parts[5]
                params.sens_units = parts[6]
                params.calib_units = parts[7]
                params.chan_flags = parts[8]
                params.update_flag = parts[9]
                trace.stats.y.tag_station_parameters = params
            elif tag_type == 4:
                # TAG_STATION_DATABASE
                # UCHAR Update[8]
                #     This field is only used internally for administrative
                #     purposes. It should always be set to zeroes.
                # REALTIME LoadDate
                #     Date the information was loaded into the database.
                # UCHAR Key[16]
                #     Unique key that identifies this record in the database.
                data = fh.read(next_tag)
                parts = _unpack_with_asciiz_and_decode(
                    endian + b'd16s', data[8:])
                params = AttribDict()
                params.load_date = parts[0]
                params.key = parts[1]
                trace.stats.y.tag_station_database = params
            elif tag_type == 5:
                # TAG_SERIES_INFO
                # UCHAR Update[16]
                #     This field is only used internally for administrative
                #     purposes. It should always be set to zeroes.
                # REALTIME StartTime
                #     This is start time of the data in this series.
                # REALTIME EndTime
                #     This is end time of the data in this series.
                # ULONG NumSamples
                #     This is the number of samples of data in this series.
                # LONG DCOffset
                #     DCOffset is the DC offset of the data.
                # LONG MaxAmplitude
                #     MaxAmplitude is the maximum amplitude of the data.
                # LONG MinAmplitude
                #     MinAmplitude is the minimum amplitude of the data.
                # UCHAR Format[8] (ASCIIZ)
                #     This is the format of the data. This should always be
                #     “YFILE”.
                # UCHAR FormatVersion[8] (ASCIIZ)
                #     FormatVersion is the version of the format of the data.
                #     This should always be “5.0”
                data = fh.read(next_tag)
                parts = _unpack_with_asciiz_and_decode(
                    endian + b'ddLlll8z8z', data[16:])
                trace.stats.starttime = UTCDateTime(parts[0])
                count = parts[2]
                # extra
                params = AttribDict()
                params.endtime = UTCDateTime(parts[1])
                params.num_samples = parts[2]
                params.dc_offset = parts[3]
                params.max_amplitude = parts[4]
                params.min_amplitude = parts[5]
                params.format = parts[6]
                params.format_version = parts[7]
                trace.stats.y.tag_series_info = params
            elif tag_type == 6:
                # TAG_SERIES_DATABASE
                # UCHAR Update[8]
                #     This field is only used internally for administrative
                #     purposes. It should always be set to zeroes.
                # REALTIME LoadDate
                #     Date the information was loaded into the database.
                # UCHAR Key[16]
                #     Unique key that identifies this record in the database.
                data = fh.read(next_tag)
                parts = _unpack_with_asciiz_and_decode(
                    endian + b'd16s', data[8:])
                params = AttribDict()
                params.load_date = parts[0]
                params.key = parts[1]
                trace.stats.y.tag_series_database = params
            elif tag_type == 26:
                # TAG_STATION_RESPONSE
                # UCHAR Update[8]
                #     This field is only used internally for administrative
                #     purposes. It should always be set to zeroes.
                # UCHAR PathName[260]
                #     PathName is the full name of the file which contains the
                #     response information for this station.
                data = fh.read(next_tag)
                parts = _unpack_with_asciiz_and_decode(b'260s', data[8:])
                params = AttribDict()
                params.path_name = parts[0]
                trace.stats.y.tag_station_response = params
            elif tag_type == 7:
                # TAG_DATA_INT32
                trace.data = from_buffer(
                    fh.read(np.dtype(np.int32).itemsize * count),
                    dtype=np.int32)
                # break loop as TAG_DATA_INT32 should be the last tag in file
                break
            else:
                fh.seek(next_tag, 1)
    return Stream([trace])
def _add_paz_and_coords(trace, metadata, paz_dict, config):
    """Attach poles-and-zeros (PAZ) and station coordinates to a trace."""
    traceid = trace.get_id()
    # If we already know that traceid is skipped, raise a silent exception
    if traceid in _add_paz_and_coords.skipped:
        raise Exception()
    trace.stats.paz = None
    trace.stats.coords = None
    time = trace.stats.starttime
    # We first check whether metadata is a dataless dictionary
    if isinstance(metadata, dict):
        for sp in metadata.values():
            # Check first if our traceid is in the dataless file
            if traceid not in str(sp):
                continue
            try:
                paz = AttribDict(sp.get_paz(traceid, time))
                coords = AttribDict(sp.get_coordinates(traceid, time))
            except SEEDParserException as err:
                logger.error('%s time: %s' % (err, str(time)))
                pass
    elif isinstance(metadata, Inventory):
        try:
            with warnings.catch_warnings(record=True) as warns:
                # get_sacpz() can issue warnings on more than one PAZ found,
                # so let's catch those warnings and log them properly
                sacpz = metadata.get_response(traceid, time).get_sacpz()
                for w in warns:
                    message = str(w.message)
                    logger.warning('%s: %s' % (traceid, message))
            attach_paz(trace, io.StringIO(sacpz))
            paz = trace.stats.paz
            coords = AttribDict(metadata.get_coordinates(traceid, time))
        except Exception as err:
            logger.error(
                '%s traceid: %s time: %s' % (err, traceid, str(time)))
            pass
    try:
        trace.stats.paz = paz
        # elevation is in meters
        coords.elevation /= 1000.
        trace.stats.coords = coords
    except Exception:
        pass
    # If we couldn't find any PAZ in the dataless dictionary
    # or in the Inventory, we try to attach paz from a paz dictionary
    if trace.stats.paz is None and paz_dict is not None:
        # Look for traceid or for a generic paz
        net, sta, loc, chan = trace.id.split('.')
        ids = [
            trace.id,
            '.'.join(('__', '__', '__', '__')),
            '.'.join((net, '__', '__', '__')),
            '.'.join((net, sta, '__', '__')),
            '.'.join((net, sta, loc, '__')),
            'default'
        ]
        for id in ids:
            try:
                paz = paz_dict[id]
                trace.stats.paz = paz
            except KeyError:
                pass
    # If a "sensitivity" config option is provided, override the paz computed
    # from metadata or paz_dict
    if config.sensitivity is not None:
        # instrument constants
        paz = AttribDict()
        paz.sensitivity = _compute_sensitivity(trace, config)
        paz.poles = []
        paz.zeros = []
        paz.gain = 1
        trace.stats.paz = paz
    # If we still don't have trace coordinates,
    # we try to get them from SAC header
    if trace.stats.coords is None:
        try:
            stla = trace.stats.sac.stla
            stlo = trace.stats.sac.stlo
            try:
                stel = trace.stats.sac.stel
                # elevation is in meters in SAC header:
                stel /= 1000.
            except AttributeError:
                stel = 0.
            coords = AttribDict()
            coords.elevation = stel
            coords.latitude = stla
            coords.longitude = stlo
            trace.stats.coords = coords
        except AttributeError:
            pass
    # Still no coords? Raise an exception
    if trace.stats.coords is None:
        _add_paz_and_coords.skipped.append(traceid)
        raise Exception(
            '%s: could not find coords for trace: skipping trace' % traceid)
    if trace.stats.coords.latitude == trace.stats.coords.longitude == 0:
        logger.warning(
            '{}: trace has latitude and longitude equal to zero!'.format(
                traceid))
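# --- Illustrative usage sketch (not part of the original module) ---
# A hedged example of the config-driven variant above. It assumes a simple
# namespace-like config object with a "sensitivity" attribute (set to None
# here, so the paz_dict fallback is used instead of the
# _compute_sensitivity() override), and it initializes the function's
# "skipped" attribute in case it has not been set up elsewhere in the module.
# All header values and constants below are made up for the example.
def _example_add_paz_with_config():
    import numpy as np
    from obspy import Trace, UTCDateTime
    from obspy.core import AttribDict
    if not hasattr(_add_paz_and_coords, 'skipped'):
        _add_paz_and_coords.skipped = []
    tr = Trace(data=np.zeros(100, dtype=np.int32))
    tr.stats.network = 'XX'
    tr.stats.station = 'STN'
    tr.stats.channel = 'HHZ'
    tr.stats.starttime = UTCDateTime(2020, 1, 1)
    # SAC-style header, used as the coordinate fallback
    tr.stats.sac = AttribDict(stla=10.0, stlo=-50.0, stel=160.0)
    paz_dict = {
        'default': AttribDict(sensitivity=1.5e9, poles=[], zeros=[], gain=1)
    }
    config = AttribDict(sensitivity=None)
    _add_paz_and_coords(tr, metadata=None, paz_dict=paz_dict, config=config)
    print(tr.stats.paz.sensitivity, tr.stats.coords)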