Example #1
def _parse_hypocenter(hypo_file):
    if hypo_file is None:
        return None

    hypo = AttribDict()
    hypo.latitude = None
    hypo.longitude = None
    hypo.depth = None
    hypo.origin_time = None
    hypo.evid = None

    if isinstance(hypo_file, str):
        try:
            with open(hypo_file) as fp:
                # Corinth hypocenter file format:
                # TODO: check file format
                line = fp.readline()
                # Skip the first line if it contains alphabetic
                # characters in the first 10 columns:
                if any(c.isalpha() for c in line[0:10]):
                    line = fp.readline()
        except IOError as err:
            logger.error(err)
            ssp_exit(1)

        timestr = line[0:17]
        # There are two possible formats for the timestring.
        # We try both of them
        try:
            dt = datetime.strptime(timestr, '%y%m%d %H %M%S.%f')
        except ValueError:
            dt = datetime.strptime(timestr, '%y%m%d %H%M %S.%f')
        hypo.origin_time = UTCDateTime(dt)

        lat = float(line[17:20])
        lat_deg = float(line[21:26])
        hypo.latitude = lat + lat_deg / 60
        lon = float(line[26:30])
        lon_deg = float(line[31:36])
        hypo.longitude = lon + lon_deg / 60
        hypo.depth = float(line[36:42])
        evid = os.path.basename(hypo_file)
        evid = evid.replace('.phs', '').replace('.h', '').replace('.hyp', '')
        hypo.evid = evid

    else:  # FIXME: put a condition here!
        ev = hypo_file  # FIXME: improve this!
        hypo.latitude = ev.latitude
        hypo.longitude = ev.longitude
        hypo.depth = ev.depth
        hypo.origin_time = ev.utcdate
        hypo.evid = ev.event_id

    return hypo
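
A hedged usage sketch of the two input branches above (the path and the
event object are hypothetical; the object branch only needs latitude,
longitude, depth, utcdate and event_id attributes):

hypo = _parse_hypocenter('/data/events/2002.hyp')  # fixed-column summary file
hypo = _parse_hypocenter(picked_event)             # event-like object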
Example #2
def _parse_hypo71_hypocenter(hypo_file):
    with open(hypo_file) as fp:
        line = fp.readline()
        # Skip the first line if it contains alphabetic
        # characters in the first 10 columns:
        if any(c.isalpha() for c in line[0:10]):
            line = fp.readline()
    hypo = AttribDict()
    timestr = line[0:17]
    # There are two possible formats for the timestring.
    # We try both of them
    try:
        dt = datetime.strptime(timestr, '%y%m%d %H %M%S.%f')
    except ValueError:
        dt = datetime.strptime(timestr, '%y%m%d %H%M %S.%f')
    hypo.origin_time = UTCDateTime(dt)
    lat = float(line[17:20])
    lat_deg = float(line[21:26])
    hypo.latitude = lat + lat_deg/60
    lon = float(line[26:30])
    lon_deg = float(line[31:36])
    hypo.longitude = lon + lon_deg/60
    hypo.depth = float(line[36:42])
    evid = os.path.basename(hypo_file)
    evid = evid.replace('.phs', '').replace('.h', '').replace('.hyp', '')
    hypo.evid = evid
    return hypo
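
To make the fixed-column slicing concrete, here is a hypothetical HYPO71
summary line and the fields each slice extracts (values invented for
illustration):

line = '020918 0646 47.98 38-25.10  22-07.30  7.25'
# line[0:17]  -> '020918 0646 47.98'  parsed with '%y%m%d %H%M %S.%f'
# line[17:20] -> ' 38',  line[21:26] -> '25.10'  -> lat = 38 + 25.10/60
# line[26:30] -> '  22', line[31:36] -> '07.30'  -> lon = 22 + 07.30/60
# line[36:42] -> '  7.25'                        -> depth in km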
Example #3
def _parse_hypocenter_from_event(ev):
    hypo = AttribDict()
    hypo.latitude = ev.latitude
    hypo.longitude = ev.longitude
    hypo.depth = ev.depth
    hypo.origin_time = ev.utcdate
    hypo.evid = ev.event_id
    return hypo
Example #4
def _add_hypocenter(trace, hypo):
    if hypo is None:
        # Try to get hypocenter information from the SAC header
        try:
            evla = trace.stats.sac.evla
            evlo = trace.stats.sac.evlo
            evdp = trace.stats.sac.evdp
            begin = trace.stats.sac.b
        except AttributeError:
            return

        try:
            tori = trace.stats.sac.o
            origin_time = trace.stats.starttime + tori - begin
        except AttributeError:
            origin_time = None

        if origin_time is not None:
            # round origin_time to the nearest second
            # (adding 0.5 s before truncating avoids an invalid second=60
            # when rounding crosses a minute boundary)
            _evid_time = (origin_time + 0.5).replace(microsecond=0)
        else:
            # round starttime to the nearest minute
            _evid_time = (trace.stats.starttime + 30).replace(
                second=0, microsecond=0)

        hypo = AttribDict()
        hypo.origin_time = origin_time
        # use kevnm as the event id only if it is a non-empty code
        # without spaces; otherwise fall back to the rounded time
        kevnm = trace.stats.sac.get('kevnm', '')
        if kevnm and ' ' not in kevnm:
            hypo.evid = kevnm
        else:
            hypo.evid = _evid_time.strftime('%Y%m%d_%H%M%S')
        hypo.latitude = evla
        hypo.longitude = evlo
        hypo.depth = evdp
    trace.stats.hypo = hypo
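
A quick check of the rounding trick used above, with a hypothetical time
(requires an ObsPy version where UTCDateTime.replace is available):

from obspy import UTCDateTime
t = UTCDateTime('2002-12-23T12:47:58.700000')
print((t + 0.5).replace(microsecond=0).strftime('%Y%m%d_%H%M%S'))
# -> 20021223_124759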
Example #5
def _parse_hypo2000_hypo_line(line):
    word = line.split()
    hypo = AttribDict()
    timestr = ' '.join(word[0:3])
    hypo.origin_time = UTCDateTime(timestr)
    n = 3
    if word[n].isnumeric():
        # Check if word is integer
        # In this case the format should be: degrees and minutes
        latitude = float(word[n]) + float(word[n+1])/60.
        n += 2
    elif 'N' in word[n] or 'S' in word[n]:
        # Check if there is N or S in the string
        # In this case the format should be: degrees and minutes
        _word = word[n].replace('N', ' ').replace('S', ' ').split()
        latitude = float(_word[0]) + float(_word[1])/60.
        n += 1
    else:
        # Otherwise latitude should be in float format
        try:
            latitude = float(word[n])
        except ValueError as e:
            raise ValueError(
                'cannot read latitude: {}'.format(word[n])) from e
        n += 1
    hypo.latitude = latitude
    if word[n].isnumeric():
        # Check if word is integer
        # In this case the format should be: degrees and minutes
        longitude = float(word[n]) + float(word[n+1])/60.
        n += 2
    elif 'E' in word[n] or 'W' in word[n]:
        # Check if there is E or W in the string
        # In this case the format should be: degrees and minutes
        _word = word[n].replace('E', ' ').replace('W', ' ').split()
        longitude = float(_word[0]) + float(_word[1])/60.
        n += 1
    else:
        # Otherwise longitude should be in float format
        try:
            longitude = float(word[n])
        except ValueError as e:
            raise ValueError(
                'cannot read longitude: {}'.format(word[n])) from e
        n += 1
    hypo.longitude = longitude
    # depth is in km, according to the hypo2000 manual
    hypo.depth = float(word[n])
    return hypo
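
The three branches above correspond to three coordinate notations that may
appear in hypo2000 output; hypothetical field values (time tokens elided):

# '... 38 25.10 22 07.30  7.25'  -> integer degrees + decimal minutes
# '... 38N25.10 22E07.30  7.25'  -> hemisphere letter between degrees/minutes
# '... 38.4183 22.1217  7.25'    -> plain decimal degrees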
Example #6
File: io.py Project: preinh/RF
def read_stations(fname):
    """
    Read station positions from whitespace delimited file
    
    Example file:
    # station  lat  lon  elev
    STN  10.0  -50.0  160
    """
    ret = AttribDict()
    with open(fname) as f:
        for line in f:
            line = line.strip()
            # skip blank lines and comment lines
            if not line or line.startswith('#'):
                continue
            vals = line.split()
            ret[vals[0]] = AttribDict()
            ret[vals[0]].latitude = float(vals[1])
            ret[vals[0]].longitude = float(vals[2])
            ret[vals[0]].elevation = float(vals[3])
    return ret
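
A minimal usage sketch (file name and station values are hypothetical;
AttribDict allows attribute-style access to the returned entries):

# stations.txt:
#   # station  lat  lon  elev
#   STN  10.0  -50.0  160
stations = read_stations('stations.txt')
print(stations.STN.latitude)  # 10.0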
Example #7
File: core.py Project: petrrr/obspy
def _read_y(filename, headonly=False, **kwargs):  # @UnusedVariable
    """
    Reads a Nanometrics Y file and returns an ObsPy Stream object.

    .. warning::
        This function should NOT be called directly, it registers via the
        ObsPy :func:`~obspy.core.stream.read` function, call this instead.

    :type filename: str
    :param filename: Nanometrics Y file to be read.
    :type headonly: bool, optional
    :param headonly: If set to True, read only the head. This is most useful
        for scanning available data in huge (temporary) data sets.
    :rtype: :class:`~obspy.core.stream.Stream`
    :return: An ObsPy Stream object.

    .. rubric:: Example

    >>> from obspy import read
    >>> st = read("/path/to/YAYT_BHZ_20021223.124800")
    >>> st  # doctest: +ELLIPSIS
    <obspy.core.stream.Stream object at 0x...>
    >>> print(st)  # doctest: +ELLIPSIS
    1 Trace(s) in Stream:
    .AYT..BHZ | 2002-12-23T12:48:00.000100Z - ... | 100.0 Hz, 18000 samples
    """
    # The first tag in a Y-file must be the TAG_Y_FILE (0) tag. This must be
    # followed by the following tags, in any order:
    #   TAG_STATION_INFO (1)
    #   TAG_STATION_LOCATION (2)
    #   TAG_STATION_PARAMETERS (3)
    #   TAG_STATION_DATABASE (4)
    #   TAG_SERIES_INFO (5)
    #   TAG_SERIES_DATABASE (6)
    # The following tag is optional:
    #   TAG_STATION_RESPONSE (26)
    # The last tag in the file must be a TAG_DATA_INT32 (7) tag. This tag must
    # be followed by an array of LONG's. The number of entries in the array
    # must agree with what was described in the TAG_SERIES_INFO data.
    with open(filename, "rb") as fh:
        trace = Trace()
        trace.stats.y = AttribDict()
        count = -1
        while True:
            endian, tag_type, next_tag, _next_same = __parse_tag(fh)
            if tag_type == 1:
                # TAG_STATION_INFO
                # UCHAR Update[8]
                #   This field is only used internally for administrative
                #   purposes.  It should always be set to zeroes.
                # UCHAR Station[5] (BLANKPAD)
                #   Station is the five letter SEED format station
                #   identification.
                # UCHAR Location[2] (BLANKPAD)
                #   Location is the two letter SEED format location
                #   identification.
                # UCHAR Channel[3] (BLANKPAD)
                #   Channel is the three letter SEED format channel
                #   identification.
                # UCHAR NetworkID[51] (ASCIIZ)
                #   This is some descriptive text identifying the network.
                # UCHAR SiteName[61] (ASCIIZ)
                #   SiteName is some text identifying the site.
                # UCHAR Comment[31] (ASCIIZ)
                #   Comment is any comment for this station.
                # UCHAR SensorType[51] (ASCIIZ)
                #   SensorType is some text describing the type of sensor used
                #   at the station.
                # UCHAR DataFormat[7] (ASCIIZ)
                #   DataFormat is some text describing the data format recorded
                #   at the station.
                data = fh.read(next_tag)
                parts = _unpack_with_asciiz_and_decode(b"5s2s3s51z61z31z51z7z", data[8:])
                trace.stats.station = parts[0]
                trace.stats.location = parts[1]
                trace.stats.channel = parts[2]
                # extra
                params = AttribDict()
                params.network_id = parts[3]
                params.site_name = parts[4]
                params.comment = parts[5]
                params.sensor_type = parts[6]
                params.data_format = parts[7]
                trace.stats.y.tag_station_info = params
            elif tag_type == 2:
                # TAG_STATION_LOCATION
                # UCHAR Update[8]
                #   This field is only used internally for administrative
                #   purposes.  It should always be set to zeroes.
                # FLOAT Latitude
                #   Latitude in degrees of the location of the station. The
                #   latitude should be between -90 (South) and +90 (North).
                # FLOAT Longitude
                #   Longitude in degrees of the location of the station. The
                #   longitude should be between -180 (West) and +180 (East).
                # FLOAT Elevation
                #   Elevation in meters above sea level of the station.
                # FLOAT Depth
                #   Depth is the depth in meters of the sensor.
                # FLOAT Azimuth
                #   Azimuth of the sensor in degrees clockwise.
                # FLOAT Dip
                #   Dip is the dip of the sensor. 90 degrees is defined as
                #   vertical right way up.
                data = fh.read(next_tag)
                parts = _unpack_with_asciiz_and_decode(endian + b"ffffff", data[8:])
                params = AttribDict()
                params.latitude = parts[0]
                params.longitude = parts[1]
                params.elevation = parts[2]
                params.depth = parts[3]
                params.azimuth = parts[4]
                params.dip = parts[5]
                trace.stats.y.tag_station_location = params
            elif tag_type == 3:
                # TAG_STATION_PARAMETERS
                # UCHAR Update[16]
                #   This field is only used internally for administrative
                #   purposes.  It should always be set to zeroes.
                # REALTIME StartValidTime
                #   Time that the information in these records became valid.
                # REALTIME EndValidTime
                #   Time that the information in these records became invalid.
                # FLOAT Sensitivity
                #   Sensitivity of the sensor in nanometers per bit.
                # FLOAT SensFreq
                #   Frequency at which the sensitivity was measured.
                # FLOAT SampleRate
                #   This is the number of samples per second. This value can be
                #   less than 1.0 (e.g. 0.1).
                # FLOAT MaxClkDrift
                #   Maximum drift rate of the clock in seconds per sample.
                # UCHAR SensUnits[24] (ASCIIZ)
                #   Some text indicating the units in which the sensitivity was
                #   measured.
                # UCHAR CalibUnits[24] (ASCIIZ)
                #   Some text indicating the units in which calibration input
                #   was measured.
                # UCHAR ChanFlags[27] (BLANKPAD)
                #   Text indicating the channel flags according to the SEED
                #   definition.
                # UCHAR UpdateFlag
                #   This flag must be “N” or “U” according to the SEED
                #   definition.
                # UCHAR Filler[4]
                #   Filler Pads out the record to satisfy the alignment
                #   restrictions for reading data on a SPARC processor.
                data = fh.read(next_tag)
                parts = _unpack_with_asciiz_and_decode(endian + b"ddffff24z24z27sc4s", data[16:])
                trace.stats.sampling_rate = parts[4]
                # extra
                params = AttribDict()
                params.start_valid_time = parts[0]
                params.end_valid_time = parts[1]
                params.sensitivity = parts[2]
                params.sens_freq = parts[3]
                params.sample_rate = parts[4]
                params.max_clk_drift = parts[5]
                params.sens_units = parts[6]
                params.calib_units = parts[7]
                params.chan_flags = parts[8]
                params.update_flag = parts[9]
                trace.stats.y.tag_station_parameters = params
            elif tag_type == 4:
                # TAG_STATION_DATABASE
                # UCHAR Update[8]
                #   This field is only used internally for administrative
                #   purposes.  It should always be set to zeroes.
                # REALTIME LoadDate
                #   Date the information was loaded into the database.
                # UCHAR Key[16]
                #   Unique key that identifies this record in the database.
                data = fh.read(next_tag)
                parts = _unpack_with_asciiz_and_decode(endian + b"d16s", data[8:])
                params = AttribDict()
                params.load_date = parts[0]
                params.key = parts[1]
                trace.stats.y.tag_station_database = params
            elif tag_type == 5:
                # TAG_SERIES_INFO
                # UCHAR Update[16]
                #   This field is only used internally for administrative
                #   purposes.  It should always be set to zeroes.
                # REALTIME StartTime
                #   This is start time of the data in this series.
                # REALTIME EndTime
                #   This is end time of the data in this series.
                # ULONG NumSamples
                #   This is the number of samples of data in this series.
                # LONG DCOffset
                #   DCOffset is the DC offset of the data.
                # LONG MaxAmplitude
                #   MaxAmplitude is the maximum amplitude of the data.
                # LONG MinAmplitude
                #   MinAmplitude is the minimum amplitude of the data.
                # UCHAR Format[8] (ASCIIZ)
                #   This is the format of the data. This should always be
                #   “YFILE”.
                # UCHAR FormatVersion[8] (ASCIIZ)
                #   FormatVersion is the version of the format of the data.
                #   This should always be “5.0”
                data = fh.read(next_tag)
                parts = _unpack_with_asciiz_and_decode(endian + b"ddLlll8z8z", data[16:])
                trace.stats.starttime = UTCDateTime(parts[0])
                count = parts[2]
                # extra
                params = AttribDict()
                params.endtime = UTCDateTime(parts[1])
                params.num_samples = parts[2]
                params.dc_offset = parts[3]
                params.max_amplitude = parts[4]
                params.min_amplitude = parts[5]
                params.format = parts[6]
                params.format_version = parts[7]
                trace.stats.y.tag_series_info = params
            elif tag_type == 6:
                # TAG_SERIES_DATABASE
                # UCHAR Update[8]
                #   This field is only used internally for administrative
                #   purposes.  It should always be set to zeroes.
                # REALTIME LoadDate
                #   Date the information was loaded into the database.
                # UCHAR Key[16]
                #   Unique key that identifies this record in the database.
                data = fh.read(next_tag)
                parts = _unpack_with_asciiz_and_decode(endian + b"d16s", data[8:])
                params = AttribDict()
                params.load_date = parts[0]
                params.key = parts[1]
                trace.stats.y.tag_series_database = params
            elif tag_type == 26:
                # TAG_STATION_RESPONSE
                # UCHAR Update[8]
                #   This field is only used internally for administrative
                #   purposes.  It should always be set to zeroes.
                # UCHAR PathName[260]
                #  PathName is the full name of the file which contains the
                #  response information for this station.
                data = fh.read(next_tag)
                parts = _unpack_with_asciiz_and_decode(b"260s", data[8:])
                params = AttribDict()
                params.path_name = parts[0]
                trace.stats.y.tag_station_response = params
            elif tag_type == 7:
                # TAG_DATA_INT32
                trace.data = from_buffer(fh.read(np.dtype(np.int32).itemsize * count), dtype=np.int32)
                # break loop as TAG_DATA_INT32 should be the last tag in file
                break
            else:
                fh.seek(next_tag, 1)
    return Stream([trace])
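
As a minimal sketch of the tag walk above (reusing the module-private
__parse_tag helper and assuming the same tag layout), the tag sequence of a
Y-file can be listed without decoding any payloads:

def _list_y_tags(filename):
    tags = []
    with open(filename, 'rb') as fh:
        while True:
            _endian, tag_type, next_tag, _next_same = __parse_tag(fh)
            tags.append(tag_type)
            if tag_type == 7:
                # TAG_DATA_INT32 is always the last tag in the file
                break
            # skip this tag's payload and move on to the next tag header
            fh.seek(next_tag, 1)
    return tags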
Example #8
def _parse_qml(qml_file, evid=None):
    if qml_file is None:
        return None, None

    hypo = AttribDict()
    hypo.latitude = None
    hypo.longitude = None
    hypo.depth = None
    hypo.origin_time = None
    hypo.evid = None

    try:
        cat = read_events(qml_file)
    except Exception as err:
        logger.error(err)
        ssp_exit(1)

    if evid is not None:
        ev = [e for e in cat if evid in str(e.resource_id)][0]
    else:
        # just take the first event
        ev = cat[0]
    # See if there is a preferred origin...
    origin = ev.preferred_origin()
    # ...or just use the first one
    if origin is None:
        origin = ev.origins[0]
    hypo.origin_time = origin.time
    hypo.latitude = origin.latitude
    hypo.longitude = origin.longitude
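    # origin depth in QuakeML is in meters: convert to km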
    hypo.depth = origin.depth / 1000.
    hypo.evid = ev.resource_id.id.split('/')[-1]

    picks = []

    for pck in ev.picks:
        pick = Pick()
        pick.station = pck.waveform_id.station_code
        pick.network = pck.waveform_id.network_code
        pick.channel = pck.waveform_id.channel_code
        if pck.waveform_id.location_code is not None:
            pick.location = pck.waveform_id.location_code
        else:
            pick.location = ''
        if pck.onset == 'emergent':
            pick.flag = 'E'
        elif pck.onset == 'impulsive':
            pick.flag = 'I'
        if not pck.phase_hint:
            # ignore picks with no phase hint
            continue
        pick.phase = pck.phase_hint[0:1]
        if pck.polarity == 'positive':
            pick.polarity = 'U'
        elif pck.polarity == 'negative':
            pick.polarity = 'D'
        pick.time = pck.time
        picks.append(pick)

    return hypo, picks
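
A hedged usage sketch (file name and event id are hypothetical):

hypo, picks = _parse_qml('event.qml', evid='2002091806460')
print(hypo.origin_time, hypo.latitude, hypo.longitude, hypo.depth)
print('{} picks read'.format(len(picks)))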
Example #9
def _add_hypocenter(trace, hypo):
    # we need to lazy-import here, so that OBSPY_VERSION is defined
    from sourcespec.ssp_setup import OBSPY_VERSION
    if hypo is None:
        # Try to get hypocenter information from the SAC header
        try:
            evla = trace.stats.sac.evla
            evlo = trace.stats.sac.evlo
            evdp = trace.stats.sac.evdp
            begin = trace.stats.sac.b
        except AttributeError:
            return

        try:
            tori = trace.stats.sac.o
            origin_time = trace.stats.starttime + tori - begin
        except AttributeError:
            origin_time = None

        if origin_time is not None:
            # round origin_time to the nearest second
            # (adding 0.5 s before truncating avoids an invalid second=60
            # when rounding crosses a minute boundary)
            _rounded = origin_time + 0.5
            if OBSPY_VERSION > (1, 1, 1):
                # UTCDateTime objects will become immutable in future
                # versions of ObsPy
                _evid_time = _rounded.replace(microsecond=0)
            else:
                # For old versions, UTCDateTime objects are mutable
                _evid_time = UTCDateTime(_rounded)
                _evid_time.microsecond = 0
        else:
            # round starttime to the nearest minute
            _rounded = trace.stats.starttime + 30
            if OBSPY_VERSION > (1, 1, 1):
                # UTCDateTime objects will become immutable in future
                # versions of ObsPy
                _evid_time = _rounded.replace(second=0, microsecond=0)
            else:
                # For old versions, UTCDateTime objects are mutable
                _evid_time = UTCDateTime(_rounded)
                _evid_time.second = 0
                _evid_time.microsecond = 0

        hypo = AttribDict()
        hypo.origin_time = origin_time
        hypo.evid = _evid_time.strftime('%Y%m%d_%H%M%S')
        hypo.latitude = evla
        hypo.longitude = evlo
        hypo.depth = evdp
    trace.stats.hypo = hypo
    _, _, baz = gps2dist_azimuth(hypo.latitude, hypo.longitude,
                                 trace.stats.coords.latitude,
                                 trace.stats.coords.longitude)
    trace.stats.back_azimuth = baz
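
gps2dist_azimuth() returns (distance in m, azimuth from the first point to
the second, azimuth from the second back to the first); with the hypocenter
passed first and the station second, the third value is the station's back
azimuth toward the event. A quick check with hypothetical coordinates:

from obspy.geodetics import gps2dist_azimuth
dist, az, baz = gps2dist_azimuth(38.42, 22.12, 38.00, 22.50)  # hypo, station
print(baz)  # azimuth from the station back toward the hypocenter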
Example #10
def _add_paz_and_coords(trace, dataless, paz_dict=None):
    trace.stats.paz = None
    trace.stats.coords = None
    traceid = trace.get_id()
    time = trace.stats.starttime
    # We first look into the dataless dictionary, if available
    if isinstance(dataless, dict):
        for sp in dataless.values():
            # Check first if our traceid is in the dataless file
            if traceid not in str(sp):
                continue
            try:
                paz = AttribDict(sp.get_paz(traceid, time))
                coords = AttribDict(sp.get_coordinates(traceid, time))
            except SEEDParserException as err:
                logger.error('%s time: %s' % (err, str(time)))
    elif isinstance(dataless, Inventory):
        try:
            with warnings.catch_warnings(record=True) as warns:
                # get_sacpz() can issue warnings on more than one PAZ found,
                # so let's catch those warnings and log them properly
                sacpz = dataless.get_response(traceid, time).get_sacpz()
                for w in warns:
                    message = str(w.message)
                    logger.warning('%s: %s' % (traceid, message))
            attach_paz(trace, io.StringIO(sacpz))
            paz = trace.stats.paz
            coords = AttribDict(dataless.get_coordinates(traceid, time))
        except Exception as err:
            logger.error('%s traceid: %s time: %s' % (err, traceid, str(time)))
    try:
        trace.stats.paz = paz
        # elevation is in meters in the dataless
        coords.elevation /= 1000.
        trace.stats.coords = coords
    except Exception:
        pass
    # If we couldn't find any PAZ in the dataless dictionary,
    # we try to attach paz from the paz dictionary passed
    # as argument
    if trace.stats.paz is None and paz_dict is not None:
        # Look for traceid or for a generic paz
        net, sta, loc, chan = trace.id.split('.')
        ids = [
            trace.id, '.'.join(('__', '__', '__', '__')), '.'.join(
                (net, '__', '__', '__')), '.'.join((net, sta, '__', '__')),
            '.'.join((net, sta, loc, '__')), 'default'
        ]
        for id in ids:
            try:
                paz = paz_dict[id]
                trace.stats.paz = paz
            except KeyError:
                pass
    # If we're still out of luck,
    # we try to build the sensitivity from the
    # user2 and user3 header fields (ISNet format)
    if trace.stats.paz is None and trace.stats.format == 'ISNet':
        try:
            # instrument constants
            u2 = trace.stats.sac.user2
            u3 = trace.stats.sac.user3
            paz = AttribDict()
            paz.sensitivity = u3 / u2
            paz.poles = []
            paz.zeros = []
            paz.gain = 1
            trace.stats.paz = paz
        except AttributeError:
            pass
    # Still no paz? Antilles or IPOC format!
    if (trace.stats.paz is None and
        (trace.stats.format == 'Antilles' or trace.stats.format == 'IPOC')):
        paz = AttribDict()
        paz.sensitivity = 1
        paz.poles = []
        paz.zeros = []
        paz.gain = 1
        trace.stats.paz = paz
    # If we still don't have trace coordinates,
    # we try to get them from SAC header
    if trace.stats.coords is None:
        try:
            stla = trace.stats.sac.stla
            stlo = trace.stats.sac.stlo
            try:
                stel = trace.stats.sac.stel
                # elevation is in meters in SAC header:
                stel /= 1000.
            except AttributeError:
                stel = 0.
            coords = AttribDict()
            coords.elevation = stel
            coords.latitude = stla
            coords.longitude = stlo
            trace.stats.coords = coords
        except AttributeError:
            pass
    # Still no coords? Raise an exception
    if trace.stats.coords is None:
        raise Exception('%s: could not find coords for trace: skipping trace' %
                        traceid)
예제 #11
0
def _read_y(filename, headonly=False, **kwargs):  # @UnusedVariable
    """
    Reads a Nanometrics Y file and returns an ObsPy Stream object.

    .. warning::
        This function should NOT be called directly, it registers via the
        ObsPy :func:`~obspy.core.stream.read` function, call this instead.

    :type filename: str
    :param filename: Nanometrics Y file to be read.
    :type headonly: bool, optional
    :param headonly: If set to True, read only the head. This is most useful
        for scanning available data in huge (temporary) data sets.
    :rtype: :class:`~obspy.core.stream.Stream`
    :return: An ObsPy Stream object.

    .. rubric:: Example

    >>> from obspy import read
    >>> st = read("/path/to/YAYT_BHZ_20021223.124800")
    >>> st  # doctest: +ELLIPSIS
    <obspy.core.stream.Stream object at 0x...>
    >>> print(st)  # doctest: +ELLIPSIS
    1 Trace(s) in Stream:
    .AYT..BHZ | 2002-12-23T12:48:00.000100Z - ... | 100.0 Hz, 18000 samples
    """
    # The first tag in a Y-file must be the TAG_Y_FILE (0) tag. This must be
    # followed by the following tags, in any order:
    #   TAG_STATION_INFO (1)
    #   TAG_STATION_LOCATION (2)
    #   TAG_STATION_PARAMETERS (3)
    #   TAG_STATION_DATABASE (4)
    #   TAG_SERIES_INFO (5)
    #   TAG_SERIES_DATABASE (6)
    # The following tag is optional:
    #   TAG_STATION_RESPONSE (26)
    # The last tag in the file must be a TAG_DATA_INT32 (7) tag. This tag must
    # be followed by an array of LONG's. The number of entries in the array
    # must agree with what was described in the TAG_SERIES_INFO data.
    with open(filename, 'rb') as fh:
        trace = Trace()
        trace.stats.y = AttribDict()
        count = -1
        while True:
            endian, tag_type, next_tag, _next_same = _parse_tag(fh)
            if tag_type == 1:
                # TAG_STATION_INFO
                # UCHAR Update[8]
                #   This field is only used internally for administrative
                #   purposes.  It should always be set to zeroes.
                # UCHAR Station[5] (BLANKPAD)
                #   Station is the five letter SEED format station
                #   identification.
                # UCHAR Location[2] (BLANKPAD)
                #   Location is the two letter SEED format location
                #   identification.
                # UCHAR Channel[3] (BLANKPAD)
                #   Channel is the three letter SEED format channel
                #   identification.
                # UCHAR NetworkID[51] (ASCIIZ)
                #   This is some descriptive text identifying the network.
                # UCHAR SiteName[61] (ASCIIZ)
                #   SiteName is some text identifying the site.
                # UCHAR Comment[31] (ASCIIZ)
                #   Comment is any comment for this station.
                # UCHAR SensorType[51] (ASCIIZ)
                #   SensorType is some text describing the type of sensor used
                #   at the station.
                # UCHAR DataFormat[7] (ASCIIZ)
                #   DataFormat is some text describing the data format recorded
                #   at the station.
                data = fh.read(next_tag)
                parts = _unpack_with_asciiz_and_decode(b'5s2s3s51z61z31z51z7z',
                                                       data[8:])
                trace.stats.station = parts[0]
                trace.stats.location = parts[1]
                trace.stats.channel = parts[2]
                # extra
                params = AttribDict()
                params.network_id = parts[3]
                params.site_name = parts[4]
                params.comment = parts[5]
                params.sensor_type = parts[6]
                params.data_format = parts[7]
                trace.stats.y.tag_station_info = params
            elif tag_type == 2:
                # TAG_STATION_LOCATION
                # UCHAR Update[8]
                #   This field is only used internally for administrative
                #   purposes.  It should always be set to zeroes.
                # FLOAT Latitude
                #   Latitude in degrees of the location of the station. The
                #   latitude should be between -90 (South) and +90 (North).
                # FLOAT Longitude
                #   Longitude in degrees of the location of the station. The
                #   longitude should be between -180 (West) and +180 (East).
                # FLOAT Elevation
                #   Elevation in meters above sea level of the station.
                # FLOAT Depth
                #   Depth is the depth in meters of the sensor.
                # FLOAT Azimuth
                #   Azimuth of the sensor in degrees clockwise.
                # FLOAT Dip
                #   Dip is the dip of the sensor. 90 degrees is defined as
                #   vertical right way up.
                data = fh.read(next_tag)
                parts = _unpack_with_asciiz_and_decode(endian + b'ffffff',
                                                       data[8:])
                params = AttribDict()
                params.latitude = parts[0]
                params.longitude = parts[1]
                params.elevation = parts[2]
                params.depth = parts[3]
                params.azimuth = parts[4]
                params.dip = parts[5]
                trace.stats.y.tag_station_location = params
            elif tag_type == 3:
                # TAG_STATION_PARAMETERS
                # UCHAR Update[16]
                #   This field is only used internally for administrative
                #   purposes.  It should always be set to zeroes.
                # REALTIME StartValidTime
                #   Time that the information in these records became valid.
                # REALTIME EndValidTime
                #   Time that the information in these records became invalid.
                # FLOAT Sensitivity
                #   Sensitivity of the sensor in nanometers per bit.
                # FLOAT SensFreq
                #   Frequency at which the sensitivity was measured.
                # FLOAT SampleRate
                #   This is the number of samples per second. This value can be
                #   less than 1.0 (e.g. 0.1).
                # FLOAT MaxClkDrift
                #   Maximum drift rate of the clock in seconds per sample.
                # UCHAR SensUnits[24] (ASCIIZ)
                #   Some text indicating the units in which the sensitivity was
                #   measured.
                # UCHAR CalibUnits[24] (ASCIIZ)
                #   Some text indicating the units in which calibration input
                #   was measured.
                # UCHAR ChanFlags[27] (BLANKPAD)
                #   Text indicating the channel flags according to the SEED
                #   definition.
                # UCHAR UpdateFlag
                #   This flag must be “N” or “U” according to the SEED
                #   definition.
                # UCHAR Filler[4]
                #   Filler Pads out the record to satisfy the alignment
                #   restrictions for reading data on a SPARC processor.
                data = fh.read(next_tag)
                parts = _unpack_with_asciiz_and_decode(
                    endian + b'ddffff24z24z27sc4s', data[16:])
                trace.stats.sampling_rate = parts[4]
                # extra
                params = AttribDict()
                params.start_valid_time = parts[0]
                params.end_valid_time = parts[1]
                params.sensitivity = parts[2]
                params.sens_freq = parts[3]
                params.sample_rate = parts[4]
                params.max_clk_drift = parts[5]
                params.sens_units = parts[6]
                params.calib_units = parts[7]
                params.chan_flags = parts[8]
                params.update_flag = parts[9]
                trace.stats.y.tag_station_parameters = params
            elif tag_type == 4:
                # TAG_STATION_DATABASE
                # UCHAR Update[8]
                #   This field is only used internally for administrative
                #   purposes.  It should always be set to zeroes.
                # REALTIME LoadDate
                #   Date the information was loaded into the database.
                # UCHAR Key[16]
                #   Unique key that identifies this record in the database.
                data = fh.read(next_tag)
                parts = _unpack_with_asciiz_and_decode(endian + b'd16s',
                                                       data[8:])
                params = AttribDict()
                params.load_date = parts[0]
                params.key = parts[1]
                trace.stats.y.tag_station_database = params
            elif tag_type == 5:
                # TAG_SERIES_INFO
                # UCHAR Update[16]
                #   This field is only used internally for administrative
                #   purposes.  It should always be set to zeroes.
                # REALTIME StartTime
                #   This is start time of the data in this series.
                # REALTIME EndTime
                #   This is end time of the data in this series.
                # ULONG NumSamples
                #   This is the number of samples of data in this series.
                # LONG DCOffset
                #   DCOffset is the DC offset of the data.
                # LONG MaxAmplitude
                #   MaxAmplitude is the maximum amplitude of the data.
                # LONG MinAmplitude
                #   MinAmplitude is the minimum amplitude of the data.
                # UCHAR Format[8] (ASCIIZ)
                #   This is the format of the data. This should always be
                #   “YFILE”.
                # UCHAR FormatVersion[8] (ASCIIZ)
                #   FormatVersion is the version of the format of the data.
                #   This should always be “5.0”
                data = fh.read(next_tag)
                parts = _unpack_with_asciiz_and_decode(endian + b'ddLlll8z8z',
                                                       data[16:])
                trace.stats.starttime = UTCDateTime(parts[0])
                count = parts[2]
                # extra
                params = AttribDict()
                params.endtime = UTCDateTime(parts[1])
                params.num_samples = parts[2]
                params.dc_offset = parts[3]
                params.max_amplitude = parts[4]
                params.min_amplitude = parts[5]
                params.format = parts[6]
                params.format_version = parts[7]
                trace.stats.y.tag_series_info = params
            elif tag_type == 6:
                # TAG_SERIES_DATABASE
                # UCHAR Update[8]
                #   This field is only used internally for administrative
                #   purposes.  It should always be set to zeroes.
                # REALTIME LoadDate
                #   Date the information was loaded into the database.
                # UCHAR Key[16]
                #   Unique key that identifies this record in the database.
                data = fh.read(next_tag)
                parts = _unpack_with_asciiz_and_decode(endian + b'd16s',
                                                       data[8:])
                params = AttribDict()
                params.load_date = parts[0]
                params.key = parts[1]
                trace.stats.y.tag_series_database = params
            elif tag_type == 26:
                # TAG_STATION_RESPONSE
                # UCHAR Update[8]
                #   This field is only used internally for administrative
                #   purposes.  It should always be set to zeroes.
                # UCHAR PathName[260]
                #  PathName is the full name of the file which contains the
                #  response information for this station.
                data = fh.read(next_tag)
                parts = _unpack_with_asciiz_and_decode(b'260s', data[8:])
                params = AttribDict()
                params.path_name = parts[0]
                trace.stats.y.tag_station_response = params
            elif tag_type == 7:
                # TAG_DATA_INT32
                trace.data = from_buffer(fh.read(
                    np.dtype(np.int32).itemsize * count),
                                         dtype=np.int32)
                # break loop as TAG_DATA_INT32 should be the last tag in file
                break
            else:
                fh.seek(next_tag, 1)
    return Stream([trace])
Example #12
def _add_paz_and_coords(trace, metadata, paz_dict, config):
    traceid = trace.get_id()
    # If this traceid is already known to be skipped, raise right away
    # (the caller is expected to catch and silently ignore this exception)
    if traceid in _add_paz_and_coords.skipped:
        raise Exception()
    trace.stats.paz = None
    trace.stats.coords = None
    time = trace.stats.starttime
    # We first check whether metadata is a dataless dictionary
    if isinstance(metadata, dict):
        for sp in metadata.values():
            # Check first if our traceid is in the dataless file
            if traceid not in str(sp):
                continue
            try:
                paz = AttribDict(sp.get_paz(traceid, time))
                coords = AttribDict(sp.get_coordinates(traceid, time))
            except SEEDParserException as err:
                logger.error('%s time: %s' % (err, str(time)))
    elif isinstance(metadata, Inventory):
        try:
            with warnings.catch_warnings(record=True) as warns:
                # get_sacpz() can issue warnings on more than one PAZ found,
                # so let's catch those warnings and log them properly
                sacpz = metadata.get_response(traceid, time).get_sacpz()
                for w in warns:
                    message = str(w.message)
                    logger.warning('%s: %s' % (traceid, message))
            attach_paz(trace, io.StringIO(sacpz))
            paz = trace.stats.paz
            coords = AttribDict(metadata.get_coordinates(traceid, time))
        except Exception as err:
            logger.error('%s traceid: %s time: %s' % (err, traceid, str(time)))
    try:
        trace.stats.paz = paz
        # elevation is in meters
        coords.elevation /= 1000.
        trace.stats.coords = coords
    except Exception:
        pass
    # If we couldn't find any PAZ in the dataless dictionary
    # or in the Inventory, we try to attach paz from a paz dictionary
    if trace.stats.paz is None and paz_dict is not None:
        # Look for traceid or for a generic paz
        net, sta, loc, chan = trace.id.split('.')
        ids = [
            trace.id,
            '.'.join(('__', '__', '__', '__')),
            '.'.join((net, '__', '__', '__')),
            '.'.join((net, sta, '__', '__')),
            '.'.join((net, sta, loc, '__')),
            'default'
        ]
        for id in ids:
            try:
                paz = paz_dict[id]
                trace.stats.paz = paz
            except KeyError:
                pass
    # If a "sensitivity" config option is provided, override the paz computed
    # from metadata or paz_dict
    if config.sensitivity is not None:
        # instrument constants
        paz = AttribDict()
        paz.sensitivity = _compute_sensitivity(trace, config)
        paz.poles = []
        paz.zeros = []
        paz.gain = 1
        trace.stats.paz = paz
    # If we still don't have trace coordinates,
    # we try to get them from SAC header
    if trace.stats.coords is None:
        try:
            stla = trace.stats.sac.stla
            stlo = trace.stats.sac.stlo
            try:
                stel = trace.stats.sac.stel
                # elevation is in meters in SAC header:
                stel /= 1000.
            except AttributeError:
                stel = 0.
            coords = AttribDict()
            coords.elevation = stel
            coords.latitude = stla
            coords.longitude = stlo
            trace.stats.coords = coords
        except AttributeError:
            pass
    # Still no coords? Raise an exception
    if trace.stats.coords is None:
        _add_paz_and_coords.skipped.append(traceid)
        raise Exception(
            '%s: could not find coords for trace: skipping trace' % traceid)
    if trace.stats.coords.latitude == trace.stats.coords.longitude == 0:
        logger.warning(
            '{}: trace has latitude and longitude equal to zero!'.format(
                traceid))
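
The skipped list used at the top of this function is a function attribute; it
has to be initialized once after the definition, presumably along these lines
in the surrounding module:

_add_paz_and_coords.skipped = []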