Example #1
def attach_new_origin(
    old_event: Event,
    new_event: Event,
    new_origin: Origin,
    preferred: bool,
    index: Optional[int] = None,
) -> Event:
    """
    Attach a new origin to an existing event object.

    Parameters
    ----------
    old_event : obspy.core.event.Event
        The old event that will receive the new origin
    new_event : obspy.core.event.Event
        The new event that contains the origin, needed for merging picks
        that may not exist in old_event
    new_origin : obspy.core.event.Origin
        The new origin that will be attached to old_event
    preferred : bool
        If True mark the new origin as the preferred_origin
    index : int or None
        The index in old_event.origins that new_origin will overwrite; if
        None, append new_origin to old_event.origins

    Returns
    -------
    obspy.core.event.Event
        The modified old_event (changed in place and also returned)
    """
    # make sure all the picks/amplitudes in new_event are also in old_event
    merge_events(old_event, new_event, delete_old=False)
    # point the arrivals in the new origin at the old picks
    _associate_picks(old_event, new_event, new_origin)
    # append the origin
    if index is not None:  # if this origin is to replace another
        try:
            old_ori = old_event.origins[index]
        except IndexError:
            msg = ("%d is not valid for an origin list of length %d") % (
                index,
                len(old_event.origins),
            )
            msg += " appending new origin to end of list"
            warnings.warn(msg)
            old_event.origins.append(new_origin)
        else:
            # set resource id and creation info
            new_origin.resource_id = old_ori.resource_id
            new_origin.creation_info = old_ori.creation_info
            old_event.origins[index] = new_origin
    else:
        old_event.origins.append(new_origin)
    # bump origin creation info
    bump_creation_version(new_origin)
    # set preferred
    if preferred:
        old_event.preferred_origin_id = new_origin.resource_id
    validate_catalog(old_event)
    return old_event
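
The helper calls above (merge_events, _associate_picks, bump_creation_version, validate_catalog) come from this snippet's own module; the replace-and-mark-preferred core is plain obspy. A minimal sketch, with placeholder coordinates:

from obspy.core.event import Event, Origin

event = Event(origins=[Origin(latitude=35.0, longitude=-97.0, depth=5000.0)])
new_origin = Origin(latitude=35.01, longitude=-97.02, depth=4800.0)
# reuse the old origin's resource_id so external references stay valid
new_origin.resource_id = event.origins[0].resource_id
event.origins[0] = new_origin
event.preferred_origin_id = new_origin.resource_id
print(event.preferred_origin().latitude)  # 35.01
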
Example #2
def set_preferred_values(event: Event):
    """ set the preferred values to the last in the list if they are not
    defined """
    if not event.preferred_origin_id and len(event.origins):
        event.preferred_origin_id = event.origins[-1].resource_id
    if not event.preferred_magnitude_id and len(event.magnitudes):
        event.preferred_magnitude_id = event.magnitudes[-1].resource_id
    if not event.preferred_focal_mechanism_id and len(event.focal_mechanisms):
        focal_mech_id = event.focal_mechanisms[-1].resource_id
        event.preferred_focal_mechanism_id = focal_mech_id
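
A quick usage sketch, assuming the function above is in scope:

from obspy.core.event import Event, Magnitude, Origin

event = Event(origins=[Origin(), Origin()], magnitudes=[Magnitude(mag=2.1)])
set_preferred_values(event)
assert event.preferred_origin() is event.origins[-1]
assert event.preferred_magnitude() is event.magnitudes[-1]
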
Example #3
File: core.py Project: zurgeg/obspy
 def _deserialize(self, zmap_str):
     catalog = Catalog()
     for row in zmap_str.split('\n'):
         if len(row) == 0:
             continue
         origin = Origin()
         event = Event(origins=[origin])
         event.preferred_origin_id = origin.resource_id.id
         # Begin value extraction
         columns = row.split('\t', 13)[:13]  # ignore extra columns
         values = dict(zip(_STD_ZMAP_COLUMNS + _EXT_ZMAP_COLUMNS, columns))
         # Extract origin
         origin.longitude = self._str2num(values.get('lon'))
         origin.latitude = self._str2num(values.get('lat'))
         depth = self._str2num(values.get('depth'))
         if depth is not None:
             origin.depth = depth * 1000.0
         z_err = self._str2num(values.get('z_err'))
         if z_err is not None:
             origin.depth_errors.uncertainty = z_err * 1000.0
         h_err = self._str2num(values.get('h_err'))
         if h_err is not None:
             ou = OriginUncertainty()
             ou.horizontal_uncertainty = h_err
             ou.preferred_description = 'horizontal uncertainty'
             origin.origin_uncertainty = ou
         year = self._str2num(values.get('year'))
         if year is not None:
             t_fields = ['year', 'month', 'day', 'hour', 'minute', 'second']
             comps = [self._str2num(values.get(f)) for f in t_fields]
             if year % 1 != 0:
                 origin.time = self._decyear2utc(year)
             elif any(v > 0 for v in comps[1:]):
                 # no seconds involved
                 if len(comps) < 6:
                     utc_args = [int(v) for v in comps if v is not None]
                 # we also have to handle seconds
                 else:
                     utc_args = [
                         int(v) if v is not None else 0 for v in comps[:-1]
                     ]
                     # just leave float seconds as is
                     utc_args.append(comps[-1])
                 origin.time = UTCDateTime(*utc_args)
         mag = self._str2num(values.get('mag'))
         # Extract magnitude
         if mag is not None:
             magnitude = Magnitude(mag=mag)
             m_err = self._str2num(values.get('m_err'))
             magnitude.mag_errors.uncertainty = m_err
             event.magnitudes.append(magnitude)
             event.preferred_magnitude_id = magnitude.resource_id.id
         event.scope_resource_ids()
         catalog.append(event)
     return catalog
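
The reusable pattern in this reader is assigning preferred_origin_id from origin.resource_id.id before the origin is even populated. A minimal sketch without the ZMAP plumbing (coordinates are placeholders):

from obspy.core.event import Catalog, Event, Origin

origin = Origin(latitude=47.37, longitude=8.54, depth=7000.0)
event = Event(origins=[origin])
# the .id string and the ResourceIdentifier object are interchangeable here
event.preferred_origin_id = origin.resource_id.id
catalog = Catalog(events=[event])
assert catalog[0].preferred_origin() is origin
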
Example #4
File: core.py Project: Brtle/obspy
 def _deserialize(self, zmap_str):
     catalog = Catalog()
     for row in zmap_str.split('\n'):
         if len(row) == 0:
             continue
         origin = Origin()
         event = Event(origins=[origin])
         event.preferred_origin_id = origin.resource_id.id
         # Begin value extraction
         columns = row.split('\t', 13)[:13]  # ignore extra columns
         values = dict(zip(_STD_ZMAP_COLUMNS + _EXT_ZMAP_COLUMNS, columns))
         # Extract origin
         origin.longitude = self._str2num(values.get('lon'))
         origin.latitude = self._str2num(values.get('lat'))
         depth = self._str2num(values.get('depth'))
         if depth is not None:
             origin.depth = depth * 1000.0
         z_err = self._str2num(values.get('z_err'))
         if z_err is not None:
             origin.depth_errors.uncertainty = z_err * 1000.0
         h_err = self._str2num(values.get('h_err'))
         if h_err is not None:
             ou = OriginUncertainty()
             ou.horizontal_uncertainty = h_err
             ou.preferred_description = 'horizontal uncertainty'
             origin.origin_uncertainty = ou
         year = self._str2num(values.get('year'))
         if year is not None:
             t_fields = ['year', 'month', 'day', 'hour', 'minute', 'second']
             comps = [self._str2num(values.get(f)) for f in t_fields]
             if year % 1 != 0:
                 origin.time = self._decyear2utc(year)
             elif any(v > 0 for v in comps[1:]):
                 # no seconds involved
                 if len(comps) < 6:
                     utc_args = [int(v) for v in comps if v is not None]
                 # we also have to handle seconds
                 else:
                     utc_args = [int(v) if v is not None else 0
                                 for v in comps[:-1]]
                     # just leave float seconds as is
                     utc_args.append(comps[-1])
                 origin.time = UTCDateTime(*utc_args)
         mag = self._str2num(values.get('mag'))
         # Extract magnitude
         if mag is not None:
             magnitude = Magnitude(mag=mag)
             m_err = self._str2num(values.get('m_err'))
             magnitude.mag_errors.uncertainty = m_err
             event.magnitudes.append(magnitude)
             event.preferred_magnitude_id = magnitude.resource_id.id
         event.scope_resource_ids()
         catalog.append(event)
     return catalog
Example #5
def ORNL_events_to_cat(ornl_file):
    """Make Catalog from ORNL locations"""
    cat = Catalog()
    loc_df = pd.read_csv(ornl_file, infer_datetime_format=True)
    loc_df = loc_df.set_index('event_datetime')
    eid = 0
    for dt, row in loc_df.iterrows():
        ot = UTCDateTime(dt)
        hmc_east = row['x(m)']
        hmc_north = row['y(m)']
        hmc_elev = row['z(m)']
        errX = row['error_x (m)']
        errY = row['error_y (m)']
        errZ = row['error_z (m)']
        rms = row['rms (millisecond)']
        converter = SURF_converter()
        lon, lat, elev = converter.to_lonlat((hmc_east, hmc_north,
                                              hmc_elev))
        o = Origin(time=ot, latitude=lat, longitude=lon, depth=130 - elev)
        o.origin_uncertainty = OriginUncertainty()
        o.quality = OriginQuality()
        ou = o.origin_uncertainty
        oq = o.quality
        ou.max_horizontal_uncertainty = np.max([errX, errY])
        ou.min_horizontal_uncertainty = np.min([errX, errY])
        o.depth_errors.uncertainty = errZ
        oq.standard_error = rms * 1e-3  # rms column is in ms; standard_error is in s
        extra = AttribDict({
            'hmc_east': {
                'value': hmc_east,
                'namespace': 'smi:local/hmc'
            },
            'hmc_north': {
                'value': hmc_north,
                'namespace': 'smi:local/hmc'
            },
            'hmc_elev': {
                'value': hmc_elev,
                'namespace': 'smi:local/hmc'
            },
            'hmc_eid': {
                'value': eid,
                'namespace': 'smi:local/hmc'
            }
        })
        o.extra = extra
        rid = ResourceIdentifier(id=ot.strftime('%Y%m%d%H%M%S%f'))
        # Dummy magnitude of 1. for all events until further notice
        mag = Magnitude(mag=1., mag_errors=QuantityError(uncertainty=1.))
        ev = Event(origins=[o], magnitudes=[mag], resource_id=rid)
        ev.preferred_origin_id = o.resource_id.id
        cat.events.append(ev)
        eid += 1
    return cat
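
The extra AttribDict used above is obspy's documented hook for carrying non-QuakeML fields such as the HMC coordinates. A hedged sketch of the mechanism on its own (file name, namespace, and values are placeholders):

from obspy.core.event import Catalog, Event, Origin
from obspy.core.util import AttribDict

ns = 'smi:local/hmc'
origin = Origin(latitude=44.35, longitude=-103.75, depth=100.0)
origin.extra = AttribDict({'hmc_east': {'value': 812.4, 'namespace': ns}})
event = Event(origins=[origin])
event.preferred_origin_id = origin.resource_id.id
# extras survive a QuakeML round trip when the namespace is mapped
Catalog(events=[event]).write('hmc_events.xml', format='QUAKEML',
                              nsmap={'hmc': ns})
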
Example #6
def read_header_line(string_line):

    new_event = Event()
    line = string_line

    param_event = line.split()[1:]

    ### check if line has the required number of arguments

    if len(param_event) != 14:
        return new_event

    ### Get parameters

    year, month, day = [int(x) for x in param_event[0:3]]
    hour, minu = [int(x) for x in param_event[3:5]]
    sec = float(param_event[5])
    if sec >= 60:
        sec = 59.999
    lat, lon, z = [float(x) for x in param_event[6:9]]
    mag = float(param_event[9])
    errh, errz, rms = [float(x) for x in param_event[10:13]]

    _time = UTCDateTime(year, month, day, hour, minu, sec)
    _origin_quality = OriginQuality(standard_error=rms)

    # TODO: change what follows to handle origins with no error estimates

    origin = Origin(time=_time,
                    longitude=lon,
                    latitude=lat,
                    depth=z,
                    longitude_errors=QuantityError(uncertainty=errh),
                    latitude_errors=QuantityError(uncertainty=errh),
                    depth_errors=QuantityError(uncertainty=errz),
                    quality=_origin_quality)

    magnitude = Magnitude(mag=mag, origin_id=origin.resource_id)

    ### Return

    new_event.origins.append(origin)
    new_event.magnitudes.append(magnitude)
    new_event.preferred_origin_id = origin.resource_id
    new_event.preferred_magnitude_id = magnitude.resource_id

    return new_event
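
A usage sketch with a made-up header line (the first token is skipped; the 14 fields that follow are year, month, day, hour, minute, second, lat, lon, depth, mag, errh, errz, rms, plus one trailing field the parser ignores). Assumes the function above and its obspy imports are in scope:

line = ("EVENT 2021 03 14 15 09 26.53 "
        "46.20 7.86 8.4 1.7 0.3 0.5 0.12 0")
event = read_header_line(line)
print(event.preferred_origin_id)  # resolves to the parsed origin
print(event.magnitudes[0].mag)    # 1.7
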
Example #7
 def _deserialize(self, zmap_str):
     catalog = Catalog()
     for row in zmap_str.split("\n"):
         if len(row) == 0:
             continue
         origin = Origin()
         event = Event(origins=[origin])
         event.preferred_origin_id = origin.resource_id.id
         # Begin value extraction
         columns = row.split("\t", 13)[:13]  # ignore extra columns
         values = dict(zip(_STD_ZMAP_COLUMNS + _EXT_ZMAP_COLUMNS, columns))
         # Extract origin
         origin.longitude = self._str2num(values.get("lon"))
         origin.latitude = self._str2num(values.get("lat"))
         depth = self._str2num(values.get("depth"))
         if depth is not None:
             origin.depth = depth * 1000.0
         z_err = self._str2num(values.get("z_err"))
         if z_err is not None:
             origin.depth_errors.uncertainty = z_err * 1000.0
         h_err = self._str2num(values.get("h_err"))
         if h_err is not None:
             ou = OriginUncertainty()
             ou.horizontal_uncertainty = h_err
             ou.preferred_description = "horizontal uncertainty"
             origin.origin_uncertainty = ou
         year = self._str2num(values.get("year"))
         if year is not None:
             t_fields = ["year", "month", "day", "hour", "minute", "second"]
             comps = [self._str2num(values.get(f)) for f in t_fields]
             if year % 1 != 0:
                 origin.time = self._decyear2utc(year)
             elif any(v > 0 for v in comps[1:]):
                 utc_args = [int(v) for v in comps if v is not None]
                 origin.time = UTCDateTime(*utc_args)
         mag = self._str2num(values.get("mag"))
         # Extract magnitude
         if mag is not None:
             magnitude = Magnitude(mag=mag)
             m_err = self._str2num(values.get("m_err"))
             magnitude.mag_errors.uncertainty = m_err
             event.magnitudes.append(magnitude)
             event.preferred_magnitude_id = magnitude.resource_id.id
         catalog.append(event)
     return catalog
Example #8
    def _parse_event(self, first_line):
        """
        Parse an event.

        :type first_line: str
        :param first_line: First line of an event block, which contains
            the event id.
        :rtype: :class:`~obspy.core.event.event.Event`
        :return: The parsed event or None.
        """
        event_id = first_line[5:].strip()
        # Skip event without id
        if not event_id:
            self._warn('Missing event id')
            return None

        event = Event()

        origin, origin_res_id = self._parse_origin(event)
        # Skip event without origin
        if not origin:
            return None

        line = self._skip_empty_lines()

        self._parse_region_name(line, event)
        self._parse_arrivals(event, origin, origin_res_id)

        # Origin ResourceIdentifier should be set at the end, when
        # Arrivals are already set.
        origin.resource_id = origin_res_id
        event.origins.append(origin)

        event.preferred_origin_id = origin.resource_id.id

        # Must be done after the origin parsing
        event.creation_info = self._get_creation_info()

        public_id = "event/%s" % event_id
        event.resource_id = self._get_res_id(public_id)

        event.scope_resource_ids()

        return event
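
This parser ends with Event.scope_resource_ids() (available in obspy >= 1.3), which rewrites the ids of child objects so they are scoped under the event id and updates internal references such as preferred_origin_id. A small sketch of the effect:

from obspy.core.event import Event, Origin, ResourceIdentifier

event = Event(resource_id=ResourceIdentifier("smi:local/event/abc123"),
              origins=[Origin()])
event.preferred_origin_id = event.origins[0].resource_id.id
event.scope_resource_ids()
print(event.origins[0].resource_id.id)  # now prefixed with the event id
assert event.preferred_origin() is event.origins[0]
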
Example #9
    def _parse_event(self, first_line):
        """
        Parse an event.

        :type first_line: str
        :param first_line: First line of an event block, which contains
            the event id.
        :rtype: :class:`~obspy.core.event.event.Event`
        :return: The parsed event or None.
        """
        event_id = first_line[5:].strip()
        # Skip event without id
        if not event_id:
            self._warn('Missing event id')
            return None

        event = Event()

        origin, origin_res_id = self._parse_origin(event)
        # Skip event without origin
        if not origin:
            return None

        line = self._skip_empty_lines()

        self._parse_region_name(line, event)
        self._parse_arrivals(event, origin, origin_res_id)

        # Origin ResourceIdentifier should be set at the end, when
        # Arrivals are already set.
        origin.resource_id = origin_res_id
        event.origins.append(origin)

        event.preferred_origin_id = origin.resource_id.id

        # Must be done after the origin parsing
        event.creation_info = self._get_creation_info()

        public_id = "event/%s" % event_id
        event.resource_id = self._get_res_id(public_id)

        event.scope_resource_ids()

        return event
Example #10
def outputOBSPY(hp, event=None, only_fm_picks=False):
    """
    Make an Event that includes the current focal mechanism information
    from HASH.

    Use the 'only_fm_picks' flag to include only the picks HASH used for the
    FocalMechanism. This flag will replace the 'picks' and 'arrivals' lists
    of existing events with new ones.

    Inputs
    -------
    hp : hashpy.HashPype instance

    event : obspy.core.event.Event

    only_fm_picks : bool, whether to overwrite the picks/arrivals lists

    Returns
    -------
    obspy.core.event.Event
        A new Event if no event was input, otherwise the existing event
        with the FocalMechanism(s) added
    """
    # Returns new (or updates existing) Event with HASH solution
    n = hp.npol
    if event is None:
        event = Event(focal_mechanisms=[], picks=[], origins=[])
        origin = Origin(arrivals=[])
        origin.time = UTCDateTime(hp.tstamp)
        origin.latitude = hp.qlat
        origin.longitude = hp.qlon
        origin.depth = hp.qdep
        origin.creation_info = CreationInfo(version=hp.icusp)
        origin.resource_id = ResourceIdentifier('smi:hash/Origin/{0}'.format(
            hp.icusp))
        for _i in range(n):
            p = Pick()
            p.creation_info = CreationInfo(version=hp.arid[_i])
            p.resource_id = ResourceIdentifier('smi:nsl/Pick/{0}'.format(
                p.creation_info.version))
            p.waveform_id = WaveformStreamID(network_code=hp.snet[_i],
                                             station_code=hp.sname[_i],
                                             channel_code=hp.scomp[_i])
            if hp.p_pol[_i] > 0:
                p.polarity = 'positive'
            else:
                p.polarity = 'negative'
            a = Arrival()
            a.creation_info = CreationInfo(version=hp.arid[_i])
            a.resource_id = ResourceIdentifier('smi:nsl/Arrival/{0}'.format(
                p.creation_info.version))
            a.azimuth = hp.p_azi_mc[_i, 0]
            a.takeoff_angle = 180. - hp.p_the_mc[_i, 0]
            a.pick_id = p.resource_id
            origin.arrivals.append(a)
            event.picks.append(p)
        event.origins.append(origin)
        event.preferred_origin_id = origin.resource_id.id
    else:  # just update the changes
        origin = event.preferred_origin()
        picks = []
        arrivals = []
        for _i in range(n):
            ind = hp.p_index[_i]
            a = origin.arrivals[ind]
            p = a.pick_id.get_referred_object()
            a.takeoff_angle = hp.p_the_mc[_i, 0]
            picks.append(p)
            arrivals.append(a)
        if only_fm_picks:
            origin.arrivals = arrivals
            event.picks = picks
    # Use me double couple calculator and populate planes/axes etc
    x = hp._best_quality_index
    # Put all the mechanisms into the 'focal_mechanisms' list, mark "best" as preferred
    for s in range(hp.nmult):
        dc = DoubleCouple([hp.str_avg[s], hp.dip_avg[s], hp.rak_avg[s]])
        ax = dc.axis
        focal_mech = FocalMechanism()
        focal_mech.creation_info = CreationInfo(creation_time=UTCDateTime(),
                                                author=hp.author)
        focal_mech.triggering_origin_id = origin.resource_id
        focal_mech.resource_id = ResourceIdentifier(
            'smi:hash/FocalMechanism/{0}/{1}'.format(hp.icusp, s + 1))
        focal_mech.method_id = ResourceIdentifier('HASH')
        focal_mech.nodal_planes = NodalPlanes()
        focal_mech.nodal_planes.nodal_plane_1 = NodalPlane(*dc.plane1)
        focal_mech.nodal_planes.nodal_plane_2 = NodalPlane(*dc.plane2)
        focal_mech.principal_axes = PrincipalAxes()
        focal_mech.principal_axes.t_axis = Axis(azimuth=ax['T']['azimuth'],
                                                plunge=ax['T']['dip'])
        focal_mech.principal_axes.p_axis = Axis(azimuth=ax['P']['azimuth'],
                                                plunge=ax['P']['dip'])
        focal_mech.station_polarity_count = n
        focal_mech.azimuthal_gap = hp.magap
        focal_mech.misfit = hp.mfrac[s]
        focal_mech.station_distribution_ratio = hp.stdr[s]
        focal_mech.comments.append(
            Comment(
                hp.qual[s],
                resource_id=ResourceIdentifier(
                    focal_mech.resource_id.id + '/comment/quality')))
        #----------------------------------------
        event.focal_mechanisms.append(focal_mech)
        if s == x:
            event.preferred_focal_mechanism_id = focal_mech.resource_id.id
    return event
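
Stripped of the HASH-specific plumbing, the preferred-focal-mechanism bookkeeping reduces to the following sketch (all numbers are placeholders):

from obspy.core.event import Event, FocalMechanism, NodalPlane, NodalPlanes

fm = FocalMechanism(
    nodal_planes=NodalPlanes(
        nodal_plane_1=NodalPlane(strike=120.0, dip=45.0, rake=-90.0),
        nodal_plane_2=NodalPlane(strike=300.0, dip=45.0, rake=-90.0)),
    azimuthal_gap=75.0, station_polarity_count=18)
event = Event(focal_mechanisms=[fm])
event.preferred_focal_mechanism_id = fm.resource_id.id
assert event.preferred_focal_mechanism() is fm
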
Example #11
def _read_single_hypocenter(lines, coordinate_converter, original_picks):
    """
    Given a list of lines (starting with a 'NLLOC' line and ending with a
    'END_NLLOC' line), parse them into an Event.
    """
    try:
        # some paranoid checks..
        assert lines[0].startswith("NLLOC ")
        assert lines[-1].startswith("END_NLLOC")
        for line in lines[1:-1]:
            assert not line.startswith("NLLOC ")
            assert not line.startswith("END_NLLOC")
    except Exception:
        msg = ("This should not have happened, please report this as a bug at "
               "https://github.com/obspy/obspy/issues.")
        raise Exception(msg)

    indices_phases = [None, None]
    for i, line in enumerate(lines):
        if line.startswith("PHASE "):
            indices_phases[0] = i
        elif line.startswith("END_PHASE"):
            indices_phases[1] = i

    # extract PHASES lines (if any)
    if any(indices_phases):
        if not all(indices_phases):
            msg = ("NLLOC HYP file seems corrupt, 'PHASE' block is corrupt.")
            raise RuntimeError(msg)
        i1, i2 = indices_phases
        lines, phases_lines = lines[:i1] + lines[i2 + 1:], lines[i1 + 1:i2]
    else:
        phases_lines = []

    lines = dict([line.split(None, 1) for line in lines[:-1]])
    line = lines["SIGNATURE"]

    line = line.rstrip().split('"')[1]
    signature, version, date, time = line.rsplit(" ", 3)
    # new NLLoc > 6.0 seems to add prefix 'run:' before date
    if date.startswith('run:'):
        date = date[4:]
    signature = signature.strip()
    creation_time = UTCDateTime.strptime(date + time, str("%d%b%Y%Hh%Mm%S"))

    if coordinate_converter:
        # maximum likelihood origin location in km info line
        line = lines["HYPOCENTER"]
        x, y, z = coordinate_converter(*map(float, line.split()[1:7:2]))
    else:
        # maximum likelihood origin location lon lat info line
        line = lines["GEOGRAPHIC"]
        y, x, z = map(float, line.split()[8:13:2])

    # maximum likelihood origin time info line
    line = lines["GEOGRAPHIC"]

    year, mon, day, hour, min = map(int, line.split()[1:6])
    seconds = float(line.split()[6])
    time = UTCDateTime(year, mon, day, hour, min, seconds, strict=False)

    # distribution statistics line
    line = lines["STATISTICS"]
    covariance_xx = float(line.split()[7])
    covariance_yy = float(line.split()[13])
    covariance_zz = float(line.split()[17])
    stats_info_string = str(
        "Note: Depth/Latitude/Longitude errors are calculated from covariance "
        "matrix as 1D marginal (Lon/Lat errors as great circle degrees) "
        "while OriginUncertainty min/max horizontal errors are calculated "
        "from 2D error ellipsoid and are therefore seemingly higher compared "
        "to 1D errors. Error estimates can be reconstructed from the "
        "following original NonLinLoc error statistics line:\nSTATISTICS " +
        lines["STATISTICS"])

    # goto location quality info line
    line = lines["QML_OriginQuality"].split()

    (assoc_phase_count, used_phase_count, assoc_station_count,
     used_station_count, depth_phase_count) = map(int, line[1:11:2])
    stderr, az_gap, sec_az_gap = map(float, line[11:17:2])
    gt_level = line[17]
    min_dist, max_dist, med_dist = map(float, line[19:25:2])

    # goto location quality info line
    line = lines["QML_OriginUncertainty"]

    if "COMMENT" in lines:
        comment = lines["COMMENT"].strip()
        comment = comment.strip('\'"')
        comment = comment.strip()

    hor_unc, min_hor_unc, max_hor_unc, hor_unc_azim = \
        map(float, line.split()[1:9:2])

    # assign origin info
    event = Event()
    o = Origin()
    event.origins = [o]
    event.preferred_origin_id = o.resource_id
    o.origin_uncertainty = OriginUncertainty()
    o.quality = OriginQuality()
    ou = o.origin_uncertainty
    oq = o.quality
    o.comments.append(Comment(text=stats_info_string, force_resource_id=False))
    event.comments.append(Comment(text=comment, force_resource_id=False))

    # SIGNATURE field's first item is LOCSIG, which is supposed to be
    # 'Identification of an individual, institution or other entity'
    # according to
    # http://alomax.free.fr/nlloc/soft6.00/control.html#_NLLoc_locsig_
    # so use it as author in creation info
    event.creation_info = CreationInfo(creation_time=creation_time,
                                       version=version,
                                       author=signature)
    o.creation_info = CreationInfo(creation_time=creation_time,
                                   version=version,
                                   author=signature)

    # negative values can appear on diagonal of covariance matrix due to a
    # precision problem in NLLoc implementation when location coordinates are
    # large compared to the covariances.
    o.longitude = x
    try:
        o.longitude_errors.uncertainty = kilometer2degrees(sqrt(covariance_xx))
    except ValueError:
        if covariance_xx < 0:
            msg = ("Negative value in XX value of covariance matrix, not "
                   "setting longitude error (epicentral uncertainties will "
                   "still be set in origin uncertainty).")
            warnings.warn(msg)
        else:
            raise
    o.latitude = y
    try:
        o.latitude_errors.uncertainty = kilometer2degrees(sqrt(covariance_yy))
    except ValueError:
        if covariance_yy < 0:
            msg = ("Negative value in YY value of covariance matrix, not "
                   "setting longitude error (epicentral uncertainties will "
                   "still be set in origin uncertainty).")
            warnings.warn(msg)
        else:
            raise
    o.depth = z * 1e3  # meters!
    o.depth_errors.uncertainty = sqrt(covariance_zz) * 1e3  # meters!
    o.depth_errors.confidence_level = 68
    o.depth_type = str("from location")
    o.time = time

    ou.horizontal_uncertainty = hor_unc
    ou.min_horizontal_uncertainty = min_hor_unc
    ou.max_horizontal_uncertainty = max_hor_unc
    # values of -1 seem to be used for unset values, set to None
    for field in ("horizontal_uncertainty", "min_horizontal_uncertainty",
                  "max_horizontal_uncertainty"):
        if ou.get(field, -1) == -1:
            ou[field] = None
        else:
            ou[field] *= 1e3  # meters!
    ou.azimuth_max_horizontal_uncertainty = hor_unc_azim
    ou.preferred_description = str("uncertainty ellipse")
    ou.confidence_level = 68  # NonLinLoc in general uses 1-sigma (68%) level

    oq.standard_error = stderr
    oq.azimuthal_gap = az_gap
    oq.secondary_azimuthal_gap = sec_az_gap
    oq.used_phase_count = used_phase_count
    oq.used_station_count = used_station_count
    oq.associated_phase_count = assoc_phase_count
    oq.associated_station_count = assoc_station_count
    oq.depth_phase_count = depth_phase_count
    oq.ground_truth_level = gt_level
    oq.minimum_distance = kilometer2degrees(min_dist)
    oq.maximum_distance = kilometer2degrees(max_dist)
    oq.median_distance = kilometer2degrees(med_dist)

    # go through all phase info lines
    for line in phases_lines:
        line = line.split()
        arrival = Arrival()
        o.arrivals.append(arrival)
        station = str(line[0])
        phase = str(line[4])
        arrival.phase = phase
        arrival.distance = kilometer2degrees(float(line[21]))
        arrival.azimuth = float(line[23])
        arrival.takeoff_angle = float(line[24])
        arrival.time_residual = float(line[16])
        arrival.time_weight = float(line[17])
        pick = Pick()
        # network codes are not used by NonLinLoc, so they can not be known
        # when reading the .hyp file.. to conform with QuakeML standard set an
        # empty network code
        wid = WaveformStreamID(network_code="", station_code=station)
        # have to split this into ints for overflow to work correctly
        date, hourmin, sec = map(str, line[6:9])
        ymd = [int(date[:4]), int(date[4:6]), int(date[6:8])]
        hm = [int(hourmin[:2]), int(hourmin[2:4])]
        t = UTCDateTime(*(ymd + hm), strict=False) + float(sec)
        pick.waveform_id = wid
        pick.time = t
        pick.time_errors.uncertainty = float(line[10])
        pick.phase_hint = phase
        pick.onset = ONSETS.get(line[3].lower(), None)
        pick.polarity = POLARITIES.get(line[5].lower(), None)
        # try to determine original pick for each arrival
        for pick_ in original_picks:
            wid = pick_.waveform_id
            if station == wid.station_code and phase == pick_.phase_hint:
                pick = pick_
                break
        else:
            # warn if original picks were specified and we could not associate
            # the arrival correctly
            if original_picks:
                msg = ("Could not determine corresponding original pick for "
                       "arrival. "
                       "Falling back to pick information in NonLinLoc "
                       "hypocenter-phase file.")
                warnings.warn(msg)
        event.picks.append(pick)
        arrival.pick_id = pick.resource_id

    event.scope_resource_ids()

    return event
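
This function backs obspy's public NonLinLoc reader; the usual entry point is read_events (the file name below is a placeholder):

from obspy import read_events

cat = read_events("loc.20060816.131003.grid0.loc.hyp", format="NLLOC_HYP")
origin = cat[0].preferred_origin()
print(origin.time, origin.depth)  # depth in meters
print(origin.origin_uncertainty.max_horizontal_uncertainty)
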
Example #12
    def _load_events(self):
        self._load_events_helper()
        cache = {}
        notFound = defaultdict(int)
        oEvents = []
        missingStations = defaultdict(int)
        lines = []
        for e in tqdm(self.eventList, desc='Rank %d' % (self.rank)):
            if (e.preferred_origin and len(e.preferred_origin.arrival_list)):
                cullList = []
                for a in e.preferred_origin.arrival_list:
                    if (len(a.net)): continue

                    seedid = '%s.%s.%s.%s' % (a.net, a.sta, a.loc, a.cha)
                    newCode = None
                    if (seedid not in cache):
                        sc = a.sta
                        lonlat = self.isc_coords_dict[sc]
                        if (len(lonlat) == 0):
                            cullList.append(a)
                            continue
                        # end if

                        r = self.fdsn_inventory.getClosestStation(
                            lonlat[0], lonlat[1], maxdist=1e3)  # 1km
                        #if(a.sta=='KUM'): print a.net, a.sta, a.loc, a.cha, r
                        if (not r):
                            notFound[sc] += 1
                        else:
                            c = r[0].split('.')[0]
                            newCode = c
                        # end if

                        if (newCode):
                            cache[seedid] = newCode
                        # end if
                    else:
                        newCode = cache[seedid]
                    # end if

                    if (newCode):
                        #print a.net, newCode
                        a.net = newCode

                        sc = self.fdsn_inventory.t[a.net][a.sta]
                        if (type(sc) == defaultdict):
                            cullList.append(a)
                            continue
                        # end if
                        da = gps2dist_azimuth(e.preferred_origin.lat,
                                              e.preferred_origin.lon, sc[1],
                                              sc[0])
                        dist = kilometers2degrees(da[0] / 1e3)

                        if (np.fabs(a.distance - dist) > 0.5):
                            #print ([e.preferred_origin.lon, e.preferred_origin.lat, sc[0], sc[1]])
                            cullList.append(a)
                        # end if
                    # end if
                # end for
                for c in cullList:
                    e.preferred_origin.arrival_list.remove(c)
            # end if

            # Create obspy event object
            ci = OCreationInfo(author='GA',
                               creation_time=UTCDateTime(),
                               agency_id='GA-iteration-1')
            oid = self.get_id()
            origin = OOrigin(resource_id=OResourceIdentifier(id=oid),
                             time=UTCDateTime(e.preferred_origin.utctime),
                             longitude=e.preferred_origin.lon,
                             latitude=e.preferred_origin.lat,
                             depth=e.preferred_origin.depthkm * 1e3,
                             method_id=OResourceIdentifier(id='unknown'),
                             earth_model_id=OResourceIdentifier(id='iasp91'),
                             evaluation_mode='automatic',
                             creation_info=ci)
            magnitude = OMagnitude(
                resource_id=OResourceIdentifier(id=self.get_id()),
                mag=e.preferred_magnitude.magnitude_value,
                magnitude_type=e.preferred_magnitude.magnitude_type,
                origin_id=OResourceIdentifier(id=oid),
                creation_info=ci)
            event = OEvent(
                resource_id=OResourceIdentifier(id=str(e.public_id)),
                creation_info=ci,
                event_type='earthquake')
            event.origins = [origin]
            event.magnitudes = [magnitude]
            event.preferred_magnitude_id = magnitude.resource_id
            event.preferred_origin_id = origin.resource_id

            # Insert old picks
            if (not self.discard_old_picks):
                for a in e.preferred_origin.arrival_list:
                    if (type(self.fdsn_inventory.t[a.net][a.sta]) ==
                            defaultdict):
                        missingStations[a.net + '.' + a.sta] += 1
                        continue
                    # end if
                    oldPick = OPick(
                        resource_id=OResourceIdentifier(id=self.get_id()),
                        time=UTCDateTime(a.utctime),
                        waveform_id=OWaveformStreamID(network_code=a.net,
                                                      station_code=a.sta,
                                                      channel_code=a.cha),
                        method_id=OResourceIdentifier('unknown'),
                        phase_hint=a.phase,
                        evaluation_mode='automatic',
                        creation_info=ci)

                    oldArr = OArrival(
                        resource_id=OResourceIdentifier(
                            id=oldPick.resource_id.id + "#"),
                        pick_id=oldPick.resource_id,
                        phase=oldPick.phase_hint,
                        distance=a.distance,
                        earth_model_id=OResourceIdentifier(
                            'quakeml:ga.gov.au/earthmodel/iasp91'),
                        creation_info=ci)

                    event.picks.append(oldPick)
                    event.preferred_origin().arrivals.append(oldArr)

                    # populate list for text output
                    line = [
                        str(e.public_id), '{:<25s}',
                        e.preferred_origin.utctime.timestamp, '{:f}',
                        e.preferred_magnitude.magnitude_value, '{:f}',
                        e.preferred_origin.lon, '{:f}', e.preferred_origin.lat,
                        '{:f}', e.preferred_origin.depthkm, '{:f}', a.net,
                        '{:<5s}', a.sta, '{:<5s}', a.cha, '{:<5s}',
                        a.utctime.timestamp, '{:f}', a.phase, '{:<5s}',
                        self.fdsn_inventory.t[a.net][a.sta][0], '{:f}',
                        self.fdsn_inventory.t[a.net][a.sta][1], '{:f}', -999,
                        '{:f}', -999, '{:f}', a.distance, '{:f}', -999, '{:f}',
                        -999, '{:f}', -999, '{:f}', -999, '{:f}', -999, '{:f}',
                        -999, '{:d}', -999, '{:d}'
                    ]
                    lines.append(line)
                # end for
            # end if

            # Insert our picks
            opList = self.our_picks.picks[e.public_id]
            if (len(opList)):
                for op in opList:
                    if (type(self.fdsn_inventory.t[op[1]][op[2]]) ==
                            defaultdict):
                        missingStations[op[1] + '.' + op[2]] += 1
                        continue
                    # end if
                    newPick = OPick(
                        resource_id=OResourceIdentifier(id=self.get_id()),
                        time=UTCDateTime(op[0]),
                        waveform_id=OWaveformStreamID(network_code=op[1],
                                                      station_code=op[2],
                                                      channel_code=op[3]),
                        method_id=OResourceIdentifier('phasepapy/aicd'),
                        backazimuth=op[-1],
                        phase_hint=op[4],
                        evaluation_mode='automatic',
                        comments=[
                            OComment(
                                text='phasepapy_snr = ' + str(op[6][0]) +
                                ', quality_measure_cwt = ' + str(op[6][1]) +
                                ', dom_freq = ' + str(op[6][2]) +
                                ', quality_measure_slope = ' + str(op[6][3]) +
                                ', band_index = ' + str(op[6][4]) +
                                ', nsigma = ' + str(op[6][5]),
                                force_resource_id=False)
                        ],
                        creation_info=ci)

                    newArr = OArrival(
                        resource_id=OResourceIdentifier(
                            id=newPick.resource_id.id + "#"),
                        pick_id=newPick.resource_id,
                        phase=newPick.phase_hint,
                        azimuth=op[-2],
                        distance=op[-3],
                        time_residual=op[5],
                        time_weight=1.,
                        earth_model_id=OResourceIdentifier(
                            'quakeml:ga.gov.au/earthmodel/iasp91'),
                        creation_info=ci)
                    event.picks.append(newPick)
                    event.preferred_origin().arrivals.append(newArr)

                    # populate list for text output
                    line = [
                        str(e.public_id), '{:<25s}',
                        e.preferred_origin.utctime.timestamp, '{:f}',
                        e.preferred_magnitude.magnitude_value, '{:f}',
                        e.preferred_origin.lon, '{:f}', e.preferred_origin.lat,
                        '{:f}', e.preferred_origin.depthkm, '{:f}', op[1],
                        '{:<5s}', op[2], '{:<5s}', op[3], '{:<5s}',
                        UTCDateTime(op[0]).timestamp, '{:f}', op[4], '{:<5s}',
                        op[10], '{:f}', op[9], '{:f}', op[12], '{:f}', op[13],
                        '{:f}', op[11], '{:f}', op[5], '{:f}', op[6][0],
                        '{:f}', op[6][1], '{:f}', op[6][2], '{:f}', op[6][3],
                        '{:f}',
                        int(op[6][4]), '{:d}',
                        int(op[6][5]), '{:d}'
                    ]
                    lines.append(line)
                # end for
            # end if

            n_phases = (len(e.preferred_origin.arrival_list) *
                        int(self.discard_old_picks) +
                        len(self.our_picks.picks[e.public_id]))
            quality = OOriginQuality(associated_phase_count=n_phases,
                                     used_phase_count=n_phases)
            event.preferred_origin().quality = quality

            if (len(self.our_picks.picks[e.public_id]) == 0
                    and self.discard_old_picks):
                continue
            # end if

            oEvents.append(event)
        # end for // loop over e

        if (len(missingStations)):
            for k, v in missingStations.items():
                self.logger.warning('Missing station %s: %d picks' % (k, v))
            # end for
        # end if

        # write xml output
        if (len(oEvents)):
            cat = OCatalog(events=oEvents)
            ofn = self.output_path + '/%d.xml' % (self.rank)
            cat.write(ofn, format='SC3ML')
        # end if

        # write text output
        procfile = open('%s/proc.%d.txt' % (self.output_path, self.rank), 'w+')
        for line in lines:
            lineout = ' '.join(line[1::2]).format(*line[::2])
            procfile.write(lineout + '\n')
        # end for
        procfile.close()

        # combine text output
        header = '#eventID originTimestamp mag originLon originLat originDepthKm net sta cha pickTimestamp phase stationLon stationLat az baz distance ttResidual snr qualityMeasureCWT domFreq qualityMeasureSlope bandIndex nSigma\n'
        self.comm.barrier()
        if (self.rank == 0):
            of = open('%s/ensemble.txt' % (self.output_path), 'w+')
            of.write(header)

            for i in range(self.nproc):
                fn = '%s/proc.%d.txt' % (self.output_path, i)

                with open(fn, 'r') as pf:
                    for line in pf:
                        of.write(line)
                # end for

                if (os.path.exists(fn)): os.remove(fn)
            # end for
            of.close()
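
The obspy-facing core of this pipeline (build an origin and magnitude, mark both as preferred, write SC3ML) fits in a short sketch; ids and values are placeholders, and the SC3ML write needs an obspy version that supports it:

from obspy import UTCDateTime
from obspy.core.event import (Catalog, Event, Magnitude, Origin,
                              ResourceIdentifier)

origin = Origin(resource_id=ResourceIdentifier("smi:local/origin/1"),
                time=UTCDateTime(2020, 1, 1), latitude=-23.5,
                longitude=133.9, depth=12000.0)
magnitude = Magnitude(mag=4.2, origin_id=origin.resource_id)
event = Event(origins=[origin], magnitudes=[magnitude],
              event_type="earthquake")
event.preferred_origin_id = origin.resource_id
event.preferred_magnitude_id = magnitude.resource_id
Catalog(events=[event]).write("1.xml", format="SC3ML")
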
Example #13
def __read_single_fnetmt_entry(line, **kwargs):
    """
    Reads a single F-net moment tensor solution to a
    :class:`~obspy.core.event.Event` object.

    :param line: String containing moment tensor information.
    :type line: str.
    """

    a = line.split()
    try:
        ot = UTCDateTime.strptime(a[0], '%Y/%m/%d,%H:%M:%S.%f')
    except ValueError:
        ot = UTCDateTime.strptime(a[0], '%Y/%m/%d,%H:%M:%S')
    lat, lon, depjma, magjma = map(float, a[1:5])
    depjma *= 1000
    region = a[5]
    strike = tuple(map(int, a[6].split(';')))
    dip = tuple(map(int, a[7].split(';')))
    rake = tuple(map(int, a[8].split(';')))
    mo = float(a[9])
    depmt = float(a[10]) * 1000
    magmt = float(a[11])
    var_red = float(a[12])
    mxx, mxy, mxz, myy, myz, mzz, unit = map(float, a[13:20])

    event_name = util.gen_sc3_id(ot)
    e = Event(event_type="earthquake")
    e.resource_id = _get_resource_id(event_name, 'event')

    # Standard JMA solution
    o_jma = Origin(time=ot, latitude=lat, longitude=lon,
                   depth=depjma, depth_type="from location",
                   region=region)
    o_jma.resource_id = _get_resource_id(event_name,
                                         'origin', 'JMA')
    m_jma = Magnitude(mag=magjma, magnitude_type='ML',
                      origin_id=o_jma.resource_id)
    m_jma.resource_id = _get_resource_id(event_name,
                                         'magnitude', 'JMA')
    # MT solution
    o_mt = Origin(time=ot, latitude=lat, longitude=lon,
                  depth=depmt, region=region,
                  depth_type="from moment tensor inversion")
    o_mt.resource_id = _get_resource_id(event_name,
                                        'origin', 'MT')
    m_mt = Magnitude(mag=magmt, magnitude_type='Mw',
                     origin_id=o_mt.resource_id)
    m_mt.resource_id = _get_resource_id(event_name,
                                        'magnitude', 'MT')
    foc_mec = FocalMechanism(triggering_origin_id=o_jma.resource_id)
    foc_mec.resource_id = _get_resource_id(event_name,
                                           "focal_mechanism")
    nod1 = NodalPlane(strike=strike[0], dip=dip[0], rake=rake[0])
    nod2 = NodalPlane(strike=strike[1], dip=dip[1], rake=rake[1])
    nod = NodalPlanes(nodal_plane_1=nod1, nodal_plane_2=nod2)
    foc_mec.nodal_planes = nod

    tensor = Tensor(m_rr=mxx, m_tt=myy, m_pp=mzz, m_rt=mxy, m_rp=mxz, m_tp=myz)
    cm = Comment(text="Basis system: North,East,Down (Jost and \
    Herrmann 1989")
    cm.resource_id = _get_resource_id(event_name, 'comment', 'mt')
    mt = MomentTensor(derived_origin_id=o_mt.resource_id,
                      moment_magnitude_id=m_mt.resource_id,
                      scalar_moment=mo, comments=[cm],
                      tensor=tensor, variance_reduction=var_red)
    mt.resource_id = _get_resource_id(event_name,
                                      'moment_tensor')
    foc_mec.moment_tensor = mt
    e.origins = [o_jma, o_mt]
    e.magnitudes = [m_jma, m_mt]
    e.focal_mechanisms = [foc_mec]
    e.preferred_magnitude_id = m_mt.resource_id.id
    e.preferred_origin_id = o_mt.resource_id.id
    e.preferred_focal_mechanism_id = foc_mec.resource_id.id
    return e
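
The pattern worth copying: keep both the catalog (JMA) origin and the derived moment-tensor origin on the event, then point the preferred ids at the MT solution. In miniature (placeholder values):

from obspy.core.event import Event, Magnitude, Origin

o_jma = Origin(latitude=35.6, longitude=140.1, depth=40000.0)
o_mt = Origin(latitude=35.6, longitude=140.1, depth=38000.0,
              depth_type="from moment tensor inversion")
m_mt = Magnitude(mag=5.8, magnitude_type="Mw", origin_id=o_mt.resource_id)
e = Event(origins=[o_jma, o_mt], magnitudes=[m_mt])
e.preferred_origin_id = o_mt.resource_id.id
e.preferred_magnitude_id = m_mt.resource_id.id
assert e.preferred_origin() is o_mt
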
Example #14
def _dbs_associator(start_time,
                    end_time,
                    moving_window,
                    tbl,
                    pair_n,
                    save_dir,
                    station_list,
                    consider_combination=False):

    if consider_combination:
        Y2000_writer = open(os.path.join(save_dir, "Y2000.phs"), "w")

        traceNmae_dic = dict()
        st = datetime.strptime(start_time, '%Y-%m-%d %H:%M:%S.%f')
        et = datetime.strptime(end_time, '%Y-%m-%d %H:%M:%S.%f')
        total_t = et - st
        evid = 0
        tt = st
        pbar = tqdm(total=int(np.ceil(total_t.total_seconds() /
                                      moving_window)),
                    ncols=100)
        while tt < et:

            detections = tbl[(tbl.event_start_time >= tt) & (
                tbl.event_start_time < tt + timedelta(seconds=moving_window))]
            pbar.update()
            if len(detections) >= pair_n:
                evid += 1

                yr = "{:>4}".format(
                    str(detections.iloc[0]['event_start_time']).split(' ')
                    [0].split('-')[0])
                mo = "{:>2}".format(
                    str(detections.iloc[0]['event_start_time']).split(' ')
                    [0].split('-')[1])
                dy = "{:>2}".format(
                    str(detections.iloc[0]['event_start_time']).split(' ')
                    [0].split('-')[2])
                hr = "{:>2}".format(
                    str(detections.iloc[0]['event_start_time']).split(' ')
                    [1].split(':')[0])
                mi = "{:>2}".format(
                    str(detections.iloc[0]['event_start_time']).split(' ')
                    [1].split(':')[1])
                sec = "{:>4}".format(
                    str(detections.iloc[0]['event_start_time']).split(' ')
                    [1].split(':')[2])
                st_lat_DMS = _decimalDegrees2DMS(
                    float(detections.iloc[0]['stlat']), "Latitude")
                st_lon_DMS = _decimalDegrees2DMS(
                    float(detections.iloc[0]['stlon']), "Longitude")
                depth = 5.0
                mag = 0.0

                # QuakeML
                print(detections.iloc[0]['event_start_time'])

                if len(detections) / pair_n <= 2:
                    ch = pair_n
                else:
                    ch = int(len(detections) - pair_n)

                picks = []
                for ns in range(ch, len(detections) + 1):
                    comb = 0
                    for ind in list(combinations(detections.index, ns)):
                        comb += 1
                        selected_detections = detections.loc[ind, :]
                        sorted_detections = selected_detections.sort_values(
                            'p_arrival_time')

                        Y2000_writer.write(
                            "%4d%2d%2d%2d%2d%4.2f%2.0f%1s%4.2f%3.0f%1s%4.2f%5.2f%3.2f\n"
                            % (int(yr), int(mo), int(dy), int(hr), int(mi),
                               float(sec), float(st_lat_DMS[0]),
                               str(st_lat_DMS[1]), float(st_lat_DMS[2]),
                               float(st_lon_DMS[0]), str(st_lon_DMS[1]),
                               float(st_lon_DMS[2]), float(depth), float(mag)))

                        station_buffer = []
                        row_buffer = []
                        tr_names = []
                        tr_names2 = []
                        for _, row in sorted_detections.iterrows():

                            trace_name = row['traceID'] + '*' + row[
                                'station'] + '*' + str(row['event_start_time'])
                            p_unc = row['p_unc']
                            p_prob = row['p_prob']
                            s_unc = row['s_unc']
                            s_prob = row['s_prob']

                            if p_unc:
                                Pweihgt = _weighcalculator_prob(p_prob *
                                                                (1 - p_unc))
                            else:
                                Pweihgt = _weighcalculator_prob(p_prob)
                            try:
                                Pweihgt = int(Pweihgt)
                            except Exception:
                                Pweihgt = 4

                            if s_unc:
                                Sweihgt = _weighcalculator_prob(s_prob *
                                                                (1 - s_unc))
                            else:
                                Sweihgt = _weighcalculator_prob(s_prob)
                            try:
                                Sweihgt = int(Sweihgt)
                            except Exception:
                                Sweihgt = 4

                            station = "{:<5}".format(row['station'])
                            network = "{:<2}".format(row['network'])
                            try:
                                yrp = "{:>4}".format(
                                    str(row['p_arrival_time']).split(' ')
                                    [0].split('-')[0])
                                mop = "{:>2}".format(
                                    str(row['p_arrival_time']).split(' ')
                                    [0].split('-')[1])
                                dyp = "{:>2}".format(
                                    str(row['p_arrival_time']).split(' ')
                                    [0].split('-')[2])
                                hrp = "{:>2}".format(
                                    str(row['p_arrival_time']).split(' ')
                                    [1].split(':')[0])
                                mip = "{:>2}".format(
                                    str(row['p_arrival_time']).split(' ')
                                    [1].split(':')[1])
                                sec_p = "{:>4}".format(
                                    str(row['p_arrival_time']).split(' ')
                                    [1].split(':')[2])
                                p = Pick(time=UTCDateTime(
                                    row['p_arrival_time']),
                                         waveform_id=WaveformStreamID(
                                             network_code=network,
                                             station_code=station.rstrip()),
                                         phase_hint="P")
                                picks.append(p)
                            except Exception:
                                sec_p = None

                            try:
                                yrs = "{:>4}".format(
                                    str(row['s_arrival_time']).split(' ')
                                    [0].split('-')[0])
                                mos = "{:>2}".format(
                                    str(row['s_arrival_time']).split(' ')
                                    [0].split('-')[1])
                                dys = "{:>2}".format(
                                    str(row['s_arrival_time']).split(' ')
                                    [0].split('-')[2])
                                hrs = "{:>2}".format(
                                    str(row['s_arrival_time']).split(' ')
                                    [1].split(':')[0])
                                mis = "{:>2}".format(
                                    str(row['s_arrival_time']).split(' ')
                                    [1].split(':')[1])
                                sec_s = "{:>4}".format(
                                    str(row['s_arrival_time']).split(' ')
                                    [1].split(':')[2])
                                p = Pick(time=UTCDateTime(
                                    row['p_arrival_time']),
                                         waveform_id=WaveformStreamID(
                                             network_code=network,
                                             station_code=station.rstrip()),
                                         phase_hint="S")
                                picks.append(p)
                            except Exception:
                                sec_s = None

                            if row['station'] not in station_buffer:
                                tr_names.append(trace_name)
                                station_buffer.append(row['station'])
                                if sec_s:
                                    Y2000_writer.write(
                                        "%5s%2s  HHE     %4d%2d%2d%2d%2d%5.2f       %5.2fES %1d\n"
                                        %
                                        (station, network, int(yrs), int(mos),
                                         int(dys), int(hrs), int(mis),
                                         float(0.0), float(sec_s), Sweihgt))
                                if sec_p:
                                    Y2000_writer.write(
                                        "%5s%2s  HHZ IP %1d%4d%2d%2d%2d%2d%5.2f       %5.2f   0\n"
                                        % (station, network, Pweihgt, int(yrp),
                                           int(mop), int(dyp), int(hrp),
                                           int(mip), float(sec_p), float(0.0)))
                            else:
                                tr_names2.append(trace_name)
                                if sec_s:
                                    row_buffer.append(
                                        "%5s%2s  HHE     %4d%2d%2d%2d%2d%5.2f       %5.2fES %1d\n"
                                        %
                                        (station, network, int(yrs), int(mos),
                                         int(dys), int(hrs), int(mis), 0.0,
                                         float(sec_s), Sweihgt))
                                if sec_p:
                                    row_buffer.append(
                                        "%5s%2s  HHZ IP %1d%4d%2d%2d%2d%2d%5.2f       %5.2f   0\n"
                                        % (station, network, Pweihgt, int(yrp),
                                           int(mop), int(dyp), int(hrp),
                                           int(mip), float(sec_p), float(0.0)))
                        Y2000_writer.write("{:<62}".format(' ') + "%10d" %
                                           (evid) + '\n')

                traceNmae_dic[str(evid)] = tr_names

                if len(row_buffer) >= 2 * pair_n:
                    Y2000_writer.write(
                        "%4d%2d%2d%2d%2d%4.2f%2.0f%1s%4.2f%3.0f%1s%4.2f%5.2f%3.2f\n"
                        % (int(yr), int(mo), int(dy), int(hr), int(mi),
                           float(sec), float(st_lat_DMS[0]), str(
                               st_lat_DMS[1]), float(st_lat_DMS[2]),
                           float(st_lon_DMS[0]), str(st_lon_DMS[1]),
                           float(st_lon_DMS[2]), float(depth), float(mag)))
                    for rr in row_buffer:
                        Y2000_writer.write(rr)

                    Y2000_writer.write("{:<62}".format(' ') + "%10d" % (evid) +
                                       '\n')
                    traceNmae_dic[str(evid)] = tr_names2

            tt += timedelta(seconds=moving_window)

        print('The Number of Realizations: ' + str(evid) + '\n', flush=True)

        jj = json.dumps(traceNmae_dic)
        with open(os.path.join(save_dir, "traceNmae_dic.json"), "w") as f:
            f.write(jj)

    else:
        Y2000_writer = open(os.path.join(save_dir, "Y2000.phs"), "w")

        cat = Catalog()
        traceNmae_dic = dict()
        st = datetime.strptime(start_time, '%Y-%m-%d %H:%M:%S.%f')
        et = datetime.strptime(end_time, '%Y-%m-%d %H:%M:%S.%f')
        total_t = et - st
        evid = 200000
        evidd = 100000
        tt = st
        pbar = tqdm(total=int(np.ceil(total_t.total_seconds() /
                                      moving_window)))
        while tt < et:

            detections = tbl[(tbl.event_start_time >= tt) & (
                tbl.event_start_time < tt + timedelta(seconds=moving_window))]
            pbar.update()
            if len(detections) >= pair_n:

                yr = "{:>4}".format(
                    str(detections.iloc[0]['event_start_time']).split(' ')
                    [0].split('-')[0])
                mo = "{:>2}".format(
                    str(detections.iloc[0]['event_start_time']).split(' ')
                    [0].split('-')[1])
                dy = "{:>2}".format(
                    str(detections.iloc[0]['event_start_time']).split(' ')
                    [0].split('-')[2])
                hr = "{:>2}".format(
                    str(detections.iloc[0]['event_start_time']).split(' ')
                    [1].split(':')[0])
                mi = "{:>2}".format(
                    str(detections.iloc[0]['event_start_time']).split(' ')
                    [1].split(':')[1])
                sec = "{:>4}".format(
                    str(detections.iloc[0]['event_start_time']).split(' ')
                    [1].split(':')[2])
                st_lat_DMS = _decimalDegrees2DMS(
                    float(detections.iloc[0]['stlat']), "Latitude")
                st_lon_DMS = _decimalDegrees2DMS(
                    float(detections.iloc[0]['stlon']), "Longitude")
                depth = 5.0
                mag = 0.0

                Y2000_writer.write(
                    "%4d%2d%2d%2d%2d%4.2f%2.0f%1s%4.2f%3.0f%1s%4.2f%5.2f%3.2f\n"
                    % (int(yr), int(mo), int(dy), int(hr), int(mi), float(sec),
                       float(st_lat_DMS[0]), str(st_lat_DMS[1]),
                       float(st_lat_DMS[2]), float(st_lon_DMS[0]),
                       str(st_lon_DMS[1]), float(
                           st_lon_DMS[2]), float(depth), float(mag)))
                event = Event()
                origin = Origin(time=UTCDateTime(
                    detections.iloc[0]['event_start_time']),
                                longitude=detections.iloc[0]['stlon'],
                                latitude=detections.iloc[0]['stlat'],
                                method_id="EqTransformer")
                event.origins.append(origin)

                station_buffer = []
                row_buffer = []
                sorted_detections = detections.sort_values('p_arrival_time')
                tr_names = []
                tr_names2 = []
                picks = []
                for _, row in sorted_detections.iterrows():
                    trace_name = row['traceID'] + '*' + row[
                        'station'] + '*' + str(row['event_start_time'])
                    p_unc = row['p_unc']
                    p_prob = row['p_prob']
                    s_unc = row['s_unc']
                    s_prob = row['s_prob']

                    if p_unc:
                        Pweihgt = _weighcalculator_prob(p_prob * (1 - p_unc))
                    else:
                        Pweihgt = _weighcalculator_prob(p_prob)
                    try:
                        Pweihgt = int(Pweihgt)
                    except Exception:
                        Pweihgt = 4

                    if s_unc:
                        Sweihgt = _weighcalculator_prob(s_prob * (1 - s_unc))
                    else:
                        Sweihgt = _weighcalculator_prob(s_prob)
                    try:
                        Sweihgt = int(Sweihgt)
                    except Exception:
                        Sweihgt = 4

                    station = "{:<5}".format(row['station'])
                    network = "{:<2}".format(row['network'])

                    try:
                        yrp = "{:>4}".format(
                            str(row['p_arrival_time']).split(' ')[0].split('-')
                            [0])
                        mop = "{:>2}".format(
                            str(row['p_arrival_time']).split(' ')[0].split('-')
                            [1])
                        dyp = "{:>2}".format(
                            str(row['p_arrival_time']).split(' ')[0].split('-')
                            [2])
                        hrp = "{:>2}".format(
                            str(row['p_arrival_time']).split(' ')[1].split(':')
                            [0])
                        mip = "{:>2}".format(
                            str(row['p_arrival_time']).split(' ')[1].split(':')
                            [1])
                        sec_p = "{:>4}".format(
                            str(row['p_arrival_time']).split(' ')[1].split(':')
                            [2])
                        p = Pick(time=UTCDateTime(row['p_arrival_time']),
                                 waveform_id=WaveformStreamID(
                                     network_code=network,
                                     station_code=station.rstrip()),
                                 phase_hint="P",
                                 method_id="EqTransformer")
                        picks.append(p)
                    except Exception:
                        sec_p = None

                    try:
                        yrs = "{:>4}".format(
                            str(row['s_arrival_time']).split(' ')[0].split('-')
                            [0])
                        mos = "{:>2}".format(
                            str(row['s_arrival_time']).split(' ')[0].split('-')
                            [1])
                        dys = "{:>2}".format(
                            str(row['s_arrival_time']).split(' ')[0].split('-')
                            [2])
                        hrs = "{:>2}".format(
                            str(row['s_arrival_time']).split(' ')[1].split(':')
                            [0])
                        mis = "{:>2}".format(
                            str(row['s_arrival_time']).split(' ')[1].split(':')
                            [1])
                        sec_s = "{:>4}".format(
                            str(row['s_arrival_time']).split(' ')[1].split(':')
                            [2])
                        p = Pick(time=UTCDateTime(row['s_arrival_time']),
                                 waveform_id=WaveformStreamID(
                                     network_code=network,
                                     station_code=station.rstrip()),
                                 phase_hint="S",
                                 method_id="EqTransformer")
                        picks.append(p)
                    except Exception:
                        sec_s = None

                    if row['station'] not in station_buffer:
                        tr_names.append(trace_name)
                        station_buffer.append(row['station'])
                        if sec_s:
                            Y2000_writer.write(
                                "%5s%2s  HHE     %4d%2d%2d%2d%2d%5.2f       %5.2fES %1d\n"
                                % (station, network, int(yrs), int(mos),
                                   int(dys), int(hrs), int(mis), float(0.0),
                                   float(sec_s), Sweihgt))
                        if sec_p:
                            Y2000_writer.write(
                                "%5s%2s  HHZ IP %1d%4d%2d%2d%2d%2d%5.2f       %5.2f   0\n"
                                % (station, network, Pweihgt, int(yrp),
                                   int(mop), int(dyp), int(hrp), int(mip),
                                   float(sec_p), float(0.0)))
                    else:
                        tr_names2.append(trace_name)
                        if sec_s:
                            row_buffer.append(
                                "%5s%2s  HHE     %4d%2d%2d%2d%2d%5.2f       %5.2fES %1d\n"
                                % (station, network, int(yrs), int(mos),
                                   int(dys), int(hrs), int(mis), 0.0,
                                   float(sec_s), Sweihgt))
                        if sec_p:
                            row_buffer.append(
                                "%5s%2s  HHZ IP %1d%4d%2d%2d%2d%2d%5.2f       %5.2f   0\n"
                                % (station, network, Pweihgt, int(yrp),
                                   int(mop), int(dyp), int(hrp), int(mip),
                                   float(sec_p), float(0.0)))
                event.picks = picks
                event.preferred_origin_id = event.origins[0].resource_id
                cat.append(event)

                evid += 1
                Y2000_writer.write("{:<62}".format(' ') + "%10d" % (evid) +
                                   '\n')
                traceNmae_dic[str(evid)] = tr_names

                if len(row_buffer) >= 2 * pair_n:
                    Y2000_writer.write(
                        "%4d%2d%2d%2d%2d%4.2f%2.0f%1s%4.2f%3.0f%1s%4.2f%5.2f%3.2f\n"
                        % (int(yr), int(mo), int(dy), int(hr), int(mi),
                           float(sec), float(st_lat_DMS[0]), str(
                               st_lat_DMS[1]), float(st_lat_DMS[2]),
                           float(st_lon_DMS[0]), str(st_lon_DMS[1]),
                           float(st_lon_DMS[2]), float(depth), float(mag)))
                    for rr in row_buffer:
                        Y2000_writer.write(rr)

                    evid += 1
                    Y2000_writer.write("{:<62}".format(' ') + "%10d" % (evid) +
                                       '\n')
                    traceNmae_dic[str(evid)] = tr_names2

                elif len(row_buffer) < pair_n and len(row_buffer) != 0:
                    evidd += 1
                    traceNmae_dic[str(evidd)] = tr_names2

            elif len(detections) < pair_n and len(detections) != 0:
                tr_names = []
                for _, row in detections.iterrows():
                    trace_name = row['traceID']
                    tr_names.append(trace_name)
                evidd += 1
                traceNmae_dic[str(evidd)] = tr_names

            tt += timedelta(seconds=moving_window)

        print('The Number of Associated Events: ' + str(evid - 200000) + '\n',
              flush=True)

        jj = json.dumps(traceNmae_dic)
        with open(os.path.join(save_dir, "traceNmae_dic.json"), "w") as f:
            f.write(jj)
        print(cat.__str__(print_all=True))
        cat.write(os.path.join(save_dir, "associations.xml"), format="QUAKEML")
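
The Y2000 summary lines above print the epicentre as whole degrees, a hemisphere letter, and decimal minutes ("%2.0f%1s%4.2f"). The _decimalDegrees2DMS helper is defined outside this excerpt; a minimal hypothetical stand-in, consistent only with how its return tuple is consumed here, might look like:

def _decimal_degrees_to_deg_min(value, coord_type):
    """Hypothetical stand-in for _decimalDegrees2DMS as used above:
    return (whole_degrees, hemisphere_letter, decimal_minutes)."""
    if coord_type == "Latitude":
        hemisphere = "S" if value < 0 else "N"
    else:
        hemisphere = "W" if value < 0 else "E"
    value = abs(value)
    degrees = int(value)
    minutes = (value - degrees) * 60.0
    return degrees, hemisphere, minutes

assert _decimal_degrees_to_deg_min(35.25, "Latitude") == (35, "N", 15.0)
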
Example #15
0
    def _load_events(self):
        self._load_events_helper()
        cache = {}
        notFound = defaultdict(int)
        oEvents = []
        missingStations = defaultdict(int)
        for e in self.eventList:
            if (e.preferred_origin and len(e.preferred_origin.arrival_list)):
                cullList = []
                for a in e.preferred_origin.arrival_list:
                    if (len(a.net)): continue

                    seedid = '%s.%s.%s.%s' % (a.net, a.sta, a.loc, a.cha)
                    newCode = None
                    if (seedid not in cache):
                        sc = a.sta
                        lonlat = self.isc_coords_dict[sc]
                        if (len(lonlat) == 0):
                            cullList.append(a)
                            continue
                        # end if

                        r = self.fdsn_inventory.getClosestStations(lonlat[0],
                                                                   lonlat[1],
                                                                   maxdist=1e3)
                        #if(a.sta=='KUM'): print a.net, a.sta, a.loc, a.cha, r
                        if (not r):
                            notFound[sc] += 1
                        else:
                            for cr in r[0]:
                                c = cr.split('.')[0]
                                newCode = c
                            # end for
                        # end if

                        if (newCode):
                            cache[seedid] = newCode
                        # end if
                    else:
                        newCode = cache[seedid]
                    # end if

                    if (newCode):
                        #print a.net, newCode
                        a.net = newCode

                        sc = self.fdsn_inventory.t[a.net][a.sta]
                        if (type(sc) == defaultdict):
                            cullList.append(a)
                            continue
                        # end if
                        da = gps2dist_azimuth(e.preferred_origin.lat,
                                              e.preferred_origin.lon, sc[1],
                                              sc[0])
                        dist = kilometers2degrees(da[0] / 1e3)
                        if (np.fabs(a.distance - dist) > 0.5):
                            cullList.append(a)
                        # end if
                    # end if
                # end for
                for c in cullList:
                    e.preferred_origin.arrival_list.remove(c)
            # end if

            # Create obspy event object
            ci = OCreationInfo(author='GA',
                               creation_time=UTCDateTime(),
                               agency_id='GA-iteration-1')
            oid = self.get_id()
            origin = OOrigin(resource_id=OResourceIdentifier(id=oid),
                             time=UTCDateTime(e.preferred_origin.utctime),
                             longitude=e.preferred_origin.lon,
                             latitude=e.preferred_origin.lat,
                             depth=e.preferred_origin.depthkm * 1e3,
                             method_id=OResourceIdentifier(id='unknown'),
                             earth_model_id=OResourceIdentifier(id='iasp91'),
                             evaluation_mode='automatic',
                             creation_info=ci)
            magnitude = OMagnitude(
                resource_id=OResourceIdentifier(id=self.get_id()),
                mag=e.preferred_magnitude.magnitude_value,
                magnitude_type=e.preferred_magnitude.magnitude_type,
                origin_id=OResourceIdentifier(id=oid),
                creation_info=ci)
            event = OEvent(resource_id=OResourceIdentifier(id=self.get_id()),
                           creation_info=ci,
                           event_type='earthquake')
            event.origins = [origin]
            event.magnitudes = [magnitude]
            event.preferred_magnitude_id = magnitude.resource_id
            event.preferred_origin_id = origin.resource_id

            # Insert old picks
            for a in e.preferred_origin.arrival_list:
                if (type(self.fdsn_inventory.t[a.net][a.sta]) == defaultdict):
                    missingStations[a.net + '.' + a.sta] += 1
                    continue
                # end if
                oldPick = OPick(
                    resource_id=OResourceIdentifier(id=self.get_id()),
                    time=UTCDateTime(a.utctime),
                    waveform_id=OWaveformStreamID(network_code=a.net,
                                                  station_code=a.sta,
                                                  channel_code=a.cha),
                    method_id=OResourceIdentifier('unknown'),
                    phase_hint=a.phase,
                    evaluation_mode='automatic',
                    creation_info=ci)

                oldArr = OArrival(resource_id=OResourceIdentifier(
                    id=oldPick.resource_id.id + "#"),
                                  pick_id=oldPick.resource_id,
                                  phase=oldPick.phase_hint,
                                  distance=a.distance,
                                  earth_model_id=OResourceIdentifier(
                                      'quakeml:ga.gov.au/earthmodel/iasp91'),
                                  creation_info=ci)

                event.picks.append(oldPick)
                event.preferred_origin().arrivals.append(oldArr)
            # end for

            # Insert our picks
            opList = self.our_picks.picks[e.public_id]
            if (len(opList)):
                for op in opList:
                    if (type(self.fdsn_inventory.t[op[1]][op[2]]) ==
                            defaultdict):
                        missingStations[op[1] + '.' + op[2]] += 1
                        continue
                    # end if
                    newPick = OPick(
                        resource_id=OResourceIdentifier(id=self.get_id()),
                        time=UTCDateTime(op[0]),
                        waveform_id=OWaveformStreamID(network_code=op[1],
                                                      station_code=op[2],
                                                      channel_code=op[3]),
                        method_id=OResourceIdentifier('phasepapy/aicd'),
                        backazimuth=op[-1],
                        phase_hint=op[4],
                        evaluation_mode='automatic',
                        comments=op[6],
                        creation_info=ci)

                    newArr = OArrival(
                        resource_id=OResourceIdentifier(
                            id=newPick.resource_id.id + "#"),
                        pick_id=newPick.resource_id,
                        phase=newPick.phase_hint,
                        azimuth=op[-2],
                        distance=op[-3],
                        time_residual=op[5],
                        time_weight=1.,
                        earth_model_id=OResourceIdentifier(
                            'quakeml:ga.gov.au/earthmodel/iasp91'),
                        creation_info=ci)
                    event.picks.append(newPick)
                    event.preferred_origin().arrivals.append(newArr)
                # end for
            # end if

            quality = OOriginQuality(
                associated_phase_count=len(e.preferred_origin.arrival_list) +
                len(self.our_picks.picks[e.public_id]),
                used_phase_count=len(e.preferred_origin.arrival_list) +
                len(self.our_picks.picks[e.public_id]))
            event.preferred_origin().quality = quality
            oEvents.append(event)
        # end for // loop over e

        #print notFound
    print(self.rank, missingStations)

        cat = OCatalog(events=oEvents)
        ofn = self.output_path + '/%d.xml' % (self.rank)
        cat.write(ofn, format='SC3ML')
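
The cull above drops arrivals whose catalogued epicentral distance disagrees by more than 0.5 degrees with the distance recomputed from station coordinates. A standalone sketch of that consistency check (the coordinates and the test distance below are made up):

import numpy as np
from obspy.geodetics import gps2dist_azimuth, kilometers2degrees

def distance_is_consistent(ev_lat, ev_lon, st_lat, st_lon,
                           catalogued_dist_deg, tol_deg=0.5):
    # Recompute the epicentral distance and compare with the catalogue.
    dist_m, _az, _baz = gps2dist_azimuth(ev_lat, ev_lon, st_lat, st_lon)
    dist_deg = kilometers2degrees(dist_m / 1e3)
    return np.fabs(catalogued_dist_deg - dist_deg) <= tol_deg

# An arrival listed at roughly the recomputed distance passes the check.
print(distance_is_consistent(-20.0, 134.0, -24.5, 133.5, 4.52))
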
Example #16
0
def sdxtoquakeml(sdx_dir, out_xml,
                 time_uncertainties=[0.1, 0.2, 0.5, 0.8, 1.5],
                 catalog_description="", catalog_version="",
                 agency_id="", author="", vel_mod_id=""):
    """
    Convert SDX to QuakeML format using ObsPy inventory structure.
    SDX filename prefix is stored under event description.
    Input parameters:
        - sdx_dir: directory containing sdx files (required)
        - out_xml: Filename of quakeML file (required)
        - time_uncertainties: List containing time uncertainties in seconds
          for mapping from weights 0-4, respectively (optional)
        - catalog_description (optional)
        - catalog_version (optional)
        - agency_id (optional)
        - author (optional)
        - vel_mod_id (optional)
    Output:
        - xml catalog in QuakeML format.
    """

    # Prepare catalog
    cat = Catalog(description=catalog_description,
                  creation_info=CreationInfo(
                      author=author, agency_id=agency_id,
                      version=catalog_version))

    # Read in sdx files in directory, recursively
    files = glob.glob("{:}/**/*.sdx".format(sdx_dir), recursive=True)
    if len(files) == 0:
        print("No SDX files found in path. Exiting")
    for sdx_file_path in files:
        print("Working on ", sdx_file_path.split('/')[-1])

        # Set-up event
        evt_id = (sdx_file_path.split('/')[-1])[:-4]
        event = Event(event_type="earthquake", creation_info=CreationInfo(
            author=author, agency_id=agency_id),
            event_descriptions=[EventDescription(text=evt_id)])

        # Get station details, append to arrays
        sdx_file = open(sdx_file_path, "r")
        stations = []
        for line in sdx_file:
            if line.rstrip() == "station":
                sdxstation = list(islice(sdx_file, 5))
                stations.append([sdxstation[1].split()[0],
                                 float(sdxstation[2].split()[0]),
                                 float(sdxstation[3].split()[0]),
                                 float(sdxstation[4].split()[0])])
        sdx_file.close()

        # Find origin details, append to origin object
        sdx_file = open(sdx_file_path, "r")
        found_origin = False
        for line in sdx_file:
            if line.rstrip() == "origin":
                found_origin = True
                sdxorigin = list(islice(sdx_file, 17))
                orig_time = ("{:}T{:}".
                             format(sdxorigin[1][0:10].replace(".", "-"),
                                    sdxorigin[1][11:23]))
                evt_lat = float(sdxorigin[2].split()[0])
                evt_lon = float(sdxorigin[3].split()[0])
                evt_depth = float(sdxorigin[4].split()[0])
                creation_time = UTCDateTime(
                    "{:}T{:}".format(sdxorigin[16].split()[6][0:10]
                                     .replace(".", "-"),
                                     sdxorigin[16].split()[6][11:23]))
                num_arrivals = int(sdxorigin[12].split()[0])
                num_arrivals_p = (int(sdxorigin[12].split()[0]) -
                                  int(sdxorigin[12].split()[1]))
                min_dist = float(sdxorigin[12].split()[9])
                max_dist = float(sdxorigin[12].split()[10])
                med_dist = float(sdxorigin[12].split()[11])
                max_az_gap = float(sdxorigin[12].split()[6])

                origin = Origin(time=UTCDateTime(orig_time), longitude=evt_lon,
                                latitude=evt_lat, depth=evt_depth*-1000,
                                earth_model_id=vel_mod_id,
                                origin_type="hypocenter",
                                evaluation_mode="manual",
                                evaluation_status="confirmed",
                                method_id=ResourceIdentifier(id="SDX_hypo71"),
                                creation_info=CreationInfo(
                                    creation_time=creation_time, author=author,
                                    agency_id=agency_id),
                                quality=OriginQuality(
                                    associated_phase_count=num_arrivals,
                                    used_phase_count=num_arrivals,
                                    associated_station_count=num_arrivals_p,
                                    used_station_count=num_arrivals_p,
                                    azimuthal_gap=max_az_gap,
                                    minimum_distance=min_dist,
                                    maximum_distance=max_dist,
                                    median_distance=med_dist))
                event.origins.append(origin)

        sdx_file.close()

        # Skip event if no computed origin
        if found_origin is False:
            print("No origin found ... skipping event")
            continue

        # Get pick details, append to pick and arrival objects
        sdx_file = open(sdx_file_path, "r")
        found_pick = False
        for line in sdx_file:
            if line.rstrip() == "pick":
                found_pick = True
                sdxpick = list(islice(sdx_file, 15))
                pick_time = UTCDateTime(
                    "{:}T{:}".format(sdxpick[1][0:10].replace(".", "-"),
                                     sdxpick[1][11:23]))
                network = sdxpick[2].split()[0]
                station = sdxpick[2].split()[1]
                location = sdxpick[2].split()[2]
                if "NOT_SET" in location:
                    location = ""
                channel = sdxpick[2].split()[3]
                onset = sdxpick[8].split()[0]
                if onset == "0":
                    pickonset = "emergent"
                elif onset == "1":
                    pickonset = "impulsive"
                elif onset == "2":
                    pickonset = "questionable"
                phase = sdxpick[9].split()[0]
                polarity = sdxpick[10].split()[0]
                if polarity == "0":
                    pol = "positive"
                elif polarity == "1":
                    pol = "negative"
                elif polarity == "2":
                    pol = "undecidable"
                weight = int(sdxpick[11].split()[0])
                creation_time = UTCDateTime(
                    "{:}T{:}".format(sdxpick[14].split()[6][0:10]
                                     .replace(".", "-"),
                                     sdxpick[14].split()[6][11:23]))
                pick = Pick(time=pick_time,
                            waveform_id=WaveformStreamID(
                                network_code=network, station_code=station,
                                location_code=location, channel_code=channel),
                            time_errors=time_uncertainties[weight],
                            evaluation_mode="manual",
                            evaluation_status="confirmed", onset=pickonset,
                            phase_hint=phase, polarity=pol,
                            method_id=ResourceIdentifier(id="SDX"),
                            creation_info=CreationInfo(
                                creation_time=creation_time))
                event.picks.append(pick)

                # Compute azimuth, distance, append to arrival object
                for i in range(0, len(stations)):
                    if stations[i][0] == station:
                        azimuth = (gps2dist_azimuth(evt_lat, evt_lon,
                                                    stations[i][1],
                                                    stations[i][2])[1])
                        dist_deg = locations2degrees(evt_lat, evt_lon,
                                                     stations[i][1],
                                                     stations[i][2])
                        arrival = Arrival(phase=phase,
                                          pick_id=pick.resource_id,
                                          azimuth=azimuth, distance=dist_deg,
                                          time_weight=1.00)
                        event.origins[0].arrivals.append(arrival)

        sdx_file.close()

        # Skip event if no picks
        if found_pick is False:
            print("No picks found ... skipping event")
            continue

        # Set preferred origin and append event to catalogue
        event.preferred_origin_id = event.origins[0].resource_id
        cat.events.append(event)

    cat.write(out_xml, format="QUAKEML")
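
A hypothetical call to sdxtoquakeml() as defined above; the directory, file name, and metadata values are placeholders:

sdxtoquakeml("sdx_events", "catalog.xml",
             time_uncertainties=[0.1, 0.2, 0.5, 0.8, 1.5],
             catalog_description="Example SDX conversion",
             catalog_version="1.0",
             agency_id="XX", author="analyst",
             vel_mod_id="smi:local/velocity-model")
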
Example #17
0
def _readheader(f):
    """
    Internal header reader.
    :type f: file
    :param f: File open in read-mode.

    :returns: :class:`~obspy.core.event.event.Event`
    """
    f.seek(0)
    # Base populate to allow for empty parts of file
    new_event = Event()
    topline = _get_headline(f=f)
    if not topline:
        raise NordicParsingError('No header found, or incorrect '
                                 'formatting: corrupt s-file')
    try:
        sfile_seconds = int(topline[16:18])
        if sfile_seconds == 60:
            sfile_seconds = 0
            add_seconds = 60
        else:
            add_seconds = 0
        new_event.origins.append(Origin())
        new_event.origins[0].time = UTCDateTime(int(topline[1:5]),
                                                int(topline[6:8]),
                                                int(topline[8:10]),
                                                int(topline[11:13]),
                                                int(topline[13:15]),
                                                sfile_seconds,
                                                int(topline[19:20]) *
                                                100000)\
            + add_seconds
    except Exception:
        raise NordicParsingError("Couldn't read a date from sfile")
    # new_event.loc_mod_ind=topline[20]
    new_event.event_descriptions.append(EventDescription())
    new_event.event_descriptions[0].text = topline[21:23]
    # new_event.ev_id=topline[22]
    try:
        new_event.origins[0].latitude = float(topline[23:30])
        new_event.origins[0].longitude = float(topline[31:38])
        new_event.origins[0].depth = float(topline[39:43]) * 1000
    except ValueError:
        # The origin 'requires' a lat & long
        new_event.origins[0].latitude = None
        new_event.origins[0].longitude = None
        new_event.origins[0].depth = None
    # new_event.depth_ind = topline[44]
    # new_event.loc_ind = topline[45]
    new_event.creation_info = CreationInfo(agency_id=topline[45:48].strip())
    ksta = Comment(text='Number of stations=' + topline[49:51].strip())
    new_event.origins[0].comments.append(ksta)
    if _float_conv(topline[51:55]) is not None:
        new_event.origins[0].quality = OriginQuality(
            standard_error=_float_conv(topline[51:55]))
    # Read in magnitudes if they are there.
    for index in [59, 67, 75]:
        if not topline[index].isspace():
            new_event.magnitudes.append(Magnitude())
            new_event.magnitudes[-1].mag = _float_conv(topline[index -
                                                               3:index])
            new_event.magnitudes[-1].magnitude_type = \
                _nortoevmag(topline[index])
            new_event.magnitudes[-1].creation_info = \
                CreationInfo(agency_id=topline[index + 1:index + 4].strip())
            new_event.magnitudes[-1].origin_id = new_event.origins[0].\
                resource_id
    # Set the useful things like preferred magnitude and preferred origin
    new_event.preferred_origin_id = new_event.origins[0].resource_id
    try:
        # Select moment first, then local, then
        mag_filter = [
            'MW', 'Mw', 'ML', 'Ml', 'MB', 'Mb', 'MS', 'Ms', 'MC', 'Mc'
        ]
        _magnitudes = [(m.magnitude_type, m.resource_id)
                       for m in new_event.magnitudes]
        preferred_magnitude = sorted(_magnitudes,
                                     key=lambda x: mag_filter.index(x[0]))[0]
        new_event.preferred_magnitude_id = preferred_magnitude[1]
    except (ValueError, IndexError):
        # If there is a magnitude not specified in filter
        try:
            new_event.preferred_magnitude_id = new_event.magnitudes[0].\
                resource_id
        except IndexError:
            pass
    return new_event
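
The preferred-magnitude selection above sorts (type, resource_id) pairs by their position in a priority list; a magnitude type missing from the list makes mag_filter.index() raise ValueError, which the reader catches and falls back to the first magnitude. A standalone illustration with made-up identifiers:

mag_filter = ['MW', 'Mw', 'ML', 'Ml', 'MB', 'Mb', 'MS', 'Ms', 'MC', 'Mc']
_magnitudes = [('ML', 'id_local'), ('Mw', 'id_moment'), ('Mb', 'id_body')]
preferred = sorted(_magnitudes, key=lambda x: mag_filter.index(x[0]))[0]
assert preferred == ('Mw', 'id_moment')  # moment magnitude wins
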
Example #18
0
def _internal_read_single_scardec(buf):
    """
    Reads a single SCARDEC file to a :class:`~obspy.core.event.Catalog`
    object.

    :param buf: File to read.
    :type buf: open file or file-like object
    """
    # The first line encodes the origin time and epicenter
    line = buf.readline()

    origin_time = line.strip().split()[:6]
    values = list(map(int, origin_time[:-1])) + \
        [float(origin_time[-1])]
    try:
        origin_time = UTCDateTime(*values)
    except (TypeError, ValueError):
        warnings.warn("Could not determine origin time from line: %s. Will "
                      "be set to zero." % line)
        origin_time = UTCDateTime(0)
    line = line.split()[6:]
    latitude, longitude = map(float, line[:2])

    # The second line encodes depth and the two focal mechanisms
    line = buf.readline()
    line = line.split()

    # First three values are depth, scalar moment (in Nm) and moment magnitude
    depth, scalar_moment, moment_mag = map(float, line[0:3])

    # depth is in km in SCARDEC files
    depth *= 1e3

    # Next six values are strike, dip, rake for both planes
    strike1, dip1, rake1 = map(float, line[3:6])
    strike2, dip2, rake2 = map(float, line[6:9])

    # The rest of the file is the moment rate function
    # In each line: time (sec), moment rate (Nm/sec)
    stf_time = []
    stf_mr = []
    for line in buf:
        stf_time.append(float(line.split()[0]))
        stf_mr.append(float(line.split()[1]))

    # Normalize the source time function
    stf_mr = np.array(stf_mr)
    stf_mr /= scalar_moment

    # Calculate the time step
    dt = np.mean(np.diff(stf_time))

    # Calculate the stf offset (time of first sample wrt to origin time)
    offset = stf_time[0]

    # event name is set to generic value for now
    event_name = 'SCARDEC_event'

    cmt_origin = Origin(resource_id=_get_resource_id(event_name,
                                                     "origin",
                                                     tag="cmt"),
                        time=origin_time,
                        longitude=longitude,
                        latitude=latitude,
                        depth=depth,
                        origin_type="centroid",
                        region=_fe.get_region(longitude=longitude,
                                              latitude=latitude))

    cmt_mag = Magnitude(resource_id=_get_resource_id(event_name,
                                                     "magnitude",
                                                     tag="mw"),
                        mag=moment_mag,
                        magnitude_type="mw",
                        origin_id=cmt_origin.resource_id)

    nod1 = NodalPlane(strike=strike1, dip=dip1, rake=rake1)
    nod2 = NodalPlane(strike=strike2, dip=dip2, rake=rake2)
    nod = NodalPlanes(nodal_plane_1=nod1, nodal_plane_2=nod2)

    foc_mec = FocalMechanism(resource_id=_get_resource_id(
        event_name, "focal_mechanism"),
                             nodal_planes=nod)

    dip1 *= np.pi / 180.
    rake1 *= np.pi / 180.
    strike1 *= np.pi / 180.

    mxx = -scalar_moment * (
        (np.sin(dip1) * np.cos(rake1) * np.sin(2 * strike1)) +
        (np.sin(2 * dip1) * np.sin(rake1) * np.sin(strike1) ** 2))
    mxy = scalar_moment * (
        (np.sin(dip1) * np.cos(rake1) * np.cos(2 * strike1)) +
        (np.sin(2 * dip1) * np.sin(rake1) * np.sin(2 * strike1) * 0.5))
    myy = scalar_moment * (
        (np.sin(dip1) * np.cos(rake1) * np.sin(2 * strike1)) -
        (np.sin(2 * dip1) * np.sin(rake1) * np.cos(strike1) ** 2))
    mxz = -scalar_moment * (
        (np.cos(dip1) * np.cos(rake1) * np.cos(strike1)) +
        (np.cos(2 * dip1) * np.sin(rake1) * np.sin(strike1)))
    myz = -scalar_moment * (
        (np.cos(dip1) * np.cos(rake1) * np.sin(strike1)) -
        (np.cos(2 * dip1) * np.sin(rake1) * np.cos(strike1)))
    mzz = scalar_moment * (np.sin(2 * dip1) * np.sin(rake1))

    tensor = Tensor(m_rr=mxx, m_tt=myy, m_pp=mzz, m_rt=mxy, m_rp=mxz, m_tp=myz)

    cm = [
        Comment(text="Basis system: North,East,Down "
                     "(Jost and Herrmann 1989)")
    ]
    cm[0].resource_id = _get_resource_id(event_name, 'comment', 'mt')
    cm.append(
        Comment(text="MT derived from focal mechanism, therefore "
                     "constrained to pure double couple.",
                force_resource_id=False))

    # Write moment rate function
    extra = {
        'moment_rate': {
            'value': stf_mr,
            'namespace': r"http://test.org/xmlns/0.1"
        },
        'dt': {
            'value': dt,
            'namespace': r"http://test.org/xmlns/0.1"
        },
        'offset': {
            'value': offset,
            'namespace': r"http://test.org/xmlns/0.1"
        }
    }

    # Source time function
    stf = SourceTimeFunction(type="unknown")
    stf.extra = extra

    mt = MomentTensor(resource_id=_get_resource_id(event_name,
                                                   "moment_tensor"),
                      derived_origin_id=cmt_origin.resource_id,
                      moment_magnitude_id=cmt_mag.resource_id,
                      scalar_moment=scalar_moment,
                      tensor=tensor,
                      source_time_function=stf,
                      comments=cm)

    # Assemble everything.
    foc_mec.moment_tensor = mt

    ev = Event(resource_id=_get_resource_id(event_name, "event"),
               event_type="earthquake")
    ev.event_descriptions.append(
        EventDescription(text=event_name, type="earthquake name"))
    ev.comments.append(
        Comment(text="Hypocenter catalog: SCARDEC", force_resource_id=False))

    ev.origins.append(cmt_origin)
    ev.magnitudes.append(cmt_mag)
    ev.focal_mechanisms.append(foc_mec)

    # Set the preferred items.
    ev.preferred_origin_id = cmt_origin.resource_id.id
    ev.preferred_magnitude_id = cmt_mag.resource_id.id
    ev.preferred_focal_mechanism_id = foc_mec.resource_id.id

    ev.scope_resource_ids()

    return ev
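
The component expressions above are the standard Aki & Richards double-couple formulas in a north-east-down basis. As an independent cross-check (not part of the reader), they can be wrapped in a helper and tested on a vertical strike-slip mechanism, where only the north-east component should be non-zero:

import numpy as np

def sdr_to_mt_ned(strike, dip, rake, m0):
    """Double-couple moment tensor (Aki & Richards), NED basis.
    Angles in degrees; returns (mxx, myy, mzz, mxy, mxz, myz)."""
    phi, delta, lam = np.radians([strike, dip, rake])
    mxx = -m0 * (np.sin(delta) * np.cos(lam) * np.sin(2 * phi) +
                 np.sin(2 * delta) * np.sin(lam) * np.sin(phi) ** 2)
    myy = m0 * (np.sin(delta) * np.cos(lam) * np.sin(2 * phi) -
                np.sin(2 * delta) * np.sin(lam) * np.cos(phi) ** 2)
    mzz = m0 * np.sin(2 * delta) * np.sin(lam)
    mxy = m0 * (np.sin(delta) * np.cos(lam) * np.cos(2 * phi) +
                0.5 * np.sin(2 * delta) * np.sin(lam) * np.sin(2 * phi))
    mxz = -m0 * (np.cos(delta) * np.cos(lam) * np.cos(phi) +
                 np.cos(2 * delta) * np.sin(lam) * np.sin(phi))
    myz = -m0 * (np.cos(delta) * np.cos(lam) * np.sin(phi) -
                 np.cos(2 * delta) * np.sin(lam) * np.cos(phi))
    return mxx, myy, mzz, mxy, mxz, myz

mxx, myy, mzz, mxy, mxz, myz = sdr_to_mt_ned(0.0, 90.0, 0.0, 1.0)
assert abs(mxy - 1.0) < 1e-12 and all(
    abs(m) < 1e-12 for m in (mxx, myy, mzz, mxz, myz))
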
Example #19
0
    # Make picks for detections from template picks
    det_picks = []
    for p in template_event.picks:
        delay_template = p.time - min_template_starttime
        det_pick_time = detect_time + delay_template
        pick = Pick(time=det_pick_time,
                    phase_hint=p.phase_hint,
                    waveform_id=p.waveform_id)
        det_picks.append(pick)

    # figure out origin time for detection
    pick1_temp = template_event.picks[0]
    origin_det = template_event.origins[0].copy()
    pick1_det = [
        p for p in det_picks if p.waveform_id == pick1_temp.waveform_id
    ][0]
    origin_det.time = pick1_det.time - (pick1_temp.time -
                                        template_event.origins[0].time)

    # Create and save event for detection
    event = Event(picks=det_picks, origins=[origin_det])
    event.preferred_origin_id = event.origins[0].resource_id
    catalog.append(event)

catalog_dir = os.path.join(os.getcwd(), "families_events")
catalog_fname = "catalog_" + family_name.split(".")[0] + ".xml"
catalog_file = os.path.join(catalog_dir, catalog_fname)
Logger.info("Now writing catalogue to file %s" % catalog_file)
catalog.write(catalog_file, format="QUAKEML")
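
The origin-time transfer above relies on UTCDateTime arithmetic: subtracting two UTCDateTime objects yields seconds as a float, and adding a float to a UTCDateTime shifts it. A toy check with made-up times:

from obspy import UTCDateTime

t0 = UTCDateTime("2021-01-01T00:00:00")
t1 = UTCDateTime("2021-01-01T00:00:03.2")
delay = t1 - t0       # float: 3.2 seconds
shifted = t0 + delay  # UTCDateTime again
assert isinstance(delay, float) and shifted == t1
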
Example #20
0
def __read_single_fnetmt_entry(line, **kwargs):
    """
    Reads a single F-net moment tensor solution to a
    :class:`~obspy.core.event.Event` object.

    :param line: String containing moment tensor information.
    :type line: str.
    """

    a = line.split()
    try:
        ot = UTCDateTime().strptime(a[0], '%Y/%m/%d,%H:%M:%S.%f')
    except ValueError:
        ot = UTCDateTime().strptime(a[0], '%Y/%m/%d,%H:%M:%S')
    lat, lon, depjma, magjma = map(float, a[1:5])
    depjma *= 1000
    region = a[5]
    strike = tuple(map(int, a[6].split(';')))
    dip = tuple(map(int, a[7].split(';')))
    rake = tuple(map(int, a[8].split(';')))
    mo = float(a[9])
    depmt = float(a[10]) * 1000
    magmt = float(a[11])
    var_red = float(a[12])
    mxx, mxy, mxz, myy, myz, mzz, unit = map(float, a[13:20])

    event_name = util.gen_sc3_id(ot)
    e = Event(event_type="earthquake")
    e.resource_id = _get_resource_id(event_name, 'event')

    # Standard JMA solution
    o_jma = Origin(time=ot,
                   latitude=lat,
                   longitude=lon,
                   depth=depjma,
                   depth_type="from location",
                   region=region)
    o_jma.resource_id = _get_resource_id(event_name, 'origin', 'JMA')
    m_jma = Magnitude(mag=magjma,
                      magnitude_type='ML',
                      origin_id=o_jma.resource_id)
    m_jma.resource_id = _get_resource_id(event_name, 'magnitude', 'JMA')
    # MT solution
    o_mt = Origin(time=ot,
                  latitude=lat,
                  longitude=lon,
                  depth=depmt,
                  region=region,
                  depth_type="from moment tensor inversion")
    o_mt.resource_id = _get_resource_id(event_name, 'origin', 'MT')
    m_mt = Magnitude(mag=magmt,
                     magnitude_type='Mw',
                     origin_id=o_mt.resource_id)
    m_mt.resource_id = _get_resource_id(event_name, 'magnitude', 'MT')
    foc_mec = FocalMechanism(triggering_origin_id=o_jma.resource_id)
    foc_mec.resource_id = _get_resource_id(event_name, "focal_mechanism")
    nod1 = NodalPlane(strike=strike[0], dip=dip[0], rake=rake[0])
    nod2 = NodalPlane(strike=strike[1], dip=dip[1], rake=rake[1])
    nod = NodalPlanes(nodal_plane_1=nod1, nodal_plane_2=nod2)
    foc_mec.nodal_planes = nod

    tensor = Tensor(m_rr=mxx, m_tt=myy, m_pp=mzz, m_rt=mxy, m_rp=mxz, m_tp=myz)
    cm = Comment(text="Basis system: North,East,Down (Jost and \
    Herrmann 1989")
    cm.resource_id = _get_resource_id(event_name, 'comment', 'mt')
    mt = MomentTensor(derived_origin_id=o_mt.resource_id,
                      moment_magnitude_id=m_mt.resource_id,
                      scalar_moment=mo,
                      comments=[cm],
                      tensor=tensor,
                      variance_reduction=var_red)
    mt.resource_id = _get_resource_id(event_name, 'moment_tensor')
    foc_mec.moment_tensor = mt
    e.origins = [o_jma, o_mt]
    e.magnitudes = [m_jma, m_mt]
    e.focal_mechanisms = [foc_mec]
    e.preferred_magnitude_id = m_mt.resource_id.id
    e.preferred_origin_id = o_mt.resource_id.id
    e.preferred_focal_mechanism_id = foc_mec.resource_id.id
    return e
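
For reference, the semicolon-separated nodal-plane fields parsed above carry one value per plane; a standalone illustration with made-up angles:

strike = tuple(map(int, "214;121".split(';')))  # -> (214, 121)
dip = tuple(map(int, "80;60".split(';')))       # -> (80, 60)
assert strike == (214, 121) and dip == (80, 60)
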
Example #21
0
def outputOBSPY(hp, event=None, only_fm_picks=False):
    """
    Make an Event which includes the current focal mechanism information from HASH
    
    Use the 'only_fm_picks' flag to only include the picks HASH used for the FocalMechanism.
    This flag will replace the 'picks' and 'arrivals' lists of existing events with new ones.
    
    Inputs
    -------
    hp    : hashpy.HashPype instance
    
    event : obspy.core.event.Event
    
    only_fm_picks : bool of whether to overwrite the picks/arrivals lists
    
    
    Returns
    -------
    obspy.core.event.Event
    
    Event will be new if no event was input, FocalMech added to existing event
    """
    # Returns new (or updates existing) Event with HASH solution
    n = hp.npol
    if event is None:
        event = Event(focal_mechanisms=[], picks=[], origins=[])
        origin = Origin(arrivals=[])
        origin.time = UTCDateTime(hp.tstamp)
        origin.latitude = hp.qlat
        origin.longitude = hp.qlon
        origin.depth = hp.qdep
        origin.creation_info = CreationInfo(version=hp.icusp)
        origin.resource_id = ResourceIdentifier(
            'smi:hash/Origin/{0}'.format(hp.icusp))
        for _i in range(n):
            p = Pick()
            p.creation_info = CreationInfo(version=hp.arid[_i])
            p.resource_id = ResourceIdentifier(
                'smi:hash/Pick/{0}'.format(p.creation_info.version))
            p.waveform_id = WaveformStreamID(network_code=hp.snet[_i],
                                             station_code=hp.sname[_i],
                                             channel_code=hp.scomp[_i])
            if hp.p_pol[_i] > 0:
                p.polarity = 'positive'
            else:
                p.polarity = 'negative'
            a = Arrival()
            a.creation_info = CreationInfo(version=hp.arid[_i])
            a.resource_id = ResourceIdentifier(
                'smi:hash/Arrival/{0}'.format(p.creation_info.version))
            a.azimuth = hp.p_azi_mc[_i, 0]
            a.takeoff_angle = 180. - hp.p_the_mc[_i, 0]
            a.pick_id = p.resource_id
            origin.arrivals.append(a)
            event.picks.append(p)
        event.origins.append(origin)
        event.preferred_origin_id = str(origin.resource_id)
    else:  # just update the changes
        origin = event.preferred_origin()
        picks = []
        arrivals = []
        for _i in range(n):
            ind = hp.p_index[_i]
            a = origin.arrivals[ind]
            p = a.pick_id.get_referred_object()
            a.takeoff_angle = hp.p_the_mc[_i, 0]
            picks.append(p)
            arrivals.append(a)
        if only_fm_picks:
            origin.arrivals = arrivals
            event.picks = picks
    # Use the DoubleCouple calculator and populate planes/axes etc.
    x = hp._best_quality_index
    # Put all the mechanisms into the 'focal_mechanisms' list, mark "best" as preferred
    for s in range(hp.nmult):
        dc = DoubleCouple([hp.str_avg[s], hp.dip_avg[s], hp.rak_avg[s]])
        ax = dc.axis
        focal_mech = FocalMechanism()
        focal_mech.creation_info = CreationInfo(creation_time=UTCDateTime(), author=hp.author)
        focal_mech.triggering_origin_id = origin.resource_id
        focal_mech.resource_id = ResourceIdentifier('smi:hash/FocalMechanism/{0}/{1}'.format(hp.icusp, s+1))
        focal_mech.method_id = ResourceIdentifier('HASH')
        focal_mech.nodal_planes = NodalPlanes()
        focal_mech.nodal_planes.nodal_plane_1 = NodalPlane(*dc.plane1)
        focal_mech.nodal_planes.nodal_plane_2 = NodalPlane(*dc.plane2)
        focal_mech.principal_axes = PrincipalAxes()
        focal_mech.principal_axes.t_axis = Axis(azimuth=ax['T']['azimuth'], plunge=ax['T']['dip'])
        focal_mech.principal_axes.p_axis = Axis(azimuth=ax['P']['azimuth'], plunge=ax['P']['dip'])
        focal_mech.station_polarity_count = n
        focal_mech.azimuthal_gap = hp.magap
        focal_mech.misfit = hp.mfrac[s]
        focal_mech.station_distribution_ratio = hp.stdr[s]
        focal_mech.comments.append(
            Comment(hp.qual[s], resource_id=ResourceIdentifier(str(focal_mech.resource_id) + '/comment/quality'))
            )
        #----------------------------------------
        event.focal_mechanisms.append(focal_mech)
        if s == x:
            event.preferred_focal_mechanism_id = str(focal_mech.resource_id)
    return event
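
A hypothetical driver for outputOBSPY(), assuming hp is a hashpy.HashPype instance that has already been configured and run (the set-up steps and import path are assumptions, not shown in the example):

from hashpy import HashPype  # assumed import path

hp = HashPype()
# ... load polarity data into hp and run the HASH computation ...
ev = outputOBSPY(hp)  # new Event carrying the HASH focal mechanisms
print(ev.preferred_focal_mechanism_id)
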
Example #22
0
def __read_single_cmtsolution(buf):
    """
    Reads a single CMTSOLUTION file to a :class:`~obspy.core.event.Catalog`
    object.

    :param buf: File to read.
    :type buf: Open file or open file like object.
    """
    # The first line encodes the preliminary epicenter.
    line = buf.readline()

    hypocenter_catalog = line[:4].strip().decode()

    origin_time = line[4:].strip().split()[:6]
    values = list(map(int, origin_time[:-1])) + \
        [float(origin_time[-1])]
    try:
        origin_time = UTCDateTime(*values)
    except (TypeError, ValueError):
        warnings.warn("Could not determine origin time from line: %s. Will "
                      "be set to zero." % line)
        origin_time = UTCDateTime(0)
    line = line.split()[7:]
    latitude, longitude, depth, body_wave_mag, surface_wave_mag = \
        map(float, line[:5])

    # The rest encodes the centroid solution.
    event_name = buf.readline().strip().split()[-1].decode()

    preliminary_origin = Origin(
        resource_id=_get_resource_id(event_name, "origin", tag="prelim"),
        time=origin_time,
        longitude=longitude,
        latitude=latitude,
        # Depth is in meters.
        depth=depth * 1000.0,
        origin_type="hypocenter",
        region=_fe.get_region(longitude=longitude, latitude=latitude),
        evaluation_status="preliminary"
    )

    preliminary_bw_magnitude = Magnitude(
        resource_id=_get_resource_id(event_name, "magnitude", tag="prelim_bw"),
        mag=body_wave_mag, magnitude_type="Mb",
        evaluation_status="preliminary",
        origin_id=preliminary_origin.resource_id)

    preliminary_sw_magnitude = Magnitude(
        resource_id=_get_resource_id(event_name, "magnitude", tag="prelim_sw"),
        mag=surface_wave_mag, magnitude_type="MS",
        evaluation_status="preliminary",
        origin_id=preliminary_origin.resource_id)

    values = ["time_shift", "half_duration", "latitude", "longitude",
              "depth", "m_rr", "m_tt", "m_pp", "m_rt", "m_rp", "m_tp"]
    cmt_values = {_i: float(buf.readline().strip().split()[-1])
                  for _i in values}

    # Moment magnitude calculation in dyne * cm.
    m_0 = 1.0 / math.sqrt(2.0) * math.sqrt(
        cmt_values["m_rr"] ** 2 +
        cmt_values["m_tt"] ** 2 +
        cmt_values["m_pp"] ** 2 +
        2.0 * cmt_values["m_rt"] ** 2 +
        2.0 * cmt_values["m_rp"] ** 2 +
        2.0 * cmt_values["m_tp"] ** 2)
    m_w = 2.0 / 3.0 * (math.log10(m_0) - 16.1)

    # Convert to meters.
    cmt_values["depth"] *= 1000.0
    # Convert to Newton meter.
    values = ["m_rr", "m_tt", "m_pp", "m_rt", "m_rp", "m_tp"]
    for value in values:
        cmt_values[value] /= 1E7

    cmt_origin = Origin(
        resource_id=_get_resource_id(event_name, "origin", tag="cmt"),
        time=origin_time + cmt_values["time_shift"],
        longitude=cmt_values["longitude"],
        latitude=cmt_values["latitude"],
        depth=cmt_values["depth"],
        origin_type="centroid",
        # Could rarely be different than the epicentral region.
        region=_fe.get_region(longitude=cmt_values["longitude"],
                              latitude=cmt_values["latitude"])
        # No evaluation status as it could be any of several and the file
        # format does not provide that information.
    )

    cmt_mag = Magnitude(
        resource_id=_get_resource_id(event_name, "magnitude", tag="mw"),
        # Round to 2 digits.
        mag=round(m_w, 2),
        magnitude_type="mw",
        origin_id=cmt_origin.resource_id
    )

    foc_mec = FocalMechanism(
        resource_id=_get_resource_id(event_name, "focal_mechanism"),
        # The preliminary origin most likely triggered the focal mechanism
        # determination.
        triggering_origin_id=preliminary_origin.resource_id
    )

    tensor = Tensor(
        m_rr=cmt_values["m_rr"],
        m_pp=cmt_values["m_pp"],
        m_tt=cmt_values["m_tt"],
        m_rt=cmt_values["m_rt"],
        m_rp=cmt_values["m_rp"],
        m_tp=cmt_values["m_tp"]
    )

    # Source time function is a triangle, according to the SPECFEM manual.
    stf = SourceTimeFunction(
        type="triangle",
        # The duration is twice the half duration.
        duration=2.0 * cmt_values["half_duration"]
    )

    mt = MomentTensor(
        resource_id=_get_resource_id(event_name, "moment_tensor"),
        derived_origin_id=cmt_origin.resource_id,
        moment_magnitude_id=cmt_mag.resource_id,
        # Convert to Nm.
        scalar_moment=m_0 / 1E7,
        tensor=tensor,
        source_time_function=stf
    )

    # Assemble everything.
    foc_mec.moment_tensor = mt

    ev = Event(resource_id=_get_resource_id(event_name, "event"),
               event_type="earthquake")
    ev.event_descriptions.append(EventDescription(text=event_name,
                                                  type="earthquake name"))
    ev.comments.append(Comment(
        text="Hypocenter catalog: %s" % hypocenter_catalog,
        force_resource_id=False))

    ev.origins.append(cmt_origin)
    ev.origins.append(preliminary_origin)
    ev.magnitudes.append(cmt_mag)
    ev.magnitudes.append(preliminary_bw_magnitude)
    ev.magnitudes.append(preliminary_sw_magnitude)
    ev.focal_mechanisms.append(foc_mec)

    # Set the preferred items.
    ev.preferred_origin_id = cmt_origin.resource_id.id
    ev.preferred_magnitude_id = cmt_mag.resource_id.id
    ev.preferred_focal_mechanism_id = foc_mec.resource_id.id

    return ev
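
The magnitude computed above is the Hanks & Kanamori moment magnitude with the scalar moment in dyne*cm; in SI units the constant shifts by log10(1e7) = 7. A quick standalone check:

import math

def moment_magnitude_dyne_cm(m_0):
    return 2.0 / 3.0 * (math.log10(m_0) - 16.1)

def moment_magnitude_nm(m_0):
    return 2.0 / 3.0 * (math.log10(m_0) - 9.1)

# 1e24 dyne*cm == 1e17 N*m, so both forms must agree.
assert abs(moment_magnitude_dyne_cm(1e24) - moment_magnitude_nm(1e17)) < 1e-12
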
Example #23
0
def readheader(sfile):
    """
    Read header information from a seisan nordic format S-file.
    Returns an obspy.core.event.Event type: note this changed for version \
    0.1.0 from the inbuilt class types.

    :type sfile: str
    :param sfile: Path to the s-file

    :returns: :class: obspy.core.event.Event

    >>> event = readheader('eqcorrscan/tests/test_data/REA/TEST_/' +
    ...                    '01-0411-15L.S201309')
    >>> print(event.origins[0].time)
    2013-09-01T04:11:15.700000Z
    """
    import warnings
    from obspy.core.event import Event, Origin, Magnitude, Comment
    from obspy.core.event import EventDescription, CreationInfo
    f = open(sfile, 'r')
    # Base populate to allow for empty parts of file
    new_event = Event()
    topline = f.readline()
    if not len(topline.rstrip()) == 80:
        raise IOError('s-file has a corrupt header, not 80 char long')
    f.seek(0)
    for line in f:
        if line[79] in [' ', '1']:
            topline = line
            break
        if line[79] == '7':
            raise IOError('No header found, corrupt s-file?')
    try:
        sfile_seconds = int(topline[16:18])
        if sfile_seconds == 60:
            sfile_seconds = 0
            add_seconds = 60
        else:
            add_seconds = 0
        new_event.origins.append(Origin())
        new_event.origins[0].time = UTCDateTime(int(topline[1:5]),
                                                int(topline[6:8]),
                                                int(topline[8:10]),
                                                int(topline[11:13]),
                                                int(topline[13:15]),
                                                sfile_seconds,
                                                int(topline[19:20]) *
                                                100000)\
            + add_seconds
    except Exception:
        warnings.warn("Couldn't read a date from sfile: " + sfile)
        new_event.origins.append(Origin(time=UTCDateTime(0)))
    # new_event.loc_mod_ind=topline[20]
    new_event.event_descriptions.append(EventDescription())
    new_event.event_descriptions[0].text = topline[21:23]
    # new_event.ev_id=topline[22]
    if not _float_conv(topline[23:30]) == 999:
        new_event.origins[0].latitude = _float_conv(topline[23:30])
        new_event.origins[0].longitude = _float_conv(topline[31:38])
        new_event.origins[0].depth = _float_conv(topline[39:43]) * 1000
    else:
        # The origin 'requires' a lat & long
        new_event.origins[0].latitude = float('NaN')
        new_event.origins[0].longitude = float('NaN')
        new_event.origins[0].depth = float('NaN')
    # new_event.depth_ind = topline[44]
    # new_event.loc_ind = topline[45]
    new_event.creation_info = CreationInfo(agency_id=topline[45:48].
                                           strip())
    ksta = Comment(text='Number of stations=' +
                   topline[49:51].strip())
    new_event.origins[0].comments.append(ksta)
    # new_event.origins[0].nsta??? = _int_conv(topline[49:51])
    if not _float_conv(topline[51:55]) == 999:
        new_event.origins[0].time_errors['Time_Residual_RMS'] = \
            _float_conv(topline[51:55])
    # Read in magnitudes if they are there.
    if len(topline[59].strip()) > 0:
        new_event.magnitudes.append(Magnitude())
        new_event.magnitudes[0].mag = _float_conv(topline[56:59])
        new_event.magnitudes[0].magnitude_type = topline[59]
        new_event.magnitudes[0].creation_info = \
            CreationInfo(agency_id=topline[60:63].strip())
        new_event.magnitudes[0].origin_id = new_event.origins[0].\
            resource_id
    if len(topline[67].strip()) > 0:
        new_event.magnitudes.append(Magnitude())
        new_event.magnitudes[1].mag = _float_conv(topline[64:67])
        new_event.magnitudes[1].magnitude_type = topline[67]
        new_event.magnitudes[1].creation_info = \
            CreationInfo(agency_id=topline[68:71].strip())
        new_event.magnitudes[1].origin_id = new_event.origins[0].\
            resource_id
    if len(topline[75].strip()) > 0:
        new_event.magnitudes.append(Magnitude())
        new_event.magnitudes[2].mag = _float_conv(topline[72:75])
        new_event.magnitudes[2].magnitude_type = topline[75]
        new_event.magnitudes[2].creation_info = \
            CreationInfo(agency_id=topline[76:79].strip())
        new_event.magnitudes[2].origin_id = new_event.origins[0].\
            resource_id
    f.close()
    # convert the nordic notation of magnitude to more general notation
    for _magnitude in new_event.magnitudes:
        _magnitude.magnitude_type = _nortoevmag(_magnitude.magnitude_type)
    # Set the useful things like preferred magnitude and preferred origin
    new_event.preferred_origin_id = str(new_event.origins[0].resource_id)
    if len(new_event.magnitudes) > 1:
        try:
            # Select moment first, then local, then body, surface and coda
            mag_filter = ['MW', 'Mw', 'ML', 'Ml', 'MB', 'Mb',
                          'MS', 'Ms', 'Mc', 'MC']
            _magnitudes = [(m.magnitude_type, m.resource_id)
                           for m in new_event.magnitudes]
            preferred_magnitude = sorted(_magnitudes,
                                         key=lambda x: mag_filter.index(x[0]))
            new_event.preferred_magnitude_id = str(preferred_magnitude[0][1])
        except ValueError:
            # If there is a magnitude not specified in filter
            new_event.preferred_magnitude_id =\
                str(new_event.magnitudes[0].resource_id)
    elif len(new_event.magnitudes) == 1:
        new_event.preferred_magnitude_id =\
            str(new_event.magnitudes[0].resource_id)
    return new_event
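
A note on the magnitude-preference logic above: list.index raises
ValueError for any magnitude type missing from mag_filter, which is what
the try/except guards against. A minimal, self-contained sketch of the
same idea (the magnitude tuples are hypothetical):

mag_filter = ['MW', 'Mw', 'ML', 'Ml', 'MB', 'Mb', 'MS', 'Ms', 'Mc', 'MC']
magnitudes = [('ML', 'id_local'), ('MW', 'id_moment'), ('MB', 'id_body')]
try:
    # Lower index in mag_filter means more preferred.
    preferred = sorted(magnitudes, key=lambda x: mag_filter.index(x[0]))[0]
except ValueError:
    # A type missing from mag_filter: fall back to the first magnitude.
    preferred = magnitudes[0]
print(preferred)  # ('MW', 'id_moment')
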
Example #24
0
def _internal_read_single_cmtsolution(buf):
    """
    Reads a single CMTSOLUTION file to a :class:`~obspy.core.event.Event`
    object.

    :param buf: File to read.
    :type buf: open file or file-like object
    """
    # The first line encodes the preliminary epicenter.
    line = buf.readline()

    hypocenter_catalog = line[:5].strip().decode()

    origin_time = line[5:].strip().split()[:6]
    values = list(map(int, origin_time[:-1])) + \
        [float(origin_time[-1])]
    try:
        origin_time = UTCDateTime(*values)
    except (TypeError, ValueError):
        warnings.warn("Could not determine origin time from line: %s. Will "
                      "be set to zero." % line)
        origin_time = UTCDateTime(0)
    line = line[28:].split()
    latitude, longitude, depth, body_wave_mag, surface_wave_mag = \
        map(float, line[:5])

    # The rest encodes the centroid solution.
    event_name = buf.readline().strip().split()[-1].decode()

    preliminary_origin = Origin(
        resource_id=_get_resource_id(event_name, "origin", tag="prelim"),
        time=origin_time,
        longitude=longitude,
        latitude=latitude,
        # Convert depth to meters (the file gives km).
        depth=depth * 1000.0,
        origin_type="hypocenter",
        region=_fe.get_region(longitude=longitude, latitude=latitude),
        evaluation_status="preliminary")

    preliminary_bw_magnitude = Magnitude(
        resource_id=_get_resource_id(event_name, "magnitude", tag="prelim_bw"),
        mag=body_wave_mag,
        magnitude_type="Mb",
        evaluation_status="preliminary",
        origin_id=preliminary_origin.resource_id)

    preliminary_sw_magnitude = Magnitude(
        resource_id=_get_resource_id(event_name, "magnitude", tag="prelim_sw"),
        mag=surface_wave_mag,
        magnitude_type="MS",
        evaluation_status="preliminary",
        origin_id=preliminary_origin.resource_id)

    values = [
        "time_shift", "half_duration", "latitude", "longitude", "depth",
        "m_rr", "m_tt", "m_pp", "m_rt", "m_rp", "m_tp"
    ]
    cmt_values = {
        _i: float(buf.readline().strip().split()[-1])
        for _i in values
    }

    # Scalar moment in dyne * cm, computed from the full moment tensor.
    m_0 = 1.0 / math.sqrt(2.0) * math.sqrt(
        cmt_values["m_rr"]**2 + cmt_values["m_tt"]**2 + cmt_values["m_pp"]**2 +
        2.0 * cmt_values["m_rt"]**2 + 2.0 * cmt_values["m_rp"]**2 +
        2.0 * cmt_values["m_tp"]**2)
    m_w = 2.0 / 3.0 * (math.log10(m_0) - 16.1)

    # Convert to meters.
    cmt_values["depth"] *= 1000.0
    # Convert to Newton meter.
    values = ["m_rr", "m_tt", "m_pp", "m_rt", "m_rp", "m_tp"]
    for value in values:
        cmt_values[value] /= 1E7

    cmt_origin = Origin(
        resource_id=_get_resource_id(event_name, "origin", tag="cmt"),
        time=origin_time + cmt_values["time_shift"],
        longitude=cmt_values["longitude"],
        latitude=cmt_values["latitude"],
        depth=cmt_values["depth"],
        origin_type="centroid",
        # Could rarely be different than the epicentral region.
        region=_fe.get_region(longitude=cmt_values["longitude"],
                              latitude=cmt_values["latitude"])
        # No evaluation status as it could be any of several and the file
        # format does not provide that information.
    )

    cmt_mag = Magnitude(
        resource_id=_get_resource_id(event_name, "magnitude", tag="mw"),
        # Round to 2 digits.
        mag=round(m_w, 2),
        magnitude_type="mw",
        origin_id=cmt_origin.resource_id)

    foc_mec = FocalMechanism(
        resource_id=_get_resource_id(event_name, "focal_mechanism"),
        # The preliminary origin most likely triggered the focal mechanism
        # determination.
        triggering_origin_id=preliminary_origin.resource_id)

    tensor = Tensor(m_rr=cmt_values["m_rr"],
                    m_pp=cmt_values["m_pp"],
                    m_tt=cmt_values["m_tt"],
                    m_rt=cmt_values["m_rt"],
                    m_rp=cmt_values["m_rp"],
                    m_tp=cmt_values["m_tp"])

    # Source time function is a triangle, according to the SPECFEM manual.
    stf = SourceTimeFunction(
        type="triangle",
        # The duration is twice the half duration.
        duration=2.0 * cmt_values["half_duration"])

    mt = MomentTensor(
        resource_id=_get_resource_id(event_name, "moment_tensor"),
        derived_origin_id=cmt_origin.resource_id,
        moment_magnitude_id=cmt_mag.resource_id,
        # Convert to Nm.
        scalar_moment=m_0 / 1E7,
        tensor=tensor,
        source_time_function=stf)

    # Assemble everything.
    foc_mec.moment_tensor = mt

    ev = Event(resource_id=_get_resource_id(event_name, "event"),
               event_type="earthquake")
    ev.event_descriptions.append(
        EventDescription(text=event_name, type="earthquake name"))
    ev.comments.append(
        Comment(text="Hypocenter catalog: %s" % hypocenter_catalog,
                force_resource_id=False))

    ev.origins.append(cmt_origin)
    ev.origins.append(preliminary_origin)
    ev.magnitudes.append(cmt_mag)
    ev.magnitudes.append(preliminary_bw_magnitude)
    ev.magnitudes.append(preliminary_sw_magnitude)
    ev.focal_mechanisms.append(foc_mec)

    # Set the preferred items.
    ev.preferred_origin_id = cmt_origin.resource_id.id
    ev.preferred_magnitude_id = cmt_mag.resource_id.id
    ev.preferred_focal_mechanism_id = foc_mec.resource_id.id

    ev.scope_resource_ids()

    return ev
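
The scalar-moment arithmetic above is self-contained; a minimal sketch
extracting it into a reusable helper (the name and argument order are
illustrative, not part of the reader):

import math

def moment_magnitude(m_rr, m_tt, m_pp, m_rt, m_rp, m_tp):
    # Scalar moment from the full tensor (inputs in dyne * cm),
    # then the same log relation used in the reader above.
    m_0 = 1.0 / math.sqrt(2.0) * math.sqrt(
        m_rr ** 2 + m_tt ** 2 + m_pp ** 2 +
        2.0 * (m_rt ** 2 + m_rp ** 2 + m_tp ** 2))
    return 2.0 / 3.0 * (math.log10(m_0) - 16.1)
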
Example #25
0
File: core.py Project: mbyt/obspy
def _readheader(f):
    """
    Internal header reader.
    :type f: file
    :param f: File open in read-mode.

    :returns: :class:`~obspy.core.event.event.Event`
    """
    f.seek(0)
    # Base populate to allow for empty parts of file
    new_event = Event()
    topline = _get_headline(f=f)
    if not topline:
        raise NordicParsingError('No header found, or incorrect '
                                 'formatting: corrupt s-file')
    try:
        sfile_seconds = int(topline[16:18])
        if sfile_seconds == 60:
            sfile_seconds = 0
            add_seconds = 60
        else:
            add_seconds = 0
        new_event.origins.append(Origin())
        new_event.origins[0].time = UTCDateTime(int(topline[1:5]),
                                                int(topline[6:8]),
                                                int(topline[8:10]),
                                                int(topline[11:13]),
                                                int(topline[13:15]),
                                                sfile_seconds,
                                                int(topline[19:20]) *
                                                100000)\
            + add_seconds
    except Exception:
        raise NordicParsingError("Couldn't read a date from sfile")
    # new_event.loc_mod_ind=topline[20]
    new_event.event_descriptions.append(EventDescription())
    new_event.event_descriptions[0].text = topline[21:23]
    # new_event.ev_id=topline[22]
    try:
        new_event.origins[0].latitude = float(topline[23:30])
        new_event.origins[0].longitude = float(topline[31:38])
        new_event.origins[0].depth = float(topline[39:43]) * 1000
    except ValueError:
        # The origin 'requires' a lat & long
        new_event.origins[0].latitude = None
        new_event.origins[0].longitude = None
        new_event.origins[0].depth = None
    # new_event.depth_ind = topline[44]
    # new_event.loc_ind = topline[45]
    new_event.creation_info = CreationInfo(agency_id=topline[45:48].strip())
    ksta = Comment(text='Number of stations=' + topline[49:51].strip())
    new_event.origins[0].comments.append(ksta)
    if _float_conv(topline[51:55]) is not None:
        new_event.origins[0].time_errors['Time_Residual_RMS'] = \
            _float_conv(topline[51:55])
    # Read in magnitudes if they are there.
    for index in [59, 67, 75]:
        if not topline[index].isspace():
            new_event.magnitudes.append(Magnitude())
            new_event.magnitudes[-1].mag = _float_conv(
                topline[index - 3:index])
            new_event.magnitudes[-1].magnitude_type = \
                _nortoevmag(topline[index])
            new_event.magnitudes[-1].creation_info = \
                CreationInfo(agency_id=topline[index + 1:index + 4].strip())
            new_event.magnitudes[-1].origin_id = new_event.origins[0].\
                resource_id
    # Set the useful things like preferred magnitude and preferred origin
    new_event.preferred_origin_id = new_event.origins[0].resource_id
    try:
        # Select moment first, then local, then body, surface and coda
        mag_filter = ['MW', 'Mw', 'ML', 'Ml', 'MB', 'Mb',
                      'MS', 'Ms', 'MC', 'Mc']
        _magnitudes = [(m.magnitude_type, m.resource_id)
                       for m in new_event.magnitudes]
        preferred_magnitude = sorted(_magnitudes,
                                     key=lambda x: mag_filter.index(x[0]))[0]
        new_event.preferred_magnitude_id = preferred_magnitude[1]
    except (ValueError, IndexError):
        # If there is a magnitude not specified in filter
        try:
            new_event.preferred_magnitude_id = new_event.magnitudes[0].\
                resource_id
        except IndexError:
            pass
    return new_event
Example #26
0
def sdxtoquakeml(sdx_dir,
                 out_xml,
                 time_uncertainties=[0.1, 0.2, 0.5, 0.8, 1.5],
                 catalog_description="",
                 catalog_version="",
                 agency_id="",
                 author="",
                 vel_mod_id=""):
    """
    Convert SDX to QuakeML format using ObsPy inventory structure.
    SDX filename prefix is stored under event description.
    Input parameters:
        - sdx_dir: directory containing sdx files (required)
        - out_xml: Filename of quakeML file (required)
        - time_uncertainties: List containing time uncertainities in seconds
          for mapping from weights 0-4, respectively (optional)
        - catalog_description (optional)
        - cat_agency_id (optional)
        - author (optional)
        - vel_mod_id (optional)
    Output:
        - xml catalog in QuakeML format.
    """

    # Prepare catalog
    cat = Catalog(description=catalog_description,
                  creation_info=CreationInfo(author=author,
                                             agency_id=agency_id,
                                             version=catalog_version))

    # Read in sdx files in directory, recursively
    files = glob.glob("{:}/**/*.sdx".format(sdx_dir), recursive=True)
    if len(files) == 0:
        print("No SDX files found in path. Exiting")
        return
    for sdx_file_path in files:
        print("Working on ", sdx_file_path.split('/')[-1])

        # Set-up event
        evt_id = (sdx_file_path.split('/')[-1])[:-4]
        event = Event(event_type="earthquake",
                      creation_info=CreationInfo(author=author,
                                                 agency_id=agency_id),
                      event_descriptions=[EventDescription(text=evt_id)])

        # Get station details, append to arrays
        sdx_file = open(sdx_file_path, "r")
        stations = []
        for line in sdx_file:
            if line.rstrip() == "station":
                sdxstation = list(islice(sdx_file, 5))
                stations.append([
                    sdxstation[1].split()[0],
                    float(sdxstation[2].split()[0]),
                    float(sdxstation[3].split()[0]),
                    float(sdxstation[4].split()[0])
                ])
        sdx_file.close()

        # Find origin details, append to origin object
        sdx_file = open(sdx_file_path, "r")
        found_origin = False
        for line in sdx_file:
            if line.rstrip() == "origin":
                found_origin = True
                sdxorigin = list(islice(sdx_file, 17))
                orig_time = ("{:}T{:}".format(
                    sdxorigin[1][0:10].replace(".", "-"), sdxorigin[1][11:23]))
                evt_lat = float(sdxorigin[2].split()[0])
                evt_lon = float(sdxorigin[3].split()[0])
                evt_depth = float(sdxorigin[4].split()[0])
                creation_time = UTCDateTime("{:}T{:}".format(
                    sdxorigin[16].split()[6][0:10].replace(".", "-"),
                    sdxorigin[16].split()[6][11:23]))
                num_arrivals = int(sdxorigin[12].split()[0])
                num_arrivals_p = (int(sdxorigin[12].split()[0]) -
                                  int(sdxorigin[12].split()[1]))
                min_dist = float(sdxorigin[12].split()[9])
                max_dist = float(sdxorigin[12].split()[10])
                med_dist = float(sdxorigin[12].split()[11])
                max_az_gap = float(sdxorigin[12].split()[6])

                origin = Origin(time=UTCDateTime(orig_time),
                                longitude=evt_lon,
                                latitude=evt_lat,
                                depth=evt_depth * -1000,
                                earth_model_id=vel_mod_id,
                                origin_type="hypocenter",
                                evaluation_mode="manual",
                                evaluation_status="confirmed",
                                method_id=ResourceIdentifier(id="SDX_hypo71"),
                                creation_info=CreationInfo(
                                    creation_time=creation_time,
                                    author=author,
                                    agency_id=agency_id),
                                quality=OriginQuality(
                                    associated_phase_count=num_arrivals,
                                    used_phase_count=num_arrivals,
                                    associated_station_count=num_arrivals_p,
                                    used_station_count=num_arrivals_p,
                                    azimuthal_gap=max_az_gap,
                                    minimum_distance=min_dist,
                                    maximum_distance=max_dist,
                                    median_distance=med_dist))
                event.origins.append(origin)

        sdx_file.close()

        # Skip event if no computed origin
        if found_origin is False:
            print("No origin found ... skipping event")
            continue

        # Get pick details, append to pick and arrival objects
        sdx_file = open(sdx_file_path, "r")
        found_pick = False
        for line in sdx_file:
            if line.rstrip() == "pick":
                found_pick = True
                sdxpick = list(islice(sdx_file, 15))
                pick_time = UTCDateTime("{:}T{:}".format(
                    sdxpick[1][0:10].replace(".", "-"), sdxpick[1][11:23]))
                network = sdxpick[2].split()[0]
                station = sdxpick[2].split()[1]
                location = sdxpick[2].split()[2]
                if "NOT_SET" in location:
                    location = ""
                channel = sdxpick[2].split()[3]
                onset = sdxpick[8].split()[0]
                pickonset = None
                if onset == "0":
                    pickonset = "emergent"
                elif onset == "1":
                    pickonset = "impulsive"
                elif onset == "2":
                    pickonset = "questionable"
                phase = sdxpick[9].split()[0]
                polarity = sdxpick[10].split()[0]
                pol = None
                if polarity == "0":
                    pol = "positive"
                elif polarity == "1":
                    pol = "negative"
                elif polarity == "2":
                    pol = "undecidable"
                weight = int(sdxpick[11].split()[0])
                creation_time = UTCDateTime("{:}T{:}".format(
                    sdxpick[14].split()[6][0:10].replace(".", "-"),
                    sdxpick[14].split()[6][11:23]))
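                # Map the 0-4 SDX pick weight onto the user-supplied
                # time_uncertainties list (seconds).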
                pick = Pick(
                    time=pick_time,
                    waveform_id=WaveformStreamID(network_code=network,
                                                 station_code=station,
                                                 location_code=location,
                                                 channel_code=channel),
                    time_errors=time_uncertainties[weight],
                    evaluation_mode="manual",
                    evaluation_status="confirmed",
                    onset=pickonset,
                    phase_hint=phase,
                    polarity=pol,
                    method_id=ResourceIdentifier(id="SDX"),
                    creation_info=CreationInfo(creation_time=creation_time))
                event.picks.append(pick)

                # Compute azimuth, distance, append to arrival object
                for i in range(0, len(stations)):
                    if stations[i][0] == station:
                        azimuth = (gps2dist_azimuth(evt_lat, evt_lon,
                                                    stations[i][1],
                                                    stations[i][2])[1])
                        dist_deg = locations2degrees(evt_lat, evt_lon,
                                                     stations[i][1],
                                                     stations[i][2])
                        arrival = Arrival(phase=phase,
                                          pick_id=pick.resource_id,
                                          azimuth=azimuth,
                                          distance=dist_deg,
                                          time_weight=1.00)
                        event.origins[0].arrivals.append(arrival)

        # Skip event if no picks
        if found_pick is False:
            print("No picks found ... skipping event")
            continue

        # Set preferred origin and append event to catalogue
        event.preferred_origin_id = event.origins[0].resource_id
        cat.events.append(event)

        sdx_file.close()

    cat.write(out_xml, format="QUAKEML")
Example #27
0
def _read_single_hypocenter(lines, coordinate_converter, original_picks):
    """
    Given a list of lines (starting with a 'NLLOC' line and ending with a
    'END_NLLOC' line), parse them into an Event.
    """
    try:
        # some paranoid checks..
        assert lines[0].startswith("NLLOC ")
        assert lines[-1].startswith("END_NLLOC")
        for line in lines[1:-1]:
            assert not line.startswith("NLLOC ")
            assert not line.startswith("END_NLLOC")
    except Exception:
        msg = ("This should not have happened, please report this as a bug at "
               "https://github.com/obspy/obspy/issues.")
        raise Exception(msg)

    indices_phases = [None, None]
    for i, line in enumerate(lines):
        if line.startswith("PHASE "):
            indices_phases[0] = i
        elif line.startswith("END_PHASE"):
            indices_phases[1] = i

    # extract PHASES lines (if any)
    if any(indices_phases):
        if not all(indices_phases):
            msg = ("NLLOC HYP file seems corrupt, 'PHASE' block is corrupt.")
            raise RuntimeError(msg)
        i1, i2 = indices_phases
        lines, phases_lines = lines[:i1] + lines[i2 + 1:], lines[i1 + 1:i2]
    else:
        phases_lines = []

    lines = dict([line.split(None, 1) for line in lines[:-1]])
    line = lines["SIGNATURE"]

    line = line.rstrip().split('"')[1]
    signature, version, date, time = line.rsplit(" ", 3)
    # new NLLoc > 6.0 seems to add prefix 'run:' before date
    if date.startswith('run:'):
        date = date[4:]
    signature = signature.strip()
    creation_time = UTCDateTime.strptime(date + time, str("%d%b%Y%Hh%Mm%S"))

    if coordinate_converter:
        # maximum likelihood origin location in km info line
        line = lines["HYPOCENTER"]
        x, y, z = coordinate_converter(*map(float, line.split()[1:7:2]))
    else:
        # maximum likelihood origin location lon lat info line
        line = lines["GEOGRAPHIC"]
        y, x, z = map(float, line.split()[8:13:2])

    # maximum likelihood origin time info line
    line = lines["GEOGRAPHIC"]

    year, mon, day, hour, min = map(int, line.split()[1:6])
    seconds = float(line.split()[6])
    time = UTCDateTime(year, mon, day, hour, min, seconds, strict=False)

    # distribution statistics line
    line = lines["STATISTICS"]
    covariance_xx = float(line.split()[7])
    covariance_yy = float(line.split()[13])
    covariance_zz = float(line.split()[17])
    stats_info_string = str(
        "Note: Depth/Latitude/Longitude errors are calculated from covariance "
        "matrix as 1D marginal (Lon/Lat errors as great circle degrees) "
        "while OriginUncertainty min/max horizontal errors are calculated "
        "from 2D error ellipsoid and are therefore seemingly higher compared "
        "to 1D errors. Error estimates can be reconstructed from the "
        "following original NonLinLoc error statistics line:\nSTATISTICS " +
        lines["STATISTICS"])

    # goto location quality info line
    line = lines["QML_OriginQuality"].split()

    (assoc_phase_count, used_phase_count, assoc_station_count,
     used_station_count, depth_phase_count) = map(int, line[1:11:2])
    stderr, az_gap, sec_az_gap = map(float, line[11:17:2])
    gt_level = line[17]
    min_dist, max_dist, med_dist = map(float, line[19:25:2])

    # goto location quality info line
    line = lines["QML_OriginUncertainty"]

    if "COMMENT" in lines:
        comment = lines["COMMENT"].strip()
        comment = comment.strip('\'"')
        comment = comment.strip()

    hor_unc, min_hor_unc, max_hor_unc, hor_unc_azim = \
        map(float, line.split()[1:9:2])

    # assign origin info
    event = Event()
    o = Origin()
    event.origins = [o]
    event.preferred_origin_id = o.resource_id
    o.origin_uncertainty = OriginUncertainty()
    o.quality = OriginQuality()
    ou = o.origin_uncertainty
    oq = o.quality
    o.comments.append(Comment(text=stats_info_string, force_resource_id=False))
    event.comments.append(Comment(text=comment, force_resource_id=False))

    # SIGNATURE field's first item is LOCSIG, which is supposed to be
    # 'Identification of an individual, institution or other entity'
    # according to
    # http://alomax.free.fr/nlloc/soft6.00/control.html#_NLLoc_locsig_
    # so use it as author in creation info
    event.creation_info = CreationInfo(creation_time=creation_time,
                                       version=version,
                                       author=signature)
    o.creation_info = CreationInfo(creation_time=creation_time,
                                   version=version,
                                   author=signature)

    # negative values can appear on diagonal of covariance matrix due to a
    # precision problem in NLLoc implementation when location coordinates are
    # large compared to the covariances.
    o.longitude = x
    try:
        o.longitude_errors.uncertainty = kilometer2degrees(sqrt(covariance_xx))
    except ValueError:
        if covariance_xx < 0:
            msg = ("Negative value in XX value of covariance matrix, not "
                   "setting longitude error (epicentral uncertainties will "
                   "still be set in origin uncertainty).")
            warnings.warn(msg)
        else:
            raise
    o.latitude = y
    try:
        o.latitude_errors.uncertainty = kilometer2degrees(sqrt(covariance_yy))
    except ValueError:
        if covariance_yy < 0:
            msg = ("Negative value in YY value of covariance matrix, not "
                   "setting longitude error (epicentral uncertainties will "
                   "still be set in origin uncertainty).")
            warnings.warn(msg)
        else:
            raise
    o.depth = z * 1e3  # meters!
    o.depth_errors.uncertainty = sqrt(covariance_zz) * 1e3  # meters!
    o.depth_errors.confidence_level = 68
    o.depth_type = str("from location")
    o.time = time

    ou.horizontal_uncertainty = hor_unc
    ou.min_horizontal_uncertainty = min_hor_unc
    ou.max_horizontal_uncertainty = max_hor_unc
    # values of -1 seem to be used for unset values, set to None
    for field in ("horizontal_uncertainty", "min_horizontal_uncertainty",
                  "max_horizontal_uncertainty"):
        if ou.get(field, -1) == -1:
            ou[field] = None
        else:
            ou[field] *= 1e3  # meters!
    ou.azimuth_max_horizontal_uncertainty = hor_unc_azim
    ou.preferred_description = str("uncertainty ellipse")
    ou.confidence_level = 68  # NonLinLoc in general uses 1-sigma (68%) level

    oq.standard_error = stderr
    oq.azimuthal_gap = az_gap
    oq.secondary_azimuthal_gap = sec_az_gap
    oq.used_phase_count = used_phase_count
    oq.used_station_count = used_station_count
    oq.associated_phase_count = assoc_phase_count
    oq.associated_station_count = assoc_station_count
    oq.depth_phase_count = depth_phase_count
    oq.ground_truth_level = gt_level
    oq.minimum_distance = kilometer2degrees(min_dist)
    oq.maximum_distance = kilometer2degrees(max_dist)
    oq.median_distance = kilometer2degrees(med_dist)

    # go through all phase info lines
    for line in phases_lines:
        line = line.split()
        arrival = Arrival()
        o.arrivals.append(arrival)
        station = str(line[0])
        phase = str(line[4])
        arrival.phase = phase
        arrival.distance = kilometer2degrees(float(line[21]))
        arrival.azimuth = float(line[23])
        arrival.takeoff_angle = float(line[24])
        arrival.time_residual = float(line[16])
        arrival.time_weight = float(line[17])
        pick = Pick()
        # network codes are not used by NonLinLoc, so they cannot be known
        # when reading the .hyp file. To conform with the QuakeML standard,
        # set an empty network code.
        wid = WaveformStreamID(network_code="", station_code=station)
        # have to split this into ints for overflow to work correctly
        date, hourmin, sec = map(str, line[6:9])
        ymd = [int(date[:4]), int(date[4:6]), int(date[6:8])]
        hm = [int(hourmin[:2]), int(hourmin[2:4])]
        t = UTCDateTime(*(ymd + hm), strict=False) + float(sec)
        pick.waveform_id = wid
        pick.time = t
        pick.time_errors.uncertainty = float(line[10])
        pick.phase_hint = phase
        pick.onset = ONSETS.get(line[3].lower(), None)
        pick.polarity = POLARITIES.get(line[5].lower(), None)
        # try to determine original pick for each arrival
        for pick_ in original_picks:
            wid = pick_.waveform_id
            if station == wid.station_code and phase == pick_.phase_hint:
                pick = pick_
                break
        else:
            # warn if original picks were specified and we could not associate
            # the arrival correctly
            if original_picks:
                msg = ("Could not determine corresponding original pick for "
                       "arrival. "
                       "Falling back to pick information in NonLinLoc "
                       "hypocenter-phase file.")
                warnings.warn(msg)
        event.picks.append(pick)
        arrival.pick_id = pick.resource_id

    event.scope_resource_ids()

    return event
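
The covariance handling above boils down to one conversion per axis; a
small sketch under the same assumptions (negative diagonal values treated
as unset, ObsPy's kilometer2degrees assumed available):

import math
from obspy.geodetics import kilometer2degrees

def cov_to_uncertainty_deg(cov_km2):
    # 1-sigma marginal error in great-circle degrees from a covariance
    # diagonal term in km^2; None mirrors the reader's skip on negatives.
    if cov_km2 < 0:
        return None
    return kilometer2degrees(math.sqrt(cov_km2))
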
Example #28
0
def setEventData(eventParser, arrivals, count):
    global originCount
    global eventCount
    global pickCount
    creation_info = CreationInfo(
        author='niket_engdahl_parser',
        creation_time=UTCDateTime(),
        agency_uri=ResourceIdentifier(id='smi:engdahl.ga.gov.au/ga-engdahl'),
        agency_id='ga-engdahl')

    #   magnitudeSurface = Magnitude(resource_id=ResourceIdentifier(id='smi:engdahl.ga.gov.au/origin/'+str(originCount)+'#netMag.Ms'),
    #                         mag=eventParser.ms,
    #                         magnitude_type='Ms',
    #                         origin_id=ResourceIdentifier(id='smi:engdahl.ga.gov.au/origin/'+str(originCount)),
    #                         azimuthal_gap=eventParser.openaz2,
    #                         creation_info=creation_info)
    origin = Origin(
        resource_id=ResourceIdentifier(id='smi:engdahl.ga.gov.au/origin/' +
                                       str(originCount)),
        time=UTCDateTime(int(str(2000 + int(eventParser.iyr))),
                         int(eventParser.mon), int(eventParser.iday),
                         int(eventParser.ihr), int(eventParser.min),
                         int(eventParser.sec.split('.')[0]),
                         int(eventParser.sec.split('.')[1] + '0')),
        longitude=eventParser.glon,
        latitude=eventParser.glat,
        depth=float(eventParser.depth) *
        1000,  # engdahl files report kms, obspy expects m
        depth_errors=eventParser.sedep,
        method_id=ResourceIdentifier(id='EHB'),
        earth_model_id=ResourceIdentifier(id='ak135'),
        quality=OriginQuality(associated_phase_count=len(arrivals),
                              used_phase_count=len(arrivals),
                              standard_error=eventParser.se,
                              azimuthal_gap=eventParser.openaz2),
        evaluation_mode='automatic',
        creation_info=creation_info)

    magnitude = Magnitude(
        resource_id=ResourceIdentifier(id='smi:engdahl.ga.gov.au/origin/' +
                                       str(originCount) + '#netMag.Mb'),
        mag=eventParser.mb,
        magnitude_type='Mb',
        origin_id=ResourceIdentifier(id='smi:engdahl.ga.gov.au/origin/' +
                                     str(originCount)),
        azimuthal_gap=eventParser.openaz1,
        creation_info=creation_info)

    originCount += 1

    pickList = []
    arrivalList = []
    pPhaseArrival = None
    for arrParser in arrivals:
        pickOnset = None
        pol = None

        if arrParser.year and arrParser.month and arrParser.day and arrParser.station:
            pPhaseArrival = arrParser
        else:
            arrParser.year = pPhaseArrival.year
            arrParser.day = pPhaseArrival.day
            arrParser.month = pPhaseArrival.month
            arrParser.station = pPhaseArrival.station
            arrParser.delta = pPhaseArrival.delta
            arrParser.dtdd = pPhaseArrival.dtdd
            arrParser.backaz = pPhaseArrival.backaz
            arrParser.focalDip = pPhaseArrival.focalDip
            arrParser.angleAzimuth = pPhaseArrival.angleAzimuth

        if arrParser.phase1 == 'LR' or arrParser.phase2 == 'LR' or arrParser.hour == '24':
            continue

        if arrParser.phase1.startswith('i'):
            pickOnset = PickOnset.impulsive
            if arrParser.fm == '+':
                pol = PickPolarity.positive
            elif arrParser.fm == '-':
                pol = PickPolarity.negative
        elif arrParser.phase1.startswith('e'):
            pickOnset = PickOnset.emergent

        pick = Pick(
            resource_id=ResourceIdentifier(id='smi:engdahl.ga.gov.au/pick/' +
                                           str(pickCount)),
            time=UTCDateTime(int(str(2000 + int(arrParser.year))),
                             int(arrParser.month), int(arrParser.day),
                             int(arrParser.hour), int(arrParser.minute),
                             int(arrParser.second.split('.')[0]),
                             int(arrParser.second.split('.')[1] + '0')),
            waveform_id=WaveformStreamID(network_code='',
                                         station_code=arrParser.station,
                                         channel_code='BHZ'),
            method_id=ResourceIdentifier('STA/LTA'),
            backazimuth=arrParser.backaz if arrParser.backaz else None,
            onset=pickOnset,
            phase_hint=arrParser.phase,
            polarity=pol,
            evaluation_mode='automatic',
            # TO-DO: populate all the remaining fields here as key value
            creation_info=creation_info)
        if not arrParser.backaz:
            print "arrParser.backaz is empty. printing the arrParser for debugging"
        pickCount += 1
        pickList.append(pick)

        arrival = Arrival(
            pick_id=ResourceIdentifier(id='smi:engdahl.ga.gov.au/pick/' +
                                       str(pickCount - 1)),
            phase=arrParser.phase if arrParser.phase else None,
            azimuth=arrParser.backaz if arrParser.backaz else None,
            distance=arrParser.delta if arrParser.delta else None,
            # if the * has some significance, it should be accounted for. ignoring for now.
            time_residual=arrParser.residual.rstrip('*'),
            time_weight=arrParser.wgt if arrParser.wgt else None,
            backazimuth_weight=arrParser.wgt if arrParser.wgt else None)
        arrivalList.append(arrival)
        if not arrParser.wgt:
            print "arrParser.wgt is empty. printing the arrParser for debugging"


#          pprint.pprint(arrParser)

    origin.arrivals = arrivalList

    event = Event(resource_id=ResourceIdentifier(
        id='smi:engdahl.ga.gov.au/event/' + str(eventCount)),
                  creation_info=creation_info,
                  event_type='earthquake')

    eventCount += 1

    event.picks = pickList
    event.origins = [
        origin,
    ]
    event.magnitudes = [
        magnitude,
    ]
    event.preferred_origin_id = origin.resource_id
    event.preferred_magnitude_id = magnitude.resource_id
    return event
Example #29
0
def _internal_read_single_scardec(buf):
    """
    Reads a single SCARDEC file to a :class:`~obspy.core.event.Event`
    object.

    :param buf: File to read.
    :type buf: Open file or open file like object.
    """
    # The first line encodes the origin time and epicenter
    line = buf.readline()

    origin_time = line.strip().split()[:6]
    values = list(map(int, origin_time[:-1])) + \
        [float(origin_time[-1])]
    try:
        origin_time = UTCDateTime(*values)
    except (TypeError, ValueError):
        warnings.warn("Could not determine origin time from line: %s. Will "
                      "be set to zero." % line)
        origin_time = UTCDateTime(0)
    line = line.split()[6:]
    latitude, longitude = map(float, line[:2])

    # The second line encodes depth and the two focal mechanisms
    line = buf.readline()
    line = line.split()

    # First three values are depth, scalar moment (in Nm) and moment magnitude
    depth, scalar_moment, moment_mag = map(float, line[0:3])

    # depth is in km in SCARDEC files
    depth *= 1e3

    # Next six values are strike, dip, rake for both planes
    strike1, dip1, rake1 = map(float, line[3:6])
    strike2, dip2, rake2 = map(float, line[6:9])

    # The rest of the file is the moment rate function
    # In each line: time (sec), moment rate (Nm/sec)
    stf_time = []
    stf_mr = []
    for line in buf:
        stf_time.append(float(line.split()[0]))
        stf_mr.append(float(line.split()[1]))

    # Normalize the source time function
    stf_mr = np.array(stf_mr)
    stf_mr /= scalar_moment

    # Calculate the time step
    dt = np.mean(np.diff(stf_time))

    # Calculate the stf offset (time of first sample wrt to origin time)
    offset = stf_time[0]

    # event name is set to generic value for now
    event_name = 'SCARDEC_event'

    cmt_origin = Origin(
        resource_id=_get_resource_id(event_name, "origin", tag="cmt"),
        time=origin_time,
        longitude=longitude,
        latitude=latitude,
        depth=depth,
        origin_type="centroid",
        region=_fe.get_region(longitude=longitude,
                              latitude=latitude)
    )

    cmt_mag = Magnitude(
        resource_id=_get_resource_id(event_name, "magnitude", tag="mw"),
        mag=moment_mag,
        magnitude_type="mw",
        origin_id=cmt_origin.resource_id
    )

    nod1 = NodalPlane(strike=strike1, dip=dip1, rake=rake1)
    nod2 = NodalPlane(strike=strike2, dip=dip2, rake=rake2)
    nod = NodalPlanes(nodal_plane_1=nod1, nodal_plane_2=nod2)

    foc_mec = FocalMechanism(
        resource_id=_get_resource_id(event_name, "focal_mechanism"),
        nodal_planes=nod
    )

    dip1 *= np.pi / 180.
    rake1 *= np.pi / 180.
    strike1 *= np.pi / 180.

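    # Double-couple moment tensor components of nodal plane 1 in a
    # North, East, Down basis (cf. the Jost & Herrmann comment below).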
    mxx = - scalar_moment * ((np.sin(dip1) * np.cos(rake1) *
                              np.sin(2 * strike1)) +
                             (np.sin(2 * dip1) * np.sin(rake1) *
                              np.sin(2 * strike1)))
    mxy = scalar_moment * ((np.sin(dip1) * np.cos(rake1) *
                            np.cos(2 * strike1)) +
                           (np.sin(2 * dip1) * np.sin(rake1) *
                            np.sin(2 * strike1) * 0.5))
    myy = scalar_moment * ((np.sin(dip1) * np.cos(rake1) *
                            np.sin(2 * strike1)) -
                           (np.sin(2 * dip1) * np.sin(rake1) *
                            np.cos(2 * strike1)))
    mxz = - scalar_moment * ((np.cos(dip1) * np.cos(rake1) *
                              np.cos(strike1)) +
                             (np.cos(2 * dip1) * np.sin(rake1) *
                              np.sin(strike1)))
    myz = - scalar_moment * ((np.cos(dip1) * np.cos(rake1) *
                             np.sin(strike1)) -
                             (np.cos(2 * dip1) * np.sin(rake1) *
                              np.cos(strike1)))
    mzz = scalar_moment * (np.sin(2 * dip1) * np.sin(rake1))

    tensor = Tensor(m_rr=mxx, m_tt=myy, m_pp=mzz, m_rt=mxy, m_rp=mxz, m_tp=myz)

    cm = [Comment(text="Basis system: North,East,Down \
                        (Jost and Herrmann 1989)")]
    cm[0].resource_id = _get_resource_id(event_name, 'comment', 'mt')
    cm.append(Comment(text="MT derived from focal mechanism, therefore \
                            constrained to pure double couple.",
                      force_resource_id=False))

    # Write moment rate function
    extra = {'moment_rate': {'value': stf_mr,
                             'namespace': r"http://test.org/xmlns/0.1"},
             'dt': {'value': dt,
                    'namespace': r"http://test.org/xmlns/0.1"},
             'offset': {'value': offset,
                        'namespace': r"http://test.org/xmlns/0.1"}
             }

    # Source time function
    stf = SourceTimeFunction(type="unknown")
    stf.extra = extra

    mt = MomentTensor(
        resource_id=_get_resource_id(event_name, "moment_tensor"),
        derived_origin_id=cmt_origin.resource_id,
        moment_magnitude_id=cmt_mag.resource_id,
        scalar_moment=scalar_moment,
        tensor=tensor,
        source_time_function=stf,
        comments=cm
    )

    # Assemble everything.
    foc_mec.moment_tensor = mt

    ev = Event(resource_id=_get_resource_id(event_name, "event"),
               event_type="earthquake")
    ev.event_descriptions.append(EventDescription(text=event_name,
                                                  type="earthquake name"))
    ev.comments.append(Comment(
        text="Hypocenter catalog: SCARDEC",
        force_resource_id=False))

    ev.origins.append(cmt_origin)
    ev.magnitudes.append(cmt_mag)
    ev.focal_mechanisms.append(foc_mec)

    # Set the preferred items.
    ev.preferred_origin_id = cmt_origin.resource_id.id
    ev.preferred_magnitude_id = cmt_mag.resource_id.id
    ev.preferred_focal_mechanism_id = foc_mec.resource_id.id

    return ev
Example #30
0
File: core.py Project: Qigaoo/obspy
def _read_ndk(filename, *args, **kwargs):  # @UnusedVariable
    """
    Reads an NDK file to a :class:`~obspy.core.event.Catalog` object.

    :param filename: File or file-like object in text mode.
    """
    # Read the whole file at once. While an iterator would be more efficient
    # the largest NDK file out in the wild is 13.7 MB so it does not matter
    # much.
    if not hasattr(filename, "read"):
        # Check if it exists, otherwise assume it's a string.
        try:
            with open(filename, "rt") as fh:
                data = fh.read()
        except Exception:
            try:
                data = filename.decode()
            except Exception:
                data = str(filename)
            data = data.strip()
    else:
        data = filename.read()
        if hasattr(data, "decode"):
            data = data.decode()

    # Create iterator that yields lines.
    def lines_iter():
        prev_line = -1
        while True:
            next_line = data.find("\n", prev_line + 1)
            if next_line < 0:
                break
            yield data[prev_line + 1: next_line]
            prev_line = next_line
        if len(data) > prev_line + 1:
            yield data[prev_line + 1:]

    # Use one Flinn Engdahl object for all region determinations.
    fe = FlinnEngdahl()
    cat = Catalog(resource_id=_get_resource_id("catalog", str(uuid.uuid4())))

    # Loop over 5 lines at once.
    for _i, lines in enumerate(itertools.zip_longest(*[lines_iter()] * 5)):
        if None in lines:
            msg = "Skipped last %i lines. Not a multiple of 5 lines." % (
                lines.count(None))
            warnings.warn(msg, ObsPyNDKWarning)
            continue

        # Parse the lines to a human readable dictionary.
        try:
            record = _read_lines(*lines)
        except (ValueError, ObsPyNDKException):
            exc = traceback.format_exc()
            msg = (
                "Could not parse event %i (faulty file?). Will be "
                "skipped. Lines of the event:\n"
                "\t%s\n"
                "%s") % (_i + 1, "\n\t".join(lines), exc)
            warnings.warn(msg, ObsPyNDKWarning)
            continue

        # Use one creation info for essentially every item.
        creation_info = CreationInfo(
            agency_id="GCMT",
            version=record["version_code"]
        )

        # Use the ObsPy Flinn Engdahl region determiner as the region in the
        # NDK files is oftentimes trimmed.
        region = fe.get_region(record["centroid_longitude"],
                               record["centroid_latitude"])

        # Create an event object.
        event = Event(
            force_resource_id=False,
            event_type="earthquake",
            event_type_certainty="known",
            event_descriptions=[
                EventDescription(text=region, type="Flinn-Engdahl region"),
                EventDescription(text=record["cmt_event_name"],
                                 type="earthquake name")
            ]
        )

        # Assemble the time for the reference origin.
        try:
            time = _parse_date_time(record["date"], record["time"])
        except ObsPyNDKException:
            msg = ("Invalid time in event %i. '%s' and '%s' cannot be "
                   "assembled to a valid time. Event will be skipped.") % \
                  (_i + 1, record["date"], record["time"])
            warnings.warn(msg, ObsPyNDKWarning)
            continue

        # Create two origins, one with the reference latitude/longitude and
        # one with the centroidal values.
        ref_origin = Origin(
            force_resource_id=False,
            time=time,
            longitude=record["hypo_lng"],
            latitude=record["hypo_lat"],
            # Convert to m.
            depth=record["hypo_depth_in_km"] * 1000.0,
            origin_type="hypocenter",
            comments=[Comment(text="Hypocenter catalog: %s" %
                              record["hypocenter_reference_catalog"],
                              force_resource_id=False)]
        )
        ref_origin.comments[0].resource_id = _get_resource_id(
            record["cmt_event_name"], "comment", tag="ref_origin")
        ref_origin.resource_id = _get_resource_id(record["cmt_event_name"],
                                                  "origin", tag="reforigin")

        cmt_origin = Origin(
            force_resource_id=False,
            longitude=record["centroid_longitude"],
            longitude_errors={
                "uncertainty": record["centroid_longitude_error"]},
            latitude=record["centroid_latitude"],
            latitude_errors={
                "uncertainty": record["centroid_latitude_error"]},
            # Convert to m.
            depth=record["centroid_depth_in_km"] * 1000.0,
            depth_errors={
                "uncertainty": record["centroid_depth_in_km_error"] * 1000},
            time=ref_origin["time"] + record["centroid_time"],
            time_errors={"uncertainty": record["centroid_time_error"]},
            depth_type=record["type_of_centroid_depth"],
            origin_type="centroid",
            time_fixed=False,
            epicenter_fixed=False,
            creation_info=creation_info.copy()
        )
        cmt_origin.resource_id = _get_resource_id(record["cmt_event_name"],
                                                  "origin",
                                                  tag="cmtorigin")
        event.origins = [ref_origin, cmt_origin]
        event.preferred_origin_id = cmt_origin.resource_id.id

        # Create the magnitude object.
        mag = Magnitude(
            force_resource_id=False,
            mag=round(record["Mw"], 2),
            magnitude_type="Mwc",
            origin_id=cmt_origin.resource_id,
            creation_info=creation_info.copy()
        )
        mag.resource_id = _get_resource_id(record["cmt_event_name"],
                                           "magnitude", tag="moment_mag")
        event.magnitudes = [mag]
        event.preferred_magnitude_id = mag.resource_id.id

        # Add the reported mb, MS magnitudes as additional magnitude objects.
        event.magnitudes.append(Magnitude(
            force_resource_id=False,
            mag=record["mb"],
            magnitude_type="mb",
            comments=[Comment(
                force_resource_id=False,
                text="Reported magnitude in NDK file. Most likely 'mb'."
            )]
        ))
        event.magnitudes[-1].comments[-1].resource_id = _get_resource_id(
            record["cmt_event_name"], "comment", tag="mb_magnitude")
        event.magnitudes[-1].resource_id = _get_resource_id(
            record["cmt_event_name"], "magnitude", tag="mb")

        event.magnitudes.append(Magnitude(
            force_resource_id=False,
            mag=record["MS"],
            magnitude_type="MS",
            comments=[Comment(
                force_resource_id=False,
                text="Reported magnitude in NDK file. Most likely 'MS'."
            )]
        ))
        event.magnitudes[-1].comments[-1].resource_id = _get_resource_id(
            record["cmt_event_name"], "comment", tag="MS_magnitude")
        event.magnitudes[-1].resource_id = _get_resource_id(
            record["cmt_event_name"], "magnitude", tag="MS")

        # Take care of the moment tensor.
        tensor = Tensor(
            m_rr=record["m_rr"],
            m_rr_errors={"uncertainty": record["m_rr_error"]},
            m_pp=record["m_pp"],
            m_pp_errors={"uncertainty": record["m_pp_error"]},
            m_tt=record["m_tt"],
            m_tt_errors={"uncertainty": record["m_tt_error"]},
            m_rt=record["m_rt"],
            m_rt_errors={"uncertainty": record["m_rt_error"]},
            m_rp=record["m_rp"],
            m_rp_errors={"uncertainty": record["m_rp_error"]},
            m_tp=record["m_tp"],
            m_tp_errors={"uncertainty": record["m_tp_error"]},
            creation_info=creation_info.copy()
        )
        mt = MomentTensor(
            force_resource_id=False,
            scalar_moment=record["scalar_moment"],
            tensor=tensor,
            data_used=[DataUsed(**i) for i in record["data_used"]],
            inversion_type=record["source_type"],
            source_time_function=SourceTimeFunction(
                type=record["moment_rate_type"],
                duration=record["moment_rate_duration"]
            ),
            derived_origin_id=cmt_origin.resource_id,
            creation_info=creation_info.copy()
        )
        mt.resource_id = _get_resource_id(record["cmt_event_name"],
                                          "momenttensor")
        axis = [Axis(**i) for i in record["principal_axis"]]
        focmec = FocalMechanism(
            force_resource_id=False,
            moment_tensor=mt,
            principal_axes=PrincipalAxes(
                # The ordering is the same as for the IRIS SPUD service and
                # from a website of the Saint Louis University Earthquake
                # center so it should be correct.
                t_axis=axis[0],
                p_axis=axis[2],
                n_axis=axis[1]
            ),
            nodal_planes=NodalPlanes(
                nodal_plane_1=NodalPlane(**record["nodal_plane_1"]),
                nodal_plane_2=NodalPlane(**record["nodal_plane_2"])
            ),
            comments=[
                Comment(force_resource_id=False,
                        text="CMT Analysis Type: %s" %
                             record["cmt_type"].capitalize()),
                Comment(force_resource_id=False,
                        text="CMT Timestamp: %s" %
                             record["cmt_timestamp"])],
            creation_info=creation_info.copy()
        )
        focmec.comments[0].resource_id = _get_resource_id(
            record["cmt_event_name"], "comment", tag="cmt_type")
        focmec.comments[1].resource_id = _get_resource_id(
            record["cmt_event_name"], "comment", tag="cmt_timestamp")
        focmec.resource_id = _get_resource_id(record["cmt_event_name"],
                                              "focal_mechanism")
        event.focal_mechanisms = [focmec]
        event.preferred_focal_mechanism_id = focmec.resource_id.id

        # Set at end to avoid duplicate resource id warning.
        event.resource_id = _get_resource_id(record["cmt_event_name"],
                                             "event")

        cat.append(event)

    if len(cat) == 0:
        msg = "No valid events found in NDK file."
        raise ObsPyNDKException(msg)

    return cat
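
The five-line grouping above works by repeating one iterator: zip_longest
consumes the same iterator five times per output tuple, padding a short
final group with None (which the loop then detects and skips). A minimal
sketch:

from itertools import zip_longest

lines = ["a", "b", "c", "d", "e", "f", "g"]
for chunk in zip_longest(*[iter(lines)] * 5):
    print(chunk)
# ('a', 'b', 'c', 'd', 'e')
# ('f', 'g', None, None, None)
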
Example #31
0
def readheader(sfile):
    """
    Read header information from a seisan nordic format S-file.
    Returns an obspy.core.event.Event type: note this changed for version \
    0.1.0 from the inbuilt class types.

    :type sfile: str
    :param sfile: Path to the s-file

    :returns: :class: obspy.core.event.Event

    >>> event = readheader('eqcorrscan/tests/test_data/REA/TEST_/' +
    ...                    '01-0411-15L.S201309')
    >>> print(event.origins[0].time)
    2013-09-01T04:11:15.700000Z
    """
    import warnings
    from obspy.core.event import Event, Origin, Magnitude, Comment
    from obspy.core.event import EventDescription, CreationInfo
    f = open(sfile, 'r')
    # Base populate to allow for empty parts of file
    new_event = Event()
    topline = f.readline()
    if not len(topline.rstrip()) == 80:
        raise IOError('s-file has a corrupt header, not 80 char long')
    f.seek(0)
    for line in f:
        if line[79] in [' ', '1']:
            topline = line
            break
        if line[79] == '7':
            raise IOError('No header found, corrupt s-file?')
    try:
        sfile_seconds = int(topline[16:18])
        if sfile_seconds == 60:
            sfile_seconds = 0
            add_seconds = 60
        else:
            add_seconds = 0
        new_event.origins.append(Origin())
        new_event.origins[0].time = UTCDateTime(int(topline[1:5]),
                                                int(topline[6:8]),
                                                int(topline[8:10]),
                                                int(topline[11:13]),
                                                int(topline[13:15]),
                                                sfile_seconds,
                                                int(topline[19:20]) *
                                                100000)\
            + add_seconds
    except Exception:
        warnings.warn("Couldn't read a date from sfile: " + sfile)
        new_event.origins.append(Origin(time=UTCDateTime(0)))
    # new_event.loc_mod_ind=topline[20]
    new_event.event_descriptions.append(EventDescription())
    new_event.event_descriptions[0].text = topline[21:23]
    # new_event.ev_id=topline[22]
    if not _float_conv(topline[23:30]) == 999:
        new_event.origins[0].latitude = _float_conv(topline[23:30])
        new_event.origins[0].longitude = _float_conv(topline[31:38])
        new_event.origins[0].depth = _float_conv(topline[39:43]) * 1000
    # else:
    #     # The origin 'requires' a lat & long
    #     new_event.origins[0].latitude = float('NaN')
    #     new_event.origins[0].longitude = float('NaN')
    #     new_event.origins[0].depth = float('NaN')
    # new_event.depth_ind = topline[44]
    # new_event.loc_ind = topline[45]
    new_event.creation_info = CreationInfo(agency_id=topline[45:48].strip())
    ksta = Comment(text='Number of stations=' + topline[49:51].strip())
    new_event.origins[0].comments.append(ksta)
    # new_event.origins[0].nsta??? = _int_conv(topline[49:51])
    if not _float_conv(topline[51:55]) == 999:
        new_event.origins[0].time_errors['Time_Residual_RMS'] = \
            _float_conv(topline[51:55])
    # Read in magnitudes if they are there.
    if len(topline[59].strip()) > 0:
        new_event.magnitudes.append(Magnitude())
        new_event.magnitudes[0].mag = _float_conv(topline[56:59])
        new_event.magnitudes[0].magnitude_type = topline[59]
        new_event.magnitudes[0].creation_info = \
            CreationInfo(agency_id=topline[60:63].strip())
        new_event.magnitudes[0].origin_id = new_event.origins[0].\
            resource_id
    if len(topline[67].strip()) > 0:
        new_event.magnitudes.append(Magnitude())
        new_event.magnitudes[1].mag = _float_conv(topline[64:67])
        new_event.magnitudes[1].magnitude_type = topline[67]
        new_event.magnitudes[1].creation_info = \
            CreationInfo(agency_id=topline[68:71].strip())
        new_event.magnitudes[1].origin_id = new_event.origins[0].\
            resource_id
    if len(topline[75].strip()) > 0:
        new_event.magnitudes.append(Magnitude())
        new_event.magnitudes[2].mag = _float_conv(topline[72:75])
        new_event.magnitudes[2].magnitude_type = topline[75]
        new_event.magnitudes[2].creation_info = \
            CreationInfo(agency_id=topline[76:79].strip())
        new_event.magnitudes[2].origin_id = new_event.origins[0].\
            resource_id
    f.close()
    # convert the nordic notation of magnitude to more general notation
    for _magnitude in new_event.magnitudes:
        _magnitude.magnitude_type = _nortoevmag(_magnitude.magnitude_type)
    # Set the useful things like preferred magnitude and preferred origin
    new_event.preferred_origin_id = str(new_event.origins[0].resource_id)
    if len(new_event.magnitudes) > 1:
        try:
            # Select moment first, then local, then body, surface, coda.
            mag_filter = [
                'MW', 'Mw', 'ML', 'Ml', 'MB', 'Mb', 'MS', 'Ms', 'Mc', 'MC'
            ]
            _magnitudes = [(m.magnitude_type, m.resource_id)
                           for m in new_event.magnitudes]
            preferred_magnitude = sorted(_magnitudes,
                                         key=lambda x: mag_filter.index(x[0]))
            new_event.preferred_magnitude_id = str(preferred_magnitude[0][1])
        except ValueError:
            # If there is a magnitude not specified in filter
            new_event.preferred_magnitude_id =\
                str(new_event.magnitudes[0].resource_id)
    elif len(new_event.magnitudes) == 1:
        new_event.preferred_magnitude_id =\
            str(new_event.magnitudes[0].resource_id)
    return new_event
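The preference sort above deserves a standalone look: list.index raises a
ValueError for any magnitude type missing from mag_filter, which is why the
whole sort sits inside a try/except. A minimal sketch, with illustrative
stand-ins for the (magnitude_type, resource_id) tuples:
mag_filter = ['MW', 'Mw', 'ML', 'Ml', 'MB', 'Mb', 'MS', 'Ms', 'Mc', 'MC']
_magnitudes = [('ML', 'id_local'), ('Mw', 'id_moment'), ('Mb', 'id_body')]
try:
    # A lower index in mag_filter means a more preferred magnitude type.
    best = sorted(_magnitudes, key=lambda x: mag_filter.index(x[0]))[0]
except ValueError:
    # A type outside mag_filter: fall back to the first magnitude.
    best = _magnitudes[0]
print(best)  # ('Mw', 'id_moment') -- moment magnitude ranks first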
Example #32
0
def _read_ndk(filename, *args, **kwargs):  # @UnusedVariable
    """
    Reads an NDK file to a :class:`~obspy.core.event.Catalog` object.

    :param filename: File or file-like object in text mode.
    """
    # Read the whole file at once. While an iterator would be more efficient,
    # the largest NDK file out in the wild is 13.7 MB, so it does not matter
    # much.
    if not hasattr(filename, "read"):
        # Check if it exists, otherwise assume it's a string.
        try:
            with open(filename, "rt") as fh:
                data = fh.read()
        except Exception:
            try:
                data = filename.decode()
            except Exception:
                data = str(filename)
            data = data.strip()
    else:
        data = filename.read()
        if hasattr(data, "decode"):
            data = data.decode()

    # Create iterator that yields lines.
    def lines_iter():
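        # Walk the raw string with str.find rather than splitlines() so only
        # one line is materialised at a time.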
        prev_line = -1
        while True:
            next_line = data.find("\n", prev_line + 1)
            if next_line < 0:
                break
            yield data[prev_line + 1:next_line]
            prev_line = next_line
        if len(data) > prev_line + 1:
            yield data[prev_line + 1:]

    # Use one Flinn Engdahl object for all region determinations.
    fe = FlinnEngdahl()
    cat = Catalog(resource_id=_get_resource_id("catalog", str(uuid.uuid4())))

    # Loop over 5 lines at once.
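    # zip_longest(*[it] * 5) repeats the *same* iterator object five times,
    # so each zip step consumes five consecutive lines; a trailing partial
    # record is padded with None and skipped below.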
    for _i, lines in enumerate(zip_longest(*[lines_iter()] * 5)):
        if None in lines:
            msg = "Skipped last %i lines. Not a multiple of 5 lines." % (
                lines.count(None))
            warnings.warn(msg, ObsPyNDKWarning)
            continue

        # Parse the lines to a human readable dictionary.
        try:
            record = _read_lines(*lines)
        except (ValueError, ObsPyNDKException):
            exc = traceback.format_exc()
            msg = ("Could not parse event %i (faulty file?). Will be "
                   "skipped. Lines of the event:\n"
                   "\t%s\n"
                   "%s") % (_i + 1, "\n\t".join(lines), exc)
            warnings.warn(msg, ObsPyNDKWarning)
            continue

        # Use one creation info for essentially every item.
        creation_info = CreationInfo(agency_id="GCMT",
                                     version=record["version_code"])

        # Use the ObsPy Flinn Engdahl region determiner as the region in the
        # NDK files is oftentimes trimmed.
        region = fe.get_region(record["centroid_longitude"],
                               record["centroid_latitude"])

        # Create an event object.
        event = Event(force_resource_id=False,
                      event_type="earthquake",
                      event_type_certainty="known",
                      event_descriptions=[
                          EventDescription(text=region,
                                           type="Flinn-Engdahl region"),
                          EventDescription(text=record["cmt_event_name"],
                                           type="earthquake name")
                      ])

        # Assemble the time for the reference origin.
        try:
            time = _parse_date_time(record["date"], record["time"])
        except ObsPyNDKException:
            msg = ("Invalid time in event %i. '%s' and '%s' cannot be "
                   "assembled to a valid time. Event will be skipped.") % \
                  (_i + 1, record["date"], record["time"])
            warnings.warn(msg, ObsPyNDKWarning)
            continue

        # Create two origins, one with the reference latitude/longitude and
        # one with the centroidal values.
        ref_origin = Origin(
            force_resource_id=False,
            time=time,
            longitude=record["hypo_lng"],
            latitude=record["hypo_lat"],
            # Convert to m.
            depth=record["hypo_depth_in_km"] * 1000.0,
            origin_type="hypocenter",
            comments=[
                Comment(text="Hypocenter catalog: %s" %
                        record["hypocenter_reference_catalog"],
                        force_resource_id=False)
            ])
        ref_origin.comments[0].resource_id = _get_resource_id(
            record["cmt_event_name"], "comment", tag="ref_origin")
        ref_origin.resource_id = _get_resource_id(record["cmt_event_name"],
                                                  "origin",
                                                  tag="reforigin")

        cmt_origin = Origin(
            force_resource_id=False,
            longitude=record["centroid_longitude"],
            longitude_errors={
                "uncertainty": record["centroid_longitude_error"]
            },
            latitude=record["centroid_latitude"],
            latitude_errors={"uncertainty": record["centroid_latitude_error"]},
            # Convert to m.
            depth=record["centroid_depth_in_km"] * 1000.0,
            depth_errors={
                "uncertainty": record["centroid_depth_in_km_error"] * 1000
            },
            time=ref_origin["time"] + record["centroid_time"],
            time_errors={"uncertainty": record["centroid_time_error"]},
            depth_type=record["type_of_centroid_depth"],
            origin_type="centroid",
            time_fixed=False,
            epicenter_fixed=False,
            creation_info=creation_info.copy())
        cmt_origin.resource_id = _get_resource_id(record["cmt_event_name"],
                                                  "origin",
                                                  tag="cmtorigin")
        event.origins = [ref_origin, cmt_origin]
        event.preferred_origin_id = cmt_origin.resource_id.id

        # Create the magnitude object.
        mag = Magnitude(force_resource_id=False,
                        mag=round(record["Mw"], 2),
                        magnitude_type="Mwc",
                        origin_id=cmt_origin.resource_id,
                        creation_info=creation_info.copy())
        mag.resource_id = _get_resource_id(record["cmt_event_name"],
                                           "magnitude",
                                           tag="moment_mag")
        event.magnitudes = [mag]
        event.preferred_magnitude_id = mag.resource_id.id

        # Add the reported mb, MS magnitudes as additional magnitude objects.
        event.magnitudes.append(
            Magnitude(
                force_resource_id=False,
                mag=record["mb"],
                magnitude_type="mb",
                comments=[
                    Comment(
                        force_resource_id=False,
                        text="Reported magnitude in NDK file. Most likely 'mb'."
                    )
                ]))
        event.magnitudes[-1].comments[-1].resource_id = _get_resource_id(
            record["cmt_event_name"], "comment", tag="mb_magnitude")
        event.magnitudes[-1].resource_id = _get_resource_id(
            record["cmt_event_name"], "magnitude", tag="mb")

        event.magnitudes.append(
            Magnitude(
                force_resource_id=False,
                mag=record["MS"],
                magnitude_type="MS",
                comments=[
                    Comment(
                        force_resource_id=False,
                        text="Reported magnitude in NDK file. Most likely 'MS'."
                    )
                ]))
        event.magnitudes[-1].comments[-1].resource_id = _get_resource_id(
            record["cmt_event_name"], "comment", tag="MS_magnitude")
        event.magnitudes[-1].resource_id = _get_resource_id(
            record["cmt_event_name"], "magnitude", tag="MS")

        # Take care of the moment tensor.
        tensor = Tensor(m_rr=record["m_rr"],
                        m_rr_errors={"uncertainty": record["m_rr_error"]},
                        m_pp=record["m_pp"],
                        m_pp_errors={"uncertainty": record["m_pp_error"]},
                        m_tt=record["m_tt"],
                        m_tt_errors={"uncertainty": record["m_tt_error"]},
                        m_rt=record["m_rt"],
                        m_rt_errors={"uncertainty": record["m_rt_error"]},
                        m_rp=record["m_rp"],
                        m_rp_errors={"uncertainty": record["m_rp_error"]},
                        m_tp=record["m_tp"],
                        m_tp_errors={"uncertainty": record["m_tp_error"]},
                        creation_info=creation_info.copy())
        mt = MomentTensor(
            force_resource_id=False,
            scalar_moment=record["scalar_moment"],
            tensor=tensor,
            data_used=[DataUsed(**i) for i in record["data_used"]],
            inversion_type=record["source_type"],
            source_time_function=SourceTimeFunction(
                type=record["moment_rate_type"],
                duration=record["moment_rate_duration"]),
            derived_origin_id=cmt_origin.resource_id,
            creation_info=creation_info.copy())
        mt.resource_id = _get_resource_id(record["cmt_event_name"],
                                          "momenttensor")
        axis = [Axis(**i) for i in record["principal_axis"]]
        focmec = FocalMechanism(
            force_resource_id=False,
            moment_tensor=mt,
            principal_axes=PrincipalAxes(
                # The ordering is the same as for the IRIS SPUD service and
                # the Saint Louis University Earthquake Center website, so it
                # should be correct.
                t_axis=axis[0],
                p_axis=axis[2],
                n_axis=axis[1]),
            nodal_planes=NodalPlanes(
                nodal_plane_1=NodalPlane(**record["nodal_plane_1"]),
                nodal_plane_2=NodalPlane(**record["nodal_plane_2"])),
            comments=[
                Comment(force_resource_id=False,
                        text="CMT Analysis Type: %s" %
                        record["cmt_type"].capitalize()),
                Comment(force_resource_id=False,
                        text="CMT Timestamp: %s" % record["cmt_timestamp"])
            ],
            creation_info=creation_info.copy())
        focmec.comments[0].resource_id = _get_resource_id(
            record["cmt_event_name"], "comment", tag="cmt_type")
        focmec.comments[1].resource_id = _get_resource_id(
            record["cmt_event_name"], "comment", tag="cmt_timestamp")
        focmec.resource_id = _get_resource_id(record["cmt_event_name"],
                                              "focal_mechanism")
        event.focal_mechanisms = [focmec]
        event.preferred_focal_mechanism_id = focmec.resource_id.id

        # Set at end to avoid duplicate resource id warning.
        event.resource_id = _get_resource_id(record["cmt_event_name"], "event")

        cat.append(event)

    if len(cat) == 0:
        msg = "No valid events found in NDK file."
        raise ObsPyNDKException(msg)

    return cat
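For context, this parser backs ObsPy's NDK plugin; in practice the entry
point is read_events, which dispatches on the format (the file name below is
purely illustrative):
from obspy import read_events

# Passing format="NDK" skips format autodetection; the file name is a
# hypothetical GCMT catalog file.
cat = read_events("jan76_dec20.ndk", format="NDK")
print(len(cat), "events")  # one Event per 5-line NDK record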
Example #33
0
def _read_single_event(event_file, locate_dir, units, local_mag_ph):
    """
    Parse an event file from QuakeMigrate into an obspy Event object.

    Parameters
    ----------
    event_file : `pathlib.Path` object
        Path to .event file to read.
    locate_dir : `pathlib.Path` object
        Path to locate directory (contains "events", "picks" etc. directories).
    units : {"km", "m"}
        Grid projection coordinates for QM LUT (determines units of depths and
        uncertainties in the .event files).
    local_mag_ph : {"S", "P"}
        Amplitude measurement used to calculate local magnitudes.

    Returns
    -------
    event : `obspy.Event` object
        Event object populated with all available information output by
        :class:`~quakemigrate.signal.scan.locate()`, including event locations
        and uncertainties, picks, amplitudes, and magnitudes if available.
        Returns None if no ``.picks`` file exists for the event.

    """

    # Parse information from event file
    event_info = pd.read_csv(event_file).iloc[0]
    event_uid = str(event_info["EventID"])

    # Set distance conversion factor (from units of QM LUT projection units).
    if units == "km":
        factor = 1e3
    elif units == "m":
        factor = 1
    else:
        raise AttributeError(f"units must be 'km' or 'm'; not {units}")

    # Create event object to store origin and pick information
    event = Event()
    event.extra = AttribDict()
    event.resource_id = str(event_info["EventID"])
    event.creation_info = CreationInfo(author="QuakeMigrate",
                                       version=quakemigrate.__version__)

    # Add COA info to extra
    event.extra.coa = {"value": event_info["COA"], "namespace": ns}
    event.extra.coa_norm = {"value": event_info["COA_NORM"], "namespace": ns}
    event.extra.trig_coa = {"value": event_info["TRIG_COA"], "namespace": ns}
    event.extra.dec_coa = {"value": event_info["DEC_COA"], "namespace": ns}
    event.extra.dec_coa_norm = {
        "value": event_info["DEC_COA_NORM"],
        "namespace": ns
    }

    # Determine location of cut waveform data - add to event object as a
    # custom extra attribute.
    mseed = locate_dir / "raw_cut_waveforms" / event_uid
    event.extra.cut_waveforms_file = {
        "value": str(mseed.with_suffix(".m").resolve()),
        "namespace": ns
    }
    if (locate_dir / "real_cut_waveforms").exists():
        mseed = locate_dir / "real_cut_waveforms" / event_uid
        event.extra.real_cut_waveforms_file = {
            "value": str(mseed.with_suffix(".m").resolve()),
            "namespace": ns
        }
    if (locate_dir / "wa_cut_waveforms").exists():
        mseed = locate_dir / "wa_cut_waveforms" / event_uid
        event.extra.wa_cut_waveforms_file = {
            "value": str(mseed.with_suffix(".m").resolve()),
            "namespace": ns
        }

    # Create origin with spline location and set to preferred event origin.
    origin = Origin()
    origin.method_id = "spline"
    origin.longitude = event_info["X"]
    origin.latitude = event_info["Y"]
    origin.depth = event_info["Z"] * factor
    origin.time = UTCDateTime(event_info["DT"])
    event.origins = [origin]
    event.preferred_origin_id = origin.resource_id

    # Create origin with gaussian location and associate with event
    origin = Origin()
    origin.method_id = "gaussian"
    origin.longitude = event_info["GAU_X"]
    origin.latitude = event_info["GAU_Y"]
    origin.depth = event_info["GAU_Z"] * factor
    origin.time = UTCDateTime(event_info["DT"])
    event.origins.append(origin)

    ouc = OriginUncertainty()
    ce = ConfidenceEllipsoid()
    ce.semi_major_axis_length = event_info["COV_ErrY"] * factor
    ce.semi_intermediate_axis_length = event_info["COV_ErrX"] * factor
    ce.semi_minor_axis_length = event_info["COV_ErrZ"] * factor
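    # No orientation information is available, so the ellipsoid is left
    # axis-aligned: plunge, azimuth and rotation are fixed at zero.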
    ce.major_axis_plunge = 0
    ce.major_axis_azimuth = 0
    ce.major_axis_rotation = 0
    ouc.confidence_ellipsoid = ce
    ouc.preferred_description = "confidence ellipsoid"

    # Set uncertainties for both as the gaussian uncertainties
    for origin in event.origins:
        origin.longitude_errors.uncertainty = kilometer2degrees(
            event_info["GAU_ErrX"] * factor / 1e3)
        origin.latitude_errors.uncertainty = kilometer2degrees(
            event_info["GAU_ErrY"] * factor / 1e3)
        origin.depth_errors.uncertainty = event_info["GAU_ErrZ"] * factor
        origin.origin_uncertainty = ouc

    # TODO: add OriginQuality info? For now, set type and evaluation mode.
    for origin in event.origins:
        origin.origin_type = "hypocenter"
        origin.evaluation_mode = "automatic"

    # --- Handle picks file ---
    pick_file = locate_dir / "picks" / event_uid
    if pick_file.with_suffix(".picks").is_file():
        picks = pd.read_csv(pick_file.with_suffix(".picks"))
    else:
        return None

    for _, pickline in picks.iterrows():
        station = str(pickline["Station"])
        phase = str(pickline["Phase"])
        wid = WaveformStreamID(network_code="", station_code=station)

        for method in ["modelled", "autopick"]:
            pick = Pick()
            pick.extra = AttribDict()
            pick.waveform_id = wid
            pick.method_id = method
            pick.phase_hint = phase
            if method == "autopick" and str(pickline["PickTime"]) != "-1":
                pick.time = UTCDateTime(pickline["PickTime"])
                pick.time_errors.uncertainty = float(pickline["PickError"])
                pick.extra.snr = {
                    "value": float(pickline["SNR"]),
                    "namespace": ns
                }
            elif method == "modelled":
                pick.time = UTCDateTime(pickline["ModelledTime"])
            else:
                continue
            event.picks.append(pick)

    # --- Handle amplitudes file ---
    amps_file = locate_dir / "amplitudes" / event_uid
    if amps_file.with_suffix(".amps").is_file():
        amps = pd.read_csv(amps_file.with_suffix(".amps"))

        i = 0
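        # i counts the station magnitudes that contribute to the network ML.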
        for _, ampsline in amps.iterrows():
            wid = WaveformStreamID(seed_string=ampsline["id"])
            noise_amp = ampsline["Noise_amp"] / 1000  # mm to m
            for phase in ["P_amp", "S_amp"]:
                amp = Amplitude()
                if pd.isna(ampsline[phase]):
                    continue
                amp.generic_amplitude = ampsline[phase] / 1000  # mm to m
                amp.generic_amplitude_errors.uncertainty = noise_amp
                amp.unit = "m"
                amp.type = "AML"
                amp.method_id = phase
                amp.period = 1 / ampsline[f"{phase[0]}_freq"]
                amp.time_window = TimeWindow(
                    reference=UTCDateTime(ampsline[f"{phase[0]}_time"]))
                # amp.pick_id = ?
                amp.waveform_id = wid
                # amp.filter_id = ?
                amp.magnitude_hint = "ML"
                amp.evaluation_mode = "automatic"
                amp.extra = AttribDict()
                try:
                    amp.extra.filter_gain = {
                        "value": ampsline[f"{phase[0]}_filter_gain"],
                        "namespace": ns
                    }
                    amp.extra.avg_amp = {
                        "value": ampsline[f"{phase[0]}_avg_amp"] / 1000,  # m
                        "namespace": ns
                    }
                except KeyError:
                    pass

                if phase[0] == local_mag_ph and not pd.isna(ampsline["ML"]):
                    i += 1
                    stat_mag = StationMagnitude()
                    stat_mag.extra = AttribDict()
                    # stat_mag.origin_id = ? local_mag_loc
                    stat_mag.mag = ampsline["ML"]
                    stat_mag.mag_errors.uncertainty = ampsline["ML_Err"]
                    stat_mag.station_magnitude_type = "ML"
                    stat_mag.amplitude_id = amp.resource_id
                    stat_mag.extra.picked = {
                        "value": ampsline["is_picked"],
                        "namespace": ns
                    }
                    stat_mag.extra.epi_dist = {
                        "value": ampsline["epi_dist"],
                        "namespace": ns
                    }
                    stat_mag.extra.z_dist = {
                        "value": ampsline["z_dist"],
                        "namespace": ns
                    }

                    event.station_magnitudes.append(stat_mag)

                event.amplitudes.append(amp)

        mag = Magnitude()
        mag.extra = AttribDict()
        mag.mag = event_info["ML"]
        mag.mag_errors.uncertainty = event_info["ML_Err"]
        mag.magnitude_type = "ML"
        # mag.origin_id = ?
        mag.station_count = i
        mag.evaluation_mode = "automatic"
        mag.extra.r2 = {"value": event_info["ML_r2"], "namespace": ns}

        event.magnitudes = [mag]
        event.preferred_magnitude_id = mag.resource_id

    return event
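A usage sketch under the layout assumed by the docstring (paths are
hypothetical, and _read_single_event returns None when no .picks file exists
for the event):
from pathlib import Path

# Hypothetical QuakeMigrate locate output tree; "events", "picks" and
# "amplitudes" sit directly under locate_dir.
locate_dir = Path("outputs/runs/example/locate")
event_file = locate_dir / "events" / "20240101120000000.event"

event = _read_single_event(event_file, locate_dir,
                           units="km", local_mag_ph="S")
if event is not None:
    print(event.preferred_origin().time)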