Example #1
    def _parseRecordDa(self, line, focal_mechanism):
        """
        Parses the 'source parameter data - principal axes and
        nodal planes' record Da
        """
        exponent = self._intZero(line[3:5])
        scale = math.pow(10, exponent)
        t_axis_len = self._floatWithFormat(line[5:9], '4.2', scale)
        t_axis_stderr = self._floatWithFormat(line[9:12], '3.2', scale)
        t_axis_plunge = self._int(line[12:14])
        t_axis_azimuth = self._int(line[14:17])
        n_axis_len = self._floatWithFormat(line[17:21], '4.2', scale)
        n_axis_stderr = self._floatWithFormat(line[21:24], '3.2', scale)
        n_axis_plunge = self._int(line[24:26])
        n_axis_azimuth = self._int(line[26:29])
        p_axis_len = self._floatWithFormat(line[29:33], '4.2', scale)
        p_axis_stderr = self._floatWithFormat(line[33:36], '3.2', scale)
        p_axis_plunge = self._int(line[36:38])
        p_axis_azimuth = self._int(line[38:41])
        np1_strike = self._int(line[42:45])
        np1_dip = self._int(line[45:47])
        np1_slip = self._int(line[47:51])
        np2_strike = self._int(line[51:54])
        np2_dip = self._int(line[54:56])
        np2_slip = self._int(line[56:60])

        t_axis = Axis()
        t_axis.length = t_axis_len
        self._storeUncertainty(t_axis.length_errors, t_axis_stderr)
        t_axis.plunge = t_axis_plunge
        t_axis.azimuth = t_axis_azimuth
        n_axis = Axis()
        n_axis.length = n_axis_len
        self._storeUncertainty(n_axis.length_errors, n_axis_stderr)
        n_axis.plunge = n_axis_plunge
        n_axis.azimuth = n_axis_azimuth
        p_axis = Axis()
        p_axis.length = p_axis_len
        self._storeUncertainty(p_axis.length_errors, p_axis_stderr)
        p_axis.plunge = p_axis_plunge
        p_axis.azimuth = p_axis_azimuth
        principal_axes = PrincipalAxes()
        principal_axes.t_axis = t_axis
        principal_axes.n_axis = n_axis
        principal_axes.p_axis = p_axis
        focal_mechanism.principal_axes = principal_axes
        nodal_plane_1 = NodalPlane()
        nodal_plane_1.strike = np1_strike
        nodal_plane_1.dip = np1_dip
        nodal_plane_1.rake = np1_slip
        nodal_plane_2 = NodalPlane()
        nodal_plane_2.strike = np2_strike
        nodal_plane_2.dip = np2_dip
        nodal_plane_2.rake = np2_slip
        nodal_planes = NodalPlanes()
        nodal_planes.nodal_plane_1 = nodal_plane_1
        nodal_planes.nodal_plane_2 = nodal_plane_2
        focal_mechanism.nodal_planes = nodal_planes
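
The parsed values end up in standard obspy.core.event classes. Below is a minimal, self-contained sketch of the same assembly with made-up numbers, using only the public API these examples already rely on:

from obspy.core.event import (Axis, FocalMechanism, NodalPlane, NodalPlanes,
                              PrincipalAxes)

# Made-up values standing in for the fields parsed from a record Da line.
t_axis = Axis(azimuth=65, plunge=10, length=2.5e17)
n_axis = Axis(azimuth=300, plunge=70, length=0.1e17)
p_axis = Axis(azimuth=158, plunge=17, length=-2.6e17)

fm = FocalMechanism(
    principal_axes=PrincipalAxes(t_axis=t_axis, n_axis=n_axis, p_axis=p_axis),
    nodal_planes=NodalPlanes(
        nodal_plane_1=NodalPlane(strike=20, dip=85, rake=170),
        nodal_plane_2=NodalPlane(strike=111, dip=80, rake=5)))
print(fm.principal_axes.t_axis.azimuth)  # 65.0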
Example #2
    def _parse_record_da(self, line, focal_mechanism):
        """
        Parses the 'source parameter data - principal axes and
        nodal planes' record Da
        """
        exponent = self._int_zero(line[3:5])
        scale = math.pow(10, exponent)
        t_axis_len = self._float_with_format(line[5:9], '4.2', scale)
        t_axis_stderr = self._float_with_format(line[9:12], '3.2', scale)
        t_axis_plunge = self._int(line[12:14])
        t_axis_azimuth = self._int(line[14:17])
        n_axis_len = self._float_with_format(line[17:21], '4.2', scale)
        n_axis_stderr = self._float_with_format(line[21:24], '3.2', scale)
        n_axis_plunge = self._int(line[24:26])
        n_axis_azimuth = self._int(line[26:29])
        p_axis_len = self._float_with_format(line[29:33], '4.2', scale)
        p_axis_stderr = self._float_with_format(line[33:36], '3.2', scale)
        p_axis_plunge = self._int(line[36:38])
        p_axis_azimuth = self._int(line[38:41])
        np1_strike = self._int(line[42:45])
        np1_dip = self._int(line[45:47])
        np1_slip = self._int(line[47:51])
        np2_strike = self._int(line[51:54])
        np2_dip = self._int(line[54:56])
        np2_slip = self._int(line[56:60])

        t_axis = Axis()
        t_axis.length = t_axis_len
        self._store_uncertainty(t_axis.length_errors, t_axis_stderr)
        t_axis.plunge = t_axis_plunge
        t_axis.azimuth = t_axis_azimuth
        n_axis = Axis()
        n_axis.length = n_axis_len
        self._store_uncertainty(n_axis.length_errors, n_axis_stderr)
        n_axis.plunge = n_axis_plunge
        n_axis.azimuth = n_axis_azimuth
        p_axis = Axis()
        p_axis.length = p_axis_len
        self._store_uncertainty(p_axis.length_errors, p_axis_stderr)
        p_axis.plunge = p_axis_plunge
        p_axis.azimuth = p_axis_azimuth
        principal_axes = PrincipalAxes()
        principal_axes.t_axis = t_axis
        principal_axes.n_axis = n_axis
        principal_axes.p_axis = p_axis
        focal_mechanism.principal_axes = principal_axes
        nodal_plane_1 = NodalPlane()
        nodal_plane_1.strike = np1_strike
        nodal_plane_1.dip = np1_dip
        nodal_plane_1.rake = np1_slip
        nodal_plane_2 = NodalPlane()
        nodal_plane_2.strike = np2_strike
        nodal_plane_2.dip = np2_dip
        nodal_plane_2.rake = np2_slip
        nodal_planes = NodalPlanes()
        nodal_planes.nodal_plane_1 = nodal_plane_1
        nodal_planes.nodal_plane_2 = nodal_plane_2
        focal_mechanism.nodal_planes = nodal_planes
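
The private helpers _int_zero and _float_with_format are not shown here. The sketch below is only a guess at their semantics (a fixed-width field with implied decimal places, multiplied by the 10**exponent scale); the real implementations may differ:

def float_with_format(field, fmt, scale=1.0):
    # Hypothetical stand-in for the private _float_with_format helper:
    # a format such as '4.2' is read as "width 4, 2 implied decimal places"
    # when the field contains no decimal point.
    field = field.strip()
    if not field:
        return None
    decimals = int(fmt.split('.')[1])
    value = float(field)
    if '.' not in field:
        value /= 10 ** decimals
    return value * scale

print(float_with_format('0140', '4.2', scale=10 ** 17))  # 1.4e+17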
Example #3
    def _map_fplane2focalmech(self, db):
        """
        Return an obspy FocalMechanism from a dict of CSS key/values
        corresponding to one record. See the 'Join' section for the
        expected database join.
        
        Inputs
        ======
        db : dict of key/values of CSS fields from the 'fplane' table

        Returns
        =======
        obspy.core.event.FocalMechanism

        Notes
        =====
        Any object that supports the dict 'get' method can be passed as
        input, e.g. OrderedDict, custom classes, etc.

        """
        #
        # NOTE: Antelope schema for this is wrong, no nulls defined
        # 
        fm = FocalMechanism()

        nps = NodalPlanes()
        nps.nodal_plane_1 = NodalPlane(db.get('str1'), db.get('dip1'), db.get('rake1'))
        nps.nodal_plane_2 = NodalPlane(db.get('str2'), db.get('dip2'), db.get('rake2'))

        nps.preferred_plane = 1

        prin_ax = PrincipalAxes()
        prin_ax.t_axis = Axis(db.get('taxazm'),db.get('taxplg'))
        prin_ax.p_axis = Axis(db.get('paxazm'),db.get('paxplg'))

        fm.nodal_planes = nps
        fm.principal_axes = prin_ax

        author_string = ':'.join([db['algorithm'], db['auth']])
        fm.creation_info = CreationInfo(
            version = db.get('mechid'), 
            creation_time = UTCDateTime(db['lddate']), 
            agency_id = self.agency,
            author = author_string,
            ) 
        
        fm.resource_id = self._rid(fm)
        return fm
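
A usage sketch for the mapping above. The field names come straight from the CSS fplane columns referenced in the code; the numeric values and the converter object that hosts the method are hypothetical:

# Hypothetical fplane row; 'converter' stands in for whatever object
# defines _map_fplane2focalmech (it must provide self.agency and self._rid).
fplane_row = {
    'str1': 20, 'dip1': 85, 'rake1': 170,
    'str2': 111, 'dip2': 80, 'rake2': 5,
    'taxazm': 65, 'taxplg': 10, 'paxazm': 158, 'paxplg': 17,
    'algorithm': 'HASH', 'auth': 'analyst', 'mechid': 1001,
    'lddate': 1356994800.0,
}
fm = converter._map_fplane2focalmech(fplane_row)
print(fm.nodal_planes.nodal_plane_1.strike, fm.creation_info.author)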
Example #4
def _read_ndk(filename, *args, **kwargs):  # @UnusedVariable
    """
    Reads an NDK file to a :class:`~obspy.core.event.Catalog` object.

    :param filename: File or file-like object in text mode.
    """
    # Read the whole file at once. While an iterator would be more
    # efficient, the largest NDK file out in the wild is 13.7 MB, so it
    # does not matter much.
    if not hasattr(filename, "read"):
        # Check if it exists, otherwise assume it's a string.
        try:
            with open(filename, "rt") as fh:
                data = fh.read()
        except Exception:
            try:
                data = filename.decode()
            except Exception:
                data = str(filename)
            data = data.strip()
    else:
        data = filename.read()
        if hasattr(data, "decode"):
            data = data.decode()

    # Create iterator that yields lines.
    def lines_iter():
        prev_line = -1
        while True:
            next_line = data.find("\n", prev_line + 1)
            if next_line < 0:
                break
            yield data[prev_line + 1:next_line]
            prev_line = next_line
        if len(data) > prev_line + 1:
            yield data[prev_line + 1:]

    # Use one Flinn Engdahl object for all region determinations.
    fe = FlinnEngdahl()
    cat = Catalog(resource_id=_get_resource_id("catalog", str(uuid.uuid4())))

    # Loop over 5 lines at once.
    for _i, lines in enumerate(zip_longest(*[lines_iter()] * 5)):
        if None in lines:
            msg = "Skipped last %i lines. Not a multiple of 5 lines." % (
                lines.count(None))
            warnings.warn(msg, ObsPyNDKWarning)
            continue

        # Parse the lines to a human readable dictionary.
        try:
            record = _read_lines(*lines)
        except (ValueError, ObsPyNDKException):
            exc = traceback.format_exc()
            msg = ("Could not parse event %i (faulty file?). Will be "
                   "skipped. Lines of the event:\n"
                   "\t%s\n"
                   "%s") % (_i + 1, "\n\t".join(lines), exc)
            warnings.warn(msg, ObsPyNDKWarning)
            continue

        # Use one creation info for essentially every item.
        creation_info = CreationInfo(agency_id="GCMT",
                                     version=record["version_code"])

        # Use the ObsPy Flinn Engdahl region determiner as the region in the
        # NDK files is oftentimes trimmed.
        region = fe.get_region(record["centroid_longitude"],
                               record["centroid_latitude"])

        # Create an event object.
        event = Event(force_resource_id=False,
                      event_type="earthquake",
                      event_type_certainty="known",
                      event_descriptions=[
                          EventDescription(text=region,
                                           type="Flinn-Engdahl region"),
                          EventDescription(text=record["cmt_event_name"],
                                           type="earthquake name")
                      ])

        # Assemble the time for the reference origin.
        try:
            time = _parse_date_time(record["date"], record["time"])
        except ObsPyNDKException:
            msg = ("Invalid time in event %i. '%s' and '%s' cannot be "
                   "assembled to a valid time. Event will be skipped.") % \
                  (_i + 1, record["date"], record["time"])
            warnings.warn(msg, ObsPyNDKWarning)
            continue

        # Create two origins, one with the reference latitude/longitude and
        # one with the centroidal values.
        ref_origin = Origin(
            force_resource_id=False,
            time=time,
            longitude=record["hypo_lng"],
            latitude=record["hypo_lat"],
            # Convert to m.
            depth=record["hypo_depth_in_km"] * 1000.0,
            origin_type="hypocenter",
            comments=[
                Comment(text="Hypocenter catalog: %s" %
                        record["hypocenter_reference_catalog"],
                        force_resource_id=False)
            ])
        ref_origin.comments[0].resource_id = _get_resource_id(
            record["cmt_event_name"], "comment", tag="ref_origin")
        ref_origin.resource_id = _get_resource_id(record["cmt_event_name"],
                                                  "origin",
                                                  tag="reforigin")

        cmt_origin = Origin(
            force_resource_id=False,
            longitude=record["centroid_longitude"],
            longitude_errors={
                "uncertainty": record["centroid_longitude_error"]
            },
            latitude=record["centroid_latitude"],
            latitude_errors={"uncertainty": record["centroid_latitude_error"]},
            # Convert to m.
            depth=record["centroid_depth_in_km"] * 1000.0,
            depth_errors={
                "uncertainty": record["centroid_depth_in_km_error"] * 1000
            },
            time=ref_origin["time"] + record["centroid_time"],
            time_errors={"uncertainty": record["centroid_time_error"]},
            depth_type=record["type_of_centroid_depth"],
            origin_type="centroid",
            time_fixed=False,
            epicenter_fixed=False,
            creation_info=creation_info.copy())
        cmt_origin.resource_id = _get_resource_id(record["cmt_event_name"],
                                                  "origin",
                                                  tag="cmtorigin")
        event.origins = [ref_origin, cmt_origin]
        event.preferred_origin_id = cmt_origin.resource_id.id

        # Create the magnitude object.
        mag = Magnitude(force_resource_id=False,
                        mag=round(record["Mw"], 2),
                        magnitude_type="Mwc",
                        origin_id=cmt_origin.resource_id,
                        creation_info=creation_info.copy())
        mag.resource_id = _get_resource_id(record["cmt_event_name"],
                                           "magnitude",
                                           tag="moment_mag")
        event.magnitudes = [mag]
        event.preferred_magnitude_id = mag.resource_id.id

        # Add the reported mb, MS magnitudes as additional magnitude objects.
        event.magnitudes.append(
            Magnitude(
                force_resource_id=False,
                mag=record["mb"],
                magnitude_type="mb",
                comments=[
                    Comment(
                        force_resource_id=False,
                        text="Reported magnitude in NDK file. Most likely 'mb'."
                    )
                ]))
        event.magnitudes[-1].comments[-1].resource_id = _get_resource_id(
            record["cmt_event_name"], "comment", tag="mb_magnitude")
        event.magnitudes[-1].resource_id = _get_resource_id(
            record["cmt_event_name"], "magnitude", tag="mb")

        event.magnitudes.append(
            Magnitude(
                force_resource_id=False,
                mag=record["MS"],
                magnitude_type="MS",
                comments=[
                    Comment(
                        force_resource_id=False,
                        text="Reported magnitude in NDK file. Most likely 'MS'."
                    )
                ]))
        event.magnitudes[-1].comments[-1].resource_id = _get_resource_id(
            record["cmt_event_name"], "comment", tag="MS_magnitude")
        event.magnitudes[-1].resource_id = _get_resource_id(
            record["cmt_event_name"], "magnitude", tag="MS")

        # Take care of the moment tensor.
        tensor = Tensor(m_rr=record["m_rr"],
                        m_rr_errors={"uncertainty": record["m_rr_error"]},
                        m_pp=record["m_pp"],
                        m_pp_errors={"uncertainty": record["m_pp_error"]},
                        m_tt=record["m_tt"],
                        m_tt_errors={"uncertainty": record["m_tt_error"]},
                        m_rt=record["m_rt"],
                        m_rt_errors={"uncertainty": record["m_rt_error"]},
                        m_rp=record["m_rp"],
                        m_rp_errors={"uncertainty": record["m_rp_error"]},
                        m_tp=record["m_tp"],
                        m_tp_errors={"uncertainty": record["m_tp_error"]},
                        creation_info=creation_info.copy())
        mt = MomentTensor(
            force_resource_id=False,
            scalar_moment=record["scalar_moment"],
            tensor=tensor,
            data_used=[DataUsed(**i) for i in record["data_used"]],
            inversion_type=record["source_type"],
            source_time_function=SourceTimeFunction(
                type=record["moment_rate_type"],
                duration=record["moment_rate_duration"]),
            derived_origin_id=cmt_origin.resource_id,
            creation_info=creation_info.copy())
        mt.resource_id = _get_resource_id(record["cmt_event_name"],
                                          "momenttensor")
        axis = [Axis(**i) for i in record["principal_axis"]]
        focmec = FocalMechanism(
            force_resource_id=False,
            moment_tensor=mt,
            principal_axes=PrincipalAxes(
                # The ordering is the same as for the IRIS SPUD service and
                # from a website of the Saint Louis University Earthquake
                # center so it should be correct.
                t_axis=axis[0],
                p_axis=axis[2],
                n_axis=axis[1]),
            nodal_planes=NodalPlanes(
                nodal_plane_1=NodalPlane(**record["nodal_plane_1"]),
                nodal_plane_2=NodalPlane(**record["nodal_plane_2"])),
            comments=[
                Comment(force_resource_id=False,
                        text="CMT Analysis Type: %s" %
                        record["cmt_type"].capitalize()),
                Comment(force_resource_id=False,
                        text="CMT Timestamp: %s" % record["cmt_timestamp"])
            ],
            creation_info=creation_info.copy())
        focmec.comments[0].resource_id = _get_resource_id(
            record["cmt_event_name"], "comment", tag="cmt_type")
        focmec.comments[1].resource_id = _get_resource_id(
            record["cmt_event_name"], "comment", tag="cmt_timestamp")
        focmec.resource_id = _get_resource_id(record["cmt_event_name"],
                                              "focal_mechanism")
        event.focal_mechanisms = [focmec]
        event.preferred_focal_mechanism_id = focmec.resource_id.id

        # Set at end to avoid duplicate resource id warning.
        event.resource_id = _get_resource_id(record["cmt_event_name"], "event")

        cat.append(event)

    if len(cat) == 0:
        msg = "No valid events found in NDK file."
        raise ObsPyNDKException(msg)

    return cat
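
In practice _read_ndk is rarely called directly; ObsPy dispatches to it through read_events. A minimal sketch, where "jan13.ndk" is a placeholder for any GCMT NDK file:

from obspy import read_events

cat = read_events("jan13.ndk", format="NDK")  # placeholder filename
fm = cat[0].preferred_focal_mechanism()
print(fm.moment_tensor.scalar_moment, fm.nodal_planes.nodal_plane_1)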
Example #5
def makeCatalog(StazList, mt, scale, args):

    epi = args.epi.rsplit()
    model = args.model.split(os.sep)
    NrSt = len(StazList)
    NrCo = NrSt * 3
    (Fmin, Fmax) = getFreq(args)
    Tmin = ('%.0f' % (1 / Fmax))
    Tmax = ('%.0f' % (1 / Fmin))
    mo = ('%.3e' % (mt[0]))
    mw = ('%.2f' % (mt[1]))
    Pdc = ('%.2f' % (float(mt[2]) / 100))
    Pclvd = ('%.2f' % (float(mt[3]) / 100))

    Tval = ('%10.3e' % (mt[22]))
    Tplg = ('%4.1f' % (mt[23]))
    Tazi = ('%5.1f' % (mt[24]))
    Nval = ('%10.3e' % (mt[25]))
    Nplg = ('%4.1f' % (mt[26]))
    Nazi = ('%5.1f' % (mt[27]))
    Pval = ('%10.3e' % (mt[28]))
    Pplg = ('%4.1f' % (mt[29]))
    Pazi = ('%5.1f' % (mt[30]))

    STp1 = ('%5.1f' % (mt[31]))
    DPp1 = ('%4.1f' % (mt[32]))
    RAp1 = ('%6.1f' % (mt[33]))
    STp2 = ('%5.1f' % (mt[34]))
    DPp2 = ('%4.1f' % (mt[35]))
    RAp2 = ('%6.1f' % (mt[36]))
    var = ('%.2f' % (mt[37]))
    qua = ('%d' % (mt[38]))
    mij = [mt[4], mt[5], mt[6], mt[7], mt[8], mt[9]]

    mm0 = str('%10.3e' % (mij[0]))
    mm1 = str('%10.3e' % (mij[1]))
    mm2 = str('%10.3e' % (mij[2]))
    mm3 = str('%10.3e' % (mij[3]))
    mm4 = str('%10.3e' % (mij[4]))
    mm5 = str('%10.3e' % (mij[5]))
    # Aki convention
    Mrr = mm5
    Mtt = mm0
    Mff = mm1
    Mrt = mm3
    Mrf = mm4
    Mtf = mm2

    # stress regime
    A1 = PrincipalAxis(val=mt[22], dip=mt[23], strike=mt[24])
    A2 = PrincipalAxis(val=mt[25], dip=mt[26], strike=mt[27])
    A3 = PrincipalAxis(val=mt[28], dip=mt[29], strike=mt[30])

    (regime, sh) = stressRegime(A1, A2, A3)
    sh = ('%5.1f' % (sh))

    #### Build classes #################################
    #
    # By definition, the resource ID is the event origin time

    res_id = ResourceIdentifier(args.ori)
    nowUTC = datetime.datetime.utcnow()
    info = CreationInfo(author="pytdmt", version="2.4", creation_time=nowUTC)
    evOrigin = Origin(resource_id=res_id,
                      time=args.ori,
                      latitude=epi[0],
                      longitude=epi[1],
                      depth=epi[2],
                      earth_model_id=model[-1],
                      creation_info=info)
    # Magnitudes
    magnitude = Magnitude(mag=mw, magnitude_type="Mw")
    # Nodal Planes
    np1 = NodalPlane(strike=STp1, dip=DPp1, rake=RAp1)
    np2 = NodalPlane(strike=STp2, dip=DPp2, rake=RAp2)
    planes = NodalPlanes(nodal_plane_1=np1, nodal_plane_2=np2)
    # Principal axes
    Taxe = Axis(azimuth=Tazi, plunge=Tplg, length=Tval)
    Naxe = Axis(azimuth=Nazi, plunge=Nplg, length=Nval)
    Paxe = Axis(azimuth=Pazi, plunge=Pplg, length=Pval)
    axes = PrincipalAxes(t_axis=Taxe, p_axis=Paxe, n_axis=Naxe)
    # MT elements
    MT = Tensor(m_rr=Mrr, m_tt=Mtt, m_pp=Mff, m_rt=Mrt, m_rp=Mrf, m_tp=Mtf)
    # Stress regime
    regStr = 'Stress regime: ' + regime + ' -  SH = ' + sh
    strDes = EventDescription(regStr)
    # MT dataset
    dataInfo = DataUsed(wave_type="combined",
                        station_count=NrSt,
                        component_count=NrCo,
                        shortest_period=Tmin,
                        longest_period=Tmax)
    source = MomentTensor(data_used=dataInfo,
                          scalar_moment=mo,
                          tensor=MT,
                          variance_reduction=var,
                          double_couple=Pdc,
                          clvd=Pclvd,
                          iso=0)
    focMec = FocalMechanism(moment_tensor=source,
                            nodal_planes=planes,
                            principal_axes=axes,
                            azimuthal_gap=-1)

    # Initialize the event catalog
    mtSolution = Event(creation_info=info)
    mtSolution.origins.append(evOrigin)
    mtSolution.magnitudes.append(magnitude)
    mtSolution.focal_mechanisms.append(focMec)
    mtSolution.event_descriptions.append(strDes)

    cat = Catalog()
    cat.append(mtSolution)

    return cat
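
The Catalog returned by makeCatalog is typically serialized to QuakeML. A self-contained sketch of that final step with made-up values, using only the public obspy.core.event API:

from obspy import UTCDateTime
from obspy.core.event import Catalog, Event, Magnitude, Origin

ev = Event()
ev.origins.append(Origin(time=UTCDateTime("2013-01-01T00:00:00"),
                         latitude=46.0, longitude=13.0, depth=10000.0))
ev.magnitudes.append(Magnitude(mag=4.5, magnitude_type="Mw"))
cat = Catalog(events=[ev])
cat.write("mt_solution.quakeml", format="QUAKEML")  # hypothetical output path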
Example #6
def outputOBSPY(hp, event=None, only_fm_picks=False):
    """
    Make an Event which includes the current focal mechanism information from HASH
    
    Use the 'only_fm_picks' flag to only include the picks HASH used for the FocalMechanism.
    This flag will replace the 'picks' and 'arrivals' lists of existing events with new ones.
    
    Inputs
    -------
    hp    : hashpy.HashPype instance
    
    event : obspy.core.event.Event
    
    only_fm_picks : bool, whether to overwrite the picks/arrivals lists
    
    
    Returns
    -------
    obspy.core.event.Event
    
    The Event is new if no event was passed in; otherwise the FocalMechanism is added to the existing event
    """
    # Returns new (or updates existing) Event with HASH solution
    n = hp.npol
    if event is None:
        event = Event(focal_mechanisms=[], picks=[], origins=[])
        origin = Origin(arrivals=[])
        origin.time = UTCDateTime(hp.tstamp)
        origin.latitude = hp.qlat
        origin.longitude = hp.qlon
        origin.depth = hp.qdep
        origin.creation_info = CreationInfo(version=hp.icusp)
        origin.resource_id = ResourceIdentifier('smi:hash/Origin/{0}'.format(
            hp.icusp))
        for _i in range(n):
            p = Pick()
            p.creation_info = CreationInfo(version=hp.arid[_i])
            p.resource_id = ResourceIdentifier('smi:nsl/Pick/{0}'.format(
                p.creation_info.version))
            p.waveform_id = WaveformStreamID(network_code=hp.snet[_i],
                                             station_code=hp.sname[_i],
                                             channel_code=hp.scomp[_i])
            if hp.p_pol[_i] > 0:
                p.polarity = 'positive'
            else:
                p.polarity = 'negative'
            a = Arrival()
            a.creation_info = CreationInfo(version=hp.arid[_i])
            a.resource_id = ResourceIdentifier('smi:nsl/Arrival/{0}'.format(
                p.creation_info.version))
            a.azimuth = hp.p_azi_mc[_i, 0]
            a.takeoff_angle = 180. - hp.p_the_mc[_i, 0]
            a.pick_id = p.resource_id
            origin.arrivals.append(a)
            event.picks.append(p)
        event.origins.append(origin)
        event.preferred_origin_id = origin.resource_id.resource_id
    else:  # just update the changes
        origin = event.preferred_origin()
        picks = []
        arrivals = []
        for _i in range(n):
            ind = hp.p_index[_i]
            a = origin.arrivals[ind]
            p = a.pick_id.getReferredObject()
            a.takeoff_angle = hp.p_the_mc[_i, 0]
            picks.append(p)
            arrivals.append(a)
        if only_fm_picks:
            origin.arrivals = arrivals
            event.picks = picks
    # Use the double couple calculator and populate the planes/axes etc.
    x = hp._best_quality_index
    # Put all the mechanisms into the 'focal_mechanisms' list, mark "best" as preferred
    for s in range(hp.nmult):
        dc = DoubleCouple([hp.str_avg[s], hp.dip_avg[s], hp.rak_avg[s]])
        ax = dc.axis
        focal_mech = FocalMechanism()
        focal_mech.creation_info = CreationInfo(creation_time=UTCDateTime(),
                                                author=hp.author)
        focal_mech.triggering_origin_id = origin.resource_id
        focal_mech.resource_id = ResourceIdentifier(
            'smi:hash/FocalMechanism/{0}/{1}'.format(hp.icusp, s + 1))
        focal_mech.method_id = ResourceIdentifier('HASH')
        focal_mech.nodal_planes = NodalPlanes()
        focal_mech.nodal_planes.nodal_plane_1 = NodalPlane(*dc.plane1)
        focal_mech.nodal_planes.nodal_plane_2 = NodalPlane(*dc.plane2)
        focal_mech.principal_axes = PrincipalAxes()
        focal_mech.principal_axes.t_axis = Axis(azimuth=ax['T']['azimuth'],
                                                plunge=ax['T']['dip'])
        focal_mech.principal_axes.p_axis = Axis(azimuth=ax['P']['azimuth'],
                                                plunge=ax['P']['dip'])
        focal_mech.station_polarity_count = n
        focal_mech.azimuthal_gap = hp.magap
        focal_mech.misfit = hp.mfrac[s]
        focal_mech.station_distribution_ratio = hp.stdr[s]
        focal_mech.comments.append(
            Comment(
                hp.qual[s],
                resource_id=ResourceIdentifier(
                    focal_mech.resource_id.resource_id + '/comment/quality')))
        #----------------------------------------
        event.focal_mechanisms.append(focal_mech)
        if s == x:
            event.preferred_focal_mechanism_id = focal_mech.resource_id.resource_id
    return event
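
The loop above stores every HASH solution on the event and marks the best-quality one as preferred through its resource ID. A self-contained sketch of that pattern with made-up planes:

from obspy.core.event import Event, FocalMechanism, NodalPlane, NodalPlanes

ev = Event()
for strike, dip, rake in [(20, 85, 170), (40, 60, 90)]:  # made-up solutions
    ev.focal_mechanisms.append(FocalMechanism(nodal_planes=NodalPlanes(
        nodal_plane_1=NodalPlane(strike=strike, dip=dip, rake=rake))))
best = 0  # plays the role of hp._best_quality_index
ev.preferred_focal_mechanism_id = str(ev.focal_mechanisms[best].resource_id)
print(ev.preferred_focal_mechanism_id)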
Example #7
    def build(self):
        """
        Build an obspy moment tensor focal mech event

        This makes the tensor output into an Event containing:
        1) a FocalMechanism with a MomentTensor, NodalPlanes, and PrincipalAxes
        2) a Magnitude of the Mw from the Tensor

        Which is what we want for outputting QuakeML using
        the (slightly modified) obspy code.

        Input
        -----
        filehandle => open file OR str from filehandle.read()

        Output
        ------
        event => instance of Event() class as described above
        """
        p = self.parser
        event         = Event(event_type='earthquake')
        origin        = Origin()
        focal_mech    = FocalMechanism()
        nodal_planes  = NodalPlanes()
        moment_tensor = MomentTensor()
        principal_ax  = PrincipalAxes()
        magnitude     = Magnitude()
        data_used     = DataUsed()
        creation_info = CreationInfo(agency_id='NN')
        ev_mode = 'automatic'
        ev_stat = 'preliminary'
        evid = None
        orid = None
        # Parse the entire file line by line.
        for n,l in enumerate(p.line):
            if 'REVIEWED BY NSL STAFF' in l:
                ev_mode = 'manual'
                ev_stat = 'reviewed'
            if 'Event ID' in l:
                evid = p._id(n)
            if 'Origin ID' in l:
                orid = p._id(n)
            if 'Ichinose' in l:
                moment_tensor.category = 'regional'
            if re.match(r'^\d{4}\/\d{2}\/\d{2}', l):
                ev = p._event_info(n)
            if 'Depth' in l:
                derived_depth = p._depth(n)
            if 'Mw' in l:
                magnitude.mag = p._mw(n) 
                magnitude.magnitude_type = 'Mw'
            if 'Mo' in l and 'dyne' in l:
                moment_tensor.scalar_moment = p._mo(n)
            if 'Percent Double Couple' in l:
                moment_tensor.double_couple = p._percent(n)
            if 'Percent CLVD' in l:
                moment_tensor.clvd = p._percent(n)
            if 'Epsilon' in l:
                moment_tensor.variance = p._epsilon(n)
            if 'Percent Variance Reduction' in l:
                moment_tensor.variance_reduction = p._percent(n)
            if 'Major Double Couple' in l and 'strike' in p.line[n+1]:
                np = p._double_couple(n)
                nodal_planes.nodal_plane_1 = NodalPlane(*np[0])
                nodal_planes.nodal_plane_2 = NodalPlane(*np[1])
                nodal_planes.preferred_plane = 1
            if 'Spherical Coordinates' in l:
                mt = p._mt_sphere(n)
                moment_tensor.tensor = Tensor(
                    m_rr = mt['Mrr'],
                    m_tt = mt['Mtt'],
                    m_pp = mt['Mff'],
                    m_rt = mt['Mrt'],
                    m_rp = mt['Mrf'],
                    m_tp = mt['Mtf'],
                    )
            if 'Eigenvalues and eigenvectors of the Major Double Couple' in l:
                ax = p._vectors(n)
                principal_ax.t_axis = Axis(ax['T']['trend'], ax['T']['plunge'], ax['T']['ev'])
                principal_ax.p_axis = Axis(ax['P']['trend'], ax['P']['plunge'], ax['P']['ev'])
                principal_ax.n_axis = Axis(ax['N']['trend'], ax['N']['plunge'], ax['N']['ev'])
            if 'Number of Stations' in l:
                data_used.station_count = p._number_of_stations(n)
            if 'Maximum' in l and 'Gap' in l:
                focal_mech.azimuthal_gap = p._gap(n)
            if re.match(r'^Date', l):
                creation_info.creation_time = p._creation_time(n)
        # Use the origin ID as the creation info version
        creation_info.version = orid
        # Fill in magnitude values
        magnitude.evaluation_mode = ev_mode
        magnitude.evaluation_status = ev_stat
        magnitude.creation_info = creation_info.copy()
        magnitude.resource_id = self._rid(magnitude)
        # Stub origin
        origin.time = ev.get('time')
        origin.latitude = ev.get('lat')
        origin.longitude = ev.get('lon')
        origin.depth = derived_depth * 1000.
        origin.depth_type = "from moment tensor inversion"
        origin.creation_info = creation_info.copy()
        # Make the resource ID distinct from the true origin ID
        _oid = self._rid(origin)
        origin.resource_id = ResourceIdentifier(str(_oid) + '/mt')
        del _oid
        # Make an id for the MT that references this origin
        ogid = str(origin.resource_id)
        doid = ResourceIdentifier(ogid, referred_object=origin)
        # Make an id for the moment tensor mag which references this mag
        mrid = str(magnitude.resource_id)
        mmid = ResourceIdentifier(mrid, referred_object=magnitude)
        # MT todo: could check/use URL for RID if parsing the php file
        moment_tensor.evaluation_mode = ev_mode
        moment_tensor.evaluation_status = ev_stat
        moment_tensor.data_used = data_used
        moment_tensor.moment_magnitude_id = mmid
        moment_tensor.derived_origin_id = doid
        moment_tensor.creation_info = creation_info.copy()
        moment_tensor.resource_id = self._rid(moment_tensor)
        # Fill in focal_mech values
        focal_mech.nodal_planes  = nodal_planes
        focal_mech.moment_tensor = moment_tensor
        focal_mech.principal_axes = principal_ax
        focal_mech.creation_info = creation_info.copy()
        focal_mech.resource_id = self._rid(focal_mech)
        # add mech and new magnitude to event
        event.focal_mechanisms = [focal_mech]
        event.magnitudes = [magnitude]
        event.origins = [origin]
        event.creation_info = creation_info.copy()
        # If an MT was done, that's the preferred mag/mech
        event.preferred_magnitude_id = str(magnitude.resource_id)
        event.preferred_focal_mechanism_id = str(focal_mech.resource_id)
        if evid:
            event.creation_info.version = evid
        event.resource_id = self._rid(event)
        self.event = event
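
The ResourceIdentifier(..., referred_object=...) calls above are what tie the MomentTensor back to its derived origin and magnitude. A self-contained sketch of that referred-object pattern (the ID string and values are made up):

from obspy.core.event import Origin, ResourceIdentifier

org = Origin(latitude=39.5, longitude=-119.8, depth=8000.0)  # made-up origin
org.resource_id = ResourceIdentifier("smi:local/origin/example")
# A second identifier built from the same ID string and explicitly bound to
# the origin resolves back to it; derived_origin_id works the same way.
rid = ResourceIdentifier("smi:local/origin/example", referred_object=org)
print(rid.get_referred_object() is org)  # True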