Example 1
    def _map_fplane2focalmech(self, db):
        """
        Return an obspy FocalMechanism from a dict of CSS key/values
        corresponding to one record. See the 'Join' section for the implied
        database join expected.
        
        Inputs
        ======
        db : dict of key/values of CSS fields from the 'fplane' table

        Returns
        =======
        obspy.core.event.FocalMechanism

        Notes
        =====
        Any object that supports the dict 'get' method can be passed as
        input, e.g. OrderedDict, custom classes, etc.

        """
        #
        # NOTE: Antelope schema for this is wrong, no nulls defined
        # 
        fm = FocalMechanism()

        nps = NodalPlanes()
        nps.nodal_plane_1 = NodalPlane(db.get('str1'), db.get('dip1'), db.get('rake1'))
        nps.nodal_plane_2 = NodalPlane(db.get('str2'), db.get('dip2'), db.get('rake2'))

        nps.preferred_plane = 1

        prin_ax = PrincipalAxes()
        prin_ax.t_axis = Axis(db.get('taxazm'),db.get('taxplg'))
        prin_ax.p_axis = Axis(db.get('paxazm'),db.get('paxplg'))

        fm.nodal_planes = nps
        fm.principal_axes = prin_ax

        author_string = ':'.join([db['algorithm'], db['auth']])
        fm.creation_info = CreationInfo(
            version = db.get('mechid'), 
            creation_time = UTCDateTime(db['lddate']), 
            agency_id = self.agency,
            author = author_string,
            ) 
        
        fm.resource_id = self._rid(fm)
        return fm
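Since the converter above only relies on dict-style get() access, a plain dict is enough as input. A minimal, hypothetical sketch (field names follow the CSS 'fplane' columns read above; the values are made up):

from obspy.core.event import FocalMechanism, NodalPlane, NodalPlanes

record = {'str1': 35.0, 'dip1': 60.0, 'rake1': -90.0,
          'str2': 215.0, 'dip2': 30.0, 'rake2': -90.0}
fm = FocalMechanism(nodal_planes=NodalPlanes(
    nodal_plane_1=NodalPlane(record.get('str1'), record.get('dip1'), record.get('rake1')),
    nodal_plane_2=NodalPlane(record.get('str2'), record.get('dip2'), record.get('rake2')),
    preferred_plane=1))
print(fm.nodal_planes.nodal_plane_1.strike)  # 35.0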
Example 2
def test_with_quakeml():
    np1 = NodalPlane(strike=259, dip=74, rake=10)
    np2 = NodalPlane(strike=166, dip=80, rake=164)
    nodal_planes = NodalPlanes(nodal_plane_1=np1, nodal_plane_2=np2)
    focal = FocalMechanism(nodal_planes=nodal_planes)
    event = Event(focal_mechanisms=[focal])
    catalog = Catalog(events=[event])
    event_text = '''<shakemap-data code_version="4.0" map_version="1">
<earthquake id="us2000cmy3" lat="56.046" lon="-149.073" mag="7.9"
time="2018-01-23T09:31:42Z"
depth="25.00" locstring="280km SE of Kodiak, Alaska" netid="us" network=""/>
</shakemap-data>'''
    try:
        tempdir = tempfile.mkdtemp()
        xmlfile = os.path.join(tempdir, 'quakeml.xml')
        catalog.write(xmlfile, format="QUAKEML")
        eventfile = os.path.join(tempdir, 'event.xml')
        f = open(eventfile, 'wt')
        f.write(event_text)
        f.close()
        params = read_moment_quakeml(xmlfile)
        origin = Origin.fromFile(eventfile, momentfile=xmlfile)
        x = 1
    except Exception as e:
        assert 1 == 2
    finally:
        shutil.rmtree(tempdir)
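The temporary-directory handling in the test above can also be written with a context manager, which removes the need for the explicit shutil.rmtree in the finally block; a small stdlib-only sketch:

import os
import tempfile

with tempfile.TemporaryDirectory() as tempdir:
    eventfile = os.path.join(tempdir, 'event.xml')
    with open(eventfile, 'wt') as f:
        f.write('<shakemap-data code_version="4.0" map_version="1"/>')
    # the directory and its contents are removed when the block exits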
Example 3
def add_dconly(event):
    for fm in event.focal_mechanisms:
        if fm.moment_tensor.resource_id is not None:
            import obspyutils.momenttensor
            mtdc = obspyutils.momenttensor.extractDC(fm.moment_tensor)
            from obspy.core.event import FocalMechanism
            event.focal_mechanisms.append(
                FocalMechanism(method_id=fm.method_id, moment_tensor=mtdc))
            return
    return
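extractDC above comes from the third-party obspyutils package. For orientation only, a rough numpy sketch of one common way to define the best double couple (this is not the obspyutils implementation): remove the isotropic part, then keep the couple built from the largest and smallest eigenvalues of the deviatoric tensor.

import numpy as np

def best_double_couple(m):
    """m: 3x3 symmetric moment tensor as a numpy array."""
    m_dev = m - np.trace(m) / 3.0 * np.eye(3)   # deviatoric part
    w, v = np.linalg.eigh(m_dev)                # eigenvalues in ascending order
    e_min, e_max = v[:, 0], v[:, 2]
    return 0.5 * (w[2] - w[0]) * (np.outer(e_max, e_max) - np.outer(e_min, e_min))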
Example 4
def _read_focmec_out(lines):
    """
    Read given data into an :class:`~obspy.core.event.Event` object.

    :type lines: list
    :param lines: List of decoded unicode strings with data from a FOCMEC out
        file.
    """
    event, _ = _read_common_header(lines)
    # now move to first line with a focal mechanism
    for i, line in enumerate(lines):
        if line.split()[:3] == ['Dip', 'Strike', 'Rake']:
            break
    else:
        return event
    header = lines[:i]
    polarity_count, weighted = _get_polarity_count(header)
    focmec_list_header = lines[i]
    event.comments.append(Comment(text='\n'.join(header)))
    try:
        lines = lines[i + 1:]
    except IndexError:
        return event
    for line in lines:
        # allow for empty lines (maybe they can happen at the end sometimes..)
        if not line.strip():
            continue
        comment = Comment(text='\n'.join((focmec_list_header, line)))
        items = line.split()
        dip, strike, rake = [float(x) for x in items[:3]]
        plane = NodalPlane(strike=strike, dip=dip, rake=rake)
        planes = NodalPlanes(nodal_plane_1=plane, preferred_plane=1)
        # XXX ideally should compute the auxiliary plane (see the sketch after this function)
        focmec = FocalMechanism(nodal_planes=planes)
        focmec.station_polarity_count = polarity_count
        focmec.creation_info = CreationInfo(
            version='FOCMEC', creation_time=event.creation_info.creation_time)
        if not weighted:
            errors = sum([int(x) for x in items[3:6]])
            focmec.misfit = float(errors) / polarity_count
        focmec.comments.append(comment)
        event.focal_mechanisms.append(focmec)
    return event
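The XXX note in _read_focmec_out points at the missing auxiliary plane. One way to fill it in is obspy's aux_plane helper, which derives the second nodal plane from the first; a sketch with made-up angles:

from obspy.core.event import NodalPlane, NodalPlanes
from obspy.imaging.beachball import aux_plane

strike, dip, rake = 166.0, 80.0, 164.0
s2, d2, r2 = aux_plane(strike, dip, rake)
planes = NodalPlanes(
    nodal_plane_1=NodalPlane(strike=strike, dip=dip, rake=rake),
    nodal_plane_2=NodalPlane(strike=s2, dip=d2, rake=r2),
    preferred_plane=1)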
Example 5
def __toFocalMechanism(parser, focmec_el):
    """
    """
    global CURRENT_TYPE
    focmec = FocalMechanism()
    focmec.resource_id = ResourceIdentifier(
        prefix="/".join([RESOURCE_ROOT, "focal_mechanism"]))
    if CURRENT_TYPE == "obspyck":
        focmec.method_id = "%s/focal_mechanism_method/focmec/1" % RESOURCE_ROOT
    else:
        focmec.method_id = "%s/focal_mechanism_method/%s/1" % (
            RESOURCE_ROOT, parser.xpath2obj('program', focmec_el))
    if str(focmec.method_id).lower().endswith("none"):
        focmec.method_id = None
    focmec.station_polarity_count = parser.xpath2obj("stationPolarityCount",
                                                     focmec_el, int)
    if focmec.station_polarity_count:
        focmec.misfit = parser.xpath2obj("stationPolarityErrorCount",
                                         focmec_el, int) / float(
                                             focmec.station_polarity_count)
    focmec.nodal_planes = NodalPlanes()
    focmec.nodal_planes.nodal_plane_1 = NodalPlane()
    nodal_plane = focmec_el.find("nodalPlanes")
    if nodal_plane is None or not len(nodal_plane):
        return None
    n_p = focmec.nodal_planes.nodal_plane_1
    # There is always only one nodal plane, called nodalPlane1
    n_p.strike, strike_uncertainty = __toFloatQuantity(
        parser, focmec_el, "nodalPlanes/nodalPlane1/strike")
    n_p.dip, dip_uncertainty = __toFloatQuantity(
        parser, focmec_el, "nodalPlanes/nodalPlane1/dip")
    n_p.rake, rake_uncertainty = __toFloatQuantity(
        parser, focmec_el, "nodalPlanes/nodalPlane1/rake")
    if hasattr(strike_uncertainty, "uncertainty"):
        n_p.strike_errors.uncertainty = strike_uncertainty["uncertainty"]
    if hasattr(dip_uncertainty, "uncertainty"):
        n_p.dip_errors.uncertainty = dip_uncertainty["uncertainty"]
    if hasattr(rake_uncertainty, "uncertainty"):
        n_p.rake_errors.uncertainty = rake_uncertainty["uncertainty"]
    solution_count = parser.xpath2obj("possibleSolutionCount", focmec_el, int)
    if solution_count:
        focmec.comments.append(
            Comment(force_resource_id=False,
                    resource_id=None,
                    text="Possible Solution Count: %i" % solution_count))
    return focmec
Example 6
def event_to_quakeml(event, filename):
    """
    Write one of those events to QuakeML.
    """
    # Create all objects.
    cat = Catalog()
    ev = Event()
    org = Origin()
    mag = Magnitude()
    fm = FocalMechanism()
    mt = MomentTensor()
    t = Tensor()
    # Link them together.
    cat.append(ev)
    ev.origins.append(org)
    ev.magnitudes.append(mag)
    ev.focal_mechanisms.append(fm)
    fm.moment_tensor = mt
    mt.tensor = t

    # Fill values
    ev.resource_id = "smi:inversion/%s" % str(event["identifier"])
    org.time = event["time"]
    org.longitude = event["longitude"]
    org.latitude = event["latitude"]
    org.depth = event["depth_in_km"] * 1000

    mag.mag = event["Mw"]
    mag.magnitude_type = "Mw"

    t.m_rr = event["Mrr"]
    t.m_tt = event["Mpp"]
    t.m_pp = event["Mtt"]
    t.m_rt = event["Mrt"]
    t.m_rp = event["Mrp"]
    t.m_tp = event["Mtp"]

    cat.write(filename, format="quakeml")
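A hypothetical call of the writer above; the dict keys are exactly the ones the function reads, and all values are made up:

from obspy import UTCDateTime

example_event = {
    "identifier": "test001",
    "time": UTCDateTime(2018, 1, 23, 9, 31, 42),
    "longitude": -149.073,
    "latitude": 56.046,
    "depth_in_km": 25.0,
    "Mw": 7.9,
    "Mrr": 1.0e20, "Mtt": -0.5e20, "Mpp": -0.5e20,
    "Mrt": 0.1e20, "Mrp": 0.2e20, "Mtp": 0.3e20,
}
event_to_quakeml(example_event, "inversion_event.xml")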
Example 7
def _read_focmec_lst_one_block(lines, polarity_count=None):
    comment = Comment(text='\n'.join(lines))
    while lines and not lines[0].lstrip().startswith('Dip,Strike,Rake'):
        lines.pop(0)
    # the last block does not contain a focmec but only a short comment how
    # many solutions there were overall, so we hit a block that will not have
    # the above line and we exhaust the lines list
    if not lines:
        return None, []
    dip, strike, rake = [float(x) for x in lines[0].split()[1:4]]
    plane1 = NodalPlane(strike=strike, dip=dip, rake=rake)
    lines.pop(0)
    dip, strike, rake = [float(x) for x in lines[0].split()[1:4]]
    plane2 = NodalPlane(strike=strike, dip=dip, rake=rake)
    planes = NodalPlanes(nodal_plane_1=plane1, nodal_plane_2=plane2,
                         preferred_plane=1)
    focmec = FocalMechanism(nodal_planes=planes)
    focmec.comments.append(comment)
    if polarity_count is not None:
        polarity_errors = _get_polarity_error_count_lst_block(lines)
        focmec.station_polarity_count = polarity_count
        focmec.misfit = float(polarity_errors) / polarity_count
    return focmec, lines
Example 8
def __toFocalMechanism(parser, focmec_el):
    """
    """
    global CURRENT_TYPE
    focmec = FocalMechanism()
    focmec.resource_id = ResourceIdentifier(prefix="/".join([RESOURCE_ROOT, "focal_mechanism"]))
    if CURRENT_TYPE == "obspyck":
        focmec.method_id = "%s/focal_mechanism_method/focmec/1" % RESOURCE_ROOT
    else:
        focmec.method_id = "%s/focal_mechanism_method/%s/1" % (RESOURCE_ROOT,
            parser.xpath2obj('program', focmec_el))
    if str(focmec.method_id).lower().endswith("none"):
        focmec.method_id = None
    focmec.station_polarity_count = parser.xpath2obj("stationPolarityCount",
        focmec_el, int)
    if focmec.station_polarity_count:
        focmec.misfit = parser.xpath2obj("stationPolarityErrorCount",
                focmec_el, int) / float(focmec.station_polarity_count)
    focmec.nodal_planes = NodalPlanes()
    focmec.nodal_planes.nodal_plane_1 = NodalPlane()
    nodal_plane = focmec_el.find("nodalPlanes")
    if nodal_plane is None or not len(nodal_plane):
        return None
    n_p = focmec.nodal_planes.nodal_plane_1
    # There is always only one nodal plane, called nodalPlane1
    n_p.strike, strike_uncertainty = __toFloatQuantity(parser,
        focmec_el, "nodalPlanes/nodalPlane1/strike")
    n_p.dip, dip_uncertainty = __toFloatQuantity(parser, focmec_el,
        "nodalPlanes/nodalPlane1/dip")
    n_p.rake, rake_uncertainty = __toFloatQuantity(parser,
        focmec_el, "nodalPlanes/nodalPlane1/rake")
    if hasattr(strike_uncertainty, "uncertainty"):
        n_p.strike_errors.uncertainty = strike_uncertainty["uncertainty"]
    if hasattr(dip_uncertainty, "uncertainty"):
        n_p.dip_errors.uncertainty = dip_uncertainty["uncertainty"]
    if hasattr(rake_uncertainty, "uncertainty"):
        n_p.rake_errors.uncertainty = rake_uncertainty["uncertainty"]
    solution_count = parser.xpath2obj("possibleSolutionCount", focmec_el, int)
    if solution_count:
        focmec.comments.append(Comment(
            force_resource_id=False, resource_id=None,
            text="Possible Solution Count: %i" % solution_count))
    return focmec
Example 9
def test_with_quakeml():
    np1 = NodalPlane(strike=259, dip=74, rake=10)
    np2 = NodalPlane(strike=166, dip=80, rake=164)
    nodal_planes = NodalPlanes(nodal_plane_1=np1, nodal_plane_2=np2)
    taxis = Axis(plunge=40, azimuth=70)
    naxis = Axis(plunge=50, azimuth=80)
    paxis = Axis(plunge=60, azimuth=90)
    paxes = PrincipalAxes(t_axis=taxis,
                          n_axis=naxis,
                          p_axis=paxis)
    focal = FocalMechanism(nodal_planes=nodal_planes,
                           principal_axes=paxes)
    event = Event(focal_mechanisms=[focal])
    catalog = Catalog(events=[event])
    event_text = '''<shakemap-data code_version="4.0" map_version="1">
<earthquake id="us2000cmy3" lat="56.046" lon="-149.073" mag="7.9"
time="2018-01-23T09:31:42Z"
depth="25.00" locstring="280km SE of Kodiak, Alaska" netid="us" network=""/>
</shakemap-data>'''
    try:
        tempdir = tempfile.mkdtemp()
        xmlfile = os.path.join(tempdir, 'quakeml.xml')
        catalog.write(xmlfile, format="QUAKEML")
        eventfile = os.path.join(tempdir, 'event.xml')
        f = open(eventfile, 'wt')
        f.write(event_text)
        f.close()
        params = read_moment_quakeml(xmlfile)
        assert params['moment']['NP1']['strike'] == 259.0
        assert params['moment']['NP1']['dip'] == 74.0
        assert params['moment']['NP1']['rake'] == 10.0
        assert params['moment']['NP2']['strike'] == 166.0
        assert params['moment']['NP2']['dip'] == 80.0
        assert params['moment']['NP2']['rake'] == 164.0
        origin = Origin.fromFile(eventfile, momentfile=xmlfile)
        assert origin.mag == 7.9
        assert origin.lat == 56.046
        assert origin.lon == -149.073
        assert origin.id == 'us2000cmy3'
    except Exception:
        assert False
    finally:
        shutil.rmtree(tempdir)
Example 10
    def build(self):
        """
        Build an obspy moment tensor focal mech event

        This makes the tensor output into an Event containing:
        1) a FocalMechanism with a MomentTensor, NodalPlanes, and PrincipalAxes
        2) a Magnitude of the Mw from the Tensor

        Which is what we want for outputting QuakeML using
        the (slightly modified) obspy code.

        Input
        -----
        self.parser : parser over the file contents (an open file or the
            str from filehandle.read())

        Output
        ------
        event => instance of Event() class as described above
        """
        p = self.parser
        event         = Event(event_type='earthquake')
        origin        = Origin()
        focal_mech    = FocalMechanism()
        nodal_planes  = NodalPlanes()
        moment_tensor = MomentTensor()
        principal_ax  = PrincipalAxes()
        magnitude     = Magnitude()
        data_used     = DataUsed()
        creation_info = CreationInfo(agency_id='NN')
        ev_mode = 'automatic'
        ev_stat = 'preliminary'
        evid = None
        orid = None
        # Parse the entire file line by line.
        for n,l in enumerate(p.line):
            if 'REVIEWED BY NSL STAFF' in l:
                ev_mode = 'manual'
                ev_stat = 'reviewed'
            if 'Event ID' in l:
                evid = p._id(n)
            if 'Origin ID' in l:
                orid = p._id(n)
            if 'Ichinose' in l:
                moment_tensor.category = 'regional'
            if re.match(r'^\d{4}\/\d{2}\/\d{2}', l):
                ev = p._event_info(n)
            if 'Depth' in l:
                derived_depth = p._depth(n)
            if 'Mw' in l:
                magnitude.mag = p._mw(n) 
                magnitude.magnitude_type = 'Mw'
            if 'Mo' in l and 'dyne' in l:
                moment_tensor.scalar_moment = p._mo(n)
            if 'Percent Double Couple' in l:
                moment_tensor.double_couple = p._percent(n)
            if 'Percent CLVD' in l:
                moment_tensor.clvd = p._percent(n)
            if 'Epsilon' in l:
                moment_tensor.variance = p._epsilon(n)
            if 'Percent Variance Reduction' in l:
                moment_tensor.variance_reduction = p._percent(n)
            if 'Major Double Couple' in l and 'strike' in p.line[n+1]:
                np = p._double_couple(n)
                nodal_planes.nodal_plane_1 = NodalPlane(*np[0])
                nodal_planes.nodal_plane_2 = NodalPlane(*np[1])
                nodal_planes.preferred_plane = 1
            if 'Spherical Coordinates' in l:
                mt = p._mt_sphere(n)
                moment_tensor.tensor = Tensor(
                    m_rr = mt['Mrr'],
                    m_tt = mt['Mtt'],
                    m_pp = mt['Mff'],
                    m_rt = mt['Mrt'],
                    m_rp = mt['Mrf'],
                    m_tp = mt['Mtf'],
                    )
            if 'Eigenvalues and eigenvectors of the Major Double Couple' in l:
                ax = p._vectors(n)
                principal_ax.t_axis = Axis(ax['T']['trend'], ax['T']['plunge'], ax['T']['ev'])
                principal_ax.p_axis = Axis(ax['P']['trend'], ax['P']['plunge'], ax['P']['ev'])
                principal_ax.n_axis = Axis(ax['N']['trend'], ax['N']['plunge'], ax['N']['ev'])
            if 'Number of Stations' in l:
                data_used.station_count = p._number_of_stations(n)
            if 'Maximum' in l and 'Gap' in l:
                focal_mech.azimuthal_gap = p._gap(n)
            if re.match(r'^Date', l):
                creation_info.creation_time = p._creation_time(n)
        # Creation Time
        creation_info.version = orid
        # Fill in magnitude values
        magnitude.evaluation_mode = ev_mode
        magnitude.evaluation_status = ev_stat
        magnitude.creation_info = creation_info.copy()
        magnitude.resource_id = self._rid(magnitude)
        # Stub origin
        origin.time = ev.get('time')
        origin.latitude = ev.get('lat')
        origin.longitude = ev.get('lon')
        origin.depth = derived_depth * 1000.
        origin.depth_type = "from moment tensor inversion"
        origin.creation_info = creation_info.copy()
        # Unique from true origin ID
        _oid = self._rid(origin)
        origin.resource_id = ResourceIdentifier(str(_oid) + '/mt')
        del _oid
        # Make an id for the MT that references this origin
        ogid = str(origin.resource_id)
        doid = ResourceIdentifier(ogid, referred_object=origin)
        # Make an id for the moment tensor mag which references this mag
        mrid = str(magnitude.resource_id)
        mmid = ResourceIdentifier(mrid, referred_object=magnitude)
        # MT todo: could check/use URL for RID if parsing the php file
        moment_tensor.evaluation_mode = ev_mode
        moment_tensor.evaluation_status = ev_stat
        moment_tensor.data_used = data_used
        moment_tensor.moment_magnitude_id = mmid
        moment_tensor.derived_origin_id = doid
        moment_tensor.creation_info = creation_info.copy()
        moment_tensor.resource_id = self._rid(moment_tensor)
        # Fill in focal_mech values
        focal_mech.nodal_planes  = nodal_planes
        focal_mech.moment_tensor = moment_tensor
        focal_mech.principal_axes = principal_ax
        focal_mech.creation_info = creation_info.copy()
        focal_mech.resource_id = self._rid(focal_mech)
        # add mech and new magnitude to event
        event.focal_mechanisms = [focal_mech]
        event.magnitudes = [magnitude]
        event.origins = [origin]
        event.creation_info = creation_info.copy()
        # If an MT was done, that's the preferred mag/mech
        event.preferred_magnitude_id = str(magnitude.resource_id)
        event.preferred_focal_mechanism_id = str(focal_mech.resource_id)
        if evid:
            event.creation_info.version = evid
        event.resource_id = self._rid(event)
        self.event = event
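The ResourceIdentifier(..., referred_object=...) pattern used above is what lets ids such as derived_origin_id resolve back to the referenced object later. A minimal sketch of that mechanism:

from obspy.core.event import Magnitude, ResourceIdentifier

mag = Magnitude(mag=5.0, magnitude_type='Mw')
rid = ResourceIdentifier(str(mag.resource_id), referred_object=mag)
print(rid.get_referred_object() is mag)  # True while 'mag' is alive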
Example 11
def full_test_event():
    """
    Function to generate a basic, full test event
    """
    test_event = Event()
    test_event.origins.append(
        Origin(time=UTCDateTime("2012-03-26") + 1.2,
               latitude=45.0,
               longitude=25.0,
               depth=15000))
    test_event.event_descriptions.append(EventDescription())
    test_event.event_descriptions[0].text = 'LE'
    test_event.creation_info = CreationInfo(agency_id='TES')
    test_event.magnitudes.append(
        Magnitude(mag=0.1,
                  magnitude_type='ML',
                  creation_info=CreationInfo('TES'),
                  origin_id=test_event.origins[0].resource_id))
    test_event.magnitudes.append(
        Magnitude(mag=0.5,
                  magnitude_type='Mc',
                  creation_info=CreationInfo('TES'),
                  origin_id=test_event.origins[0].resource_id))
    test_event.magnitudes.append(
        Magnitude(mag=1.3,
                  magnitude_type='Ms',
                  creation_info=CreationInfo('TES'),
                  origin_id=test_event.origins[0].resource_id))

    # Define the test pick
    _waveform_id_1 = WaveformStreamID(station_code='FOZ',
                                      channel_code='SHZ',
                                      network_code='NZ')
    _waveform_id_2 = WaveformStreamID(station_code='WTSZ',
                                      channel_code='BH1',
                                      network_code=' ')
    # Pick to associate with amplitude
    test_event.picks.append(
        Pick(waveform_id=_waveform_id_1,
             phase_hint='IAML',
             polarity='undecidable',
             time=UTCDateTime("2012-03-26") + 1.68,
             evaluation_mode="manual"))
    # Need a second pick for coda
    test_event.picks.append(
        Pick(waveform_id=_waveform_id_1,
             onset='impulsive',
             phase_hint='PN',
             polarity='positive',
             time=UTCDateTime("2012-03-26") + 1.68,
             evaluation_mode="manual"))
    # Unassociated pick
    test_event.picks.append(
        Pick(waveform_id=_waveform_id_2,
             onset='impulsive',
             phase_hint='SG',
             polarity='undecidable',
             time=UTCDateTime("2012-03-26") + 1.72,
             evaluation_mode="manual"))
    # Unassociated pick
    test_event.picks.append(
        Pick(waveform_id=_waveform_id_2,
             onset='impulsive',
             phase_hint='PN',
             polarity='undecidable',
             time=UTCDateTime("2012-03-26") + 1.62,
             evaluation_mode="automatic"))
    # Test a generic local magnitude amplitude pick
    test_event.amplitudes.append(
        Amplitude(generic_amplitude=2.0,
                  period=0.4,
                  pick_id=test_event.picks[0].resource_id,
                  waveform_id=test_event.picks[0].waveform_id,
                  unit='m',
                  magnitude_hint='ML',
                  category='point',
                  type='AML'))
    # Test a coda magnitude pick
    test_event.amplitudes.append(
        Amplitude(generic_amplitude=10,
                  pick_id=test_event.picks[1].resource_id,
                  waveform_id=test_event.picks[1].waveform_id,
                  type='END',
                  category='duration',
                  unit='s',
                  magnitude_hint='Mc',
                  snr=2.3))
    test_event.origins[0].arrivals.append(
        Arrival(time_weight=0,
                phase=test_event.picks[1].phase_hint,
                pick_id=test_event.picks[1].resource_id))
    test_event.origins[0].arrivals.append(
        Arrival(time_weight=2,
                phase=test_event.picks[2].phase_hint,
                pick_id=test_event.picks[2].resource_id,
                backazimuth_residual=5,
                time_residual=0.2,
                distance=15,
                azimuth=25))
    test_event.origins[0].arrivals.append(
        Arrival(time_weight=2,
                phase=test_event.picks[3].phase_hint,
                pick_id=test_event.picks[3].resource_id,
                backazimuth_residual=5,
                time_residual=0.2,
                distance=15,
                azimuth=25))
    # Add in error info (line E)
    test_event.origins[0].quality = OriginQuality(standard_error=0.01,
                                                  azimuthal_gap=36)
    # Origin uncertainty in Seisan is output as long-lat-depth, quakeML has
    # semi-major and semi-minor
    test_event.origins[0].origin_uncertainty = OriginUncertainty(
        confidence_ellipsoid=ConfidenceEllipsoid(
            semi_major_axis_length=3000,
            semi_minor_axis_length=1000,
            semi_intermediate_axis_length=2000,
            major_axis_plunge=20,
            major_axis_azimuth=100,
            major_axis_rotation=4))
    test_event.origins[0].time_errors = QuantityError(uncertainty=0.5)
    # Add in fault-plane solution info (line F) - Note have to check program
    # used to determine which fields are filled....
    test_event.focal_mechanisms.append(
        FocalMechanism(nodal_planes=NodalPlanes(
            nodal_plane_1=NodalPlane(strike=180,
                                     dip=20,
                                     rake=30,
                                     strike_errors=QuantityError(10),
                                     dip_errors=QuantityError(10),
                                     rake_errors=QuantityError(20))),
                       method_id=ResourceIdentifier(
                           "smi:nc.anss.org/focalMechanism/FPFIT"),
                       creation_info=CreationInfo(agency_id="NC"),
                       misfit=0.5,
                       station_distribution_ratio=0.8))
    # Need to test high-precision origin and that it is preferred origin.
    # Moment tensor includes another origin
    test_event.origins.append(
        Origin(time=UTCDateTime("2012-03-26") + 1.2,
               latitude=45.1,
               longitude=25.2,
               depth=14500))
    test_event.magnitudes.append(
        Magnitude(mag=0.1,
                  magnitude_type='MW',
                  creation_info=CreationInfo('TES'),
                  origin_id=test_event.origins[-1].resource_id))
    # Moment tensors go with focal-mechanisms
    test_event.focal_mechanisms.append(
        FocalMechanism(moment_tensor=MomentTensor(
            derived_origin_id=test_event.origins[-1].resource_id,
            moment_magnitude_id=test_event.magnitudes[-1].resource_id,
            scalar_moment=100,
            tensor=Tensor(
                m_rr=100, m_tt=100, m_pp=10, m_rt=1, m_rp=20, m_tp=15),
            method_id=ResourceIdentifier(
                'smi:nc.anss.org/momentTensor/BLAH'))))
    return test_event
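A quick sketch of how a fixture like the one above is typically exercised: wrap it in a Catalog, write it out as QuakeML and read it back (the file name is arbitrary):

from obspy import read_events
from obspy.core.event import Catalog

cat = Catalog(events=[full_test_event()])
cat.write("full_test_event.xml", format="QUAKEML")
print(len(read_events("full_test_event.xml")[0].focal_mechanisms))  # 2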
Example 12
def _internal_read_single_scardec(buf):
    """
    Reads a single SCARDEC file to a :class:`~obspy.core.event.Event`
    object.

    :param buf: File to read.
    :type buf: open file or file-like object
    """
    # The first line encodes the origin time and epicenter
    line = buf.readline()

    origin_time = line.strip().split()[:6]
    values = list(map(int, origin_time[:-1])) + \
        [float(origin_time[-1])]
    try:
        origin_time = UTCDateTime(*values)
    except (TypeError, ValueError):
        warnings.warn("Could not determine origin time from line: %s. Will "
                      "be set to zero." % line)
        origin_time = UTCDateTime(0)
    line = line.split()[6:]
    latitude, longitude = map(float, line[:2])

    # The second line encodes depth and the two focal mechanisms
    line = buf.readline()
    line = line.split()

    # First three values are depth, scalar moment (in Nm) and moment magnitude
    depth, scalar_moment, moment_mag = map(float, line[0:3])

    # depth is in km in SCARDEC files
    depth *= 1e3

    # Next six values are strike, dip, rake for both planes
    strike1, dip1, rake1 = map(float, line[3:6])
    strike2, dip2, rake2 = map(float, line[6:9])

    # The rest of the file is the moment rate function
    # In each line: time (sec), moment rate (Nm/sec)
    stf_time = []
    stf_mr = []
    for line in buf:
        stf_time.append(float(line.split()[0]))
        stf_mr.append(float(line.split()[1]))

    # Normalize the source time function
    stf_mr = np.array(stf_mr)
    stf_mr /= scalar_moment

    # Calculate the time step
    dt = np.mean(np.diff(stf_time))

    # Calculate the stf offset (time of first sample wrt to origin time)
    offset = stf_time[0]

    # event name is set to generic value for now
    event_name = 'SCARDEC_event'

    cmt_origin = Origin(
        resource_id=_get_resource_id(event_name, "origin", tag="cmt"),
        time=origin_time,
        longitude=longitude,
        latitude=latitude,
        depth=depth,
        origin_type="centroid",
        region=_fe.get_region(longitude=longitude,
                              latitude=latitude)
    )

    cmt_mag = Magnitude(
        resource_id=_get_resource_id(event_name, "magnitude", tag="mw"),
        mag=moment_mag,
        magnitude_type="mw",
        origin_id=cmt_origin.resource_id
    )

    nod1 = NodalPlane(strike=strike1, dip=dip1, rake=rake1)
    nod2 = NodalPlane(strike=strike2, dip=dip2, rake=rake2)
    nod = NodalPlanes(nodal_plane_1=nod1, nodal_plane_2=nod2)

    foc_mec = FocalMechanism(
        resource_id=_get_resource_id(event_name, "focal_mechanism"),
        nodal_planes=nod
    )

    dip1 *= np.pi / 180.
    rake1 *= np.pi / 180.
    strike1 *= np.pi / 180.

    mxx = - scalar_moment * ((np.sin(dip1) * np.cos(rake1) *
                              np.sin(2 * strike1)) +
                             (np.sin(2 * dip1) * np.sin(rake1) *
                              np.sin(2 * strike1)))
    mxy = scalar_moment * ((np.sin(dip1) * np.cos(rake1) *
                            np.cos(2 * strike1)) +
                           (np.sin(2 * dip1) * np.sin(rake1) *
                            np.sin(2 * strike1) * 0.5))
    myy = scalar_moment * ((np.sin(dip1) * np.cos(rake1) *
                            np.sin(2 * strike1)) -
                           (np.sin(2 * dip1) * np.sin(rake1) *
                            np.cos(2 * strike1)))
    mxz = - scalar_moment * ((np.cos(dip1) * np.cos(rake1) *
                              np.cos(strike1)) +
                             (np.cos(2 * dip1) * np.sin(rake1) *
                              np.sin(strike1)))
    myz = - scalar_moment * ((np.cos(dip1) * np.cos(rake1) *
                             np.sin(strike1)) -
                             (np.cos(2 * dip1) * np.sin(rake1) *
                              np.cos(strike1)))
    mzz = scalar_moment * (np.sin(2 * dip1) * np.sin(rake1))

    tensor = Tensor(m_rr=mxx, m_tt=myy, m_pp=mzz, m_rt=mxy, m_rp=mxz, m_tp=myz)

    cm = [Comment(text="Basis system: North,East,Down \
                        (Jost and Herrmann 1989)")]
    cm[0].resource_id = _get_resource_id(event_name, 'comment', 'mt')
    cm.append(Comment(text="MT derived from focal mechanism, therefore \
                            constrained to pure double couple.",
                      force_resource_id=False))

    # Write moment rate function
    extra = {'moment_rate': {'value': stf_mr,
                             'namespace': r"http://test.org/xmlns/0.1"},
             'dt': {'value': dt,
                    'namespace': r"http://test.org/xmlns/0.1"},
             'offset': {'value': offset,
                        'namespace': r"http://test.org/xmlns/0.1"}
             }

    # Source time function
    stf = SourceTimeFunction(type="unknown")
    stf.extra = extra

    mt = MomentTensor(
        resource_id=_get_resource_id(event_name, "moment_tensor"),
        derived_origin_id=cmt_origin.resource_id,
        moment_magnitude_id=cmt_mag.resource_id,
        scalar_moment=scalar_moment,
        tensor=tensor,
        source_time_function=stf,
        comments=cm
    )

    # Assemble everything.
    foc_mec.moment_tensor = mt

    ev = Event(resource_id=_get_resource_id(event_name, "event"),
               event_type="earthquake")
    ev.event_descriptions.append(EventDescription(text=event_name,
                                                  type="earthquake name"))
    ev.comments.append(Comment(
        text="Hypocenter catalog: SCARDEC",
        force_resource_id=False))

    ev.origins.append(cmt_origin)
    ev.magnitudes.append(cmt_mag)
    ev.focal_mechanisms.append(foc_mec)

    # Set the preferred items.
    ev.preferred_origin_id = cmt_origin.resource_id.id
    ev.preferred_magnitude_id = cmt_mag.resource_id.id
    ev.preferred_focal_mechanism_id = foc_mec.resource_id.id

    return ev
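Because the parser above only looks at whitespace-separated fields, a tiny synthetic buffer is enough to exercise it. A sketch, assuming the function and its module-level helpers are in scope; all numbers are made up but follow the layout the function reads:

import io

scardec_txt = (
    "2018 1 23 9 31 42.0 56.046 -149.073\n"   # origin time, latitude, longitude
    "25.0 1.0e20 7.3 166 80 164 259 74 10\n"  # depth (km), M0, Mw, two nodal planes
    "0.0 0.0\n"                               # moment rate function: time, rate
    "1.0 5.0e19\n"
    "2.0 0.0\n"
)
ev = _internal_read_single_scardec(io.StringIO(scardec_txt))
print(ev.preferred_focal_mechanism().nodal_planes.nodal_plane_1)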
Example 13
def _internal_read_single_scardec(buf):
    """
    Reads a single SCARDEC file to a :class:`~obspy.core.event.Event`
    object.

    :param buf: File to read.
    :type buf: open file or file-like object
    """
    # The first line encodes the origin time and epicenter
    line = buf.readline()

    origin_time = line.strip().split()[:6]
    values = list(map(int, origin_time[:-1])) + \
        [float(origin_time[-1])]
    try:
        origin_time = UTCDateTime(*values)
    except (TypeError, ValueError):
        warnings.warn("Could not determine origin time from line: %s. Will "
                      "be set to zero." % line)
        origin_time = UTCDateTime(0)
    line = line.split()[6:]
    latitude, longitude = map(float, line[:2])

    # The second line encodes depth and the two focal mechanisms
    line = buf.readline()
    line = line.split()

    # First three values are depth, scalar moment (in Nm) and moment magnitude
    depth, scalar_moment, moment_mag = map(float, line[0:3])

    # depth is in km in SCARDEC files
    depth *= 1e3

    # Next six values are strike, dip, rake for both planes
    strike1, dip1, rake1 = map(float, line[3:6])
    strike2, dip2, rake2 = map(float, line[6:9])

    # The rest of the file is the moment rate function
    # In each line: time (sec), moment rate (Nm/sec)
    stf_time = []
    stf_mr = []
    for line in buf:
        stf_time.append(float(line.split()[0]))
        stf_mr.append(float(line.split()[1]))

    # Normalize the source time function
    stf_mr = np.array(stf_mr)
    stf_mr /= scalar_moment

    # Calculate the time step
    dt = np.mean(np.diff(stf_time))

    # Calculate the stf offset (time of first sample wrt to origin time)
    offset = stf_time[0]

    # event name is set to generic value for now
    event_name = 'SCARDEC_event'

    cmt_origin = Origin(resource_id=_get_resource_id(event_name,
                                                     "origin",
                                                     tag="cmt"),
                        time=origin_time,
                        longitude=longitude,
                        latitude=latitude,
                        depth=depth,
                        origin_type="centroid",
                        region=_fe.get_region(longitude=longitude,
                                              latitude=latitude))

    cmt_mag = Magnitude(resource_id=_get_resource_id(event_name,
                                                     "magnitude",
                                                     tag="mw"),
                        mag=moment_mag,
                        magnitude_type="mw",
                        origin_id=cmt_origin.resource_id)

    nod1 = NodalPlane(strike=strike1, dip=dip1, rake=rake1)
    nod2 = NodalPlane(strike=strike2, dip=dip2, rake=rake2)
    nod = NodalPlanes(nodal_plane_1=nod1, nodal_plane_2=nod2)

    foc_mec = FocalMechanism(resource_id=_get_resource_id(
        event_name, "focal_mechanism"),
                             nodal_planes=nod)

    dip1 *= np.pi / 180.
    rake1 *= np.pi / 180.
    strike1 *= np.pi / 180.

    mxx = -scalar_moment * (
        (np.sin(dip1) * np.cos(rake1) * np.sin(2 * strike1)) +
        (np.sin(2 * dip1) * np.sin(rake1) * np.sin(2 * strike1)))
    mxy = scalar_moment * (
        (np.sin(dip1) * np.cos(rake1) * np.cos(2 * strike1)) +
        (np.sin(2 * dip1) * np.sin(rake1) * np.sin(2 * strike1) * 0.5))
    myy = scalar_moment * (
        (np.sin(dip1) * np.cos(rake1) * np.sin(2 * strike1)) -
        (np.sin(2 * dip1) * np.sin(rake1) * np.cos(2 * strike1)))
    mxz = -scalar_moment * (
        (np.cos(dip1) * np.cos(rake1) * np.cos(strike1)) +
        (np.cos(2 * dip1) * np.sin(rake1) * np.sin(strike1)))
    myz = -scalar_moment * (
        (np.cos(dip1) * np.cos(rake1) * np.sin(strike1)) -
        (np.cos(2 * dip1) * np.sin(rake1) * np.cos(strike1)))
    mzz = scalar_moment * (np.sin(2 * dip1) * np.sin(rake1))

    tensor = Tensor(m_rr=mxx, m_tt=myy, m_pp=mzz, m_rt=mxy, m_rp=mxz, m_tp=myz)

    cm = [
        Comment(text="Basis system: North,East,Down \
                        (Jost and Herrmann 1989)")
    ]
    cm[0].resource_id = _get_resource_id(event_name, 'comment', 'mt')
    cm.append(
        Comment(text="MT derived from focal mechanism, therefore \
                            constrained to pure double couple.",
                force_resource_id=False))

    # Write moment rate function
    extra = {
        'moment_rate': {
            'value': stf_mr,
            'namespace': r"http://test.org/xmlns/0.1"
        },
        'dt': {
            'value': dt,
            'namespace': r"http://test.org/xmlns/0.1"
        },
        'offset': {
            'value': offset,
            'namespace': r"http://test.org/xmlns/0.1"
        }
    }

    # Source time function
    stf = SourceTimeFunction(type="unknown")
    stf.extra = extra

    mt = MomentTensor(resource_id=_get_resource_id(event_name,
                                                   "moment_tensor"),
                      derived_origin_id=cmt_origin.resource_id,
                      moment_magnitude_id=cmt_mag.resource_id,
                      scalar_moment=scalar_moment,
                      tensor=tensor,
                      source_time_function=stf,
                      comments=cm)

    # Assemble everything.
    foc_mec.moment_tensor = mt

    ev = Event(resource_id=_get_resource_id(event_name, "event"),
               event_type="earthquake")
    ev.event_descriptions.append(
        EventDescription(text=event_name, type="earthquake name"))
    ev.comments.append(
        Comment(text="Hypocenter catalog: SCARDEC", force_resource_id=False))

    ev.origins.append(cmt_origin)
    ev.magnitudes.append(cmt_mag)
    ev.focal_mechanisms.append(foc_mec)

    # Set the preferred items.
    ev.preferred_origin_id = cmt_origin.resource_id.id
    ev.preferred_magnitude_id = cmt_mag.resource_id.id
    ev.preferred_focal_mechanism_id = foc_mec.resource_id.id

    ev.scope_resource_ids()

    return ev
Example 14
    def _parse_record_dp(self, line, event):
        """
        Parses the 'source parameter data - primary' record Dp
        """
        source_contributor = line[2:6].strip()
        computation_type = line[6]
        exponent = self._int_zero(line[7])
        scale = math.pow(10, exponent)
        centroid_origin_time = line[8:14] + '.' + line[14]
        orig_time_stderr = line[15:17]
        if orig_time_stderr == 'FX':
            orig_time_stderr = 'Fixed'
        else:
            orig_time_stderr = \
                self._float_with_format(orig_time_stderr, '2.1', scale)
        centroid_latitude = self._float_with_format(line[17:21], '4.2')
        lat_type = line[21]
        if centroid_latitude is not None:
            centroid_latitude *= self._coordinate_sign(lat_type)
        lat_stderr = line[22:25]
        if lat_stderr == 'FX':
            lat_stderr = 'Fixed'
        else:
            lat_stderr = self._float_with_format(lat_stderr, '3.2', scale)
        centroid_longitude = self._float_with_format(line[25:30], '5.2')
        lon_type = line[30]
        if centroid_longitude is not None:
            centroid_longitude *= self._coordinate_sign(lon_type)
        lon_stderr = line[31:34]
        if lon_stderr == 'FX':
            lon_stderr = 'Fixed'
        else:
            lon_stderr = self._float_with_format(lon_stderr, '3.2', scale)
        centroid_depth = self._float_with_format(line[34:38], '4.1')
        depth_stderr = line[38:40]
        if depth_stderr == 'FX' or depth_stderr == 'BD':
            depth_stderr = 'Fixed'
        else:
            depth_stderr = self._float_with_format(depth_stderr, '2.1', scale)
        station_number = self._int_zero(line[40:43])
        component_number = self._int_zero(line[43:46])
        station_number2 = self._int_zero(line[46:48])
        component_number2 = self._int_zero(line[48:51])
        # unused: half_duration = self._float_with_format(line[51:54], '3.1')
        moment = self._float_with_format(line[54:56], '2.1')
        moment_stderr = self._float_with_format(line[56:58], '2.1')
        moment_exponent = self._int(line[58:60])
        if (moment is not None) and (moment_exponent is not None):
            moment *= math.pow(10, moment_exponent)
        if (moment_stderr is not None) and (moment_exponent is not None):
            moment_stderr *= math.pow(10, moment_exponent)

        evid = event.resource_id.id.split('/')[-1]
        # Create a new origin only if centroid time is defined:
        origin = None
        if centroid_origin_time.strip() != '.':
            origin = Origin()
            res_id = '/'.join((res_id_prefix, 'origin',
                               evid, source_contributor.lower(),
                               'mw' + computation_type.lower()))
            origin.resource_id = ResourceIdentifier(id=res_id)
            origin.creation_info = \
                CreationInfo(agency_id=source_contributor)
            date = event.origins[0].time.strftime('%Y%m%d')
            origin.time = UTCDateTime(date + centroid_origin_time)
            # Check if centroid time is on the next day:
            if origin.time < event.origins[0].time:
                origin.time += timedelta(days=1)
            self._store_uncertainty(origin.time_errors, orig_time_stderr)
            origin.latitude = centroid_latitude
            origin.longitude = centroid_longitude
            origin.depth = centroid_depth * 1000
            if lat_stderr == 'Fixed' and lon_stderr == 'Fixed':
                origin.epicenter_fixed = True
            else:
                self._store_uncertainty(origin.latitude_errors,
                                        self._lat_err_to_deg(lat_stderr))
                self._store_uncertainty(origin.longitude_errors,
                                        self._lon_err_to_deg(lon_stderr,
                                                             origin.latitude))
            if depth_stderr == 'Fixed':
                origin.depth_type = 'operator assigned'
            else:
                origin.depth_type = 'from location'
                self._store_uncertainty(origin.depth_errors,
                                        depth_stderr, scale=1000)
            quality = OriginQuality()
            quality.used_station_count = \
                station_number + station_number2
            quality.used_phase_count = \
                component_number + component_number2
            origin.quality = quality
            origin.origin_type = 'centroid'
            event.origins.append(origin)
        focal_mechanism = FocalMechanism()
        res_id = '/'.join((res_id_prefix, 'focalmechanism',
                           evid, source_contributor.lower(),
                           'mw' + computation_type.lower()))
        focal_mechanism.resource_id = ResourceIdentifier(id=res_id)
        focal_mechanism.creation_info = \
            CreationInfo(agency_id=source_contributor)
        moment_tensor = MomentTensor()
        if origin is not None:
            moment_tensor.derived_origin_id = origin.resource_id
        else:
            # this is required for QuakeML validation:
            res_id = '/'.join((res_id_prefix, 'no-origin'))
            moment_tensor.derived_origin_id = \
                ResourceIdentifier(id=res_id)
        for mag in event.magnitudes:
            if mag.creation_info.agency_id == source_contributor:
                moment_tensor.moment_magnitude_id = mag.resource_id
        res_id = '/'.join((res_id_prefix, 'momenttensor',
                           evid, source_contributor.lower(),
                           'mw' + computation_type.lower()))
        moment_tensor.resource_id = ResourceIdentifier(id=res_id)
        moment_tensor.scalar_moment = moment
        self._store_uncertainty(moment_tensor.scalar_moment_errors,
                                moment_stderr)
        data_used = DataUsed()
        data_used.station_count = station_number + station_number2
        data_used.component_count = component_number + component_number2
        if computation_type == 'C':
            res_id = '/'.join((res_id_prefix, 'methodID=CMT'))
            focal_mechanism.method_id = ResourceIdentifier(id=res_id)
            # CMT algorithm uses long-period body waves,
            # very-long-period surface waves and
            # intermediate period surface waves (since 2004
            # for shallow and intermediate-depth earthquakes
            # --Ekstrom et al., 2012)
            data_used.wave_type = 'combined'
        if computation_type == 'M':
            res_id = '/'.join((res_id_prefix, 'methodID=moment_tensor'))
            focal_mechanism.method_id = ResourceIdentifier(id=res_id)
            # FIXME: not sure which kind of data is used by
            # "moment tensor" algorithm.
            data_used.wave_type = 'unknown'
        elif computation_type == 'B':
            res_id = '/'.join((res_id_prefix, 'methodID=broadband_data'))
            focal_mechanism.method_id = ResourceIdentifier(id=res_id)
            # FIXME: is 'combined' correct here?
            data_used.wave_type = 'combined'
        elif computation_type == 'F':
            res_id = '/'.join((res_id_prefix, 'methodID=P-wave_first_motion'))
            focal_mechanism.method_id = ResourceIdentifier(id=res_id)
            data_used.wave_type = 'P waves'
        elif computation_type == 'S':
            res_id = '/'.join((res_id_prefix, 'methodID=scalar_moment'))
            focal_mechanism.method_id = ResourceIdentifier(id=res_id)
            # FIXME: not sure which kind of data is used
            # for scalar moment determination.
            data_used.wave_type = 'unknown'
        moment_tensor.data_used = [data_used]
        focal_mechanism.moment_tensor = moment_tensor
        event.focal_mechanisms.append(focal_mechanism)
        return focal_mechanism
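The computation_type dispatch above can also be read as a lookup table; the codes, method ids and wave types below are copied from the branches of the parser (a sketch of the mapping only, not a drop-in replacement):

COMPUTATION_TYPES = {
    'C': ('methodID=CMT', 'combined'),
    'M': ('methodID=moment_tensor', 'unknown'),
    'B': ('methodID=broadband_data', 'combined'),
    'F': ('methodID=P-wave_first_motion', 'P waves'),
    'S': ('methodID=scalar_moment', 'unknown'),
}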
Example 15
def outputOBSPY(hp, event=None, only_fm_picks=False):
    """
    Make an Event that includes the current focal mechanism information from HASH.

    Use the 'only_fm_picks' flag to include only the picks HASH used for the FocalMechanism.
    This flag replaces the 'picks' and 'arrivals' lists of an existing event with new ones.

    Inputs
    ------
    hp    : hashpy.HashPype instance

    event : obspy.core.event.Event

    only_fm_picks : bool, whether to overwrite the picks/arrivals lists

    Returns
    -------
    obspy.core.event.Event

    A new Event is returned if no event was input; otherwise the FocalMechanism
    is added to the existing event.
    """
    # Returns new (or updates existing) Event with HASH solution
    n = hp.npol
    if event is None:
        event = Event(focal_mechanisms=[], picks=[], origins=[])
        origin = Origin(arrivals=[])
        origin.time = UTCDateTime(hp.tstamp)
        origin.latitude = hp.qlat
        origin.longitude = hp.qlon
        origin.depth = hp.qdep
        origin.creation_info = CreationInfo(version=hp.icusp)
        origin.resource_id = ResourceIdentifier('smi:hash/Origin/{0}'.format(hp.icusp))
        for _i in range(n):
            p = Pick()
            p.creation_info = CreationInfo(version=hp.arid[_i])
            p.resource_id = ResourceIdentifier('smi:hash/Pick/{0}'.format(p.creation_info.version))
            p.waveform_id = WaveformStreamID(network_code=hp.snet[_i], station_code=hp.sname[_i], channel_code=hp.scomp[_i])
            if hp.p_pol[_i] > 0:
                p.polarity = 'positive'
            else:
                p.polarity = 'negative'
            a = Arrival()
            a.creation_info = CreationInfo(version=hp.arid[_i])
            a.resource_id = ResourceIdentifier('smi:hash/Arrival/{0}'.format(p.creation_info.version))
            a.azimuth = hp.p_azi_mc[_i,0]
            a.takeoff_angle = 180. - hp.p_the_mc[_i,0]
            a.pick_id = p.resource_id
            origin.arrivals.append(a)
            event.picks.append(p)
        event.origins.append(origin)
        event.preferred_origin_id = str(origin.resource_id)
    else:  # just update the changes
        origin = event.preferred_origin()
        picks = []
        arrivals = []
        for _i in range(n):
            ind = hp.p_index[_i]
            a = origin.arrivals[ind]
            p = a.pick_id.getReferredObject()
            a.takeoff_angle = hp.p_the_mc[_i,0]
            picks.append(p)
            arrivals.append(a)
        if only_fm_picks:
            origin.arrivals = arrivals
            event.picks = picks
    # Use me double couple calculator and populate planes/axes etc
    x = hp._best_quality_index
    # Put all the mechanisms into the 'focal_mechanisms' list, mark "best" as preferred
    for s in range(hp.nmult):
        dc = DoubleCouple([hp.str_avg[s], hp.dip_avg[s], hp.rak_avg[s]])
        ax = dc.axis
        focal_mech = FocalMechanism()
        focal_mech.creation_info = CreationInfo(creation_time=UTCDateTime(), author=hp.author)
        focal_mech.triggering_origin_id = origin.resource_id
        focal_mech.resource_id = ResourceIdentifier('smi:hash/FocalMechanism/{0}/{1}'.format(hp.icusp, s+1))
        focal_mech.method_id = ResourceIdentifier('HASH')
        focal_mech.nodal_planes = NodalPlanes()
        focal_mech.nodal_planes.nodal_plane_1 = NodalPlane(*dc.plane1)
        focal_mech.nodal_planes.nodal_plane_2 = NodalPlane(*dc.plane2)
        focal_mech.principal_axes = PrincipalAxes()
        focal_mech.principal_axes.t_axis = Axis(azimuth=ax['T']['azimuth'], plunge=ax['T']['dip'])
        focal_mech.principal_axes.p_axis = Axis(azimuth=ax['P']['azimuth'], plunge=ax['P']['dip'])
        focal_mech.station_polarity_count = n
        focal_mech.azimuthal_gap = hp.magap
        focal_mech.misfit = hp.mfrac[s]
        focal_mech.station_distribution_ratio = hp.stdr[s]
        focal_mech.comments.append(
            Comment(hp.qual[s], resource_id=ResourceIdentifier(str(focal_mech.resource_id) + '/comment/quality'))
            )
        #----------------------------------------
        event.focal_mechanisms.append(focal_mech)
        if s == x:
            event.preferred_focal_mechanism_id = str(focal_mech.resource_id)
    return event
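Note that a.pick_id.getReferredObject() above uses the old camelCase obspy API; current obspy releases spell the same lookup get_referred_object(). A minimal sketch:

from obspy.core.event import Pick

p = Pick()
p.resource_id.set_referred_object(p)
print(p.resource_id.get_referred_object() is p)  # True while 'p' is alive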
Example 16
    def _parseRecordDp(self, line, event):
        """
        Parses the 'source parameter data - primary' record Dp
        """
        source_contributor = line[2:6].strip()
        computation_type = line[6]
        exponent = self._intZero(line[7])
        scale = math.pow(10, exponent)
        centroid_origin_time = line[8:14] + "." + line[14]
        orig_time_stderr = line[15:17]
        if orig_time_stderr == "FX":
            orig_time_stderr = "Fixed"
        else:
            orig_time_stderr = self._floatWithFormat(orig_time_stderr, "2.1", scale)
        centroid_latitude = self._floatWithFormat(line[17:21], "4.2")
        lat_type = line[21]
        if centroid_latitude is not None:
            centroid_latitude *= self._coordinateSign(lat_type)
        lat_stderr = line[22:25]
        if lat_stderr == "FX":
            lat_stderr = "Fixed"
        else:
            lat_stderr = self._floatWithFormat(lat_stderr, "3.2", scale)
        centroid_longitude = self._floatWithFormat(line[25:30], "5.2")
        lon_type = line[30]
        if centroid_longitude is not None:
            centroid_longitude *= self._coordinateSign(lon_type)
        lon_stderr = line[31:34]
        if lon_stderr == "FX":
            lon_stderr = "Fixed"
        else:
            lon_stderr = self._floatWithFormat(lon_stderr, "3.2", scale)
        centroid_depth = self._floatWithFormat(line[34:38], "4.1")
        depth_stderr = line[38:40]
        if depth_stderr == "FX" or depth_stderr == "BD":
            depth_stderr = "Fixed"
        else:
            depth_stderr = self._floatWithFormat(depth_stderr, "2.1", scale)
        station_number = self._intZero(line[40:43])
        component_number = self._intZero(line[43:46])
        station_number2 = self._intZero(line[46:48])
        component_number2 = self._intZero(line[48:51])
        # unused: half_duration = self._floatWithFormat(line[51:54], '3.1')
        moment = self._floatWithFormat(line[54:56], "2.1")
        moment_stderr = self._floatWithFormat(line[56:58], "2.1")
        moment_exponent = self._int(line[58:60])
        if (moment is not None) and (moment_exponent is not None):
            moment *= math.pow(10, moment_exponent)
        if (moment_stderr is not None) and (moment_exponent is not None):
            moment_stderr *= math.pow(10, moment_exponent)

        evid = event.resource_id.id.split("/")[-1]
        # Create a new origin only if centroid time is defined:
        origin = None
        if centroid_origin_time.strip() != ".":
            origin = Origin()
            res_id = "/".join(
                (res_id_prefix, "origin", evid, source_contributor.lower(), "mw" + computation_type.lower())
            )
            origin.resource_id = ResourceIdentifier(id=res_id)
            origin.creation_info = CreationInfo(agency_id=source_contributor)
            date = event.origins[0].time.strftime("%Y%m%d")
            origin.time = UTCDateTime(date + centroid_origin_time)
            # Check if centroid time is on the next day:
            if origin.time < event.origins[0].time:
                origin.time += timedelta(days=1)
            self._storeUncertainty(origin.time_errors, orig_time_stderr)
            origin.latitude = centroid_latitude
            origin.longitude = centroid_longitude
            origin.depth = centroid_depth * 1000
            if lat_stderr == "Fixed" and lon_stderr == "Fixed":
                origin.epicenter_fixed = True
            else:
                self._storeUncertainty(origin.latitude_errors, self._latErrToDeg(lat_stderr))
                self._storeUncertainty(origin.longitude_errors, self._lonErrToDeg(lon_stderr, origin.latitude))
            if depth_stderr == "Fixed":
                origin.depth_type = "operator assigned"
            else:
                origin.depth_type = "from location"
                self._storeUncertainty(origin.depth_errors, depth_stderr, scale=1000)
            quality = OriginQuality()
            quality.used_station_count = station_number + station_number2
            quality.used_phase_count = component_number + component_number2
            origin.quality = quality
            origin.type = "centroid"
            event.origins.append(origin)
        focal_mechanism = FocalMechanism()
        res_id = "/".join(
            (res_id_prefix, "focalmechanism", evid, source_contributor.lower(), "mw" + computation_type.lower())
        )
        focal_mechanism.resource_id = ResourceIdentifier(id=res_id)
        focal_mechanism.creation_info = CreationInfo(agency_id=source_contributor)
        moment_tensor = MomentTensor()
        if origin is not None:
            moment_tensor.derived_origin_id = origin.resource_id
        else:
            # this is required for QuakeML validation:
            res_id = "/".join((res_id_prefix, "no-origin"))
            moment_tensor.derived_origin_id = ResourceIdentifier(id=res_id)
        for mag in event.magnitudes:
            if mag.creation_info.agency_id == source_contributor:
                moment_tensor.moment_magnitude_id = mag.resource_id
        res_id = "/".join(
            (res_id_prefix, "momenttensor", evid, source_contributor.lower(), "mw" + computation_type.lower())
        )
        moment_tensor.resource_id = ResourceIdentifier(id=res_id)
        moment_tensor.scalar_moment = moment
        self._storeUncertainty(moment_tensor.scalar_moment_errors, moment_stderr)
        data_used = DataUsed()
        data_used.station_count = station_number + station_number2
        data_used.component_count = component_number + component_number2
        if computation_type == "C":
            res_id = "/".join((res_id_prefix, "methodID=CMT"))
            focal_mechanism.method_id = ResourceIdentifier(id=res_id)
            # CMT algorithm uses long-period body waves,
            # very-long-period surface waves and
            # intermediate period surface waves (since 2004
            # for shallow and intermediate-depth earthquakes
            # --Ekstrom et al., 2012)
            data_used.wave_type = "combined"
        if computation_type == "M":
            res_id = "/".join((res_id_prefix, "methodID=moment_tensor"))
            focal_mechanism.method_id = ResourceIdentifier(id=res_id)
            # FIXME: not sure which kind of data is used by
            # "moment tensor" algorithm.
            data_used.wave_type = "unknown"
        elif computation_type == "B":
            res_id = "/".join((res_id_prefix, "methodID=broadband_data"))
            focal_mechanism.method_id = ResourceIdentifier(id=res_id)
            # FIXME: is 'combined' correct here?
            data_used.wave_type = "combined"
        elif computation_type == "F":
            res_id = "/".join((res_id_prefix, "methodID=P-wave_first_motion"))
            focal_mechanism.method_id = ResourceIdentifier(id=res_id)
            data_used.wave_type = "P waves"
        elif computation_type == "S":
            res_id = "/".join((res_id_prefix, "methodID=scalar_moment"))
            focal_mechanism.method_id = ResourceIdentifier(id=res_id)
            # FIXME: not sure which kind of data is used
            # for scalar moment determination.
            data_used.wave_type = "unknown"
        moment_tensor.data_used = data_used
        focal_mechanism.moment_tensor = moment_tensor
        event.focal_mechanisms.append(focal_mechanism)
        return focal_mechanism
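The chain of computation_type branches above maps a one-letter code onto a method resource identifier and a DataUsed wave type; the dict below is a compact, purely illustrative restatement of that mapping (the name COMPUTATION_TYPE_MAP is not part of the original reader).

# Hypothetical lookup table mirroring the computation_type branches above;
# each entry is (method resource suffix, DataUsed wave_type).
COMPUTATION_TYPE_MAP = {
    "C": ("methodID=CMT", "combined"),
    "M": ("methodID=moment_tensor", "unknown"),
    "B": ("methodID=broadband_data", "combined"),
    "F": ("methodID=P-wave_first_motion", "P waves"),
    "S": ("methodID=scalar_moment", "unknown"),
}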
Esempio n. 17
0
def __read_single_fnetmt_entry(line, **kwargs):
    """
    Reads a single F-net moment tensor solution to a
    :class:`~obspy.core.event.Event` object.

    :param line: String containing moment tensor information.
    :type line: str.
    """

    a = line.split()
    try:
        ot = UTCDateTime().strptime(a[0], '%Y/%m/%d,%H:%M:%S.%f')
    except ValueError:
        ot = UTCDateTime().strptime(a[0], '%Y/%m/%d,%H:%M:%S')
    lat, lon, depjma, magjma = map(float, a[1:5])
    depjma *= 1000
    region = a[5]
    strike = tuple(map(int, a[6].split(';')))
    dip = tuple(map(int, a[7].split(';')))
    rake = tuple(map(int, a[8].split(';')))
    mo = float(a[9])
    depmt = float(a[10]) * 1000
    magmt = float(a[11])
    var_red = float(a[12])
    mxx, mxy, mxz, myy, myz, mzz, unit = map(float, a[13:20])

    event_name = util.gen_sc3_id(ot)
    e = Event(event_type="earthquake")
    e.resource_id = _get_resource_id(event_name, 'event')

    # Standard JMA solution
    o_jma = Origin(time=ot, latitude=lat, longitude=lon,
                   depth=depjma, depth_type="from location",
                   region=region)
    o_jma.resource_id = _get_resource_id(event_name,
                                         'origin', 'JMA')
    m_jma = Magnitude(mag=magjma, magnitude_type='ML',
                      origin_id=o_jma.resource_id)
    m_jma.resource_id = _get_resource_id(event_name,
                                         'magnitude', 'JMA')
    # MT solution
    o_mt = Origin(time=ot, latitude=lat, longitude=lon,
                  depth=depmt, region=region,
                  depth_type="from moment tensor inversion")
    o_mt.resource_id = _get_resource_id(event_name,
                                        'origin', 'MT')
    m_mt = Magnitude(mag=magmt, magnitude_type='Mw',
                     origin_id=o_mt.resource_id)
    m_mt.resource_id = _get_resource_id(event_name,
                                        'magnitude', 'MT')
    foc_mec = FocalMechanism(triggering_origin_id=o_jma.resource_id)
    foc_mec.resource_id = _get_resource_id(event_name,
                                           "focal_mechanism")
    nod1 = NodalPlane(strike=strike[0], dip=dip[0], rake=rake[0])
    nod2 = NodalPlane(strike=strike[1], dip=dip[1], rake=rake[1])
    nod = NodalPlanes(nodal_plane_1=nod1, nodal_plane_2=nod2)
    foc_mec.nodal_planes = nod

    tensor = Tensor(m_rr=mxx, m_tt=myy, m_pp=mzz, m_rt=mxy, m_rp=mxz, m_tp=myz)
    cm = Comment(text="Basis system: North,East,Down (Jost and \
    Herrmann 1989")
    cm.resource_id = _get_resource_id(event_name, 'comment', 'mt')
    mt = MomentTensor(derived_origin_id=o_mt.resource_id,
                      moment_magnitude_id=m_mt.resource_id,
                      scalar_moment=mo, comments=[cm],
                      tensor=tensor, variance_reduction=var_red)
    mt.resource_id = _get_resource_id(event_name,
                                      'moment_tensor')
    foc_mec.moment_tensor = mt
    e.origins = [o_jma, o_mt]
    e.magnitudes = [m_jma, m_mt]
    e.focal_mechanisms = [foc_mec]
    e.preferred_magnitude_id = m_mt.resource_id.id
    e.preferred_origin_id = o_mt.resource_id.id
    e.preferred_focal_mechanism_id = foc_mec.resource_id.id
    return e
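Since the reader finishes by setting the preferred_* IDs, the assembled pieces can be pulled back out through ObsPy's preferred-object accessors. A minimal, self-contained sketch; the toy values below are placeholders, not real F-net data:

from obspy.core.event import Event, FocalMechanism, Magnitude, Origin

# Build a toy Event wired up the same way the reader above sets preferred IDs.
e = Event()
o = Origin(latitude=35.0, longitude=139.0, depth=10000.0)
m = Magnitude(mag=5.0, magnitude_type="Mw", origin_id=o.resource_id)
f = FocalMechanism(triggering_origin_id=o.resource_id)
e.origins, e.magnitudes, e.focal_mechanisms = [o], [m], [f]
e.preferred_origin_id = o.resource_id.id
e.preferred_magnitude_id = m.resource_id.id
e.preferred_focal_mechanism_id = f.resource_id.id
# The string IDs resolve back to the attached objects.
print(e.preferred_origin().depth, e.preferred_magnitude().mag)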
Esempio n. 18
0
def __read_single_cmtsolution(buf):
    """
    Reads a single CMTSOLUTION file to a :class:`~obspy.core.event.Catalog`
    object.

    :param buf: File to read.
    :type buf: Open file or open file like object.
    """
    # The first line encodes the preliminary epicenter.
    line = buf.readline()

    hypocenter_catalog = line[:4].strip().decode()

    origin_time = line[4:].strip().split()[:6]
    values = list(map(int, origin_time[:-1])) + \
        [float(origin_time[-1])]
    try:
        origin_time = UTCDateTime(*values)
    except (TypeError, ValueError):
        warnings.warn("Could not determine origin time from line: %s. Will "
                      "be set to zero." % line)
        origin_time = UTCDateTime(0)
    line = line.split()[7:]
    latitude, longitude, depth, body_wave_mag, surface_wave_mag = \
        map(float, line[:5])

    # The rest encodes the centroid solution.
    event_name = buf.readline().strip().split()[-1].decode()

    preliminary_origin = Origin(
        resource_id=_get_resource_id(event_name, "origin", tag="prelim"),
        time=origin_time,
        longitude=longitude,
        latitude=latitude,
        # Depth is in meters.
        depth=depth * 1000.0,
        origin_type="hypocenter",
        region=_fe.get_region(longitude=longitude, latitude=latitude),
        evaluation_status="preliminary"
    )

    preliminary_bw_magnitude = Magnitude(
        resource_id=_get_resource_id(event_name, "magnitude", tag="prelim_bw"),
        mag=body_wave_mag, magnitude_type="Mb",
        evaluation_status="preliminary",
        origin_id=preliminary_origin.resource_id)

    preliminary_sw_magnitude = Magnitude(
        resource_id=_get_resource_id(event_name, "magnitude", tag="prelim_sw"),
        mag=surface_wave_mag, magnitude_type="MS",
        evaluation_status="preliminary",
        origin_id=preliminary_origin.resource_id)

    values = ["time_shift", "half_duration", "latitude", "longitude",
              "depth", "m_rr", "m_tt", "m_pp", "m_rt", "m_rp", "m_tp"]
    cmt_values = {_i: float(buf.readline().strip().split()[-1])
                  for _i in values}

    # Moment magnitude calculation in dyne * cm.
    m_0 = 1.0 / math.sqrt(2.0) * math.sqrt(
        cmt_values["m_rr"] ** 2 +
        cmt_values["m_tt"] ** 2 +
        cmt_values["m_pp"] ** 2 +
        2.0 * cmt_values["m_rt"] ** 2 +
        2.0 * cmt_values["m_rp"] ** 2 +
        2.0 * cmt_values["m_tp"] ** 2)
    m_w = 2.0 / 3.0 * (math.log10(m_0) - 16.1)

    # Convert to meters.
    cmt_values["depth"] *= 1000.0
    # Convert to Newton meter.
    values = ["m_rr", "m_tt", "m_pp", "m_rt", "m_rp", "m_tp"]
    for value in values:
        cmt_values[value] /= 1E7

    cmt_origin = Origin(
        resource_id=_get_resource_id(event_name, "origin", tag="cmt"),
        time=origin_time + cmt_values["time_shift"],
        longitude=cmt_values["longitude"],
        latitude=cmt_values["latitude"],
        depth=cmt_values["depth"],
        origin_type="centroid",
        # Could occasionally differ from the epicentral region.
        region=_fe.get_region(longitude=cmt_values["longitude"],
                              latitude=cmt_values["latitude"])
        # No evaluation status as it could be any of several and the file
        # format does not provide that information.
    )

    cmt_mag = Magnitude(
        resource_id=_get_resource_id(event_name, "magnitude", tag="mw"),
        # Round to 2 digits.
        mag=round(m_w, 2),
        magnitude_type="mw",
        origin_id=cmt_origin.resource_id
    )

    foc_mec = FocalMechanism(
        resource_id=_get_resource_id(event_name, "focal_mechanism"),
        # The preliminary origin most likely triggered the focal mechanism
        # determination.
        triggering_origin_id=preliminary_origin.resource_id
    )

    tensor = Tensor(
        m_rr=cmt_values["m_rr"],
        m_pp=cmt_values["m_pp"],
        m_tt=cmt_values["m_tt"],
        m_rt=cmt_values["m_rt"],
        m_rp=cmt_values["m_rp"],
        m_tp=cmt_values["m_tp"]
    )

    # Source time function is a triangle, according to the SPECFEM manual.
    stf = SourceTimeFunction(
        type="triangle",
        # The duration is twice the half duration.
        duration=2.0 * cmt_values["half_duration"]
    )

    mt = MomentTensor(
        resource_id=_get_resource_id(event_name, "moment_tensor"),
        derived_origin_id=cmt_origin.resource_id,
        moment_magnitude_id=cmt_mag.resource_id,
        # Convert to Nm.
        scalar_moment=m_0 / 1E7,
        tensor=tensor,
        source_time_function=stf
    )

    # Assemble everything.
    foc_mec.moment_tensor = mt

    ev = Event(resource_id=_get_resource_id(event_name, "event"),
               event_type="earthquake")
    ev.event_descriptions.append(EventDescription(text=event_name,
                                                  type="earthquake name"))
    ev.comments.append(Comment(
        text="Hypocenter catalog: %s" % hypocenter_catalog,
        force_resource_id=False))

    ev.origins.append(cmt_origin)
    ev.origins.append(preliminary_origin)
    ev.magnitudes.append(cmt_mag)
    ev.magnitudes.append(preliminary_bw_magnitude)
    ev.magnitudes.append(preliminary_sw_magnitude)
    ev.focal_mechanisms.append(foc_mec)

    # Set the preferred items.
    ev.preferred_origin_id = cmt_origin.resource_id.id
    ev.preferred_magnitude_id = cmt_mag.resource_id.id
    ev.preferred_focal_mechanism_id = foc_mec.resource_id.id

    return ev
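The scalar moment and Mw lines above follow the usual GCMT convention for a tensor given in dyne·cm; a standalone restatement as a hypothetical helper (the function name and example values are illustrative only):

import math

def _moment_and_mw(m_rr, m_tt, m_pp, m_rt, m_rp, m_tp):
    """Sketch of the M0/Mw computation used above (inputs in dyne * cm)."""
    m_0 = 1.0 / math.sqrt(2.0) * math.sqrt(
        m_rr ** 2 + m_tt ** 2 + m_pp ** 2 +
        2.0 * (m_rt ** 2 + m_rp ** 2 + m_tp ** 2))
    m_w = 2.0 / 3.0 * (math.log10(m_0) - 16.1)
    return m_0, m_w

# A pure m_rt couple of 1e26 dyne*cm yields M0 = 1e26 and Mw = 6.6.
print(_moment_and_mw(0.0, 0.0, 0.0, 1e26, 0.0, 0.0))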
Esempio n. 19
0
def par2quakeml(Par_filename, QuakeML_filename, rotation_axis=[0.0, 1.0, 0.0],
                rotation_angle=-57.5, origin_time="2000-01-01 00:00:00.0",
                event_type="other event"):
    # initialise event
    ev = Event()

    # open and read Par file
    fid = open(Par_filename, 'r')

    fid.readline()
    fid.readline()
    fid.readline()
    fid.readline()

    lat_old = 90.0 - float(fid.readline().strip().split()[0])
    lon_old = float(fid.readline().strip().split()[0])
    depth = float(fid.readline().strip().split()[0])

    fid.readline()

    Mtt_old = float(fid.readline().strip().split()[0])
    Mpp_old = float(fid.readline().strip().split()[0])
    Mrr_old = float(fid.readline().strip().split()[0])
    Mtp_old = float(fid.readline().strip().split()[0])
    Mtr_old = float(fid.readline().strip().split()[0])
    Mpr_old = float(fid.readline().strip().split()[0])

    # rotate event into physical domain

    lat, lon = rot.rotate_lat_lon(lat_old, lon_old, rotation_axis,
                                  rotation_angle)
    Mrr, Mtt, Mpp, Mtr, Mpr, Mtp = rot.rotate_moment_tensor(
        Mrr_old, Mtt_old, Mpp_old, Mtr_old, Mpr_old, Mtp_old, lat_old, lon_old,
        rotation_axis, rotation_angle)

    # populate event origin data
    ev.event_type = event_type

    ev_origin = Origin()
    ev_origin.time = UTCDateTime(origin_time)
    ev_origin.latitude = lat
    ev_origin.longitude = lon
    ev_origin.depth = depth
    ev.origins.append(ev_origin)

    # populate event moment tensor

    ev_tensor = Tensor()
    ev_tensor.m_rr = Mrr
    ev_tensor.m_tt = Mtt
    ev_tensor.m_pp = Mpp
    ev_tensor.m_rt = Mtr
    ev_tensor.m_rp = Mpr
    ev_tensor.m_tp = Mtp

    ev_momenttensor = MomentTensor()
    ev_momenttensor.tensor = ev_tensor
    ev_momenttensor.scalar_moment = np.sqrt(Mrr ** 2 + Mtt ** 2 + Mpp ** 2 +
                                            Mtr ** 2 + Mpr ** 2 + Mtp ** 2)

    ev_focalmechanism = FocalMechanism()
    ev_focalmechanism.moment_tensor = ev_momenttensor
    # setdefault(0, 0) would return the int 0 rather than the NodalPlanes
    # object; attach an (empty) NodalPlanes container instead.
    ev_focalmechanism.nodal_planes = NodalPlanes()

    ev.focal_mechanisms.append(ev_focalmechanism)

    # populate event magnitude
    ev_magnitude = Magnitude()
    ev_magnitude.mag = 0.667 * (np.log10(ev_momenttensor.scalar_moment) - 9.1)
    ev_magnitude.magnitude_type = 'Mw'
    ev.magnitudes.append(ev_magnitude)

    # write QuakeML file
    cat = Catalog()
    cat.append(ev)
    cat.write(QuakeML_filename, format="quakeml")

    # clean up
    fid.close()
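A hedged usage sketch of the converter above; the file names and parameter values are placeholders, not values taken from the original project:

# Hypothetical call: convert a rotated-domain Par file to QuakeML.
par2quakeml("event.par", "event.xml",
            rotation_axis=[0.0, 1.0, 0.0], rotation_angle=-57.5,
            origin_time="2012-01-01 00:00:00.0",
            event_type="earthquake")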
Esempio n. 20
0
File: core.py Project: Qigaoo/obspy
def _read_ndk(filename, *args, **kwargs):  # @UnusedVariable
    """
    Reads an NDK file to a :class:`~obspy.core.event.Catalog` object.

    :param filename: File or file-like object in text mode.
    """
    # Read the whole file at once. While an iterator would be more efficient,
    # the largest NDK file out in the wild is 13.7 MB so it does not matter
    # much.
    if not hasattr(filename, "read"):
        # Check if it exists, otherwise assume it's a string.
        try:
            with open(filename, "rt") as fh:
                data = fh.read()
        except Exception:
            try:
                data = filename.decode()
            except Exception:
                data = str(filename)
            data = data.strip()
    else:
        data = filename.read()
        if hasattr(data, "decode"):
            data = data.decode()

    # Create iterator that yields lines.
    def lines_iter():
        prev_line = -1
        while True:
            next_line = data.find("\n", prev_line + 1)
            if next_line < 0:
                break
            yield data[prev_line + 1: next_line]
            prev_line = next_line
        if len(data) > prev_line + 1:
            yield data[prev_line + 1:]

    # Use one Flinn Engdahl object for all region determinations.
    fe = FlinnEngdahl()
    cat = Catalog(resource_id=_get_resource_id("catalog", str(uuid.uuid4())))

    # Loop over 5 lines at once.
    for _i, lines in enumerate(itertools.zip_longest(*[lines_iter()] * 5)):
        if None in lines:
            msg = "Skipped last %i lines. Not a multiple of 5 lines." % (
                lines.count(None))
            warnings.warn(msg, ObsPyNDKWarning)
            continue

        # Parse the lines to a human readable dictionary.
        try:
            record = _read_lines(*lines)
        except (ValueError, ObsPyNDKException):
            exc = traceback.format_exc()
            msg = (
                "Could not parse event %i (faulty file?). Will be "
                "skipped. Lines of the event:\n"
                "\t%s\n"
                "%s") % (_i + 1, "\n\t".join(lines), exc)
            warnings.warn(msg, ObsPyNDKWarning)
            continue

        # Use one creation info for essentially every item.
        creation_info = CreationInfo(
            agency_id="GCMT",
            version=record["version_code"]
        )

        # Use the ObsPy Flinn Engdahl region determiner as the region in the
        # NDK files is oftentimes trimmed.
        region = fe.get_region(record["centroid_longitude"],
                               record["centroid_latitude"])

        # Create an event object.
        event = Event(
            force_resource_id=False,
            event_type="earthquake",
            event_type_certainty="known",
            event_descriptions=[
                EventDescription(text=region, type="Flinn-Engdahl region"),
                EventDescription(text=record["cmt_event_name"],
                                 type="earthquake name")
            ]
        )

        # Assemble the time for the reference origin.
        try:
            time = _parse_date_time(record["date"], record["time"])
        except ObsPyNDKException:
            msg = ("Invalid time in event %i. '%s' and '%s' cannot be "
                   "assembled to a valid time. Event will be skipped.") % \
                  (_i + 1, record["date"], record["time"])
            warnings.warn(msg, ObsPyNDKWarning)
            continue

        # Create two origins, one with the reference latitude/longitude and
        # one with the centroidal values.
        ref_origin = Origin(
            force_resource_id=False,
            time=time,
            longitude=record["hypo_lng"],
            latitude=record["hypo_lat"],
            # Convert to m.
            depth=record["hypo_depth_in_km"] * 1000.0,
            origin_type="hypocenter",
            comments=[Comment(text="Hypocenter catalog: %s" %
                              record["hypocenter_reference_catalog"],
                              force_resource_id=False)]
        )
        ref_origin.comments[0].resource_id = _get_resource_id(
            record["cmt_event_name"], "comment", tag="ref_origin")
        ref_origin.resource_id = _get_resource_id(record["cmt_event_name"],
                                                  "origin", tag="reforigin")

        cmt_origin = Origin(
            force_resource_id=False,
            longitude=record["centroid_longitude"],
            longitude_errors={
                "uncertainty": record["centroid_longitude_error"]},
            latitude=record["centroid_latitude"],
            latitude_errors={
                "uncertainty": record["centroid_latitude_error"]},
            # Convert to m.
            depth=record["centroid_depth_in_km"] * 1000.0,
            depth_errors={
                "uncertainty": record["centroid_depth_in_km_error"] * 1000},
            time=ref_origin["time"] + record["centroid_time"],
            time_errors={"uncertainty": record["centroid_time_error"]},
            depth_type=record["type_of_centroid_depth"],
            origin_type="centroid",
            time_fixed=False,
            epicenter_fixed=False,
            creation_info=creation_info.copy()
        )
        cmt_origin.resource_id = _get_resource_id(record["cmt_event_name"],
                                                  "origin",
                                                  tag="cmtorigin")
        event.origins = [ref_origin, cmt_origin]
        event.preferred_origin_id = cmt_origin.resource_id.id

        # Create the magnitude object.
        mag = Magnitude(
            force_resource_id=False,
            mag=round(record["Mw"], 2),
            magnitude_type="Mwc",
            origin_id=cmt_origin.resource_id,
            creation_info=creation_info.copy()
        )
        mag.resource_id = _get_resource_id(record["cmt_event_name"],
                                           "magnitude", tag="moment_mag")
        event.magnitudes = [mag]
        event.preferred_magnitude_id = mag.resource_id.id

        # Add the reported mb, MS magnitudes as additional magnitude objects.
        event.magnitudes.append(Magnitude(
            force_resource_id=False,
            mag=record["mb"],
            magnitude_type="mb",
            comments=[Comment(
                force_resource_id=False,
                text="Reported magnitude in NDK file. Most likely 'mb'."
            )]
        ))
        event.magnitudes[-1].comments[-1].resource_id = _get_resource_id(
            record["cmt_event_name"], "comment", tag="mb_magnitude")
        event.magnitudes[-1].resource_id = _get_resource_id(
            record["cmt_event_name"], "magnitude", tag="mb")

        event.magnitudes.append(Magnitude(
            force_resource_id=False,
            mag=record["MS"],
            magnitude_type="MS",
            comments=[Comment(
                force_resource_id=False,
                text="Reported magnitude in NDK file. Most likely 'MS'."
            )]
        ))
        event.magnitudes[-1].comments[-1].resource_id = _get_resource_id(
            record["cmt_event_name"], "comment", tag="MS_magnitude")
        event.magnitudes[-1].resource_id = _get_resource_id(
            record["cmt_event_name"], "magnitude", tag="MS")

        # Take care of the moment tensor.
        tensor = Tensor(
            m_rr=record["m_rr"],
            m_rr_errors={"uncertainty": record["m_rr_error"]},
            m_pp=record["m_pp"],
            m_pp_errors={"uncertainty": record["m_pp_error"]},
            m_tt=record["m_tt"],
            m_tt_errors={"uncertainty": record["m_tt_error"]},
            m_rt=record["m_rt"],
            m_rt_errors={"uncertainty": record["m_rt_error"]},
            m_rp=record["m_rp"],
            m_rp_errors={"uncertainty": record["m_rp_error"]},
            m_tp=record["m_tp"],
            m_tp_errors={"uncertainty": record["m_tp_error"]},
            creation_info=creation_info.copy()
        )
        mt = MomentTensor(
            force_resource_id=False,
            scalar_moment=record["scalar_moment"],
            tensor=tensor,
            data_used=[DataUsed(**i) for i in record["data_used"]],
            inversion_type=record["source_type"],
            source_time_function=SourceTimeFunction(
                type=record["moment_rate_type"],
                duration=record["moment_rate_duration"]
            ),
            derived_origin_id=cmt_origin.resource_id,
            creation_info=creation_info.copy()
        )
        mt.resource_id = _get_resource_id(record["cmt_event_name"],
                                          "momenttensor")
        axis = [Axis(**i) for i in record["principal_axis"]]
        focmec = FocalMechanism(
            force_resource_id=False,
            moment_tensor=mt,
            principal_axes=PrincipalAxes(
                # The ordering is the same as for the IRIS SPUD service and
                # from a website of the Saint Louis University Earthquake
                # center so it should be correct.
                t_axis=axis[0],
                p_axis=axis[2],
                n_axis=axis[1]
            ),
            nodal_planes=NodalPlanes(
                nodal_plane_1=NodalPlane(**record["nodal_plane_1"]),
                nodal_plane_2=NodalPlane(**record["nodal_plane_2"])
            ),
            comments=[
                Comment(force_resource_id=False,
                        text="CMT Analysis Type: %s" %
                             record["cmt_type"].capitalize()),
                Comment(force_resource_id=False,
                        text="CMT Timestamp: %s" %
                             record["cmt_timestamp"])],
            creation_info=creation_info.copy()
        )
        focmec.comments[0].resource_id = _get_resource_id(
            record["cmt_event_name"], "comment", tag="cmt_type")
        focmec.comments[1].resource_id = _get_resource_id(
            record["cmt_event_name"], "comment", tag="cmt_timestamp")
        focmec.resource_id = _get_resource_id(record["cmt_event_name"],
                                              "focal_mechanism")
        event.focal_mechanisms = [focmec]
        event.preferred_focal_mechanism_id = focmec.resource_id.id

        # Set at end to avoid duplicate resource id warning.
        event.resource_id = _get_resource_id(record["cmt_event_name"],
                                             "event")

        cat.append(event)

    if len(cat) == 0:
        msg = "No valid events found in NDK file."
        raise ObsPyNDKException(msg)

    return cat
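This reader is normally reached through ObsPy's plugin machinery rather than called directly; a minimal sketch (the NDK file name is a placeholder) of reading a GCMT catalog with read_events and inspecting the preferred focal mechanism:

from obspy import read_events

# 'qcmt.ndk' is a placeholder path to a GCMT NDK file.
cat = read_events("qcmt.ndk", format="NDK")
fm = cat[0].preferred_focal_mechanism()
print(fm.moment_tensor.scalar_moment,
      fm.nodal_planes.nodal_plane_1)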
Esempio n. 21
0
def write_qml(config, sourcepar):
    if not config.options.qml_file:
        return
    qml_file = config.options.qml_file
    cat = read_events(qml_file)
    evid = config.hypo.evid
    try:
        ev = [e for e in cat if evid in str(e.resource_id)][0]
    except Exception:
        logging.warning('Unable to find evid "{}" in QuakeML file. '
                        'QuakeML output will not be written.'.format(evid))
        return

    origin = ev.preferred_origin()
    if origin is None:
        origin = ev.origins[0]
    origin_id = origin.resource_id
    origin_id_strip = origin_id.id.split('/')[-1]
    origin_id_strip = origin_id_strip.replace(config.smi_strip_from_origin_id,
                                              '')

    # Common parameters
    ssp_version = get_versions()['version']
    method_id = config.smi_base + '/sourcespec/' + ssp_version
    cr_info = CreationInfo()
    cr_info.agency_id = config.agency_id
    if config.author is None:
        author = '{}@{}'.format(getuser(), gethostname())
    else:
        author = config.author
    cr_info.author = author
    cr_info.creation_time = UTCDateTime()

    means = sourcepar.means_weight
    errors = sourcepar.errors_weight
    stationpar = sourcepar.station_parameters

    # Magnitude
    mag = Magnitude()
    _id = config.smi_magnitude_template.replace('$SMI_BASE', config.smi_base)
    _id = _id.replace('$ORIGIN_ID', origin_id_strip)
    mag.resource_id = ResourceIdentifier(id=_id)
    mag.method_id = ResourceIdentifier(id=method_id)
    mag.origin_id = origin_id
    mag.magnitude_type = 'Mw'
    mag.mag = means['Mw']
    mag_err = QuantityError()
    mag_err.uncertainty = errors['Mw']
    mag_err.confidence_level = 68.2
    mag.mag_errors = mag_err
    mag.station_count = len(stationpar)
    mag.evaluation_mode = 'automatic'
    mag.creation_info = cr_info

    # Seismic moment -- It has to be stored in a MomentTensor object
    # which, in turn, is part of a FocalMechanism object
    mt = MomentTensor()
    _id = config.smi_moment_tensor_template.replace('$SMI_BASE',
                                                    config.smi_base)
    _id = _id.replace('$ORIGIN_ID', origin_id_strip)
    mt.resource_id = ResourceIdentifier(id=_id)
    mt.derived_origin_id = origin_id
    mt.moment_magnitude_id = mag.resource_id
    mt.scalar_moment = means['Mo']
    mt_err = QuantityError()
    mt_err.lower_uncertainty = errors['Mo'][0]
    mt_err.upper_uncertainty = errors['Mo'][1]
    mt_err.confidence_level = 68.2
    mt.scalar_moment_errors = mt_err
    mt.method_id = method_id
    mt.creation_info = cr_info
    # And here is the FocalMechanism object
    fm = FocalMechanism()
    _id = config.smi_focal_mechanism_template.replace('$SMI_BASE',
                                                      config.smi_base)
    _id = _id.replace('$ORIGIN_ID', origin_id_strip)
    fm.resource_id = ResourceIdentifier(id=_id)
    fm.triggering_origin_id = origin_id
    fm.method_id = ResourceIdentifier(id=method_id)
    fm.moment_tensor = mt
    fm.creation_info = cr_info
    ev.focal_mechanisms.append(fm)

    # Station magnitudes
    for statId in sorted(stationpar.keys()):
        par = stationpar[statId]
        st_mag = StationMagnitude()
        seed_id = statId.split()[0]
        _id = config.smi_station_magnitude_template.replace(
            '$SMI_MAGNITUDE_TEMPLATE', config.smi_magnitude_template)
        _id = _id.replace('$ORIGIN_ID', origin_id_strip)
        _id = _id.replace('$SMI_BASE', config.smi_base)
        _id = _id.replace('$WAVEFORM_ID', seed_id)
        st_mag.resource_id = ResourceIdentifier(id=_id)
        st_mag.origin_id = origin_id
        st_mag.mag = par['Mw']
        st_mag.station_magnitude_type = 'Mw'
        st_mag.method_id = mag.method_id
        st_mag.creation_info = cr_info
        st_mag.waveform_id = WaveformStreamID(seed_string=seed_id)
        st_mag.extra = SSPExtra()
        st_mag.extra.moment = SSPTag(par['Mo'])
        st_mag.extra.corner_frequency = SSPTag(par['fc'])
        st_mag.extra.t_star = SSPTag(par['t_star'])
        ev.station_magnitudes.append(st_mag)
        st_mag_contrib = StationMagnitudeContribution()
        st_mag_contrib.station_magnitude_id = st_mag.resource_id
        mag.station_magnitude_contributions.append(st_mag_contrib)
    ev.magnitudes.append(mag)

    # Write other average parameters as custom tags
    ev.extra = SSPExtra()
    ev.extra.corner_frequency = SSPContainerTag()
    ev.extra.corner_frequency.value.value = SSPTag(means['fc'])
    ev.extra.corner_frequency.value.lower_uncertainty =\
        SSPTag(errors['fc'][0])
    ev.extra.corner_frequency.value.upper_uncertainty =\
        SSPTag(errors['fc'][1])
    ev.extra.corner_frequency.value.confidence_level = SSPTag(68.2)
    ev.extra.t_star = SSPContainerTag()
    ev.extra.t_star.value.value = SSPTag(means['t_star'])
    ev.extra.t_star.value.uncertainty = SSPTag(errors['t_star'])
    ev.extra.t_star.value.confidence_level = SSPTag(68.2)
    ev.extra.source_radius = SSPContainerTag()
    ev.extra.source_radius.value.value = SSPTag(means['ra'])
    ev.extra.source_radius.value.lower_uncertainty =\
        SSPTag(errors['ra'][0])
    ev.extra.source_radius.value.upper_uncertainty =\
        SSPTag(errors['ra'][1])
    ev.extra.source_radius.value.confidence_level = SSPTag(68.2)
    ev.extra.stress_drop = SSPContainerTag()
    ev.extra.stress_drop.value.value = SSPTag(means['bsd'])
    ev.extra.stress_drop.value.lower_uncertainty =\
        SSPTag(errors['bsd'][0])
    ev.extra.stress_drop.value.upper_uncertainty =\
        SSPTag(errors['bsd'][1])
    ev.extra.stress_drop.value.confidence_level = SSPTag(68.2)

    if config.set_preferred_magnitude:
        ev.preferred_magnitude_id = mag.resource_id.id

    qml_file_out = os.path.join(config.options.outdir, evid + '.xml')
    ev.write(qml_file_out, format='QUAKEML')
    logging.info('QuakeML file written to: ' + qml_file_out)
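The scalar moment written here ends up nested under FocalMechanism → MomentTensor in the output file; a minimal sketch (the path is a placeholder) of reading it back:

from obspy import read_events

# Placeholder path: write_qml() writes '<outdir>/<evid>.xml'.
ev = read_events("outdir/evid.xml")[0]
fm = ev.focal_mechanisms[-1]           # the FocalMechanism appended above
print(fm.moment_tensor.scalar_moment,
      fm.moment_tensor.scalar_moment_errors)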