Example #1
    def _map_fplane2focalmech(self, db):
        """
        Return an obspy FocalMechanism from a dict of CSS key/values
        corresponding to one record. See the 'Join' section for the
        expected database join.
        
        Inputs
        ======
        db : dict of key/values of CSS fields from the 'fplane' table

        Returns
        =======
        obspy.core.event.FocalMechanism

        Notes
        =====
        Any object that supports the dict 'get' method can be passed as
        input, e.g. OrderedDict, custom classes, etc.

        """
        #
        # NOTE: Antelope schema for this is wrong, no nulls defined
        # 
        fm = FocalMechanism()

        nps = NodalPlanes()
        nps.nodal_plane_1 = NodalPlane(db.get('str1'), db.get('dip1'), db.get('rake1'))
        nps.nodal_plane_2 = NodalPlane(db.get('str2'), db.get('dip2'), db.get('rake2'))

        nps.preferred_plane = 1

        prin_ax = PrincipalAxes()
        prin_ax.t_axis = Axis(db.get('taxazm'), db.get('taxplg'))
        prin_ax.p_axis = Axis(db.get('paxazm'), db.get('paxplg'))

        fm.nodal_planes = nps
        fm.principal_axes = prin_ax

        author_string = ':'.join([db['algorithm'], db['auth']])
        fm.creation_info = CreationInfo(
            version = db.get('mechid'), 
            creation_time = UTCDateTime(db['lddate']), 
            agency_id = self.agency,
            author = author_string,
            ) 
        
        fm.resource_id = self._rid(fm)
        return fm
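
A minimal usage sketch for the converter above. The `converter` object is hypothetical; it stands in for any instance exposing this method plus the `agency` attribute and `_rid` helper the method relies on. The dict values are made up, but the key names are exactly the CSS fplane fields the method reads.

fplane_record = {
    'str1': 35.0, 'dip1': 60.0, 'rake1': -90.0,
    'str2': 215.0, 'dip2': 30.0, 'rake2': -90.0,
    'taxazm': 125.0, 'taxplg': 15.0,
    'paxazm': 305.0, 'paxplg': 75.0,
    'algorithm': 'HASH', 'auth': 'analyst',
    'mechid': '1', 'lddate': 1398902400.0,
}
fm = converter._map_fplane2focalmech(fplane_record)  # `converter` is hypothetical
print(fm.nodal_planes.nodal_plane_1.strike)
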
Example #2
def _read_focmec_out(lines):
    """
    Read given data into an :class:`~obspy.core.event.Event` object.

    :type lines: list
    :param lines: List of decoded unicode strings with data from a FOCMEC out
        file.
    """
    event, _ = _read_common_header(lines)
    # now move to first line with a focal mechanism
    for i, line in enumerate(lines):
        if line.split()[:3] == ['Dip', 'Strike', 'Rake']:
            break
    else:
        return event
    header = lines[:i]
    polarity_count, weighted = _get_polarity_count(header)
    focmec_list_header = lines[i]
    event.comments.append(Comment(text='\n'.join(header)))
    try:
        lines = lines[i + 1:]
    except IndexError:
        return event
    for line in lines:
        # allow for empty lines (maybe they can happen at the end sometimes..)
        if not line.strip():
            continue
        comment = Comment(text='\n'.join((focmec_list_header, line)))
        items = line.split()
        dip, strike, rake = [float(x) for x in items[:3]]
        plane = NodalPlane(strike=strike, dip=dip, rake=rake)
        planes = NodalPlanes(nodal_plane_1=plane, preferred_plane=1)
        # XXX ideally should compute the auxiliary plane..
        focmec = FocalMechanism(nodal_planes=planes)
        focmec.station_polarity_count = polarity_count
        focmec.creation_info = CreationInfo(
            version='FOCMEC', creation_time=event.creation_info.creation_time)
        if not weighted:
            errors = sum([int(x) for x in items[3:6]])
            focmec.misfit = float(errors) / polarity_count
        focmec.comments.append(comment)
        event.focal_mechanisms.append(focmec)
    return event
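
A usage sketch for the reader above: it expects the already decoded lines of a FOCMEC output file (the file name below is hypothetical). The header lines are consumed by `_read_common_header` and `_get_polarity_count`, and each remaining line becomes one FocalMechanism.

with open('focmec_example.out', encoding='utf-8') as fh:  # hypothetical file
    lines = fh.read().splitlines()
event = _read_focmec_out(lines)
print(len(event.focal_mechanisms), 'focal mechanisms read')
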
Example #3
    def _parseRecordDp(self, line, event):
        """
        Parses the 'source parameter data - primary' record Dp
        """
        source_contributor = line[2:6].strip()
        computation_type = line[6]
        exponent = self._intZero(line[7])
        scale = math.pow(10, exponent)
        centroid_origin_time = line[8:14] + '.' + line[14]
        orig_time_stderr = line[15:17]
        if orig_time_stderr == 'FX':
            orig_time_stderr = 'Fixed'
        else:
            orig_time_stderr =\
                self._floatWithFormat(orig_time_stderr, '2.1', scale)
        centroid_latitude = self._floatWithFormat(line[17:21], '4.2')
        lat_type = line[21]
        if centroid_latitude is not None:
            centroid_latitude *= self._coordinateSign(lat_type)
        lat_stderr = line[22:25]
        if lat_stderr == 'FX':
            lat_stderr = 'Fixed'
        else:
            lat_stderr = self._floatWithFormat(lat_stderr, '3.2', scale)
        centroid_longitude = self._floatWithFormat(line[25:30], '5.2')
        lon_type = line[30]
        if centroid_longitude is not None:
            centroid_longitude *= self._coordinateSign(lon_type)
        lon_stderr = line[31:34]
        if lon_stderr == 'FX':
            lon_stderr = 'Fixed'
        else:
            lon_stderr = self._floatWithFormat(lon_stderr, '3.2', scale)
        centroid_depth = self._floatWithFormat(line[34:38], '4.1')
        depth_stderr = line[38:40]
        if depth_stderr == 'FX' or depth_stderr == 'BD':
            depth_stderr = 'Fixed'
        else:
            depth_stderr = self._floatWithFormat(depth_stderr, '2.1', scale)
        station_number = self._intZero(line[40:43])
        component_number = self._intZero(line[43:46])
        station_number2 = self._intZero(line[46:48])
        component_number2 = self._intZero(line[48:51])
        #unused: half_duration = self._floatWithFormat(line[51:54], '3.1')
        moment = self._floatWithFormat(line[54:56], '2.1')
        moment_stderr = self._floatWithFormat(line[56:58], '2.1')
        moment_exponent = self._int(line[58:60])
        if (moment is not None) and (moment_exponent is not None):
            moment *= math.pow(10, moment_exponent)
        if (moment_stderr is not None) and (moment_exponent is not None):
            moment_stderr *= math.pow(10, moment_exponent)

        evid = event.resource_id.id.split('/')[-1]
        #Create a new origin only if centroid time is defined:
        origin = None
        if centroid_origin_time.strip() != '.':
            origin = Origin()
            res_id = '/'.join(
                (res_id_prefix, 'origin', evid, source_contributor.lower(),
                 'mw' + computation_type.lower()))
            origin.resource_id = ResourceIdentifier(id=res_id)
            origin.creation_info =\
                CreationInfo(agency_id=source_contributor)
            date = event.origins[0].time.strftime('%Y%m%d')
            origin.time = UTCDateTime(date + centroid_origin_time)
            #Check if centroid time is on the next day:
            if origin.time < event.origins[0].time:
                origin.time += timedelta(days=1)
            self._storeUncertainty(origin.time_errors, orig_time_stderr)
            origin.latitude = centroid_latitude
            origin.longitude = centroid_longitude
            origin.depth = centroid_depth * 1000
            if lat_stderr == 'Fixed' and lon_stderr == 'Fixed':
                origin.epicenter_fixed = True
            else:
                self._storeUncertainty(origin.latitude_errors,
                                       self._latErrToDeg(lat_stderr))
                self._storeUncertainty(
                    origin.longitude_errors,
                    self._lonErrToDeg(lon_stderr, origin.latitude))
            if depth_stderr == 'Fixed':
                origin.depth_type = 'operator assigned'
            else:
                origin.depth_type = 'from location'
                self._storeUncertainty(origin.depth_errors,
                                       depth_stderr,
                                       scale=1000)
            quality = OriginQuality()
            quality.used_station_count =\
                station_number + station_number2
            quality.used_phase_count =\
                component_number + component_number2
            origin.quality = quality
            origin.type = 'centroid'
            event.origins.append(origin)
        focal_mechanism = FocalMechanism()
        res_id = '/'.join(
            (res_id_prefix, 'focalmechanism', evid, source_contributor.lower(),
             'mw' + computation_type.lower()))
        focal_mechanism.resource_id = ResourceIdentifier(id=res_id)
        focal_mechanism.creation_info =\
            CreationInfo(agency_id=source_contributor)
        moment_tensor = MomentTensor()
        if origin is not None:
            moment_tensor.derived_origin_id = origin.resource_id
        else:
            #this is required for QuakeML validation:
            res_id = '/'.join((res_id_prefix, 'no-origin'))
            moment_tensor.derived_origin_id =\
                ResourceIdentifier(id=res_id)
        for mag in event.magnitudes:
            if mag.creation_info.agency_id == source_contributor:
                moment_tensor.moment_magnitude_id = mag.resource_id
        res_id = '/'.join(
            (res_id_prefix, 'momenttensor', evid, source_contributor.lower(),
             'mw' + computation_type.lower()))
        moment_tensor.resource_id = ResourceIdentifier(id=res_id)
        moment_tensor.scalar_moment = moment
        self._storeUncertainty(moment_tensor.scalar_moment_errors,
                               moment_stderr)
        data_used = DataUsed()
        data_used.station_count = station_number + station_number2
        data_used.component_count = component_number + component_number2
        if computation_type == 'C':
            res_id = '/'.join((res_id_prefix, 'methodID=CMT'))
            focal_mechanism.method_id = ResourceIdentifier(id=res_id)
            #CMT algorithm uses long-period body waves,
            #very-long-period surface waves and
            #intermediate period surface waves (since 2004
            #for shallow and intermediate-depth earthquakes
            # --Ekstrom et al., 2012)
            data_used.wave_type = 'combined'
        elif computation_type == 'M':
            res_id = '/'.join((res_id_prefix, 'methodID=moment_tensor'))
            focal_mechanism.method_id = ResourceIdentifier(id=res_id)
            #FIXME: not sure which kind of data is used by
            #"moment tensor" algorithm.
            data_used.wave_type = 'unknown'
        elif computation_type == 'B':
            res_id = '/'.join((res_id_prefix, 'methodID=broadband_data'))
            focal_mechanism.method_id = ResourceIdentifier(id=res_id)
            #FIXME: is 'combined' correct here?
            data_used.wave_type = 'combined'
        elif computation_type == 'F':
            res_id = '/'.join((res_id_prefix, 'methodID=P-wave_first_motion'))
            focal_mechanism.method_id = ResourceIdentifier(id=res_id)
            data_used.wave_type = 'P waves'
        elif computation_type == 'S':
            res_id = '/'.join((res_id_prefix, 'methodID=scalar_moment'))
            focal_mechanism.method_id = ResourceIdentifier(id=res_id)
            #FIXME: not sure which kind of data is used
            #for scalar moment determination.
            data_used.wave_type = 'unknown'
        moment_tensor.data_used = data_used
        focal_mechanism.moment_tensor = moment_tensor
        event.focal_mechanisms.append(focal_mechanism)
        return focal_mechanism
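
The record carries a common exponent at column 7 that rescales the standard-error fields, plus a separate exponent for the scalar moment mantissa, as the method above applies. A standalone sketch of that arithmetic with hypothetical values:

import math

exponent = 4                       # column 7 of the Dp record (hypothetical value)
scale = math.pow(10, exponent)     # factor applied to the '2.1'/'3.2' stderr fields
moment, moment_exponent = 1.2, 19  # mantissa and its own exponent (columns 54-60)
scalar_moment = moment * math.pow(10, moment_exponent)  # -> 1.2e+19
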
Example #4
def outputOBSPY(hp, event=None, only_fm_picks=False):
    """
    Make an Event which includes the current focal mechanism information from HASH
    
    Use the 'only_fm_picks' flag to only include the picks HASH used for the FocalMechanism.
    This flag will replace the 'picks' and 'arrivals' lists of existing events with new ones.
    
    Inputs
    -------
    hp    : hashpy.HashPype instance
    
    event : obspy.core.event.Event
    
    only_fm_picks : bool, whether to overwrite the picks/arrivals lists with only those used for the focal mechanism
    
    
    Returns
    -------
    obspy.core.event.Event
    
    A new Event is returned if none was input; otherwise the FocalMechanism is added to the existing event.
    """
    # Returns new (or updates existing) Event with HASH solution
    n = hp.npol
    if event is None:
        event = Event(focal_mechanisms=[], picks=[], origins=[])
        origin = Origin(arrivals=[])
        origin.time = UTCDateTime(hp.tstamp)
        origin.latitude = hp.qlat
        origin.longitude = hp.qlon
        origin.depth = hp.qdep
        origin.creation_info = CreationInfo(version=hp.icusp)
        origin.resource_id = ResourceIdentifier('smi:hash/Origin/{0}'.format(
            hp.icusp))
        for _i in range(n):
            p = Pick()
            p.creation_info = CreationInfo(version=hp.arid[_i])
            p.resource_id = ResourceIdentifier('smi:nsl/Pick/{0}'.format(
                p.creation_info.version))
            p.waveform_id = WaveformStreamID(network_code=hp.snet[_i],
                                             station_code=hp.sname[_i],
                                             channel_code=hp.scomp[_i])
            if hp.p_pol[_i] > 0:
                p.polarity = 'positive'
            else:
                p.polarity = 'negative'
            a = Arrival()
            a.creation_info = CreationInfo(version=hp.arid[_i])
            a.resource_id = ResourceIdentifier('smi:nsl/Arrival/{0}'.format(
                p.creation_info.version))
            a.azimuth = hp.p_azi_mc[_i, 0]
            a.takeoff_angle = 180. - hp.p_the_mc[_i, 0]
            a.pick_id = p.resource_id
            origin.arrivals.append(a)
            event.picks.append(p)
        event.origins.append(origin)
        event.preferred_origin_id = origin.resource_id.resource_id
    else:  # just update the changes
        origin = event.preferred_origin()
        picks = []
        arrivals = []
        for _i in range(n):
            ind = hp.p_index[_i]
            a = origin.arrivals[ind]
            p = a.pick_id.getReferredObject()
            a.takeoff_angle = hp.p_the_mc[_i, 0]
            picks.append(p)
            arrivals.append(a)
        if only_fm_picks:
            origin.arrivals = arrivals
            event.picks = picks
    # Use the double couple calculator and populate planes/axes etc.
    x = hp._best_quality_index
    # Put all the mechanisms into the 'focal_mechanisms' list, mark "best" as preferred
    for s in range(hp.nmult):
        dc = DoubleCouple([hp.str_avg[s], hp.dip_avg[s], hp.rak_avg[s]])
        ax = dc.axis
        focal_mech = FocalMechanism()
        focal_mech.creation_info = CreationInfo(creation_time=UTCDateTime(),
                                                author=hp.author)
        focal_mech.triggering_origin_id = origin.resource_id
        focal_mech.resource_id = ResourceIdentifier(
            'smi:hash/FocalMechanism/{0}/{1}'.format(hp.icusp, s + 1))
        focal_mech.method_id = ResourceIdentifier('HASH')
        focal_mech.nodal_planes = NodalPlanes()
        focal_mech.nodal_planes.nodal_plane_1 = NodalPlane(*dc.plane1)
        focal_mech.nodal_planes.nodal_plane_2 = NodalPlane(*dc.plane2)
        focal_mech.principal_axes = PrincipalAxes()
        focal_mech.principal_axes.t_axis = Axis(azimuth=ax['T']['azimuth'],
                                                plunge=ax['T']['dip'])
        focal_mech.principal_axes.p_axis = Axis(azimuth=ax['P']['azimuth'],
                                                plunge=ax['P']['dip'])
        focal_mech.station_polarity_count = n
        focal_mech.azimuthal_gap = hp.magap
        focal_mech.misfit = hp.mfrac[s]
        focal_mech.station_distribution_ratio = hp.stdr[s]
        focal_mech.comments.append(
            Comment(
                hp.qual[s],
                resource_id=ResourceIdentifier(
                    focal_mech.resource_id.resource_id + '/comment/quality')))
        #----------------------------------------
        event.focal_mechanisms.append(focal_mech)
        if s == x:
            event.preferred_focal_mechanism_id = focal_mech.resource_id.resource_id
    return event
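
A minimal usage sketch for the function above. The import path and the call sequence for driving HASH are assumptions not shown in the snippet; only `outputOBSPY` itself and the HashPype instance come from the code.

from hashpy import HashPype  # assumed import path

hp = HashPype()
# ... load inputs, set parameters and run the HASH focal mechanism calculation ...
event = outputOBSPY(hp)
print(event.preferred_focal_mechanism_id)
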
Example #5
    def _parse_record_dp(self, line, event):
        """
        Parses the 'source parameter data - primary' record Dp
        """
        source_contributor = line[2:6].strip()
        computation_type = line[6]
        exponent = self._int_zero(line[7])
        scale = math.pow(10, exponent)
        centroid_origin_time = line[8:14] + '.' + line[14]
        orig_time_stderr = line[15:17]
        if orig_time_stderr == 'FX':
            orig_time_stderr = 'Fixed'
        else:
            orig_time_stderr = \
                self._float_with_format(orig_time_stderr, '2.1', scale)
        centroid_latitude = self._float_with_format(line[17:21], '4.2')
        lat_type = line[21]
        if centroid_latitude is not None:
            centroid_latitude *= self._coordinate_sign(lat_type)
        lat_stderr = line[22:25]
        if lat_stderr == 'FX':
            lat_stderr = 'Fixed'
        else:
            lat_stderr = self._float_with_format(lat_stderr, '3.2', scale)
        centroid_longitude = self._float_with_format(line[25:30], '5.2')
        lon_type = line[30]
        if centroid_longitude is not None:
            centroid_longitude *= self._coordinate_sign(lon_type)
        lon_stderr = line[31:34]
        if lon_stderr == 'FX':
            lon_stderr = 'Fixed'
        else:
            lon_stderr = self._float_with_format(lon_stderr, '3.2', scale)
        centroid_depth = self._float_with_format(line[34:38], '4.1')
        depth_stderr = line[38:40]
        if depth_stderr == 'FX' or depth_stderr == 'BD':
            depth_stderr = 'Fixed'
        else:
            depth_stderr = self._float_with_format(depth_stderr, '2.1', scale)
        station_number = self._int_zero(line[40:43])
        component_number = self._int_zero(line[43:46])
        station_number2 = self._int_zero(line[46:48])
        component_number2 = self._int_zero(line[48:51])
        # unused: half_duration = self._float_with_format(line[51:54], '3.1')
        moment = self._float_with_format(line[54:56], '2.1')
        moment_stderr = self._float_with_format(line[56:58], '2.1')
        moment_exponent = self._int(line[58:60])
        if (moment is not None) and (moment_exponent is not None):
            moment *= math.pow(10, moment_exponent)
        if (moment_stderr is not None) and (moment_exponent is not None):
            moment_stderr *= math.pow(10, moment_exponent)

        evid = event.resource_id.id.split('/')[-1]
        # Create a new origin only if centroid time is defined:
        origin = None
        if centroid_origin_time.strip() != '.':
            origin = Origin()
            res_id = '/'.join((res_id_prefix, 'origin',
                               evid, source_contributor.lower(),
                               'mw' + computation_type.lower()))
            origin.resource_id = ResourceIdentifier(id=res_id)
            origin.creation_info = \
                CreationInfo(agency_id=source_contributor)
            date = event.origins[0].time.strftime('%Y%m%d')
            origin.time = UTCDateTime(date + centroid_origin_time)
            # Check if centroid time is on the next day:
            if origin.time < event.origins[0].time:
                origin.time += timedelta(days=1)
            self._store_uncertainty(origin.time_errors, orig_time_stderr)
            origin.latitude = centroid_latitude
            origin.longitude = centroid_longitude
            origin.depth = centroid_depth * 1000
            if lat_stderr == 'Fixed' and lon_stderr == 'Fixed':
                origin.epicenter_fixed = True
            else:
                self._store_uncertainty(origin.latitude_errors,
                                        self._lat_err_to_deg(lat_stderr))
                self._store_uncertainty(origin.longitude_errors,
                                        self._lon_err_to_deg(lon_stderr,
                                                             origin.latitude))
            if depth_stderr == 'Fixed':
                origin.depth_type = 'operator assigned'
            else:
                origin.depth_type = 'from location'
                self._store_uncertainty(origin.depth_errors,
                                        depth_stderr, scale=1000)
            quality = OriginQuality()
            quality.used_station_count = \
                station_number + station_number2
            quality.used_phase_count = \
                component_number + component_number2
            origin.quality = quality
            origin.origin_type = 'centroid'
            event.origins.append(origin)
        focal_mechanism = FocalMechanism()
        res_id = '/'.join((res_id_prefix, 'focalmechanism',
                           evid, source_contributor.lower(),
                           'mw' + computation_type.lower()))
        focal_mechanism.resource_id = ResourceIdentifier(id=res_id)
        focal_mechanism.creation_info = \
            CreationInfo(agency_id=source_contributor)
        moment_tensor = MomentTensor()
        if origin is not None:
            moment_tensor.derived_origin_id = origin.resource_id
        else:
            # this is required for QuakeML validation:
            res_id = '/'.join((res_id_prefix, 'no-origin'))
            moment_tensor.derived_origin_id = \
                ResourceIdentifier(id=res_id)
        for mag in event.magnitudes:
            if mag.creation_info.agency_id == source_contributor:
                moment_tensor.moment_magnitude_id = mag.resource_id
        res_id = '/'.join((res_id_prefix, 'momenttensor',
                           evid, source_contributor.lower(),
                           'mw' + computation_type.lower()))
        moment_tensor.resource_id = ResourceIdentifier(id=res_id)
        moment_tensor.scalar_moment = moment
        self._store_uncertainty(moment_tensor.scalar_moment_errors,
                                moment_stderr)
        data_used = DataUsed()
        data_used.station_count = station_number + station_number2
        data_used.component_count = component_number + component_number2
        if computation_type == 'C':
            res_id = '/'.join((res_id_prefix, 'methodID=CMT'))
            focal_mechanism.method_id = ResourceIdentifier(id=res_id)
            # CMT algorithm uses long-period body waves,
            # very-long-period surface waves and
            # intermediate period surface waves (since 2004
            # for shallow and intermediate-depth earthquakes
            # --Ekstrom et al., 2012)
            data_used.wave_type = 'combined'
        elif computation_type == 'M':
            res_id = '/'.join((res_id_prefix, 'methodID=moment_tensor'))
            focal_mechanism.method_id = ResourceIdentifier(id=res_id)
            # FIXME: not sure which kind of data is used by
            # "moment tensor" algorithm.
            data_used.wave_type = 'unknown'
        elif computation_type == 'B':
            res_id = '/'.join((res_id_prefix, 'methodID=broadband_data'))
            focal_mechanism.method_id = ResourceIdentifier(id=res_id)
            # FIXME: is 'combined' correct here?
            data_used.wave_type = 'combined'
        elif computation_type == 'F':
            res_id = '/'.join((res_id_prefix, 'methodID=P-wave_first_motion'))
            focal_mechanism.method_id = ResourceIdentifier(id=res_id)
            data_used.wave_type = 'P waves'
        elif computation_type == 'S':
            res_id = '/'.join((res_id_prefix, 'methodID=scalar_moment'))
            focal_mechanism.method_id = ResourceIdentifier(id=res_id)
            # FIXME: not sure which kind of data is used
            # for scalar moment determination.
            data_used.wave_type = 'unknown'
        moment_tensor.data_used = [data_used]
        focal_mechanism.moment_tensor = moment_tensor
        event.focal_mechanisms.append(focal_mechanism)
        return focal_mechanism
Example #6
def outputOBSPY(hp, event=None, only_fm_picks=False):
    """
    Make an Event which includes the current focal mechanism information from HASH
    
    Use the 'only_fm_picks' flag to only include the picks HASH used for the FocalMechanism.
    This flag will replace the 'picks' and 'arrivals' lists of existing events with new ones.
    
    Inputs
    -------
    hp    : hashpy.HashPype instance
    
    event : obspy.core.event.Event
    
    only_fm_picks : bool, whether to overwrite the picks/arrivals lists with only those used for the focal mechanism
    
    
    Returns
    -------
    obspy.core.event.Event
    
    A new Event is returned if none was input; otherwise the FocalMechanism is added to the existing event.
    """
    # Returns new (or updates existing) Event with HASH solution
    n = hp.npol
    if event is None:
        event = Event(focal_mechanisms=[], picks=[], origins=[])
        origin = Origin(arrivals=[])
        origin.time = UTCDateTime(hp.tstamp)
        origin.latitude = hp.qlat
        origin.longitude = hp.qlon
        origin.depth = hp.qdep
        origin.creation_info = CreationInfo(version=hp.icusp)
        origin.resource_id = ResourceIdentifier('smi:hash/Origin/{0}'.format(hp.icusp))
        for _i in range(n):
            p = Pick()
            p.creation_info = CreationInfo(version=hp.arid[_i])
            p.resource_id = ResourceIdentifier('smi:hash/Pick/{0}'.format(p.creation_info.version))
            p.waveform_id = WaveformStreamID(network_code=hp.snet[_i], station_code=hp.sname[_i], channel_code=hp.scomp[_i])
            if hp.p_pol[_i] > 0:
                p.polarity = 'positive'
            else:
                p.polarity = 'negative'
            a = Arrival()
            a.creation_info = CreationInfo(version=hp.arid[_i])
            a.resource_id = ResourceIdentifier('smi:hash/Arrival/{0}'.format(p.creation_info.version))
            a.azimuth = hp.p_azi_mc[_i, 0]
            a.takeoff_angle = 180. - hp.p_the_mc[_i, 0]
            a.pick_id = p.resource_id
            origin.arrivals.append(a)
            event.picks.append(p)
        event.origins.append(origin)
        event.preferred_origin_id = str(origin.resource_id)
    else:  # just update the changes
        origin = event.preferred_origin()
        picks = []
        arrivals = []
        for _i in range(n):
            ind = hp.p_index[_i]
            a = origin.arrivals[ind]
            p = a.pick_id.getReferredObject()
            a.takeoff_angle = hp.p_the_mc[_i, 0]
            picks.append(p)
            arrivals.append(a)
        if only_fm_picks:
            origin.arrivals = arrivals
            event.picks = picks
    # Use the double couple calculator and populate planes/axes etc.
    x = hp._best_quality_index
    # Put all the mechanisms into the 'focal_mechanisms' list, mark "best" as preferred
    for s in range(hp.nmult):
        dc = DoubleCouple([hp.str_avg[s], hp.dip_avg[s], hp.rak_avg[s]])
        ax = dc.axis
        focal_mech = FocalMechanism()
        focal_mech.creation_info = CreationInfo(creation_time=UTCDateTime(), author=hp.author)
        focal_mech.triggering_origin_id = origin.resource_id
        focal_mech.resource_id = ResourceIdentifier('smi:hash/FocalMechanism/{0}/{1}'.format(hp.icusp, s+1))
        focal_mech.method_id = ResourceIdentifier('HASH')
        focal_mech.nodal_planes = NodalPlanes()
        focal_mech.nodal_planes.nodal_plane_1 = NodalPlane(*dc.plane1)
        focal_mech.nodal_planes.nodal_plane_2 = NodalPlane(*dc.plane2)
        focal_mech.principal_axes = PrincipalAxes()
        focal_mech.principal_axes.t_axis = Axis(azimuth=ax['T']['azimuth'], plunge=ax['T']['dip'])
        focal_mech.principal_axes.p_axis = Axis(azimuth=ax['P']['azimuth'], plunge=ax['P']['dip'])
        focal_mech.station_polarity_count = n
        focal_mech.azimuthal_gap = hp.magap
        focal_mech.misfit = hp.mfrac[s]
        focal_mech.station_distribution_ratio = hp.stdr[s]
        focal_mech.comments.append(
            Comment(hp.qual[s], resource_id=ResourceIdentifier(str(focal_mech.resource_id) + '/comment/quality'))
            )
        #----------------------------------------
        event.focal_mechanisms.append(focal_mech)
        if s == x:
            event.preferred_focal_mechanism_id = str(focal_mech.resource_id)
    return event
Example #7
    def build(self):
        """
        Build an obspy moment tensor focal mech event

        This makes the tensor output into an Event containing:
        1) a FocalMechanism with a MomentTensor, NodalPlanes, and PrincipalAxes
        2) a Magnitude of the Mw from the Tensor

        This is what we want for outputting QuakeML using
        the (slightly modified) obspy code.

        Input
        -----
        None; the text to parse is taken from self.parser (already fed the
        filehandle or string contents).

        Output
        ------
        None; the resulting Event instance is stored on self.event.
        """
        p = self.parser
        event         = Event(event_type='earthquake')
        origin        = Origin()
        focal_mech    = FocalMechanism()
        nodal_planes  = NodalPlanes()
        moment_tensor = MomentTensor()
        principal_ax  = PrincipalAxes()
        magnitude     = Magnitude()
        data_used     = DataUsed()
        creation_info = CreationInfo(agency_id='NN')
        ev_mode = 'automatic'
        ev_stat = 'preliminary'
        evid = None
        orid = None
        # Parse the entire file line by line.
        for n, l in enumerate(p.line):
            if 'REVIEWED BY NSL STAFF' in l:
                ev_mode = 'manual'
                ev_stat = 'reviewed'
            if 'Event ID' in l:
                evid = p._id(n)
            if 'Origin ID' in l:
                orid = p._id(n)
            if 'Ichinose' in l:
                moment_tensor.category = 'regional'
            if re.match(r'^\d{4}\/\d{2}\/\d{2}', l):
                ev = p._event_info(n)
            if 'Depth' in l:
                derived_depth = p._depth(n)
            if 'Mw' in l:
                magnitude.mag = p._mw(n) 
                magnitude.magnitude_type = 'Mw'
            if 'Mo' in l and 'dyne' in l:
                moment_tensor.scalar_moment = p._mo(n)
            if 'Percent Double Couple' in l:
                moment_tensor.double_couple = p._percent(n)
            if 'Percent CLVD' in l:
                moment_tensor.clvd = p._percent(n)
            if 'Epsilon' in l:
                moment_tensor.variance = p._epsilon(n)
            if 'Percent Variance Reduction' in l:
                moment_tensor.variance_reduction = p._percent(n)
            if 'Major Double Couple' in l and 'strike' in p.line[n+1]:
                np = p._double_couple(n)
                nodal_planes.nodal_plane_1 = NodalPlane(*np[0])
                nodal_planes.nodal_plane_2 = NodalPlane(*np[1])
                nodal_planes.preferred_plane = 1
            if 'Spherical Coordinates' in l:
                mt = p._mt_sphere(n)
                moment_tensor.tensor = Tensor(
                    m_rr = mt['Mrr'],
                    m_tt = mt['Mtt'],
                    m_pp = mt['Mff'],
                    m_rt = mt['Mrt'],
                    m_rp = mt['Mrf'],
                    m_tp = mt['Mtf'],
                    )
            if 'Eigenvalues and eigenvectors of the Major Double Couple' in l:
                ax = p._vectors(n)
                principal_ax.t_axis = Axis(ax['T']['trend'], ax['T']['plunge'], ax['T']['ev'])
                principal_ax.p_axis = Axis(ax['P']['trend'], ax['P']['plunge'], ax['P']['ev'])
                principal_ax.n_axis = Axis(ax['N']['trend'], ax['N']['plunge'], ax['N']['ev'])
            if 'Number of Stations' in l:
                data_used.station_count = p._number_of_stations(n)
            if 'Maximum' in l and 'Gap' in l:
                focal_mech.azimuthal_gap = p._gap(n)
            if re.match(r'^Date', l):
                creation_info.creation_time = p._creation_time(n)
        # Creation Time
        creation_info.version = orid
        # Fill in magnitude values
        magnitude.evaluation_mode = ev_mode
        magnitude.evaluation_status = ev_stat
        magnitude.creation_info = creation_info.copy()
        magnitude.resource_id = self._rid(magnitude)
        # Stub origin
        origin.time = ev.get('time')
        origin.latitude = ev.get('lat')
        origin.longitude = ev.get('lon')
        origin.depth = derived_depth * 1000.
        origin.depth_type = "from moment tensor inversion"
        origin.creation_info = creation_info.copy()
        # Unique from true origin ID
        _oid = self._rid(origin)
        origin.resource_id = ResourceIdentifier(str(_oid) + '/mt')
        del _oid
        # Make an id for the MT that references this origin
        ogid = str(origin.resource_id)
        doid = ResourceIdentifier(ogid, referred_object=origin)
        # Make an id for the moment tensor mag which references this mag
        mrid = str(magnitude.resource_id)
        mmid = ResourceIdentifier(mrid, referred_object=magnitude)
        # MT todo: could check/use URL for RID if parsing the php file
        moment_tensor.evaluation_mode = ev_mode
        moment_tensor.evaluation_status = ev_stat
        moment_tensor.data_used = data_used
        moment_tensor.moment_magnitude_id = mmid
        moment_tensor.derived_origin_id = doid
        moment_tensor.creation_info = creation_info.copy()
        moment_tensor.resource_id = self._rid(moment_tensor)
        # Fill in focal_mech values
        focal_mech.nodal_planes  = nodal_planes
        focal_mech.moment_tensor = moment_tensor
        focal_mech.principal_axes = principal_ax
        focal_mech.creation_info = creation_info.copy()
        focal_mech.resource_id = self._rid(focal_mech)
        # add mech and new magnitude to event
        event.focal_mechanisms = [focal_mech]
        event.magnitudes = [magnitude]
        event.origins = [origin]
        event.creation_info = creation_info.copy()
        # If an MT was done, that's the preferred mag/mech
        event.preferred_magnitude_id = str(magnitude.resource_id)
        event.preferred_focal_mechanism_id = str(focal_mech.resource_id)
        if evid:
            event.creation_info.version = evid
        event.resource_id = self._rid(event)
        self.event = event
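
A usage sketch of the builder above. The class name and constructor are hypothetical; only build(), the parser attribute, the _rid helper it calls, and the resulting self.event come from the code.

builder = MomentTensorBuilder(parser)  # hypothetical class wrapping self.parser
builder.build()
builder.event.write('mt_solution.xml', format='QUAKEML')
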
Example #8
    def _parseRecordDp(self, line, event):
        """
        Parses the 'source parameter data - primary' record Dp
        """
        source_contributor = line[2:6].strip()
        computation_type = line[6]
        exponent = self._intZero(line[7])
        scale = math.pow(10, exponent)
        centroid_origin_time = line[8:14] + "." + line[14]
        orig_time_stderr = line[15:17]
        if orig_time_stderr == "FX":
            orig_time_stderr = "Fixed"
        else:
            orig_time_stderr = self._floatWithFormat(orig_time_stderr, "2.1", scale)
        centroid_latitude = self._floatWithFormat(line[17:21], "4.2")
        lat_type = line[21]
        if centroid_latitude is not None:
            centroid_latitude *= self._coordinateSign(lat_type)
        lat_stderr = line[22:25]
        if lat_stderr == "FX":
            lat_stderr = "Fixed"
        else:
            lat_stderr = self._floatWithFormat(lat_stderr, "3.2", scale)
        centroid_longitude = self._floatWithFormat(line[25:30], "5.2")
        lon_type = line[30]
        if centroid_longitude is not None:
            centroid_longitude *= self._coordinateSign(lon_type)
        lon_stderr = line[31:34]
        if lon_stderr == "FX":
            lon_stderr = "Fixed"
        else:
            lon_stderr = self._floatWithFormat(lon_stderr, "3.2", scale)
        centroid_depth = self._floatWithFormat(line[34:38], "4.1")
        depth_stderr = line[38:40]
        if depth_stderr == "FX" or depth_stderr == "BD":
            depth_stderr = "Fixed"
        else:
            depth_stderr = self._floatWithFormat(depth_stderr, "2.1", scale)
        station_number = self._intZero(line[40:43])
        component_number = self._intZero(line[43:46])
        station_number2 = self._intZero(line[46:48])
        component_number2 = self._intZero(line[48:51])
        # unused: half_duration = self._floatWithFormat(line[51:54], '3.1')
        moment = self._floatWithFormat(line[54:56], "2.1")
        moment_stderr = self._floatWithFormat(line[56:58], "2.1")
        moment_exponent = self._int(line[58:60])
        if (moment is not None) and (moment_exponent is not None):
            moment *= math.pow(10, moment_exponent)
        if (moment_stderr is not None) and (moment_exponent is not None):
            moment_stderr *= math.pow(10, moment_exponent)

        evid = event.resource_id.id.split("/")[-1]
        # Create a new origin only if centroid time is defined:
        origin = None
        if centroid_origin_time.strip() != ".":
            origin = Origin()
            res_id = "/".join(
                (res_id_prefix, "origin", evid, source_contributor.lower(), "mw" + computation_type.lower())
            )
            origin.resource_id = ResourceIdentifier(id=res_id)
            origin.creation_info = CreationInfo(agency_id=source_contributor)
            date = event.origins[0].time.strftime("%Y%m%d")
            origin.time = UTCDateTime(date + centroid_origin_time)
            # Check if centroid time is on the next day:
            if origin.time < event.origins[0].time:
                origin.time += timedelta(days=1)
            self._storeUncertainty(origin.time_errors, orig_time_stderr)
            origin.latitude = centroid_latitude
            origin.longitude = centroid_longitude
            origin.depth = centroid_depth * 1000
            if lat_stderr == "Fixed" and lon_stderr == "Fixed":
                origin.epicenter_fixed = True
            else:
                self._storeUncertainty(origin.latitude_errors, self._latErrToDeg(lat_stderr))
                self._storeUncertainty(origin.longitude_errors, self._lonErrToDeg(lon_stderr, origin.latitude))
            if depth_stderr == "Fixed":
                origin.depth_type = "operator assigned"
            else:
                origin.depth_type = "from location"
                self._storeUncertainty(origin.depth_errors, depth_stderr, scale=1000)
            quality = OriginQuality()
            quality.used_station_count = station_number + station_number2
            quality.used_phase_count = component_number + component_number2
            origin.quality = quality
            origin.type = "centroid"
            event.origins.append(origin)
        focal_mechanism = FocalMechanism()
        res_id = "/".join(
            (res_id_prefix, "focalmechanism", evid, source_contributor.lower(), "mw" + computation_type.lower())
        )
        focal_mechanism.resource_id = ResourceIdentifier(id=res_id)
        focal_mechanism.creation_info = CreationInfo(agency_id=source_contributor)
        moment_tensor = MomentTensor()
        if origin is not None:
            moment_tensor.derived_origin_id = origin.resource_id
        else:
            # this is required for QuakeML validation:
            res_id = "/".join((res_id_prefix, "no-origin"))
            moment_tensor.derived_origin_id = ResourceIdentifier(id=res_id)
        for mag in event.magnitudes:
            if mag.creation_info.agency_id == source_contributor:
                moment_tensor.moment_magnitude_id = mag.resource_id
        res_id = "/".join(
            (res_id_prefix, "momenttensor", evid, source_contributor.lower(), "mw" + computation_type.lower())
        )
        moment_tensor.resource_id = ResourceIdentifier(id=res_id)
        moment_tensor.scalar_moment = moment
        self._storeUncertainty(moment_tensor.scalar_moment_errors, moment_stderr)
        data_used = DataUsed()
        data_used.station_count = station_number + station_number2
        data_used.component_count = component_number + component_number2
        if computation_type == "C":
            res_id = "/".join((res_id_prefix, "methodID=CMT"))
            focal_mechanism.method_id = ResourceIdentifier(id=res_id)
            # CMT algorithm uses long-period body waves,
            # very-long-period surface waves and
            # intermediate period surface waves (since 2004
            # for shallow and intermediate-depth earthquakes
            # --Ekstrom et al., 2012)
            data_used.wave_type = "combined"
        if computation_type == "M":
            res_id = "/".join((res_id_prefix, "methodID=moment_tensor"))
            focal_mechanism.method_id = ResourceIdentifier(id=res_id)
            # FIXME: not sure which kind of data is used by
            # "moment tensor" algorithm.
            data_used.wave_type = "unknown"
        elif computation_type == "B":
            res_id = "/".join((res_id_prefix, "methodID=broadband_data"))
            focal_mechanism.method_id = ResourceIdentifier(id=res_id)
            # FIXME: is 'combined' correct here?
            data_used.wave_type = "combined"
        elif computation_type == "F":
            res_id = "/".join((res_id_prefix, "methodID=P-wave_first_motion"))
            focal_mechanism.method_id = ResourceIdentifier(id=res_id)
            data_used.wave_type = "P waves"
        elif computation_type == "S":
            res_id = "/".join((res_id_prefix, "methodID=scalar_moment"))
            focal_mechanism.method_id = ResourceIdentifier(id=res_id)
            # FIXME: not sure which kind of data is used
            # for scalar moment determination.
            data_used.wave_type = "unknown"
        moment_tensor.data_used = data_used
        focal_mechanism.moment_tensor = moment_tensor
        event.focal_mechanisms.append(focal_mechanism)
        return focal_mechanism
Example #9
def write_qml(config, sourcepar):
    if not config.options.qml_file:
        return
    qml_file = config.options.qml_file
    cat = read_events(qml_file)
    evid = config.hypo.evid
    try:
        ev = [e for e in cat if evid in str(e.resource_id)][0]
    except Exception:
        logging.warning('Unable to find evid "{}" in QuakeML file. '
                        'QuakeML output will not be written.'.format(evid))
        return

    origin = ev.preferred_origin()
    if origin is None:
        origin = ev.origins[0]
    origin_id = origin.resource_id
    origin_id_strip = origin_id.id.split('/')[-1]
    origin_id_strip = origin_id_strip.replace(config.smi_strip_from_origin_id,
                                              '')

    # Common parameters
    ssp_version = get_versions()['version']
    method_id = config.smi_base + '/sourcespec/' + ssp_version
    cr_info = CreationInfo()
    cr_info.agency_id = config.agency_id
    if config.author is None:
        author = '{}@{}'.format(getuser(), gethostname())
    else:
        author = config.author
    cr_info.author = author
    cr_info.creation_time = UTCDateTime()

    means = sourcepar.means_weight
    errors = sourcepar.errors_weight
    stationpar = sourcepar.station_parameters

    # Magnitude
    mag = Magnitude()
    _id = config.smi_magnitude_template.replace('$SMI_BASE', config.smi_base)
    _id = _id.replace('$ORIGIN_ID', origin_id_strip)
    mag.resource_id = ResourceIdentifier(id=_id)
    mag.method_id = ResourceIdentifier(id=method_id)
    mag.origin_id = origin_id
    mag.magnitude_type = 'Mw'
    mag.mag = means['Mw']
    mag_err = QuantityError()
    mag_err.uncertainty = errors['Mw']
    mag_err.confidence_level = 68.2
    mag.mag_errors = mag_err
    mag.station_count = len(stationpar)
    mag.evaluation_mode = 'automatic'
    mag.creation_info = cr_info

    # Seismic moment -- It has to be stored in a MomentTensor object
    # which, in turn, is part of a FocalMechanism object
    mt = MomentTensor()
    _id = config.smi_moment_tensor_template.replace('$SMI_BASE',
                                                    config.smi_base)
    _id = _id.replace('$ORIGIN_ID', origin_id_strip)
    mt.resource_id = ResourceIdentifier(id=_id)
    mt.derived_origin_id = origin_id
    mt.moment_magnitude_id = mag.resource_id
    mt.scalar_moment = means['Mo']
    mt_err = QuantityError()
    mt_err.lower_uncertainty = errors['Mo'][0]
    mt_err.upper_uncertainty = errors['Mo'][1]
    mt_err.confidence_level = 68.2
    mt.scalar_moment_errors = mt_err
    mt.method_id = method_id
    mt.creation_info = cr_info
    # And here is the FocalMechanism object
    fm = FocalMechanism()
    _id = config.smi_focal_mechanism_template.replace('$SMI_BASE',
                                                      config.smi_base)
    _id = _id.replace('$ORIGIN_ID', origin_id_strip)
    fm.resource_id = ResourceIdentifier(id=_id)
    fm.triggering_origin_id = origin_id
    fm.method_id = ResourceIdentifier(id=method_id)
    fm.moment_tensor = mt
    fm.creation_info = cr_info
    ev.focal_mechanisms.append(fm)

    # Station magnitudes
    for statId in sorted(stationpar.keys()):
        par = stationpar[statId]
        st_mag = StationMagnitude()
        seed_id = statId.split()[0]
        _id = config.smi_station_magnitude_template.replace(
            '$SMI_MAGNITUDE_TEMPLATE', config.smi_magnitude_template)
        _id = _id.replace('$ORIGIN_ID', origin_id_strip)
        _id = _id.replace('$SMI_BASE', config.smi_base)
        _id = _id.replace('$WAVEFORM_ID', seed_id)
        st_mag.resource_id = ResourceIdentifier(id=_id)
        st_mag.origin_id = origin_id
        st_mag.mag = par['Mw']
        st_mag.station_magnitude_type = 'Mw'
        st_mag.method_id = mag.method_id
        st_mag.creation_info = cr_info
        st_mag.waveform_id = WaveformStreamID(seed_string=seed_id)
        st_mag.extra = SSPExtra()
        st_mag.extra.moment = SSPTag(par['Mo'])
        st_mag.extra.corner_frequency = SSPTag(par['fc'])
        st_mag.extra.t_star = SSPTag(par['t_star'])
        ev.station_magnitudes.append(st_mag)
        st_mag_contrib = StationMagnitudeContribution()
        st_mag_contrib.station_magnitude_id = st_mag.resource_id
        mag.station_magnitude_contributions.append(st_mag_contrib)
    ev.magnitudes.append(mag)

    # Write other average parameters as custom tags
    ev.extra = SSPExtra()
    ev.extra.corner_frequency = SSPContainerTag()
    ev.extra.corner_frequency.value.value = SSPTag(means['fc'])
    ev.extra.corner_frequency.value.lower_uncertainty =\
        SSPTag(errors['fc'][0])
    ev.extra.corner_frequency.value.upper_uncertainty =\
        SSPTag(errors['fc'][1])
    ev.extra.corner_frequency.value.confidence_level = SSPTag(68.2)
    ev.extra.t_star = SSPContainerTag()
    ev.extra.t_star.value.value = SSPTag(means['t_star'])
    ev.extra.t_star.value.uncertainty = SSPTag(errors['t_star'])
    ev.extra.t_star.value.confidence_level = SSPTag(68.2)
    ev.extra.source_radius = SSPContainerTag()
    ev.extra.source_radius.value.value = SSPTag(means['ra'])
    ev.extra.source_radius.value.lower_uncertainty =\
        SSPTag(errors['ra'][0])
    ev.extra.source_radius.value.upper_uncertainty =\
        SSPTag(errors['ra'][1])
    ev.extra.source_radius.value.confidence_level = SSPTag(68.2)
    ev.extra.stress_drop = SSPContainerTag()
    ev.extra.stress_drop.value.value = SSPTag(means['bsd'])
    ev.extra.stress_drop.value.lower_uncertainty =\
        SSPTag(errors['bsd'][0])
    ev.extra.stress_drop.value.upper_uncertainty =\
        SSPTag(errors['bsd'][1])
    ev.extra.stress_drop.value.confidence_level = SSPTag(68.2)

    if config.set_preferred_magnitude:
        ev.preferred_magnitude_id = mag.resource_id.id

    qml_file_out = os.path.join(config.options.outdir, evid + '.xml')
    ev.write(qml_file_out, format='QUAKEML')
    logging.info('QuakeML file written to: ' + qml_file_out)
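
The resource ids above are built from templates in which the `$SMI_BASE` and `$ORIGIN_ID` placeholders are substituted. A standalone sketch of that substitution, with a hypothetical template and values:

smi_base = 'smi:local/sourcespec'            # hypothetical base
template = '$SMI_BASE/magnitude/$ORIGIN_ID'  # hypothetical template string
mag_id = template.replace('$SMI_BASE', smi_base).replace('$ORIGIN_ID', 'abc2017xyz')
# -> 'smi:local/sourcespec/magnitude/abc2017xyz'
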