def attach_new_origin(
    old_event: Event,
    new_event: Event,
    new_origin: Origin,
    preferred: bool,
    index: Optional[int] = None,
) -> Event:
    """
    Attach a new origin to an existing event object.

    Parameters
    ----------
    old_event : obspy.core.event.Event
        The old event that will receive the new origin
    new_event : obspy.core.event.Event
        The new event that contains the origin, needed for merging picks
        that may not exist in old_event
    new_origin : obspy.core.event.Origin
        The new origin that will be attached to old_event
    preferred : bool
        If True mark the new origin as the preferred_origin
    index : int or None
        The index in old_event.origins that new_origin will overwrite,
        if None append new_origin to old_event.origins

    Returns
    -------
    obspy.core.event.Event
        Modifies old_event in-place and returns it.
    """
    # make sure all the picks/amplitudes in new_event are also in old_event
    merge_events(old_event, new_event, delete_old=False)
    # point the arrivals in the new origin at the old picks
    _associate_picks(old_event, new_event, new_origin)
    # append or replace the origin
    if index is not None:  # this origin is to replace another
        try:
            old_ori = old_event.origins[index]
        except IndexError:
            msg = "%d is not valid for an origin list of length %d" % (
                index,
                len(old_event.origins),
            )
            msg += ", appending new origin to end of list"
            warnings.warn(msg)
            old_event.origins.append(new_origin)
        else:
            # keep resource id and creation info of the replaced origin
            new_origin.resource_id = old_ori.resource_id
            new_origin.creation_info = old_ori.creation_info
            old_event.origins[index] = new_origin
    else:
        old_event.origins.append(new_origin)
    # bump origin creation info
    bump_creation_version(new_origin)
    # set preferred
    if preferred:
        old_event.preferred_origin_id = new_origin.resource_id
    validate_catalog(old_event)
    return old_event
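A minimal usage sketch for `attach_new_origin`, assuming the helpers it calls (`merge_events`, `_associate_picks`, `bump_creation_version`, `validate_catalog`) are available in the same module and that both events describe the same earthquake; file names are hypothetical:

```python
from obspy import read_events

old_event = read_events("original_quakeml.xml")[0]    # hypothetical files
new_event = read_events("relocated_quakeml.xml")[0]
new_origin = new_event.origins[0]

# replace the first origin of old_event and mark the new origin as preferred
attach_new_origin(old_event, new_event, new_origin, preferred=True, index=0)
print(old_event.preferred_origin())
```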
def _parseRecordAH(self, line, event):
    """
    Parses the 'additional hypocenter' record AH
    """
    date = line[2:10]
    time = line[11:20]
    # unused: hypocenter_quality = line[20]
    latitude = self._float(line[21:27])
    lat_type = line[27]
    longitude = self._float(line[29:36])
    lon_type = line[36]
    # unused: preliminary_flag = line[37]
    depth = self._float(line[38:43])
    # unused: depth_quality = line[43]
    standard_dev = self._floatUnused(line[44:48])
    station_number = self._intUnused(line[48:51])
    phase_number = self._intUnused(line[51:55])
    source_code = line[56:60].strip()

    evid = event.resource_id.id.split('/')[-1]
    origin = Origin()
    res_id = '/'.join((res_id_prefix, 'origin', evid, source_code.lower()))
    origin.resource_id = ResourceIdentifier(id=res_id)
    origin.creation_info = CreationInfo(agency_id=source_code)
    origin.time = UTCDateTime(date + time)
    origin.latitude = latitude * self._coordinateSign(lat_type)
    origin.longitude = longitude * self._coordinateSign(lon_type)
    origin.depth = depth * 1000
    origin.depth_type = 'from location'
    origin.quality = OriginQuality()
    origin.quality.standard_error = standard_dev
    origin.quality.used_station_count = station_number
    origin.quality.used_phase_count = phase_number
    origin.type = 'hypocenter'
    event.origins.append(origin)
def _parse_record_ah(self, line, event):
    """
    Parses the 'additional hypocenter' record AH
    """
    date = line[2:10]
    time = line[11:20]
    # unused: hypocenter_quality = line[20]
    latitude = self._float(line[21:27])
    lat_type = line[27]
    longitude = self._float(line[29:36])
    lon_type = line[36]
    # unused: preliminary_flag = line[37]
    depth = self._float(line[38:43])
    # unused: depth_quality = line[43]
    standard_dev = self._float_unused(line[44:48])
    station_number = self._int_unused(line[48:51])
    phase_number = self._int_unused(line[51:55])
    source_code = line[56:60].strip()

    evid = event.resource_id.id.split('/')[-1]
    origin = Origin()
    res_id = '/'.join((res_id_prefix, 'origin', evid, source_code.lower()))
    origin.resource_id = ResourceIdentifier(id=res_id)
    origin.creation_info = CreationInfo(agency_id=source_code)
    origin.time = UTCDateTime(date + time)
    origin.latitude = latitude * self._coordinate_sign(lat_type)
    origin.longitude = longitude * self._coordinate_sign(lon_type)
    origin.depth = depth * 1000
    origin.depth_type = 'from location'
    origin.quality = OriginQuality()
    origin.quality.standard_error = standard_dev
    origin.quality.used_station_count = station_number
    origin.quality.used_phase_count = phase_number
    origin.origin_type = 'hypocenter'
    event.origins.append(origin)
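For reference, a sketch of the sign convention the `_coordinateSign` / `_coordinate_sign` helper used by the parsers above and below is assumed to implement; the actual helper on the parser class may differ:

```python
def _coordinate_sign(type_char):
    # 'S' (south) and 'W' (west) flip the sign; 'N'/'E' (or blank) keep it
    return -1 if type_char in ('S', 'W') else 1
```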
def _parse_record_hy(self, line):
    """
    Parses the 'hypocenter' record HY
    """
    date = line[2:10]
    time = line[11:20]
    # unused: location_quality = line[20]
    latitude = self._float(line[21:27])
    lat_type = line[27]
    longitude = self._float(line[29:36])
    lon_type = line[36]
    depth = self._float(line[38:43])
    # unused: depth_quality = line[43]
    standard_dev = self._float(line[44:48])
    station_number = self._int(line[48:51])
    # unused: version_flag = line[51]
    fe_region_number = line[52:55]
    fe_region_name = self._decode_fe_region_number(fe_region_number)
    source_code = line[55:60].strip()

    event = Event()
    # FIXME: a smarter way to define evid?
    evid = date + time
    res_id = '/'.join((res_id_prefix, 'event', evid))
    event.resource_id = ResourceIdentifier(id=res_id)
    description = EventDescription(type='region name',
                                   text=fe_region_name)
    event.event_descriptions.append(description)
    description = EventDescription(type='Flinn-Engdahl region',
                                   text=fe_region_number)
    event.event_descriptions.append(description)
    origin = Origin()
    res_id = '/'.join((res_id_prefix, 'origin', evid))
    origin.resource_id = ResourceIdentifier(id=res_id)
    origin.creation_info = CreationInfo()
    if source_code:
        origin.creation_info.agency_id = source_code
    else:
        origin.creation_info.agency_id = 'USGS-NEIC'
    res_id = '/'.join((res_id_prefix, 'earthmodel/ak135'))
    origin.earth_model_id = ResourceIdentifier(id=res_id)
    origin.time = UTCDateTime(date + time)
    origin.latitude = latitude * self._coordinate_sign(lat_type)
    origin.longitude = longitude * self._coordinate_sign(lon_type)
    origin.depth = depth * 1000
    origin.depth_type = 'from location'
    origin.quality = OriginQuality()
    origin.quality.associated_station_count = station_number
    origin.quality.standard_error = standard_dev
    # associated_phase_count can be incremented in records 'P ' and 'S '
    origin.quality.associated_phase_count = 0
    # depth_phase_count can be incremented in record 'S '
    origin.quality.depth_phase_count = 0
    origin.origin_type = 'hypocenter'
    origin.region = fe_region_name
    event.origins.append(origin)
    return event
def _parseRecordDp(self, line, event): """ Parses the 'source parameter data - primary' record Dp """ source_contributor = line[2:6].strip() computation_type = line[6] exponent = self._intZero(line[7]) scale = math.pow(10, exponent) centroid_origin_time = line[8:14] + '.' + line[14] orig_time_stderr = line[15:17] if orig_time_stderr == 'FX': orig_time_stderr = 'Fixed' else: orig_time_stderr =\ self._floatWithFormat(orig_time_stderr, '2.1', scale) centroid_latitude = self._floatWithFormat(line[17:21], '4.2') lat_type = line[21] if centroid_latitude is not None: centroid_latitude *= self._coordinateSign(lat_type) lat_stderr = line[22:25] if lat_stderr == 'FX': lat_stderr = 'Fixed' else: lat_stderr = self._floatWithFormat(lat_stderr, '3.2', scale) centroid_longitude = self._floatWithFormat(line[25:30], '5.2') lon_type = line[30] if centroid_longitude is not None: centroid_longitude *= self._coordinateSign(lon_type) lon_stderr = line[31:34] if lon_stderr == 'FX': lon_stderr = 'Fixed' else: lon_stderr = self._floatWithFormat(lon_stderr, '3.2', scale) centroid_depth = self._floatWithFormat(line[34:38], '4.1') depth_stderr = line[38:40] if depth_stderr == 'FX' or depth_stderr == 'BD': depth_stderr = 'Fixed' else: depth_stderr = self._floatWithFormat(depth_stderr, '2.1', scale) station_number = self._intZero(line[40:43]) component_number = self._intZero(line[43:46]) station_number2 = self._intZero(line[46:48]) component_number2 = self._intZero(line[48:51]) #unused: half_duration = self._floatWithFormat(line[51:54], '3.1') moment = self._floatWithFormat(line[54:56], '2.1') moment_stderr = self._floatWithFormat(line[56:58], '2.1') moment_exponent = self._int(line[58:60]) if (moment is not None) and (moment_exponent is not None): moment *= math.pow(10, moment_exponent) if (moment_stderr is not None) and (moment_exponent is not None): moment_stderr *= math.pow(10, moment_exponent) evid = event.resource_id.id.split('/')[-1] #Create a new origin only if centroid time is defined: origin = None if centroid_origin_time.strip() != '.': origin = Origin() res_id = '/'.join( (res_id_prefix, 'origin', evid, source_contributor.lower(), 'mw' + computation_type.lower())) origin.resource_id = ResourceIdentifier(id=res_id) origin.creation_info =\ CreationInfo(agency_id=source_contributor) date = event.origins[0].time.strftime('%Y%m%d') origin.time = UTCDateTime(date + centroid_origin_time) #Check if centroid time is on the next day: if origin.time < event.origins[0].time: origin.time += timedelta(days=1) self._storeUncertainty(origin.time_errors, orig_time_stderr) origin.latitude = centroid_latitude origin.longitude = centroid_longitude origin.depth = centroid_depth * 1000 if lat_stderr == 'Fixed' and lon_stderr == 'Fixed': origin.epicenter_fixed = True else: self._storeUncertainty(origin.latitude_errors, self._latErrToDeg(lat_stderr)) self._storeUncertainty( origin.longitude_errors, self._lonErrToDeg(lon_stderr, origin.latitude)) if depth_stderr == 'Fixed': origin.depth_type = 'operator assigned' else: origin.depth_type = 'from location' self._storeUncertainty(origin.depth_errors, depth_stderr, scale=1000) quality = OriginQuality() quality.used_station_count =\ station_number + station_number2 quality.used_phase_count =\ component_number + component_number2 origin.quality = quality origin.type = 'centroid' event.origins.append(origin) focal_mechanism = FocalMechanism() res_id = '/'.join( (res_id_prefix, 'focalmechanism', evid, source_contributor.lower(), 'mw' + computation_type.lower())) focal_mechanism.resource_id 
= ResourceIdentifier(id=res_id) focal_mechanism.creation_info =\ CreationInfo(agency_id=source_contributor) moment_tensor = MomentTensor() if origin is not None: moment_tensor.derived_origin_id = origin.resource_id else: #this is required for QuakeML validation: res_id = '/'.join((res_id_prefix, 'no-origin')) moment_tensor.derived_origin_id =\ ResourceIdentifier(id=res_id) for mag in event.magnitudes: if mag.creation_info.agency_id == source_contributor: moment_tensor.moment_magnitude_id = mag.resource_id res_id = '/'.join( (res_id_prefix, 'momenttensor', evid, source_contributor.lower(), 'mw' + computation_type.lower())) moment_tensor.resource_id = ResourceIdentifier(id=res_id) moment_tensor.scalar_moment = moment self._storeUncertainty(moment_tensor.scalar_moment_errors, moment_stderr) data_used = DataUsed() data_used.station_count = station_number + station_number2 data_used.component_count = component_number + component_number2 if computation_type == 'C': res_id = '/'.join((res_id_prefix, 'methodID=CMT')) focal_mechanism.method_id = ResourceIdentifier(id=res_id) #CMT algorithm uses long-period body waves, #very-long-period surface waves and #intermediate period surface waves (since 2004 #for shallow and intermediate-depth earthquakes # --Ekstrom et al., 2012) data_used.wave_type = 'combined' if computation_type == 'M': res_id = '/'.join((res_id_prefix, 'methodID=moment_tensor')) focal_mechanism.method_id = ResourceIdentifier(id=res_id) #FIXME: not sure which kind of data is used by #"moment tensor" algorithm. data_used.wave_type = 'unknown' elif computation_type == 'B': res_id = '/'.join((res_id_prefix, 'methodID=broadband_data')) focal_mechanism.method_id = ResourceIdentifier(id=res_id) #FIXME: is 'combined' correct here? data_used.wave_type = 'combined' elif computation_type == 'F': res_id = '/'.join((res_id_prefix, 'methodID=P-wave_first_motion')) focal_mechanism.method_id = ResourceIdentifier(id=res_id) data_used.wave_type = 'P waves' elif computation_type == 'S': res_id = '/'.join((res_id_prefix, 'methodID=scalar_moment')) focal_mechanism.method_id = ResourceIdentifier(id=res_id) #FIXME: not sure which kind of data is used #for scalar moment determination. data_used.wave_type = 'unknown' moment_tensor.data_used = data_used focal_mechanism.moment_tensor = moment_tensor event.focal_mechanisms.append(focal_mechanism) return focal_mechanism
def _parse_first_line_origin(self, line, event, magnitudes): """ Parse the first line of origin data. :type line: str :param line: Line to parse. :type event: :class:`~obspy.core.event.event.Event` :param event: Event of the origin. :type magnitudes: list of :class:`~obspy.core.event.magnitude.Magnitude` :param magnitudes: Store magnitudes in a list to keep their positions. :rtype: :class:`~obspy.core.event.origin.Origin`, :class:`~obspy.core.event.resourceid.ResourceIdentifier` :returns: Parsed origin or None, resource identifier of the origin. """ magnitude_types = [] magnitude_values = [] magnitude_station_counts = [] fields = self.fields['line_1'] time_origin = line[fields['time']].strip() time_fixed_flag = line[fields['time_fixf']].strip() latitude = line[fields['lat']].strip() longitude = line[fields['lon']].strip() epicenter_fixed_flag = line[fields['epicenter_fixf']].strip() depth = line[fields['depth']].strip() depth_fixed_flag = line[fields['depth_fixf']].strip() phase_count = line[fields['n_def']].strip() station_count = line[fields['n_sta']].strip() azimuthal_gap = line[fields['gap']].strip() magnitude_types.append(line[fields['mag_type_1']].strip()) magnitude_values.append(line[fields['mag_1']].strip()) magnitude_station_counts.append(line[fields['mag_n_sta_1']].strip()) magnitude_types.append(line[fields['mag_type_2']].strip()) magnitude_values.append(line[fields['mag_2']].strip()) magnitude_station_counts.append(line[fields['mag_n_sta_2']].strip()) magnitude_types.append(line[fields['mag_type_3']].strip()) magnitude_values.append(line[fields['mag_3']].strip()) magnitude_station_counts.append(line[fields['mag_n_sta_3']].strip()) author = line[fields['author']].strip() origin_id = line[fields['id']].strip() origin = Origin() origin.quality = OriginQuality() try: origin.time = UTCDateTime(time_origin.replace('/', '-')) origin.latitude = float(latitude) origin.longitude = float(longitude) except (TypeError, ValueError): self._warn('Missing origin data, skipping event') return None, None origin.time_fixed = time_fixed_flag.lower() == 'f' origin.epicenter_fixed = epicenter_fixed_flag.lower() == 'f' try: # Convert value from km to m origin.depth = float(depth) * 1000 except ValueError: pass try: origin.depth_type = DEPTH_TYPES[depth_fixed_flag] except KeyError: origin.depth_type = OriginDepthType('from location') try: origin.quality.used_phase_count = int(phase_count) origin.quality.associated_phase_count = int(phase_count) except ValueError: pass try: origin.quality.used_station_count = int(station_count) origin.quality.associated_station_count = int(station_count) except ValueError: pass try: origin.quality.azimuthal_gap = float(azimuthal_gap) except ValueError: pass self.author = author origin.creation_info = self._get_creation_info() public_id = "origin/%s" % origin_id origin_res_id = self._get_res_id(public_id) for i in range(3): try: magnitude = Magnitude() magnitude.creation_info = self._get_creation_info() magnitude.magnitude_type = magnitude_types[i] magnitude.mag = float(magnitude_values[i]) magnitude.station_count = int(magnitude_station_counts[i]) magnitude.origin_id = origin_res_id magnitudes.append(magnitude) event.magnitudes.append(magnitude) except ValueError: # Magnitude can be empty but we need to keep the # position between mag1, mag2 or mag3. magnitudes.append(None) return origin, origin_res_id
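The parser above indexes the line through `self.fields['line_1']`, which is presumably a mapping of field names to `slice` objects covering the fixed-width columns of the bulletin's first origin line. A purely illustrative sketch follows; the slice positions are invented, not the real column layout:

```python
# Hypothetical column layout -- real slice values depend on the bulletin format
fields = {
    'line_1': {
        'time': slice(0, 22), 'time_fixf': slice(22, 23),
        'lat': slice(24, 33), 'lon': slice(34, 44),
        'epicenter_fixf': slice(44, 45),
        'depth': slice(46, 51), 'depth_fixf': slice(51, 52),
        'n_def': slice(53, 57), 'n_sta': slice(58, 62), 'gap': slice(63, 66),
        'mag_type_1': slice(67, 69), 'mag_1': slice(70, 74),
        'mag_n_sta_1': slice(75, 79),
        'mag_type_2': slice(80, 82), 'mag_2': slice(83, 87),
        'mag_n_sta_2': slice(88, 92),
        'mag_type_3': slice(93, 95), 'mag_3': slice(96, 100),
        'mag_n_sta_3': slice(101, 105),
        'author': slice(106, 115), 'id': slice(116, 124),
    },
}
```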
def read_nlloc_hyp(filename, coordinate_converter=None, picks=None, **kwargs): """ Reads a NonLinLoc Hypocenter-Phase file to a :class:`~obspy.core.event.Catalog` object. .. note:: Coordinate conversion from coordinate frame of NonLinLoc model files / location run to WGS84 has to be specified explicitly by the user if necessary. .. note:: An example can be found on the :mod:`~obspy.nlloc` submodule front page in the documentation pages. :param filename: File or file-like object in text mode. :type coordinate_converter: func :param coordinate_converter: Function to convert (x, y, z) coordinates of NonLinLoc output to geographical coordinates and depth in meters (longitude, latitude, depth in kilometers). If left `None` NonLinLoc (x, y, z) output is left unchanged (e.g. if it is in geographical coordinates already like for NonLinLoc in global mode). The function should accept three arguments x, y, z and return a tuple of three values (lon, lat, depth in kilometers). :type picks: list of :class:`~obspy.core.event.Pick` :param picks: Original picks used to generate the NonLinLoc location. If provided, the output event will include the original picks and the arrivals in the output origin will link to them correctly (with their `pick_id` attribute). If not provided, the output event will include (the rather basic) pick information that can be reconstructed from the NonLinLoc hypocenter-phase file. :rtype: :class:`~obspy.core.event.Catalog` """ if not hasattr(filename, "read"): # Check if it exists, otherwise assume its a string. try: with open(filename, "rt") as fh: data = fh.read() except: try: data = filename.decode() except: data = str(filename) data = data.strip() else: data = filename.read() if hasattr(data, "decode"): data = data.decode() lines = data.splitlines() # remember picks originally used in location, if provided original_picks = picks if original_picks is None: original_picks = [] # determine indices of block start/end of the NLLOC output file indices_hyp = [None, None] indices_phases = [None, None] for i, line in enumerate(lines): if line.startswith("NLLOC "): indices_hyp[0] = i elif line.startswith("END_NLLOC"): indices_hyp[1] = i elif line.startswith("PHASE "): indices_phases[0] = i elif line.startswith("END_PHASE"): indices_phases[1] = i if any([i is None for i in indices_hyp]): msg = ("NLLOC HYP file seems corrupt," " could not detect 'NLLOC' and 'END_NLLOC' lines.") raise RuntimeError(msg) # strip any other lines around NLLOC block lines = lines[indices_hyp[0]:indices_hyp[1]] # extract PHASES lines (if any) if any(indices_phases): if not all(indices_phases): msg = ("NLLOC HYP file seems corrupt, 'PHASE' block is corrupt.") raise RuntimeError(msg) i1, i2 = indices_phases lines, phases_lines = lines[:i1] + lines[i2 + 1:], lines[i1 + 1:i2] else: phases_lines = [] lines = dict([line.split(None, 1) for line in lines]) line = lines["SIGNATURE"] line = line.rstrip().split('"')[1] signature, version, date, time = line.rsplit(" ", 3) creation_time = UTCDateTime().strptime(date + time, str("%d%b%Y%Hh%Mm%S")) # maximum likelihood origin location info line line = lines["HYPOCENTER"] x, y, z = map(float, line.split()[1:7:2]) if coordinate_converter: x, y, z = coordinate_converter(x, y, z) # origin time info line line = lines["GEOGRAPHIC"] year, month, day, hour, minute = map(int, line.split()[1:6]) seconds = float(line.split()[6]) time = UTCDateTime(year, month, day, hour, minute, seconds) # distribution statistics line line = lines["STATISTICS"] covariance_XX = float(line.split()[7]) 
covariance_YY = float(line.split()[13]) covariance_ZZ = float(line.split()[17]) stats_info_string = str( "Note: Depth/Latitude/Longitude errors are calculated from covariance " "matrix as 1D marginal (Lon/Lat errors as great circle degrees) " "while OriginUncertainty min/max horizontal errors are calculated " "from 2D error ellipsoid and are therefore seemingly higher compared " "to 1D errors. Error estimates can be reconstructed from the " "following original NonLinLoc error statistics line:\nSTATISTICS " + lines["STATISTICS"]) # goto location quality info line line = lines["QML_OriginQuality"].split() (assoc_phase_count, used_phase_count, assoc_station_count, used_station_count, depth_phase_count) = map(int, line[1:11:2]) stderr, az_gap, sec_az_gap = map(float, line[11:17:2]) gt_level = line[17] min_dist, max_dist, med_dist = map(float, line[19:25:2]) # goto location quality info line line = lines["QML_OriginUncertainty"] hor_unc, min_hor_unc, max_hor_unc, hor_unc_azim = \ map(float, line.split()[1:9:2]) # assign origin info event = Event() cat = Catalog(events=[event]) o = Origin() event.origins = [o] o.origin_uncertainty = OriginUncertainty() o.quality = OriginQuality() ou = o.origin_uncertainty oq = o.quality o.comments.append(Comment(text=stats_info_string)) cat.creation_info.creation_time = UTCDateTime() cat.creation_info.version = "ObsPy %s" % __version__ event.creation_info = CreationInfo(creation_time=creation_time, version=version) event.creation_info.version = version o.creation_info = CreationInfo(creation_time=creation_time, version=version) # negative values can appear on diagonal of covariance matrix due to a # precision problem in NLLoc implementation when location coordinates are # large compared to the covariances. o.longitude = x try: o.longitude_errors.uncertainty = kilometer2degrees(sqrt(covariance_XX)) except ValueError: if covariance_XX < 0: msg = ("Negative value in XX value of covariance matrix, not " "setting longitude error (epicentral uncertainties will " "still be set in origin uncertainty).") warnings.warn(msg) else: raise o.latitude = y try: o.latitude_errors.uncertainty = kilometer2degrees(sqrt(covariance_YY)) except ValueError: if covariance_YY < 0: msg = ("Negative value in YY value of covariance matrix, not " "setting longitude error (epicentral uncertainties will " "still be set in origin uncertainty).") warnings.warn(msg) else: raise o.depth = z * 1e3 # meters! o.depth_errors.uncertainty = sqrt(covariance_ZZ) * 1e3 # meters! o.depth_errors.confidence_level = 68 o.depth_type = str("from location") o.time = time ou.horizontal_uncertainty = hor_unc ou.min_horizontal_uncertainty = min_hor_unc ou.max_horizontal_uncertainty = max_hor_unc # values of -1 seem to be used for unset values, set to None for field in ("horizontal_uncertainty", "min_horizontal_uncertainty", "max_horizontal_uncertainty"): if ou.get(field, -1) == -1: ou[field] = None else: ou[field] *= 1e3 # meters! 
ou.azimuth_max_horizontal_uncertainty = hor_unc_azim ou.preferred_description = str("uncertainty ellipse") ou.confidence_level = 68 # NonLinLoc in general uses 1-sigma (68%) level oq.standard_error = stderr oq.azimuthal_gap = az_gap oq.secondary_azimuthal_gap = sec_az_gap oq.used_phase_count = used_phase_count oq.used_station_count = used_station_count oq.associated_phase_count = assoc_phase_count oq.associated_station_count = assoc_station_count oq.depth_phase_count = depth_phase_count oq.ground_truth_level = gt_level oq.minimum_distance = kilometer2degrees(min_dist) oq.maximum_distance = kilometer2degrees(max_dist) oq.median_distance = kilometer2degrees(med_dist) # go through all phase info lines for line in phases_lines: line = line.split() arrival = Arrival() o.arrivals.append(arrival) station = str(line[0]) phase = str(line[4]) arrival.phase = phase arrival.distance = kilometer2degrees(float(line[21])) arrival.azimuth = float(line[23]) arrival.takeoff_angle = float(line[24]) arrival.time_residual = float(line[16]) arrival.time_weight = float(line[17]) pick = Pick() wid = WaveformStreamID(station_code=station) date, hourmin, sec = map(str, line[6:9]) t = UTCDateTime().strptime(date + hourmin, "%Y%m%d%H%M") + float(sec) pick.waveform_id = wid pick.time = t pick.time_errors.uncertainty = float(line[10]) pick.phase_hint = phase pick.onset = ONSETS.get(line[3].lower(), None) pick.polarity = POLARITIES.get(line[5].lower(), None) # try to determine original pick for each arrival for pick_ in original_picks: wid = pick_.waveform_id if station == wid.station_code and phase == pick_.phase_hint: pick = pick_ break else: # warn if original picks were specified and we could not associate # the arrival correctly if original_picks: msg = ("Could not determine corresponding original pick for " "arrival. " "Falling back to pick information in NonLinLoc " "hypocenter-phase file.") warnings.warn(msg) event.picks.append(pick) arrival.pick_id = pick.resource_id return cat
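A short usage sketch of `read_nlloc_hyp`; the file name is hypothetical and stands for any NonLinLoc hypocenter-phase output file:

```python
cat = read_nlloc_hyp("loc.hyp")
origin = cat[0].origins[0]
print(origin.time, origin.latitude, origin.longitude, origin.depth)
print(origin.quality.used_phase_count, origin.origin_uncertainty)
```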
def outputOBSPY(hp, event=None, only_fm_picks=False): """ Make an Event which includes the current focal mechanism information from HASH Use the 'only_fm_picks' flag to only include the picks HASH used for the FocalMechanism. This flag will replace the 'picks' and 'arrivals' lists of existing events with new ones. Inputs ------- hp : hashpy.HashPype instance event : obspy.core.event.Event only_fm_picks : bool of whether to overwrite the picks/arrivals lists Returns ------- obspy.core.event.Event Event will be new if no event was input, FocalMech added to existing event """ # Returns new (or updates existing) Event with HASH solution n = hp.npol if event is None: event = Event(focal_mechanisms=[], picks=[], origins=[]) origin = Origin(arrivals=[]) origin.time = UTCDateTime(hp.tstamp) origin.latitude = hp.qlat origin.longitude = hp.qlon origin.depth = hp.qdep origin.creation_info = CreationInfo(version=hp.icusp) origin.resource_id = ResourceIdentifier('smi:hash/Origin/{0}'.format( hp.icusp)) for _i in range(n): p = Pick() p.creation_info = CreationInfo(version=hp.arid[_i]) p.resource_id = ResourceIdentifier('smi:nsl/Pick/{0}'.format( p.creation_info.version)) p.waveform_id = WaveformStreamID(network_code=hp.snet[_i], station_code=hp.sname[_i], channel_code=hp.scomp[_i]) if hp.p_pol[_i] > 0: p.polarity = 'positive' else: p.polarity = 'negative' a = Arrival() a.creation_info = CreationInfo(version=hp.arid[_i]) a.resource_id = ResourceIdentifier('smi:nsl/Arrival/{0}'.format( p.creation_info.version)) a.azimuth = hp.p_azi_mc[_i, 0] a.takeoff_angle = 180. - hp.p_the_mc[_i, 0] a.pick_id = p.resource_id origin.arrivals.append(a) event.picks.append(p) event.origins.append(origin) event.preferred_origin_id = origin.resource_id.resource_id else: # just update the changes origin = event.preferred_origin() picks = [] arrivals = [] for _i in range(n): ind = hp.p_index[_i] a = origin.arrivals[ind] p = a.pick_id.getReferredObject() a.takeoff_angle = hp.p_the_mc[_i, 0] picks.append(p) arrivals.append(a) if only_fm_picks: origin.arrivals = arrivals event.picks = picks # Use me double couple calculator and populate planes/axes etc x = hp._best_quality_index # Put all the mechanisms into the 'focal_mechanisms' list, mark "best" as preferred for s in range(hp.nmult): dc = DoubleCouple([hp.str_avg[s], hp.dip_avg[s], hp.rak_avg[s]]) ax = dc.axis focal_mech = FocalMechanism() focal_mech.creation_info = CreationInfo(creation_time=UTCDateTime(), author=hp.author) focal_mech.triggering_origin_id = origin.resource_id focal_mech.resource_id = ResourceIdentifier( 'smi:hash/FocalMechanism/{0}/{1}'.format(hp.icusp, s + 1)) focal_mech.method_id = ResourceIdentifier('HASH') focal_mech.nodal_planes = NodalPlanes() focal_mech.nodal_planes.nodal_plane_1 = NodalPlane(*dc.plane1) focal_mech.nodal_planes.nodal_plane_2 = NodalPlane(*dc.plane2) focal_mech.principal_axes = PrincipalAxes() focal_mech.principal_axes.t_axis = Axis(azimuth=ax['T']['azimuth'], plunge=ax['T']['dip']) focal_mech.principal_axes.p_axis = Axis(azimuth=ax['P']['azimuth'], plunge=ax['P']['dip']) focal_mech.station_polarity_count = n focal_mech.azimuthal_gap = hp.magap focal_mech.misfit = hp.mfrac[s] focal_mech.station_distribution_ratio = hp.stdr[s] focal_mech.comments.append( Comment( hp.qual[s], resource_id=ResourceIdentifier( focal_mech.resource_id.resource_id + '/comment/quality'))) #---------------------------------------- event.focal_mechanisms.append(focal_mech) if s == x: event.preferred_focal_mechanism_id = focal_mech.resource_id.resource_id return 
event
def read_nlloc_hyp(filename, coordinate_converter=None, picks=None, **kwargs): """ Reads a NonLinLoc Hypocenter-Phase file to a :class:`~obspy.core.event.Catalog` object. .. note:: Coordinate conversion from coordinate frame of NonLinLoc model files / location run to WGS84 has to be specified explicitly by the user if necessary. .. note:: An example can be found on the :mod:`~obspy.io.nlloc` submodule front page in the documentation pages. :param filename: File or file-like object in text mode. :type coordinate_converter: func :param coordinate_converter: Function to convert (x, y, z) coordinates of NonLinLoc output to geographical coordinates and depth in meters (longitude, latitude, depth in kilometers). If left ``None``, NonLinLoc (x, y, z) output is left unchanged (e.g. if it is in geographical coordinates already like for NonLinLoc in global mode). The function should accept three arguments x, y, z (each of type :class:`numpy.ndarray`) and return a tuple of three :class:`numpy.ndarray` (lon, lat, depth in kilometers). :type picks: list of :class:`~obspy.core.event.Pick` :param picks: Original picks used to generate the NonLinLoc location. If provided, the output event will include the original picks and the arrivals in the output origin will link to them correctly (with their ``pick_id`` attribute). If not provided, the output event will include (the rather basic) pick information that can be reconstructed from the NonLinLoc hypocenter-phase file. :rtype: :class:`~obspy.core.event.Catalog` """ if not hasattr(filename, "read"): # Check if it exists, otherwise assume its a string. try: with open(filename, "rt") as fh: data = fh.read() except: try: data = filename.decode() except: data = str(filename) data = data.strip() else: data = filename.read() if hasattr(data, "decode"): data = data.decode() lines = data.splitlines() # remember picks originally used in location, if provided original_picks = picks if original_picks is None: original_picks = [] # determine indices of block start/end of the NLLOC output file indices_hyp = [None, None] indices_phases = [None, None] for i, line in enumerate(lines): if line.startswith("NLLOC "): indices_hyp[0] = i elif line.startswith("END_NLLOC"): indices_hyp[1] = i elif line.startswith("PHASE "): indices_phases[0] = i elif line.startswith("END_PHASE"): indices_phases[1] = i if any([i is None for i in indices_hyp]): msg = ("NLLOC HYP file seems corrupt," " could not detect 'NLLOC' and 'END_NLLOC' lines.") raise RuntimeError(msg) # strip any other lines around NLLOC block lines = lines[indices_hyp[0]:indices_hyp[1]] # extract PHASES lines (if any) if any(indices_phases): if not all(indices_phases): msg = ("NLLOC HYP file seems corrupt, 'PHASE' block is corrupt.") raise RuntimeError(msg) i1, i2 = indices_phases lines, phases_lines = lines[:i1] + lines[i2 + 1:], lines[i1 + 1:i2] else: phases_lines = [] lines = dict([line.split(None, 1) for line in lines]) line = lines["SIGNATURE"] line = line.rstrip().split('"')[1] signature, version, date, time = line.rsplit(" ", 3) creation_time = UTCDateTime().strptime(date + time, str("%d%b%Y%Hh%Mm%S")) # maximum likelihood origin location info line line = lines["HYPOCENTER"] x, y, z = map(float, line.split()[1:7:2]) if coordinate_converter: x, y, z = coordinate_converter(x, y, z) # origin time info line line = lines["GEOGRAPHIC"] year, month, day, hour, minute = map(int, line.split()[1:6]) seconds = float(line.split()[6]) time = UTCDateTime(year, month, day, hour, minute, seconds) # distribution statistics line line = 
lines["STATISTICS"] covariance_xx = float(line.split()[7]) covariance_yy = float(line.split()[13]) covariance_zz = float(line.split()[17]) stats_info_string = str( "Note: Depth/Latitude/Longitude errors are calculated from covariance " "matrix as 1D marginal (Lon/Lat errors as great circle degrees) " "while OriginUncertainty min/max horizontal errors are calculated " "from 2D error ellipsoid and are therefore seemingly higher compared " "to 1D errors. Error estimates can be reconstructed from the " "following original NonLinLoc error statistics line:\nSTATISTICS " + lines["STATISTICS"]) # goto location quality info line line = lines["QML_OriginQuality"].split() (assoc_phase_count, used_phase_count, assoc_station_count, used_station_count, depth_phase_count) = map(int, line[1:11:2]) stderr, az_gap, sec_az_gap = map(float, line[11:17:2]) gt_level = line[17] min_dist, max_dist, med_dist = map(float, line[19:25:2]) # goto location quality info line line = lines["QML_OriginUncertainty"] hor_unc, min_hor_unc, max_hor_unc, hor_unc_azim = \ map(float, line.split()[1:9:2]) # assign origin info event = Event() cat = Catalog(events=[event]) o = Origin() event.origins = [o] o.origin_uncertainty = OriginUncertainty() o.quality = OriginQuality() ou = o.origin_uncertainty oq = o.quality o.comments.append(Comment(text=stats_info_string)) cat.creation_info.creation_time = UTCDateTime() cat.creation_info.version = "ObsPy %s" % __version__ event.creation_info = CreationInfo(creation_time=creation_time, version=version) event.creation_info.version = version o.creation_info = CreationInfo(creation_time=creation_time, version=version) # negative values can appear on diagonal of covariance matrix due to a # precision problem in NLLoc implementation when location coordinates are # large compared to the covariances. o.longitude = x try: o.longitude_errors.uncertainty = kilometer2degrees(sqrt(covariance_xx)) except ValueError: if covariance_xx < 0: msg = ("Negative value in XX value of covariance matrix, not " "setting longitude error (epicentral uncertainties will " "still be set in origin uncertainty).") warnings.warn(msg) else: raise o.latitude = y try: o.latitude_errors.uncertainty = kilometer2degrees(sqrt(covariance_yy)) except ValueError: if covariance_yy < 0: msg = ("Negative value in YY value of covariance matrix, not " "setting longitude error (epicentral uncertainties will " "still be set in origin uncertainty).") warnings.warn(msg) else: raise o.depth = z * 1e3 # meters! o.depth_errors.uncertainty = sqrt(covariance_zz) * 1e3 # meters! o.depth_errors.confidence_level = 68 o.depth_type = str("from location") o.time = time ou.horizontal_uncertainty = hor_unc ou.min_horizontal_uncertainty = min_hor_unc ou.max_horizontal_uncertainty = max_hor_unc # values of -1 seem to be used for unset values, set to None for field in ("horizontal_uncertainty", "min_horizontal_uncertainty", "max_horizontal_uncertainty"): if ou.get(field, -1) == -1: ou[field] = None else: ou[field] *= 1e3 # meters! 
ou.azimuth_max_horizontal_uncertainty = hor_unc_azim ou.preferred_description = str("uncertainty ellipse") ou.confidence_level = 68 # NonLinLoc in general uses 1-sigma (68%) level oq.standard_error = stderr oq.azimuthal_gap = az_gap oq.secondary_azimuthal_gap = sec_az_gap oq.used_phase_count = used_phase_count oq.used_station_count = used_station_count oq.associated_phase_count = assoc_phase_count oq.associated_station_count = assoc_station_count oq.depth_phase_count = depth_phase_count oq.ground_truth_level = gt_level oq.minimum_distance = kilometer2degrees(min_dist) oq.maximum_distance = kilometer2degrees(max_dist) oq.median_distance = kilometer2degrees(med_dist) # go through all phase info lines for line in phases_lines: line = line.split() arrival = Arrival() o.arrivals.append(arrival) station = str(line[0]) phase = str(line[4]) arrival.phase = phase arrival.distance = kilometer2degrees(float(line[21])) arrival.azimuth = float(line[23]) arrival.takeoff_angle = float(line[24]) arrival.time_residual = float(line[16]) arrival.time_weight = float(line[17]) pick = Pick() wid = WaveformStreamID(station_code=station) date, hourmin, sec = map(str, line[6:9]) t = UTCDateTime().strptime(date + hourmin, "%Y%m%d%H%M") + float(sec) pick.waveform_id = wid pick.time = t pick.time_errors.uncertainty = float(line[10]) pick.phase_hint = phase pick.onset = ONSETS.get(line[3].lower(), None) pick.polarity = POLARITIES.get(line[5].lower(), None) # try to determine original pick for each arrival for pick_ in original_picks: wid = pick_.waveform_id if station == wid.station_code and phase == pick_.phase_hint: pick = pick_ break else: # warn if original picks were specified and we could not associate # the arrival correctly if original_picks: msg = ("Could not determine corresponding original pick for " "arrival. " "Falling back to pick information in NonLinLoc " "hypocenter-phase file.") warnings.warn(msg) event.picks.append(pick) arrival.pick_id = pick.resource_id return cat
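If the NonLinLoc run used a local Cartesian grid, a `coordinate_converter` has to be supplied. A minimal sketch assuming a simple linear conversion around a made-up reference point; a real conversion would normally go through a proper projection library such as pyproj:

```python
def simple_converter(x, y, z):
    # x/y in km east/north of an assumed reference point, z = depth in km
    ref_lon, ref_lat = 12.0, 46.0   # hypothetical reference coordinates
    lon = ref_lon + x / 111.2       # rough km-per-degree approximation
    lat = ref_lat + y / 111.2
    return lon, lat, z

cat = read_nlloc_hyp("last.hyp", coordinate_converter=simple_converter)
```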
def _parseRecordDp(self, line, event): """ Parses the 'source parameter data - primary' record Dp """ source_contributor = line[2:6].strip() computation_type = line[6] exponent = self._intZero(line[7]) scale = math.pow(10, exponent) centroid_origin_time = line[8:14] + "." + line[14] orig_time_stderr = line[15:17] if orig_time_stderr == "FX": orig_time_stderr = "Fixed" else: orig_time_stderr = self._floatWithFormat(orig_time_stderr, "2.1", scale) centroid_latitude = self._floatWithFormat(line[17:21], "4.2") lat_type = line[21] if centroid_latitude is not None: centroid_latitude *= self._coordinateSign(lat_type) lat_stderr = line[22:25] if lat_stderr == "FX": lat_stderr = "Fixed" else: lat_stderr = self._floatWithFormat(lat_stderr, "3.2", scale) centroid_longitude = self._floatWithFormat(line[25:30], "5.2") lon_type = line[30] if centroid_longitude is not None: centroid_longitude *= self._coordinateSign(lon_type) lon_stderr = line[31:34] if lon_stderr == "FX": lon_stderr = "Fixed" else: lon_stderr = self._floatWithFormat(lon_stderr, "3.2", scale) centroid_depth = self._floatWithFormat(line[34:38], "4.1") depth_stderr = line[38:40] if depth_stderr == "FX" or depth_stderr == "BD": depth_stderr = "Fixed" else: depth_stderr = self._floatWithFormat(depth_stderr, "2.1", scale) station_number = self._intZero(line[40:43]) component_number = self._intZero(line[43:46]) station_number2 = self._intZero(line[46:48]) component_number2 = self._intZero(line[48:51]) # unused: half_duration = self._floatWithFormat(line[51:54], '3.1') moment = self._floatWithFormat(line[54:56], "2.1") moment_stderr = self._floatWithFormat(line[56:58], "2.1") moment_exponent = self._int(line[58:60]) if (moment is not None) and (moment_exponent is not None): moment *= math.pow(10, moment_exponent) if (moment_stderr is not None) and (moment_exponent is not None): moment_stderr *= math.pow(10, moment_exponent) evid = event.resource_id.id.split("/")[-1] # Create a new origin only if centroid time is defined: origin = None if centroid_origin_time.strip() != ".": origin = Origin() res_id = "/".join( (res_id_prefix, "origin", evid, source_contributor.lower(), "mw" + computation_type.lower()) ) origin.resource_id = ResourceIdentifier(id=res_id) origin.creation_info = CreationInfo(agency_id=source_contributor) date = event.origins[0].time.strftime("%Y%m%d") origin.time = UTCDateTime(date + centroid_origin_time) # Check if centroid time is on the next day: if origin.time < event.origins[0].time: origin.time += timedelta(days=1) self._storeUncertainty(origin.time_errors, orig_time_stderr) origin.latitude = centroid_latitude origin.longitude = centroid_longitude origin.depth = centroid_depth * 1000 if lat_stderr == "Fixed" and lon_stderr == "Fixed": origin.epicenter_fixed = True else: self._storeUncertainty(origin.latitude_errors, self._latErrToDeg(lat_stderr)) self._storeUncertainty(origin.longitude_errors, self._lonErrToDeg(lon_stderr, origin.latitude)) if depth_stderr == "Fixed": origin.depth_type = "operator assigned" else: origin.depth_type = "from location" self._storeUncertainty(origin.depth_errors, depth_stderr, scale=1000) quality = OriginQuality() quality.used_station_count = station_number + station_number2 quality.used_phase_count = component_number + component_number2 origin.quality = quality origin.type = "centroid" event.origins.append(origin) focal_mechanism = FocalMechanism() res_id = "/".join( (res_id_prefix, "focalmechanism", evid, source_contributor.lower(), "mw" + computation_type.lower()) ) focal_mechanism.resource_id 
= ResourceIdentifier(id=res_id) focal_mechanism.creation_info = CreationInfo(agency_id=source_contributor) moment_tensor = MomentTensor() if origin is not None: moment_tensor.derived_origin_id = origin.resource_id else: # this is required for QuakeML validation: res_id = "/".join((res_id_prefix, "no-origin")) moment_tensor.derived_origin_id = ResourceIdentifier(id=res_id) for mag in event.magnitudes: if mag.creation_info.agency_id == source_contributor: moment_tensor.moment_magnitude_id = mag.resource_id res_id = "/".join( (res_id_prefix, "momenttensor", evid, source_contributor.lower(), "mw" + computation_type.lower()) ) moment_tensor.resource_id = ResourceIdentifier(id=res_id) moment_tensor.scalar_moment = moment self._storeUncertainty(moment_tensor.scalar_moment_errors, moment_stderr) data_used = DataUsed() data_used.station_count = station_number + station_number2 data_used.component_count = component_number + component_number2 if computation_type == "C": res_id = "/".join((res_id_prefix, "methodID=CMT")) focal_mechanism.method_id = ResourceIdentifier(id=res_id) # CMT algorithm uses long-period body waves, # very-long-period surface waves and # intermediate period surface waves (since 2004 # for shallow and intermediate-depth earthquakes # --Ekstrom et al., 2012) data_used.wave_type = "combined" if computation_type == "M": res_id = "/".join((res_id_prefix, "methodID=moment_tensor")) focal_mechanism.method_id = ResourceIdentifier(id=res_id) # FIXME: not sure which kind of data is used by # "moment tensor" algorithm. data_used.wave_type = "unknown" elif computation_type == "B": res_id = "/".join((res_id_prefix, "methodID=broadband_data")) focal_mechanism.method_id = ResourceIdentifier(id=res_id) # FIXME: is 'combined' correct here? data_used.wave_type = "combined" elif computation_type == "F": res_id = "/".join((res_id_prefix, "methodID=P-wave_first_motion")) focal_mechanism.method_id = ResourceIdentifier(id=res_id) data_used.wave_type = "P waves" elif computation_type == "S": res_id = "/".join((res_id_prefix, "methodID=scalar_moment")) focal_mechanism.method_id = ResourceIdentifier(id=res_id) # FIXME: not sure which kind of data is used # for scalar moment determination. data_used.wave_type = "unknown" moment_tensor.data_used = data_used focal_mechanism.moment_tensor = moment_tensor event.focal_mechanisms.append(focal_mechanism) return focal_mechanism
def _parse_record_dp(self, line, event): """ Parses the 'source parameter data - primary' record Dp """ source_contributor = line[2:6].strip() computation_type = line[6] exponent = self._int_zero(line[7]) scale = math.pow(10, exponent) centroid_origin_time = line[8:14] + '.' + line[14] orig_time_stderr = line[15:17] if orig_time_stderr == 'FX': orig_time_stderr = 'Fixed' else: orig_time_stderr = \ self._float_with_format(orig_time_stderr, '2.1', scale) centroid_latitude = self._float_with_format(line[17:21], '4.2') lat_type = line[21] if centroid_latitude is not None: centroid_latitude *= self._coordinate_sign(lat_type) lat_stderr = line[22:25] if lat_stderr == 'FX': lat_stderr = 'Fixed' else: lat_stderr = self._float_with_format(lat_stderr, '3.2', scale) centroid_longitude = self._float_with_format(line[25:30], '5.2') lon_type = line[30] if centroid_longitude is not None: centroid_longitude *= self._coordinate_sign(lon_type) lon_stderr = line[31:34] if lon_stderr == 'FX': lon_stderr = 'Fixed' else: lon_stderr = self._float_with_format(lon_stderr, '3.2', scale) centroid_depth = self._float_with_format(line[34:38], '4.1') depth_stderr = line[38:40] if depth_stderr == 'FX' or depth_stderr == 'BD': depth_stderr = 'Fixed' else: depth_stderr = self._float_with_format(depth_stderr, '2.1', scale) station_number = self._int_zero(line[40:43]) component_number = self._int_zero(line[43:46]) station_number2 = self._int_zero(line[46:48]) component_number2 = self._int_zero(line[48:51]) # unused: half_duration = self._float_with_format(line[51:54], '3.1') moment = self._float_with_format(line[54:56], '2.1') moment_stderr = self._float_with_format(line[56:58], '2.1') moment_exponent = self._int(line[58:60]) if (moment is not None) and (moment_exponent is not None): moment *= math.pow(10, moment_exponent) if (moment_stderr is not None) and (moment_exponent is not None): moment_stderr *= math.pow(10, moment_exponent) evid = event.resource_id.id.split('/')[-1] # Create a new origin only if centroid time is defined: origin = None if centroid_origin_time.strip() != '.': origin = Origin() res_id = '/'.join((res_id_prefix, 'origin', evid, source_contributor.lower(), 'mw' + computation_type.lower())) origin.resource_id = ResourceIdentifier(id=res_id) origin.creation_info = \ CreationInfo(agency_id=source_contributor) date = event.origins[0].time.strftime('%Y%m%d') origin.time = UTCDateTime(date + centroid_origin_time) # Check if centroid time is on the next day: if origin.time < event.origins[0].time: origin.time += timedelta(days=1) self._store_uncertainty(origin.time_errors, orig_time_stderr) origin.latitude = centroid_latitude origin.longitude = centroid_longitude origin.depth = centroid_depth * 1000 if lat_stderr == 'Fixed' and lon_stderr == 'Fixed': origin.epicenter_fixed = True else: self._store_uncertainty(origin.latitude_errors, self._lat_err_to_deg(lat_stderr)) self._store_uncertainty(origin.longitude_errors, self._lon_err_to_deg(lon_stderr, origin.latitude)) if depth_stderr == 'Fixed': origin.depth_type = 'operator assigned' else: origin.depth_type = 'from location' self._store_uncertainty(origin.depth_errors, depth_stderr, scale=1000) quality = OriginQuality() quality.used_station_count = \ station_number + station_number2 quality.used_phase_count = \ component_number + component_number2 origin.quality = quality origin.origin_type = 'centroid' event.origins.append(origin) focal_mechanism = FocalMechanism() res_id = '/'.join((res_id_prefix, 'focalmechanism', evid, source_contributor.lower(), 'mw' + 
computation_type.lower())) focal_mechanism.resource_id = ResourceIdentifier(id=res_id) focal_mechanism.creation_info = \ CreationInfo(agency_id=source_contributor) moment_tensor = MomentTensor() if origin is not None: moment_tensor.derived_origin_id = origin.resource_id else: # this is required for QuakeML validation: res_id = '/'.join((res_id_prefix, 'no-origin')) moment_tensor.derived_origin_id = \ ResourceIdentifier(id=res_id) for mag in event.magnitudes: if mag.creation_info.agency_id == source_contributor: moment_tensor.moment_magnitude_id = mag.resource_id res_id = '/'.join((res_id_prefix, 'momenttensor', evid, source_contributor.lower(), 'mw' + computation_type.lower())) moment_tensor.resource_id = ResourceIdentifier(id=res_id) moment_tensor.scalar_moment = moment self._store_uncertainty(moment_tensor.scalar_moment_errors, moment_stderr) data_used = DataUsed() data_used.station_count = station_number + station_number2 data_used.component_count = component_number + component_number2 if computation_type == 'C': res_id = '/'.join((res_id_prefix, 'methodID=CMT')) focal_mechanism.method_id = ResourceIdentifier(id=res_id) # CMT algorithm uses long-period body waves, # very-long-period surface waves and # intermediate period surface waves (since 2004 # for shallow and intermediate-depth earthquakes # --Ekstrom et al., 2012) data_used.wave_type = 'combined' if computation_type == 'M': res_id = '/'.join((res_id_prefix, 'methodID=moment_tensor')) focal_mechanism.method_id = ResourceIdentifier(id=res_id) # FIXME: not sure which kind of data is used by # "moment tensor" algorithm. data_used.wave_type = 'unknown' elif computation_type == 'B': res_id = '/'.join((res_id_prefix, 'methodID=broadband_data')) focal_mechanism.method_id = ResourceIdentifier(id=res_id) # FIXME: is 'combined' correct here? data_used.wave_type = 'combined' elif computation_type == 'F': res_id = '/'.join((res_id_prefix, 'methodID=P-wave_first_motion')) focal_mechanism.method_id = ResourceIdentifier(id=res_id) data_used.wave_type = 'P waves' elif computation_type == 'S': res_id = '/'.join((res_id_prefix, 'methodID=scalar_moment')) focal_mechanism.method_id = ResourceIdentifier(id=res_id) # FIXME: not sure which kind of data is used # for scalar moment determination. data_used.wave_type = 'unknown' moment_tensor.data_used = [data_used] focal_mechanism.moment_tensor = moment_tensor event.focal_mechanisms.append(focal_mechanism) return focal_mechanism
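The Dp parsers above rely on a `_float_with_format` helper that is not shown. Based on how it is called, it presumably interprets fixed-width fields with implied decimal places; a rough sketch, where the real implementation may differ (e.g. in how `scale` or blank fields are handled):

```python
def _float_with_format(string, format_string, scale=1):
    # '4.2' is read as: 4-character field with 2 implied decimal places,
    # so '5530' -> 55.30; the result is then multiplied by `scale`.
    try:
        decimals = int(format_string.split('.')[1])
        return float(string) / 10 ** decimals * scale
    except (IndexError, ValueError):
        return None
```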
def outputOBSPY(hp, event=None, only_fm_picks=False): """ Make an Event which includes the current focal mechanism information from HASH Use the 'only_fm_picks' flag to only include the picks HASH used for the FocalMechanism. This flag will replace the 'picks' and 'arrivals' lists of existing events with new ones. Inputs ------- hp : hashpy.HashPype instance event : obspy.core.event.Event only_fm_picks : bool of whether to overwrite the picks/arrivals lists Returns ------- obspy.core.event.Event Event will be new if no event was input, FocalMech added to existing event """ # Returns new (or updates existing) Event with HASH solution n = hp.npol if event is None: event = Event(focal_mechanisms=[], picks=[], origins=[]) origin = Origin(arrivals=[]) origin.time = UTCDateTime(hp.tstamp) origin.latitude = hp.qlat origin.longitude = hp.qlon origin.depth = hp.qdep origin.creation_info = CreationInfo(version=hp.icusp) origin.resource_id = ResourceIdentifier('smi:hash/Origin/{0}'.format(hp.icusp)) for _i in range(n): p = Pick() p.creation_info = CreationInfo(version=hp.arid[_i]) p.resource_id = ResourceIdentifier('smi:hash/Pick/{0}'.format(p.creation_info.version)) p.waveform_id = WaveformStreamID(network_code=hp.snet[_i], station_code=hp.sname[_i], channel_code=hp.scomp[_i]) if hp.p_pol[_i] > 0: p.polarity = 'positive' else: p.polarity = 'negative' a = Arrival() a.creation_info = CreationInfo(version=hp.arid[_i]) a.resource_id = ResourceIdentifier('smi:hash/Arrival/{0}'.format(p.creation_info.version)) a.azimuth = hp.p_azi_mc[_i,0] a.takeoff_angle = 180. - hp.p_the_mc[_i,0] a.pick_id = p.resource_id origin.arrivals.append(a) event.picks.append(p) event.origins.append(origin) event.preferred_origin_id = str(origin.resource_id) else: # just update the changes origin = event.preferred_origin() picks = [] arrivals = [] for _i in range(n): ind = hp.p_index[_i] a = origin.arrivals[ind] p = a.pick_id.getReferredObject() a.takeoff_angle = hp.p_the_mc[_i,0] picks.append(p) arrivals.append(a) if only_fm_picks: origin.arrivals = arrivals event.picks = picks # Use me double couple calculator and populate planes/axes etc x = hp._best_quality_index # Put all the mechanisms into the 'focal_mechanisms' list, mark "best" as preferred for s in range(hp.nmult): dc = DoubleCouple([hp.str_avg[s], hp.dip_avg[s], hp.rak_avg[s]]) ax = dc.axis focal_mech = FocalMechanism() focal_mech.creation_info = CreationInfo(creation_time=UTCDateTime(), author=hp.author) focal_mech.triggering_origin_id = origin.resource_id focal_mech.resource_id = ResourceIdentifier('smi:hash/FocalMechanism/{0}/{1}'.format(hp.icusp, s+1)) focal_mech.method_id = ResourceIdentifier('HASH') focal_mech.nodal_planes = NodalPlanes() focal_mech.nodal_planes.nodal_plane_1 = NodalPlane(*dc.plane1) focal_mech.nodal_planes.nodal_plane_2 = NodalPlane(*dc.plane2) focal_mech.principal_axes = PrincipalAxes() focal_mech.principal_axes.t_axis = Axis(azimuth=ax['T']['azimuth'], plunge=ax['T']['dip']) focal_mech.principal_axes.p_axis = Axis(azimuth=ax['P']['azimuth'], plunge=ax['P']['dip']) focal_mech.station_polarity_count = n focal_mech.azimuthal_gap = hp.magap focal_mech.misfit = hp.mfrac[s] focal_mech.station_distribution_ratio = hp.stdr[s] focal_mech.comments.append( Comment(hp.qual[s], resource_id=ResourceIdentifier(str(focal_mech.resource_id) + '/comment/quality')) ) #---------------------------------------- event.focal_mechanisms.append(focal_mech) if s == x: event.preferred_focal_mechanism_id = str(focal_mech.resource_id) return event
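A heavily abbreviated sketch of where `outputOBSPY` fits into a HASH run; `hp` is assumed to be a fully configured `hashpy.HashPype` instance whose setup (station geometry, polarities, running the HASH computation) is not shown:

```python
from hashpy import HashPype  # assumed import path

hp = HashPype()
# ... configure hp and run the HASH focal mechanism computation here ...
event = outputOBSPY(hp)      # build a new Event with the HASH solution
print(event.focal_mechanisms)
```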
def build(self): """ Build an obspy moment tensor focal mech event This makes the tensor output into an Event containing: 1) a FocalMechanism with a MomentTensor, NodalPlanes, and PrincipalAxes 2) a Magnitude of the Mw from the Tensor Which is what we want for outputting QuakeML using the (slightly modified) obspy code. Input ----- filehandle => open file OR str from filehandle.read() Output ------ event => instance of Event() class as described above """ p = self.parser event = Event(event_type='earthquake') origin = Origin() focal_mech = FocalMechanism() nodal_planes = NodalPlanes() moment_tensor = MomentTensor() principal_ax = PrincipalAxes() magnitude = Magnitude() data_used = DataUsed() creation_info = CreationInfo(agency_id='NN') ev_mode = 'automatic' ev_stat = 'preliminary' evid = None orid = None # Parse the entire file line by line. for n,l in enumerate(p.line): if 'REVIEWED BY NSL STAFF' in l: ev_mode = 'manual' ev_stat = 'reviewed' if 'Event ID' in l: evid = p._id(n) if 'Origin ID' in l: orid = p._id(n) if 'Ichinose' in l: moment_tensor.category = 'regional' if re.match(r'^\d{4}\/\d{2}\/\d{2}', l): ev = p._event_info(n) if 'Depth' in l: derived_depth = p._depth(n) if 'Mw' in l: magnitude.mag = p._mw(n) magnitude.magnitude_type = 'Mw' if 'Mo' in l and 'dyne' in l: moment_tensor.scalar_moment = p._mo(n) if 'Percent Double Couple' in l: moment_tensor.double_couple = p._percent(n) if 'Percent CLVD' in l: moment_tensor.clvd = p._percent(n) if 'Epsilon' in l: moment_tensor.variance = p._epsilon(n) if 'Percent Variance Reduction' in l: moment_tensor.variance_reduction = p._percent(n) if 'Major Double Couple' in l and 'strike' in p.line[n+1]: np = p._double_couple(n) nodal_planes.nodal_plane_1 = NodalPlane(*np[0]) nodal_planes.nodal_plane_2 = NodalPlane(*np[1]) nodal_planes.preferred_plane = 1 if 'Spherical Coordinates' in l: mt = p._mt_sphere(n) moment_tensor.tensor = Tensor( m_rr = mt['Mrr'], m_tt = mt['Mtt'], m_pp = mt['Mff'], m_rt = mt['Mrt'], m_rp = mt['Mrf'], m_tp = mt['Mtf'], ) if 'Eigenvalues and eigenvectors of the Major Double Couple' in l: ax = p._vectors(n) principal_ax.t_axis = Axis(ax['T']['trend'], ax['T']['plunge'], ax['T']['ev']) principal_ax.p_axis = Axis(ax['P']['trend'], ax['P']['plunge'], ax['P']['ev']) principal_ax.n_axis = Axis(ax['N']['trend'], ax['N']['plunge'], ax['N']['ev']) if 'Number of Stations' in l: data_used.station_count = p._number_of_stations(n) if 'Maximum' in l and 'Gap' in l: focal_mech.azimuthal_gap = p._gap(n) if re.match(r'^Date', l): creation_info.creation_time = p._creation_time(n) # Creation Time creation_info.version = orid # Fill in magnitude values magnitude.evaluation_mode = ev_mode magnitude.evaluation_status = ev_stat magnitude.creation_info = creation_info.copy() magnitude.resource_id = self._rid(magnitude) # Stub origin origin.time = ev.get('time') origin.latitude = ev.get('lat') origin.longitude = ev.get('lon') origin.depth = derived_depth * 1000. 
origin.depth_type = "from moment tensor inversion" origin.creation_info = creation_info.copy() # Unique from true origin ID _oid = self._rid(origin) origin.resource_id = ResourceIdentifier(str(_oid) + '/mt') del _oid # Make an id for the MT that references this origin ogid = str(origin.resource_id) doid = ResourceIdentifier(ogid, referred_object=origin) # Make an id for the moment tensor mag which references this mag mrid = str(magnitude.resource_id) mmid = ResourceIdentifier(mrid, referred_object=magnitude) # MT todo: could check/use URL for RID if parsing the php file moment_tensor.evaluation_mode = ev_mode moment_tensor.evaluation_status = ev_stat moment_tensor.data_used = data_used moment_tensor.moment_magnitude_id = mmid moment_tensor.derived_origin_id = doid moment_tensor.creation_info = creation_info.copy() moment_tensor.resource_id = self._rid(moment_tensor) # Fill in focal_mech values focal_mech.nodal_planes = nodal_planes focal_mech.moment_tensor = moment_tensor focal_mech.principal_axes = principal_ax focal_mech.creation_info = creation_info.copy() focal_mech.resource_id = self._rid(focal_mech) # add mech and new magnitude to event event.focal_mechanisms = [focal_mech] event.magnitudes = [magnitude] event.origins = [origin] event.creation_info = creation_info.copy() # If an MT was done, that's the preferred mag/mech event.preferred_magnitude_id = str(magnitude.resource_id) event.preferred_focal_mechanism_id = str(focal_mech.resource_id) if evid: event.creation_info.version = evid event.resource_id = self._rid(event) self.event = event
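The `build` method above wires the MomentTensor to its derived origin and magnitude through `ResourceIdentifier` objects created with `referred_object`. A minimal standalone illustration of that pattern, assuming a reasonably recent ObsPy where `get_referred_object` is available:

```python
from obspy.core.event import Origin, ResourceIdentifier

origin = Origin()
rid = ResourceIdentifier(str(origin.resource_id), referred_object=origin)
# later, e.g. via moment_tensor.derived_origin_id, the object is recoverable:
assert rid.get_referred_object() is origin
```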
def _map_join2origin(self, db):
    """
    Return an Origin instance from a dict of CSS key/values

    Inputs
    ======
    db : dict of key/values of CSS fields related to the origin (see Join)

    Returns
    =======
    obspy.core.event.Origin

    Notes
    =====
    Any object that supports the dict 'get' method can be passed as
    input, e.g. OrderedDict, custom classes, etc.

    Join
    ----
    origin <- origerr (outer)
    """
    # -- Basic location ------------------------------------------
    origin = Origin()
    origin.latitude = db.get('lat')
    origin.longitude = db.get('lon')
    origin.depth = _km2m(db.get('depth'))
    origin.time = _utc(db.get('time'))
    origin.extra = {}

    # -- Quality -------------------------------------------------
    quality = OriginQuality(
        associated_phase_count=db.get('nass'),
        used_phase_count=db.get('ndef'),
        standard_error=db.get('sdobs'),
    )
    origin.quality = quality

    # -- Solution Uncertainties ----------------------------------
    # in CSS the ellipse is projected onto the horizontal plane
    # using the covariance matrix
    uncertainty = OriginUncertainty()
    a = _km2m(db.get('smajax'))
    b = _km2m(db.get('sminax'))
    s = db.get('strike')
    dep_u = _km2m(db.get('sdepth'))
    time_u = db.get('stime')
    uncertainty.max_horizontal_uncertainty = a
    uncertainty.min_horizontal_uncertainty = b
    uncertainty.azimuth_max_horizontal_uncertainty = s
    uncertainty.horizontal_uncertainty = a
    uncertainty.preferred_description = "horizontal uncertainty"
    if db.get('conf') is not None:
        uncertainty.confidence_level = db.get('conf') * 100.
    if uncertainty.horizontal_uncertainty is not None:
        origin.origin_uncertainty = uncertainty

    # -- Parameter Uncertainties ---------------------------------
    if all([a, b, s]):
        n, e = _get_NE_on_ellipse(a, b, s)
        lat_u = _m2deg_lat(n)
        lon_u = _m2deg_lon(e, lat=origin.latitude)
        origin.latitude_errors = {'uncertainty': lat_u}
        origin.longitude_errors = {'uncertainty': lon_u}
    if dep_u:
        origin.depth_errors = {'uncertainty': dep_u}
    if time_u:
        origin.time_errors = {'uncertainty': time_u}

    # -- Analyst-determined Status -------------------------------
    posted_author = _str(db.get('auth'))
    mode, status = self.get_event_status(posted_author)
    origin.evaluation_mode = mode
    origin.evaluation_status = status

    # Save etype per origin due to schema differences...
    css_etype = _str(db.get('etype'))
    # Compatible with future patch rename "_namespace" -> "namespace"
    origin.extra['etype'] = {
        'value': css_etype,
        'namespace': CSS_NAMESPACE,
    }

    origin.creation_info = CreationInfo(
        creation_time=_utc(db.get('lddate')),
        agency_id=self.agency,
        version=db.get('orid'),
        author=posted_author,
    )
    origin.resource_id = self._rid(origin)
    return origin
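A hypothetical CSS3.0 origin/origerr join row to illustrate the kind of mapping `_map_join2origin` expects; the field names follow the CSS schema used above, but the values and the `converter` instance are made up:

```python
row = {
    'lat': 38.10, 'lon': -117.95, 'depth': 7.2, 'time': 1262304000.0,
    'nass': 24, 'ndef': 18, 'sdobs': 0.31,
    'smajax': 1.8, 'sminax': 1.1, 'strike': 85.0,
    'sdepth': 2.0, 'stime': 0.4, 'conf': 0.9,
    'auth': 'analyst:jdoe', 'etype': 'L',
    'lddate': 1262304100.0, 'orid': 1001,
}
origin = converter._map_join2origin(row)  # `converter`: instance of the class above
print(origin.latitude, origin.origin_uncertainty.confidence_level)
```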
def _read_single_hypocenter(lines, coordinate_converter, original_picks):
    """
    Given a list of lines (starting with a 'NLLOC' line and ending with an
    'END_NLLOC' line), parse them into an Event.
    """
    try:
        # some paranoid checks..
        assert lines[0].startswith("NLLOC ")
        assert lines[-1].startswith("END_NLLOC")
        for line in lines[1:-1]:
            assert not line.startswith("NLLOC ")
            assert not line.startswith("END_NLLOC")
    except Exception:
        msg = ("This should not have happened, please report this as a bug "
               "at https://github.com/obspy/obspy/issues.")
        raise Exception(msg)

    indices_phases = [None, None]
    for i, line in enumerate(lines):
        if line.startswith("PHASE "):
            indices_phases[0] = i
        elif line.startswith("END_PHASE"):
            indices_phases[1] = i

    # extract PHASES lines (if any)
    if any(indices_phases):
        if not all(indices_phases):
            msg = "NLLOC HYP file seems corrupt, 'PHASE' block is incomplete."
            raise RuntimeError(msg)
        i1, i2 = indices_phases
        lines, phases_lines = lines[:i1] + lines[i2 + 1:], lines[i1 + 1:i2]
    else:
        phases_lines = []

    lines = dict([line.split(None, 1) for line in lines[:-1]])
    line = lines["SIGNATURE"]

    line = line.rstrip().split('"')[1]
    signature, version, date, time = line.rsplit(" ", 3)
    # new NLLoc > 6.0 seems to add prefix 'run:' before date
    if date.startswith('run:'):
        date = date[4:]
    signature = signature.strip()
    creation_time = UTCDateTime.strptime(date + time, str("%d%b%Y%Hh%Mm%S"))

    if coordinate_converter:
        # maximum likelihood origin location in km info line
        line = lines["HYPOCENTER"]
        x, y, z = coordinate_converter(*map(float, line.split()[1:7:2]))
    else:
        # maximum likelihood origin location lon lat info line
        line = lines["GEOGRAPHIC"]
        y, x, z = map(float, line.split()[8:13:2])

    # maximum likelihood origin time info line
    line = lines["GEOGRAPHIC"]

    year, mon, day, hour, min = map(int, line.split()[1:6])
    seconds = float(line.split()[6])
    time = UTCDateTime(year, mon, day, hour, min, seconds, strict=False)

    # distribution statistics line
    line = lines["STATISTICS"]
    covariance_xx = float(line.split()[7])
    covariance_yy = float(line.split()[13])
    covariance_zz = float(line.split()[17])
    stats_info_string = str(
        "Note: Depth/Latitude/Longitude errors are calculated from covariance "
        "matrix as 1D marginal (Lon/Lat errors as great circle degrees) "
        "while OriginUncertainty min/max horizontal errors are calculated "
        "from 2D error ellipsoid and are therefore seemingly higher compared "
        "to 1D errors. Error estimates can be reconstructed from the "
        "following original NonLinLoc error statistics line:\nSTATISTICS " +
        lines["STATISTICS"])

    # goto location quality info line
    line = lines["QML_OriginQuality"].split()

    (assoc_phase_count, used_phase_count, assoc_station_count,
     used_station_count, depth_phase_count) = map(int, line[1:11:2])
    stderr, az_gap, sec_az_gap = map(float, line[11:17:2])
    gt_level = line[17]
    min_dist, max_dist, med_dist = map(float, line[19:25:2])

    # goto location uncertainty info line
    line = lines["QML_OriginUncertainty"]

    if "COMMENT" in lines:
        comment = lines["COMMENT"].strip()
        comment = comment.strip('\'"')
        comment = comment.strip()
    else:
        comment = None

    hor_unc, min_hor_unc, max_hor_unc, hor_unc_azim = \
        map(float, line.split()[1:9:2])

    # assign origin info
    event = Event()
    o = Origin()
    event.origins = [o]
    event.preferred_origin_id = o.resource_id
    o.origin_uncertainty = OriginUncertainty()
    o.quality = OriginQuality()
    ou = o.origin_uncertainty
    oq = o.quality
    o.comments.append(Comment(text=stats_info_string,
                              force_resource_id=False))
    if comment:
        event.comments.append(Comment(text=comment, force_resource_id=False))

    # SIGNATURE field's first item is LOCSIG, which is supposed to be
    # 'Identification of an individual, institution or other entity'
    # according to
    # http://alomax.free.fr/nlloc/soft6.00/control.html#_NLLoc_locsig_
    # so use it as author in creation info
    event.creation_info = CreationInfo(creation_time=creation_time,
                                       version=version,
                                       author=signature)
    o.creation_info = CreationInfo(creation_time=creation_time,
                                   version=version,
                                   author=signature)

    # negative values can appear on diagonal of covariance matrix due to a
    # precision problem in NLLoc implementation when location coordinates are
    # large compared to the covariances.
    o.longitude = x
    try:
        o.longitude_errors.uncertainty = kilometer2degrees(sqrt(covariance_xx))
    except ValueError:
        if covariance_xx < 0:
            msg = ("Negative value in XX value of covariance matrix, not "
                   "setting longitude error (epicentral uncertainties will "
                   "still be set in origin uncertainty).")
            warnings.warn(msg)
        else:
            raise
    o.latitude = y
    try:
        o.latitude_errors.uncertainty = kilometer2degrees(sqrt(covariance_yy))
    except ValueError:
        if covariance_yy < 0:
            msg = ("Negative value in YY value of covariance matrix, not "
                   "setting latitude error (epicentral uncertainties will "
                   "still be set in origin uncertainty).")
            warnings.warn(msg)
        else:
            raise
    o.depth = z * 1e3  # meters!
    o.depth_errors.uncertainty = sqrt(covariance_zz) * 1e3  # meters!
    o.depth_errors.confidence_level = 68
    o.depth_type = str("from location")
    o.time = time

    ou.horizontal_uncertainty = hor_unc
    ou.min_horizontal_uncertainty = min_hor_unc
    ou.max_horizontal_uncertainty = max_hor_unc
    # values of -1 seem to be used for unset values, set to None
    for field in ("horizontal_uncertainty", "min_horizontal_uncertainty",
                  "max_horizontal_uncertainty"):
        if ou.get(field, -1) == -1:
            ou[field] = None
        else:
            ou[field] *= 1e3  # meters!
    ou.azimuth_max_horizontal_uncertainty = hor_unc_azim
    ou.preferred_description = str("uncertainty ellipse")
    ou.confidence_level = 68  # NonLinLoc in general uses 1-sigma (68%) level

    oq.standard_error = stderr
    oq.azimuthal_gap = az_gap
    oq.secondary_azimuthal_gap = sec_az_gap
    oq.used_phase_count = used_phase_count
    oq.used_station_count = used_station_count
    oq.associated_phase_count = assoc_phase_count
    oq.associated_station_count = assoc_station_count
    oq.depth_phase_count = depth_phase_count
    oq.ground_truth_level = gt_level
    oq.minimum_distance = kilometer2degrees(min_dist)
    oq.maximum_distance = kilometer2degrees(max_dist)
    oq.median_distance = kilometer2degrees(med_dist)

    # go through all phase info lines
    for line in phases_lines:
        line = line.split()
        arrival = Arrival()
        o.arrivals.append(arrival)
        station = str(line[0])
        phase = str(line[4])
        arrival.phase = phase
        arrival.distance = kilometer2degrees(float(line[21]))
        arrival.azimuth = float(line[23])
        arrival.takeoff_angle = float(line[24])
        arrival.time_residual = float(line[16])
        arrival.time_weight = float(line[17])
        pick = Pick()
        # network codes are not used by NonLinLoc, so they can not be known
        # when reading the .hyp file.. to conform with QuakeML standard set an
        # empty network code
        wid = WaveformStreamID(network_code="", station_code=station)
        # have to split this into ints for overflow to work correctly
        date, hourmin, sec = map(str, line[6:9])
        ymd = [int(date[:4]), int(date[4:6]), int(date[6:8])]
        hm = [int(hourmin[:2]), int(hourmin[2:4])]
        t = UTCDateTime(*(ymd + hm), strict=False) + float(sec)
        pick.waveform_id = wid
        pick.time = t
        pick.time_errors.uncertainty = float(line[10])
        pick.phase_hint = phase
        pick.onset = ONSETS.get(line[3].lower(), None)
        pick.polarity = POLARITIES.get(line[5].lower(), None)
        # try to determine original pick for each arrival
        for pick_ in original_picks:
            wid = pick_.waveform_id
            if station == wid.station_code and phase == pick_.phase_hint:
                pick = pick_
                break
        else:
            # warn if original picks were specified and we could not associate
            # the arrival correctly
            if original_picks:
                msg = ("Could not determine corresponding original pick for "
                       "arrival. "
                       "Falling back to pick information in NonLinLoc "
                       "hypocenter-phase file.")
                warnings.warn(msg)
        event.picks.append(pick)
        arrival.pick_id = pick.resource_id

    event.scope_resource_ids()

    return event
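# Minimal usage sketch, assuming this parser is registered with ObsPy's event
# plugin mechanism the same way obspy.io.nlloc is: a NonLinLoc hypocenter file
# can then be read into a Catalog and the reconstructed Origin, uncertainty
# and arrival objects inspected. The file name "nlloc.hyp" is a placeholder.
def _example_read_nlloc_hyp():
    from obspy import read_events

    cat = read_events("nlloc.hyp", format="NLLOC_HYP")
    origin = cat[0].preferred_origin()
    print(origin.time, origin.latitude, origin.longitude, origin.depth)
    # horizontal uncertainties were converted to meters above
    print(origin.origin_uncertainty.max_horizontal_uncertainty)
    for arrival in origin.arrivals:
        pick = arrival.pick_id.get_referred_object()
        print(pick.waveform_id.station_code, arrival.phase,
              arrival.time_residual)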