def _parse_record_ah(self, line, event):
    """
    Parses the 'additional hypocenter' record AH
    """
    date = line[2:10]
    time = line[11:20]
    # unused: hypocenter_quality = line[20]
    latitude = self._float(line[21:27])
    lat_type = line[27]
    longitude = self._float(line[29:36])
    lon_type = line[36]
    # unused: preliminary_flag = line[37]
    depth = self._float(line[38:43])
    # unused: depth_quality = line[43]
    standard_dev = self._float_unused(line[44:48])
    station_number = self._int_unused(line[48:51])
    phase_number = self._int_unused(line[51:55])
    source_code = line[56:60].strip()

    evid = event.resource_id.id.split('/')[-1]
    origin = Origin()
    res_id = '/'.join((res_id_prefix, 'origin', evid, source_code.lower()))
    origin.resource_id = ResourceIdentifier(id=res_id)
    origin.creation_info = CreationInfo(agency_id=source_code)
    origin.time = UTCDateTime(date + time)
    origin.latitude = latitude * self._coordinate_sign(lat_type)
    origin.longitude = longitude * self._coordinate_sign(lon_type)
    origin.depth = depth * 1000
    origin.depth_type = 'from location'
    origin.quality = OriginQuality()
    origin.quality.standard_error = standard_dev
    origin.quality.used_station_count = station_number
    origin.quality.used_phase_count = phase_number
    origin.origin_type = 'hypocenter'
    event.origins.append(origin)
def _parse_record_hy(self, line):
    """
    Parses the 'hypocenter' record HY
    """
    date = line[2:10]
    time = line[11:20]
    # unused: location_quality = line[20]
    latitude = self._float(line[21:27])
    lat_type = line[27]
    longitude = self._float(line[29:36])
    lon_type = line[36]
    depth = self._float(line[38:43])
    # unused: depth_quality = line[43]
    standard_dev = self._float(line[44:48])
    station_number = self._int(line[48:51])
    # unused: version_flag = line[51]
    fe_region_number = line[52:55]
    fe_region_name = self._decode_fe_region_number(fe_region_number)
    source_code = line[55:60].strip()

    event = Event()
    # FIXME: a smarter way to define evid?
    evid = date + time
    res_id = '/'.join((res_id_prefix, 'event', evid))
    event.resource_id = ResourceIdentifier(id=res_id)
    description = EventDescription(
        type='region name',
        text=fe_region_name)
    event.event_descriptions.append(description)
    description = EventDescription(
        type='Flinn-Engdahl region',
        text=fe_region_number)
    event.event_descriptions.append(description)
    origin = Origin()
    res_id = '/'.join((res_id_prefix, 'origin', evid))
    origin.resource_id = ResourceIdentifier(id=res_id)
    origin.creation_info = CreationInfo()
    if source_code:
        origin.creation_info.agency_id = source_code
    else:
        origin.creation_info.agency_id = 'USGS-NEIC'
    res_id = '/'.join((res_id_prefix, 'earthmodel/ak135'))
    origin.earth_model_id = ResourceIdentifier(id=res_id)
    origin.time = UTCDateTime(date + time)
    origin.latitude = latitude * self._coordinate_sign(lat_type)
    origin.longitude = longitude * self._coordinate_sign(lon_type)
    origin.depth = depth * 1000
    origin.depth_type = 'from location'
    origin.quality = OriginQuality()
    origin.quality.associated_station_count = station_number
    origin.quality.standard_error = standard_dev
    # associated_phase_count can be incremented in records 'P ' and 'S '
    origin.quality.associated_phase_count = 0
    # depth_phase_count can be incremented in record 'S '
    origin.quality.depth_phase_count = 0
    origin.origin_type = 'hypocenter'
    origin.region = fe_region_name
    event.origins.append(origin)
    return event
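# The HY/AH parsers above flip the sign of latitude/longitude based on the
# hemisphere letter in the fixed-width record. A minimal standalone sketch of
# such a helper (hypothetical: the actual _coordinate_sign used above lives
# elsewhere in the parser class and may differ in detail):
def _coordinate_sign_sketch(hemisphere):
    """Return -1 for southern/western hemispheres, +1 otherwise."""
    return -1 if hemisphere in ('S', 'W') else 1

# e.g. 12.34 * _coordinate_sign_sketch('S') -> -12.34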
def ORNL_events_to_cat(ornl_file):
    """Make Catalog from ORNL locations"""
    cat = Catalog()
    loc_df = pd.read_csv(ornl_file, infer_datetime_format=True)
    loc_df = loc_df.set_index('event_datetime')
    eid = 0
    for dt, row in loc_df.iterrows():
        ot = UTCDateTime(dt)
        hmc_east = row['x(m)']
        hmc_north = row['y(m)']
        hmc_elev = row['z(m)']
        errX = row['error_x (m)']
        errY = row['error_y (m)']
        errZ = row['error_z (m)']
        rms = row['rms (millisecond)']
        converter = SURF_converter()
        lon, lat, elev = converter.to_lonlat((hmc_east, hmc_north,
                                              hmc_elev))
        o = Origin(time=ot, latitude=lat, longitude=lon, depth=130 - elev)
        o.origin_uncertainty = OriginUncertainty()
        o.quality = OriginQuality()
        ou = o.origin_uncertainty
        oq = o.quality
        ou.max_horizontal_uncertainty = np.max([errX, errY])
        ou.min_horizontal_uncertainty = np.min([errX, errY])
        o.depth_errors.uncertainty = errZ
        # rms column is in milliseconds; standard_error is in seconds
        oq.standard_error = rms * 1e-3
        extra = AttribDict({
            'hmc_east': {
                'value': hmc_east,
                'namespace': 'smi:local/hmc'
            },
            'hmc_north': {
                'value': hmc_north,
                'namespace': 'smi:local/hmc'
            },
            'hmc_elev': {
                'value': hmc_elev,
                'namespace': 'smi:local/hmc'
            },
            'hmc_eid': {
                'value': eid,
                'namespace': 'smi:local/hmc'
            }
        })
        o.extra = extra
        rid = ResourceIdentifier(id=ot.strftime('%Y%m%d%H%M%S%f'))
        # Dummy magnitude of 1. for all events until further notice
        mag = Magnitude(mag=1., mag_errors=QuantityError(uncertainty=1.))
        ev = Event(origins=[o], magnitudes=[mag], resource_id=rid)
        ev.preferred_origin_id = o.resource_id.id
        cat.events.append(ev)
        eid += 1
    return cat
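# Usage sketch for ORNL_events_to_cat. The CSV path below is a placeholder;
# the expected columns are the ones read above ('event_datetime', 'x(m)',
# 'y(m)', 'z(m)', 'error_x (m)', 'error_y (m)', 'error_z (m)',
# 'rms (millisecond)'):
#
#   cat = ORNL_events_to_cat('ornl_locations.csv')
#   print(cat)  # one Event per CSV row, each with a dummy magnitude of 1.0
#   cat.write('ornl_locations.xml', format='QUAKEML')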
def _parse_record_dp(self, line, event):
    """
    Parses the 'source parameter data - primary' record Dp
    """
    source_contributor = line[2:6].strip()
    computation_type = line[6]
    exponent = self._int_zero(line[7])
    scale = math.pow(10, exponent)
    centroid_origin_time = line[8:14] + '.' + line[14]
    orig_time_stderr = line[15:17]
    if orig_time_stderr == 'FX':
        orig_time_stderr = 'Fixed'
    else:
        orig_time_stderr = \
            self._float_with_format(orig_time_stderr, '2.1', scale)
    centroid_latitude = self._float_with_format(line[17:21], '4.2')
    lat_type = line[21]
    if centroid_latitude is not None:
        centroid_latitude *= self._coordinate_sign(lat_type)
    lat_stderr = line[22:25]
    if lat_stderr == 'FX':
        lat_stderr = 'Fixed'
    else:
        lat_stderr = self._float_with_format(lat_stderr, '3.2', scale)
    centroid_longitude = self._float_with_format(line[25:30], '5.2')
    lon_type = line[30]
    if centroid_longitude is not None:
        centroid_longitude *= self._coordinate_sign(lon_type)
    lon_stderr = line[31:34]
    if lon_stderr == 'FX':
        lon_stderr = 'Fixed'
    else:
        lon_stderr = self._float_with_format(lon_stderr, '3.2', scale)
    centroid_depth = self._float_with_format(line[34:38], '4.1')
    depth_stderr = line[38:40]
    if depth_stderr == 'FX' or depth_stderr == 'BD':
        depth_stderr = 'Fixed'
    else:
        depth_stderr = self._float_with_format(depth_stderr, '2.1', scale)
    station_number = self._int_zero(line[40:43])
    component_number = self._int_zero(line[43:46])
    station_number2 = self._int_zero(line[46:48])
    component_number2 = self._int_zero(line[48:51])
    # unused: half_duration = self._float_with_format(line[51:54], '3.1')
    moment = self._float_with_format(line[54:56], '2.1')
    moment_stderr = self._float_with_format(line[56:58], '2.1')
    moment_exponent = self._int(line[58:60])
    if (moment is not None) and (moment_exponent is not None):
        moment *= math.pow(10, moment_exponent)
    if (moment_stderr is not None) and (moment_exponent is not None):
        moment_stderr *= math.pow(10, moment_exponent)

    evid = event.resource_id.id.split('/')[-1]
    # Create a new origin only if centroid time is defined:
    origin = None
    if centroid_origin_time.strip() != '.':
        origin = Origin()
        res_id = '/'.join(
            (res_id_prefix, 'origin', evid, source_contributor.lower(),
             'mw' + computation_type.lower()))
        origin.resource_id = ResourceIdentifier(id=res_id)
        origin.creation_info = \
            CreationInfo(agency_id=source_contributor)
        date = event.origins[0].time.strftime('%Y%m%d')
        origin.time = UTCDateTime(date + centroid_origin_time)
        # Check if centroid time is on the next day:
        if origin.time < event.origins[0].time:
            origin.time += timedelta(days=1)
        self._store_uncertainty(origin.time_errors, orig_time_stderr)
        origin.latitude = centroid_latitude
        origin.longitude = centroid_longitude
        origin.depth = centroid_depth * 1000
        if lat_stderr == 'Fixed' and lon_stderr == 'Fixed':
            origin.epicenter_fixed = True
        else:
            self._store_uncertainty(
                origin.latitude_errors,
                self._lat_err_to_deg(lat_stderr))
            self._store_uncertainty(
                origin.longitude_errors,
                self._lon_err_to_deg(lon_stderr, origin.latitude))
        if depth_stderr == 'Fixed':
            origin.depth_type = 'operator assigned'
        else:
            origin.depth_type = 'from location'
        self._store_uncertainty(origin.depth_errors, depth_stderr,
                                scale=1000)
        quality = OriginQuality()
        quality.used_station_count = \
            station_number + station_number2
        quality.used_phase_count = \
            component_number + component_number2
        origin.quality = quality
        origin.origin_type = 'centroid'
        event.origins.append(origin)

    focal_mechanism = FocalMechanism()
    res_id = '/'.join(
        (res_id_prefix, 'focalmechanism', evid, source_contributor.lower(),
         'mw' + computation_type.lower()))
    focal_mechanism.resource_id = ResourceIdentifier(id=res_id)
    focal_mechanism.creation_info = \
        CreationInfo(agency_id=source_contributor)
    moment_tensor = MomentTensor()
    if origin is not None:
        moment_tensor.derived_origin_id = origin.resource_id
    else:
        # this is required for QuakeML validation:
        res_id = '/'.join((res_id_prefix, 'no-origin'))
        moment_tensor.derived_origin_id = \
            ResourceIdentifier(id=res_id)
    for mag in event.magnitudes:
        if mag.creation_info.agency_id == source_contributor:
            moment_tensor.moment_magnitude_id = mag.resource_id
    res_id = '/'.join(
        (res_id_prefix, 'momenttensor', evid, source_contributor.lower(),
         'mw' + computation_type.lower()))
    moment_tensor.resource_id = ResourceIdentifier(id=res_id)
    moment_tensor.scalar_moment = moment
    self._store_uncertainty(moment_tensor.scalar_moment_errors,
                            moment_stderr)
    data_used = DataUsed()
    data_used.station_count = station_number + station_number2
    data_used.component_count = component_number + component_number2
    if computation_type == 'C':
        res_id = '/'.join((res_id_prefix, 'methodID=CMT'))
        focal_mechanism.method_id = ResourceIdentifier(id=res_id)
        # CMT algorithm uses long-period body waves,
        # very-long-period surface waves and
        # intermediate period surface waves (since 2004
        # for shallow and intermediate-depth earthquakes
        # --Ekstrom et al., 2012)
        data_used.wave_type = 'combined'
    elif computation_type == 'M':
        res_id = '/'.join((res_id_prefix, 'methodID=moment_tensor'))
        focal_mechanism.method_id = ResourceIdentifier(id=res_id)
        # FIXME: not sure which kind of data is used by
        # "moment tensor" algorithm.
        data_used.wave_type = 'unknown'
    elif computation_type == 'B':
        res_id = '/'.join((res_id_prefix, 'methodID=broadband_data'))
        focal_mechanism.method_id = ResourceIdentifier(id=res_id)
        # FIXME: is 'combined' correct here?
        data_used.wave_type = 'combined'
    elif computation_type == 'F':
        res_id = '/'.join((res_id_prefix, 'methodID=P-wave_first_motion'))
        focal_mechanism.method_id = ResourceIdentifier(id=res_id)
        data_used.wave_type = 'P waves'
    elif computation_type == 'S':
        res_id = '/'.join((res_id_prefix, 'methodID=scalar_moment'))
        focal_mechanism.method_id = ResourceIdentifier(id=res_id)
        # FIXME: not sure which kind of data is used
        # for scalar moment determination.
        data_used.wave_type = 'unknown'
    moment_tensor.data_used = [data_used]
    focal_mechanism.moment_tensor = moment_tensor
    event.focal_mechanisms.append(focal_mechanism)
    return focal_mechanism
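# The Dp record above yields a scalar seismic moment. For reference, the
# standard Kanamori relation converts such a moment to a moment magnitude.
# A minimal sketch, assuming the moment is already expressed in N*m (verify
# the record's native unit before relying on this):
import math

def mw_from_scalar_moment(moment_nm):
    """Moment magnitude from a scalar moment in N*m."""
    return 2.0 / 3.0 * (math.log10(moment_nm) - 9.1)

# e.g. mw_from_scalar_moment(1.1e17) -> ~5.3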
def __toOrigin(parser, origin_el):
    """
    Parses a given origin etree element.

    :type parser: :class:`~obspy.core.util.xmlwrapper.XMLParser`
    :param parser: Open XMLParser object.
    :type origin_el: etree.element
    :param origin_el: origin element to be parsed.
    :return: An ObsPy :class:`~obspy.core.event.Origin` object.
    """
    origin = Origin()

    # I guess setting the program used as the method id is fine.
    origin.method_id = parser.xpath2obj('program', origin_el)

    # Standard parameters.
    origin.time, origin.time_errors = \
        __toTimeQuantity(parser, origin_el, "time")
    origin.latitude, origin.latitude_errors = \
        __toFloatQuantity(parser, origin_el, "latitude")
    origin.longitude, origin.longitude_errors = \
        __toFloatQuantity(parser, origin_el, "longitude")
    origin.depth, origin.depth_errors = \
        __toFloatQuantity(parser, origin_el, "depth")

    # Figure out the depth type.
    depth_type = parser.xpath2obj("depth_type", origin_el, str)
    # Map Seishub specific depth type to the QuakeML depth type.
    # (was a no-op comparison "depth_type == ..." instead of an assignment)
    if depth_type == "from location program":
        depth_type = "from location"
    if depth_type is not None:
        origin.depth_type = depth_type

    # Earth model.
    origin.earth_model_id = parser.xpath2obj("earth_mod", origin_el, str)

    # Parse the origin uncertainty. Rather verbose but should cover all
    # cases.
    pref_desc = parser.xpath2obj("originUncertainty/preferredDescription",
                                 origin_el, str)
    hor_uncert = parser.xpath2obj("originUncertainty/horizontalUncertainty",
                                  origin_el, float)
    min_hor_uncert = parser.xpath2obj(
        "originUncertainty/minHorizontalUncertainty", origin_el, float)
    max_hor_uncert = parser.xpath2obj(
        "originUncertainty/maxHorizontalUncertainty", origin_el, float)
    azi_max_hor_uncert = parser.xpath2obj(
        "originUncertainty/azimuthMaxHorizontalUncertainty", origin_el,
        float)
    origin_uncert = {}
    if pref_desc:
        origin_uncert["preferred_description"] = pref_desc
    if hor_uncert:
        origin_uncert["horizontal_uncertainty"] = hor_uncert
    if min_hor_uncert:
        origin_uncert["min_horizontal_uncertainty"] = min_hor_uncert
    if max_hor_uncert:
        origin_uncert["max_horizontal_uncertainty"] = max_hor_uncert
    if azi_max_hor_uncert:
        origin_uncert["azimuth_max_horizontal_uncertainty"] = \
            azi_max_hor_uncert
    if origin_uncert:
        origin.origin_uncertainty = origin_uncert

    # Parse the OriginQuality if applicable.
    if not origin_el.xpath("originQuality"):
        return origin
    origin_quality_el = origin_el.xpath("originQuality")[0]
    origin.quality = OriginQuality()
    origin.quality.associated_phase_count = \
        parser.xpath2obj("associatedPhaseCount", origin_quality_el, int)
    # QuakeML does apparently not distinguish between P and S wave phase
    # count. Some Seishub event files do.
    p_phase_count = parser.xpath2obj("P_usedPhaseCount",
                                     origin_quality_el, int)
    s_phase_count = parser.xpath2obj("S_usedPhaseCount",
                                     origin_quality_el, int)
    # Use both in case they are set ("is not None" so that legitimate
    # counts of zero are not discarded).
    if p_phase_count is not None and s_phase_count is not None:
        phase_count = p_phase_count + s_phase_count
        # Also add two Seishub element file specific elements.
        origin.quality.p_used_phase_count = p_phase_count
        origin.quality.s_used_phase_count = s_phase_count
    # Otherwise the total usedPhaseCount should be specified.
    else:
        phase_count = parser.xpath2obj("usedPhaseCount",
                                       origin_quality_el, int)
    origin.quality.used_phase_count = phase_count

    origin.quality.associated_station_count = \
        parser.xpath2obj("associatedStationCount", origin_quality_el, int)
    origin.quality.used_station_count = \
        parser.xpath2obj("usedStationCount", origin_quality_el, int)
    origin.quality.depth_phase_count = \
        parser.xpath2obj("depthPhaseCount", origin_quality_el, int)
    origin.quality.standard_error = \
        parser.xpath2obj("standardError", origin_quality_el, float)
    origin.quality.azimuthal_gap = \
        parser.xpath2obj("azimuthalGap", origin_quality_el, float)
    origin.quality.secondary_azimuthal_gap = \
        parser.xpath2obj("secondaryAzimuthalGap", origin_quality_el, float)
    origin.quality.ground_truth_level = \
        parser.xpath2obj("groundTruthLevel", origin_quality_el, float)
    origin.quality.minimum_distance = \
        parser.xpath2obj("minimumDistance", origin_quality_el, float)
    origin.quality.maximum_distance = \
        parser.xpath2obj("maximumDistance", origin_quality_el, float)
    origin.quality.median_distance = \
        parser.xpath2obj("medianDistance", origin_quality_el, float)
    return origin
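# The P/S phase-count merge above is easy to get wrong: a plain truthiness
# check would silently drop legitimate counts of zero. A standalone sketch of
# the intended rule (hypothetical helper, not part of the parser):
def merge_used_phase_counts(p_count, s_count, total_count):
    """Prefer the sum of P and S counts when both are present."""
    if p_count is not None and s_count is not None:
        return p_count + s_count
    return total_count

# merge_used_phase_counts(0, 4, None) -> 4 rather than falling back to None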
def _parse_first_line_origin(self, line, event, magnitudes):
    """
    Parse the first line of origin data.

    :type line: str
    :param line: Line to parse.
    :type event: :class:`~obspy.core.event.event.Event`
    :param event: Event of the origin.
    :type magnitudes: list of
        :class:`~obspy.core.event.magnitude.Magnitude`
    :param magnitudes: Store magnitudes in a list to keep their positions.
    :rtype: :class:`~obspy.core.event.origin.Origin`,
        :class:`~obspy.core.event.resourceid.ResourceIdentifier`
    :returns: Parsed origin or None, resource identifier of the origin.
    """
    magnitude_types = []
    magnitude_values = []
    magnitude_station_counts = []

    fields = self.fields['line_1']

    time_origin = line[fields['time']].strip()
    time_fixed_flag = line[fields['time_fixf']].strip()
    latitude = line[fields['lat']].strip()
    longitude = line[fields['lon']].strip()
    epicenter_fixed_flag = line[fields['epicenter_fixf']].strip()
    depth = line[fields['depth']].strip()
    depth_fixed_flag = line[fields['depth_fixf']].strip()
    phase_count = line[fields['n_def']].strip()
    station_count = line[fields['n_sta']].strip()
    azimuthal_gap = line[fields['gap']].strip()
    magnitude_types.append(line[fields['mag_type_1']].strip())
    magnitude_values.append(line[fields['mag_1']].strip())
    magnitude_station_counts.append(line[fields['mag_n_sta_1']].strip())
    magnitude_types.append(line[fields['mag_type_2']].strip())
    magnitude_values.append(line[fields['mag_2']].strip())
    magnitude_station_counts.append(line[fields['mag_n_sta_2']].strip())
    magnitude_types.append(line[fields['mag_type_3']].strip())
    magnitude_values.append(line[fields['mag_3']].strip())
    magnitude_station_counts.append(line[fields['mag_n_sta_3']].strip())
    author = line[fields['author']].strip()
    origin_id = line[fields['id']].strip()

    origin = Origin()
    origin.quality = OriginQuality()

    try:
        origin.time = UTCDateTime(time_origin.replace('/', '-'))
        origin.latitude = float(latitude)
        origin.longitude = float(longitude)
    except (TypeError, ValueError):
        self._warn('Missing origin data, skipping event')
        return None, None

    origin.time_fixed = time_fixed_flag.lower() == 'f'
    origin.epicenter_fixed = epicenter_fixed_flag.lower() == 'f'

    try:
        # Convert value from km to m
        origin.depth = float(depth) * 1000
    except ValueError:
        pass
    try:
        origin.depth_type = DEPTH_TYPES[depth_fixed_flag]
    except KeyError:
        origin.depth_type = OriginDepthType('from location')
    try:
        origin.quality.used_phase_count = int(phase_count)
        origin.quality.associated_phase_count = int(phase_count)
    except ValueError:
        pass
    try:
        origin.quality.used_station_count = int(station_count)
        origin.quality.associated_station_count = int(station_count)
    except ValueError:
        pass
    try:
        origin.quality.azimuthal_gap = float(azimuthal_gap)
    except ValueError:
        pass

    self.author = author
    origin.creation_info = self._get_creation_info()

    public_id = "origin/%s" % origin_id
    origin_res_id = self._get_res_id(public_id)

    for i in range(3):
        try:
            magnitude = Magnitude()
            magnitude.creation_info = self._get_creation_info()
            magnitude.magnitude_type = magnitude_types[i]
            magnitude.mag = float(magnitude_values[i])
            magnitude.station_count = int(magnitude_station_counts[i])
            magnitude.origin_id = origin_res_id
            magnitudes.append(magnitude)
            event.magnitudes.append(magnitude)
        except ValueError:
            # Magnitude can be empty but we need to keep the
            # position between mag1, mag2 or mag3.
            magnitudes.append(None)

    return origin, origin_res_id
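# self.fields['line_1'] above maps field names to slice objects into the
# fixed-width bulletin line. A hypothetical illustration of that mapping's
# shape (the column positions here are invented, not the real format spec):
fields_line_1_sketch = {
    'time': slice(0, 22),
    'time_fixf': slice(22, 23),
    'lat': slice(25, 33),
    'lon': slice(34, 43),
    # ... one slice per field name used in _parse_first_line_origin
}
# line[fields_line_1_sketch['lat']] then extracts the latitude column.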
def read_nlloc_hyp(filename, coordinate_converter=None, picks=None, **kwargs):
    """
    Reads a NonLinLoc Hypocenter-Phase file to a
    :class:`~obspy.core.event.Catalog` object.

    .. note::

        Coordinate conversion from coordinate frame of NonLinLoc model
        files / location run to WGS84 has to be specified explicitly by the
        user if necessary.

    .. note::

        An example can be found on the :mod:`~obspy.io.nlloc` submodule
        front page in the documentation pages.

    :param filename: File or file-like object in text mode.
    :type coordinate_converter: func
    :param coordinate_converter: Function to convert (x, y, z) coordinates
        of NonLinLoc output to geographical coordinates (longitude,
        latitude, depth in kilometers). If left ``None``, NonLinLoc
        (x, y, z) output is left unchanged (e.g. if it is in geographical
        coordinates already like for NonLinLoc in global mode).
        The function should accept three arguments x, y, z (each of type
        :class:`numpy.ndarray`) and return a tuple of three
        :class:`numpy.ndarray` (lon, lat, depth in kilometers).
    :type picks: list of :class:`~obspy.core.event.Pick`
    :param picks: Original picks used to generate the NonLinLoc location.
        If provided, the output event will include the original picks and
        the arrivals in the output origin will link to them correctly (with
        their ``pick_id`` attribute). If not provided, the output event
        will include (the rather basic) pick information that can be
        reconstructed from the NonLinLoc hypocenter-phase file.
    :rtype: :class:`~obspy.core.event.Catalog`
    """
    if not hasattr(filename, "read"):
        # Check if it exists, otherwise assume it's a string.
        try:
            with open(filename, "rt") as fh:
                data = fh.read()
        except Exception:
            try:
                data = filename.decode()
            except Exception:
                data = str(filename)
            data = data.strip()
    else:
        data = filename.read()
        if hasattr(data, "decode"):
            data = data.decode()

    lines = data.splitlines()

    # remember picks originally used in location, if provided
    original_picks = picks
    if original_picks is None:
        original_picks = []

    # determine indices of block start/end of the NLLOC output file
    indices_hyp = [None, None]
    indices_phases = [None, None]
    for i, line in enumerate(lines):
        if line.startswith("NLLOC "):
            indices_hyp[0] = i
        elif line.startswith("END_NLLOC"):
            indices_hyp[1] = i
        elif line.startswith("PHASE "):
            indices_phases[0] = i
        elif line.startswith("END_PHASE"):
            indices_phases[1] = i
    if any([i is None for i in indices_hyp]):
        msg = ("NLLOC HYP file seems corrupt,"
               " could not detect 'NLLOC' and 'END_NLLOC' lines.")
        raise RuntimeError(msg)
    # strip any other lines around NLLOC block
    lines = lines[indices_hyp[0]:indices_hyp[1]]

    # extract PHASES lines (if any)
    if any(indices_phases):
        if not all(indices_phases):
            msg = "NLLOC HYP file seems corrupt, 'PHASE' block is corrupt."
            raise RuntimeError(msg)
        i1, i2 = indices_phases
        lines, phases_lines = lines[:i1] + lines[i2 + 1:], lines[i1 + 1:i2]
    else:
        phases_lines = []

    lines = dict([line.split(None, 1) for line in lines])
    line = lines["SIGNATURE"]

    line = line.rstrip().split('"')[1]
    signature, version, date, time = line.rsplit(" ", 3)
    creation_time = UTCDateTime.strptime(date + time,
                                         str("%d%b%Y%Hh%Mm%S"))

    # maximum likelihood origin location info line
    line = lines["HYPOCENTER"]
    x, y, z = map(float, line.split()[1:7:2])

    if coordinate_converter:
        x, y, z = coordinate_converter(x, y, z)

    # origin time info line
    line = lines["GEOGRAPHIC"]
    year, month, day, hour, minute = map(int, line.split()[1:6])
    seconds = float(line.split()[6])
    time = UTCDateTime(year, month, day, hour, minute, seconds)

    # distribution statistics line
    line = lines["STATISTICS"]
    covariance_xx = float(line.split()[7])
    covariance_yy = float(line.split()[13])
    covariance_zz = float(line.split()[17])
    stats_info_string = str(
        "Note: Depth/Latitude/Longitude errors are calculated from "
        "covariance matrix as 1D marginal (Lon/Lat errors as great circle "
        "degrees) while OriginUncertainty min/max horizontal errors are "
        "calculated from 2D error ellipsoid and are therefore seemingly "
        "higher compared to 1D errors. Error estimates can be reconstructed "
        "from the following original NonLinLoc error statistics line:"
        "\nSTATISTICS " + lines["STATISTICS"])

    # goto location quality info line
    line = lines["QML_OriginQuality"].split()
    (assoc_phase_count, used_phase_count, assoc_station_count,
     used_station_count, depth_phase_count) = map(int, line[1:11:2])
    stderr, az_gap, sec_az_gap = map(float, line[11:17:2])
    gt_level = line[17]
    min_dist, max_dist, med_dist = map(float, line[19:25:2])

    # goto location quality info line
    line = lines["QML_OriginUncertainty"]
    hor_unc, min_hor_unc, max_hor_unc, hor_unc_azim = \
        map(float, line.split()[1:9:2])

    # assign origin info
    event = Event()
    cat = Catalog(events=[event])
    o = Origin()
    event.origins = [o]
    o.origin_uncertainty = OriginUncertainty()
    o.quality = OriginQuality()
    ou = o.origin_uncertainty
    oq = o.quality
    o.comments.append(Comment(text=stats_info_string))

    cat.creation_info.creation_time = UTCDateTime()
    cat.creation_info.version = "ObsPy %s" % __version__
    event.creation_info = CreationInfo(creation_time=creation_time,
                                       version=version)
    event.creation_info.version = version
    o.creation_info = CreationInfo(creation_time=creation_time,
                                   version=version)

    # negative values can appear on diagonal of covariance matrix due to a
    # precision problem in NLLoc implementation when location coordinates
    # are large compared to the covariances.
    o.longitude = x
    try:
        o.longitude_errors.uncertainty = \
            kilometer2degrees(sqrt(covariance_xx))
    except ValueError:
        if covariance_xx < 0:
            msg = ("Negative value in XX value of covariance matrix, not "
                   "setting longitude error (epicentral uncertainties will "
                   "still be set in origin uncertainty).")
            warnings.warn(msg)
        else:
            raise
    o.latitude = y
    try:
        o.latitude_errors.uncertainty = \
            kilometer2degrees(sqrt(covariance_yy))
    except ValueError:
        if covariance_yy < 0:
            msg = ("Negative value in YY value of covariance matrix, not "
                   "setting latitude error (epicentral uncertainties will "
                   "still be set in origin uncertainty).")
            warnings.warn(msg)
        else:
            raise
    o.depth = z * 1e3  # meters!
    o.depth_errors.uncertainty = sqrt(covariance_zz) * 1e3  # meters!
    o.depth_errors.confidence_level = 68
    o.depth_type = str("from location")
    o.time = time

    ou.horizontal_uncertainty = hor_unc
    ou.min_horizontal_uncertainty = min_hor_unc
    ou.max_horizontal_uncertainty = max_hor_unc
    # values of -1 seem to be used for unset values, set to None
    for field in ("horizontal_uncertainty", "min_horizontal_uncertainty",
                  "max_horizontal_uncertainty"):
        if ou.get(field, -1) == -1:
            ou[field] = None
        else:
            ou[field] *= 1e3  # meters!
    ou.azimuth_max_horizontal_uncertainty = hor_unc_azim
    ou.preferred_description = str("uncertainty ellipse")
    ou.confidence_level = 68  # NonLinLoc in general uses 1-sigma (68%) level

    oq.standard_error = stderr
    oq.azimuthal_gap = az_gap
    oq.secondary_azimuthal_gap = sec_az_gap
    oq.used_phase_count = used_phase_count
    oq.used_station_count = used_station_count
    oq.associated_phase_count = assoc_phase_count
    oq.associated_station_count = assoc_station_count
    oq.depth_phase_count = depth_phase_count
    oq.ground_truth_level = gt_level
    oq.minimum_distance = kilometer2degrees(min_dist)
    oq.maximum_distance = kilometer2degrees(max_dist)
    oq.median_distance = kilometer2degrees(med_dist)

    # go through all phase info lines
    for line in phases_lines:
        line = line.split()
        arrival = Arrival()
        o.arrivals.append(arrival)
        station = str(line[0])
        phase = str(line[4])
        arrival.phase = phase
        arrival.distance = kilometer2degrees(float(line[21]))
        arrival.azimuth = float(line[23])
        arrival.takeoff_angle = float(line[24])
        arrival.time_residual = float(line[16])
        arrival.time_weight = float(line[17])
        pick = Pick()
        wid = WaveformStreamID(station_code=station)
        date, hourmin, sec = map(str, line[6:9])
        t = UTCDateTime.strptime(date + hourmin, "%Y%m%d%H%M") + float(sec)
        pick.waveform_id = wid
        pick.time = t
        pick.time_errors.uncertainty = float(line[10])
        pick.phase_hint = phase
        pick.onset = ONSETS.get(line[3].lower(), None)
        pick.polarity = POLARITIES.get(line[5].lower(), None)
        # try to determine original pick for each arrival
        for pick_ in original_picks:
            wid = pick_.waveform_id
            if station == wid.station_code and phase == pick_.phase_hint:
                pick = pick_
                break
        else:
            # warn if original picks were specified and we could not
            # associate the arrival correctly
            if original_picks:
                msg = ("Could not determine corresponding original pick "
                       "for arrival. "
                       "Falling back to pick information in NonLinLoc "
                       "hypocenter-phase file.")
                warnings.warn(msg)
        event.picks.append(pick)
        arrival.pick_id = pick.resource_id

    return cat
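# Usage sketch for read_nlloc_hyp with a coordinate converter. The EPSG codes
# and file name are placeholders for whatever projected frame the NonLinLoc
# model was set up in, and pyproj is an assumption, not a requirement of this
# reader:
#
#   from pyproj import Transformer
#
#   transformer = Transformer.from_crs("epsg:32633", "epsg:4326",
#                                      always_xy=True)
#
#   def my_converter(x, y, z):
#       # NonLinLoc x/y are in km in the model frame; depth z stays in km.
#       lon, lat = transformer.transform(x * 1e3, y * 1e3)
#       return lon, lat, z
#
#   cat = read_nlloc_hyp("last.hyp", coordinate_converter=my_converter)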
def _read_single_hypocenter(lines, coordinate_converter, original_picks):
    """
    Given a list of lines (starting with a 'NLLOC' line and ending with a
    'END_NLLOC' line), parse them into an Event.
    """
    try:
        # some paranoid checks..
        assert lines[0].startswith("NLLOC ")
        assert lines[-1].startswith("END_NLLOC")
        for line in lines[1:-1]:
            assert not line.startswith("NLLOC ")
            assert not line.startswith("END_NLLOC")
    except Exception:
        msg = ("This should not have happened, please report this as a bug "
               "at https://github.com/obspy/obspy/issues.")
        raise Exception(msg)

    indices_phases = [None, None]
    for i, line in enumerate(lines):
        if line.startswith("PHASE "):
            indices_phases[0] = i
        elif line.startswith("END_PHASE"):
            indices_phases[1] = i

    # extract PHASES lines (if any)
    if any(indices_phases):
        if not all(indices_phases):
            msg = "NLLOC HYP file seems corrupt, 'PHASE' block is corrupt."
            raise RuntimeError(msg)
        i1, i2 = indices_phases
        lines, phases_lines = lines[:i1] + lines[i2 + 1:], lines[i1 + 1:i2]
    else:
        phases_lines = []

    lines = dict([line.split(None, 1) for line in lines[:-1]])
    line = lines["SIGNATURE"]

    line = line.rstrip().split('"')[1]
    signature, version, date, time = line.rsplit(" ", 3)
    # new NLLoc > 6.0 seems to add prefix 'run:' before date
    if date.startswith('run:'):
        date = date[4:]
    signature = signature.strip()
    creation_time = UTCDateTime.strptime(date + time,
                                         str("%d%b%Y%Hh%Mm%S"))

    if coordinate_converter:
        # maximum likelihood origin location in km info line
        line = lines["HYPOCENTER"]
        x, y, z = coordinate_converter(*map(float, line.split()[1:7:2]))
    else:
        # maximum likelihood origin location lon lat info line
        line = lines["GEOGRAPHIC"]
        y, x, z = map(float, line.split()[8:13:2])

    # maximum likelihood origin time info line
    line = lines["GEOGRAPHIC"]
    year, month, day, hour, minute = map(int, line.split()[1:6])
    seconds = float(line.split()[6])
    time = UTCDateTime(year, month, day, hour, minute, seconds,
                       strict=False)

    # distribution statistics line
    line = lines["STATISTICS"]
    covariance_xx = float(line.split()[7])
    covariance_yy = float(line.split()[13])
    covariance_zz = float(line.split()[17])
    stats_info_string = str(
        "Note: Depth/Latitude/Longitude errors are calculated from "
        "covariance matrix as 1D marginal (Lon/Lat errors as great circle "
        "degrees) while OriginUncertainty min/max horizontal errors are "
        "calculated from 2D error ellipsoid and are therefore seemingly "
        "higher compared to 1D errors. Error estimates can be reconstructed "
        "from the following original NonLinLoc error statistics line:"
        "\nSTATISTICS " + lines["STATISTICS"])

    # goto location quality info line
    line = lines["QML_OriginQuality"].split()
    (assoc_phase_count, used_phase_count, assoc_station_count,
     used_station_count, depth_phase_count) = map(int, line[1:11:2])
    stderr, az_gap, sec_az_gap = map(float, line[11:17:2])
    gt_level = line[17]
    min_dist, max_dist, med_dist = map(float, line[19:25:2])

    # goto location quality info line
    line = lines["QML_OriginUncertainty"]

    # guard against "COMMENT" being absent (previously 'comment' could be
    # referenced while undefined)
    comment = None
    if "COMMENT" in lines:
        comment = lines["COMMENT"].strip()
        comment = comment.strip('\'"')
        comment = comment.strip()

    hor_unc, min_hor_unc, max_hor_unc, hor_unc_azim = \
        map(float, line.split()[1:9:2])

    # assign origin info
    event = Event()
    o = Origin()
    event.origins = [o]
    event.preferred_origin_id = o.resource_id
    o.origin_uncertainty = OriginUncertainty()
    o.quality = OriginQuality()
    ou = o.origin_uncertainty
    oq = o.quality
    o.comments.append(Comment(text=stats_info_string,
                              force_resource_id=False))
    if comment is not None:
        event.comments.append(Comment(text=comment,
                                      force_resource_id=False))

    # SIGNATURE field's first item is LOCSIG, which is supposed to be
    # 'Identification of an individual, institution or other entity'
    # according to
    # http://alomax.free.fr/nlloc/soft6.00/control.html#_NLLoc_locsig_
    # so use it as author in creation info
    event.creation_info = CreationInfo(creation_time=creation_time,
                                       version=version,
                                       author=signature)
    o.creation_info = CreationInfo(creation_time=creation_time,
                                   version=version,
                                   author=signature)

    # negative values can appear on diagonal of covariance matrix due to a
    # precision problem in NLLoc implementation when location coordinates
    # are large compared to the covariances.
    o.longitude = x
    try:
        o.longitude_errors.uncertainty = \
            kilometer2degrees(sqrt(covariance_xx))
    except ValueError:
        if covariance_xx < 0:
            msg = ("Negative value in XX value of covariance matrix, not "
                   "setting longitude error (epicentral uncertainties will "
                   "still be set in origin uncertainty).")
            warnings.warn(msg)
        else:
            raise
    o.latitude = y
    try:
        o.latitude_errors.uncertainty = \
            kilometer2degrees(sqrt(covariance_yy))
    except ValueError:
        if covariance_yy < 0:
            msg = ("Negative value in YY value of covariance matrix, not "
                   "setting latitude error (epicentral uncertainties will "
                   "still be set in origin uncertainty).")
            warnings.warn(msg)
        else:
            raise
    o.depth = z * 1e3  # meters!
    o.depth_errors.uncertainty = sqrt(covariance_zz) * 1e3  # meters!
    o.depth_errors.confidence_level = 68
    o.depth_type = str("from location")
    o.time = time

    ou.horizontal_uncertainty = hor_unc
    ou.min_horizontal_uncertainty = min_hor_unc
    ou.max_horizontal_uncertainty = max_hor_unc
    # values of -1 seem to be used for unset values, set to None
    for field in ("horizontal_uncertainty", "min_horizontal_uncertainty",
                  "max_horizontal_uncertainty"):
        if ou.get(field, -1) == -1:
            ou[field] = None
        else:
            ou[field] *= 1e3  # meters!
    ou.azimuth_max_horizontal_uncertainty = hor_unc_azim
    ou.preferred_description = str("uncertainty ellipse")
    ou.confidence_level = 68  # NonLinLoc in general uses 1-sigma (68%) level

    oq.standard_error = stderr
    oq.azimuthal_gap = az_gap
    oq.secondary_azimuthal_gap = sec_az_gap
    oq.used_phase_count = used_phase_count
    oq.used_station_count = used_station_count
    oq.associated_phase_count = assoc_phase_count
    oq.associated_station_count = assoc_station_count
    oq.depth_phase_count = depth_phase_count
    oq.ground_truth_level = gt_level
    oq.minimum_distance = kilometer2degrees(min_dist)
    oq.maximum_distance = kilometer2degrees(max_dist)
    oq.median_distance = kilometer2degrees(med_dist)

    # go through all phase info lines
    for line in phases_lines:
        line = line.split()
        arrival = Arrival()
        o.arrivals.append(arrival)
        station = str(line[0])
        phase = str(line[4])
        arrival.phase = phase
        arrival.distance = kilometer2degrees(float(line[21]))
        arrival.azimuth = float(line[23])
        arrival.takeoff_angle = float(line[24])
        arrival.time_residual = float(line[16])
        arrival.time_weight = float(line[17])
        pick = Pick()
        # network codes are not used by NonLinLoc, so they can not be known
        # when reading the .hyp file.. to conform with QuakeML standard set
        # an empty network code
        wid = WaveformStreamID(network_code="", station_code=station)
        # have to split this into ints for overflow to work correctly
        date, hourmin, sec = map(str, line[6:9])
        ymd = [int(date[:4]), int(date[4:6]), int(date[6:8])]
        hm = [int(hourmin[:2]), int(hourmin[2:4])]
        t = UTCDateTime(*(ymd + hm), strict=False) + float(sec)
        pick.waveform_id = wid
        pick.time = t
        pick.time_errors.uncertainty = float(line[10])
        pick.phase_hint = phase
        pick.onset = ONSETS.get(line[3].lower(), None)
        pick.polarity = POLARITIES.get(line[5].lower(), None)
        # try to determine original pick for each arrival
        for pick_ in original_picks:
            wid = pick_.waveform_id
            if station == wid.station_code and phase == pick_.phase_hint:
                pick = pick_
                break
        else:
            # warn if original picks were specified and we could not
            # associate the arrival correctly
            if original_picks:
                msg = ("Could not determine corresponding original pick "
                       "for arrival. "
                       "Falling back to pick information in NonLinLoc "
                       "hypocenter-phase file.")
                warnings.warn(msg)
        event.picks.append(pick)
        arrival.pick_id = pick.resource_id

    event.scope_resource_ids()

    return event
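# _read_single_hypocenter expects exactly one NLLOC..END_NLLOC block. A sketch
# of how a file holding several locations could be split and fed to it (the
# variable names are illustrative, not part of the reader's API):
#
#   with open("all_events.hyp") as fh:
#       all_lines = fh.read().splitlines()
#
#   events = []
#   block = []
#   for ln in all_lines:
#       if ln.startswith("NLLOC "):
#           block = [ln]
#       elif ln.startswith("END_NLLOC"):
#           block.append(ln)
#           events.append(_read_single_hypocenter(
#               block, coordinate_converter=None, original_picks=[]))
#       elif block:
#           block.append(ln)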
def __toOrigin(parser, origin_el):
    """
    Parses a given origin etree element.

    :type parser: :class:`~obspy.core.util.xmlwrapper.XMLParser`
    :param parser: Open XMLParser object.
    :type origin_el: etree.element
    :param origin_el: origin element to be parsed.
    :return: An ObsPy :class:`~obspy.core.event.Origin` object.
    """
    global CURRENT_TYPE

    origin = Origin()
    origin.resource_id = ResourceIdentifier(
        prefix="/".join([RESOURCE_ROOT, "origin"]))

    # I guess setting the program used as the method id is fine.
    origin.method_id = "%s/location_method/%s/1" % (
        RESOURCE_ROOT, parser.xpath2obj('program', origin_el))
    if str(origin.method_id).lower().endswith("none"):
        origin.method_id = None

    # Standard parameters.
    origin.time, origin.time_errors = \
        __toTimeQuantity(parser, origin_el, "time")
    origin.latitude, origin_latitude_error = \
        __toFloatQuantity(parser, origin_el, "latitude")
    origin.longitude, origin_longitude_error = \
        __toFloatQuantity(parser, origin_el, "longitude")
    origin.depth, origin.depth_errors = \
        __toFloatQuantity(parser, origin_el, "depth")

    if origin_longitude_error:
        origin_longitude_error = origin_longitude_error["uncertainty"]
    if origin_latitude_error:
        origin_latitude_error = origin_latitude_error["uncertainty"]

    # Figure out the depth type.
    depth_type = parser.xpath2obj("depth_type", origin_el)
    # Map Seishub specific depth type to the QuakeML depth type.
    if depth_type == "from location program":
        depth_type = "from location"
    if depth_type is not None:
        origin.depth_type = depth_type

    # XXX: CHECK DEPTH ORIENTATION!!
    if CURRENT_TYPE == "seiscomp3":
        origin.depth *= 1000
        if origin.depth_errors.uncertainty:
            origin.depth_errors.uncertainty *= 1000
    else:
        # Convert to m.
        origin.depth *= -1000
        if origin.depth_errors.uncertainty:
            origin.depth_errors.uncertainty *= 1000

    # Earth model.
    earth_mod = parser.xpath2obj('earth_mod', origin_el, str)
    if earth_mod:
        earth_mod = earth_mod.split()
        earth_mod = ",".join(earth_mod)
        origin.earth_model_id = "%s/earth_model/%s/1" % (RESOURCE_ROOT,
                                                         earth_mod)

    if (origin_latitude_error is None or origin_longitude_error is None) \
            and CURRENT_TYPE not in ["seiscomp3", "toni"]:
        raise Exception("Missing latitude/longitude error for origin.")

    # (was "origin_latitude_error and origin_latitude_error", a typo)
    if origin_latitude_error and origin_longitude_error:
        if CURRENT_TYPE in ["baynet", "obspyck"]:
            uncert = OriginUncertainty()
            if origin_latitude_error > origin_longitude_error:
                uncert.azimuth_max_horizontal_uncertainty = 0
            else:
                uncert.azimuth_max_horizontal_uncertainty = 90
            uncert.min_horizontal_uncertainty, \
                uncert.max_horizontal_uncertainty = \
                sorted([origin_longitude_error, origin_latitude_error])
            uncert.min_horizontal_uncertainty *= 1000.0
            uncert.max_horizontal_uncertainty *= 1000.0
            uncert.preferred_description = "uncertainty ellipse"
            origin.origin_uncertainty = uncert
        elif CURRENT_TYPE == "earthworm":
            uncert = OriginUncertainty()
            uncert.horizontal_uncertainty = origin_latitude_error
            uncert.horizontal_uncertainty *= 1000.0
            uncert.preferred_description = "horizontal uncertainty"
            origin.origin_uncertainty = uncert
        elif CURRENT_TYPE in ["seiscomp3", "toni"]:
            pass
        else:
            raise Exception("Unknown CURRENT_TYPE: %s" % CURRENT_TYPE)

    # Parse the OriginQuality if applicable.
    if not origin_el.xpath("originQuality"):
        return origin
    origin_quality_el = origin_el.xpath("originQuality")[0]
    origin.quality = OriginQuality()
    origin.quality.associated_phase_count = \
        parser.xpath2obj("associatedPhaseCount", origin_quality_el, int)
    # QuakeML does apparently not distinguish between P and S wave phase
    # count. Some Seishub event files do.
    p_phase_count = parser.xpath2obj("P_usedPhaseCount",
                                     origin_quality_el, int)
    s_phase_count = parser.xpath2obj("S_usedPhaseCount",
                                     origin_quality_el, int)
    # Use both in case they are set.
    if p_phase_count is not None and s_phase_count is not None:
        phase_count = p_phase_count + s_phase_count
        # Also add two Seishub element file specific elements.
        origin.quality.p_used_phase_count = p_phase_count
        origin.quality.s_used_phase_count = s_phase_count
    # Otherwise the total usedPhaseCount should be specified.
    else:
        phase_count = parser.xpath2obj("usedPhaseCount",
                                       origin_quality_el, int)
    if p_phase_count is not None:
        origin.quality.setdefault("extra", AttribDict())
        origin.quality.extra.usedPhaseCountP = {'value': p_phase_count,
                                                'namespace': NAMESPACE}
    if s_phase_count is not None:
        origin.quality.setdefault("extra", AttribDict())
        origin.quality.extra.usedPhaseCountS = {'value': s_phase_count,
                                                'namespace': NAMESPACE}
    origin.quality.used_phase_count = phase_count

    associated_station_count = \
        parser.xpath2obj("associatedStationCount", origin_quality_el, int)
    used_station_count = parser.xpath2obj("usedStationCount",
                                          origin_quality_el, int)
    depth_phase_count = parser.xpath2obj("depthPhaseCount",
                                         origin_quality_el, int)
    standard_error = parser.xpath2obj("standardError",
                                      origin_quality_el, float)
    azimuthal_gap = parser.xpath2obj("azimuthalGap",
                                     origin_quality_el, float)
    secondary_azimuthal_gap = \
        parser.xpath2obj("secondaryAzimuthalGap", origin_quality_el, float)
    ground_truth_level = parser.xpath2obj("groundTruthLevel",
                                          origin_quality_el, str)
    minimum_distance = parser.xpath2obj("minimumDistance",
                                        origin_quality_el, float)
    maximum_distance = parser.xpath2obj("maximumDistance",
                                        origin_quality_el, float)
    median_distance = parser.xpath2obj("medianDistance",
                                       origin_quality_el, float)
    if minimum_distance is not None:
        minimum_distance = kilometer2degrees(minimum_distance)
    if maximum_distance is not None:
        maximum_distance = kilometer2degrees(maximum_distance)
    if median_distance is not None:
        median_distance = kilometer2degrees(median_distance)

    if associated_station_count is not None:
        origin.quality.associated_station_count = associated_station_count
    if used_station_count is not None:
        origin.quality.used_station_count = used_station_count
    if depth_phase_count is not None:
        origin.quality.depth_phase_count = depth_phase_count
    if standard_error is not None and not math.isnan(standard_error):
        origin.quality.standard_error = standard_error
    if azimuthal_gap is not None:
        origin.quality.azimuthal_gap = azimuthal_gap
    if secondary_azimuthal_gap is not None:
        origin.quality.secondary_azimuthal_gap = secondary_azimuthal_gap
    if ground_truth_level is not None:
        origin.quality.ground_truth_level = ground_truth_level
    if minimum_distance is not None:
        origin.quality.minimum_distance = minimum_distance
    if maximum_distance is not None:
        origin.quality.maximum_distance = maximum_distance
    if median_distance is not None and not math.isnan(median_distance):
        origin.quality.median_distance = median_distance

    return origin
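# kilometer2degrees (from obspy.geodetics) converts epicentral distance from
# kilometers to degrees of arc; with obspy's default Earth radius of 6371 km:
#
#   degrees = km / (2 * math.pi * 6371.0 / 360.0)   # i.e. km / ~111.19
#
# so roughly 111.19 km of epicentral distance corresponds to one degree.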
def _parseRecordDp(self, line, event):
    """
    Parses the 'source parameter data - primary' record Dp
    """
    source_contributor = line[2:6].strip()
    computation_type = line[6]
    exponent = self._intZero(line[7])
    scale = math.pow(10, exponent)
    centroid_origin_time = line[8:14] + '.' + line[14]
    orig_time_stderr = line[15:17]
    if orig_time_stderr == 'FX':
        orig_time_stderr = 'Fixed'
    else:
        orig_time_stderr = \
            self._floatWithFormat(orig_time_stderr, '2.1', scale)
    centroid_latitude = self._floatWithFormat(line[17:21], '4.2')
    lat_type = line[21]
    if centroid_latitude is not None:
        centroid_latitude *= self._coordinateSign(lat_type)
    lat_stderr = line[22:25]
    if lat_stderr == 'FX':
        lat_stderr = 'Fixed'
    else:
        lat_stderr = self._floatWithFormat(lat_stderr, '3.2', scale)
    centroid_longitude = self._floatWithFormat(line[25:30], '5.2')
    lon_type = line[30]
    if centroid_longitude is not None:
        centroid_longitude *= self._coordinateSign(lon_type)
    lon_stderr = line[31:34]
    if lon_stderr == 'FX':
        lon_stderr = 'Fixed'
    else:
        lon_stderr = self._floatWithFormat(lon_stderr, '3.2', scale)
    centroid_depth = self._floatWithFormat(line[34:38], '4.1')
    depth_stderr = line[38:40]
    if depth_stderr == 'FX' or depth_stderr == 'BD':
        depth_stderr = 'Fixed'
    else:
        depth_stderr = self._floatWithFormat(depth_stderr, '2.1', scale)
    station_number = self._intZero(line[40:43])
    component_number = self._intZero(line[43:46])
    station_number2 = self._intZero(line[46:48])
    component_number2 = self._intZero(line[48:51])
    # unused: half_duration = self._floatWithFormat(line[51:54], '3.1')
    moment = self._floatWithFormat(line[54:56], '2.1')
    moment_stderr = self._floatWithFormat(line[56:58], '2.1')
    moment_exponent = self._int(line[58:60])
    if (moment is not None) and (moment_exponent is not None):
        moment *= math.pow(10, moment_exponent)
    if (moment_stderr is not None) and (moment_exponent is not None):
        moment_stderr *= math.pow(10, moment_exponent)

    evid = event.resource_id.id.split('/')[-1]
    # Create a new origin only if centroid time is defined:
    origin = None
    if centroid_origin_time.strip() != '.':
        origin = Origin()
        res_id = '/'.join(
            (res_id_prefix, 'origin', evid, source_contributor.lower(),
             'mw' + computation_type.lower()))
        origin.resource_id = ResourceIdentifier(id=res_id)
        origin.creation_info = CreationInfo(agency_id=source_contributor)
        date = event.origins[0].time.strftime('%Y%m%d')
        origin.time = UTCDateTime(date + centroid_origin_time)
        # Check if centroid time is on the next day:
        if origin.time < event.origins[0].time:
            origin.time += timedelta(days=1)
        self._storeUncertainty(origin.time_errors, orig_time_stderr)
        origin.latitude = centroid_latitude
        origin.longitude = centroid_longitude
        origin.depth = centroid_depth * 1000
        if lat_stderr == 'Fixed' and lon_stderr == 'Fixed':
            origin.epicenter_fixed = True
        else:
            self._storeUncertainty(origin.latitude_errors,
                                   self._latErrToDeg(lat_stderr))
            self._storeUncertainty(
                origin.longitude_errors,
                self._lonErrToDeg(lon_stderr, origin.latitude))
        if depth_stderr == 'Fixed':
            origin.depth_type = 'operator assigned'
        else:
            origin.depth_type = 'from location'
            self._storeUncertainty(origin.depth_errors,
                                   depth_stderr, scale=1000)
        quality = OriginQuality()
        quality.used_station_count = station_number + station_number2
        quality.used_phase_count = component_number + component_number2
        origin.quality = quality
        origin.type = 'centroid'
        event.origins.append(origin)

    focal_mechanism = FocalMechanism()
    res_id = '/'.join(
        (res_id_prefix, 'focalmechanism', evid, source_contributor.lower(),
         'mw' + computation_type.lower()))
    focal_mechanism.resource_id = ResourceIdentifier(id=res_id)
    focal_mechanism.creation_info = CreationInfo(agency_id=source_contributor)
    moment_tensor = MomentTensor()
    if origin is not None:
        moment_tensor.derived_origin_id = origin.resource_id
    else:
        # this is required for QuakeML validation:
        res_id = '/'.join((res_id_prefix, 'no-origin'))
        moment_tensor.derived_origin_id = ResourceIdentifier(id=res_id)
    for mag in event.magnitudes:
        if mag.creation_info.agency_id == source_contributor:
            moment_tensor.moment_magnitude_id = mag.resource_id
    res_id = '/'.join(
        (res_id_prefix, 'momenttensor', evid, source_contributor.lower(),
         'mw' + computation_type.lower()))
    moment_tensor.resource_id = ResourceIdentifier(id=res_id)
    moment_tensor.scalar_moment = moment
    self._storeUncertainty(moment_tensor.scalar_moment_errors, moment_stderr)
    data_used = DataUsed()
    data_used.station_count = station_number + station_number2
    data_used.component_count = component_number + component_number2
    if computation_type == 'C':
        res_id = '/'.join((res_id_prefix, 'methodID=CMT'))
        focal_mechanism.method_id = ResourceIdentifier(id=res_id)
        # The CMT algorithm uses long-period body waves,
        # very-long-period surface waves and intermediate-period
        # surface waves (since 2004, for shallow and
        # intermediate-depth earthquakes -- Ekstrom et al., 2012)
        data_used.wave_type = 'combined'
    elif computation_type == 'M':
        res_id = '/'.join((res_id_prefix, 'methodID=moment_tensor'))
        focal_mechanism.method_id = ResourceIdentifier(id=res_id)
        # FIXME: not sure which kind of data is used by the
        # "moment tensor" algorithm.
        data_used.wave_type = 'unknown'
    elif computation_type == 'B':
        res_id = '/'.join((res_id_prefix, 'methodID=broadband_data'))
        focal_mechanism.method_id = ResourceIdentifier(id=res_id)
        # FIXME: is 'combined' correct here?
        data_used.wave_type = 'combined'
    elif computation_type == 'F':
        res_id = '/'.join((res_id_prefix, 'methodID=P-wave_first_motion'))
        focal_mechanism.method_id = ResourceIdentifier(id=res_id)
        data_used.wave_type = 'P waves'
    elif computation_type == 'S':
        res_id = '/'.join((res_id_prefix, 'methodID=scalar_moment'))
        focal_mechanism.method_id = ResourceIdentifier(id=res_id)
        # FIXME: not sure which kind of data is used for scalar
        # moment determination.
        data_used.wave_type = 'unknown'
    moment_tensor.data_used = data_used
    focal_mechanism.moment_tensor = moment_tensor
    event.focal_mechanisms.append(focal_mechanism)
    return focal_mechanism
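The helpers `_floatWithFormat`, `_coordinateSign`, `_storeUncertainty`, `_latErrToDeg` and `_lonErrToDeg` are not shown in this document. From its call sites, `_floatWithFormat` evidently reads a fixed-width field with an implied decimal point given by an 'n.m' format string, then applies the record's power-of-ten scale. A minimal standalone sketch under that assumption (the real method may differ in detail):

def _float_with_format(string, format_string, scale=1):
    # 'n.m' denotes an n-character field with m implied decimals,
    # so '425' read with format '3.2' yields 4.25.
    ndigits, ndec = map(int, format_string.split('.'))
    nint = ndigits - ndec
    try:
        value = float(string[:nint] + '.' + string[nint:])
    except ValueError:
        return None  # blank or malformed fields parse to None
    return value * scale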
def _map_join2origin(self, db):
    """
    Return an Origin instance from a dict of CSS key/values

    Inputs
    ======
    db : dict of key/values of CSS fields related to the origin
         (see Join)

    Returns
    =======
    obspy.core.event.Origin

    Notes
    =====
    Any object that supports the dict 'get' method can be passed as
    input, e.g. OrderedDict, custom classes, etc.

    Join
    ----
    origin <- origerr (outer)
    """
    #-- Basic location ------------------------------------------
    origin = Origin()
    origin.latitude = db.get('lat')
    origin.longitude = db.get('lon')
    origin.depth = _km2m(db.get('depth'))
    origin.time = _utc(db.get('time'))
    origin.extra = {}

    #-- Quality -------------------------------------------------
    quality = OriginQuality(
        associated_phase_count=db.get('nass'),
        used_phase_count=db.get('ndef'),
        standard_error=db.get('sdobs'),
    )
    origin.quality = quality

    #-- Solution Uncertainties ----------------------------------
    # in CSS the ellipse is projected onto the horizontal plane
    # using the covariance matrix
    uncertainty = OriginUncertainty()
    a = _km2m(db.get('smajax'))
    b = _km2m(db.get('sminax'))
    s = db.get('strike')
    dep_u = _km2m(db.get('sdepth'))
    time_u = db.get('stime')

    uncertainty.max_horizontal_uncertainty = a
    uncertainty.min_horizontal_uncertainty = b
    uncertainty.azimuth_max_horizontal_uncertainty = s
    uncertainty.horizontal_uncertainty = a
    uncertainty.preferred_description = "horizontal uncertainty"

    if db.get('conf') is not None:
        uncertainty.confidence_level = db.get('conf') * 100.

    if uncertainty.horizontal_uncertainty is not None:
        origin.origin_uncertainty = uncertainty

    #-- Parameter Uncertainties ---------------------------------
    if all([a, b, s]):
        n, e = _get_NE_on_ellipse(a, b, s)
        lat_u = _m2deg_lat(n)
        lon_u = _m2deg_lon(e, lat=origin.latitude)
        origin.latitude_errors = {'uncertainty': lat_u}
        origin.longitude_errors = {'uncertainty': lon_u}
    if dep_u:
        origin.depth_errors = {'uncertainty': dep_u}
    if time_u:
        origin.time_errors = {'uncertainty': time_u}

    #-- Analyst-determined Status -------------------------------
    posted_author = _str(db.get('auth'))
    mode, status = self.get_event_status(posted_author)
    origin.evaluation_mode = mode
    origin.evaluation_status = status

    # Save etype per origin due to schema differences...
    css_etype = _str(db.get('etype'))
    # Compatible with future patch rename "_namespace" -> "namespace"
    origin.extra['etype'] = {
        'value': css_etype,
        'namespace': CSS_NAMESPACE,
    }

    origin.creation_info = CreationInfo(
        creation_time=_utc(db.get('lddate')),
        agency_id=self.agency,
        version=db.get('orid'),
        author=posted_author,
    )
    origin.resource_id = self._rid(origin)
    return origin
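`_get_NE_on_ellipse` is likewise not defined in this document. From its use above it apparently returns the half-extents of the error ellipse along the north and east axes, given the semi-major axis, semi-minor axis, and strike of the major axis in degrees clockwise from north. A plausible sketch under that assumption:

import math

def _get_NE_on_ellipse(a, b, strike):
    # Bounding half-widths of an ellipse along the north and east axes:
    # project semi-axes a (major) and b (minor) through the strike angle.
    s = math.radians(strike)
    n = math.hypot(a * math.cos(s), b * math.sin(s))
    e = math.hypot(a * math.sin(s), b * math.cos(s))
    return n, e

The north component is then converted to degrees of latitude by `_m2deg_lat` (roughly n / 111195 for meters on a 6371 km sphere) and the east component to degrees of longitude by `_m2deg_lon`, which additionally divides by the cosine of the latitude.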
def surf_events_to_cat(loc_file, pick_file):
    """
    Take location files (hypoinverse formatted) and picks (format TBD)
    and create a single obspy catalog for later use and dissemination.

    :param loc_file: File path
    :param pick_file: File path
    :return: obspy.core.event.Catalog
    """
    # Read/parse location file and create Events for each
    surf_cat = Catalog()
    # Parse the pick file to a dictionary
    pick_dict = parse_picks(pick_file)
    with open(loc_file, 'r') as f:
        next(f)  # skip header line
        for ln in f:
            ln = ln.strip('\n')
            line = ln.split(',')
            eid = line[0]
            if eid not in pick_dict:
                print('No picks for this location, skipping for now.')
                continue
            ot = UTCDateTime(line[1])
            hmc_east = float(line[2])
            hmc_north = float(line[3])
            hmc_elev = float(line[4])
            gap = float(line[-5])
            rms = float(line[-3])
            errXY = float(line[-2])
            errZ = float(line[-1])
            converter = SURF_converter()
            lon, lat, elev = converter.to_lonlat((hmc_east, hmc_north,
                                                  hmc_elev))
            # depth taken relative to a 130 m reference elevation
            o = Origin(time=ot, longitude=lon, latitude=lat,
                       depth=130 - elev)
            o.origin_uncertainty = OriginUncertainty()
            o.quality = OriginQuality()
            ou = o.origin_uncertainty
            oq = o.quality
            ou.horizontal_uncertainty = errXY * 1e3
            ou.preferred_description = "horizontal uncertainty"
            o.depth_errors.uncertainty = errZ * 1e3
            oq.standard_error = rms
            oq.azimuthal_gap = gap
            extra = AttribDict({
                'hmc_east': {
                    'value': hmc_east,
                    'namespace': 'smi:local/hmc'
                },
                'hmc_north': {
                    'value': hmc_north,
                    'namespace': 'smi:local/hmc'
                },
                'hmc_elev': {
                    'value': hmc_elev,
                    'namespace': 'smi:local/hmc'
                },
                'hmc_eid': {
                    'value': eid,
                    'namespace': 'smi:local/hmc'
                }
            })
            o.extra = extra
            rid = ResourceIdentifier(id=ot.strftime('%Y%m%d%H%M%S%f'))
            # Dummy magnitude of 1. for all events until further notice
            mag = Magnitude(mag=1., mag_errors=QuantityError(uncertainty=1.))
            ev = Event(origins=[o], magnitudes=[mag],
                       picks=pick_dict[eid], resource_id=rid)
            surf_cat.append(ev)
    return surf_cat
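A hypothetical usage sketch (the file names are assumptions, and `parse_picks` and `SURF_converter` must be importable from the same module):

cat = surf_events_to_cat('locations.csv', 'picks.csv')
print(cat)  # event count plus a one-line summary per event
cat.write('surf_catalog.xml', format='QUAKEML')  # standard obspy Catalog.write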
def __toOrigin(parser, origin_el):
    """
    Parses a given origin etree element.

    :type parser: :class:`~obspy.core.util.xmlwrapper.XMLParser`
    :param parser: Open XMLParser object.
    :type origin_el: etree.element
    :param origin_el: origin element to be parsed.
    :return: An ObsPy :class:`~obspy.core.event.Origin` object.
    """
    global CURRENT_TYPE

    origin = Origin()
    origin.resource_id = ResourceIdentifier(
        prefix="/".join([RESOURCE_ROOT, "origin"]))

    # I guess setting the program used as the method id is fine.
    origin.method_id = "%s/location_method/%s/1" % (
        RESOURCE_ROOT, parser.xpath2obj('program', origin_el))
    if str(origin.method_id).lower().endswith("none"):
        origin.method_id = None

    # Standard parameters.
    origin.time, origin.time_errors = \
        __toTimeQuantity(parser, origin_el, "time")
    origin.latitude, origin_latitude_error = \
        __toFloatQuantity(parser, origin_el, "latitude")
    origin.longitude, origin_longitude_error = \
        __toFloatQuantity(parser, origin_el, "longitude")
    origin.depth, origin.depth_errors = \
        __toFloatQuantity(parser, origin_el, "depth")

    if origin_longitude_error:
        origin_longitude_error = origin_longitude_error["uncertainty"]
    if origin_latitude_error:
        origin_latitude_error = origin_latitude_error["uncertainty"]

    # Figure out the depth type.
    depth_type = parser.xpath2obj("depth_type", origin_el)

    # Map Seishub specific depth type to the QuakeML depth type.
    if depth_type == "from location program":
        depth_type = "from location"
    if depth_type is not None:
        origin.depth_type = depth_type

    # XXX: CHECK DEPTH ORIENTATION!!
    if CURRENT_TYPE == "seiscomp3":
        origin.depth *= 1000
        if origin.depth_errors.uncertainty:
            origin.depth_errors.uncertainty *= 1000
    else:
        # Convert to m.
        origin.depth *= -1000
        if origin.depth_errors.uncertainty:
            origin.depth_errors.uncertainty *= 1000

    # Earth model.
    earth_mod = parser.xpath2obj('earth_mod', origin_el, str)
    if earth_mod:
        earth_mod = earth_mod.split()
        earth_mod = ",".join(earth_mod)
        origin.earth_model_id = "%s/earth_model/%s/1" % (RESOURCE_ROOT,
                                                         earth_mod)

    if (origin_latitude_error is None or origin_longitude_error is None) \
            and CURRENT_TYPE not in ["seiscomp3", "toni"]:
        raise Exception("Missing latitude/longitude uncertainty for "
                        "file type '%s'" % CURRENT_TYPE)

    if origin_latitude_error and origin_longitude_error:
        if CURRENT_TYPE in ["baynet", "obspyck"]:
            uncert = OriginUncertainty()
            if origin_latitude_error > origin_longitude_error:
                uncert.azimuth_max_horizontal_uncertainty = 0
            else:
                uncert.azimuth_max_horizontal_uncertainty = 90
            uncert.min_horizontal_uncertainty, \
                uncert.max_horizontal_uncertainty = \
                sorted([origin_longitude_error, origin_latitude_error])
            uncert.min_horizontal_uncertainty *= 1000.0
            uncert.max_horizontal_uncertainty *= 1000.0
            uncert.preferred_description = "uncertainty ellipse"
            origin.origin_uncertainty = uncert
        elif CURRENT_TYPE == "earthworm":
            uncert = OriginUncertainty()
            uncert.horizontal_uncertainty = origin_latitude_error
            uncert.horizontal_uncertainty *= 1000.0
            uncert.preferred_description = "horizontal uncertainty"
            origin.origin_uncertainty = uncert
        elif CURRENT_TYPE in ["seiscomp3", "toni"]:
            pass
        else:
            raise Exception("Unknown file type '%s'" % CURRENT_TYPE)

    # Parse the OriginQuality if applicable.
    if not origin_el.xpath("originQuality"):
        return origin
    origin_quality_el = origin_el.xpath("originQuality")[0]
    origin.quality = OriginQuality()
    origin.quality.associated_phase_count = \
        parser.xpath2obj("associatedPhaseCount", origin_quality_el, int)
    # QuakeML apparently does not distinguish between P and S wave phase
    # counts. Some Seishub event files do.
    p_phase_count = parser.xpath2obj("P_usedPhaseCount",
                                     origin_quality_el, int)
    s_phase_count = parser.xpath2obj("S_usedPhaseCount",
                                     origin_quality_el, int)
    # Use both in case they are set.
    if p_phase_count is not None and s_phase_count is not None:
        phase_count = p_phase_count + s_phase_count
        # Also add two Seishub element file specific elements.
        origin.quality.p_used_phase_count = p_phase_count
        origin.quality.s_used_phase_count = s_phase_count
    # Otherwise the total usedPhaseCount should be specified.
    else:
        phase_count = parser.xpath2obj("usedPhaseCount",
                                       origin_quality_el, int)
        if p_phase_count is not None:
            origin.quality.setdefault("extra", AttribDict())
            origin.quality.extra.usedPhaseCountP = {
                'value': p_phase_count,
                'namespace': NAMESPACE}
        if s_phase_count is not None:
            origin.quality.setdefault("extra", AttribDict())
            origin.quality.extra.usedPhaseCountS = {
                'value': s_phase_count,
                'namespace': NAMESPACE}
    origin.quality.used_phase_count = phase_count

    associated_station_count = \
        parser.xpath2obj("associatedStationCount", origin_quality_el, int)
    used_station_count = parser.xpath2obj("usedStationCount",
                                          origin_quality_el, int)
    depth_phase_count = parser.xpath2obj("depthPhaseCount",
                                         origin_quality_el, int)
    standard_error = parser.xpath2obj("standardError",
                                      origin_quality_el, float)
    azimuthal_gap = parser.xpath2obj("azimuthalGap",
                                     origin_quality_el, float)
    secondary_azimuthal_gap = \
        parser.xpath2obj("secondaryAzimuthalGap", origin_quality_el, float)
    ground_truth_level = parser.xpath2obj("groundTruthLevel",
                                          origin_quality_el, str)
    minimum_distance = parser.xpath2obj("minimumDistance",
                                        origin_quality_el, float)
    maximum_distance = parser.xpath2obj("maximumDistance",
                                        origin_quality_el, float)
    median_distance = parser.xpath2obj("medianDistance",
                                       origin_quality_el, float)
    if minimum_distance is not None:
        minimum_distance = kilometer2degrees(minimum_distance)
    if maximum_distance is not None:
        maximum_distance = kilometer2degrees(maximum_distance)
    if median_distance is not None:
        median_distance = kilometer2degrees(median_distance)

    if associated_station_count is not None:
        origin.quality.associated_station_count = associated_station_count
    if used_station_count is not None:
        origin.quality.used_station_count = used_station_count
    if depth_phase_count is not None:
        origin.quality.depth_phase_count = depth_phase_count
    if standard_error is not None and not math.isnan(standard_error):
        origin.quality.standard_error = standard_error
    if azimuthal_gap is not None:
        origin.quality.azimuthal_gap = azimuthal_gap
    if secondary_azimuthal_gap is not None:
        origin.quality.secondary_azimuthal_gap = secondary_azimuthal_gap
    if ground_truth_level is not None:
        origin.quality.ground_truth_level = ground_truth_level
    if minimum_distance is not None:
        origin.quality.minimum_distance = minimum_distance
    if maximum_distance is not None:
        origin.quality.maximum_distance = maximum_distance
    if median_distance is not None and not math.isnan(median_distance):
        origin.quality.median_distance = median_distance
    return origin
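`kilometer2degrees` comes from `obspy.geodetics` and converts an epicentral distance in kilometers to degrees of arc on a sphere: km / (2 * pi * R / 360), with R = 6371 km by default, so one degree is roughly 111.2 km:

from obspy.geodetics import kilometer2degrees

print(kilometer2degrees(111.2))  # ~1.0 degree
print(kilometer2degrees(55.6))   # ~0.5 degree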