def _deserialize(self, zmap_str):
    catalog = Catalog()
    for row in zmap_str.split('\n'):
        if len(row) == 0:
            continue
        origin = Origin()
        event = Event(origins=[origin])
        event.preferred_origin_id = origin.resource_id.id
        # Begin value extraction
        columns = row.split('\t', 13)[:13]  # ignore extra columns
        values = dict(zip(_STD_ZMAP_COLUMNS + _EXT_ZMAP_COLUMNS, columns))
        # Extract origin
        origin.longitude = self._str2num(values.get('lon'))
        origin.latitude = self._str2num(values.get('lat'))
        depth = self._str2num(values.get('depth'))
        if depth is not None:
            origin.depth = depth * 1000.0
        z_err = self._str2num(values.get('z_err'))
        if z_err is not None:
            origin.depth_errors.uncertainty = z_err * 1000.0
        h_err = self._str2num(values.get('h_err'))
        if h_err is not None:
            ou = OriginUncertainty()
            ou.horizontal_uncertainty = h_err
            ou.preferred_description = 'horizontal uncertainty'
            origin.origin_uncertainty = ou
        year = self._str2num(values.get('year'))
        if year is not None:
            t_fields = ['year', 'month', 'day', 'hour', 'minute', 'second']
            comps = [self._str2num(values.get(f)) for f in t_fields]
            if year % 1 != 0:
                origin.time = self._decyear2utc(year)
            elif any(v > 0 for v in comps[1:]):
                # no seconds involved
                if len(comps) < 6:
                    utc_args = [int(v) for v in comps if v is not None]
                # we also have to handle seconds
                else:
                    utc_args = [int(v) if v is not None else 0
                                for v in comps[:-1]]
                    # just leave float seconds as is
                    utc_args.append(comps[-1])
                origin.time = UTCDateTime(*utc_args)
        mag = self._str2num(values.get('mag'))
        # Extract magnitude
        if mag is not None:
            magnitude = Magnitude(mag=mag)
            m_err = self._str2num(values.get('m_err'))
            magnitude.mag_errors.uncertainty = m_err
            event.magnitudes.append(magnitude)
            event.preferred_magnitude_id = magnitude.resource_id.id
        event.scope_resource_ids()
        catalog.append(event)
    return catalog
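# Hedged usage sketch (illustrative, not part of the parser above): ObsPy
# exposes ZMAP through read_events(), so a tab-separated row in the standard
# column order (lon, lat, year, month, day, mag, depth, hour, minute, second)
# can be parsed without calling _deserialize() directly. The helper name and
# the sample values are assumptions made for this example.
def _example_read_zmap_row():
    import tempfile
    from obspy import read_events

    row = '\t'.join(['8.55', '47.37', '2023.0', '6', '15',
                     '3.2', '10.0', '12', '30', '45.5']) + '\n'
    with tempfile.NamedTemporaryFile('w', suffix='.zmap',
                                     delete=False) as f:
        f.write(row)
    cat = read_events(f.name, format='ZMAP')
    return cat  # one Event; the reader converts depth from km to m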
def _deserialize(self, zmap_str):
    catalog = Catalog()
    for row in zmap_str.split("\n"):
        if len(row) == 0:
            continue
        origin = Origin()
        event = Event(origins=[origin])
        event.preferred_origin_id = origin.resource_id.id
        # Begin value extraction
        columns = row.split("\t", 13)[:13]  # ignore extra columns
        values = dict(zip(_STD_ZMAP_COLUMNS + _EXT_ZMAP_COLUMNS, columns))
        # Extract origin
        origin.longitude = self._str2num(values.get("lon"))
        origin.latitude = self._str2num(values.get("lat"))
        depth = self._str2num(values.get("depth"))
        if depth is not None:
            origin.depth = depth * 1000.0
        z_err = self._str2num(values.get("z_err"))
        if z_err is not None:
            origin.depth_errors.uncertainty = z_err * 1000.0
        h_err = self._str2num(values.get("h_err"))
        if h_err is not None:
            ou = OriginUncertainty()
            ou.horizontal_uncertainty = h_err
            ou.preferred_description = "horizontal uncertainty"
            origin.origin_uncertainty = ou
        year = self._str2num(values.get("year"))
        if year is not None:
            t_fields = ["year", "month", "day", "hour", "minute", "second"]
            comps = [self._str2num(values.get(f)) for f in t_fields]
            if year % 1 != 0:
                origin.time = self._decyear2utc(year)
            elif any(v > 0 for v in comps[1:]):
                utc_args = [int(v) for v in comps if v is not None]
                origin.time = UTCDateTime(*utc_args)
        mag = self._str2num(values.get("mag"))
        # Extract magnitude
        if mag is not None:
            magnitude = Magnitude(mag=mag)
            m_err = self._str2num(values.get("m_err"))
            magnitude.mag_errors.uncertainty = m_err
            event.magnitudes.append(magnitude)
            event.preferred_magnitude_id = magnitude.resource_id.id
        catalog.append(event)
    return catalog
def _parse_second_line_origin(self, line, event, origin, magnitudes):
    magnitude_errors = []
    fields = self.fields['line_2']

    standard_error = line[fields['rms']].strip()
    time_uncertainty = line[fields['ot_error']].strip()
    max_horizontal_uncertainty = line[fields['s_major']].strip()
    min_horizontal_uncertainty = line[fields['s_minor']].strip()
    azimuth_max_horizontal_uncertainty = line[fields['az']].strip()
    depth_uncertainty = line[fields['depth_err']].strip()
    min_distance = line[fields['min_dist']].strip()
    max_distance = line[fields['max_dist']].strip()
    magnitude_errors.append(line[fields['mag_err_1']].strip())
    magnitude_errors.append(line[fields['mag_err_2']].strip())
    magnitude_errors.append(line[fields['mag_err_3']].strip())
    analysis_type = line[fields['antype']].strip().lower()
    location_method = line[fields['loctype']].strip().lower()
    event_type = line[fields['evtype']].strip().lower()

    try:
        origin.quality.standard_error = float(standard_error)
    except ValueError:
        pass

    try:
        origin.time_errors.uncertainty = float(time_uncertainty)
    except ValueError:
        pass

    try:
        uncertainty = OriginUncertainty()
        # Convert values from km to m
        min_value = float(min_horizontal_uncertainty) * 1000
        max_value = float(max_horizontal_uncertainty) * 1000
        azimuth_value = float(azimuth_max_horizontal_uncertainty)
        description = OriginUncertaintyDescription('uncertainty ellipse')
        uncertainty.min_horizontal_uncertainty = min_value
        uncertainty.max_horizontal_uncertainty = max_value
        uncertainty.azimuth_max_horizontal_uncertainty = azimuth_value
        uncertainty.preferred_description = description
        origin.origin_uncertainty = uncertainty
    except ValueError:
        pass

    try:
        # Convert value from km to m
        origin.depth_errors.uncertainty = float(depth_uncertainty) * 1000
    except ValueError:
        pass

    try:
        origin.quality.minimum_distance = float(min_distance)
        origin.quality.maximum_distance = float(max_distance)
    except ValueError:
        self._warn('Missing minimum/maximum distance')

    for i in range(2):
        try:
            mag_errors = magnitudes[i].mag_errors
            mag_errors.uncertainty = float(magnitude_errors[i])
        except (AttributeError, ValueError):
            pass

    # No match for 'g' (guess)
    # We map 'g' to 'manual' and create a comment for origin
    try:
        origin.evaluation_mode = EVALUATION_MODES[analysis_type]
        if analysis_type == 'g':
            # comment: 'GSE2.0:antype=g'
            text = 'GSE2.0:antype=g'
            comment = self._comment(text)
            origin.comments.append(comment)
    except KeyError:
        self._warn('Wrong analysis type')

    if location_method not in LOCATION_METHODS.keys():
        location_method = 'o'
    method = LOCATION_METHODS[location_method]
    method_id = "method/%s" % method
    origin.method_id = self._get_res_id(method_id)

    if event_type not in EVENT_TYPES.keys():
        event_type = 'uk'
        self._warn('Wrong or unknown event type')
    event_data = EVENT_TYPES[event_type]
    event.event_type_certainty, event.event_type = event_data
    # comment: 'GSE2.0:evtype=<evtype>'
    if event_type:
        text = 'GSE2.0:evtype=%s' % event_type
        comment = self._comment(text)
        event.comments.append(comment)
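# Hedged sketch of the lookup tables the parser above relies on. The actual
# contents live as module constants in the GSE2.0 bulletin reader; the entries
# below are illustrative assumptions showing the expected shapes, not the real
# tables. antype/loctype/evtype codes map to QuakeML vocabularies, and each
# EVENT_TYPES value is an (event_type_certainty, event_type) pair; 'g' mapping
# to 'manual' and the fallbacks 'o' / 'uk' follow from the code above.
EVALUATION_MODES = {'m': 'manual', 'a': 'automatic', 'g': 'manual'}
LOCATION_METHODS = {'i': 'inversion', 'p': 'pattern recognition',
                    'g': 'ground truth', 'o': 'other'}
EVENT_TYPES = {'uk': (None, None),
               'ke': ('known', 'earthquake'),
               'se': ('suspected', 'earthquake'),
               'km': ('known', 'mining explosion'),
               'sm': ('suspected', 'mining explosion')}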
def __toOrigin(parser, origin_el):
    """
    Parses a given origin etree element.

    :type parser: :class:`~obspy.core.util.xmlwrapper.XMLParser`
    :param parser: Open XMLParser object.
    :type origin_el: etree.element
    :param origin_el: origin element to be parsed.
    :return: An ObsPy :class:`~obspy.core.event.Origin` object.
    """
    global CURRENT_TYPE

    origin = Origin()
    origin.resource_id = ResourceIdentifier(
        prefix="/".join([RESOURCE_ROOT, "origin"]))

    # I guess setting the program used as the method id is fine.
    origin.method_id = "%s/location_method/%s/1" % (
        RESOURCE_ROOT, parser.xpath2obj('program', origin_el))
    if str(origin.method_id).lower().endswith("none"):
        origin.method_id = None

    # Standard parameters.
    origin.time, origin.time_errors = \
        __toTimeQuantity(parser, origin_el, "time")
    origin.latitude, origin_latitude_error = \
        __toFloatQuantity(parser, origin_el, "latitude")
    origin.longitude, origin_longitude_error = \
        __toFloatQuantity(parser, origin_el, "longitude")
    origin.depth, origin.depth_errors = \
        __toFloatQuantity(parser, origin_el, "depth")

    if origin_longitude_error:
        origin_longitude_error = origin_longitude_error["uncertainty"]
    if origin_latitude_error:
        origin_latitude_error = origin_latitude_error["uncertainty"]

    # Figure out the depth type.
    depth_type = parser.xpath2obj("depth_type", origin_el)
    # Map Seishub specific depth type to the QuakeML depth type.
    if depth_type == "from location program":
        depth_type = "from location"
    if depth_type is not None:
        origin.depth_type = depth_type

    # XXX: CHECK DEPTH ORIENTATION!!
    if CURRENT_TYPE == "seiscomp3":
        origin.depth *= 1000
        if origin.depth_errors.uncertainty:
            origin.depth_errors.uncertainty *= 1000
    else:
        # Convert to m.
        origin.depth *= -1000
        if origin.depth_errors.uncertainty:
            origin.depth_errors.uncertainty *= 1000

    # Earth model.
    earth_mod = parser.xpath2obj('earth_mod', origin_el, str)
    if earth_mod:
        earth_mod = earth_mod.split()
        earth_mod = ",".join(earth_mod)
        origin.earth_model_id = "%s/earth_model/%s/1" % (RESOURCE_ROOT,
                                                         earth_mod)

    if (origin_latitude_error is None or origin_longitude_error is None) and \
            CURRENT_TYPE not in ["seiscomp3", "toni"]:
        print("AAAAAAAAAAAAA")
        raise Exception

    if origin_latitude_error and origin_longitude_error:
        if CURRENT_TYPE in ["baynet", "obspyck"]:
            uncert = OriginUncertainty()
            if origin_latitude_error > origin_longitude_error:
                uncert.azimuth_max_horizontal_uncertainty = 0
            else:
                uncert.azimuth_max_horizontal_uncertainty = 90
            uncert.min_horizontal_uncertainty, \
                uncert.max_horizontal_uncertainty = \
                sorted([origin_longitude_error, origin_latitude_error])
            uncert.min_horizontal_uncertainty *= 1000.0
            uncert.max_horizontal_uncertainty *= 1000.0
            uncert.preferred_description = "uncertainty ellipse"
            origin.origin_uncertainty = uncert
        elif CURRENT_TYPE == "earthworm":
            uncert = OriginUncertainty()
            uncert.horizontal_uncertainty = origin_latitude_error
            uncert.horizontal_uncertainty *= 1000.0
            uncert.preferred_description = "horizontal uncertainty"
            origin.origin_uncertainty = uncert
        elif CURRENT_TYPE in ["seiscomp3", "toni"]:
            pass
        else:
            raise Exception

    # Parse the OriginQuality if applicable.
    if not origin_el.xpath("originQuality"):
        return origin
    origin_quality_el = origin_el.xpath("originQuality")[0]
    origin.quality = OriginQuality()
    origin.quality.associated_phase_count = \
        parser.xpath2obj("associatedPhaseCount", origin_quality_el, int)
    # QuakeML apparently does not distinguish between P and S wave phase
    # count. Some Seishub event files do.
    p_phase_count = parser.xpath2obj("P_usedPhaseCount",
                                     origin_quality_el, int)
    s_phase_count = parser.xpath2obj("S_usedPhaseCount",
                                     origin_quality_el, int)
    # Use both in case they are set.
    if p_phase_count is not None and s_phase_count is not None:
        phase_count = p_phase_count + s_phase_count
        # Also add two Seishub element file specific elements.
        origin.quality.p_used_phase_count = p_phase_count
        origin.quality.s_used_phase_count = s_phase_count
    # Otherwise the total usedPhaseCount should be specified.
    else:
        phase_count = parser.xpath2obj("usedPhaseCount",
                                       origin_quality_el, int)
        if p_phase_count is not None:
            origin.quality.setdefault("extra", AttribDict())
            origin.quality.extra.usedPhaseCountP = {
                'value': p_phase_count, 'namespace': NAMESPACE}
        if s_phase_count is not None:
            origin.quality.setdefault("extra", AttribDict())
            origin.quality.extra.usedPhaseCountS = {
                'value': s_phase_count, 'namespace': NAMESPACE}
    origin.quality.used_phase_count = phase_count

    associated_station_count = \
        parser.xpath2obj("associatedStationCount", origin_quality_el, int)
    used_station_count = parser.xpath2obj("usedStationCount",
                                          origin_quality_el, int)
    depth_phase_count = parser.xpath2obj("depthPhaseCount",
                                         origin_quality_el, int)
    standard_error = parser.xpath2obj("standardError",
                                      origin_quality_el, float)
    azimuthal_gap = parser.xpath2obj("azimuthalGap", origin_quality_el, float)
    secondary_azimuthal_gap = \
        parser.xpath2obj("secondaryAzimuthalGap", origin_quality_el, float)
    ground_truth_level = parser.xpath2obj("groundTruthLevel",
                                          origin_quality_el, str)
    minimum_distance = parser.xpath2obj("minimumDistance",
                                        origin_quality_el, float)
    maximum_distance = parser.xpath2obj("maximumDistance",
                                        origin_quality_el, float)
    median_distance = parser.xpath2obj("medianDistance",
                                       origin_quality_el, float)

    if minimum_distance is not None:
        minimum_distance = kilometer2degrees(minimum_distance)
    if maximum_distance is not None:
        maximum_distance = kilometer2degrees(maximum_distance)
    if median_distance is not None:
        median_distance = kilometer2degrees(median_distance)

    if associated_station_count is not None:
        origin.quality.associated_station_count = associated_station_count
    if used_station_count is not None:
        origin.quality.used_station_count = used_station_count
    if depth_phase_count is not None:
        origin.quality.depth_phase_count = depth_phase_count
    if standard_error is not None and not math.isnan(standard_error):
        origin.quality.standard_error = standard_error
    if azimuthal_gap is not None:
        origin.quality.azimuthal_gap = azimuthal_gap
    if secondary_azimuthal_gap is not None:
        origin.quality.secondary_azimuthal_gap = secondary_azimuthal_gap
    if ground_truth_level is not None:
        origin.quality.ground_truth_level = ground_truth_level
    if minimum_distance is not None:
        origin.quality.minimum_distance = minimum_distance
    if maximum_distance is not None:
        origin.quality.maximum_distance = maximum_distance
    if median_distance is not None and not math.isnan(median_distance):
        origin.quality.median_distance = median_distance
    return origin
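# Quick numeric sanity check of the km -> degrees conversion used for the
# distance fields above (the input values are illustrative, not from any
# file): obspy.geodetics.kilometer2degrees assumes a default Earth radius of
# 6371 km, so 111.19 km corresponds to roughly one degree of arc.
from obspy.geodetics import kilometer2degrees

print(kilometer2degrees(111.19))  # ~1.0 degree
print(kilometer2degrees(55.6))    # ~0.5 degree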
def _map_join2origin(self, db):
    """
    Return an Origin instance from a dict of CSS key/values

    Inputs
    ======
    db : dict of key/values of CSS fields related to the origin (see Join)

    Returns
    =======
    obspy.core.event.Origin

    Notes
    =====
    Any object that supports the dict 'get' method can be passed as
    input, e.g. OrderedDict, custom classes, etc.

    Join
    ----
    origin <- origerr (outer)
    """
    #-- Basic location ------------------------------------------
    origin = Origin()
    origin.latitude = db.get('lat')
    origin.longitude = db.get('lon')
    origin.depth = _km2m(db.get('depth'))
    origin.time = _utc(db.get('time'))
    origin.extra = {}

    #-- Quality -------------------------------------------------
    quality = OriginQuality(
        associated_phase_count=db.get('nass'),
        used_phase_count=db.get('ndef'),
        standard_error=db.get('sdobs'),
    )
    origin.quality = quality

    #-- Solution Uncertainties ----------------------------------
    # in CSS the ellipse is projected onto the horizontal plane
    # using the covariance matrix
    uncertainty = OriginUncertainty()
    a = _km2m(db.get('smajax'))
    b = _km2m(db.get('sminax'))
    s = db.get('strike')
    dep_u = _km2m(db.get('sdepth'))
    time_u = db.get('stime')

    uncertainty.max_horizontal_uncertainty = a
    uncertainty.min_horizontal_uncertainty = b
    uncertainty.azimuth_max_horizontal_uncertainty = s
    uncertainty.horizontal_uncertainty = a
    uncertainty.preferred_description = "horizontal uncertainty"

    if db.get('conf') is not None:
        uncertainty.confidence_level = db.get('conf') * 100.

    if uncertainty.horizontal_uncertainty is not None:
        origin.origin_uncertainty = uncertainty

    #-- Parameter Uncertainties ---------------------------------
    if all([a, b, s]):
        n, e = _get_NE_on_ellipse(a, b, s)
        lat_u = _m2deg_lat(n)
        lon_u = _m2deg_lon(e, lat=origin.latitude)
        origin.latitude_errors = {'uncertainty': lat_u}
        origin.longitude_errors = {'uncertainty': lon_u}
    if dep_u:
        origin.depth_errors = {'uncertainty': dep_u}
    if time_u:
        origin.time_errors = {'uncertainty': time_u}

    #-- Analyst-determined Status -------------------------------
    posted_author = _str(db.get('auth'))
    mode, status = self.get_event_status(posted_author)
    origin.evaluation_mode = mode
    origin.evaluation_status = status

    # Save etype per origin due to schema differences...
    css_etype = _str(db.get('etype'))
    # Compatible with future patch rename "_namespace" -> "namespace"
    origin.extra['etype'] = {
        'value': css_etype,
        'namespace': CSS_NAMESPACE,
    }

    origin.creation_info = CreationInfo(
        creation_time=_utc(db.get('lddate')),
        agency_id=self.agency,
        version=db.get('orid'),
        author=posted_author,
    )
    origin.resource_id = self._rid(origin)
    return origin
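# Hedged usage sketch for _map_join2origin(): per the docstring, any dict-like
# object with a get() method works as input. The field values below (CSS
# origin/origerr columns) are made-up illustrations, and 'converter' stands in
# for whatever object provides this method together with _rid(),
# get_event_status() and the .agency attribute.
css_row = {
    'lat': 46.52, 'lon': 7.42, 'depth': 8.3,       # depth in km
    'time': 1435708800.0,                          # epoch seconds
    'nass': 24, 'ndef': 18, 'sdobs': 0.42,
    'smajax': 1.8, 'sminax': 1.1, 'strike': 35.0,  # error ellipse, km / deg
    'sdepth': 2.0, 'stime': 0.35, 'conf': 0.9,
    'auth': 'analyst:jdoe', 'etype': 'eq',
    'lddate': 1435712400.0, 'orid': 1001,
}
# origin = converter._map_join2origin(css_row)
# origin.depth -> 8300.0 m; origin.origin_uncertainty.confidence_level -> 90.0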
def _read_single_event(event_file, locate_dir, units, local_mag_ph):
    """
    Parse an event file from QuakeMigrate into an obspy Event object.

    Parameters
    ----------
    event_file : `pathlib.Path` object
        Path to .event file to read.
    locate_dir : `pathlib.Path` object
        Path to locate directory (contains "events", "picks" etc.
        directories).
    units : {"km", "m"}
        Grid projection coordinates for QM LUT (determines units of depths
        and uncertainties in the .event files).
    local_mag_ph : {"S", "P"}
        Amplitude measurement used to calculate local magnitudes.

    Returns
    -------
    event : `obspy.Event` object
        Event object populated with all available information output by
        :class:`~quakemigrate.signal.scan.locate()`, including event
        locations and uncertainties, picks, and amplitudes and magnitudes
        if available.

    """

    # Parse information from event file
    event_info = pd.read_csv(event_file).iloc[0]
    event_uid = str(event_info["EventID"])

    # Set distance conversion factor (from units of QM LUT projection units).
    if units == "km":
        factor = 1e3
    elif units == "m":
        factor = 1
    else:
        raise AttributeError(f"units must be 'km' or 'm'; not {units}")

    # Create event object to store origin and pick information
    event = Event()
    event.extra = AttribDict()
    event.resource_id = str(event_info["EventID"])
    event.creation_info = CreationInfo(author="QuakeMigrate",
                                       version=quakemigrate.__version__)

    # Add COA info to extra
    event.extra.coa = {"value": event_info["COA"], "namespace": ns}
    event.extra.coa_norm = {"value": event_info["COA_NORM"], "namespace": ns}
    event.extra.trig_coa = {"value": event_info["TRIG_COA"], "namespace": ns}
    event.extra.dec_coa = {"value": event_info["DEC_COA"], "namespace": ns}
    event.extra.dec_coa_norm = {"value": event_info["DEC_COA_NORM"],
                                "namespace": ns}

    # Determine location of cut waveform data - add to event object as a
    # custom extra attribute.
    mseed = locate_dir / "raw_cut_waveforms" / event_uid
    event.extra.cut_waveforms_file = {
        "value": str(mseed.with_suffix(".m").resolve()), "namespace": ns}
    if (locate_dir / "real_cut_waveforms").exists():
        mseed = locate_dir / "real_cut_waveforms" / event_uid
        event.extra.real_cut_waveforms_file = {
            "value": str(mseed.with_suffix(".m").resolve()), "namespace": ns}
    if (locate_dir / "wa_cut_waveforms").exists():
        mseed = locate_dir / "wa_cut_waveforms" / event_uid
        event.extra.wa_cut_waveforms_file = {
            "value": str(mseed.with_suffix(".m").resolve()), "namespace": ns}

    # Create origin with spline location and set to preferred event origin.
    origin = Origin()
    origin.method_id = "spline"
    origin.longitude = event_info["X"]
    origin.latitude = event_info["Y"]
    origin.depth = event_info["Z"] * factor
    origin.time = UTCDateTime(event_info["DT"])
    event.origins = [origin]
    event.preferred_origin_id = origin.resource_id

    # Create origin with gaussian location and associate with event
    origin = Origin()
    origin.method_id = "gaussian"
    origin.longitude = event_info["GAU_X"]
    origin.latitude = event_info["GAU_Y"]
    origin.depth = event_info["GAU_Z"] * factor
    origin.time = UTCDateTime(event_info["DT"])
    event.origins.append(origin)

    ouc = OriginUncertainty()
    ce = ConfidenceEllipsoid()
    ce.semi_major_axis_length = event_info["COV_ErrY"] * factor
    ce.semi_intermediate_axis_length = event_info["COV_ErrX"] * factor
    ce.semi_minor_axis_length = event_info["COV_ErrZ"] * factor
    ce.major_axis_plunge = 0
    ce.major_axis_azimuth = 0
    ce.major_axis_rotation = 0
    ouc.confidence_ellipsoid = ce
    ouc.preferred_description = "confidence ellipsoid"

    # Set uncertainties for both as the gaussian uncertainties
    for origin in event.origins:
        origin.longitude_errors.uncertainty = kilometer2degrees(
            event_info["GAU_ErrX"] * factor / 1e3)
        origin.latitude_errors.uncertainty = kilometer2degrees(
            event_info["GAU_ErrY"] * factor / 1e3)
        origin.depth_errors.uncertainty = event_info["GAU_ErrZ"] * factor
        origin.origin_uncertainty = ouc

    # Add OriginQuality info to each origin?
    for origin in event.origins:
        origin.origin_type = "hypocenter"
        origin.evaluation_mode = "automatic"

    # --- Handle picks file ---
    pick_file = locate_dir / "picks" / event_uid
    if pick_file.with_suffix(".picks").is_file():
        picks = pd.read_csv(pick_file.with_suffix(".picks"))
    else:
        return None

    for _, pickline in picks.iterrows():
        station = str(pickline["Station"])
        phase = str(pickline["Phase"])
        wid = WaveformStreamID(network_code="", station_code=station)

        for method in ["modelled", "autopick"]:
            pick = Pick()
            pick.extra = AttribDict()
            pick.waveform_id = wid
            pick.method_id = method
            pick.phase_hint = phase

            if method == "autopick" and str(pickline["PickTime"]) != "-1":
                pick.time = UTCDateTime(pickline["PickTime"])
                pick.time_errors.uncertainty = float(pickline["PickError"])
                pick.extra.snr = {"value": float(pickline["SNR"]),
                                  "namespace": ns}
            elif method == "modelled":
                pick.time = UTCDateTime(pickline["ModelledTime"])
            else:
                continue
            event.picks.append(pick)

    # --- Handle amplitudes file ---
    amps_file = locate_dir / "amplitudes" / event_uid
    if amps_file.with_suffix(".amps").is_file():
        amps = pd.read_csv(amps_file.with_suffix(".amps"))
        i = 0
        for _, ampsline in amps.iterrows():
            wid = WaveformStreamID(seed_string=ampsline["id"])
            noise_amp = ampsline["Noise_amp"] / 1000  # mm to m
            for phase in ["P_amp", "S_amp"]:
                amp = Amplitude()
                if pd.isna(ampsline[phase]):
                    continue
                amp.generic_amplitude = ampsline[phase] / 1000  # mm to m
                amp.generic_amplitude_errors.uncertainty = noise_amp
                amp.unit = "m"
                amp.type = "AML"
                amp.method_id = phase
                amp.period = 1 / ampsline[f"{phase[0]}_freq"]
                amp.time_window = TimeWindow(
                    reference=UTCDateTime(ampsline[f"{phase[0]}_time"]))
                # amp.pick_id = ?
                amp.waveform_id = wid
                # amp.filter_id = ?
                amp.magnitude_hint = "ML"
                amp.evaluation_mode = "automatic"
                amp.extra = AttribDict()
                try:
                    amp.extra.filter_gain = {
                        "value": ampsline[f"{phase[0]}_filter_gain"],
                        "namespace": ns}
                    amp.extra.avg_amp = {
                        "value": ampsline[f"{phase[0]}_avg_amp"] / 1000,  # m
                        "namespace": ns}
                except KeyError:
                    pass

                if phase[0] == local_mag_ph and not pd.isna(ampsline["ML"]):
                    i += 1
                    stat_mag = StationMagnitude()
                    stat_mag.extra = AttribDict()
                    # stat_mag.origin_id = ? local_mag_loc
                    stat_mag.mag = ampsline["ML"]
                    stat_mag.mag_errors.uncertainty = ampsline["ML_Err"]
                    stat_mag.station_magnitude_type = "ML"
                    stat_mag.amplitude_id = amp.resource_id
                    stat_mag.extra.picked = {"value": ampsline["is_picked"],
                                             "namespace": ns}
                    stat_mag.extra.epi_dist = {"value": ampsline["epi_dist"],
                                               "namespace": ns}
                    stat_mag.extra.z_dist = {"value": ampsline["z_dist"],
                                             "namespace": ns}
                    event.station_magnitudes.append(stat_mag)

                event.amplitudes.append(amp)

        mag = Magnitude()
        mag.extra = AttribDict()
        mag.mag = event_info["ML"]
        mag.mag_errors.uncertainty = event_info["ML_Err"]
        mag.magnitude_type = "ML"
        # mag.origin_id = ?
        mag.station_count = i
        mag.evaluation_mode = "automatic"
        mag.extra.r2 = {"value": event_info["ML_r2"], "namespace": ns}
        event.magnitudes = [mag]
        event.preferred_magnitude_id = mag.resource_id

    return event