def _deserialize(self, zmap_str):
    catalog = Catalog()
    for row in zmap_str.split('\n'):
        if len(row) == 0:
            continue
        origin = Origin()
        event = Event(origins=[origin])
        event.preferred_origin_id = origin.resource_id.id
        # Begin value extraction
        columns = row.split('\t', 13)[:13]  # ignore extra columns
        values = dict(zip(_STD_ZMAP_COLUMNS + _EXT_ZMAP_COLUMNS, columns))
        # Extract origin
        origin.longitude = self._str2num(values.get('lon'))
        origin.latitude = self._str2num(values.get('lat'))
        depth = self._str2num(values.get('depth'))
        if depth is not None:
            origin.depth = depth * 1000.0
        z_err = self._str2num(values.get('z_err'))
        if z_err is not None:
            origin.depth_errors.uncertainty = z_err * 1000.0
        h_err = self._str2num(values.get('h_err'))
        if h_err is not None:
            ou = OriginUncertainty()
            ou.horizontal_uncertainty = h_err
            ou.preferred_description = 'horizontal uncertainty'
            origin.origin_uncertainty = ou
        year = self._str2num(values.get('year'))
        if year is not None:
            t_fields = ['year', 'month', 'day', 'hour', 'minute', 'second']
            comps = [self._str2num(values.get(f)) for f in t_fields]
            if year % 1 != 0:
                origin.time = self._decyear2utc(year)
            elif any(v > 0 for v in comps[1:]):
                # no seconds involved
                if len(comps) < 6:
                    utc_args = [int(v) for v in comps if v is not None]
                # we also have to handle seconds
                else:
                    utc_args = [int(v) if v is not None else 0
                                for v in comps[:-1]]
                    # just leave float seconds as is
                    utc_args.append(comps[-1])
                origin.time = UTCDateTime(*utc_args)
        mag = self._str2num(values.get('mag'))
        # Extract magnitude
        if mag is not None:
            magnitude = Magnitude(mag=mag)
            m_err = self._str2num(values.get('m_err'))
            magnitude.mag_errors.uncertainty = m_err
            event.magnitudes.append(magnitude)
            event.preferred_magnitude_id = magnitude.resource_id.id
        event.scope_resource_ids()
        catalog.append(event)
    return catalog
def _parse_record_l(self, line, event):
    """
    Parses the '90 percent error ellipse' record L
    """
    origin = event.origins[0]
    semi_major_axis_azimuth = self._float(line[2:8])
    if semi_major_axis_azimuth is None:
        return
    semi_major_axis_plunge = self._float(line[8:13])
    semi_major_axis_length = self._float(line[13:21])
    intermediate_axis_azimuth = self._float(line[21:27])
    intermediate_axis_plunge = self._float(line[27:32])
    # This is called "intermediate_axis_length",
    # but it is definitively a "semi_intermediate_axis_length",
    # since in most cases:
    # (intermediate_axis_length / 2) < semi_minor_axis_length
    intermediate_axis_length = self._float(line[32:40])
    semi_minor_axis_azimuth = self._float(line[40:46])
    semi_minor_axis_plunge = self._float(line[46:51])
    semi_minor_axis_length = self._float(line[51:59])

    if (semi_minor_axis_azimuth ==
            semi_minor_axis_plunge ==
            semi_minor_axis_length == 0):
        semi_minor_axis_azimuth = intermediate_axis_azimuth
        semi_minor_axis_plunge = intermediate_axis_plunge
        semi_minor_axis_length = intermediate_axis_length
        origin.depth_type = 'operator assigned'

    # FIXME: The following code needs to be double-checked!
    semi_major_axis_unit_vect = \
        self._spherical_to_cartesian((1, semi_major_axis_azimuth,
                                      semi_major_axis_plunge))
    semi_minor_axis_unit_vect = \
        self._spherical_to_cartesian((1, semi_minor_axis_azimuth,
                                      semi_minor_axis_plunge))
    major_axis_rotation = \
        self._angle_between(semi_major_axis_unit_vect,
                            semi_minor_axis_unit_vect)

    origin.origin_uncertainty = OriginUncertainty()
    origin.origin_uncertainty.preferred_description = 'confidence ellipsoid'
    origin.origin_uncertainty.confidence_level = 90
    confidence_ellipsoid = ConfidenceEllipsoid()
    confidence_ellipsoid.semi_major_axis_length = \
        semi_major_axis_length * 1000
    confidence_ellipsoid.semi_minor_axis_length = \
        semi_minor_axis_length * 1000
    confidence_ellipsoid.semi_intermediate_axis_length = \
        intermediate_axis_length * 1000
    confidence_ellipsoid.major_axis_plunge = semi_major_axis_plunge
    confidence_ellipsoid.major_axis_azimuth = semi_major_axis_azimuth
    # We need to add 90 to match NEIC QuakeML format,
    # but I don't understand why...
    confidence_ellipsoid.major_axis_rotation = major_axis_rotation + 90
    origin.origin_uncertainty.confidence_ellipsoid = confidence_ellipsoid
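
# The two geometry helpers used above (self._spherical_to_cartesian and
# self._angle_between) are not part of this excerpt. The standalone functions
# below are only a minimal sketch of what they might look like, assuming
# azimuth/plunge are given in degrees and a north/east/down Cartesian
# convention; the actual implementation may differ.
import numpy as np


def _spherical_to_cartesian(spherical_coords):
    """Convert (r, azimuth, plunge) in degrees to a Cartesian vector."""
    r, azimuth, plunge = spherical_coords
    az = np.radians(azimuth)
    pl = np.radians(plunge)
    # x: north, y: east, z: down (one common seismological convention)
    x = r * np.cos(pl) * np.cos(az)
    y = r * np.cos(pl) * np.sin(az)
    z = r * np.sin(pl)
    return np.array([x, y, z])


def _angle_between(u, v):
    """Angle between two vectors, in degrees."""
    cos_angle = np.dot(u, v) / (np.linalg.norm(u) * np.linalg.norm(v))
    return np.degrees(np.arccos(np.clip(cos_angle, -1.0, 1.0)))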
def _deserialize(self, zmap_str):
    catalog = Catalog()
    for row in zmap_str.split("\n"):
        if len(row) == 0:
            continue
        origin = Origin()
        event = Event(origins=[origin])
        event.preferred_origin_id = origin.resource_id.id
        # Begin value extraction
        columns = row.split("\t", 13)[:13]  # ignore extra columns
        values = dict(zip(_STD_ZMAP_COLUMNS + _EXT_ZMAP_COLUMNS, columns))
        # Extract origin
        origin.longitude = self._str2num(values.get("lon"))
        origin.latitude = self._str2num(values.get("lat"))
        depth = self._str2num(values.get("depth"))
        if depth is not None:
            origin.depth = depth * 1000.0
        z_err = self._str2num(values.get("z_err"))
        if z_err is not None:
            origin.depth_errors.uncertainty = z_err * 1000.0
        h_err = self._str2num(values.get("h_err"))
        if h_err is not None:
            ou = OriginUncertainty()
            ou.horizontal_uncertainty = h_err
            ou.preferred_description = "horizontal uncertainty"
            origin.origin_uncertainty = ou
        year = self._str2num(values.get("year"))
        if year is not None:
            t_fields = ["year", "month", "day", "hour", "minute", "second"]
            comps = [self._str2num(values.get(f)) for f in t_fields]
            if year % 1 != 0:
                origin.time = self._decyear2utc(year)
            elif any(v > 0 for v in comps[1:]):
                utc_args = [int(v) for v in comps if v is not None]
                origin.time = UTCDateTime(*utc_args)
        mag = self._str2num(values.get("mag"))
        # Extract magnitude
        if mag is not None:
            magnitude = Magnitude(mag=mag)
            m_err = self._str2num(values.get("m_err"))
            magnitude.mag_errors.uncertainty = m_err
            event.magnitudes.append(magnitude)
            event.preferred_magnitude_id = magnitude.resource_id.id
        catalog.append(event)
    return catalog
def ORNL_events_to_cat(ornl_file):
    """Make Catalog from ORNL locations"""
    cat = Catalog()
    loc_df = pd.read_csv(ornl_file, infer_datetime_format=True)
    loc_df = loc_df.set_index('event_datetime')
    eid = 0
    for dt, row in loc_df.iterrows():
        ot = UTCDateTime(dt)
        hmc_east = row['x(m)']
        hmc_north = row['y(m)']
        hmc_elev = row['z(m)']
        errX = row['error_x (m)']
        errY = row['error_y (m)']
        errZ = row['error_z (m)']
        rms = row['rms (millisecond)']
        converter = SURF_converter()
        lon, lat, elev = converter.to_lonlat((hmc_east, hmc_north, hmc_elev))
        o = Origin(time=ot, latitude=lat, longitude=lon, depth=130 - elev)
        o.origin_uncertainty = OriginUncertainty()
        o.quality = OriginQuality()
        ou = o.origin_uncertainty
        oq = o.quality
        ou.max_horizontal_uncertainty = np.max([errX, errY])
        ou.min_horizontal_uncertainty = np.min([errX, errY])
        o.depth_errors.uncertainty = errZ
        oq.standard_error = rms * 1e3
        extra = AttribDict({
            'hmc_east': {'value': hmc_east, 'namespace': 'smi:local/hmc'},
            'hmc_north': {'value': hmc_north, 'namespace': 'smi:local/hmc'},
            'hmc_elev': {'value': hmc_elev, 'namespace': 'smi:local/hmc'},
            'hmc_eid': {'value': eid, 'namespace': 'smi:local/hmc'}
        })
        o.extra = extra
        rid = ResourceIdentifier(id=ot.strftime('%Y%m%d%H%M%S%f'))
        # Dummy magnitude of 1. for all events until further notice
        mag = Magnitude(mag=1., mag_errors=QuantityError(uncertainty=1.))
        ev = Event(origins=[o], magnitudes=[mag], resource_id=rid)
        ev.preferred_origin_id = o.resource_id.id
        cat.events.append(ev)
        eid += 1
    return cat
def __toOrigin(parser, origin_el):
    """
    Parses a given origin etree element.

    :type parser: :class:`~obspy.core.util.xmlwrapper.XMLParser`
    :param parser: Open XMLParser object.
    :type origin_el: etree.element
    :param origin_el: origin element to be parsed.
    :return: An ObsPy :class:`~obspy.core.event.Origin` object.
    """
    global CURRENT_TYPE

    origin = Origin()
    origin.resource_id = ResourceIdentifier(
        prefix="/".join([RESOURCE_ROOT, "origin"]))

    # I guess setting the program used as the method id is fine.
    origin.method_id = "%s/location_method/%s/1" % (
        RESOURCE_ROOT, parser.xpath2obj('program', origin_el))
    if str(origin.method_id).lower().endswith("none"):
        origin.method_id = None

    # Standard parameters.
    origin.time, origin.time_errors = \
        __toTimeQuantity(parser, origin_el, "time")
    origin.latitude, origin_latitude_error = \
        __toFloatQuantity(parser, origin_el, "latitude")
    origin.longitude, origin_longitude_error = \
        __toFloatQuantity(parser, origin_el, "longitude")
    origin.depth, origin.depth_errors = \
        __toFloatQuantity(parser, origin_el, "depth")

    if origin_longitude_error:
        origin_longitude_error = origin_longitude_error["uncertainty"]
    if origin_latitude_error:
        origin_latitude_error = origin_latitude_error["uncertainty"]

    # Figure out the depth type.
    depth_type = parser.xpath2obj("depth_type", origin_el)
    # Map Seishub specific depth type to the QuakeML depth type.
    if depth_type == "from location program":
        depth_type = "from location"
    if depth_type is not None:
        origin.depth_type = depth_type

    # XXX: CHECK DEPTH ORIENTATION!!
    if CURRENT_TYPE == "seiscomp3":
        origin.depth *= 1000
        if origin.depth_errors.uncertainty:
            origin.depth_errors.uncertainty *= 1000
    else:
        # Convert to m.
        origin.depth *= -1000
        if origin.depth_errors.uncertainty:
            origin.depth_errors.uncertainty *= 1000

    # Earth model.
    earth_mod = parser.xpath2obj('earth_mod', origin_el, str)
    if earth_mod:
        earth_mod = earth_mod.split()
        earth_mod = ",".join(earth_mod)
        origin.earth_model_id = "%s/earth_model/%s/1" % (RESOURCE_ROOT,
                                                         earth_mod)

    if (origin_latitude_error is None or origin_longitude_error is None) and \
            CURRENT_TYPE not in ["seiscomp3", "toni"]:
        print("AAAAAAAAAAAAA")
        raise Exception

    if origin_latitude_error and origin_longitude_error:
        if CURRENT_TYPE in ["baynet", "obspyck"]:
            uncert = OriginUncertainty()
            if origin_latitude_error > origin_longitude_error:
                uncert.azimuth_max_horizontal_uncertainty = 0
            else:
                uncert.azimuth_max_horizontal_uncertainty = 90
            uncert.min_horizontal_uncertainty, \
                uncert.max_horizontal_uncertainty = \
                sorted([origin_longitude_error, origin_latitude_error])
            uncert.min_horizontal_uncertainty *= 1000.0
            uncert.max_horizontal_uncertainty *= 1000.0
            uncert.preferred_description = "uncertainty ellipse"
            origin.origin_uncertainty = uncert
        elif CURRENT_TYPE == "earthworm":
            uncert = OriginUncertainty()
            uncert.horizontal_uncertainty = origin_latitude_error
            uncert.horizontal_uncertainty *= 1000.0
            uncert.preferred_description = "horizontal uncertainty"
            origin.origin_uncertainty = uncert
        elif CURRENT_TYPE in ["seiscomp3", "toni"]:
            pass
        else:
            raise Exception

    # Parse the OriginQuality if applicable.
    if not origin_el.xpath("originQuality"):
        return origin

    origin_quality_el = origin_el.xpath("originQuality")[0]
    origin.quality = OriginQuality()
    origin.quality.associated_phase_count = \
        parser.xpath2obj("associatedPhaseCount", origin_quality_el, int)

    # QuakeML does apparently not distinguish between P and S wave phase
    # count. Some Seishub event files do.
    p_phase_count = parser.xpath2obj("P_usedPhaseCount",
                                     origin_quality_el, int)
    s_phase_count = parser.xpath2obj("S_usedPhaseCount",
                                     origin_quality_el, int)
    # Use both in case they are set.
    if p_phase_count is not None and s_phase_count is not None:
        phase_count = p_phase_count + s_phase_count
        # Also add two Seishub element file specific elements.
        origin.quality.p_used_phase_count = p_phase_count
        origin.quality.s_used_phase_count = s_phase_count
    # Otherwise the total usedPhaseCount should be specified.
    else:
        phase_count = parser.xpath2obj("usedPhaseCount",
                                       origin_quality_el, int)
    if p_phase_count is not None:
        origin.quality.setdefault("extra", AttribDict())
        origin.quality.extra.usedPhaseCountP = {'value': p_phase_count,
                                                'namespace': NAMESPACE}
    if s_phase_count is not None:
        origin.quality.setdefault("extra", AttribDict())
        origin.quality.extra.usedPhaseCountS = {'value': s_phase_count,
                                                'namespace': NAMESPACE}
    origin.quality.used_phase_count = phase_count

    associated_station_count = \
        parser.xpath2obj("associatedStationCount", origin_quality_el, int)
    used_station_count = parser.xpath2obj("usedStationCount",
                                          origin_quality_el, int)
    depth_phase_count = parser.xpath2obj("depthPhaseCount",
                                         origin_quality_el, int)
    standard_error = parser.xpath2obj("standardError",
                                      origin_quality_el, float)
    azimuthal_gap = parser.xpath2obj("azimuthalGap",
                                     origin_quality_el, float)
    secondary_azimuthal_gap = \
        parser.xpath2obj("secondaryAzimuthalGap", origin_quality_el, float)
    ground_truth_level = parser.xpath2obj("groundTruthLevel",
                                          origin_quality_el, str)
    minimum_distance = parser.xpath2obj("minimumDistance",
                                        origin_quality_el, float)
    maximum_distance = parser.xpath2obj("maximumDistance",
                                        origin_quality_el, float)
    median_distance = parser.xpath2obj("medianDistance",
                                       origin_quality_el, float)
    if minimum_distance is not None:
        minimum_distance = kilometer2degrees(minimum_distance)
    if maximum_distance is not None:
        maximum_distance = kilometer2degrees(maximum_distance)
    if median_distance is not None:
        median_distance = kilometer2degrees(median_distance)

    if associated_station_count is not None:
        origin.quality.associated_station_count = associated_station_count
    if used_station_count is not None:
        origin.quality.used_station_count = used_station_count
    if depth_phase_count is not None:
        origin.quality.depth_phase_count = depth_phase_count
    if standard_error is not None and not math.isnan(standard_error):
        origin.quality.standard_error = standard_error
    if azimuthal_gap is not None:
        origin.quality.azimuthal_gap = azimuthal_gap
    if secondary_azimuthal_gap is not None:
        origin.quality.secondary_azimuthal_gap = secondary_azimuthal_gap
    if ground_truth_level is not None:
        origin.quality.ground_truth_level = ground_truth_level
    if minimum_distance is not None:
        origin.quality.minimum_distance = minimum_distance
    if maximum_distance is not None:
        origin.quality.maximum_distance = maximum_distance
    if median_distance is not None and not math.isnan(median_distance):
        origin.quality.median_distance = median_distance
    return origin
def full_test_event():
    """
    Function to generate a basic, full test event
    """
    test_event = Event()
    test_event.origins.append(
        Origin(time=UTCDateTime("2012-03-26") + 1.2, latitude=45.0,
               longitude=25.0, depth=15000))
    test_event.event_descriptions.append(EventDescription())
    test_event.event_descriptions[0].text = 'LE'
    test_event.creation_info = CreationInfo(agency_id='TES')
    test_event.magnitudes.append(
        Magnitude(mag=0.1, magnitude_type='ML',
                  creation_info=CreationInfo('TES'),
                  origin_id=test_event.origins[0].resource_id))
    test_event.magnitudes.append(
        Magnitude(mag=0.5, magnitude_type='Mc',
                  creation_info=CreationInfo('TES'),
                  origin_id=test_event.origins[0].resource_id))
    test_event.magnitudes.append(
        Magnitude(mag=1.3, magnitude_type='Ms',
                  creation_info=CreationInfo('TES'),
                  origin_id=test_event.origins[0].resource_id))

    # Define the test pick
    _waveform_id_1 = WaveformStreamID(station_code='FOZ', channel_code='SHZ',
                                      network_code='NZ')
    _waveform_id_2 = WaveformStreamID(station_code='WTSZ', channel_code='BH1',
                                      network_code=' ')
    # Pick to associate with amplitude
    test_event.picks.append(
        Pick(waveform_id=_waveform_id_1, phase_hint='IAML',
             polarity='undecidable', time=UTCDateTime("2012-03-26") + 1.68,
             evaluation_mode="manual"))
    # Need a second pick for coda
    test_event.picks.append(
        Pick(waveform_id=_waveform_id_1, onset='impulsive', phase_hint='PN',
             polarity='positive', time=UTCDateTime("2012-03-26") + 1.68,
             evaluation_mode="manual"))
    # Unassociated pick
    test_event.picks.append(
        Pick(waveform_id=_waveform_id_2, onset='impulsive', phase_hint='SG',
             polarity='undecidable', time=UTCDateTime("2012-03-26") + 1.72,
             evaluation_mode="manual"))
    # Unassociated pick
    test_event.picks.append(
        Pick(waveform_id=_waveform_id_2, onset='impulsive', phase_hint='PN',
             polarity='undecidable', time=UTCDateTime("2012-03-26") + 1.62,
             evaluation_mode="automatic"))

    # Test a generic local magnitude amplitude pick
    test_event.amplitudes.append(
        Amplitude(generic_amplitude=2.0, period=0.4,
                  pick_id=test_event.picks[0].resource_id,
                  waveform_id=test_event.picks[0].waveform_id, unit='m',
                  magnitude_hint='ML', category='point', type='AML'))
    # Test a coda magnitude pick
    test_event.amplitudes.append(
        Amplitude(generic_amplitude=10,
                  pick_id=test_event.picks[1].resource_id,
                  waveform_id=test_event.picks[1].waveform_id, type='END',
                  category='duration', unit='s', magnitude_hint='Mc',
                  snr=2.3))

    test_event.origins[0].arrivals.append(
        Arrival(time_weight=0, phase=test_event.picks[1].phase_hint,
                pick_id=test_event.picks[1].resource_id))
    test_event.origins[0].arrivals.append(
        Arrival(time_weight=2, phase=test_event.picks[2].phase_hint,
                pick_id=test_event.picks[2].resource_id,
                backazimuth_residual=5, time_residual=0.2, distance=15,
                azimuth=25))
    test_event.origins[0].arrivals.append(
        Arrival(time_weight=2, phase=test_event.picks[3].phase_hint,
                pick_id=test_event.picks[3].resource_id,
                backazimuth_residual=5, time_residual=0.2, distance=15,
                azimuth=25))

    # Add in error info (line E)
    test_event.origins[0].quality = OriginQuality(standard_error=0.01,
                                                  azimuthal_gap=36)
    # Origin uncertainty in Seisan is output as long-lat-depth, quakeML has
    # semi-major and semi-minor
    test_event.origins[0].origin_uncertainty = OriginUncertainty(
        confidence_ellipsoid=ConfidenceEllipsoid(
            semi_major_axis_length=3000, semi_minor_axis_length=1000,
            semi_intermediate_axis_length=2000, major_axis_plunge=20,
            major_axis_azimuth=100, major_axis_rotation=4))
    test_event.origins[0].time_errors = QuantityError(uncertainty=0.5)

    # Add in fault-plane solution info (line F) - Note have to check program
    # used to determine which fields are filled....
    test_event.focal_mechanisms.append(
        FocalMechanism(nodal_planes=NodalPlanes(
            nodal_plane_1=NodalPlane(strike=180, dip=20, rake=30,
                                     strike_errors=QuantityError(10),
                                     dip_errors=QuantityError(10),
                                     rake_errors=QuantityError(20))),
            method_id=ResourceIdentifier(
                "smi:nc.anss.org/focalMechanism/FPFIT"),
            creation_info=CreationInfo(agency_id="NC"), misfit=0.5,
            station_distribution_ratio=0.8))
    # Need to test high-precision origin and that it is preferred origin.
    # Moment tensor includes another origin
    test_event.origins.append(
        Origin(time=UTCDateTime("2012-03-26") + 1.2, latitude=45.1,
               longitude=25.2, depth=14500))
    test_event.magnitudes.append(
        Magnitude(mag=0.1, magnitude_type='MW',
                  creation_info=CreationInfo('TES'),
                  origin_id=test_event.origins[-1].resource_id))
    # Moment tensors go with focal-mechanisms
    test_event.focal_mechanisms.append(
        FocalMechanism(moment_tensor=MomentTensor(
            derived_origin_id=test_event.origins[-1].resource_id,
            moment_magnitude_id=test_event.magnitudes[-1].resource_id,
            scalar_moment=100, tensor=Tensor(
                m_rr=100, m_tt=100, m_pp=10, m_rt=1, m_rp=20, m_tp=15),
            method_id=ResourceIdentifier(
                'smi:nc.anss.org/momentTensor/BLAH'))))
    return test_event
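
# A quick way to sanity-check the generated test event is to round-trip it
# through QuakeML; the output file name below is purely illustrative.
from obspy import read_events
from obspy.core.event import Catalog

cat = Catalog(events=[full_test_event()])
cat.write("full_test_event.xml", format="QUAKEML")  # hypothetical path
assert len(read_events("full_test_event.xml")) == 1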
def _parse_second_line_origin(self, line, event, origin, magnitudes):
    magnitude_errors = []
    fields = self.fields['line_2']
    standard_error = line[fields['rms']].strip()
    time_uncertainty = line[fields['ot_error']].strip()
    max_horizontal_uncertainty = line[fields['s_major']].strip()
    min_horizontal_uncertainty = line[fields['s_minor']].strip()
    azimuth_max_horizontal_uncertainty = line[fields['az']].strip()
    depth_uncertainty = line[fields['depth_err']].strip()
    min_distance = line[fields['min_dist']].strip()
    max_distance = line[fields['max_dist']].strip()
    magnitude_errors.append(line[fields['mag_err_1']].strip())
    magnitude_errors.append(line[fields['mag_err_2']].strip())
    magnitude_errors.append(line[fields['mag_err_3']].strip())
    analysis_type = line[fields['antype']].strip().lower()
    location_method = line[fields['loctype']].strip().lower()
    event_type = line[fields['evtype']].strip().lower()

    try:
        origin.quality.standard_error = float(standard_error)
    except ValueError:
        pass
    try:
        origin.time_errors.uncertainty = float(time_uncertainty)
    except ValueError:
        pass
    try:
        uncertainty = OriginUncertainty()
        # Convert values from km to m
        min_value = float(min_horizontal_uncertainty) * 1000
        max_value = float(max_horizontal_uncertainty) * 1000
        azimuth_value = float(azimuth_max_horizontal_uncertainty)
        description = OriginUncertaintyDescription('uncertainty ellipse')
        uncertainty.min_horizontal_uncertainty = min_value
        uncertainty.max_horizontal_uncertainty = max_value
        uncertainty.azimuth_max_horizontal_uncertainty = azimuth_value
        uncertainty.preferred_description = description
        origin.origin_uncertainty = uncertainty
    except ValueError:
        pass
    try:
        # Convert value from km to m
        origin.depth_errors.uncertainty = float(depth_uncertainty) * 1000
    except ValueError:
        pass
    try:
        origin.quality.minimum_distance = float(min_distance)
        origin.quality.maximum_distance = float(max_distance)
    except ValueError:
        self._warn('Missing minimum/maximum distance')
    for i in range(2):
        try:
            mag_errors = magnitudes[i].mag_errors
            mag_errors.uncertainty = float(magnitude_errors[i])
        except (AttributeError, ValueError):
            pass

    # No match for 'g' (guess)
    # We map 'g' to 'manual' and create a comment for origin
    try:
        origin.evaluation_mode = EVALUATION_MODES[analysis_type]
        if analysis_type == 'g':
            # comment: 'GSE2.0:antype=g'
            text = 'GSE2.0:antype=g'
            comment = self._comment(text)
            origin.comments.append(comment)
    except KeyError:
        self._warn('Wrong analysis type')

    if location_method not in LOCATION_METHODS.keys():
        location_method = 'o'
    method = LOCATION_METHODS[location_method]
    method_id = "method/%s" % method
    origin.method_id = self._get_res_id(method_id)

    if event_type not in EVENT_TYPES.keys():
        event_type = 'uk'
        self._warn('Wrong or unknown event type')
    event_data = EVENT_TYPES[event_type]
    event.event_type_certainty, event.event_type = event_data
    # comment: 'GSE2.0:evtype=<evtype>'
    if event_type:
        text = 'GSE2.0:evtype=%s' % event_type
        comment = self._comment(text)
        event.comments.append(comment)
def _read_single_event(event_file, locate_dir, units, local_mag_ph):
    """
    Parse an event file from QuakeMigrate into an obspy Event object.

    Parameters
    ----------
    event_file : `pathlib.Path` object
        Path to .event file to read.
    locate_dir : `pathlib.Path` object
        Path to locate directory (contains "events", "picks" etc.
        directories).
    units : {"km", "m"}
        Grid projection coordinates for QM LUT (determines units of depths
        and uncertainties in the .event files).
    local_mag_ph : {"S", "P"}
        Amplitude measurement used to calculate local magnitudes.

    Returns
    -------
    event : `obspy.Event` object
        Event object populated with all available information output by
        :class:`~quakemigrate.signal.scan.locate()`, including event
        locations and uncertainties, picks, and amplitudes and magnitudes
        if available.

    """

    # Parse information from event file
    event_info = pd.read_csv(event_file).iloc[0]
    event_uid = str(event_info["EventID"])

    # Set distance conversion factor (from units of QM LUT projection units).
    if units == "km":
        factor = 1e3
    elif units == "m":
        factor = 1
    else:
        raise AttributeError(f"units must be 'km' or 'm'; not {units}")

    # Create event object to store origin and pick information
    event = Event()
    event.extra = AttribDict()
    event.resource_id = str(event_info["EventID"])
    event.creation_info = CreationInfo(author="QuakeMigrate",
                                       version=quakemigrate.__version__)

    # Add COA info to extra
    event.extra.coa = {"value": event_info["COA"], "namespace": ns}
    event.extra.coa_norm = {"value": event_info["COA_NORM"], "namespace": ns}
    event.extra.trig_coa = {"value": event_info["TRIG_COA"], "namespace": ns}
    event.extra.dec_coa = {"value": event_info["DEC_COA"], "namespace": ns}
    event.extra.dec_coa_norm = {"value": event_info["DEC_COA_NORM"],
                                "namespace": ns}

    # Determine location of cut waveform data - add to event object as a
    # custom extra attribute.
    mseed = locate_dir / "raw_cut_waveforms" / event_uid
    event.extra.cut_waveforms_file = {
        "value": str(mseed.with_suffix(".m").resolve()),
        "namespace": ns
    }
    if (locate_dir / "real_cut_waveforms").exists():
        mseed = locate_dir / "real_cut_waveforms" / event_uid
        event.extra.real_cut_waveforms_file = {
            "value": str(mseed.with_suffix(".m").resolve()),
            "namespace": ns
        }
    if (locate_dir / "wa_cut_waveforms").exists():
        mseed = locate_dir / "wa_cut_waveforms" / event_uid
        event.extra.wa_cut_waveforms_file = {
            "value": str(mseed.with_suffix(".m").resolve()),
            "namespace": ns
        }

    # Create origin with spline location and set to preferred event origin.
    origin = Origin()
    origin.method_id = "spline"
    origin.longitude = event_info["X"]
    origin.latitude = event_info["Y"]
    origin.depth = event_info["Z"] * factor
    origin.time = UTCDateTime(event_info["DT"])
    event.origins = [origin]
    event.preferred_origin_id = origin.resource_id

    # Create origin with gaussian location and associate with event
    origin = Origin()
    origin.method_id = "gaussian"
    origin.longitude = event_info["GAU_X"]
    origin.latitude = event_info["GAU_Y"]
    origin.depth = event_info["GAU_Z"] * factor
    origin.time = UTCDateTime(event_info["DT"])
    event.origins.append(origin)

    ouc = OriginUncertainty()
    ce = ConfidenceEllipsoid()
    ce.semi_major_axis_length = event_info["COV_ErrY"] * factor
    ce.semi_intermediate_axis_length = event_info["COV_ErrX"] * factor
    ce.semi_minor_axis_length = event_info["COV_ErrZ"] * factor
    ce.major_axis_plunge = 0
    ce.major_axis_azimuth = 0
    ce.major_axis_rotation = 0
    ouc.confidence_ellipsoid = ce
    ouc.preferred_description = "confidence ellipsoid"

    # Set uncertainties for both as the gaussian uncertainties
    for origin in event.origins:
        origin.longitude_errors.uncertainty = kilometer2degrees(
            event_info["GAU_ErrX"] * factor / 1e3)
        origin.latitude_errors.uncertainty = kilometer2degrees(
            event_info["GAU_ErrY"] * factor / 1e3)
        origin.depth_errors.uncertainty = event_info["GAU_ErrZ"] * factor
        origin.origin_uncertainty = ouc

    # Add OriginQuality info to each origin?
    for origin in event.origins:
        origin.origin_type = "hypocenter"
        origin.evaluation_mode = "automatic"

    # --- Handle picks file ---
    pick_file = locate_dir / "picks" / event_uid
    if pick_file.with_suffix(".picks").is_file():
        picks = pd.read_csv(pick_file.with_suffix(".picks"))
    else:
        return None

    for _, pickline in picks.iterrows():
        station = str(pickline["Station"])
        phase = str(pickline["Phase"])
        wid = WaveformStreamID(network_code="", station_code=station)

        for method in ["modelled", "autopick"]:
            pick = Pick()
            pick.extra = AttribDict()
            pick.waveform_id = wid
            pick.method_id = method
            pick.phase_hint = phase
            if method == "autopick" and str(pickline["PickTime"]) != "-1":
                pick.time = UTCDateTime(pickline["PickTime"])
                pick.time_errors.uncertainty = float(pickline["PickError"])
                pick.extra.snr = {"value": float(pickline["SNR"]),
                                  "namespace": ns}
            elif method == "modelled":
                pick.time = UTCDateTime(pickline["ModelledTime"])
            else:
                continue
            event.picks.append(pick)

    # --- Handle amplitudes file ---
    amps_file = locate_dir / "amplitudes" / event_uid
    if amps_file.with_suffix(".amps").is_file():
        amps = pd.read_csv(amps_file.with_suffix(".amps"))
        i = 0
        for _, ampsline in amps.iterrows():
            wid = WaveformStreamID(seed_string=ampsline["id"])
            noise_amp = ampsline["Noise_amp"] / 1000  # mm to m
            for phase in ["P_amp", "S_amp"]:
                amp = Amplitude()
                if pd.isna(ampsline[phase]):
                    continue
                amp.generic_amplitude = ampsline[phase] / 1000  # mm to m
                amp.generic_amplitude_errors.uncertainty = noise_amp
                amp.unit = "m"
                amp.type = "AML"
                amp.method_id = phase
                amp.period = 1 / ampsline[f"{phase[0]}_freq"]
                amp.time_window = TimeWindow(
                    reference=UTCDateTime(ampsline[f"{phase[0]}_time"]))
                # amp.pick_id = ?
                amp.waveform_id = wid
                # amp.filter_id = ?
                amp.magnitude_hint = "ML"
                amp.evaluation_mode = "automatic"
                amp.extra = AttribDict()
                try:
                    amp.extra.filter_gain = {
                        "value": ampsline[f"{phase[0]}_filter_gain"],
                        "namespace": ns
                    }
                    amp.extra.avg_amp = {
                        "value": ampsline[f"{phase[0]}_avg_amp"] / 1000,  # m
                        "namespace": ns
                    }
                except KeyError:
                    pass

                if phase[0] == local_mag_ph and not pd.isna(ampsline["ML"]):
                    i += 1
                    stat_mag = StationMagnitude()
                    stat_mag.extra = AttribDict()
                    # stat_mag.origin_id = ? local_mag_loc
                    stat_mag.mag = ampsline["ML"]
                    stat_mag.mag_errors.uncertainty = ampsline["ML_Err"]
                    stat_mag.station_magnitude_type = "ML"
                    stat_mag.amplitude_id = amp.resource_id
                    stat_mag.extra.picked = {
                        "value": ampsline["is_picked"],
                        "namespace": ns
                    }
                    stat_mag.extra.epi_dist = {
                        "value": ampsline["epi_dist"],
                        "namespace": ns
                    }
                    stat_mag.extra.z_dist = {
                        "value": ampsline["z_dist"],
                        "namespace": ns
                    }
                    event.station_magnitudes.append(stat_mag)

                event.amplitudes.append(amp)

        mag = Magnitude()
        mag.extra = AttribDict()
        mag.mag = event_info["ML"]
        mag.mag_errors.uncertainty = event_info["ML_Err"]
        mag.magnitude_type = "ML"
        # mag.origin_id = ?
        mag.station_count = i
        mag.evaluation_mode = "automatic"
        mag.extra.r2 = {"value": event_info["ML_r2"], "namespace": ns}
        event.magnitudes = [mag]
        event.preferred_magnitude_id = mag.resource_id

    return event
def _parse_origin(self, line):
    # 1-10    i4,a1,i2,a1,i2    epicenter date (yyyy/mm/dd)
    # 12-22   i2,a1,i2,a1,f5.2  epicenter time (hh:mm:ss.ss)
    time = UTCDateTime.strptime(line[:17], '%Y/%m/%d %H:%M:')
    time += float(line[17:22])
    # 23      a1    fixed flag (f = fixed origin time solution, blank if
    #               not a fixed origin time)
    time_fixed = fixed_flag(line[22])
    # 25-29   f5.2  origin time error (seconds; blank if fixed origin time)
    time_error = float_or_none(line[24:29])
    time_error = time_error and QuantityError(uncertainty=time_error)
    # 31-35   f5.2  root mean square of time residuals (seconds)
    rms = float_or_none(line[30:35])
    # 37-44   f8.4  latitude (negative for South)
    latitude = float_or_none(line[36:44])
    # 46-54   f9.4  longitude (negative for West)
    longitude = float_or_none(line[45:54])
    # 55      a1    fixed flag (f = fixed epicenter solution, blank if not
    #               a fixed epicenter solution)
    epicenter_fixed = fixed_flag(line[54])
    # 56-60   f5.1  semi-major axis of 90% ellipse or its estimate
    #               (km, blank if fixed epicenter)
    _uncertainty_major_m = float_or_none(line[55:60], multiplier=1e3)
    # 62-66   f5.1  semi-minor axis of 90% ellipse or its estimate
    #               (km, blank if fixed epicenter)
    _uncertainty_minor_m = float_or_none(line[61:66], multiplier=1e3)
    # 68-70   i3    strike (0 <= x <= 360) of error ellipse clock-wise from
    #               North (degrees)
    _uncertainty_major_azimuth = float_or_none(line[67:70])
    # 72-76   f5.1  depth (km)
    depth = float_or_none(line[71:76], multiplier=1e3)
    # 77      a1    fixed flag (f = fixed depth station, d = depth phases,
    #               blank if not a fixed depth)
    depth_fixed = fixed_flag(line[76])
    # 79-82   f4.1  depth error 90% (km; blank if fixed depth)
    depth_error = float_or_none(line[78:82], multiplier=1e3)
    # 84-87   i4    number of defining phases
    used_phase_count = int_or_none(line[83:87])
    # 89-92   i4    number of defining stations
    used_station_count = int_or_none(line[88:92])
    # 94-96   i3    gap in azimuth coverage (degrees)
    azimuthal_gap = float_or_none(line[93:96])
    # 98-103  f6.2  distance to closest station (degrees)
    minimum_distance = float_or_none(line[97:103])
    # 105-110 f6.2  distance to furthest station (degrees)
    maximum_distance = float_or_none(line[104:110])
    # 112     a1    analysis type: (a = automatic, m = manual, g = guess)
    evaluation_mode, evaluation_status = \
        evaluation_mode_and_status(line[111])
    # 114     a1    location method: (i = inversion, p = pattern
    #               recognition, g = ground truth, o = other)
    location_method = LOCATION_METHODS[line[113].strip().lower()]
    # 116-117 a2    event type:
    # XXX event type and event type certainty is specified per origin,
    # XXX not sure how to best handle this, for now only use it if
    # XXX information on the individual origins do not clash.. not sure yet
    # XXX how to identify the preferred origin..
    event_type, event_type_certainty = \
        EVENT_TYPE_CERTAINTY[line[115:117].strip().lower()]
    # 119-127 a9    author of the origin
    author = line[118:127].strip()
    # 129-136 a8    origin identification
    origin_id = self._construct_id(['origin', line[128:136].strip()])

    # do some combinations
    depth_error = depth_error and dict(uncertainty=depth_error,
                                       confidence_level=90)
    if all(v is not None for v in (_uncertainty_major_m,
                                   _uncertainty_minor_m,
                                   _uncertainty_major_azimuth)):
        origin_uncertainty = OriginUncertainty(
            min_horizontal_uncertainty=_uncertainty_minor_m,
            max_horizontal_uncertainty=_uncertainty_major_m,
            azimuth_max_horizontal_uncertainty=_uncertainty_major_azimuth,
            preferred_description='uncertainty ellipse',
            confidence_level=90)
        # event init always sets an empty QuantityError, even when
        # specifying None, which is strange
        for key in ['confidence_ellipsoid']:
            setattr(origin_uncertainty, key, None)
    else:
        origin_uncertainty = None
    origin_quality = OriginQuality(standard_error=rms,
                                   used_phase_count=used_phase_count,
                                   used_station_count=used_station_count,
                                   azimuthal_gap=azimuthal_gap,
                                   minimum_distance=minimum_distance,
                                   maximum_distance=maximum_distance)
    comments = []
    if location_method:
        comments.append(
            self._make_comment('location method: ' + location_method))
    if author:
        creation_info = CreationInfo(author=author)
    else:
        creation_info = None
    # assemble whole event
    origin = Origin(time=time, resource_id=origin_id, longitude=longitude,
                    latitude=latitude, depth=depth,
                    depth_errors=depth_error,
                    origin_uncertainty=origin_uncertainty,
                    time_fixed=time_fixed,
                    epicenter_fixed=epicenter_fixed,
                    origin_quality=origin_quality,
                    comments=comments,
                    creation_info=creation_info)
    # event init always sets an empty QuantityError, even when specifying
    # None, which is strange
    for key in ('time_errors', 'longitude_errors', 'latitude_errors',
                'depth_errors'):
        setattr(origin, key, None)
    return origin, event_type, event_type_certainty
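
# The small parsing helpers used above (float_or_none, int_or_none,
# fixed_flag) are not included in this excerpt. The sketch below is only an
# assumption of their behaviour, inferred from how they are called
# (blank fixed-width fields map to None, `multiplier` scales the value).
def float_or_none(string, multiplier=1):
    """Return float(string) * multiplier, or None for a blank field."""
    string = string.strip()
    return float(string) * multiplier if string else None


def int_or_none(string):
    """Return int(string), or None for a blank field."""
    string = string.strip()
    return int(string) if string else None


def fixed_flag(char):
    """Interpret a one-character 'fixed' flag column as a boolean."""
    return char.strip().lower() == 'f'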
def surf_events_to_cat(loc_file, pick_file):
    """
    Take location files (hypoinverse formatted) and picks (format TBD)
    and creates a single obspy catalog for later use and dissemination.

    :param loc_file: File path
    :param pick_file: File path
    :return: obspy.core.Catalog
    """
    # Read/parse location file and create Events for each
    surf_cat = Catalog()
    # Parse the pick file to a dictionary
    pick_dict = parse_picks(pick_file)
    with open(loc_file, 'r') as f:
        next(f)
        for ln in f:
            ln = ln.strip('\n')
            line = ln.split(',')
            eid = line[0]
            if eid not in pick_dict:
                print('No picks for this location, skipping for now.')
                continue
            ot = UTCDateTime(line[1])
            hmc_east = float(line[2])
            hmc_north = float(line[3])
            hmc_elev = float(line[4])
            gap = float(line[-5])
            rms = float(line[-3])
            errXY = float(line[-2])
            errZ = float(line[-1])
            converter = SURF_converter()
            lon, lat, elev = converter.to_lonlat((hmc_east, hmc_north,
                                                  hmc_elev))
            o = Origin(time=ot, longitude=lon, latitude=lat,
                       depth=130 - elev)
            o.origin_uncertainty = OriginUncertainty()
            o.quality = OriginQuality()
            ou = o.origin_uncertainty
            oq = o.quality
            ou.horizontal_uncertainty = errXY * 1e3
            ou.preferred_description = "horizontal uncertainty"
            o.depth_errors.uncertainty = errZ * 1e3
            oq.standard_error = rms
            oq.azimuthal_gap = gap
            extra = AttribDict({
                'hmc_east': {'value': hmc_east,
                             'namespace': 'smi:local/hmc'},
                'hmc_north': {'value': hmc_north,
                              'namespace': 'smi:local/hmc'},
                'hmc_elev': {'value': hmc_elev,
                             'namespace': 'smi:local/hmc'},
                'hmc_eid': {'value': eid, 'namespace': 'smi:local/hmc'}
            })
            o.extra = extra
            rid = ResourceIdentifier(id=ot.strftime('%Y%m%d%H%M%S%f'))
            # Dummy magnitude of 1. for all events until further notice
            mag = Magnitude(mag=1., mag_errors=QuantityError(uncertainty=1.))
            ev = Event(origins=[o], magnitudes=[mag], picks=pick_dict[eid],
                       resource_id=rid)
            surf_cat.append(ev)
    return surf_cat
def _read_evt(filename, encoding='utf-8', **kwargs):
    """
    Read a SeismicHandler EVT file and returns an ObsPy Catalog object.

    .. warning::
        This function should NOT be called directly, it registers via the
        ObsPy :func:`~obspy.core.event.read_events` function, call this
        instead.

    :param str encoding: encoding used (default: utf-8)
    :rtype: :class:`~obspy.core.event.Catalog`
    :return: An ObsPy Catalog object.

    .. note::
        The following fields are supported by this function: %s.

        Compare with http://www.seismic-handler.org/wiki/ShmDocFileEvt
    """
    with io.open(filename, 'r', encoding=encoding) as f:
        temp = f.read()
    # first create phases and phases_o dictionaries for different phases
    # and phases with origin information
    phases = defaultdict(list)
    phases_o = {}
    phase = {}
    evid = None
    for line in temp.splitlines():
        if 'End of Phase' in line:
            if 'origin time' in phase.keys():
                if evid in phases_o:
                    # found more than one origin
                    pass
                phases_o[evid] = phase
            phases[evid].append(phase)
            phase = {}
            evid = None
        elif line.strip() != '':
            try:
                key, value = line.split(':', 1)
            except ValueError:
                continue
            key = key.strip().lower()
            value = value.strip()
            if key == 'event id':
                evid = value
            elif value != '':
                phase[key] = value
    assert evid is None

    # now create obspy Events from phases and phases_o dictionaries
    events = []
    for evid in phases:
        picks = []
        arrivals = []
        stamags = []
        origins = []
        po = None
        magnitudes = []
        pm = None
        for p in phases[evid]:
            sta = p.get('station code', '')
            comp = p.get('component', '')
            pick_kwargs = _kw(p, 'pick')
            widargs = _resolve_seedid(sta, comp, time=pick_kwargs['time'],
                                      **kwargs)
            wid = WaveformStreamID(*widargs)
            pick = Pick(waveform_id=wid, **pick_kwargs)
            arrival = Arrival(pick_id=pick.resource_id, **_kw(p, 'arrival'))
            picks.append(pick)
            arrivals.append(arrival)
            stamags_temp, _ = _mags(p, evid, stamag=True, wid=wid)
            stamags.extend(stamags_temp)
        if evid in phases_o:
            o = phases_o[evid]
            uncertainty = OriginUncertainty(**_kw(o, 'origin_uncertainty'))
            origin = Origin(arrivals=arrivals,
                            origin_uncertainty=uncertainty,
                            **_kw(o, 'origin'))
            if origin.latitude is None or origin.longitude is None:
                warn('latitude or longitude not set for event %s' % evid)
            else:
                if origin.longitude_errors.uncertainty is not None:
                    origin.longitude_errors.uncertainty *= cos(
                        origin.latitude / 180 * pi)
                origins = [origin]
                po = origin.resource_id
            magnitudes, pm = _mags(o, evid)
        else:
            o = p
        event = Event(resource_id=ResourceIdentifier(evid),
                      picks=picks,
                      origins=origins,
                      magnitudes=magnitudes,
                      station_magnitudes=stamags,
                      preferred_origin_id=po,
                      preferred_magnitude_id=pm,
                      **_kw(o, 'event'))
        events.append(event)
    return Catalog(events,
                   description='Created from SeismicHandler EVT format')
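
# As the docstring above notes, this reader is meant to be used through
# obspy.read_events() rather than called directly; a minimal usage sketch,
# with "example.evt" standing in for a real SeismicHandler EVT file:
from obspy import read_events

cat = read_events("example.evt")  # format autodetected via the plugin
print(cat)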
def _map_join2origin(self, db):
    """
    Return an Origin instance from a dict of CSS key/values

    Inputs
    ======
    db : dict of key/values of CSS fields related to the origin (see Join)

    Returns
    =======
    obspy.core.event.Origin

    Notes
    =====
    Any object that supports the dict 'get' method can be passed as
    input, e.g. OrderedDict, custom classes, etc.

    Join
    ----
    origin <- origerr (outer)
    """
    # -- Basic location ------------------------------------------
    origin = Origin()
    origin.latitude = db.get('lat')
    origin.longitude = db.get('lon')
    origin.depth = _km2m(db.get('depth'))
    origin.time = _utc(db.get('time'))
    origin.extra = {}

    # -- Quality -------------------------------------------------
    quality = OriginQuality(
        associated_phase_count=db.get('nass'),
        used_phase_count=db.get('ndef'),
        standard_error=db.get('sdobs'),
    )
    origin.quality = quality

    # -- Solution Uncertainties ----------------------------------
    # in CSS the ellipse is projected onto the horizontal plane
    # using the covariance matrix
    uncertainty = OriginUncertainty()
    a = _km2m(db.get('smajax'))
    b = _km2m(db.get('sminax'))
    s = db.get('strike')
    dep_u = _km2m(db.get('sdepth'))
    time_u = db.get('stime')

    uncertainty.max_horizontal_uncertainty = a
    uncertainty.min_horizontal_uncertainty = b
    uncertainty.azimuth_max_horizontal_uncertainty = s
    uncertainty.horizontal_uncertainty = a
    uncertainty.preferred_description = "horizontal uncertainty"

    if db.get('conf') is not None:
        uncertainty.confidence_level = db.get('conf') * 100.

    if uncertainty.horizontal_uncertainty is not None:
        origin.origin_uncertainty = uncertainty

    # -- Parameter Uncertainties ---------------------------------
    if all([a, b, s]):
        n, e = _get_NE_on_ellipse(a, b, s)
        lat_u = _m2deg_lat(n)
        lon_u = _m2deg_lon(e, lat=origin.latitude)
        origin.latitude_errors = {'uncertainty': lat_u}
        origin.longitude_errors = {'uncertainty': lon_u}
    if dep_u:
        origin.depth_errors = {'uncertainty': dep_u}
    if time_u:
        origin.time_errors = {'uncertainty': time_u}

    # -- Analyst-determined Status -------------------------------
    posted_author = _str(db.get('auth'))
    mode, status = self.get_event_status(posted_author)
    origin.evaluation_mode = mode
    origin.evaluation_status = status

    # Save etype per origin due to schema differences...
    css_etype = _str(db.get('etype'))
    # Compatible with future patch rename "_namespace" -> "namespace"
    origin.extra['etype'] = {
        'value': css_etype,
        'namespace': CSS_NAMESPACE
    }

    origin.creation_info = CreationInfo(
        creation_time=_utc(db.get('lddate')),
        agency_id=self.agency,
        version=db.get('orid'),
        author=posted_author,
    )
    origin.resource_id = self._rid(origin)
    return origin
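
# Illustrative call, assuming `converter` is an instance of the class that
# owns _map_join2origin; the CSS field names come from the method body above,
# but the values here are entirely made up.
css_row = {
    'lat': 46.52, 'lon': 7.63, 'depth': 8.4, 'time': 1262304000.0,
    'nass': 24, 'ndef': 18, 'sdobs': 0.35,
    'smajax': 1.2, 'sminax': 0.8, 'strike': 45.0,
    'sdepth': 1.5, 'stime': 0.2, 'conf': 0.9,
    'auth': 'analyst', 'etype': 'eq', 'orid': 1001,
    'lddate': 1262305000.0,
}
origin = converter._map_join2origin(css_row)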
def _read_single_hypocenter(lines, coordinate_converter, original_picks):
    """
    Given a list of lines (starting with a 'NLLOC' line and ending with a
    'END_NLLOC' line), parse them into an Event.
    """
    try:
        # some paranoid checks..
        assert lines[0].startswith("NLLOC ")
        assert lines[-1].startswith("END_NLLOC")
        for line in lines[1:-1]:
            assert not line.startswith("NLLOC ")
            assert not line.startswith("END_NLLOC")
    except Exception:
        msg = ("This should not have happened, please report this as a bug "
               "at https://github.com/obspy/obspy/issues.")
        raise Exception(msg)

    indices_phases = [None, None]
    for i, line in enumerate(lines):
        if line.startswith("PHASE "):
            indices_phases[0] = i
        elif line.startswith("END_PHASE"):
            indices_phases[1] = i

    # extract PHASES lines (if any)
    if any(indices_phases):
        if not all(indices_phases):
            msg = "NLLOC HYP file seems corrupt, 'PHASE' block is corrupt."
            raise RuntimeError(msg)
        i1, i2 = indices_phases
        lines, phases_lines = lines[:i1] + lines[i2 + 1:], lines[i1 + 1:i2]
    else:
        phases_lines = []

    lines = dict([line.split(None, 1) for line in lines[:-1]])
    line = lines["SIGNATURE"]

    line = line.rstrip().split('"')[1]
    signature, version, date, time = line.rsplit(" ", 3)
    # new NLLoc > 6.0 seems to add prefix 'run:' before date
    if date.startswith('run:'):
        date = date[4:]
    signature = signature.strip()
    creation_time = UTCDateTime.strptime(date + time,
                                         str("%d%b%Y%Hh%Mm%S"))

    if coordinate_converter:
        # maximum likelihood origin location in km info line
        line = lines["HYPOCENTER"]
        x, y, z = coordinate_converter(*map(float, line.split()[1:7:2]))
    else:
        # maximum likelihood origin location lon lat info line
        line = lines["GEOGRAPHIC"]
        y, x, z = map(float, line.split()[8:13:2])

    # maximum likelihood origin time info line
    line = lines["GEOGRAPHIC"]

    year, mon, day, hour, min = map(int, line.split()[1:6])
    seconds = float(line.split()[6])
    time = UTCDateTime(year, mon, day, hour, min, seconds, strict=False)

    # distribution statistics line
    line = lines["STATISTICS"]
    covariance_xx = float(line.split()[7])
    covariance_yy = float(line.split()[13])
    covariance_zz = float(line.split()[17])
    stats_info_string = str(
        "Note: Depth/Latitude/Longitude errors are calculated from "
        "covariance matrix as 1D marginal (Lon/Lat errors as great circle "
        "degrees) while OriginUncertainty min/max horizontal errors are "
        "calculated from 2D error ellipsoid and are therefore seemingly "
        "higher compared to 1D errors. Error estimates can be reconstructed "
        "from the following original NonLinLoc error statistics line:"
        "\nSTATISTICS " + lines["STATISTICS"])

    # goto location quality info line
    line = lines["QML_OriginQuality"].split()

    (assoc_phase_count, used_phase_count, assoc_station_count,
     used_station_count, depth_phase_count) = map(int, line[1:11:2])
    stderr, az_gap, sec_az_gap = map(float, line[11:17:2])
    gt_level = line[17]
    min_dist, max_dist, med_dist = map(float, line[19:25:2])

    # goto location quality info line
    line = lines["QML_OriginUncertainty"]

    if "COMMENT" in lines:
        comment = lines["COMMENT"].strip()
        comment = comment.strip('\'"')
        comment = comment.strip()

    hor_unc, min_hor_unc, max_hor_unc, hor_unc_azim = \
        map(float, line.split()[1:9:2])

    # assign origin info
    event = Event()
    o = Origin()
    event.origins = [o]
    event.preferred_origin_id = o.resource_id
    o.origin_uncertainty = OriginUncertainty()
    o.quality = OriginQuality()
    ou = o.origin_uncertainty
    oq = o.quality
    o.comments.append(Comment(text=stats_info_string,
                              force_resource_id=False))
    event.comments.append(Comment(text=comment, force_resource_id=False))

    # SIGNATURE field's first item is LOCSIG, which is supposed to be
    # 'Identification of an individual, institution or other entity'
    # according to
    # http://alomax.free.fr/nlloc/soft6.00/control.html#_NLLoc_locsig_
    # so use it as author in creation info
    event.creation_info = CreationInfo(creation_time=creation_time,
                                       version=version,
                                       author=signature)
    o.creation_info = CreationInfo(creation_time=creation_time,
                                   version=version,
                                   author=signature)

    # negative values can appear on diagonal of covariance matrix due to a
    # precision problem in NLLoc implementation when location coordinates
    # are large compared to the covariances.
    o.longitude = x
    try:
        o.longitude_errors.uncertainty = kilometer2degrees(
            sqrt(covariance_xx))
    except ValueError:
        if covariance_xx < 0:
            msg = ("Negative value in XX value of covariance matrix, not "
                   "setting longitude error (epicentral uncertainties will "
                   "still be set in origin uncertainty).")
            warnings.warn(msg)
        else:
            raise
    o.latitude = y
    try:
        o.latitude_errors.uncertainty = kilometer2degrees(
            sqrt(covariance_yy))
    except ValueError:
        if covariance_yy < 0:
            msg = ("Negative value in YY value of covariance matrix, not "
                   "setting latitude error (epicentral uncertainties will "
                   "still be set in origin uncertainty).")
            warnings.warn(msg)
        else:
            raise
    o.depth = z * 1e3  # meters!
    o.depth_errors.uncertainty = sqrt(covariance_zz) * 1e3  # meters!
    o.depth_errors.confidence_level = 68
    o.depth_type = str("from location")
    o.time = time

    ou.horizontal_uncertainty = hor_unc
    ou.min_horizontal_uncertainty = min_hor_unc
    ou.max_horizontal_uncertainty = max_hor_unc
    # values of -1 seem to be used for unset values, set to None
    for field in ("horizontal_uncertainty", "min_horizontal_uncertainty",
                  "max_horizontal_uncertainty"):
        if ou.get(field, -1) == -1:
            ou[field] = None
        else:
            ou[field] *= 1e3  # meters!
    ou.azimuth_max_horizontal_uncertainty = hor_unc_azim
    ou.preferred_description = str("uncertainty ellipse")
    ou.confidence_level = 68  # NonLinLoc in general uses 1-sigma (68%) level

    oq.standard_error = stderr
    oq.azimuthal_gap = az_gap
    oq.secondary_azimuthal_gap = sec_az_gap
    oq.used_phase_count = used_phase_count
    oq.used_station_count = used_station_count
    oq.associated_phase_count = assoc_phase_count
    oq.associated_station_count = assoc_station_count
    oq.depth_phase_count = depth_phase_count
    oq.ground_truth_level = gt_level
    oq.minimum_distance = kilometer2degrees(min_dist)
    oq.maximum_distance = kilometer2degrees(max_dist)
    oq.median_distance = kilometer2degrees(med_dist)

    # go through all phase info lines
    for line in phases_lines:
        line = line.split()
        arrival = Arrival()
        o.arrivals.append(arrival)
        station = str(line[0])
        phase = str(line[4])
        arrival.phase = phase
        arrival.distance = kilometer2degrees(float(line[21]))
        arrival.azimuth = float(line[23])
        arrival.takeoff_angle = float(line[24])
        arrival.time_residual = float(line[16])
        arrival.time_weight = float(line[17])
        pick = Pick()
        # network codes are not used by NonLinLoc, so they can not be known
        # when reading the .hyp file.. to conform with QuakeML standard set
        # an empty network code
        wid = WaveformStreamID(network_code="", station_code=station)
        # have to split this into ints for overflow to work correctly
        date, hourmin, sec = map(str, line[6:9])
        ymd = [int(date[:4]), int(date[4:6]), int(date[6:8])]
        hm = [int(hourmin[:2]), int(hourmin[2:4])]
        t = UTCDateTime(*(ymd + hm), strict=False) + float(sec)
        pick.waveform_id = wid
        pick.time = t
        pick.time_errors.uncertainty = float(line[10])
        pick.phase_hint = phase
        pick.onset = ONSETS.get(line[3].lower(), None)
        pick.polarity = POLARITIES.get(line[5].lower(), None)
        # try to determine original pick for each arrival
        for pick_ in original_picks:
            wid = pick_.waveform_id
            if station == wid.station_code and phase == pick_.phase_hint:
                pick = pick_
                break
        else:
            # warn if original picks were specified and we could not
            # associate the arrival correctly
            if original_picks:
                msg = ("Could not determine corresponding original pick "
                       "for arrival. "
                       "Falling back to pick information in NonLinLoc "
                       "hypocenter-phase file.")
                warnings.warn(msg)
        event.picks.append(pick)
        arrival.pick_id = pick.resource_id

    event.scope_resource_ids()

    return event
def read_nlloc_hyp(filename, coordinate_converter=None, picks=None, **kwargs):
    """
    Reads a NonLinLoc Hypocenter-Phase file to a
    :class:`~obspy.core.event.Catalog` object.

    .. note::

        Coordinate conversion from coordinate frame of NonLinLoc model files /
        location run to WGS84 has to be specified explicitly by the user if
        necessary.

    .. note::

        An example can be found on the :mod:`~obspy.io.nlloc` submodule front
        page in the documentation pages.

    :param filename: File or file-like object in text mode.
    :type coordinate_converter: func
    :param coordinate_converter: Function to convert (x, y, z)
        coordinates of NonLinLoc output to geographical coordinates and depth
        in meters (longitude, latitude, depth in kilometers).
        If left ``None``, NonLinLoc (x, y, z) output is left unchanged (e.g.
        if it is in geographical coordinates already like for NonLinLoc in
        global mode).
        The function should accept three arguments x, y, z (each of type
        :class:`numpy.ndarray`) and return a tuple of three
        :class:`numpy.ndarray` (lon, lat, depth in kilometers).
    :type picks: list of :class:`~obspy.core.event.Pick`
    :param picks: Original picks used to generate the NonLinLoc location.
        If provided, the output event will include the original picks and
        the arrivals in the output origin will link to them correctly (with
        their ``pick_id`` attribute). If not provided, the output event will
        include (the rather basic) pick information that can be reconstructed
        from the NonLinLoc hypocenter-phase file.
    :rtype: :class:`~obspy.core.event.Catalog`
    """
    if not hasattr(filename, "read"):
        # Check if it exists, otherwise assume it's a string.
        try:
            with open(filename, "rt") as fh:
                data = fh.read()
        except Exception:
            try:
                data = filename.decode()
            except Exception:
                data = str(filename)
            data = data.strip()
    else:
        data = filename.read()
        if hasattr(data, "decode"):
            data = data.decode()

    lines = data.splitlines()

    # remember picks originally used in location, if provided
    original_picks = picks
    if original_picks is None:
        original_picks = []

    # determine indices of block start/end of the NLLOC output file
    indices_hyp = [None, None]
    indices_phases = [None, None]
    for i, line in enumerate(lines):
        if line.startswith("NLLOC "):
            indices_hyp[0] = i
        elif line.startswith("END_NLLOC"):
            indices_hyp[1] = i
        elif line.startswith("PHASE "):
            indices_phases[0] = i
        elif line.startswith("END_PHASE"):
            indices_phases[1] = i
    if any([i is None for i in indices_hyp]):
        msg = ("NLLOC HYP file seems corrupt,"
               " could not detect 'NLLOC' and 'END_NLLOC' lines.")
        raise RuntimeError(msg)
    # strip any other lines around NLLOC block
    lines = lines[indices_hyp[0]:indices_hyp[1]]

    # extract PHASES lines (if any)
    if any(indices_phases):
        if not all(indices_phases):
            msg = "NLLOC HYP file seems corrupt, 'PHASE' block is corrupt."
            raise RuntimeError(msg)
        i1, i2 = indices_phases
        lines, phases_lines = lines[:i1] + lines[i2 + 1:], lines[i1 + 1:i2]
    else:
        phases_lines = []

    lines = dict([line.split(None, 1) for line in lines])
    line = lines["SIGNATURE"]

    line = line.rstrip().split('"')[1]
    signature, version, date, time = line.rsplit(" ", 3)
    creation_time = UTCDateTime().strptime(date + time,
                                           str("%d%b%Y%Hh%Mm%S"))

    # maximum likelihood origin location info line
    line = lines["HYPOCENTER"]
    x, y, z = map(float, line.split()[1:7:2])

    if coordinate_converter:
        x, y, z = coordinate_converter(x, y, z)

    # origin time info line
    line = lines["GEOGRAPHIC"]
    year, month, day, hour, minute = map(int, line.split()[1:6])
    seconds = float(line.split()[6])
    time = UTCDateTime(year, month, day, hour, minute, seconds)

    # distribution statistics line
    line = lines["STATISTICS"]
    covariance_xx = float(line.split()[7])
    covariance_yy = float(line.split()[13])
    covariance_zz = float(line.split()[17])
    stats_info_string = str(
        "Note: Depth/Latitude/Longitude errors are calculated from covariance "
        "matrix as 1D marginal (Lon/Lat errors as great circle degrees) "
        "while OriginUncertainty min/max horizontal errors are calculated "
        "from 2D error ellipsoid and are therefore seemingly higher compared "
        "to 1D errors. Error estimates can be reconstructed from the "
        "following original NonLinLoc error statistics line:\nSTATISTICS " +
        lines["STATISTICS"])

    # goto location quality info line
    line = lines["QML_OriginQuality"].split()
    (assoc_phase_count, used_phase_count, assoc_station_count,
     used_station_count, depth_phase_count) = map(int, line[1:11:2])
    stderr, az_gap, sec_az_gap = map(float, line[11:17:2])
    gt_level = line[17]
    min_dist, max_dist, med_dist = map(float, line[19:25:2])

    # goto location uncertainty info line
    line = lines["QML_OriginUncertainty"]
    hor_unc, min_hor_unc, max_hor_unc, hor_unc_azim = \
        map(float, line.split()[1:9:2])

    # assign origin info
    event = Event()
    cat = Catalog(events=[event])
    o = Origin()
    event.origins = [o]
    o.origin_uncertainty = OriginUncertainty()
    o.quality = OriginQuality()
    ou = o.origin_uncertainty
    oq = o.quality
    o.comments.append(Comment(text=stats_info_string))
    cat.creation_info.creation_time = UTCDateTime()
    cat.creation_info.version = "ObsPy %s" % __version__
    event.creation_info = CreationInfo(creation_time=creation_time,
                                       version=version)
    event.creation_info.version = version
    o.creation_info = CreationInfo(creation_time=creation_time,
                                   version=version)

    # negative values can appear on diagonal of covariance matrix due to a
    # precision problem in NLLoc implementation when location coordinates are
    # large compared to the covariances.
    o.longitude = x
    try:
        o.longitude_errors.uncertainty = \
            kilometer2degrees(sqrt(covariance_xx))
    except ValueError:
        if covariance_xx < 0:
            msg = ("Negative value in XX value of covariance matrix, not "
                   "setting longitude error (epicentral uncertainties will "
                   "still be set in origin uncertainty).")
            warnings.warn(msg)
        else:
            raise
    o.latitude = y
    try:
        o.latitude_errors.uncertainty = kilometer2degrees(sqrt(covariance_yy))
    except ValueError:
        if covariance_yy < 0:
            msg = ("Negative value in YY value of covariance matrix, not "
                   "setting latitude error (epicentral uncertainties will "
                   "still be set in origin uncertainty).")
            warnings.warn(msg)
        else:
            raise
    o.depth = z * 1e3  # meters!
    o.depth_errors.uncertainty = sqrt(covariance_zz) * 1e3  # meters!
    o.depth_errors.confidence_level = 68
    o.depth_type = str("from location")
    o.time = time

    ou.horizontal_uncertainty = hor_unc
    ou.min_horizontal_uncertainty = min_hor_unc
    ou.max_horizontal_uncertainty = max_hor_unc
    # values of -1 seem to be used for unset values, set to None
    for field in ("horizontal_uncertainty", "min_horizontal_uncertainty",
                  "max_horizontal_uncertainty"):
        if ou.get(field, -1) == -1:
            ou[field] = None
        else:
            ou[field] *= 1e3  # meters!
    ou.azimuth_max_horizontal_uncertainty = hor_unc_azim
    ou.preferred_description = str("uncertainty ellipse")
    ou.confidence_level = 68  # NonLinLoc in general uses 1-sigma (68%) level

    oq.standard_error = stderr
    oq.azimuthal_gap = az_gap
    oq.secondary_azimuthal_gap = sec_az_gap
    oq.used_phase_count = used_phase_count
    oq.used_station_count = used_station_count
    oq.associated_phase_count = assoc_phase_count
    oq.associated_station_count = assoc_station_count
    oq.depth_phase_count = depth_phase_count
    oq.ground_truth_level = gt_level
    oq.minimum_distance = kilometer2degrees(min_dist)
    oq.maximum_distance = kilometer2degrees(max_dist)
    oq.median_distance = kilometer2degrees(med_dist)

    # go through all phase info lines
    for line in phases_lines:
        line = line.split()
        arrival = Arrival()
        o.arrivals.append(arrival)
        station = str(line[0])
        phase = str(line[4])
        arrival.phase = phase
        arrival.distance = kilometer2degrees(float(line[21]))
        arrival.azimuth = float(line[23])
        arrival.takeoff_angle = float(line[24])
        arrival.time_residual = float(line[16])
        arrival.time_weight = float(line[17])
        pick = Pick()
        wid = WaveformStreamID(station_code=station)
        date, hourmin, sec = map(str, line[6:9])
        t = UTCDateTime().strptime(date + hourmin, "%Y%m%d%H%M") + float(sec)
        pick.waveform_id = wid
        pick.time = t
        pick.time_errors.uncertainty = float(line[10])
        pick.phase_hint = phase
        pick.onset = ONSETS.get(line[3].lower(), None)
        pick.polarity = POLARITIES.get(line[5].lower(), None)
        # try to determine original pick for each arrival
        for pick_ in original_picks:
            wid = pick_.waveform_id
            if station == wid.station_code and phase == pick_.phase_hint:
                pick = pick_
                break
        else:
            # warn if original picks were specified and we could not associate
            # the arrival correctly
            if original_picks:
                msg = ("Could not determine corresponding original pick for "
                       "arrival. "
                       "Falling back to pick information in NonLinLoc "
                       "hypocenter-phase file.")
                warnings.warn(msg)
        event.picks.append(pick)
        arrival.pick_id = pick.resource_id

    return cat
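

# A minimal usage sketch (not part of the reader above): in ObsPy this reader
# is normally reached through obspy.read_events(), which forwards extra
# keyword arguments such as coordinate_converter to it. The file name below
# is a made-up placeholder, the format key "NLLOC_HYP" is assumed to be the
# registered plugin name, and the converter only illustrates the expected
# signature (three arrays in, a tuple of three arrays out) for a location run
# that is assumed to already be in geographic coordinates.
def _example_read_nlloc_hyp():
    import numpy as np
    from obspy import read_events

    def converter(x, y, z):
        # placeholder pass-through: the NonLinLoc model frame is assumed to
        # be geographic already, so nothing needs to be converted
        return np.asarray(x), np.asarray(y), np.asarray(z)

    cat = read_events("loc/example.grid0.loc.hyp", format="NLLOC_HYP",
                      coordinate_converter=converter)
    print(cat)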
def computeOriginErrors(org):
    """
    Given an NLL origin, build the Confidence Ellipsoid from its Covariance
    Matrix
    :param org: NLL's QML Origin (must carry the original NonLinLoc
        STATISTICS line in its comments)
    :return: Dictionary containing computed errors
    """
    # WARNING: QuakeML uses meters for origin depth, origin uncertainty and
    # confidence ellipsoid, SC3ML uses kilometers.

    d = {}

    confidenceLevel = 0.90  # Confidence level

    kp1 = np.sqrt(chi2.ppf(confidenceLevel, 1))  # 1D confidence coefficient
    kp2 = np.sqrt(chi2.ppf(confidenceLevel, 2))  # 2D confidence coefficient
    kp3 = np.sqrt(chi2.ppf(confidenceLevel, 3))  # 3D confidence coefficient

    # Covariance matrix is given in the NLL's "STATISTICS" line of the
    # *.grid0.loc.hyp file and in the Origin's comments parsed by ObsPy
    comments = org['comments'][0].text
    stats = comments.split('STATISTICS')[-1].split()
    cvm = [float(i) for i in stats[1::2]][3:9]  # Covariance matrix

    # Code adapted from IGN's computation of ConfidenceEllipsoid in the
    # "locsat.cpp" program
    cvxx = cvm[0]
    cvxy = cvm[1]
    cvxz = cvm[2]
    cvyy = cvm[3]
    cvyz = cvm[4]
    cvzz = cvm[5]

    nll3d = np.array([[cvxx, cvxy, cvxz],
                      [cvxy, cvyy, cvyz],
                      [cvxz, cvyz, cvzz]])

    # 1D confidence intervals at confidenceLevel
    errx = kp1 * np.sqrt(cvxx)
    qe = QuantityError(uncertainty=errx,
                       confidence_level=confidenceLevel * 100.0)
    d['longitude_errors'] = qe

    erry = kp1 * np.sqrt(cvyy)
    qe = QuantityError(uncertainty=erry,
                       confidence_level=confidenceLevel * 100.0)
    d['latitude_errors'] = qe

    errz = kp1 * np.sqrt(cvzz)
    qe = QuantityError(uncertainty=errz,
                       confidence_level=confidenceLevel * 100.0)
    d['depth_errors'] = qe

    # NLL kp1=1 because it is up to 1 sigma (68.3%), LocSAT kp1=2.71 because
    # it is up to 90% (one dim)
    # LocSAT np.sqrt(cvzz)/2.71 = NLL np.sqrt(cvzz)

    # 2D confidence intervals at confidenceLevel
    nll2d = np.array(nll3d[:2, :2])
    eigval2d, eigvec2d = np.linalg.eig(nll2d)  # XY (horizontal) plane

    # indexes are not necessarily ordered. Sort them by eigenvalues
    idx = eigval2d.argsort()
    eigval2d = eigval2d[idx]
    eigvec2d = eigvec2d[:, idx]

    # sminax = kp2 * np.sqrt(eigval2d[0]) * 1.0e3  # QML in meters
    # smajax = kp2 * np.sqrt(eigval2d[1]) * 1.0e3  # QML in meters
    sminax = kp2 * np.sqrt(eigval2d[0])  # SC3ML in kilometers
    smajax = kp2 * np.sqrt(eigval2d[1])  # SC3ML in kilometers
    # calculate the strike of the semi-major axis and refer it to North
    strike = 90.0 - np.rad2deg(np.arctan(eigvec2d[1, 1] / eigvec2d[0, 1]))

    # horizontalUncertainty = np.sqrt((errx ** 2) + (erry ** 2)) * 1.0e3  # QML in meters
    horizontalUncertainty = np.sqrt((errx ** 2) + (erry ** 2))  # SC3ML in kilometers

    # 3D confidence intervals at confidenceLevel
    eigval3d, eigvec3d = np.linalg.eig(nll3d)
    idx = eigval3d.argsort()
    eigval3d = eigval3d[idx]
    eigvec3d = eigvec3d[:, idx]

    # s3dminax = kp3 * np.sqrt(eigval3d[0]) * 1.0e3  # QML in meters
    # s3dintax = kp3 * np.sqrt(eigval3d[1]) * 1.0e3  # QML in meters
    # s3dmaxax = kp3 * np.sqrt(eigval3d[2]) * 1.0e3  # QML in meters
    s3dminax = kp3 * np.sqrt(eigval3d[0])  # SC3ML in kilometers
    s3dintax = kp3 * np.sqrt(eigval3d[1])  # SC3ML in kilometers
    s3dmaxax = kp3 * np.sqrt(eigval3d[2])  # SC3ML in kilometers

    majaxplunge = normalizeAngle(
        np.rad2deg(np.arctan(eigvec3d[2, 2] / np.sqrt(
            (eigvec3d[2, 0] ** 2) + (eigvec3d[2, 1] ** 2)))))
    majaxazimuth = normalizeAngle(
        np.rad2deg(np.arctan(eigvec3d[2, 1] / eigvec3d[2, 0])))
    majaxrotation = normalizeAngle(
        np.rad2deg(np.arctan(eigvec3d[0, 2] / np.sqrt(
            (eigvec3d[0, 0] ** 2) + (eigvec3d[0, 1] ** 2)))))

    # print('2D sminax:\t{}\tsmajax:\t{}\tstrike:\t{}'.format(sminax, smajax, strike))
    # print('3D sminax:\t{}\tsmajax:\t{}\tsintax:\t{}'.format(s3dminax, s3dmaxax, s3dintax))
    # print('   plunge:\t{}\tazim:\t{}\trotat:\t{}'.format(majaxplunge, majaxazimuth, majaxrotation))
    # print('-' * 144)

    ce = ConfidenceEllipsoid(semi_major_axis_length=s3dmaxax,
                             semi_minor_axis_length=s3dminax,
                             semi_intermediate_axis_length=s3dintax,
                             major_axis_plunge=majaxplunge,
                             major_axis_azimuth=majaxazimuth,
                             major_axis_rotation=majaxrotation)
    ou = OriginUncertainty(horizontal_uncertainty=horizontalUncertainty,
                           min_horizontal_uncertainty=sminax,
                           max_horizontal_uncertainty=smajax,
                           azimuth_max_horizontal_uncertainty=strike,
                           confidence_ellipsoid=ce,
                           preferred_description='confidence ellipsoid',
                           confidence_level=confidenceLevel * 100.0)
    d['origin_uncertainty'] = ou

    return d
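

# A minimal usage sketch (assumptions: the hypocenter file path is a
# placeholder, and computeOriginErrors is importable together with its
# dependencies -- numpy as np, scipy.stats.chi2, the ObsPy event classes and
# the normalizeAngle helper referenced above). The returned dictionary maps
# Origin attribute names to QuantityError / OriginUncertainty objects, so it
# can be applied to an origin read with read_nlloc_hyp via setattr().
def _example_compute_origin_errors():
    from obspy import read_events

    cat = read_events("loc/example.grid0.loc.hyp", format="NLLOC_HYP")
    origin = cat[0].preferred_origin() or cat[0].origins[0]

    errors = computeOriginErrors(origin)
    for attribute, value in errors.items():
        setattr(origin, attribute, value)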
def _read_evt(filename, inventory=None, id_map=None, id_default='.{}..{}',
              encoding='utf-8'):
    """
    Reads a SeismicHandler EVT file and returns an ObsPy Catalog object.

    .. warning::
        This function should NOT be called directly, it registers via the
        ObsPy :func:`~obspy.core.event.read_events` function, call this
        instead.

    :type filename: str
    :param filename: File or file-like object in text mode.
    :type inventory: :class:`~obspy.core.inventory.inventory.Inventory`
    :param inventory: Inventory used to retrieve network code, location code
        and channel code of stations (SEED id).
    :type id_map: dict
    :param id_map: If channel information was not found in inventory,
        it will be looked up in this dictionary
        (example: `id_map={'MOX': 'GR.{}..HH{}'}`).
        The values must contain three dots and two `{}` which are
        substituted by station code and component.
    :type id_default: str
    :param id_default: Default SEED id expression.
        The value must contain three dots and two `{}` which are
        substituted by station code and component.
    :param str encoding: encoding used (default: utf-8)

    :rtype: :class:`~obspy.core.event.Catalog`
    :return: An ObsPy Catalog object.

    .. note::
        The following fields are supported by this function: %s.

        Compare with http://www.seismic-handler.org/wiki/ShmDocFileEvt
    """
    seed_map = _seed_id_map(inventory, id_map, id_default)
    with io.open(filename, 'r', encoding=encoding) as f:
        temp = f.read()
    # first create phases and phases_o dictionaries for different phases
    # and phases with origin information
    phases = defaultdict(list)
    phases_o = {}
    phase = {}
    evid = None
    for line in temp.splitlines():
        if 'End of Phase' in line:
            if 'origin time' in phase.keys():
                if evid in phases_o:
                    # found more than one origin
                    pass
                phases_o[evid] = phase
            phases[evid].append(phase)
            phase = {}
            evid = None
        elif line.strip() != '':
            try:
                key, value = line.split(':', 1)
            except ValueError:
                continue
            key = key.strip().lower()
            value = value.strip()
            if key == 'event id':
                evid = value
            elif value != '':
                phase[key] = value
    assert evid is None

    # now create obspy Events from phases and phases_o dictionaries
    events = []
    for evid in phases:
        picks = []
        arrivals = []
        stamags = []
        origins = []
        po = None
        magnitudes = []
        pm = None
        for p in phases[evid]:
            try:
                sta = p['station code']
            except KeyError:
                sta = ''
            try:
                comp = p['component']
            except KeyError:
                comp = ''
            try:
                wid = seed_map[sta]
            except KeyError:
                wid = id_default
            wid = WaveformStreamID(seed_string=wid.format(sta, comp))
            pick = Pick(waveform_id=wid, **_kw(p, 'pick'))
            arrival = Arrival(pick_id=pick.resource_id, **_kw(p, 'arrival'))
            picks.append(pick)
            arrivals.append(arrival)
            stamags_temp, _ = _mags(p, evid, stamag=True, wid=wid)
            stamags.extend(stamags_temp)
        if evid in phases_o:
            o = phases_o[evid]
            uncertainty = OriginUncertainty(**_kw(o, 'origin_uncertainty'))
            origin = Origin(arrivals=arrivals, origin_uncertainty=uncertainty,
                            **_kw(o, 'origin'))
            if origin.latitude is None or origin.longitude is None:
                warn('latitude or longitude not set for event %s' % evid)
            else:
                if origin.longitude_errors.uncertainty is not None:
                    origin.longitude_errors.uncertainty *= cos(
                        origin.latitude / 180 * pi)
                origins = [origin]
                po = origin.resource_id
            magnitudes, pm = _mags(o, evid)
        else:
            o = p
        event = Event(resource_id=ResourceIdentifier(evid),
                      picks=picks,
                      origins=origins,
                      magnitudes=magnitudes,
                      station_magnitudes=stamags,
                      preferred_origin_id=po,
                      preferred_magnitude_id=pm,
                      **_kw(o, 'event')
                      )
        events.append(event)
    return Catalog(events,
                   description='Created from SeismicHandler EVT format')
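

# A minimal usage sketch (the EVT file name, the StationXML file name and the
# 'MOX' entry in id_map are made-up placeholders): as the docstring above
# notes, the reader registers with obspy.read_events(), which forwards extra
# keyword arguments such as inventory and id_map to it and attempts to detect
# the file format automatically.
def _example_read_evt():
    from obspy import read_events, read_inventory

    inv = read_inventory("stations.xml")  # optional, used to build SEED ids
    cat = read_events("example.evt", inventory=inv,
                      id_map={'MOX': 'GR.{}..HH{}'})
    for event in cat:
        print(event.short_str())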