def _parse_record_ae(self, line, event):
    """
    Parses the 'additional hypocenter error and magnitude record' AE.

    :param line: fixed-width AE record line.
    :param event: :class:`~obspy.core.event.Event` the parsed magnitudes
        are appended to; uncertainties are stored on its latest origin.
    """
    orig_time_stderr = self._float_unused(line[2:7])
    latitude_stderr = self._float_unused(line[8:14])
    longitude_stderr = self._float_unused(line[15:21])
    depth_stderr = self._float_unused(line[22:27])
    gap = self._float_unused(line[28:33])
    mag1 = self._float(line[33:36])
    mag1_type = line[36:38]
    mag2 = self._float(line[43:46])
    mag2_type = line[46:48]
    evid = event.resource_id.id.split('/')[-1]
    # this record is to be associated to the latest origin
    origin = event.origins[-1]
    self._store_uncertainty(origin.time_errors, orig_time_stderr)
    self._store_uncertainty(origin.latitude_errors,
                            self._lat_err_to_deg(latitude_stderr))
    self._store_uncertainty(origin.longitude_errors,
                            self._lon_err_to_deg(longitude_stderr,
                                                 origin.latitude))
    self._store_uncertainty(origin.depth_errors, depth_stderr, scale=1000)
    origin.quality.azimuthal_gap = gap
    # BUG FIX: self._float() returns None for blank fields; "None > 0"
    # raises TypeError on Python 3.  Also, mag1_id used to be referenced
    # in the mag2 branch even when the mag1 branch had not run (NameError).
    mag1_id = None
    if mag1 is not None and mag1 > 0:
        mag = Magnitude()
        mag1_id = mag1_type.lower()
        res_id = '/'.join((res_id_prefix, 'magnitude', evid, mag1_id))
        mag.resource_id = ResourceIdentifier(id=res_id)
        mag.creation_info = CreationInfo(
            agency_id=origin.creation_info.agency_id)
        mag.mag = mag1
        mag.magnitude_type = mag1_type
        mag.origin_id = origin.resource_id
        event.magnitudes.append(mag)
    if mag2 is not None and mag2 > 0:
        mag = Magnitude()
        mag2_id = mag2_type.lower()
        # disambiguate the resource id when both magnitudes share a type
        if mag2_id == mag1_id:
            mag2_id += '2'
        res_id = '/'.join((res_id_prefix, 'magnitude', evid, mag2_id))
        mag.resource_id = ResourceIdentifier(id=res_id)
        mag.creation_info = CreationInfo(
            agency_id=origin.creation_info.agency_id)
        mag.mag = mag2
        mag.magnitude_type = mag2_type
        mag.origin_id = origin.resource_id
        event.magnitudes.append(mag)
def _parseRecordAE(self, line, event):
    """
    Parses the 'additional hypocenter error and magnitude record' AE.

    :param line: fixed-width AE record line.
    :param event: :class:`~obspy.core.event.Event` the parsed magnitudes
        are appended to; uncertainties are stored on its latest origin.
    """
    orig_time_stderr = self._floatUnused(line[2:7])
    latitude_stderr = self._floatUnused(line[8:14])
    longitude_stderr = self._floatUnused(line[15:21])
    depth_stderr = self._floatUnused(line[22:27])
    gap = self._floatUnused(line[28:33])
    mag1 = self._float(line[33:36])
    mag1_type = line[36:38]
    mag2 = self._float(line[43:46])
    mag2_type = line[46:48]
    evid = event.resource_id.id.split('/')[-1]
    # this record is to be associated to the latest origin
    origin = event.origins[-1]
    self._storeUncertainty(origin.time_errors, orig_time_stderr)
    self._storeUncertainty(origin.latitude_errors,
                           self._latErrToDeg(latitude_stderr))
    self._storeUncertainty(
        origin.longitude_errors,
        self._lonErrToDeg(longitude_stderr, origin.latitude))
    self._storeUncertainty(origin.depth_errors, depth_stderr, scale=1000)
    origin.quality.azimuthal_gap = gap
    # BUG FIX: self._float() returns None for blank fields; "None > 0"
    # raises TypeError on Python 3.  Also, mag1_id used to be referenced
    # in the mag2 branch even when the mag1 branch had not run (NameError).
    mag1_id = None
    if mag1 is not None and mag1 > 0:
        mag = Magnitude()
        mag1_id = mag1_type.lower()
        res_id = '/'.join((res_id_prefix, 'magnitude', evid, mag1_id))
        mag.resource_id = ResourceIdentifier(id=res_id)
        mag.creation_info = CreationInfo(
            agency_id=origin.creation_info.agency_id)
        mag.mag = mag1
        mag.magnitude_type = mag1_type
        mag.origin_id = origin.resource_id
        event.magnitudes.append(mag)
    if mag2 is not None and mag2 > 0:
        mag = Magnitude()
        mag2_id = mag2_type.lower()
        # disambiguate the resource id when both magnitudes share a type
        if mag2_id == mag1_id:
            mag2_id += '2'
        res_id = '/'.join((res_id_prefix, 'magnitude', evid, mag2_id))
        mag.resource_id = ResourceIdentifier(id=res_id)
        mag.creation_info = CreationInfo(
            agency_id=origin.creation_info.agency_id)
        mag.mag = mag2
        mag.magnitude_type = mag2_type
        mag.origin_id = origin.resource_id
        event.magnitudes.append(mag)
def __toMagnitude(parser, magnitude_el, origin):
    """
    Parses a given magnitude etree element.

    :type parser: :class:`~obspy.core.util.xmlwrapper.XMLParser`
    :param parser: Open XMLParser object.
    :type magnitude_el: etree.element
    :param magnitude_el: magnitude element to be parsed.
    :return: A ObsPy :class:`~obspy.core.event.Magnitude` object.
    """
    global CURRENT_TYPE
    mag = Magnitude()
    mag.resource_id = ResourceIdentifier(
        prefix="/".join([RESOURCE_ROOT, "magnitude"]))
    mag.origin_id = origin.resource_id
    mag.mag, mag.mag_errors = __toFloatQuantity(parser, magnitude_el, "mag")
    # obspyck used to write variance (instead of std) in magnitude error
    # fields -- convert it back to a standard deviation.
    if CURRENT_TYPE == "obspyck":
        if mag.mag_errors.uncertainty is not None:
            mag.mag_errors.uncertainty = math.sqrt(mag.mag_errors.uncertainty)
            # the resulting std is a one-sigma value; record the
            # corresponding 68.3% confidence level explicitly
            mag.mag_errors.confidence_level = 68.3
    mag.magnitude_type = parser.xpath2obj("type", magnitude_el)
    mag.station_count = parser.xpath2obj("stationCount", magnitude_el, int)
    mag.method_id = "%s/magnitude_method/%s/1" % (
        RESOURCE_ROOT, parser.xpath2obj('program', magnitude_el))
    if str(mag.method_id).lower().endswith("none"):
        mag.method_id = None
    return mag
def __toMagnitude(parser, magnitude_el, origin):
    """
    Builds an ObsPy Magnitude from a magnitude etree element.

    :type parser: :class:`~obspy.core.util.xmlwrapper.XMLParser`
    :param parser: Open XMLParser object.
    :type magnitude_el: etree.element
    :param magnitude_el: magnitude element to be parsed.
    :return: A ObsPy :class:`~obspy.core.event.Magnitude` object.
    """
    global CURRENT_TYPE
    magnitude = Magnitude()
    magnitude.resource_id = ResourceIdentifier(
        prefix="/".join([RESOURCE_ROOT, "magnitude"]))
    magnitude.origin_id = origin.resource_id
    magnitude.mag, magnitude.mag_errors = __toFloatQuantity(
        parser, magnitude_el, "mag")
    # obspyck used to write variance (instead of std) in magnitude error
    # fields -- convert back to std and flag the one-sigma level.
    if CURRENT_TYPE == "obspyck" and \
            magnitude.mag_errors.uncertainty is not None:
        magnitude.mag_errors.uncertainty = math.sqrt(
            magnitude.mag_errors.uncertainty)
        magnitude.mag_errors.confidence_level = 68.3
    magnitude.magnitude_type = parser.xpath2obj("type", magnitude_el)
    magnitude.station_count = parser.xpath2obj(
        "stationCount", magnitude_el, int)
    program = parser.xpath2obj('program', magnitude_el)
    magnitude.method_id = "%s/magnitude_method/%s/1" % (
        RESOURCE_ROOT, program)
    if str(magnitude.method_id).lower().endswith("none"):
        magnitude.method_id = None
    return magnitude
def _map_origin2magnitude(self, db, mtype='ml'):
    """
    Return an obspy Magnitude from an dict of CSS key/values
    corresponding to one record.

    Inputs
    ======
    db : dict of key/values of CSS fields from the 'origin' table
    mtype : str of the magnitude-type key to read (default 'ml')

    Returns
    =======
    obspy.core.event.Magnitude

    Notes
    =====
    Any object that supports the dict 'get' method can be passed as
    input, e.g. OrderedDict, custom classes, etc.
    """
    m = Magnitude()
    m.mag = db.get(mtype)
    m.magnitude_type = mtype
    m.creation_info = CreationInfo(
        creation_time=_utc(db.get('lddate')),
        agency_id=self.agency,
        version=db.get('orid'),
        author=db.get('auth'),
    )
    # BUG FIX: 'auth' may be absent/None in the CSS record; calling
    # startswith on None raised AttributeError.  Treat a missing author
    # as a reviewed (non-orb) solution, like any non-'orb' author.
    author = m.creation_info.author or ''
    if author.startswith('orb'):
        # orb-posted solutions are automatic/preliminary
        m.evaluation_status = "preliminary"
    else:
        m.evaluation_status = "reviewed"
    m.resource_id = self._rid(m)
    return m
def _map_netmag2magnitude(self, db):
    """
    Return an obspy Magnitude from an dict of CSS key/values
    corresponding to one record.

    Inputs
    ======
    db : dict of key/values of CSS fields from the 'netmag' table

    Returns
    =======
    obspy.core.event.Magnitude

    Notes
    =====
    Any object that supports the dict 'get' method can be passed as
    input, e.g. OrderedDict, custom classes, etc.
    """
    mag = Magnitude()
    mag.mag = db.get('magnitude')
    mag.magnitude_type = db.get('magtype')
    mag.mag_errors.uncertainty = db.get('uncertainty')
    mag.station_count = db.get('nsta')
    # evaluation mode/status are derived from the posting author
    posted_author = _str(db.get('auth'))
    mag.evaluation_mode, mag.evaluation_status = \
        self.get_event_status(posted_author)
    mag.creation_info = CreationInfo(
        creation_time=_utc(db.get('lddate')),
        agency_id=self.agency,
        version=db.get('magid'),
        author=posted_author,
    )
    mag.resource_id = self._rid(mag)
    return mag
def _parseRecordE(self, line, event):
    """
    Parses the 'error and magnitude' record E.

    :param line: fixed-width E record line.
    :param event: :class:`~obspy.core.event.Event` the parsed magnitudes
        are appended to; uncertainties are stored on its first origin.
    """
    orig_time_stderr = self._float(line[2:7])
    latitude_stderr = self._float(line[8:14])
    longitude_stderr = self._float(line[15:21])
    depth_stderr = self._float(line[22:27])
    mb_mag = self._float(line[28:31])
    mb_nsta = self._int(line[32:35])
    Ms_mag = self._float(line[36:39])
    Ms_nsta = self._int(line[39:42])
    mag1 = self._float(line[42:45])
    mag1_type = line[45:47]
    mag1_source_code = line[47:51].strip()
    mag2 = self._float(line[51:54])
    mag2_type = line[54:56]
    mag2_source_code = line[56:60].strip()
    evid = event.resource_id.id.split('/')[-1]
    origin = event.origins[0]
    self._storeUncertainty(origin.time_errors, orig_time_stderr)
    self._storeUncertainty(origin.latitude_errors,
                           self._latErrToDeg(latitude_stderr))
    self._storeUncertainty(
        origin.longitude_errors,
        self._lonErrToDeg(longitude_stderr, origin.latitude))
    self._storeUncertainty(origin.depth_errors, depth_stderr, scale=1000)
    if mb_mag is not None:
        mag = Magnitude()
        res_id = '/'.join((res_id_prefix, 'magnitude', evid, 'mb'))
        mag.resource_id = ResourceIdentifier(id=res_id)
        mag.creation_info = CreationInfo(agency_id='USGS-NEIC')
        mag.mag = mb_mag
        mag.magnitude_type = 'Mb'
        mag.station_count = mb_nsta
        mag.origin_id = origin.resource_id
        event.magnitudes.append(mag)
    if Ms_mag is not None:
        mag = Magnitude()
        res_id = '/'.join((res_id_prefix, 'magnitude', evid, 'ms'))
        mag.resource_id = ResourceIdentifier(id=res_id)
        mag.creation_info = CreationInfo(agency_id='USGS-NEIC')
        mag.mag = Ms_mag
        mag.magnitude_type = 'Ms'
        mag.station_count = Ms_nsta
        mag.origin_id = origin.resource_id
        event.magnitudes.append(mag)
    # BUG FIX: mag1_id used to be referenced in the mag2 branch even when
    # mag1 was None and the mag1 branch had not run (NameError).
    mag1_id = None
    if mag1 is not None:
        mag = Magnitude()
        mag1_id = mag1_type.lower()
        res_id = '/'.join((res_id_prefix, 'magnitude', evid, mag1_id))
        mag.resource_id = ResourceIdentifier(id=res_id)
        mag.creation_info = CreationInfo(agency_id=mag1_source_code)
        mag.mag = mag1
        mag.magnitude_type = mag1_type
        mag.origin_id = origin.resource_id
        event.magnitudes.append(mag)
    if mag2 is not None:
        mag = Magnitude()
        mag2_id = mag2_type.lower()
        # disambiguate the resource id when both magnitudes share a type
        if mag2_id == mag1_id:
            mag2_id += '2'
        res_id = '/'.join((res_id_prefix, 'magnitude', evid, mag2_id))
        mag.resource_id = ResourceIdentifier(id=res_id)
        mag.creation_info = CreationInfo(agency_id=mag2_source_code)
        mag.mag = mag2
        mag.magnitude_type = mag2_type
        mag.origin_id = origin.resource_id
        event.magnitudes.append(mag)
def __read_single_fnetmt_entry(line, **kwargs):
    """
    Reads a single F-net moment tensor solution to a
    :class:`~obspy.core.event.Event` object.

    :param line: String containing moment tensor information.
    :type line: str.
    """
    a = line.split()
    # First token is the origin time; the fractional-seconds part is
    # optional, so fall back to a format without "%f".
    try:
        ot = UTCDateTime().strptime(a[0], '%Y/%m/%d,%H:%M:%S.%f')
    except ValueError:
        ot = UTCDateTime().strptime(a[0], '%Y/%m/%d,%H:%M:%S')
    lat, lon, depjma, magjma = map(float, a[1:5])
    depjma *= 1000  # km -> m
    region = a[5]
    # Nodal-plane parameters come as "v1;v2" pairs, one value per plane.
    strike = tuple(map(int, a[6].split(';')))
    dip = tuple(map(int, a[7].split(';')))
    rake = tuple(map(int, a[8].split(';')))
    mo = float(a[9])
    depmt = float(a[10]) * 1000  # km -> m
    magmt = float(a[11])
    var_red = float(a[12])
    mxx, mxy, mxz, myy, myz, mzz, unit = map(float, a[13:20])
    event_name = util.gen_sc3_id(ot)
    e = Event(event_type="earthquake")
    e.resource_id = _get_resource_id(event_name, 'event')
    # Standard JMA solution
    o_jma = Origin(time=ot, latitude=lat, longitude=lon,
                   depth=depjma, depth_type="from location",
                   region=region)
    o_jma.resource_id = _get_resource_id(event_name, 'origin', 'JMA')
    m_jma = Magnitude(mag=magjma, magnitude_type='ML',
                      origin_id=o_jma.resource_id)
    m_jma.resource_id = _get_resource_id(event_name, 'magnitude', 'JMA')
    # MT solution
    o_mt = Origin(time=ot, latitude=lat, longitude=lon,
                  depth=depmt, region=region,
                  depth_type="from moment tensor inversion")
    o_mt.resource_id = _get_resource_id(event_name, 'origin', 'MT')
    m_mt = Magnitude(mag=magmt, magnitude_type='Mw',
                     origin_id=o_mt.resource_id)
    m_mt.resource_id = _get_resource_id(event_name, 'magnitude', 'MT')
    foc_mec = FocalMechanism(triggering_origin_id=o_jma.resource_id)
    foc_mec.resource_id = _get_resource_id(event_name, "focal_mechanism")
    nod1 = NodalPlane(strike=strike[0], dip=dip[0], rake=rake[0])
    nod2 = NodalPlane(strike=strike[1], dip=dip[1], rake=rake[1])
    nod = NodalPlanes(nodal_plane_1=nod1, nodal_plane_2=nod2)
    foc_mec.nodal_planes = nod
    # NOTE(review): tensor components are stored as parsed (xx->rr,
    # yy->tt, zz->pp, xy->rt, xz->rp, yz->tp) under the basis named in
    # the comment below -- confirm this matches the intended r/t/p
    # convention before changing anything here.
    tensor = Tensor(m_rr=mxx, m_tt=myy, m_pp=mzz, m_rt=mxy, m_rp=mxz,
                    m_tp=myz)
    cm = Comment(text="Basis system: North,East,Down (Jost and \
Herrmann 1989")
    cm.resource_id = _get_resource_id(event_name, 'comment', 'mt')
    mt = MomentTensor(derived_origin_id=o_mt.resource_id,
                      moment_magnitude_id=m_mt.resource_id,
                      scalar_moment=mo, comments=[cm],
                      tensor=tensor, variance_reduction=var_red)
    mt.resource_id = _get_resource_id(event_name, 'moment_tensor')
    foc_mec.moment_tensor = mt
    e.origins = [o_jma, o_mt]
    e.magnitudes = [m_jma, m_mt]
    e.focal_mechanisms = [foc_mec]
    # The moment tensor solution is marked as the preferred one.
    e.preferred_magnitude_id = m_mt.resource_id.id
    e.preferred_origin_id = o_mt.resource_id.id
    e.preferred_focal_mechanism_id = foc_mec.resource_id.id
    return e
def write_qml(config, sourcepar):
    """
    Update the QuakeML file given on the command line with the source
    parameters computed by SourceSpec and write it to the output
    directory as "<evid>.xml".

    Does nothing if no QuakeML file was provided; logs a warning and
    returns if the configured event id cannot be found in the catalog.

    :param config: SourceSpec configuration object.
    :param sourcepar: container exposing means_weight, errors_weight and
        station_parameters.
    """
    if not config.options.qml_file:
        return
    qml_file = config.options.qml_file
    cat = read_events(qml_file)
    evid = config.hypo.evid
    try:
        ev = [e for e in cat if evid in str(e.resource_id)][0]
    except Exception:
        logging.warning('Unable to find evid "{}" in QuakeML file. '
                        'QuakeML output will not be written.'.format(evid))
        # BUG FIX: bail out here -- 'ev' is undefined past this point and
        # the code below would raise a NameError
        return
    origin = ev.preferred_origin()
    if origin is None:
        origin = ev.origins[0]
    origin_id = origin.resource_id
    origin_id_strip = origin_id.id.split('/')[-1]
    origin_id_strip = origin_id_strip.replace(
        config.smi_strip_from_origin_id, '')
    # Common parameters
    ssp_version = get_versions()['version']
    method_id = config.smi_base + '/sourcespec/' + ssp_version
    cr_info = CreationInfo()
    cr_info.agency_id = config.agency_id
    if config.author is None:
        author = '{}@{}'.format(getuser(), gethostname())
    else:
        author = config.author
    cr_info.author = author
    cr_info.creation_time = UTCDateTime()
    means = sourcepar.means_weight
    errors = sourcepar.errors_weight
    stationpar = sourcepar.station_parameters
    # Magnitude
    mag = Magnitude()
    _id = config.smi_magnitude_template.replace('$SMI_BASE',
                                                config.smi_base)
    _id = _id.replace('$ORIGIN_ID', origin_id_strip)
    mag.resource_id = ResourceIdentifier(id=_id)
    mag.method_id = ResourceIdentifier(id=method_id)
    mag.origin_id = origin_id
    mag.magnitude_type = 'Mw'
    mag.mag = means['Mw']
    mag_err = QuantityError()
    mag_err.uncertainty = errors['Mw']
    mag_err.confidence_level = 68.2
    mag.mag_errors = mag_err
    mag.station_count = len([_s for _s in stationpar.keys()])
    mag.evaluation_mode = 'automatic'
    mag.creation_info = cr_info
    # Seismic moment -- It has to be stored in a MomentTensor object
    # which, in turn, is part of a FocalMechanism object
    mt = MomentTensor()
    _id = config.smi_moment_tensor_template.replace('$SMI_BASE',
                                                    config.smi_base)
    _id = _id.replace('$ORIGIN_ID', origin_id_strip)
    mt.resource_id = ResourceIdentifier(id=_id)
    mt.derived_origin_id = origin_id
    mt.moment_magnitude_id = mag.resource_id
    mt.scalar_moment = means['Mo']
    mt_err = QuantityError()
    mt_err.lower_uncertainty = errors['Mo'][0]
    mt_err.upper_uncertainty = errors['Mo'][1]
    mt_err.confidence_level = 68.2
    mt.scalar_moment_errors = mt_err
    mt.method_id = method_id
    mt.creation_info = cr_info
    # And here is the FocalMechanism object
    fm = FocalMechanism()
    _id = config.smi_focal_mechanism_template.replace('$SMI_BASE',
                                                      config.smi_base)
    _id = _id.replace('$ORIGIN_ID', origin_id_strip)
    fm.resource_id = ResourceIdentifier(id=_id)
    fm.triggering_origin_id = origin_id
    fm.method_id = ResourceIdentifier(id=method_id)
    fm.moment_tensor = mt
    fm.creation_info = cr_info
    ev.focal_mechanisms.append(fm)
    # Station magnitudes
    for statId in sorted(stationpar.keys()):
        par = stationpar[statId]
        st_mag = StationMagnitude()
        seed_id = statId.split()[0]
        _id = config.smi_station_magnitude_template.replace(
            '$SMI_MAGNITUDE_TEMPLATE', config.smi_magnitude_template)
        _id = _id.replace('$ORIGIN_ID', origin_id_strip)
        _id = _id.replace('$SMI_BASE', config.smi_base)
        _id = _id.replace('$WAVEFORM_ID', seed_id)
        st_mag.resource_id = ResourceIdentifier(id=_id)
        st_mag.origin_id = origin_id
        st_mag.mag = par['Mw']
        st_mag.station_magnitude_type = 'Mw'
        st_mag.method_id = mag.method_id
        st_mag.creation_info = cr_info
        st_mag.waveform_id = WaveformStreamID(seed_string=seed_id)
        st_mag.extra = SSPExtra()
        st_mag.extra.moment = SSPTag(par['Mo'])
        st_mag.extra.corner_frequency = SSPTag(par['fc'])
        st_mag.extra.t_star = SSPTag(par['t_star'])
        ev.station_magnitudes.append(st_mag)
        st_mag_contrib = StationMagnitudeContribution()
        st_mag_contrib.station_magnitude_id = st_mag.resource_id
        mag.station_magnitude_contributions.append(st_mag_contrib)
    ev.magnitudes.append(mag)
    # Write other average parameters as custom tags
    ev.extra = SSPExtra()
    ev.extra.corner_frequency = SSPContainerTag()
    ev.extra.corner_frequency.value.value = SSPTag(means['fc'])
    ev.extra.corner_frequency.value.lower_uncertainty =\
        SSPTag(errors['fc'][0])
    ev.extra.corner_frequency.value.upper_uncertainty =\
        SSPTag(errors['fc'][1])
    ev.extra.corner_frequency.value.confidence_level = SSPTag(68.2)
    ev.extra.t_star = SSPContainerTag()
    ev.extra.t_star.value.value = SSPTag(means['t_star'])
    ev.extra.t_star.value.uncertainty = SSPTag(errors['t_star'])
    ev.extra.t_star.value.confidence_level = SSPTag(68.2)
    ev.extra.source_radius = SSPContainerTag()
    ev.extra.source_radius.value.value = SSPTag(means['ra'])
    ev.extra.source_radius.value.lower_uncertainty =\
        SSPTag(errors['ra'][0])
    ev.extra.source_radius.value.upper_uncertainty =\
        SSPTag(errors['ra'][1])
    ev.extra.source_radius.value.confidence_level = SSPTag(68.2)
    ev.extra.stress_drop = SSPContainerTag()
    ev.extra.stress_drop.value.value = SSPTag(means['bsd'])
    ev.extra.stress_drop.value.lower_uncertainty =\
        SSPTag(errors['bsd'][0])
    ev.extra.stress_drop.value.upper_uncertainty =\
        SSPTag(errors['bsd'][1])
    ev.extra.stress_drop.value.confidence_level = SSPTag(68.2)
    if config.set_preferred_magnitude:
        ev.preferred_magnitude_id = mag.resource_id.id
    qml_file_out = os.path.join(config.options.outdir, evid + '.xml')
    ev.write(qml_file_out, format='QUAKEML')
    logging.info('QuakeML file written to: ' + qml_file_out)
def build(self):
    """
    Build an obspy moment tensor focal mech event

    This makes the tensor output into an Event containing:
    1) a FocalMechanism with a MomentTensor, NodalPlanes, and PrincipalAxes
    2) a Magnitude of the Mw from the Tensor

    Which is what we want for outputting QuakeML using the (slightly
    modified) obspy code.

    Input
    -----
    filehandle => open file OR str from filehandle.read()

    Output
    ------
    event => instance of Event() class as described above
    """
    p = self.parser
    event = Event(event_type='earthquake')
    origin = Origin()
    focal_mech = FocalMechanism()
    nodal_planes = NodalPlanes()
    moment_tensor = MomentTensor()
    principal_ax = PrincipalAxes()
    magnitude = Magnitude()
    data_used = DataUsed()
    creation_info = CreationInfo(agency_id='NN')
    ev_mode = 'automatic'
    ev_stat = 'preliminary'
    evid = None
    orid = None
    # Parse the entire file line by line, filling in the objects above
    # as the matching text markers are found.
    # NOTE(review): 'ev' and 'derived_depth' are only bound when their
    # marker lines are present in the input -- a file missing the date
    # or 'Depth' line would raise NameError below; confirm the report
    # format guarantees them.
    for n, l in enumerate(p.line):
        if 'REVIEWED BY NSL STAFF' in l:
            ev_mode = 'manual'
            ev_stat = 'reviewed'
        if 'Event ID' in l:
            evid = p._id(n)
        if 'Origin ID' in l:
            orid = p._id(n)
        if 'Ichinose' in l:
            moment_tensor.category = 'regional'
        if re.match(r'^\d{4}\/\d{2}\/\d{2}', l):
            # line starting with a YYYY/MM/DD date carries the event info
            ev = p._event_info(n)
        if 'Depth' in l:
            derived_depth = p._depth(n)
        if 'Mw' in l:
            magnitude.mag = p._mw(n)
            magnitude.magnitude_type = 'Mw'
        if 'Mo' in l and 'dyne' in l:
            moment_tensor.scalar_moment = p._mo(n)
        if 'Percent Double Couple' in l:
            moment_tensor.double_couple = p._percent(n)
        if 'Percent CLVD' in l:
            moment_tensor.clvd = p._percent(n)
        if 'Epsilon' in l:
            moment_tensor.variance = p._epsilon(n)
        if 'Percent Variance Reduction' in l:
            moment_tensor.variance_reduction = p._percent(n)
        if 'Major Double Couple' in l and 'strike' in p.line[n+1]:
            np = p._double_couple(n)
            nodal_planes.nodal_plane_1 = NodalPlane(*np[0])
            nodal_planes.nodal_plane_2 = NodalPlane(*np[1])
            nodal_planes.preferred_plane = 1
        if 'Spherical Coordinates' in l:
            mt = p._mt_sphere(n)
            moment_tensor.tensor = Tensor(
                m_rr = mt['Mrr'],
                m_tt = mt['Mtt'],
                m_pp = mt['Mff'],
                m_rt = mt['Mrt'],
                m_rp = mt['Mrf'],
                m_tp = mt['Mtf'],
                )
        if 'Eigenvalues and eigenvectors of the Major Double Couple' in l:
            ax = p._vectors(n)
            principal_ax.t_axis = Axis(ax['T']['trend'],
                                       ax['T']['plunge'],
                                       ax['T']['ev'])
            principal_ax.p_axis = Axis(ax['P']['trend'],
                                       ax['P']['plunge'],
                                       ax['P']['ev'])
            principal_ax.n_axis = Axis(ax['N']['trend'],
                                       ax['N']['plunge'],
                                       ax['N']['ev'])
        if 'Number of Stations' in l:
            data_used.station_count = p._number_of_stations(n)
        if 'Maximum' in l and 'Gap' in l:
            focal_mech.azimuthal_gap = p._gap(n)
        if re.match(r'^Date', l):
            creation_info.creation_time = p._creation_time(n)
    # Creation Time
    creation_info.version = orid
    # Fill in magnitude values
    magnitude.evaluation_mode = ev_mode
    magnitude.evaluation_status = ev_stat
    magnitude.creation_info = creation_info.copy()
    magnitude.resource_id = self._rid(magnitude)
    # Stub origin
    origin.time = ev.get('time')
    origin.latitude = ev.get('lat')
    origin.longitude = ev.get('lon')
    origin.depth = derived_depth * 1000.
    origin.depth_type = "from moment tensor inversion"
    origin.creation_info = creation_info.copy()
    # Unique from true origin ID
    _oid = self._rid(origin)
    origin.resource_id = ResourceIdentifier(str(_oid) + '/mt')
    del _oid
    # Make an id for the MT that references this origin
    ogid = str(origin.resource_id)
    doid = ResourceIdentifier(ogid, referred_object=origin)
    # Make an id for the moment tensor mag which references this mag
    mrid = str(magnitude.resource_id)
    mmid = ResourceIdentifier(mrid, referred_object=magnitude)
    # MT todo: could check/use URL for RID if parsing the php file
    moment_tensor.evaluation_mode = ev_mode
    moment_tensor.evaluation_status = ev_stat
    moment_tensor.data_used = data_used
    moment_tensor.moment_magnitude_id = mmid
    moment_tensor.derived_origin_id = doid
    moment_tensor.creation_info = creation_info.copy()
    moment_tensor.resource_id = self._rid(moment_tensor)
    # Fill in focal_mech values
    focal_mech.nodal_planes = nodal_planes
    focal_mech.moment_tensor = moment_tensor
    focal_mech.principal_axes = principal_ax
    focal_mech.creation_info = creation_info.copy()
    focal_mech.resource_id = self._rid(focal_mech)
    # add mech and new magnitude to event
    event.focal_mechanisms = [focal_mech]
    event.magnitudes = [magnitude]
    event.origins = [origin]
    event.creation_info = creation_info.copy()
    # If an MT was done, that's the preferred mag/mech
    event.preferred_magnitude_id = str(magnitude.resource_id)
    event.preferred_focal_mechanism_id = str(focal_mech.resource_id)
    if evid:
        event.creation_info.version = evid
    event.resource_id = self._rid(event)
    self.event = event
def iris2quakeml(url, output_folder=None): if not "/spudservice/" in url: url = url.replace("/spud/", "/spudservice/") if url.endswith("/"): url += "quakeml" else: url += "/quakeml" print "Downloading %s..." % url r = requests.get(url) if r.status_code != 200: msg = "Error Downloading file!" raise Exception(msg) # For some reason the quakeml file is escaped HTML. h = HTMLParser.HTMLParser() data = h.unescape(r.content) # Replace some XML tags. data = data.replace("long-period body waves", "body waves") data = data.replace("intermediate-period surface waves", "surface waves") data = data.replace("long-period mantle waves", "mantle waves") data = data.replace("<html><body><pre>", "") data = data.replace("</pre></body></html>", "") # Change the resource identifiers. Colons are not allowed in QuakeML. pattern = r"(\d{4})-(\d{2})-(\d{2})T(\d{2}):(\d{2}):(\d{2})\.(\d{6})" data = re.sub(pattern, r"\1-\2-\3T\4-\5-\6.\7", data) data = StringIO(data) try: cat = readEvents(data) except: msg = "Could not read downloaded event data" raise ValueError(msg) # Parse the event, and use only one origin, magnitude and focal mechanism. # Only the first event is used. Should not be a problem for the chosen # global cmt application. ev = cat[0] if ev.preferred_origin(): ev.origins = [ev.preferred_origin()] else: ev.origins = [ev.origins[0]] if ev.preferred_focal_mechanism(): ev.focal_mechanisms = [ev.preferred_focal_mechanism()] else: ev.focal_mechanisms = [ev.focal_mechanisms[0]] try: mt = ev.focal_mechanisms[0].moment_tensor except: msg = "No moment tensor found in file." raise ValueError seismic_moment_in_dyn_cm = mt.scalar_moment if not seismic_moment_in_dyn_cm: msg = "No scalar moment found in file." raise ValueError(msg) # Create a new magnitude object with the moment magnitude calculated from # the given seismic moment. mag = Magnitude() mag.magnitude_type = "Mw" mag.origin_id = ev.origins[0].resource_id # This is the formula given on the GCMT homepage. 
mag.mag = (2.0 / 3.0) * (math.log10(seismic_moment_in_dyn_cm) - 16.1) mag.resource_id = ev.origins[0].resource_id.resource_id.replace("Origin", "Magnitude") ev.magnitudes = [mag] ev.preferred_magnitude_id = mag.resource_id # Convert the depth to meters. org = ev.origins[0] org.depth *= 1000.0 if org.depth_errors.uncertainty: org.depth_errors.uncertainty *= 1000.0 # Ugly asserts -- this is just a simple script. assert(len(ev.magnitudes) == 1) assert(len(ev.origins) == 1) assert(len(ev.focal_mechanisms) == 1) # All values given in the QuakeML file are given in dyne * cm. Convert them # to N * m. for key, value in mt.tensor.iteritems(): if key.startswith("m_") and len(key) == 4: mt.tensor[key] /= 1E7 if key.endswith("_errors") and hasattr(value, "uncertainty"): mt.tensor[key].uncertainty /= 1E7 mt.scalar_moment /= 1E7 if mt.scalar_moment_errors.uncertainty: mt.scalar_moment_errors.uncertainty /= 1E7 p_axes = ev.focal_mechanisms[0].principal_axes for ax in [p_axes.t_axis, p_axes.p_axis, p_axes.n_axis]: if ax is None or not ax.length: continue ax.length /= 1E7 # Check if it has a source time function stf = mt.source_time_function if stf: if stf.type != "triangle": msg = ("Source time function type '%s' not yet mapped. Please " "contact the developers.") % stf.type raise NotImplementedError(msg) if not stf.duration: if not stf.decay_time: msg = "Not known how to derive duration without decay time." raise NotImplementedError(msg) # Approximate the duraction for triangular STF. stf.duration = 2 * stf.decay_time # Get the flinn_engdahl region for a nice name. 
fe = FlinnEngdahl() region_name = fe.get_region(ev.origins[0].longitude, ev.origins[0].latitude) region_name = region_name.replace(" ", "_") event_name = "GCMT_event_%s_Mag_%.1f_%s-%s-%s-%s-%s.xml" % \ (region_name, ev.magnitudes[0].mag, ev.origins[0].time.year, ev.origins[0].time.month, ev.origins[0].time.day, ev.origins[0].time.hour, ev.origins[0].time.minute) # Check if the ids of the magnitude and origin contain the corresponding # tag. Otherwise replace tme. ev.origins[0].resource_id = ev.origins[0].resource_id.resource_id.replace( "quakeml/gcmtid", "quakeml/origin/gcmtid") ev.magnitudes[0].resource_id = \ ev.magnitudes[0].resource_id.resource_id.replace( "quakeml/gcmtid", "quakeml/magnitude/gcmtid") # Fix up the moment tensor resource_ids. mt.derived_origin_id = ev.origins[0].resource_id mt.resource_id = mt.resource_id.resource_id.replace("focalmechanism", "momenttensor") cat = Catalog() cat.resource_id = ev.origins[0].resource_id.resource_id.replace("origin", "event_parameters") cat.append(ev) if output_folder: event_name = os.path.join(output_folder, event_name) cat.write(event_name, format="quakeml", validate=True) print "Written file", event_name
def _parseRecordE(self, line, event):
    """
    Parses the 'error and magnitude' record E.

    :param line: fixed-width E record line.
    :param event: :class:`~obspy.core.event.Event` the parsed magnitudes
        are appended to; uncertainties are stored on its first origin.
    """
    orig_time_stderr = self._float(line[2:7])
    latitude_stderr = self._float(line[8:14])
    longitude_stderr = self._float(line[15:21])
    depth_stderr = self._float(line[22:27])
    mb_mag = self._float(line[28:31])
    mb_nsta = self._int(line[32:35])
    Ms_mag = self._float(line[36:39])
    Ms_nsta = self._int(line[39:42])
    mag1 = self._float(line[42:45])
    mag1_type = line[45:47]
    mag1_source_code = line[47:51].strip()
    mag2 = self._float(line[51:54])
    mag2_type = line[54:56]
    mag2_source_code = line[56:60].strip()
    evid = event.resource_id.id.split("/")[-1]
    origin = event.origins[0]
    self._storeUncertainty(origin.time_errors, orig_time_stderr)
    self._storeUncertainty(origin.latitude_errors,
                           self._latErrToDeg(latitude_stderr))
    self._storeUncertainty(origin.longitude_errors,
                           self._lonErrToDeg(longitude_stderr,
                                             origin.latitude))
    self._storeUncertainty(origin.depth_errors, depth_stderr, scale=1000)
    if mb_mag is not None:
        mag = Magnitude()
        res_id = "/".join((res_id_prefix, "magnitude", evid, "mb"))
        mag.resource_id = ResourceIdentifier(id=res_id)
        mag.creation_info = CreationInfo(agency_id="USGS-NEIC")
        mag.mag = mb_mag
        mag.magnitude_type = "Mb"
        mag.station_count = mb_nsta
        mag.origin_id = origin.resource_id
        event.magnitudes.append(mag)
    if Ms_mag is not None:
        mag = Magnitude()
        res_id = "/".join((res_id_prefix, "magnitude", evid, "ms"))
        mag.resource_id = ResourceIdentifier(id=res_id)
        mag.creation_info = CreationInfo(agency_id="USGS-NEIC")
        mag.mag = Ms_mag
        mag.magnitude_type = "Ms"
        mag.station_count = Ms_nsta
        mag.origin_id = origin.resource_id
        event.magnitudes.append(mag)
    # BUG FIX: mag1_id used to be referenced in the mag2 branch even when
    # mag1 was None and the mag1 branch had not run (NameError).
    mag1_id = None
    if mag1 is not None:
        mag = Magnitude()
        mag1_id = mag1_type.lower()
        res_id = "/".join((res_id_prefix, "magnitude", evid, mag1_id))
        mag.resource_id = ResourceIdentifier(id=res_id)
        mag.creation_info = CreationInfo(agency_id=mag1_source_code)
        mag.mag = mag1
        mag.magnitude_type = mag1_type
        mag.origin_id = origin.resource_id
        event.magnitudes.append(mag)
    if mag2 is not None:
        mag = Magnitude()
        mag2_id = mag2_type.lower()
        # disambiguate the resource id when both magnitudes share a type
        if mag2_id == mag1_id:
            mag2_id += "2"
        res_id = "/".join((res_id_prefix, "magnitude", evid, mag2_id))
        mag.resource_id = ResourceIdentifier(id=res_id)
        mag.creation_info = CreationInfo(agency_id=mag2_source_code)
        mag.mag = mag2
        mag.magnitude_type = mag2_type
        mag.origin_id = origin.resource_id
        event.magnitudes.append(mag)
def _read_ndk(filename, *args, **kwargs):  # @UnusedVariable
    """
    Reads an NDK file to a :class:`~obspy.core.event.Catalog` object.

    :param filename: File or file-like object in text mode.
    """
    # Read the whole file at once. While an iterator would be more efficient
    # the largest NDK file out in the wild is 13.7 MB so it does not matter
    # much.
    if not hasattr(filename, "read"):
        # Check if it exists, otherwise assume its a string.
        try:
            with open(filename, "rt") as fh:
                data = fh.read()
        # Narrowed from a bare "except:" so KeyboardInterrupt/SystemExit
        # propagate; also matches the other copy of this function.
        except Exception:
            try:
                data = filename.decode()
            except Exception:
                data = str(filename)
            data = data.strip()
    else:
        data = filename.read()
        if hasattr(data, "decode"):
            data = data.decode()

    # Create iterator that yields lines.
    def lines_iter():
        prev_line = -1
        while True:
            next_line = data.find("\n", prev_line + 1)
            if next_line < 0:
                break
            yield data[prev_line + 1: next_line]
            prev_line = next_line
        if len(data) > prev_line + 1:
            yield data[prev_line + 1:]

    # Use one Flinn Engdahl object for all region determinations.
    fe = FlinnEngdahl()
    cat = Catalog(resource_id=_get_resource_id("catalog", str(uuid.uuid4())))

    # Loop over 5 lines at once — one NDK event is exactly 5 lines.
    for _i, lines in enumerate(itertools.zip_longest(*[lines_iter()] * 5)):
        if None in lines:
            msg = "Skipped last %i lines. Not a multiple of 5 lines." % (
                lines.count(None))
            warnings.warn(msg, ObsPyNDKWarning)
            continue

        # Parse the lines to a human readable dictionary.
        try:
            record = _read_lines(*lines)
        except (ValueError, ObsPyNDKException):
            exc = traceback.format_exc()
            msg = (
                "Could not parse event %i (faulty file?). Will be "
                "skipped. Lines of the event:\n"
                "\t%s\n"
                "%s") % (_i + 1, "\n\t".join(lines), exc)
            warnings.warn(msg, ObsPyNDKWarning)
            continue

        # Use one creation info for essentially every item.
        creation_info = CreationInfo(
            agency_id="GCMT",
            version=record["version_code"]
        )

        # Use the ObsPy Flinn Engdahl region determiner as the region in the
        # NDK files is oftentimes trimmed.
        region = fe.get_region(record["centroid_longitude"],
                               record["centroid_latitude"])

        # Create an event object.
        event = Event(
            force_resource_id=False,
            event_type="earthquake",
            event_type_certainty="known",
            event_descriptions=[
                EventDescription(text=region, type="Flinn-Engdahl region"),
                EventDescription(text=record["cmt_event_name"],
                                 type="earthquake name")
            ]
        )

        # Assemble the time for the reference origin.
        try:
            time = _parse_date_time(record["date"], record["time"])
        except ObsPyNDKException:
            msg = ("Invalid time in event %i. '%s' and '%s' cannot be "
                   "assembled to a valid time. Event will be skipped.") % \
                (_i + 1, record["date"], record["time"])
            warnings.warn(msg, ObsPyNDKWarning)
            continue

        # Create two origins, one with the reference latitude/longitude and
        # one with the centroidal values.
        ref_origin = Origin(
            force_resource_id=False,
            time=time,
            longitude=record["hypo_lng"],
            latitude=record["hypo_lat"],
            # Convert to m.
            depth=record["hypo_depth_in_km"] * 1000.0,
            origin_type="hypocenter",
            comments=[Comment(text="Hypocenter catalog: %s" %
                              record["hypocenter_reference_catalog"],
                              force_resource_id=False)]
        )
        ref_origin.comments[0].resource_id = _get_resource_id(
            record["cmt_event_name"], "comment", tag="ref_origin")
        ref_origin.resource_id = _get_resource_id(record["cmt_event_name"],
                                                  "origin", tag="reforigin")

        cmt_origin = Origin(
            force_resource_id=False,
            longitude=record["centroid_longitude"],
            longitude_errors={
                "uncertainty": record["centroid_longitude_error"]},
            latitude=record["centroid_latitude"],
            latitude_errors={
                "uncertainty": record["centroid_latitude_error"]},
            # Convert to m.
            depth=record["centroid_depth_in_km"] * 1000.0,
            depth_errors={
                "uncertainty": record["centroid_depth_in_km_error"] * 1000},
            time=ref_origin["time"] + record["centroid_time"],
            time_errors={"uncertainty": record["centroid_time_error"]},
            depth_type=record["type_of_centroid_depth"],
            origin_type="centroid",
            time_fixed=False,
            epicenter_fixed=False,
            creation_info=creation_info.copy()
        )
        cmt_origin.resource_id = _get_resource_id(record["cmt_event_name"],
                                                  "origin", tag="cmtorigin")
        event.origins = [ref_origin, cmt_origin]
        event.preferred_origin_id = cmt_origin.resource_id.id

        # Create the magnitude object.
        mag = Magnitude(
            force_resource_id=False,
            mag=round(record["Mw"], 2),
            magnitude_type="Mwc",
            origin_id=cmt_origin.resource_id,
            creation_info=creation_info.copy()
        )
        mag.resource_id = _get_resource_id(record["cmt_event_name"],
                                           "magnitude", tag="moment_mag")
        event.magnitudes = [mag]
        event.preferred_magnitude_id = mag.resource_id.id

        # Add the reported mb, MS magnitudes as additional magnitude objects.
        event.magnitudes.append(Magnitude(
            force_resource_id=False,
            mag=record["mb"],
            magnitude_type="mb",
            comments=[Comment(
                force_resource_id=False,
                text="Reported magnitude in NDK file. Most likely 'mb'."
            )]
        ))
        event.magnitudes[-1].comments[-1].resource_id = _get_resource_id(
            record["cmt_event_name"], "comment", tag="mb_magnitude")
        event.magnitudes[-1].resource_id = _get_resource_id(
            record["cmt_event_name"], "magnitude", tag="mb")

        event.magnitudes.append(Magnitude(
            force_resource_id=False,
            mag=record["MS"],
            magnitude_type="MS",
            comments=[Comment(
                force_resource_id=False,
                text="Reported magnitude in NDK file. Most likely 'MS'."
            )]
        ))
        event.magnitudes[-1].comments[-1].resource_id = _get_resource_id(
            record["cmt_event_name"], "comment", tag="MS_magnitude")
        event.magnitudes[-1].resource_id = _get_resource_id(
            record["cmt_event_name"], "magnitude", tag="MS")

        # Take care of the moment tensor.
        tensor = Tensor(
            m_rr=record["m_rr"],
            m_rr_errors={"uncertainty": record["m_rr_error"]},
            m_pp=record["m_pp"],
            m_pp_errors={"uncertainty": record["m_pp_error"]},
            m_tt=record["m_tt"],
            m_tt_errors={"uncertainty": record["m_tt_error"]},
            m_rt=record["m_rt"],
            m_rt_errors={"uncertainty": record["m_rt_error"]},
            m_rp=record["m_rp"],
            m_rp_errors={"uncertainty": record["m_rp_error"]},
            m_tp=record["m_tp"],
            m_tp_errors={"uncertainty": record["m_tp_error"]},
            creation_info=creation_info.copy()
        )
        mt = MomentTensor(
            force_resource_id=False,
            scalar_moment=record["scalar_moment"],
            tensor=tensor,
            data_used=[DataUsed(**i) for i in record["data_used"]],
            inversion_type=record["source_type"],
            source_time_function=SourceTimeFunction(
                type=record["moment_rate_type"],
                duration=record["moment_rate_duration"]
            ),
            derived_origin_id=cmt_origin.resource_id,
            creation_info=creation_info.copy()
        )
        mt.resource_id = _get_resource_id(record["cmt_event_name"],
                                          "momenttensor")
        axis = [Axis(**i) for i in record["principal_axis"]]
        focmec = FocalMechanism(
            force_resource_id=False,
            moment_tensor=mt,
            principal_axes=PrincipalAxes(
                # The ordering is the same as for the IRIS SPUD service and
                # from a website of the Saint Louis University Earthquake
                # center so it should be correct.
                t_axis=axis[0],
                p_axis=axis[2],
                n_axis=axis[1]
            ),
            nodal_planes=NodalPlanes(
                nodal_plane_1=NodalPlane(**record["nodal_plane_1"]),
                nodal_plane_2=NodalPlane(**record["nodal_plane_2"])
            ),
            comments=[
                Comment(force_resource_id=False,
                        text="CMT Analysis Type: %s" %
                             record["cmt_type"].capitalize()),
                Comment(force_resource_id=False,
                        text="CMT Timestamp: %s" %
                             record["cmt_timestamp"])],
            creation_info=creation_info.copy()
        )
        focmec.comments[0].resource_id = _get_resource_id(
            record["cmt_event_name"], "comment", tag="cmt_type")
        focmec.comments[1].resource_id = _get_resource_id(
            record["cmt_event_name"], "comment", tag="cmt_timestamp")
        focmec.resource_id = _get_resource_id(record["cmt_event_name"],
                                              "focal_mechanism")
        event.focal_mechanisms = [focmec]
        event.preferred_focal_mechanism_id = focmec.resource_id.id

        # Set at end to avoid duplicate resource id warning.
        event.resource_id = _get_resource_id(record["cmt_event_name"],
                                             "event")
        cat.append(event)

    if len(cat) == 0:
        msg = "No valid events found in NDK file."
        raise ObsPyNDKException(msg)

    return cat
def _parse_record_e(self, line, event):
    """
    Parses the 'error and magnitude' record E.

    Stores the hypocenter standard errors on the event's first origin and
    appends up to four magnitudes: mb, Ms and two contributed magnitudes.

    :param line: fixed-width record line of type E.
    :param event: event object the parsed data is attached to.
    """
    orig_time_stderr = self._float(line[2:7])
    latitude_stderr = self._float(line[8:14])
    longitude_stderr = self._float(line[15:21])
    depth_stderr = self._float(line[22:27])
    mb_mag = self._float(line[28:31])
    mb_nsta = self._int(line[32:35])
    ms_mag = self._float(line[36:39])
    ms_nsta = self._int(line[39:42])
    mag1 = self._float(line[42:45])
    mag1_type = line[45:47]
    mag1_source_code = line[47:51].strip()
    mag2 = self._float(line[51:54])
    mag2_type = line[54:56]
    mag2_source_code = line[56:60].strip()
    evid = event.resource_id.id.split('/')[-1]
    # this record refers to the event's (first) origin
    origin = event.origins[0]
    self._store_uncertainty(origin.time_errors, orig_time_stderr)
    self._store_uncertainty(origin.latitude_errors,
                            self._lat_err_to_deg(latitude_stderr))
    self._store_uncertainty(origin.longitude_errors,
                            self._lon_err_to_deg(longitude_stderr,
                                                 origin.latitude))
    # depth error is given in km, store it in m
    self._store_uncertainty(origin.depth_errors, depth_stderr, scale=1000)
    if mb_mag is not None:
        mag = Magnitude()
        res_id = '/'.join((res_id_prefix, 'magnitude', evid, 'mb'))
        mag.resource_id = ResourceIdentifier(id=res_id)
        mag.creation_info = CreationInfo(agency_id='USGS-NEIC')
        mag.mag = mb_mag
        mag.magnitude_type = 'Mb'
        mag.station_count = mb_nsta
        mag.origin_id = origin.resource_id
        event.magnitudes.append(mag)
    if ms_mag is not None:
        mag = Magnitude()
        res_id = '/'.join((res_id_prefix, 'magnitude', evid, 'ms'))
        mag.resource_id = ResourceIdentifier(id=res_id)
        mag.creation_info = CreationInfo(agency_id='USGS-NEIC')
        mag.mag = ms_mag
        mag.magnitude_type = 'Ms'
        mag.station_count = ms_nsta
        mag.origin_id = origin.resource_id
        event.magnitudes.append(mag)
    # initialize so that a record carrying only the second contributed
    # magnitude does not raise a NameError in the comparison below
    mag1_id = None
    if mag1 is not None:
        mag = Magnitude()
        mag1_id = mag1_type.lower()
        res_id = '/'.join((res_id_prefix, 'magnitude', evid, mag1_id))
        mag.resource_id = ResourceIdentifier(id=res_id)
        mag.creation_info = CreationInfo(agency_id=mag1_source_code)
        mag.mag = mag1
        mag.magnitude_type = mag1_type
        mag.origin_id = origin.resource_id
        event.magnitudes.append(mag)
    if mag2 is not None:
        mag = Magnitude()
        mag2_id = mag2_type.lower()
        # disambiguate the resource id when both magnitudes share a type
        if mag2_id == mag1_id:
            mag2_id += '2'
        res_id = '/'.join((res_id_prefix, 'magnitude', evid, mag2_id))
        mag.resource_id = ResourceIdentifier(id=res_id)
        mag.creation_info = CreationInfo(agency_id=mag2_source_code)
        mag.mag = mag2
        mag.magnitude_type = mag2_type
        mag.origin_id = origin.resource_id
        event.magnitudes.append(mag)
def _read_ndk(filename, *args, **kwargs):  # @UnusedVariable
    """
    Reads an NDK file to a :class:`~obspy.core.event.Catalog` object.

    Events that cannot be parsed are skipped with an ``ObsPyNDKWarning``;
    an ``ObsPyNDKException`` is raised when no event could be read at all.

    :param filename: File or file-like object in text mode.
    """
    # Read the whole file at once. While an iterator would be more efficient
    # the largest NDK file out in the wild is 13.7 MB so it does not matter
    # much.
    if not hasattr(filename, "read"):
        # Check if it exists, otherwise assume its a string.
        try:
            with open(filename, "rt") as fh:
                data = fh.read()
        except Exception:
            # Not a readable path: treat the argument itself as the data.
            try:
                data = filename.decode()
            except Exception:
                data = str(filename)
            data = data.strip()
    else:
        data = filename.read()
        if hasattr(data, "decode"):
            data = data.decode()

    # Create iterator that yields lines.
    def lines_iter():
        prev_line = -1
        while True:
            next_line = data.find("\n", prev_line + 1)
            if next_line < 0:
                break
            yield data[prev_line + 1:next_line]
            prev_line = next_line
        # Emit a trailing line that is not newline-terminated.
        if len(data) > prev_line + 1:
            yield data[prev_line + 1:]

    # Use one Flinn Engdahl object for all region determinations.
    fe = FlinnEngdahl()
    cat = Catalog(resource_id=_get_resource_id("catalog", str(uuid.uuid4())))

    # Loop over 5 lines at once — one NDK event spans exactly 5 lines.
    for _i, lines in enumerate(zip_longest(*[lines_iter()] * 5)):
        if None in lines:
            msg = "Skipped last %i lines. Not a multiple of 5 lines." % (
                lines.count(None))
            warnings.warn(msg, ObsPyNDKWarning)
            continue

        # Parse the lines to a human readable dictionary.
        try:
            record = _read_lines(*lines)
        except (ValueError, ObsPyNDKException):
            exc = traceback.format_exc()
            msg = ("Could not parse event %i (faulty file?). Will be "
                   "skipped. Lines of the event:\n"
                   "\t%s\n"
                   "%s") % (_i + 1, "\n\t".join(lines), exc)
            warnings.warn(msg, ObsPyNDKWarning)
            continue

        # Use one creation info for essentially every item.
        creation_info = CreationInfo(agency_id="GCMT",
                                     version=record["version_code"])

        # Use the ObsPy Flinn Engdahl region determiner as the region in the
        # NDK files is oftentimes trimmed.
        region = fe.get_region(record["centroid_longitude"],
                               record["centroid_latitude"])

        # Create an event object.
        event = Event(force_resource_id=False,
                      event_type="earthquake",
                      event_type_certainty="known",
                      event_descriptions=[
                          EventDescription(text=region,
                                           type="Flinn-Engdahl region"),
                          EventDescription(text=record["cmt_event_name"],
                                           type="earthquake name")
                      ])

        # Assemble the time for the reference origin.
        try:
            time = _parse_date_time(record["date"], record["time"])
        except ObsPyNDKException:
            msg = ("Invalid time in event %i. '%s' and '%s' cannot be "
                   "assembled to a valid time. Event will be skipped.") % \
                (_i + 1, record["date"], record["time"])
            warnings.warn(msg, ObsPyNDKWarning)
            continue

        # Create two origins, one with the reference latitude/longitude and
        # one with the centroidal values.
        ref_origin = Origin(
            force_resource_id=False,
            time=time,
            longitude=record["hypo_lng"],
            latitude=record["hypo_lat"],
            # Convert to m.
            depth=record["hypo_depth_in_km"] * 1000.0,
            origin_type="hypocenter",
            comments=[
                Comment(text="Hypocenter catalog: %s" %
                        record["hypocenter_reference_catalog"],
                        force_resource_id=False)
            ])
        ref_origin.comments[0].resource_id = _get_resource_id(
            record["cmt_event_name"], "comment", tag="ref_origin")
        ref_origin.resource_id = _get_resource_id(record["cmt_event_name"],
                                                  "origin", tag="reforigin")

        cmt_origin = Origin(
            force_resource_id=False,
            longitude=record["centroid_longitude"],
            longitude_errors={
                "uncertainty": record["centroid_longitude_error"]
            },
            latitude=record["centroid_latitude"],
            latitude_errors={"uncertainty": record["centroid_latitude_error"]},
            # Convert to m.
            depth=record["centroid_depth_in_km"] * 1000.0,
            depth_errors={
                "uncertainty": record["centroid_depth_in_km_error"] * 1000
            },
            # NOTE(review): item-style access on an Origin — presumably
            # supported by the event object's mapping interface; confirm.
            time=ref_origin["time"] + record["centroid_time"],
            time_errors={"uncertainty": record["centroid_time_error"]},
            depth_type=record["type_of_centroid_depth"],
            origin_type="centroid",
            time_fixed=False,
            epicenter_fixed=False,
            creation_info=creation_info.copy())
        cmt_origin.resource_id = _get_resource_id(record["cmt_event_name"],
                                                  "origin", tag="cmtorigin")
        event.origins = [ref_origin, cmt_origin]
        event.preferred_origin_id = cmt_origin.resource_id.id

        # Create the magnitude object.
        mag = Magnitude(force_resource_id=False,
                        mag=round(record["Mw"], 2),
                        magnitude_type="Mwc",
                        origin_id=cmt_origin.resource_id,
                        creation_info=creation_info.copy())
        mag.resource_id = _get_resource_id(record["cmt_event_name"],
                                           "magnitude", tag="moment_mag")
        event.magnitudes = [mag]
        event.preferred_magnitude_id = mag.resource_id.id

        # Add the reported mb, MS magnitudes as additional magnitude objects.
        event.magnitudes.append(
            Magnitude(
                force_resource_id=False,
                mag=record["mb"],
                magnitude_type="mb",
                comments=[
                    Comment(
                        force_resource_id=False,
                        text="Reported magnitude in NDK file. Most likely 'mb'."
                    )
                ]))
        event.magnitudes[-1].comments[-1].resource_id = _get_resource_id(
            record["cmt_event_name"], "comment", tag="mb_magnitude")
        event.magnitudes[-1].resource_id = _get_resource_id(
            record["cmt_event_name"], "magnitude", tag="mb")

        event.magnitudes.append(
            Magnitude(
                force_resource_id=False,
                mag=record["MS"],
                magnitude_type="MS",
                comments=[
                    Comment(
                        force_resource_id=False,
                        text="Reported magnitude in NDK file. Most likely 'MS'."
                    )
                ]))
        event.magnitudes[-1].comments[-1].resource_id = _get_resource_id(
            record["cmt_event_name"], "comment", tag="MS_magnitude")
        event.magnitudes[-1].resource_id = _get_resource_id(
            record["cmt_event_name"], "magnitude", tag="MS")

        # Take care of the moment tensor.
        tensor = Tensor(m_rr=record["m_rr"],
                        m_rr_errors={"uncertainty": record["m_rr_error"]},
                        m_pp=record["m_pp"],
                        m_pp_errors={"uncertainty": record["m_pp_error"]},
                        m_tt=record["m_tt"],
                        m_tt_errors={"uncertainty": record["m_tt_error"]},
                        m_rt=record["m_rt"],
                        m_rt_errors={"uncertainty": record["m_rt_error"]},
                        m_rp=record["m_rp"],
                        m_rp_errors={"uncertainty": record["m_rp_error"]},
                        m_tp=record["m_tp"],
                        m_tp_errors={"uncertainty": record["m_tp_error"]},
                        creation_info=creation_info.copy())
        mt = MomentTensor(
            force_resource_id=False,
            scalar_moment=record["scalar_moment"],
            tensor=tensor,
            data_used=[DataUsed(**i) for i in record["data_used"]],
            inversion_type=record["source_type"],
            source_time_function=SourceTimeFunction(
                type=record["moment_rate_type"],
                duration=record["moment_rate_duration"]),
            derived_origin_id=cmt_origin.resource_id,
            creation_info=creation_info.copy())
        mt.resource_id = _get_resource_id(record["cmt_event_name"],
                                          "momenttensor")
        axis = [Axis(**i) for i in record["principal_axis"]]
        focmec = FocalMechanism(
            force_resource_id=False,
            moment_tensor=mt,
            principal_axes=PrincipalAxes(
                # The ordering is the same as for the IRIS SPUD service and
                # from a website of the Saint Louis University Earthquake
                # center so it should be correct.
                t_axis=axis[0],
                p_axis=axis[2],
                n_axis=axis[1]),
            nodal_planes=NodalPlanes(
                nodal_plane_1=NodalPlane(**record["nodal_plane_1"]),
                nodal_plane_2=NodalPlane(**record["nodal_plane_2"])),
            comments=[
                Comment(force_resource_id=False,
                        text="CMT Analysis Type: %s" %
                        record["cmt_type"].capitalize()),
                Comment(force_resource_id=False,
                        text="CMT Timestamp: %s" % record["cmt_timestamp"])
            ],
            creation_info=creation_info.copy())
        focmec.comments[0].resource_id = _get_resource_id(
            record["cmt_event_name"], "comment", tag="cmt_type")
        focmec.comments[1].resource_id = _get_resource_id(
            record["cmt_event_name"], "comment", tag="cmt_timestamp")
        focmec.resource_id = _get_resource_id(record["cmt_event_name"],
                                              "focal_mechanism")
        event.focal_mechanisms = [focmec]
        event.preferred_focal_mechanism_id = focmec.resource_id.id

        # Set at end to avoid duplicate resource id warning.
        event.resource_id = _get_resource_id(record["cmt_event_name"],
                                             "event")
        cat.append(event)

    if len(cat) == 0:
        msg = "No valid events found in NDK file."
        raise ObsPyNDKException(msg)

    return cat
def iris2quakeml(url, output_folder=None):
    """
    Downloads a GCMT QuakeML file from the IRIS SPUD service, normalizes it
    (single origin/magnitude/focal mechanism, SI units) and writes it to
    disk as a validated QuakeML file.

    :param url: SPUD (or spudservice) event URL.
    :param output_folder: Optional directory the file is written to.
    :raises ValueError: if the download cannot be parsed or lacks a moment
        tensor / scalar moment.
    """
    if "/spudservice/" not in url:
        url = url.replace("/spud/", "/spudservice/")
        if url.endswith("/"):
            url += "quakeml"
        else:
            url += "/quakeml"
    print("Downloading %s..." % url)

    r = requests.get(url)
    if r.status_code != 200:
        msg = "Error Downloading file!"
        raise Exception(msg)

    # For some reason the quakeml file is escaped HTML.
    # NOTE(review): "HTMLParser" is the Python 2 module name; on Python 3
    # this needs html.parser / html.unescape — confirm the import shim.
    h = HTMLParser.HTMLParser()
    data = h.unescape(r.content)

    # Replace some XML tags.
    data = data.replace("long-period body waves", "body waves")
    data = data.replace("intermediate-period surface waves", "surface waves")
    data = data.replace("long-period mantle waves", "mantle waves")

    data = data.replace("<html><body><pre>", "")
    data = data.replace("</pre></body></html>", "")

    # Change the resource identifiers. Colons are not allowed in QuakeML.
    pattern = r"(\d{4})-(\d{2})-(\d{2})T(\d{2}):(\d{2}):(\d{2})\.(\d{6})"
    data = re.sub(pattern, r"\1-\2-\3T\4-\5-\6.\7", data)

    data = StringIO(data)

    try:
        cat = readEvents(data)
    except Exception:
        msg = "Could not read downloaded event data"
        raise ValueError(msg)

    # Parse the event, and use only one origin, magnitude and focal
    # mechanism. Only the first event is used. Should not be a problem for
    # the chosen global cmt application.
    ev = cat[0]

    if ev.preferred_origin():
        ev.origins = [ev.preferred_origin()]
    else:
        ev.origins = [ev.origins[0]]
    if ev.preferred_focal_mechanism():
        ev.focal_mechanisms = [ev.preferred_focal_mechanism()]
    else:
        ev.focal_mechanisms = [ev.focal_mechanisms[0]]

    try:
        mt = ev.focal_mechanisms[0].moment_tensor
    except Exception:
        # was "raise ValueError" without the message — pass it along
        msg = "No moment tensor found in file."
        raise ValueError(msg)

    seismic_moment_in_dyn_cm = mt.scalar_moment
    if not seismic_moment_in_dyn_cm:
        msg = "No scalar moment found in file."
        raise ValueError(msg)

    # Create a new magnitude object with the moment magnitude calculated
    # from the given seismic moment.
    mag = Magnitude()
    mag.magnitude_type = "Mw"
    mag.origin_id = ev.origins[0].resource_id
    # This is the formula given on the GCMT homepage.
    mag.mag = (2.0 / 3.0) * (math.log10(seismic_moment_in_dyn_cm) - 16.1)
    mag.resource_id = ev.origins[0].resource_id.resource_id.replace(
        "Origin", "Magnitude")
    ev.magnitudes = [mag]
    ev.preferred_magnitude_id = mag.resource_id

    # Convert the depth to meters.
    org = ev.origins[0]
    org.depth *= 1000.0
    if org.depth_errors.uncertainty:
        org.depth_errors.uncertainty *= 1000.0

    # Ugly asserts -- this is just a simple script.
    assert (len(ev.magnitudes) == 1)
    assert (len(ev.origins) == 1)
    assert (len(ev.focal_mechanisms) == 1)

    # All values given in the QuakeML file are given in dyne * cm. Convert
    # them to N * m. (iteritems() is Python 2 only — use items().)
    for key, value in mt.tensor.items():
        if key.startswith("m_") and len(key) == 4:
            mt.tensor[key] /= 1E7
        if key.endswith("_errors") and hasattr(value, "uncertainty"):
            mt.tensor[key].uncertainty /= 1E7
    mt.scalar_moment /= 1E7
    if mt.scalar_moment_errors.uncertainty:
        mt.scalar_moment_errors.uncertainty /= 1E7
    p_axes = ev.focal_mechanisms[0].principal_axes
    for ax in [p_axes.t_axis, p_axes.p_axis, p_axes.n_axis]:
        if ax is None or not ax.length:
            continue
        ax.length /= 1E7

    # Check if it has a source time function
    stf = mt.source_time_function
    if stf:
        if stf.type != "triangle":
            msg = ("Source time function type '%s' not yet mapped. Please "
                   "contact the developers.") % stf.type
            raise NotImplementedError(msg)
        if not stf.duration:
            if not stf.decay_time:
                msg = "Not known how to derive duration without decay time."
                raise NotImplementedError(msg)
            # Approximate the duration for triangular STF.
            stf.duration = 2 * stf.decay_time

    # Get the flinn_engdahl region for a nice name.
    fe = FlinnEngdahl()
    region_name = fe.get_region(ev.origins[0].longitude,
                                ev.origins[0].latitude)
    region_name = region_name.replace(" ", "_")
    event_name = "GCMT_event_%s_Mag_%.1f_%s-%s-%s-%s-%s.xml" % \
        (region_name, ev.magnitudes[0].mag, ev.origins[0].time.year,
         ev.origins[0].time.month, ev.origins[0].time.day,
         ev.origins[0].time.hour, ev.origins[0].time.minute)

    # Check if the ids of the magnitude and origin contain the
    # corresponding tag. Otherwise replace them.
    ev.origins[0].resource_id = \
        ev.origins[0].resource_id.resource_id.replace(
            "quakeml/gcmtid", "quakeml/origin/gcmtid")
    ev.magnitudes[0].resource_id = \
        ev.magnitudes[0].resource_id.resource_id.replace(
            "quakeml/gcmtid", "quakeml/magnitude/gcmtid")

    # Fix up the moment tensor resource_ids.
    mt.derived_origin_id = ev.origins[0].resource_id
    mt.resource_id = mt.resource_id.resource_id.replace(
        "focalmechanism", "momenttensor")

    cat = Catalog()
    cat.resource_id = ev.origins[0].resource_id.resource_id.replace(
        "origin", "event_parameters")
    cat.append(ev)
    if output_folder:
        event_name = os.path.join(output_folder, event_name)
    cat.write(event_name, format="quakeml", validate=True)
    print("Written file", event_name)