def _parseRecordDa(self, line, focal_mechanism):
    """
    Parses the 'source parameter data - principal axes and nodal
    planes' record Da
    """
    exponent = self._intZero(line[3:5])
    scale = math.pow(10, exponent)
    t_axis_len = self._floatWithFormat(line[5:9], '4.2', scale)
    t_axis_stderr = self._floatWithFormat(line[9:12], '3.2', scale)
    t_axis_plunge = self._int(line[12:14])
    t_axis_azimuth = self._int(line[14:17])
    n_axis_len = self._floatWithFormat(line[17:21], '4.2', scale)
    n_axis_stderr = self._floatWithFormat(line[21:24], '3.2', scale)
    n_axis_plunge = self._int(line[24:26])
    n_axis_azimuth = self._int(line[26:29])
    p_axis_len = self._floatWithFormat(line[29:33], '4.2', scale)
    p_axis_stderr = self._floatWithFormat(line[33:36], '3.2', scale)
    p_axis_plunge = self._int(line[36:38])
    p_axis_azimuth = self._int(line[38:41])
    np1_strike = self._int(line[42:45])
    np1_dip = self._int(line[45:47])
    np1_slip = self._int(line[47:51])
    np2_strike = self._int(line[51:54])
    np2_dip = self._int(line[54:56])
    np2_slip = self._int(line[56:60])
    t_axis = Axis()
    t_axis.length = t_axis_len
    self._storeUncertainty(t_axis.length_errors, t_axis_stderr)
    t_axis.plunge = t_axis_plunge
    t_axis.azimuth = t_axis_azimuth
    n_axis = Axis()
    n_axis.length = n_axis_len
    self._storeUncertainty(n_axis.length_errors, n_axis_stderr)
    n_axis.plunge = n_axis_plunge
    n_axis.azimuth = n_axis_azimuth
    p_axis = Axis()
    p_axis.length = p_axis_len
    self._storeUncertainty(p_axis.length_errors, p_axis_stderr)
    p_axis.plunge = p_axis_plunge
    p_axis.azimuth = p_axis_azimuth
    principal_axes = PrincipalAxes()
    principal_axes.t_axis = t_axis
    principal_axes.n_axis = n_axis
    principal_axes.p_axis = p_axis
    focal_mechanism.principal_axes = principal_axes
    nodal_plane_1 = NodalPlane()
    nodal_plane_1.strike = np1_strike
    nodal_plane_1.dip = np1_dip
    nodal_plane_1.rake = np1_slip
    nodal_plane_2 = NodalPlane()
    nodal_plane_2.strike = np2_strike
    nodal_plane_2.dip = np2_dip
    nodal_plane_2.rake = np2_slip
    nodal_planes = NodalPlanes()
    nodal_planes.nodal_plane_1 = nodal_plane_1
    nodal_planes.nodal_plane_2 = nodal_plane_2
    focal_mechanism.nodal_planes = nodal_planes
def _parse_record_da(self, line, focal_mechanism):
    """
    Parses the 'source parameter data - principal axes and nodal
    planes' record Da
    """
    exponent = self._int_zero(line[3:5])
    scale = math.pow(10, exponent)
    t_axis_len = self._float_with_format(line[5:9], '4.2', scale)
    t_axis_stderr = self._float_with_format(line[9:12], '3.2', scale)
    t_axis_plunge = self._int(line[12:14])
    t_axis_azimuth = self._int(line[14:17])
    n_axis_len = self._float_with_format(line[17:21], '4.2', scale)
    n_axis_stderr = self._float_with_format(line[21:24], '3.2', scale)
    n_axis_plunge = self._int(line[24:26])
    n_axis_azimuth = self._int(line[26:29])
    p_axis_len = self._float_with_format(line[29:33], '4.2', scale)
    p_axis_stderr = self._float_with_format(line[33:36], '3.2', scale)
    p_axis_plunge = self._int(line[36:38])
    p_axis_azimuth = self._int(line[38:41])
    np1_strike = self._int(line[42:45])
    np1_dip = self._int(line[45:47])
    np1_slip = self._int(line[47:51])
    np2_strike = self._int(line[51:54])
    np2_dip = self._int(line[54:56])
    np2_slip = self._int(line[56:60])
    t_axis = Axis()
    t_axis.length = t_axis_len
    self._store_uncertainty(t_axis.length_errors, t_axis_stderr)
    t_axis.plunge = t_axis_plunge
    t_axis.azimuth = t_axis_azimuth
    n_axis = Axis()
    n_axis.length = n_axis_len
    self._store_uncertainty(n_axis.length_errors, n_axis_stderr)
    n_axis.plunge = n_axis_plunge
    n_axis.azimuth = n_axis_azimuth
    p_axis = Axis()
    p_axis.length = p_axis_len
    self._store_uncertainty(p_axis.length_errors, p_axis_stderr)
    p_axis.plunge = p_axis_plunge
    p_axis.azimuth = p_axis_azimuth
    principal_axes = PrincipalAxes()
    principal_axes.t_axis = t_axis
    principal_axes.n_axis = n_axis
    principal_axes.p_axis = p_axis
    focal_mechanism.principal_axes = principal_axes
    nodal_plane_1 = NodalPlane()
    nodal_plane_1.strike = np1_strike
    nodal_plane_1.dip = np1_dip
    nodal_plane_1.rake = np1_slip
    nodal_plane_2 = NodalPlane()
    nodal_plane_2.strike = np2_strike
    nodal_plane_2.dip = np2_dip
    nodal_plane_2.rake = np2_slip
    nodal_planes = NodalPlanes()
    nodal_planes.nodal_plane_1 = nodal_plane_1
    nodal_planes.nodal_plane_2 = nodal_plane_2
    focal_mechanism.nodal_planes = nodal_planes
def _read_focmec_lst_one_block(lines, polarity_count=None):
    comment = Comment(text='\n'.join(lines))
    while lines and not lines[0].lstrip().startswith('Dip,Strike,Rake'):
        lines.pop(0)
    # the last block does not contain a focmec but only a short comment how
    # many solutions there were overall, so we hit a block that will not have
    # the above line and we exhaust the lines list
    if not lines:
        return None, []
    dip, strike, rake = [float(x) for x in lines[0].split()[1:4]]
    plane1 = NodalPlane(strike=strike, dip=dip, rake=rake)
    lines.pop(0)
    dip, strike, rake = [float(x) for x in lines[0].split()[1:4]]
    plane2 = NodalPlane(strike=strike, dip=dip, rake=rake)
    planes = NodalPlanes(nodal_plane_1=plane1, nodal_plane_2=plane2,
                         preferred_plane=1)
    focmec = FocalMechanism(nodal_planes=planes)
    focmec.comments.append(comment)
    if polarity_count is not None:
        polarity_errors = _get_polarity_error_count_lst_block(lines)
        focmec.station_polarity_count = polarity_count
        focmec.misfit = float(polarity_errors) / polarity_count
    return focmec, lines
def __toFocalMechanism(parser, focmec_el):
    """
    """
    global CURRENT_TYPE
    focmec = FocalMechanism()
    focmec.resource_id = ResourceIdentifier(
        prefix="/".join([RESOURCE_ROOT, "focal_mechanism"]))
    if CURRENT_TYPE == "obspyck":
        focmec.method_id = "%s/focal_mechanism_method/focmec/1" % RESOURCE_ROOT
    else:
        focmec.method_id = "%s/focal_mechanism_method/%s/1" % (
            RESOURCE_ROOT, parser.xpath2obj('program', focmec_el))
    if str(focmec.method_id).lower().endswith("none"):
        focmec.method_id = None
    focmec.station_polarity_count = parser.xpath2obj(
        "stationPolarityCount", focmec_el, int)
    if focmec.station_polarity_count:
        focmec.misfit = parser.xpath2obj(
            "stationPolarityErrorCount", focmec_el, int) / float(
            focmec.station_polarity_count)
    focmec.nodal_planes = NodalPlanes()
    focmec.nodal_planes.nodal_plane_1 = NodalPlane()
    nodal_plane = focmec_el.find("nodalPlanes")
    if nodal_plane is None or not len(nodal_plane):
        return None
    n_p = focmec.nodal_planes.nodal_plane_1
    # There is always only one nodal plane, called nodalPlane1
    n_p.strike, strike_uncertainty = __toFloatQuantity(
        parser, focmec_el, "nodalPlanes/nodalPlane1/strike")
    n_p.dip, dip_uncertainty = __toFloatQuantity(
        parser, focmec_el, "nodalPlanes/nodalPlane1/dip")
    n_p.rake, rake_uncertainty = __toFloatQuantity(
        parser, focmec_el, "nodalPlanes/nodalPlane1/rake")
    if hasattr(strike_uncertainty, "uncertainty"):
        n_p.strike_errors.uncertainty = strike_uncertainty["uncertainty"]
    if hasattr(dip_uncertainty, "uncertainty"):
        n_p.dip_errors.uncertainty = dip_uncertainty["uncertainty"]
    if hasattr(rake_uncertainty, "uncertainty"):
        n_p.rake_errors.uncertainty = rake_uncertainty["uncertainty"]
    solution_count = parser.xpath2obj("possibleSolutionCount", focmec_el, int)
    if solution_count:
        focmec.comments.append(Comment(
            force_resource_id=False, resource_id=None,
            text="Possible Solution Count: %i" % solution_count))
    return focmec
def _read_focmec_out(lines):
    """
    Read given data into an :class:`~obspy.core.event.Event` object.

    :type lines: list
    :param lines: List of decoded unicode strings with data from a FOCMEC out
        file.
    """
    event, _ = _read_common_header(lines)
    # now move to first line with a focal mechanism
    for i, line in enumerate(lines):
        if line.split()[:3] == ['Dip', 'Strike', 'Rake']:
            break
    else:
        return event
    header = lines[:i]
    polarity_count, weighted = _get_polarity_count(header)
    focmec_list_header = lines[i]
    event.comments.append(Comment(text='\n'.join(header)))
    try:
        lines = lines[i + 1:]
    except IndexError:
        return event
    for line in lines:
        # allow for empty lines (maybe they can happen at the end sometimes..)
        if not line.strip():
            continue
        comment = Comment(text='\n'.join((focmec_list_header, line)))
        items = line.split()
        dip, strike, rake = [float(x) for x in items[:3]]
        plane = NodalPlane(strike=strike, dip=dip, rake=rake)
        planes = NodalPlanes(nodal_plane_1=plane, preferred_plane=1)
        # XXX ideally should compute the auxiliary plane..
        focmec = FocalMechanism(nodal_planes=planes)
        focmec.station_polarity_count = polarity_count
        focmec.creation_info = CreationInfo(
            version='FOCMEC',
            creation_time=event.creation_info.creation_time)
        if not weighted:
            errors = sum([int(x) for x in items[3:6]])
            focmec.misfit = float(errors) / polarity_count
        focmec.comments.append(comment)
        event.focal_mechanisms.append(focmec)
    return event
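# Hedged usage sketch (added for illustration, not part of the reader above):
# FOCMEC ".out" files are normally loaded through ObsPy's generic
# read_events() entry point rather than by calling _read_focmec_out()
# directly. The format key "FOCMEC" and the file name "mech.out" are
# assumptions / placeholders.
def _example_pick_best_focmec_solution():
    from obspy import read_events
    event = read_events("mech.out", format="FOCMEC")[0]
    # _read_focmec_out() sets misfit only for unweighted runs, so assume that
    # here and take the solution with the fewest wrong polarities.
    return min(event.focal_mechanisms, key=lambda fm: fm.misfit)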
def _read_ndk(filename, *args, **kwargs):  # @UnusedVariable
    """
    Reads an NDK file to a :class:`~obspy.core.event.Catalog` object.

    :param filename: File or file-like object in text mode.
    """
    # Read the whole file at once. While an iterator would be more efficient
    # the largest NDK file out in the wild is 13.7 MB so it does not matter
    # much.
    if not hasattr(filename, "read"):
        # Check if it exists, otherwise assume its a string.
        try:
            with open(filename, "rt") as fh:
                data = fh.read()
        except Exception:
            try:
                data = filename.decode()
            except Exception:
                data = str(filename)
            data = data.strip()
    else:
        data = filename.read()
        if hasattr(data, "decode"):
            data = data.decode()

    # Create iterator that yields lines.
    def lines_iter():
        prev_line = -1
        while True:
            next_line = data.find("\n", prev_line + 1)
            if next_line < 0:
                break
            yield data[prev_line + 1:next_line]
            prev_line = next_line
        if len(data) > prev_line + 1:
            yield data[prev_line + 1:]

    # Use one Flinn Engdahl object for all region determinations.
    fe = FlinnEngdahl()
    cat = Catalog(resource_id=_get_resource_id("catalog", str(uuid.uuid4())))

    # Loop over 5 lines at once.
    for _i, lines in enumerate(zip_longest(*[lines_iter()] * 5)):
        if None in lines:
            msg = "Skipped last %i lines. Not a multiple of 5 lines." % (
                lines.count(None))
            warnings.warn(msg, ObsPyNDKWarning)
            continue

        # Parse the lines to a human readable dictionary.
        try:
            record = _read_lines(*lines)
        except (ValueError, ObsPyNDKException):
            exc = traceback.format_exc()
            msg = ("Could not parse event %i (faulty file?). Will be "
                   "skipped. Lines of the event:\n"
                   "\t%s\n"
                   "%s") % (_i + 1, "\n\t".join(lines), exc)
            warnings.warn(msg, ObsPyNDKWarning)
            continue

        # Use one creation info for essentially every item.
        creation_info = CreationInfo(agency_id="GCMT",
                                     version=record["version_code"])

        # Use the ObsPy Flinn Engdahl region determiner as the region in the
        # NDK files is oftentimes trimmed.
        region = fe.get_region(record["centroid_longitude"],
                               record["centroid_latitude"])

        # Create an event object.
        event = Event(
            force_resource_id=False,
            event_type="earthquake",
            event_type_certainty="known",
            event_descriptions=[
                EventDescription(text=region, type="Flinn-Engdahl region"),
                EventDescription(text=record["cmt_event_name"],
                                 type="earthquake name")
            ])

        # Assemble the time for the reference origin.
        try:
            time = _parse_date_time(record["date"], record["time"])
        except ObsPyNDKException:
            msg = ("Invalid time in event %i. '%s' and '%s' cannot be "
                   "assembled to a valid time. Event will be skipped.") % \
                (_i + 1, record["date"], record["time"])
            warnings.warn(msg, ObsPyNDKWarning)
            continue

        # Create two origins, one with the reference latitude/longitude and
        # one with the centroidal values.
        ref_origin = Origin(
            force_resource_id=False,
            time=time,
            longitude=record["hypo_lng"],
            latitude=record["hypo_lat"],
            # Convert to m.
            depth=record["hypo_depth_in_km"] * 1000.0,
            origin_type="hypocenter",
            comments=[Comment(text="Hypocenter catalog: %s" %
                              record["hypocenter_reference_catalog"],
                              force_resource_id=False)])
        ref_origin.comments[0].resource_id = _get_resource_id(
            record["cmt_event_name"], "comment", tag="ref_origin")
        ref_origin.resource_id = _get_resource_id(
            record["cmt_event_name"], "origin", tag="reforigin")

        cmt_origin = Origin(
            force_resource_id=False,
            longitude=record["centroid_longitude"],
            longitude_errors={
                "uncertainty": record["centroid_longitude_error"]},
            latitude=record["centroid_latitude"],
            latitude_errors={
                "uncertainty": record["centroid_latitude_error"]},
            # Convert to m.
            depth=record["centroid_depth_in_km"] * 1000.0,
            depth_errors={
                "uncertainty": record["centroid_depth_in_km_error"] * 1000},
            time=ref_origin["time"] + record["centroid_time"],
            time_errors={"uncertainty": record["centroid_time_error"]},
            depth_type=record["type_of_centroid_depth"],
            origin_type="centroid",
            time_fixed=False,
            epicenter_fixed=False,
            creation_info=creation_info.copy())
        cmt_origin.resource_id = _get_resource_id(
            record["cmt_event_name"], "origin", tag="cmtorigin")
        event.origins = [ref_origin, cmt_origin]
        event.preferred_origin_id = cmt_origin.resource_id.id

        # Create the magnitude object.
        mag = Magnitude(
            force_resource_id=False,
            mag=round(record["Mw"], 2),
            magnitude_type="Mwc",
            origin_id=cmt_origin.resource_id,
            creation_info=creation_info.copy())
        mag.resource_id = _get_resource_id(
            record["cmt_event_name"], "magnitude", tag="moment_mag")
        event.magnitudes = [mag]
        event.preferred_magnitude_id = mag.resource_id.id

        # Add the reported mb, MS magnitudes as additional magnitude objects.
        event.magnitudes.append(Magnitude(
            force_resource_id=False,
            mag=record["mb"],
            magnitude_type="mb",
            comments=[Comment(
                force_resource_id=False,
                text="Reported magnitude in NDK file. Most likely 'mb'.")]))
        event.magnitudes[-1].comments[-1].resource_id = _get_resource_id(
            record["cmt_event_name"], "comment", tag="mb_magnitude")
        event.magnitudes[-1].resource_id = _get_resource_id(
            record["cmt_event_name"], "magnitude", tag="mb")

        event.magnitudes.append(Magnitude(
            force_resource_id=False,
            mag=record["MS"],
            magnitude_type="MS",
            comments=[Comment(
                force_resource_id=False,
                text="Reported magnitude in NDK file. Most likely 'MS'.")]))
        event.magnitudes[-1].comments[-1].resource_id = _get_resource_id(
            record["cmt_event_name"], "comment", tag="MS_magnitude")
        event.magnitudes[-1].resource_id = _get_resource_id(
            record["cmt_event_name"], "magnitude", tag="MS")

        # Take care of the moment tensor.
        tensor = Tensor(
            m_rr=record["m_rr"],
            m_rr_errors={"uncertainty": record["m_rr_error"]},
            m_pp=record["m_pp"],
            m_pp_errors={"uncertainty": record["m_pp_error"]},
            m_tt=record["m_tt"],
            m_tt_errors={"uncertainty": record["m_tt_error"]},
            m_rt=record["m_rt"],
            m_rt_errors={"uncertainty": record["m_rt_error"]},
            m_rp=record["m_rp"],
            m_rp_errors={"uncertainty": record["m_rp_error"]},
            m_tp=record["m_tp"],
            m_tp_errors={"uncertainty": record["m_tp_error"]},
            creation_info=creation_info.copy())
        mt = MomentTensor(
            force_resource_id=False,
            scalar_moment=record["scalar_moment"],
            tensor=tensor,
            data_used=[DataUsed(**i) for i in record["data_used"]],
            inversion_type=record["source_type"],
            source_time_function=SourceTimeFunction(
                type=record["moment_rate_type"],
                duration=record["moment_rate_duration"]),
            derived_origin_id=cmt_origin.resource_id,
            creation_info=creation_info.copy())
        mt.resource_id = _get_resource_id(
            record["cmt_event_name"], "momenttensor")
        axis = [Axis(**i) for i in record["principal_axis"]]
        focmec = FocalMechanism(
            force_resource_id=False,
            moment_tensor=mt,
            principal_axes=PrincipalAxes(
                # The ordering is the same as for the IRIS SPUD service and
                # from a website of the Saint Louis University Earthquake
                # center so it should be correct.
                t_axis=axis[0],
                p_axis=axis[2],
                n_axis=axis[1]),
            nodal_planes=NodalPlanes(
                nodal_plane_1=NodalPlane(**record["nodal_plane_1"]),
                nodal_plane_2=NodalPlane(**record["nodal_plane_2"])),
            comments=[
                Comment(force_resource_id=False,
                        text="CMT Analysis Type: %s" %
                             record["cmt_type"].capitalize()),
                Comment(force_resource_id=False,
                        text="CMT Timestamp: %s" % record["cmt_timestamp"])
            ],
            creation_info=creation_info.copy())
        focmec.comments[0].resource_id = _get_resource_id(
            record["cmt_event_name"], "comment", tag="cmt_type")
        focmec.comments[1].resource_id = _get_resource_id(
            record["cmt_event_name"], "comment", tag="cmt_timestamp")
        focmec.resource_id = _get_resource_id(
            record["cmt_event_name"], "focal_mechanism")
        event.focal_mechanisms = [focmec]
        event.preferred_focal_mechanism_id = focmec.resource_id.id

        # Set at end to avoid duplicate resource id warning.
        event.resource_id = _get_resource_id(record["cmt_event_name"],
                                             "event")

        cat.append(event)

    if len(cat) == 0:
        msg = "No valid events found in NDK file."
        raise ObsPyNDKException(msg)

    return cat
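# Hedged usage sketch (assumption, for illustration only): NDK catalogs are
# usually loaded via ObsPy's generic read_events() rather than by calling
# _read_ndk() directly; "quick.ndk" is a placeholder file name.
def _example_read_ndk_usage():
    from obspy import read_events
    cat = read_events("quick.ndk", format="NDK")
    # Each event carries the reference and centroid origins, the Mwc/mb/MS
    # magnitudes and the CMT focal mechanism assembled above.
    return cat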
def makeCatalog(StazList, mt, scale, args):

    epi = args.epi.rsplit()
    model = args.model.split(os.sep)
    NrSt = len(StazList)
    NrCo = NrSt * 3
    (Fmin, Fmax) = getFreq(args)
    Tmin = ('%.0f' % (1 / Fmax))
    Tmax = ('%.0f' % (1 / Fmin))
    mo = ('%.3e' % (mt[0]))
    mw = ('%.2f' % (mt[1]))
    Pdc = ('%.2f' % (float(mt[2]) / 100))
    Pclvd = ('%.2f' % (float(mt[3]) / 100))

    Tval = ('%10.3e' % (mt[22]))
    Tplg = ('%4.1f' % (mt[23]))
    Tazi = ('%5.1f' % (mt[24]))
    Nval = ('%10.3e' % (mt[25]))
    Nplg = ('%4.1f' % (mt[26]))
    Nazi = ('%5.1f' % (mt[27]))
    Pval = ('%10.3e' % (mt[28]))
    Pplg = ('%4.1f' % (mt[29]))
    Pazi = ('%5.1f' % (mt[30]))

    STp1 = ('%5.1f' % (mt[31]))
    DPp1 = ('%4.1f' % (mt[32]))
    RAp1 = ('%6.1f' % (mt[33]))
    STp2 = ('%5.1f' % (mt[34]))
    DPp2 = ('%4.1f' % (mt[35]))
    RAp2 = ('%6.1f' % (mt[36]))
    var = ('%.2f' % (mt[37]))
    qua = ('%d' % (mt[38]))

    mij = [mt[4], mt[5], mt[6], mt[7], mt[8], mt[9]]
    mm0 = str('%10.3e' % (mij[0]))
    mm1 = str('%10.3e' % (mij[1]))
    mm2 = str('%10.3e' % (mij[2]))
    mm3 = str('%10.3e' % (mij[3]))
    mm4 = str('%10.3e' % (mij[4]))
    mm5 = str('%10.3e' % (mij[5]))
    # Aki convention
    Mrr = mm5
    Mtt = mm0
    Mff = mm1
    Mrt = mm3
    Mrf = mm4
    Mtf = mm2

    # stress regime
    A1 = PrincipalAxis(val=mt[22], dip=mt[23], strike=mt[24])
    A2 = PrincipalAxis(val=mt[25], dip=mt[26], strike=mt[27])
    A3 = PrincipalAxis(val=mt[28], dip=mt[29], strike=mt[30])

    (regime, sh) = stressRegime(A1, A2, A3)
    sh = ('%5.1f' % (sh))

    #### Build classes #################################
    #
    # Resource Id is the event origin time for definition
    res_id = ResourceIdentifier(args.ori)
    nowUTC = datetime.datetime.utcnow()
    info = CreationInfo(author="pytdmt", version="2.4",
                        creation_time=nowUTC)
    evOrigin = Origin(resource_id=res_id,
                      time=args.ori,
                      latitude=epi[0],
                      longitude=epi[1],
                      depth=epi[2],
                      earth_model_id=model[-1],
                      creation_info=info)
    # Magnitudes
    magnitude = Magnitude(mag=mw, magnitude_type="Mw")
    # Nodal Planes
    np1 = NodalPlane(strike=STp1, dip=DPp1, rake=RAp1)
    np2 = NodalPlane(strike=STp2, dip=DPp2, rake=RAp2)
    planes = NodalPlanes(nodal_plane_1=np1, nodal_plane_2=np2)
    # Principal axes
    Taxe = Axis(azimuth=Tazi, plunge=Tplg, length=Tval)
    Naxe = Axis(azimuth=Nazi, plunge=Nplg, length=Nval)
    Paxe = Axis(azimuth=Pazi, plunge=Pplg, length=Pval)
    axes = PrincipalAxes(t_axis=Taxe, p_axis=Paxe, n_axis=Naxe)
    # MT elements
    MT = Tensor(m_rr=Mrr, m_tt=Mtt, m_pp=Mff, m_rt=Mrt, m_rp=Mrf, m_tp=Mtf)
    # Stress regime
    regStr = 'Stress regime: ' + regime + ' - SH = ' + sh
    strDes = EventDescription(regStr)
    # MT dataset
    dataInfo = DataUsed(wave_type="combined",
                        station_count=NrSt,
                        component_count=NrCo,
                        shortest_period=Tmin,
                        longest_period=Tmax)
    source = MomentTensor(data_used=dataInfo,
                          scalar_moment=mo,
                          tensor=MT,
                          variance_reduction=var,
                          double_couple=Pdc,
                          clvd=Pclvd,
                          iso=0)
    focMec = FocalMechanism(moment_tensor=source,
                            nodal_planes=planes,
                            principal_axes=axes,
                            azimuthal_gap=-1)

    # Initialize Event Catalog
    mtSolution = Event(creation_info=info)
    mtSolution.origins.append(evOrigin)
    mtSolution.magnitudes.append(magnitude)
    mtSolution.focal_mechanisms.append(focMec)
    mtSolution.event_descriptions.append(strDes)

    cat = Catalog()
    cat.append(mtSolution)

    return cat
def __read_single_fnetmt_entry(line, **kwargs):
    """
    Reads a single F-net moment tensor solution to a
    :class:`~obspy.core.event.Event` object.

    :param line: String containing moment tensor information.
    :type line: str.
    """

    a = line.split()
    try:
        ot = UTCDateTime().strptime(a[0], '%Y/%m/%d,%H:%M:%S.%f')
    except ValueError:
        ot = UTCDateTime().strptime(a[0], '%Y/%m/%d,%H:%M:%S')
    lat, lon, depjma, magjma = map(float, a[1:5])
    depjma *= 1000
    region = a[5]
    strike = tuple(map(int, a[6].split(';')))
    dip = tuple(map(int, a[7].split(';')))
    rake = tuple(map(int, a[8].split(';')))
    mo = float(a[9])
    depmt = float(a[10]) * 1000
    magmt = float(a[11])
    var_red = float(a[12])
    mxx, mxy, mxz, myy, myz, mzz, unit = map(float, a[13:20])

    event_name = util.gen_sc3_id(ot)
    e = Event(event_type="earthquake")
    e.resource_id = _get_resource_id(event_name, 'event')

    # Standard JMA solution
    o_jma = Origin(time=ot, latitude=lat, longitude=lon,
                   depth=depjma, depth_type="from location",
                   region=region)
    o_jma.resource_id = _get_resource_id(event_name, 'origin', 'JMA')
    m_jma = Magnitude(mag=magjma, magnitude_type='ML',
                      origin_id=o_jma.resource_id)
    m_jma.resource_id = _get_resource_id(event_name, 'magnitude', 'JMA')
    # MT solution
    o_mt = Origin(time=ot, latitude=lat, longitude=lon,
                  depth=depmt, region=region,
                  depth_type="from moment tensor inversion")
    o_mt.resource_id = _get_resource_id(event_name, 'origin', 'MT')
    m_mt = Magnitude(mag=magmt, magnitude_type='Mw',
                     origin_id=o_mt.resource_id)
    m_mt.resource_id = _get_resource_id(event_name, 'magnitude', 'MT')
    foc_mec = FocalMechanism(triggering_origin_id=o_jma.resource_id)
    foc_mec.resource_id = _get_resource_id(event_name, "focal_mechanism")
    nod1 = NodalPlane(strike=strike[0], dip=dip[0], rake=rake[0])
    nod2 = NodalPlane(strike=strike[1], dip=dip[1], rake=rake[1])
    nod = NodalPlanes(nodal_plane_1=nod1, nodal_plane_2=nod2)
    foc_mec.nodal_planes = nod
    tensor = Tensor(m_rr=mxx, m_tt=myy, m_pp=mzz,
                    m_rt=mxy, m_rp=mxz, m_tp=myz)
    cm = Comment(text="Basis system: North,East,Down "
                      "(Jost and Herrmann 1989)")
    cm.resource_id = _get_resource_id(event_name, 'comment', 'mt')
    mt = MomentTensor(derived_origin_id=o_mt.resource_id,
                      moment_magnitude_id=m_mt.resource_id,
                      scalar_moment=mo, comments=[cm],
                      tensor=tensor, variance_reduction=var_red)
    mt.resource_id = _get_resource_id(event_name, 'moment_tensor')
    foc_mec.moment_tensor = mt
    e.origins = [o_jma, o_mt]
    e.magnitudes = [m_jma, m_mt]
    e.focal_mechanisms = [foc_mec]
    e.preferred_magnitude_id = m_mt.resource_id.id
    e.preferred_origin_id = o_mt.resource_id.id
    e.preferred_focal_mechanism_id = foc_mec.resource_id.id
    return e
def outputOBSPY(hp, event=None, only_fm_picks=False):
    """
    Make an Event which includes the current focal mechanism information
    from HASH

    Use the 'only_fm_picks' flag to only include the picks HASH used for the
    FocalMechanism. This flag will replace the 'picks' and 'arrivals' lists
    of existing events with new ones.

    Inputs
    -------
    hp : hashpy.HashPype instance
    event : obspy.core.event.Event
    only_fm_picks : bool of whether to overwrite the picks/arrivals lists

    Returns
    -------
    obspy.core.event.Event

    Event will be new if no event was input, FocalMech added to existing event
    """
    # Returns new (or updates existing) Event with HASH solution
    n = hp.npol
    if event is None:
        event = Event(focal_mechanisms=[], picks=[], origins=[])
        origin = Origin(arrivals=[])
        origin.time = UTCDateTime(hp.tstamp)
        origin.latitude = hp.qlat
        origin.longitude = hp.qlon
        origin.depth = hp.qdep
        origin.creation_info = CreationInfo(version=hp.icusp)
        origin.resource_id = ResourceIdentifier(
            'smi:hash/Origin/{0}'.format(hp.icusp))
        for _i in range(n):
            p = Pick()
            p.creation_info = CreationInfo(version=hp.arid[_i])
            p.resource_id = ResourceIdentifier(
                'smi:nsl/Pick/{0}'.format(p.creation_info.version))
            p.waveform_id = WaveformStreamID(network_code=hp.snet[_i],
                                             station_code=hp.sname[_i],
                                             channel_code=hp.scomp[_i])
            if hp.p_pol[_i] > 0:
                p.polarity = 'positive'
            else:
                p.polarity = 'negative'
            a = Arrival()
            a.creation_info = CreationInfo(version=hp.arid[_i])
            a.resource_id = ResourceIdentifier(
                'smi:nsl/Arrival/{0}'.format(p.creation_info.version))
            a.azimuth = hp.p_azi_mc[_i, 0]
            a.takeoff_angle = 180. - hp.p_the_mc[_i, 0]
            a.pick_id = p.resource_id
            origin.arrivals.append(a)
            event.picks.append(p)
        event.origins.append(origin)
        event.preferred_origin_id = origin.resource_id.resource_id
    else:
        # just update the changes
        origin = event.preferred_origin()
        picks = []
        arrivals = []
        for _i in range(n):
            ind = hp.p_index[_i]
            a = origin.arrivals[ind]
            p = a.pick_id.getReferredObject()
            a.takeoff_angle = hp.p_the_mc[_i, 0]
            picks.append(p)
            arrivals.append(a)
        if only_fm_picks:
            origin.arrivals = arrivals
            event.picks = picks
    # Use me double couple calculator and populate planes/axes etc
    x = hp._best_quality_index
    # Put all the mechanisms into the 'focal_mechanisms' list, mark "best"
    # as preferred
    for s in range(hp.nmult):
        dc = DoubleCouple([hp.str_avg[s], hp.dip_avg[s], hp.rak_avg[s]])
        ax = dc.axis
        focal_mech = FocalMechanism()
        focal_mech.creation_info = CreationInfo(creation_time=UTCDateTime(),
                                                author=hp.author)
        focal_mech.triggering_origin_id = origin.resource_id
        focal_mech.resource_id = ResourceIdentifier(
            'smi:hash/FocalMechanism/{0}/{1}'.format(hp.icusp, s + 1))
        focal_mech.method_id = ResourceIdentifier('HASH')
        focal_mech.nodal_planes = NodalPlanes()
        focal_mech.nodal_planes.nodal_plane_1 = NodalPlane(*dc.plane1)
        focal_mech.nodal_planes.nodal_plane_2 = NodalPlane(*dc.plane2)
        focal_mech.principal_axes = PrincipalAxes()
        focal_mech.principal_axes.t_axis = Axis(azimuth=ax['T']['azimuth'],
                                                plunge=ax['T']['dip'])
        focal_mech.principal_axes.p_axis = Axis(azimuth=ax['P']['azimuth'],
                                                plunge=ax['P']['dip'])
        focal_mech.station_polarity_count = n
        focal_mech.azimuthal_gap = hp.magap
        focal_mech.misfit = hp.mfrac[s]
        focal_mech.station_distribution_ratio = hp.stdr[s]
        focal_mech.comments.append(
            Comment(hp.qual[s],
                    resource_id=ResourceIdentifier(
                        focal_mech.resource_id.resource_id +
                        '/comment/quality')))
        # ----------------------------------------
        event.focal_mechanisms.append(focal_mech)
        if s == x:
            event.preferred_focal_mechanism_id = \
                focal_mech.resource_id.resource_id
    return event
def full_test_event():
    """
    Function to generate a basic, full test event
    """
    test_event = Event()
    test_event.origins.append(Origin(
        time=UTCDateTime("2012-03-26") + 1.2, latitude=45.0, longitude=25.0,
        depth=15000))
    test_event.event_descriptions.append(EventDescription())
    test_event.event_descriptions[0].text = 'LE'
    test_event.creation_info = CreationInfo(agency_id='TES')
    test_event.magnitudes.append(Magnitude(
        mag=0.1, magnitude_type='ML', creation_info=CreationInfo('TES'),
        origin_id=test_event.origins[0].resource_id))
    test_event.magnitudes.append(Magnitude(
        mag=0.5, magnitude_type='Mc', creation_info=CreationInfo('TES'),
        origin_id=test_event.origins[0].resource_id))
    test_event.magnitudes.append(Magnitude(
        mag=1.3, magnitude_type='Ms', creation_info=CreationInfo('TES'),
        origin_id=test_event.origins[0].resource_id))
    # Define the test pick
    _waveform_id_1 = WaveformStreamID(station_code='FOZ', channel_code='SHZ',
                                      network_code='NZ')
    _waveform_id_2 = WaveformStreamID(station_code='WTSZ', channel_code='BH1',
                                      network_code=' ')
    # Pick to associate with amplitude
    test_event.picks.append(Pick(
        waveform_id=_waveform_id_1, phase_hint='IAML',
        polarity='undecidable', time=UTCDateTime("2012-03-26") + 1.68,
        evaluation_mode="manual"))
    # Need a second pick for coda
    test_event.picks.append(Pick(
        waveform_id=_waveform_id_1, onset='impulsive', phase_hint='PN',
        polarity='positive', time=UTCDateTime("2012-03-26") + 1.68,
        evaluation_mode="manual"))
    # Unassociated pick
    test_event.picks.append(Pick(
        waveform_id=_waveform_id_2, onset='impulsive', phase_hint='SG',
        polarity='undecidable', time=UTCDateTime("2012-03-26") + 1.72,
        evaluation_mode="manual"))
    # Unassociated pick
    test_event.picks.append(Pick(
        waveform_id=_waveform_id_2, onset='impulsive', phase_hint='PN',
        polarity='undecidable', time=UTCDateTime("2012-03-26") + 1.62,
        evaluation_mode="automatic"))
    # Test a generic local magnitude amplitude pick
    test_event.amplitudes.append(Amplitude(
        generic_amplitude=2.0, period=0.4,
        pick_id=test_event.picks[0].resource_id,
        waveform_id=test_event.picks[0].waveform_id, unit='m',
        magnitude_hint='ML', category='point', type='AML'))
    # Test a coda magnitude pick
    test_event.amplitudes.append(Amplitude(
        generic_amplitude=10, pick_id=test_event.picks[1].resource_id,
        waveform_id=test_event.picks[1].waveform_id, type='END',
        category='duration', unit='s', magnitude_hint='Mc', snr=2.3))
    test_event.origins[0].arrivals.append(Arrival(
        time_weight=0, phase=test_event.picks[1].phase_hint,
        pick_id=test_event.picks[1].resource_id))
    test_event.origins[0].arrivals.append(Arrival(
        time_weight=2, phase=test_event.picks[2].phase_hint,
        pick_id=test_event.picks[2].resource_id, backazimuth_residual=5,
        time_residual=0.2, distance=15, azimuth=25))
    test_event.origins[0].arrivals.append(Arrival(
        time_weight=2, phase=test_event.picks[3].phase_hint,
        pick_id=test_event.picks[3].resource_id, backazimuth_residual=5,
        time_residual=0.2, distance=15, azimuth=25))
    # Add in error info (line E)
    test_event.origins[0].quality = OriginQuality(standard_error=0.01,
                                                  azimuthal_gap=36)
    # Origin uncertainty in Seisan is output as long-lat-depth, quakeML has
    # semi-major and semi-minor
    test_event.origins[0].origin_uncertainty = OriginUncertainty(
        confidence_ellipsoid=ConfidenceEllipsoid(
            semi_major_axis_length=3000, semi_minor_axis_length=1000,
            semi_intermediate_axis_length=2000, major_axis_plunge=20,
            major_axis_azimuth=100, major_axis_rotation=4))
    test_event.origins[0].time_errors = QuantityError(uncertainty=0.5)
    # Add in fault-plane solution info (line F) - Note have to check program
    # used to determine which fields are filled....
    test_event.focal_mechanisms.append(FocalMechanism(
        nodal_planes=NodalPlanes(nodal_plane_1=NodalPlane(
            strike=180, dip=20, rake=30, strike_errors=QuantityError(10),
            dip_errors=QuantityError(10), rake_errors=QuantityError(20))),
        method_id=ResourceIdentifier("smi:nc.anss.org/focalMechanism/FPFIT"),
        creation_info=CreationInfo(agency_id="NC"), misfit=0.5,
        station_distribution_ratio=0.8))
    # Need to test high-precision origin and that it is preferred origin.
    # Moment tensor includes another origin
    test_event.origins.append(Origin(
        time=UTCDateTime("2012-03-26") + 1.2, latitude=45.1, longitude=25.2,
        depth=14500))
    test_event.magnitudes.append(Magnitude(
        mag=0.1, magnitude_type='MW', creation_info=CreationInfo('TES'),
        origin_id=test_event.origins[-1].resource_id))
    # Moment tensors go with focal-mechanisms
    test_event.focal_mechanisms.append(FocalMechanism(
        moment_tensor=MomentTensor(
            derived_origin_id=test_event.origins[-1].resource_id,
            moment_magnitude_id=test_event.magnitudes[-1].resource_id,
            scalar_moment=100,
            tensor=Tensor(m_rr=100, m_tt=100, m_pp=10,
                          m_rt=1, m_rp=20, m_tp=15),
            method_id=ResourceIdentifier(
                'smi:nc.anss.org/momentTensor/BLAH'))))
    return test_event
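# Hedged usage sketch (assumption, for illustration only): in tests the event
# built by full_test_event() is usually wrapped in a Catalog so it can be
# round-tripped through a file format; the QuakeML path below is a
# placeholder.
def _example_full_test_event_roundtrip(path="/tmp/full_test_event.xml"):
    from obspy import Catalog, read_events
    cat = Catalog(events=[full_test_event()])
    cat.write(path, format="QUAKEML")
    # Reading the file back gives a catalog comparable to the original one.
    return read_events(path)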
def _internal_read_single_scardec(buf):
    """
    Reads a single SCARDEC file to a :class:`~obspy.core.event.Catalog`
    object.

    :param buf: File to read.
    :type buf: open file or file-like object
    """
    # The first line encodes the origin time and epicenter
    line = buf.readline()

    origin_time = line.strip().split()[:6]
    values = list(map(int, origin_time[:-1])) + \
        [float(origin_time[-1])]
    try:
        origin_time = UTCDateTime(*values)
    except (TypeError, ValueError):
        warnings.warn("Could not determine origin time from line: %s. Will "
                      "be set to zero." % line)
        origin_time = UTCDateTime(0)
    line = line.split()[6:]
    latitude, longitude = map(float, line[:2])

    # The second line encodes depth and the two focal mechanisms
    line = buf.readline()
    line = line.split()

    # First three values are depth, scalar moment (in Nm) and moment magnitude
    depth, scalar_moment, moment_mag = map(float, line[0:3])

    # depth is in km in SCARDEC files
    depth *= 1e3

    # Next six values are strike, dip, rake for both planes
    strike1, dip1, rake1 = map(float, line[3:6])
    strike2, dip2, rake2 = map(float, line[6:9])

    # The rest of the file is the moment rate function
    # In each line: time (sec), moment rate (Nm/sec)
    stf_time = []
    stf_mr = []
    for line in buf:
        stf_time.append(float(line.split()[0]))
        stf_mr.append(float(line.split()[1]))

    # Normalize the source time function
    stf_mr = np.array(stf_mr)
    stf_mr /= scalar_moment

    # Calculate the time step
    dt = np.mean(np.diff(stf_time))

    # Calculate the stf offset (time of first sample wrt to origin time)
    offset = stf_time[0]

    # event name is set to generic value for now
    event_name = 'SCARDEC_event'

    cmt_origin = Origin(
        resource_id=_get_resource_id(event_name, "origin", tag="cmt"),
        time=origin_time,
        longitude=longitude,
        latitude=latitude,
        depth=depth,
        origin_type="centroid",
        region=_fe.get_region(longitude=longitude, latitude=latitude))

    cmt_mag = Magnitude(
        resource_id=_get_resource_id(event_name, "magnitude", tag="mw"),
        mag=moment_mag,
        magnitude_type="mw",
        origin_id=cmt_origin.resource_id)

    nod1 = NodalPlane(strike=strike1, dip=dip1, rake=rake1)
    nod2 = NodalPlane(strike=strike2, dip=dip2, rake=rake2)
    nod = NodalPlanes(nodal_plane_1=nod1, nodal_plane_2=nod2)

    foc_mec = FocalMechanism(
        resource_id=_get_resource_id(event_name, "focal_mechanism"),
        nodal_planes=nod)

    dip1 *= np.pi / 180.
    rake1 *= np.pi / 180.
    strike1 *= np.pi / 180.

    mxx = -scalar_moment * (
        (np.sin(dip1) * np.cos(rake1) * np.sin(2 * strike1)) +
        (np.sin(2 * dip1) * np.sin(rake1) * np.sin(2 * strike1)))
    mxy = scalar_moment * (
        (np.sin(dip1) * np.cos(rake1) * np.cos(2 * strike1)) +
        (np.sin(2 * dip1) * np.sin(rake1) * np.sin(2 * strike1) * 0.5))
    myy = scalar_moment * (
        (np.sin(dip1) * np.cos(rake1) * np.sin(2 * strike1)) -
        (np.sin(2 * dip1) * np.sin(rake1) * np.cos(2 * strike1)))
    mxz = -scalar_moment * (
        (np.cos(dip1) * np.cos(rake1) * np.cos(strike1)) +
        (np.cos(2 * dip1) * np.sin(rake1) * np.sin(strike1)))
    myz = -scalar_moment * (
        (np.cos(dip1) * np.cos(rake1) * np.sin(strike1)) -
        (np.cos(2 * dip1) * np.sin(rake1) * np.cos(strike1)))
    mzz = scalar_moment * (np.sin(2 * dip1) * np.sin(rake1))

    tensor = Tensor(m_rr=mxx, m_tt=myy, m_pp=mzz,
                    m_rt=mxy, m_rp=mxz, m_tp=myz)

    cm = [Comment(text="Basis system: North,East,Down "
                       "(Jost and Herrmann 1989)")]
    cm[0].resource_id = _get_resource_id(event_name, 'comment', 'mt')
    cm.append(Comment(
        text="MT derived from focal mechanism, therefore constrained to "
             "pure double couple.",
        force_resource_id=False))

    # Write moment rate function
    extra = {'moment_rate': {'value': stf_mr,
                             'namespace': r"http://test.org/xmlns/0.1"},
             'dt': {'value': dt,
                    'namespace': r"http://test.org/xmlns/0.1"},
             'offset': {'value': offset,
                        'namespace': r"http://test.org/xmlns/0.1"}}

    # Source time function
    stf = SourceTimeFunction(type="unknown")
    stf.extra = extra

    mt = MomentTensor(
        resource_id=_get_resource_id(event_name, "moment_tensor"),
        derived_origin_id=cmt_origin.resource_id,
        moment_magnitude_id=cmt_mag.resource_id,
        scalar_moment=scalar_moment,
        tensor=tensor,
        source_time_function=stf,
        comments=cm)

    # Assemble everything.
    foc_mec.moment_tensor = mt

    ev = Event(resource_id=_get_resource_id(event_name, "event"),
               event_type="earthquake")
    ev.event_descriptions.append(EventDescription(text=event_name,
                                                  type="earthquake name"))
    ev.comments.append(Comment(text="Hypocenter catalog: SCARDEC",
                               force_resource_id=False))

    ev.origins.append(cmt_origin)
    ev.magnitudes.append(cmt_mag)
    ev.focal_mechanisms.append(foc_mec)

    # Set the preferred items.
    ev.preferred_origin_id = cmt_origin.resource_id.id
    ev.preferred_magnitude_id = cmt_mag.resource_id.id
    ev.preferred_focal_mechanism_id = foc_mec.resource_id.id

    ev.scope_resource_ids()

    return ev