def _parse_record_hy(self, line):
    """
    Parses the 'hypocenter' record HY.

    Slices fixed-width columns out of *line* (dates, coordinates, depth,
    quality measures, Flinn-Engdahl region number) and builds an
    :class:`Event` holding a single 'hypocenter' :class:`Origin` plus two
    event descriptions (region name and F-E region number).

    :param line: one fixed-width 'HY' record line.
    :returns: the newly created :class:`Event`.
    """
    # Fixed column offsets of the HY record layout.
    date = line[2:10]
    time = line[11:20]
    # unused: location_quality = line[20]
    latitude = self._float(line[21:27])
    lat_type = line[27]          # hemisphere letter, resolved to a sign below
    longitude = self._float(line[29:36])
    lon_type = line[36]
    depth = self._float(line[38:43])
    # unused: depth_quality = line[43]
    standard_dev = self._float(line[44:48])
    station_number = self._int(line[48:51])
    # unused: version_flag = line[51]
    fe_region_number = line[52:55]
    fe_region_name = self._decode_fe_region_number(fe_region_number)
    source_code = line[55:60].strip()
    event = Event()
    # FIXME: a smarter way to define evid?
    # Event id is simply the concatenated date+time strings.
    evid = date + time
    res_id = '/'.join((res_id_prefix, 'event', evid))
    event.resource_id = ResourceIdentifier(id=res_id)
    description = EventDescription(
        type='region name',
        text=fe_region_name)
    event.event_descriptions.append(description)
    description = EventDescription(
        type='Flinn-Engdahl region',
        text=fe_region_number)
    event.event_descriptions.append(description)
    origin = Origin()
    res_id = '/'.join((res_id_prefix, 'origin', evid))
    origin.resource_id = ResourceIdentifier(id=res_id)
    origin.creation_info = CreationInfo()
    # Fall back to USGS-NEIC when the record names no source agency.
    if source_code:
        origin.creation_info.agency_id = source_code
    else:
        origin.creation_info.agency_id = 'USGS-NEIC'
    res_id = '/'.join((res_id_prefix, 'earthmodel/ak135'))
    origin.earth_model_id = ResourceIdentifier(id=res_id)
    origin.time = UTCDateTime(date + time)
    # Apply hemisphere sign (S / W become negative).
    origin.latitude = latitude * self._coordinate_sign(lat_type)
    origin.longitude = longitude * self._coordinate_sign(lon_type)
    # Record depth is in km; QuakeML origin depth is in metres.
    origin.depth = depth * 1000
    origin.depth_type = 'from location'
    origin.quality = OriginQuality()
    origin.quality.associated_station_count = station_number
    origin.quality.standard_error = standard_dev
    # associated_phase_count can be incremented in records 'P ' and 'S '
    origin.quality.associated_phase_count = 0
    # depth_phase_count can be incremented in record 'S '
    origin.quality.depth_phase_count = 0
    origin.origin_type = 'hypocenter'
    origin.region = fe_region_name
    event.origins.append(origin)
    return event
def _read_event_header(self):
    """
    Read the next header line and append a new event to the catalog.

    The line carries the event id in columns 6-14 and the region name in
    columns 15-80; both are attached to the created :class:`Event`.
    """
    header = self._get_next_line()
    resource_id = self._construct_id(['event', header[6:14].strip()])
    description = EventDescription(text=header[15:80].strip(),
                                   type='region name')
    self.cat.append(
        Event(resource_id=resource_id, event_descriptions=[description]))
def read_regex(event_file, regex=regex_GEOFON, creation_info='GEOFON'):
    """
    Read events from event_file with the help of given regular expression.

    :param event_file: path of a plain-text bulletin file to parse.
    :param regex: verbose/multiline regular expression with named groups
        (``time``, ``latitude``, ``longitude``, ``depth``, ``magnitude``,
        optionally ``latitude_sign``, ``longitude_sign``, ``AM``,
        ``flinn``); defaults to the GEOFON pattern.
    :param creation_info: agency string stored on every created event.
    :returns: :class:`Catalog` of events sorted by origin time.
    """
    with open(event_file, 'r') as f:
        filedata = f.read()
    # Combine regex flags with bitwise OR, the documented way.
    event_matches = re.finditer(regex, filedata, re.VERBOSE | re.MULTILINE)
    list_ = [i.groupdict() for i in event_matches]
    events = []
    for event in list_:
        # Convert numbers to float and int types.
        # BUGFIX: dict.iteritems() existed only in Python 2 -- use items()
        # so this runs on Python 3.
        for key, item in event.items():
            if util.isint(item):
                event[key] = int(item)
            elif util.isfloat(item):
                event[key] = float(item)
            else:
                event[key] = item.strip()
        # Hemisphere letters encode the sign of the coordinates.
        if 'latitude_sign' in event and event['latitude_sign'] == 'S':
            event['latitude'] = -event['latitude']
        if 'longitude_sign' in event and event['longitude_sign'] == 'W':
            event['longitude'] = -event['longitude']
        # 'AM' group flags automatic ('A') vs manual processing.
        if 'AM' in event:
            ci = creation_info + (' automatic' if event['AM'] == 'A'
                                  else ' manual')
        else:
            ci = creation_info
        ev = Event(event_type='earthquake', creation_info=ci,
                   origins=[Origin(time=UTC(event['time']),
                                   latitude=event['latitude'],
                                   longitude=event['longitude'],
                                   depth=event['depth'])],
                   magnitudes=[Magnitude(mag=event['magnitude'],
                                         magnitude_type='M')],
                   event_descriptions=[EventDescription(
                       event['flinn'], 'flinn-engdahl region')]
                   if 'flinn' in event else None)
        events.append(ev)
    events.sort(key=lambda x: x.origins[0].time)
    return Catalog(events)
def test_issue_2339(self):
    """
    Make sure an empty EventDescription object does not prevent a catalog
    from being saved to disk and re-read, while still being equal.
    """
    # Build a one-event catalog carrying an empty event description.
    cat1 = Catalog(events=[read_events()[0]])
    cat1[0].event_descriptions.append(EventDescription())
    # Round-trip through QuakeML via an in-memory buffer.
    buf = io.BytesIO()
    cat1.write(buf, 'quakeml')
    buf.seek(0)
    cat2 = read_events(buf)
    # The empty description's text must survive the round trip ...
    self.assertEqual(cat1[0].event_descriptions[-1].text,
                     cat2[0].event_descriptions[-1].text)
    # ... and the catalogs as a whole must compare equal.
    self.assertEqual(cat1, cat2)
def write_events(db, start, end):
    """
    Build an event for the window [start, end) from picks in *db*.

    Creates an :class:`Event` with a placeholder origin at *start*
    (lat/lon/depth all 0) and one automatic pick per row returned by
    ``split_event``; the pick's phase hint is the row's phase prefixed
    with 'I'.

    :returns: tuple of (event, ARC) where ARC is passed through from
        ``split_event``.
    """
    picks_query, ARC = split_event(db, start, end)
    ev = Event()
    ev.event_descriptions.append(EventDescription())
    ev.origins.append(
        Origin(time=UTCDateTime(start), latitude=0, longitude=0, depth=0))
    for row in picks_query:
        stream_id = WaveformStreamID(station_code=row.station,
                                     channel_code='EHZ',
                                     network_code='HL')
        pick = Pick(waveform_id=stream_id,
                    phase_hint='I' + row.phase,
                    time=UTCDateTime(row.time),
                    evaluation_mode="automatic")
        ev.picks.append(pick)
    return ev, ARC
def full_test_event():
    """
    Function to generate a basic, full test event.

    Builds an :class:`Event` with one origin, three magnitudes (ML, Mc,
    Ms), five picks, three amplitudes and three arrivals, all
    cross-referenced by resource id / waveform id so round-trip tests can
    verify the links survive serialization.
    """
    test_event = Event()
    test_event.origins.append(Origin())
    test_event.origins[0].time = UTCDateTime("2012-03-26") + 1.2
    test_event.event_descriptions.append(EventDescription())
    test_event.event_descriptions[0].text = 'LE'
    test_event.origins[0].latitude = 45.0
    test_event.origins[0].longitude = 25.0
    test_event.origins[0].depth = 15000
    test_event.creation_info = CreationInfo(agency_id='TES')
    test_event.origins[0].quality = OriginQuality(standard_error=0.01)
    # Three magnitudes, all tied to the single origin above.
    test_event.magnitudes.append(Magnitude())
    test_event.magnitudes[0].mag = 0.1
    test_event.magnitudes[0].magnitude_type = 'ML'
    test_event.magnitudes[0].creation_info = CreationInfo('TES')
    test_event.magnitudes[0].origin_id = test_event.origins[0].resource_id
    test_event.magnitudes.append(Magnitude())
    test_event.magnitudes[1].mag = 0.5
    test_event.magnitudes[1].magnitude_type = 'Mc'
    test_event.magnitudes[1].creation_info = CreationInfo('TES')
    test_event.magnitudes[1].origin_id = test_event.origins[0].resource_id
    test_event.magnitudes.append(Magnitude())
    test_event.magnitudes[2].mag = 1.3
    test_event.magnitudes[2].magnitude_type = 'Ms'
    test_event.magnitudes[2].creation_info = CreationInfo('TES')
    test_event.magnitudes[2].origin_id = test_event.origins[0].resource_id
    # Define the test pick
    _waveform_id_1 = WaveformStreamID(station_code='FOZ', channel_code='SHZ',
                                      network_code='NZ')
    _waveform_id_2 = WaveformStreamID(station_code='WTSZ', channel_code='BH1',
                                      network_code=' ')
    # Pick to associate with amplitude - 0
    test_event.picks = [
        Pick(waveform_id=_waveform_id_1, phase_hint='IAML',
             polarity='undecidable', time=UTCDateTime("2012-03-26") + 1.68,
             evaluation_mode="manual"),
        Pick(waveform_id=_waveform_id_1, onset='impulsive', phase_hint='PN',
             polarity='positive', time=UTCDateTime("2012-03-26") + 1.68,
             evaluation_mode="manual"),
        Pick(waveform_id=_waveform_id_1, phase_hint='IAML',
             polarity='undecidable', time=UTCDateTime("2012-03-26") + 1.68,
             evaluation_mode="manual"),
        Pick(waveform_id=_waveform_id_2, onset='impulsive', phase_hint='SG',
             polarity='undecidable', time=UTCDateTime("2012-03-26") + 1.72,
             evaluation_mode="manual"),
        Pick(waveform_id=_waveform_id_2, onset='impulsive', phase_hint='PN',
             polarity='undecidable', time=UTCDateTime("2012-03-26") + 1.62,
             evaluation_mode="automatic")]
    # Test a generic local magnitude amplitude pick
    # Amplitudes reference picks 0-2 by resource id.
    test_event.amplitudes = [
        Amplitude(generic_amplitude=2.0, period=0.4,
                  pick_id=test_event.picks[0].resource_id,
                  waveform_id=test_event.picks[0].waveform_id, unit='m',
                  magnitude_hint='ML', category='point', type='AML'),
        Amplitude(generic_amplitude=10,
                  pick_id=test_event.picks[1].resource_id,
                  waveform_id=test_event.picks[1].waveform_id, type='END',
                  category='duration', unit='s', magnitude_hint='Mc',
                  snr=2.3),
        Amplitude(generic_amplitude=5.0, period=0.6,
                  pick_id=test_event.picks[2].resource_id,
                  waveform_id=test_event.picks[0].waveform_id, unit='m',
                  category='point', type='AML')]
    # Arrivals reference picks 1, 3 and 4.
    test_event.origins[0].arrivals = [
        Arrival(time_weight=0, phase=test_event.picks[1].phase_hint,
                pick_id=test_event.picks[1].resource_id),
        Arrival(time_weight=2, phase=test_event.picks[3].phase_hint,
                pick_id=test_event.picks[3].resource_id,
                backazimuth_residual=5, time_residual=0.2, distance=15,
                azimuth=25),
        Arrival(time_weight=2, phase=test_event.picks[4].phase_hint,
                pick_id=test_event.picks[4].resource_id,
                backazimuth_residual=5, time_residual=0.2, distance=15,
                azimuth=25)]
    return test_event
def _parse_region_name(self, line, event):
    """
    Attach the line's content to *event* as a 'region name' description.

    :param line: raw text line whose stripped content is the region name.
    :param event: :class:`Event` to append the description to.
    """
    description = EventDescription()
    description.text = line.strip()
    description.type = EventDescriptionType('region name')
    event.event_descriptions.append(description)
def full_test_event():
    """
    Function to generate a basic, full test event.

    Builds an :class:`Event` with two origins (the second high-precision
    one referenced by a moment tensor), four magnitudes, four picks, two
    amplitudes, three arrivals, origin uncertainty and two focal
    mechanisms -- exercising most QuakeML element types.
    """
    test_event = Event()
    test_event.origins.append(
        Origin(time=UTCDateTime("2012-03-26") + 1.2, latitude=45.0,
               longitude=25.0, depth=15000))
    test_event.event_descriptions.append(EventDescription())
    test_event.event_descriptions[0].text = 'LE'
    test_event.creation_info = CreationInfo(agency_id='TES')
    # Three magnitudes (ML, Mc, Ms) tied to the first origin.
    test_event.magnitudes.append(
        Magnitude(mag=0.1, magnitude_type='ML',
                  creation_info=CreationInfo('TES'),
                  origin_id=test_event.origins[0].resource_id))
    test_event.magnitudes.append(
        Magnitude(mag=0.5, magnitude_type='Mc',
                  creation_info=CreationInfo('TES'),
                  origin_id=test_event.origins[0].resource_id))
    test_event.magnitudes.append(
        Magnitude(mag=1.3, magnitude_type='Ms',
                  creation_info=CreationInfo('TES'),
                  origin_id=test_event.origins[0].resource_id))
    # Define the test pick
    _waveform_id_1 = WaveformStreamID(station_code='FOZ', channel_code='SHZ',
                                      network_code='NZ')
    _waveform_id_2 = WaveformStreamID(station_code='WTSZ', channel_code='BH1',
                                      network_code=' ')
    # Pick to associate with amplitude
    test_event.picks.append(
        Pick(waveform_id=_waveform_id_1, phase_hint='IAML',
             polarity='undecidable', time=UTCDateTime("2012-03-26") + 1.68,
             evaluation_mode="manual"))
    # Need a second pick for coda
    test_event.picks.append(
        Pick(waveform_id=_waveform_id_1, onset='impulsive', phase_hint='PN',
             polarity='positive', time=UTCDateTime("2012-03-26") + 1.68,
             evaluation_mode="manual"))
    # Unassociated pick
    test_event.picks.append(
        Pick(waveform_id=_waveform_id_2, onset='impulsive', phase_hint='SG',
             polarity='undecidable', time=UTCDateTime("2012-03-26") + 1.72,
             evaluation_mode="manual"))
    # Unassociated pick
    test_event.picks.append(
        Pick(waveform_id=_waveform_id_2, onset='impulsive', phase_hint='PN',
             polarity='undecidable', time=UTCDateTime("2012-03-26") + 1.62,
             evaluation_mode="automatic"))
    # Test a generic local magnitude amplitude pick
    test_event.amplitudes.append(
        Amplitude(generic_amplitude=2.0, period=0.4,
                  pick_id=test_event.picks[0].resource_id,
                  waveform_id=test_event.picks[0].waveform_id, unit='m',
                  magnitude_hint='ML', category='point', type='AML'))
    # Test a coda magnitude pick
    test_event.amplitudes.append(
        Amplitude(generic_amplitude=10,
                  pick_id=test_event.picks[1].resource_id,
                  waveform_id=test_event.picks[1].waveform_id, type='END',
                  category='duration', unit='s', magnitude_hint='Mc',
                  snr=2.3))
    # Arrivals reference picks 1-3.
    test_event.origins[0].arrivals.append(
        Arrival(time_weight=0, phase=test_event.picks[1].phase_hint,
                pick_id=test_event.picks[1].resource_id))
    test_event.origins[0].arrivals.append(
        Arrival(time_weight=2, phase=test_event.picks[2].phase_hint,
                pick_id=test_event.picks[2].resource_id,
                backazimuth_residual=5, time_residual=0.2, distance=15,
                azimuth=25))
    test_event.origins[0].arrivals.append(
        Arrival(time_weight=2, phase=test_event.picks[3].phase_hint,
                pick_id=test_event.picks[3].resource_id,
                backazimuth_residual=5, time_residual=0.2, distance=15,
                azimuth=25))
    # Add in error info (line E)
    test_event.origins[0].quality = OriginQuality(standard_error=0.01,
                                                  azimuthal_gap=36)
    # Origin uncertainty in Seisan is output as long-lat-depth, quakeML has
    # semi-major and semi-minor
    test_event.origins[0].origin_uncertainty = OriginUncertainty(
        confidence_ellipsoid=ConfidenceEllipsoid(
            semi_major_axis_length=3000, semi_minor_axis_length=1000,
            semi_intermediate_axis_length=2000, major_axis_plunge=20,
            major_axis_azimuth=100, major_axis_rotation=4))
    test_event.origins[0].time_errors = QuantityError(uncertainty=0.5)
    # Add in fault-plane solution info (line F) - Note have to check program
    # used to determine which fields are filled....
    test_event.focal_mechanisms.append(
        FocalMechanism(nodal_planes=NodalPlanes(
            nodal_plane_1=NodalPlane(strike=180, dip=20, rake=30,
                                     strike_errors=QuantityError(10),
                                     dip_errors=QuantityError(10),
                                     rake_errors=QuantityError(20))),
            method_id=ResourceIdentifier(
                "smi:nc.anss.org/focalMechanism/FPFIT"),
            creation_info=CreationInfo(agency_id="NC"), misfit=0.5,
            station_distribution_ratio=0.8))
    # Need to test high-precision origin and that it is preferred origin.
    # Moment tensor includes another origin
    test_event.origins.append(
        Origin(time=UTCDateTime("2012-03-26") + 1.2, latitude=45.1,
               longitude=25.2, depth=14500))
    test_event.magnitudes.append(
        Magnitude(mag=0.1, magnitude_type='MW',
                  creation_info=CreationInfo('TES'),
                  origin_id=test_event.origins[-1].resource_id))
    # Moment tensors go with focal-mechanisms
    test_event.focal_mechanisms.append(
        FocalMechanism(moment_tensor=MomentTensor(
            derived_origin_id=test_event.origins[-1].resource_id,
            moment_magnitude_id=test_event.magnitudes[-1].resource_id,
            scalar_moment=100, tensor=Tensor(
                m_rr=100, m_tt=100, m_pp=10, m_rt=1, m_rp=20, m_tp=15),
            method_id=ResourceIdentifier(
                'smi:nc.anss.org/momentTensor/BLAH'))))
    return test_event
def _read_ndk(filename, *args, **kwargs):  # @UnusedVariable
    """
    Reads an NDK file to a :class:`~obspy.core.event.Catalog` object.

    Each NDK event occupies exactly 5 lines; events that cannot be parsed
    are skipped with an :class:`ObsPyNDKWarning`. Every parsed event gets
    a reference (hypocenter) origin and a centroid origin, a moment
    magnitude plus the reported mb/MS magnitudes, and a focal mechanism
    carrying the moment tensor.

    :param filename: File or file-like object in text mode.
    :raises ObsPyNDKException: if no event at all could be parsed.
    """
    # Read the whole file at once. While an iterator would be more efficient
    # the largest NDK file out in the wild is 13.7 MB so it does not matter
    # much.
    if not hasattr(filename, "read"):
        # Check if it exists, otherwise assume its a string.
        try:
            with open(filename, "rt") as fh:
                data = fh.read()
        except Exception:
            try:
                data = filename.decode()
            except Exception:
                data = str(filename)
            data = data.strip()
    else:
        data = filename.read()
        if hasattr(data, "decode"):
            data = data.decode()

    # Create iterator that yields lines.
    def lines_iter():
        prev_line = -1
        while True:
            next_line = data.find("\n", prev_line + 1)
            if next_line < 0:
                break
            yield data[prev_line + 1:next_line]
            prev_line = next_line
        # Emit a possible trailing line without a final newline.
        if len(data) > prev_line + 1:
            yield data[prev_line + 1:]

    # Use one Flinn Engdahl object for all region determinations.
    fe = FlinnEngdahl()
    cat = Catalog(resource_id=_get_resource_id("catalog", str(uuid.uuid4())))

    # Loop over 5 lines at once.
    # zip_longest over 5 references to the SAME iterator groups the stream
    # into consecutive 5-line chunks, padding the last chunk with None.
    for _i, lines in enumerate(zip_longest(*[lines_iter()] * 5)):
        if None in lines:
            msg = "Skipped last %i lines. Not a multiple of 5 lines." % (
                lines.count(None))
            warnings.warn(msg, ObsPyNDKWarning)
            continue

        # Parse the lines to a human readable dictionary.
        try:
            record = _read_lines(*lines)
        except (ValueError, ObsPyNDKException):
            exc = traceback.format_exc()
            msg = ("Could not parse event %i (faulty file?). Will be "
                   "skipped. Lines of the event:\n"
                   "\t%s\n"
                   "%s") % (_i + 1, "\n\t".join(lines), exc)
            warnings.warn(msg, ObsPyNDKWarning)
            continue

        # Use one creation info for essentially every item.
        creation_info = CreationInfo(agency_id="GCMT",
                                     version=record["version_code"])

        # Use the ObsPy Flinn Engdahl region determiner as the region in the
        # NDK files is oftentimes trimmed.
        region = fe.get_region(record["centroid_longitude"],
                               record["centroid_latitude"])

        # Create an event object.
        event = Event(force_resource_id=False,
                      event_type="earthquake",
                      event_type_certainty="known",
                      event_descriptions=[
                          EventDescription(text=region,
                                           type="Flinn-Engdahl region"),
                          EventDescription(text=record["cmt_event_name"],
                                           type="earthquake name")
                      ])

        # Assemble the time for the reference origin.
        try:
            time = _parse_date_time(record["date"], record["time"])
        except ObsPyNDKException:
            msg = ("Invalid time in event %i. '%s' and '%s' cannot be "
                   "assembled to a valid time. Event will be skipped.") % \
                  (_i + 1, record["date"], record["time"])
            warnings.warn(msg, ObsPyNDKWarning)
            continue

        # Create two origins, one with the reference latitude/longitude and
        # one with the centroidal values.
        ref_origin = Origin(
            force_resource_id=False,
            time=time,
            longitude=record["hypo_lng"],
            latitude=record["hypo_lat"],
            # Convert to m.
            depth=record["hypo_depth_in_km"] * 1000.0,
            origin_type="hypocenter",
            comments=[Comment(text="Hypocenter catalog: %s" %
                              record["hypocenter_reference_catalog"],
                              force_resource_id=False)])
        ref_origin.comments[0].resource_id = _get_resource_id(
            record["cmt_event_name"], "comment", tag="ref_origin")
        ref_origin.resource_id = _get_resource_id(record["cmt_event_name"],
                                                  "origin", tag="reforigin")

        cmt_origin = Origin(
            force_resource_id=False,
            longitude=record["centroid_longitude"],
            longitude_errors={
                "uncertainty": record["centroid_longitude_error"]},
            latitude=record["centroid_latitude"],
            latitude_errors={
                "uncertainty": record["centroid_latitude_error"]},
            # Convert to m.
            depth=record["centroid_depth_in_km"] * 1000.0,
            depth_errors={
                "uncertainty": record["centroid_depth_in_km_error"] * 1000},
            # Centroid time is stored as an offset from the reference time.
            time=ref_origin["time"] + record["centroid_time"],
            time_errors={"uncertainty": record["centroid_time_error"]},
            depth_type=record["type_of_centroid_depth"],
            origin_type="centroid",
            time_fixed=False,
            epicenter_fixed=False,
            creation_info=creation_info.copy())
        cmt_origin.resource_id = _get_resource_id(record["cmt_event_name"],
                                                  "origin", tag="cmtorigin")
        event.origins = [ref_origin, cmt_origin]
        event.preferred_origin_id = cmt_origin.resource_id.id

        # Create the magnitude object.
        mag = Magnitude(force_resource_id=False,
                        mag=round(record["Mw"], 2),
                        magnitude_type="Mwc",
                        origin_id=cmt_origin.resource_id,
                        creation_info=creation_info.copy())
        mag.resource_id = _get_resource_id(record["cmt_event_name"],
                                           "magnitude", tag="moment_mag")
        event.magnitudes = [mag]
        event.preferred_magnitude_id = mag.resource_id.id

        # Add the reported mb, MS magnitudes as additional magnitude objects.
        event.magnitudes.append(Magnitude(
            force_resource_id=False,
            mag=record["mb"],
            magnitude_type="mb",
            comments=[Comment(
                force_resource_id=False,
                text="Reported magnitude in NDK file. Most likely 'mb'.")]))
        event.magnitudes[-1].comments[-1].resource_id = _get_resource_id(
            record["cmt_event_name"], "comment", tag="mb_magnitude")
        event.magnitudes[-1].resource_id = _get_resource_id(
            record["cmt_event_name"], "magnitude", tag="mb")

        event.magnitudes.append(Magnitude(
            force_resource_id=False,
            mag=record["MS"],
            magnitude_type="MS",
            comments=[Comment(
                force_resource_id=False,
                text="Reported magnitude in NDK file. Most likely 'MS'.")]))
        event.magnitudes[-1].comments[-1].resource_id = _get_resource_id(
            record["cmt_event_name"], "comment", tag="MS_magnitude")
        event.magnitudes[-1].resource_id = _get_resource_id(
            record["cmt_event_name"], "magnitude", tag="MS")

        # Take care of the moment tensor.
        tensor = Tensor(m_rr=record["m_rr"],
                        m_rr_errors={"uncertainty": record["m_rr_error"]},
                        m_pp=record["m_pp"],
                        m_pp_errors={"uncertainty": record["m_pp_error"]},
                        m_tt=record["m_tt"],
                        m_tt_errors={"uncertainty": record["m_tt_error"]},
                        m_rt=record["m_rt"],
                        m_rt_errors={"uncertainty": record["m_rt_error"]},
                        m_rp=record["m_rp"],
                        m_rp_errors={"uncertainty": record["m_rp_error"]},
                        m_tp=record["m_tp"],
                        m_tp_errors={"uncertainty": record["m_tp_error"]},
                        creation_info=creation_info.copy())
        mt = MomentTensor(
            force_resource_id=False,
            scalar_moment=record["scalar_moment"],
            tensor=tensor,
            data_used=[DataUsed(**i) for i in record["data_used"]],
            inversion_type=record["source_type"],
            source_time_function=SourceTimeFunction(
                type=record["moment_rate_type"],
                duration=record["moment_rate_duration"]),
            derived_origin_id=cmt_origin.resource_id,
            creation_info=creation_info.copy())
        mt.resource_id = _get_resource_id(record["cmt_event_name"],
                                          "momenttensor")
        axis = [Axis(**i) for i in record["principal_axis"]]
        focmec = FocalMechanism(
            force_resource_id=False,
            moment_tensor=mt,
            principal_axes=PrincipalAxes(
                # The ordering is the same as for the IRIS SPUD service and
                # from a website of the Saint Louis University Earthquake
                # center so it should be correct.
                t_axis=axis[0],
                p_axis=axis[2],
                n_axis=axis[1]),
            nodal_planes=NodalPlanes(
                nodal_plane_1=NodalPlane(**record["nodal_plane_1"]),
                nodal_plane_2=NodalPlane(**record["nodal_plane_2"])),
            comments=[
                Comment(force_resource_id=False,
                        text="CMT Analysis Type: %s" %
                             record["cmt_type"].capitalize()),
                Comment(force_resource_id=False,
                        text="CMT Timestamp: %s" %
                             record["cmt_timestamp"])],
            creation_info=creation_info.copy())
        focmec.comments[0].resource_id = _get_resource_id(
            record["cmt_event_name"], "comment", tag="cmt_type")
        focmec.comments[1].resource_id = _get_resource_id(
            record["cmt_event_name"], "comment", tag="cmt_timestamp")
        focmec.resource_id = _get_resource_id(record["cmt_event_name"],
                                              "focal_mechanism")
        event.focal_mechanisms = [focmec]
        event.preferred_focal_mechanism_id = focmec.resource_id.id

        # Set at end to avoid duplicate resource id warning.
        event.resource_id = _get_resource_id(record["cmt_event_name"],
                                             "event")

        cat.append(event)

    if len(cat) == 0:
        msg = "No valid events found in NDK file."
        raise ObsPyNDKException(msg)

    return cat
def makeCatalog(StazList, mt, scale, args):
    """
    Assemble a one-event :class:`Catalog` from a moment-tensor solution.

    Formats the numeric solution vector *mt* into strings, derives the
    stress regime from the principal axes, and packs origin, magnitude,
    focal mechanism (with moment tensor) and a description of the stress
    regime into a single :class:`Event`.

    :param StazList: stations used in the inversion (only its length is
        used, for the data-used counts).
    :param mt: flat sequence of solution values; indices used here:
        0 scalar moment, 1 Mw, 2 %DC, 3 %CLVD, 4-9 tensor elements,
        22-30 T/N/P axis value/plunge/azimuth, 31-36 the two nodal
        planes' strike/dip/rake, 37 variance reduction, 38 quality --
        presumably the pytdmt solution layout; confirm against caller.
    :param scale: unused here -- TODO confirm it is needed by callers.
    :param args: parsed CLI options providing ``epi``, ``model``, ``ori``
        and the frequency band via ``getFreq``.
    """
    epi = args.epi.rsplit()
    model = args.model.split(os.sep)
    NrSt = len(StazList)
    NrCo = NrSt * 3  # three components per station
    (Fmin, Fmax) = getFreq(args)
    # Period band is the inverse of the frequency band.
    Tmin = ('%.0f' % (1 / Fmax))
    Tmax = ('%.0f' % (1 / Fmin))
    mo = ('%.3e' % (mt[0]))
    mw = ('%.2f' % (mt[1]))
    # Percentages are converted to fractions.
    Pdc = ('%.2f' % (float(mt[2]) / 100))
    Pclvd = ('%.2f' % (float(mt[3]) / 100))
    # T, N and P principal axes: value, plunge, azimuth.
    Tval = ('%10.3e' % (mt[22]))
    Tplg = ('%4.1f' % (mt[23]))
    Tazi = ('%5.1f' % (mt[24]))
    Nval = ('%10.3e' % (mt[25]))
    Nplg = ('%4.1f' % (mt[26]))
    Nazi = ('%5.1f' % (mt[27]))
    Pval = ('%10.3e' % (mt[28]))
    Pplg = ('%4.1f' % (mt[29]))
    Pazi = ('%5.1f' % (mt[30]))
    # Nodal planes: strike, dip, rake.
    STp1 = ('%5.1f' % (mt[31]))
    DPp1 = ('%4.1f' % (mt[32]))
    RAp1 = ('%6.1f' % (mt[33]))
    STp2 = ('%5.1f' % (mt[34]))
    DPp2 = ('%4.1f' % (mt[35]))
    RAp2 = ('%6.1f' % (mt[36]))
    var = ('%.2f' % (mt[37]))
    # NOTE(review): qua is formatted but never used below -- confirm intent.
    qua = ('%d' % (mt[38]))
    mij = [mt[4], mt[5], mt[6], mt[7], mt[8], mt[9]]
    mm0 = str('%10.3e' % (mij[0]))
    mm1 = str('%10.3e' % (mij[1]))
    mm2 = str('%10.3e' % (mij[2]))
    mm3 = str('%10.3e' % (mij[3]))
    mm4 = str('%10.3e' % (mij[4]))
    mm5 = str('%10.3e' % (mij[5]))
    # Aki konvention
    # Map the mij ordering onto the r/t/f (up-south-east) components.
    Mrr = mm5
    Mtt = mm0
    Mff = mm1
    Mrt = mm3
    Mrf = mm4
    Mtf = mm2
    # stress regime
    A1 = PrincipalAxis(val=mt[22], dip=mt[23], strike=mt[24])
    A2 = PrincipalAxis(val=mt[25], dip=mt[26], strike=mt[27])
    A3 = PrincipalAxis(val=mt[28], dip=mt[29], strike=mt[30])
    (regime, sh) = stressRegime(A1, A2, A3)
    sh = ('%5.1f' % (sh))
    #### Build classes #################################
    #
    # Resource Id is the event origin time for definition
    res_id = ResourceIdentifier(args.ori)
    nowUTC = datetime.datetime.utcnow()
    info = CreationInfo(author="pytdmt", version="2.4",
                        creation_time=nowUTC)
    evOrigin = Origin(resource_id=res_id, time=args.ori, latitude=epi[0],
                      longitude=epi[1], depth=epi[2],
                      earth_model_id=model[-1], creation_info=info)
    # Magnitudes
    magnitude = Magnitude(mag=mw, magnitude_type="Mw")
    # Nodal Planes
    np1 = NodalPlane(strike=STp1, dip=DPp1, rake=RAp1)
    np2 = NodalPlane(strike=STp2, dip=DPp2, rake=RAp2)
    planes = NodalPlanes(nodal_plane_1=np1, nodal_plane_2=np2)
    # Principal axes
    Taxe = Axis(azimuth=Tazi, plunge=Tplg, length=Tval)
    Naxe = Axis(azimuth=Nazi, plunge=Nplg, length=Nval)
    Paxe = Axis(azimuth=Pazi, plunge=Pplg, length=Pval)
    axes = PrincipalAxes(t_axis=Taxe, p_axis=Paxe, n_axis=Naxe)
    # MT elements
    MT = Tensor(m_rr=Mrr, m_tt=Mtt, m_pp=Mff, m_rt=Mrt, m_rp=Mrf,
                m_tp=Mtf)
    # Stress regime
    regStr = 'Stress regime: ' + regime + ' - SH = ' + sh
    strDes = EventDescription(regStr)
    # MT dataset
    dataInfo = DataUsed(wave_type="combined", station_count=NrSt,
                        component_count=NrCo, shortest_period=Tmin,
                        longest_period=Tmax)
    source = MomentTensor(data_used=dataInfo, scalar_moment=mo,
                          tensor=MT, variance_reduction=var,
                          double_couple=Pdc, clvd=Pclvd, iso=0)
    focMec = FocalMechanism(moment_tensor=source, nodal_planes=planes,
                            principal_axes=axes, azimuthal_gap=-1)
    # Initialize Event Catalog
    mtSolution = Event(creation_info=info)
    mtSolution.origins.append(evOrigin)
    mtSolution.magnitudes.append(magnitude)
    mtSolution.focal_mechanisms.append(focMec)
    mtSolution.event_descriptions.append(strDes)
    cat = Catalog()
    cat.append(mtSolution)
    return cat
def full_test_event():
    """
    Function to generate a basic, full test event.

    Builds an :class:`Event` with one origin, three magnitudes (ML, Mc,
    Ms), four picks, two amplitudes and two arrivals, cross-referenced by
    resource id / waveform id for round-trip testing.
    """
    from obspy.core.event import Pick, WaveformStreamID, Arrival, Amplitude
    from obspy.core.event import Event, Origin, Magnitude
    from obspy.core.event import EventDescription, CreationInfo
    from obspy import UTCDateTime
    test_event = Event()
    test_event.origins.append(Origin())
    test_event.origins[0].time = UTCDateTime("2012-03-26") + 1
    test_event.event_descriptions.append(EventDescription())
    test_event.event_descriptions[0].text = 'LE'
    test_event.origins[0].latitude = 45.0
    test_event.origins[0].longitude = 25.0
    test_event.origins[0].depth = 15000
    test_event.creation_info = CreationInfo(agency_id='TES')
    # NOTE(review): 'Time_Residual_RMS' is not a standard QuakeML
    # QuantityError field name -- confirm downstream readers expect it.
    test_event.origins[0].time_errors['Time_Residual_RMS'] = 0.01
    # Three magnitudes, all tied to the single origin above.
    test_event.magnitudes.append(Magnitude())
    test_event.magnitudes[0].mag = 0.1
    test_event.magnitudes[0].magnitude_type = 'ML'
    test_event.magnitudes[0].creation_info = CreationInfo('TES')
    test_event.magnitudes[0].origin_id = test_event.origins[0].resource_id
    test_event.magnitudes.append(Magnitude())
    test_event.magnitudes[1].mag = 0.5
    test_event.magnitudes[1].magnitude_type = 'Mc'
    test_event.magnitudes[1].creation_info = CreationInfo('TES')
    test_event.magnitudes[1].origin_id = test_event.origins[0].resource_id
    test_event.magnitudes.append(Magnitude())
    test_event.magnitudes[2].mag = 1.3
    test_event.magnitudes[2].magnitude_type = 'Ms'
    test_event.magnitudes[2].creation_info = CreationInfo('TES')
    test_event.magnitudes[2].origin_id = test_event.origins[0].resource_id
    # Define the test pick
    _waveform_id_1 = WaveformStreamID(station_code='FOZ', channel_code='SHZ',
                                      network_code='NZ')
    _waveform_id_2 = WaveformStreamID(station_code='WTSZ', channel_code='BH1',
                                      network_code=' ')
    # Pick to associate with amplitude
    test_event.picks.append(
        Pick(waveform_id=_waveform_id_1, phase_hint='IAML',
             polarity='undecidable', time=UTCDateTime("2012-03-26") + 1.68))
    # Need a second pick for coda
    test_event.picks.append(
        Pick(waveform_id=_waveform_id_1, onset='impulsive', phase_hint='PN',
             polarity='positive', time=UTCDateTime("2012-03-26") + 1.68))
    # Unassociated pick
    test_event.picks.append(
        Pick(waveform_id=_waveform_id_2, onset='impulsive', phase_hint='SG',
             polarity='undecidable', time=UTCDateTime("2012-03-26") + 1.72))
    # Unassociated pick
    test_event.picks.append(
        Pick(waveform_id=_waveform_id_2, onset='impulsive', phase_hint='PN',
             polarity='undecidable', time=UTCDateTime("2012-03-26") + 1.62))
    # Test a generic local magnitude amplitude pick
    test_event.amplitudes.append(
        Amplitude(generic_amplitude=2.0, period=0.4,
                  pick_id=test_event.picks[0].resource_id,
                  waveform_id=test_event.picks[0].waveform_id, unit='m',
                  magnitude_hint='Ml'))
    # Test a coda magnitude pick
    test_event.amplitudes.append(
        Amplitude(generic_amplitude=10,
                  pick_id=test_event.picks[1].resource_id,
                  waveform_id=test_event.picks[1].waveform_id, type='END',
                  category='duration', unit='s', magnitude_hint='Mc',
                  snr=2.3))
    # Arrivals reference picks 2 and 3.
    test_event.origins[0].arrivals.append(
        Arrival(time_weight=2, phase=test_event.picks[2].phase_hint,
                pick_id=test_event.picks[2].resource_id,
                backazimuth_residual=5, time_residual=0.2, distance=15,
                azimuth=25))
    test_event.origins[0].arrivals.append(
        Arrival(time_weight=2, phase=test_event.picks[3].phase_hint,
                pick_id=test_event.picks[3].resource_id,
                backazimuth_residual=5, time_residual=0.2, distance=15,
                azimuth=25))
    return test_event
def _readheader(f):
    """
    Internal header reader.

    Parses the fixed-width Nordic type-1 header line into a new Event:
    origin time, optional hypocenter, agency, station count, RMS and up
    to three magnitudes, then selects preferred origin and magnitude.

    :type f: file
    :param f: File open in read-mode.
    :raises NordicParsingError: if no header line is found or the date
        cannot be parsed.
    :returns: :class:`~obspy.core.event.event.Event`
    """
    f.seek(0)
    # Base populate to allow for empty parts of file
    new_event = Event()
    topline = _get_headline(f=f)
    if not topline:
        raise NordicParsingError('No header found, or incorrect '
                                 'formatting: corrupt s-file')
    try:
        sfile_seconds = int(topline[16:18])
        # Seisan allows '60' seconds; normalize to 0 and carry a minute.
        if sfile_seconds == 60:
            sfile_seconds = 0
            add_seconds = 60
        else:
            add_seconds = 0
        new_event.origins.append(Origin())
        new_event.origins[0].time = UTCDateTime(int(topline[1:5]),
                                                int(topline[6:8]),
                                                int(topline[8:10]),
                                                int(topline[11:13]),
                                                int(topline[13:15]),
                                                sfile_seconds,
                                                int(topline[19:20]) *
                                                100000)\
            + add_seconds
    except Exception:
        # BUGFIX: the exception was previously instantiated but never
        # raised, silently swallowing date-parse failures.
        raise NordicParsingError("Couldn't read a date from sfile")
    # new_event.loc_mod_ind=topline[20]
    new_event.event_descriptions.append(EventDescription())
    new_event.event_descriptions[0].text = topline[21:23]
    # new_event.ev_id=topline[22]
    try:
        new_event.origins[0].latitude = float(topline[23:30])
        new_event.origins[0].longitude = float(topline[31:38])
        new_event.origins[0].depth = float(topline[39:43]) * 1000
    except ValueError:
        # The origin 'requires' a lat & long
        new_event.origins[0].latitude = None
        new_event.origins[0].longitude = None
        new_event.origins[0].depth = None
    # new_event.depth_ind = topline[44]
    # new_event.loc_ind = topline[45]
    new_event.creation_info = CreationInfo(agency_id=topline[45:48].strip())
    ksta = Comment(text='Number of stations=' + topline[49:51].strip())
    new_event.origins[0].comments.append(ksta)
    if _float_conv(topline[51:55]) is not None:
        new_event.origins[0].quality = OriginQuality(
            standard_error=_float_conv(topline[51:55]))
    # Read in magnitudes if they are there.
    # Each magnitude occupies an 8-column slot ending at these indices.
    for index in [59, 67, 75]:
        if not topline[index].isspace():
            new_event.magnitudes.append(Magnitude())
            new_event.magnitudes[-1].mag = _float_conv(
                topline[index - 3:index])
            new_event.magnitudes[-1].magnitude_type = \
                _nortoevmag(topline[index])
            new_event.magnitudes[-1].creation_info = \
                CreationInfo(agency_id=topline[index + 1:index + 4].strip())
            new_event.magnitudes[-1].origin_id = new_event.origins[0].\
                resource_id
    # Set the useful things like preferred magnitude and preferred origin
    new_event.preferred_origin_id = new_event.origins[0].resource_id
    try:
        # Select moment first, then local, then others in this order.
        mag_filter = [
            'MW', 'Mw', 'ML', 'Ml', 'MB', 'Mb', 'MS', 'Ms', 'MC', 'Mc'
        ]
        _magnitudes = [(m.magnitude_type, m.resource_id)
                       for m in new_event.magnitudes]
        preferred_magnitude = sorted(
            _magnitudes, key=lambda x: mag_filter.index(x[0]))[0]
        new_event.preferred_magnitude_id = preferred_magnitude[1]
    except (ValueError, IndexError):
        # If there is a magnitude not specified in filter
        try:
            new_event.preferred_magnitude_id = new_event.magnitudes[0].\
                resource_id
        except IndexError:
            pass
    return new_event
def _internal_read_single_scardec(buf):
    """
    Reads a single SCARDEC file to a :class:`~obspy.core.event.Catalog`
    object.

    :param buf: File to read.
    :type buf: open file or file-like object

    :returns: :class:`~obspy.core.event.event.Event` with a centroid
        origin, moment magnitude, focal mechanism and moment tensor
        (including the normalized source time function as extra data).
    """
    # The first line encodes the origin time and epicenter
    line = buf.readline()

    origin_time = line.strip().split()[:6]
    values = list(map(int, origin_time[:-1])) + \
        [float(origin_time[-1])]
    try:
        origin_time = UTCDateTime(*values)
    except (TypeError, ValueError):
        warnings.warn("Could not determine origin time from line: %s. Will "
                      "be set to zero." % line)
        origin_time = UTCDateTime(0)
    line = line.split()[6:]
    latitude, longitude = map(float, line[:2])

    # The second line encodes depth and the two focal mechanisms
    line = buf.readline()
    line = line.split()

    # First three values are depth, scalar moment (in Nm) and moment magnitude
    depth, scalar_moment, moment_mag = map(float, line[0:3])

    # depth is in km in SCARDEC files
    depth *= 1e3

    # Next six values are strike, dip, rake for both planes
    strike1, dip1, rake1 = map(float, line[3:6])
    strike2, dip2, rake2 = map(float, line[6:9])

    # The rest of the file is the moment rate function
    # In each line: time (sec), moment rate (Nm/sec)
    stf_time = []
    stf_mr = []
    for line in buf:
        stf_time.append(float(line.split()[0]))
        stf_mr.append(float(line.split()[1]))

    # Normalize the source time function
    stf_mr = np.array(stf_mr)
    stf_mr /= scalar_moment

    # Calculate the time step
    dt = np.mean(np.diff(stf_time))

    # Calculate the stf offset (time of first sample wrt to origin time)
    offset = stf_time[0]

    # event name is set to generic value for now
    event_name = 'SCARDEC_event'

    cmt_origin = Origin(
        resource_id=_get_resource_id(event_name, "origin", tag="cmt"),
        time=origin_time,
        longitude=longitude,
        latitude=latitude,
        depth=depth,
        origin_type="centroid",
        region=_fe.get_region(longitude=longitude,
                              latitude=latitude))

    cmt_mag = Magnitude(
        resource_id=_get_resource_id(event_name, "magnitude", tag="mw"),
        mag=moment_mag,
        magnitude_type="mw",
        origin_id=cmt_origin.resource_id)

    nod1 = NodalPlane(strike=strike1, dip=dip1, rake=rake1)
    nod2 = NodalPlane(strike=strike2, dip=dip2, rake=rake2)
    nod = NodalPlanes(nodal_plane_1=nod1, nodal_plane_2=nod2)

    foc_mec = FocalMechanism(
        resource_id=_get_resource_id(event_name, "focal_mechanism"),
        nodal_planes=nod)

    dip1 *= np.pi / 180.
    rake1 *= np.pi / 180.
    strike1 *= np.pi / 180.

    # Moment tensor components in the North-East-Down (x, y, z) basis,
    # built from the first nodal plane (Aki & Richards 2002, eq. 4.91).
    # BUG FIX: the second terms of mxx and myy previously used
    # sin(2*strike) and cos(2*strike) where the textbook formulas require
    # sin(strike)**2 and cos(strike)**2.
    mxx = -scalar_moment * (
        (np.sin(dip1) * np.cos(rake1) * np.sin(2 * strike1)) +
        (np.sin(2 * dip1) * np.sin(rake1) * np.sin(strike1) ** 2))
    mxy = scalar_moment * (
        (np.sin(dip1) * np.cos(rake1) * np.cos(2 * strike1)) +
        (np.sin(2 * dip1) * np.sin(rake1) * np.sin(2 * strike1) * 0.5))
    myy = scalar_moment * (
        (np.sin(dip1) * np.cos(rake1) * np.sin(2 * strike1)) -
        (np.sin(2 * dip1) * np.sin(rake1) * np.cos(strike1) ** 2))
    mxz = -scalar_moment * (
        (np.cos(dip1) * np.cos(rake1) * np.cos(strike1)) +
        (np.cos(2 * dip1) * np.sin(rake1) * np.sin(strike1)))
    myz = -scalar_moment * (
        (np.cos(dip1) * np.cos(rake1) * np.sin(strike1)) -
        (np.cos(2 * dip1) * np.sin(rake1) * np.cos(strike1)))
    mzz = scalar_moment * (np.sin(2 * dip1) * np.sin(rake1))

    # BUG FIX: map North-East-Down components to the spherical (r, t, p)
    # basis used by QuakeML following Jost & Herrmann (1989), which the
    # comment below cites: Mrr = Mzz, Mtt = Mxx, Mpp = Myy, Mrt = Mxz,
    # Mrp = -Myz, Mtp = -Mxy.  The previous assignment (m_rr=mxx, ...)
    # did not correspond to any standard basis rotation.
    tensor = Tensor(m_rr=mzz, m_tt=mxx, m_pp=myy,
                    m_rt=mxz, m_rp=-myz, m_tp=-mxy)

    cm = [Comment(text="Basis system: North,East,Down \
(Jost and Herrmann 1989)")]
    cm[0].resource_id = _get_resource_id(event_name, 'comment', 'mt')
    cm.append(Comment(text="MT derived from focal mechanism, therefore \
constrained to pure double couple.", force_resource_id=False))

    # Write moment rate function
    extra = {'moment_rate': {'value': stf_mr,
                             'namespace': r"http://test.org/xmlns/0.1"},
             'dt': {'value': dt,
                    'namespace': r"http://test.org/xmlns/0.1"},
             'offset': {'value': offset,
                        'namespace': r"http://test.org/xmlns/0.1"}}

    # Source time function
    stf = SourceTimeFunction(type="unknown")
    stf.extra = extra

    mt = MomentTensor(
        resource_id=_get_resource_id(event_name, "moment_tensor"),
        derived_origin_id=cmt_origin.resource_id,
        moment_magnitude_id=cmt_mag.resource_id,
        scalar_moment=scalar_moment,
        tensor=tensor,
        source_time_function=stf,
        comments=cm)

    # Assemble everything.
    foc_mec.moment_tensor = mt

    ev = Event(resource_id=_get_resource_id(event_name, "event"),
               event_type="earthquake")
    ev.event_descriptions.append(
        EventDescription(text=event_name, type="earthquake name"))
    ev.comments.append(
        Comment(text="Hypocenter catalog: SCARDEC", force_resource_id=False))

    ev.origins.append(cmt_origin)
    ev.magnitudes.append(cmt_mag)
    ev.focal_mechanisms.append(foc_mec)

    # Set the preferred items.
    ev.preferred_origin_id = cmt_origin.resource_id.id
    ev.preferred_magnitude_id = cmt_mag.resource_id.id
    ev.preferred_focal_mechanism_id = foc_mec.resource_id.id

    ev.scope_resource_ids()

    return ev
def test_read_write():
    """
    Function to test the read and write capabilities of Sfile_util.

    Builds a fully-populated test Event, round-trips it through QuakeML,
    then through the Nordic s-file writer/reader, and finally through the
    PICK/EVENTINFO wrapper conversion, asserting field equality at each
    stage.  Returns True on success; any mismatch raises AssertionError.
    """
    import os
    from obspy.core.event import Pick, WaveformStreamID, Arrival, Amplitude
    from obspy.core.event import Catalog, Event, Origin, Magnitude
    from obspy.core.event import EventDescription, CreationInfo
    import obspy
    # read_events was renamed from readEvents in obspy 1.0
    if int(obspy.__version__.split('.')[0]) >= 1:
        from obspy.core.event import read_events
    else:
        from obspy.core.event import readEvents as read_events
    from obspy import UTCDateTime
    # Set-up a test event
    test_event = Event()
    test_event.origins.append(Origin())
    test_event.origins[0].time = UTCDateTime("2012-03-26") + 1
    test_event.event_descriptions.append(EventDescription())
    test_event.event_descriptions[0].text = 'LE'
    test_event.origins[0].latitude = 45.0
    test_event.origins[0].longitude = 25.0
    test_event.origins[0].depth = 15.0
    test_event.creation_info = CreationInfo(agency_id='TES')
    test_event.origins[0].time_errors['Time_Residual_RMS'] = 0.01
    # Three magnitudes of different types, all tied to the single origin
    test_event.magnitudes.append(Magnitude())
    test_event.magnitudes[0].mag = 0.1
    test_event.magnitudes[0].magnitude_type = 'ML'
    test_event.magnitudes[0].creation_info = CreationInfo('TES')
    test_event.magnitudes[0].origin_id = test_event.origins[0].resource_id
    test_event.magnitudes.append(Magnitude())
    test_event.magnitudes[1].mag = 0.5
    test_event.magnitudes[1].magnitude_type = 'Mc'
    test_event.magnitudes[1].creation_info = CreationInfo('TES')
    test_event.magnitudes[1].origin_id = test_event.origins[0].resource_id
    test_event.magnitudes.append(Magnitude())
    test_event.magnitudes[2].mag = 1.3
    test_event.magnitudes[2].magnitude_type = 'Ms'
    test_event.magnitudes[2].creation_info = CreationInfo('TES')
    test_event.magnitudes[2].origin_id = test_event.origins[0].resource_id
    # Define the test pick
    _waveform_id = WaveformStreamID(station_code='FOZ', channel_code='SHZ',
                                    network_code='NZ')
    test_event.picks.append(
        Pick(waveform_id=_waveform_id, onset='impulsive', phase_hint='PN',
             polarity='positive', time=UTCDateTime("2012-03-26") + 1.68,
             horizontal_slowness=12, backazimuth=20))
    test_event.amplitudes.append(
        Amplitude(generic_amplitude=2.0, period=0.4,
                  pick_id=test_event.picks[0].resource_id,
                  waveform_id=test_event.picks[0].waveform_id, unit='m'))
    test_event.origins[0].arrivals.append(
        Arrival(time_weight=2, phase=test_event.picks[0].phase_hint,
                pick_id=test_event.picks[0].resource_id,
                backazimuth_residual=5, time_residual=0.2, distance=15,
                azimuth=25))
    # Add the event to a catalogue which can be used for QuakeML testing
    test_cat = Catalog()
    test_cat += test_event
    # Write the catalog
    test_cat.write("Test_catalog.xml", format='QUAKEML')
    # Read and check
    read_cat = read_events("Test_catalog.xml")
    os.remove("Test_catalog.xml")
    assert read_cat[0].resource_id == test_cat[0].resource_id
    assert read_cat[0].picks == test_cat[0].picks
    assert read_cat[0].origins[0].resource_id ==\
        test_cat[0].origins[0].resource_id
    assert read_cat[0].origins[0].time == test_cat[0].origins[0].time
    # Note that time_residuel_RMS is not a quakeML format
    assert read_cat[0].origins[0].longitude == test_cat[0].origins[0].longitude
    assert read_cat[0].origins[0].latitude == test_cat[0].origins[0].latitude
    assert read_cat[0].origins[0].depth == test_cat[0].origins[0].depth
    assert read_cat[0].magnitudes == test_cat[0].magnitudes
    assert read_cat[0].event_descriptions == test_cat[0].event_descriptions
    assert read_cat[0].amplitudes[0].resource_id ==\
        test_cat[0].amplitudes[0].resource_id
    assert read_cat[0].amplitudes[0].period == test_cat[0].amplitudes[0].period
    assert read_cat[0].amplitudes[0].unit == test_cat[0].amplitudes[0].unit
    assert read_cat[0].amplitudes[0].generic_amplitude ==\
        test_cat[0].amplitudes[0].generic_amplitude
    assert read_cat[0].amplitudes[0].pick_id ==\
        test_cat[0].amplitudes[0].pick_id
    assert read_cat[0].amplitudes[0].waveform_id ==\
        test_cat[0].amplitudes[0].waveform_id
    # Check the read-write s-file functionality
    sfile = eventtoSfile(test_cat[0], userID='TEST', evtype='L', outdir='.',
                         wavefiles='test', explosion=True, overwrite=True)
    del read_cat
    assert readwavename(sfile) == ['test']
    read_cat = Catalog()
    read_cat += readpicks(sfile)
    os.remove(sfile)
    # Nordic round-trip is lossy: resource ids are regenerated and only the
    # last character of the channel code is preserved, hence the weaker
    # comparisons below.
    assert read_cat[0].picks[0].time == test_cat[0].picks[0].time
    assert read_cat[0].picks[0].backazimuth == test_cat[0].picks[0].backazimuth
    assert read_cat[0].picks[0].onset == test_cat[0].picks[0].onset
    assert read_cat[0].picks[0].phase_hint == test_cat[0].picks[0].phase_hint
    assert read_cat[0].picks[0].polarity == test_cat[0].picks[0].polarity
    assert read_cat[0].picks[0].waveform_id.station_code ==\
        test_cat[0].picks[0].waveform_id.station_code
    assert read_cat[0].picks[0].waveform_id.channel_code[-1] ==\
        test_cat[0].picks[0].waveform_id.channel_code[-1]
    # assert read_cat[0].origins[0].resource_id ==\
    #     test_cat[0].origins[0].resource_id
    assert read_cat[0].origins[0].time == test_cat[0].origins[0].time
    # Note that time_residuel_RMS is not a quakeML format
    assert read_cat[0].origins[0].longitude == test_cat[0].origins[0].longitude
    assert read_cat[0].origins[0].latitude == test_cat[0].origins[0].latitude
    assert read_cat[0].origins[0].depth == test_cat[0].origins[0].depth
    assert read_cat[0].magnitudes[0].mag == test_cat[0].magnitudes[0].mag
    assert read_cat[0].magnitudes[1].mag == test_cat[0].magnitudes[1].mag
    assert read_cat[0].magnitudes[2].mag == test_cat[0].magnitudes[2].mag
    assert read_cat[0].magnitudes[0].creation_info ==\
        test_cat[0].magnitudes[0].creation_info
    assert read_cat[0].magnitudes[1].creation_info ==\
        test_cat[0].magnitudes[1].creation_info
    assert read_cat[0].magnitudes[2].creation_info ==\
        test_cat[0].magnitudes[2].creation_info
    assert read_cat[0].magnitudes[0].magnitude_type ==\
        test_cat[0].magnitudes[0].magnitude_type
    assert read_cat[0].magnitudes[1].magnitude_type ==\
        test_cat[0].magnitudes[1].magnitude_type
    assert read_cat[0].magnitudes[2].magnitude_type ==\
        test_cat[0].magnitudes[2].magnitude_type
    assert read_cat[0].event_descriptions == test_cat[0].event_descriptions
    # assert read_cat[0].amplitudes[0].resource_id ==\
    #     test_cat[0].amplitudes[0].resource_id
    assert read_cat[0].amplitudes[0].period == test_cat[0].amplitudes[0].period
    assert read_cat[0].amplitudes[0].snr == test_cat[0].amplitudes[0].snr
    del read_cat
    # assert read_cat[0].amplitudes[0].pick_id ==\
    #     test_cat[0].amplitudes[0].pick_id
    # assert read_cat[0].amplitudes[0].waveform_id ==\
    #     test_cat[0].amplitudes[0].waveform_id
    # Test the wrappers for PICK and EVENTINFO classes
    picks, evinfo = eventtopick(test_cat)
    # Test the conversion back
    conv_cat = Catalog()
    conv_cat.append(picktoevent(evinfo, picks))
    assert conv_cat[0].picks[0].time == test_cat[0].picks[0].time
    assert conv_cat[0].picks[0].backazimuth == test_cat[0].picks[0].backazimuth
    assert conv_cat[0].picks[0].onset == test_cat[0].picks[0].onset
    assert conv_cat[0].picks[0].phase_hint == test_cat[0].picks[0].phase_hint
    assert conv_cat[0].picks[0].polarity == test_cat[0].picks[0].polarity
    assert conv_cat[0].picks[0].waveform_id.station_code ==\
        test_cat[0].picks[0].waveform_id.station_code
    assert conv_cat[0].picks[0].waveform_id.channel_code[-1] ==\
        test_cat[0].picks[0].waveform_id.channel_code[-1]
    # assert read_cat[0].origins[0].resource_id ==\
    #     test_cat[0].origins[0].resource_id
    assert conv_cat[0].origins[0].time == test_cat[0].origins[0].time
    # Note that time_residuel_RMS is not a quakeML format
    assert conv_cat[0].origins[0].longitude == test_cat[0].origins[0].longitude
    assert conv_cat[0].origins[0].latitude == test_cat[0].origins[0].latitude
    assert conv_cat[0].origins[0].depth == test_cat[0].origins[0].depth
    assert conv_cat[0].magnitudes[0].mag == test_cat[0].magnitudes[0].mag
    assert conv_cat[0].magnitudes[1].mag == test_cat[0].magnitudes[1].mag
    assert conv_cat[0].magnitudes[2].mag == test_cat[0].magnitudes[2].mag
    assert conv_cat[0].magnitudes[0].creation_info ==\
        test_cat[0].magnitudes[0].creation_info
    assert conv_cat[0].magnitudes[1].creation_info ==\
        test_cat[0].magnitudes[1].creation_info
    assert conv_cat[0].magnitudes[2].creation_info ==\
        test_cat[0].magnitudes[2].creation_info
    assert conv_cat[0].magnitudes[0].magnitude_type ==\
        test_cat[0].magnitudes[0].magnitude_type
    assert conv_cat[0].magnitudes[1].magnitude_type ==\
        test_cat[0].magnitudes[1].magnitude_type
    assert conv_cat[0].magnitudes[2].magnitude_type ==\
        test_cat[0].magnitudes[2].magnitude_type
    assert conv_cat[0].event_descriptions == test_cat[0].event_descriptions
    # assert read_cat[0].amplitudes[0].resource_id ==\
    #     test_cat[0].amplitudes[0].resource_id
    assert conv_cat[0].amplitudes[0].period == test_cat[0].amplitudes[0].period
    assert conv_cat[0].amplitudes[0].snr == test_cat[0].amplitudes[0].snr
    return True
def _internal_read_single_cmtsolution(buf):
    """
    Reads a single CMTSOLUTION file to a
    :class:`~obspy.core.event.Catalog` object.

    Parses the preliminary (PDE-style) first line, the event name, and the
    13 "key: value" lines of the centroid solution, and returns an Event
    with both the preliminary and the centroid origin, preliminary Mb/MS
    magnitudes, the derived Mw, and the full moment tensor.

    :param buf: File to read.
    :type buf: open file or file-like object
    """
    # The first line encodes the preliminary epicenter.
    line = buf.readline()

    # First five characters name the source catalog (e.g. 'PDE').
    hypocenter_catalog = line[:5].strip().decode()

    origin_time = line[5:].strip().split()[:6]
    # Year..minute are integers; seconds are fractional.
    values = list(map(int, origin_time[:-1])) + \
        [float(origin_time[-1])]
    try:
        origin_time = UTCDateTime(*values)
    except (TypeError, ValueError):
        warnings.warn("Could not determine origin time from line: %s. Will "
                      "be set to zero." % line)
        origin_time = UTCDateTime(0)

    # Fixed-width: fields after column 28 are lat, lon, depth, Mb, MS.
    line = line[28:].split()
    latitude, longitude, depth, body_wave_mag, surface_wave_mag = \
        map(float, line[:5])

    # The rest encodes the centroid solution.
    event_name = buf.readline().strip().split()[-1].decode()

    preliminary_origin = Origin(
        resource_id=_get_resource_id(event_name, "origin", tag="prelim"),
        time=origin_time, longitude=longitude, latitude=latitude,
        # Depth is in meters.
        depth=depth * 1000.0,
        origin_type="hypocenter",
        region=_fe.get_region(longitude=longitude, latitude=latitude),
        evaluation_status="preliminary")

    preliminary_bw_magnitude = Magnitude(
        resource_id=_get_resource_id(event_name, "magnitude",
                                     tag="prelim_bw"),
        mag=body_wave_mag, magnitude_type="Mb",
        evaluation_status="preliminary",
        origin_id=preliminary_origin.resource_id)

    preliminary_sw_magnitude = Magnitude(
        resource_id=_get_resource_id(event_name, "magnitude",
                                     tag="prelim_sw"),
        mag=surface_wave_mag, magnitude_type="MS",
        evaluation_status="preliminary",
        origin_id=preliminary_origin.resource_id)

    # NOTE: the comprehension consumes one file line per key, so this list
    # order must match the line order of the CMTSOLUTION format exactly.
    values = ["time_shift", "half_duration", "latitude", "longitude",
              "depth", "m_rr", "m_tt", "m_pp", "m_rt", "m_rp", "m_tp"]
    cmt_values = {_i: float(buf.readline().strip().split()[-1])
                  for _i in values}

    # Moment magnitude calculation in dyne * cm.
    m_0 = 1.0 / math.sqrt(2.0) * math.sqrt(
        cmt_values["m_rr"]**2 + cmt_values["m_tt"]**2 +
        cmt_values["m_pp"]**2 + 2.0 * cmt_values["m_rt"]**2 +
        2.0 * cmt_values["m_rp"]**2 + 2.0 * cmt_values["m_tp"]**2)
    m_w = 2.0 / 3.0 * (math.log10(m_0) - 16.1)

    # Convert to meters.
    cmt_values["depth"] *= 1000.0
    # Convert to Newton meter.
    values = ["m_rr", "m_tt", "m_pp", "m_rt", "m_rp", "m_tp"]
    for value in values:
        cmt_values[value] /= 1E7

    cmt_origin = Origin(
        resource_id=_get_resource_id(event_name, "origin", tag="cmt"),
        time=origin_time + cmt_values["time_shift"],
        longitude=cmt_values["longitude"],
        latitude=cmt_values["latitude"],
        depth=cmt_values["depth"],
        origin_type="centroid",
        # Could rarely be different than the epicentral region.
        region=_fe.get_region(longitude=cmt_values["longitude"],
                              latitude=cmt_values["latitude"])
        # No evaluation status as it could be any of several and the file
        # format does not provide that information.
    )

    cmt_mag = Magnitude(
        resource_id=_get_resource_id(event_name, "magnitude", tag="mw"),
        # Round to 2 digits.
        mag=round(m_w, 2),
        magnitude_type="mw",
        origin_id=cmt_origin.resource_id)

    foc_mec = FocalMechanism(
        resource_id=_get_resource_id(event_name, "focal_mechanism"),
        # The preliminary origin most likely triggered the focal mechanism
        # determination.
        triggering_origin_id=preliminary_origin.resource_id)

    tensor = Tensor(m_rr=cmt_values["m_rr"], m_pp=cmt_values["m_pp"],
                    m_tt=cmt_values["m_tt"], m_rt=cmt_values["m_rt"],
                    m_rp=cmt_values["m_rp"], m_tp=cmt_values["m_tp"])

    # Source time function is a triangle, according to the SPECFEM manual.
    stf = SourceTimeFunction(
        type="triangle",
        # The duration is twice the half duration.
        duration=2.0 * cmt_values["half_duration"])

    mt = MomentTensor(
        resource_id=_get_resource_id(event_name, "moment_tensor"),
        derived_origin_id=cmt_origin.resource_id,
        moment_magnitude_id=cmt_mag.resource_id,
        # Convert to Nm.
        scalar_moment=m_0 / 1E7,
        tensor=tensor,
        source_time_function=stf)

    # Assemble everything.
    foc_mec.moment_tensor = mt

    ev = Event(resource_id=_get_resource_id(event_name, "event"),
               event_type="earthquake")
    ev.event_descriptions.append(
        EventDescription(text=event_name, type="earthquake name"))
    ev.comments.append(
        Comment(text="Hypocenter catalog: %s" % hypocenter_catalog,
                force_resource_id=False))

    ev.origins.append(cmt_origin)
    ev.origins.append(preliminary_origin)
    ev.magnitudes.append(cmt_mag)
    ev.magnitudes.append(preliminary_bw_magnitude)
    ev.magnitudes.append(preliminary_sw_magnitude)
    ev.focal_mechanisms.append(foc_mec)

    # Set the preferred items.
    ev.preferred_origin_id = cmt_origin.resource_id.id
    ev.preferred_magnitude_id = cmt_mag.resource_id.id
    ev.preferred_focal_mechanism_id = foc_mec.resource_id.id

    ev.scope_resource_ids()

    return ev
def sdxtoquakeml(sdx_dir, out_xml,
                 time_uncertainties=[0.1, 0.2, 0.5, 0.8, 1.5],
                 catalog_description="", catalog_version="",
                 agency_id="", author="", vel_mod_id=""):
    """
    Convert SDX to QuakeML format using ObsPy inventory structure.
    SDX filename prefix is stored under event description.
    Input parameters:
        - sdx_dir: directory containing sdx files (required)
        - out_xml: Filename of quakeML file (required)
        - time_uncertainties: List containing time uncertainities in seconds
          for mapping from weights 0-4, respectively (optional)
        - catalog_description (optional)
        - catalog_version (optional)
        - agency_id (optional)
        - author (optional)
        - vel_mod_id (optional)
    Output:
        - xml catalog in QuakeML format.
    """
    # NOTE: time_uncertainties is a mutable default argument; it is only
    # read (indexed by weight), never mutated, so sharing across calls is
    # harmless.  Kept for interface compatibility.
    # Prepare catalog
    cat = Catalog(description=catalog_description,
                  creation_info=CreationInfo(author=author,
                                             agency_id=agency_id,
                                             version=catalog_version))

    # Read in sdx files in directory, recursively
    files = glob.glob("{:}/**/*.sdx".format(sdx_dir), recursive=True)
    if len(files) == 0:
        print("No SDX files found in path. Exiting")
    for sdx_file_path in files:
        print("Working on ", sdx_file_path.split('/')[-1])

        # Set-up event
        evt_id = (sdx_file_path.split('/')[-1])[:-4]
        event = Event(event_type="earthquake",
                      creation_info=CreationInfo(author=author,
                                                 agency_id=agency_id),
                      event_descriptions=[EventDescription(text=evt_id)])

        # BUG FIX: all three passes over the file now use context managers.
        # Previously the pick-section handle was never closed when an event
        # was skipped via 'continue' (no picks found), leaking a file
        # descriptor per skipped event, and no handle was closed on error.

        # Get station details, append to arrays
        with open(sdx_file_path, "r") as sdx_file:
            stations = []
            for line in sdx_file:
                if line.rstrip() == "station":
                    # Next five lines: name, latitude, longitude, elevation
                    sdxstation = list(islice(sdx_file, 5))
                    stations.append([sdxstation[1].split()[0],
                                     float(sdxstation[2].split()[0]),
                                     float(sdxstation[3].split()[0]),
                                     float(sdxstation[4].split()[0])])

        # Find origin details, append to origin object
        with open(sdx_file_path, "r") as sdx_file:
            found_origin = False
            for line in sdx_file:
                if line.rstrip() == "origin":
                    found_origin = True
                    sdxorigin = list(islice(sdx_file, 17))
                    orig_time = ("{:}T{:}".format(
                        sdxorigin[1][0:10].replace(".", "-"),
                        sdxorigin[1][11:23]))
                    evt_lat = float(sdxorigin[2].split()[0])
                    evt_lon = float(sdxorigin[3].split()[0])
                    evt_depth = float(sdxorigin[4].split()[0])
                    creation_time = UTCDateTime("{:}T{:}".format(
                        sdxorigin[16].split()[6][0:10].replace(".", "-"),
                        sdxorigin[16].split()[6][11:23]))
                    num_arrivals = int(sdxorigin[12].split()[0])
                    num_arrivals_p = (int(sdxorigin[12].split()[0]) -
                                      int(sdxorigin[12].split()[1]))
                    min_dist = float(sdxorigin[12].split()[9])
                    max_dist = float(sdxorigin[12].split()[10])
                    med_dist = float(sdxorigin[12].split()[11])
                    max_az_gap = float(sdxorigin[12].split()[6])

                    origin = Origin(time=UTCDateTime(orig_time),
                                    longitude=evt_lon,
                                    latitude=evt_lat,
                                    # NOTE(review): sign flip suggests SDX
                                    # stores elevation-positive values and
                                    # QuakeML wants depth in metres
                                    # (positive down) — TODO confirm.
                                    depth=evt_depth * -1000,
                                    earth_model_id=vel_mod_id,
                                    origin_type="hypocenter",
                                    evaluation_mode="manual",
                                    evaluation_status="confirmed",
                                    method_id=ResourceIdentifier(
                                        id="SDX_hypo71"),
                                    creation_info=CreationInfo(
                                        creation_time=creation_time,
                                        author=author,
                                        agency_id=agency_id),
                                    quality=OriginQuality(
                                        associated_phase_count=num_arrivals,
                                        used_phase_count=num_arrivals,
                                        associated_station_count=(
                                            num_arrivals_p),
                                        used_station_count=num_arrivals_p,
                                        azimuthal_gap=max_az_gap,
                                        minimum_distance=min_dist,
                                        maximum_distance=max_dist,
                                        median_distance=med_dist))
                    event.origins.append(origin)

        # Skip event if no computed origin
        if found_origin is False:
            print("No origin found ... skipping event")
            continue

        # Get pick details, append to pick and arrival objects
        with open(sdx_file_path, "r") as sdx_file:
            found_pick = False
            for line in sdx_file:
                if line.rstrip() == "pick":
                    found_pick = True
                    sdxpick = list(islice(sdx_file, 15))
                    pick_time = UTCDateTime("{:}T{:}".format(
                        sdxpick[1][0:10].replace(".", "-"),
                        sdxpick[1][11:23]))
                    network = sdxpick[2].split()[0]
                    station = sdxpick[2].split()[1]
                    location = sdxpick[2].split()[2]
                    if "NOT_SET" in location:
                        location = ""
                    channel = sdxpick[2].split()[3]
                    onset = sdxpick[8].split()[0]
                    # NOTE(review): if the onset/polarity codes are ever
                    # outside 0-2, pickonset/pol stay unbound (NameError)
                    # or keep the previous pick's value — TODO confirm the
                    # SDX format guarantees 0-2 here.
                    if onset == "0":
                        pickonset = "emergent"
                    elif onset == "1":
                        pickonset = "impulsive"
                    elif onset == "2":
                        pickonset = "questionable"
                    phase = sdxpick[9].split()[0]
                    polarity = sdxpick[10].split()[0]
                    if polarity == "0":
                        pol = "positive"
                    elif polarity == "1":
                        pol = "negative"
                    elif polarity == "2":
                        pol = "undecidable"
                    weight = int(sdxpick[11].split()[0])
                    creation_time = UTCDateTime("{:}T{:}".format(
                        sdxpick[14].split()[6][0:10].replace(".", "-"),
                        sdxpick[14].split()[6][11:23]))
                    pick = Pick(time=pick_time,
                                waveform_id=WaveformStreamID(
                                    network_code=network,
                                    station_code=station,
                                    location_code=location,
                                    channel_code=channel),
                                time_errors=time_uncertainties[weight],
                                evaluation_mode="manual",
                                evaluation_status="confirmed",
                                onset=pickonset,
                                phase_hint=phase,
                                polarity=pol,
                                method_id=ResourceIdentifier(id="SDX"),
                                creation_info=CreationInfo(
                                    creation_time=creation_time))
                    event.picks.append(pick)

                    # Compute azimuth, distance, append to arrival object
                    for i in range(0, len(stations)):
                        if stations[i][0] == station:
                            azimuth = (gps2dist_azimuth(evt_lat, evt_lon,
                                                        stations[i][1],
                                                        stations[i][2])[1])
                            dist_deg = locations2degrees(evt_lat, evt_lon,
                                                         stations[i][1],
                                                         stations[i][2])
                            arrival = Arrival(phase=phase,
                                              pick_id=pick.resource_id,
                                              azimuth=azimuth,
                                              distance=dist_deg,
                                              time_weight=1.00)
                            event.origins[0].arrivals.append(arrival)

        # Skip event if no picks
        if found_pick is False:
            print("No picks found ... skipping event")
            continue

        # Set preferred origin and append event to catalogue
        event.preferred_origin_id = event.origins[0].resource_id
        cat.events.append(event)

    cat.write(out_xml, format="QUAKEML")
def readheader(sfile):
    """
    Read header information from a seisan nordic format S-file.

    Returns an obspy.core.event.Event: note this changed for version \
    0.1.0 from the inbuilt class types.

    :type sfile: str
    :param sfile: Path to the s-file

    :returns: :class: obspy.core.event.Event

    >>> event = readheader('eqcorrscan/tests/test_data/REA/TEST_/' +
    ...                    '01-0411-15L.S201309')
    >>> print(event.origins[0].time)
    2013-09-01T04:11:15.700000Z
    """
    import warnings
    from obspy.core.event import Event, Origin, Magnitude, Comment
    from obspy.core.event import EventDescription, CreationInfo
    # Base populate to allow for empty parts of file
    new_event = Event()
    # BUG FIX: file is now opened with a context manager so the handle is
    # closed even when an IOError is raised part-way through parsing.
    with open(sfile, 'r') as f:
        topline = f.readline()
        if not len(topline.rstrip()) == 80:
            raise IOError('s-file has a corrupt header, not 80 char long')
        f.seek(0)
        # Find the first type-1 (or blank-typed) line; a type-7 line first
        # means there is no header.
        for line in f:
            if line[79] in [' ', '1']:
                topline = line
                break
            if line[79] == '7':
                raise IOError('No header found, corrupt s-file?')
        try:
            sfile_seconds = int(topline[16:18])
            # Nordic allows 60 seconds; roll over into the next minute.
            if sfile_seconds == 60:
                sfile_seconds = 0
                add_seconds = 60
            else:
                add_seconds = 0
            new_event.origins.append(Origin())
            new_event.origins[0].time = UTCDateTime(
                int(topline[1:5]), int(topline[6:8]), int(topline[8:10]),
                int(topline[11:13]), int(topline[13:15]), sfile_seconds,
                int(topline[19:20]) * 100000) + add_seconds
        except Exception:
            # BUG FIX: was a bare 'except:', which would also swallow
            # KeyboardInterrupt and SystemExit.
            warnings.warn("Couldn't read a date from sfile: " + sfile)
            new_event.origins.append(Origin(time=UTCDateTime(0)))
        # new_event.loc_mod_ind=topline[20]
        new_event.event_descriptions.append(EventDescription())
        new_event.event_descriptions[0].text = topline[21:23]
        # new_event.ev_id=topline[22]
        if not _float_conv(topline[23:30]) == 999:
            new_event.origins[0].latitude = _float_conv(topline[23:30])
            new_event.origins[0].longitude = _float_conv(topline[31:38])
            new_event.origins[0].depth = _float_conv(topline[39:43]) * 1000
        # new_event.depth_ind = topline[44]
        # new_event.loc_ind = topline[45]
        new_event.creation_info = CreationInfo(
            agency_id=topline[45:48].strip())
        ksta = Comment(text='Number of stations=' + topline[49:51].strip())
        new_event.origins[0].comments.append(ksta)
        # new_event.origins[0].nsta??? = _int_conv(topline[49:51])
        if not _float_conv(topline[51:55]) == 999:
            new_event.origins[0].time_errors['Time_Residual_RMS'] = \
                _float_conv(topline[51:55])
        # Read in magnitudes if they are there.  Each slot ends at columns
        # 59, 67 and 75.
        # BUG FIX: magnitudes were previously assigned by fixed indices
        # ([1], [2]) after conditional appends, which raised IndexError
        # whenever an earlier magnitude slot was blank but a later one was
        # filled; using [-1] after each append (matching _readheader) is
        # safe for any combination of filled slots.
        for index in [59, 67, 75]:
            if len(topline[index].strip()) > 0:
                new_event.magnitudes.append(Magnitude())
                new_event.magnitudes[-1].mag = _float_conv(
                    topline[index - 3:index])
                new_event.magnitudes[-1].magnitude_type = topline[index]
                new_event.magnitudes[-1].creation_info = \
                    CreationInfo(agency_id=topline[index + 1:index + 4]
                                 .strip())
                new_event.magnitudes[-1].origin_id = new_event.origins[0].\
                    resource_id
    # convert the nordic notation of magnitude to more general notation
    for _magnitude in new_event.magnitudes:
        _magnitude.magnitude_type = _nortoevmag(_magnitude.magnitude_type)
    # Set the useful things like preferred magnitude and preferred origin
    new_event.preferred_origin_id = str(new_event.origins[0].resource_id)
    if len(new_event.magnitudes) > 1:
        try:
            # Select moment first, then local, then body/surface/coda.
            mag_filter = ['MW', 'Mw', 'ML', 'Ml', 'MB', 'Mb',
                          'MS', 'Ms', 'Mc', 'MC']
            _magnitudes = [(m.magnitude_type, m.resource_id)
                           for m in new_event.magnitudes]
            preferred_magnitude = sorted(
                _magnitudes, key=lambda x: mag_filter.index(x[0]))
            new_event.preferred_magnitude_id = str(preferred_magnitude[0][1])
        except ValueError:
            # If there is a magnitude not specified in filter
            new_event.preferred_magnitude_id =\
                str(new_event.magnitudes[0].resource_id)
    elif len(new_event.magnitudes) == 1:
        new_event.preferred_magnitude_id =\
            str(new_event.magnitudes[0].resource_id)
    return new_event
def basic_test_event():
    """
    Function to generate a basic, full test event

    Builds an Event with one origin, three magnitudes (ML, Mc, Ms), one
    pick with an associated amplitude reading, and one arrival.
    """
    from obspy.core.event import Pick, WaveformStreamID, Arrival, Amplitude
    from obspy.core.event import Event, Origin, Magnitude
    from obspy.core.event import EventDescription, CreationInfo
    from obspy import UTCDateTime

    test_event = Event()
    reference_time = UTCDateTime("2012-03-26")
    # Single origin with fixed location and an RMS time error
    origin = Origin(latitude=45.0, longitude=25.0, depth=15000)
    origin.time = reference_time + 1
    origin.time_errors['Time_Residual_RMS'] = 0.01
    test_event.origins.append(origin)
    test_event.event_descriptions.append(EventDescription(text='LE'))
    test_event.creation_info = CreationInfo(agency_id='TES')
    # Three magnitudes of different types, all tied to the same origin
    for value, mag_type in [(0.1, 'ML'), (0.5, 'Mc'), (1.3, 'Ms')]:
        test_event.magnitudes.append(
            Magnitude(mag=value, magnitude_type=mag_type,
                      creation_info=CreationInfo('TES'),
                      origin_id=origin.resource_id))
    # Define the test pick
    stream_id = WaveformStreamID(station_code='FOZ', channel_code='SHZ',
                                 network_code='NZ')
    pick = Pick(waveform_id=stream_id, onset='impulsive', phase_hint='PN',
                polarity='positive', time=reference_time + 1.68,
                horizontal_slowness=12, backazimuth=20)
    test_event.picks.append(pick)
    # Amplitude reading associated with the pick
    test_event.amplitudes.append(
        Amplitude(generic_amplitude=2.0, period=0.4,
                  pick_id=pick.resource_id,
                  waveform_id=pick.waveform_id, unit='m'))
    # Arrival linking the pick into the origin
    origin.arrivals.append(
        Arrival(time_weight=2, phase=pick.phase_hint,
                pick_id=pick.resource_id, backazimuth_residual=5,
                time_residual=0.2, distance=15, azimuth=25))
    return test_event
def brightness(stations, nodes, lags, stream, threshold, thresh_type,
               template_length, template_saveloc, coherence_thresh,
               coherence_stations=['all'], coherence_clip=False, gap=2.0,
               clip_level=100, instance=0, pre_pick=0.2, plotvar=False,
               plotsave=True, cores=1, debug=0, mem_issue=False):
    """
    Calculate the brightness function for a single day.

    Written to calculate the brightness function for a single day of data,
    using moveouts from a 3D travel-time grid.

    .. Note::
        Data in stream must be all of the same length and have the same
        sampling rates, see :func:`eqcorrscan.utils.pre_processing.dayproc`

    :type stations: list
    :param stations:
        List of station names from in the form where stations[i] refers to
        nodes[i][:] and lags[i][:]
    :type nodes: list
    :param nodes:
        List of node points where nodes[i] refers to stations[i] and
        nodes[:][:][0] is latitude in degrees, nodes[:][:][1] is longitude in
        degrees, nodes[:][:][2] is depth in km.
    :type lags: numpy.ndarray
    :param lags:
        Array of arrays where lags[i][:] refers to stations[i]. lags[i][j]
        should be the delay to the nodes[i][j] for stations[i] in seconds.
    :type stream: obspy.core.stream.Stream
    :param stream: Data through which to look for detections.
    :type threshold: float
    :param threshold:
        Threshold value for detection of template within the brightness
        function.
    :type thresh_type: str
    :param thresh_type:
        Either MAD or abs where MAD is the Median Absolute Deviation and abs
        is an absolute brightness.
    :type template_length: float
    :param template_length: Length of template to extract in seconds
    :type template_saveloc: str
    :param template_saveloc: Path of where to save the templates.
    :type coherence_thresh: tuple
    :param coherence_thresh:
        Threshold for removing incoherent peaks in the network response,
        those below this will not be used as templates. Must be in the form
        of (a,b) where the coherence is given by: :math:`a-kchan/b` where
        kchan is the number of channels used to compute the coherence.
    :type coherence_stations: list
    :param coherence_stations:
        List of stations to use in the coherence thresholding - defaults to
        `all` which uses all the stations.
    :type coherence_clip: tuple
    :param coherence_clip:
        Start and end in seconds of data to window around, defaults to False,
        which uses all the data given.
    :type gap: float
    :param gap: Minimum inter-event time in seconds for detections.
    :type clip_level: float
    :param clip_level:
        Multiplier applied to the mean deviation of the energy as an upper
        limit, used to remove spikes (earthquakes, lightning, electrical
        spikes) from the energy stack.
    :type instance: int
    :param instance:
        Optional, used for tracking when using a distributed computing system.
    :type pre_pick: float
    :param pre_pick: Seconds before the detection time to include in template
    :type plotvar: bool
    :param plotvar: Turn plotting on or off
    :type plotsave: bool
    :param plotsave:
        Save or show plots, if `False` will try and show the plots on screen -
        as this is designed for bulk use this is set to `True` to save any
        plots rather than show them if you create them - changes the backend
        of matplotlib, so if is set to `False` you will see NO PLOTS!
    :type cores: int
    :param cores: Number of cores to use, defaults to 1.
    :type debug: int
    :param debug: Debug level from 0-5, higher is more output.
    :type mem_issue: bool
    :param mem_issue:
        Set to True to write temporary variables to disk rather than store in
        memory, slow.

    :return: list of templates as :class:`obspy.core.stream.Stream` objects
    :rtype: list
    """
    if plotsave:
        import matplotlib
        matplotlib.use('Agg')
        import matplotlib.pyplot as plt
        plt.ioff()
    from eqcorrscan.utils import plotting
    from eqcorrscan.utils.debug_log import debug_print
    # Check that we actually have the correct stations
    realstations = []
    for station in stations:
        st = stream.select(station=station)
        if st:
            # BUG FIX: was `realstations += station`, which extended the
            # list with the individual characters of the station name.
            realstations.append(station)
    del st
    stream_copy = stream.copy()
    # Force convert to int16
    for tr in stream_copy:
        # int16 max range is +/- 32767
        if max(abs(tr.data)) > 32767:
            tr.data = 32767 * (tr.data / max(abs(tr.data)))
            # Make sure that the data aren't clipped if they are high gain -
            # scale the data
        tr.data = tr.data.astype(np.int16)
    # The internal _node_loop converts energy to int16 too to conserve memory,
    # to do this it forces the maximum of a single energy trace to be 500 and
    # normalises to this level - this only works for fewer than 65 channels of
    # data
    if len(stream_copy) > 130:
        raise BrightnessError(
            'Too many streams, either re-code and cope with either more memory'
            ' usage, or less precision, or reduce data volume')
    # Loop through each node in the input
    # Linear run
    print('Computing the energy stacks')
    # Parallel run
    num_cores = cores
    if num_cores > len(nodes):
        num_cores = len(nodes)
    if num_cores > cpu_count():
        num_cores = cpu_count()
    if mem_issue and not os.path.isdir('tmp' + str(instance)):
        os.makedirs('tmp' + str(instance))
    pool = Pool(processes=num_cores)
    results = [
        pool.apply_async(
            _node_loop, (stations, ),
            {'lags': lags[:, i], 'stream': stream, 'i': i,
             'clip_level': clip_level, 'mem_issue': mem_issue,
             'instance': instance})
        for i in range(len(nodes))]
    pool.close()
    if not mem_issue:
        print('Computing the cumulative network response from memory')
        energy = [p.get() for p in results]
        pool.join()
        # Results come back in arbitrary order - sort on node index.
        energy.sort(key=lambda tup: tup[0])
        energy = [node[1] for node in energy]
        energy = np.concatenate(energy, axis=0)
        print(energy.shape)
    else:
        pool.join()
        del results
    # Now compute the cumulative network response and then detect possible
    # events
    if not mem_issue:
        print(energy.shape)
        indices = np.argmax(energy, axis=0)  # Indices of maximum energy
        print(indices.shape)
        cum_net_resp = np.array([np.nan] * len(indices))
        cum_net_resp[0] = energy[indices[0]][0]
        peak_nodes = [nodes[indices[0]]]
        for i in range(1, len(indices)):
            cum_net_resp[i] = energy[indices[i]][i]
            peak_nodes.append(nodes[indices[i]])
        del energy, indices
    else:
        print('Reading the temp files and computing network response')
        node_splits = int(len(nodes) // num_cores)
        print(node_splits)
        indices = []
        for i in range(num_cores):
            indices.append(
                list(np.arange(node_splits * i, node_splits * (i + 1))))
        # Remaining nodes go to the last worker.
        indices[-1] += list(np.arange(node_splits * (i + 1), len(nodes)))
        pool = Pool(processes=num_cores)
        results = [
            pool.apply_async(_cum_net_resp, args=(indices[i], instance))
            for i in range(num_cores)]
        pool.close()
        results = [p.get() for p in results]
        pool.join()
        responses = [result[0] for result in results]
        print(np.shape(responses))
        node_indices = [result[1] for result in results]
        cum_net_resp = np.array(responses)
        indices = np.argmax(cum_net_resp, axis=0)
        print(indices.shape)
        print(cum_net_resp.shape)
        cum_net_resp = np.array(
            [cum_net_resp[indices[i]][i] for i in range(len(indices))])
        peak_nodes = [nodes[node_indices[indices[i]][i]]
                      for i in range(len(indices))]
        del indices, node_indices
    if plotvar:
        cum_net_trace = Stream(Trace(
            data=cum_net_resp,
            header=Stats({
                'station': 'NR', 'channel': '', 'network': 'Z',
                'location': '', 'starttime': stream[0].stats.starttime,
                'sampling_rate': stream[0].stats.sampling_rate})))
        cum_net_trace += stream.select(channel='*N')
        cum_net_trace += stream.select(channel='*1')
        cum_net_trace.sort(['network', 'station', 'channel'])
    # Find detection within this network response
    print('Finding detections in the cumulative network response')
    detections = _find_detections(cum_net_resp, peak_nodes, threshold,
                                  thresh_type, stream[0].stats.sampling_rate,
                                  realstations, gap)
    del cum_net_resp
    templates = []
    nodesout = []
    good_detections = []
    if detections:
        print('Converting detections into templates')
        for j, detection in enumerate(detections):
            debug_print(
                'Converting for detection %i of %i' % (j, len(detections)),
                3, debug)
            # Create an event for each detection
            event = Event()
            # Set up some header info for the event
            event.event_descriptions.append(EventDescription())
            event.event_descriptions[0].text = 'Brightness detection'
            event.creation_info = CreationInfo(agency_id='EQcorrscan')
            copy_of_stream = deepcopy(stream_copy)
            # Convert detections to obspy.core.event type -
            # name of detection template is the node.
            node = (detection.template_name.split('_')[0],
                    detection.template_name.split('_')[1],
                    detection.template_name.split('_')[2])
            # Look up node in nodes and find the associated lags
            index = nodes.index(
                (float(node[0]), float(node[1]), float(node[2])))
            detect_lags = lags[:, index]
            ksta = Comment(text='Number of stations=' + str(len(detect_lags)))
            event.origins.append(Origin())
            event.origins[0].comments.append(ksta)
            event.origins[0].time = copy_of_stream[0].stats.starttime +\
                detect_lags[0] + detection.detect_time
            event.origins[0].latitude = node[0]
            event.origins[0].longitude = node[1]
            event.origins[0].depth = node[2]
            for i, detect_lag in enumerate(detect_lags):
                station = stations[i]
                st = copy_of_stream.select(station=station)
                if len(st) != 0:
                    for tr in st:
                        _waveform_id = WaveformStreamID(
                            station_code=tr.stats.station,
                            channel_code=tr.stats.channel,
                            network_code=tr.stats.network)
                        # BUG FIX: keyword was misspelled
                        # `evalutation_mode`, so the evaluation mode was
                        # never actually set on the Pick.
                        event.picks.append(
                            Pick(waveform_id=_waveform_id,
                                 time=tr.stats.starttime + detect_lag +
                                 detection.detect_time + pre_pick,
                                 onset='emergent',
                                 evaluation_mode='automatic'))
            debug_print('Generating template for detection: %i' % j, 0, debug)
            template = template_gen(picks=event.picks, st=copy_of_stream,
                                    length=template_length, swin='all')
            template_name = template_saveloc + '/' +\
                str(template[0].stats.starttime) + '.ms'
            # In the interests of RAM conservation we write then read
            # Check coherency here!
            temp_coher, kchan = coherence(template, coherence_stations,
                                          coherence_clip)
            coh_thresh = float(coherence_thresh[0]) - kchan / \
                float(coherence_thresh[1])
            coherent = False
            if temp_coher > coh_thresh:
                template.write(template_name, format="MSEED")
                print('Written template as: ' + template_name)
                print('---------------------------------coherence LEVEL: ' +
                      str(temp_coher))
                coherent = True
            else:
                # BUG FIX: this branch was not guarded by `else`, so
                # `coherent` was unconditionally reset to False and no
                # template was ever kept.
                debug_print('Template was incoherent, coherence level: ' +
                            str(temp_coher), 0, debug)
                coherent = False
            del copy_of_stream, tr, template
            if coherent:
                templates.append(obsread(template_name))
                nodesout += [node]
                good_detections.append(detection)
            else:
                # BUG FIX: restored missing `else` - this message previously
                # printed for every detection, coherent or not.
                debug_print('No template for you', 0, debug)
    if plotvar:
        good_detections = [(cum_net_trace[-1].stats.starttime +
                            detection.detect_time).datetime
                           for detection in good_detections]
        if not plotsave:
            plotting.NR_plot(cum_net_trace[0:-1],
                             Stream(cum_net_trace[-1]),
                             detections=good_detections,
                             size=(18.5, 10), title='Network response')
            # cum_net_trace.plot(size=(800,600), equal_scale=False)
        else:
            savefile = 'plots/' +\
                cum_net_trace[0].stats.starttime.datetime.strftime('%Y%m%d') +\
                '_NR_timeseries.pdf'
            plotting.NR_plot(cum_net_trace[0:-1],
                             Stream(cum_net_trace[-1]),
                             detections=good_detections,
                             size=(18.5, 10), save=True, savefile=savefile,
                             title='Network response')
    nodesout = list(set(nodesout))
    return templates, nodesout
# NOTE(review): this is a second definition of `brightness` in the same
# module and therefore shadows the one above at import time - confirm which
# is intended to survive.
def brightness(stations, nodes, lags, stream, threshold, thresh_type,
               template_length, template_saveloc, coherence_thresh,
               coherence_stations=['all'], coherence_clip=False, gap=2.0,
               clip_level=100, instance=0, pre_pick=0.2, plotsave=True,
               cores=1):
    r"""Function to calculate the brightness function in terms of energy for \
    a day of data over the entire network for a given grid of nodes.

    Note data in stream must be all of the same length and have the same
    sampling rates.

    :type stations: list
    :param stations: List of station names from in the form where stations[i]\
        refers to nodes[i][:] and lags[i][:]
    :type nodes: list, tuple
    :param nodes: List of node points where nodes[i] refers to stations[i] \
        and nodes[:][:][0] is latitude in degrees, nodes[:][:][1] is \
        longitude in degrees, nodes[:][:][2] is depth in km.
    :type lags: :class: 'numpy.array'
    :param lags: Array of arrays where lags[i][:] refers to stations[i]. \
        lags[i][j] should be the delay to the nodes[i][j] for stations[i] in \
        seconds.
    :type stream: :class: `obspy.Stream`
    :param stream: Data through which to look for detections.
    :type threshold: float
    :param threshold: Threshold value for detection of template within the \
        brightness function
    :type thresh_type: str
    :param thresh_type: Either MAD or abs where MAD is the Median Absolute \
        Deviation and abs is an absolute brightness.
    :type template_length: float
    :param template_length: Length of template to extract in seconds
    :type template_saveloc: str
    :param template_saveloc: Path of where to save the templates.
    :type coherence_thresh: tuple of floats
    :param coherence_thresh: Threshold for removing incoherent peaks in the \
        network response, those below this will not be used as templates. \
        Must be in the form of (a,b) where the coherence is given by: \
        a-kchan/b where kchan is the number of channels used to compute \
        the coherence
    :type coherence_stations: list
    :param coherence_stations: List of stations to use in the coherence \
        thresholding - defaults to 'all' which uses all the stations.
    :type coherence_clip: tuple
    :param coherence_clip: Start and end in seconds of data to window \
        around, defaults to False, which uses all the data given.
    :type pre_pick: float
    :param pre_pick: Seconds before the detection time to include in template
    :type plotsave: bool
    :param plotsave: Save or show plots, if False will try and show the plots\
        on screen - as this is designed for bulk use this is set to \
        True to save any plots rather than show them if you create \
        them - changes the backend of matplotlib, so if is set to \
        False you will see NO PLOTS!
    :type cores: int
    :param cores: Number of cores to use, defaults to 1.
    :type clip_level: float
    :param clip_level: Multiplier applied to the mean deviation of the energy\
        as an upper limit, used to remove spikes (earthquakes, \
        lightning, electrical spikes) from the energy stack.
    :type gap: float
    :param gap: Minimum inter-event time in seconds for detections

    :return: list of templates as :class: `obspy.Stream` objects
    """
    from eqcorrscan.core.template_gen import _template_gen
    if plotsave:
        import matplotlib
        matplotlib.use('Agg')
        import matplotlib.pyplot as plt
        plt.ioff()
    # from joblib import Parallel, delayed
    from multiprocessing import Pool, cpu_count
    from copy import deepcopy
    from obspy import read as obsread
    from obspy.core.event import Catalog, Event, Pick, WaveformStreamID, Origin
    from obspy.core.event import EventDescription, CreationInfo, Comment
    # BUG FIX: `import obspy.Stream` raised ImportError because Stream is a
    # class, not a sub-module; importing the package gives access to
    # `obspy.Stream` as used below.
    import obspy
    import matplotlib.pyplot as plt
    from eqcorrscan.utils import EQcorrscan_plotting as plotting
    # Check that we actually have the correct stations
    realstations = []
    for station in stations:
        st = stream.select(station=station)
        if st:
            # BUG FIX: was `realstations += station`, which extended the
            # list with the individual characters of the station name.
            realstations.append(station)
    del st
    stream_copy = stream.copy()
    # Force convert to int16
    for tr in stream_copy:
        # int16 max range is +/- 32767
        if max(abs(tr.data)) > 32767:
            tr.data = 32767 * (tr.data / max(abs(tr.data)))
            # Make sure that the data aren't clipped if they are high gain -
            # scale the data
        tr.data = tr.data.astype(np.int16)
    # The internal _node_loop converts energy to int16 too to conserve memory,
    # to do this it forces the maximum of a single energy trace to be 500 and
    # normalises to this level - this only works for fewer than 65 channels of
    # data
    if len(stream_copy) > 130:
        raise OverflowError('Too many streams, either re-code and cope with' +
                            'either more memory usage, or less precision, or' +
                            'reduce data volume')
    detections = []
    detect_lags = []
    parallel = True
    plotvar = True
    mem_issue = False
    # Loop through each node in the input
    # Linear run
    print('Computing the energy stacks')
    if not parallel:
        for i in range(0, len(nodes)):
            print(i)
            if not mem_issue:
                j, a = _node_loop(stations, lags[:, i], stream, plot=True)
                if 'energy' not in locals():
                    energy = a
                else:
                    energy = np.concatenate((energy, a), axis=0)
                print('energy: ' + str(np.shape(energy)))
            else:
                j, filename = _node_loop(stations, lags[:, i], stream, i,
                                         mem_issue)
        energy = np.array(energy)
        print(np.shape(energy))
    else:
        # Parallel run
        num_cores = cores
        if num_cores > len(nodes):
            num_cores = len(nodes)
        if num_cores > cpu_count():
            num_cores = cpu_count()
        pool = Pool(processes=num_cores)
        results = [pool.apply_async(_node_loop,
                                    args=(stations, lags[:, i], stream, i,
                                          clip_level, mem_issue, instance))
                   for i in range(len(nodes))]
        pool.close()
        if not mem_issue:
            print('Computing the cumulative network response from memory')
            energy = [p.get() for p in results]
            pool.join()
            # Results come back in arbitrary order - sort on node index.
            energy.sort(key=lambda tup: tup[0])
            energy = [node[1] for node in energy]
            energy = np.concatenate(energy, axis=0)
            print(energy.shape)
        else:
            pool.join()
    # Now compute the cumulative network response and then detect possible
    # events
    if not mem_issue:
        print(energy.shape)
        indeces = np.argmax(energy, axis=0)  # Indeces of maximum energy
        print(indeces.shape)
        cum_net_resp = np.array([np.nan] * len(indeces))
        cum_net_resp[0] = energy[indeces[0]][0]
        peak_nodes = [nodes[indeces[0]]]
        for i in range(1, len(indeces)):
            cum_net_resp[i] = energy[indeces[i]][i]
            peak_nodes.append(nodes[indeces[i]])
        del energy, indeces
    else:
        print('Reading the temp files and computing network response')
        node_splits = len(nodes) // num_cores
        # BUG FIX: the previous chunking relied on the loop variable `i`
        # escaping a loop that never runs for num_cores <= 2; build the
        # chunks explicitly and give the remainder to the last worker.
        indeces = [list(range(node_splits * i, node_splits * (i + 1)))
                   for i in range(num_cores)]
        indeces[-1] += list(range(node_splits * num_cores, len(nodes)))
        pool = Pool(processes=num_cores)
        results = [pool.apply_async(_cum_net_resp,
                                    args=(indeces[i], instance))
                   for i in range(num_cores)]
        pool.close()
        results = [p.get() for p in results]
        pool.join()
        responses = [result[0] for result in results]
        print(np.shape(responses))
        node_indeces = [result[1] for result in results]
        cum_net_resp = np.array(responses)
        indeces = np.argmax(cum_net_resp, axis=0)
        print(indeces.shape)
        print(cum_net_resp.shape)
        cum_net_resp = np.array(
            [cum_net_resp[indeces[i]][i] for i in range(len(indeces))])
        peak_nodes = [nodes[node_indeces[indeces[i]][i]]
                      for i in range(len(indeces))]
        del indeces, node_indeces
    if plotvar:
        cum_net_trace = deepcopy(stream[0])
        cum_net_trace.data = cum_net_resp
        cum_net_trace.stats.station = 'NR'
        cum_net_trace.stats.channel = ''
        cum_net_trace.stats.network = 'Z'
        cum_net_trace.stats.location = ''
        cum_net_trace.stats.starttime = stream[0].stats.starttime
        cum_net_trace = obspy.Stream(cum_net_trace)
        cum_net_trace += stream.select(channel='*N')
        cum_net_trace += stream.select(channel='*1')
        cum_net_trace.sort(['network', 'station', 'channel'])
        # np.save('cum_net_resp.npy',cum_net_resp)
        # cum_net_trace.plot(size=(800,600), equal_scale=False,\
        #                    outfile='NR_timeseries.eps')
    # Find detection within this network response
    print('Finding detections in the cumulatve network response')
    detections = _find_detections(cum_net_resp, peak_nodes, threshold,
                                  thresh_type, stream[0].stats.sampling_rate,
                                  realstations, gap)
    del cum_net_resp
    templates = []
    nodesout = []
    good_detections = []
    if detections:
        print('Converting detections in to templates')
        # Generate a catalog of detections
        detections_cat = Catalog()
        for j, detection in enumerate(detections):
            print('Converting for detection ' + str(j) + ' of ' +
                  str(len(detections)))
            # Create an event for each detection
            event = Event()
            # Set up some header info for the event
            event.event_descriptions.append(EventDescription())
            event.event_descriptions[0].text = 'Brightness detection'
            event.creation_info = CreationInfo(agency_id='EQcorrscan')
            copy_of_stream = deepcopy(stream_copy)
            # Convert detections to obspy.core.event type -
            # name of detection template is the node.
            node = (detection.template_name.split('_')[0],
                    detection.template_name.split('_')[1],
                    detection.template_name.split('_')[2])
            print(node)
            # Look up node in nodes and find the associated lags.
            # BUG FIX: `node` holds strings parsed from the template name,
            # whereas `nodes` holds floats - convert before the lookup.
            index = nodes.index(
                (float(node[0]), float(node[1]), float(node[2])))
            detect_lags = lags[:, index]
            # BUG FIX: was `'Number of stations=' + len(detect_lags)`,
            # a str + int TypeError.
            ksta = Comment(text='Number of stations=' + str(len(detect_lags)))
            event.origins.append(Origin())
            event.origins[0].comments.append(ksta)
            event.origins[0].time = copy_of_stream[0].stats.starttime +\
                detect_lags[0] + detection.detect_time
            event.origins[0].latitude = node[0]
            event.origins[0].longitude = node[1]
            event.origins[0].depth = node[2]
            for i, detect_lag in enumerate(detect_lags):
                station = stations[i]
                st = copy_of_stream.select(station=station)
                if len(st) != 0:
                    for tr in st:
                        _waveform_id = WaveformStreamID(
                            station_code=tr.stats.station,
                            channel_code=tr.stats.channel,
                            network_code='NA')
                        # BUG FIX: keyword was misspelled
                        # `evalutation_mode`.
                        event.picks.append(
                            Pick(waveform_id=_waveform_id,
                                 time=tr.stats.starttime + detect_lag +
                                 detection.detect_time + pre_pick,
                                 onset='emergent',
                                 evaluation_mode='automatic'))
            print('Generating template for detection: ' + str(j))
            template = (_template_gen(event.picks, copy_of_stream,
                                      template_length, 'all'))
            template_name = template_saveloc + '/' +\
                str(template[0].stats.starttime) + '.ms'
            # In the interests of RAM conservation we write then read
            # Check coherency here!
            temp_coher, kchan = coherence(template, coherence_stations,
                                          coherence_clip)
            coh_thresh = float(coherence_thresh[0]) - kchan / \
                float(coherence_thresh[1])
            if temp_coher > coh_thresh:
                template.write(template_name, format="MSEED")
                print('Written template as: ' + template_name)
                print('---------------------------------coherence LEVEL: ' +
                      str(temp_coher))
                coherant = True
            else:
                print('Template was incoherant, coherence level: ' +
                      str(temp_coher))
                coherant = False
            del copy_of_stream, tr, template
            if coherant:
                templates.append(obsread(template_name))
                nodesout += [node]
                good_detections.append(detection)
            else:
                print('No template for you')
    if plotvar:
        all_detections = [(cum_net_trace[-1].stats.starttime +
                           detection.detect_time).datetime
                          for detection in detections]
        good_detections = [(cum_net_trace[-1].stats.starttime +
                            detection.detect_time).datetime
                           for detection in good_detections]
        if not plotsave:
            plotting.NR_plot(cum_net_trace[0:-1],
                             obspy.Stream(cum_net_trace[-1]),
                             detections=good_detections,
                             size=(18.5, 10), title='Network response')
            # cum_net_trace.plot(size=(800,600), equal_scale=False)
        else:
            savefile = 'plots/' +\
                cum_net_trace[0].stats.starttime.datetime.strftime('%Y%m%d') +\
                '_NR_timeseries.pdf'
            plotting.NR_plot(cum_net_trace[0:-1],
                             obspy.Stream(cum_net_trace[-1]),
                             detections=good_detections,
                             size=(18.5, 10), save=savefile,
                             title='Network response')
    nodesout = list(set(nodesout))
    return templates, nodesout