def test_faulty_events(tmpdir, recwarn):
    tmpdir = str(tmpdir)
    file_1 = os.path.join(os.path.dirname(os.path.dirname(os.path.abspath(
        inspect.getfile(inspect.currentframe())))), "data", "ExampleProject",
        "EVENTS", "GCMT_event_TURKEY_Mag_5.1_2010-3-24-14-11.xml")
    file_2 = os.path.join(os.path.dirname(os.path.dirname(os.path.abspath(
        inspect.getfile(inspect.currentframe())))), "data", "ExampleProject",
        "EVENTS", "GCMT_event_TURKEY_Mag_5.9_2011-5-19-20-15.xml")
    cat = obspy.readEvents(file_1)
    cat += obspy.readEvents(file_2)

    # Modify it to trigger all problems.
    temp = io.BytesIO()
    cat.write(temp, format="quakeml")
    temp.seek(0, 0)
    temp = temp.read()
    pattern = re.compile(r"<depth>.*?</depth>", re.DOTALL)
    temp = re.sub(pattern, "<depth></depth>", temp)
    temp = re.sub(r"<type>.*?</type>", "<type></type>", temp)
    with open(os.path.join(tmpdir, "random.xml"), "wb") as fh:
        fh.write(temp)

    comm = Communicator()
    EventsComponent(tmpdir, comm, "events")

    event = comm.events.get('random')
    assert "more than one event" in str(recwarn.pop(LASIFWarning).message)
    assert "contains no depth" in str(recwarn.pop(LASIFWarning).message)
    assert "Magnitude has no specified type" in str(
        recwarn.pop(LASIFWarning).message)

    # Assert the default values it will then take.
    assert event["depth_in_km"] == 0.0
    assert event["magnitude_type"] == "Mw"
def test_reading_using_obspy_plugin(self):
    """
    Checks that reading with the readEvents() function works correctly.
    """
    filename = os.path.join(self.datapath, "C200604092050A.ndk")
    cat = readEvents(filename)

    reference = os.path.join(self.datapath, "C200604092050A.xml")
    ref_cat = readEvents(reference)

    self.assertEqual(cat, ref_cat)
def test_read_nlloc_with_picks(self):
    """
    Test correct resource ID linking when reading a NLLOC_HYP file while
    providing the original picks.
    """
    picks = readEvents(getExampleFile("nlloc.qml"))[0].picks
    arrivals = readEvents(getExampleFile("nlloc.hyp"), format="NLLOC_HYP",
                          picks=picks)[0].origins[0].arrivals
    expected = [p.resource_id for p in picks]
    got = [a.pick_id for a in arrivals]
    self.assertEqual(expected, got)
def test_reading_from_open_file_in_binary_mode(self):
    """
    Tests reading from an open file in binary mode.
    """
    filename = os.path.join(self.datapath, "C200604092050A.ndk")
    with open(filename, "rb") as fh:
        cat = readEvents(fh)

    reference = os.path.join(self.datapath, "C200604092050A.xml")
    ref_cat = readEvents(reference)

    self.assertEqual(cat, ref_cat)
def test_reading_from_bytes_io(self):
    """
    Tests reading from BytesIO.
    """
    filename = os.path.join(self.datapath, "C200604092050A.ndk")
    with open(filename, "rb") as fh:
        file_object = io.BytesIO(fh.read())

    cat = readEvents(file_object)
    file_object.close()

    reference = os.path.join(self.datapath, "C200604092050A.xml")
    ref_cat = readEvents(reference)

    self.assertEqual(cat, ref_cat)
def test_update_user_levels():
    obs_tr = read(obsfile)[0]
    syn_tr = read(synfile)[0]

    config_file = os.path.join(DATA_DIR, "window", "27_60.BHZ.config.yaml")
    config = wio.load_window_config_yaml(config_file)

    cat = readEvents(quakeml)
    inv = read_inventory(staxml)

    user_module = "pytomo3d.window.tests.user_module_example"
    config = win.update_user_levels(user_module, config, inv, cat,
                                    obs_tr, syn_tr)

    npts = obs_tr.stats.npts
    assert isinstance(config.stalta_waterlevel, np.ndarray)
    assert len(config.stalta_waterlevel) == npts
    assert isinstance(config.tshift_acceptance_level, np.ndarray)
    assert len(config.tshift_acceptance_level) == npts
    assert isinstance(config.dlna_acceptance_level, np.ndarray)
    assert len(config.dlna_acceptance_level) == npts
    assert isinstance(config.cc_acceptance_level, np.ndarray)
    assert len(config.cc_acceptance_level) == npts
    assert isinstance(config.s2n_limit, np.ndarray)
    assert len(config.s2n_limit) == npts
def iris2quakeml(url, output_folder=None):
    if "/spudservice/" not in url:
        url = url.replace("/spud/", "/spudservice/")
        if url.endswith("/"):
            url += "quakeml"
        else:
            url += "/quakeml"
    print "Downloading %s..." % url

    r = urllib2.urlopen(url)
    if r.code != 200:
        r.close()
        msg = "Error Downloading file!"
        raise Exception(msg)

    # For some reason the quakeml file is escaped HTML.
    h = HTMLParser.HTMLParser()

    data = h.unescape(r.read())
    r.close()

    data = StringIO(data)

    try:
        cat = readEvents(data)
    except:
        msg = "Could not read downloaded event data"
        raise ValueError(msg)

    cat.events = cat.events[:1]
    ev = cat[0]

    # Parse the event and get the preferred focal mechanism. Then get the
    # origin and magnitude associated with that focal mechanism. All other
    # focal mechanisms, origins and magnitudes will be removed. Just makes it
    # simpler and less error prone.
    if ev.preferred_focal_mechanism():
        ev.focal_mechanisms = [ev.preferred_focal_mechanism()]
    else:
        ev.focal_mechanisms = ev.focal_mechanisms[:1]

    # Set the origin and magnitudes of the event.
    mt = ev.focal_mechanisms[0].moment_tensor
    ev.magnitudes = [mt.moment_magnitude_id.getReferredObject()]
    ev.origins = [mt.derived_origin_id.getReferredObject()]

    # Get the flinn_engdahl region for a nice name.
    fe = FlinnEngdahl()
    region_name = fe.get_region(ev.origins[0].longitude,
                                ev.origins[0].latitude)
    region_name = region_name.replace(" ", "_")
    event_name = "GCMT_event_%s_Mag_%.1f_%s-%s-%s-%s-%s.xml" % \
        (region_name, ev.magnitudes[0].mag, ev.origins[0].time.year,
         ev.origins[0].time.month, ev.origins[0].time.day,
         ev.origins[0].time.hour, ev.origins[0].time.minute)

    if output_folder:
        event_name = os.path.join(output_folder, event_name)
    cat.write(event_name, format="quakeml", validate=True)
    print "Written file", event_name
def test_write_cnv(self):
    """
    Test writing CNV catalog summary file.
    """
    # load QuakeML file to generate CNV file from it
    filename = os.path.join(self.datapath, "obspyck_20141020150701.xml")
    cat = readEvents(filename, format="QUAKEML")
    # read expected CNV file output
    filename = os.path.join(self.datapath, "obspyck_20141020150701.cnv")
    with open(filename, "rb") as fh:
        expected = fh.read().decode()
    # write via plugin
    with NamedTemporaryFile() as tf:
        cat.write(tf, format="CNV")
        tf.seek(0)
        got = tf.read().decode()
    self.assertEqual(expected, got)
    # write manually
    with NamedTemporaryFile() as tf:
        write_CNV(cat, tf)
        tf.seek(0)
        got = tf.read().decode()
    self.assertEqual(expected, got)
def test_write_nlloc_obs(self):
    """
    Test writing nonlinloc observations phase file.
    """
    # load nlloc.qml QuakeML file to generate OBS file from it
    filename = getExampleFile("nlloc.qml")
    cat = readEvents(filename, "QUAKEML")
    # adjust one pick time that got cropped by nonlinloc in NLLOC HYP file
    # due to less precision in hypocenter file (that we used to create the
    # reference QuakeML file)
    for pick in cat[0].picks:
        if pick.waveform_id.station_code == "UH4" and \
                pick.phase_hint == "P":
            pick.time -= 0.005
    # read expected OBS file output
    filename = getExampleFile("nlloc.obs")
    with open(filename, "rb") as fh:
        expected = fh.read().decode()
    # write via plugin
    with NamedTemporaryFile() as tf:
        cat.write(tf, format="NLLOC_OBS")
        tf.seek(0)
        got = tf.read().decode()
    self.assertEqual(expected, got)
    # write manually
    with NamedTemporaryFile() as tf:
        write_nlloc_obs(cat, tf)
        tf.seek(0)
        got = tf.read().decode()
    self.assertEqual(expected, got)
def add_events(self, events):
    """
    Add one or more events to the input file generator. Most inversions
    should specify only one event but some codes can deal with multiple
    events.

    Can currently deal with QuakeML files, obspy.core.event.Event
    objects, and plain dictionaries.

    :type events: list, dict, obspy.core.event.Event, or
        obspy.core.event.Catalog
    :param events: A list of filenames, a list of obspy.core.event.Event
        objects, a list of dictionaries, or an obspy.core.event.Catalog
        object.
    """
    if isinstance(events, Event) or isinstance(events, dict) or \
            not hasattr(events, "__iter__"):
        events = [events, ]

    for event in events:
        if isinstance(event, Event):
            self._parse_event(event)
            continue
        elif isinstance(event, dict):
            self._events.append(event)
            continue
        try:
            cat = readEvents(event)
        except:
            msg = "Could not read %s." % str(event)
            raise TypeError(msg)
        for event in cat:
            self._parse_event(event)

    # Make sure each event is unique.
    self._events = unique_list(self._events)
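# Usage sketch for add_events() above; "event1.xml" and "event2.xml" are
# hypothetical QuakeML files and InputFileGenerator/obspy are assumed to be
# importable here. Filenames, Catalog objects, single Event objects and
# plain dicts are all accepted, and duplicates are dropped internally.
gen = InputFileGenerator()
gen.add_events("event1.xml")
gen.add_events(obspy.readEvents("event2.xml"))
gen.add_events(obspy.readEvents("event2.xml")[0])  # duplicate, deduplicated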
def test_process_synt():
    st = obspy.read(testsyn)
    inv = obspy.read_inventory(teststaxml)
    event = obspy.readEvents(testquakeml)[0]
    origin = event.preferred_origin() or event.origins[0]
    event_lat = origin.latitude
    event_lon = origin.longitude
    event_time = origin.time

    pre_filt = [1 / 90., 1 / 60., 1 / 27.0, 1 / 22.5]
    t1 = event_time
    t2 = event_time + 6000.0
    st_new = proc.process(st, remove_response_flag=False, inventory=inv,
                          filter_flag=True, pre_filt=pre_filt,
                          starttime=t1, endtime=t2, resample_flag=True,
                          sampling_rate=2.0, taper_type="hann",
                          taper_percentage=0.05, rotate_flag=True,
                          event_latitude=event_lat,
                          event_longitude=event_lon)

    bmfile = os.path.join(DATA_DIR, "proc", "IU.KBL.syn.proc.mseed")
    st_compare = obspy.read(bmfile)
    assert compare_stream_kernel(st_new, st_compare)
def test_IRIS_example_queries_event(self):
    """
    Tests the (sometimes modified) example queries given on IRIS webpage.
    """
    client = self.client

    queries = [
        dict(eventid=609301),
        dict(starttime=UTCDateTime("2011-01-07T01:00:00"),
             endtime=UTCDateTime("2011-01-07T02:00:00"),
             catalog="NEIC PDE"),
        dict(starttime=UTCDateTime("2011-01-07T14:00:00"),
             endtime=UTCDateTime("2011-01-08T00:00:00"),
             minlatitude=15, maxlatitude=40,
             minlongitude=-170, maxlongitude=170,
             includeallmagnitudes=True, minmagnitude=4,
             orderby="magnitude"),
    ]
    result_files = ["events_by_eventid.xml",
                    "events_by_time.xml",
                    "events_by_misc.xml",
                    ]
    for query, filename in zip(queries, result_files):
        file_ = os.path.join(self.datapath, filename)
        # query["filename"] = file_
        got = client.get_events(**query)
        expected = readEvents(file_)
        self.assertEqual(got, expected, failmsg(got, expected))
        # test output to file
        with NamedTemporaryFile() as tf:
            client.get_events(filename=tf.name, **query)
            with open(tf.name, 'rb') as fh:
                got = fh.read()
            with open(file_, 'rb') as fh:
                expected = fh.read()
        self.assertEqual(got, expected, failmsg(got, expected))
def test_process_obsd_2():
    st = testobs.copy()
    inv = deepcopy(teststaxml)
    event = obspy.readEvents(testquakeml)[0]
    origin = event.preferred_origin() or event.origins[0]
    event_lat = origin.latitude
    event_lon = origin.longitude
    event_time = origin.time

    pre_filt = [1/90., 1/60., 1/27.0, 1/22.5]
    t1 = event_time
    t2 = event_time + 6000.0
    st_new = proc.process_stream(
        st, remove_response_flag=True, water_level=60, inventory=inv,
        filter_flag=True, pre_filt=pre_filt, starttime=t1, endtime=t2,
        resample_flag=True, sampling_rate=2.0, taper_type="hann",
        taper_percentage=0.05, rotate_flag=True, event_latitude=event_lat,
        event_longitude=event_lon, sanity_check=True)

    bmfile = os.path.join(DATA_DIR, "proc", "IU.KBL.obs.proc.mseed")
    st_compare = obspy.read(bmfile)

    assert len(st_new) == 3
    assert compare_trace_kernel(st_new.select(channel="BHZ")[0],
                                st_compare.select(channel="BHZ")[0])
def _generator(events, inventory, rf=False):
    """Generator yielding length at first and then station/event information"""
    inventory = read_inventory(inventory)
    channels = inventory.get_contents()['channels']
    stations = list(set(ch.rsplit('.', 1)[0] for ch in channels))
    one_channel = {ch.rsplit('.', 1)[0]: ch for ch in channels}
    if events is not None:
        events = readEvents(events)
        yield len(stations) * len(events)
        for event in events:
            for station in stations:
                seed_id = one_channel[station][:-1] + '?'
                net, sta, loc, cha = seed_id.split('.')
                stats = {'network': net, 'station': sta, 'location': loc,
                         'channel': cha}
                if rf:
                    stats['event'] = event
                    stats['seed_id'] = seed_id
                    coords = inventory.get_coordinates(one_channel[station])
                    yield stats, event, coords
                else:
                    stats['event_time'] = event.preferred_origin()['time']
                    yield stats
    else:
        yield len(stations)
        for station in stations:
            net, sta, loc, cha = one_channel[station].split('.')
            stats = {'network': net, 'station': sta, 'location': loc,
                     'channel': cha[:-1] + '?', 'event_time': _DummyUTC()}
            yield stats
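# Consumption sketch for _generator() above; "events.xml" and "stations.xml"
# are placeholder QuakeML/StationXML paths. The first yielded item is the
# total number of station/event combinations (handy for sizing a progress
# bar); with rf=False every later item is a plain stats dict.
gen = _generator("events.xml", "stations.xml", rf=False)
n_items = next(gen)
stats_list = list(gen)
assert len(stats_list) == n_items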
def test_process_synt():
    staxmlfile = os.path.join(DATA_DIR, "stationxml", "IU.KBL.syn.xml")
    inv = obspy.read_inventory(staxmlfile)

    st = testsyn.copy()
    event = obspy.readEvents(testquakeml)[0]
    origin = event.preferred_origin() or event.origins[0]
    event_lat = origin.latitude
    event_lon = origin.longitude
    event_time = origin.time

    pre_filt = [1/90., 1/60., 1/27.0, 1/22.5]
    t1 = event_time
    t2 = event_time + 6000.0
    st_new = proc.process_stream(
        st, remove_response_flag=False, inventory=inv,
        filter_flag=True, pre_filt=pre_filt, starttime=t1, endtime=t2,
        resample_flag=True, sampling_rate=2.0, taper_type="hann",
        taper_percentage=0.05, rotate_flag=True,
        event_latitude=event_lat, event_longitude=event_lon)

    bmfile = os.path.join(DATA_DIR, "proc", "IU.KBL.syn.proc.mseed")
    st_compare = obspy.read(bmfile)
    assert compare_stream_kernel(st_new, st_compare)
def rf_dmt(data_path, rf, events=None, phase='P', dist=None, **rf_kwargs):
    """
    TODO: doc rf_dmt
    """
    events = readEvents(events)
    print events
    for event in events:
        event_id = event.resource_id.getQuakeMLURI().split('/')[-1]
        inputs = data_path.format(eventid=event_id)
        # glob on the event-specific path built above
        inputs = glob.glob(inputs)
        while len(inputs) > 0:
            files_tmp = inputs[0][:-1] + '?'
            for f in glob.glob(files_tmp):
                inputs.remove(f)
            st = RFStream(read(files_tmp, headonly=True))
            st.read_sac_header()
            stats = rfstats(stats=st[0].stats, event=event, phase=phase,
                            dist_range=dist)
            if not stats:
                continue
            st = RFStream(read(files_tmp))
            st.merge()
            if len(st) != 3:
                import warnings
                warnings.warn('Need 3 component seismograms. '
                              'Error for files %s' % files_tmp)
                continue
            for tr in st:
                tr.stats.update(stats)
            st.rf(method=phase[0], **rf_kwargs)
            for tr in st:
                output = rf.format(eventid=event_id, stats=tr.stats)
                _create_dir(output)
                tr.write(output, 'SAC')
def rf_client(get_waveform, rf, stations=None, events=None,
              request_window=(-50, 150),  # S: -300 to 300
              phase='P', dist=None, **rf_kwargs):
    """
    TODO: doc rf_client
    """
    events = readEvents(events)
    stations = _read_stations(stations)
    for event in events:
        event_id = event.resource_id.getQuakeMLURI().split('/')[-1]
        for station in stations:
            stats = rfstats(station=stations[station], event=event,
                            phase=phase, dist_range=dist)
            if not stats:
                continue
            st = get_waveform(station, stats.onset + request_window[0],
                              stats.onset + request_window[1])
            st = RFStream(stream=st)
            st.merge()
            if len(st) != 3:
                import warnings
                warnings.warn('Need 3 component seismograms. More or less '
                              'than three components for event %s, '
                              'station %s.' % (event_id, station))
                continue
            for tr in st:
                tr.stats.update(stats)
            st.rf(method=phase[0], **rf_kwargs)
            st.write_sac_header()
            for tr in st:
                output = rf.format(eventid=event_id, stats=tr.stats)
                _create_dir(output)
                tr.write(output, 'SAC')
def test_window_on_trace():
    obs_tr = read(obsfile).select(channel="*R")[0]
    syn_tr = read(synfile).select(channel="*R")[0]

    config_file = os.path.join(DATA_DIR, "window", "27_60.BHZ.config.yaml")
    config = wio.load_window_config_yaml(config_file)

    cat = readEvents(quakeml)
    inv = read_inventory(staxml)

    windows = win.window_on_trace(obs_tr, syn_tr, config, station=inv,
                                  event=cat, _verbose=False,
                                  figure_mode=False)
    assert len(windows) == 5

    winfile_bm = os.path.join(DATA_DIR, "window", "IU.KBL..BHR.window.json")
    with open(winfile_bm) as fh:
        windows_json = json.load(fh)
    for _win, _win_json_bm in zip(windows, windows_json):
        _win_bm = Window._load_from_json_content(_win_json_bm)
        assert _win == _win_bm
def test_window_on_stream_user_levels():
    obs_tr = read(obsfile)
    syn_tr = read(synfile)

    config_file = os.path.join(DATA_DIR, "window", "27_60.BHZ.config.yaml")
    config = wio.load_window_config_yaml(config_file)
    config_dict = {"Z": config, "R": config, "T": config}

    cat = readEvents(quakeml)
    inv = read_inventory(staxml)

    _mod = "pytomo3d.window.tests.user_module_example"
    user_modules = {"BHZ": _mod, "BHR": _mod, "BHT": _mod}

    windows = win.window_on_stream(obs_tr, syn_tr, config_dict, station=inv,
                                   event=cat, user_modules=user_modules,
                                   _verbose=False, figure_mode=False)
    assert len(windows) == 3
    nwins = dict((_w, len(windows[_w])) for _w in windows)
    assert nwins == {"IU.KBL..BHR": 5, "IU.KBL..BHZ": 2, "IU.KBL..BHT": 4}
def test_adding_event_in_various_manners(tmpdir):
    """
    Events can be added either as filenames, open files, BytesIOs, or
    ObsPy objects. In any case, the result should be the same.
    """
    asdf_filename = os.path.join(tmpdir.strpath, "test.h5")
    data_path = os.path.join(data_dir, "small_sample_data_set")
    event_filename = os.path.join(data_path, "quake.xml")

    ref_cat = obspy.readEvents(event_filename)

    # Add as filename
    data_set = ASDFDataSet(asdf_filename)
    assert len(data_set.events) == 0
    data_set.add_quakeml(event_filename)
    assert len(data_set.events) == 1
    assert data_set.events == ref_cat
    del data_set
    os.remove(asdf_filename)

    # Add as open file.
    data_set = ASDFDataSet(asdf_filename)
    assert len(data_set.events) == 0
    with open(event_filename, "rb") as fh:
        data_set.add_quakeml(fh)
    assert len(data_set.events) == 1
    assert data_set.events == ref_cat
    del data_set
    os.remove(asdf_filename)

    # Add as BytesIO.
    data_set = ASDFDataSet(asdf_filename)
    assert len(data_set.events) == 0
    with open(event_filename, "rb") as fh:
        temp = io.BytesIO(fh.read())
    temp.seek(0, 0)
    data_set.add_quakeml(temp)
    assert len(data_set.events) == 1
    assert data_set.events == ref_cat
    del data_set
    os.remove(asdf_filename)

    # Add as ObsPy Catalog.
    data_set = ASDFDataSet(asdf_filename)
    assert len(data_set.events) == 0
    data_set.add_quakeml(ref_cat.copy())
    assert len(data_set.events) == 1
    assert data_set.events == ref_cat
    del data_set
    os.remove(asdf_filename)

    # Add as an ObsPy event.
    data_set = ASDFDataSet(asdf_filename)
    assert len(data_set.events) == 0
    data_set.add_quakeml(ref_cat.copy()[0])
    assert len(data_set.events) == 1
    assert data_set.events == ref_cat
    del data_set
    os.remove(asdf_filename)
def from_quakeml_file(self, filename):
    """
    Initialize a source object from a quakeml file
    :param filename: path to a quakeml file
    """
    from obspy import readEvents
    cat = readEvents(filename)
    event = cat[0]
    cmtsolution = event.preferred_origin()
    pdesolution = event.origins[0]

    origin_time = pdesolution.time
    pde_lat = pdesolution.latitude
    pde_lon = pdesolution.longitude
    pde_depth_in_m = pdesolution.depth
    for mag in event.magnitudes:
        if mag.magnitude_type == "mb":
            mb = mag.mag
        elif mag.magnitude_type == "MS":
            ms = mag.mag

    region_tag = event.event_descriptions[0].text
    for descrip in event.event_descriptions:
        if descrip.type == "earthquake name":
            eventname = descrip.text
    eventname = self.adjust_eventname(eventname)

    cmt_time = cmtsolution.time
    focal_mechanism = event.focal_mechanisms[0]
    half_duration = \
        focal_mechanism.moment_tensor.source_time_function.duration / 2.0
    latitude = cmtsolution.latitude
    longitude = cmtsolution.longitude
    depth_in_m = cmtsolution.depth

    # Convert the moment tensor from N*m (QuakeML) to dyne*cm.
    tensor = focal_mechanism.moment_tensor.tensor
    m_rr = tensor.m_rr * 1e7
    m_tt = tensor.m_tt * 1e7
    m_pp = tensor.m_pp * 1e7
    m_rt = tensor.m_rt * 1e7
    m_rp = tensor.m_rp * 1e7
    m_tp = tensor.m_tp * 1e7

    return self(origin_time=origin_time,
                pde_latitude=pde_lat, pde_longitude=pde_lon,
                mb=mb, ms=ms, pde_depth_in_m=pde_depth_in_m,
                region_tag=region_tag, eventname=eventname,
                cmt_time=cmt_time, half_duration=half_duration,
                latitude=latitude, longitude=longitude,
                depth_in_m=depth_in_m,
                m_rr=m_rr, m_tt=m_tt, m_pp=m_pp,
                m_rt=m_rt, m_rp=m_rp, m_tp=m_tp)
def events(self):
    data = self.__file["QuakeML"]
    if not len(data.value):
        return obspy.core.event.Catalog()

    cat = obspy.readEvents(io.BytesIO(data.value.tostring()),
                           format="quakeml")
    return cat
def get_event_time(event):
    """
    Extract origin time from event XML file.
    """
    if not isinstance(event, obspy.core.event.Event):
        event = obspy.readEvents(event)[0]
    origin = event.preferred_origin() or event.origins[0]
    return origin.time
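# Usage sketch for get_event_time() above; "quake.xml" stands in for any
# QuakeML file on disk. Passing a filename or an already-parsed Event
# returns the same UTCDateTime.
t_from_file = get_event_time("quake.xml")
t_from_event = get_event_time(obspy.readEvents("quake.xml")[0])
assert t_from_file == t_from_event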
def test_directory_from_seis2disk(setup_directory_to_save):
    for fts in [file_to_save1, file_to_save2, file_to_save3]:
        assert os.path.exists(fts)
    cat = obspy.core.event.Catalog()
    for xml in walkdir('DelXML', '.xml'):
        cat += obspy.readEvents(xml)
    assert isinstance(cat, obspy.core.event.Catalog)
    assert len(cat) > 0
def test_parse_obspy_objects():
    """
    Tests parsing from ObsPy objects.
    """
    cat = obspy.readEvents(EVENT_FILE)
    ev = cat[0]

    _assert_src(Source.parse(cat))
    _assert_src(Source.parse(ev))
def main():
    reprocess = True
    basedir = 'semucb_dataset'
    datadir = os.path.join(basedir, 'data')

    # ---- input files ----
    fname_evcatalogue = os.path.join(basedir, 'evcatalogue_semucb.ndk')

    # ==== PROCESSING OPTIONS ====
    fs = 1. / 8.
    pre_filt = (0., 1. / 3600., 1. / 2. * fs - 0.0001, 1. / 2. * fs)
    water_level = 100

    # ==== PREPARE STATION/EVENT LIST ====
    print 'reading event catalogue...'
    evcatalogue = NDKFile(fname_evcatalogue)
    print 'found {:d} events'.format(evcatalogue.nevents)

    # ==== LOOP THROUGH DATA ====
    for name in evcatalogue.names:
        print '---- {:s} ----'.format(name)

        # check if data is already processed
        path = os.path.join(datadir, name)
        fname_out = path + '_wfproc.mseed'
        if os.path.exists(fname_out) and not reprocess:
            print 'already processed'
            continue

        # read dataset:
        try:
            events = readEvents(path + '_events.xml')
            stations = read_inventory(path + '_stations.xml',
                                      format='STATIONXML')
            waveforms = read(path + '_waveforms.mseed', format='MSEED')
        except Exception, err:
            print err
            print 'can\'t open dataset'
            continue

        # remove tides with splines and then the instrument response
        waveforms.attach_response(stations)
        nwaveforms = len(waveforms)
        for itr, tr in enumerate(waveforms):
            if itr % 20 == 0:
                print 'removing response {:d}/{:d}'.format(itr, nwaveforms)

            # detide with spline (reduces boundary effects)
            trange = tr.stats.endtime - tr.stats.starttime
            taxis = tr.times()
            dspline = 500.
            nsplines = trange / dspline
            splknots = np.linspace(taxis[0] + dspline / 2.,
                                   taxis[-1] - dspline / 2., nsplines)
            spl = LSQUnivariateSpline(taxis, tr.data, splknots)
            tr.data -= spl(taxis).astype('int32')
            try:
                tr.remove_response(pre_filt=pre_filt, output='ACC',
                                   water_level=water_level, taper=True,
                                   taper_fraction=0.05)
            except Exception, err:
                print err
            tr.data = tr.data.astype(np.float32)
def test_faulty_events(tmpdir, recwarn):
    tmpdir = str(tmpdir)
    file_1 = os.path.join(
        os.path.dirname(
            os.path.dirname(
                os.path.abspath(inspect.getfile(inspect.currentframe())))),
        "data", "ExampleProject", "EVENTS",
        "GCMT_event_TURKEY_Mag_5.1_2010-3-24-14-11.xml")
    file_2 = os.path.join(
        os.path.dirname(
            os.path.dirname(
                os.path.abspath(inspect.getfile(inspect.currentframe())))),
        "data", "ExampleProject", "EVENTS",
        "GCMT_event_TURKEY_Mag_5.9_2011-5-19-20-15.xml")
    cat = obspy.readEvents(file_1)
    cat += obspy.readEvents(file_2)

    # Modify it to trigger all problems.
    temp = io.BytesIO()
    cat.write(temp, format="quakeml")
    temp.seek(0, 0)
    temp = temp.read()
    pattern = re.compile(r"<depth>.*?</depth>", re.DOTALL)
    temp = re.sub(pattern, "<depth></depth>", temp)
    temp = re.sub(r"<type>.*?</type>", "<type></type>", temp)
    with open(os.path.join(tmpdir, "random.xml"), "wb") as fh:
        fh.write(temp)

    comm = Communicator()
    comm.project = mock.MagicMock()
    comm.project.read_only_caches = False
    comm.project.paths = {"cache": tmpdir, "root": tmpdir}
    EventsComponent(tmpdir, comm, "events")

    event = comm.events.get('random')
    assert "QuakeML file must have exactly one event." in str(
        recwarn.pop(LASIFWarning).message)
    assert "contains no depth" in str(recwarn.pop(LASIFWarning).message)
    assert "Magnitude has no specified type" in str(
        recwarn.pop(LASIFWarning).message)

    # Assert the default values it will then take.
    assert event["depth_in_km"] == 0.0
    assert event["magnitude_type"] == "Mw"
def _extract_index_values_quakeml(filename):
    """
    Reads QuakeML files and extracts some keys per channel. Only one
    event per file is allowed.
    """
    from obspy.core.util import FlinnEngdahl

    try:
        cat = obspy.readEvents(filename)
    except:
        msg = "Not a valid QuakeML file?"
        raise EventCacheError(msg)

    if len(cat) != 1:
        warnings.warn(
            "Each QuakeML file must have exactly one event. Event '%s' "
            "has %i. Only the first one will be parsed." % (
                filename, len(cat)), LASIFWarning)

    event = cat[0]

    # Extract information.
    mag = event.preferred_magnitude() or event.magnitudes[0]
    org = event.preferred_origin() or event.origins[0]

    if org.depth is None:
        warnings.warn("Origin contains no depth. Will be assumed to be 0",
                      LASIFWarning)
        org.depth = 0.0

    if mag.magnitude_type is None:
        warnings.warn("Magnitude has no specified type. Will be assumed "
                      "to be Mw", LASIFWarning)
        mag.magnitude_type = "Mw"

    # Get the moment tensor.
    fm = event.preferred_focal_mechanism() or event.focal_mechanisms[0]
    mt = fm.moment_tensor.tensor

    event_name = os.path.splitext(os.path.basename(filename))[0]

    return [[
        str(filename),
        str(event_name),
        float(org.latitude),
        float(org.longitude),
        float(org.depth / 1000.0),
        float(org.time.timestamp),
        float(mt.m_rr),
        float(mt.m_pp),
        float(mt.m_tt),
        float(mt.m_rp),
        float(mt.m_rt),
        float(mt.m_tp),
        float(mag.mag),
        str(mag.magnitude_type),
        str(FlinnEngdahl().get_region(org.longitude, org.latitude))
    ]]
def parse(filename_or_obj):
    """
    Attempts to parse anything to a Source object. Can currently read
    anything ObsPy can read, ObsPy event related objects, and
    CMTSOLUTION files.

    For anything ObsPy related, it must contain a full moment tensor,
    otherwise it will raise an error.

    :param filename_or_obj: The object or filename to parse.
    """
    if isinstance(filename_or_obj, basestring):
        # Anything ObsPy can read.
        try:
            src = obspy.readEvents(filename_or_obj)
        except:
            pass
        else:
            return Source.parse(src)
        # CMT solution file.
        try:
            return Source.from_CMTSOLUTION_file(filename_or_obj)
        except:
            pass
        raise SourceParseError("Could not parse the given source.")
    elif isinstance(filename_or_obj, obspy.Catalog):
        if len(filename_or_obj) == 0:
            raise SourceParseError("Event catalog contains zero events.")
        elif len(filename_or_obj) > 1:
            raise SourceParseError(
                "Event catalog contains %i events. Only one is allowed. "
                "Please parse separately." % len(filename_or_obj))
        return Source.parse(filename_or_obj[0])
    elif isinstance(filename_or_obj, obspy.core.event.Event):
        ev = filename_or_obj
        if not ev.origins:
            raise SourceParseError("Event must contain an origin.")
        if not ev.focal_mechanisms:
            raise SourceParseError("Event must contain a focal mechanism.")
        org = ev.preferred_origin() or ev.origins[0]
        fm = ev.preferred_focal_mechanism() or ev.focal_mechanisms[0]
        if not fm.moment_tensor:
            raise SourceParseError("Event must contain a moment tensor.")
        t = fm.moment_tensor.tensor
        return Source(
            latitude=org.latitude,
            longitude=org.longitude,
            depth_in_m=org.depth,
            m_rr=t.m_rr,
            m_tt=t.m_tt,
            m_pp=t.m_pp,
            m_rt=t.m_rt,
            m_rp=t.m_rp,
            m_tp=t.m_tp)
    else:
        raise NotImplementedError
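# Usage sketch for parse() above; "event.xml" is a placeholder QuakeML file
# containing a full moment tensor. parse() recurses from filename to
# Catalog to Event, so all three call forms yield equal Source objects.
cat = obspy.readEvents("event.xml")
src_from_file = Source.parse("event.xml")
src_from_catalog = Source.parse(cat)
src_from_event = Source.parse(cat[0])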
def test_data_set_creation(tmpdir):
    """
    Test data set creation with a small test dataset.

    It tests that the stuff that goes in is correctly saved and can be
    retrieved again.
    """
    asdf_filename = os.path.join(tmpdir.strpath, "test.h5")
    data_path = os.path.join(data_dir, "small_sample_data_set")

    data_set = ASDFDataSet(asdf_filename)

    for filename in glob.glob(os.path.join(data_path, "*.mseed")):
        data_set.add_waveforms(filename, tag="raw_recording")

    for filename in glob.glob(os.path.join(data_path, "*.xml")):
        if "quake.xml" in filename:
            data_set.add_quakeml(filename)
        else:
            data_set.add_stationxml(filename)

    # Flush and finish writing.
    del data_set

    # Open once again
    data_set = ASDFDataSet(asdf_filename)

    # ObsPy is tested enough to make this comparison meaningful.
    for station in (("AE", "113A"), ("TA", "POKR")):
        # Test the waveforms
        stream_asdf = \
            getattr(data_set.waveforms, "%s_%s" % station).raw_recording
        stream_file = obspy.read(os.path.join(
            data_path, "%s.%s.*.mseed" % station))
        # Delete the file format specific stats attributes. These are
        # meaningless inside ASDF data sets.
        for trace in stream_file:
            del trace.stats.mseed
            del trace.stats._format
        for trace in stream_asdf:
            del trace.stats.asdf
            del trace.stats._format
        assert stream_asdf == stream_file

        # Test the inventory data.
        inv_asdf = \
            getattr(data_set.waveforms, "%s_%s" % station).StationXML
        inv_file = obspy.read_inventory(
            os.path.join(data_path, "%s.%s..BH*.xml" % station))
        assert inv_file == inv_asdf

    # Test the event.
    cat_file = obspy.readEvents(os.path.join(data_path, "quake.xml"))
    cat_asdf = data_set.events
    assert cat_file == cat_asdf
def get_events():
    print('Read event file')
    try:
        return readEvents(evname)
    except:
        pass
    client = FSDNClient('NERIES')
    events = client.get_events(**event_kwargs)
    events.events.sort(key=lambda e: e.origins[0].time)
    events.write(evname, 'QUAKEML')
    return events
def get_events():
    try:
        return readEvents(evname)
    except:
        pass
    client = Client()
    events = client.get_events(starttime=t1, endtime=t2, latitude=lat,
                               longitude=lon, minradius=20, maxradius=100,
                               minmagnitude=6.)
    events.write(evname, 'QUAKEML')
    return events
def create_rfeventsfile(events='events.xml', eventsfile='events_rf.xml',
                        filters=None):
    if isinstance(events, basestring):
        if not os.path.exists(events):
            events = os.path.join(conf.dmt_path, 'EVENT', 'events.xml')
        events = readEvents(events)
    if filters:
        events.filter(*filters)
    eventsfile = os.path.join(conf.output_path, 'EVENT', eventsfile)
    create_dir(eventsfile)
    events.write(eventsfile, 'QUAKEML')
def _process(self, inputs):
    params = inputs['input']
    stations = params['station']
    networks = params['network']
    data_dir = os.environ['STAGED_DATA'] + '/' + params['data_dir']
    synt_dir = os.environ['STAGED_DATA'] + '/' + params['synt_dir']
    event_file = params['events']
    event_id = params['event_id']
    stations_dir = os.environ['STAGED_DATA'] + '/' + params['stations_dir']
    output_dir = os.environ['STAGED_DATA'] + '/' + params['output_dir']

    fe = 'v'
    if self.output_units == 'velocity':
        fe = 'v'
    elif self.output_units == 'displacement':
        fe = 'd'
    elif self.output_units == 'acceleration':
        fe = 'a'
    else:
        self.log('Did not recognise output units: %s' % self.output_units)

    quakeml = [_i for _i in obspy.readEvents(event_file)
               if _i.resource_id.id == params["event_id"]][0]

    for i in range(len(stations)):
        station = stations[i]
        network = networks[i]
        data_file = os.path.join(
            data_dir, network + "." + station + ".." + '?H?.mseed')
        synt_file = os.path.join(
            synt_dir, network + "." + station + "." + '?X?.seed' + fe)
        sxml = os.path.join(stations_dir, network + "." + station + ".xml")
        real_stream, sta, event = mf.read_stream(
            data_file, sxml=sxml, event_file=event_file, event_id=event_id)
        self.log("real_stream %s" % real_stream)
        synt_stream = get_synthetics(synt_file, get_event_time(quakeml))
        data, synt = sync_cut(real_stream, synt_stream)
        self.write(
            'output_real',
            [data, {'station': sta, 'event': event, 'stationxml': sxml,
                    'quakeml': quakeml, 'output_dir': output_dir}],
            metadata={'output_units': self.output_units,
                      'station': station, 'eventId': event_id,
                      'prov:type': 'observed-pipeline'})
        self.write(
            'output_synt',
            [synt, {'station': sta, 'event': event, 'stationxml': sxml,
                    'quakeml': quakeml, 'output_dir': output_dir}],
            metadata={'output_units': self.output_units,
                      'station': station, 'eventId': event_id,
                      'prov:type': 'synthetic-pipeline'})
def iris2quakeml(url, output_folder=None):
    if "/spudservice/" not in url:
        url = url.replace("/spud/", "/spudservice/")
        if url.endswith("/"):
            url += "quakeml"
        else:
            url += "/quakeml"
    print "Downloading %s..." % url

    r = urllib2.urlopen(url)
    if r.code != 200:
        r.close()
        msg = "Error Downloading file!"
        raise Exception(msg)

    # For some reason the quakeml file is escaped HTML.
    h = HTMLParser.HTMLParser()

    data = h.unescape(r.read())
    r.close()

    data = StringIO(data)

    try:
        cat = readEvents(data)
    except:
        msg = "Could not read downloaded event data"
        raise ValueError(msg)

    cat.events = cat.events[:1]
    ev = cat[0]

    # Parse the event and get the preferred focal mechanism. Then get the
    # origin and magnitude associated with that focal mechanism. All other
    # focal mechanisms, origins and magnitudes will be removed. Just makes it
    # simpler and less error prone.
    if ev.preferred_focal_mechanism():
        ev.focal_mechanisms = [ev.preferred_focal_mechanism()]
    else:
        ev.focal_mechanisms = ev.focal_mechanisms[:1]

    # Set the origin and magnitudes of the event.
    mt = ev.focal_mechanisms[0].moment_tensor
    ev.magnitudes = [mt.moment_magnitude_id.getReferredObject()]
    ev.origins = [mt.derived_origin_id.getReferredObject()]

    event_name = get_event_filename(ev, "GCMT")

    if output_folder:
        event_name = os.path.join(output_folder, event_name)
    cat.write(event_name, format="quakeml", validate=True)
    print "Written file", event_name
def test_read_single_ndk(self):
    """
    Test reading a single event from an NDK file and comparing it to a
    QuakeML file that has been manually checked to contain all the
    information in the NDK file.
    """
    filename = os.path.join(self.datapath, "C200604092050A.ndk")
    cat = read_ndk(filename)

    reference = os.path.join(self.datapath, "C200604092050A.xml")
    ref_cat = readEvents(reference)

    self.assertEqual(cat, ref_cat)
def test_adding_a_catalog_object():
    """
    Tests adding an obspy.core.event.Catalog object.
    """
    event_file_1 = os.path.join(DATA, "event1.xml")
    event_file_2 = os.path.join(DATA, "event2.xml")

    cat = obspy.readEvents(event_file_1)
    cat += obspy.readEvents(event_file_2)

    gen = InputFileGenerator()
    gen.add_events(cat)
    assert sorted(gen._events) == \
        [{"description": "FICTIONAL EVENT IN BAVARIA",
          "latitude": 45.0,
          "longitude": 12.1,
          "depth_in_km": 13.0,
          "origin_time": obspy.UTCDateTime(2012, 4, 12, 7, 15, 48, 500000),
          "m_rr": -2.11e+18,
          "m_tt": -4.22e+19,
          "m_pp": 4.43e+19,
          "m_rt": -9.35e+18,
          "m_rp": -8.38e+18,
          "m_tp": -6.44e+18,
          "_event_id": "smi:local/Event/2013-01-05T20:19:58.727909"},
         {"description": "GUATEMALA",
          "latitude": 13.93,
          "longitude": -92.47,
          "depth_in_km": 28.7,
          "origin_time": obspy.UTCDateTime(2012, 11, 7, 16, 35, 55, 200000),
          "m_rr": 1.02e+20,
          "m_tt": -7.96e+19,
          "m_pp": -2.19e+19,
          "m_rt": 6.94e+19,
          "m_rp": -4.08e+19,
          "m_tp": 4.09e+19,
          "_event_id": "smi:local/Event/2013-01-07T13:58:41.209477"}]
def request_gcmt_events(gcmt_urls):
    for key in gcmt_urls.keys():
        url = gcmt_urls[key]
        request = requests.get(url)
        if request.status_code == 200:
            cat = readEvents(url)
            if len(cat) > 0:
                directory = config.QUAKEML_ROOT_DIR + key
                if not os.path.exists(directory):
                    os.makedirs(directory)
                for event in cat:
                    fileName = str(event.resource_id).replace(
                        "smi:local/ndk/", "").replace("/event", "") + ".xml"
                    event.write(directory + "/" + fileName,
                                format="quakeml")
def test_reading_event_with_faulty_but_often_occurring_timestamp(self):
    """
    The timestamp "O-00000000000000" is not valid according to the NDK
    definition but occurs a lot in the GCMT catalog, thus we include it
    here.
    """
    filename = os.path.join(self.datapath, "faulty_cmt_timestamp.ndk")

    cat = readEvents(filename)

    self.assertEqual(len(cat), 1)
    comments = cat[0].focal_mechanisms[0].comments
    self.assertTrue("CMT Analysis Type: Unknown" in comments[0].text)
    self.assertTrue("CMT Timestamp: O-000000000" in comments[1].text)
def get_windows():
    obs_tr = read(obsfile).select(channel="*R")[0]
    syn_tr = read(synfile).select(channel="*R")[0]

    config_file = os.path.join(
        DATA_DIR, "window", "27_60.BHZ.config.yaml")
    config = wio.load_window_config_yaml(config_file)

    cat = readEvents(quakeml)
    inv = read_inventory(staxml)

    windows = win.window_on_trace(obs_tr, syn_tr, config, station=inv,
                                  event=cat, _verbose=False,
                                  figure_mode=False)
    return windows
def read_file(stream, inventory, catalog, array=False):
    """
    Function to read data files, such as MSEED, StationXML and QuakeML,
    in the manner of obspy.read. If needed, pushes the stream into an
    array for further processing.
    """
    st = obspy.read(stream)
    inv = obspy.read_inventory(inventory)
    cat = obspy.readEvents(catalog)

    # pushing the trace data in an array
    if array:
        ArrayData = stream2array(st)
        return (st, inv, cat, ArrayData)
    else:
        return (st, inv, cat)
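# Usage sketch for read_file() above; the three paths are placeholders for
# a miniSEED, a StationXML and a QuakeML file. With array=True the stream
# additionally comes back as an array built by stream2array().
st, inv, cat, trace_array = read_file("waves.mseed", "stations.xml",
                                      "quake.xml", array=True)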
def test_reading_the_same_file_twice_does_not_raise_a_warnings(self):
    """
    Asserts that reading the same file twice does not raise a warning
    due to a resource identifier already being in use.
    """
    filename = os.path.join(self.datapath, "C200604092050A.ndk")
    cat_1 = readEvents(filename)
    with warnings.catch_warnings(record=True) as w:
        warnings.simplefilter("always")
        cat_2 = readEvents(filename)
    self.assertEqual(len(w), 0)
    self.assertEqual(cat_1, cat_2)

    filename = os.path.join(self.datapath, "multiple_events.ndk")
    cat_1 = readEvents(filename)
    with warnings.catch_warnings(record=True) as w:
        warnings.simplefilter("always")
        cat_2 = readEvents(filename)
    self.assertEqual(len(w), 0)
    self.assertEqual(cat_1, cat_2)
def create_event_json_file(output_filename):
    """
    Download the GCMT catalog up to 2013 and store everything in a JSON
    file which can be used for the later stage.

    If the file already exists this function is a no-op.
    """
    if os.path.exists(output_filename):
        print("File '%s' already exists. It will not be recreated." %
              output_filename)
        return

    print("Downloading GCMT catalog up to 2013...")
    cat = obspy.readEvents(
        "http://www.ldeo.columbia.edu/~gcmt/projects/CMT/"
        "catalog/jan76_dec13.ndk.gz")

    events = {}
    for event in cat:
        # This only works due to the way ObsPy parses NDK files.
        gcmt_id = [_i.text for _i in event.event_descriptions
                   if _i.type == "earthquake name"][0]
        # Get the centroid origin.
        origin = [_i for _i in event.origins
                  if _i.origin_type == "centroid"][0]
        mt = event.focal_mechanisms[0].moment_tensor.tensor

        events[gcmt_id] = {
            "latitude": float(origin.latitude),
            "longitude": float(origin.longitude),
            "depth_in_m": float(origin.depth),
            "origin_time": str(origin.time),
            "m_rr": mt.m_rr,
            "m_tt": mt.m_tt,
            "m_pp": mt.m_pp,
            "m_rt": mt.m_rt,
            "m_rp": mt.m_rp,
            "m_tp": mt.m_tp,
        }

    with open(output_filename, "wt") as fh:
        json.dump(events, fh)
def get_event(self, event_name):
    """
    Helper function to avoid reading one event twice.
    """
    from obspy import readEvents

    if not hasattr(self, "_seismic_events"):
        self._seismic_events = {}
    # Read the file if it has not been read yet.
    if event_name not in self._seismic_events:
        filename = os.path.join(self.paths["events"],
                                "%s%sxml" % (event_name, os.path.extsep))
        if not os.path.exists(filename):
            return None
        self._seismic_events[event_name] = readEvents(filename)[0]
    return self._seismic_events[event_name]
def test_reading_from_bytestring(self):
    """
    Tests reading from a byte string.
    """
    filename = os.path.join(self.datapath, "C200604092050A.ndk")

    reference = os.path.join(self.datapath, "C200604092050A.xml")
    ref_cat = readEvents(reference)

    with io.open(filename, "rb") as fh:
        data = fh.read()

    self.assertTrue(is_ndk(data))
    cat = read_ndk(data)

    self.assertEqual(cat, ref_cat)
def _read_GCMT_catalog(min_year=None, max_year=None):
    """
    Helper function reading the GCMT data shipping with LASIF.

    :param min_year: The minimum year to read.
    :type min_year: int, optional
    :param max_year: The maximum year to read.
    :type max_year: int, optional
    """
    # easier tests
    if min_year is None:
        min_year = 0
    else:
        min_year = int(min_year)
    if max_year is None:
        max_year = 3000
    else:
        max_year = int(max_year)

    data_dir = os.path.join(
        os.path.dirname(
            os.path.dirname(
                os.path.abspath(inspect.getfile(inspect.currentframe())))),
        "data", "GCMT_Catalog")
    available_years = [_i for _i in os.listdir(data_dir) if _i.isdigit()]
    available_years.sort()
    print("LASIF currently contains GCMT data from %s to %s/%i." % (
        available_years[0], available_years[-1],
        len(glob.glob(os.path.join(data_dir, available_years[-1],
                                   "*.ndk*")))))

    available_years = [
        _i for _i in os.listdir(data_dir)
        if _i.isdigit() and (min_year <= int(_i) <= max_year)]
    available_years.sort()

    print("Parsing the GCMT catalog. This might take a while...")
    cat = Catalog()
    for year in available_years:
        print("\tReading year %s ..." % year)
        for filename in glob.glob(os.path.join(data_dir, str(year),
                                               "*.ndk*")):
            cat += obspy.readEvents(filename, format="ndk")

    return cat
def test_event_information_extraction():
    """
    Event information can either be passed or read from sac files.
    """
    config = pyflex.Config(min_period=50.0, max_period=150.0)

    # If not passed, it is read from sac files, if available.
    ws = pyflex.window_selector.WindowSelector(OBS_DATA, SYNTH_DATA, config)
    assert abs(ws.event.latitude - -3.77) <= 1E-5
    assert abs(ws.event.longitude - -77.07) <= 1E-5
    assert abs(ws.event.depth_in_m - 112800.00305) <= 1E-5
    assert ws.event.origin_time == \
        obspy.UTCDateTime(1995, 5, 2, 6, 6, 13, 900000)

    # If it is passed, the passed event will be used.
    ev = pyflex.Event(1, 2, 3, obspy.UTCDateTime(2012, 1, 1))
    ws = pyflex.window_selector.WindowSelector(OBS_DATA, SYNTH_DATA, config,
                                               event=ev)
    assert ws.event == ev

    # Alternatively, an ObsPy Catalog or Event object can be passed which
    # opens the gate to more complex workflows.
    cat = obspy.readEvents()
    cat.events = cat.events[:1]
    event = cat[0]

    ev = pyflex.Event(event.origins[0].latitude, event.origins[0].longitude,
                      event.origins[0].depth, event.origins[0].time)

    # Test catalog.
    ws = pyflex.window_selector.WindowSelector(OBS_DATA, SYNTH_DATA, config,
                                               event=cat)
    assert ws.event == ev

    # Test event.
    ws = pyflex.window_selector.WindowSelector(OBS_DATA, SYNTH_DATA, config,
                                               event=cat[0])
    assert ws.event == ev
def test_reading_file_with_multiple_errors(self):
    """
    Tests reading a file with multiple errors.
    """
    filename = os.path.join(self.datapath, "faulty_multiple_events.ndk")

    with warnings.catch_warnings(record=True) as w:
        warnings.simplefilter("always")
        cat = readEvents(filename)

        self.assertEqual(len(w), 6)
        self.assertTrue("Invalid time in event 2" in str(w[0]))
        self.assertTrue("Unknown data type" in str(w[1]))
        self.assertTrue("Moment rate function" in str(w[2]))
        self.assertTrue("Unknown source type" in str(w[3]))
        self.assertTrue("Unknown type of depth" in str(w[4]))
        self.assertTrue("Invalid CMT timestamp" in str(w[5]))

    # One event should still be available.
    self.assertEqual(len(cat), 1)