def read_phase(ph_file):
    """
    Read hypoDD phase files into Obspy catalog class.

    :type ph_file: str
    :param ph_file: Phase file to read event info from.

    :returns: obspy.core.event.Catalog

    >>> from obspy.core.event.catalog import Catalog
    >>> catalog = read_phase('eqcorrscan/tests/test_data/tunnel.phase')
    >>> isinstance(catalog, Catalog)
    True
    """
    from obspy.core.event import Catalog
    ph_catalog = Catalog()
    event_text = None
    with open(ph_file, 'r') as f:
        # The top line of each event is marked by # in position 0
        for line in f:
            if line[0] == '#':
                # Flush the previous event before starting a new one
                if event_text is not None:
                    ph_catalog.append(_phase_to_event(event_text))
                event_text = {'header': line.rstrip(), 'picks': []}
            else:
                event_text['picks'].append(line.rstrip())
    # The final event is not followed by another header, append it too
    if event_text is not None:
        ph_catalog.append(_phase_to_event(event_text))
    return ph_catalog
def export_picks(self, filename, start_trace=None, end_trace=None,
                 format="NLLOC_OBS", debug=False, **kwargs):
    """
    Export picks from the selected traces to event file(s) in the
    given format.
    """
    event_list = []
    for trace in self.traces[start_trace:end_trace]:
        event_list.extend([Event(picks=[pick]) for pick in trace.events])
    # Export to desired format
    if format == 'NLLOC_OBS':
        basename, ext = os.path.splitext(filename)
        for event in event_list:
            ts = event.picks[0].time.strftime("%Y%m%d%H%M%S%f")
            event_filename = "%s_%s%s" % (basename, ts, ext)
            if debug:
                print("Generating event file {}".format(event_filename))
            event.write(event_filename, format=format)
    else:
        event_catalog = Catalog(event_list)
        if debug:
            print("Generating event file {}".format(filename))
        event_catalog.write(filename, format=format, **kwargs)
def _deserialize(self, zmap_str): catalog = Catalog() for row in zmap_str.split('\n'): if len(row) == 0: continue origin = Origin() event = Event(origins=[origin]) event.preferred_origin_id = origin.resource_id.id # Begin value extraction columns = row.split('\t', 13)[:13] # ignore extra columns values = dict(zip(_STD_ZMAP_COLUMNS + _EXT_ZMAP_COLUMNS, columns)) # Extract origin origin.longitude = self._str2num(values.get('lon')) origin.latitude = self._str2num(values.get('lat')) depth = self._str2num(values.get('depth')) if depth is not None: origin.depth = depth * 1000.0 z_err = self._str2num(values.get('z_err')) if z_err is not None: origin.depth_errors.uncertainty = z_err * 1000.0 h_err = self._str2num(values.get('h_err')) if h_err is not None: ou = OriginUncertainty() ou.horizontal_uncertainty = h_err ou.preferred_description = 'horizontal uncertainty' origin.origin_uncertainty = ou year = self._str2num(values.get('year')) if year is not None: t_fields = ['year', 'month', 'day', 'hour', 'minute', 'second'] comps = [self._str2num(values.get(f)) for f in t_fields] if year % 1 != 0: origin.time = self._decyear2utc(year) elif any(v > 0 for v in comps[1:]): # no seconds involved if len(comps) < 6: utc_args = [int(v) for v in comps if v is not None] # we also have to handle seconds else: utc_args = [int(v) if v is not None else 0 for v in comps[:-1]] # just leave float seconds as is utc_args.append(comps[-1]) origin.time = UTCDateTime(*utc_args) mag = self._str2num(values.get('mag')) # Extract magnitude if mag is not None: magnitude = Magnitude(mag=mag) m_err = self._str2num(values.get('m_err')) magnitude.mag_errors.uncertainty = m_err event.magnitudes.append(magnitude) event.preferred_magnitude_id = magnitude.resource_id.id event.scope_resource_ids() catalog.append(event) return catalog
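# Usage sketch for the ZMAP deserializer above. Since it registers with
# ObsPy's plugin machinery, ZMAP text is normally read through
# read_events. The single-row catalog below is made up; the column order
# (lon, lat, year, month, day, mag, depth, hour, minute, second) follows
# _STD_ZMAP_COLUMNS.
def _example_read_zmap():
    import tempfile
    from obspy import read_events

    row = '\t'.join(['-149.073', '56.046', '2018', '1', '23',
                     '7.9', '25.0', '9', '31', '42.0']) + '\n'
    with tempfile.NamedTemporaryFile('w', suffix='.zmap',
                                     delete=False) as f:
        f.write(row)
    catalog = read_events(f.name, format='ZMAP')
    print(catalog[0].origins[0].latitude)  # 56.046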
def test_count_and_len(self): """ Tests the count and __len__ methods of the Catalog object. """ # empty catalog without events catalog = Catalog() self.assertEqual(len(catalog), 0) self.assertEqual(catalog.count(), 0) # catalog with events catalog = read_events() self.assertEqual(len(catalog), 3) self.assertEqual(catalog.count(), 3)
def append_cmt_to_catalog(event_origin, cmt_to_add, tag="new_cmt",
                          author="Princeton GATG",
                          change_preferred_id=True):
    """
    Add cmt to event. The tag will be appended to the cmt.resource_id
    to avoid duplicated ids in the event.

    :param event_origin: the event that you want to add the cmt to.
    :type event_origin: str, obspy.core.event.Event or
        obspy.core.event.Catalog
    :param cmt_to_add: the cmt that you want to add to the event.
    :type cmt_to_add: str, obspy.core.event.Event or
        obspy.core.event.Catalog
    :param change_preferred_id: change all preferred_ids to the newly
        added cmt
    :type change_preferred_id: bool
    :return: obspy.Catalog
    """
    event = _parse_event(event_origin)
    cmt_event = _parse_event(cmt_to_add)
    if not isinstance(tag, str):
        raise TypeError("tag(%s) should be type of str" % type(tag))
    if not isinstance(author, str):
        raise TypeError("author(%s) should be type of str" % type(author))
    # User defined creation information
    creation_info = CreationInfo(author=author, version=tag)
    # add cmt origin
    cmt_origin = prepare_cmt_origin(cmt_event, tag, creation_info)
    event.origins.append(cmt_origin)
    # add cmt magnitude
    cmt_mag = prepare_cmt_mag(cmt_event, tag, cmt_origin.resource_id,
                              creation_info)
    event.magnitudes.append(cmt_mag)
    # add cmt focal mechanism
    cmt_focal = prepare_cmt_focal(cmt_event, tag, cmt_origin.resource_id,
                                  cmt_mag.resource_id, creation_info)
    event.focal_mechanisms.append(cmt_focal)
    # change preferred id if needed
    if change_preferred_id:
        event.preferred_origin_id = str(cmt_origin.resource_id)
        event.preferred_magnitude_id = str(cmt_mag.resource_id)
        event.preferred_focal_mechanism_id = str(cmt_focal.resource_id)
    _validator(event, cmt_origin, cmt_mag, cmt_focal)
    new_cat = Catalog()
    new_cat.append(event)
    return new_cat
def node_catalog_no_picks(node_catalog) -> Tuple[obspy.Catalog, Dict]: """return the node catalog with just origins""" eid_map = {} cat = Catalog() for num, eve in enumerate(node_catalog): eve_out = Event(origins=eve.origins) for o in eve_out.origins: o.arrivals = [] eve_out.resource_id = ResourceIdentifier(f"event_{num}") cat.append(eve_out) eid_map[eve.resource_id.id] = eve_out.resource_id.id return cat, eid_map
def test_countAndLen(self): """ Tests the count and __len__ methods of the Catalog object. """ # empty catalog without events catalog = Catalog() self.assertEqual(len(catalog), 0) self.assertEqual(catalog.count(), 0) # catalog with events catalog = read_events() self.assertEqual(len(catalog), 3) self.assertEqual(catalog.count(), 3)
def test_count_and_len(self): """ Tests the count and __len__ methods of the Catalog object. """ # empty catalog without events catalog = Catalog() assert len(catalog) == 0 assert catalog.count() == 0 # catalog with events catalog = read_events() assert len(catalog) == 3 assert catalog.count() == 3
def detections_to_catalog(detections): r"""Helper to convert from list of detections to obspy catalog. :type detections: list :param detections: list of eqcorrscan.core.match_filter.detection :returns: obspy.core.event.Catalog """ from obspy.core.event import Catalog catalog = Catalog() for detection in detections: catalog.append(detection.event) return catalog
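# A quick usage sketch for the helper above; `detections` is assumed to
# be a list of eqcorrscan detection objects, each carrying an `event`
# attribute, as the docstring describes. The output file name is a
# placeholder.
def _example_export_detections(detections):
    catalog = detections_to_catalog(detections)
    catalog.write('detections.xml', format='QUAKEML')
    return catalog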
def catalog(self):
    """
    Add existing Event to a Catalog
    """
    c = Catalog(events=[self.event])
    c.creation_info = CreationInfo(
        creation_time=UTCDateTime(),
        agency_id=self.agency,
        version=self.event.creation_info.version,
    )
    c.resource_id = self._rid(c)
    return c
def parse_files(fnames): """Parses all given files for seiscomp xml""" j = 0 out = Catalog() for i, fname in enumerate(fnames): print('read ' + fname) out += readSeisComPEventXML0_6(fname) if (i + 1) % 100 == 0 or i == len(fnames) - 1: out_fname = str(j) + '.xml' print('write %d events to %s\n' % (len(out), out_fname)) out.write(out_fname, 'QUAKEML') out = Catalog() j += 1
def get_catalog(detections): """ Generate an obspy catalog from detections of DETECTION class. :type detections: list :param detections: list of eqcorrscan.core.match_filter.DETECTION :returns: obspy.core.event.Catalog """ from obspy.core.event import Catalog catalog = Catalog() for detection in detections: catalog.append(detection.event) return catalog
def get_events(self, evids=None, times=None, lats=None, lons=None, mags=None, depths=None, types=None, gtypes=None, output_file=None, is_xml=False): """ Download events from STP using the EVENT command. """ if not self.connected: print('STP is not connected') return None self._get_event_phase('event', evids, times, lats, lons, mags, depths, types, gtypes, output_file) catalog = Catalog() for line in self.message.splitlines(): if not line.startswith('#'): catalog.append(utils.make_event(line)) self._end_command() return catalog
def test_avoid_empty_stub_elements(self): """ Test for a bug in reading QuakeML. Makes sure that some subelements do not get assigned stub elements, but rather stay None. """ # Test 1: Test subelements of moment_tensor memfile = io.BytesIO() # create virtually empty FocalMechanism mt = MomentTensor(derived_origin_id='smi:local/abc') fm = FocalMechanism(moment_tensor=mt) event = Event(focal_mechanisms=[fm]) cat = Catalog(events=[event]) cat.write(memfile, format="QUAKEML", validate=True) # now read again, and make sure there's no stub subelements on # MomentTensor, but rather `None` memfile.seek(0) cat = read_events(memfile, format="QUAKEML") self.assertEqual(cat[0].focal_mechanisms[0].moment_tensor.tensor, None) self.assertEqual( cat[0].focal_mechanisms[0].moment_tensor.source_time_function, None) # Test 2: Test subelements of focal_mechanism memfile = io.BytesIO() # create virtually empty FocalMechanism fm = FocalMechanism() event = Event(focal_mechanisms=[fm]) cat = Catalog(events=[event]) cat.write(memfile, format="QUAKEML", validate=True) # now read again, and make sure there's no stub MomentTensor, but # rather `None` memfile.seek(0) cat = read_events(memfile, format="QUAKEML") self.assertEqual(cat[0].focal_mechanisms[0].nodal_planes, None) self.assertEqual(cat[0].focal_mechanisms[0].principal_axes, None)
def magnitude_rate_trigger_func(
    catalog: Catalog,
    magnitude_threshold: float = 5.5,
    rate_threshold: float = 10.,
    rate_bin: float = .2,
    minimum_events_in_bin: int = 5,
) -> Catalog:
    """
    Function to turn triggered response on based on magnitude and rate.

    Parameters
    ----------
    catalog:
        Catalog to look in
    magnitude_threshold:
        magnitude threshold for triggering a response
    rate_threshold:
        rate in events per day for triggering a response
    rate_bin:
        radius in degrees to calculate rate for.
    minimum_events_in_bin:
        Minimum number of events in a bin to calculate a rate for.

    Returns
    -------
    The events that forced the trigger.
    """
    trigger_events = Catalog()
    for event in catalog:
        try:
            magnitude = event.preferred_magnitude() or event.magnitudes[0]
        except IndexError:
            continue
        if magnitude.mag >= magnitude_threshold:
            trigger_events.events.append(event)
    for event in catalog:
        sub_catalog = get_nearby_events(event, catalog, radius=rate_bin)
        if len(sub_catalog) >= minimum_events_in_bin:
            rate = average_rate(sub_catalog)
        else:
            rate = 0.
        if rate >= rate_threshold:
            for _event in sub_catalog:
                if _event not in trigger_events:
                    trigger_events.events.append(_event)
    # trigger_events is already a Catalog, so return it directly rather
    # than wrapping it in another Catalog
    return trigger_events
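# Illustrative driver for the trigger function above; the FDSN client
# name and both thresholds are placeholders, not recommendations.
def _example_run_trigger():
    from obspy.clients.fdsn import Client

    catalog = Client('IRIS').get_events(minmagnitude=5.0, limit=50)
    triggered = magnitude_rate_trigger_func(
        catalog, magnitude_threshold=6.0, rate_threshold=20.0)
    print('%i events tripped the trigger' % len(triggered))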
def _day_loop(detection_streams, template, min_cc, interpolate=False, cores=False, debug=0): """ Function to loop through multiple detections for one template. Designed to run for the same day of data for I/O simplicity, but as you are passing stream objects it could run for all the detections ever, as long as you have the RAM! :type detection_streams: list :param detection_streams: List of all the detections for this template that you want to compute the optimum pick for. Individual things in list should be of obspy.core.stream.Stream type. :type template: obspy.core.stream.Stream :param template: The original template used to detect the detections passed :type min_cc: float :param min_cc: Minimum cross-correlation value to be allowed for a pick. :type interpolate: bool :param interpolate: Interpolate the correlation function to achieve \ sub-sample precision. :returns: Catalog object containing Event objects for each detection created by this template. :rtype: obspy.core.event.Catalog """ from multiprocessing import Pool, cpu_count # Used to run detections in parallel from obspy.core.event import Catalog if not cores: num_cores = cpu_count() else: num_cores = cores if num_cores > len(detection_streams): num_cores = len(detection_streams) pool = Pool(processes=num_cores) # Parallelize generation of events for each detection: # results is a list of (i, event class) results = [pool.apply_async(_channel_loop, args=(detection_streams[i], template, min_cc, interpolate, i, debug)) for i in range(len(detection_streams))] pool.close() events_list = [p.get() for p in results] pool.join() events_list.sort(key=lambda tup: tup[0]) # Sort based on i. temp_catalog = Catalog() temp_catalog.events = [event_tup[1] for event_tup in events_list] return temp_catalog
def rest_to_obspy(filename, polfile):
    """
    Read REST formatted event info to an ObsPy Catalog object.

    :param filename: File to read from
    :type filename: str

    :returns: :class:`obspy.core.event.Catalog`
    """
    catalog = Catalog()
    with open(filename, 'r') as f:
        full_str = [line for line in f]
    with open(polfile, 'r') as pf:
        all_pol_str = [line for line in pf]
    # this test is not valid for the full file.
    # if not is_rest(event_str):
    #     raise IOError(
    #         "%s is not REST formatted (as coded)" % filename)
    event_str = []
    ev_p_str = []
    for eline, pline in zip(full_str, all_pol_str):
        if len(eline.rstrip(" \n\r")) != 0:
            event_str.append(eline)
            ev_p_str.append(pline)
        else:
            event = read_origin(event_str)
            event = read_picks(event_str, event)
            event = read_pol(ev_p_str, event)
            catalog.events.append(event)
            event_str = []
            ev_p_str = []
    return catalog
def _read_pha(filename, eventid_map=None, encoding='utf-8', **kwargs): """ Read a HypoDD PHA file and returns an ObsPy Catalog object. .. warning:: This function should NOT be called directly, it registers via the ObsPy :func:`~obspy.core.event.read_events` function, call this instead. :param str filename: File or file-like object in text mode. :param dict ph2comp: mapping of phases to components (default: {'P': 'Z', 'S': 'N'}) :param dict eventid_map: Desired mapping of hypodd event ids (dict values) to event resource ids (dict keys). The returned dictionary of the HYPODDPHA writing operation can be used. By default, ids are not mapped. :param str encoding: encoding used (default: utf-8) :rtype: :class:`~obspy.core.event.Catalog` :return: An ObsPy Catalog object. """ if eventid_map is not None: eventid_map = {v: k for k, v in eventid_map.items()} with io.open(filename, 'r', encoding=encoding) as f: text = f.read() events = [ _block2event(block, eventid_map, **kwargs) for block in text.split('#')[1:] ] return Catalog(events)
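# As the docstring above notes, this reader is not called directly but
# through ObsPy's read_events dispatch; a sketch of that route, with a
# placeholder file name:
def _example_read_pha(path='example.pha'):
    from obspy import read_events

    catalog = read_events(path, format='HYPODDPHA')
    print(len(catalog), 'events;', len(catalog[0].picks),
          'picks in the first')
    return catalog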
def read_select(select_file):
    """
    Read a catalog of events from a Nordic formatted select file.

    Generates a series of temporary files for each event in the select
    file.

    :type select_file: str
    :param select_file: Nordic formatted select.out file to open

    :return: catalog of events
    :rtype: obspy.core.event.Catalog
    """
    from obspy.core.event import Catalog
    from tempfile import NamedTemporaryFile
    import os

    catalog = Catalog()
    event_str = []
    with open(select_file, 'r') as f:
        # Events are separated by blank lines
        for line in f:
            if len(line.rstrip()) > 0:
                event_str.append(line)
            elif len(event_str) > 0:
                # Write to a temporary file then read from it
                tmp_sfile = NamedTemporaryFile(mode='w', delete=False)
                for event_line in event_str:
                    tmp_sfile.write(event_line)
                tmp_sfile.close()
                catalog += read_event(tmp_sfile.name)
                os.remove(tmp_sfile.name)
                event_str = []
    # Catch a final event that is not followed by a blank line
    if len(event_str) > 0:
        tmp_sfile = NamedTemporaryFile(mode='w', delete=False)
        for event_line in event_str:
            tmp_sfile.write(event_line)
        tmp_sfile.close()
        catalog += read_event(tmp_sfile.name)
        os.remove(tmp_sfile.name)
    return catalog
def get_nearby_events( event: Event, catalog: Catalog, radius: float ) -> Catalog: """ Get a catalog of events close to another event. Parameters ---------- event: Central event to calculate distance relative to catalog: Catalog to extract events from radius: Radius around `event` in km Returns ------- Catalog of events close to `event` """ sub_catalog = Catalog( [e for e in catalog.events if inter_event_distance(event, e) <= radius]) return sub_catalog
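# Minimal check of get_nearby_events against ObsPy's bundled example
# catalog (read_events() with no arguments). The radius is arbitrary,
# and inter_event_distance is assumed to return kilometres, matching
# the docstring above.
def _example_nearby():
    from obspy import read_events

    catalog = read_events()
    nearby = get_nearby_events(catalog[0], catalog, radius=200.0)
    print(len(nearby), 'events within 200 km')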
def _get_geonet_pubids(publicids, parallel=False): """ Get GeoNet events while they haven't included get_events in fdsn. :type publicids: list :param publicids: List of public id numbers for events wanted. :returns: Catalog of events :rtype: obspy.core.event.Catalog """ from obspy.clients.fdsn import Client from obspy.core.event import Catalog from multiprocessing import Pool, cpu_count client = Client('GEONET') catalog = Catalog() # Multi-process this bad-boy if not parallel: for publicid in publicids: catalog += _inner_get_event(publicid=publicid, client=client) else: pool = Pool(processes=cpu_count()) results = [ pool.apply_async(_inner_get_event, args=(publicid, client)) for publicid in publicids ] pool.close() cat_list = [p.get() for p in results] pool.join() for ev in cat_list: catalog += ev return catalog
def read_DD(event_file, network_code):
    """
    Read a hypoDD event/phase file into an ObsPy Catalog.
    """
    ### Start process
    f = open(event_file, 'r')
    catalog = Catalog()
    new_event = None
    for line in f:
        if line[0] == '#':
            # Header line: store the previous event before starting anew
            if new_event is not None:
                catalog.events.append(new_event)
            new_event = read_header_line(line)
        else:
            read_pick_line(line, new_event, network_code)
    ### Append last event when eof reached
    if new_event is not None:
        catalog.events.append(new_event)
    f.close()
    return catalog
def plot_some_events(): from obspy.core.event import Catalog, Event, Origin, Magnitude from obspy.core import UTCDateTime as UTC eqs = """2008-09-10T16:12:03 6.0 -20.40 -69.40 40 2008-03-24T20:39:06 5.9 -20.10 -69.20 85 2008-03-01T19:51:59 5.7 -20.10 -69.60 15 2008-02-15T16:54:04 5.5 -23.00 -70.20 32 2008-02-04T17:01:30 6.6 -20.20 -70.00 36 2007-12-16T08:09:16 7.1 -22.80 -70.00 14 2007-11-14T15:40:51 7.8 -22.34 -70.06 37""" #GEOFON:-22.30 -69.80 events = [] for eq in eqs.split('\n'): time, mag, lat, lon, depth = eq.split() ev = Event(event_type='earthquake', creation_info='GEOFON', origins=[Origin(time=UTC(time), latitude=float(lat), longitude=float(lon), depth=float(depth))], magnitudes=[Magnitude(mag=float(mag), magnitude_type='M')]) events.append(ev) cat = Catalog(events[::-1]) #print cat #cat.plot(projection='local') lons = [ev.origins[0].longitude for ev in cat] lats = [ev.origins[0].latitude for ev in cat] dates = [ev.origins[0].time for ev in cat] mags = [ev.magnitudes[0].mag for ev in cat]
def _get_geonet_pubids(publicids): """ Get GeoNet events while they haven't included get_events in fdsn. :type publicids: list :param publicids: List of public id numbers for events wanted. :returns: Catalog of events :rtype: obspy.core.event.Catalog """ import obspy if int(obspy.__version__.split('.')[0]) > 0: from obspy.clients.fdsn import Client from obspy import read_events else: from obspy.fdsn import Client from obspy import readEvents as read_events from obspy.core.event import Catalog client = Client('GEONET') catalog = Catalog() for publicid in publicids: data_stream = client._download('http://quakeml.geonet.org.nz/' + 'quakeml/1.2/' + publicid) data_stream.seek(0, 0) catalog += read_events(data_stream, format="quakeml") data_stream.close() return catalog
def _internal_read_cmtsolution(buf, **kwargs): """ Reads a CMTSOLUTION file to a :class:`~obspy.core.event.Catalog` object. :param buf: File to read. :type buf: open file or file-like object """ events = [] cur_pos = buf.tell() # This also works with BytesIO and what not. buf.seek(0, 2) size = buf.tell() buf.seek(cur_pos, 0) # This is pretty inefficient due to all the file pointer jumping but # performance is really the least of our concerns. Also most performance # is still lost initializing the large ObsPy event objects. while True: if buf.tell() >= size: break line = buf.readline().strip() # If there is something, jump back to the beginning of the line and # read the next event. if line: buf.seek(cur_pos, 0) events.append(_internal_read_single_cmtsolution(buf)) cur_pos = buf.tell() return Catalog(resource_id=_get_resource_id("catalog", str(uuid.uuid4())), events=events)
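# CMTSOLUTION files likewise arrive through ObsPy's plugin interface
# rather than through this internal helper; the file name below is a
# placeholder.
def _example_read_cmtsolution(path='CMTSOLUTION'):
    from obspy import read_events

    return read_events(path, format='CMTSOLUTION')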
def create_catalog(time):
    """
    Create a Catalog holding a single event whose creation info carries
    the given time as a UTCDateTime.
    """
    creation_info = CreationInfo(creation_time=obspy.UTCDateTime(time))
    event = Event(creation_info=creation_info)
    return Catalog(events=[event])
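# Quick check of the helper above; any value UTCDateTime accepts works.
def _example_create_catalog():
    catalog = create_catalog('2020-01-01T00:00:00')
    print(catalog[0].creation_info.creation_time)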
def test_write_pha_minimal(self): ori = Origin(time=UTC(0), latitude=42, longitude=43, depth=10000) pick = Pick(time=UTC(10), phase_hint='S', waveform_id=WaveformStreamID(station_code='STA')) del ori.latitude_errors del ori.longitude_errors del ori.depth_errors cat = Catalog([Event(origins=[ori], picks=[pick])]) with NamedTemporaryFile() as tf: tempfile = tf.name with self.assertWarnsRegex(UserWarning, 'Missing mag'): cat.write(tempfile, 'HYPODDPHA') cat2 = read_events(tempfile) self.assertEqual(len(cat2), 1) self.assertEqual(len(cat2[0].picks), 1)
def test_seishub(self): """Test the seishub method, use obspy default seishub client.""" from obspy.core.event import Catalog, Event, Origin, Pick from obspy.core.event import WaveformStreamID from obspy import UTCDateTime import warnings from future import standard_library with standard_library.hooks(): from urllib.request import URLError t = UTCDateTime(2009, 9, 3) test_cat = Catalog() test_cat.append(Event()) test_cat[0].origins.append(Origin()) test_cat[0].origins[0].time = t test_cat[0].origins[0].latitude = 45 test_cat[0].origins[0].longitude = 45 test_cat[0].origins[0].depth = 5000 test_cat[0].\ picks.append(Pick(waveform_id=WaveformStreamID(station_code='MANZ', channel_code='EHZ', network_code='BW'), phase_hint='PG', time=t + 2000)) test_cat[0].\ picks.append(Pick(waveform_id=WaveformStreamID(station_code='MANZ', channel_code='EHN', network_code='BW'), phase_hint='SG', time=t + 2005)) test_cat[0].\ picks.append(Pick(waveform_id=WaveformStreamID(station_code='MANZ', channel_code='EHE', network_code='BW'), phase_hint='SG', time=t + 2005.5)) test_url = 'http://teide.geophysik.uni-muenchen.de:8080' try: template = from_seishub(test_cat, url=test_url, lowcut=1.0, highcut=5.0, samp_rate=20, filt_order=4, length=3, prepick=0.5, swin='all', process_len=300) except URLError: warnings.warn('Timed out connection to seishub') if 'template' in locals(): self.assertEqual(len(template), 3)
def test_focal_mechanism_write_read(self): """ Test for a bug in reading a FocalMechanism without MomentTensor from QuakeML file. Makes sure that FocalMechanism.moment_tensor stays None if no MomentTensor is in the file. """ memfile = io.BytesIO() # create virtually empty FocalMechanism fm = FocalMechanism() event = Event(focal_mechanisms=[fm]) cat = Catalog(events=[event]) cat.write(memfile, format="QUAKEML", validate=True) # now read again, and make sure there's no stub MomentTensor, but # rather `None` memfile.seek(0) cat = read_events(memfile, format="QUAKEML") self.assertEqual(cat[0].focal_mechanisms[0].moment_tensor, None)
def _deserialize(self, zmap_str): catalog = Catalog() for row in zmap_str.split("\n"): if len(row) == 0: continue origin = Origin() event = Event(origins=[origin]) event.preferred_origin_id = origin.resource_id.id # Begin value extraction columns = row.split("\t", 13)[:13] # ignore extra columns values = dict(zip(_STD_ZMAP_COLUMNS + _EXT_ZMAP_COLUMNS, columns)) # Extract origin origin.longitude = self._str2num(values.get("lon")) origin.latitude = self._str2num(values.get("lat")) depth = self._str2num(values.get("depth")) if depth is not None: origin.depth = depth * 1000.0 z_err = self._str2num(values.get("z_err")) if z_err is not None: origin.depth_errors.uncertainty = z_err * 1000.0 h_err = self._str2num(values.get("h_err")) if h_err is not None: ou = OriginUncertainty() ou.horizontal_uncertainty = h_err ou.preferred_description = "horizontal uncertainty" origin.origin_uncertainty = ou year = self._str2num(values.get("year")) if year is not None: t_fields = ["year", "month", "day", "hour", "minute", "second"] comps = [self._str2num(values.get(f)) for f in t_fields] if year % 1 != 0: origin.time = self._decyear2utc(year) elif any(v > 0 for v in comps[1:]): utc_args = [int(v) for v in comps if v is not None] origin.time = UTCDateTime(*utc_args) mag = self._str2num(values.get("mag")) # Extract magnitude if mag is not None: magnitude = Magnitude(mag=mag) m_err = self._str2num(values.get("m_err")) magnitude.mag_errors.uncertainty = m_err event.magnitudes.append(magnitude) event.preferred_magnitude_id = magnitude.resource_id.id catalog.append(event) return catalog
def test_seishub(self): """Test the seishub method, use obspy default seishub client.""" from obspy.core.event import Catalog, Event, Origin, Pick from obspy.core.event import WaveformStreamID from obspy import UTCDateTime import warnings from future import standard_library with standard_library.hooks(): from urllib.request import URLError t = UTCDateTime(2009, 9, 3) test_cat = Catalog() test_cat.append(Event()) test_cat[0].origins.append(Origin()) test_cat[0].origins[0].time = t test_cat[0].origins[0].latitude = 45 test_cat[0].origins[0].longitude = 45 test_cat[0].origins[0].depth = 5000 test_cat[0].\ picks.append(Pick(waveform_id=WaveformStreamID(station_code='MANZ', channel_code='EHZ', network_code='BW'), phase_hint='PG', time=t + 2000)) test_cat[0].\ picks.append(Pick(waveform_id=WaveformStreamID(station_code='MANZ', channel_code='EHN', network_code='BW'), phase_hint='SG', time=t + 2005)) test_cat[0].\ picks.append(Pick(waveform_id=WaveformStreamID(station_code='MANZ', channel_code='EHE', network_code='BW'), phase_hint='SG', time=t + 2005.5)) test_url = 'http://teide.geophysik.uni-muenchen.de:8080' try: template = from_seishub(test_cat, url=test_url, lowcut=1.0, highcut=5.0, samp_rate=20, filt_order=4, length=3, prepick=0.5, swin='all') except URLError: warnings.warn('Timed out connection to seishub') if 'template' in locals(): self.assertEqual(len(template), 3)
def _deserialize(self, zmap_str): catalog = Catalog() for row in zmap_str.split('\n'): if len(row) == 0: continue origin = Origin() event = Event(origins=[origin]) event.preferred_origin_id = origin.resource_id.id # Begin value extraction columns = row.split('\t', 13)[:13] # ignore extra columns values = dict(zip(_STD_ZMAP_COLUMNS + _EXT_ZMAP_COLUMNS, columns)) # Extract origin origin.longitude = self._str2num(values.get('lon')) origin.latitude = self._str2num(values.get('lat')) depth = self._str2num(values.get('depth')) if depth is not None: origin.depth = depth * 1000.0 z_err = self._str2num(values.get('z_err')) if z_err is not None: origin.depth_errors.uncertainty = z_err * 1000.0 h_err = self._str2num(values.get('h_err')) if h_err is not None: ou = OriginUncertainty() ou.horizontal_uncertainty = h_err ou.preferred_description = 'horizontal uncertainty' origin.origin_uncertainty = ou year = self._str2num(values.get('year')) if year is not None: t_fields = ['year', 'month', 'day', 'hour', 'minute', 'second'] comps = [self._str2num(values.get(f)) for f in t_fields] if year % 1 != 0: origin.time = self._decyear2utc(year) elif any(v > 0 for v in comps[1:]): utc_args = [int(v) for v in comps if v is not None] origin.time = UTCDateTime(*utc_args) mag = self._str2num(values.get('mag')) # Extract magnitude if mag is not None: magnitude = Magnitude(mag=mag) m_err = self._str2num(values.get('m_err')) magnitude.mag_errors.uncertainty = m_err event.magnitudes.append(magnitude) event.preferred_magnitude_id = magnitude.resource_id.id catalog.append(event) return catalog
def test_with_quakeml(): np1 = NodalPlane(strike=259, dip=74, rake=10) np2 = NodalPlane(strike=166, dip=80, rake=164) nodal_planes = NodalPlanes(nodal_plane_1=np1, nodal_plane_2=np2) taxis = Axis(plunge=40, azimuth=70) naxis = Axis(plunge=50, azimuth=80) paxis = Axis(plunge=60, azimuth=90) paxes = PrincipalAxes(t_axis=taxis, n_axis=naxis, p_axis=paxis) focal = FocalMechanism(nodal_planes=nodal_planes, principal_axes=paxes) event = Event(focal_mechanisms=[focal]) catalog = Catalog(events=[event]) event_text = '''<shakemap-data code_version="4.0" map_version="1"> <earthquake id="us2000cmy3" lat="56.046" lon="-149.073" mag="7.9" time="2018-01-23T09:31:42Z" depth="25.00" locstring="280km SE of Kodiak, Alaska" netid="us" network=""/> </shakemap-data>''' try: tempdir = tempfile.mkdtemp() xmlfile = os.path.join(tempdir, 'quakeml.xml') catalog.write(xmlfile, format="QUAKEML") eventfile = os.path.join(tempdir, 'event.xml') f = open(eventfile, 'wt') f.write(event_text) f.close() params = read_moment_quakeml(xmlfile) assert params['moment']['NP1']['strike'] == 259.0 assert params['moment']['NP1']['dip'] == 74.0 assert params['moment']['NP1']['rake'] == 10.0 assert params['moment']['NP2']['strike'] == 166.0 assert params['moment']['NP2']['dip'] == 80.0 assert params['moment']['NP2']['rake'] == 164.0 origin = Origin.fromFile(eventfile, momentfile=xmlfile) assert origin.mag == 7.9 assert origin.lat == 56.046 assert origin.lon == -149.073 assert origin.id == 'us2000cmy3' except Exception: assert False finally: shutil.rmtree(tempdir)
def test_append(self):
    """
    Tests the append method of the Catalog object.
    """
    # 1 - create catalog and add a few events
    catalog = Catalog()
    event1 = Event()
    event2 = Event()
    self.assertEqual(len(catalog), 0)
    catalog.append(event1)
    self.assertEqual(len(catalog), 1)
    self.assertEqual(catalog.events, [event1])
    catalog.append(event2)
    self.assertEqual(len(catalog), 2)
    self.assertEqual(catalog.events, [event1, event2])
    # 2 - appending objects other than Event should fail
    self.assertRaises(TypeError, catalog.append, str)
    self.assertRaises(TypeError, catalog.append, Catalog)
    self.assertRaises(TypeError, catalog.append, [event1])
def sort_catalogue(cat):
    """
    Sort a catalogue of retrieved events in reverse chronological order
    (newest first).

    :param cat: catalogue to sort
    :return: sorted catalogue
    """
    k = [[event, event.origins[0].time] for event in cat]
    k.sort(key=lambda x: x[1], reverse=True)
    events = [event[0] for event in k]
    cat = Catalog(events=events)
    return cat
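# Sanity check for sort_catalogue using ObsPy's bundled example catalog:
# after sorting, origin times must be non-increasing (newest first).
def _example_sort():
    from obspy import read_events

    cat = sort_catalogue(read_events())
    times = [event.origins[0].time for event in cat]
    assert all(t1 >= t2 for t1, t2 in zip(times, times[1:]))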
def _read_GCMT_catalog(min_year=None, max_year=None): """ Helper function reading the GCMT data shipping with LASIF. :param min_year: The minimum year to read. :type min_year: int, optional :param max_year: The maximum year to read. :type max_year: int, optional """ # easier tests if min_year is None: min_year = 0 else: min_year = int(min_year) if max_year is None: max_year = 3000 else: max_year = int(max_year) data_dir = os.path.join( os.path.dirname( os.path.dirname( os.path.abspath(inspect.getfile(inspect.currentframe())))), "data", "GCMT_Catalog", ) available_years = [_i for _i in os.listdir(data_dir) if _i.isdigit()] available_years.sort() print("LASIF currently contains GCMT data from %s to %s/%i." % ( available_years[0], available_years[-1], len(glob.glob(os.path.join(data_dir, available_years[-1], "*.ndk*"))), )) available_years = [ _i for _i in os.listdir(data_dir) if _i.isdigit() and (min_year <= int(_i) <= max_year) ] available_years.sort() print("Parsing the GCMT catalog. This might take a while...") cat = Catalog() for year in available_years: print("\tReading year %s ..." % year) if float(year) < 2005: filename = glob.glob(os.path.join(data_dir, str(year), "*.xml*"))[0] cat += obspy.read_events(filename, format="QuakeML") else: for filename in glob.glob( os.path.join(data_dir, str(year), "*.ndk*")): cat += obspy.read_events(filename, format="ndk") return cat
def _read_pha(filename, inventory=None, id_map=None, id_default='.{}..{}', ph2comp={ 'P': 'Z', 'S': 'N' }, encoding='utf-8'): """ Read a HypoDD PHA file and returns an ObsPy Catalog object. .. warning:: This function should NOT be called directly, it registers via the ObsPy :func:`~obspy.core.event.read_events` function, call this instead. The optional parameters all deal with the problem, that the PHA format only stores station names for the picks, but the Pick object expects a SEED id. A SEED id template is retrieved for each station by the following procedure: 1. look at id_map for a direct station name match and use the specified template 2. if 1 did not succeed, look if the station is present in inventory and use its first channel as template 3. if 1 and 2 did not succeed, use specified default template (id_default) :param str filename: File or file-like object in text mode. :type inventory: :class:`~obspy.core.inventory.inventory.Inventory` :param inventory: Inventory used to retrieve network code, location code and channel code of stations (SEED id). :param dict id_map: Default templates for each station (example: `id_map={'MOX': 'GR.{}..HH{}'`). The values must contain three dots and two `{}` which are substituted by station code and component. :param str id_default: Default SEED id template. The value must contain three dots and two `{}` which are substituted by station code and component. :param dict ph2comp: mapping of phases to components (default: {'P': 'Z', 'S': 'N'}) :param str encoding: encoding used (default: utf-8) :rtype: :class:`~obspy.core.event.Catalog` :return: An ObsPy Catalog object. """ seed_map = _seed_id_map(inventory, id_map) with io.open(filename, 'r', encoding=encoding) as f: text = f.read() events = [ _block2event(block, seed_map, id_default, ph2comp) for block in text.split('#')[1:] ] return Catalog(events)
def test_issue_2339(self): """ Make sure an empty EventDescription object does not prevent a catalog from being saved to disk and re-read, while still being equal. """ # create a catalog with an empty event description empty_description = EventDescription() cat1 = Catalog(events=[read_events()[0]]) cat1[0].event_descriptions.append(empty_description) # serialize the catalog using quakeml and re-read bio = io.BytesIO() cat1.write(bio, 'quakeml') bio.seek(0) cat2 = read_events(bio) # the text of the empty EventDescription instances should be equal text1 = cat1[0].event_descriptions[-1].text text2 = cat2[0].event_descriptions[-1].text self.assertEqual(text1, text2) # the two catalogs should be equal self.assertEqual(cat1, cat2)
def read_nlloc_sum(file_in):
    """
    Read a NLLoc hypocenter-file and store it into a simple LOTOS_class
    Catalog. The ID is read from the event.comments part.
    """
    from obspy.io.nlloc.core import read_nlloc_hyp
    from lotos.LOTOS_class import Catalog, Event, Phase
    from general import util as gutil

    Ray = Catalog()
    cat = read_nlloc_hyp(file_in)
    stations_dic = Ray.stations_realname
    for event in cat:
        id_event = event.comments[0].text
        origin = event.preferred_origin()
        OT = origin.time
        #### Initialize Event
        Event_p = Event()
        Event_p.x = origin.longitude
        Event_p.y = origin.latitude
        Event_p.z = origin.depth / 1000
        Event_p.id = id_event
        Event_p.ot = OT
        Event_p.num_phase = origin.quality.used_phase_count
        Picks_p = event.picks
        for arrival in origin.arrivals:
            Phase_p = Phase()
            if arrival.phase in ['P', 'Pn']:
                Phase_p.type = 1
            else:
                Phase_p.type = 2
            Pick_p = gutil.getPickForArrival(Picks_p, arrival)
            Phase_p.station = stations_dic[Pick_p.waveform_id.station_code]
            Phase_p.t_obs = Pick_p.time - OT
            Phase_p.t_tho = Phase_p.t_obs - arrival.time_residual
            Event_p.phases.append(Phase_p)
        Ray.events.append(Event_p)
    return Ray
def convert_dmteventfile():
    eventsfile1 = os.path.join(conf.dmt_path, 'EVENT', 'event_list')
    eventsfile2 = os.path.join(conf.dmt_path, 'EVENT', 'events.xml')
    # pickle files have to be opened in binary mode
    with open(eventsfile1, 'rb') as f:
        events1 = pickle.load(f)
    events2 = Catalog()
    for ev in events1:
        orkw = {'time': ev['datetime'],
                'latitude': ev['latitude'],
                'longitude': ev['longitude'],
                'depth': ev['depth']}
        magkw = {'mag': ev['magnitude'],
                 'magnitude_type': ev['magnitude_type']}
        evdesargs = (ev['flynn_region'], 'Flinn-Engdahl region')
        evkw = {'resource_id': ev['event_id'],
                'event_type': 'earthquake',
                'creation_info': CreationInfo(author=ev['author']),
                'event_descriptions': [EventDescription(*evdesargs)],
                'origins': [Origin(**orkw)],
                'magnitudes': [Magnitude(**magkw)]}
        events2.append(Event(**evkw))
    events2.write(eventsfile2, 'QUAKEML')
def sfiles_to_event(sfile_list): """ Function to write out an event.dat file of the events :type sfile_list: list :param sfile_list: List of s-files to sort and put into the database :returns: List of tuples of event ID (int) and Sfile name """ from obspy.core.event import Catalog event_list = [] sort_list = [(sfile_util.readheader(sfile).origins[0].time, sfile) for sfile in sfile_list] sort_list.sort(key=lambda tup: tup[0]) sfile_list = [sfile[1] for sfile in sort_list] catalog = Catalog() for i, sfile in enumerate(sfile_list): event_list.append((i, sfile)) catalog.append(sfile_util.readheader(sfile)) # Hand off to sister function write_event(catalog) return event_list
def event_to_quakeml(event, filename):
    """
    Write one of those events to QuakeML.
    """
    # Create all objects.
    cat = Catalog()
    ev = Event()
    org = Origin()
    mag = Magnitude()
    fm = FocalMechanism()
    mt = MomentTensor()
    t = Tensor()
    # Link them together.
    cat.append(ev)
    ev.origins.append(org)
    ev.magnitudes.append(mag)
    ev.focal_mechanisms.append(fm)
    fm.moment_tensor = mt
    mt.tensor = t
    # Fill values
    ev.resource_id = "smi:inversion/%s" % str(event["identifier"])
    org.time = event["time"]
    org.longitude = event["longitude"]
    org.latitude = event["latitude"]
    org.depth = event["depth_in_km"] * 1000
    mag.mag = event["Mw"]
    mag.magnitude_type = "Mw"
    # Map the tensor components onto the matching QuakeML fields.
    t.m_rr = event["Mrr"]
    t.m_tt = event["Mtt"]
    t.m_pp = event["Mpp"]
    t.m_rt = event["Mrt"]
    t.m_rp = event["Mrp"]
    t.m_tp = event["Mtp"]
    cat.write(filename, format="quakeml")
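# The event dictionary expected by event_to_quakeml, inferred from the
# keys the function reads; all values below are made up.
def _example_event_to_quakeml():
    from obspy import UTCDateTime

    event = {
        'identifier': 'test001',
        'time': UTCDateTime(2020, 1, 1),
        'longitude': -70.0, 'latitude': -20.0, 'depth_in_km': 25.0,
        'Mw': 6.1,
        'Mrr': 1e18, 'Mtt': -5e17, 'Mpp': -5e17,
        'Mrt': 2e17, 'Mrp': 0.0, 'Mtp': 0.0,
    }
    event_to_quakeml(event, 'test001.xml')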
def _on_file_save(self): """ Creates a new obspy.core.event.Magnitude object and writes the moment magnitude to it. """ # Get the save filename. filename = QtGui.QFileDialog.getSaveFileName(caption="Save as...") filename = os.path.abspath(str(filename)) mag = Magnitude() mag.mag = self.final_result["moment_magnitude"] mag.magnitude_type = "Mw" mag.station_count = self.final_result["station_count"] mag.evaluation_mode = "manual" # Link to the used origin. mag.origin_id = self.current_state["event"].origins[0].resource_id mag.method_id = "Magnitude picker Krischer" # XXX: Potentially change once this program gets more stable. mag.evaluation_status = "preliminary" # Write the other results as Comments. mag.comments.append( \ Comment("Seismic moment in Nm: %g" % \ self.final_result["seismic_moment"])) mag.comments.append( \ Comment("Circular source radius in m: %.2f" % \ self.final_result["source_radius"])) mag.comments.append( \ Comment("Stress drop in Pa: %.2f" % \ self.final_result["stress_drop"])) mag.comments.append( \ Comment("Very rough Q estimation: %.1f" % \ self.final_result["quality_factor"])) event = copy.deepcopy(self.current_state["event"]) event.magnitudes.append(mag) cat = Catalog() cat.events.append(event) cat.write(filename, format="quakeml")
def test_extend(self):
    """
    Tests the extend method of the Catalog object.
    """
    # 1 - create catalog and extend it with a list of events
    catalog = Catalog()
    event1 = Event()
    event2 = Event()
    self.assertEqual(len(catalog), 0)
    catalog.extend([event1, event2])
    self.assertEqual(len(catalog), 2)
    self.assertEqual(catalog.events, [event1, event2])
    # 2 - extend it with another catalog
    event3 = Event()
    event4 = Event()
    catalog2 = Catalog([event3, event4])
    self.assertEqual(len(catalog), 2)
    catalog.extend(catalog2)
    self.assertEqual(len(catalog), 4)
    self.assertEqual(catalog.events, [event1, event2, event3, event4])
    # extending with objects other than a Catalog or list should fail
    self.assertRaises(TypeError, catalog.extend, str)
    self.assertRaises(TypeError, catalog.extend, event1)
    self.assertRaises(TypeError, catalog.extend, (event1, event2))
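# The append/extend semantics tested above are also available through
# Catalog's arithmetic operators, which several snippets in this
# collection lean on (e.g. `catalog += read_event(...)`):
def _example_catalog_arithmetic():
    from obspy.core.event import Catalog, Event

    catalog = Catalog([Event()]) + Catalog([Event()])
    catalog += Catalog([Event()])
    print(len(catalog))  # 3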
def _deserialize(self): catalog = Catalog() res_id = '/'.join((res_id_prefix, self.filename.replace(':', '/')))\ .replace('\\', '/').replace('//', '/') catalog.resource_id = ResourceIdentifier(id=res_id) catalog.description = 'Created from NEIC PDE mchedr format' catalog.comments = '' catalog.creation_info = CreationInfo(creation_time=UTCDateTime()) for line in self.fh.readlines(): # XXX: ugly, probably we should do everything in byte strings # here? Is the pde / mchedr format unicode aware? line = line.decode() record_id = line[0:2] if record_id == 'HY': event = self._parse_record_hy(line) catalog.append(event) elif record_id == 'P ': pick, arrival = self._parse_record_p(line, event) elif record_id == 'E ': self._parse_record_e(line, event) elif record_id == 'L ': self._parse_record_l(line, event) elif record_id == 'A ': self._parse_record_a(line, event) elif record_id == 'C ': self._parse_record_c(line, event) elif record_id == 'AH': self._parse_record_ah(line, event) elif record_id == 'AE': self._parse_record_ae(line, event) elif record_id == 'Dp': focal_mechanism = self._parse_record_dp(line, event) elif record_id == 'Dt': self._parse_record_dt(line, focal_mechanism) elif record_id == 'Da': self._parse_record_da(line, focal_mechanism) elif record_id == 'Dc': self._parse_record_dc(line, focal_mechanism) elif record_id == 'M ': self._parse_record_m(line, event, pick) elif record_id == 'S ': self._parse_record_s(line, event, pick, arrival) self.fh.close() # strip extra whitespaces from event comments for event in catalog: for comment in event.comments: comment.text = comment.text.strip() event.scope_resource_ids() return catalog
def _deserialize(self): catalog = Catalog() res_id = "/".join((res_id_prefix, self.filename)) catalog.resource_id = ResourceIdentifier(id=res_id) catalog.description = "Created from NEIC PDE mchedr format" catalog.comments = "" catalog.creation_info = CreationInfo(creation_time=UTCDateTime()) for line in self.fh.readlines(): # XXX: ugly, probably we should do everything in byte strings # here? Is the pde / mchedr format unicode aware? line = line.decode() record_id = line[0:2] if record_id == "HY": event = self._parseRecordHY(line) catalog.append(event) elif record_id == "P ": pick, arrival = self._parseRecordP(line, event) elif record_id == "E ": self._parseRecordE(line, event) elif record_id == "L ": self._parseRecordL(line, event) elif record_id == "A ": self._parseRecordA(line, event) elif record_id == "C ": self._parseRecordC(line, event) elif record_id == "AH": self._parseRecordAH(line, event) elif record_id == "AE": self._parseRecordAE(line, event) elif record_id == "Dp": focal_mechanism = self._parseRecordDp(line, event) elif record_id == "Dt": self._parseRecordDt(line, focal_mechanism) elif record_id == "Da": self._parseRecordDa(line, focal_mechanism) elif record_id == "Dc": self._parseRecordDc(line, focal_mechanism) elif record_id == "M ": self._parseRecordM(line, event, pick) elif record_id == "S ": self._parseRecordS(line, event, pick, arrival) self.fh.close() # strip extra whitespaces from event comments for event in catalog: for comment in event.comments: comment.text = comment.text.strip() return catalog
def test_creating_minimal_quakeml_with_mt(self): """ Tests the creation of a minimal QuakeML containing origin, magnitude and moment tensor. """ # Rotate into physical domain lat, lon, depth, org_time = 10.0, -20.0, 12000, UTCDateTime(2012, 1, 1) mrr, mtt, mpp, mtr, mpr, mtp = 1E18, 2E18, 3E18, 3E18, 2E18, 1E18 scalar_moment = math.sqrt( mrr ** 2 + mtt ** 2 + mpp ** 2 + mtr ** 2 + mpr ** 2 + mtp ** 2) moment_magnitude = 0.667 * (math.log10(scalar_moment) - 9.1) # Initialise event ev = Event(event_type="earthquake") ev_origin = Origin(time=org_time, latitude=lat, longitude=lon, depth=depth, resource_id=ResourceIdentifier()) ev.origins.append(ev_origin) # populate event moment tensor ev_tensor = Tensor(m_rr=mrr, m_tt=mtt, m_pp=mpp, m_rt=mtr, m_rp=mpr, m_tp=mtp) ev_momenttensor = MomentTensor(tensor=ev_tensor) ev_momenttensor.scalar_moment = scalar_moment ev_momenttensor.derived_origin_id = ev_origin.resource_id ev_focalmechanism = FocalMechanism(moment_tensor=ev_momenttensor) ev.focal_mechanisms.append(ev_focalmechanism) # populate event magnitude ev_magnitude = Magnitude() ev_magnitude.mag = moment_magnitude ev_magnitude.magnitude_type = 'Mw' ev_magnitude.evaluation_mode = 'automatic' ev.magnitudes.append(ev_magnitude) # write QuakeML file cat = Catalog(events=[ev]) memfile = io.BytesIO() cat.write(memfile, format="quakeml", validate=IS_RECENT_LXML) memfile.seek(0, 0) new_cat = _read_quakeml(memfile) self.assertEqual(len(new_cat), 1) event = new_cat[0] self.assertEqual(len(event.origins), 1) self.assertEqual(len(event.magnitudes), 1) self.assertEqual(len(event.focal_mechanisms), 1) org = event.origins[0] mag = event.magnitudes[0] fm = event.focal_mechanisms[0] self.assertEqual(org.latitude, lat) self.assertEqual(org.longitude, lon) self.assertEqual(org.depth, depth) self.assertEqual(org.time, org_time) # Moment tensor. mt = fm.moment_tensor.tensor self.assertTrue((fm.moment_tensor.scalar_moment - scalar_moment) / scalar_moment < scalar_moment * 1E-10) self.assertEqual(mt.m_rr, mrr) self.assertEqual(mt.m_pp, mpp) self.assertEqual(mt.m_tt, mtt) self.assertEqual(mt.m_rt, mtr) self.assertEqual(mt.m_rp, mpr) self.assertEqual(mt.m_tp, mtp) # Mag self.assertAlmostEqual(mag.mag, moment_magnitude) self.assertEqual(mag.magnitude_type, "Mw") self.assertEqual(mag.evaluation_mode, "automatic")
class ISFReader(object): encoding = 'UTF-8' resource_id_prefix = 'smi:local' def __init__(self, fh, **kwargs): self.lines = [_decode_if_possible(line, self.encoding).rstrip() for line in fh.readlines() if line.strip()] self.cat = Catalog() self._no_uuid_hashes = kwargs.get('_no_uuid_hashes', False) def deserialize(self): if not self.lines: raise ObsPyReadingError() line = self._get_next_line() if not line.startswith('DATA_TYPE BULLETIN IMS1.0:short'): raise ObsPyReadingError() try: self._deserialize() except ISFEndOfFile: pass return self.cat def _deserialize(self): line = self._get_next_line() catalog_description = line.strip() self.cat.description = catalog_description if not self.lines[0].startswith('Event'): raise ObsPyReadingError() # get next line stops the loop eventually, raising a controlled # exception while True: next_line_type = self._next_line_type() if next_line_type == 'event': self._read_event_header() elif next_line_type: self._process_block() else: raise ObsPyReadingError def _construct_id(self, parts, add_hash=False): id_ = '/'.join([str(self.cat.resource_id)] + list(parts)) if add_hash and not self._no_uuid_hashes: id_ = str(ResourceIdentifier(prefix=id_)) return id_ def _get_next_line(self): if not self.lines: raise ISFEndOfFile line = self.lines.pop(0) if line.startswith('STOP'): raise ISFEndOfFile return line def _read_event_header(self): line = self._get_next_line() event_id = self._construct_id(['event', line[6:14].strip()]) region = line[15:80].strip() event = Event( resource_id=event_id, event_descriptions=[EventDescription(text=region, type='region name')]) self.cat.append(event) def _next_line_type(self): if not self.lines: raise ISFEndOfFile return _block_header(self.lines[0]) def _process_block(self): if not self.cat: raise ObsPyReadingError line = self._get_next_line() block_type = _block_header(line) # read origins block if block_type == 'origins': self._read_origins() # read publications block elif block_type == 'bibliography': self._read_bibliography() # read magnitudes block elif block_type == 'magnitudes': self._read_magnitudes() # read phases block elif block_type == 'phases': self._read_phases() # unexpected block header line else: msg = ('Unexpected line while reading file (line will be ' 'ignored):\n' + line) warnings.warn(msg) def _read_phases(self): event = self.cat[-1] while not self._next_line_type(): line = self._get_next_line() if line.strip().startswith('('): comment = self._parse_generic_comment(line) event.picks[-1].comments.append(comment) continue pick, amplitude, station_magnitude = self._parse_phase(line) if (pick, amplitude, station_magnitude) == (None, None, None): continue event.picks.append(pick) if amplitude: event.amplitudes.append(amplitude) if station_magnitude: event.station_magnitudes.append(station_magnitude) continue def _read_origins(self): event = self.cat[-1] origins = [] event_types_certainties = [] # just in case origin block is at end of file, make sure the event type # routine below gets executed, even if next line is EOF at some point try: while not self._next_line_type(): line = self._get_next_line() if line.strip().startswith('('): origins[-1].comments.append( self._parse_generic_comment(line)) continue origin, event_type, event_type_certainty = \ self._parse_origin(line) origins.append(origin) event_types_certainties.append( (event_type, event_type_certainty)) continue finally: # check event types/certainties for consistency event_types = set(type_ for type_, _ in event_types_certainties) event_types.discard(None) 
            if len(event_types) == 1:
                event_type = event_types.pop()
                certainties = set(
                    cert for type_, cert in event_types_certainties
                    if type_ == event_type)
                if "known" in certainties:
                    event_type_certainty = "known"
                elif "suspected" in certainties:
                    event_type_certainty = "suspected"
                else:
                    event_type_certainty = None
            else:
                event_type = None
                event_type_certainty = None
            event.origins.extend(origins)
            event.event_type = event_type
            event.event_type_certainty = event_type_certainty

    def _read_magnitudes(self):
        event = self.cat[-1]
        while not self._next_line_type():
            line = self._get_next_line()
            if line.strip().startswith('('):
                event.magnitudes[-1].comments.append(
                    self._parse_generic_comment(line))
                continue
            event.magnitudes.append(self._parse_magnitude(line))
            continue

    def _read_bibliography(self):
        event = self.cat[-1]
        while not self._next_line_type():
            line = self._get_next_line()
            if line.strip().startswith('('):
                # TODO parse bibliography comment blocks
                continue
            event.comments.append(self._parse_bibliography_item(line))
            continue

    def _make_comment(self, text):
        id_ = self._construct_id(['comment'], add_hash=True)
        comment = Comment(text=text, resource_id=id_)
        return comment

    def _parse_bibliography_item(self, line):
        return self._make_comment(line)

    def _parse_origin(self, line):
        #     1-10   i4,a1,i2,a1,i2    epicenter date (yyyy/mm/dd)
        #    12-22   i2,a1,i2,a1,f5.2  epicenter time (hh:mm:ss.ss)
        time = UTCDateTime.strptime(line[:17], '%Y/%m/%d %H:%M:')
        time += float(line[17:22])
        #       23   a1    fixed flag (f = fixed origin time solution,
        #                  blank if not a fixed origin time)
        time_fixed = fixed_flag(line[22])
        #    25-29   f5.2  origin time error (seconds; blank if fixed
        #                  origin time)
        time_error = float_or_none(line[24:29])
        time_error = time_error and QuantityError(uncertainty=time_error)
        #    31-35   f5.2  root mean square of time residuals (seconds)
        rms = float_or_none(line[30:35])
        #    37-44   f8.4  latitude (negative for South)
        latitude = float_or_none(line[36:44])
        #    46-54   f9.4  longitude (negative for West)
        longitude = float_or_none(line[45:54])
        #       55   a1    fixed flag (f = fixed epicenter solution,
        #                  blank if not a fixed epicenter solution)
        epicenter_fixed = fixed_flag(line[54])
        #    56-60   f5.1  semi-major axis of 90% ellipse or its estimate
        #                  (km, blank if fixed epicenter)
        _uncertainty_major_m = float_or_none(line[55:60], multiplier=1e3)
        #    62-66   f5.1  semi-minor axis of 90% ellipse or its estimate
        #                  (km, blank if fixed epicenter)
        _uncertainty_minor_m = float_or_none(line[61:66], multiplier=1e3)
        #    68-70   i3    strike (0 <= x <= 360) of error ellipse
        #                  clock-wise from North (degrees)
        _uncertainty_major_azimuth = float_or_none(line[67:70])
        #    72-76   f5.1  depth (km)
        depth = float_or_none(line[71:76], multiplier=1e3)
        #       77   a1    fixed flag (f = fixed depth station, d = depth
        #                  phases, blank if not a fixed depth)
        # this is the depth-fixed flag, distinct from the epicenter flag
        # parsed above (QuakeML's Origin has no depth-fixed field)
        depth_fixed = fixed_flag(line[76])
        #    79-82   f4.1  depth error 90% (km; blank if fixed depth)
        depth_error = float_or_none(line[78:82], multiplier=1e3)
        #    84-87   i4    number of defining phases
        used_phase_count = int_or_none(line[83:87])
        #    89-92   i4    number of defining stations
        used_station_count = int_or_none(line[88:92])
        #    94-96   i3    gap in azimuth coverage (degrees)
        azimuthal_gap = float_or_none(line[93:96])
        #   98-103   f6.2  distance to closest station (degrees)
        minimum_distance = float_or_none(line[97:103])
        #  105-110   f6.2  distance to furthest station (degrees)
        maximum_distance = float_or_none(line[104:110])
        #      112   a1    analysis type: (a = automatic, m = manual,
        #                  g = guess)
        evaluation_mode, evaluation_status = \
            evaluation_mode_and_status(line[111])
        #      114   a1    location method: (i = inversion, p = pattern
        #                  recognition, g = ground truth, o = other)
        location_method = LOCATION_METHODS[line[113].strip().lower()]
        #  116-117   a2    event type:
        # XXX event type and event type certainty is specified per origin,
        # XXX not sure how to best handle this, for now only use it if
        # XXX information on the individual origins do not clash.. not
        # XXX sure yet how to identify the preferred origin..
        event_type, event_type_certainty = \
            EVENT_TYPE_CERTAINTY[line[115:117].strip().lower()]
        #  119-127   a9    author of the origin
        author = line[118:127].strip()
        #  129-136   a8    origin identification
        origin_id = self._construct_id(['origin', line[128:136].strip()])

        # do some combinations
        depth_error = depth_error and dict(uncertainty=depth_error,
                                           confidence_level=90)
        if all(v is not None for v in (_uncertainty_major_m,
                                       _uncertainty_minor_m,
                                       _uncertainty_major_azimuth)):
            origin_uncertainty = OriginUncertainty(
                min_horizontal_uncertainty=_uncertainty_minor_m,
                max_horizontal_uncertainty=_uncertainty_major_m,
                azimuth_max_horizontal_uncertainty=(
                    _uncertainty_major_azimuth),
                preferred_description='uncertainty ellipse',
                confidence_level=90)
            # event init always sets an empty QuantityError, even when
            # specifying None, which is strange
            for key in ['confidence_ellipsoid']:
                setattr(origin_uncertainty, key, None)
        else:
            origin_uncertainty = None
        origin_quality = OriginQuality(
            standard_error=rms, used_phase_count=used_phase_count,
            used_station_count=used_station_count,
            azimuthal_gap=azimuthal_gap,
            minimum_distance=minimum_distance,
            maximum_distance=maximum_distance)
        comments = []
        if location_method:
            comments.append(
                self._make_comment('location method: ' + location_method))
        if author:
            creation_info = CreationInfo(author=author)
        else:
            creation_info = None
        # assemble whole event
        origin = Origin(
            time=time, resource_id=origin_id, longitude=longitude,
            latitude=latitude, depth=depth, depth_errors=depth_error,
            origin_uncertainty=origin_uncertainty, time_fixed=time_fixed,
            epicenter_fixed=epicenter_fixed, origin_quality=origin_quality,
            comments=comments, creation_info=creation_info)
        # event init always sets an empty QuantityError, even when
        # specifying None, which is strange
        for key in ('time_errors', 'longitude_errors', 'latitude_errors',
                    'depth_errors'):
            setattr(origin, key, None)
        return origin, event_type, event_type_certainty

    def _parse_magnitude(self, line):
        #      1-5   a5    magnitude type (mb, Ms, ML, mbmle, msmle)
        magnitude_type = line[0:5].strip()
        #        6   a1    min max indicator (<, >, or blank)
        # TODO figure out the meaning of this min max indicator
        min_max_indicator = line[5:6].strip()
        #     7-10   f4.1  magnitude value
        mag = float_or_none(line[6:10])
        #    12-14   f3.1  standard magnitude error
        mag_errors = float_or_none(line[11:14])
        #    16-19   i4    number of stations used to calculate magnitude
        station_count = int_or_none(line[15:19])
        #    21-29   a9    author of the origin
        author = line[20:29].strip()
        #    31-38   a8    origin identification
        origin_id = line[30:38].strip()

        # process items
        if author:
            creation_info = CreationInfo(author=author)
        else:
            creation_info = None
        mag_errors = mag_errors and QuantityError(uncertainty=mag_errors)
        if origin_id:
            origin_id = self._construct_id(['origin', origin_id])
        else:
            origin_id = None
        if not magnitude_type:
            magnitude_type = None
        # magnitudes have no id field, so construct a unique one at least
        resource_id = self._construct_id(['magnitude'], add_hash=True)
        if min_max_indicator:
            msg = 'Magnitude min/max indicator field not yet implemented'
            warnings.warn(msg)
        # combine and return
        mag = Magnitude(
            magnitude_type=magnitude_type,
mag=mag, station_count=station_count, creation_info=creation_info, mag_errors=mag_errors, origin_id=origin_id, resource_id=resource_id) # event init always sets an empty QuantityError, even when specifying # None, which is strange for key in ['mag_errors']: setattr(mag, key, None) return mag def _get_pick_time(self, my_string): """ Look up absolute time of pick including date, based on the time-of-day only representation in the phase line Returns absolute pick time or None if it can not be determined safely. """ if not my_string.strip(): return None # TODO maybe we should defer phases block parsing.. but that will make # the whole reading more complex if not self.cat.events: msg = ('Can not parse phases block before parsing origins block, ' 'because phase lines do not contain date information, only ' 'time-of-day') raise NotImplementedError(msg) origin_times = [origin.time for origin in self.cat.events[-1].origins] if not origin_times: msg = ('Can not parse phases block unless origins with origin ' 'time information are present, because phase lines do not ' 'contain date information, only time-of-day') raise NotImplementedError(msg) # XXX this whole routine is on shaky ground.. # since picks only have a time-of-day and there's not even an # association to one of the origins, in principle this would need some # real tough logic to make it failsafe. actually this would mean using # taup with the given epicentral distance of the pick and check what # date is appropriate. # for now just do a very simple logic and raise exceptions when things # look fishy. this is ugly but it's not worth spending more time on # this, unless somebody starts bumping into one of the explicitly # raised exceptions below. origin_time_min = min(origin_times) origin_time_max = max(origin_times) hour = int(my_string[0:2]) minute = int(my_string[3:5]) seconds = float(my_string[6:]) all_guesses = [] for origin in self.cat.events[-1].origins: first_guess = UTCDateTime( origin.time.year, origin.time.month, origin.time.day, hour, minute, seconds) all_guesses.append((first_guess, origin.time)) all_guesses.append((first_guess - 86400, origin.time)) all_guesses.append((first_guess + 86400, origin.time)) pick_date = sorted(all_guesses, key=lambda x: abs(x[0] - x[1]))[0][0] # make sure event origin times are reasonably close together if origin_time_max - origin_time_min > 5 * 3600: msg = ('Origin times in event differ by more than 5 hours, this ' 'is currently not implemented as determining the date of ' 'the pick might be tricky. Sorry.') warnings.warn(msg) return None # now try the date of the latest origin and raise if things seem fishy t = UTCDateTime(pick_date.year, pick_date.month, pick_date.day, hour, minute, seconds) for origin_time in origin_times: if t - origin_time > 6 * 3600: msg = ('This pick would have a time more than 6 hours after ' 'or before one of the origins in the event. This seems ' 'fishy. Please report an issue on our github.') warnings.warn(msg) return None return t def _parse_phase(self, line): # since we can not identify which origin a phase line corresponds to, # we can not use any of the included information that would go in the # Arrival object, as that would have to be attached to the appropriate # origin.. 
    # for now, just append all of these items as comments to the pick
    comments = []
    # 1-5     a5   station code
    station_code = line[0:5].strip()
    # 7-12    f6.2 station-to-event distance (degrees)
    comments.append(
        'station-to-event distance (degrees): "{}"'.format(line[6:12]))
    # 14-18   f5.1 event-to-station azimuth (degrees)
    comments.append(
        'event-to-station azimuth (degrees): "{}"'.format(line[13:18]))
    # 20-27   a8   phase code
    phase_hint = line[19:27].strip()
    # 29-40   i2,a1,i2,a1,f6.3   arrival time (hh:mm:ss.sss)
    time = self._get_pick_time(line[28:40])
    if time is None:
        msg = ('Could not determine absolute time of pick. This phase '
               'line will be ignored:\n{}').format(line)
        warnings.warn(msg)
        return None, None, None
    # 42-46   f5.1 time residual (seconds)
    comments.append('time residual (seconds): "{}"'.format(line[41:46]))
    # 48-52   f5.1 observed azimuth (degrees)
    comments.append(
        'observed azimuth (degrees): "{}"'.format(line[47:52]))
    # 54-58   f5.1 azimuth residual (degrees)
    comments.append(
        'azimuth residual (degrees): "{}"'.format(line[53:58]))
    # 60-65   f5.1 observed slowness (seconds/degree)
    comments.append(
        'observed slowness (seconds/degree): "{}"'.format(line[59:65]))
    # 67-72   f5.1 slowness residual (seconds/degree)
    comments.append(
        'slowness residual (seconds/degree): "{}"'.format(line[66:72]))
    # 74      a1   time defining flag (T or _)
    comments.append(
        'time defining flag (T or _): "{}"'.format(line[73]))
    # 75      a1   azimuth defining flag (A or _)
    comments.append(
        'azimuth defining flag (A or _): "{}"'.format(line[74]))
    # 76      a1   slowness defining flag (S or _)
    comments.append(
        'slowness defining flag (S or _): "{}"'.format(line[75]))
    # 78-82   f5.1 signal-to-noise ratio
    comments.append('signal-to-noise ratio: "{}"'.format(line[77:82]))
    # 84-92   f9.1 amplitude (nanometers)
    amplitude = float_or_none(line[83:92])
    # 94-98   f5.2 period (seconds)
    period = float_or_none(line[93:98])
    # 100     a1   type of pick (a = automatic, m = manual)
    evaluation_mode = line[99]
    # 101     a1   direction of short period motion
    #              (c = compression, d = dilatation, _ = null)
    polarity = POLARITY[line[100].strip().lower()]
    # 102     a1   onset quality (i = impulsive, e = emergent,
    #              q = questionable, _ = null)
    onset = ONSET[line[101].strip().lower()]
    # 104-108 a5   magnitude type (mb, Ms, ML, mbmle, msmle)
    magnitude_type = line[103:108].strip()
    # 109     a1   min max indicator (<, >, or blank)
    min_max_indicator = line[108]
    # 110-113 f4.1 magnitude value
    mag = float_or_none(line[109:113])
    # 115-122 a8   arrival identification
    phase_id = line[114:122].strip()
    # process items
    waveform_id = WaveformStreamID(station_code=station_code)
    evaluation_mode = PICK_EVALUATION_MODE[
        evaluation_mode.strip().lower()]
    comments = [self._make_comment(', '.join(comments))]
    # use the arrival id if present, otherwise construct a hashed one
    if phase_id:
        resource_id = self._construct_id(['pick', phase_id])
    else:
        resource_id = self._construct_id(['pick'], add_hash=True)
    if mag:
        comment = ('min max indicator (<, >, or blank): ' +
                   min_max_indicator)
        station_magnitude = StationMagnitude(
            mag=mag, magnitude_type=magnitude_type,
            resource_id=self._construct_id(['station_magnitude'],
                                           add_hash=True),
            comments=[self._make_comment(comment)])
        # StationMagnitude init always sets an empty ResourceIdentifier
        # and QuantityError, even when specifying None, which is strange
        for key in ['origin_id', 'mag_errors']:
            setattr(station_magnitude, key, None)
    else:
        station_magnitude = None
    # assemble
    pick = Pick(phase_hint=phase_hint, time=time,
                waveform_id=waveform_id,
                evaluation_mode=evaluation_mode, comments=comments,
                polarity=polarity, onset=onset,
                resource_id=resource_id)
    # Pick init always sets an empty QuantityError, even when
    # specifying None, which is strange
    for key in ('time_errors', 'horizontal_slowness_errors',
                'backazimuth_errors'):
        setattr(pick, key, None)
    if amplitude:
        # convert from nanometers to meters
        amplitude /= 1e9
        amplitude = Amplitude(
            unit='m', generic_amplitude=amplitude, period=period)
    return pick, amplitude, station_magnitude

def _parse_generic_comment(self, line):
    return self._make_comment(line)
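# The fixed-width parsers above lean on float_or_none and int_or_none,
# which are referenced but not defined in this excerpt. A minimal sketch
# of what such helpers could look like, assuming they simply map blank
# fixed-width fields to None:

def float_or_none(string):
    # Hypothetical helper: return the field as a float, or None if the
    # fixed-width slice is blank (the common case in ISF/IMS files).
    string = string.strip()
    if not string:
        return None
    return float(string)


def int_or_none(string):
    # Same idea for integer fields such as station counts.
    string = string.strip()
    if not string:
        return None
    return int(string)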
def readSeishubEventFile(filename):
    """
    Reads a SeisHub event file and returns an ObsPy Catalog object.

    .. warning::
        This function should NOT be called directly, it registers via
        the ObsPy :func:`~obspy.core.event.readEvents` function, call
        this instead.

    :type filename: str
    :param filename: SeisHub event file to be read.
    :rtype: :class:`~obspy.core.event.Catalog`
    :return: An ObsPy Catalog object.
    """
    global CURRENT_TYPE
    base_name = os.path.basename(filename)
    if base_name.lower().startswith("baynet"):
        CURRENT_TYPE = "baynet"
    elif base_name.lower().startswith("earthworm"):
        CURRENT_TYPE = "earthworm"
    elif base_name.lower().startswith("gof"):
        CURRENT_TYPE = "seiscomp3"
    elif base_name.lower().startswith("obspyck") or base_name == "5622":
        CURRENT_TYPE = "obspyck"
    elif base_name.lower().startswith("toni"):
        CURRENT_TYPE = "toni"
    else:
        msg = "Unknown SeisHub event file type: %s" % base_name
        raise ValueError(msg)
    # Just init the parser, the SeisHub event file format has no
    # namespaces.
    parser = XMLParser(filename)
    # Create new Event object.
    public_id = parser.xpath('event_id/value')[0].text
    # A SeisHub event file specifies a single event, so Catalog
    # information is not really given.
    catalog = Catalog()
    catalog.resource_id = "/".join([RESOURCE_ROOT, "catalog", public_id])
    # Read the event_type tag.
    account = parser.xpath2obj('event_type/account', parser, str)
    user = parser.xpath2obj('event_type/user', parser, str)
    global_evaluation_mode = parser.xpath2obj('event_type/value', parser,
                                              str)
    public = parser.xpath2obj('event_type/public', parser, str)
    public = {"True": True, "False": False}.get(public, None)
    if account is not None and account.lower() != "sysop":
        public = False
    # The author will be stored in the CreationInfo object. This will be
    # the creation info of the event as well as of all picks.
    author = user
    if CURRENT_TYPE in ["seiscomp3", "earthworm"]:
        author = CURRENT_TYPE
    creation_info = {"author": author,
                     "agency_id": "Erdbebendienst Bayern",
                     "agency_uri": "%s/agency" % RESOURCE_ROOT,
                     "creation_time": NOW}
    # Create the event object.
    event = Event(
        resource_id="/".join([RESOURCE_ROOT, "event", public_id]),
        creation_info=creation_info)
    # Store the public flag (False unless account is None or 'sysop')
    # and the global evaluation mode as extra attributes.
    event.extra = AttribDict()
    event.extra.public = {'value': public, 'namespace': NAMESPACE}
    event.extra.evaluationMode = {'value': global_evaluation_mode,
                                  'namespace': NAMESPACE}
    event_type = parser.xpath2obj('type', parser, str)
    if event_type is not None:
        if event_type == "induced earthquake":
            event_type = "induced or triggered event"
        if event_type != "null":
            event.event_type = event_type
    # Parse the origins.
    origins = parser.xpath("origin")
    if len(origins) > 1:
        msg = "Only files with a single origin are currently supported"
        raise Exception(msg)
    for origin_el in origins:
        origin = __toOrigin(parser, origin_el)
        event.origins.append(origin)
    # Parse the magnitudes.
    for magnitude_el in parser.xpath("magnitude"):
        magnitude = __toMagnitude(parser, magnitude_el, origin)
        if magnitude.mag is None:
            continue
        event.magnitudes.append(magnitude)
    # Parse the picks.
    # Pass the global evaluation mode (automatic, manual).
    for pick_el in parser.xpath("pick"):
        pick = __toPick(parser, pick_el, global_evaluation_mode)
        if pick is None:
            continue
        event.picks.append(pick)
        # The Arrival object gets the following things from the SeisHub
        # pick objects:
        #   arrival.time_weight = pick.phase_weight
        #   arrival.time_residual = pick.phase_res
        #   arrival.azimuth = pick.azimuth
        #   arrival.take_off_angle = pick.incident
        #   arrival.distance = hyp_dist
        arrival = __toArrival(parser, pick_el, global_evaluation_mode,
                              pick)
        if event.origins:
            event.origins[0].arrivals.append(arrival)
    # Parse the station magnitudes.
    for stat_magnitude_el in parser.xpath("stationMagnitude"):
        stat_magnitude = __toStationMagnitude(parser, stat_magnitude_el)
        event.station_magnitudes.append(stat_magnitude)
    # Parse the amplitudes. We do not reference their id in the
    # corresponding station magnitude, because we use one amplitude
    # measurement for each component.
    for el in parser.xpath("stationMagnitude/amplitude"):
        event.amplitudes.append(__toAmplitude(parser, el))
    for mag in event.station_magnitudes:
        mag.origin_id = event.origins[0].resource_id
    for _i, stat_mag in enumerate(event.station_magnitudes):
        contrib = StationMagnitudeContribution()
        # The order of station magnitude objects is the same as in the
        # xml file.
        weight = parser.xpath2obj(
            "weight", parser.xpath("stationMagnitude")[_i], float)
        if weight is not None:
            contrib.weight = weight
        contrib.station_magnitude_id = stat_mag.resource_id
        event.magnitudes[0].station_magnitude_contributions.append(
            contrib)
    for foc_mec_el in parser.xpath("focalMechanism"):
        foc_mec = __toFocalMechanism(parser, foc_mec_el)
        if foc_mec is not None:
            event.focal_mechanisms.append(foc_mec)
    # Set the origin id for the focal mechanisms. There is only one
    # origin per SeisHub event file.
    for focmec in event.focal_mechanisms:
        focmec.triggering_origin_id = event.origins[0].resource_id
    # Add the event to the catalog
    catalog.append(event)
    return catalog
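# A minimal usage sketch for the reader above, assuming this module and
# its helpers are importable. The file name is hypothetical; its
# "obspyck" basename prefix routes parsing through the
# CURRENT_TYPE == "obspyck" branch.

catalog = readSeishubEventFile("obspyck_20120101_event.xml")
print(len(catalog))  # a SeisHub file always yields exactly one event
event = catalog[0]
print(event.origins[0].time, event.magnitudes)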
def request_gcmt(starttime, endtime, minmagnitude=None, mindepth=None,
                 maxdepth=None, minlatitude=None, maxlatitude=None,
                 minlongitude=None, maxlongitude=None):
    """
    Request events from the Global CMT catalog (globalcmt.org) using
    mechanize. This is a preliminary implementation written against the
    current globalcmt.org search form.
    """
    import re
    from mechanize import Browser

    # Split leading text from numbers, e.g. 'PDE2004' -> ('PDE', '2004')
    r = re.compile("([a-zA-Z]+)([0-9]+)")
    br = Browser()
    br.open('http://www.globalcmt.org/CMTsearch.html')
    # Site has just one form
    br.select_form(nr=0)
    br.form['yr'] = str(starttime.year)
    br.form['mo'] = str(starttime.month)
    br.form['day'] = str(starttime.day)
    br.form['oyr'] = str(endtime.year)
    br.form['omo'] = str(endtime.month)
    br.form['oday'] = str(endtime.day)
    br.form['list'] = ['4']
    br.form['itype'] = ['ymd']
    br.form['otype'] = ['ymd']
    if minmagnitude:
        br.form['lmw'] = str(minmagnitude)
    if minlatitude:
        br.form['llat'] = str(minlatitude)
    if maxlatitude:
        br.form['ulat'] = str(maxlatitude)
    if minlongitude:
        br.form['llon'] = str(minlongitude)
    if maxlongitude:
        br.form['ulon'] = str(maxlongitude)
    if mindepth:
        br.form['lhd'] = str(mindepth)
    if maxdepth:
        br.form['uhd'] = str(maxdepth)
    print("Submitting parameters to globalcmt.")
    req = br.submit()
    print("Retrieving data, creating catalog.")
    data = []
    for line in req:
        data.append(line)
    data_chunked = _chunking_list(keyword='\n', list=data)
    origins = []
    magnitudes = []
    tensor = []
    for line in data_chunked:
        for element in line:
            if 'event name' in element:
                org = line[1].split()
                year = int(r.match(org[0]).groups()[1])
                mon = int(org[1])
                day = int(org[2])
                hour = int(org[3])
                minute = int(org[4])
                # keep fractional seconds as a float instead of
                # misinterpreting the fraction as microseconds
                sec = float(org[5])
                origins_temp = UTCDateTime(year, mon, day, hour,
                                           minute) + sec
                # add the centroid time shift located in line[3]
                origin = origins_temp + float(line[3].split()[2])
                magnitude = float(line[1].split()[10])
                latitude = float(line[5].split()[1])
                longitude = float(line[6].split()[1])
                depth = 1000. * float(line[7].split()[1])
                m_rr = float(line[8].split()[1])
                m_tt = float(line[9].split()[1])
                m_pp = float(line[10].split()[1])
                m_rt = float(line[11].split()[1])
                m_rp = float(line[12].split()[1])
                m_tp = float(line[13].split()[1])
                magnitudes.append(("Mw", magnitude))
                origins.append((latitude, longitude, depth, origin))
                tensor.append((m_rr, m_tt, m_pp, m_rt, m_rp, m_tp))
    cat = Catalog()
    for mag, org, ten in zip(magnitudes, origins, tensor):
        # Create magnitude object.
        magnitude = Magnitude()
        magnitude.magnitude_type = mag[0]
        magnitude.mag = mag[1]
        # Write origin object.
        origin = Origin()
        origin.latitude = org[0]
        origin.longitude = org[1]
        origin.depth = org[2]
        origin.time = org[3]
        # Create event object and append to catalog object.
        event = Event()
        event.magnitudes.append(magnitude)
        event.origins.append(origin)
        event.MomentTensor = MomentTensor()
        event.MomentTensor.m_rr = ten[0]
        event.MomentTensor.m_tt = ten[1]
        event.MomentTensor.m_pp = ten[2]
        event.MomentTensor.m_rt = ten[3]
        event.MomentTensor.m_rp = ten[4]
        event.MomentTensor.m_tp = ten[5]
        cat.append(event)
    return cat
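# Since request_gcmt screen-scrapes a live web form, results depend on
# globalcmt.org being reachable and its form layout being unchanged.
# A usage sketch (requires network access and the mechanize package):

from obspy import UTCDateTime

# Query one week of Mw >= 6 events around the 2011 Tohoku sequence.
cat = request_gcmt(starttime=UTCDateTime(2011, 3, 8),
                   endtime=UTCDateTime(2011, 3, 15),
                   minmagnitude=6.0)
print(cat)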
def __init__(self, fh, **kwargs):
    # store all non-empty lines, decoded and stripped of trailing
    # whitespace
    self.lines = [_decode_if_possible(line, self.encoding).rstrip()
                  for line in fh.readlines() if line.strip()]
    self.cat = Catalog()
    self._no_uuid_hashes = kwargs.get('_no_uuid_hashes', False)
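# _decode_if_possible, used by the constructor above, is referenced but
# not shown in this excerpt. A plausible sketch, assuming it simply
# passes through anything that is already a str or cannot be decoded:

def _decode_if_possible(string, encoding="UTF-8"):
    # Hypothetical helper: decode bytes with the reader's encoding,
    # returning the input unchanged if it is not a bytes object.
    try:
        return string.decode(encoding)
    except (AttributeError, UnicodeDecodeError):
        return string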
def readSeishubEventFile(filename):
    """
    Reads a SeisHub event file and returns an ObsPy Catalog object.

    .. warning::
        This function should NOT be called directly, it registers via
        the ObsPy :func:`~obspy.core.event.readEvents` function, call
        this instead.

    :type filename: str
    :param filename: SeisHub event file to be read.
    :rtype: :class:`~obspy.core.event.Catalog`
    :return: An ObsPy Catalog object.
    """
    # Just init the parser, the SeisHub event file format has no
    # namespaces.
    parser = XMLParser(filename)
    # A SeisHub event file specifies a single event, so Catalog
    # information is not really given.
    catalog = Catalog()
    # Create new Event object.
    public_id = parser.xpath('event_id/value')[0].text
    # Read the event_type tag.
    pick_method = parser.xpath2obj('event_type/account', parser, str)
    user = parser.xpath2obj('event_type/user', parser, str)
    global_evaluation_mode = parser.xpath2obj('event_type/value', parser,
                                              str)
    # The author will be stored in the CreationInfo object. This will be
    # the creation info of the event as well as of all picks.
    creation_info = {"author": user}
    # Create the event object.
    event = Event(resource_id=public_id, creation_info=creation_info)
    # Parse the origins.
    for origin_el in parser.xpath("origin"):
        origin = __toOrigin(parser, origin_el)
        event.origins.append(origin)
    # There should always be exactly one origin.
    assert len(event.origins) == 1
    # Parse the magnitudes.
    for magnitude_el in parser.xpath("magnitude"):
        magnitude = __toMagnitude(parser, magnitude_el)
        event.magnitudes.append(magnitude)
    # Parse the station magnitudes.
    for stat_magnitude_el in parser.xpath("stationMagnitude"):
        stat_magnitude = __toStationMagnitude(parser, stat_magnitude_el)
        event.station_magnitudes.append(stat_magnitude)
    # Parse the picks. Pass the global evaluation mode (automatic,
    # manual).
    for pick_el in parser.xpath("pick"):
        pick = __toPick(parser, pick_el, global_evaluation_mode)
        event.picks.append(pick)
    # Append the creation info to all picks. Also add the pick method,
    # i.e. the event_type/account value, as the method_id of each pick.
    for pick in event.picks:
        pick.creation_info = creation_info
        pick.method_id = pick_method
    # In QuakeML a StationMagnitude object has to be associated with an
    # Origin. This in turn means that the origin needs to have a
    # resource_id.
    event.origins[0].resource_id = "smi:local/origins/%s" % \
        event.resource_id.resource_id
    for mag in event.station_magnitudes:
        mag.origin_id = event.origins[0].resource_id
    # Add the event to the catalog
    catalog.append(event)
    return catalog
def _read_ndk(filename, *args, **kwargs):  # @UnusedVariable
    """
    Reads an NDK file to a :class:`~obspy.core.event.Catalog` object.

    :param filename: File or file-like object in text mode.
    """
    # Read the whole file at once. While an iterator would be more
    # efficient the largest NDK file out in the wild is 13.7 MB so it
    # does not matter much.
    if not hasattr(filename, "read"):
        # Check if it exists, otherwise assume it's a string.
        try:
            with open(filename, "rt") as fh:
                data = fh.read()
        except Exception:
            try:
                data = filename.decode()
            except Exception:
                data = str(filename)
            data = data.strip()
    else:
        data = filename.read()
        if hasattr(data, "decode"):
            data = data.decode()

    # Create iterator that yields lines.
    def lines_iter():
        prev_line = -1
        while True:
            next_line = data.find("\n", prev_line + 1)
            if next_line < 0:
                break
            yield data[prev_line + 1:next_line]
            prev_line = next_line
        if len(data) > prev_line + 1:
            yield data[prev_line + 1:]

    # Use one Flinn Engdahl object for all region determinations.
    fe = FlinnEngdahl()
    cat = Catalog(resource_id=_get_resource_id("catalog",
                                               str(uuid.uuid4())))

    # Loop over 5 lines at once.
    for _i, lines in enumerate(itertools.zip_longest(*[lines_iter()] * 5)):
        if None in lines:
            msg = "Skipped last %i lines. Not a multiple of 5 lines." % (
                lines.count(None))
            warnings.warn(msg, ObsPyNDKWarning)
            continue

        # Parse the lines to a human readable dictionary.
        try:
            record = _read_lines(*lines)
        except (ValueError, ObsPyNDKException):
            exc = traceback.format_exc()
            msg = (
                "Could not parse event %i (faulty file?). Will be "
                "skipped. Lines of the event:\n"
                "\t%s\n"
                "%s") % (_i + 1, "\n\t".join(lines), exc)
            warnings.warn(msg, ObsPyNDKWarning)
            continue

        # Use one creation info for essentially every item.
        creation_info = CreationInfo(
            agency_id="GCMT",
            version=record["version_code"]
        )

        # Use the ObsPy Flinn Engdahl region determiner as the region in
        # the NDK files is oftentimes trimmed.
        region = fe.get_region(record["centroid_longitude"],
                               record["centroid_latitude"])

        # Create an event object.
        event = Event(
            force_resource_id=False,
            event_type="earthquake",
            event_type_certainty="known",
            event_descriptions=[
                EventDescription(text=region,
                                 type="Flinn-Engdahl region"),
                EventDescription(text=record["cmt_event_name"],
                                 type="earthquake name")
            ]
        )

        # Assemble the time for the reference origin.
        try:
            time = _parse_date_time(record["date"], record["time"])
        except ObsPyNDKException:
            msg = ("Invalid time in event %i. '%s' and '%s' cannot be "
                   "assembled to a valid time. Event will be "
                   "skipped.") % (_i + 1, record["date"], record["time"])
            warnings.warn(msg, ObsPyNDKWarning)
            continue

        # Create two origins, one with the reference latitude/longitude
        # and one with the centroidal values.
        ref_origin = Origin(
            force_resource_id=False,
            time=time,
            longitude=record["hypo_lng"],
            latitude=record["hypo_lat"],
            # Convert to m.
            depth=record["hypo_depth_in_km"] * 1000.0,
            origin_type="hypocenter",
            comments=[Comment(text="Hypocenter catalog: %s" %
                              record["hypocenter_reference_catalog"],
                              force_resource_id=False)]
        )
        ref_origin.comments[0].resource_id = _get_resource_id(
            record["cmt_event_name"], "comment", tag="ref_origin")
        ref_origin.resource_id = _get_resource_id(
            record["cmt_event_name"], "origin", tag="reforigin")

        cmt_origin = Origin(
            force_resource_id=False,
            longitude=record["centroid_longitude"],
            longitude_errors={
                "uncertainty": record["centroid_longitude_error"]},
            latitude=record["centroid_latitude"],
            latitude_errors={
                "uncertainty": record["centroid_latitude_error"]},
            # Convert to m.
            depth=record["centroid_depth_in_km"] * 1000.0,
            depth_errors={
                "uncertainty":
                    record["centroid_depth_in_km_error"] * 1000},
            time=ref_origin["time"] + record["centroid_time"],
            time_errors={
                "uncertainty": record["centroid_time_error"]},
            depth_type=record["type_of_centroid_depth"],
            origin_type="centroid",
            time_fixed=False,
            epicenter_fixed=False,
            creation_info=creation_info.copy()
        )
        cmt_origin.resource_id = _get_resource_id(
            record["cmt_event_name"], "origin", tag="cmtorigin")
        event.origins = [ref_origin, cmt_origin]
        event.preferred_origin_id = cmt_origin.resource_id.id

        # Create the magnitude object.
        mag = Magnitude(
            force_resource_id=False,
            mag=round(record["Mw"], 2),
            magnitude_type="Mwc",
            origin_id=cmt_origin.resource_id,
            creation_info=creation_info.copy()
        )
        mag.resource_id = _get_resource_id(
            record["cmt_event_name"], "magnitude", tag="moment_mag")
        event.magnitudes = [mag]
        event.preferred_magnitude_id = mag.resource_id.id

        # Add the reported mb, MS magnitudes as additional magnitude
        # objects.
        event.magnitudes.append(Magnitude(
            force_resource_id=False,
            mag=record["mb"],
            magnitude_type="mb",
            comments=[Comment(
                force_resource_id=False,
                text="Reported magnitude in NDK file. Most likely 'mb'."
            )]
        ))
        event.magnitudes[-1].comments[-1].resource_id = _get_resource_id(
            record["cmt_event_name"], "comment", tag="mb_magnitude")
        event.magnitudes[-1].resource_id = _get_resource_id(
            record["cmt_event_name"], "magnitude", tag="mb")

        event.magnitudes.append(Magnitude(
            force_resource_id=False,
            mag=record["MS"],
            magnitude_type="MS",
            comments=[Comment(
                force_resource_id=False,
                text="Reported magnitude in NDK file. Most likely 'MS'."
            )]
        ))
        event.magnitudes[-1].comments[-1].resource_id = _get_resource_id(
            record["cmt_event_name"], "comment", tag="MS_magnitude")
        event.magnitudes[-1].resource_id = _get_resource_id(
            record["cmt_event_name"], "magnitude", tag="MS")

        # Take care of the moment tensor.
        tensor = Tensor(
            m_rr=record["m_rr"],
            m_rr_errors={"uncertainty": record["m_rr_error"]},
            m_pp=record["m_pp"],
            m_pp_errors={"uncertainty": record["m_pp_error"]},
            m_tt=record["m_tt"],
            m_tt_errors={"uncertainty": record["m_tt_error"]},
            m_rt=record["m_rt"],
            m_rt_errors={"uncertainty": record["m_rt_error"]},
            m_rp=record["m_rp"],
            m_rp_errors={"uncertainty": record["m_rp_error"]},
            m_tp=record["m_tp"],
            m_tp_errors={"uncertainty": record["m_tp_error"]},
            creation_info=creation_info.copy()
        )
        mt = MomentTensor(
            force_resource_id=False,
            scalar_moment=record["scalar_moment"],
            tensor=tensor,
            data_used=[DataUsed(**i) for i in record["data_used"]],
            inversion_type=record["source_type"],
            source_time_function=SourceTimeFunction(
                type=record["moment_rate_type"],
                duration=record["moment_rate_duration"]
            ),
            derived_origin_id=cmt_origin.resource_id,
            creation_info=creation_info.copy()
        )
        mt.resource_id = _get_resource_id(record["cmt_event_name"],
                                          "momenttensor")
        axis = [Axis(**i) for i in record["principal_axis"]]
        focmec = FocalMechanism(
            force_resource_id=False,
            moment_tensor=mt,
            principal_axes=PrincipalAxes(
                # The ordering is the same as for the IRIS SPUD service
                # and from a website of the Saint Louis University
                # Earthquake center so it should be correct.
                t_axis=axis[0],
                p_axis=axis[2],
                n_axis=axis[1]
            ),
            nodal_planes=NodalPlanes(
                nodal_plane_1=NodalPlane(**record["nodal_plane_1"]),
                nodal_plane_2=NodalPlane(**record["nodal_plane_2"])
            ),
            comments=[
                Comment(force_resource_id=False,
                        text="CMT Analysis Type: %s" %
                             record["cmt_type"].capitalize()),
                Comment(force_resource_id=False,
                        text="CMT Timestamp: %s" %
                             record["cmt_timestamp"])],
            creation_info=creation_info.copy()
        )
        focmec.comments[0].resource_id = _get_resource_id(
            record["cmt_event_name"], "comment", tag="cmt_type")
        focmec.comments[1].resource_id = _get_resource_id(
            record["cmt_event_name"], "comment", tag="cmt_timestamp")
        focmec.resource_id = _get_resource_id(record["cmt_event_name"],
                                              "focal_mechanism")
        event.focal_mechanisms = [focmec]
        event.preferred_focal_mechanism_id = focmec.resource_id.id

        # Set at end to avoid duplicate resource id warning.
        event.resource_id = _get_resource_id(record["cmt_event_name"],
                                             "event")

        cat.append(event)

    if len(cat) == 0:
        msg = "No valid events found in NDK file."
        raise ObsPyNDKException(msg)

    return cat
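# _read_ndk plugs into ObsPy's read_events() plugin machinery, so NDK
# files are normally read through the generic entry point. A usage
# sketch (the file path is hypothetical):

from obspy import read_events

cat = read_events("jan76_dec17.ndk")
print(cat)
# Each event carries two origins: the reference hypocenter and the
# preferred centroid origin derived from the CMT inversion.
ev = cat[0]
print(ev.preferred_origin().origin_type)  # 'centroid'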
def test_creationInfo(self):
    cat = Catalog()
    cat.creation_info = CreationInfo(author='test2')
    self.assertIsInstance(cat.creation_info, CreationInfo)
    self.assertEqual(cat.creation_info.author, 'test2')