def test_readEvents(self):
    """
    Tests reading a QuakeML document via readEvents.
    """
    with NamedTemporaryFile() as tf:
        tmpfile = tf.name
        catalog = readEvents(self.neries_filename)
        # Bug fix: assertTrue(len(catalog), 3) passed 3 as the *msg*
        # argument and succeeded for any non-empty catalog; assertEqual
        # actually checks the event count.
        self.assertEqual(len(catalog), 3)
        catalog.write(tmpfile, format='QUAKEML')
        # Read file again. Avoid the (legit) warning about the already used
        # resource identifiers.
        with warnings.catch_warnings(record=True):
            warnings.simplefilter("ignore")
            catalog2 = readEvents(tmpfile)
            self.assertEqual(len(catalog2), 3)
def test_readEvents(self):
    """
    Tests the readEvents function using entry points.
    """
    # assertEquals is a deprecated alias of assertEqual; use the
    # canonical name.
    # iris
    catalog = readEvents(self.iris_xml)
    self.assertEqual(len(catalog), 2)
    self.assertEqual(catalog[0]._format, 'QUAKEML')
    self.assertEqual(catalog[1]._format, 'QUAKEML')
    # neries
    catalog = readEvents(self.neries_xml)
    self.assertEqual(len(catalog), 3)
    self.assertEqual(catalog[0]._format, 'QUAKEML')
    self.assertEqual(catalog[1]._format, 'QUAKEML')
    self.assertEqual(catalog[2]._format, 'QUAKEML')
def test_readEvents(self):
    """
    Tests the readEvents function using entry points.
    """
    # Every file must produce the expected number of events, each of
    # which was read through the QUAKEML plugin.
    expectations = ((self.iris_xml, 2), (self.neries_xml, 3))
    for filename, expected_count in expectations:
        catalog = readEvents(filename)
        self.assertEqual(len(catalog), expected_count)
        for event in catalog:
            self.assertEqual(event._format, 'QUAKEML')
def test_filter(self):
    """
    Testing the filter method of the Catalog object.
    """
    def getattrs(event, attr):
        # Resolve the attribute a filter key refers to: 'magnitude'
        # lives on the first magnitude object, everything else
        # (possibly dotted, e.g. 'quality.standard_error') on the
        # first origin.
        if attr == 'magnitude':
            obj = event.magnitudes[0]
            attr = 'mag'
        else:
            obj = event.origins[0]
        for a in attr.split('.'):
            obj = getattr(obj, a)
        return obj
    cat = readEvents()
    self.assertTrue(
        all(event.magnitudes[0].mag < 4.
            for event in cat.filter('magnitude < 4.')))
    attrs = ('magnitude', 'latitude', 'longitude', 'depth', 'time',
             'quality.standard_error', 'quality.azimuthal_gap',
             'quality.used_station_count', 'quality.used_phase_count')
    values = (4., 40., 50., 10., UTCDateTime('2012-04-04 14:20:00'),
              1., 50, 40, 20)
    for attr, value in zip(attrs, values):
        # Filter strings use only the last component of a dotted key.
        attr_filter = attr.split('.')[-1]
        cat_smaller = cat.filter('%s < %s' % (attr_filter, value))
        cat_bigger = cat.filter('%s >= %s' % (attr_filter, value))
        # '<' and '>=' must select complementary subsets...
        self.assertTrue(
            all(getattrs(event, attr) < value for event in cat_smaller))
        self.assertTrue(
            all(getattrs(event, attr) >= value for event in cat_bigger))
        # ...whose union is drawn entirely from the original catalog.
        self.assertTrue(
            all(event in cat for event in (cat_smaller + cat_bigger)))
def test_event_copying_does_not_raise_duplicate_resource_id_warnings(self):
    """
    Tests that copying an event does not raise a duplicate resource id
    warning.
    """
    ev = readEvents()[0]
    # Capture all warnings; both copy flavours must emit none.
    with warnings.catch_warnings(record=True) as w:
        warnings.simplefilter("always")
        ev2 = copy.copy(ev)
        self.assertEqual(len(w), 0)
        ev3 = copy.deepcopy(ev)
        self.assertEqual(len(w), 0)
    # The two events should compare equal.
    self.assertEqual(ev, ev2)
    self.assertEqual(ev, ev3)
    # A shallow copy should just use the exact same resource identifier,
    # while a deep copy should not, though they should be equal.
    self.assertTrue(ev.resource_id is ev2.resource_id)
    self.assertTrue(ev.resource_id is not ev3.resource_id)
    self.assertTrue(ev.resource_id == ev3.resource_id)
    # But all should point to the same object.
    self.assertTrue(ev.resource_id.getReferredObject() is
                    ev2.resource_id.getReferredObject())
    self.assertTrue(ev.resource_id.getReferredObject() is
                    ev3.resource_id.getReferredObject())
def test_filter(self):
    """
    Testing the filter method of the Catalog object.
    """
    def getattrs(event, attr):
        # Map a filter attribute name onto the object that carries it:
        # 'magnitude' -> first magnitude's .mag, anything else (dotted
        # keys allowed) -> attribute chain on the first origin.
        if attr == 'magnitude':
            obj = event.magnitudes[0]
            attr = 'mag'
        else:
            obj = event.origins[0]
        for a in attr.split('.'):
            obj = getattr(obj, a)
        return obj
    cat = readEvents()
    self.assertTrue(all(event.magnitudes[0].mag < 4.
                        for event in cat.filter('magnitude < 4.')))
    attrs = ('magnitude', 'latitude', 'longitude', 'depth', 'time',
             'quality.standard_error', 'quality.azimuthal_gap',
             'quality.used_station_count', 'quality.used_phase_count')
    values = (4., 40., 50., 10., UTCDateTime('2012-04-04 14:20:00'),
              1., 50, 40, 20)
    for attr, value in zip(attrs, values):
        # Only the last component of a dotted key appears in the
        # filter string.
        attr_filter = attr.split('.')[-1]
        cat_smaller = cat.filter('%s < %s' % (attr_filter, value))
        cat_bigger = cat.filter('%s >= %s' % (attr_filter, value))
        self.assertTrue(all(getattrs(event, attr) < value
                            for event in cat_smaller))
        self.assertTrue(all(getattrs(event, attr) >= value
                            for event in cat_bigger))
        # Both partitions must consist solely of original events.
        self.assertTrue(all(event in cat
                            for event in (cat_smaller + cat_bigger)))
def cut_events(in_, out):
    # Cut a waveform window around every pick of every event in a
    # QuakeML file and write one Q file per event origin.
    # NOTE(review): Python 2 print statements throughout -- this
    # function cannot run under Python 3 as written.
    print 'read events...'
    catalog = readEvents(in_, 'QUAKEML')
    print 'cut events...'
    for event in ProgressBar()(catalog):
        oid = get_event_id(event.origins[0].resource_id.getQuakeMLURI())
        ori = event.origins[0]
        etime = ori.time
        #print 'Select', event
        st = Stream()
        for arrival in ori.arrivals:
            arrival.pick_id.convertIDToQuakeMLURI()
            pick = arrival.pick_id.getReferredObject()
            if not pick:
                print 'FAIL to get pick from arrival'
                continue
            ptime = pick.time
            seed_id = pick.waveform_id.getSEEDString()
            try:
                # Window: 50 s before to 250 s after the pick time.
                st1 = Stream(data.client.getWaveform(
                    *(seed_id.split('.') + [ptime - 50, ptime + 250])))
            except Exception as ex:
                print '%s for %s' % (ex, seed_id)
                continue
            st1.merge()
            #print 'load %s %s %.1f' % (seed_id, pick.phase_hint, ptime - etime)
            # Attach event metadata to the first trace for later use.
            st1[0].stats['event'] = AttribDict(
                id=event.resource_id.resource_id,
                origin_id=oid, etime=etime,
                ptime=ptime, lat=ori.latitude,
                lon=ori.longitude, depth=ori.depth,
                rms=ori.quality.standard_error,
                mag=event.magnitudes[0].mag)
            st += st1
        st.write(out % oid, 'Q')
def test_str(self):
    """
    Testing the __str__ method of the Event object.
    """
    event = readEvents()[1]
    s = event.short_str()
    # assertEquals is a deprecated alias of assertEqual.
    self.assertEqual("2012-04-04T14:18:37.000000Z | +39.342, +41.044" +
                     " | 4.3 ML | manual", s)
def getEventDetail(self, uri, format=None):
    """
    Gets event detail information.

    :type uri: str
    :param uri: Event identifier as either a EMSC event unique identifier,
        e.g. ``"19990817_0000001"`` or a QuakeML-formatted event URI, e.g.
        ``"quakeml:eu.emsc/event#19990817_0000001"``.
    :type format: ``'list'``, ``'xml'`` or ``'catalog'``, optional
    :param format: Format of returned results. Defaults to ``'xml'``.
    :rtype: :class:`~obspy.core.event.Catalog`, list or str
    :return: Method will return either an ObsPy
        :class:`~obspy.core.event.Catalog` object, a list of event
        dictionaries or a QuakeML string depending on the ``format``
        keyword.

    .. seealso:: http://www.seismicportal.eu/services/event/detail/info/

    .. rubric:: Example

    >>> from obspy.neries import Client
    >>> client = Client()
    >>> result = client.getEventDetail("19990817_0000001", 'list')
    >>> len(result)  # Number of calculated origins
    12
    >>> result[0]  # Details about first calculated origin  #doctest: +SKIP
    {'author': u'EMSC', 'event_id': u'19990817_0000001',
     'origin_id': 1465935, 'longitude': 29.972,
     'datetime': UTCDateTime(1999, 8, 17, 0, 1, 35), 'depth': -10.0,
     'magnitude': 6.7, 'magnitude_type': u'mw', 'latitude': 40.749}
    """
    # deprecation warning if format is not set
    if format is None:
        msg = "The default setting format='xml' for obspy.neries." + \
            "Client.getEventDetail() will be changed in the future to " + \
            "format='catalog'. Please call this function with the " + \
            "format keyword in order to hide this deprecation warning."
        warnings.warn(msg, category=DeprecationWarning)
        format = "xml"
    # parse parameters; 'list' output is fetched as JSON, everything
    # else as XML
    kwargs = {}
    if format == 'list':
        kwargs['format'] = 'json'
    else:
        kwargs['format'] = 'xml'
    if str(uri).startswith('quakeml:'):
        # QuakeML-formatted event URI
        kwargs['uri'] = str(uri)
    else:
        # EMSC event unique identifier
        kwargs['unid'] = str(uri)
    # fetch data
    data = self._fetch("/services/event/detail", **kwargs)
    # format output
    if format == "list":
        return self._json2list(data)
    elif format == "catalog":
        return readEvents(StringIO.StringIO(data), 'QUAKEML')
    else:
        # raw QuakeML string
        return data
def test_str(self):
    """
    Testing the __str__ method of the Catalog object.
    """
    catalog = readEvents()
    # str() invokes __str__; check the header and the last event line.
    text = str(catalog)
    self.assertTrue(text.startswith("3 Event(s) in Catalog:"))
    self.assertTrue(text.endswith("37.736 | 3.0 ML | manual"))
def test_preferred_tags(self): """ Testing preferred magnitude, origin and focal mechanism tags """ # testing empty event ev = Event() self.assertEqual(ev.preferred_origin(), None) self.assertEqual(ev.preferred_magnitude(), None) self.assertEqual(ev.preferred_focal_mechanism(), None) # testing existing event filename = os.path.join(self.path, 'preferred.xml') catalog = readEvents(filename) self.assertEqual(len(catalog), 1) ev_str = "Event:\t2012-12-12T05:46:24.120000Z | +38.297, +142.373 " + \ "| 2.0 MW" self.assertTrue(ev_str in str(catalog.events[0])) # testing ids ev = catalog.events[0] self.assertEqual('smi:orig2', ev.preferred_origin_id) self.assertEqual('smi:mag2', ev.preferred_magnitude_id) self.assertEqual('smi:fm2', ev.preferred_focal_mechanism_id) # testing objects self.assertEqual(ev.preferred_origin(), ev.origins[1]) self.assertEqual(ev.preferred_magnitude(), ev.magnitudes[1]) self.assertEqual(ev.preferred_focal_mechanism(), ev.focal_mechanisms[1])
def test_read_string(self):
    """
    Test reading a QuakeML string/unicode object via readEvents.
    """
    # Use a context manager so the file handle is closed deterministically
    # (the original `open(...).read()` leaked the handle).
    with open(self.neries_filename, 'rt') as fh:
        data = fh.read()
    catalog = readEvents(data)
    self.assertEqual(len(catalog), 3)
def test_preferred_tags(self): """ Testing preferred magnitude, origin and focal mechanism tags """ # testing empty event ev = Event() self.assertEqual(ev.preferred_origin(), None) self.assertEqual(ev.preferred_magnitude(), None) self.assertEqual(ev.preferred_focal_mechanism(), None) # testing existing event filename = os.path.join(self.path, 'preferred.xml') catalog = readEvents(filename) self.assertEqual(len(catalog), 1) ev_str = "Event:\t2012-12-12T05:46:24.120000Z | +38.297, +142.373 " + \ "| 2.0 MW" self.assertTrue(ev_str in str(catalog.events[0])) # testing ids ev = catalog.events[0] self.assertEqual('smi:orig2', ev.preferred_origin_id) self.assertEqual('smi:mag2', ev.preferred_magnitude_id) self.assertEqual('smi:fm2', ev.preferred_focal_mechanism_id) # testing objects self.assertEqual(ev.preferred_origin(), ev.origins[1]) self.assertEqual(ev.preferred_magnitude(), ev.magnitudes[1]) self.assertEqual( ev.preferred_focal_mechanism(), ev.focal_mechanisms[1])
def test_catalog_plot_cylindrical(self):
    """
    Tests the catalog preview plot, default parameters.
    """
    catalog = readEvents()
    with ImageComparison(self.image_dir, "catalog1.png") as ic:
        # Pin the dpi so the generated image matches the reference.
        rcParams["savefig.dpi"] = 72
        catalog.plot(outfile=ic.name)
def test_readEvents(self):
    """
    Tests reading a QuakeML document via readEvents.
    """
    filename = os.path.join(self.path, 'neries_events.xml')
    tmpfile = NamedTemporaryFile().name
    catalog = readEvents(filename)
    # Bug fix: assertTrue(len(catalog), 3) passed 3 as the *msg*
    # argument and succeeded for any non-empty catalog.
    self.assertEqual(len(catalog), 3)
    try:
        catalog.write(tmpfile, format='QUAKEML')
        # Read file again. Avoid the (legit) warning about the already
        # used resource identifiers.
        with warnings.catch_warnings(record=True):
            warnings.simplefilter("ignore")
            catalog2 = readEvents(tmpfile)
        self.assertEqual(len(catalog2), 3)
    finally:
        # clean up even when an assertion above fails
        os.remove(tmpfile)
def test_read_string(self):
    """
    Test reading a QuakeML string/unicode object via readEvents.
    """
    filename = os.path.join(self.path, 'neries_events.xml')
    # Context manager closes the handle (original leaked it).
    with open(filename, 'rt') as fh:
        data = fh.read()
    catalog = readEvents(data)
    self.assertEqual(len(catalog), 3)
def test_str(self):
    """
    Testing the __str__ method of the Event object.
    """
    event = readEvents()[1]
    s = event.short_str()
    # assertEquals is a deprecated alias of assertEqual.
    self.assertEqual("2012-04-04T14:18:37.000000Z | +39.342, +41.044" +
                     " | 4.3 ML | manual", s)
def test_catalog_plot_ortho(self):
    """
    Tests the catalog preview plot, ortho projection, some non-default
    parameters.
    """
    catalog = readEvents()
    with ImageComparison(self.image_dir, "catalog2.png") as ic:
        # Fixed dpi keeps the output comparable to the reference image.
        rcParams["savefig.dpi"] = 72
        catalog.plot(outfile=ic.name, projection="ortho",
                     resolution="c", water_fill_color="b", label=None)
def test_readEvents(self):
    """
    Tests reading an mchedr document via readEvents.
    """
    filename = os.path.join(self.path, 'mchedr.dat')
    # Read file. Avoid the (legit) warning about the already used
    # resource identifiers.
    with warnings.catch_warnings(record=True):
        catalog = readEvents(filename)
        # Bug fix: assertTrue(len(catalog), 1) passed 1 as the *msg*
        # argument and always succeeded for a non-empty catalog.
        self.assertEqual(len(catalog), 1)
def test_readEvents(self):
    """
    Tests reading a QuakeML document via readEvents.

    skipIfPython25 due to the use of the warnings context manager.
    """
    filename = os.path.join(self.path, 'neries_events.xml')
    tmpfile = NamedTemporaryFile().name
    catalog = readEvents(filename)
    # Bug fix: assertTrue(len(catalog), 3) passed 3 as the *msg*
    # argument and succeeded for any non-empty catalog.
    self.assertEqual(len(catalog), 3)
    try:
        catalog.write(tmpfile, format='QUAKEML')
        # Read file again. Avoid the (legit) warning about the already
        # used resource identifiers.
        with warnings.catch_warnings(record=True):
            warnings.simplefilter("ignore")
            catalog2 = readEvents(tmpfile)
        self.assertEqual(len(catalog2), 3)
    finally:
        # clean up even when an assertion above fails
        os.remove(tmpfile)
def test_read_equivalence(self):
    """
    See #662.
    Tests if readQuakeML() and readEvents() return the same results.
    """
    # Scope the ignore filter to this test instead of mutating the
    # process-global warnings filter list: the original popped index 0
    # afterwards, which could remove an unrelated filter registered in
    # the meantime.
    with warnings.catch_warnings():
        warnings.simplefilter("ignore", UserWarning)
        cat1 = readEvents(self.neries_filename)
        cat2 = readQuakeML(self.neries_filename)
    self.assertEqual(cat1, cat2)
def test_getitem(self):
    """
    Tests the __getitem__ method of the Catalog object.
    """
    catalog = readEvents()
    # Positive and negative indices delegate to the events list.
    for index in (0, -1, 2):
        self.assertEqual(catalog[index], catalog.events[index])
    # Indices beyond either end must raise IndexError.
    for bad_index in (3, -99):
        self.assertRaises(IndexError, catalog.__getitem__, bad_index)
def test_catalog_plot_ortho(self):
    """
    Tests the catalog preview plot, ortho projection, some non-default
    parameters.
    """
    catalog = readEvents()
    with ImageComparison(self.image_dir, "catalog2.png") as ic:
        # Fix the dpi so output matches the stored reference image.
        rcParams['savefig.dpi'] = 72
        catalog.plot(outfile=ic.name, projection="ortho",
                     resolution="c", water_fill_color="b", label=None)
def test_countAndLen(self):
    """
    Tests the count and __len__ methods of the Catalog object.
    """
    # An empty catalog reports zero events through both interfaces.
    empty = Catalog()
    self.assertEqual(len(empty), 0)
    self.assertEqual(empty.count(), 0)
    # The default test catalog holds exactly three events.
    populated = readEvents()
    self.assertEqual(len(populated), 3)
    self.assertEqual(populated.count(), 3)
def test_catalog_plot_local(self):
    """
    Tests the catalog preview plot, local projection, some more non-default
    parameters.
    """
    from matplotlib import rcParams
    catalog = readEvents()
    with ImageComparison(self.image_dir, "catalog3.png") as ic:
        # Pin dpi for a reproducible comparison against the reference.
        rcParams['savefig.dpi'] = 72
        catalog.plot(outfile=ic.name, projection="local",
                     resolution="i", continent_fill_color="0.3",
                     color="date", colormap="gist_heat")
def test_copy(self):
    """
    Testing the copy method of the Catalog object.
    """
    original = readEvents()
    duplicate = original.copy()
    # Copies compare equal in both directions...
    self.assertTrue(original == duplicate)
    self.assertTrue(duplicate == original)
    # ...but are distinct objects.
    self.assertFalse(original is duplicate)
    self.assertFalse(duplicate is original)
    # Contained events are equal yet not shared between the catalogs.
    self.assertTrue(original.events[0] == duplicate.events[0])
    self.assertFalse(original.events[0] is duplicate.events[0])
def _openCatalog(self, filename):
    """
    Open an existing QuakeML catalog and load the picks of its first
    event for display.

    :param filename: path to the QuakeML file
    :raises IOError: if the file cannot be read or parsed
    """
    try:
        print('Opening QuakeML Catalog %s' % filename)
        cat = event.readEvents(filename)
        self._picks = cat[0].picks
        self._drawPicks()
    except Exception:
        # Narrowed from a bare `except:` so SystemExit and
        # KeyboardInterrupt are no longer swallowed.
        msg = 'Could not open QuakeML file %s' % (filename)
        raise IOError(msg)
def test_slicing(self):
    """
    Tests the __getslice__ method of the Catalog object.
    """
    catalog = readEvents()
    # Slices of a catalog compare equal to themselves.
    self.assertEqual(catalog[0:], catalog[0:])
    self.assertEqual(catalog[:2], catalog[:2])
    self.assertEqual(catalog[:], catalog[:])
    self.assertEqual(len(catalog), 3)
    # A sub-slice yields a new Catalog with the selected events only.
    subset = catalog[1:3]
    self.assertTrue(isinstance(subset, Catalog))
    self.assertEqual(len(subset), 2)
def _openCatalog(self, filename):
    '''
    Open an existing QuakeML catalog and load the picks of its first
    event for display.
    '''
    try:
        print 'Opening QuakeML Catalog %s' % filename
        cat = event.readEvents(filename)
        self._picks = cat[0].picks
        self._drawPicks()
    except:
        # NOTE(review): bare except also swallows SystemExit /
        # KeyboardInterrupt -- consider narrowing to Exception.
        msg = 'Could not open QuakeML file %s' % (filename)
        raise IOError(msg)
def test_plotDayPlotCatalog(self):
    '''
    Plots day plot, with a catalog of events.
    '''
    begin = UTCDateTime(2012, 4, 4, 14, 0, 0)
    catalog = readEvents()
    # One hour of synthetic data at 100 Hz.
    stream = self._createStream(begin, begin + 3600, 100)
    # create and compare image
    image_name = 'waveform_dayplot_catalog.png'
    with ImageComparison(self.path, image_name) as ic:
        stream.plot(outfile=ic.name, type='dayplot',
                    timezone='EST', time_offset=-5,
                    events=catalog)
def read_stream(data_files, sxml, event_file, event_id):
    # Read waveforms plus StationXML metadata, attach the responses and
    # pick the event matching event_id (falling back to the first event
    # in the file when no id matches).
    # NOTE(review): Python 2 print statement -- not Python 3 compatible.
    print data_files
    stream = obspy.read(data_files)
    stations = obspy.read_inventory(sxml, format="STATIONXML")
    stream.attach_response(stations)
    events = readEvents(event_file)
    event = None
    resource_id = ResourceIdentifier(event_id)
    for evt in events:
        # Last match wins if several events share the id.
        if evt.resource_id == resource_id:
            event = evt
    if event is None:
        event = events[0]
    return stream, stations, event
def setUp(self):
    # Load the fixture catalog shared by all tests in this case.
    data_dir = os.path.join(os.path.dirname(__file__), 'data')
    path_to_catalog = os.path.join(data_dir, 'neries_events.xml')
    self.catalog = readEvents(path_to_catalog)
    self.zmap_fields = _STD_ZMAP_FIELDS
    # Extract our favorite test event from the catalog
    test_event_id = 'quakeml:eu.emsc/event/20120404_0000041'
    self.test_event = next(e for e in self.catalog.events
                           if e.resource_id.id == test_event_id)
    # Expected ZMAP column values (as strings) for the test event.
    self.test_data = {
        'lon': '79.689000',
        'lat': '41.818000',
        'month': '4',
        'year': '2012.258465590847',
        'day': '4',
        'hour': '14',
        'minute': '21',
        'second': '42',
        'depth': '1.000000',
        'mag': '4.400000'}
def test_read(self):
    # via file, filename, plugin interface
    test_events = [self.test_data, dict(self.test_data, lon='5.1')]
    zmap_str = self._serialize((test_events))
    with NamedTemporaryFile() as f:
        f.write(zmap_str.encode('utf-8'))
        # read back via the reader function with a file name, with an
        # open file object (after rewinding), and via the generic
        # plugin interface
        catalog = zmap.readZmap(f.name)
        self._assert_zmap_equal(catalog, test_events)
        f.seek(0)
        catalog = zmap.readZmap(f)
        self._assert_zmap_equal(catalog, test_events)
        catalog = readEvents(f.name)
        self._assert_zmap_equal(catalog, test_events)
    # direct ZMAP string
    catalog = zmap.readZmap(zmap_str)
    self._assert_zmap_equal(catalog, test_events)
def test_read(self):
    # via file, file name, plugin interface
    test_events = [self.test_data, dict(self.test_data, lon='5.1')]
    zmap_str = self._serialize((test_events))
    with NamedTemporaryFile() as f:
        f.write(zmap_str.encode('utf-8'))
        # the same data must parse identically through all three entry
        # points: file name, open file object, generic plugin
        catalog = zmap.readZmap(f.name)
        self._assert_zmap_equal(catalog, test_events)
        f.seek(0)
        catalog = zmap.readZmap(f)
        self._assert_zmap_equal(catalog, test_events)
        catalog = readEvents(f.name)
        self._assert_zmap_equal(catalog, test_events)
    # direct ZMAP string
    catalog = zmap.readZmap(zmap_str)
    self._assert_zmap_equal(catalog, test_events)
def test_catalog_plot_local(self):
    """
    Tests the catalog preview plot, local projection, some more non-default
    parameters.
    """
    cat = readEvents()
    reltol = 1
    # some ticklabels are slightly offset on py 3.3.3 in travis..
    # e.g. see http://tests.obspy.org/13309/#1
    if (sys.version_info[0], sys.version_info[1]) == (3, 3):
        reltol = 5
    with ImageComparison(self.image_dir, "catalog3.png",
                         reltol=reltol) as ic:
        rcParams['savefig.dpi'] = 72
        cat.plot(outfile=ic.name, projection="local", resolution="i",
                 continent_fill_color="0.3", color="date",
                 colormap="gist_heat")
def test_catalog_plot_local(self):
    """
    Tests the catalog preview plot, local projection, some more non-default
    parameters.
    """
    cat = readEvents()
    reltol = 1.5
    # Basemap smaller 1.0.4 has a serious issue with plotting. Thus the
    # tolerance must be much higher.
    if BASEMAP_VERSION < [1, 0, 4]:
        reltol = 100
    with ImageComparison(self.image_dir, "catalog3.png",
                         reltol=reltol) as ic:
        rcParams['savefig.dpi'] = 72
        cat.plot(outfile=ic.name, projection="local", resolution="i",
                 continent_fill_color="0.3", color="date",
                 colormap="gist_heat")
def __init__(self, stream=None, inpcat=None, parent=None):
    """
    Build the picker GUI around ``stream`` and optionally preload
    picks from the catalog file ``inpcat``.
    """
    # Initialising QtGui
    qApp = QtGui.QApplication(sys.argv)
    # Init vars
    if stream is None:
        msg = 'Define stream = obspy.core.Stream()'
        raise ValueError(msg)
    self.st = stream.copy()
    self._picks = []
    self.savefile = None
    self.onset_types = ['emergent', 'impulsive', 'questionable']
    # Load filters from pickle
    try:
        self.bpfilter = pickle.load(open('.pick_filters', 'r'))
    except:
        # NOTE(review): bare except hides any unpickling error and the
        # file handle is never closed explicitly.
        self.bpfilter = []
    # Internal variables
    # Gui vars
    self._shortcuts = {
        'st_next': 'c',
        'st_previous': 'x',
        'filter_apply': 'f',
        'pick_p': 'q',
        'pick_s': 'w',
        'pick_custom': 't',
        'pick_remove': 'r',
    }
    self._plt_drag = None
    self._current_filter = None
    # Init stations
    self._initStations()  # defines list self._stations
    self._stationCycle = cycle(self._stations)
    # .next() is the Python 2 iterator protocol
    self._streamStation(self._stationCycle.next())
    if inpcat is not None:
        str(inpcat)
        cat = event.readEvents(inpcat)
        self._picks = cat[0].picks
    # Init QtGui
    QtGui.QMainWindow.__init__(self)
    self.setupUI()
    # exec QtApp (blocks until the window is closed)
    qApp.exec_()
def __init__(self, stream=None, inpcat=None, parent=None):
    """
    Initialise the picker GUI for ``stream``; ``inpcat`` optionally
    names a QuakeML file whose first event's picks are preloaded.
    """
    # Initialising QtGui
    qApp = QtGui.QApplication(sys.argv)
    # Init vars
    if stream is None:
        msg = 'Define stream = obspy.core.Stream()'
        raise ValueError(msg)
    self.st = stream.copy()
    self._picks = []
    self.savefile = None
    self.onset_types = ['emergent', 'impulsive', 'questionable']
    # Load filters from pickle
    try:
        self.bpfilter = pickle.load(open('.pick_filters', 'r'))
    except:
        # NOTE(review): bare except silently discards any error; the
        # file handle is never closed explicitly.
        self.bpfilter = []
    # Internal variables
    # Gui vars
    self._shortcuts = {'st_next': 'c',
                       'st_previous': 'x',
                       'filter_apply': 'f',
                       'pick_p': 'q',
                       'pick_s': 'w',
                       'pick_custom': 't',
                       'pick_remove': 'r',
                       }
    self._plt_drag = None
    self._current_filter = None
    # Init stations
    self._initStations()  # defines list self._stations
    self._stationCycle = cycle(self._stations)
    # .next() implies Python 2 iterators
    self._streamStation(self._stationCycle.next())
    if inpcat is not None:
        str(inpcat)
        cat = event.readEvents(inpcat)
        self._picks = cat[0].picks
    # Init QtGui
    QtGui.QMainWindow.__init__(self)
    self.setupUI()
    # exec QtApp (blocks until the window closes)
    qApp.exec_()
def test_filter(self):
    """
    Testing the filter method of the Catalog object.
    """
    def getattrs(event, attr):
        # Resolve a filter key onto its carrier object: 'magnitude' is
        # the first magnitude's .mag, everything else is a (possibly
        # dotted) attribute chain on the first origin.
        if attr == "magnitude":
            obj = event.magnitudes[0]
            attr = "mag"
        else:
            obj = event.origins[0]
        for a in attr.split("."):
            obj = getattr(obj, a)
        return obj
    cat = readEvents()
    self.assertTrue(all(event.magnitudes[0].mag < 4.0
                        for event in cat.filter("magnitude < 4.")))
    attrs = (
        "magnitude",
        "latitude",
        "longitude",
        "depth",
        "time",
        "quality.standard_error",
        "quality.azimuthal_gap",
        "quality.used_station_count",
        "quality.used_phase_count",
    )
    values = (4.0, 40.0, 50.0, 10.0, UTCDateTime("2012-04-04 14:20:00"),
              1.0, 50, 40, 20)
    for attr, value in zip(attrs, values):
        # Only the last component of a dotted key goes into the filter.
        attr_filter = attr.split(".")[-1]
        cat_smaller = cat.filter("%s < %s" % (attr_filter, value))
        cat_bigger = cat.filter("%s >= %s" % (attr_filter, value))
        # None attribute values are allowed in the '<' partition but
        # must never appear in the '>=' partition.
        self.assertTrue(
            all(True if a is None else a < value
                for event in cat_smaller
                for a in [getattrs(event, attr)])
        )
        self.assertTrue(
            all(False if a is None else a >= value
                for event in cat_bigger
                for a in [getattrs(event, attr)])
        )
        self.assertTrue(all(event in cat
                            for event in (cat_smaller + cat_bigger)))
        # inverse=True must produce exactly the complementary subset.
        cat_smaller_inverse = cat.filter(
            "%s < %s" % (attr_filter, value), inverse=True)
        self.assertTrue(all(event in cat_bigger
                            for event in cat_smaller_inverse))
        cat_bigger_inverse = cat.filter(
            "%s >= %s" % (attr_filter, value), inverse=True)
        self.assertTrue(all(event in cat_smaller
                            for event in cat_bigger_inverse))
def test_catalog_plot_local(self):
    """
    Tests the catalog preview plot, local projection, some more non-default
    parameters.
    """
    cat = readEvents()
    reltol = 1.5
    # Basemap smaller 1.0.4 has a serious issue with plotting. Thus the
    # tolerance must be much higher.
    if BASEMAP_VERSION < [1, 0, 4]:
        reltol = 100
    with ImageComparison(self.image_dir, "catalog3.png",
                         reltol=reltol) as ic:
        rcParams["savefig.dpi"] = 72
        cat.plot(
            outfile=ic.name,
            projection="local",
            resolution="i",
            continent_fill_color="0.3",
            color="date",
            colormap="gist_heat",
        )
def cut_events(in_, out):
    # Cut a waveform window around each pick of every event in a
    # QuakeML file and write one Q file per event origin.
    # NOTE(review): Python 2 print statements -- not Python 3
    # compatible as written.
    print 'read events...'
    catalog = readEvents(in_, 'QUAKEML')
    print 'cut events...'
    for event in ProgressBar()(catalog):
        oid = get_event_id(event.origins[0].resource_id.getQuakeMLURI())
        ori = event.origins[0]
        etime = ori.time
        #print 'Select', event
        st = Stream()
        for arrival in ori.arrivals:
            arrival.pick_id.convertIDToQuakeMLURI()
            pick = arrival.pick_id.getReferredObject()
            if not pick:
                print 'FAIL to get pick from arrival'
                continue
            ptime = pick.time
            seed_id = pick.waveform_id.getSEEDString()
            try:
                # Window: 50 s before to 250 s after the pick.
                st1 = Stream(
                    data.client.getWaveform(*(seed_id.split('.') +
                                              [ptime - 50, ptime + 250])))
            except Exception as ex:
                print '%s for %s' % (ex, seed_id)
                continue
            st1.merge()
            #print 'load %s %s %.1f' % (seed_id, pick.phase_hint, ptime - etime)
            # Store the event metadata on the first trace.
            st1[0].stats['event'] = AttribDict(
                id=event.resource_id.resource_id,
                origin_id=oid, etime=etime,
                ptime=ptime, lat=ori.latitude,
                lon=ori.longitude, depth=ori.depth,
                rms=ori.quality.standard_error,
                mag=event.magnitudes[0].mag)
            st += st1
        st.write(out % oid, 'Q')
def gcmt_catalog(
        t_start, t_end, min_latitude, max_latitude, min_longitude,
        max_longitude, latitude, longitude, radius_min, radius_max,
        d_min, d_max, mag_min, mag_max,
        link_gcmt='http://www.ldeo.columbia.edu/~gcmt/projects/CMT/catalog'):
    """
    Function for downloading data from GCMT

    :param t_start:
    :param t_end:
    :param min_latitude:
    :param max_latitude:
    :param min_longitude:
    :param max_longitude:
    :param latitude:
    :param longitude:
    :param radius_min:
    :param radius_max:
    :param d_min:
    :param d_max:
    :param mag_min:
    :param mag_max:
    :param link_gcmt:
    :return: filtered :class:`Catalog` of GCMT events
    """
    # for the time record
    tic = datetime.now()
    try:
        import obspyDMT
        dmt_path = obspyDMT.__path__[0]
    except Exception as error:
        print("WARNING: %s" % error)
        dmt_path = '.'
    gcmt_cat_path = os.path.join(dmt_path, 'gcmt_catalog')
    if not os.path.exists(gcmt_cat_path):
        os.mkdir(gcmt_cat_path)
        os.mkdir(os.path.join(gcmt_cat_path, 'NEW_MONTHLY'))
        os.mkdir(os.path.join(gcmt_cat_path, 'COMBO'))
    # creating a time list
    t_list = []
    # Bug fix: use floor division so delta_t stays an int. With true
    # division ('/') the result is a float on Python 3 and the
    # range(delta_t + 1) call below raises TypeError; '//' is
    # behavior-identical to py2 int division.
    delta_t = int(UTCDateTime(t_end) - UTCDateTime(t_start) + 1) // 86400
    yymm = []
    for i in range(delta_t + 1):
        t_list.append(
            (UTCDateTime(t_start) + i * 60 * 60 * 24).strftime('%Y/%m/%d'))
        yy_tmp, mm_tmp, dd_tmp = t_list[i].split('/')
        yymm.append('%s%s' % (yy_tmp, mm_tmp))
    # unique, sorted year/month identifiers to fetch
    yymmset = set(yymm)
    yymmls = list(yymmset)
    yymmls.sort()
    # starting to search for all events in the time window given by the user:
    cat = Catalog()
    yy_ret = []
    mm_ret = []
    remotefile_add = False
    for i in range(len(yymmls)):
        try:
            yy = yymmls[i][0:4]
            mm = yymmls[i][4:6]
            # GCMT changed the month-name spelling in 2006.
            if int(yy) < 2006:
                month_year = ['jan', 'feb', 'mar', 'apr', 'may', 'june',
                              'july', 'aug', 'sept', 'oct', 'nov', 'dec']
            else:
                month_year = ['jan', 'feb', 'mar', 'apr', 'may', 'jun',
                              'jul', 'aug', 'sep', 'oct', 'nov', 'dec']
            if int(yy) >= 2005:
                # monthly NDK files
                new_monthly = 'NEW_MONTHLY'
                file_to_open = os.path.join(
                    gcmt_cat_path, new_monthly,
                    '%s%s.ndk' % (month_year[int(mm) - 1], yy[-2:]))
                remotefile_add = '%s/%s/%s/%s%s.ndk' \
                    % (link_gcmt, new_monthly, yy,
                       month_year[int(mm) - 1], yy[-2:])
            else:
                # yearly combined QuakeML files, read at most once
                new_monthly = 'COMBO'
                if yy in yy_ret:
                    continue
                file_to_open = os.path.join(gcmt_cat_path, new_monthly,
                                            '%s.qml' % yy)
            if not os.path.exists(file_to_open) and \
                    not new_monthly == 'COMBO':
                print('Reading the data from GCMT webpage: %s' % yymmls[i])
                remotefile = urlopen(remotefile_add)
                remotefile_read = remotefile.readlines()
                # NOTE(review): urlopen yields bytes on Python 3 while
                # the file is opened in text mode -- confirm this path
                # is only exercised under Python 2 or adjust the mode.
                search_fio = open(file_to_open, 'w')
                search_fio.writelines(remotefile_read)
                search_fio.close()
            print('Reading the data from local gcmt_catalog: %s'
                  % yymmls[i])
            cat.extend(readEvents(file_to_open))
            yy_ret.append(yy)
            mm_ret.append(mm)
        except Exception as error:
            print("ERROR: %s" % error)
    print('Done reading the data from GCMT webpage.')
    toc = datetime.now()
    print('%s sec to retrieve the event info form GCMT.' % (toc - tic))
    # filter by time, magnitude and depth (depth given in km, stored in m)
    filt1 = 'time >= %s' % t_start
    filt2 = 'time <= %s' % t_end
    cat = cat.filter(filt1, filt2)
    filt1 = 'magnitude >= %s' % mag_min
    filt2 = 'magnitude <= %s' % mag_max
    cat = cat.filter(filt1, filt2)
    filt1 = 'depth >= %s' % (float(d_min) * 1000.)
    filt2 = 'depth <= %s' % (float(d_max) * 1000.)
    cat = cat.filter(filt1, filt2)
    # optional rectangular region filter
    if None not in [min_latitude, max_latitude, min_longitude,
                    max_longitude]:
        filt1 = 'latitude >= %s' % min_latitude
        filt2 = 'latitude <= %s' % max_latitude
        cat = cat.filter(filt1, filt2)
        filt1 = 'longitude >= %s' % min_longitude
        filt2 = 'longitude <= %s' % max_longitude
        cat = cat.filter(filt1, filt2)
    # final filtering for the remaining requests (circular region)
    if None not in [latitude, longitude, radius_min, radius_max]:
        index_rm = []
        for i in range(len(cat)):
            e_lat = cat.events[i].preferred_origin().latitude or \
                cat.events[i].origins[0].latitude
            e_lon = cat.events[i].preferred_origin().longitude or \
                cat.events[i].origins[0].longitude
            dist = locations2degrees(latitude, longitude, e_lat, e_lon)
            if not radius_min <= dist <= radius_max:
                index_rm.append(i)
        # delete from the end so earlier indices stay valid
        index_rm.sort()
        index_rm.reverse()
        for i in range(len(index_rm)):
            del cat[index_rm[i]]
    return cat
def neic_catalog_urllib(
        t_start, t_end, min_latitude, max_latitude, min_longitude,
        max_longitude, latitude, longitude, radius_min, radius_max,
        d_min, d_max, mag_min, mag_max,
        link_neic="http://earthquake.usgs.gov/fdsnws/event/1/query.quakeml?"):
    """
    Function for downloading data from NEIC

    :param t_start:
    :param t_end:
    :param min_latitude:
    :param max_latitude:
    :param min_longitude:
    :param max_longitude:
    :param latitude:
    :param longitude:
    :param radius_min:
    :param radius_max:
    :param d_min:
    :param d_max:
    :param mag_min:
    :param mag_max:
    :param link_neic:
    :return:
    """
    # NOTE(review): time.clock() was removed in Python 3.8 -- consider
    # time.perf_counter().
    tic = time.clock()
    # scratch directory for the per-chunk QuakeML downloads
    dir_name = '%s_temp_xml_files' % int(UTCDateTime.now().timestamp)
    os.mkdir(dir_name)
    getVars = {'minmagnitude': str(mag_min),
               'maxmagnitude': str(mag_max),
               'mindepth': str(d_min),
               'maxdepth': str(d_max), }
    # circular search takes precedence over the rectangular one
    if None in [latitude, longitude, radius_min, radius_max]:
        if not None in [min_latitude, max_latitude, min_longitude,
                        max_longitude]:
            getVars['minlongitude'] = str(min_longitude)
            getVars['maxlongitude'] = str(max_longitude)
            getVars['minlatitude'] = str(min_latitude)
            getVars['maxlatitude'] = str(max_latitude)
    else:
        getVars['latitude'] = str(latitude)
        getVars['longitude'] = str(longitude)
        # degrees -> km conversion factor
        getVars['maxradiuskm'] = str(float(radius_max) * 111.194)
    getVars['includeallorigins'] = 'true'
    getVars['includeallmagnitudes'] = 'true'
    getVars['producttype'] = 'moment-tensor'
    m_date = UTCDateTime(t_start)
    M_date = UTCDateTime(t_end)
    dur_event = M_date - m_date
    # query the service in ~30-day chunks
    interval = 30. * 24. * 60. * 60.
    num_div = int(dur_event / interval)
    print('#Divisions: %s' % num_div)
    if not num_div < 1:
        for i in range(1, num_div + 1):
            try:
                print(i, end=',')
                sys.stdout.flush()
                t_start_split = m_date + (i - 1) * interval
                t_end_split = m_date + i * interval
                getVars['starttime'] = str(t_start_split)
                getVars['endtime'] = str(t_end_split)
                url_values = urllib.parse.urlencode(getVars)
                remotefile = link_neic + url_values
                page = urlopen(remotefile)
                page_content = page.read()
                # NOTE(review): page.read() returns bytes on Python 3;
                # 'quakeml' in page_content would then raise TypeError
                # -- confirm decoding upstream.
                if 'quakeml' in page_content:
                    with open(os.path.join(
                            dir_name,
                            'temp_neic_xml_%05i.xml' % i), 'w') as fid:
                        fid.write(page_content)
                    fid.close()
                else:
                    continue
                page.close()
            except Exception as error:
                print("\nWARNING: %s -- %s\n" % (error, remotefile))
    elif num_div == 0:
        # whole request fits in a single chunk
        try:
            t_start_split = m_date
            t_end_split = M_date
            getVars['starttime'] = str(t_start_split)
            getVars['endtime'] = str(t_end_split)
            url_values = urllib.parse.urlencode(getVars)
            remotefile = link_neic + url_values
            page = urlopen(remotefile)
            page_content = page.read()
            if 'quakeml' in page_content:
                with open(os.path.join(
                        dir_name,
                        'temp_neic_xml_%05i.xml' % 0), 'w') as fid:
                    fid.write(page_content)
                fid.close()
            page.close()
        except Exception as error:
            print("\nWARNING: %s -- %s\n" % (error, remotefile))
    try:
        # fetch the remainder shorter than one full interval
        final_time = m_date + num_div * interval
        if (not M_date == final_time) and \
                (not int(dur_event / interval) == 0):
            t_start_split = final_time
            t_end_split = M_date
            getVars['starttime'] = str(t_start_split)
            getVars['endtime'] = str(t_end_split)
            url_values = urllib.parse.urlencode(getVars)
            remotefile = link_neic + url_values
            page = urlopen(remotefile)
            page_content = page.read()
            if 'quakeml' in page_content:
                with open(os.path.join(
                        dir_name,
                        'temp_neic_xml_%05i.xml' % (num_div + 1)),
                        'w') as fid:
                    fid.write(page_content)
                fid.close()
            page.close()
    except Exception as error:
        print("\nWARNING: %s\n" % error)
    # assemble the downloaded chunks into one catalog
    xml_add = glob.glob(os.path.join(dir_name, 'temp_neic_xml_*.xml'))
    xml_add.sort()
    cat = Catalog()
    print('\nAssembling %s xml files...' % len(xml_add))
    counter = 1
    for x_add in xml_add:
        print(counter, end=',')
        sys.stdout.flush()
        counter += 1
        try:
            cat.extend(readEvents(x_add, format='QuakeML'))
            os.remove(x_add)
        except Exception as error:
            print('[WARNING] %s' % error)
            os.remove(x_add)
    print("\ncleaning up the temporary folder.")
    os.rmdir(dir_name)
    toc = time.clock()
    print('\n%s sec to retrieve the event info form NEIC.' % (toc - tic))
    return cat
def isc_catalog(bot_lat=-90, top_lat=90, left_lon=-180, right_lon=180,
                ctr_lat=0, ctr_lon=0, radius=180,
                start_time=UTCDateTime() - 30 * 24 * 3600,
                end_time=UTCDateTime(), min_dep=-10, max_dep=1000,
                min_mag=0, max_mag=10, mag_type='MW',
                req_mag_agcy='Any', rev_comp='REVIEWED'):
    """
    Query the ISC web service and return the events whose magnitude
    type matches ``mag_type``.

    NOTE(review): the start_time/end_time defaults are evaluated once
    at import time, not per call -- confirm this is intended.
    """
    # Rectangular search unless full circular parameters are supplied.
    search_domain = 'rectangular'
    if None in [ctr_lat, ctr_lon, radius]:
        if None in [bot_lat, top_lat, left_lon, right_lon]:
            # neither region given: fall back to the whole globe
            left_lon = '-180'
            right_lon = '180'
            bot_lat = '-90'
            top_lat = '90'
            search_domain = 'rectangular'
    else:
        ctr_lat = str(ctr_lat)
        ctr_lon = str(ctr_lon)
        radius = str(radius)
        search_domain = 'circular'
    if not mag_type:
        mag_type = 'MW'
    else:
        mag_type = mag_type.upper()
    if req_mag_agcy.lower() == 'any':
        req_mag_agcy = ''
    start_time = UTCDateTime(start_time)
    end_time = UTCDateTime(end_time)
    base_url = isc_url_builder(search_domain=search_domain,
                               bot_lat=bot_lat, top_lat=top_lat,
                               left_lon=left_lon, right_lon=right_lon,
                               ctr_lat=ctr_lat, ctr_lon=ctr_lon,
                               radius=radius, start_time=start_time,
                               end_time=end_time, min_dep=min_dep,
                               max_dep=max_dep, min_mag=min_mag,
                               max_mag=max_mag, mag_type=mag_type,
                               req_mag_agcy=req_mag_agcy,
                               rev_comp=rev_comp)
    print("URL:\n%s" % base_url)
    # retry the request up to five times
    try_url = 5
    while try_url > 0:
        print("Try: %s" % try_url)
        try:
            isc_req = urlopen(base_url)
            isc_contents = isc_req.read()
            isc_events = readEvents(isc_contents)
            try_url = 0
        except Exception as e:
            print("requested content from ISC:\n%s" % e)
            try_url -= 1
    # NOTE(review): if all five attempts fail, isc_events was never
    # assigned and the next statement raises NameError.
    isc_events = isc_events.filter("magnitude >= %s" % min_mag,
                                   "magnitude <= %s" % max_mag)
    remove_index = []
    for i in range(len(isc_events)):
        found_mag_type = False
        for j in range(len(isc_events.events[i].magnitudes)):
            # substring match against the upper-cased magnitude type
            if mag_type in \
                    isc_events.events[i].magnitudes[j].magnitude_type.upper():
                isc_events.events[i].preferred_mag = \
                    isc_events.events[i].magnitudes[j].mag
                isc_events.events[i].preferred_mag_type = \
                    isc_events.events[i].magnitudes[j].magnitude_type
                isc_events.events[i].preferred_author = \
                    isc_events.events[i].magnitudes[j].creation_info.author
                found_mag_type = True
                break
        if not found_mag_type:
            remove_index.append(i)
    # delete from the highest index down so earlier indices stay valid
    if len(remove_index) > 0:
        remove_index.sort(reverse=True)
        for ri in remove_index:
            del isc_events.events[ri]
    return isc_events