def iter_event_names(self, time_range=None, nmax=1000, magmin=None,
                     latmin=-90., latmax=90., lonmin=-180., lonmax=180.):
    '''
    Yield names of GEOFON events matching the given constraints.

    Pages through the GEOFON ``eqinfo`` list service until an empty
    result page is returned.  Every matching event is cached in
    ``self.events`` under its name before the name is yielded.

    :param time_range: tuple ``(tmin, tmax)`` of system timestamps;
        required in practice despite the ``None`` default (it is
        indexed immediately below)
    :param nmax: maximum number of events requested per page
    :param magmin: minimum magnitude, or ``None`` for no lower bound
    :param latmin,latmax,lonmin,lonmax: geographic search window [deg]
    '''
    logger.debug('In Geofon.iter_event_names(...)')

    # The service filters by date (day granularity) only, so extend
    # the upper bound by one day to avoid cutting off events on the
    # final day; the exact time filter is re-applied further below.
    dmin = time.strftime('%Y-%m-%d', time.gmtime(time_range[0]))
    dmax = time.strftime('%Y-%m-%d', time.gmtime(time_range[1] + 24 * 60 * 60))

    # Empty string means "no magnitude constraint" to the service.
    if magmin is None:
        magmin = ''
    else:
        magmin = '%g' % magmin

    ipage = 1
    while True:
        url = ('http://geofon.gfz-potsdam.de/eqinfo/list.php?' + '&'.join([
            'page=%i' % ipage,
            'datemin=%s' % dmin,
            'datemax=%s' % dmax,
            'latmin=%g' % latmin,
            'latmax=%g' % latmax,
            'lonmin=%g' % lonmin,
            'lonmax=%g' % lonmax,
            'magmin=%s' % magmin,
            'fmt=geojson',
            'nmax=%i' % nmax]))

        logger.debug('Opening URL: %s' % url)
        page = urlopen(url).read()
        logger.debug('Received page (%i bytes)' % len(page))
        events = self._parse_events_page(page)

        for ev in events:
            # ``moment_tensor is True`` appears to be a sentinel set by
            # the parser meaning "an MT exists but was not fetched yet"
            # -- TODO confirm against _parse_events_page.
            if ev.moment_tensor is True:
                ev.moment_tensor = self.get_mt(ev)

        # An empty page signals the end of the paged result set.
        if not events:
            break

        for ev in events:
            if time_range[0] <= ev.time and ev.time <= time_range[1]:
                self.events[ev.name] = ev
                yield ev.name

        ipage += 1
def iter_event_names(self, time_range=None, magmin=0., magmax=10.,
                     latmin=-90., latmax=90., lonmin=-180., lonmax=180.,
                     depthmin=0., depthmax=1000 * km):
    '''
    Yield names of Global CMT events matching the given constraints.

    Submits the Global CMT web form and follows the "more solutions"
    continuation link returned by the page parser until the result set
    is exhausted.  All parsed events are cached in ``self.events``;
    only those inside *time_range* are yielded.

    :param time_range: tuple ``(tmin, tmax)`` of system timestamps;
        required in practice despite the ``None`` default
    :param magmin,magmax: moment magnitude (Mw) window
    :param latmin,latmax,lonmin,lonmax: geographic search window [deg]
    :param depthmin,depthmax: depth window [m] (converted to km for
        the form)
    '''
    yearbeg, monbeg, daybeg = time.gmtime(time_range[0])[:3]
    yearend, monend, dayend = time.gmtime(time_range[1])[:3]

    url = 'http://www.globalcmt.org/cgi-bin/globalcmt-cgi-bin/CMT5/form?' \
        + '&'.join([
            'itype=ymd',
            'yr=%i' % yearbeg, 'mo=%i' % monbeg, 'day=%i' % daybeg,
            'otype=ymd',
            'oyr=%i' % yearend, 'omo=%i' % monend, 'oday=%i' % dayend,
            # NOTE(review): the julian-day and nday fields look like
            # placeholders that are ignored when itype/otype are 'ymd'
            # -- TODO confirm against the form's documentation.
            'jyr=1976', 'jday=1', 'ojyr=1976', 'ojday=1',
            'nday=1',
            # only the Mw window is constrained; Ms and mb are left
            # wide open
            'lmw=%g' % magmin, 'umw=%g' % magmax,
            'lms=0', 'ums=10',
            'lmb=0', 'umb=10',
            'llat=%g' % latmin, 'ulat=%g' % latmax,
            'llon=%g' % lonmin, 'ulon=%g' % lonmax,
            # the form expects depth in km
            'lhd=%g' % (depthmin/km), 'uhd=%g' % (depthmax/km),
            'lts=-9999', 'uts=9999',
            'lpe1=0', 'upe1=90',
            'lpe2=0', 'upe2=90',
            'list=5'])

    while True:
        logger.debug('Opening URL: %s' % url)
        req = Request(url)
        page = urlopen(req).read()
        logger.debug('Received page (%i bytes)' % len(page))

        # The parser returns the events on this page plus the URL of
        # the next page (as bytes), or a falsy value when done.
        events, more = self._parse_events_page(page)

        for ev in events:
            self.events[ev.name] = ev

        for ev in events:
            if time_range[0] <= ev.time and ev.time <= time_range[1]:
                yield ev.name

        if more:
            url = more.decode('ascii')
        else:
            break
def get_mt(self, ev):
    '''
    Fetch and parse the moment tensor for event *ev*.

    Looks up the GEOFON alert page for the event's year and name.
    Returns the parsed moment tensor, or ``None`` when no alert page
    exists for the event.
    '''
    event_year = time.strftime('%Y', time.gmtime(ev.time))
    url = 'http://geofon.gfz-potsdam.de/data/alerts/%s/%s/mt.txt' % (
        event_year, ev.name)

    logger.debug('Opening URL: %s' % url)
    try:
        page = urlopen(url).read()
    except util.HTTPError:
        # A 404 here simply means no MT solution was published.
        logger.warning('No MT found for event "%s".' % ev.name)
        return None

    logger.debug('Received page (%i bytes)' % len(page))
    return self._parse_mt_page(page)
def iter_event_names(self, time_range=None, magmin=None, magmax=None,
                     latmin=-90., latmax=90., lonmin=-180., lonmax=180.):
    '''
    Yield names of ISC events matching the given constraints.

    Queries the ISC web service for reviewed events in QuakeML format,
    caches all parsed events in ``self.events`` and yields the names
    of those inside *time_range*.

    :param time_range: tuple ``(tmin, tmax)`` of system timestamps;
        required in practice despite the ``None`` default
    :param magmin,magmax: magnitude window; ``None`` (or 0, due to the
        truthiness test below) disables the respective bound
    :param latmin,latmax,lonmin,lonmax: geographic search window [deg]
    '''
    p = []
    a = p.append
    a('out_format=CATQuakeML')
    a('request=REVIEWED')
    a('searchshape=RECT')
    self.append_time_params(a, time_range)

    if magmin:
        a('min_mag=%g' % magmin)
    if magmax:
        a('max_mag=%g' % magmax)

    a('bot_lat=%g' % latmin)
    a('top_lat=%g' % latmax)
    a('left_lon=%g' % lonmin)
    a('right_lon=%g' % lonmax)
    url = 'http://www.isc.ac.uk/cgi-bin/web-db-v4?' + '&'.join(p)

    logger.debug('Opening URL: %s' % url)
    page = urlopen(url).read().decode()
    logger.debug('Received page (%i bytes)' % len(page))

    if 'The search could not be run due to problems' in page:
        # fix: use warning() -- Logger.warn() is a deprecated alias
        # (and the sibling methods in this file use warning()).
        logger.warning('%s\nurl: %s' % (page, url))
        return
    elif 'No events were found.' in page:
        logger.info('No events were found.')
        events = []
    else:
        data = quakeml.QuakeML.load_xml(string=page)
        events = data.get_pyrocko_events()

    for ev in events:
        self.events[ev.name] = ev

    for ev in events:
        if time_range[0] <= ev.time and ev.time <= time_range[1]:
            yield ev.name
def get_phase_markers(self, time_range, station_codes, phases):
    '''Download phase picks from ISC catalog and return them as a list
    of `pyrocko.gui.PhaseMarker` instances.

    :param time_range: Tuple with (tmin tmax)
    :param station_codes: List with ISC station codes
        (see http://www.isc.ac.uk/cgi-bin/stations?lista).
        If `station_codes` is 'global', query all ISC stations.
    :param phases: List of seismic phases. (e.g. ['P', 'PcP']
    '''
    params = [
        'out_format=QuakeML',
        'request=STNARRIVALS']

    if station_codes == 'global':
        params.append('stnsearch=GLOBAL')
    else:
        params.append('stnsearch=STN')
        params.append('sta_list=%s' % ','.join(station_codes))

    params.append('phaselist=%s' % ','.join(phases))
    self.append_time_params(params.append, time_range)

    url = 'http://www.isc.ac.uk/cgi-bin/web-db-v4?' + '&'.join(params)
    logger.debug('Opening URL: %s' % url)
    page = urlopen(url).read().decode()

    if 'No stations were found.' in page:
        logger.info('No stations were found.')
        return []

    logger.debug('Received page (%i bytes)' % len(page))
    data = quakeml.QuakeML.load_xml(string=page)
    markers = data.get_pyrocko_phase_markers()
    # Map ISC station codes back to the project's own codes.
    return self.replace_isc_codes(markers)
def ws_request(url, post=False, **kwargs):
    '''
    Open a webservice URL, encoding *kwargs* as query parameters.

    :param url: base URL of the service endpoint
    :param post: if given, data to send as the request body (making
        this a POST request); ``False`` for a plain GET
    :param kwargs: query parameters appended to the URL
    :returns: the open response object from ``urlopen``
    :raises NotFound: on HTTP 404
    :raises HTTPError: on any other HTTP error status
    '''
    url_values = urlencode(kwargs)
    url = url + '?' + url_values
    logger.debug('Accessing URL %s' % url)

    req = Request(url)
    if post:
        # fix: Request.add_data() was removed in Python 3.4; assigning
        # to the ``data`` attribute works on both Python 2 and 3.
        req.data = post
    req.add_header('Accept', '*/*')
    try:
        return urlopen(req)
    except HTTPError as e:
        if e.code == 404:
            raise NotFound(url)
        else:
            # bare re-raise preserves the original traceback
            raise
def download_file(fn, dirpath):
    '''
    Download ``base_url``/*fn* into directory *dirpath*.

    The payload is streamed into a PID-tagged temporary file and then
    renamed into place, so an interrupted download never leaves a
    truncated file under the final name.
    '''
    url = base_url + '/' + fn
    fpath = op.join(dirpath, fn)
    logger.info('starting download of %s' % url)

    util.ensuredirs(fpath)
    fpath_tmp = fpath + '.%i.temp' % os.getpid()

    # fix: close both handles even when the transfer raises -- the
    # original leaked the response and the output file on error.
    f = urlopen(url)
    try:
        with open(fpath_tmp, 'wb') as g:
            while True:
                data = f.read(1024)
                if not data:
                    break
                g.write(data)
    finally:
        f.close()

    os.rename(fpath_tmp, fpath)
    logger.info('finished download of %s' % url)
def get_event(self, name):
    '''
    Return the GEOFON event named *name*, fetching and caching it on
    first access.

    :raises NotFound: if the event page cannot be parsed; re-raised
        carrying the queried URL.
    '''
    logger.debug('In Geofon.get_event("%s")' % name)

    if name not in self.events:
        url = 'http://geofon.gfz-potsdam.de/eqinfo/event.php' \
            '?id=%s&fmt=geojson' % name
        logger.debug('Opening URL: %s' % url)
        page = urlopen(url).read()
        logger.debug('Received page (%i bytes)' % len(page))
        try:
            self.events[name] = self._parse_event_page(page)
        except NotFound:
            # reraise with url
            raise NotFound(url)

    ev = self.events[name]

    # ``True`` is a not-yet-fetched sentinel; resolve it lazily here.
    if ev.moment_tensor is True:
        ev.moment_tensor = self.get_mt(ev)

    return ev
def test_url_alive(self):
    # Test urls which are used as references in pyrocko if they still
    # exist.  Failures are reported as warnings only, so a transient
    # network problem never fails the test suite.
    to_check = [
        ('http://nappe.wustl.edu/antelope/css-formats/wfdisc.htm',
         'pyrocko.css'),
        ('http://www.ietf.org/timezones/data/leap-seconds.list',
         'pyrocko.config'),
        ('http://stackoverflow.com/questions/2417794/', 'cake_plot'),
        ('http://igppweb.ucsd.edu/~gabi/rem.html', 'crust2x2_data'),
        ('https://mirror.pyrocko.org/gsc20130501.txt', 'crustdb'),
        ('https://mirror.pyrocko.org/download.geonames.org/export/dump/', 'geonames'),  # noqa
        ('http://emolch.github.io/gmtpy/', 'gmtpy'),
        ('http://www.apache.org/licenses/LICENSE-2.0', 'kagan.py'),
        ('http://www.opengis.net/kml/2.2', 'model'),
        ('http://maps.google.com/mapfiles/kml/paddle/S.png', 'model'),
        ('http://de.wikipedia.org/wiki/Orthodrome', 'orthodrome'),
        ('https://mirror.pyrocko.org/peterbird.name/oldFTP/PB2002', 'tectonics'),  # noqa
        ('https://mirror.pyrocko.org/gsrm.unavco.org/model', 'tectonics'),
        ('http://stackoverflow.com/questions/19332902/', 'util'),
    ]

    for url, label in to_check:
        try:
            try:
                # HEAD avoids downloading the body; the ``method``
                # keyword only exists on Python >= 3.3, hence the
                # TypeError fallback.
                req = Request(url, method='HEAD')
            except TypeError:
                req = Request(url)

            f = urlopen(req)
            f.close()
        except Exception as e:
            # fix: use warning() -- Logger.warn() is a deprecated
            # alias.
            logger.warning(
                '%s - %s referenced in pyrocko.%s' % (e, url, label))
def retrieve(self):
    '''
    Scrape the Collm observatory (Univ. Leipzig) bulletin page and
    populate ``self._events`` with the parsed events, keyed by name.
    '''
    # German month abbreviations as printed on the page.
    month_by_abbrev = {
        'Jan': '01', 'Feb': '02', 'Mar': '03', 'Apr': '04',
        'Mai': '05', 'Jun': '06', 'Jul': '07', 'Aug': '08',
        'Sep': '09', 'Okt': '10', 'Nov': '11', 'Dez': '12'}

    url = 'http://home.uni-leipzig.de/collm/auswertung_temp.html'
    f = urlopen(url)
    try:
        text = f.read()
    finally:
        # fix: the response handle was never closed.
        f.close()

    # NOTE(review): the comparisons below assume ``text`` is ``str``;
    # if the ``urlopen`` in use returns raw ``bytes``, it needs to be
    # decoded first -- TODO confirm.

    # ``sec`` tracks where we are on the page: 0 = before the <PRE>
    # section, 1 = inside the event table, 2 = past it.
    sec = 0
    events = {}
    for line in text.splitlines():
        line = line.strip()
        if line == '<PRE>':
            sec += 1
            continue

        if sec == 1 and not line:
            sec += 1
            continue

        if sec == 1:
            # Fixed-width record: name, then date/time/location/depth/
            # magnitude/region at fixed column offsets.
            t = line.split(' ', 1)
            name = t[0]
            sdate = t[1][0:11]
            stime = t[1][12:22]
            sloc = t[1][23:36]
            sdepth = t[1][37:42]
            smag = t[1][51:55]
            region = t[1][60:]

            sday, smon, syear = sdate.split('-')
            smon = month_by_abbrev[smon]

            # fix: do not call this local ``time`` -- that shadowed
            # the module-level ``time`` import.
            etime = util.str_to_time(
                '%s-%s-%s %s' % (syear, smon, sday, stime))

            slat, slon = sloc.split(';')
            ev = model.event.Event(
                time=etime,
                lat=float(slat),
                lon=float(slon),
                depth=float(sdepth) * 1000.,
                magnitude=float(smag),
                magnitude_type='Ml',
                name=name,
                region=region,
                catalog='Saxony')

            events[name] = ev

    self._events = events