def main():
    """Download EIDA waveform data into an SDS directory tree.

    Discovers available channels via the routing/station service, merges
    the result with any data already present in the SDS tree, then
    downloads missing time windows in randomized batches via dataselect.

    Returns 0 on success, 1 on any error (suitable as a process exit code).
    """
    # Base argument lists for the three fdsnws_fetch invocations:
    #   param0 -> station service, network level (citation info)
    #   param1 -> station service, channel level (availability discovery)
    #   param2 -> dataselect service (actual waveform download)
    param0 = ["-y", "station", "-q", "format=text", "-q", "level=network"]
    param1 = ["-y", "station", "-q", "format=text", "-q", "level=channel"]
    param2 = ["-y", "dataselect", "-z"]

    # Requested global time window; defaults are "effectively unbounded".
    times = {"starttime": datetime.datetime(1900, 1, 1),
             "endtime": datetime.datetime(2100, 1, 1)}

    # (network_code, year) pairs for which data was written (citation info).
    nets = set()

    # optparse callbacks: route each command-line option to the parameter
    # list(s) of the fdsnws_fetch invocation(s) that need it.
    def add_param0(option, opt_str, value, parser):
        param0.append(opt_str)
        param0.append(value)

    def add_param1(option, opt_str, value, parser):
        param1.append(opt_str)
        param1.append(value)

    def add_param2(option, opt_str, value, parser):
        param2.append(opt_str)
        param2.append(value)

    def add_param(option, opt_str, value, parser):
        # Option applies to all three service invocations.
        add_param0(option, opt_str, value, parser)
        add_param1(option, opt_str, value, parser)
        add_param2(option, opt_str, value, parser)

    def add_time(option, opt_str, value, parser):
        # Forward the raw value to the station request and also keep a
        # parsed, naive-UTC datetime locally for window clamping.
        add_param1(option, opt_str, value, parser)

        try:
            t = dateutil.parser.parse(value)

        except ValueError as e:
            raise optparse.OptionValueError(
                "option '%s': invalid time value: '%s'" % (opt_str, value))

        # Normalize timezone-aware input to naive UTC so comparisons with
        # the naive defaults above are valid.
        if t.tzinfo is not None:
            t = t.astimezone(dateutil.tz.tzutc()).replace(tzinfo=None)

        times[option.dest] = t

    parser = optparse.OptionParser(
        usage="Usage: %prog [-h|--help] [OPTIONS] -o directory",
        version="%prog " + VERSION)

    parser.set_defaults(
        url="http://geofon.gfz-potsdam.de/eidaws/routing/1/",
        timeout=600,
        retries=10,
        retry_wait=60,
        threads=5,
        max_lines=1000,
        max_timespan=1440)

    parser.add_option("-v", "--verbose", action="store_true", default=False,
                      help="verbose mode")

    parser.add_option("-u", "--url", type="string", action="callback",
                      callback=add_param,
                      help="URL of routing service (default %default)")

    parser.add_option("-N", "--network", type="string", action="callback",
                      callback=add_param1,
                      help="network code or pattern")

    parser.add_option("-S", "--station", type="string", action="callback",
                      callback=add_param1,
                      help="station code or pattern")

    parser.add_option("-L", "--location", type="string", action="callback",
                      callback=add_param1,
                      help="location code or pattern")

    parser.add_option("-C", "--channel", type="string", action="callback",
                      callback=add_param1,
                      help="channel code or pattern")

    parser.add_option("-s", "--starttime", type="string", action="callback",
                      callback=add_time,
                      help="start time")

    parser.add_option("-e", "--endtime", type="string", action="callback",
                      callback=add_time,
                      help="end time")

    parser.add_option("-t", "--timeout", type="int", action="callback",
                      callback=add_param,
                      help="request timeout in seconds (default %default)")

    parser.add_option("-r", "--retries", type="int", action="callback",
                      callback=add_param,
                      help="number of retries (default %default)")

    parser.add_option("-w", "--retry-wait", type="int", action="callback",
                      callback=add_param,
                      help="seconds to wait before each retry "
                           "(default %default)")

    parser.add_option("-n", "--threads", type="int", action="callback",
                      callback=add_param,
                      help="maximum number of download threads "
                           "(default %default)")

    parser.add_option("-c", "--credentials-file", type="string",
                      action="callback", callback=add_param2,
                      help="URL,user,password file (CSV format) for queryauth")

    parser.add_option("-a", "--auth-file", type="string", action="callback",
                      callback=add_param2,
                      help="file that contains the auth token")

    parser.add_option("-o", "--output-dir", type="string",
                      help="SDS directory where downloaded data is written")

    parser.add_option("-l", "--max-lines", type="int",
                      help="max lines per request (default %default)")

    parser.add_option("-m", "--max-timespan", type="int",
                      help="max timespan per request in minutes "
                           "(default %default)")

    parser.add_option("-z", "--no-citation", action="store_true",
                      default=False,
                      help="suppress network citation info")

    parser.add_option("-Z", "--no-check", action="store_true", default=False,
                      help="suppress checking received routes and data")

    (options, args) = parser.parse_args()

    if args or not options.output_dir:
        parser.print_usage(sys.stderr)
        return 1

    # Simple logging backends; colorized when stderr is a terminal.
    def log_alert(s):
        if sys.stderr.isatty():
            s = "\033[31m" + s + "\033[m"

        sys.stderr.write(s + '\n')
        sys.stderr.flush()

    def log_notice(s):
        if sys.stderr.isatty():
            s = "\033[32m" + s + "\033[m"

        sys.stderr.write(s + '\n')
        sys.stderr.flush()

    def log_verbose(s):
        sys.stderr.write(s + '\n')
        sys.stderr.flush()

    def log_silent(s):
        pass

    logs.error = log_alert
    logs.warning = log_alert
    logs.notice = log_notice
    logs.info = (log_silent, log_verbose)[options.verbose]
    logs.debug = log_silent

    try:
        # Phase 1: query channel-level availability (text format).
        try:
            proc = exec_fetch(param1, None, options.verbose, options.no_check)

        except OSError as e:
            logs.error(str(e))
            logs.error("error running fdsnws_fetch")
            return 1

        # (net, sta, loc, cha) -> Timespan(start, end) still to download.
        timespan = {}

        for line in proc.stdout:
            if isinstance(line, bytes):
                line = line.decode('utf-8')

            if not line or line.startswith('#'):
                continue

            # Columns 15/16 of the channel-level text output hold the
            # epoch start/end times; clamp them to the requested window.
            starttime = max(dateutil.parser.parse(line.split('|')[15]),
                            times['starttime'])

            try:
                endtime = min(dateutil.parser.parse(line.split('|')[16]),
                              times['endtime'])

            except ValueError:
                # dateutil.parser.parse('') now causes ValueError instead of
                # current time
                endtime = min(datetime.datetime.now(), times['endtime'])

            # Normalize to naive UTC for consistent comparisons.
            if starttime.tzinfo is not None:
                starttime = starttime.astimezone(
                    dateutil.tz.tzutc()).replace(tzinfo=None)

            if endtime.tzinfo is not None:
                endtime = endtime.astimezone(
                    dateutil.tz.tzutc()).replace(tzinfo=None)

            # Merge overlapping epochs of the same channel into one span.
            try:
                ts = timespan[tuple(line.split('|')[:4])]

                if ts.start > starttime:
                    ts.start = starttime
                    ts.current = starttime

                if ts.end < endtime:
                    ts.end = endtime

            except KeyError:
                timespan[tuple(line.split('|')[:4])] = \
                    Timespan(starttime, endtime)

        proc.stdout.close()
        proc.wait()

        if proc.returncode != 0:
            logs.error("error running fdsnws_fetch")
            return 1

        # Phase 2: subtract data already present in the SDS tree.
        if os.path.exists(options.output_dir):
            scan_sds(options.output_dir, timespan, nets)

        # Phase 3: download remaining windows in batches until done.
        while len(timespan) > 0:
            postdata = ""

            # BUG FIX: random.sample() requires a sequence; passing a dict
            # view is a TypeError on Python 3.11+ (deprecated since 3.9),
            # so materialize the items first.
            ts_used = random.sample(list(timespan.items()),
                                    min(len(timespan), options.max_lines))

            for ((net, sta, loc, cha), ts) in ts_used:
                # Request at most max_timespan minutes per channel per batch.
                te = min(ts.end, ts.start +
                         datetime.timedelta(minutes=options.max_timespan))

                # FDSNWS POST format requires '--' for an empty location.
                if loc == '':
                    loc = '--'

                postdata += "%s %s %s %s %sZ %sZ\n" \
                            % (net, sta, loc, cha,
                               ts.start.isoformat(), te.isoformat())

            if not isinstance(postdata, bytes):
                postdata = postdata.encode('utf-8')

            try:
                proc = exec_fetch(param2, postdata, options.verbose,
                                  options.no_check)

            except OSError as e:
                logs.error(str(e))
                logs.error("error running fdsnws_fetch")
                return 1

            got_data = False

            try:
                for rec in mseedlite.Input(proc.stdout):
                    try:
                        ts = timespan[(rec.net, rec.sta, rec.loc, rec.cha)]

                    except KeyError:
                        logs.warning("unexpected data: %s.%s.%s.%s"
                                     % (rec.net, rec.sta, rec.loc, rec.cha))
                        continue

                    # Skip records that precede the current download position.
                    if rec.end_time <= ts.current:
                        continue

                    sds_dir = "%s/%d/%s/%s/%s.D" \
                              % (options.output_dir, rec.begin_time.year,
                                 rec.net, rec.sta, rec.cha)

                    sds_file = "%s.%s.%s.%s.D.%s" \
                               % (rec.net, rec.sta, rec.loc, rec.cha,
                                  rec.begin_time.strftime('%Y.%j'))

                    if not os.path.exists(sds_dir):
                        os.makedirs(sds_dir)

                    # Append the raw record to the day file.
                    with open(sds_dir + '/' + sds_file, 'ab') as fd:
                        fd.write(rec.header + rec.data)

                    ts.current = rec.end_time
                    nets.add((rec.net, rec.begin_time.year))
                    got_data = True

            except mseedlite.MSeedError as e:
                logs.error(str(e))

            proc.stdout.close()
            proc.wait()

            if proc.returncode != 0:
                logs.error("error running fdsnws_fetch")
                return 1

            for ((net, sta, loc, cha), ts) in ts_used:
                if not got_data:
                    # no progress, skip to next segment
                    ts.start += datetime.timedelta(
                        minutes=options.max_timespan)

                else:
                    # continue from current position
                    ts.start = ts.current

                if ts.start >= ts.end:
                    # timespan completed
                    del timespan[(net, sta, loc, cha)]

        if nets and not options.no_citation:
            logs.info("retrieving network citation info")
            get_citation(nets, param0, options.verbose)

    except (IOError, Error) as e:
        logs.error(str(e))
        return 1

    return 0
def __process_channel(self, tree, sta, locs):
    # Convert one FDSN StationXML <Channel> element into a stream on the
    # inventory, creating/reusing its sensor location and creating fresh
    # sensor and datalogger placeholders.
    #
    # Parameters:
    #   tree -- ElementTree element for the <Channel> node
    #   sta  -- station object that receives the sensor location
    #   locs -- cache of sensor locations keyed by (locationCode, start),
    #           so channels sharing a location epoch reuse one object
    code = tree.attrib['code']
    locationCode = tree.attrib['locationCode']
    start = dateutil.parser.parse(
        tree.attrib['startDate']).replace(tzinfo=None)

    try:
        end = dateutil.parser.parse(
            tree.attrib['endDate']).replace(tzinfo=None)

        # Far-future end dates are treated as open-ended.
        if end > datetime.datetime(2100, 1, 1):
            end = None

    except KeyError:
        # No endDate attribute: channel epoch is currently open.
        end = None

    try:
        loc = locs[(locationCode, start)]

        # Widen the cached location's end time if this channel outlives it
        # (None means open-ended and always wins).
        if loc.end is not None and (end is None or end > loc.end):
            loc.end = end

    except KeyError:
        loc = sta.insert_sensorLocation(locationCode, start, end=end,
                                        publicID=_uuid())
        locs[(locationCode, start)] = loc

    cha = loc.insert_stream(code, start, end=end)
    cha.restricted = (tree.attrib.get("restrictedStatus", "").lower() ==
                      "closed")
    cha.shared = True
    cha.format = "steim2"  # default; overridden by <StorageFormat> below
    cha.flags = ""
    cha.sensor = _uuid()
    cha.sensorChannel = 0
    cha.datalogger = _uuid()
    cha.dataloggerChannel = 0
    clockDrift = None

    # Placeholder sensor/datalogger objects, filled from child elements.
    sensor = self.insert_sensor(name=cha.sensor, publicID=cha.sensor)
    logger = self.insert_datalogger(name=cha.datalogger,
                                    publicID=cha.datalogger)

    for e in tree:
        if e.tag == ns + "Latitude":
            latitude = float(e.text)

            # Channels sharing a location should agree on coordinates;
            # the last value seen wins, but conflicts are reported.
            if loc.latitude is not None and loc.latitude != latitude:
                logs.warning(
                    "%s: warning: conflicting latitude: %s vs. %s" % (
                        _cha_id(cha), loc.latitude, latitude))

            loc.latitude = latitude

        elif e.tag == ns + "Longitude":
            longitude = float(e.text)

            if loc.longitude is not None and loc.longitude != longitude:
                logs.warning(
                    "%s: warning: conflicting longitude: %s vs. %s" % (
                        _cha_id(cha), loc.longitude, longitude))

            loc.longitude = longitude

        elif e.tag == ns + "Elevation":
            elevation = float(e.text)

            if loc.elevation is not None and loc.elevation != elevation:
                logs.warning(
                    "%s: warning: conflicting elevation: %s vs. %s" % (
                        _cha_id(cha), loc.elevation, elevation))

            loc.elevation = elevation

        elif e.tag == ns + "Depth":
            cha.depth = float(e.text)

        elif e.tag == ns + "Azimuth":
            cha.azimuth = float(e.text)

        elif e.tag == ns + "Dip":
            cha.dip = float(e.text)

        elif e.tag == ns + "Type":
            # Accumulate one flag character per <Type> element.
            cha.flags += e.text[0]

        elif e.tag == ns + "SampleRate":
            # A previously seen SampleRateRatio takes precedence.
            if cha.sampleRateNumerator is not None:
                continue

            # Express the rate as an integer numerator/denominator pair.
            sampleRate = float(e.text)

            if sampleRate > 1:
                cha.sampleRateNumerator = int(round(sampleRate))
                cha.sampleRateDenominator = 1

            elif sampleRate > 0:
                cha.sampleRateNumerator = 1
                cha.sampleRateDenominator = int(round(1 / sampleRate))

            else:
                cha.sampleRateNumerator = 0
                cha.sampleRateDenominator = 0

        elif e.tag == ns + "SampleRateRatio":
            for e1 in e:
                if e1.tag == ns + "NumberSamples":
                    cha.sampleRateNumerator = int(e1.text)

                if e1.tag == ns + "NumberSeconds":
                    cha.sampleRateDenominator = int(e1.text)

        elif e.tag == ns + "Sensor":
            for e1 in e:
                if e1.tag == ns + "Description":
                    sensor.description = e1.text.encode('utf-8')

                elif e1.tag == ns + "Type":
                    sensor.type = e1.text[:10]

                    # Fall back to Type when no Description was given
                    # (order-dependent: only if Description came first
                    # or is absent).
                    if not sensor.description:
                        sensor.description = e1.text.encode('utf-8')

                elif e1.tag == ns + "Model":
                    sensor.model = e1.text

                elif e1.tag == ns + "Manufacturer":
                    sensor.manufacturer = e1.text

                elif e1.tag == ns + "SerialNumber":
                    cha.sensorSerialNumber = e1.text

        elif e.tag == ns + "DataLogger":
            for e1 in e:
                if e1.tag == ns + "Description":
                    logger.description = e1.text.encode('utf-8')

                elif e1.tag == ns + "Type":
                    # Type is only a fallback description for dataloggers.
                    if not logger.description:
                        logger.description = e1.text.encode('utf-8')

                elif e1.tag == ns + "Model":
                    logger.digitizerModel = e1.text
                    logger.recorderModel = e1.text

                elif e1.tag == ns + "Manufacturer":
                    logger.digitizerManufacturer = e1.text
                    logger.recorderManufacturer = e1.text

                elif e1.tag == ns + "SerialNumber":
                    cha.dataloggerSerialNumber = e1.text

        elif e.tag == ns + "ClockDrift":
            clockDrift = float(e.text)

        elif e.tag == ns + "StorageFormat":
            cha.format = e.text

        elif e.tag == ns + "Response":
            try:
                self.__process_response(e, cha, sensor, logger)

            except Error as ex:
                # Response errors are reported but do not abort the channel.
                logs.error(str(ex))

    if cha.sampleRateDenominator and clockDrift is not None:
        # Scale drift by the sample rate; presumably converts StationXML's
        # seconds-per-sample ClockDrift to the target schema's unit — TODO
        # confirm against the inventory schema.
        logger.maxClockDrift = \
            clockDrift * cha.sampleRateNumerator / cha.sampleRateDenominator

    if not cha.flags:
        # Default flags when no <Type> elements were present.
        cha.flags = "GC"
def main():
    """Fetch EIDA inventory (and optionally waveforms) and write a SEED
    volume.

    Retrieves StationXML at response level, converts it to an inventory,
    and either builds a dataless SEED volume (-d) or additionally downloads
    waveform data via dataselect and writes a full SEED volume.

    Returns 0 on success, 1 on any error (suitable as a process exit code).
    """
    # Base argument lists for the three fdsnws_fetch invocations:
    #   param0 -> station service, network level (citation info)
    #   param1 -> station service, response-level StationXML (metadata)
    #   param2 -> dataselect service (waveform download)
    param0 = ["-y", "station", "-q", "format=text", "-q", "level=network"]
    param1 = ["-y", "station", "-q", "format=xml", "-q", "level=response"]
    param2 = ["-y", "dataselect", "-z"]

    # (network_code, year) pairs for which data was written (citation info).
    nets = set()

    # optparse callbacks: route each command-line option to the parameter
    # list(s) of the fdsnws_fetch invocation(s) that need it.
    def add_param0(option, opt_str, value, parser):
        param0.append(opt_str)
        param0.append(value)

    def add_param1(option, opt_str, value, parser):
        param1.append(opt_str)
        param1.append(value)

    def add_param2(option, opt_str, value, parser):
        param2.append(opt_str)
        param2.append(value)

    def add_param12(option, opt_str, value, parser):
        # Option applies to both station and dataselect requests.
        add_param1(option, opt_str, value, parser)
        add_param2(option, opt_str, value, parser)

    def add_param(option, opt_str, value, parser):
        # Option applies to all three service invocations.
        add_param0(option, opt_str, value, parser)
        add_param1(option, opt_str, value, parser)
        add_param2(option, opt_str, value, parser)

    parser = optparse.OptionParser(
        usage="Usage: %prog [-h|--help] [OPTIONS] -o file",
        version="%prog " + VERSION)

    parser.set_defaults(url="http://geofon.gfz-potsdam.de/eidaws/routing/1/",
                        timeout=600,
                        retries=10,
                        retry_wait=60,
                        threads=5)

    parser.add_option("-v", "--verbose", action="store_true", default=False,
                      help="verbose mode")

    parser.add_option("-u", "--url", type="string", action="callback",
                      callback=add_param,
                      help="URL of routing service (default %default)")

    parser.add_option("-N", "--network", type="string", action="callback",
                      callback=add_param12,
                      help="network code or pattern")

    parser.add_option("-S", "--station", type="string", action="callback",
                      callback=add_param12,
                      help="station code or pattern")

    parser.add_option("-L", "--location", type="string", action="callback",
                      callback=add_param12,
                      help="location code or pattern")

    parser.add_option("-C", "--channel", type="string", action="callback",
                      callback=add_param12,
                      help="channel code or pattern")

    parser.add_option("-s", "--starttime", type="string", action="callback",
                      callback=add_param12,
                      help="start time")

    parser.add_option("-e", "--endtime", type="string", action="callback",
                      callback=add_param12,
                      help="end time")

    parser.add_option("-t", "--timeout", type="int", action="callback",
                      callback=add_param,
                      help="request timeout in seconds (default %default)")

    parser.add_option("-r", "--retries", type="int", action="callback",
                      callback=add_param,
                      help="number of retries (default %default)")

    parser.add_option(
        "-w", "--retry-wait", type="int", action="callback",
        callback=add_param,
        help="seconds to wait before each retry (default %default)")

    parser.add_option(
        "-n", "--threads", type="int", action="callback",
        callback=add_param,
        help="maximum number of download threads (default %default)")

    parser.add_option("-c", "--credentials-file", type="string",
                      action="callback", callback=add_param2,
                      help="URL,user,password file (CSV format) for queryauth")

    parser.add_option("-a", "--auth-file", type="string", action="callback",
                      callback=add_param2,
                      help="file that contains the auth token")

    parser.add_option("-p", "--post-file", type="string", action="callback",
                      callback=add_param12,
                      help="request file in FDSNWS POST format")

    parser.add_option("-f", "--arclink-file", type="string",
                      action="callback", callback=add_param12,
                      help="request file in ArcLink format")

    parser.add_option("-b", "--breqfast-file", type="string",
                      action="callback", callback=add_param12,
                      help="request file in breq_fast format")

    parser.add_option("-d", "--dataless", action="store_true", default=False,
                      help="create dataless SEED volume")

    parser.add_option("-l", "--label", type="string",
                      help="label of SEED volume")

    parser.add_option("-o", "--output-file", type="string",
                      help="file where SEED data is written")

    parser.add_option("-z", "--no-citation", action="store_true",
                      default=False,
                      help="suppress network citation info")

    parser.add_option("-Z", "--no-check", action="store_true", default=False,
                      help="suppress checking received routes and data")

    (options, args) = parser.parse_args()

    if args or not options.output_file:
        parser.print_usage(sys.stderr)
        return 1

    # Simple logging backends; colorized when stderr is a terminal.
    def log_alert(s):
        if sys.stderr.isatty():
            s = "\033[31m" + s + "\033[m"

        sys.stderr.write(s + '\n')
        sys.stderr.flush()

    def log_notice(s):
        if sys.stderr.isatty():
            s = "\033[32m" + s + "\033[m"

        sys.stderr.write(s + '\n')
        sys.stderr.flush()

    def log_verbose(s):
        sys.stderr.write(s + '\n')
        sys.stderr.flush()

    def log_silent(s):
        pass

    logs.error = log_alert
    logs.warning = log_alert
    logs.notice = log_notice
    logs.info = (log_silent, log_verbose)[options.verbose]
    logs.debug = log_silent

    # Phase 1: fetch response-level StationXML metadata.
    try:
        proc = exec_fetch(param1, None, options.verbose, options.no_check)

    except OSError as e:
        logs.error(str(e))
        logs.error("error running fdsnws_fetch")
        return 1

    inv = fdsnxml.Inventory()

    # Spool the XML to a seekable temporary file before parsing.
    with tempfile.TemporaryFile() as fd:
        shutil.copyfileobj(proc.stdout, fd)
        proc.stdout.close()
        proc.wait()

        if proc.returncode != 0:
            logs.error("error running fdsnws_fetch")
            return 1

        # Only parse if anything was actually received.
        if fd.tell():
            fd.seek(0)

            try:
                inv.load_fdsnxml(fd)

            except fdsnxml.Error as e:
                logs.error(str(e))
                return 1

    seed_volume = fseed.SEEDVolume(inv, ORGANIZATION, options.label, False)

    if options.dataless:
        # Dataless volume: register every channel epoch from the inventory.
        for net in iterinv(inv.network):
            for sta in iterinv(net.station):
                for loc in iterinv(sta.sensorLocation):
                    for cha in iterinv(loc.stream):
                        try:
                            seed_volume.add_chan(net.code, sta.code,
                                                 loc.code, cha.code,
                                                 cha.start, cha.end)

                        except fseed.SEEDError as e:
                            logs.warning("%s.%s.%s.%s.%s: %s" % (
                                net.code, sta.code, loc.code, cha.code,
                                cha.start.isoformat(), e))

    else:
        # Phase 2: download waveforms and add them to the volume.
        try:
            proc = exec_fetch(param2, None, options.verbose, options.no_check)

        except OSError as e:
            logs.error(str(e))
            logs.error("error running fdsnws_fetch")
            return 1

        try:
            for rec in mseedlite.Input(proc.stdout):
                try:
                    seed_volume.add_data(rec)

                except fseed.SEEDError as e:
                    # BUG FIX: mseedlite records expose plain string codes
                    # (rec.net, rec.sta, ...) and rec.begin_time, not
                    # inventory objects; the previous "rec.net.code" /
                    # "rec.cha.start" access raised AttributeError whenever
                    # this warning fired.
                    logs.warning("%s.%s.%s.%s.%s: %s" % (
                        rec.net, rec.sta, rec.loc, rec.cha,
                        rec.begin_time.isoformat(), e))

                nets.add((rec.net, rec.begin_time.year))

        except mseedlite.MSeedError as e:
            logs.error(str(e))

        proc.stdout.close()
        proc.wait()

        if proc.returncode != 0:
            logs.error("error running fdsnws_fetch")
            return 1

    # Phase 3: write the assembled SEED volume.
    with open(options.output_file, "wb") as fd:
        try:
            seed_volume.output(fd)

        except fseed.SEEDError as e:
            logs.error(str(e))
            return 1

    if nets and not options.no_citation:
        logs.info("retrieving network citation info")
        get_citation(nets, param0, options.verbose)

    return 0