def load_fdsnxml(self, src):
    try:
        tree = ET.parse(src).getroot()

    except Exception as ex:
        raise Error(ex)

    for e in tree:
        if e.tag == ns + "Source":
            self.__archive = e.text

        elif e.tag == ns + "Network":
            if 'startDate' not in e.attrib:
                logs.error("error: network %s is missing startDate"
                           % e.attrib['code'])
                continue

            self.__process_network(e)
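# Hedged sketch (not part of the original module): the tag comparisons
# in load_fdsnxml() assume a module-level `ns` constant holding the FDSN
# StationXML namespace in ElementTree's "{uri}" form. The file name
# below is illustrative.
def _example_namespace():
    import xml.etree.ElementTree as ET

    # ElementTree reports tags as "{namespace-uri}LocalName", so code
    # like `e.tag == ns + "Network"` needs ns in exactly this form.
    ns = "{http://www.fdsn.org/xml/station/1}"

    tree = ET.parse("station.xml").getroot()

    for e in tree:
        if e.tag == ns + "Network":
            print(e.attrib.get("code"), e.attrib.get("startDate"))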
def main():
    logs.error = log_alert
    logs.warning = log_alert
    logs.notice = log_notice
    logs.info = log_verbose
    logs.debug = log_silent

    if len(sys.argv) not in (2, 3):
        logs.notice("Usage: %s input_file [output_file]" % sys.argv[0])
        return 1

    inv = fdsnxml.Inventory()

    try:
        inv.load_fdsnxml(sys.argv[1])

    except fdsnxml.Error as e:
        logs.error(str(e))
        return 1

    inv.save_xml(sys.argv[2] if len(sys.argv) == 3 else sys.stdout, instr=1)
    return 0
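# Hedged usage sketch (not part of the original script): the same
# conversion main() performs, done programmatically. Module and file
# names are assumptions based on the imports used above.
def _example_convert():
    import sys
    import fdsnxml

    inv = fdsnxml.Inventory()

    try:
        inv.load_fdsnxml("station.xml")      # input StationXML (illustrative)

    except fdsnxml.Error as e:
        sys.exit(str(e))

    inv.save_xml("inventory.xml", instr=1)   # output file name is illustrative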
def scan_cha(d):
    # relies on `timespan` and `nets` from the enclosing scope
    # (see the scan_sds() call in main())
    last_file = {}

    for f in os.listdir(d):
        try:
            (net, sta, loc, cha, ext, year, doy) = f.split('.')
            nets.add((net, int(year)))

        except ValueError:
            logs.error("invalid SDS file: " + d + '/' + f)
            continue

        if (net, sta, loc, cha) not in timespan:
            continue

        try:
            # doy strings are zero-padded, so string comparison orders
            # them correctly
            if doy > last_file[loc][0]:
                last_file[loc] = (doy, f)

        except KeyError:
            last_file[loc] = (doy, f)

    for (loc, (doy, f)) in last_file.items():
        with open(d + '/' + f, 'rb') as fd:
            nslc = tuple(f.split('.')[:4])

            # read the first record to learn the record size, then jump
            # to the last record of the file
            rec = mseedlite.Record(fd)
            fd.seek(-rec.size, 2)
            rec = mseedlite.Record(fd)

            ts = timespan[nslc]

            if ts.start < rec.end_time < ts.end:
                ts.start = rec.end_time
                ts.current = rec.end_time

            elif rec.end_time >= ts.end:
                # all requested data is already on disk
                del timespan[nslc]
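# Illustrative sketch (not part of the original script) of the SDS file
# naming convention that scan_cha() relies on:
# NET.STA.LOC.CHA.TYPE.YEAR.DOY, one file per stream and day.
def _example_sds_name():
    fname = "GE.APE..BHZ.D.2015.123"  # hypothetical file name
    net, sta, loc, cha, ext, year, doy = fname.split('.')
    assert (net, sta, loc, cha, ext) == ("GE", "APE", "", "BHZ", "D")
    assert int(year) == 2015 and int(doy) == 123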
def get_citation(nets, param, verbose):
    postdata = ""

    for (net, year) in nets:
        postdata += "%s * * * %d-01-01T00:00:00Z %d-12-31T23:59:59Z\n" \
                    % (net, year, year)

    if not isinstance(postdata, bytes):
        postdata = postdata.encode('utf-8')

    try:
        proc = exec_fetch(param, postdata, verbose, True)

    except OSError as e:
        logs.error(str(e))
        logs.error("error running fdsnws_fetch")
        return 1

    net_desc = {}

    for line in proc.stdout:
        try:
            if isinstance(line, bytes):
                line = line.decode('utf-8')

            if not line or line.startswith('#'):
                continue

            (code, desc, start) = line.split('|')[:3]
            year = dateutil.parser.parse(start).year

        except (ValueError, UnicodeDecodeError) as e:
            logs.error("error parsing text format: %s" % str(e))
            continue

        if code[0] in "0123456789XYZ":
            # temporary network: code is only unique within its start year
            net_desc["%s_%d" % (code, year)] = desc

        else:
            net_desc[code] = desc

    logs.notice("You received seismic waveform data from the following "
                "network(s):")

    for code in sorted(net_desc):
        logs.notice("%s %s" % (code, net_desc[code]))

    logs.notice("\nAcknowledgment is extremely important for network operators\n"
                "providing open data. When preparing publications, please\n"
                "cite the data appropriately. The FDSN service at\n\n"
                " http://www.fdsn.org/networks/citation/?networks=%s\n\n"
                "provides a helpful guide based on available network\n"
                "Digital Object Identifiers.\n"
                % "+".join(sorted(net_desc)))
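# Hedged sketch (not part of the original script): the shape of an
# fdsnws-station format=text, level=network line that get_citation()
# parses above. The sample line is representative, not real output.
def _example_text_line():
    import dateutil.parser

    line = "GE|GEOFON Program|1993-01-01T00:00:00|2100-01-01T00:00:00|100"
    (code, desc, start) = line.split('|')[:3]
    year = dateutil.parser.parse(start).year
    assert (code, desc, year) == ("GE", "GEOFON Program", 1993)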
def __process_channel(self, tree, sta, locs):
    code = tree.attrib['code']
    locationCode = tree.attrib['locationCode']
    start = dateutil.parser.parse(
        tree.attrib['startDate']).replace(tzinfo=None)

    try:
        end = dateutil.parser.parse(
            tree.attrib['endDate']).replace(tzinfo=None)

        if end > datetime.datetime(2100, 1, 1):
            end = None

    except KeyError:
        end = None

    try:
        loc = locs[(locationCode, start)]

        if loc.end is not None and (end is None or end > loc.end):
            loc.end = end

    except KeyError:
        loc = sta.insert_sensorLocation(locationCode, start, end=end,
                                        publicID=_uuid())
        locs[(locationCode, start)] = loc

    cha = loc.insert_stream(code, start, end=end)

    cha.restricted = (tree.attrib.get("restrictedStatus", "").lower()
                      == "closed")
    cha.shared = True
    cha.format = "steim2"
    cha.flags = ""
    cha.sensor = _uuid()
    cha.sensorChannel = 0
    cha.datalogger = _uuid()
    cha.dataloggerChannel = 0

    clockDrift = None

    sensor = self.insert_sensor(name=cha.sensor, publicID=cha.sensor)
    logger = self.insert_datalogger(name=cha.datalogger,
                                    publicID=cha.datalogger)

    for e in tree:
        if e.tag == ns + "Latitude":
            latitude = float(e.text)

            if loc.latitude is not None and loc.latitude != latitude:
                logs.warning("%s: warning: conflicting latitude: %s vs. %s"
                             % (_cha_id(cha), loc.latitude, latitude))

            loc.latitude = latitude

        elif e.tag == ns + "Longitude":
            longitude = float(e.text)

            if loc.longitude is not None and loc.longitude != longitude:
                logs.warning("%s: warning: conflicting longitude: %s vs. %s"
                             % (_cha_id(cha), loc.longitude, longitude))

            loc.longitude = longitude

        elif e.tag == ns + "Elevation":
            elevation = float(e.text)

            if loc.elevation is not None and loc.elevation != elevation:
                logs.warning("%s: warning: conflicting elevation: %s vs. %s"
                             % (_cha_id(cha), loc.elevation, elevation))

            loc.elevation = elevation

        elif e.tag == ns + "Depth":
            cha.depth = float(e.text)

        elif e.tag == ns + "Azimuth":
            cha.azimuth = float(e.text)

        elif e.tag == ns + "Dip":
            cha.dip = float(e.text)

        elif e.tag == ns + "Type":
            cha.flags += e.text[0]

        elif e.tag == ns + "SampleRate":
            if cha.sampleRateNumerator is not None:
                continue

            sampleRate = float(e.text)

            if sampleRate > 1:
                cha.sampleRateNumerator = int(round(sampleRate))
                cha.sampleRateDenominator = 1

            elif sampleRate > 0:
                cha.sampleRateNumerator = 1
                cha.sampleRateDenominator = int(round(1 / sampleRate))

            else:
                cha.sampleRateNumerator = 0
                cha.sampleRateDenominator = 0

        elif e.tag == ns + "SampleRateRatio":
            for e1 in e:
                if e1.tag == ns + "NumberSamples":
                    cha.sampleRateNumerator = int(e1.text)

                if e1.tag == ns + "NumberSeconds":
                    cha.sampleRateDenominator = int(e1.text)

        elif e.tag == ns + "Sensor":
            for e1 in e:
                if e1.tag == ns + "Description":
                    sensor.description = e1.text.encode('utf-8')

                elif e1.tag == ns + "Type":
                    sensor.type = e1.text[:10]

                    if not sensor.description:
                        sensor.description = e1.text.encode('utf-8')

                elif e1.tag == ns + "Model":
                    sensor.model = e1.text

                elif e1.tag == ns + "Manufacturer":
                    sensor.manufacturer = e1.text

                elif e1.tag == ns + "SerialNumber":
                    cha.sensorSerialNumber = e1.text

        elif e.tag == ns + "DataLogger":
            for e1 in e:
                if e1.tag == ns + "Description":
                    logger.description = e1.text.encode('utf-8')

                elif e1.tag == ns + "Type":
                    if not logger.description:
                        logger.description = e1.text.encode('utf-8')

                elif e1.tag == ns + "Model":
                    logger.digitizerModel = e1.text
                    logger.recorderModel = e1.text

                elif e1.tag == ns + "Manufacturer":
                    logger.digitizerManufacturer = e1.text
                    logger.recorderManufacturer = e1.text

                elif e1.tag == ns + "SerialNumber":
                    cha.dataloggerSerialNumber = e1.text

        elif e.tag == ns + "ClockDrift":
            clockDrift = float(e.text)

        elif e.tag == ns + "StorageFormat":
            cha.format = e.text

        elif e.tag == ns + "Response":
            try:
                self.__process_response(e, cha, sensor, logger)

            except Error as ex:
                logs.error(str(ex))

    if cha.sampleRateDenominator and clockDrift is not None:
        logger.maxClockDrift = (clockDrift * cha.sampleRateNumerator
                                / cha.sampleRateDenominator)

    if not cha.flags:
        cha.flags = "GC"
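# Standalone sketch (not part of the original module) of the
# <SampleRate> handling above: a float sample rate is stored as an
# integer numerator/denominator pair, inverting rates below 1 Hz.
def _example_rate_to_ratio(sample_rate):
    if sample_rate > 1:
        return int(round(sample_rate)), 1
    elif sample_rate > 0:
        return 1, int(round(1 / sample_rate))
    return 0, 0

# e.g. _example_rate_to_ratio(100.0) == (100, 1)   # 100 Hz
#      _example_rate_to_ratio(0.1) == (1, 10)      # one sample per 10 s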
def main():
    param0 = ["-y", "station", "-q", "format=text", "-q", "level=network"]
    param1 = ["-y", "station", "-q", "format=text", "-q", "level=channel"]
    param2 = ["-y", "dataselect", "-z"]
    times = {"starttime": datetime.datetime(1900, 1, 1),
             "endtime": datetime.datetime(2100, 1, 1)}
    nets = set()

    def add_param0(option, opt_str, value, parser):
        param0.append(opt_str)
        param0.append(value)

    def add_param1(option, opt_str, value, parser):
        param1.append(opt_str)
        param1.append(value)

    def add_param2(option, opt_str, value, parser):
        param2.append(opt_str)
        param2.append(value)

    def add_param(option, opt_str, value, parser):
        add_param0(option, opt_str, value, parser)
        add_param1(option, opt_str, value, parser)
        add_param2(option, opt_str, value, parser)

    def add_time(option, opt_str, value, parser):
        add_param1(option, opt_str, value, parser)

        try:
            t = dateutil.parser.parse(value)

        except ValueError:
            raise optparse.OptionValueError(
                "option '%s': invalid time value: '%s'" % (opt_str, value))

        if t.tzinfo is not None:
            t = t.astimezone(dateutil.tz.tzutc()).replace(tzinfo=None)

        times[option.dest] = t

    parser = optparse.OptionParser(
        usage="Usage: %prog [-h|--help] [OPTIONS] -o directory",
        version="%prog " + VERSION)

    parser.set_defaults(
        url="http://geofon.gfz-potsdam.de/eidaws/routing/1/",
        timeout=600,
        retries=10,
        retry_wait=60,
        threads=5,
        max_lines=1000,
        max_timespan=1440)

    parser.add_option("-v", "--verbose", action="store_true", default=False,
                      help="verbose mode")

    parser.add_option("-u", "--url", type="string", action="callback",
                      callback=add_param,
                      help="URL of routing service (default %default)")

    parser.add_option("-N", "--network", type="string", action="callback",
                      callback=add_param1,
                      help="network code or pattern")

    parser.add_option("-S", "--station", type="string", action="callback",
                      callback=add_param1,
                      help="station code or pattern")

    parser.add_option("-L", "--location", type="string", action="callback",
                      callback=add_param1,
                      help="location code or pattern")

    parser.add_option("-C", "--channel", type="string", action="callback",
                      callback=add_param1,
                      help="channel code or pattern")

    parser.add_option("-s", "--starttime", type="string", action="callback",
                      callback=add_time,
                      help="start time")

    parser.add_option("-e", "--endtime", type="string", action="callback",
                      callback=add_time,
                      help="end time")

    parser.add_option("-t", "--timeout", type="int", action="callback",
                      callback=add_param,
                      help="request timeout in seconds (default %default)")

    parser.add_option("-r", "--retries", type="int", action="callback",
                      callback=add_param,
                      help="number of retries (default %default)")

    parser.add_option("-w", "--retry-wait", type="int", action="callback",
                      callback=add_param,
                      help="seconds to wait before each retry "
                           "(default %default)")

    parser.add_option("-n", "--threads", type="int", action="callback",
                      callback=add_param,
                      help="maximum number of download threads "
                           "(default %default)")

    parser.add_option("-c", "--credentials-file", type="string",
                      action="callback", callback=add_param2,
                      help="URL,user,password file (CSV format) for queryauth")

    parser.add_option("-a", "--auth-file", type="string", action="callback",
                      callback=add_param2,
                      help="file that contains the auth token")

    parser.add_option("-o", "--output-dir", type="string",
                      help="SDS directory where downloaded data is written")

    parser.add_option("-l", "--max-lines", type="int",
                      help="max lines per request (default %default)")

    parser.add_option("-m", "--max-timespan", type="int",
                      help="max timespan per request in minutes "
                           "(default %default)")

    parser.add_option("-z", "--no-citation", action="store_true",
                      default=False,
                      help="suppress network citation info")

    parser.add_option("-Z", "--no-check", action="store_true", default=False,
                      help="suppress checking received routes and data")

    (options, args) = parser.parse_args()

    if args or not options.output_dir:
        parser.print_usage(sys.stderr)
        return 1

    def log_alert(s):
        if sys.stderr.isatty():
            s = "\033[31m" + s + "\033[m"

        sys.stderr.write(s + '\n')
        sys.stderr.flush()

    def log_notice(s):
        if sys.stderr.isatty():
            s = "\033[32m" + s + "\033[m"

        sys.stderr.write(s + '\n')
        sys.stderr.flush()

    def log_verbose(s):
        sys.stderr.write(s + '\n')
        sys.stderr.flush()

    def log_silent(s):
        pass

    logs.error = log_alert
    logs.warning = log_alert
    logs.notice = log_notice
    logs.info = (log_silent, log_verbose)[options.verbose]
    logs.debug = log_silent

    try:
        try:
            proc = exec_fetch(param1, None, options.verbose, options.no_check)

        except OSError as e:
            logs.error(str(e))
            logs.error("error running fdsnws_fetch")
            return 1

        timespan = {}

        for line in proc.stdout:
            if isinstance(line, bytes):
                line = line.decode('utf-8')

            if not line or line.startswith('#'):
                continue

            starttime = max(dateutil.parser.parse(line.split('|')[15]),
                            times['starttime'])

            try:
                endtime = min(dateutil.parser.parse(line.split('|')[16]),
                              times['endtime'])

            except ValueError:
                # dateutil.parser.parse('') now raises ValueError instead
                # of returning the current time
                endtime = min(datetime.datetime.now(), times['endtime'])

            if starttime.tzinfo is not None:
                starttime = starttime.astimezone(
                    dateutil.tz.tzutc()).replace(tzinfo=None)

            if endtime.tzinfo is not None:
                endtime = endtime.astimezone(
                    dateutil.tz.tzutc()).replace(tzinfo=None)

            try:
                ts = timespan[tuple(line.split('|')[:4])]

                if ts.start > starttime:
                    ts.start = starttime
                    ts.current = starttime

                if ts.end < endtime:
                    ts.end = endtime

            except KeyError:
                timespan[tuple(line.split('|')[:4])] = \
                    Timespan(starttime, endtime)

        proc.stdout.close()
        proc.wait()

        if proc.returncode != 0:
            logs.error("error running fdsnws_fetch")
            return 1

        if os.path.exists(options.output_dir):
            scan_sds(options.output_dir, timespan, nets)

        while len(timespan) > 0:
            postdata = ""

            # random.sample() requires a sequence, so materialize the
            # dict view first
            ts_used = random.sample(list(timespan.items()),
                                    min(len(timespan), options.max_lines))

            for ((net, sta, loc, cha), ts) in ts_used:
                te = min(ts.end, ts.start + datetime.timedelta(
                    minutes=options.max_timespan))

                if loc == '':
                    loc = '--'

                postdata += "%s %s %s %s %sZ %sZ\n" \
                            % (net, sta, loc, cha,
                               ts.start.isoformat(), te.isoformat())

            if not isinstance(postdata, bytes):
                postdata = postdata.encode('utf-8')

            try:
                proc = exec_fetch(param2, postdata, options.verbose,
                                  options.no_check)

            except OSError as e:
                logs.error(str(e))
                logs.error("error running fdsnws_fetch")
                return 1

            got_data = False

            try:
                for rec in mseedlite.Input(proc.stdout):
                    try:
                        ts = timespan[(rec.net, rec.sta, rec.loc, rec.cha)]

                    except KeyError:
                        logs.warning("unexpected data: %s.%s.%s.%s"
                                     % (rec.net, rec.sta, rec.loc, rec.cha))
                        continue

                    if rec.end_time <= ts.current:
                        continue

                    sds_dir = "%s/%d/%s/%s/%s.D" \
                              % (options.output_dir, rec.begin_time.year,
                                 rec.net, rec.sta, rec.cha)

                    sds_file = "%s.%s.%s.%s.D.%s" \
                               % (rec.net, rec.sta, rec.loc, rec.cha,
                                  rec.begin_time.strftime('%Y.%j'))

                    if not os.path.exists(sds_dir):
                        os.makedirs(sds_dir)

                    with open(sds_dir + '/' + sds_file, 'ab') as fd:
                        fd.write(rec.header + rec.data)

                    ts.current = rec.end_time
                    nets.add((rec.net, rec.begin_time.year))
                    got_data = True

            except mseedlite.MSeedError as e:
                logs.error(str(e))

            proc.stdout.close()
            proc.wait()

            if proc.returncode != 0:
                logs.error("error running fdsnws_fetch")
                return 1

            for ((net, sta, loc, cha), ts) in ts_used:
                if not got_data:
                    # no progress, skip to next segment
                    ts.start += datetime.timedelta(
                        minutes=options.max_timespan)

                else:
                    # continue from current position
                    ts.start = ts.current

                if ts.start >= ts.end:
                    # timespan completed
                    del timespan[(net, sta, loc, cha)]

        if nets and not options.no_citation:
            logs.info("retrieving network citation info")
            get_citation(nets, param0, options.verbose)

    except (IOError, Error) as e:
        logs.error(str(e))
        return 1

    return 0
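# Minimal sketch (not part of the original script) of the Timespan
# class assumed by main() and scan_cha() above, consistent with how its
# attributes are used: start/end bound the remaining request window,
# current tracks the end time of the last record written.
class Timespan(object):
    def __init__(self, start, end):
        self.start = start
        self.current = start
        self.end = end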
def main():
    param0 = ["-y", "station", "-q", "format=text", "-q", "level=network"]
    param1 = ["-y", "station", "-q", "format=xml", "-q", "level=response"]
    param2 = ["-y", "dataselect", "-z"]
    nets = set()

    def add_param0(option, opt_str, value, parser):
        param0.append(opt_str)
        param0.append(value)

    def add_param1(option, opt_str, value, parser):
        param1.append(opt_str)
        param1.append(value)

    def add_param2(option, opt_str, value, parser):
        param2.append(opt_str)
        param2.append(value)

    def add_param12(option, opt_str, value, parser):
        add_param1(option, opt_str, value, parser)
        add_param2(option, opt_str, value, parser)

    def add_param(option, opt_str, value, parser):
        add_param0(option, opt_str, value, parser)
        add_param1(option, opt_str, value, parser)
        add_param2(option, opt_str, value, parser)

    parser = optparse.OptionParser(
        usage="Usage: %prog [-h|--help] [OPTIONS] -o file",
        version="%prog " + VERSION)

    parser.set_defaults(url="http://geofon.gfz-potsdam.de/eidaws/routing/1/",
                        timeout=600,
                        retries=10,
                        retry_wait=60,
                        threads=5)

    parser.add_option("-v", "--verbose", action="store_true", default=False,
                      help="verbose mode")

    parser.add_option("-u", "--url", type="string", action="callback",
                      callback=add_param,
                      help="URL of routing service (default %default)")

    parser.add_option("-N", "--network", type="string", action="callback",
                      callback=add_param12,
                      help="network code or pattern")

    parser.add_option("-S", "--station", type="string", action="callback",
                      callback=add_param12,
                      help="station code or pattern")

    parser.add_option("-L", "--location", type="string", action="callback",
                      callback=add_param12,
                      help="location code or pattern")

    parser.add_option("-C", "--channel", type="string", action="callback",
                      callback=add_param12,
                      help="channel code or pattern")

    parser.add_option("-s", "--starttime", type="string", action="callback",
                      callback=add_param12,
                      help="start time")

    parser.add_option("-e", "--endtime", type="string", action="callback",
                      callback=add_param12,
                      help="end time")

    parser.add_option("-t", "--timeout", type="int", action="callback",
                      callback=add_param,
                      help="request timeout in seconds (default %default)")

    parser.add_option("-r", "--retries", type="int", action="callback",
                      callback=add_param,
                      help="number of retries (default %default)")

    parser.add_option("-w", "--retry-wait", type="int", action="callback",
                      callback=add_param,
                      help="seconds to wait before each retry "
                           "(default %default)")

    parser.add_option("-n", "--threads", type="int", action="callback",
                      callback=add_param,
                      help="maximum number of download threads "
                           "(default %default)")

    parser.add_option("-c", "--credentials-file", type="string",
                      action="callback", callback=add_param2,
                      help="URL,user,password file (CSV format) for queryauth")

    parser.add_option("-a", "--auth-file", type="string", action="callback",
                      callback=add_param2,
                      help="file that contains the auth token")

    parser.add_option("-p", "--post-file", type="string", action="callback",
                      callback=add_param12,
                      help="request file in FDSNWS POST format")

    parser.add_option("-f", "--arclink-file", type="string", action="callback",
                      callback=add_param12,
                      help="request file in ArcLink format")

    parser.add_option("-b", "--breqfast-file", type="string",
                      action="callback", callback=add_param12,
                      help="request file in breq_fast format")

    parser.add_option("-d", "--dataless", action="store_true", default=False,
                      help="create dataless SEED volume")

    parser.add_option("-l", "--label", type="string",
                      help="label of SEED volume")

    parser.add_option("-o", "--output-file", type="string",
                      help="file where SEED data is written")

    parser.add_option("-z", "--no-citation", action="store_true",
                      default=False,
                      help="suppress network citation info")

    parser.add_option("-Z", "--no-check", action="store_true", default=False,
                      help="suppress checking received routes and data")

    (options, args) = parser.parse_args()

    if args or not options.output_file:
        parser.print_usage(sys.stderr)
        return 1

    def log_alert(s):
        if sys.stderr.isatty():
            s = "\033[31m" + s + "\033[m"

        sys.stderr.write(s + '\n')
        sys.stderr.flush()

    def log_notice(s):
        if sys.stderr.isatty():
            s = "\033[32m" + s + "\033[m"

        sys.stderr.write(s + '\n')
        sys.stderr.flush()

    def log_verbose(s):
        sys.stderr.write(s + '\n')
        sys.stderr.flush()

    def log_silent(s):
        pass

    logs.error = log_alert
    logs.warning = log_alert
    logs.notice = log_notice
    logs.info = (log_silent, log_verbose)[options.verbose]
    logs.debug = log_silent

    try:
        proc = exec_fetch(param1, None, options.verbose, options.no_check)

    except OSError as e:
        logs.error(str(e))
        logs.error("error running fdsnws_fetch")
        return 1

    inv = fdsnxml.Inventory()

    with tempfile.TemporaryFile() as fd:
        shutil.copyfileobj(proc.stdout, fd)
        proc.stdout.close()
        proc.wait()

        if proc.returncode != 0:
            logs.error("error running fdsnws_fetch")
            return 1

        if fd.tell():
            fd.seek(0)

            try:
                inv.load_fdsnxml(fd)

            except fdsnxml.Error as e:
                logs.error(str(e))
                return 1

    seed_volume = fseed.SEEDVolume(inv, ORGANIZATION, options.label, False)

    if options.dataless:
        for net in iterinv(inv.network):
            for sta in iterinv(net.station):
                for loc in iterinv(sta.sensorLocation):
                    for cha in iterinv(loc.stream):
                        try:
                            seed_volume.add_chan(net.code, sta.code, loc.code,
                                                 cha.code, cha.start, cha.end)

                        except fseed.SEEDError as e:
                            logs.warning("%s.%s.%s.%s.%s: %s"
                                         % (net.code, sta.code, loc.code,
                                            cha.code, cha.start.isoformat(),
                                            e))

    else:
        try:
            proc = exec_fetch(param2, None, options.verbose, options.no_check)

        except OSError as e:
            logs.error(str(e))
            logs.error("error running fdsnws_fetch")
            return 1

        try:
            for rec in mseedlite.Input(proc.stdout):
                try:
                    seed_volume.add_data(rec)

                except fseed.SEEDError as e:
                    # Record attributes are plain strings (net, sta, loc,
                    # cha), not inventory objects
                    logs.warning("%s.%s.%s.%s.%s: %s"
                                 % (rec.net, rec.sta, rec.loc, rec.cha,
                                    rec.begin_time.isoformat(), e))

                nets.add((rec.net, rec.begin_time.year))

        except mseedlite.MSeedError as e:
            logs.error(str(e))

        proc.stdout.close()
        proc.wait()

        if proc.returncode != 0:
            logs.error("error running fdsnws_fetch")
            return 1

    with open(options.output_file, "wb") as fd:
        try:
            seed_volume.output(fd)

        except fseed.SEEDError as e:
            logs.error(str(e))
            return 1

    if nets and not options.no_citation:
        logs.info("retrieving network citation info")
        get_citation(nets, param0, options.verbose)

    return 0
def __init__(self, src):
    """Read and parse a single miniSEED record header from src, which
    may be a bytes buffer or a file-like object."""
    if isinstance(src, bytes):
        fd = io.BytesIO(src)

    elif hasattr(src, "read"):
        fd = src

    else:
        raise TypeError("argument is neither bytes nor a file object")

    self.header = b""
    fixhead = fd.read(_FIXHEAD_LEN)

    if len(fixhead) == 0:
        raise StopIteration

    if len(fixhead) < _FIXHEAD_LEN:
        raise MSeedError("unexpected end of header")

    (recno_str, self.rectype, sta, loc, cha, net, bt_year, bt_doy,
     bt_hour, bt_minute, bt_second, bt_tms, self.nsamp, self.sr_factor,
     self.sr_mult, self.aflgs, self.cflgs, self.qflgs, self.__num_blk,
     self.time_correction, self.__pdata, self.__pblk) = \
        struct.unpack(">6scx5s2s3s2s2H3Bx2H2h4Bl2H", fixhead)

    # struct.unpack() returns bytes for the text fields
    self.rectype = self.rectype.decode('ascii', 'replace')
    self.header += fixhead

    if self.rectype not in ('D', 'R', 'Q', 'M'):
        fd.read(_MAX_RECLEN - _FIXHEAD_LEN)
        raise MSeedNoData("non-data record")

    if self.__pdata < _FIXHEAD_LEN or self.__pdata >= _MAX_RECLEN or \
            (self.__pblk != 0 and
             (self.__pblk < _FIXHEAD_LEN or self.__pblk >= self.__pdata)):
        raise MSeedError("invalid pointers")

    if self.__pblk == 0:
        blklen = 0

    else:
        blklen = self.__pdata - self.__pblk
        gaplen = self.__pblk - _FIXHEAD_LEN
        gap = fd.read(gaplen)

        if len(gap) < gaplen:
            raise MSeedError("unexpected end of data")

        self.header += gap

    # defaults
    self.encoding = 11
    self.byteorder = 1
    rec_len_exp = 12
    self.time_quality = -1
    micros = 0
    self.nframes = None
    self.__rec_len_exp_idx = None
    self.__micros_idx = None
    self.__nframes_idx = None
    pos = 0

    while pos < blklen:
        blkhead = fd.read(_BLKHEAD_LEN)

        if len(blkhead) < _BLKHEAD_LEN:
            raise MSeedError("unexpected end of blockettes at %d"
                             % (pos + len(blkhead)))

        (blktype, nextblk) = struct.unpack(">2H", blkhead)
        self.header += blkhead
        pos += _BLKHEAD_LEN

        if blktype == 1000:
            # data-only SEED blockette: encoding, byte order, record length
            blk1000 = fd.read(_BLK1000_LEN)

            if len(blk1000) < _BLK1000_LEN:
                raise MSeedError("unexpected end of blockettes at %d"
                                 % (pos + len(blk1000)))

            (self.encoding, self.byteorder, rec_len_exp) = \
                struct.unpack(">3Bx", blk1000)

            self.__rec_len_exp_idx = self.__pblk + pos + 2
            self.header += blk1000
            pos += _BLK1000_LEN

        elif blktype == 1001:
            # data extension blockette: timing quality, microseconds, frames
            blk1001 = fd.read(_BLK1001_LEN)

            if len(blk1001) < _BLK1001_LEN:
                raise MSeedError("unexpected end of blockettes at %d"
                                 % (pos + len(blk1001)))

            (self.time_quality, micros, self.nframes) = \
                struct.unpack(">BbxB", blk1001)

            self.__micros_idx = self.__pblk + pos + 1
            self.__nframes_idx = self.__pblk + pos + 3
            self.header += blk1001
            pos += _BLK1001_LEN

        if nextblk == 0:
            break

        if nextblk < self.__pblk + pos or nextblk >= self.__pdata:
            raise MSeedError("invalid pointers")

        gaplen = nextblk - (self.__pblk + pos)
        gap = fd.read(gaplen)

        if len(gap) < gaplen:
            raise MSeedError("unexpected end of data")

        self.header += gap
        pos += gaplen

    if pos > blklen:
        raise MSeedError("corrupt record")

    gaplen = self.__pdata - len(self.header)
    gap = fd.read(gaplen)

    if len(gap) < gaplen:
        raise MSeedError("unexpected end of data")

    self.header += gap
    pos += gaplen

    self.recno = int(recno_str)
    self.net = net.strip().decode('ascii', 'replace')
    self.sta = sta.strip().decode('ascii', 'replace')
    self.loc = loc.strip().decode('ascii', 'replace')
    self.cha = cha.strip().decode('ascii', 'replace')

    # sample rate factor/multiplier encode the rate as a rational number
    if self.sr_factor > 0 and self.sr_mult > 0:
        self.samprate_num = self.sr_factor * self.sr_mult
        self.samprate_denom = 1

    elif self.sr_factor > 0 and self.sr_mult < 0:
        self.samprate_num = self.sr_factor
        self.samprate_denom = -self.sr_mult

    elif self.sr_factor < 0 and self.sr_mult > 0:
        self.samprate_num = self.sr_mult
        self.samprate_denom = -self.sr_factor

    elif self.sr_factor < 0 and self.sr_mult < 0:
        self.samprate_num = 1
        self.samprate_denom = self.sr_factor * self.sr_mult

    else:
        self.samprate_num = 0
        self.samprate_denom = 1

    self.fsamp = float(self.samprate_num) / float(self.samprate_denom)

    # quick fix to avoid exception from datetime
    if bt_second > 59:
        self.leap = bt_second - 59
        bt_second = 59

    else:
        self.leap = 0

    try:
        (month, day) = _dy2mdy(bt_doy, bt_year)
        self.begin_time = datetime.datetime(bt_year, month, day, bt_hour,
                                            bt_minute, bt_second)

        self.begin_time += datetime.timedelta(
            microseconds=bt_tms * 100 + micros)

        if self.nsamp != 0 and self.fsamp != 0:
            self.end_time = self.begin_time + datetime.timedelta(
                microseconds=1000000 * self.nsamp / self.fsamp)

        else:
            self.end_time = self.begin_time

    except ValueError as e:
        logs.error("tms = " + str(bt_tms) + ", micros = " + str(micros))
        raise MSeedError("invalid time: " + str(e))
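# Hedged usage sketch (not part of the original module): parse the
# first record of a miniSEED file and print the fields set by
# __init__() above. The module and file names are assumptions.
def _example_read_record():
    import mseedlite

    with open("data.mseed", "rb") as fd:
        rec = mseedlite.Record(fd)
        print("%s.%s.%s.%s" % (rec.net, rec.sta, rec.loc, rec.cha))
        print(rec.begin_time, rec.end_time, rec.fsamp)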