Пример #1
0
    def __override_network(self, data, net):
        """Return *data* (raw miniSEED bytes) with every record's network
        code replaced by *net*, re-encoded at the smallest power-of-two
        record length that holds each record."""
        src = BytesIO(data)
        dst = BytesIO()

        for record in mseedlite.Input(src):
            record.net = net

            # Smallest exponent >= 9 (512 bytes) whose power of two fits
            # the record size.
            exp = 9
            while (1 << exp) < record.size:
                exp += 1

            record.write(dst, exp)

        return dst.getvalue()
Пример #2
0
    def close(self):
        """Build the SEED volume from the buffered miniSEED data and write
        it to the output file, then close both file objects.

        Errors while assembling the volume are logged, not raised; the
        file descriptors are always closed.
        """
        try:
            try:
                seed_volume = SEEDVolume(self.__inv, ORGANIZATION, LABEL,
                                         self.__resp_dict)

                # Re-read the buffered miniSEED records from the start.
                self.__mseed_fd.seek(0)
                for rec in mseed.Input(self.__mseed_fd):
                    seed_volume.add_data(rec)

                seed_volume.output(self.__fd)

            # NOTE: "except (...), e" is Python-2-only syntax; "as e" works
            # on both Python 2.6+ and Python 3.
            except (mseed.MSeedError, SEEDError, DBError) as e:
                logs.error("error creating SEED volume: " + str(e))

        finally:
            self.__mseed_fd.close()
            self.__fd.close()
Пример #3
0
def rt_simul(f, speed=1., jump=0., delaydict=None):
    """
    Iterator to simulate "real-time" MSeed input

    At startup, the first MSeed record is read. The following records are
    read in pseudo-real-time relative to the time of the first record,
    resulting in data flowing at realistic speed. This is useful e.g. for
    demonstrating real-time processing using real data of past events.

    The data in the input file may be multiplexed, but *must* be sorted by
    time, e.g. using 'mssort'.
    """
    import time

    wall_start = time.time()
    base_time = None
    in_skip_phase = True

    records = mseed.Input(f)
    if delaydict:
        records = read_mseed_with_delays(delaydict, records)

    for rec in records:
        # With a delay dict the iterator yields (timestamp, record) pairs;
        # otherwise derive the timestamp from the record's end time.
        if delaydict:
            rec_time, rec = rec
        else:
            rec_time = calendar.timegm(rec.end_time.timetuple())

        if base_time is None:
            base_time = rec_time

        if in_skip_phase:
            # Skip the first `jump` minutes of data before starting playback.
            if (rec_time - base_time) / 60.0 < jump:
                continue

            base_time = rec_time
            in_skip_phase = False

        # Simulated clock: data time advances `speed` times faster than wall
        # time; sleep until the record's last sample "occurs".
        tmax = base_time + speed * (time.time() - wall_start)
        last_sample = rec.begin_time + datetime.timedelta(
            microseconds=1000000.0 * (rec.nsamp / rec.fsamp))
        last_sample = calendar.timegm(last_sample.timetuple())
        if last_sample > tmax:
            time.sleep((last_sample - tmax + 0.001) / speed)
        yield rec
Пример #4
0
def main():
    """Command-line entry point: mirror FDSNWS waveform data into a local
    SDS archive.

    Runs fdsnws_fetch three ways: a channel-level station query to learn
    the available timespans, repeated dataselect requests to download the
    missing data, and a network-level station query for citation info.

    Returns 0 on success, 1 on any error.
    """
    # Base fdsnws_fetch argument lists:
    #   param0 -- network-level station query (citation info)
    #   param1 -- channel-level station query (available timespans)
    #   param2 -- dataselect query (waveform download)
    param0 = ["-y", "station", "-q", "format=text", "-q", "level=network"]
    param1 = ["-y", "station", "-q", "format=text", "-q", "level=channel"]
    param2 = ["-y", "dataselect", "-z"]

    # Requested time window; narrowed by the -s/-e options.
    times = {"starttime": datetime.datetime(
        1900, 1, 1), "endtime": datetime.datetime(2100, 1, 1)}

    # (network, year) pairs for which data was written; used for citation.
    nets = set()

    # optparse callbacks forwarding each option into the lists above.
    def add_param0(option, opt_str, value, parser):
        param0.append(opt_str)
        param0.append(value)

    def add_param1(option, opt_str, value, parser):
        param1.append(opt_str)
        param1.append(value)

    def add_param2(option, opt_str, value, parser):
        param2.append(opt_str)
        param2.append(value)

    def add_param(option, opt_str, value, parser):
        add_param0(option, opt_str, value, parser)
        add_param1(option, opt_str, value, parser)
        add_param2(option, opt_str, value, parser)

    def add_time(option, opt_str, value, parser):
        # Forward the raw value to the station query and also keep a parsed
        # naive-UTC datetime for clipping timespans locally.
        add_param1(option, opt_str, value, parser)

        try:
            t = dateutil.parser.parse(value)

        except ValueError:
            raise optparse.OptionValueError(
                "option '%s': invalid time value: '%s'" % (opt_str, value))

        if t.tzinfo is not None:
            t = t.astimezone(dateutil.tz.tzutc()).replace(tzinfo=None)

        times[option.dest] = t

    parser = optparse.OptionParser(
        usage="Usage: %prog [-h|--help] [OPTIONS] -o directory",
        version="%prog " + VERSION)

    parser.set_defaults(
        url="http://geofon.gfz-potsdam.de/eidaws/routing/1/",
        timeout=600,
        retries=10,
        retry_wait=60,
        threads=5,
        max_lines=1000,
        max_timespan=1440)

    parser.add_option("-v", "--verbose", action="store_true", default=False,
                      help="verbose mode")

    parser.add_option("-u", "--url", type="string", action="callback",
                      callback=add_param,
                      help="URL of routing service (default %default)")

    parser.add_option("-N", "--network", type="string", action="callback",
                      callback=add_param1,
                      help="network code or pattern")

    parser.add_option("-S", "--station", type="string", action="callback",
                      callback=add_param1,
                      help="station code or pattern")

    parser.add_option("-L", "--location", type="string", action="callback",
                      callback=add_param1,
                      help="location code or pattern")

    parser.add_option("-C", "--channel", type="string", action="callback",
                      callback=add_param1,
                      help="channel code or pattern")

    parser.add_option("-s", "--starttime", type="string", action="callback",
                      callback=add_time,
                      help="start time")

    parser.add_option("-e", "--endtime", type="string", action="callback",
                      callback=add_time,
                      help="end time")

    parser.add_option("-t", "--timeout", type="int", action="callback",
                      callback=add_param,
                      help="request timeout in seconds (default %default)")

    parser.add_option("-r", "--retries", type="int", action="callback",
                      callback=add_param,
                      help="number of retries (default %default)")

    parser.add_option("-w", "--retry-wait", type="int", action="callback",
                      callback=add_param,
                      help="seconds to wait before each retry (default %default)")

    parser.add_option("-n", "--threads", type="int", action="callback",
                      callback=add_param,
                      help="maximum number of download threads (default %default)")

    parser.add_option("-c", "--credentials-file", type="string", action="callback",
                      callback=add_param2,
                      help="URL,user,password file (CSV format) for queryauth")

    parser.add_option("-a", "--auth-file", type="string", action="callback",
                      callback=add_param2,
                      help="file that contains the auth token")

    parser.add_option("-o", "--output-dir", type="string",
                      help="SDS directory where downloaded data is written")

    parser.add_option("-l", "--max-lines", type="int",
                      help="max lines per request (default %default)")

    parser.add_option("-m", "--max-timespan", type="int",
                      help="max timespan per request in minutes (default %default)")

    parser.add_option("-z", "--no-citation", action="store_true", default=False,
                      help="suppress network citation info")

    parser.add_option("-Z", "--no-check", action="store_true", default=False,
                      help="suppress checking received routes and data")

    (options, args) = parser.parse_args()

    if args or not options.output_dir:
        parser.print_usage(sys.stderr)
        return 1

    # Logging backends; alerts/notices are colorized when stderr is a TTY.
    def log_alert(s):
        if sys.stderr.isatty():
            s = "\033[31m" + s + "\033[m"

        sys.stderr.write(s + '\n')
        sys.stderr.flush()

    def log_notice(s):
        if sys.stderr.isatty():
            s = "\033[32m" + s + "\033[m"

        sys.stderr.write(s + '\n')
        sys.stderr.flush()

    def log_verbose(s):
        sys.stderr.write(s + '\n')
        sys.stderr.flush()

    def log_silent(s):
        pass

    logs.error = log_alert
    logs.warning = log_alert
    logs.notice = log_notice
    logs.info = (log_silent, log_verbose)[options.verbose]
    logs.debug = log_silent

    try:
        try:
            proc = exec_fetch(param1, None, options.verbose, options.no_check)

        except OSError as e:
            logs.error(str(e))
            logs.error("error running fdsnws_fetch")
            return 1

        # (net, sta, loc, cha) -> Timespan still to be downloaded.
        timespan = {}

        for line in proc.stdout:
            if isinstance(line, bytes):
                line = line.decode('utf-8')

            if not line or line.startswith('#'):
                continue

            # FDSNWS station text format: columns 15/16 hold the channel
            # epoch's start/end time; clip them to the requested window.
            starttime = max(dateutil.parser.parse(
                line.split('|')[15]), times['starttime'])
            endtime = min(dateutil.parser.parse(
                line.split('|')[16]), times['endtime'])

            if starttime.tzinfo is not None:
                starttime = starttime.astimezone(
                    dateutil.tz.tzutc()).replace(tzinfo=None)

            if endtime.tzinfo is not None:
                endtime = endtime.astimezone(
                    dateutil.tz.tzutc()).replace(tzinfo=None)

            try:
                # Merge multiple epochs of the same stream into one span.
                ts = timespan[tuple(line.split('|')[:4])]

                if ts.start > starttime:
                    ts.start = starttime
                    ts.current = starttime

                if ts.end < endtime:
                    ts.end = endtime

            except KeyError:
                timespan[tuple(line.split('|')[:4])] = Timespan(
                    starttime, endtime)

        proc.stdout.close()
        proc.wait()

        if proc.returncode != 0:
            logs.error("error running fdsnws_fetch")
            return 1

        # Shrink/drop timespans already covered by existing SDS data.
        if os.path.exists(options.output_dir):
            scan_sds(options.output_dir, timespan, nets)

        while len(timespan) > 0:
            postdata = ""

            # BUG FIX: random.sample() requires a sequence; dict.items()
            # is only a view on Python 3, so materialize it first.
            ts_used = random.sample(list(timespan.items()), min(
                len(timespan), options.max_lines))

            for ((net, sta, loc, cha), ts) in ts_used:
                # Request at most max_timespan minutes per line.
                te = min(ts.end, ts.start +
                         datetime.timedelta(minutes=options.max_timespan))

                if loc == '':
                    loc = '--'

                postdata += "%s %s %s %s %sZ %sZ\n" \
                            % (net, sta, loc, cha, ts.start.isoformat(), te.isoformat())

            if not isinstance(postdata, bytes):
                postdata = postdata.encode('utf-8')

            try:
                proc = exec_fetch(param2, postdata,
                                  options.verbose, options.no_check)

            except OSError as e:
                logs.error(str(e))
                logs.error("error running fdsnws_fetch")
                return 1

            got_data = False

            try:
                for rec in mseedlite.Input(proc.stdout):
                    try:
                        ts = timespan[(rec.net, rec.sta, rec.loc, rec.cha)]

                    except KeyError:
                        logs.warning("unexpected data: %s.%s.%s.%s" %
                                     (rec.net, rec.sta, rec.loc, rec.cha))
                        continue

                    # Skip records we already have.
                    if rec.end_time <= ts.current:
                        continue

                    # SDS layout: <root>/<year>/<net>/<sta>/<cha>.D/<file>
                    sds_dir = "%s/%d/%s/%s/%s.D" \
                              % (options.output_dir, rec.begin_time.year, rec.net, rec.sta, rec.cha)

                    sds_file = "%s.%s.%s.%s.D.%s" \
                        % (rec.net, rec.sta, rec.loc, rec.cha, rec.begin_time.strftime('%Y.%j'))

                    if not os.path.exists(sds_dir):
                        os.makedirs(sds_dir)

                    with open(sds_dir + '/' + sds_file, 'ab') as fd:
                        fd.write(rec.header + rec.data)

                    ts.current = rec.end_time
                    nets.add((rec.net, rec.begin_time.year))
                    got_data = True

            except mseedlite.MSeedError as e:
                logs.error(str(e))

            proc.stdout.close()
            proc.wait()

            if proc.returncode != 0:
                logs.error("error running fdsnws_fetch")
                return 1

            for ((net, sta, loc, cha), ts) in ts_used:
                if not got_data:
                    # no progress, skip to next segment
                    ts.start += datetime.timedelta(
                        minutes=options.max_timespan)

                else:
                    # continue from current position
                    ts.start = ts.current

                if ts.start >= ts.end:
                    # timespan completed
                    del timespan[(net, sta, loc, cha)]

        if nets and not options.no_citation:
            logs.info("retrieving network citation info")
            get_citation(nets, param0, options.verbose)

    except (IOError, Error) as e:
        logs.error(str(e))
        return 1

    return 0
Пример #5
0
#!/usr/bin/env python

"""Demultiplex a miniSEED file into one file per stream.

Each record is appended to a file named STA.NET.LOC.CHA.D.YYYY.JJJ.HHMM,
derived from the first record seen for that stream.
"""

import sys
from seiscomp import mseedlite as mseed

open_files = {}

if len(sys.argv) != 2:
    print("Usage: extr_file FILE")
    sys.exit(1)

# BUG FIX: the `file()` builtin was removed in Python 3, and miniSEED
# records are raw bytes, so both input and output must be binary mode.
# The with-statement also ensures the input file is closed.
with open(sys.argv[1], "rb") as inp:
    for rec in mseed.Input(inp):
        oname = "%s.%s.%s.%s" % (rec.sta, rec.net, rec.loc, rec.cha)

        if oname not in open_files:
            # Output suffix from the stream's first record: year,
            # day-of-year (timetuple index 7), hour and minute.
            postfix = ".D.%04d.%03d.%02d%02d" % (rec.begin_time.year,
                rec.begin_time.timetuple()[7], rec.begin_time.hour,
                rec.begin_time.minute)

            open_files[oname] = open(oname + postfix, "ab")

        ofile = open_files[oname]
        ofile.write(rec.header + rec.data)

for oname in open_files:
    open_files[oname].close()

Пример #6
0
#!/usr/bin/env seiscomp-python

from __future__ import print_function
import sys
from seiscomp import mseedlite as mseed

open_files = {}

if len(sys.argv) != 2:
    print("Usage: extr_file FILE")
    sys.exit(1)

for rec in mseed.Input(open(sys.argv[1], "rb")):
    oname = "%s.%s.%s.%s" % (rec.sta, rec.net, rec.loc, rec.cha)

    if oname not in open_files:
        postfix = ".D.%04d.%03d.%02d%02d" % (
            rec.begin_time.year, rec.begin_time.timetuple()[7],
            rec.begin_time.hour, rec.begin_time.minute)

        open_files[oname] = open(oname + postfix, "ab")

    ofile = open_files[oname]
    ofile.write(rec.header + rec.data)

for oname in open_files:
    open_files[oname].close()