Example #1
def cmdLineParse():
    parser = argparse.ArgumentParser(description='Download GPS data over SAR coverage.',
                                     formatter_class=argparse.RawTextHelpFormatter,
                                     epilog=INTRODUCTION + '\n' + EXAMPLE)

    parser.add_argument('-p', dest='station', help='GPS station name.')
    parser.add_argument('-s', dest='start', help='start date.')
    parser.add_argument('-e', dest='end', help='end date.')
    parser.add_argument('-t',
                        dest='utc_time',
                        help='UTC time for troposphere, e.g., 14:00.')
    parser.add_argument('-o', dest='out', help='output file name.')
    parser.add_argument(
        '--inside',
        action="store_true",
        default=False,
        help='Constrain stations to inside the SAR coverage; otherwise, use the corner rectangle region.'
    )
    parser.add_argument(
        '--extend_search',
        dest='extend_search',
        help='extend the search region based on the box corner.')
    inps = parser.parse_args()

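    # A station name is required: fall back to usage output and exit with an error.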
    if not inps.station:
        parser.print_usage()
        sys.exit(
            os.path.basename(sys.argv[0]) +
            ': error: GPS station should be provided.')

    return inps
Example #2
def cmdLineParse():
    parser = argparse.ArgumentParser(description='Download GPS data over SAR coverage.',
                                     formatter_class=argparse.RawTextHelpFormatter,
                                     epilog=INTRODUCTION + '\n' + EXAMPLE)

    parser.add_argument('-f', dest='search_gps', help='GPS stations file')
    parser.add_argument('-s', dest='start', help='start date.')
    parser.add_argument('-e', dest='end', help='end date.')
    parser.add_argument('-t',
                        dest='run_time',
                        help='downloading time in pegasus.')
    parser.add_argument('-u',
                        dest='utc_time',
                        help='UTC time for troposphere.')
    parser.add_argument('-m',
                        dest='memory',
                        help='downloading memory in pegasus.')

    inps = parser.parse_args()

    if not inps.search_gps:
        parser.print_usage()
        sys.exit(
            os.path.basename(sys.argv[0]) +
            ': error: GPS station file should be provided.')

    return inps
Example #3
def run_command_line(parser, args):
    if not args.config:
        parser.print_usage()
        return

    logging.basicConfig(
        format='%(message)s',
        level=(logging.DEBUG if args.very_verbose else
               logging.INFO if args.verbose else logging.WARNING))

    # Configuration and settings (command line arguments override config file options)

    logger.debug("Parsing options from %s", args.config)
    with open(args.config) as config:
        options = config_to_options(config.read())

    # Allow command line arguments to override options
    override_options(options['connection'], args)
    override_options(options['settings'], args)

    # Set charting context, which determines how charts are rendered
    set_chart_context("paper")

    # Set output directory if required
    if args.output_directory:
        logger.info("Changing working directory to %s" % args.output_directory)
        os.chdir(args.output_directory)

    # Query JIRA and run calculators

    jira = get_jira_client(options['connection'])

    logger.info("Running calculators")
    query_manager = QueryManager(jira, options['settings'])
    run_calculators(CALCULATORS, query_manager, options['settings'])
Example #4
def main():
    from optparse import OptionParser
    parser = OptionParser()
    parser.add_option('-c', '--connection', dest='connection_string', type='string', help='SQL connection URI such as sqlite:///full/path/to/database.db')
    parser.add_option('-s', '--search', dest='search', type='string', help='Archive search results such as #foo')
    parser.add_option('-u', '--user', dest='user', type='string', help='Archive user timeline')
    parser.add_option('--sql', action='store_true', help='Print backend-specific SQL commands to create database tables')
    (options, args) = parser.parse_args()

    if not options.connection_string:
        parser.print_usage()
        print('Try the --help option')
        sys.exit(1)

    connect_sql(options.connection_string)

    if options.sql:
        print_sql()
        sys.exit(0)

    twitter_search = connect_twitter()

    # process command line
    if options.search:
        print('*** Archiving search: %s' % options.search)
        sa = SearchArchiver(options.search, twitter_search)
        archive_loop(sa)
    if options.user:
        for user in options.user.split(','):
            print('*** Archiving user timeline: %s' % user)
            ta = TimelineArchiver(user, twitter_search)
            archive_loop(ta)
            print('')
Example #5
def cmdLineParse():
    parser = argparse.ArgumentParser(description='Download GPS data over SAR coverage.',
                                     formatter_class=argparse.RawTextHelpFormatter,
                                     epilog=INTRODUCTION + '\n' + EXAMPLE)

    parser.add_argument('RefDate', help='Reference date of differential InSAR')
    parser.add_argument('-d', dest='date', help='date for estimation.')
    parser.add_argument('--datetxt', dest='datetxt', help='text file of date.')
    parser.add_argument('--Atm',
                        action="store_true",
                        default=False,
                        help='Getting SAR LOS tropospheric delay.')
    parser.add_argument('--Def',
                        action="store_true",
                        default=False,
                        help='Getting SAR LOS deformation.')

    inps = parser.parse_args()

    if not inps.date and not inps.datetxt:
        parser.print_usage()
        sys.exit(
            os.path.basename(sys.argv[0]) +
            ': error: at least one of a date (-d) or date text file (--datetxt) is needed.')

    return inps
Example #6
def cmdLineParse():
    parser = argparse.ArgumentParser(description='Compare InSAR results and GPS results both for APS and deformation.',
                                     formatter_class=argparse.RawTextHelpFormatter,
                                     epilog=INTRODUCTION + '\n' + EXAMPLE)

    parser.add_argument('UNW', help='unwrapped interferograms.')
    parser.add_argument('width', help='width of the unwrap image.')
    parser.add_argument('-m',
                        dest='master',
                        help='Master date of interferograms.')
    parser.add_argument('-s',
                        dest='slave',
                        help='Slave date of interferograms.')
    parser.add_argument('-r',
                        '--RefGPS',
                        dest='ref_gps',
                        help='Referenced GPS station.')
    parser.add_argument('--max_dist',
                        dest='max_dist',
                        help='maximum search radius of InSAR results.')
    parser.add_argument('--diff_atm',
                        dest='diff_atm',
                        help='differential file of GPS APS results.')
    parser.add_argument('--diff_def',
                        dest='diff_def',
                        help='differential file of GPS Def results.')
    parser.add_argument('-t',
                        '--trans',
                        dest='gps_coord',
                        help='GPS coordinates transformation file.')
    parser.add_argument('--byteorder',
                        dest='byteorder',
                        help='byte order of the unwrap image.')
    parser.add_argument(
        '--AtmDir',
        dest='AtmDir',
        help='GPS-based atm datasets directory. [default: current directory.]')
    parser.add_argument(
        '--DefDir',
        dest='DefDir',
        help='GPS-based def datasets directory. [default: current directory.]')
    parser.add_argument('--Atm',
                        action="store_true",
                        default=False,
                        help='Comparing APS results of InSAR and GPS.')
    parser.add_argument('--Def',
                        action="store_true",
                        default=False,
                        help='Comparing DEF results of InSAR and GPS.')

    inps = parser.parse_args()

    if not inps.Atm and not inps.Def:
        parser.print_usage()
        sys.exit(
            os.path.basename(sys.argv[0]) +
            ': error: at least one of --Atm or --Def is needed.')

    return inps
Example #7
    def contribute_to_subparser(self, parser):
        parser.set_defaults(function=lambda args: parser.print_usage())
        subparsers = parser.add_subparsers(title='commands')

        setup_parser = subparsers.add_parser(
            'setup', help='setup Google Drive API access through OAuth')
        setup_parser.set_defaults(function=self.setup)
Example #8
def cmdLineParse():
    parser = argparse.ArgumentParser(description='Download GPS data over SAR coverage.',
                                     formatter_class=argparse.RawTextHelpFormatter,
                                     epilog=INTRODUCTION + '\n' + EXAMPLE)

    parser.add_argument('date', help='SAR date.')
    parser.add_argument('--station', dest='station_name', help='GPS station name.')
    parser.add_argument('--station_txt', dest='station_txt', help='GPS station text file.')

    inps = parser.parse_args()

    if not inps.station_name and not inps.station_txt:
        parser.print_usage()
        sys.exit(os.path.basename(sys.argv[0]) +
                 ': error: at least one of --station or --station_txt should be provided.')

    return inps
Example #9
def main():
    parser, arguments = parse_arguments()
    if arguments.file:
        cookies_init()
        try:
            with open(arguments.file) as urls:
                download_executor(list(urls), arguments.concurrent)
        except Exception as e:
            print(e)
    elif arguments.url:
        cookies_init()
        try:
            urls = [arguments.url]
            download_executor(urls, arguments.concurrent)
        except Exception as e:
            print(e)
    else:
        parser.print_usage()
        sys.exit(1)
Example #10
def main():
    param0 = ["-y", "station", "-q", "format=text", "-q", "level=network"]
    param1 = ["-y", "station", "-q", "format=xml", "-q", "level=response"]
    param2 = ["-y", "dataselect", "-z"]
    nets = set()

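    # optparse callbacks that forward each option into one or more of the three
    # fdsnws_fetch argument lists (param0/param1/param2) built above.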
    def add_param0(option, opt_str, value, parser):
        param0.append(opt_str)
        param0.append(value)

    def add_param1(option, opt_str, value, parser):
        param1.append(opt_str)
        param1.append(value)

    def add_param2(option, opt_str, value, parser):
        param2.append(opt_str)
        param2.append(value)

    def add_param12(option, opt_str, value, parser):
        add_param1(option, opt_str, value, parser)
        add_param2(option, opt_str, value, parser)

    def add_param(option, opt_str, value, parser):
        add_param0(option, opt_str, value, parser)
        add_param1(option, opt_str, value, parser)
        add_param2(option, opt_str, value, parser)

    parser = optparse.OptionParser(
        usage="Usage: %prog [-h|--help] [OPTIONS] -o file",
        version="%prog " + VERSION)

    parser.set_defaults(url="http://geofon.gfz-potsdam.de/eidaws/routing/1/",
                        timeout=600,
                        retries=10,
                        retry_wait=60,
                        threads=5)

    parser.add_option("-v",
                      "--verbose",
                      action="store_true",
                      default=False,
                      help="verbose mode")

    parser.add_option("-u",
                      "--url",
                      type="string",
                      action="callback",
                      callback=add_param,
                      help="URL of routing service (default %default)")

    parser.add_option("-N",
                      "--network",
                      type="string",
                      action="callback",
                      callback=add_param12,
                      help="network code or pattern")

    parser.add_option("-S",
                      "--station",
                      type="string",
                      action="callback",
                      callback=add_param12,
                      help="station code or pattern")

    parser.add_option("-L",
                      "--location",
                      type="string",
                      action="callback",
                      callback=add_param12,
                      help="location code or pattern")

    parser.add_option("-C",
                      "--channel",
                      type="string",
                      action="callback",
                      callback=add_param12,
                      help="channel code or pattern")

    parser.add_option("-s",
                      "--starttime",
                      type="string",
                      action="callback",
                      callback=add_param12,
                      help="start time")

    parser.add_option("-e",
                      "--endtime",
                      type="string",
                      action="callback",
                      callback=add_param12,
                      help="end time")

    parser.add_option("-t",
                      "--timeout",
                      type="int",
                      action="callback",
                      callback=add_param,
                      help="request timeout in seconds (default %default)")

    parser.add_option("-r",
                      "--retries",
                      type="int",
                      action="callback",
                      callback=add_param,
                      help="number of retries (default %default)")

    parser.add_option(
        "-w",
        "--retry-wait",
        type="int",
        action="callback",
        callback=add_param,
        help="seconds to wait before each retry (default %default)")

    parser.add_option(
        "-n",
        "--threads",
        type="int",
        action="callback",
        callback=add_param,
        help="maximum number of download threads (default %default)")

    parser.add_option("-c",
                      "--credentials-file",
                      type="string",
                      action="callback",
                      callback=add_param2,
                      help="URL,user,password file (CSV format) for queryauth")

    parser.add_option("-a",
                      "--auth-file",
                      type="string",
                      action="callback",
                      callback=add_param2,
                      help="file that contains the auth token")

    parser.add_option("-p",
                      "--post-file",
                      type="string",
                      action="callback",
                      callback=add_param12,
                      help="request file in FDSNWS POST format")

    parser.add_option("-f",
                      "--arclink-file",
                      type="string",
                      action="callback",
                      callback=add_param12,
                      help="request file in ArcLink format")

    parser.add_option("-b",
                      "--breqfast-file",
                      type="string",
                      action="callback",
                      callback=add_param12,
                      help="request file in breq_fast format")

    parser.add_option("-d",
                      "--dataless",
                      action="store_true",
                      default=False,
                      help="create dataless SEED volume")

    parser.add_option("-l",
                      "--label",
                      type="string",
                      help="label of SEED volume")

    parser.add_option("-o",
                      "--output-file",
                      type="string",
                      help="file where SEED data is written")

    parser.add_option("-z",
                      "--no-citation",
                      action="store_true",
                      default=False,
                      help="suppress network citation info")

    parser.add_option("-Z",
                      "--no-check",
                      action="store_true",
                      default=False,
                      help="suppress checking received routes and data")

    (options, args) = parser.parse_args()

    if args or not options.output_file:
        parser.print_usage(sys.stderr)
        return 1

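    # Logging helpers: alerts and notices are colorized with ANSI escape codes
    # when stderr is a terminal; info output depends on --verbose.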
    def log_alert(s):
        if sys.stderr.isatty():
            s = "\033[31m" + s + "\033[m"

        sys.stderr.write(s + '\n')
        sys.stderr.flush()

    def log_notice(s):
        if sys.stderr.isatty():
            s = "\033[32m" + s + "\033[m"

        sys.stderr.write(s + '\n')
        sys.stderr.flush()

    def log_verbose(s):
        sys.stderr.write(s + '\n')
        sys.stderr.flush()

    def log_silent(s):
        pass

    logs.error = log_alert
    logs.warning = log_alert
    logs.notice = log_notice
    logs.info = (log_silent, log_verbose)[options.verbose]
    logs.debug = log_silent

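    # First fetch station metadata (param1: StationXML at response level).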
    try:
        proc = exec_fetch(param1, None, options.verbose, options.no_check)

    except OSError as e:
        logs.error(str(e))
        logs.error("error running fdsnws_fetch")
        return 1

    inv = fdsnxml.Inventory()

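    # Buffer the metadata in a temporary file, then load it into an FDSN
    # StationXML inventory.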
    with tempfile.TemporaryFile() as fd:
        shutil.copyfileobj(proc.stdout, fd)

        proc.stdout.close()
        proc.wait()

        if proc.returncode != 0:
            logs.error("error running fdsnws_fetch")
            return 1

        if fd.tell():
            fd.seek(0)

            try:
                inv.load_fdsnxml(fd)

            except fdsnxml.Error as e:
                logs.error(str(e))
                return 1

    seed_volume = fseed.SEEDVolume(inv, ORGANIZATION, options.label, False)

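    # With --dataless, only channel metadata goes into the SEED volume;
    # otherwise waveform data is fetched (param2) and added record by record.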
    if options.dataless:
        for net in iterinv(inv.network):
            for sta in iterinv(net.station):
                for loc in iterinv(sta.sensorLocation):
                    for cha in iterinv(loc.stream):
                        try:
                            seed_volume.add_chan(net.code, sta.code, loc.code,
                                                 cha.code, cha.start, cha.end)

                        except fseed.SEEDError as e:
                            logs.warning("%s.%s.%s.%s.%s: %s" %
                                         (net.code, sta.code, loc.code,
                                          cha.code, cha.start.isoformat(), e))

    else:
        try:
            proc = exec_fetch(param2, None, options.verbose, options.no_check)

        except OSError as e:
            logs.error(str(e))
            logs.error("error running fdsnws_fetch")
            return 1

        try:
            for rec in mseedlite.Input(proc.stdout):
                try:
                    seed_volume.add_data(rec)

                except fseed.SEEDError as e:
                    logs.warning("%s.%s.%s.%s.%s: %s" %
                                 (rec.net.code, rec.sta.code, rec.loc.code,
                                  rec.cha.code, rec.cha.start.isoformat(), e))

                nets.add((rec.net, rec.begin_time.year))

        except mseedlite.MSeedError as e:
            logs.error(str(e))

        proc.stdout.close()
        proc.wait()

        if proc.returncode != 0:
            logs.error("error running fdsnws_fetch")
            return 1

    with open(options.output_file, "wb") as fd:
        try:
            seed_volume.output(fd)

        except fseed.SEEDError as e:
            logs.error(str(e))
            return 1

    if nets and not options.no_citation:
        logs.info("retrieving network citation info")
        get_citation(nets, param0, options.verbose)

    return 0
Example #11
def main():
    qp = {}

    def add_qp(option, opt_str, value, parser):
        if option.dest == 'query':
            try:
                (p, v) = value.split('=', 1)
                qp[p] = v

            except ValueError:
                raise optparse.OptionValueError("%s expects parameter=value" %
                                                opt_str)

        else:
            qp[option.dest] = value

    parser = optparse.OptionParser(
        usage="Usage: %prog [-h|--help] [OPTIONS] -o file",
        version="%prog " + VERSION,
        add_help_option=False)

    parser.set_defaults(url="http://geofon.gfz-potsdam.de/eidaws/routing/1/",
                        timeout=600,
                        retries=10,
                        retry_wait=60,
                        threads=5)

    parser.add_option("-h",
                      "--help",
                      action="store_true",
                      default=False,
                      help="show help message and exit")

    parser.add_option("-l",
                      "--longhelp",
                      action="store_true",
                      default=False,
                      help="show extended help message and exit")

    parser.add_option("-v",
                      "--verbose",
                      action="store_true",
                      default=False,
                      help="verbose mode")

    parser.add_option("-u",
                      "--url",
                      type="string",
                      help="URL of routing service (default %default)")

    parser.add_option("-y",
                      "--service",
                      type="string",
                      action="callback",
                      callback=add_qp,
                      help="target service (default dataselect)")

    parser.add_option("-N",
                      "--network",
                      type="string",
                      action="callback",
                      callback=add_qp,
                      help="network code or pattern")

    parser.add_option("-S",
                      "--station",
                      type="string",
                      action="callback",
                      callback=add_qp,
                      help="station code or pattern")

    parser.add_option("-L",
                      "--location",
                      type="string",
                      action="callback",
                      callback=add_qp,
                      help="location code or pattern")

    parser.add_option("-C",
                      "--channel",
                      type="string",
                      action="callback",
                      callback=add_qp,
                      help="channel code or pattern")

    parser.add_option("-s",
                      "--starttime",
                      type="string",
                      action="callback",
                      callback=add_qp,
                      help="start time")

    parser.add_option("-e",
                      "--endtime",
                      type="string",
                      action="callback",
                      callback=add_qp,
                      help="end time")

    parser.add_option("-q",
                      "--query",
                      type="string",
                      action="callback",
                      callback=add_qp,
                      metavar="PARAMETER=VALUE",
                      help="additional query parameter")

    parser.add_option("-t",
                      "--timeout",
                      type="int",
                      help="request timeout in seconds (default %default)")

    parser.add_option("-r",
                      "--retries",
                      type="int",
                      help="number of retries (default %default)")

    parser.add_option("-w",
                      "--retry-wait",
                      type="int",
                      help="seconds to wait before each retry "
                      "(default %default)")

    parser.add_option("-n",
                      "--threads",
                      type="int",
                      help="maximum number of download threads "
                      "(default %default)")

    parser.add_option("-c",
                      "--credentials-file",
                      type="string",
                      help="URL,user,password file (CSV format) for queryauth")

    parser.add_option("-a",
                      "--auth-file",
                      type="string",
                      help="file that contains the auth token")

    parser.add_option("-p",
                      "--post-file",
                      type="string",
                      help="request file in FDSNWS POST format")

    parser.add_option("-f",
                      "--arclink-file",
                      type="string",
                      help="request file in ArcLink format")

    parser.add_option("-b",
                      "--breqfast-file",
                      type="string",
                      help="request file in breq_fast format")

    parser.add_option("-o",
                      "--output-file",
                      type="string",
                      help="file where downloaded data is written")

    parser.add_option("-z",
                      "--no-citation",
                      action="store_true",
                      default=False,
                      help="suppress network citation info")

    parser.add_option("-Z",
                      "--no-check",
                      action="store_true",
                      default=False,
                      help="suppress checking received routes and data")

    (options, args) = parser.parse_args()

    if options.help:
        print(__doc__.split("Usage Examples", 1)[0], end="")
        parser.print_help()
        return 0

    if options.longhelp:
        print(__doc__)
        parser.print_help()
        return 0

    if args or not options.output_file:
        parser.print_usage(sys.stderr)
        return 1

    if bool(options.post_file) + bool(options.arclink_file) + \
            bool(options.breqfast_file) > 1:
        msg("only one of (--post-file, --arclink-file, --breqfast-file) "
            "can be used")
        return 1

    try:
        cred = {}
        authdata = None
        postdata = None
        chans_to_check = set()

        if options.credentials_file:
            with open(options.credentials_file) as fd:
                try:
                    for (url, user, passwd) in csv.reader(fd):
                        cred[url] = (user, passwd)

                except (ValueError, csv.Error):
                    raise Error("error parsing %s" % options.credentials_file)

                except UnicodeDecodeError:
                    raise Error("invalid unicode character found in %s" %
                                options.credentials_file)

        if options.auth_file:
            with open(options.auth_file, 'rb') as fd:
                authdata = fd.read()

        else:
            try:
                with open(DEFAULT_TOKEN_LOCATION, 'rb') as fd:
                    authdata = fd.read()
                    options.auth_file = DEFAULT_TOKEN_LOCATION

            except IOError:
                pass

        if authdata:
            msg("using token in %s:" % options.auth_file, options.verbose)

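            # Try to decrypt the token with gpg; the decrypted content is only
            # echoed in verbose mode.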
            try:
                proc = subprocess.Popen(['gpg', '--decrypt'],
                                        stdin=subprocess.PIPE,
                                        stdout=subprocess.PIPE,
                                        stderr=subprocess.PIPE)

                out, err = proc.communicate(authdata)

                if not out:
                    if isinstance(err, bytes):
                        err = err.decode('utf-8')

                    msg(err)
                    return 1

                if isinstance(out, bytes):
                    out = out.decode('utf-8')

                msg(out, options.verbose)

            except OSError as e:
                msg(str(e))

        if options.post_file:
            try:
                with open(options.post_file) as fd:
                    postdata = fd.read()

            except UnicodeDecodeError:
                raise Error("invalid unicode character found in %s" %
                            options.post_file)

        else:
            parser = None

            if options.arclink_file:
                parser = ArclinkParser()

                try:
                    parser.parse(options.arclink_file)

                except UnicodeDecodeError:
                    raise Error("invalid unicode character found in %s" %
                                options.arclink_file)

            elif options.breqfast_file:
                parser = BreqParser()

                try:
                    parser.parse(options.breqfast_file)

                except UnicodeDecodeError:
                    raise Error("invalid unicode character found in %s" %
                                options.breqfast_file)

            if parser is not None:
                if parser.failstr:
                    msg(parser.failstr)
                    return 1

                postdata = parser.postdata

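        # Build the set of NET.STA.LOC.CHA patterns to check against the
        # received routes and data, from POST lines or from query parameters.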
        if not options.no_check:
            if postdata:
                for line in postdata.splitlines():
                    nslc = line.split()[:4]
                    if nslc[2] == '--': nslc[2] = ''
                    chans_to_check.add('.'.join(nslc))

            else:
                net = qp.get('network', '*')
                sta = qp.get('station', '*')
                loc = qp.get('location', '*')
                cha = qp.get('channel', '*')

                for n in net.split(','):
                    for s in sta.split(','):
                        for l in loc.split(','):
                            for c in cha.split(','):
                                if l == '--': l = ''
                                chans_to_check.add('.'.join((n, s, l, c)))

        url = RoutingURL(urlparse.urlparse(options.url), qp)
        dest = open(options.output_file, 'wb')

        nets = route(url, cred, authdata, postdata, dest, chans_to_check,
                     options.timeout, options.retries, options.retry_wait,
                     options.threads, options.verbose)

        if nets and not options.no_citation:
            msg("retrieving network citation info", options.verbose)
            get_citation(nets, options)

        else:
            msg("", options.verbose)

        msg(
            "In case of problems with your request, plese use the contact "
            "form at\n\n"
            "    http://www.orfeus-eu.org/organization/contact/form/"
            "?recipient=EIDA\n", options.verbose)

    except (IOError, Error) as e:
        msg(str(e))
        return 1

    return 0
Example #12
    try:
        _update_db_tools_labs(cfg)
    except Exception:
        traceback.print_exc(file=sys.stderr)
        email('Failed to build database for %s' % cfg.lang_code, logfiles)
        sys.exit(1)
    utils.mkdir_p(cfg.log_dir)
    for logfile in logfiles:
        os.rename(logfile, os.path.join(cfg.log_dir, logfile))

if __name__ == '__main__':
    parser = argparse.ArgumentParser(
        description='Update the CitationHunt databases.')
    parser.add_argument('lang_code',
        help='One of the language codes in ../config.py')
    args = parser.parse_args()

    if not (utils.running_in_tools_labs() and utils.running_in_virtualenv()):
        print('Not running in a virtualenv in Tools Labs!', file=sys.stderr)
        sys.exit(1)

    if args.lang_code not in config.LANG_CODES_TO_LANG_NAMES:
        print('Invalid lang code! Use one of:',
              list(config.LANG_CODES_TO_LANG_NAMES.keys()), file=sys.stderr)
        parser.print_usage()
        sys.exit(1)

    cfg = config.get_localized_config(args.lang_code)
    update_db_tools_labs(cfg)
Example #13
def main():
    param0 = ["-y", "station", "-q", "format=text", "-q", "level=network"]
    param1 = ["-y", "station", "-q", "format=text", "-q", "level=channel"]
    param2 = ["-y", "dataselect", "-z"]
    times = {"starttime": datetime.datetime(1900, 1, 1), "endtime": datetime.datetime(2100, 1, 1)}
    nets = set()

    def add_param0(option, opt_str, value, parser):
        param0.append(opt_str)
        param0.append(value)

    def add_param1(option, opt_str, value, parser):
        param1.append(opt_str)
        param1.append(value)

    def add_param2(option, opt_str, value, parser):
        param2.append(opt_str)
        param2.append(value)

    def add_param(option, opt_str, value, parser):
        add_param0(option, opt_str, value, parser)
        add_param1(option, opt_str, value, parser)
        add_param2(option, opt_str, value, parser)

    def add_time(option, opt_str, value, parser):
        add_param1(option, opt_str, value, parser)

        try:
            t = dateutil.parser.parse(value)

        except ValueError as e:
            raise optparse.OptionValueError("option '%s': invalid time value: '%s'" % (opt_str, value))

        if t.tzinfo is not None:
            t = t.astimezone(dateutil.tz.tzutc()).replace(tzinfo=None)

        times[option.dest] = t

    parser = optparse.OptionParser(
            usage="Usage: %prog [-h|--help] [OPTIONS] -o directory",
            version="%prog " + VERSION)

    parser.set_defaults(
            url="http://geofon.gfz-potsdam.de/eidaws/routing/1/",
            timeout=600,
            retries=10,
            retry_wait=60,
            threads=5,
            max_lines=1000,
            max_timespan=1440)

    parser.add_option("-v", "--verbose", action="store_true", default=False,
                      help="verbose mode")

    parser.add_option("-u", "--url", type="string", action="callback",
                      callback=add_param,
                      help="URL of routing service (default %default)")

    parser.add_option("-N", "--network", type="string", action="callback",
                      callback=add_param1,
                      help="network code or pattern")

    parser.add_option("-S", "--station", type="string", action="callback",
                      callback=add_param1,
                      help="station code or pattern")

    parser.add_option("-L", "--location", type="string", action="callback",
                      callback=add_param1,
                      help="location code or pattern")

    parser.add_option("-C", "--channel", type="string", action="callback",
                      callback=add_param1,
                      help="channel code or pattern")

    parser.add_option("-s", "--starttime", type="string", action="callback",
                      callback=add_time,
                      help="start time")

    parser.add_option("-e", "--endtime", type="string", action="callback",
                      callback=add_time,
                      help="end time")

    parser.add_option("-t", "--timeout", type="int", action="callback",
                      callback=add_param,
                      help="request timeout in seconds (default %default)")

    parser.add_option("-r", "--retries", type="int", action="callback",
                      callback=add_param,
                      help="number of retries (default %default)")

    parser.add_option("-w", "--retry-wait", type="int", action="callback",
                      callback=add_param,
                      help="seconds to wait before each retry (default %default)")

    parser.add_option("-n", "--threads", type="int", action="callback",
                      callback=add_param,
                      help="maximum number of download threads (default %default)")

    parser.add_option("-c", "--credentials-file", type="string", action="callback",
                      callback=add_param2,
                      help="URL,user,password file (CSV format) for queryauth")

    parser.add_option("-a", "--auth-file", type="string", action="callback",
                      callback=add_param2,
                      help="file that contains the auth token")

    parser.add_option("-o", "--output-dir", type="string",
                      help="SDS directory where downloaded data is written")

    parser.add_option("-l", "--max-lines", type="int",
                      help="max lines per request (default %default)")

    parser.add_option("-m", "--max-timespan", type="int",
                      help="max timespan per request in minutes (default %default)")

    parser.add_option("-z", "--no-citation", action="store_true", default=False,
                      help="suppress network citation info")

    parser.add_option("-Z", "--no-check", action="store_true", default=False,
                      help="suppress checking received routes and data")

    (options, args) = parser.parse_args()

    if args or not options.output_dir:
        parser.print_usage(sys.stderr)
        return 1

    def log_alert(s):
        if sys.stderr.isatty():
            s = "\033[31m" + s + "\033[m"

        sys.stderr.write(s + '\n')
        sys.stderr.flush()

    def log_notice(s):
        if sys.stderr.isatty():
            s = "\033[32m" + s + "\033[m"

        sys.stderr.write(s + '\n')
        sys.stderr.flush()

    def log_verbose(s):
        sys.stderr.write(s + '\n')
        sys.stderr.flush()

    def log_silent(s):
        pass

    logs.error = log_alert
    logs.warning = log_alert
    logs.notice = log_notice
    logs.info = (log_silent, log_verbose)[options.verbose]
    logs.debug = log_silent

    try:
        try:
            proc = exec_fetch(param1, None, options.verbose, options.no_check)

        except OSError as e:
            logs.error(str(e))
            logs.error("error running fdsnws_fetch")
            return 1

        timespan = {}

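        # Parse the channel-level station text output and record the requested
        # time window per (net, sta, loc, cha).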
        for line in proc.stdout:
            if isinstance(line, bytes):
                line = line.decode('utf-8')

            if not line or line.startswith('#'):
                continue

            starttime = max(dateutil.parser.parse(line.split('|')[15]), times['starttime'])
            endtime = min(dateutil.parser.parse(line.split('|')[16]), times['endtime'])

            if starttime.tzinfo is not None:
                starttime = starttime.astimezone(dateutil.tz.tzutc()).replace(tzinfo=None)

            if endtime.tzinfo is not None:
                endtime = endtime.astimezone(dateutil.tz.tzutc()).replace(tzinfo=None)

            try:
                ts = timespan[tuple(line.split('|')[:4])]

                if ts.start > starttime:
                    ts.start = starttime
                    ts.current = starttime

                if ts.end < endtime:
                    ts.end = endtime

            except KeyError:
                timespan[tuple(line.split('|')[:4])] = Timespan(starttime, endtime)

        proc.stdout.close()
        proc.wait()

        if proc.returncode != 0:
            logs.error("error running fdsnws_fetch")
            return 1

        if os.path.exists(options.output_dir):
            scan_sds(options.output_dir, timespan, nets)

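        # Download in batches: each request covers at most max_lines channels,
        # each limited to max_timespan minutes, until every timespan is done.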
        while len(timespan) > 0:
            postdata = ""

            ts_used = random.sample(list(timespan.items()),
                                    min(len(timespan), options.max_lines))

            for ((net, sta, loc, cha), ts) in ts_used:
                te = min(ts.end, ts.start + datetime.timedelta(minutes=options.max_timespan))

                if loc == '':
                    loc = '--'

                postdata += "%s %s %s %s %sZ %sZ\n" \
                            % (net, sta, loc, cha, ts.start.isoformat(), te.isoformat())

            if not isinstance(postdata, bytes):
                postdata = postdata.encode('utf-8')

            try:
                proc = exec_fetch(param2, postdata, options.verbose, options.no_check)

            except OSError as e:
                logs.error(str(e))
                logs.error("error running fdsnws_fetch")
                return 1

            got_data = False

            try:
                for rec in mseedlite.Input(proc.stdout):
                    try:
                        ts = timespan[(rec.net, rec.sta, rec.loc, rec.cha)]

                    except KeyError:
                        logs.warning("unexpected data: %s.%s.%s.%s" % (rec.net, rec.sta, rec.loc, rec.cha))
                        continue

                    if rec.end_time <= ts.current:
                        continue

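                    # Append the record to an SDS file:
                    # <output_dir>/<year>/<net>/<sta>/<cha>.D/<net>.<sta>.<loc>.<cha>.D.<year>.<jday>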
                    sds_dir = "%s/%d/%s/%s/%s.D" \
                              % (options.output_dir, rec.begin_time.year, rec.net, rec.sta, rec.cha)

                    sds_file = "%s.%s.%s.%s.D.%s" \
                              % (rec.net, rec.sta, rec.loc, rec.cha, rec.begin_time.strftime('%Y.%j'))

                    if not os.path.exists(sds_dir):
                        os.makedirs(sds_dir)

                    with open(sds_dir + '/' + sds_file, 'ab') as fd:
                        fd.write(rec.header + rec.data)

                    ts.current = rec.end_time
                    nets.add((rec.net, rec.begin_time.year))
                    got_data = True

            except mseedlite.MSeedError as e:
                logs.error(str(e))

            proc.stdout.close()
            proc.wait()

            if proc.returncode != 0:
                logs.error("error running fdsnws_fetch")
                return 1

            for ((net, sta, loc, cha), ts) in ts_used:
                if not got_data:
                    # no progress, skip to next segment
                    ts.start += datetime.timedelta(minutes=options.max_timespan)

                else:
                    # continue from current position
                    ts.start = ts.current

                if ts.start >= ts.end:
                    # timespan completed
                    del timespan[(net, sta, loc, cha)]

        if nets and not options.no_citation:
            logs.info("retrieving network citation info")
            get_citation(nets, param0, options.verbose)

    except (IOError, Error) as e:
        logs.error(str(e))
        return 1

    return 0
Example #14
def main():
    qp = {}

    def add_qp(option, opt_str, value, parser):
        if option.dest == 'query':
            try:
                (p, v) = value.split('=', 1)
                qp[p] = v

            except ValueError:
                raise optparse.OptionValueError("%s expects parameter=value"
                                                % opt_str)

        else:
            qp[option.dest] = value

    parser = optparse.OptionParser(
            usage="Usage: %prog [-h|--help] [OPTIONS] -o file",
            version="%prog " + VERSION,
            add_help_option=False)

    parser.set_defaults(
            url="http://geofon.gfz-potsdam.de/eidaws/routing/1/",
            timeout=600,
            retries=10,
            retry_wait=60,
            threads=5)

    parser.add_option("-h", "--help", action="store_true", default=False,
                      help="show help message and exit")

    parser.add_option("-l", "--longhelp", action="store_true", default=False,
                      help="show extended help message and exit")

    parser.add_option("-v", "--verbose", action="store_true", default=False,
                      help="verbose mode")

    parser.add_option("-u", "--url", type="string",
                      help="URL of routing service (default %default)")

    parser.add_option("-y", "--service", type="string", action="callback",
                      callback=add_qp,
                      help="target service (default dataselect)")

    parser.add_option("-N", "--network", type="string", action="callback",
                      callback=add_qp,
                      help="network code or pattern")

    parser.add_option("-S", "--station", type="string", action="callback",
                      callback=add_qp,
                      help="station code or pattern")

    parser.add_option("-L", "--location", type="string", action="callback",
                      callback=add_qp,
                      help="location code or pattern")

    parser.add_option("-C", "--channel", type="string", action="callback",
                      callback=add_qp,
                      help="channel code or pattern")

    parser.add_option("-s", "--starttime", type="string", action="callback",
                      callback=add_qp,
                      help="start time")

    parser.add_option("-e", "--endtime", type="string", action="callback",
                      callback=add_qp,
                      help="end time")

    parser.add_option("-q", "--query", type="string", action="callback",
                      callback=add_qp, metavar="PARAMETER=VALUE",
                      help="additional query parameter")

    parser.add_option("-t", "--timeout", type="int",
                      help="request timeout in seconds (default %default)")

    parser.add_option("-r", "--retries", type="int",
                      help="number of retries (default %default)")

    parser.add_option("-w", "--retry-wait", type="int",
                      help="seconds to wait before each retry "
                           "(default %default)")

    parser.add_option("-n", "--threads", type="int",
                      help="maximum number of download threads "
                           "(default %default)")

    parser.add_option("-c", "--credentials-file", type="string",
                      help="URL,user,password file (CSV format) for queryauth")

    parser.add_option("-a", "--auth-file", type="string",
                      help="file that contains the auth token")

    parser.add_option("-p", "--post-file", type="string",
                      help="request file in FDSNWS POST format")

    parser.add_option("-f", "--arclink-file", type="string",
                      help="request file in ArcLink format")

    parser.add_option("-b", "--breqfast-file", type="string",
                      help="request file in breq_fast format")

    parser.add_option("-o", "--output-file", type="string",
                      help="file where downloaded data is written")

    parser.add_option("-z", "--no-citation", action="store_true", default=False,
                      help="suppress network citation info")

    (options, args) = parser.parse_args()

    if options.help:
        print(__doc__.split("Usage Examples", 1)[0], end="")
        parser.print_help()
        return 0

    if options.longhelp:
        print(__doc__)
        parser.print_help()
        return 0

    if args or not options.output_file:
        parser.print_usage()
        return 1

    if bool(options.post_file) + bool(options.arclink_file) + \
            bool(options.breqfast_file) > 1:
        msg("only one of (--post-file, --arclink-file, --breqfast-file) "
            "can be used")
        return 1

    try:
        cred = {}
        authdata = None
        postdata = None

        if options.credentials_file:
            with open(options.credentials_file) as fd:
                try:
                    for (url, user, passwd) in csv.reader(fd):
                        cred[url] = (user, passwd)

                except (ValueError, csv.Error):
                    raise Error("error parsing %s" % options.credentials_file)

        if options.auth_file:
            with open(options.auth_file) as fd:
                authdata = fd.read()

        if options.post_file:
            with open(options.post_file) as fd:
                postdata = fd.read()

        else:
            parser = None

            if options.arclink_file:
                parser = ArclinkParser()
                parser.parse(options.arclink_file)

            elif options.breqfast_file:
                parser = BreqParser()
                parser.parse(options.breqfast_file)

            if parser is not None:
                if parser.failstr:
                    msg(parser.failstr)
                    return 1

                postdata = parser.postdata

        url = RoutingURL(urlparse.urlparse(options.url), qp)
        dest = open(options.output_file, 'wb')

        nets = route(url, cred, authdata, postdata, dest, options.timeout,
                     options.retries, options.retry_wait, options.threads,
                     options.verbose)

        if nets and not options.no_citation:
            msg("retrieving network citation info", options.verbose)
            get_citation(nets, options)

        else:
            msg("", options.verbose)

        msg("In case of problems with your request, plese use the contact "
            "form at\n\n"
            "    http://www.orfeus-eu.org/organization/contact/form/\n",
            options.verbose)

    except (IOError, Error) as e:
        msg(str(e))
        return 1

    return 0
Example #15
def main():
    qp = {}

    def add_qp(option, opt_str, value, parser):
        if option.dest == 'query':
            try:
                (p, v) = value.split('=', 1)
                qp[p] = v

            except ValueError:
                raise optparse.OptionValueError("%s expects parameter=value" %
                                                opt_str)

        else:
            qp[option.dest] = value

    parser = optparse.OptionParser(
        usage="Usage: %prog [-h|--help] [OPTIONS] -o file",
        version="%prog " + VERSION,
        add_help_option=False)

    parser.set_defaults(url="http://geofon.gfz-potsdam.de/eidaws/routing/1/",
                        timeout=600,
                        retries=10,
                        retry_wait=60,
                        threads=5)

    parser.add_option("-h",
                      "--help",
                      action="store_true",
                      default=False,
                      help="show help message and exit")

    parser.add_option("-l",
                      "--longhelp",
                      action="store_true",
                      default=False,
                      help="show extended help message and exit")

    parser.add_option("-v",
                      "--verbose",
                      action="store_true",
                      default=False,
                      help="verbose mode")

    parser.add_option("-u",
                      "--url",
                      type="string",
                      help="URL of routing service (default %default)")

    parser.add_option("-y",
                      "--service",
                      type="string",
                      action="callback",
                      callback=add_qp,
                      help="target service (default dataselect)")

    parser.add_option("-N",
                      "--network",
                      type="string",
                      action="callback",
                      callback=add_qp,
                      help="network code or pattern")

    parser.add_option("-S",
                      "--station",
                      type="string",
                      action="callback",
                      callback=add_qp,
                      help="station code or pattern")

    parser.add_option("-L",
                      "--location",
                      type="string",
                      action="callback",
                      callback=add_qp,
                      help="location code or pattern")

    parser.add_option("-C",
                      "--channel",
                      type="string",
                      action="callback",
                      callback=add_qp,
                      help="channel code or pattern")

    parser.add_option("-s",
                      "--starttime",
                      type="string",
                      action="callback",
                      callback=add_qp,
                      help="start time")

    parser.add_option("-e",
                      "--endtime",
                      type="string",
                      action="callback",
                      callback=add_qp,
                      help="end time")

    parser.add_option("-q",
                      "--query",
                      type="string",
                      action="callback",
                      callback=add_qp,
                      metavar="PARAMETER=VALUE",
                      help="additional query parameter")

    parser.add_option("-t",
                      "--timeout",
                      type="int",
                      help="request timeout in seconds (default %default)")

    parser.add_option("-r",
                      "--retries",
                      type="int",
                      help="number of retries (default %default)")

    parser.add_option("-w",
                      "--retry-wait",
                      type="int",
                      help="seconds to wait before each retry "
                      "(default %default)")

    parser.add_option("-n",
                      "--threads",
                      type="int",
                      help="maximum number of download threads "
                      "(default %default)")

    parser.add_option("-c",
                      "--credentials-file",
                      type="string",
                      help="URL,user,password file (CSV format) for queryauth")

    parser.add_option("-a",
                      "--auth-file",
                      type="string",
                      help="file that contains the auth token")

    parser.add_option("-p",
                      "--post-file",
                      type="string",
                      help="request file in FDSNWS POST format")

    parser.add_option("-f",
                      "--arclink-file",
                      type="string",
                      help="request file in ArcLink format")

    parser.add_option("-b",
                      "--breqfast-file",
                      type="string",
                      help="request file in breq_fast format")

    parser.add_option("-o",
                      "--output-file",
                      type="string",
                      help="file where downloaded data is written")

    parser.add_option("-z",
                      "--no-citation",
                      action="store_true",
                      default=False,
                      help="suppress network citation info")

    (options, args) = parser.parse_args()

    if options.help:
        print(__doc__.split("Usage Examples", 1)[0], end="")
        parser.print_help()
        return 0

    if options.longhelp:
        print(__doc__)
        parser.print_help()
        return 0

    if args or not options.output_file:
        parser.print_usage()
        return 1

    if bool(options.post_file) + bool(options.arclink_file) + \
            bool(options.breqfast_file) > 1:
        msg("only one of (--post-file, --arclink-file, --breqfast-file) "
            "can be used")
        return 1

    try:
        cred = {}
        authdata = None
        postdata = None

        if options.credentials_file:
            with open(options.credentials_file) as fd:
                try:
                    for (url, user, passwd) in csv.reader(fd):
                        cred[url] = (user, passwd)

                except (ValueError, csv.Error):
                    raise Error("error parsing %s" % options.credentials_file)

        if options.auth_file:
            with open(options.auth_file) as fd:
                authdata = fd.read()

        if options.post_file:
            with open(options.post_file) as fd:
                postdata = fd.read()

        else:
            parser = None

            if options.arclink_file:
                parser = ArclinkParser()
                parser.parse(options.arclink_file)

            elif options.breqfast_file:
                parser = BreqParser()
                parser.parse(options.breqfast_file)

            if parser is not None:
                if parser.failstr:
                    msg(parser.failstr)
                    return 1

                postdata = parser.postdata

        url = RoutingURL(urlparse.urlparse(options.url), qp)
        dest = open(options.output_file, 'wb')

        nets = route(url, cred, authdata, postdata, dest, options.timeout,
                     options.retries, options.retry_wait, options.threads,
                     options.verbose)

        if nets and not options.no_citation:
            msg("retrieving network citation info", options.verbose)
            get_citation(nets, options)

        else:
            msg("", options.verbose)

        msg(
            "In case of problems with your request, plese use the contact "
            "form at\n\n"
            "    http://www.orfeus-eu.org/organization/contact/form/\n",
            options.verbose)

    except (IOError, Error) as e:
        msg(str(e))
        return 1

    return 0
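The excerpt above calls an msg() helper and handles an Error exception that are defined outside the shown lines. A minimal sketch of compatible stand-ins, inferred only from how they are used here (msg(text) always prints, msg(text, verbose) prints only in verbose mode); both definitions are assumptions about code not included in the excerpt:

# Hypothetical stand-ins for helpers used but not defined in the excerpt.
import sys

class Error(Exception):
    """Application-level error; callers print str(e) and return exit code 1."""
    pass

def msg(s, verbose=True):
    # Status output goes to stderr so it does not mix with downloaded data.
    if verbose:
        sys.stderr.write(s + '\n')
        sys.stderr.flush()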
Example #16
0
def main():
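    # Three fdsnws_fetch argument sets are built up through the option callbacks
    # below: param0 requests network-level metadata (used for the citation info),
    # param1 requests channel-level metadata to derive the timespans to download,
    # and param2 drives the actual dataselect (waveform) requests.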
    param0 = ["-y", "station", "-q", "format=text", "-q", "level=network"]
    param1 = ["-y", "station", "-q", "format=text", "-q", "level=channel"]
    param2 = ["-y", "dataselect", "-z"]
    times = {"starttime": datetime.datetime(1900, 1, 1), "endtime": datetime.datetime(2100, 1, 1)}
    nets = set()

    def add_param0(option, opt_str, value, parser):
        param0.append(opt_str)
        param0.append(value)

    def add_param1(option, opt_str, value, parser):
        param1.append(opt_str)
        param1.append(value)

    def add_param2(option, opt_str, value, parser):
        param2.append(opt_str)
        param2.append(value)

    def add_param(option, opt_str, value, parser):
        add_param0(option, opt_str, value, parser)
        add_param1(option, opt_str, value, parser)
        add_param2(option, opt_str, value, parser)

    def add_time(option, opt_str, value, parser):
        add_param1(option, opt_str, value, parser)

        try:
            t = dateutil.parser.parse(value)

        except ValueError as e:
            raise optparse.OptionValueError("option '%s': invalid time value: '%s'" % (opt_str, value))

        if t.tzinfo is not None:
            t = t.astimezone(dateutil.tz.tzutc()).replace(tzinfo=None)

        times[option.dest] = t

    parser = optparse.OptionParser(
            usage="Usage: %prog [-h|--help] [OPTIONS] -o directory",
            version="%prog " + VERSION)

    parser.set_defaults(
            url="http://geofon.gfz-potsdam.de/eidaws/routing/1/",
            timeout=600,
            retries=10,
            retry_wait=60,
            threads=5,
            max_lines=1000,
            max_timespan=1440)

    parser.add_option("-v", "--verbose", action="store_true", default=False,
                      help="verbose mode")

    parser.add_option("-u", "--url", type="string", action="callback",
                      callback=add_param,
                      help="URL of routing service (default %default)")

    parser.add_option("-N", "--network", type="string", action="callback",
                      callback=add_param1,
                      help="network code or pattern")

    parser.add_option("-S", "--station", type="string", action="callback",
                      callback=add_param1,
                      help="station code or pattern")

    parser.add_option("-L", "--location", type="string", action="callback",
                      callback=add_param1,
                      help="location code or pattern")

    parser.add_option("-C", "--channel", type="string", action="callback",
                      callback=add_param1,
                      help="channel code or pattern")

    parser.add_option("-s", "--starttime", type="string", action="callback",
                      callback=add_time,
                      help="start time")

    parser.add_option("-e", "--endtime", type="string", action="callback",
                      callback=add_time,
                      help="end time")

    parser.add_option("-t", "--timeout", type="int", action="callback",
                      callback=add_param,
                      help="request timeout in seconds (default %default)")

    parser.add_option("-r", "--retries", type="int", action="callback",
                      callback=add_param,
                      help="number of retries (default %default)")

    parser.add_option("-w", "--retry-wait", type="int", action="callback",
                      callback=add_param,
                      help="seconds to wait before each retry (default %default)")

    parser.add_option("-n", "--threads", type="int", action="callback",
                      callback=add_param,
                      help="maximum number of download threads (default %default)")

    parser.add_option("-c", "--credentials-file", type="string", action="callback",
                      callback=add_param2,
                      help="URL,user,password file (CSV format) for queryauth")

    parser.add_option("-a", "--auth-file", type="string", action="callback",
                      callback=add_param2,
                      help="file that contains the auth token")

    parser.add_option("-o", "--output-dir", type="string",
                      help="SDS directory where downloaded data is written")

    parser.add_option("-l", "--max-lines", type="int",
                      help="max lines per request (default %default)")

    parser.add_option("-m", "--max-timespan", type="int",
                      help="max timespan per request in minutes (default %default)")

    parser.add_option("-z", "--no-citation", action="store_true", default=False,
                      help="suppress network citation info")

    parser.add_option("-Z", "--no-check", action="store_true", default=False,
                      help="suppress checking received routes and data")

    (options, args) = parser.parse_args()

    if args or not options.output_dir:
        parser.print_usage(sys.stderr)
        return 1

    def log_alert(s):
        if sys.stderr.isatty():
            s = "\033[31m" + s + "\033[m"

        sys.stderr.write(s + '\n')
        sys.stderr.flush()

    def log_notice(s):
        if sys.stderr.isatty():
            s = "\033[32m" + s + "\033[m"

        sys.stderr.write(s + '\n')
        sys.stderr.flush()

    def log_verbose(s):
        sys.stderr.write(s + '\n')
        sys.stderr.flush()

    def log_silent(s):
        pass

    logs.error = log_alert
    logs.warning = log_alert
    logs.notice = log_notice
    logs.info = (log_silent, log_verbose)[options.verbose]
    logs.debug = log_silent

    try:
        try:
            proc = exec_fetch(param1, None, options.verbose, options.no_check)

        except OSError as e:
            logs.error(str(e))
            logs.error("error running fdsnws_fetch")
            return 1

        timespan = {}

        for line in proc.stdout:
            if isinstance(line, bytes):
                line = line.decode('utf-8')

            if not line or line.startswith('#'):
                continue

            starttime = max(dateutil.parser.parse(line.split('|')[15]), times['starttime'])

            try:
                endtime = min(dateutil.parser.parse(line.split('|')[16]), times['endtime'])

            except ValueError:
                # newer dateutil raises ValueError for an empty end time instead of returning the current time
                endtime = min(datetime.datetime.now(), times['endtime'])

            if starttime.tzinfo is not None:
                starttime = starttime.astimezone(dateutil.tz.tzutc()).replace(tzinfo=None)

            if endtime.tzinfo is not None:
                endtime = endtime.astimezone(dateutil.tz.tzutc()).replace(tzinfo=None)

            try:
                ts = timespan[tuple(line.split('|')[:4])]

                if ts.start > starttime:
                    ts.start = starttime
                    ts.current = starttime

                if ts.end < endtime:
                    ts.end = endtime

            except KeyError:
                timespan[tuple(line.split('|')[:4])] = Timespan(starttime, endtime)

        proc.stdout.close()
        proc.wait()

        if proc.returncode != 0:
            logs.error("error running fdsnws_fetch")
            return 1

        if os.path.exists(options.output_dir):
            scan_sds(options.output_dir, timespan, nets)

        while len(timespan) > 0:
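            # Chunked download: pick up to --max-lines remaining (net, sta, loc,
            # cha) spans, request at most --max-timespan minutes of each via an
            # FDSNWS POST body, then advance or drop each span depending on
            # whether any data arrived.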
            postdata = ""

            # random.sample() needs a sequence; materialise the dict items first
            # (passing a dict view directly fails on newer Python versions).
            ts_used = random.sample(list(timespan.items()), min(len(timespan), options.max_lines))

            for ((net, sta, loc, cha), ts) in ts_used:
                te = min(ts.end, ts.start + datetime.timedelta(minutes=options.max_timespan))

                if loc == '':
                    loc = '--'

                postdata += "%s %s %s %s %sZ %sZ\n" \
                            % (net, sta, loc, cha, ts.start.isoformat(), te.isoformat())

            if not isinstance(postdata, bytes):
                postdata = postdata.encode('utf-8')

            try:
                proc = exec_fetch(param2, postdata, options.verbose, options.no_check)

            except OSError as e:
                logs.error(str(e))
                logs.error("error running fdsnws_fetch")
                return 1

            got_data = False

            try:
                for rec in mseedlite.Input(proc.stdout):
                    try:
                        ts = timespan[(rec.net, rec.sta, rec.loc, rec.cha)]

                    except KeyError:
                        logs.warning("unexpected data: %s.%s.%s.%s" % (rec.net, rec.sta, rec.loc, rec.cha))
                        continue

                    if rec.end_time <= ts.current:
                        continue

                    # SeisComP SDS layout:
                    #   <root>/<year>/<net>/<sta>/<cha>.D/<net>.<sta>.<loc>.<cha>.D.<year>.<doy>
                    sds_dir = "%s/%d/%s/%s/%s.D" \
                              % (options.output_dir, rec.begin_time.year, rec.net, rec.sta, rec.cha)

                    sds_file = "%s.%s.%s.%s.D.%s" \
                              % (rec.net, rec.sta, rec.loc, rec.cha, rec.begin_time.strftime('%Y.%j'))

                    if not os.path.exists(sds_dir):
                        os.makedirs(sds_dir)

                    with open(sds_dir + '/' + sds_file, 'ab') as fd:
                        fd.write(rec.header + rec.data)

                    ts.current = rec.end_time
                    nets.add((rec.net, rec.begin_time.year))
                    got_data = True

            except mseedlite.MSeedError as e:
                logs.error(str(e))

            proc.stdout.close()
            proc.wait()

            if proc.returncode != 0:
                logs.error("error running fdsnws_fetch")
                return 1

            for ((net, sta, loc, cha), ts) in ts_used:
                if not got_data:
                    # no progress, skip to next segment
                    ts.start += datetime.timedelta(minutes=options.max_timespan)

                else:
                    # continue from current position
                    ts.start = ts.current

                if ts.start >= ts.end:
                    # timespan completed
                    del timespan[(net, sta, loc, cha)]

        if nets and not options.no_citation:
            logs.info("retrieving network citation info")
            get_citation(nets, param0, options.verbose)

    except (IOError, Error) as e:
        logs.error(str(e))
        return 1

    return 0
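Example #16 constructs Timespan objects and reads and updates their start, end and current attributes, but the class itself lies outside the excerpt. A minimal sketch of a compatible container, assuming current simply begins at start:

# Hypothetical Timespan container matching how Example #16 uses it.
class Timespan(object):
    def __init__(self, start, end):
        self.start = start      # earliest time still needed
        self.end = end          # end of the requested window
        self.current = start    # end time of the last record already written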
Example #17
0
    def contribute_to_subparser(self, parser):
        parser.set_defaults(function=lambda args: parser.print_usage())
        subparsers = parser.add_subparsers(title='commands')

        setup_parser = subparsers.add_parser('setup', help='setup Google Drive API access through OAuth')
        setup_parser.set_defaults(function=self.setup)
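Example #17 shows only the method that registers subcommands. Below is a hedged sketch of how such a method might be driven from a top-level argparse parser; everything except the method body (the DriveCommand name, the setup stub and the main() wiring) is an assumption made for illustration.

# Hypothetical wiring for Example #17; only argparse itself is a known dependency.
import argparse

class DriveCommand(object):
    def setup(self, args):
        print("running OAuth setup...")   # placeholder for the real OAuth flow

    def contribute_to_subparser(self, parser):
        parser.set_defaults(function=lambda args: parser.print_usage())
        subparsers = parser.add_subparsers(title='commands')
        setup_parser = subparsers.add_parser('setup', help='setup Google Drive API access through OAuth')
        setup_parser.set_defaults(function=self.setup)

def main(argv=None):
    parser = argparse.ArgumentParser(prog='drive')
    DriveCommand().contribute_to_subparser(parser)
    args = parser.parse_args(argv)
    args.function(args)   # with no subcommand, this falls back to print_usage()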
Example #18
0
                      default=False,
                      help='Import allCountries.txt')
    parser.add_option(
        '--import-alt',
        dest='importAlt',
        action='store_true',
        default=False,
        help='Import alternateNames.txt (must happen before --import-gn)')
    (options, args) = parser.parse_args()

    if len(args) == 0:
        filepath = os.getcwd()
    elif len(args) == 1:
        filepath = args[0]  # the single positional argument
    else:
        parser.print_usage()
        exit()

    # === Do some initial setup of useful globals ===
    # --- Like the configuration file :) ---
    localdir = os.path.dirname(os.path.abspath(__file__))
    _config = SafeConfigParser()
    fileList = ["%s/geonames.cfg" % localdir]
    if options.configFile is not None:
        fileList.append(options.configFile)
    _config.read(fileList)

    # --- And the MySQL connection ---
    _geonamesDB = MySQL.connect(_config.get('MySQL', 'host'),
                                _config.get('MySQL', 'user'),
                                _config.get('MySQL', 'password'),
def main():
    # === Extract options ===
    parser = OptionParser(usage="usage: %prog [options] <# of seconds to audit>")
    parser.add_option("-c", "--config", dest='configFile', default=None, help='Path to configuration file')
    parser.add_option("-g", "--gracePeriod", dest='gracePeriod', default=0, help='Number of seconds from now backwards to ignore')
    parser.add_option("-i", "--historyFile", dest='historyFile', default=None, help='Stores any pending transactions and the last run time')
    parser.add_option('-l', "--logFile", dest='logFile', default=None, help='Saves a log of all Amazon transactions')
    (options, args) = parser.parse_args()

    if len(args) != 1:
        parser.print_usage()
        exit()

    startTime = datetime.fromtimestamp(int(time.time()) - int(args[0]), pytz.utc)
    endTime = datetime.fromtimestamp(int(time.time()) - int(options.gracePeriod), pytz.utc)
    print("AWS refund audit requested from %s to %s" % (startTime.isoformat(), endTime.isoformat()))

    # === Get the configuration options ===
    config = SafeConfigParser()
    fileList = ['./amazon-config.cfg']
    if options.configFile is not None:
        fileList.append(options.configFile)
    config.read(fileList)

    # === Open up ze STOMP ===
    sc = DistStomp(config.get('Stomp', 'server'), config.getint('Stomp', 'port'))
    sc.connect()

    # === Connection to Amazon ===
    aws = Amazon(
        awsEndpoint = config.get('AwsConfig', 'endpoint'),
        awsAccessKey = config.get('AwsConfig', 'accessKey'),
        awsSecret = config.get('AwsConfig', 'secretKey')
    )

    # === Connection to MySQL ===
    dbcon = MySQL.connect(
        config.get('MySQL', 'host'),
        config.get('MySQL', 'user'),
        config.get('MySQL', 'password'),
        config.get('MySQL', 'schema')
    )

    # === Open up the history and log files ===
    # If the history file exists, it will modify the start time of this script to be the end time of the
    # history file.
    hfile = None
    historyStart = startTime
    historyEnd = endTime
    if options.historyFile and os.path.exists(options.historyFile):
        hfile = open(options.historyFile, 'r')
        if hfile.readline().strip() == AWS_HISTORY_FILE_VERSTR:
            historyStart = dateutil.parser.parse(hfile.readline().strip())
            historyEnd = dateutil.parser.parse(hfile.readline().strip())
            startTime = historyEnd
            print("History file modified search period, now %s to %s" % (startTime.isoformat(), endTime.isoformat()))
    else:
        print('Not starting with a valid history file.')

    sfile = None
    if options.logFile:
        sfile = open(options.logFile, 'a')
        sfile.write("!!! Starting run for dates %s -> %s\n" % (startTime.isoformat(), endTime.isoformat()))

    # === Sanity checks ===
    if endTime < startTime:
        startTime = endTime

    # === Main Application ===
    # --- Process all previously pending transactions from the history file. If the transaction is still in some form
    #     of pending, add it back to the history list.
    historyCount = 0
    historyList = []
    historyStats = {
        'Success': 0,
        'Pending': 0,
        'Failed': 0,
        'Ignored': 0
    }
    if hfile:
        print("Processing history file")
        for txn in hfile:
            historyCount += 1
            txn = json.loads(txn)
            result = processTransaction(txn, dbcon, aws, sc, sfile, config)
            historyStats[result] += 1
            if result == 'Pending':
                historyList.append(txn)
        hfile.close()

    # --- Obtain AWS history ---
    print("Obtaining AWS transactions for the period %s -> %s" % (startTime.isoformat(), endTime.isoformat()))
    awsTransactions = aws.getAccountActivity(startTime, endDate=endTime, fpsOperation='Pay')
    print("Obtained %d transactions" % len(awsTransactions))

    # --- Main loop: checks each aws transaction against the Civi database; adding it if it doesn't exist ---
    txncount = 0
    for txn in awsTransactions:
        txncount += 1
        result = processTransaction(txn, dbcon, aws, sc, sfile, config)
        historyStats[result] += 1
        if result == 'Pending':
            historyList.append(txn)

    print("\n--- Finished processing of messages. ---\n")

    # --- Prepare the history file for write ---
    if options.historyFile:
        print("Rewriting history file with %d transactions" % len(historyList))
        hfile = open(options.historyFile, 'w')
        hfile.write("%s\n%s\n%s\n" % (AWS_HISTORY_FILE_VERSTR, historyStart.isoformat(), endTime.isoformat()))
        for txn in historyList:
            hfile.write("%s\n" % json.dumps(txn))
        print("Flushing history file in preparation for main loop")
        hfile.flush()

    # --- Final statistics ---
    print("%d new AWS messages" % txncount)
    print(" Additionally %d messages were processed from history" % historyCount)
    print("This resulted in the following:")
    for entry in historyStats.items():
        print(" %s Messages: %d" % entry)

    # === Final Application Cleanup ===
    print("\nCleaning up.")
    dbcon.close()
    sc.disconnect()

    if hfile:
        hfile.close()
    if sfile:
        sfile.close()

    time.sleep(1)   # Let the STOMP library catch up
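The audit script above persists state in a simple line-oriented history file: a version string, the period start and end as ISO timestamps, and then one JSON-encoded pending transaction per line. A minimal standalone reader for that format, assuming the same AWS_HISTORY_FILE_VERSTR constant, might look like this:

# Sketch of a reader for the history file format written by the script above.
import json
import dateutil.parser

def read_history(path, expected_version):
    """Return (start, end, pending_txns), or None if the version line differs."""
    with open(path, 'r') as hfile:
        if hfile.readline().strip() != expected_version:
            return None
        start = dateutil.parser.parse(hfile.readline().strip())
        end = dateutil.parser.parse(hfile.readline().strip())
        pending = [json.loads(line) for line in hfile if line.strip()]
    return start, end, pending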