# Example #1
# 0
def get_opts(args=None):
    """Build the CLI parser, parse *args* (or sys.argv), and return options.

    Aborts via ``parser.error`` when no ``--prefix`` values were supplied,
    since an empty prefix list would match no containers at all.
    """
    parser = argparse.ArgumentParser()
    parser.add_argument('--max-run-time',
                        type=timedelta_type,
                        help="Maximum time a container is allows to run. "
                             "Time may be specified in any pytimeparse "
                             "supported format.")
    parser.add_argument('--prefix', action="append", default=[],
                        help="Only stop containers which match one of the prefix.")
    parser.add_argument('--dry-run', action="store_true",
                        help="Only log actions, don't stop anything.")
    parser.add_argument('-t', '--timeout', type=int, default=60,
                        help="HTTP timeout in seconds for making docker API calls.")
    parsed = parser.parse_args(args=args)

    # An empty prefix list would silently match nothing; refuse to run.
    if not parsed.prefix:
        parser.error("Running with no --prefix will match nothing.")

    return parsed
# Example #2
# 0
def run():
    """Parse CLI options, resolve the target server, and run the scanner.

    Expects one positional argument (the group to index); exits via
    ``parser.error`` when it is missing.
    """
    import optparse

    parser = optparse.OptionParser()
    # BUG FIX: optparse does not coerce defaults, so a default of int 1 for a
    # type="str" option made options.server.isdigit() below raise
    # AttributeError whenever -s was not given.  The default must be a string.
    parser.add_option("-s", "--server", type="str", default="1")
    parser.add_option("-b", "--batch", type="int", default=10)
    parser.add_option("-c", "--max-connections", type="int", default=3)
    parser.add_option("-d", "--days", type="int", default=7)
    parser.add_option("-l", "--loglevel", default="info")

    options, args = parser.parse_args()
    if not args:
        # parser.error() raises SystemExit, so no explicit return is needed
        # (the original's "return 1" was unreachable).
        parser.error("specify a group")

    logging.basicConfig(level=getattr(logging, options.loglevel.upper()))

    # A numeric --server is treated as a database primary key; anything else
    # is passed through as-is (e.g. a hostname).
    if options.server.isdigit():
        server = Server.objects.get(pk=int(options.server))
    else:
        server = options.server

    logging.info("[+] using server %s" % (server,))
    scanner = Scanner(server, options.max_connections)
    scanner.start()
    scanner.index(args[0], days=options.days, batch=options.batch)
    scanner.run()
# Example #3
# 0
def get_opts(args=None):
    """Parse command-line options and return the resulting namespace.

    Exits (via the parser) when no ``--prefix`` was given, because an empty
    prefix list would match no containers.
    """
    cli = argparse.ArgumentParser()
    cli.add_argument(
        '--max-run-time',
        type=timedelta_type,
        help="Maximum time a container is allows to run. Time may be "
             "specified in any pytimeparse supported format.")
    cli.add_argument(
        '--prefix',
        action="append",
        default=[],
        help="Only stop containers which match one of the prefix.")
    cli.add_argument(
        '--dry-run',
        action="store_true",
        help="Only log actions, don't stop anything.")
    cli.add_argument(
        '-t', '--timeout',
        type=int,
        default=60,
        help="HTTP timeout in seconds for making docker API calls.")
    parsed = cli.parse_args(args=args)

    # Refuse to run with nothing to match.
    if not parsed.prefix:
        cli.error("Running with no --prefix will match nothing.")

    return parsed
def main():
    """Entry point: load configuration and listen for pulse build messages."""
    parser = OptionParser()
    parser.add_option("-c", "--config", dest="config",
                      help="Configuration file")
    options = parser.parse_args()[0]

    # BUG FIX: validate the flag BEFORE using it.  The original called
    # config.read(options.config) first and only then checked that a path
    # had been supplied at all.
    if not options.config:
        parser.error('Configuration file is required')

    config = ConfigParser()
    try:
        config.read(options.config)
    except Exception:  # narrowed from a bare except (never swallow SystemExit)
        parser.error("Could not open configuration file")

    def got_message(*args, **kwargs):
        # Bind the parsed config into the consumer callback.
        receive_message(config, *args, **kwargs)

    verbosity = {True: log.DEBUG, False: log.WARN}
    log.basicConfig(
        format='%(asctime)s %(message)s',
        level=verbosity[config.getboolean('shipit-notifier', 'verbose')]
    )

    # Adjust applabel when wanting to run shipit on multiple machines
    pulse = consumers.BuildConsumer(applabel='shipit-notifier', ssl=False)
    pulse.configure(topic='build.#.finished',
                    durable=True, callback=got_message)

    log.info('listening for pulse messages')
    pulse.listen()
def get_team_sessions(cursor):
    """Recursively fetch all device sessions for the whole Dropbox team.

    Calls the /2/team/devices/list_team_devices endpoint and follows the
    ``cursor`` pagination token (recursing) while ``has_more`` is true.
    Relies on the module-level ``args``, ``token``, ``parser`` and
    ``list_sessions`` names.  Python 2 only (urllib2, ``except X, e``).
    """

    # Which session categories to include, driven by the CLI flags.
    data = {
        'include_web_sessions': args.web,
        'include_desktop_clients': args.desktop,
        'include_mobile_clients': args.mobile
    }

    # Only pass a cursor when continuing from a previous page.
    if cursor is not None:
        data["cursor"] = cursor

    request = urllib2.Request('https://api.dropboxapi.com/2/team/devices/list_team_devices', json.dumps(data))
    request.add_header("Authorization", "Bearer "+token)
    request.add_header("Content-type", 'application/json')

    try:
        response = json.loads(urllib2.urlopen(request).read())

        returned_sessions = []
        # Flatten the per-device session lists into one list.
        for d in response["devices"]:
            returned_sessions = returned_sessions + list_sessions(d['team_member_id'], None, d, True)
        # Recurse to pick up the remaining pages.
        if response["has_more"]:
            returned_sessions = returned_sessions + get_team_sessions(cursor=response["cursor"])
        return returned_sessions
    except urllib2.HTTPError, error:
        # Surface the API error body as a CLI error (exits the process).
        parser.error(error.read())
# Example #6
# 0
def main(argv):
    """MAIN: show and/or plot statistics for a YouTube user's uploads.

    ``argv`` is the full argument vector (argv[0] is the program name);
    exactly one positional argument — the YouTube username — is expected.
    """
    parser = OptionParser(usage='%prog [options] <youtube_username>')
    parser.add_option("--show", dest="show", action="store_true", default=False,
            help="show the feed entries")
    parser.add_option("--output", dest="output", default="-",
            help="save output to the FILE")
    parser.add_option("--plot", dest="plot", action="store_true", default=False,
            help="plot chart with statistics")
    parser.add_option("--plot-file", dest="plot_file",
            help="plot chart to the FILE")
    (opts, args) = parser.parse_args(argv)

    # BUG FIX: args[0] is the program name and args[1] the username, so a
    # valid invocation leaves exactly two entries.  The original tested
    # ``len(args) == 1``, which let extra positional arguments through.
    if len(args) != 2:
        parser.error("incorrect number of arguments")

    if not opts.show and not opts.plot:
        parser.error("no action asked (--show or --plot)")

    username = args[1]
    data = get_user_uploads(username)
    if opts.show:
        print_video_feed(username, data, opts.output)
    if opts.plot:
        # Explicit --plot-file wins; otherwise derive '<username>.png'.
        if opts.plot_file:
            outfile = opts.plot_file
        else:
            outfile = "%s.png" % username
        plot_video_stat(username, data, outfile)
# Example #7
# 0
def get_params():
    """Parse CLI args and require both the org and the token parameters.

    ``parser.error`` prints the message and exits with status 2, so the
    original's trailing ``sys.exit(1)`` was unreachable and has been removed.
    """
    parser = get_params_parser()
    args = parser.parse_args()

    if not args.org or not args.token:
        parser.error("token and org params must be provided.")

    return args
# Example #8
# 0
def get_params():
    """Parse CLI args, insisting on both --org and --token being set.

    Note: ``parser.error`` already exits the process (status 2), which makes
    the original's follow-up ``sys.exit(1)`` dead code; it has been dropped.
    """
    parser = get_params_parser()
    args = parser.parse_args()

    if not args.org or not args.token:
        parser.error("token and org params must be provided.")

    return args
def deactivate_sessions(sessions):
    """Revoke the given device sessions via the Dropbox team API.

    ``sessions`` is a list of revoke descriptors accepted by the
    /2/team/devices/revoke_device_session_batch endpoint.  Uses the
    module-level ``token`` and ``parser``.  Python 2 only (print statement,
    ``except X, e`` syntax).
    """
    request = urllib2.Request('https://api.dropboxapi.com/2/team/devices/revoke_device_session_batch',
                              json.dumps({'revoke_devices': sessions}))
    request.add_header("Authorization", "Bearer "+token)
    request.add_header("Content-type", 'application/json')
    try:
        json.loads(urllib2.urlopen(request).read())
        print 'Deactivated ' + str(len(sessions)) + ' session(s).'
    except urllib2.HTTPError, error:
        # Surface the API error body as a CLI error (exits the process).
        parser.error(error.read())
# Example #10
# 0
    def build_staging(self, args, parser):
        """restore most recent production snapshot to NEW staging database"""
        # NOTE(review): this is a generator-based command runner — each
        # ``yield`` hands an AWS CLI invocation to the surrounding framework,
        # which sends back (retcode, stdout, stderr).  Presumably stdout is
        # None in dry-run mode — confirm against the framework.
        # Look up production instance info (vpc subnet group, etc.)
        (_retcode, stdout,
         _stderr) = yield SHH, self.local['aws']['rds',
                                                 'describe-db-instances',
                                                 '--db-instance-identifier',
                                                 args.from_name, ]

        if stdout is None:
            subnet_group_name = 'DRY-RUN'
        else:
            try:
                # Exactly one instance is expected for the identifier;
                # the 1-tuple unpack enforces that.
                (description, ) = json.loads(stdout)['DBInstances']
                subnet_group_name = description['DBSubnetGroup'][
                    'DBSubnetGroupName']
            except (KeyError, TypeError, ValueError):
                print(stdout)
                raise ValueError('unexpected response')

        def snapshot_datetime(data):
            # Sort key: creation time of an automated snapshot.
            timestamp = data['SnapshotCreateTime']
            return dateutil.parser.parse(timestamp)

        # List the automated snapshots of the production instance.
        (_retcode, stdout,
         _stderr) = yield SHH, self.local['aws']['rds',
                                                 'describe-db-snapshots',
                                                 '--snapshot-type',
                                                 'automated',
                                                 '--db-instance-identifier',
                                                 args.from_name, ]

        if stdout is None:
            snapshot_id = 'DRY-RUN'
        else:
            try:
                snapshots = json.loads(stdout)['DBSnapshots']
                snapshots_available = (snapshot for snapshot in snapshots
                                       if snapshot['Status'] == 'available')
                # Newest snapshot first; pick the most recent available one.
                snapshots_sorted = sorted(snapshots_available,
                                          key=snapshot_datetime,
                                          reverse=True)
                snapshot_id = snapshots_sorted[0]['DBSnapshotIdentifier']
            except IndexError:
                parser.error(f"{args.from_name} has no snapshots available")
            except (KeyError, TypeError, ValueError):
                print(stdout)
                raise ValueError('unexpected response')

        # Restore the chosen snapshot into the (new) staging instance.
        yield self.local['aws']['rds', 'restore-db-instance-from-db-snapshot',
                                '--no-publicly-accessible',
                                '--db-subnet-group-name', subnet_group_name,
                                '--db-instance-identifier', args.name,
                                '--db-snapshot-identifier', snapshot_id, ]
# Example #11
# 0
def main():
    """Extract issue changelog data from a Jira project and write it as CSV.

    Connection settings come from the JIRA_DOMAIN / JIRA_EMAIL / JIRA_APIKEY
    environment variables or the -d/-e/-k flags; the positional arguments
    select the project and the extraction start date.
    """
    # Environment-variable defaults for the connection options.
    domain = os.getenv('JIRA_DOMAIN', '')
    email = os.getenv('JIRA_EMAIL', '')
    apikey = os.getenv('JIRA_APIKEY', '')

    parser = argparse.ArgumentParser(description='Extract issue changelog data from a Jira Project')
    parser.add_argument('project', help='Jira project from which to extract issues')
    parser.add_argument('since', help='Date from which to start extracting issues (yyyy-mm-dd)')
    parser.add_argument('--updates-only', action='store_true', help='''
        When passed, instead of extracting issues created since the since argument,
        only issues *updated* since the since argument will be extracted.''')
    parser.add_argument('--append', action='store_true', help='Append to the output file instead of overwriting it.')
    parser.add_argument('--anonymize', action='store_true', help='Anonymize the data output (no issue titles, project keys, etc).')
    parser.add_argument('-d', '--domain', default=domain,
                        help='Jira project domain url (i.e., https://company.atlassian.net). Can also be provided via JIRA_DOMAIN environment variable.')
    parser.add_argument('-e', '--email',  default=email,  help='Jira user email address for authentication. Can also be provided via JIRA_EMAIL environment variable.')
    parser.add_argument('-k', '--apikey', default=apikey, help='Jira user api key for authentication. Can also be provided via JIRA_APIKEY environment variable.')
    parser.add_argument('-o', '--output', default='out.csv', help='File to store the csv output.')
    parser.add_argument('-q', '--quiet', action='store_true', help='Be quiet and only output warnings to console.')

    parser.add_argument('-f', '--field', metavar='FIELD_ID', action='append', help='Include one or more custom fields in the query by id.')
    parser.add_argument('-n', '--name', metavar='FIELD_NAME', action='append', help='Corresponding output column names for each custom field.')

    args = parser.parse_args()

    if not args.quiet:
        logging.basicConfig(level=logging.INFO)

    # All three connection settings are mandatory, however they were supplied.
    if not all((args.domain, args.email, args.apikey)):
        parser.error("""The JIRA_DOMAIN, JIRA_EMAIL, and JIRA_APIKEY environment variables """
                     """must be set or provided via the -d -e -k command line flags.""")
        return  # NOTE(review): unreachable — parser.error() raises SystemExit.

    logging.info('connecting to {} with {} email...'.format(args.domain, args.email))

    client = Client(args.domain, email=args.email, apikey=args.apikey)

    # Append vs. overwrite mode for the output CSV.
    mode = 'a' if args.append else 'w'

    # Normalize bare field ids to Jira's 'customfield_<id>' form.
    custom_fields = [k if k.startswith('customfield') else 'customfield_{}'.format(k) for k in args.field] if args.field else []
    # Pad missing display names with the raw custom-field ids.
    custom_field_names = list(args.name or []) + custom_fields[len(args.name or []):]

    with open(args.output, mode, newline='') as csv_file:
        logging.info('{} opened for writing (mode: {})...'.format(args.output, mode))
        generate_csv(client, csv_file, args.project,
                     since=args.since,
                     custom_fields=custom_fields,
                     custom_field_names=custom_field_names,
                     updates_only=args.updates_only,
                     write_header=not args.append,
                     anonymize=args.anonymize)
def main():
    """The main function for generating the mailer.

    Based on the arguments, the mailer generates a Project object with its
    appropriate Section and Tasks objects, and then renders templates
    accordingly. This can either be written out to two files, or can be mailed
    out using a SMTP server running on localhost.
    """

    parser = create_cli_parser()
    args = parser.parse_args()

    # Sending mail requires both a From: and at least one To: address;
    # having exactly one of the two is a usage error.
    if bool(args.from_address) != bool(args.to_addresses):
        parser.error("'To:' and 'From:' address are required for sending email")

    asana = AsanaAPI(args.api_key)
    # Python 2 only: ``unicode`` normalizes the CLI filter strings.
    filters = frozenset((unicode(filter) for filter in args.tag_filters))
    section_filters = frozenset((unicode(section + ":") for section in args.section_filters))
    current_time_utc = datetime.datetime.now(dateutil.tz.tzutc())
    current_date = str(datetime.date.today())
    project = Project.create_project(
        asana,
        args.project_id,
        current_time_utc,
        task_filters=filters,
        section_filters=section_filters,
        completed_lookback_hours=args.completed_lookback_hours,
    )
    rendered_html, rendered_text = generate_templates(
        project, args.html_template, args.text_template, current_date, current_time_utc, args.skip_inline_css
    )

    # Mail when both address ends are present; otherwise write the rendered
    # templates out to files.
    if args.to_addresses and args.from_address:
        if args.cc_addresses:
            cc_addresses = args.cc_addresses[:]
        else:
            cc_addresses = None
        send_email(
            project,
            args.mail_server,
            args.from_address,
            args.to_addresses[:],
            cc_addresses,
            rendered_html,
            rendered_text,
            current_date,
            args.username,
            args.password,
        )
    else:
        write_rendered_files(rendered_html, rendered_text, current_date)
    log.info("Finished")
# Example #13
# 0
def _entrypoint_main():
    """Script entry point: dump recent events once (--get) or run the bot."""

    # aiohttp fix for Windows
    # https://github.com/aio-libs/aiohttp/issues/4324

    if os.name == 'nt':
        import aiohttp
        # Only aiohttp < 4 needs the selector event loop policy on Windows.
        old = (tuple(int(v) for v in aiohttp.__version__.split(".")) < (4, ))
        if old:
            asyncio.set_event_loop_policy(
                asyncio.WindowsSelectorEventLoopPolicy())

    args = init_args()

    if args.get:

        async def get_events_once():
            """Fetch and print the 10 most recent events, logging to stdout."""
            try:
                import logging
                log = logging.getLogger("ao_killboard")
                log_formatter = logging.Formatter("%(message)s",
                                                  "%Y-%m-%d %H:%M:%S")
                log_handler = logging.StreamHandler(stream=sys.stdout)
                log_handler.setFormatter(log_formatter)
                log.addHandler(log_handler)
                log.setLevel(logging.DEBUG)
                async with httpx.AsyncClient() as client:
                    await get_events(URL_EVENTS,
                                     client,
                                     log,
                                     num=10,
                                     print_events=True)
            except httpx.TimeoutException:
                # Best effort: a timeout simply produces no output.
                pass

        asyncio.run(get_events_once())
    else:
        try:
            assert_not_none(args.token, "TOKEN")
            assert_not_none(args.guild, "GUILD")
            assert_not_none(args.channel, "CHANNEL")
        except ValueError as exc:
            # BUG FIX: no ``parser`` object exists in this scope, so the
            # original ``parser.error(exc)`` would itself crash with a
            # NameError; exit with the validation message instead.
            sys.exit(str(exc))

        os.environ["AO_KILLBOARD_RETAIN_ARGV"] = "1"

        import discord.ext.commands
        # no reasonable command prefix
        bot = discord.ext.commands.Bot(command_prefix="ti9uPeaGh8")
        bot.load_extension("ao_killboard")
        bot.run(args.token)
    sys.exit(0)
# Example #14
# 0
def main():
    """Display voltage/current from aaptos devices, live or from the database.

    In --live mode the SOAP server is polled; otherwise --from and --to
    (ISO 8601) delimit the database window to plot.
    """
    usage = """%prog [options]"""
    description = """A simple script to display voltage/current from aaptos devices.
Support for both live stream (from the SOAP server) or database inspection."""
    parser = OptionParser(usage=usage, add_help_option=True, description=description)
    parser.add_option("-l", "--live", action="store_true", dest="live", default=False,
                      help="use the live stream from the SOAP server")
    parser.add_option("-f", "--from", action="store", type="string", dest="beginning",
                      help="beginning of the period to plot, in ISO 8601 format, YYYY-MM-DDTHH:MM:SS[.mmmmmm][+HH:MM]")
    parser.add_option("-t", "--to", action="store", type="string", dest="end",
                      help="end of the period to plot, in ISO 8601 format, YYYY-MM-DDTHH:MM:SS[.mmmmmm][+HH:MM]")
    parser.add_option("-b", "--buffer", action="store", type="int", dest="bufferdepth", default=500,
                      help="in live mode, depth of the value buffer. When exceeded, first values will be dropped from the display")
    parser.add_option("-p", "--poll", action="store", type="int", dest="pollingTime", default=AaptosSettings.PoolDelay,
                      help="polling time in seconds")
    (options, args) = parser.parse_args()
    if options.live:
        # Live mode and a database window are mutually exclusive.
        if options.beginning is not None or options.end is not None:
            parser.error("options --from and --to are incompatible with --live")
        main_live(options.bufferdepth, options.pollingTime)
    else:
        if options.beginning is None or options.end is None:
            parser.error("options --from and --to are both mandatory to access the database")
        try:
            initialTime = dateutil.parser.parse(options.beginning)
        except ValueError:
            parser.error("--from: unknown string format")
        try:
            finalTime = dateutil.parser.parse(options.end)
        except ValueError:
            # BUG FIX: this branch validates --to, but the original
            # copy-pasted error message blamed --from.
            parser.error("--to: unknown string format")
        main_db(initialTime, finalTime)
def get_dfb_member(tag, value):
    """Look up a single team member via /2/team/members/get_info.

    ``tag`` selects the member selector type (e.g. 'email'), ``value`` is the
    corresponding value.  Exits via ``parser.error`` when the member is not
    on the team or the API call fails.  Uses the module-level ``token`` and
    ``parser``.  Python 2 only (urllib2, ``except X, e`` syntax).
    """
    request = urllib2.Request('https://api.dropbox.com/2/team/members/get_info',
                              json.dumps({ 'members': [{'.tag': tag, tag: value}]}))
    request.add_header("Authorization", "Bearer "+token)
    request.add_header("Content-type", 'application/json')

    try:
        response = json.loads(urllib2.urlopen(request).read())
        # The API echoes one result per requested member; 'id_not_found'
        # means the selector matched nobody on this team.
        if 'id_not_found' in response[0]:
            parser.error("Member "+value+" is not on the team")
        return response[0]

    # Exit on error here.  Probably user not found or bad OAuth token.  Show response.
    except urllib2.HTTPError, error:
        parser.error(error.read())
# Example #16
# 0
def logtail_parse_args():
    """Parse the logtail CLI options and merge them with configuration.

    Every option falls back to the value stored in the selected
    configuration profile (--profile) via interpolate_config().
    Returns an (AttrDict of options, positional args) tuple; exits via
    ``parser.error`` when --parser is used without --field.
    """
    usage = "%prog " "--date-format <date_format>" "--start-date <start_date> "

    parser = OptionParser(usage=usage)

    parser.add_option(
        "--date-format", dest="date_format", help="Date format (Using date utility notation, e.g '%Y-%m-%d')"
    )
    parser.add_option("--start-date", dest="start_date", help="Start date expression (e.g '120 minutes ago')")

    parser.add_option(
        "--parser",
        dest="parser",
        help="Feed logs through a parser. Useful when reading encoded/escaped formats (e.g JSON) and when "
        "selecting parsed fields rather than matching via regular expression.",
    )
    parser.add_option(
        "-d",
        "--delimiter",
        dest="delimiter",
        help="Delimiter character for field-separation (when not using a --parser)",
    )
    parser.add_option("-f", "--field", dest="field", help="Index of field to use for filtering against")
    parser.add_option("-p", "--print", dest="printlines", action="store_true", help="Print non-filtered lines")

    parser.add_option(
        "-P",
        "--profile",
        dest="profile",
        default="logtail",
        help="Configuration profile (section in configuration file)",
    )

    options, args = parser.parse_args()

    # Interpolate from configuration and open filehandle
    options.date_format = interpolate_config(options.date_format, options.profile, "date_format")
    options.start_date = interpolate_config(options.start_date, options.profile, "start_date")
    options.field = interpolate_config(options.field, options.profile, "field")
    options.delimiter = interpolate_config(options.delimiter, options.profile, "delimiter", default=" ")
    options.parser = interpolate_config(options.parser, options.profile, "parser", default=False)
    options.printlines = interpolate_config(options.printlines, options.profile, "print", default=False, type=bool)

    # Parser-based matching selects a specific field, so --field is required
    # whenever a --parser is configured.
    if options.parser and not options.field:
        parser.error("Must supply --field parameter when using parser-based matching.")

    return AttrDict(options.__dict__), args
def get_member_sessions(email):
    """Fetch all device sessions of a single team member, looked up by email.

    Resolves the member id via get_dfb_member(), then calls the
    /2/team/devices/list_member_devices endpoint.  Relies on the
    module-level ``args``, ``token``, ``parser`` and ``list_sessions``.
    Python 2 only (urllib2, ``except X, e`` syntax).
    """
    member_id = get_dfb_member('email', email)['profile']['team_member_id']
    # Which session categories to include, driven by the CLI flags.
    data = {
        'include_web_sessions': args.web,
        'include_desktop_clients': args.desktop,
        'include_mobile_clients': args.mobile,
        'team_member_id': member_id
    }
    request = urllib2.Request('https://api.dropboxapi.com/2/team/devices/list_member_devices', json.dumps(data))
    request.add_header("Authorization", "Bearer "+token)
    request.add_header("Content-type", 'application/json')

    try:
        response = json.loads(urllib2.urlopen(request).read())
        return list_sessions(member_id, email, response, False)
    except urllib2.HTTPError, error:
        # Surface the API error body as a CLI error (exits the process).
        parser.error(error.read())
def main():
    '''The main function for generating the mailer.

    Based on the arguments, the mailer generates a Project object with its
    appropriate Section and Tasks objects, and then renders templates
    accordingly. This can either be written out to two files, or can be mailed
    out using a SMTP server running on localhost.
    '''

    parser = create_cli_parser()
    args = parser.parse_args()

    # Sending mail requires both a From: and at least one To: address;
    # having exactly one of the two is a usage error.
    if bool(args.from_address) != bool(args.to_addresses):
        parser.error(
            "'To:' and 'From:' address are required for sending email")

    asana = AsanaAPI(args.api_key)
    # Python 2 only: ``unicode`` normalizes the CLI filter strings.
    filters = frozenset((unicode(filter) for filter in args.tag_filters))
    section_filters = frozenset(
        (unicode(section + ':') for section in args.section_filters))
    current_time_utc = datetime.datetime.now(dateutil.tz.tzutc())
    current_date = str(datetime.date.today())
    project = Project.create_project(
        asana,
        args.project_id,
        current_time_utc,
        task_filters=filters,
        section_filters=section_filters,
        completed_lookback_hours=args.completed_lookback_hours)
    rendered_html, rendered_text = generate_templates(
        project, args.html_template, args.text_template, current_date,
        current_time_utc, args.skip_inline_css)

    # Mail when both address ends are present; otherwise write the rendered
    # templates out to files.
    if args.to_addresses and args.from_address:
        if args.cc_addresses:
            cc_addresses = args.cc_addresses[:]
        else:
            cc_addresses = None
        send_email(project, args.mail_server, args.from_address,
                   args.to_addresses[:], cc_addresses, rendered_html,
                   rendered_text, current_date, args.username, args.password)
    else:
        write_rendered_files(rendered_html, rendered_text, current_date)
    log.info('Finished')
# Example #19
# 0
def main():
    """Entry point: validate configuration, then consume pulse build messages."""
    parser = OptionParser()
    parser.add_option("-c", "--config", dest="config",
                      help="Configuration file")
    options = parser.parse_args()[0]

    # BUG FIX: validate the flag BEFORE using it.  The original read the
    # file first and only afterwards checked that a path had been supplied.
    if not options.config:
        parser.error('Configuration file is required')

    config = ConfigParser()
    try:
        config.read(options.config)
    except Exception:  # narrowed from a bare except (never swallow SystemExit)
        parser.error("Could not open configuration file")

    def got_message(data, message):
        # Always ack — even if processing raises — so the broker does not
        # redeliver the same message forever.
        try:
            receive_message(config, data, message)
        finally:
            message.ack()

    if not all([config.has_section('pulse'),
                config.has_option('pulse', 'user'),
                config.has_option('pulse', 'password')]):
        log.critical('Config file must have a [pulse] section containing and '
                     'least "user" and "password" options.')
        exit(1)

    verbosity = {True: log.DEBUG, False: log.WARN}
    log.basicConfig(
        format='%(asctime)s %(message)s',
        level=verbosity[config.getboolean('shipit-notifier', 'verbose')]
    )

    pulse_cfg = pconf.PulseConfiguration.read_from_config(config)

    # Adjust applabel when wanting to run shipit on multiple machines
    pulse = consumers.BuildConsumer(applabel='shipit-notifier', connect=False)
    pulse.config = pulse_cfg
    pulse.configure(topic='build.#.finished',
                    durable=True, callback=got_message)

    log.info('listening for pulse messages')
    pulse.listen()
# Example #20
# 0
    def prepare(self, args, parser):
        """Build the Docker image and optionally push/deploy it.

        NOTE(review): generator-style command runner — each ``yield`` hands a
        prepared command to the surrounding framework for execution.
        """
        if args.login and not args.push:
            parser.error("will not log in outside of push operation")

        # The tag defaults to '<repository>:latest' when no name was given.
        repository_tag = args.name or (args.resolve_repository_name() +
                                       ':latest')
        command = self.local['docker']['build', '--build-arg',
                                       'TARGET=production', '-t',
                                       repository_tag, '-t',
                                       self.get_full_name(repository_tag), ]

        if args.label:
            # Apply every extra label both locally and fully qualified.
            for label in args.label:
                name = args.resolve_repository_name() + ':' + label
                command = command['-t', name, '-t', self.get_full_name(name), ]
        elif args.show_warnings and args.target == 'production':
            # Production builds without a version label get a usage warning
            # that aborts the build (suppressible via --force, per the text).
            parser.error(
                textwrap.dedent("""\
                at least the standard versioning label is recommended for builds intended for production

                for example – 0.1.1 –

                    manage build --label 0.1.1 production

                ensure that you have pulled the latest from the Git repository, and consult –

                    git tag -l --sort version:refname

                – for the tags currently in use. and, ensure that you apply (and push) the same tag
                to the source in the Git repository as to the Docker image here, for example –

                    git tag -a 0.1.1

                (to suppress this warning, see: `manage build --force`)\
                """))

        yield command[ROOT_PATH]

        if args.push:
            yield from self['push'].delegate()

        if args.deploy:
            yield from self['deploy'].delegate()
# Example #21
# 0
def is_valid_file(parser, arg):
    """
    Check if arg is a valid file that already exists on the file system.

    Parameters
    ----------
    parser : argparse object
    arg : str

    Returns
    -------
    arg : str
        Absolute path of the existing file.
    """
    try:
        arg = os.path.abspath(arg)
    except Exception:
        # BUG FIX: the original wrapped everything in a bare ``except:``,
        # which also caught the SystemExit raised by parser.error() below and
        # reported a second, misleading "Invalid arguments" error.  Only the
        # path normalization can genuinely fail here.
        parser.error("Invalid arguments")
    if not os.path.exists(arg):
        parser.error("The file %s does not exist!" % arg)
    return arg
def main():
    """Migrate an Asana project to a Github repository's issues.

    Positional args (in order): Asana API key, Github username, Github
    password.  Exits non-zero on any validation or lookup failure.
    """
    parser = parse()
    (options, args) = parser.parse_args()

    if len(args) != 3:
        # parser.error() exits with status 2, so each branch terminates
        # immediately; the final exit(1) only fires when MORE than three
        # arguments were supplied.
        if len(args) == 0:
            parser.error("Asana API Key is required")
        if len(args) == 1:
            parser.error("Github username is required")
        if len(args) == 2:
            parser.error("Github password is required")
        exit(1)

    asana_api = asana.AsanaAPI(args[0], debug=False)
    project_id = get_project_id_from_asana(asana_api, options)
    if not project_id:
        exit(1)

    github_api = Github(args[1], args[2])
    git_repo = get_repo_from_github(github_api, options)
    if not git_repo:
        exit(1)

    migrate_asana_to_github(asana_api, project_id, git_repo, options)

    exit(0)
# Example #23
# 0
def get_arguments():
    """Parse the monitoring CLI options (index, lookup window, ES URL).

    All three options are mandatory; a missing one aborts via the parser's
    error() (which prints usage and exits).
    """
    opt_parser = optparse.OptionParser()
    opt_parser.add_option("-i", "--index", dest="index",
                          help="Index to be monitored.")
    opt_parser.add_option("-l", "--lookup", dest="lookup",
                          help="Lookup index for the last x minutes document timestamp.")
    opt_parser.add_option("-u", "--url", dest="url",
                          help="Elasticsearch server URL")
    (options, args) = opt_parser.parse_args()

    # Each option is required in turn; report the first one missing.
    if not options.index:
        opt_parser.error(
            "[-] Please specify the index name --index | -i , use --help for more info."
        )
    elif not options.lookup:
        opt_parser.error(
            "[-] Lookup threshold in minutes | -l , use --help for more info.")
    elif not options.url:
        opt_parser.error(
            "[-] Please specify the elasticsearch cluster URL --url | -u , use --help for more info."
        )
    return options
# Example #24
# 0
def parse_options(argv):
    """Parse args and return options, start, end.

    args can be from sys.argv. start and end are datetimes.
    """
    usage = "python %prog START [END]"
    parser = optparse.OptionParser(usage=usage)
    parser.add_option('-p', '--port', type=int, default=4321)
    parser.add_option('-m', '--movie', default=False, action='store_true')
    parser.add_option('-v', '--verbose', default=False, action='store_true')
    parser.add_option('--profile',
                      default=False,
                      action='store_true',
                      help='Run with Yappi')

    options, positional = parser.parse_args(argv)

    # positional[0] is the program name; START is required, END optional.
    if not 2 <= len(positional) <= 3:
        parser.error("incorrect number of arguments")

    def _hour_of(text, label):
        # Parse and truncate to the hour; any garbage becomes a usage error.
        try:
            return hour(dateutil.parser.parse(text, ignoretz=True))
        except Exception:
            parser.error("Couldn't parse %s date" % label)

    start = _hour_of(positional[1], "start").replace(tzinfo=pytz.UTC)

    if len(positional) == 3:
        end = _hour_of(positional[2], "end").replace(tzinfo=pytz.UTC)
    else:
        # Default window: exactly one hour starting at START.
        end = next_hour(start)

    if end - start < datetime.timedelta(hours=1):
        parser.error("END must be at least an hour after START")

    return options, start, end
# Example #25
# 0
def parse_options(argv):
    """Parse args and return options, start, end.

    args can be from sys.argv. start and end are datetimes.
    """
    parser = optparse.OptionParser(usage="python %prog START [END]")
    parser.add_option('-p', '--port', type=int, default=4321)
    parser.add_option('-m', '--movie', default=False, action='store_true')
    parser.add_option('-v', '--verbose', default=False, action='store_true')
    parser.add_option('--profile', default=False, action='store_true',
                      help='Run with Yappi')

    options, remaining = parser.parse_args(argv)

    # remaining[0] is the program name; START is required, END optional.
    if len(remaining) not in (2, 3):
        parser.error("incorrect number of arguments")

    try:
        start = hour(dateutil.parser.parse(remaining[1], ignoretz=True))
    except Exception:
        parser.error("Couldn't parse start date")
    start = start.replace(tzinfo=pytz.UTC)

    if len(remaining) == 3:
        try:
            end = hour(dateutil.parser.parse(remaining[2], ignoretz=True))
        except Exception:
            parser.error("Couldn't parse end date")
        end = end.replace(tzinfo=pytz.UTC)
    else:
        # Default window: exactly one hour starting at START.
        end = next_hour(start)

    # Reject a window shorter than a single hour.
    if end - start < datetime.timedelta(hours=1):
        parser.error("END must be at least an hour after START")

    return options, start, end
# Example #26
# 0
def main():
    """Plot voltage/current from aaptos devices over a database time range.

    --from and --to (ISO 8601) are both mandatory; the parsed datetimes
    and the output file name are handed to main_db().
    """
    # options handling
    usage = """%prog [options]"""
    description = """A simple script to display voltage/current from aaptos devices.
Support for both live stream (from the SOAP server) or database inspection."""
    parser = OptionParser(usage=usage,
                          add_help_option=True,
                          description=description)
    parser.add_option(
        "-f",
        "--from",
        action="store",
        type="string",
        dest="beginning",
        help=
        "beginning of the period to plot, in ISO 8601 format, YYYY-MM-DDTHH:MM:SS[.mmmmmm][+HH:MM]"
    )
    parser.add_option(
        "-t",
        "--to",
        action="store",
        type="string",
        dest="end",
        help=
        "end of the period to plot, in ISO 8601 format, YYYY-MM-DDTHH:MM:SS[.mmmmmm][+HH:MM]"
    )
    parser.add_option("-o",
                      "--output",
                      action="store",
                      type="string",
                      dest="filename",
                      help="output file name",
                      default="aaptos.root")
    (options, args) = parser.parse_args()
    if options.beginning is None or options.end is None:
        parser.error(
            "options --from and --to are both mandatory to access the database"
        )
    try:
        initialTime = dateutil.parser.parse(options.beginning)
    except ValueError:
        parser.error("--from: unknown string format")
    try:
        finalTime = dateutil.parser.parse(options.end)
    except ValueError:
        # BUG FIX: this branch previously reported "--from" even though it
        # is the --to value that failed to parse.
        parser.error("--to: unknown string format")
    main_db(initialTime, finalTime, options.filename)
Example #27
0
def main():
  """Plot voltage/current from aaptos devices over a database time range.

  --from and --to (ISO 8601) are both mandatory; the parsed datetimes and
  the output file name are handed to main_db().
  """
  # options handling
  usage="""%prog [options]"""
  description="""A simple script to display voltage/current from aaptos devices.
Support for both live stream (from the SOAP server) or database inspection."""
  parser = OptionParser(usage=usage,add_help_option=True,description=description)
  parser.add_option("-f", "--from", action="store", type="string", dest="beginning",
                    help="beginning of the period to plot, in ISO 8601 format, YYYY-MM-DDTHH:MM:SS[.mmmmmm][+HH:MM]")
  parser.add_option("-t", "--to", action="store", type="string", dest="end",
                    help="end of the period to plot, in ISO 8601 format, YYYY-MM-DDTHH:MM:SS[.mmmmmm][+HH:MM]")
  parser.add_option("-o", "--output", action="store", type="string", dest="filename",
                    help="output file name", default="aaptos.root")
  (options, args) = parser.parse_args()
  if options.beginning is None or options.end is None:
    parser.error("options --from and --to are both mandatory to access the database")
  try:
    initialTime = dateutil.parser.parse(options.beginning)
  except ValueError:
    parser.error("--from: unknown string format")
  try:
    finalTime = dateutil.parser.parse(options.end)
  except ValueError:
    # BUG FIX: this branch previously reported "--from" even though it is
    # the --to value that failed to parse.
    parser.error("--to: unknown string format")
  main_db(initialTime,finalTime,options.filename)
Example #28
0
# Command-line driver: exactly one of --bug / --bp selects which Launchpad
# artifact type to process.
option_list = [
    # No need to add '-h' or '--help', optparse automatically adds these options

    optparse.make_option('', '--bug', action='store_true', dest='lpbugs', default=False,
                         help='Process Launchpad Bugs'),
    optparse.make_option('', '--bp', action='store_true', dest='lpbps', default=False,
                         help='Process Launchpad Blueprints')
]

usage = 'usage: %prog [-h|--help|<options>]'
parser = optparse.OptionParser(usage=usage, option_list=option_list)

# OptionParser gets the options out, whatever is not preceded by
# an option is considered args.
(options, args) = parser.parse_args()

# we are not expecting args right now
if args:
    # BUG FIX: "argment" -> "argument" in the user-facing error message.
    parser.error('Invalid argument(s) found: ' + str(args))

# check options: with neither flag given, show help and bail out.
if not options.lpbugs and not options.lpbps:
    parser.print_help()
    sys.exit(1)
elif options.lpbugs:
    # Parenthesized single-argument print: identical output under Python 2,
    # and also valid Python 3 syntax.
    print("INFO: Processing Launchpad Bugs ...")
    Bug()
elif options.lpbps:
    print("INFO: Processing Launchpad Blueprints ...")
    Blueprint()
Example #29
0
def parse_args():
    """Build the optparse option set, validate it, and return options.

    Exits with usage when no arguments are given; unless -V is set,
    requires at least one --project and one --version, and caps
    --baseurl / --outputdir at a single value each.
    """
    usage = ("usage: %prog [OPTIONS] "
             "--project PROJECT [--project PROJECT] "
             "--version VERSION [--version VERSION2 ...]")
    parser = OptionParser(
        usage=usage,
        epilog="Markdown-formatted CHANGES and RELEASENOTES files will be stored"
        " in a directory named after the highest version provided.")
    parser.add_option("-i", "--index", dest="index", action="store_true",
                      default=False, help="build an index file")
    parser.add_option("-l", "--license", dest="license", action="store_true",
                      default=False, help="Add an ASF license")
    parser.add_option("-p", "--project", dest="projects", action="append",
                      type="string", metavar="PROJECT",
                      help="projects in JIRA to include in releasenotes")
    parser.add_option("-r", "--range", dest="range", action="store_true",
                      default=False, help="Given versions are a range")
    # "dec" is accepted for backward compatibility alongside "desc".
    parser.add_option("--sortorder", dest="sortorder", metavar="TYPE",
                      default=SORTORDER,
                      choices=["asc", "dec", "desc", "newer", "older"],
                      help="Sorting order for sort type (default: %s)" % SORTORDER)
    parser.add_option("--sorttype", dest="sorttype", metavar="TYPE",
                      default=SORTTYPE,
                      choices=["resolutiondate", "issueid"],
                      help="Sorting type for issues (default: %s)" % SORTTYPE)
    parser.add_option("-t", "--projecttitle", dest="title", type="string",
                      help="Title to use for the project (default is Apache PROJECT)")
    parser.add_option("-u", "--usetoday", dest="usetoday", action="store_true",
                      default=False,
                      help="use current date for unreleased versions")
    parser.add_option("-v", "--version", dest="versions", action="append",
                      type="string", metavar="VERSION",
                      help="versions in JIRA to include in releasenotes")
    parser.add_option("-V", dest="release_version", action="store_true",
                      default=False,
                      help="display version information for releasedocmaker and exit.")
    parser.add_option("-O", "--outputdir", dest="output_directory",
                      action="append", type="string",
                      help="specify output directory to put release docs to.")
    parser.add_option("-B", "--baseurl", dest="base_url", action="append",
                      type="string",
                      help="specify base URL of the JIRA instance.")
    parser.add_option("--retries", dest="retries", action="append",
                      type="int",
                      help="Specify how many times to retry connection for each URL.")
    parser.add_option("--skip-credits", dest="skip_credits",
                      action="store_true", default=False,
                      help="While creating release notes skip the 'reporter' and 'contributor' columns")
    parser.add_option("-X", "--incompatiblelabel", dest="incompatible_label",
                      default="backward-incompatible", type="string",
                      help="Specify the label to indicate backward incompatibility.")

    Linter.add_parser_options(parser)

    # With no arguments at all, show the help text instead of a terse error.
    if len(sys.argv) <= 1:
        parser.print_help()
        sys.exit(1)

    (options, _) = parser.parse_args()

    # Validate options; -V short-circuits every other requirement.
    # parser.error() raises SystemExit, so the fall-through assignments
    # below only run on valid input.
    if not options.release_version:
        if options.versions is None:
            parser.error("At least one version needs to be supplied")
        if options.projects is None:
            parser.error("At least one project needs to be supplied")
        if options.base_url is not None:
            if len(options.base_url) > 1:
                parser.error("Only one base URL should be given")
            options.base_url = options.base_url[0]
        if options.output_directory is not None:
            if len(options.output_directory) > 1:
                parser.error("Only one output directory should be given")
            options.output_directory = options.output_directory[0]

    return options
Example #30
0
                        type=int,
                        default=10,
                        help='max number of parallel crops downloads')
    parser.add_argument('--clip-and-ship',
                        action='store_true',
                        help=('use the '
                              'clip and '
                              'ship API'))
    parser.add_argument(
        '--no-crop',
        action='store_true',
        help=("don't crop but instead download the whole image files"))
    args = parser.parse_args()

    if args.geom and (args.lat or args.lon):
        parser.error('--geom and {--lat, --lon} are mutually exclusive')

    if args.clip_and_ship and args.no_crop:
        parser.error('--clip-and-ship and --no-crop are mutually exclusive')

    if not args.geom and (not args.lat or not args.lon):
        parser.error('either --geom or {--lat, --lon} must be defined')

    if args.geom:
        aoi = args.geom
    else:
        aoi = utils.geojson_geometry_object(args.lat, args.lon, args.width,
                                            args.height)
    get_time_series(aoi,
                    start_date=args.start_date,
                    end_date=args.end_date,
Example #31
0
def parse_command_line(argv):
    """Parse argv for the format-conversion tool and return the namespace.

    The executable name (argv[0]) may itself imply fixed input/output
    formats via argv0_to_format(); in that case the --if/--of options
    (and the irrelevant format-specific options) are not registered.
    """
    me = os.path.basename(argv[0])
    format_from_argv0, argv0_from, argv0_to = argv0_to_format(me)

    parser = argparse.ArgumentParser(
        description='Convert between TOML, MessagePack, YAML, JSON, and CBOR.')

    # Input may be given positionally or via -i; the two are mutually
    # exclusive, and the flag form wins below.
    input_group = parser.add_mutually_exclusive_group()
    input_group.add_argument('input',
                             nargs='?',
                             default='-',
                             help='input file')
    input_group.add_argument('-i',
                             '--input',
                             dest='input_flag',
                             metavar='input',
                             default=None,
                             help='input file')

    # Same positional-or-flag arrangement for the output file.
    output_group = parser.add_mutually_exclusive_group()
    output_group.add_argument('output',
                              nargs='?',
                              default='-',
                              help='output file')
    output_group.add_argument('-o',
                              '--output',
                              dest='output_flag',
                              metavar='output',
                              default=None,
                              help='output file')

    # Explicit format flags only make sense when the program name did not
    # already fix the formats.
    if not format_from_argv0:
        parser.add_argument('--if',
                            '-if',
                            '--input-format',
                            dest='input_format',
                            help="input format",
                            choices=FORMATS)
        parser.add_argument('--of',
                            '-of',
                            '--output-format',
                            dest='output_format',
                            help="output format",
                            choices=FORMATS)

    if not format_from_argv0 or argv0_to == 'json':
        parser.add_argument('--indent-json',
                            dest='indent_json',
                            metavar='n',
                            type=int,
                            default=None,
                            help='indent JSON output')

    if not format_from_argv0 or argv0_to == 'yaml':
        parser.add_argument('--yaml-style',
                            dest='yaml_style',
                            default=None,
                            help='YAML formatting style',
                            choices=['', '\'', '"', '|', '>'])

    parser.add_argument('--wrap',
                        dest='wrap',
                        metavar='key',
                        default=None,
                        help='wrap the data in a map type with the given key')
    parser.add_argument('--unwrap',
                        dest='unwrap',
                        metavar='key',
                        default=None,
                        help='only output the data stored under the given key')
    parser.add_argument('-p',
                        '--preserve-key-order',
                        dest='ordered',
                        action='store_true',
                        help='preserve the order of dictionary/mapping keys')
    parser.add_argument('-v',
                        '--version',
                        action='version',
                        version=__version__)

    args = parser.parse_args(args=argv[1:])

    # Use the positional input and output arguments.
    if args.input_flag is not None:
        args.input = args.input_flag

    if args.output_flag is not None:
        args.output = args.output_flag

    # Determine the implicit input and output format if possible.
    if format_from_argv0:
        args.input_format = argv0_from
        args.output_format = argv0_to

        # These options were never registered on the parser in this
        # branch, so seed their defaults directly in the namespace.
        if argv0_to != 'json':
            args.__dict__['indent_json'] = None
        if argv0_to != 'yaml':
            args.__dict__['yaml_style'] = None
    else:
        if args.input_format is None:
            args.input_format = extension_to_format(args.input)
            if args.input_format is None:
                parser.error('Need an explicit input format')

        if args.output_format is None:
            args.output_format = extension_to_format(args.output)
            if args.output_format is None:
                parser.error('Need an explicit output format')

    # Wrap yaml_style: fold it into a yaml_options kwargs dict and drop
    # the raw attribute.
    args.__dict__['yaml_options'] = {'default_style': args.yaml_style}
    del args.__dict__['yaml_style']

    return args
Example #32
0
def main():
    """Bulk-load temperature readings from a CSV file into TempoDB.

    The input file must be sorted by key.  Each line is a
    "timestamp,temperature" pair; points are queued in batches of 5000
    and written through a small thread pool.
    """
    parser = optparse.OptionParser(usage="usage: %prog [options] filename", version="%prog 0.1")
    parser.add_option("-i", "--input", dest="filename", help="read data from FILENAME")
    parser.add_option("-k", "--key", dest="key", help="tempodb database key")
    parser.add_option("-s", "--secret", dest="secret", help="tempodb database secret")
    parser.add_option("-H", "--host", dest="host", default="api.tempo-db.com", help="tempodb host")
    parser.add_option("-P", "--port", dest="port", default=443, help="tempodb port")
    parser.add_option("-S", "--secure", action="store_true", dest="secure", default=True, help="tempodb secure")

    (options, args) = parser.parse_args()
    print()
    print(options)
    print(args)
    print()
    if not options.filename:
        parser.error("Enter a file to read from.")

    client = tempodb.Client(options.key, options.secret, options.host,
                            int(options.port), options.secure)

    temperature_key = "a.thermostat.1.temperature"

    temperature_data = []
    count = 0
    total = 0

    # Init a Thread pool with the desired number of threads
    pool = ThreadPool(3)

    # BUG FIX: use a context manager so the file is closed even if a line
    # fails to parse (the original leaked the handle on any exception).
    with open(options.filename) as source_file:
        for line in source_file:
            timestamp, temperature = line.split(',')

            # Flush a batch of 5000 points at a time (the old comment
            # claimed 20, which did not match the code).
            if count >= 5000:
                total += count
                pool.add_task(client.write_key, temperature_key, temperature_data)
                print("time: %s \t count: %d" % (datetime.datetime.now().time(), total))
                temperature_data = []
                count = 0

            input_date = dateutil.parser.parse(timestamp)
            temperature_data.append(tempodb.DataPoint(input_date, float(temperature)))

            count += 1

    # pick up any scraps
    if temperature_data:
        pool.add_task(client.write_key, temperature_key, temperature_data)

    # Wait for completion
    pool.wait_completion()
Example #33
0
            args.source, args.give_asset, give_quantity, args.get_asset,
            get_quantity, args.expiration, fee_required, fee_provided
        ], args.unsigned)

    elif args.action == 'btcpay':
        cli('create_btcpay', [args.order_match_id], args.unsigned)

    elif args.action == 'issuance':
        quantity = util.devise(db,
                               args.quantity,
                               None,
                               'input',
                               divisible=args.divisible)
        if args.callable_:
            if not args.call_date:
                parser.error('must specify call date of callable asset', )
            if not args.call_price:
                parser.error('must specify call price of callable asset')
            call_date = calendar.timegm(
                dateutil.parser.parse(args.call_date).utctimetuple())
            call_price = float(args.call_price)
        else:
            call_date, call_price = 0, 0

        cli('create_issuance', [
            args.source, args.transfer_destination, args.asset, quantity,
            args.divisible, args.callable_, call_date, call_price,
            args.description
        ], args.unsigned)

    elif args.action == 'broadcast':
Example #34
0
def main():
  """Collect and print a user's code-review and issue-tracker activity.

  Builds the option set, resolves the requested date range, fetches
  changes / reviews / issues via MyActivity, and prints them in the
  selected output format (optionally to a file, or as JSON).
  """
  # Silence upload.py.
  rietveld.upload.verbosity = 0

  parser = optparse.OptionParser(description=sys.modules[__name__].__doc__)
  parser.add_option(
      '-u', '--user', metavar='<email>',
      default=os.environ.get('USER'),
      help='Filter on user, default=%default')
  parser.add_option(
      '-b', '--begin', metavar='<date>',
      help='Filter issues created after the date (mm/dd/yy)')
  parser.add_option(
      '-e', '--end', metavar='<date>',
      help='Filter issues created before the date (mm/dd/yy)')
  quarter_begin, quarter_end = get_quarter_of(datetime.today() -
                                              relativedelta(months=2))
  parser.add_option(
      '-Q', '--last_quarter', action='store_true',
      help='Use last quarter\'s dates, i.e. %s to %s' % (
        quarter_begin.strftime('%Y-%m-%d'), quarter_end.strftime('%Y-%m-%d')))
  parser.add_option(
      '-Y', '--this_year', action='store_true',
      help='Use this year\'s dates')
  parser.add_option(
      '-w', '--week_of', metavar='<date>',
      help='Show issues for week of the date (mm/dd/yy)')
  parser.add_option(
      '-W', '--last_week', action='count',
      help='Show last week\'s issues. Use more times for more weeks.')
  parser.add_option(
      '-a', '--auth',
      action='store_true',
      help='Ask to authenticate for instances with no auth cookie')
  parser.add_option(
      '-d', '--deltas',
      action='store_true',
      help='Fetch deltas for changes.')
  parser.add_option(
      '--no-referenced-issues',
      action='store_true',
      help='Do not fetch issues referenced by owned changes. Useful in '
           'combination with --changes-by-issue when you only want to list '
           'issues that have also been modified in the same time period.')
  parser.add_option(
      '--skip-own-issues-without-changes',
      action='store_true',
      help='Skips listing own issues without changes when showing changes '
           'grouped by referenced issue(s). See --changes-by-issue for more '
           'details.')

  activity_types_group = optparse.OptionGroup(parser, 'Activity Types',
                               'By default, all activity will be looked up and '
                               'printed. If any of these are specified, only '
                               'those specified will be searched.')
  activity_types_group.add_option(
      '-c', '--changes',
      action='store_true',
      help='Show changes.')
  activity_types_group.add_option(
      '-i', '--issues',
      action='store_true',
      help='Show issues.')
  activity_types_group.add_option(
      '-r', '--reviews',
      action='store_true',
      help='Show reviews.')
  activity_types_group.add_option(
      '--changes-by-issue', action='store_true',
      help='Show changes grouped by referenced issue(s).')
  parser.add_option_group(activity_types_group)

  output_format_group = optparse.OptionGroup(parser, 'Output Format',
                              'By default, all activity will be printed in the '
                              'following format: {url} {title}. This can be '
                              'changed for either all activity types or '
                              'individually for each activity type. The format '
                              'is defined as documented for '
                              'string.format(...). The variables available for '
                              'all activity types are url, title and author. '
                              'Format options for specific activity types will '
                              'override the generic format.')
  output_format_group.add_option(
      '-f', '--output-format', metavar='<format>',
      default=u'{url} {title}',
      help='Specifies the format to use when printing all your activity.')
  output_format_group.add_option(
      '--output-format-changes', metavar='<format>',
      default=None,
      help='Specifies the format to use when printing changes. Supports the '
      'additional variable {reviewers}')
  output_format_group.add_option(
      '--output-format-issues', metavar='<format>',
      default=None,
      help='Specifies the format to use when printing issues. Supports the '
           'additional variable {owner}.')
  output_format_group.add_option(
      '--output-format-reviews', metavar='<format>',
      default=None,
      help='Specifies the format to use when printing reviews.')
  output_format_group.add_option(
      '--output-format-heading', metavar='<format>',
      default=u'{heading}:',
      help='Specifies the format to use when printing headings.')
  output_format_group.add_option(
      '--output-format-no-url', default='{title}',
      help='Specifies the format to use when printing activity without url.')
  output_format_group.add_option(
      '-m', '--markdown', action='store_true',
      help='Use markdown-friendly output (overrides --output-format '
           'and --output-format-heading)')
  output_format_group.add_option(
      '-j', '--json', action='store_true',
      help='Output json data (overrides other format options)')
  parser.add_option_group(output_format_group)
  auth.add_auth_options(parser)

  parser.add_option(
      '-v', '--verbose',
      action='store_const',
      dest='verbosity',
      default=logging.WARN,
      const=logging.INFO,
      help='Output extra informational messages.'
  )
  parser.add_option(
      '-q', '--quiet',
      action='store_const',
      dest='verbosity',
      const=logging.ERROR,
      help='Suppress non-error messages.'
  )
  parser.add_option(
      '-M', '--merged-only',
      action='store_true',
      dest='merged_only',
      default=False,
      help='Shows only changes that have been merged.')
  parser.add_option(
      '-C', '--completed-issues',
      action='store_true',
      dest='completed_issues',
      default=False,
      help='Shows only monorail issues that have completed (Fixed|Verified) '
           'by the user.')
  parser.add_option(
      '-o', '--output', metavar='<file>',
      help='Where to output the results. By default prints to stdout.')

  # Remove description formatting
  parser.format_description = (
      lambda _: parser.description)  # pylint: disable=no-member

  options, args = parser.parse_args()
  options.local_user = os.environ.get('USER')
  if args:
    parser.error('Args unsupported')
  if not options.user:
    parser.error('USER is not set, please use -u')
  options.user = username(options.user)

  logging.basicConfig(level=options.verbosity)

  # python-keyring provides easy access to the system keyring.
  try:
    import keyring  # pylint: disable=unused-import,unused-variable,F0401
  except ImportError:
    logging.warning('Consider installing python-keyring')

  # Resolve the date range: an explicit --begin wins; otherwise fall back
  # to the quarter/year/week convenience flags (default: week of yesterday).
  if not options.begin:
    if options.last_quarter:
      begin, end = quarter_begin, quarter_end
    elif options.this_year:
      begin, end = get_year_of(datetime.today())
    elif options.week_of:
      begin, end = (get_week_of(datetime.strptime(options.week_of, '%m/%d/%y')))
    elif options.last_week:
      begin, end = (get_week_of(datetime.today() -
                                timedelta(days=1 + 7 * options.last_week)))
    else:
      begin, end = (get_week_of(datetime.today() - timedelta(days=1)))
  else:
    begin = dateutil.parser.parse(options.begin)
    if options.end:
      end = dateutil.parser.parse(options.end)
    else:
      end = datetime.today()
  options.begin, options.end = begin, end

  if options.markdown:
    options.output_format_heading = '### {heading}\n'
    options.output_format = '  * [{title}]({url})'
    options.output_format_no_url = '  * {title}'
  logging.info('Searching for activity by %s', options.user)
  logging.info('Using range %s to %s', options.begin, options.end)

  my_activity = MyActivity(options)
  my_activity.show_progress('Loading data')

  # With no explicit activity-type flag, look everything up.
  if not (options.changes or options.reviews or options.issues or
          options.changes_by_issue):
    options.changes = True
    options.issues = True
    options.reviews = True

  # First do any required authentication so none of the user interaction has to
  # wait for actual work.
  if options.changes or options.changes_by_issue:
    my_activity.auth_for_changes()
  if options.reviews:
    my_activity.auth_for_reviews()

  logging.info('Looking up activity.....')

  try:
    if options.changes or options.changes_by_issue:
      my_activity.get_changes()
    if options.reviews:
      my_activity.get_reviews()
    if options.issues or options.changes_by_issue:
      my_activity.get_issues()
    if not options.no_referenced_issues:
      my_activity.get_referenced_issues()
  except auth.AuthenticationError as e:
    logging.error('auth.AuthenticationError: %s', e)

  my_activity.show_progress('\n')

  my_activity.print_access_errors()

  # sys.stdout is temporarily redirected to the output file; the finally
  # block below restores it and closes the file.
  output_file = None
  try:
    if options.output:
      output_file = open(options.output, 'w')
      logging.info('Printing output to "%s"', options.output)
      sys.stdout = output_file
  except (IOError, OSError) as e:
    logging.error('Unable to write output: %s', e)
  else:
    if options.json:
      my_activity.dump_json()
    else:
      if options.changes:
        my_activity.print_changes()
      if options.reviews:
        my_activity.print_reviews()
      if options.issues:
        my_activity.print_issues()
      if options.changes_by_issue:
        my_activity.print_changes_by_issue(
            options.skip_own_issues_without_changes)
  finally:
    if output_file:
      logging.info('Done printing to file.')
      sys.stdout = sys.__stdout__
      output_file.close()

  return 0
Example #35
0
            worker = cls(queues, resq)
            worker.id = worker_id
            return worker
        else:
            return None

    @classmethod
    def exists(cls, worker_id, resq):
        """Return whether worker_id is a member of the 'resque:workers' set."""
        redis_conn = resq.redis
        return redis_conn.sismember('resque:workers', worker_id)


try:
    from setproctitle import setproctitle
except ImportError:
    # setproctitle is an optional dependency; fall back to a no-op so
    # callers can invoke it unconditionally.
    def setproctitle(name):
        pass


if __name__ == "__main__":
    from optparse import OptionParser
    parser = OptionParser()
    # -q takes a comma-separated list of queue names (split below);
    # -s is the redis server address.
    parser.add_option("-q", dest="queue_list")
    parser.add_option("-s", dest="server", default="localhost:6379")
    (options, args) = parser.parse_args()
    if not options.queue_list:
        # At least one queue is mandatory; show help, then exit via error().
        parser.print_help()
        parser.error("Please give each worker at least one queue.")
    queues = options.queue_list.split(',')
    Worker.run(queues, options.server)
def parse_args():
    """Parse command-line arguments with optparse.

    Requires at least one --project and one --version; --baseurl and
    --outputdir accept at most one value each.
    """
    usage = "usage: %prog [OPTIONS] " + \
            "--project PROJECT [--project PROJECT] " + \
            "--version VERSION [--version VERSION2 ...]"
    parser = OptionParser(
        usage=usage,
        # BUG FIX: the two fragments previously concatenated as "storedin";
        # the leading space restores the intended wording.
        epilog="Markdown-formatted CHANGES and RELEASENOTES files will be stored"
        " in a directory named after the highest version provided.")
    parser.add_option("-i",
                      "--index",
                      dest="index",
                      action="store_true",
                      default=False,
                      help="build an index file")
    parser.add_option("-l",
                      "--license",
                      dest="license",
                      action="store_true",
                      default=False,
                      help="Add an ASF license")
    parser.add_option("-p",
                      "--project",
                      dest="projects",
                      action="append",
                      type="string",
                      help="projects in JIRA to include in releasenotes",
                      metavar="PROJECT")
    parser.add_option("-r",
                      "--range",
                      dest="range",
                      action="store_true",
                      default=False,
                      help="Given versions are a range")
    parser.add_option("--sortorder",
                      dest="sortorder",
                      type="string",
                      metavar="TYPE",
                      default=SORTORDER,
                      help="Sorting order for sort type (default: %s)" %
                      SORTORDER)
    parser.add_option("--sorttype",
                      dest="sorttype",
                      type="string",
                      metavar="TYPE",
                      default=SORTTYPE,
                      help="Sorting type for issues (default: %s)" % SORTTYPE)
    parser.add_option(
        "-t",
        "--projecttitle",
        dest="title",
        type="string",
        help="Title to use for the project (default is Apache PROJECT)")
    parser.add_option("-u",
                      "--usetoday",
                      dest="usetoday",
                      action="store_true",
                      default=False,
                      help="use current date for unreleased versions")
    parser.add_option("-v",
                      "--version",
                      dest="versions",
                      action="append",
                      type="string",
                      help="versions in JIRA to include in releasenotes",
                      metavar="VERSION")
    parser.add_option(
        "-V",
        dest="release_version",
        action="store_true",
        default=False,
        help="display version information for releasedocmaker and exit.")
    parser.add_option("-O",
                      "--outputdir",
                      dest="output_directory",
                      action="append",
                      type="string",
                      help="specify output directory to put release docs to.")
    parser.add_option("-B",
                      "--baseurl",
                      dest="base_url",
                      action="append",
                      type="string",
                      help="specify base URL of the JIRA instance.")

    Linter.add_parser_options(parser)

    (options, _) = parser.parse_args()

    # Validate options
    if options.versions is None:
        parser.error("At least one version needs to be supplied")
    if options.projects is None:
        parser.error("At least one project needs to be supplied")
    if options.base_url is not None:
        if len(options.base_url) > 1:
            parser.error("Only one base URL should be given")
        else:
            options.base_url = options.base_url[0]
    if options.output_directory is not None:
        if len(options.output_directory) > 1:
            parser.error("Only one output directory should be given")
        else:
            options.output_directory = options.output_directory[0]

    return options
Example #37
0
def parse_args():
    """Define, parse and validate the releasedocmaker command line.

    Returns the validated optparse ``options`` object.  Exits (via
    ``parser.error`` / ``sys.exit``) on invalid or missing arguments.
    """
    usage = ("usage: %prog [OPTIONS] "
             "--project PROJECT [--project PROJECT] "
             "--version VERSION [--version VERSION2 ...]")
    parser = OptionParser(
        usage=usage,
        epilog="Markdown-formatted CHANGES and RELEASENOTES files will be stored"
        " in a directory named after the highest version provided.")

    parser.add_option("-i", "--index", dest="index", action="store_true",
                      default=False, help="build an index file")
    parser.add_option("-l", "--license", dest="license", action="store_true",
                      default=False, help="Add an ASF license")
    parser.add_option("-p", "--project", dest="projects", action="append",
                      type="string", metavar="PROJECT",
                      help="projects in JIRA to include in releasenotes")
    parser.add_option("-r", "--range", dest="range", action="store_true",
                      default=False, help="Given versions are a range")
    parser.add_option("--sortorder", dest="sortorder", metavar="TYPE",
                      default=SORTORDER,
                      # dec is supported for backward compatibility
                      choices=["asc", "dec", "desc", "newer", "older"],
                      help="Sorting order for sort type (default: %s)" % SORTORDER)
    parser.add_option("--sorttype", dest="sorttype", metavar="TYPE",
                      default=SORTTYPE,
                      choices=["resolutiondate", "issueid"],
                      help="Sorting type for issues (default: %s)" % SORTTYPE)
    parser.add_option("-t", "--projecttitle", dest="title", type="string",
                      help="Title to use for the project (default is Apache PROJECT)")
    parser.add_option("-u", "--usetoday", dest="usetoday", action="store_true",
                      default=False,
                      help="use current date for unreleased versions")
    parser.add_option("-v", "--version", dest="versions", action="append",
                      type="string", metavar="VERSION",
                      help="versions in JIRA to include in releasenotes")
    parser.add_option("-V", dest="release_version", action="store_true",
                      default=False,
                      help="display version information for releasedocmaker and exit.")
    parser.add_option("-O", "--outputdir", dest="output_directory",
                      action="append", type="string",
                      help="specify output directory to put release docs to.")
    parser.add_option("-B", "--baseurl", dest="base_url", action="append",
                      type="string",
                      help="specify base URL of the JIRA instance.")
    parser.add_option("--retries", dest="retries", action="append", type="int",
                      help="Specify how many times to retry connection for each URL.")
    parser.add_option("-X", "--incompatiblelabel", dest="incompatible_label",
                      default="backward-incompatible", type="string",
                      help="Specify the label to indicate backward incompatibility.")

    Linter.add_parser_options(parser)

    # No arguments at all: show the help text instead of a validation error.
    if len(sys.argv) <= 1:
        parser.print_help()
        sys.exit(1)

    (options, _) = parser.parse_args()

    # Validation is skipped entirely when only the tool version was requested.
    if not options.release_version:
        if options.versions is None:
            parser.error("At least one version needs to be supplied")
        if options.projects is None:
            parser.error("At least one project needs to be supplied")
        # Collapse the append-style single-value options; parser.error exits,
        # so no else branch is needed after the length check.
        if options.base_url is not None:
            if len(options.base_url) > 1:
                parser.error("Only one base URL should be given")
            options.base_url = options.base_url[0]
        if options.output_directory is not None:
            if len(options.output_directory) > 1:
                parser.error("Only one output directory should be given")
            options.output_directory = options.output_directory[0]

    return options
Example #38
0
def main():
    """Command-line entry point for RubberPaper (Python 2 code).

    Supports three sub-commands: ``feed`` (collect RSS/Atom feeds into a
    per-host shelve index), ``render`` (render a PDF) and ``convert``
    (convert a storage).  Only the ``feed`` branch is implemented here;
    the other sub-commands are parsed but handled elsewhere.
    NOTE(review): relies on module-level imports of os, dateutil, pytz,
    urlparse, datetime and the constant DEFAULT_TIMEZONE — confirm they
    exist at file top.
    """
    try:
        import argparse
        parser = argparse.ArgumentParser(description = "RubberPaper")
        parser.add_argument("-v", "--verbose", metavar = "verbose", help = "Verbose output")
        subparser = parser.add_subparsers(dest="command", help = "commands")

        feed_parser = subparser.add_parser("feed", help = "Collect one or more feeds")
        feed_parser.add_argument('--url')
        feed_parser.add_argument('--name')

        render_parser = subparser.add_parser("render", help = "Render a PDF document")
        render_parser.add_argument('--date')
        render_parser.add_argument('--output', type = argparse.FileType("wb", 0))
        render_parser.add_argument('--url')

        convert_parser = subparser.add_parser("convert", help = "Convert a storage")
        convert_parser.add_argument('--input', required = True)
        convert_parser.add_argument('--output', required = True)

        # FileType can raise IOError when the --output file cannot be opened.
        try:
            args = parser.parse_args()
        except IOError, err:
            parser.error(str(err))

        import shelve
        from contextlib import closing

        if args.command == "feed":
            # Feed-processing branch (the string below is a leftover
            # Russian block label: "feed processing").
            ''' Обработка фидов
            '''
            dateparser = dateutil.parser.parser()
            # Build the list of feed URLs: a single --url, a named feed list
            # under ~/.rubber/feed-lists, or every list file found there.
            feeds = list()
            if args.url:
                feeds.append(args.url)
            elif args.name:
                feed_list_filename = os.path.expanduser(os.path.join("~", ".rubber", "feed-lists", args.name))
                with open(feed_list_filename, "rt") as f:
                    for feed in f:
                        feeds.append(feed.strip())
            else:
                for dirpath, dirnames, filenames in os.walk(os.path.expanduser(os.path.join("~", ".rubber", "feed-lists"))):
                    for filename in filenames:
                        feed_list_filename = os.path.join(dirpath, filename)
                        with open(feed_list_filename, "rt") as f:
                            for feed in f:
                                feeds.append(feed.strip())

            import feedparser
            for feed in feeds:
                # Best-effort per feed: a failure in one feed is printed and
                # the loop continues with the next one.
                try:
                    print "Processing feed {0}...".format(feed)
                    # One shelve index per feed host, under ~/.rubber/index/.
                    index_file_name = os.path.expanduser(os.path.join("~", ".rubber", "index", urlparse.urlsplit(feed).hostname))
                    with closing(shelve.open(index_file_name, flag = "c")) as storage:
                        news = feedparser.parse(feed)
                        # Encoding reported by the feed (currently unused).
                        codepage = news.encoding
                        # Oldest entries first, so the index grows in order.
                        for n in reversed(news.entries):

                            # Link to the news item.
                            url = n.link
                            # Entry identifier; fall back to the URL when the
                            # feed provides no explicit id.
                            if hasattr(n, "id"):
                                topicid = n.id
                            else:
                                topicid = url

                            # Only store entries we have not seen before.
                            if not storage.has_key(topicid.encode("utf-8")):
                                article = dict()

                                # Article URL.
                                article["url"] = url
                                print "URL:{0}".format(article["url"].encode("utf-8")).strip()

                                # Article title.
                                article["title"] = n.title
                                print "Title:{0}".format(article["title"].encode("utf-8")).strip()

                                # Publication date: prefer 'published', then
                                # 'updated', else fall back to "now" in the
                                # configured default timezone.
                                article["date"] = None
                                if hasattr(n, "published"):
                                    article["date"] = dateparser.parse(n.published)
                                elif hasattr(n, "updated"):
                                    article["date"] = dateparser.parse(n.updated)
                                else:
                                    article["date"] = datetime.datetime.now(pytz.timezone(DEFAULT_TIMEZONE))
                                if article["date"]:
                                    print "Date:{0}".format(article["date"]).strip()

                                # Article author (optional in many feeds).
                                if hasattr(n, "author"):
                                    article["author"] = n.author
                                    print "Author:{0}".format(article["author"].encode("utf-8")).strip()

                                # Article summary/body (optional).
                                if hasattr(n, "summary"):
                                    article["summary"] = n.summary

                                storage[topicid.encode("utf-8")] = article
                except Exception, exc:
                    print exc
Example #39
0
def main():
  """Search Rietveld for code reviews done by a reviewer in a date range.

  Python 2 CLI driver: parses options, normalizes the date range, then
  either counts or prints matching issues.  Returns 0 on success.
  """
  # Silence upload.py.
  rietveld.upload.verbosity = 0

  q_begin, q_end = get_previous_quarter(datetime.date.today())

  # Derive a default reviewer address from the environment when possible.
  fallback_email = os.environ.get('EMAIL_ADDRESS')
  if not fallback_email:
    username = os.environ.get('USER')
    if username:
      fallback_email = username + '@chromium.org'

  parser = optparse.OptionParser(description=__doc__)
  parser.add_option(
      '--count', action='store_true',
      help='Just count instead of printing individual issues')
  parser.add_option(
      '-r', '--reviewer', metavar='<email>', default=fallback_email,
      help='Filter on issue reviewer, default=%default')
  parser.add_option(
      '-b', '--begin', metavar='<date>',
      help='Filter issues created after the date')
  parser.add_option(
      '-e', '--end', metavar='<date>',
      help='Filter issues created before the date')
  parser.add_option(
      '-Q', '--last_quarter', action='store_true',
      help='Use last quarter\'s dates, e.g. %s to %s' % (q_begin, q_end))
  parser.add_option(
      '-i', '--instance_url', metavar='<host>',
      default='http://codereview.chromium.org',
      help='Host to use, default is %default')
  auth.add_auth_options(parser)
  # Remove description formatting
  parser.format_description = (
      lambda _: parser.description)  # pylint: disable=no-member

  options, args = parser.parse_args()
  auth_config = auth.extract_auth_config_from_options(options)
  if args:
    parser.error('Args unsupported')
  if options.reviewer is None:
    parser.error('$EMAIL_ADDRESS and $USER are not set, please use -r')

  print >> sys.stderr, 'Searching for reviews by %s' % options.reviewer
  if options.last_quarter:
    # -Q wins over explicit -b/-e values.
    options.begin = q_begin
    options.end = q_end
    print >> sys.stderr, 'Using range %s to %s' % (
        options.begin, options.end)
  elif options.begin is None or options.end is None:
    parser.error('Please specify either --last_quarter or --begin and --end')

  # Normalize both endpoints to YYYY-MM-DD; reject unparsable dates.
  try:
    options.begin = dateutil.parser.parse(options.begin).strftime('%Y-%m-%d')
    options.end = dateutil.parser.parse(options.end).strftime('%Y-%m-%d')
  except ValueError as e:
    parser.error('%s: %s - %s' % (e, options.begin, options.end))

  # Both reporters share the exact same argument list.
  reporter = print_count if options.count else print_reviews
  reporter(
      options.reviewer,
      options.begin,
      options.end,
      options.instance_url,
      auth_config)
  return 0
Example #40
0
# Default reporting window: the first through the last day of the current
# month.  BUG FIX: the timestamp is captured once — the original called
# datetime.now() three separate times, so the pieces of def_end (and
# def_start vs def_end) could disagree if the script ran across a
# midnight/month boundary.
_now = datetime.datetime.now()
def_start = _now.strftime('%Y-%m-01')
def_end = _now.strftime('%Y-%m-') + str(calendar.monthrange(_now.year, _now.month)[1])

parser = OptionParser()
parser.add_option("-u", "--username", dest="username")
parser.add_option("-p", "--password", dest="password")
parser.add_option("-c", "--calendar", dest="calendar")
parser.add_option("-q", "--query", dest="query")
parser.add_option("-s", "--start", dest="start", default=def_start)
parser.add_option("-e", "--end", dest="end", default=def_end)

(options, args) = parser.parse_args()

# All four of these options are mandatory; optparse has no built-in
# "required option" support, so validate by hand.
if not options.username or not options.password or not options.calendar or not options.query:
    parser.error("incorrect number of arguments")

def elapsed_time(seconds, suffixes=['y','w','d','h','m','s'], add_s=False, separator=' '):
    """
    Takes an amount of seconds and turns it into a human-readable amount of time.
    From: http://snipplr.com/view/5713/python-elapsedtime-human-readable-time-span-given-total-seconds/
    """
    # the formatted time string to be returned
    time = []

    # the pieces of time to iterate over (days, hours, minutes, etc)
    # - the first piece in each tuple is the suffix (d, h, w)
    # - the second piece is the length in seconds (a day is 60s * 60m * 24h)
    parts = [(suffixes[0], 60 * 60 * 24 * 7 * 52),
             (suffixes[1], 60 * 60 * 24 * 7),
             (suffixes[2], 60 * 60 * 24),
Example #41
0
    def prepare(self, args, parser):
        """Generator of build steps: build the Docker image, then optionally
        push and/or deploy it.

        ``args`` is the parsed CLI namespace; ``parser`` is used only to
        report usage errors.  Yields command objects (and delegates to the
        ``push``/``deploy`` sub-commands) for the surrounding framework to
        execute.
        """
        # --login is only meaningful together with --push.
        if args.login and not args.push:
            parser.error("will not log in outside of push operation")

        yield from self.check_latest()

        # Record the deployment up front so later steps can attach a status.
        if args.deploy:
            deployment = self.record_deployment()
        else:
            deployment = None

        # NOTE(review): command objects look plumbum-style — indexing appends
        # arguments without running anything; confirm against self.local.
        repository_tag = args.name or (args.resolve_repository_name() +
                                       ':latest')
        command = self.local['docker']['build', '--build-arg',
                                       'TARGET=production', '-t',
                                       repository_tag, '-t',
                                       self.get_full_name(repository_tag), ]

        if args.label:
            # treat first label/tag as version to be handled specially by build
            command = command['--build-arg', f'APPVERSION={args.label[0]}', ]

            # Each label becomes both a local and a fully-qualified image tag.
            for label in args.label:
                name = args.resolve_repository_name() + ':' + label
                command = command['-t', name, '-t', self.get_full_name(name), ]
        elif args.show_warnings and args.target == 'production':
            # Unlabelled production builds are almost certainly a mistake;
            # refuse unless warnings were explicitly suppressed.
            parser.error(
                textwrap.dedent("""\
                at least the standard versioning label is recommended for builds intended for production

                for example – 0.1.1 –

                    manage build --label 0.1.1 production

                ensure that you have pulled the latest from the Git repository, and consult –

                    git tag -l --sort version:refname

                – for the tags currently in use. and, ensure that you apply (and push) the same tag
                to the source in the Git repository as to the Docker image here, for example –

                    git tag -a 0.1.1

                (to suppress this warning, see: `manage build --force`)\
                """))

        # Run the docker build from the repository root.
        yield command[ROOT_PATH]

        if args.push:
            if deployment:
                self.record_status(
                    deployment['id'],
                    'pending',
                    self.get_record_description('PUSH'),
                )

            yield from self['push'].delegate()

        if args.deploy:
            if deployment:
                args.deployment_id = deployment['id']

            yield from self['deploy'].delegate()
def read_options():
    """Parse the reporting tool's command line with optparse.

    Returns the validated ``opts`` object.  Exits via ``parser.error`` when
    positional arguments are given or a mandatory option is missing.
    """
    parser = OptionParser(usage="usage: %prog [options]",
                          version="%prog 0.1")
    parser.add_option("-d", "--database",
                      action="store",
                      dest="dbname",
                      help="Database where information is stored")
    parser.add_option("-u", "--dbuser",
                      action="store",
                      dest="dbuser",
                      default="root",
                      help="Database user")
    parser.add_option("-p", "--dbpassword",
                      action="store",
                      dest="dbpassword",
                      default="",
                      help="Database password")
    parser.add_option("-g", "--granularity",
                      action="store",
                      dest="granularity",
                      default="months",
                      help="year,months,weeks granularity")
    parser.add_option("-o", "--destination",
                      action="store",
                      dest="destdir",
                      default="data/json",
                      help="Destination directory for JSON files")
    parser.add_option("-r", "--reports",
                      action="store",
                      dest="reports",
                      default="",
                      help="Reports to be generated (repositories, companies, countries, people)")
    parser.add_option("-s", "--start",
                      action="store",
                      dest="startdate",
                      default="1900-01-01",
                      help="Start date for the report")
    parser.add_option("-e", "--end",
                      action="store",
                      dest="enddate",
                      default="2100-01-01",
                      help="End date for the report")
    parser.add_option("-i", "--identities",
                      action="store",
                      dest="identities_db",
                      help="Database with unique identities and affiliations")
    parser.add_option("-t", "--type",
                      action="store",
                      dest="backend",
                      default="bugzilla",
                      help="Type of backend: bugzilla, allura, jira, github")
    parser.add_option("--npeople",
                      action="store",
                      dest="npeople",
                      default="10",
                      help="Limit for people analysis")

    (opts, args) = parser.parse_args()

    # This tool takes no positional arguments at all.
    if len(args) != 0:
        parser.error("Wrong number of arguments")

    # BUG FIX: the error message used to say "--db-user", but the option is
    # actually spelled "--dbuser" — the old text sent users to a flag that
    # does not exist.
    if not(opts.dbname and opts.dbuser and opts.identities_db):
        parser.error("--database, --dbuser and --identities are needed")
    return opts
            except Exception, e:
                # bad luck
                logger.warning('cannot translate date_string "%s": %s' % (date_string, repr(e)))
        return None


if __name__ == '__main__':
    from optparse import OptionParser

    logging.basicConfig()
    parser = OptionParser(usage="%prog [-f] [-q] <free-form date string>", version=str(__version__))
    parser.add_option("-f", "--force-update",
                      action='store_true', dest="force_update", default=False,
                      help="force self-updating of TRANSLATION_DICT in this file", metavar="FILE")
    parser.add_option("-q", "--quiet",
                      action="store_false", dest="verbose", default=True,
                      help="don't print debug messages to stdout")
    (options, args) = parser.parse_args()
    if len(args) != 1:
        parser.error("you have missed query string")
    date_string = args[0]
    if options.verbose:
        print '%s -> %s' % (date_string, parse(date_string, loglevel=logging.DEBUG, force_update=options.force_update))
    else:
        print parse(date_string, loglevel=logging.WARNING, force_update=options.force_update)

# Emacs:
# Local variables:
# time-stamp-pattern: "100/^__updated__ = '%%'$"
# End:
Example #44
0
    # Script entry section (Python 2): set up replacement-safe stdout,
    # parse the endpoint argument, then run a query read from stdin.
    import sys
    import codecs
    from optparse import OptionParser

    # Wrap stdout so characters the terminal cannot encode are replaced
    # instead of raising UnicodeEncodeError.  Falls back to ASCII when the
    # stream reports no usable encoding.
    try:
        c = codecs.getwriter(sys.stdout.encoding)
    except:
        c = codecs.getwriter('ascii')
    sys.stdout = c(sys.stdout, 'replace')


    parser = OptionParser(usage="%prog [-i] endpoint",
        version="%prog " + str(__version__))
    parser.add_option("-i", dest="interactive", action="store_true",
                help="Enables interactive mode")

    (options, args) = parser.parse_args()

    # Exactly one positional argument: the endpoint URL.
    # NOTE(review): the endpoint's semantics depend on query() and
    # _interactive(), which are defined elsewhere in this file — confirm.
    if len(args) != 1:
        parser.error("Endpoint must be specified")

    endpoint = args[0]

    if options.interactive:
        _interactive(endpoint)

    # Non-interactive mode: read the whole query from stdin and print the
    # resulting rows tab-separated (Python 2 print statement).
    q = sys.stdin.read()
    result = query(endpoint, q)
    for row in result.fetchone():
        print "\t".join(map(unicode,row))
Example #45
0
def parse_command_line(argv):
    """Build and apply the CLI for the TOML/YAML/JSON converter.

    ``argv`` is a complete argv vector; ``argv[0]`` may itself imply the
    conversion (e.g. a ``toml2json`` symlink).  Returns the parsed
    ``argparse.Namespace`` with ``input``/``output`` resolved, the formats
    determined, and ``yaml_options`` prepared.  Exits via ``parser.error``
    when a format cannot be determined.
    """
    me = os.path.basename(argv[0])
    # The script's own name may fix both formats up front.
    format_from_argv0, argv0_from, argv0_to = argv0_to_format(me)

    parser = argparse.ArgumentParser(description='Convert between TOML, YAML '
                                     'and JSON.')

    # Input may be given positionally or via -i/--input, but not both.
    input_group = parser.add_mutually_exclusive_group()
    input_group.add_argument('input',
                             nargs='?',
                             default='-',
                             help='input file')
    input_group.add_argument('-i', '--input',
                             dest='input_flag',
                             metavar='input',
                             default=None,
                             help='input file')

    # Output may be given positionally or via -o/--output, but not both.
    output_group = parser.add_mutually_exclusive_group()
    output_group.add_argument('output',
                              nargs='?',
                              default='-',
                              # BUG FIX: this help text said 'input file'
                              # (copy-paste from the input group).
                              help='output file')
    output_group.add_argument('-o', '--output',
                              dest='output_flag',
                              metavar='output',
                              default=None,
                              help='output file')

    # Explicit format flags are only offered when argv[0] did not already
    # decide the conversion.
    if not format_from_argv0:
        parser.add_argument('--if', '-if', '--input-format',
                            dest='input_format',
                            help="input format",
                            choices=FORMATS)
        parser.add_argument('--of', '-of', '--output-format',
                            dest='output_format',
                            help="output format",
                            choices=FORMATS)

    if not format_from_argv0 or argv0_to == 'json':
        parser.add_argument('--indent-json',
                            dest='indent_json',
                            metavar='n',
                            type=int,
                            default=None,
                            help='indent JSON output')

    if not format_from_argv0 or argv0_to == 'yaml':
        parser.add_argument('--yaml-style',
                            dest='yaml_style',
                            default=None,
                            help='YAML formatting style',
                            choices=['', '\'', '"', '|', '>'])

    parser.add_argument('--wrap',
                        dest='wrap',
                        metavar='key',
                        default=None,
                        help='wrap the data in a map type with the given key')
    parser.add_argument('--unwrap',
                        dest='unwrap',
                        metavar='key',
                        default=None,
                        help='only output the data stored under the given key')
    parser.add_argument('--preserve-key-order',
                        dest='ordered',
                        action='store_true',
                        help='preserve the order of dictionary/mapping keys')
    parser.add_argument('-v', '--version',
                        action='version',
                        version=__version__)

    args = parser.parse_args(args=argv[1:])

    # Use the positional input and output arguments.
    if args.input_flag is not None:
        args.input = args.input_flag

    if args.output_flag is not None:
        args.output = args.output_flag

    # Determine the implicit input and output format if possible.
    if format_from_argv0:
        args.input_format = argv0_from
        args.output_format = argv0_to

        # These options were never added to the parser above, so fill in
        # the attributes they would have produced.
        if argv0_to != 'json':
            args.__dict__['indent_json'] = None
        if argv0_to != 'yaml':
            args.__dict__['yaml_style'] = None
    else:
        # Fall back to inferring formats from the file extensions.
        if args.input_format is None:
            args.input_format = extension_to_format(args.input)
            if args.input_format is None:
                parser.error('Need an explicit input format')

        if args.output_format is None:
            args.output_format = extension_to_format(args.output)
            if args.output_format is None:
                parser.error('Need an explicit output format')

    # Wrap yaml_style.
    args.__dict__['yaml_options'] = {'default_style': args.yaml_style}
    del args.__dict__['yaml_style']

    return args
Example #46
0
            queues = worker_id.split(':')[-1].split(',')
            worker = cls(queues,resq)
            worker.id = worker_id
            return worker
        else:
            return None

    @classmethod
    def exists(cls, worker_id, resq):
        """Return whether *worker_id* is registered in the 'resque:workers' set."""
        registry = resq.redis
        return registry.sismember('resque:workers', worker_id)


try:
    from setproctitle import setproctitle
except ImportError:
    def setproctitle(name):
        """No-op stand-in used when the setproctitle package is unavailable."""


if __name__ == "__main__":
    from optparse import OptionParser

    # Minimal worker launcher: -q takes a comma-separated queue list
    # (required); -s is the redis host:port to connect to.
    cli = OptionParser()
    cli.add_option("-q", dest="queue_list")
    cli.add_option("-s", dest="server", default="localhost:6379")
    (opts, _args) = cli.parse_args()
    if not opts.queue_list:
        cli.print_help()
        cli.error("Please give each worker at least one queue.")
    Worker.run(opts.queue_list.split(','), opts.server)
Example #47
0
        get_quantity = util.devise(db, get_quantity, args.get_asset, 'input')

        cli('create_order', [args.source, args.give_asset, give_quantity,
                            args.get_asset, get_quantity, args.expiration,
                            fee_required, fee_provided],
           args.unsigned)

    elif args.action == 'btcpay':
        cli('create_btcpay', [args.order_match_id], args.unsigned)

    elif args.action == 'issuance':
        quantity = util.devise(db, args.quantity, None, 'input',
                               divisible=args.divisible)
        if args.callable_:
            if not args.call_date:
                parser.error('must specify call date of callable asset', )
            if not args.call_price:
                parser.error('must specify call price of callable asset')
            call_date = calendar.timegm(dateutil.parser.parse(args.call_date).utctimetuple())
            call_price = float(args.call_price)
        else:
            call_date, call_price = 0, 0

        cli('create_issuance', [args.source, args.asset, quantity,
                                args.divisible, args.description,
                                args.callable_, call_date, call_price,
                                args.transfer_destination],
           args.unsigned)

    elif args.action == 'broadcast':
        value = util.devise(db, args.value, 'value', 'input')
Example #48
0
def main(args=None):
    """Entry point for the porder command-line client.

    Builds the argparse tree (one subparser per porder subcommand, each
    bound to its handler via ``set_defaults(func=...)``) and dispatches
    to the handler for the chosen subcommand.

    Parameters
    ----------
    args : list[str] | None
        Argument vector to parse.  ``None`` means use ``sys.argv[1:]``.
    """
    parser = argparse.ArgumentParser(description="Ordersv2 Simple Client")
    subparsers = parser.add_subparsers()

    parser_read = subparsers.add_parser(
        "readme", help="Go the web based porder readme page")
    parser_read.set_defaults(func=read_from_parser)

    parser_planet_quota = subparsers.add_parser(
        "quota", help="Prints your Planet Quota Details")
    parser_planet_quota.set_defaults(func=planet_quota_from_parser)

    parser_convert = subparsers.add_parser(
        "convert", help="Convert all shapefiles or kmls in folder to GeoJSON")
    parser_convert.add_argument("--source", help="Choose Source Folder")
    parser_convert.add_argument("--destination",
                                help="Choose Destination Folder")
    parser_convert.set_defaults(func=convert_metadata_from_parser)

    parser_gcs_cred = subparsers.add_parser("base64",
                                            help="Base 64 encode a JSON file")
    required_named = parser_gcs_cred.add_argument_group(
        "Required named arguments.")
    required_named.add_argument("--cred",
                                help="Path to GCS credential file",
                                required=True)
    parser_gcs_cred.set_defaults(func=gcs_cred_from_parser)

    parser_simplify = subparsers.add_parser(
        "simplify",
        help=
        "Simplifies geometry to number of vertices specified using Visvalingam-Wyatt line simplification algorithm",
    )
    parser_simplify.add_argument("--input", help="Input GeoJSON file")
    parser_simplify.add_argument("--output",
                                 help="Output simplified GeoJSON file")
    parser_simplify.add_argument(
        "--number", help="Total number of vertices in output GeoJSON")
    parser_simplify.set_defaults(func=simplify_from_parser)

    parser_idlist = subparsers.add_parser(
        "idlist", help="Get idlist using geometry & filters")
    required_named = parser_idlist.add_argument_group(
        "Required named arguments.")
    required_named.add_argument(
        "--input",
        help="Input geometry file for now geojson/json/kml",
        required=True)
    required_named.add_argument(
        "--start",
        help="Start Date &/or Time yyyy-mm-ddTHH:MM:SS",
        required=True)
    required_named.add_argument("--end",
                                help="End Date &/or Time yyyy-mm-ddTHH:MM:SS",
                                required=True)
    required_named.add_argument(
        "--item",
        help="Item Type PSScene4Band|PSOrthoTile|REOrthoTile etc",
        required=True,
    )
    required_named.add_argument(
        "--asset",
        help="Asset Type analytic, analytic_sr,visual etc",
        default=None)
    required_named.add_argument("--outfile",
                                help="Output csv file",
                                required=True)
    optional_named = parser_idlist.add_argument_group(
        "Optional named arguments")
    optional_named.add_argument(
        "--cmin",
        help="Minimum cloud cover 0-1 represents 0 to 100",
        default=None)
    optional_named.add_argument(
        "--cmax",
        help="Maximum cloud cover 0-1 represents 0 to 100",
        default=None)
    optional_named.add_argument(
        "--number",
        help="Total number of assets, give a large number if you are not sure",
        default=None,
    )
    optional_named.add_argument(
        "--overlap",
        help=
        "Percentage overlap of image with search area range between 0 to 100",
        default=None,
    )
    optional_named.add_argument(
        "--filters",
        nargs="+",
        help="Add an additional string or range filter, Read Me",
        default=None,
    )
    parser_idlist.set_defaults(func=idlist_from_parser)

    parser_difflist = subparsers.add_parser(
        "difflist",
        help=
        "Checks the difference between local files and available Planet assets",
    )
    required_named = parser_difflist.add_argument_group(
        "Required named arguments.")
    required_named.add_argument(
        "--folder",
        help="local folder where image or metadata files are stored",
        required=True,
    )
    required_named.add_argument("--typ",
                                help="File type image or metadata",
                                required=True)
    required_named.add_argument(
        "--input",
        help="Input boundary to search (geojson, json)",
        required=True)
    required_named.add_argument(
        "--item",
        help="Planet Item Type PSScene4Band|PSOrthoTile|REOrthoTile etc",
        required=True,
    )
    required_named.add_argument(
        "--asset",
        help="Asset Type analytic, analytic_sr,visual etc",
        required=True)
    required_named.add_argument("--start",
                                help="Start date in format YYYY-MM-DD",
                                required=True)
    required_named.add_argument("--end",
                                help="End date in format YYYY-MM-DD",
                                required=True)
    required_named.add_argument(
        "--outfile",
        help="Full path to CSV file with difference ID list",
        required=True)
    optional_named = parser_difflist.add_argument_group(
        "Optional named arguments")
    optional_named.add_argument(
        "--cmin", help="Minimum cloud cover 0-1 represents 0 to 100")
    optional_named.add_argument(
        "--cmax", help="Maximum cloud cover 0-1 represents 0 to 100")
    parser_difflist.set_defaults(func=difflist_from_parser)

    parser_idsplit = subparsers.add_parser(
        "idsplit",
        help="Splits ID list incase you want to run them in small batches")
    parser_idsplit.add_argument("--idlist", help="Idlist txt file to split")
    parser_idsplit.add_argument(
        "--lines", help="Maximum number of lines in each split files")
    parser_idsplit.add_argument(
        "--local", help="Output folder where split files will be exported")
    parser_idsplit.set_defaults(func=idsplit_from_parser)

    parser_idcheck = subparsers.add_parser(
        "idcheck", help="Check idlist for estimating clipped area")
    parser_idcheck.add_argument("--idlist", help="Idlist csv file")
    parser_idcheck.add_argument("--item", help="Item type")
    parser_idcheck.add_argument("--asset", help="Asset type")
    optional_named = parser_idcheck.add_argument_group(
        "Optional named arguments")
    optional_named.add_argument("--geometry",
                                help="Geometry file for clip if any",
                                default=None)
    parser_idcheck.set_defaults(func=idcheck_from_parser)

    # Typo fix: help text previously read "given tiem type".
    parser_bundles = subparsers.add_parser(
        "bundles", help="Check bundles of assets for given item type")
    parser_bundles.add_argument("--item", help="Item type")
    parser_bundles.set_defaults(func=bundles_from_parser)

    parser_order = subparsers.add_parser(
        "order",
        help=
        'Place an order & get order url currently supports "toar","clip","composite","reproject","compression"',
    )
    required_named = parser_order.add_argument_group(
        "Required named arguments.")
    required_named.add_argument("--name",
                                help="Order Name to be Submitted",
                                required=True)
    required_named.add_argument("--idlist",
                                help="CSV idlist with item IDs",
                                required=True)
    required_named.add_argument(
        "--item",
        help="Item Type PSScene4Band|PSOrthoTile|REOrthoTile etc",
        required=True,
    )
    required_named.add_argument(
        "--bundle",
        help="Bundle Type: analytic, analytic_sr,analytic_sr_udm2",
        required=True,
    )
    optional_named = parser_order.add_argument_group(
        "Optional named arguments")
    optional_named.add_argument("--sid", help="Subscription ID", default=None)
    optional_named.add_argument(
        "--boundary",
        help="Boundary/geometry for clip operation geojson|json|kml",
        default=None,
    )
    optional_named.add_argument(
        "--projection",
        help='Projection for reproject operation of type "EPSG:4326"',
        default=None,
    )
    optional_named.add_argument(
        "--gee",
        help='provide gee-project,gee-collection',
        default=None,
    )
    optional_named.add_argument(
        "--kernel",
        help=
        'Resampling kernel used "near", "bilinear", "cubic", "cubicspline", "lanczos", "average" and "mode"',
        default=None,
    )
    optional_named.add_argument(
        "--compression",
        help='Compression type used for tiff_optimize tool, "lzw"|"deflate"',
        default=None,
    )
    optional_named.add_argument(
        "--anchor",
        help='Anchor Item ID to use for the coregistration tool',
        default=None,
    )
    optional_named.add_argument(
        "--format",
        help=
        'Delivery format choose from COG/PL_NITF to use for the format tool',
        default=None,
    )
    optional_named.add_argument("--aws",
                                help="AWS cloud credentials config yml file",
                                default=None)
    optional_named.add_argument("--azure",
                                help="Azure cloud credentials config yml file",
                                default=None)
    optional_named.add_argument("--gcs",
                                help="GCS cloud credentials config yml file",
                                default=None)
    optional_named.add_argument(
        "--op",
        nargs="+",
        help=
        "Add operations, delivery & notification clip|toar|harmonize|composite|zip|zipall|compression|projection|kernel|coreg|format|aws|azure|gcs|gee|email <Choose indices from>: ndvi|gndvi|ndwi|bndvi|tvi_deering|tvi_brogeleblanc|osavi|evi2|sr|msavi2",
        default=None,
    )

    parser_order.set_defaults(func=order_from_parser)

    parser_reorder = subparsers.add_parser("reorder",
                                           help="Reorder an existing order")
    required_named = parser_reorder.add_argument_group(
        "Required named arguments.")
    required_named.add_argument("--url",
                                help="Order url to be ordered",
                                required=True)
    optional_named = parser_reorder.add_argument_group(
        "Optional named arguments")
    optional_named.add_argument(
        "--notification",
        help='Use "email" to get an email notification',
        default=None)
    parser_reorder.set_defaults(func=reorder_from_parser)

    parser_cancel = subparsers.add_parser("cancel",
                                          help="Cancel queued order(s)")
    parser_cancel.add_argument(
        "--id", help='order id you want to cancel use "all" to cancel all')
    parser_cancel.set_defaults(func=cancel_from_parser)

    parser_ordersize = subparsers.add_parser(
        "ordersize", help="Estimate total download size")
    parser_ordersize.add_argument("--url",
                                  help="order url you got for your order")
    parser_ordersize.set_defaults(func=ordersize_from_parser)

    parser_ostate = subparsers.add_parser(
        "ostate", help="Get list of orders by state and date range")
    parser_ostate.add_argument(
        "--state",
        help=
        "choose state between queued| running | success | failed | partial",
    )
    parser_ostate.add_argument("--start",
                               help="start date in format YYYY-MM-DD")
    parser_ostate.add_argument("--end", help="end date in format YYYY-MM-DD")
    optional_named = parser_ostate.add_argument_group(
        "Optional named arguments")
    optional_named.add_argument("--limit",
                                help="Limit the maximum table size",
                                default=None)
    parser_ostate.set_defaults(func=ostate_from_parser)

    parser_stats = subparsers.add_parser(
        "stats",
        help="Prints number of orders queued and running for org & user")
    parser_stats.set_defaults(func=stats_from_parser)

    parser_download = subparsers.add_parser(
        "download", help="Downloads all files in your order")
    parser_download.add_argument("--url",
                                 help="order url you got for your order")
    parser_download.add_argument(
        "--local", help="Output folder where ordered files will be exported")
    optional_named = parser_download.add_argument_group(
        "Optional named arguments")
    optional_named.add_argument("--ext",
                                help="File Extension to download",
                                default=None)
    parser_download.set_defaults(func=download_from_parser)

    parser_asyncdownload = subparsers.add_parser(
        "multipart",
        help="Uses multiprocessing to download for all files in your order")
    parser_asyncdownload.add_argument("--url",
                                      help="order url you got for your order")
    parser_asyncdownload.add_argument(
        "--local", help="Output folder where ordered files will be exported")
    optional_named = parser_asyncdownload.add_argument_group(
        "Optional named arguments")
    optional_named.add_argument("--ext",
                                help="File Extension to download",
                                default=None)
    parser_asyncdownload.set_defaults(func=asyncdownload_from_parser)

    parser_multiproc = subparsers.add_parser(
        "multiproc",
        help=
        "Multiprocess based downloader to download for all files in your order",
    )
    parser_multiproc.add_argument("--url",
                                  help="order url you got for your order")
    parser_multiproc.add_argument(
        "--local", help="Output folder where ordered files will be exported")
    optional_named = parser_multiproc.add_argument_group(
        "Optional named arguments")
    optional_named.add_argument("--ext",
                                help="File Extension to download",
                                default=None)
    parser_multiproc.set_defaults(func=multiproc_from_parser)
    # BUG FIX: honour the ``args`` parameter instead of always parsing
    # sys.argv -- parse_args() previously ignored it, making main()
    # impossible to drive programmatically or from tests.
    args = parser.parse_args(args)

    try:
        func = args.func
    except AttributeError:
        # No subcommand given: argparse on Python 3 does not error on its
        # own, so report it explicitly (parser.error exits the process).
        parser.error("too few arguments")
    func(args)
Example #49
0
 def cmderror(parser, msg):
     try:
         parser.error(msg)
     except SystemExit:
         pass
Example #50
0
                         help='Process Launchpad Bugs'),
    optparse.make_option('',
                         '--bp',
                         action='store_true',
                         dest='lpbps',
                         default=False,
                         help='Process Launchpad Blueprints')
]

usage = 'usage: %prog [-h|--help|<options>]'
parser = optparse.OptionParser(usage=usage, option_list=option_list)

# OptionParser gets the options out, whatever is not preceeded by
# an option is considered args.
(options, args) = parser.parse_args()

# we are not expecting args right now
if args:
    parser.error('Invalid argment(s) found: ' + str(args))

# check options
if not options.lpbugs and not options.lpbps:
    parser.print_help()
    sys.exit(1)
elif options.lpbugs:
    print "INFO: Processing Launchpad Bugs ..."
    Bug()
elif options.lpbps:
    print "INFO: Processing Launchpad Blueprints ..."
    Blueprint()
Example #51
0
    # NOTE(review): interior of a larger entry point; `__version__`,
    # `_interactive`, `query` and `SparqlException` are defined out of view.
    # Wrap stdout so non-encodable characters are replaced rather than
    # raising UnicodeEncodeError.
    try:
        c = codecs.getwriter(sys.stdout.encoding)
    except:  # NOTE(review): bare except also hides unrelated errors
        c = codecs.getwriter('ascii')
    sys.stdout = c(sys.stdout, 'replace')


    parser = OptionParser(usage="%prog [-i] endpoint",
        version="%prog " + str(__version__))
    parser.add_option("-i", dest="interactive", action="store_true",
                help="Enables interactive mode")

    (options, args) = parser.parse_args()

    # Exactly one positional argument (the SPARQL endpoint URL) is required.
    if len(args) != 1:
        parser.error("Endpoint must be specified")

    endpoint = args[0]

    if options.interactive:
        _interactive(endpoint)

    # Non-interactive mode: read one query from stdin, print the result rows.
    q = sys.stdin.read()
    try:
        result = query(endpoint, q)
        # NOTE(review): fetchone() normally returns a single row; iterating
        # it walks that row's fields -- confirm this output shape is intended.
        for row in result.fetchone():
            print("\t".join(map(six.text_type, row)))
    except SparqlException as e:
        # NOTE(review): e.message is Python-2 era; on Python 3 prefer
        # e.args[0] -- confirm target interpreter.
        faultString = e.message
        print(faultString)
def parse_args(args=None):
    """Parse command-line options for geo-coding Wikipedia editor activity.

    Parameters
    ----------
    args : list[str] | None
        Argument vector to parse; ``None`` (the default, backward
        compatible) means ``sys.argv[1:]``.

    Returns
    -------
    dict
        ``vars()`` of the parsed namespace after post-processing: start
        date defaulted, thread count derived, output subdirectory named
        ``<start>_<end>``, and the project list validated.
    """

    class WPFileAction(argparse.Action):
        """
        This action is fired upon parsing the --wpfiles option which should be a list of
        tsv file names.  Each named file should have the wp project codes as the first column
        The codes will be used to query the database with the name <ID>wiki.
        """

        def __call__(self, parser, namespace, values, option_string=None):
            # Gather the first whitespace-delimited token (the project code)
            # from every non-comment line of every listed file.
            # BUG FIX: the previous functional pipeline leaked open file
            # handles (map(open, ...) with no close) and relied on the
            # Python-2-only `file` builtin; ``with`` closes each file and
            # works on Python 3.
            codes = []
            for path in values:
                with open(path) as tsv:
                    codes.extend(
                        line.split()[0] for line in tsv if line[0] != '#')
            # De-duplicate; ordering is unspecified, as with the original
            # list(set(...)).
            setattr(namespace, self.dest, list(set(codes)))

    def auto_date(datestr):
        """Parse *datestr* with dateutil and return a ``datetime.date``."""
        return dateutil.parser.parse(datestr).date()

    parser = argparse.ArgumentParser(
        description="""Geo coding editor activity on Wikipedia""",
        formatter_class=argparse.ArgumentDefaultsHelpFormatter
    )
    parser.add_argument(
        '-o', '--output',
        dest='output_dir',
        metavar='output_dir',
        default='./output',
        help='<path> for output.  program will actually make a subdirectory within the'
        'provided directory named according to the start and end date'
    )
    parser.add_argument(
        '-b', '--basename',
        default='geowiki',
        help='base output file name used in <BASENAME>_wp_pr_start_end.{json,tsv}'
    )
    parser.add_argument(
        '-p', '--wp',
        metavar='proj',
        nargs='+',
        dest='wp_projects',
        default=[],
        help='the wiki project to analyze (e.g. `en`)',
    )
    parser.add_argument(
        '--wpfiles',
        metavar='wp_ids.tsv',
        nargs='+',
        dest='wp_projects',
        action=WPFileAction,
        help='list of tsv files in which the first column is the project id and the second column is the full name'
        'will clobber any arguments passed in by --wp if --wpfiles appears later in list'
    )
    parser.add_argument(
        '-s', '--start',
        metavar='start_timestamp',
        type=auto_date,
        default=None,
        dest='start',
        help="inclusive query start date. parses string with dateutil.parser.parse().  Note that if only "
        "a date is given, the hours, minutes and seconds will be filled in with 0's"
    )
    parser.add_argument(
        '-e', '--end',
        metavar='end_timestamp',
        type=auto_date,
        default=datetime.date.today() - datetime.timedelta(days=1),
        dest='end',
        help="exclusive query end date. parses string with dateutil.parser.parse()"
    )
    parser.add_argument(
        '--daily',
        action='store_true',
        default=False,
        help='including this flag instructs the program to run monthly queries ending on each day between the '
        'start and end date instead of only once for the entire range'
    )
    parser.add_argument(
        '-n', '--threads',
        metavar='',
        type=int,
        dest='threads',
        help="number of threads"
    )
    parser.add_argument(
        '-q', '--quiet',
        action='store_true',
        help="set logging level to INFO rather than DEBUG"
    )
    parser.add_argument(
        '-g', '--geoDB',
        metavar='',
        default='/usr/share/GeoIP/GeoIPCity.dat',
        dest='geoIP_db',
        help='<path> to geo IP database'
    )
    parser.add_argument(
        '--top_cities',
        type=int,
        default=10,
        help='number of cities to report when aggregating by city'
    )
    parser.add_argument(
        '--source_sql_cnf',
        type=os.path.expanduser,
        default='~/.my.cnf',
        help='mysql ini-style option file which allows a user to write to the read from database'
        'production mediawiki databases to collect ip info. For more information, see '
        'http://dev.mysql.com/doc/refman/5.1/en/option-files.html'
    )
    parser.add_argument(
        '--dest_sql_cnf',
        type=os.path.expanduser,
        default='~/.my.cnf.research',
        help='mysql ini-style option file which allows a user to write to the destination database'
        'for use with the write_*_sql output options.  For more information, see '
        'http://dev.mysql.com/doc/refman/5.1/en/option-files.html'
    )
    parser.add_argument(
        '--dest_db_name',
        default='staging',
        help='name of database in which to insert results'
    )
    parser.add_argument(
        '--active_editors_country',
        default=mysql_config.DEST_TABLE_NAMES['active_editors_country'],
        help='table in `dest_sql` db in which the active editor cohorts by country will be stored'
    )
    parser.add_argument(
        '--active_editors_world',
        default=mysql_config.DEST_TABLE_NAMES['active_editors_world'],
        help='table in `dest_sql` db in which the active editor cohorts for the entire world will be stored'
    )
    parser.add_argument(
        '--city_edit_fraction',
        default=mysql_config.DEST_TABLE_NAMES['city_edit_fraction'],
        help='table in `dest_sql` db in which the fraction of total country edits originating from'
        'the given city will be stored'
    )
    parser.add_argument(
        '--country_total_edit',
        default=mysql_config.DEST_TABLE_NAMES['country_total_edit'],
        help='table in `dest_sql` db in which the total number of edits from a given country will be stored'
    )

    # post processing
    args = parser.parse_args(args)
    if not args.start:
        # Default to a one-month window ending at --end.
        args.start = args.end - dateutil.relativedelta.relativedelta(months=1)

    # The check_user table only persists 90 days of data; daily runs need a
    # 30-day lookback inside that window.
    cu_start = datetime.date.today() - datetime.timedelta(days=90)
    if args.daily and args.start < cu_start + datetime.timedelta(days=30):
        parser.error('starting date (%s) exceeds persistence of check_user table (90 days, i.e. %s)' % (args.start, cu_start))

    wp_projects = wikipedia_projects.check_validity(args.wp_projects)
    # NOTE(review): the validated list is only used for this emptiness check;
    # args.wp_projects itself is left as parsed -- confirm that is intended.
    if not wp_projects:
        parser.error('no valid wikipedia projects received\n'
                     '       must either include the --wp flag or the --wpfiles flag\n')

    if not args.threads:
        setattr(args, 'threads', min(len(args.wp_projects), 30))
        # BUG FIX: log the actual thread count (capped at 30), not the
        # project count.
        logger.info('Running with %d threads', args.threads)

    if args.quiet:
        logger.setLevel(logging.INFO)

    # create top-level dir
    if not os.path.exists(args.output_dir):
        os.makedirs(args.output_dir)

    # Subdirectory name encodes the queried date range.
    args.subdir = '%s_%s' % (
        datetime.date.strftime(args.start, '%Y%m%d'),
        datetime.date.strftime(args.end, '%Y%m%d'))

    # check for mysql login credentials
    if not os.path.exists(os.path.expanduser("~/.my.cnf")):
        logger.error("~/.my.cnf does not exist! MySql connection might fail")

    logger.info('args: %s', pprint.pformat(vars(args), indent=2))
    return vars(args)
Example #53
0
def read_options():
    """Parse command-line options for the report generator.

    Returns the optparse ``Values`` object.  Exits via ``parser.error`` if
    positional arguments are supplied or a mandatory option is missing.
    """
    parser = OptionParser(usage="usage: %prog [options]", version="%prog 0.1")
    parser.add_option("-d",
                      "--database",
                      action="store",
                      dest="dbname",
                      help="Database where information is stored")
    parser.add_option("-u",
                      "--dbuser",
                      action="store",
                      dest="dbuser",
                      default="root",
                      help="Database user")
    parser.add_option("-p",
                      "--dbpassword",
                      action="store",
                      dest="dbpassword",
                      default="",
                      help="Database password")
    parser.add_option("-g",
                      "--granularity",
                      action="store",
                      dest="granularity",
                      default="months",
                      help="year,months,weeks granularity")
    parser.add_option("-o",
                      "--destination",
                      action="store",
                      dest="destdir",
                      default="data/json",
                      help="Destination directory for JSON files")
    parser.add_option(
        "-r",
        "--reports",
        action="store",
        dest="reports",
        default="",
        help=
        "Reports to be generated (repositories, companies, countries, people)")
    parser.add_option("-s",
                      "--start",
                      action="store",
                      dest="startdate",
                      default="1900-01-01",
                      help="Start date for the report")
    parser.add_option("-e",
                      "--end",
                      action="store",
                      dest="enddate",
                      default="2100-01-01",
                      help="End date for the report")
    parser.add_option("-i",
                      "--identities",
                      action="store",
                      dest="identities_db",
                      help="Database with unique identities and affiliations")
    parser.add_option("-t",
                      "--type",
                      action="store",
                      dest="backend",
                      default="bugzilla",
                      help="Type of backend: bugzilla, allura, jira, github")
    parser.add_option("--npeople",
                      action="store",
                      dest="npeople",
                      default="10",
                      help="Limit for people analysis")

    (opts, args) = parser.parse_args()

    # This tool takes options only -- any positional argument is a mistake.
    if len(args) != 0:
        parser.error("Wrong number of arguments")

    # NOTE: dbuser defaults to "root", so in practice only --database and
    # --identities can trigger this error.
    if not (opts.dbname and opts.dbuser and opts.identities_db):
        # BUG FIX: the message named a nonexistent option "--db-user";
        # the actual flag is "--dbuser".
        parser.error("--database --dbuser and --identities are needed")
    return opts
Example #54
0
    # NOTE(review): interior of a larger argument parser; `parser`, the
    # earlier options (--geom, --lat, --lon, --title, --outdir, --width,
    # --height, ...) and `download_safe_from_peps` / `utils` are defined
    # outside this excerpt.
    parser.add_argument('--swath-identifier',
                        help='(for S1) subswath id: S1..S6 or IW1..IW3 or EW1..EW5')
    parser.add_argument('--api', default='scihub',
                        help='search API to use: scihub or planet')
    parser.add_argument('--mirror', default='peps',
                        help='download mirror: scihub or aws (GRD only)')
    parser.add_argument('--orbit', type=int,
                        help='relative orbit number, from 1 to 175')
    parser.add_argument('--parallel', type=int, default=multiprocessing.cpu_count(),
                        help='number of parallel downloads')
    parser.add_argument('--timeout', type=int, default=600,
                        help='timeout for images downloads, in seconds')
    args = parser.parse_args()

    # The area of interest may be given as a geometry, a lat/lon pair, or a
    # product title -- and only one of the three.
    if args.geom and (args.lat or args.lon):
        parser.error('--geom and {--lat, --lon} are mutually exclusive')

    if args.title and (args.geom or args.lat or args.lon):
        parser.error('--title, --geom and {--lat, --lon} are mutually exclusive')

    if not args.geom and (not args.lat or not args.lon) and (not args.title):
        parser.error('either --geom, {--lat, --lon} or --title must be defined')

    if args.title:
        # A title uniquely identifies one product: download it directly.
        download_safe_from_peps(args.title, args.outdir)
    else:
        # Build the AOI from the geometry, or synthesize one around lat/lon.
        if args.geom:
            aoi = args.geom
        else:
            aoi = utils.geojson_geometry_object(args.lat, args.lon, args.width,
                                                args.height)
    # NOTE(review): the enclosing function continues past this excerpt.
Example #55
0
def main():
    """Command-line entry point: report a user's recent activity.

    Parses options (user, date range, activity types, output formats),
    authenticates where needed, fetches changes/reviews/issues via
    ``MyActivity``, and prints the results to stdout or to the file
    given with -o/--output.

    Returns:
        0 on success; 1 if the resolved begin date is the same as or
        later than the end date.
    """
    parser = optparse.OptionParser(description=sys.modules[__name__].__doc__)
    parser.add_option(
        '-u',
        '--user',
        metavar='<email>',
        # Look for USER and USERNAME (Windows) environment variables.
        default=os.environ.get('USER', os.environ.get('USERNAME')),
        help='Filter on user, default=%default')
    parser.add_option('-b',
                      '--begin',
                      metavar='<date>',
                      help='Filter issues created after the date (mm/dd/yy)')
    parser.add_option('-e',
                      '--end',
                      metavar='<date>',
                      help='Filter issues created before the date (mm/dd/yy)')
    # Quarter containing "two months ago", so -Q reports the previous
    # (most recently completed) quarter.
    quarter_begin, quarter_end = get_quarter_of(datetime.today() -
                                                relativedelta(months=2))
    parser.add_option(
        '-Q',
        '--last_quarter',
        action='store_true',
        help='Use last quarter\'s dates, i.e. %s to %s' %
        (quarter_begin.strftime('%Y-%m-%d'), quarter_end.strftime('%Y-%m-%d')))
    parser.add_option('-Y',
                      '--this_year',
                      action='store_true',
                      help='Use this year\'s dates')
    parser.add_option('-w',
                      '--week_of',
                      metavar='<date>',
                      help='Show issues for week of the date (mm/dd/yy)')
    parser.add_option(
        '-W',
        '--last_week',
        action='count',
        help='Show last week\'s issues. Use more times for more weeks.')
    parser.add_option(
        '-a',
        '--auth',
        action='store_true',
        help='Ask to authenticate for instances with no auth cookie')
    parser.add_option('-d',
                      '--deltas',
                      action='store_true',
                      help='Fetch deltas for changes.')
    parser.add_option(
        '--no-referenced-issues',
        action='store_true',
        help='Do not fetch issues referenced by owned changes. Useful in '
        'combination with --changes-by-issue when you only want to list '
        'issues that have also been modified in the same time period.')
    parser.add_option(
        '--skip-own-issues-without-changes',
        action='store_true',
        help='Skips listing own issues without changes when showing changes '
        'grouped by referenced issue(s). See --changes-by-issue for more '
        'details.')

    # Which kinds of activity to look up; if none are given, all are
    # enabled after parsing (see below).
    activity_types_group = optparse.OptionGroup(
        parser, 'Activity Types',
        'By default, all activity will be looked up and '
        'printed. If any of these are specified, only '
        'those specified will be searched.')
    activity_types_group.add_option('-c',
                                    '--changes',
                                    action='store_true',
                                    help='Show changes.')
    activity_types_group.add_option('-i',
                                    '--issues',
                                    action='store_true',
                                    help='Show issues.')
    activity_types_group.add_option('-r',
                                    '--reviews',
                                    action='store_true',
                                    help='Show reviews.')
    activity_types_group.add_option(
        '--changes-by-issue',
        action='store_true',
        help='Show changes grouped by referenced issue(s).')
    parser.add_option_group(activity_types_group)

    # str.format templates used when printing each activity type.
    output_format_group = optparse.OptionGroup(
        parser, 'Output Format',
        'By default, all activity will be printed in the '
        'following format: {url} {title}. This can be '
        'changed for either all activity types or '
        'individually for each activity type. The format '
        'is defined as documented for '
        'string.format(...). The variables available for '
        'all activity types are url, title, author, '
        'created and modified. Format options for '
        'specific activity types will override the '
        'generic format.')
    output_format_group.add_option(
        '-f',
        '--output-format',
        metavar='<format>',
        default=u'{url} {title}',
        help='Specifies the format to use when printing all your activity.')
    output_format_group.add_option(
        '--output-format-changes',
        metavar='<format>',
        default=None,
        help='Specifies the format to use when printing changes. Supports the '
        'additional variable {reviewers}')
    output_format_group.add_option(
        '--output-format-issues',
        metavar='<format>',
        default=None,
        help='Specifies the format to use when printing issues. Supports the '
        'additional variable {owner}.')
    output_format_group.add_option(
        '--output-format-reviews',
        metavar='<format>',
        default=None,
        help='Specifies the format to use when printing reviews.')
    output_format_group.add_option(
        '--output-format-heading',
        metavar='<format>',
        default=u'{heading}:',
        help='Specifies the format to use when printing headings.')
    output_format_group.add_option(
        '--output-format-no-url',
        default='{title}',
        help='Specifies the format to use when printing activity without url.')
    output_format_group.add_option(
        '-m',
        '--markdown',
        action='store_true',
        help='Use markdown-friendly output (overrides --output-format '
        'and --output-format-heading)')
    output_format_group.add_option(
        '-j',
        '--json',
        action='store_true',
        help='Output json data (overrides other format options)')
    parser.add_option_group(output_format_group)
    auth.add_auth_options(parser)

    parser.add_option('-v',
                      '--verbose',
                      action='store_const',
                      dest='verbosity',
                      default=logging.WARN,
                      const=logging.INFO,
                      help='Output extra informational messages.')
    parser.add_option('-q',
                      '--quiet',
                      action='store_const',
                      dest='verbosity',
                      const=logging.ERROR,
                      help='Suppress non-error messages.')
    parser.add_option('-M',
                      '--merged-only',
                      action='store_true',
                      dest='merged_only',
                      default=False,
                      help='Shows only changes that have been merged.')
    parser.add_option(
        '-C',
        '--completed-issues',
        action='store_true',
        dest='completed_issues',
        default=False,
        help='Shows only monorail issues that have completed (Fixed|Verified) '
        'by the user.')
    parser.add_option(
        '-o',
        '--output',
        metavar='<file>',
        help='Where to output the results. By default prints to stdout.')

    # Remove description formatting
    parser.format_description = (lambda _: parser.description)  # pylint: disable=no-member

    options, args = parser.parse_args()
    # Local OS account name, kept separately from the -u filter value.
    options.local_user = os.environ.get('USER')
    if args:
        parser.error('Args unsupported')
    if not options.user:
        parser.error('USER/USERNAME is not set, please use -u')
    # Retains the original -u option as the email address.
    options.email = options.user
    # username() presumably strips the domain from the email -- confirm
    # against its definition elsewhere in this file.
    options.user = username(options.email)

    logging.basicConfig(level=options.verbosity)

    # python-keyring provides easy access to the system keyring.
    try:
        import keyring  # pylint: disable=unused-import,unused-variable,F0401
    except ImportError:
        logging.warning('Consider installing python-keyring')

    # Resolve the reporting window: an explicit --begin wins; otherwise
    # fall back to -Q / -Y / -w / -W, and finally to the week containing
    # yesterday.
    if not options.begin:
        if options.last_quarter:
            begin, end = quarter_begin, quarter_end
        elif options.this_year:
            begin, end = get_year_of(datetime.today())
        elif options.week_of:
            begin, end = (get_week_of(
                datetime.strptime(options.week_of, '%m/%d/%y')))
        elif options.last_week:
            begin, end = (
                get_week_of(datetime.today() -
                            timedelta(days=1 + 7 * options.last_week)))
        else:
            begin, end = (get_week_of(datetime.today() - timedelta(days=1)))
    else:
        begin = dateutil.parser.parse(options.begin)
        if options.end:
            end = dateutil.parser.parse(options.end)
        else:
            end = datetime.today()
    options.begin, options.end = begin, end
    if begin >= end:
        # The queries fail in peculiar ways when the begin date is in the future.
        # Give a descriptive error message instead.
        logging.error(
            'Start date (%s) is the same or later than end date (%s)' %
            (begin, end))
        return 1

    # Markdown mode replaces the generic templates with bullet-list
    # equivalents (overrides any -f/--output-format-* values).
    if options.markdown:
        options.output_format_heading = '### {heading}\n'
        options.output_format = '  * [{title}]({url})'
        options.output_format_no_url = '  * {title}'
    logging.info('Searching for activity by %s', options.user)
    logging.info('Using range %s to %s', options.begin, options.end)

    my_activity = MyActivity(options)
    my_activity.show_progress('Loading data')

    # If no activity type was requested explicitly, look up everything.
    if not (options.changes or options.reviews or options.issues
            or options.changes_by_issue):
        options.changes = True
        options.issues = True
        options.reviews = True

    # First do any required authentication so none of the user interaction has to
    # wait for actual work.
    if options.changes or options.changes_by_issue:
        my_activity.auth_for_changes()
    if options.reviews:
        my_activity.auth_for_reviews()

    logging.info('Looking up activity.....')

    # Authentication failures are logged but do not abort, so any
    # partially fetched results can still be printed below.
    try:
        if options.changes or options.changes_by_issue:
            my_activity.get_changes()
        if options.reviews:
            my_activity.get_reviews()
        if options.issues or options.changes_by_issue:
            my_activity.get_issues()
        if not options.no_referenced_issues:
            my_activity.get_referenced_issues()
    except auth.AuthenticationError as e:
        logging.error('auth.AuthenticationError: %s', e)

    my_activity.show_progress('\n')

    my_activity.print_access_errors()

    # Redirect stdout to --output if requested.  The else-branch runs
    # only when the redirection (or the stdout no-op) succeeded; finally
    # restores the real stdout and closes the file.
    output_file = None
    try:
        if options.output:
            output_file = open(options.output, 'w')
            logging.info('Printing output to "%s"', options.output)
            sys.stdout = output_file
    except (IOError, OSError) as e:
        logging.error('Unable to write output: %s', e)
    else:
        if options.json:
            my_activity.dump_json()
        else:
            if options.changes:
                my_activity.print_changes()
            if options.reviews:
                my_activity.print_reviews()
            if options.issues:
                my_activity.print_issues()
            if options.changes_by_issue:
                my_activity.print_changes_by_issue(
                    options.skip_own_issues_without_changes)
    finally:
        if output_file:
            logging.info('Done printing to file.')
            sys.stdout = sys.__stdout__
            output_file.close()

    return 0
def main():
    """Command-line entry point: report a user's recent activity.

    A Rietveld-era variant: parses options (user, date range, activity
    types, output formats), authenticates where needed, fetches
    changes/reviews/issues via ``MyActivity``, and prints them to stdout
    or to the file given with -o/--output.

    Returns:
        0 always (option errors exit via parser.error).
    """
    # Silence upload.py.
    rietveld.upload.verbosity = 0

    parser = optparse.OptionParser(description=sys.modules[__name__].__doc__)
    parser.add_option('-u',
                      '--user',
                      metavar='<email>',
                      default=os.environ.get('USER'),
                      help='Filter on user, default=%default')
    parser.add_option('-b',
                      '--begin',
                      metavar='<date>',
                      help='Filter issues created after the date (mm/dd/yy)')
    parser.add_option('-e',
                      '--end',
                      metavar='<date>',
                      help='Filter issues created before the date (mm/dd/yy)')
    # Quarter containing "two months ago", so -Q reports the previous
    # (most recently completed) quarter.
    quarter_begin, quarter_end = get_quarter_of(datetime.today() -
                                                relativedelta(months=2))
    parser.add_option(
        '-Q',
        '--last_quarter',
        action='store_true',
        help='Use last quarter\'s dates, i.e. %s to %s' %
        (quarter_begin.strftime('%Y-%m-%d'), quarter_end.strftime('%Y-%m-%d')))
    parser.add_option('-Y',
                      '--this_year',
                      action='store_true',
                      help='Use this year\'s dates')
    parser.add_option('-w',
                      '--week_of',
                      metavar='<date>',
                      help='Show issues for week of the date (mm/dd/yy)')
    parser.add_option(
        '-W',
        '--last_week',
        action='count',
        help='Show last week\'s issues. Use more times for more weeks.')
    parser.add_option(
        '-a',
        '--auth',
        action='store_true',
        help='Ask to authenticate for instances with no auth cookie')
    parser.add_option('-d',
                      '--deltas',
                      action='store_true',
                      help='Fetch deltas for changes (slow).')

    # Which kinds of activity to look up; if none are given, all are
    # enabled after parsing (see below).
    activity_types_group = optparse.OptionGroup(
        parser, 'Activity Types',
        'By default, all activity will be looked up and '
        'printed. If any of these are specified, only '
        'those specified will be searched.')
    activity_types_group.add_option('-c',
                                    '--changes',
                                    action='store_true',
                                    help='Show changes.')
    activity_types_group.add_option('-i',
                                    '--issues',
                                    action='store_true',
                                    help='Show issues.')
    activity_types_group.add_option('-r',
                                    '--reviews',
                                    action='store_true',
                                    help='Show reviews.')
    parser.add_option_group(activity_types_group)

    # str.format templates used when printing each activity type.
    output_format_group = optparse.OptionGroup(
        parser, 'Output Format',
        'By default, all activity will be printed in the '
        'following format: {url} {title}. This can be '
        'changed for either all activity types or '
        'individually for each activity type. The format '
        'is defined as documented for '
        'string.format(...). The variables available for '
        'all activity types are url, title and author. '
        'Format options for specific activity types will '
        'override the generic format.')
    output_format_group.add_option(
        '-f',
        '--output-format',
        metavar='<format>',
        default=u'{url} {title}',
        help='Specifies the format to use when printing all your activity.')
    output_format_group.add_option(
        '--output-format-changes',
        metavar='<format>',
        default=None,
        help='Specifies the format to use when printing changes. Supports the '
        'additional variable {reviewers}')
    output_format_group.add_option(
        '--output-format-issues',
        metavar='<format>',
        default=None,
        help='Specifies the format to use when printing issues. Supports the '
        'additional variable {owner}.')
    output_format_group.add_option(
        '--output-format-reviews',
        metavar='<format>',
        default=None,
        help='Specifies the format to use when printing reviews.')
    output_format_group.add_option(
        '--output-format-heading',
        metavar='<format>',
        default=u'{heading}:',
        help='Specifies the format to use when printing headings.')
    output_format_group.add_option(
        '-m',
        '--markdown',
        action='store_true',
        help='Use markdown-friendly output (overrides --output-format '
        'and --output-format-heading)')
    output_format_group.add_option(
        '-j',
        '--json',
        action='store_true',
        help='Output json data (overrides other format options)')
    parser.add_option_group(output_format_group)
    auth.add_auth_options(parser)

    parser.add_option('-v',
                      '--verbose',
                      action='store_const',
                      dest='verbosity',
                      default=logging.WARN,
                      const=logging.INFO,
                      help='Output extra informational messages.')
    parser.add_option('-q',
                      '--quiet',
                      action='store_const',
                      dest='verbosity',
                      const=logging.ERROR,
                      help='Suppress non-error messages.')
    parser.add_option(
        '-o',
        '--output',
        metavar='<file>',
        help='Where to output the results. By default prints to stdout.')

    # Remove description formatting
    parser.format_description = (lambda _: parser.description)  # pylint: disable=no-member

    options, args = parser.parse_args()
    # Local OS account name, kept separately from the -u filter value.
    options.local_user = os.environ.get('USER')
    if args:
        parser.error('Args unsupported')
    if not options.user:
        parser.error('USER is not set, please use -u')
    # username() presumably strips the domain from the email -- confirm
    # against its definition elsewhere in this file.
    options.user = username(options.user)

    logging.basicConfig(level=options.verbosity)

    # python-keyring provides easy access to the system keyring.
    try:
        import keyring  # pylint: disable=unused-import,unused-variable,F0401
    except ImportError:
        logging.warning('Consider installing python-keyring')

    # Resolve the reporting window: an explicit --begin wins; otherwise
    # fall back to -Q / -Y / -w / -W, and finally to the week containing
    # yesterday.
    if not options.begin:
        if options.last_quarter:
            begin, end = quarter_begin, quarter_end
        elif options.this_year:
            begin, end = get_year_of(datetime.today())
        elif options.week_of:
            begin, end = (get_week_of(
                datetime.strptime(options.week_of, '%m/%d/%y')))
        elif options.last_week:
            begin, end = (
                get_week_of(datetime.today() -
                            timedelta(days=1 + 7 * options.last_week)))
        else:
            begin, end = (get_week_of(datetime.today() - timedelta(days=1)))
    else:
        begin = datetime.strptime(options.begin, '%m/%d/%y')
        if options.end:
            end = datetime.strptime(options.end, '%m/%d/%y')
        else:
            end = datetime.today()
    # NOTE(review): no validation that begin < end here -- consider
    # rejecting a begin date at or after end, as downstream queries may
    # misbehave with a future begin date.
    options.begin, options.end = begin, end

    # Markdown mode replaces the generic templates with bullet-list
    # equivalents (overrides any -f/--output-format-* values).
    if options.markdown:
        options.output_format = ' * [{title}]({url})'
        options.output_format_heading = '### {heading} ###'
    logging.info('Searching for activity by %s', options.user)
    logging.info('Using range %s to %s', options.begin, options.end)

    my_activity = MyActivity(options)

    # If no activity type was requested explicitly, look up everything.
    if not (options.changes or options.reviews or options.issues):
        options.changes = True
        options.issues = True
        options.reviews = True

    # First do any required authentication so none of the user interaction has to
    # wait for actual work.
    if options.changes:
        my_activity.auth_for_changes()
    if options.reviews:
        my_activity.auth_for_reviews()

    logging.info('Looking up activity.....')

    # Authentication failures are logged but do not abort, so any
    # partially fetched results can still be printed below.
    try:
        if options.changes:
            my_activity.get_changes()
        if options.reviews:
            my_activity.get_reviews()
        if options.issues:
            my_activity.get_issues()
    except auth.AuthenticationError as e:
        logging.error('auth.AuthenticationError: %s', e)

    # Redirect stdout to --output if requested.  The else-branch runs
    # only when the redirection (or the stdout no-op) succeeded; finally
    # restores the real stdout and closes the file.
    output_file = None
    try:
        if options.output:
            output_file = open(options.output, 'w')
            logging.info('Printing output to "%s"', options.output)
            sys.stdout = output_file
    except (IOError, OSError) as e:
        logging.error('Unable to write output: %s', e)
    else:
        if options.json:
            my_activity.dump_json()
        else:
            # NOTE(review): all three sections print unconditionally
            # here (unlike the fetch phase, which honours -c/-r/-i);
            # sections whose data was never fetched are presumably
            # empty -- confirm against MyActivity's print_* methods.
            my_activity.print_changes()
            my_activity.print_reviews()
            my_activity.print_issues()
    finally:
        if output_file:
            logging.info('Done printing to file.')
            sys.stdout = sys.__stdout__
            output_file.close()

    return 0
Example #57
0
                        default='devseed',
                        help='search API')
    parser.add_argument('--product-type',
                        choices=['L1C', 'L2A'],
                        help='type of image')
    parser.add_argument('--parallel-downloads',
                        type=int,
                        default=multiprocessing.cpu_count(),
                        help='max number of parallel crops downloads')
    args = parser.parse_args()

    if 'all' in args.band:
        args.band = ALL_BANDS

    if args.geom and (args.lat or args.lon):
        parser.error('--geom and {--lat, --lon} are mutually exclusive')

    if not args.geom and (not args.lat or not args.lon):
        parser.error('either --geom or {--lat, --lon} must be defined')

    if args.geom:
        aoi = args.geom
    else:
        aoi = utils.geojson_geometry_object(args.lat, args.lon, args.width,
                                            args.height)
    get_time_series(aoi,
                    start_date=args.start_date,
                    end_date=args.end_date,
                    bands=args.band,
                    out_dir=args.outdir,
                    search_api=args.api,
Example #58
0
#                         calculations should take place. ")
# -j/--jobs: degree of parallelism for the downstream operations.
parser.add_option("-j",
                  "--jobs",
                  dest="jobs",
                  default=1,
                  metavar="jobs",
                  type="int",
                  help="Specifies the number of jobs (operations) to run \
                    in parallel.")

(options, remaining_args) = parser.parse_args()

jobs = options.jobs

# --input_fasta is mandatory; parser.error() prints usage and exits.
if not options.input_fasta:
    parser.error("\n\n\tMissing parameter --input_fasta FILE\n\n")

# Bind option values to the module-level names used by the rest of the
# script (input/output FASTA paths, stats file, and the filter list).
original_fasta = options.input_fasta
sorted_fasta_file = options.output_fasta
stats_result_file = options.result_file
filter_list = options.filter_list


def fasta_reader(fasta_filename):
    with open(fasta_filename) as fasta_handle:
        fasta_iter = (
            x[1] for x in groupby(fasta_handle, lambda line: line[0] == ">"))
        for header in fasta_iter:
            headerStr = header.__next__()[1:].strip()
            seq = "".join(s.strip() for s in fasta_iter.__next__())