Example #1
def argparser():
    parser = argparse.ArgumentParser(
        description="Work with Bluetooth enabled" +
        " SMA photovoltaic inverters")

    parser.add_argument("--config")

    subparsers = parser.add_subparsers()

    parse_status = subparsers.add_parser("status", help="Read inverter status")
    parse_status.set_defaults(func=status)

    parse_yieldat = subparsers.add_parser("yieldat",
                                          help="Get production at"
                                          " a given date")
    parse_yieldat.set_defaults(func=yieldat)
    parse_yieldat.add_argument(type=str, dest="datetime")

    parse_download = subparsers.add_parser("download",
                                           help="Download power history" +
                                           " and record in database")
    parse_download.set_defaults(func=download)

    parse_setupdb = subparsers.add_parser("setupdb",
                                          help="Create database or" +
                                          " update schema")
    parse_setupdb.set_defaults(func=setupdb)

    parse_upload_date = subparsers.add_parser("upload",
                                              help="Upload"
                                              " power history to pvoutput.org")
    parse_upload_date.set_defaults(func=upload)
    parse_upload_date.add_argument("--date", type=str, dest="upload_date")

    return parser
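Since every subcommand registers its handler via set_defaults(func=...), the parser returned by argparser() is typically driven by a small dispatcher. A minimal sketch, assuming the handlers (status, yieldat, download, setupdb, upload) are defined elsewhere in the same module:

def main():
    parser = argparser()
    args = parser.parse_args()
    if hasattr(args, "func"):
        args.func(args)
    else:
        # No subcommand was given, so no "func" default was set.
        parser.print_help()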
Example #2
    def argparse(self):
        parser = argparse.ArgumentParser()
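        # "global" is a Python keyword, so this option's value has to be read back
        # with getattr(args, "global") (or the argument needs an explicit dest=).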
        parser.add_argument('-g', '--global')

        subparsers = parser.add_subparsers(dest="action")

        import_parser = subparsers.add_parser('sync',
                                              help="Import cards from file")
        import_parser.add_argument('board',
                                   help="ID or URL of board",
                                   type=parse_trello_board_url)
        import_parser.add_argument('file',
                                   help="Filename of file with cards",
                                   type=argparse.FileType('rb'))

        export_parser = subparsers.add_parser('download',
                                              help="Export cards to file")
        export_parser.add_argument('board',
                                   help="ID or URL of board",
                                   type=parse_trello_board_url)
        export_parser.add_argument('file',
                                   help="Filename of file to write cards",
                                   type=argparse.FileType('wb'))

        init_parser = subparsers.add_parser(
            'setup', help="Perform an initial configuration of the tool")
        init_parser.add_argument('-k', '--key', required=True)

        return parser.parse_args(self.argv[1:])
Example #3
def main():
    config = load_config()
    parser = argparse.ArgumentParser()
    subparsers = parser.add_subparsers()

    lists_parser = subparsers.add_parser('lists')
    lists_parser.set_defaults(func=get_lists)

    list_parser = subparsers.add_parser('list')
    list_parser.add_argument('list_id')
    list_parser.set_defaults(func=get_list_data)

    list_parser = subparsers.add_parser('record')
    list_parser.add_argument('-d', '--datetime', default=None, nargs='?')
    list_parser.add_argument('args', nargs='+', help='Args should be alternating '
        'pairs of metric_id value. e.g. record 4 150 5 1 6 0 '
        'to record 150 for metric 4, 1 for 5, and 0 for 6')
    list_parser.set_defaults(func=post_metric_id_data)

    list_parser = subparsers.add_parser('new_metric')
    list_parser.add_argument('list_id')
    list_parser.add_argument('name')
    list_parser.set_defaults(func=post_metric)

    list_parser = subparsers.add_parser('new_list')
    list_parser.add_argument('name')
    list_parser.set_defaults(func=post_list)

    args = parser.parse_args()
    args.func(args, config)
Example #4
    def contribute_to_subparser(self, parser):
        parser.set_defaults(function=lambda args: parser.print_usage())
        subparsers = parser.add_subparsers(title='commands')

        setup_parser = subparsers.add_parser(
            'setup', help='setup Google Drive API access through OAuth')
        setup_parser.set_defaults(function=self.setup)
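Because the parent parser is given a fallback function that simply prints usage, the caller can dispatch unconditionally after parsing. A minimal usage sketch, assuming the parser passed into this method was created elsewhere:

args = parser.parse_args()
args.function(args)  # falls back to parser.print_usage() when no subcommand is chosen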
Example #5
def main():
    log.addHandler(logging.StreamHandler(sys.stderr))
    api_log.addHandler(logging.StreamHandler(sys.stderr))

    parser = argparse.ArgumentParser()
    parser.add_argument('-v', '--verbose', action='store_true',
                        help='Display debug messages')
    subparsers = parser.add_subparsers(help='sub-command help')

    parser1 = subparsers.add_parser('upload')
    parser1.add_argument('filename', help='Timesheet csv')
    parser1.add_argument('--dry-run', action='store_true',
                         help='Preview changes')
    parser1.set_defaults(cmd='upload')

    parser2 = subparsers.add_parser('download')
    parser2.add_argument('date', help='List entries for specified week')
    parser2.set_defaults(cmd='download')

    parser3 = subparsers.add_parser('lookups')
    parser3.add_argument('kind', choices=['customer', 'activity'],
                         help='Download specified lookups')
    parser3.set_defaults(cmd='lookups')

    args = parser.parse_args()

    log_level = logging.DEBUG if args.verbose else logging.INFO
    log.setLevel(log_level)
    api_log.setLevel(log_level)

    run(args)
Example #6
    def parse_args(self):
        parser = argparse.ArgumentParser(prog=self.prog)
        parser.add_argument("-q", "--quiet", action="store_true",
                            help="Be quiet on logging to stdout/stderr")
        subparsers = parser.add_subparsers()

        parser_backup = subparsers.add_parser("backup")
        parser_backup.set_defaults(verb="backup")

        parser_list = subparsers.add_parser("list")
        parser_list.set_defaults(verb="list")
        parser_list.add_argument("--before", dest="before", default=None,
                                 type=parse_simple_date)
        parser_list.add_argument("--after", dest="after", default=None,
                                 type=parse_simple_date)

        parser_restore = subparsers.add_parser("restore")
        parser_restore.set_defaults(verb="restore")
        parser_restore.add_argument("backup", metavar="BACKUPNAME", type=str)
        parser_restore.add_argument("backend", metavar="BACKENDNAME", type=str)
        parser_restore.add_argument("archive_spec", metavar="SPEC", type=str)
        parser_restore.add_argument("destination", metavar="DEST", type=str)

        parser_list_backups = subparsers.add_parser("list-configured-backups")
        parser_list_backups.set_defaults(verb="list-configured-backups")

        parser_list_backends = subparsers.add_parser("list-backends")
        parser_list_backends.set_defaults(verb="list-backends")

        parser_prune = subparsers.add_parser("prune")
        parser_prune.set_defaults(verb="prune")

        return parser.parse_args(self.argv)
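Each subparser here stores a plain string in verb rather than a callable, so the caller is expected to branch on it after parsing. A minimal sketch of that dispatch, with the do_* method names purely illustrative:

    def run(self):
        args = self.parse_args()
        verb = getattr(args, "verb", None)  # absent when no subcommand was given
        if verb == "backup":
            self.do_backup()
        elif verb == "list":
            self.do_list(before=args.before, after=args.after)
        elif verb == "restore":
            self.do_restore(args.backup, args.backend, args.archive_spec,
                            args.destination)
        # ...and so on for the remaining verbs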
Example #7
    def command_dispatcher(self, args=None):
        desc = ('pydl4j,  a system to manage your DL4J dependencies from Python.\n')
        parser = argparse.ArgumentParser(description=desc)
        parser.add_argument(
            '-v', '--version', action='version',
            version=pkg_resources.get_distribution("pydl4j").version,
            help='Print pydl4j version'
        )

        subparsers = parser.add_subparsers(title='subcommands', dest='command')
        subparsers.add_parser('init', help='Initialize pydl4j')
        subparsers.add_parser('install', help='Install jars for pydl4j')

        argcomplete.autocomplete(parser)
        args = parser.parse_args(args)
        self.var_args = vars(args)

        if not args.command:
            parser.print_help()
            return

        self.command = args.command

        if self.command == 'init':
            self.init()
            return

        if self.command == 'install':
            self.install()
            return
Example #8
def get_args():
    parser = argparse.ArgumentParser()
    parser.add_argument('--debug', action='store_true')

    sub = parser.add_subparsers(dest='action', required=True)
    back = sub.add_parser('backup')
    back.add_argument('--cluster', dest='cluster', nargs='?')
    back.add_argument('--vmid', dest='vmid', nargs='?', type=int)
    back.add_argument('--profile', dest='profile', nargs='?')
    back.add_argument('--force', action='store_true')
    back.add_argument('--no-cleanup', action='store_true')
    back.add_argument('--cleanup', action='store_true')

    sub.add_parser('precheck')
    sub.add_parser('check')
    sub.add_parser('check-snap')
    sub.add_parser('stats')
    sub.add_parser('version')

    ls = sub.add_parser('list-mapped')
    ls.add_argument('--json', action='store_true')

    ls = sub.add_parser('ls')
    ls.add_argument(dest='rbd', nargs='?')
    ls.add_argument('--json', action='store_true')

    _map = sub.add_parser('map')
    _map.add_argument(dest='rbd')
    _map.add_argument(dest='snapshot')
    _map.add_argument(dest='vmdk', nargs='?')

    unmap = sub.add_parser('unmap')
    unmap.add_argument(dest='rbd')
    unmap.add_argument(dest='snapshot')
    return parser.parse_args()
Example #9
    def command_dispatcher(self, args=None):
        desc = ('pyreadme, an interface for the readme.io HTTP API.\n')
        parser = argparse.ArgumentParser(description=desc)
        parser.add_argument(
            '-v',
            '--version',
            action='version',
            version=pkg_resources.get_distribution("pyreadme").version,
            help='Print pyreadme version')

        subparsers = parser.add_subparsers(title='subcommands', dest='command')
        subparsers.add_parser('init', help='Initialize pyreadme')

        argcomplete.autocomplete(parser)
        args = parser.parse_args(args)
        self.var_args = vars(args)

        if not args.command:
            parser.print_help()
            return

        self.command = args.command

        if self.command == 'init':
            self.init()
            return
Example #10
def get_args():
    parser = argparse.ArgumentParser()
    sub = parser.add_subparsers(dest='action', required=True)
    sub.add_parser('backup')
    sub.add_parser('precheck')
    sub.add_parser('check')
    sub.add_parser('check-snap')
    sub.add_parser('stats')
    sub.add_parser('version')

    ls = sub.add_parser('list-mapped')
    ls.add_argument('--json', action='store_true')

    ls = sub.add_parser('ls')
    ls.add_argument(dest='rbd', nargs='?')
    ls.add_argument('--json', action='store_true')

    _map = sub.add_parser('map')
    _map.add_argument(dest='rbd')
    _map.add_argument(dest='snapshot')

    unmap = sub.add_parser('unmap')
    unmap.add_argument(dest='rbd')
    unmap.add_argument(dest='snapshot')
    return parser.parse_args()
Example #11
def parse_cmdline(argv):
    """Parse commandline"""
    parser = argparse.ArgumentParser()

    parser.add_argument('-v',
                        '--verbosity',
                        action='count',
                        help='increase output verbosity')

    subparsers = parser.add_subparsers()

    parser_fetch = subparsers.add_parser('fetch')
    parser_fetch.set_defaults(func=command_fetch)
    parser_fetch.add_argument('stock', help='WKN, ticker name of a stock')

    parser_fetch.add_argument('-s',
                              '--start-date',
                              type=mkdate,
                              default=datetime.date.today(),
                              help='Start date (default today)')
    parser_fetch.add_argument('-e',
                              '--end-date',
                              type=mkdate,
                              default=datetime.date.today(),
                              help='End date (default today)')
    parser_fetch.add_argument('--intra-day',
                              default=None,
                              action='store_true',
                              help='Intra day information')

    args = parser.parse_args(argv)

    return args
Example #12
    def command_dispatcher(self, args=None):
        desc = (
            'pydl4j,  a system to manage your DL4J dependencies from Python.\n'
        )
        parser = argparse.ArgumentParser(description=desc)
        parser.add_argument(
            '-v',
            '--version',
            action='version',
            version=pkg_resources.get_distribution("pydl4j").version,
            help='Print pydl4j version')

        subparsers = parser.add_subparsers(title='subcommands', dest='command')
        subparsers.add_parser('init', help='Initialize pydl4j')
        subparsers.add_parser('install', help='Install jars for pydl4j')

        argcomplete.autocomplete(parser)
        args = parser.parse_args(args)
        self.var_args = vars(args)

        if not args.command:
            parser.print_help()
            return

        self.command = args.command

        if self.command == 'init':
            self.init()
            return

        if self.command == 'install':
            self.install()
            return
Example #13
def main():
    log.addHandler(logging.StreamHandler(sys.stderr))
    api_log.addHandler(logging.StreamHandler(sys.stderr))

    parser = argparse.ArgumentParser()
    parser.add_argument('-v', '--verbose', action='store_true',
                        help='Display debug messages')
    subparsers = parser.add_subparsers(help='sub-command help')

    parser1 = subparsers.add_parser('upload')
    parser1.add_argument('filename', help='Timesheet csv')
    parser1.add_argument('--dry-run', action='store_true',
                         help='Preview changes')
    parser1.set_defaults(cmd='upload')

    parser2 = subparsers.add_parser('download')
    parser2.add_argument('date', help='List entries for specified week')
    parser2.set_defaults(cmd='download')

    parser3 = subparsers.add_parser('lookups')
    parser3.add_argument('kind', choices=['customer', 'activity'],
                         help='Download specified lookups')
    parser3.set_defaults(cmd='lookups')

    args = parser.parse_args()

    log_level = logging.DEBUG if args.verbose else logging.INFO
    log.setLevel(log_level)
    api_log.setLevel(log_level)

    run(args)
Example #14
File: main.py Project: thara/gcal
def main(args):
    client_secret_file_path = os.environ['GCAL_CLIENT_SECRET_PATH']
    credentials = get_credentials(client_secret_file_path)
    service = get_service(credentials)

    parser = argparse.ArgumentParser()

    sub = parser.add_subparsers(title='sub commands')

    list_parser = sub.add_parser('list')
    list_parser.set_defaults(func=list_cals)

    event_parser = sub.add_parser('events')
    event_parser.set_defaults(func=list_events)
    event_parser.add_argument('cal_ids', nargs='?')
    event_parser.add_argument('days', type=int, nargs='?')
    event_parser.add_argument('--no-times', action='store_true')
    event_parser.add_argument('--markdown-list', action='store_true')

    hour_parser = sub.add_parser('hour')
    hour_parser.set_defaults(func=calc_hours)
    hour_parser.add_argument('cal_ids', nargs='?')
    hour_parser.add_argument('days', type=int, nargs='?')
    hour_parser.add_argument('--no-times', action='store_true')

    args = parser.parse_args()
    args.func(service, args)
Example #15
def main():
    parser = argparse.ArgumentParser()
    parser.add_argument(
        '-c',
        '--config-file',
        default=os.path.expanduser('~/.schedulemaster-api.json'),
        help='JSON state file path')

    sp = parser.add_subparsers(dest='command', required=True)

    p = sp.add_parser('login')
    p.add_argument('username', help='username (e.g. 12345-1)')
    p.add_argument('password', help='password')

    p = sp.add_parser('adopt')
    p.add_argument('userid', type=int)
    p.add_argument('session', type=int)

    p = sp.add_parser('adopturl')
    p.add_argument('url')

    p = sp.add_parser('mysched')

    p = sp.add_parser('allsched')
    p.add_argument('-s', '--start-time', help='starting time (default: today)')
    p.add_argument('-e', '--end-time', help='ending time (default: today)')
    p.add_argument('-r',
                   '--resources',
                   help='comma-separated list of resources to show')
    p.add_argument('-m',
                   '--models',
                   help='comma-separated list of models to show')
    p.add_argument('--statuses',
                   default='yel,grn,gry,None',
                   help='comma-separated list of acceptable resource statuses')
    p.add_argument('-l',
                   '--location',
                   help='substring of location (default any)')

    p = sp.add_parser('me')

    p = sp.add_parser('listres')
    p.add_argument('-r',
                   '--resources',
                   help='comma-separated list of resources to show')
    p.add_argument('-m',
                   '--models',
                   help='comma-separated list of models to show')
    p.add_argument('-l',
                   '--location',
                   help='substring of location (default any)')
    p.add_argument('--statuses',
                   default='yel,grn,gry,None',
                   help='comma-separated list of acceptable resource statuses')

    args = parser.parse_args()

    api = ScheduleMasterAPI(args)

    getattr(api, 'cmd_' + args.command)(args)
Example #16
def main(argv):
    # get lock to avoid multiple simultaneous instances of this script
    get_lock(os.path.basename(__file__))
    
    if '--run-tests' in argv:
        run_tests("--forever" in argv)
    else:
        parser = argparse.ArgumentParser(description="Monitors a book archive and commits changes to git.")
        subparsers = parser.add_subparsers(title='subcommands', metavar="")
        
        parser_update = subparsers.add_parser("update", help="Check archive for changes.")
        parser_update.add_argument("archive", help="Path to the archive.", metavar="PATH")
        parser_update.add_argument("-f", "--forever", help="Loop script forever.", action='store_true')
        parser_update.set_defaults(func=update)
        
        parser_init = subparsers.add_parser("git-init", help="Initialize archive from remote git repository.")
        parser_init.add_argument("archive", help="Path to the archive.", metavar="PATH")
        parser_init.add_argument("git_url", help="Initialize the archive from this git repository.", metavar="URL")
        parser_init.set_defaults(func=git_init)
        
        args = parser.parse_args()
        if "func" in args:
            args.func(args)
        else:
            print(parser.format_help())
Example #17
def setup_toggl_parser(parser):
    setup_common(parser)
    subparsers = parser.add_subparsers(dest="toggl_cmd", required=True)
    _ = subparsers.add_parser("download-tasks")
    validate = setup_common(subparsers.add_parser("status"))
    validate.add_argument("--local", action="store_true")
    _ = subparsers.add_parser("export")
    _ = subparsers.add_parser("open")
Example #18
def main():
    setup_honeybadger()

    parser = argparse.ArgumentParser(
        description=
        'Wrap Twarc in a cloudy sort of way for collecting Twitter data.')
    parser.add_argument('--debug', action='store_true')
    parser.add_argument('-V',
                        '--version',
                        action='store_true',
                        help='Show version and exit')
    subparsers = parser.add_subparsers(help='command help', dest='command')

    local_parser = add_local_subparser(subparsers)
    aws_parser = add_aws_subparser(subparsers)

    m_args = parser.parse_args()
    setup_logging(debug=m_args.debug)

    if m_args.version:
        print('Version {}'.format(__version__))
    elif m_args.command == 'aws':
        if os.path.exists('twarc_cloud.ini'):
            setup_aws_keys(load_ini_config('twarc_cloud.ini'))
        if m_args.subcommand == 'harvester':
            harvester = TweetHarvester(m_args.collection_id,
                                       m_args.temp,
                                       bucket=m_args.bucket,
                                       tweets_per_file=m_args.tweets_per_file,
                                       monitor=m_args.monitor,
                                       shutdown=m_args.shutdown)
            harvester.harvest()
        elif m_args.subcommand == 'unlock':
            force_unlock(m_args.temp,
                         m_args.collection_id,
                         bucket=m_args.bucket)
            print('Unlocked')
        else:
            aws_parser.print_help()
            exit(1)

    elif m_args.command == 'local':
        if m_args.subcommand == 'harvester':
            harvester = TweetHarvester(m_args.collection_id,
                                       m_args.collections_path,
                                       tweets_per_file=m_args.tweets_per_file,
                                       monitor=m_args.monitor,
                                       shutdown=True)
            harvester.harvest()
        elif m_args.subcommand == 'unlock':
            force_unlock(m_args.collections_path, m_args.collection_id)
            print('Unlocked')
        else:
            local_parser.print_help()
            exit(1)
    else:
        parser.print_help()
        exit(1)
Example #19
def main():
    parser = argparse.ArgumentParser(description="Yo database utils")
    parser.add_argument('db_url', type=str)
    subparsers = parser.add_subparsers(help='sub-command help')
    init_sub = subparsers.add_parser('init')
    init_sub.set_defaults(func=init_db)
    reset_sub = subparsers.add_parser('reset')
    reset_sub.set_defaults(func=reset_db)
    args = parser.parse_args()
    args.func(args=args)
Example #20
    def create_parser(self):
        parser = argparse.ArgumentParser()
        parser.add_argument("--version", action='version', help="show program version",
                            version=version.__version__)
        sub = parser.add_subparsers(help="sub-command help")

        def add_cmd(method):
            cp = sub.add_parser(method.__name__.replace("_", "-"), help=method.__doc__)
            cp.set_defaults(func=method)
            return cp

        def generic_args(require_config=True, require_site=False):
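            # Adds shared options to whichever subparser the enclosing "cmd"
            # variable points at; cmd is assigned by add_cmd() further below.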
            config_path = os.environ.get("PGHOARD_CONFIG")
            cmd.add_argument("-v", "--verbose", help="verbose output", action="store_true")
            if config_path:
                cmd.add_argument("--config", help="pghoard config file", default=config_path)
            else:
                cmd.add_argument("--config", help="pghoard config file", required=require_config)

            cmd.add_argument("--site", help="pghoard site", required=require_site)

        def host_port_args():
            cmd.add_argument("--host", help="pghoard repository host", default=PGHOARD_HOST)
            cmd.add_argument("--port", help="pghoard repository port", default=PGHOARD_PORT)

        def target_args():
            cmd.add_argument("--basebackup", help="pghoard basebackup", default="latest")
            cmd.add_argument("--primary-conninfo", help="replication.conf primary_conninfo", default="")
            cmd.add_argument("--target-dir", help="pghoard restore target 'pgdata' dir", required=True)
            cmd.add_argument("--overwrite", help="overwrite existing target directory",
                             default=False, action="store_true")
            cmd.add_argument("--tablespace-dir", metavar="NAME=DIRECTORY", action="append",
                             help="map the given tablespace to an existing empty directory; "
                                  "this option can be used multiple times to map multiple tablespaces")
            cmd.add_argument("--recovery-end-command", help="PostgreSQL recovery_end_command", metavar="COMMAND")
            cmd.add_argument("--recovery-target-action", help="PostgreSQL recovery_target_action",
                             choices=["pause", "promote", "shutdown"])
            cmd.add_argument("--recovery-target-name", help="PostgreSQL recovery_target_name", metavar="RESTOREPOINT")
            cmd.add_argument("--recovery-target-time", help="PostgreSQL recovery_target_time", metavar="ISO_TIMESTAMP")
            cmd.add_argument("--recovery-target-xid", help="PostgreSQL recovery_target_xid", metavar="XID")
            cmd.add_argument("--restore-to-master", help="Restore the database to a PG master", action="store_true")

        cmd = add_cmd(self.list_basebackups_http)
        host_port_args()
        generic_args(require_config=False, require_site=True)

        cmd = add_cmd(self.list_basebackups)
        generic_args()

        cmd = add_cmd(self.get_basebackup)
        target_args()
        generic_args()

        return parser
Example #21
def parse_args(argv):
    parser = argparse.ArgumentParser()
    subparsers = parser.add_subparsers()

    # subparser that facilitates adding new configurations
    add_subparser = subparsers.add_parser('add', help='Adds a new playlist')
    add_subparser.add_argument('--config-file', type=str, required=True,
            help='The config file to augment. If this file does not exist, a new config file is created at that location')
    add_subparser.add_argument('--top-n', type=int, default=25,
            help='The number of posts per query to add to the playlist.')
    add_subparser.add_argument('--expire-days', type=int, default=7,
            help='The number of days to retain added songs.')
    add_subparser.add_argument('--subreddits', type=str, required=True, nargs='+',
            help='The subreddits to pull posts from.')

    # mutex that makes creation vs retrieve preexisting easier
    playlist_mutex = add_subparser.add_mutually_exclusive_group(required=True)
    playlist_mutex.add_argument('--playlist-id', type=str,
            help='The Spotify playlist id to use for this configuration. If another playlist in this configuration has \
            the same playlist id, an error will be raised. If this spotify id does not exist or the \
            authenticated user does not have access to this playlist, an exception will be raised. Note that entering \
            a preexisting playlist will cause all songs to be removed that are older than --expire-days.')
    playlist_mutex.add_argument('--playlist-name', type=str,
            help='The name of the playlist. If this is specified instead of --playlist-id, a new playlist will be created with this name.')
    add_subparser.set_defaults(func=add_handler)


    list_subparser = subparsers.add_parser('list', help='Lists the current configurations.')
    list_subparser.add_argument('--config-file', type=str, required=True,
            help='The config file to list the contents of.')
    list_subparser.set_defaults(func=list_handler)

    update_subparser = subparsers.add_parser('update', help='Facilitates updating preexisting configurations. \
            Any value provided will overwrite the current settings for this playlist_id')
    update_subparser.add_argument('--config-file', type=str, required=True,
            help='The config file to make changes to')
    update_subparser.add_argument('--playlist-id', type=str, required=True,
            help='The playlist id to change. This is used as the key to lookup the item.')
    update_subparser.add_argument('--top-n', type=int,
            help='The number of posts per query to add to the playlist. Providing this value will overwrite the current value.')
    update_subparser.add_argument('--expire-days', type=int,
            help='The number of days to retain added songs. Providing this value will overwrite the current value.')
    update_subparser.add_argument('--subreddits', type=str, nargs='+',
            help='The subreddit to pull posts from. Providing this value will overwrite the current value.')
    update_subparser.set_defaults(func=update_handler)

    curate_subparser = subparsers.add_parser('curate', help='Runs a curation cycle, removing expired tracks and adding \
            new tracks from posts.')
    curate_subparser.add_argument('--config-file', type=str, required=True,
            help='The config file to pull playlist information from.')
    curate_subparser.set_defaults(func=curate_handler)

    return parser.parse_args(argv)
Example #22
    def command_dispatcher(self, args=None):
        desc = (
            'Pyskil - train, deploy and manage deep learning experiments with SKIL from Python.\n'
        )
        parser = argparse.ArgumentParser(description=desc)
        parser.add_argument(
            '-v',
            '--version',
            action='version',
            version=pkg_resources.get_distribution("skil").version,
            help='Print pyskil version')

        subparsers = parser.add_subparsers(title='subcommands', dest='command')
        subparsers.add_parser('configure',
                              help='Base configuration for pyskil. Run once')

        exp_parser = subparsers.add_parser(
            'init-experiment',
            help='Initialize a SKIL experiment from scratch.')
        exp_parser.add_argument('-f',
                                '--file',
                                help='File to persist the experiment to.')

        dep_parser = subparsers.add_parser(
            'init-deployment',
            help='Initialize a SKIL deployment from scratch.')
        dep_parser.add_argument('-f',
                                '--file',
                                help='File to persist the deployment to.')

        argcomplete.autocomplete(parser)
        args = parser.parse_args(args)
        self.var_args = vars(args)

        if not args.command:
            parser.print_help()
            return

        self.command = args.command

        if self.command == 'configure':
            self.configure()
            return

        if self.command == 'init-experiment':
            self.init_experiment(self.var_args['file'])
            return

        if self.command == 'init-deployment':
            self.init_deployment(self.var_args['file'])
            return
Example #23
def run_cli():
    conf = load_conf()

    parser = setup_common(
        argparse.ArgumentParser(
            description="Run different easytrack components"))
    subparsers = parser.add_subparsers(dest="cmd", required=True)
    setup_trackdir_parser(subparsers.add_parser("trackdir"))
    setup_common(subparsers.add_parser("config"))
    setup_common(subparsers.add_parser("remind"))
    setup_monitor_parser(subparsers.add_parser("monitor"))
    setup_toggl_parser(subparsers.add_parser("toggl"))
    setup_reporter_parser(subparsers.add_parser("reporter"))
    setup_vacuum_parser(subparsers.add_parser("vacuum", help="clean old data"))

    args = parser.parse_args()

    with workdir_setup(conf, args):
        if args.cmd == "trackdir":
            if args.trackdir_cmd == "prep":
                common_routine(conf)
                logging.info("Finished prepping the trackdir")
            elif args.trackdir_cmd == "open":
                open_trackdir(conf, TrackdirTrackfiles)
        elif args.cmd == "config":
            print(to_json(conf))
        elif args.cmd == "remind":
            common_routine(conf)
        elif args.cmd == "monitor":
            _run_monitor(conf, args.ticks)
        elif args.cmd == "toggl":
            if args.toggl_cmd == "download-tasks":
                toggl_download_tasks(conf)
            elif args.toggl_cmd == "status":
                toggl_status(conf, args.local)
            elif args.toggl_cmd == "export":
                toggl_export(conf)
            elif args.toggl_cmd == "open":
                open_trackdir(conf, TrackdirToggl)
        elif args.cmd == "reporter":
            if args.reporter_cmd == "report":
                reporter_report(conf, args)
        elif args.cmd == "vacuum":
            do_vacuum(
                conf,
                verb=args.verb,
                desc=args.desc,
                advs=args.advs,
                dry_run=args.dry_run,
            )
Example #24
def argparser():
    parser = argparse.ArgumentParser(
        description="Work with Bluetooth enabled" +
        " SMA photovoltaic inverters")

    parser.add_argument("--config")

    subparsers = parser.add_subparsers()

    parse_status = subparsers.add_parser("status", help="Read inverter status")
    parse_status.set_defaults(func=status)

    parse_record_now = subparsers.add_parser(
        "record_now",
        help="Read current inverter totals and "
        "write them to the database. (ignoring inverter time)")
    parse_record_now.set_defaults(func=record_now)

    parse_upload_energy = subparsers.add_parser(
        "upload_energy",
        help="Upload entries to energy.nur-jan.de "
        "that have not yet been uploaded")
    parse_upload_energy.set_defaults(func=upload_energy)

    parse_yieldat = subparsers.add_parser("yieldat",
                                          help="Get production at"
                                          " a given date")
    parse_yieldat.set_defaults(func=yieldat)
    parse_yieldat.add_argument(type=str, dest="datetime")

    parse_download = subparsers.add_parser("download",
                                           help="Download power history" +
                                           " and record in database")
    parse_download.set_defaults(func=download)

    parse_setupdb = subparsers.add_parser("setupdb",
                                          help="Create database or" +
                                          " update schema")
    parse_setupdb.set_defaults(func=setupdb)

    parse_upload_date = subparsers.add_parser("upload",
                                              help="Upload"
                                              " power history to pvoutput.org")
    parse_upload_date.set_defaults(func=upload)
    parse_upload_date.add_argument("--date", type=str, dest="upload_date")

    return parser
Example #25
    def create_parser(self):
        parser = argparse.ArgumentParser()
        sub = parser.add_subparsers(help="sub-command help")

        def add_cmd(method):
            cp = sub.add_parser(method.__name__.replace("_", "-"), help=method.__doc__)
            cp.set_defaults(func=method)
            return cp

        def generic_args(require_config=True):
            cmd.add_argument("--site", help="pghoard site", required=True)
            cmd.add_argument("--config", help="pghoard config file", required=require_config)

        def host_port_args():
            cmd.add_argument("--host", help="pghoard repository host", default="localhost")
            cmd.add_argument("--port", help="pghoard repository port", default=16000)

        def target_args():
            cmd.add_argument("--basebackup", help="pghoard basebackup", default="latest")
            cmd.add_argument("--primary-conninfo", help="replication.conf primary_conninfo", default="")
            cmd.add_argument("--target-dir", help="pghoard restore target 'pgdata' dir", required=True)
            cmd.add_argument(
                "--overwrite", help="overwrite existing target directory", default=False, action="store_true"
            )
            cmd.add_argument("--recovery-end-command", help="PostgreSQL recovery_end_command", metavar="COMMAND")
            cmd.add_argument(
                "--recovery-target-action",
                help="PostgreSQL recovery_target_action",
                choices=["pause", "promote", "shutdown"],
            )
            cmd.add_argument("--recovery-target-name", help="PostgreSQL recovery_target_name", metavar="RESTOREPOINT")
            cmd.add_argument("--recovery-target-time", help="PostgreSQL recovery_target_time", metavar="ISO_TIMESTAMP")
            cmd.add_argument("--recovery-target-xid", help="PostgreSQL recovery_target_xid", metavar="XID")
            cmd.add_argument("--restore-to-master", help="Restore the database to a PG master", action="store_true")

        cmd = add_cmd(self.list_basebackups_http)
        host_port_args()
        generic_args(require_config=False)

        cmd = add_cmd(self.list_basebackups)
        generic_args()

        cmd = add_cmd(self.get_basebackup)
        target_args()
        generic_args()

        return parser
Example #26
def setup_arguments():
    parser = argparse.ArgumentParser('codereview')
    subparsers = parser.add_subparsers(help="Core commands", dest="command")

    init = subparsers.add_parser(
        'init',
        help="Start using code review for this repository"
    )
    init.add_argument(
        '--branch',
        help="Target meta branch to use",
        default=ARG_DEFAULTS['branch'],
        metavar='<branchname>',
    )
    init.add_argument(
        '--scoring',
        help="Scoring scale to use",
        default=ARG_DEFAULTS['scoring'],
        type=int,
        metavar='<score>',
    )
    init.add_argument(
        '--strategy',
        help="Merge strategy to use",
        default='merge',
        choices=ARG_DEFAULTS['strategy'],
    )

    subparsers.add_parser(
        'new',
        help="Create a new review"
    )

    subparsers.add_parser(
        'list',
        help="List reviews"
    )

    show = subparsers.add_parser(
        'show',
        help="Detailed view of a review"
    )
    show.add_argument(
        'id', help="Review ID", default=1, nargs="?", type=int
    )

    return parser
Example #27
    def parse_args():
        parser = argparse.ArgumentParser(prog='ecog', description='Ecogwiki client - Information in your fingertips', epilog=' ')
        parser.add_argument('--auth', metavar='FILE', dest='authfile', default='.auth',
                           help='auth file storing access token')
        parser.add_argument('--host', metavar='HOST', dest='ecoghost', default='www.ecogwiki.com',
                           help='ecogwiki server host')
        parser.add_argument('--version',  action='version', version='%(prog)s ' + __version__, default=None)

        subparsers = parser.add_subparsers(metavar='COMMAND', dest='command', title='ecogwiki commands')
        cat_parser    = subparsers.add_parser('cat',    help='print page in markdown')
        get_parser    = subparsers.add_parser('get',    help='print page in json')
        list_parser   = subparsers.add_parser('list',   help="list pages info")
        title_parser  = subparsers.add_parser('title',  help='list all titles')
        recent_parser = subparsers.add_parser('recent', help='list recent modified pages')
        edit_parser   = subparsers.add_parser('edit',   help='edit page with editor', description='Edit page with your favorite editor ($EDITOR)')
        append_parser = subparsers.add_parser('append', help='only append text',      description='Quickly append to page')
        memo_parser   = subparsers.add_parser('memo',   help='quick memo',            description='Edit your daily memo')
        
        edit_parser.add_argument('title', metavar='TITLE', help='page title')
        edit_parser.add_argument('--template', metavar='TEXT', help='text on new file', default=None)
        edit_parser.add_argument('--comment',  metavar='MSG',  help='edit comment message', default='')
        memo_parser.add_argument('--comment',  metavar='MSG',  help='edit comment message', default='')
        get_parser.add_argument('title', metavar='TITLE', help='page title')
        cat_parser.add_argument('title', metavar='TITLE', help='page title')
        get_parser.add_argument('--revision', metavar='REV', help='specific revision number', type=int)
        cat_parser.add_argument('--revision', metavar='REV', help='specific revision number', type=int)
        get_parser.add_argument('--format', metavar='FORMAT', help='one of [json|html|markdown|atom], json by default',
            choices=['json', 'txt', 'atom', 'markdown', 'html'], default='json')

        append_parser.add_argument('title',           metavar='TITLE', help='page title')
        append_parser.add_argument('body', nargs='?', metavar='TEXT',  help='body text. fires editor if not given', default='')
        append_parser.add_argument('--comment',       metavar='MSG',   help='comment message', default='')

        #
        args = parser.parse_args()

        if '://' not in args.ecoghost:
            args.ecoghost = 'http://' + args.ecoghost

        if not args.authfile.startswith('/'):
            args.authfile = os.path.join(CWD, args.authfile)

        if args.version:
            output(__version__)
            sys.exit(0)

        return args
Example #28
def parse_args(argv):
    def db_cmd(sub_p, cmd_name, cmd_help):
        cmd_p = sub_p.add_parser(cmd_name, help=cmd_help)
        cmd_p.add_argument('--log',
                           action='store', dest='log', default='stdout', choices=['stdout', 'syslog', 'both'],
                           help='log to stdout and/or syslog')
        cmd_p.add_argument('--log-level',
                           action='store', dest='log_level', default='WARNING',
                           choices=['CRITICAL', 'ERROR', 'WARNING', 'INFO', 'DEBUG', 'NOTSET'],
                           help='logging level')
        cmd_p.add_argument('--log-facility',
                           action='store', dest='log_facility', default='user',
                           help='facility to use when using syslog')
        cmd_p.add_argument('--trello-file',
                           action='store', dest='trello_file', required=True,
                           help='trello exported json file')

        return cmd_p

    parser = argparse.ArgumentParser(description="")
    sub_p = parser.add_subparsers(dest='cmd')

    foo_p = db_cmd(sub_p, 'foo', '')
    foo_p.set_defaults(func=cmd_foo)

    print_labelnames_p = db_cmd(sub_p, 'print-labelnames', '')
    print_labelnames_p.set_defaults(func=cmd_print_labelnames)

    print_users_p = db_cmd(sub_p, 'print-users', '')
    print_users_p.set_defaults(func=cmd_print_users)

    print_user_map_test_p = db_cmd(sub_p, 'print-user-map-test', '')
    print_user_map_test_p.set_defaults(func=cmd_print_user_map_test)

    dump_cards_p = db_cmd(sub_p, 'dump-cards', '')
    dump_cards_p.set_defaults(func=cmd_dump_cards)
    dump_cards_p.add_argument('--dump-dir',
                              action='store', dest='dump_dir', default='out/tickets')
    dump_cards_p.add_argument('--phab-project',
                              action='store', dest='phab_project', required=True)
    dump_cards_p.add_argument('--start-id', type=int,
                              action='store', dest='start_id')
    dump_cards_p.set_defaults(func=cmd_dump_cards)

    args = parser.parse_args(argv)
    return args
Example #29
    def add_arguments(self, parser):
        subparsers = parser.add_subparsers(dest="action")
        dump_parser = subparsers.add_parser("dump")
        load_parser = subparsers.add_parser("load")
        delta_parser = subparsers.add_parser("delta")
        dump_parser.add_argument("target_file", nargs="?")
        load_parser.add_argument("target_file", nargs="?")
        since = dt.datetime.now(dt.timezone.utc) - dt.timedelta(hours=1, minutes=5)
        delta_parser.add_argument(
            "--since",
            default=str(since),
            type=dateutil.parser.parse,
            help="Time start to backup draws. Defaults to 1h and 5m in the past from now.",
        )
        delta_parser.add_argument("target_file", nargs="?")
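This add_arguments hook follows the Django management-command convention, so the matching handle method receives every parsed value in its options dict. A minimal dispatch sketch, with do_dump, do_load and do_delta being hypothetical helpers on the same command class:

    def handle(self, *args, **options):
        # "action" carries the chosen subcommand name (dest="action" above).
        action = options["action"]
        if action == "dump":
            self.do_dump(options.get("target_file"))
        elif action == "load":
            self.do_load(options.get("target_file"))
        elif action == "delta":
            self.do_delta(options.get("target_file"), since=options["since"])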
Example #30
def argparser():
    parser = argparse.ArgumentParser(description="Work with Bluetooth"
                                     " enabled SMA photovoltaic inverters")

    parser.add_argument("--config")

    subparsers = parser.add_subparsers()

    parse_status = subparsers.add_parser("status", help="Read inverter status")
    parse_status.set_defaults(func=status)

    help = "Get production at a given date"
    parse_yieldat = subparsers.add_parser("yieldat", help=help)
    parse_yieldat.set_defaults(func=yieldat)
    parse_yieldat.add_argument(type=str, dest="datetime")

    help = "Download power history and record in database"
    parse_download = subparsers.add_parser("download", help=help)
    parse_download.set_defaults(func=download)

    help = "Create database or update schema"
    parse_setupdb = subparsers.add_parser("setupdb", help=help)
    parse_setupdb.set_defaults(func=setupdb)

    help = "Update inverters' clocks"
    parse_settime = subparsers.add_parser("settime", help=help)
    parse_settime.set_defaults(func=settime)

    help = "Upload power history to pvoutput.org"
    parse_upload_date = subparsers.add_parser("upload", help=help)
    parse_upload_date.set_defaults(func=upload)
    parse_upload_date.add_argument("--date", type=str, dest="upload_date")

    help = "Get daily production totals"
    parse_yieldlog = subparsers.add_parser("yieldlog", help=help)
    parse_yieldlog.set_defaults(func=yieldlog)
    parse_yieldlog.add_argument(type=str, dest="start")
    parse_yieldlog.add_argument(type=str, dest="end")
    parse_yieldlog.add_argument("--system",
                                type=str,
                                dest="system",
                                default="")
    parse_yieldlog.add_argument("--csv", action='store_true', dest="csv")

    return parser
Example #31
File: main.py Project: nijel/odorik
def get_parser():
    """Create argument parser."""
    parser = ArgumentParser(
        description='Odorik <{0}> command line utility.'.format(odorik.URL),
        epilog='This utility is developed at <{0}>.'.format(odorik.DEVEL_URL),
    )
    parser.add_argument(
        '--format',
        default='text',
        choices=('text', 'csv', 'json', 'html'),
        help='Output format to use'
    )
    parser.add_argument(
        '--version',
        action='version',
        version='odorik {0}'.format(odorik.__version__)
    )
    parser.add_argument(
        '--config',
        help='Path to configuration file',
    )
    parser.add_argument(
        '--config-section',
        default='odorik',
        help='Configuration section to use'
    )
    parser.add_argument(
        '--user',
        help='API username',
    )
    parser.add_argument(
        '--password',
        help='API password',
    )
    parser.add_argument(
        '--url',
        help='API URL',
    )
    subparser = parser.add_subparsers(dest="cmd")

    for command in COMMANDS:
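        # Each command object in COMMANDS registers its own subparser here via its add_parser() hook.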
        COMMANDS[command].add_parser(subparser)

    return parser
Example #32
    def parse_args(self):
        parser = argparse.ArgumentParser(prog=self.prog)
        parser.add_argument("-q",
                            "--quiet",
                            action="store_true",
                            help="Be quiet on logging to stdout/stderr")
        parser.add_argument("--version",
                            action="store_const",
                            dest="verb",
                            const="version")
        parser.set_defaults(verb=None)
        subparsers = parser.add_subparsers()

        parser_backup = subparsers.add_parser("backup")
        parser_backup.set_defaults(verb="backup")

        parser_list = subparsers.add_parser("list")
        parser_list.set_defaults(verb="list")
        parser_list.add_argument("--before",
                                 dest="before",
                                 default=None,
                                 type=parse_simple_date)
        parser_list.add_argument("--after",
                                 dest="after",
                                 default=None,
                                 type=parse_simple_date)

        parser_restore = subparsers.add_parser("restore")
        parser_restore.set_defaults(verb="restore")
        parser_restore.add_argument("backup", metavar="BACKUPNAME", type=str)
        parser_restore.add_argument("backend", metavar="BACKENDNAME", type=str)
        parser_restore.add_argument("archive_spec", metavar="SPEC", type=str)
        parser_restore.add_argument("destination", metavar="DEST", type=str)

        parser_list_backups = subparsers.add_parser("list-configured-backups")
        parser_list_backups.set_defaults(verb="list-configured-backups")

        parser_list_backends = subparsers.add_parser("list-backends")
        parser_list_backends.set_defaults(verb="list-backends")

        parser_prune = subparsers.add_parser("prune")
        parser_prune.set_defaults(verb="prune")

        return parser.parse_args(self.argv)
Example #33
def get_args():
    parser = argparse.ArgumentParser('Client API to cya server')
    sub = parser.add_subparsers(help='sub-command help')
    p = sub.add_parser('register', help='Register this host with the server')
    p.set_defaults(func=_register_host)
    p.add_argument('server_url')
    p.add_argument('version')

    p = sub.add_parser('update', help='Update host props with the server')
    p.set_defaults(func=_update_host)

    p = sub.add_parser('check', help='Check in with server for updates')
    p.set_defaults(func=_check)

    p = sub.add_parser('uninstall', help='Uninstall the client')
    p.set_defaults(func=_uninstall)

    args = parser.parse_args()
    return args
Example #34
def get_args():
    parser = argparse.ArgumentParser('Client API to cya server')
    sub = parser.add_subparsers(help='sub-command help')
    p = sub.add_parser('register', help='Register this host with the server')
    p.set_defaults(func=_register_host)
    p.add_argument('server_url')
    p.add_argument('version')

    p = sub.add_parser('update', help='Update host props with the server')
    p.set_defaults(func=_update_host)

    p = sub.add_parser('check', help='Check in with server for updates')
    p.set_defaults(func=_check)

    p = sub.add_parser('uninstall', help='Uninstall the client')
    p.set_defaults(func=_uninstall)

    args = parser.parse_args()
    return args
Example #35
def main():  # pragma: no cover
    """Entry point for the `vtes` command"""
    parser = ArgumentParser()
    storage = parser.add_mutually_exclusive_group()
    storage.add_argument("--journal-file",
                         dest="journal",
                         type=PickleStore,
                         default=PickleStore(pathlib.Path.home() /
                                             ".vtes-journal"))
    storage.add_argument("--journal-db", dest="journal", type=DatabaseStore)
    subcommands = parser.add_subparsers()

    add = subcommands.add_parser("add")
    add.add_argument("--date", default=None, type=dateutil.parser.parse)
    add.add_argument("--namespace", default=None, type=parse_namespace)
    add.add_argument("players", action=ParsePlayerAction, nargs='*')
    add.set_defaults(func=add_command)

    games = subcommands.add_parser("games")
    games.add_argument("--namespace", default=None, type=parse_namespace)
    games.set_defaults(func=games_command)

    gamefix = subcommands.add_parser("game-fix")
    gamefix.add_argument("game_index", type=int)
    gamefix.add_argument("--date", default=None, type=dateutil.parser.parse)
    gamefix.add_argument("--namespace", default=None, type=parse_namespace)
    gamefix.add_argument("players", action=ParsePlayerAction, nargs='*')
    gamefix.set_defaults(func=gamefix_command)

    decks = subcommands.add_parser("decks")
    decks.add_argument("player", nargs='?', default=None)
    decks.add_argument("--namespace", default=None, type=parse_namespace)
    decks.set_defaults(func=decks_command)

    stats = subcommands.add_parser("stats")
    stats.add_argument("--namespace", default=None, type=parse_namespace)
    stats.set_defaults(func=stats_command)

    args = parser.parse_args()

    command = args.func
    delattr(args, "func")
    command(**vars(args))
Example #36
def parse_cmdline(argv):
    """Parse commandline"""
    parser = argparse.ArgumentParser()

    parser.add_argument('-v', '--verbosity', action='count', help='increase output verbosity')

    subparsers = parser.add_subparsers()

    parser_fetch = subparsers.add_parser('fetch')
    parser_fetch.set_defaults(func=command_fetch)
    parser_fetch.add_argument('stock', help='WKN, ticker name of a stock')

    parser_fetch.add_argument('-s', '--start-date', type=mkdate, default=datetime.date.today(), help='Start date (default today)')
    parser_fetch.add_argument('-e', '--end-date', type=mkdate, default=datetime.date.today(), help='End date (default today)')
    parser_fetch.add_argument('--intra-day', default=None, action='store_true', help='Intra day information')

    args = parser.parse_args(argv)

    return args
Example #37
def setup_reporter_parser(parser):
    setup_common(parser)
    subparsers = parser.add_subparsers(dest="reporter_cmd", required=True)
    report = setup_common(
        subparsers.add_parser(
            "report", formatter_class=argparse.ArgumentDefaultsHelpFormatter))
    default_to = parse_arg_fromto(serialize_arg_fromto(
        datetime.datetime.now())).replace(tzinfo=None)
    default_from = parse_arg_fromto(
        default_to.date().isoformat()).replace(tzinfo=None)
    report.add_argument(
        "--from",
        type=parse_arg_fromto,
        dest="from_",
        default=default_from,
        help="left bound",
    )
    report.add_argument("--to",
                        type=parse_arg_fromto,
                        default=default_to,
                        help="right bound")
    report.add_argument("--chunk-minutes",
                        type=int,
                        default=0,
                        help="minutes per a single aggregate")
    report.add_argument(
        "--chunk-colors",
        type=int,
        default=0,
        help="top x categories in a single aggregate",
    )
    report.add_argument(
        "--format",
        default="basic",
        help="output format, basic jsonstream or jsonpretty",
    )
    report.add_argument("--features",
                        nargs="*",
                        help="enable various report features")
    report.add_argument("--output",
                        default="-",
                        help='supported are "-" (default) and "workspace"')
Example #38
def cli():
    import argparse

    desc = """A command-line utility for interacting with the Decipher API.
    Help is available on subcommands (e.g. `DecipherAPI.pull --help`)"""

    parser = argparse.ArgumentParser(description=desc)
    subparsers = parser.add_subparsers(title='valid subcommands')

    parser.add_argument(
        "-U", "--username", help="user identification", type=str, required=True)
    parser.add_argument(
        "-P", "--password", help="user password", type=str, required=True)
    parser.add_argument(
        "-H", "--host", help="host", default=DecipherAPI.client.DEFAULT_HOST, type=str)

    # pull command
    pull_parser = subparsers.add_parser('pull', help='pull survey data')
    pull_parser.add_argument(
        "-s", "--survey", help="survey name", type=str, required=True)
    pull_parser.add_argument(
        "-t", "--start", help="utc start time: YYYY-MM-DDTHH:MM:SS.mmmmmm", type=str)
    pull_parser.add_argument(
        "-T", "--end", help="utc end time: YYYY-MM-DDTHH:MM:SS.mmmmmm", type=str)
    pull_parser.add_argument("-S", "--status", help="survey status", choices=[
                             'all', 'partial', 'complete', 'qualified', 'terminated', 'overquota'], type=str)
    pull_parser.add_argument("-c", "--columns", help="include columns", type=str)
    pull_parser.add_argument(
        "-F", "--filters", help="filter columns", type=str)
    pull_parser.add_argument(
        "-f", "--fmt", help="return format", choices=["json", "tsv", "csv"], default="json", type=str)
    pull_parser.set_defaults(func=pull_command)

    # list command
    list_parser = subparsers.add_parser('list', help='list surveys')
    list_parser.add_argument(
        "-f", "--fmt", help="return format", choices=["json", "tsv", "csv"], default="json", type=str)
    list_parser.set_defaults(func=list_command)

    args = parser.parse_args()
    args.func(args)
Example #39
def create_parser():
    parser = argparse.ArgumentParser("swclient")
    parser.add_argument("server_url", help="Address of Snailwatch server")
    subparsers = parser.add_subparsers(title="action", dest="action")

    create_user_parser = subparsers.add_parser("create-user",
                                               help="Create a user account")
    create_user_parser.add_argument("token", help="Admin token")
    create_user_parser.add_argument("username", help="Username")
    create_user_parser.add_argument("--email",
                                    default='',
                                    help="E-mail (used for regression "
                                         "notifications)")

    create_project_parser = subparsers.add_parser("create-project",
                                                  help="Create a project")
    create_project_parser.add_argument("token", help="Session token")
    create_project_parser.add_argument("name", help="Project name")
    create_project_parser.add_argument("--repository",
                                       default='',
                                       help="URL of the project repository")

    upload_parser = subparsers.add_parser(
        "upload", help="Upload a single measurement to Snailwatch")
    upload_parser.add_argument("token", help="Upload token")
    upload_parser.add_argument("benchmark", help="Benchmark name")
    upload_parser.add_argument("env", help="Environment of the benchmark")
    upload_parser.add_argument("result", help="Measured result")
    upload_parser.add_argument("--timestamp",
                               help="Time of measurement "
                                    "(YYYY-MM-DDTHH:mm:ss)")

    upload_file_parser = subparsers.add_parser(
        "upload-file", help="Upload measurement(s) from JSON file")
    upload_file_parser.add_argument("token", help="Upload token")
    upload_file_parser.add_argument("filename",
                                    help="Path to measurement file")

    return parser
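A quick usage sketch for the parser returned by create_parser() above; the server URL, token, and measurement values are made up:

parser = create_parser()
args = parser.parse_args([
    "https://snailwatch.example.org",              # server_url (made-up address)
    "upload", "UPLOAD-TOKEN", "sorting-bench", "gcc-9.3", "12.5",
])
print(args.action, args.benchmark, args.result)    # -> upload sorting-bench 12.5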
Example #40
0
def main(argv):
    # get lock to avoid multiple simultaneous instances of this script
    get_lock(os.path.basename(__file__))

    if '--run-tests' in argv:
        run_tests("--forever" in argv)
    else:
        parser = argparse.ArgumentParser(
            description="Monitors a book archive and commits changes to git.")
        subparsers = parser.add_subparsers(title='subcommands', metavar="")

        parser_update = subparsers.add_parser(
            "update", help="Check archive for changes.")
        parser_update.add_argument("archive",
                                   help="Path to the archive.",
                                   metavar="PATH")
        parser_update.add_argument("-f",
                                   "--forever",
                                   help="Loop script forever.",
                                   action='store_true')
        parser_update.set_defaults(func=update)

        parser_init = subparsers.add_parser(
            "git-init", help="Initialize archive from remote git repository.")
        parser_init.add_argument("archive",
                                 help="Path to the archive.",
                                 metavar="PATH")
        parser_init.add_argument(
            "git_url",
            help="Initialize the archive from this git repository.",
            metavar="URL")
        parser_init.set_defaults(func=git_init)

        args = parser.parse_args()
        if "func" in args:
            args.func(args)
        else:
            print(parser.format_help())
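The final 'if "func" in args' check is needed because, in Python 3, subcommands created with add_subparsers() are optional by default, so running the script without one would otherwise raise an AttributeError. A minimal alternative sketch, assuming Python 3.7+ where add_subparsers() accepts required=:

import argparse

parser = argparse.ArgumentParser()
subparsers = parser.add_subparsers(dest="command", required=True)
subparsers.add_parser("update")
subparsers.add_parser("git-init")

args = parser.parse_args(["update"])
print(args.command)   # -> update; a bare invocation now fails with a usage error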
Example #41
0
def initialize_parser():
    parser = argparse.ArgumentParser(description='Warsaw Bus analytics tool.')
    subparsers = parser.add_subparsers(dest='subcommand',
                                       help='Choose analytics command.')

    # Add positions sub parser
    _parser = subparsers.add_parser('positions', help='Fetch bus positions')
    _parser.add_argument('--api_key', type=str, help='API key')
    _parser.add_argument('--file', type=str, help='Data destination filename')
    _parser.add_argument('--start', type=str, help='Fetching start timestamp')
    _parser.add_argument('--end', type=str, help='Fetching end timestamp')

    # Add schedules sub parser
    _parser = subparsers.add_parser('schedules', help='Fetch bus schedules')
    _parser.add_argument('--api_key', type=str, help='API key')
    _parser.add_argument('--file', type=str, help='Data destination filename')

    # Add speed sub parser
    _parser = subparsers.add_parser('speeds', help='Calculate bus speeds')
    _parser.add_argument('--file', type=str, help='Data destination filename')
    _parser.add_argument('--positions', type=str, help='Positions filename')
    _parser.add_argument('--start', type=str, help='Fetching start timestamp')
    _parser.add_argument('--end', type=str, help='Fetching end timestamp')
    _parser.add_argument('--dry', dest='dry', action='store_true',
                         help='Dry run - just visualise data')

    # Add delay sub parser
    _parser = subparsers.add_parser('delays', help='Calculate bus delays')
    _parser.add_argument('--file', type=str, help='Data destination filename')
    _parser.add_argument('--schedules', type=str, help='Schedules filename')
    _parser.add_argument('--positions', type=str, help='Positions filename')
    _parser.add_argument('--start', type=str, help='Fetching start timestamp')
    _parser.add_argument('--end', type=str, help='Fetching end timestamp')
    _parser.add_argument('--dry', dest='dry', action='store_true',
                         help='Dry run - just visualise data')

    return parser
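Because initialize_parser() records the chosen action in args.subcommand instead of attaching a handler, the caller normally dispatches on that string. A hedged sketch of such a dispatcher (the handler name fetch_positions and the wrapper run_cli are hypothetical, not taken from the original project):

def fetch_positions(args):
    # Hypothetical handler; the real tool would query the bus-position API
    # with args.api_key between args.start and args.end.
    print("writing positions to", args.file)

def run_cli():
    parser = initialize_parser()
    args = parser.parse_args()
    handlers = {"positions": fetch_positions}      # extend with the other subcommands
    handler = handlers.get(args.subcommand)
    if handler is None:
        parser.error("please choose a subcommand")
    handler(args)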
def main():

    def main_help(args):
        parser.print_help(sys.stderr)

    parser = argparse.ArgumentParser(
        description='CLI Tools',
        prog='tools.py')

    parser_user_parent = argparse.ArgumentParser()
    subparsers = parser.add_subparsers()

    parser_export_links = subparsers.add_parser(
        'export',
        description='Export as JSON'
    )
    parser_export_links.add_argument(
        '-u', '--user', type=str, required=True
    )
    parser_export_links.set_defaults(func=export_bookmarks)

    parser_import_links = subparsers.add_parser(
        'import',
        description='Import links from external sources')
    parser_import_links.add_argument(
        '-u', '--user', type=str, required=True)
    parser_import_links.add_argument('source', type=str)
    parser_import_links.set_defaults(func=import_bookmarks)

    create_user_parser = subparsers.add_parser('create_user', description='Create a user')
    create_user_parser.add_argument('-u', '--username', type=str, required=True)
    create_user_parser.add_argument('-p', '--password', type=str, required=True)
    create_user_parser.set_defaults(func=create_user)

    parser.set_defaults(func=main_help)
    args = parser.parse_args()
    args.func(args)
def get_args():
    parser = argparse.ArgumentParser(description='Maintenance')
    subparsers = parser.add_subparsers(help='Options')
    sp = subparsers.add_parser('add', help='Add a maintenance')
    sp.add_argument('-k', '--key', required=True, help='Api key')
    sp.add_argument('-e', '--email', required=True, help='User email')
    sp.add_argument('-d',
                    '--duration',
                    required=True,
                    help='Duration in seconds')
    sp.add_argument('-s',
                    '--service',
                    required=True,
                    help='PagerDuty service id')
    sp.set_defaults(cmd='add')
    sp = subparsers.add_parser('end', help='End maintenance')
    sp.set_defaults(cmd='end')
    sp.add_argument('-k', '--key', required=True, help='Api key')
    sp.add_argument('-e', '--email', required=True, help='User email')
    sp.add_argument('-s',
                    '--service',
                    required=True,
                    help='PagerDuty service id')
    sp = subparsers.add_parser('change', help='Change maintenance')
    sp.set_defaults(cmd='change')
    sp.add_argument('-k', '--key', required=True, help='Api key')
    sp.add_argument('-e', '--email', required=True, help='User email')
    sp.add_argument('-d',
                    '--duration',
                    required=True,
                    help='Duration in seconds')
    sp.add_argument('-s',
                    '--service',
                    required=True,
                    help='PagerDuty service id')
    return parser.parse_args()
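Here each subparser tags the namespace via set_defaults(cmd=...), so the caller branches on args.cmd. A minimal, purely illustrative dispatcher around get_args() (the real script would call the PagerDuty API instead of printing):

def dispatch():
    args = get_args()
    cmd = getattr(args, 'cmd', None)   # None if no subcommand was given
    if cmd == 'add':
        print('adding %ss of maintenance for service %s' % (args.duration, args.service))
    elif cmd == 'end':
        print('ending maintenance for service %s' % args.service)
    elif cmd == 'change':
        print('changing maintenance for service %s to %ss' % (args.service, args.duration))
    else:
        print('no subcommand given')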
Example #44
0
def get_parser():
    parser = argparse.ArgumentParser()
    subparsers = parser.add_subparsers(help='sub-command help')

    ht = "Run subsections of a DAG for a specified date range"
    parser_backfill = subparsers.add_parser('backfill', help=ht)
    parser_backfill.add_argument("dag_id", help="The id of the dag to run")
    parser_backfill.add_argument(
        "-t", "--task_regex",
        help="The regex to filter specific task_ids to backfill (optional)")
    parser_backfill.add_argument(
        "-s", "--start_date", help="Override start_date YYYY-MM-DD")
    parser_backfill.add_argument(
        "-e", "--end_date", help="Override end_date YYYY-MM-DD")
    parser_backfill.add_argument(
        "-m", "--mark_success",
        help=mark_success_help, action="store_true")
    parser_backfill.add_argument(
        "-l", "--local",
        help="Run the task using the LocalExecutor", action="store_true")
    parser_backfill.add_argument(
        "-x", "--donot_pickle",
        help=(
            "Do not attempt to pickle the DAG object to send over "
            "to the workers, just tell the workers to run their version "
            "of the code."),
        action="store_true")
    parser_backfill.add_argument(
        "-a", "--include_adhoc",
        help="Include dags with the adhoc parameter.", action="store_true")
    parser_backfill.add_argument(
        "-i", "--ignore_dependencies",
        help=(
            "Skip upstream tasks, run only the tasks "
            "matching the regexp. Only works in conjunction with task_regex"),
        action="store_true")
    parser_backfill.add_argument(
        "-sd", "--subdir", help=subdir_help,
        default=DAGS_FOLDER)
    parser_backfill.add_argument(
        "-dr", "--dry_run", help="Perform a dry run", action="store_true")
    parser_backfill.set_defaults(func=backfill)

    ht = "Clear a set of task instance, as if they never ran"
    parser_clear = subparsers.add_parser('clear', help=ht)
    parser_clear.add_argument("dag_id", help="The id of the dag to run")
    parser_clear.add_argument(
        "-t", "--task_regex",
        help="The regex to filter specific task_ids to clear (optional)")
    parser_clear.add_argument(
        "-s", "--start_date", help="Override start_date YYYY-MM-DD")
    parser_clear.add_argument(
        "-e", "--end_date", help="Override end_date YYYY-MM-DD")
    ht = "Include upstream tasks"
    parser_clear.add_argument(
        "-u", "--upstream", help=ht, action="store_true")
    ht = "Only failed jobs"
    parser_clear.add_argument(
        "-f", "--only_failed", help=ht, action="store_true")
    ht = "Only running jobs"
    parser_clear.add_argument(
        "-r", "--only_running", help=ht, action="store_true")
    ht = "Include downstream tasks"
    parser_clear.add_argument(
        "-d", "--downstream", help=ht, action="store_true")
    parser_clear.add_argument(
        "-sd", "--subdir", help=subdir_help,
        default=DAGS_FOLDER)
    parser_clear.add_argument(
        "-c", "--no_confirm", help=ht, action="store_true")
    parser_clear.set_defaults(func=clear)

    ht = "Run a single task instance"
    parser_run = subparsers.add_parser('run', help=ht)
    parser_run.add_argument("dag_id", help="The id of the dag to run")
    parser_run.add_argument("task_id", help="The task_id to run")
    parser_run.add_argument(
        "execution_date", help="The execution date to run")
    parser_run.add_argument(
        "-sd", "--subdir", help=subdir_help,
        default=DAGS_FOLDER)
    parser_run.add_argument(
        "-s", "--task_start_date",
        help="Override the tasks's start_date (used internally)",)
    parser_run.add_argument(
        "-m", "--mark_success", help=mark_success_help, action="store_true")
    parser_run.add_argument(
        "-f", "--force",
        help="Force a run regardless or previous success",
        action="store_true")
    parser_run.add_argument(
        "-l", "--local",
        help="Runs the task locally, don't use the executor",
        action="store_true")
    parser_run.add_argument(
        "-r", "--raw",
        help=argparse.SUPPRESS,
        action="store_true")
    parser_run.add_argument(
        "-i", "--ignore_dependencies",
        help="Ignore upstream and depends_on_past dependencies",
        action="store_true")
    parser_run.add_argument(
        "--ship_dag",
        help="Pickles (serializes) the DAG and ships it to the worker",
        action="store_true")
    parser_run.add_argument(
        "-p", "--pickle",
        help="Serialized pickle object of the entire dag (used internally)")
    parser_run.add_argument(
        "-j", "--job_id", help=argparse.SUPPRESS)
    parser_run.set_defaults(func=run)

    ht = (
        "Test a task instance. This will run a task without checking for "
        "dependencies or recording it's state in the database."
    )
    parser_test = subparsers.add_parser('test', help=ht)
    parser_test.add_argument("dag_id", help="The id of the dag to run")
    parser_test.add_argument("task_id", help="The task_id to run")
    parser_test.add_argument(
        "execution_date", help="The execution date to run")
    parser_test.add_argument(
        "-sd", "--subdir", help=subdir_help,
        default=DAGS_FOLDER)
    parser_test.add_argument(
        "-dr", "--dry_run", help="Perform a dry run", action="store_true")
    parser_test.set_defaults(func=test)

    ht = "Get the status of a task instance."
    parser_task_state = subparsers.add_parser('task_state', help=ht)
    parser_task_state.add_argument("dag_id", help="The id of the dag to check")
    parser_task_state.add_argument("task_id", help="The task_id to check")
    parser_task_state.add_argument(
        "execution_date", help="The execution date to check")
    parser_task_state.add_argument(
        "-sd", "--subdir", help=subdir_help,
        default=DAGS_FOLDER)
    parser_task_state.set_defaults(func=task_state)

    ht = "Start a Airflow webserver instance"
    parser_webserver = subparsers.add_parser('webserver', help=ht)
    parser_webserver.add_argument(
        "-p", "--port",
        default=conf.get('webserver', 'WEB_SERVER_PORT'),
        type=int,
        help="Set the port on which to run the web server")
    parser_webserver.add_argument(
        "-w", "--threads",
        default=conf.get('webserver', 'THREADS'),
        type=int,
        help="Number of threads to run the webserver on")
    parser_webserver.add_argument(
        "-hn", "--hostname",
        default=conf.get('webserver', 'WEB_SERVER_HOST'),
        help="Set the hostname on which to run the web server")
    ht = "Use the server that ships with Flask in debug mode"
    parser_webserver.add_argument(
        "-d", "--debug", help=ht, action="store_true")
    parser_webserver.set_defaults(func=webserver)

    ht = "Start a scheduler scheduler instance"
    parser_scheduler = subparsers.add_parser('scheduler', help=ht)
    parser_scheduler.add_argument(
        "-d", "--dag_id", help="The id of the dag to run")
    parser_scheduler.add_argument(
        "-sd", "--subdir", help=subdir_help,
        default=DAGS_FOLDER)
    parser_scheduler.add_argument(
        "-n", "--num_runs",
        default=None,
        type=int,
        help="Set the number of runs to execute before exiting")
    parser_scheduler.add_argument(
        "-p", "--do_pickle",
        default=False,
        help=(
            "Attempt to pickle the DAG object to send over "
            "to the workers, instead of letting workers run their version "
            "of the code."),
        action="store_true")
    parser_scheduler.set_defaults(func=scheduler)

    ht = "Initialize the metadata database"
    parser_initdb = subparsers.add_parser('initdb', help=ht)
    parser_initdb.set_defaults(func=initdb)

    ht = "Burn down and rebuild the metadata database"
    parser_resetdb = subparsers.add_parser('resetdb', help=ht)
    parser_resetdb.set_defaults(func=resetdb)

    ht = "Upgrade metadata database to latest version"
    parser_upgradedb = subparsers.add_parser('upgradedb', help=ht)
    parser_upgradedb.set_defaults(func=upgradedb)

    ht = "List the DAGs"
    parser_list_dags = subparsers.add_parser('list_dags', help=ht)
    parser_list_dags.add_argument(
        "-sd", "--subdir", help=subdir_help,
        default=DAGS_FOLDER)
    parser_list_dags.set_defaults(func=list_dags)

    ht = "List the tasks within a DAG"
    parser_list_tasks = subparsers.add_parser('list_tasks', help=ht)
    parser_list_tasks.add_argument(
        "-t", "--tree", help="Tree view", action="store_true")
    parser_list_tasks.add_argument(
        "dag_id", help="The id of the dag")
    parser_list_tasks.add_argument(
        "-sd", "--subdir", help=subdir_help,
        default=DAGS_FOLDER)
    parser_list_tasks.set_defaults(func=list_tasks)

    ht = "Start a Celery worker node"
    parser_worker = subparsers.add_parser('worker', help=ht)
    parser_worker.add_argument(
        "-q", "--queues",
        help="Comma delimited list of queues to serve",
        default=conf.get('celery', 'DEFAULT_QUEUE'))
    parser_worker.add_argument(
        "-c", "--concurrency",
        type=int,
        help="The number of worker processes",
        default=conf.get('celery', 'celeryd_concurrency'))
    parser_worker.set_defaults(func=worker)

    ht = "Serve logs generate by worker"
    parser_logs = subparsers.add_parser('serve_logs', help=ht)
    parser_logs.set_defaults(func=serve_logs)

    ht = "Start a Celery Flower"
    parser_flower = subparsers.add_parser('flower', help=ht)
    parser_flower.add_argument(
        "-p", "--port", help="The port")
    parser_flower.add_argument(
        "-a", "--broker_api", help="Broker api")
    parser_flower.set_defaults(func=flower)

    parser_version = subparsers.add_parser('version', help="Show version")
    parser_version.set_defaults(func=version)

    ht = "Start a kerberos ticket renewer"
    parser_kerberos = subparsers.add_parser('kerberos', help=ht)
    parser_kerberos.add_argument(
        "-kt", "--keytab", help="keytab",
        nargs='?', default=conf.get('kerberos', 'keytab'))
    parser_kerberos.add_argument(
        "principal", help="kerberos principal",
        nargs='?', default=conf.get('kerberos', 'principal'))
    parser_kerberos.set_defaults(func=kerberos)

    return parser
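Nearly every subcommand above repeats the same -sd/--subdir option. One way to cut that repetition, sketched here with stand-in values rather than Airflow's real subdir_help and DAGS_FOLDER, is a small helper (a parents=[...] parser, as in the ucare example further below, works equally well):

import argparse

DAGS_FOLDER = "~/airflow/dags"                              # stand-in default
subdir_help = "Directory from which to look for the dag"    # stand-in help text

def add_subdir_option(sub):
    # Attach the shared -sd/--subdir option to a subparser.
    sub.add_argument("-sd", "--subdir", help=subdir_help, default=DAGS_FOLDER)

parser = argparse.ArgumentParser()
subparsers = parser.add_subparsers()
for name in ("backfill", "clear", "run", "test"):
    add_subdir_option(subparsers.add_parser(name))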
Example #45
0
def main():
    try:
        import argparse
        parser = argparse.ArgumentParser(description = "RubberPaper")
        parser.add_argument("-v", "--verbose", metavar = "verbose", help = "Verbose output")
        subparser = parser.add_subparsers(dest="command", help = "commands")

        feed_parser = subparser.add_parser("feed", help = "Collect one or more feeds")
        feed_parser.add_argument('--url')
        feed_parser.add_argument('--name')

        render_parser = subparser.add_parser("render", help = "Render a PDF document")
        render_parser.add_argument('--date')
        render_parser.add_argument('--output', type = argparse.FileType("wb", 0))
        render_parser.add_argument('--url')

        convert_parser = subparser.add_parser("convert", help = "Convert a storage")
        convert_parser.add_argument('--input', required = True)
        convert_parser.add_argument('--output', required = True)

        try:
            args = parser.parse_args()
        except IOError, err:
            parser.error(str(err))

        import shelve
        from contextlib import closing

        if args.command == "feed":
            ''' Process the feeds
            '''
            dateparser = dateutil.parser.parser()
            feeds = list()
            if args.url:
                feeds.append(args.url)
            elif args.name:
                feed_list_filename = os.path.expanduser(os.path.join("~", ".rubber", "feed-lists", args.name))
                with open(feed_list_filename, "rt") as f:
                    for feed in f:
                        feeds.append(feed.strip())
            else:
                for dirpath, dirnames, filenames in os.walk(os.path.expanduser(os.path.join("~", ".rubber", "feed-lists"))):
                    for filename in filenames:
                        feed_list_filename = os.path.join(dirpath, filename)
                        with open(feed_list_filename, "rt") as f:
                            for feed in f:
                                feeds.append(feed.strip())

            import feedparser
            for feed in feeds:
                try:
                    print "Processing feed {0}...".format(feed)
                    index_file_name = os.path.expanduser(os.path.join("~", ".rubber", "index", urlparse.urlsplit(feed).hostname))
                    with closing(shelve.open(index_file_name, flag = "c")) as storage:
                        news = feedparser.parse(feed)
                        # Message encoding
                        codepage = news.encoding
                        for n in reversed(news.entries):

                            # Link to the news item
                            url = n.link
                            # News item identifier
                            if hasattr(n, "id"):
                                topicid = n.id
                            else:
                                topicid = url

                            if not storage.has_key(topicid.encode("utf-8")):
                                article = dict()

                                # Link to the article
                                article["url"] = url
                                print "URL:{0}".format(article["url"].encode("utf-8")).strip()

                                # News item title
                                article["title"] = n.title
                                print "Title:{0}".format(article["title"].encode("utf-8")).strip()

                                # News item publication date
                                article["date"] = None
                                if hasattr(n, "published"):
                                    article["date"] = dateparser.parse(n.published)
                                elif hasattr(n, "updated"):
                                    article["date"] = dateparser.parse(n.updated)
                                else:
                                    article["date"] = datetime.datetime.now(pytz.timezone(DEFAULT_TIMEZONE))
                                if article["date"]:
                                    print "Date:{0}".format(article["date"]).strip()

                                # News item author
                                if hasattr(n, "author"):
                                    article["author"] = n.author
                                    print "Author:{0}".format(article["author"].encode("utf-8")).strip()

                                # News item content
                                if hasattr(n, "summary"):
                                    article["summary"] = n.summary

                                storage[topicid.encode("utf-8")] = article
                except Exception, exc:
                    print exc
Example #46
0
            query = query[:-2] + ' WHERE task_id = %s' % (task_id)
            cursor.execute(query, task)
            db.commit()
            db.close()
        except Exception:
            return False
        # Success path (a "finally: return True" here would have masked the
        # False return above).
        return True

if __name__ == "__main__":
    import argparse

    gt = gittasks()

    parser = argparse.ArgumentParser(description="Tasks in your codebase")
    subparsers = parser.add_subparsers(help='commands', dest='subparser_name')

    add = subparsers.add_parser('add', help='add a task')
    add.add_argument('task', action='store', help='the task in quotes')

    do = subparsers.add_parser('do', help='complete a task')
    do.add_argument(
        'task_id',
        action='store',
        help='the task id to mark as done'
    )

    parse = subparsers.add_parser(
        'parse',
        help='parse a file or directory for tasks'
    )
Example #47
0
def get_parser():
    parser = argparse.ArgumentParser()
    subparsers = parser.add_subparsers(help='sub-command help')

    ht = "Run subsections of a DAG for a specified date range"
    parser_backfill = subparsers.add_parser('backfill', help=ht)
    parser_backfill.add_argument("dag_id", help="The id of the dag to run")
    parser_backfill.add_argument(
        "-t", "--task_regex",
        help="The regex to filter specific task_ids to backfill (optional)")
    parser_backfill.add_argument(
        "-s", "--start_date", help="Override start_date YYYY-MM-DD")
    parser_backfill.add_argument(
        "-e", "--end_date", help="Override end_date YYYY-MM-DD")
    parser_backfill.add_argument(
        "-m", "--mark_success",
        help=mark_success_help, action="store_true")
    parser_backfill.add_argument(
        "-l", "--local",
        help="Run the task using the LocalExecutor", action="store_true")
    parser_backfill.add_argument(
        "-x", "--donot_pickle",
        help=(
            "Do not attempt to pickle the DAG object to send over "
            "to the workers, just tell the workers to run their version "
            "of the code."),
        action="store_true")
    parser_backfill.add_argument(
        "-a", "--include_adhoc",
        help="Include dags with the adhoc parameter.", action="store_true")
    parser_backfill.add_argument(
        "-i", "--ignore_dependencies",
        help=(
            "Skip upstream tasks, run only the tasks "
            "matching the regexp. Only works in conjunction with task_regex"),
        action="store_true")
    parser_backfill.add_argument(
        "-sd", "--subdir", help=subdir_help,
        default=DAGS_FOLDER)
    parser_backfill.set_defaults(func=backfill)

    ht = "Clear a set of task instance, as if they never ran"
    parser_clear = subparsers.add_parser('clear', help=ht)
    parser_clear.add_argument("dag_id", help="The id of the dag to run")
    parser_clear.add_argument(
        "-t", "--task_regex",
        help="The regex to filter specific task_ids to clear (optional)")
    parser_clear.add_argument(
        "-s", "--start_date", help="Override start_date YYYY-MM-DD")
    parser_clear.add_argument(
        "-e", "--end_date", help="Override end_date YYYY-MM-DD")
    ht = "Include upstream tasks"
    parser_clear.add_argument(
        "-u", "--upstream", help=ht, action="store_true")
    ht = "Only failed jobs"
    parser_clear.add_argument(
        "-f", "--only_failed", help=ht, action="store_true")
    ht = "Only running jobs"
    parser_clear.add_argument(
        "-r", "--only_running", help=ht, action="store_true")
    ht = "Include downstream tasks"
    parser_clear.add_argument(
        "-d", "--downstream", help=ht, action="store_true")
    parser_clear.add_argument(
        "-sd", "--subdir", help=subdir_help,
        default=DAGS_FOLDER)
    parser_clear.add_argument(
        "-c", "--no_confirm", help=ht, action="store_true")
    parser_clear.set_defaults(func=clear)

    ht = "Run a single task instance"
    parser_run = subparsers.add_parser('run', help=ht)
    parser_run.add_argument("dag_id", help="The id of the dag to run")
    parser_run.add_argument("task_id", help="The task_id to run")
    parser_run.add_argument(
        "execution_date", help="The execution date to run")
    parser_run.add_argument(
        "-sd", "--subdir", help=subdir_help,
        default=DAGS_FOLDER)
    parser_run.add_argument(
        "-s", "--task_start_date",
        help="Override the tasks's start_date (used internally)",)
    parser_run.add_argument(
        "-m", "--mark_success", help=mark_success_help, action="store_true")
    parser_run.add_argument(
        "-f", "--force",
        help="Force a run regardless or previous success",
        action="store_true")
    parser_run.add_argument(
        "-l", "--local",
        help="Runs the task locally, don't use the executor",
        action="store_true")
    parser_run.add_argument(
        "-r", "--raw",
        help=argparse.SUPPRESS,
        action="store_true")
    parser_run.add_argument(
        "-i", "--ignore_dependencies",
        help="Ignore upstream and depends_on_past dependencies",
        action="store_true")
    parser_run.add_argument(
        "--ship_dag",
        help="Pickles (serializes) the DAG and ships it to the worker",
        action="store_true")
    parser_run.add_argument(
        "-p", "--pickle",
        help="Serialized pickle object of the entire dag (used internally)")
    parser_run.add_argument(
        "-j", "--job_id", help=argparse.SUPPRESS)
    parser_run.set_defaults(func=run)

    ht = (
        "Test a task instance. This will run a task without checking for "
        "dependencies or recording it's state in the database."
    )
    parser_test = subparsers.add_parser('test', help=ht)
    parser_test.add_argument("dag_id", help="The id of the dag to run")
    parser_test.add_argument("task_id", help="The task_id to run")
    parser_test.add_argument(
        "execution_date", help="The execution date to run")
    parser_test.add_argument(
        "-sd", "--subdir", help=subdir_help,
        default=DAGS_FOLDER)
    parser_test.set_defaults(func=test)

    ht = "Get the status of a task instance."
    parser_task_state = subparsers.add_parser('task_state', help=ht)
    parser_task_state.add_argument("dag_id", help="The id of the dag to check")
    parser_task_state.add_argument("task_id", help="The task_id to check")
    parser_task_state.add_argument(
        "execution_date", help="The execution date to check")
    parser_task_state.add_argument(
        "-sd", "--subdir", help=subdir_help,
        default=DAGS_FOLDER)
    parser_task_state.set_defaults(func=task_state)

    ht = "Start a Airflow webserver instance"
    parser_webserver = subparsers.add_parser('webserver', help=ht)
    parser_webserver.add_argument(
        "-p", "--port",
        default=conf.get('webserver', 'WEB_SERVER_PORT'),
        type=int,
        help="Set the port on which to run the web server")
    parser_webserver.add_argument(
        "-w", "--threads",
        default=conf.get('webserver', 'THREADS'),
        type=int,
        help="Number of threads to run the webserver on")
    parser_webserver.add_argument(
        "-hn", "--hostname",
        default=conf.get('webserver', 'WEB_SERVER_HOST'),
        help="Set the hostname on which to run the web server")
    ht = "Use the server that ships with Flask in debug mode"
    parser_webserver.add_argument(
        "-d", "--debug", help=ht, action="store_true")
    parser_webserver.set_defaults(func=webserver)

    ht = "Start a scheduler scheduler instance"
    parser_scheduler = subparsers.add_parser('scheduler', help=ht)
    parser_scheduler.add_argument(
        "-d", "--dag_id", help="The id of the dag to run")
    parser_scheduler.add_argument(
        "-sd", "--subdir", help=subdir_help,
        default=DAGS_FOLDER)
    parser_scheduler.add_argument(
        "-n", "--num_runs",
        default=None,
        type=int,
        help="Set the number of runs to execute before exiting")
    parser_scheduler.set_defaults(func=scheduler)

    ht = "Initialize the metadata database"
    parser_initdb = subparsers.add_parser('initdb', help=ht)
    parser_initdb.set_defaults(func=initdb)

    ht = "Burn down and rebuild the metadata database"
    parser_resetdb = subparsers.add_parser('resetdb', help=ht)
    parser_resetdb.set_defaults(func=resetdb)

    ht = "Upgrade metadata database to latest version"
    parser_upgradedb = subparsers.add_parser('upgradedb', help=ht)
    parser_upgradedb.set_defaults(func=upgradedb)

    ht = "List the DAGs"
    parser_list_dags = subparsers.add_parser('list_dags', help=ht)
    parser_list_dags.add_argument(
        "-sd", "--subdir", help=subdir_help,
        default=DAGS_FOLDER)
    parser_list_dags.set_defaults(func=list_dags)

    ht = "List the tasks within a DAG"
    parser_list_tasks = subparsers.add_parser('list_tasks', help=ht)
    parser_list_tasks.add_argument(
        "-t", "--tree", help="Tree view", action="store_true")
    parser_list_tasks.add_argument(
        "dag_id", help="The id of the dag")
    parser_list_tasks.add_argument(
        "-sd", "--subdir", help=subdir_help,
        default=DAGS_FOLDER)
    parser_list_tasks.set_defaults(func=list_tasks)

    ht = "Start a Celery worker node"
    parser_worker = subparsers.add_parser('worker', help=ht)
    parser_worker.add_argument(
        "-q", "--queues",
        help="Comma delimited list of queues to cater serve",
        default=conf.get('celery', 'DEFAULT_QUEUE'))
    parser_worker.set_defaults(func=worker)

    ht = "Serve logs generate by worker"
    parser_logs = subparsers.add_parser('serve_logs', help=ht)
    parser_logs.set_defaults(func=serve_logs)

    ht = "Start a Celery Flower"
    parser_flower = subparsers.add_parser('flower', help=ht)
    parser_flower.add_argument(
        "-p", "--port", help="The port")
    parser_flower.add_argument(
        "-a", "--broker_api", help="Broker api")
    parser_flower.set_defaults(func=flower)

    parser_version = subparsers.add_parser('version', help="Show version")
    parser_version.set_defaults(func=version)

    return parser
    parser.add_argument('--log-file', help='the location of the log file')

    parser.add_argument('--bitcoind-rpc-connect', help='the hostname of the Bitcoind JSON-RPC server')
    parser.add_argument('--bitcoind-rpc-port', type=int, help='the port used to communicate with Bitcoind over JSON-RPC')
    parser.add_argument('--bitcoind-rpc-user', help='the username used to communicate with Bitcoind over JSON-RPC')
    parser.add_argument('--bitcoind-rpc-password', help='the password used to communicate with Bitcoind over JSON-RPC')

    parser.add_argument('--rpc-host', help='the host to provide the counterpartyd JSON-RPC API')
    parser.add_argument('--rpc-port', type=int, help='port on which to provide the counterpartyd JSON-RPC API')
    parser.add_argument('--rpc-user', help='required username to use the counterpartyd JSON-RPC API (via HTTP basic auth)')
    parser.add_argument('--rpc-password', help='required password (for rpc-user) to use the counterpartyd JSON-RPC API (via HTTP basic auth)')

    parser.add_argument('--zeromq-host', help='the host to provide the realtime event publisher')
    parser.add_argument('--zeromq-port', type=int, help='port on which to provide the realtime event publisher')

    subparsers = parser.add_subparsers(dest='action', help='the action to be taken')

    parser_server = subparsers.add_parser('server', help='run the server')

    parser_send = subparsers.add_parser('send', help='create and broadcast a *send* message')
    parser_send.add_argument('--from', metavar='SOURCE', dest='source', required=True, help='the source address')
    parser_send.add_argument('--to', metavar='DESTINATION', dest='destination', required=True, help='the destination address')
    parser_send.add_argument('--quantity', metavar='QUANTITY', required=True, help='the quantity of ASSET to send')
    parser_send.add_argument('--asset', metavar='ASSET', dest='asset', required=True, help='the ASSET of which you would like to send QUANTITY')

    parser_order = subparsers.add_parser('order', help='create and broadcast an *order* message')
    parser_order.add_argument('--from', metavar='SOURCE', dest='source', required=True, help='the source address')
    parser_order.add_argument('--get-quantity', metavar='GET_QUANTITY', required=True, help='the quantity of GET_ASSET that you would like to receive')
    parser_order.add_argument('--get-asset', metavar='GET_ASSET', required=True, help='the asset that you would like to receive')
    parser_order.add_argument('--give-quantity', metavar='GIVE_QUANTITY', required=True, help='the quantity of GIVE_ASSET that you are willing to give')
    parser_order.add_argument('--give-asset', metavar='GIVE_ASSET', required=True, help='the asset that you are willing to give')
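Note how --from is declared with dest='source' (and, near the top of this section, with dest='from_'): without an explicit dest, argparse would store the value under the attribute name "from", which is a Python keyword and only reachable via getattr(). A small sketch of the pattern:

import argparse

parser = argparse.ArgumentParser()
# dest= renames the attribute so the keyword "from" never becomes a name.
parser.add_argument('--from', metavar='SOURCE', dest='source', required=True)
args = parser.parse_args(['--from', 'some-address'])
print(args.source)   # -> some-address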
Example #49
0
def main():
    ## This boilerplate pattern is from
    ## http://stackoverflow.com/questions/3609852 
    ## We want defaults for the command line options to be fetched from the config file

    # Parse any conf_file specification
    # We make this parser with add_help=False so that
    # it doesn't parse -h and print help.
    conf_parser = argparse.ArgumentParser(
        description=__doc__, # printed with -h/--help
        # Don't mess with format of description
        formatter_class=argparse.RawDescriptionHelpFormatter,
        # Turn off help, so we print all options in response to -h
        add_help=False
        )
    conf_parser.add_argument("--config-file",
                             help="Specify config file", metavar="FILE", default=os.getenv('XDG_CONFIG_HOME', os.getenv('HOME', '~') + '/.config')+'/calendar.conf')
    conf_parser.add_argument("--config-section",
                             help="Specify config section; allows several caldav servers to be configured in the same config file",  default='default')
    args, remaining_argv = conf_parser.parse_known_args()

    config = {}
    try:
        with open(args.config_file) as config_file:
            config = json.load(config_file)
    except IOError:
        ## File not found
        logging.info("no config file found")
    except ValueError:
        logging.error("error in config file", exc_info=True)
        raise

    defaults = config.get(args.config_section, {})

    # Parse rest of arguments
    # Don't suppress add_help here so it will handle -h
    parser = argparse.ArgumentParser(
        # Inherit options from config_parser
        parents=[conf_parser]
        )
    parser.set_defaults(**defaults)

    ## Global options
    parser.add_argument("--nocaldav", help="Do not connect to CalDAV server, but read/write icalendar format from stdin/stdout", action="store_true")
    parser.add_argument("--icalendar", help="Read/write icalendar format from stdin/stdout", action="store_true")
    parser.add_argument("--timezone", help="Timezone to use")
    parser.add_argument('--language', help="language used", default="EN")
    parser.add_argument("--caldav-url", help="Full URL to the caldav server", metavar="URL")
    parser.add_argument("--caldav-user", help="username to log into the caldav server", metavar="USER")
    parser.add_argument("--caldav-pass", help="password to log into the caldav server", metavar="PASS")
    parser.add_argument("--debug-logging", help="turn on debug logging", action="store_true")

    ## TODO: check sys.argv[0] to find command
    ## TODO: set up logging
    subparsers = parser.add_subparsers(title='command')

    calendar_parser = subparsers.add_parser('calendar')
    calendar_parser.add_argument("--calendar-url", help="URL for calendar to be used (may be absolute or relative to caldav URL)")
    calendar_subparsers = calendar_parser.add_subparsers(title='subcommand')
    calendar_add_parser = calendar_subparsers.add_parser('add')
    calendar_add_parser.add_argument('event_time', help="Timestamp and duration of the event.  See the documentation for event_time specifications")
    calendar_add_parser.add_argument('description', nargs='+')
    calendar_add_parser.set_defaults(func=calendar_add)
    calendar_addics_parser = calendar_subparsers.add_parser('addics')
    calendar_addics_parser.add_argument('--file', help="ICS file to upload", default='-')
    calendar_addics_parser.set_defaults(func=calendar_addics)

    calendar_agenda_parser = calendar_subparsers.add_parser('agenda')
    calendar_agenda_parser.set_defaults(func=niy)
    todo_parser = subparsers.add_parser('todo')
    todo_parser.set_defaults(func=niy)
    args = parser.parse_args(remaining_argv)

    if not args.nocaldav:
        caldav_conn = caldav_connect(args)

    ret = args.func(caldav_conn, args)
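This example builds its options in two stages: a bare pre-parser reads --config-file with parse_known_args(), the selected config section is pushed into the real parser via set_defaults(**defaults), and only the remaining argv is parsed. The same pattern appears in the next example; here is a minimal, self-contained sketch (the file name and the --timezone option are made up):

import argparse
import json

conf_parser = argparse.ArgumentParser(add_help=False)
conf_parser.add_argument("--config-file", default="app.conf")   # hypothetical file
args, remaining_argv = conf_parser.parse_known_args()

try:
    with open(args.config_file) as f:
        defaults = json.load(f)                  # e.g. {"timezone": "UTC"}
except (IOError, ValueError):
    defaults = {}                                # missing or broken config file

parser = argparse.ArgumentParser(parents=[conf_parser])
parser.add_argument("--timezone")                # may be pre-filled by the config
parser.set_defaults(**defaults)
args = parser.parse_args(remaining_argv)
print(args.timezone)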
Example #50
0
def main():
    """
    the main function does (almost) nothing but parsing command line parameters
    """
    ## This boilerplate pattern is from
    ## http://stackoverflow.com/questions/3609852
    ## We want defaults for the command line options to be fetched from the config file

    # Parse any conf_file specification
    # We make this parser with add_help=False so that
    # it doesn't parse -h and print help.
    conf_parser = argparse.ArgumentParser(
        description=__doc__,  # printed with -h/--help
        # Don't mess with format of description
        formatter_class=argparse.RawDescriptionHelpFormatter,
        # Turn off help, so we print all options in response to -h
        add_help=False,
    )
    conf_parser.add_argument(
        "--config-file",
        help="Specify config file",
        metavar="FILE",
        default=os.getenv("XDG_CONFIG_HOME", os.getenv("HOME", "~") + "/.config") + "/calendar.conf",
    )
    conf_parser.add_argument(
        "--config-section",
        help="Specify config section; allows several caldav servers to be configured in the same config file",
        default="default",
    )
    conf_parser.add_argument("--interactive-config", help="Interactively ask for configuration", action="store_true")
    args, remaining_argv = conf_parser.parse_known_args()

    config = {}

    try:
        with open(args.config_file) as config_file:
            config = json.load(config_file)
    except IOError:
        ## File not found
        logging.info("no config file found")
    except ValueError:
        if args.interactive_config:
            logging.error(
                "error in config file.  Be aware that the current config file will be ignored and overwritten",
                exc_info=True,
            )
        else:
            logging.error(
                "error in config file.  You may want to run --interactive-config or fix the config file", exc_info=True
            )

    if args.interactive_config:
        config = interactive_config(args, config, remaining_argv)
        if not remaining_argv:
            return
    else:
        defaults = config_section(config, args.config_section)

    # Parse rest of arguments
    # Don't suppress add_help here so it will handle -h
    parser = argparse.ArgumentParser(
        # Inherit options from config_parser
        parents=[conf_parser]
    )
    parser.set_defaults(**defaults)

    ## Global options
    parser.add_argument(
        "--nocaldav",
        help="Do not connect to CalDAV server, but read/write icalendar format from stdin/stdout",
        action="store_true",
    )
    parser.add_argument("--icalendar", help="Read/write icalendar format from stdin/stdout", action="store_true")
    parser.add_argument("--timezone", help="Timezone to use")
    parser.add_argument("--language", help="language used", default="EN")
    parser.add_argument("--caldav-url", help="Full URL to the caldav server", metavar="URL")
    parser.add_argument("--caldav-user", help="username to log into the caldav server", metavar="USER")
    parser.add_argument("--caldav-pass", help="password to log into the caldav server", metavar="PASS")
    parser.add_argument("--debug-logging", help="turn on debug logging", action="store_true")
    parser.add_argument(
        "--calendar-url",
        help="URL for calendar to be used (may be absolute or relative to caldav URL, or just the name of the calendar)",
    )

    ## TODO: check sys.argv[0] to find command
    ## TODO: set up logging
    subparsers = parser.add_subparsers(title="command")

    ## Tasks
    todo_parser = subparsers.add_parser("todo")
    todo_parser.add_argument("--top", "-1", action="count", default=0)
    todo_parser.add_argument("--offset", action="count", default=0)
    todo_parser.add_argument("--offsetn", type=int, default=0)
    todo_parser.add_argument("--limit", type=int, default=0)
    todo_parser.add_argument("--todo-uid")

    for attr in vtodo_txt_one + vtodo_txt_many:
        todo_parser.add_argument("--" + attr, help="for filtering tasks")

    # todo_parser.add_argument('--priority', ....)
    # todo_parser.add_argument('--sort-by', ....)
    # todo_parser.add_argument('--due-before', ....)
    todo_subparsers = todo_parser.add_subparsers(title="tasks subcommand")
    todo_add_parser = todo_subparsers.add_parser("add")
    todo_add_parser.add_argument("summaryline", nargs="+")
    todo_add_parser.add_argument("--set-due", default=date.today() + timedelta(7))
    todo_add_parser.add_argument("--set-dtstart", default=date.today() + timedelta(1))
    todo_add_parser.add_argument(
        "--is-child", help="the new task is a child-task of the selected task(s)", action="store_true"
    )
    for attr in vtodo_txt_one + vtodo_txt_many:
        if attr != "summary":
            todo_add_parser.add_argument("--set-" + attr, help="Set " + attr)
    todo_add_parser.set_defaults(func=todo_add)

    todo_list_parser = todo_subparsers.add_parser("list")
    todo_list_parser.add_argument(
        "--todo-template",
        help="Template for printing out the event",
        default="{dtstart}{dtstart_passed_mark} {due}{due_passed_mark} {summary}",
    )
    todo_list_parser.add_argument(
        "--default-due", help="Default number of days from a task is submitted until it's considered due", default=14
    )
    todo_list_parser.add_argument(
        "--list-categories",
        help="Instead of listing the todo-items, list the unique categories used",
        action="store_true",
    )
    todo_list_parser.set_defaults(func=todo_list)

    todo_edit_parser = todo_subparsers.add_parser("edit")
    for attr in vtodo_txt_one + vtodo_txt_many:
        todo_edit_parser.add_argument("--set-" + attr, help="Set " + attr)
    for attr in vtodo_txt_many:
        todo_edit_parser.add_argument("--add-" + attr, help="Add an " + attr)
    todo_edit_parser.add_argument(
        "--pdb", help="Allow interactive edit through the python debugger", action="store_true"
    )
    todo_edit_parser.set_defaults(func=todo_edit)

    todo_postpone_parser = todo_subparsers.add_parser("postpone")
    todo_postpone_parser.add_argument(
        "until",
        help="either a new date or +interval to add some interval to the existing time, or i.e. 'in 3d' to set the time to a new time relative to the current time.  interval is a number postfixed with a one character unit (any of smhdwy).  If the todo-item has a dstart, this field will be modified, else the due timestamp will be modified.    If both timestamps exists and dstart will be moved beyond the due time, the due time will be set to dtime.",
    )
    todo_postpone_parser.add_argument("--due", help="move the due, not the dtstart", action="store_true")
    todo_postpone_parser.set_defaults(func=todo_postpone)

    todo_complete_parser = todo_subparsers.add_parser("complete")
    todo_complete_parser.set_defaults(func=todo_complete)

    todo_delete_parser = todo_subparsers.add_parser("delete")
    todo_delete_parser.set_defaults(func=todo_delete)

    calendar_parser = subparsers.add_parser("calendar")
    calendar_subparsers = calendar_parser.add_subparsers(title="cal subcommand")
    calendar_add_parser = calendar_subparsers.add_parser("add")
    calendar_add_parser.add_argument(
        "event_time", help="Timestamp and duration of the event.  See the documentation for event_time specifications"
    )
    calendar_add_parser.add_argument("summary", nargs="+")
    calendar_add_parser.set_defaults(func=calendar_add)
    calendar_addics_parser = calendar_subparsers.add_parser("addics")
    calendar_addics_parser.add_argument("--file", help="ICS file to upload", default="-")
    calendar_addics_parser.set_defaults(func=calendar_addics)

    calendar_agenda_parser = calendar_subparsers.add_parser("agenda")
    calendar_agenda_parser.add_argument(
        "--from-time",
        help="Fetch calendar events from this timestamp.  See the documentation for time specifications.  Defaults to now",
    )
    calendar_agenda_parser.add_argument("--to-time", help="Fetch calendar until this timestamp")
    calendar_agenda_parser.add_argument("--agenda-mins", help="Fetch calendar for so many minutes", type=int)
    calendar_agenda_parser.add_argument("--agenda-days", help="Fetch calendar for so many days", type=int, default=7)
    calendar_agenda_parser.add_argument(
        "--event-template", help="Template for printing out the event", default="{dstart} {summary}"
    )
    calendar_agenda_parser.add_argument(
        "--timestamp-format", help="strftime-style format string for the output timestamps", default="%F %H:%M (%a)"
    )
    calendar_agenda_parser.set_defaults(func=calendar_agenda)

    calendar_delete_parser = calendar_subparsers.add_parser("delete")
    calendar_delete_parser.add_argument("--event-uid")
    calendar_delete_parser.add_argument("--event-url")
    calendar_delete_parser.add_argument("--event-timestamp")
    calendar_delete_parser.set_defaults(func=calendar_delete)

    args = parser.parse_args(remaining_argv)

    if args.timezone:
        args.timezone = pytz.timezone(args.timezone)
    else:
        args.timezone = tzlocal.get_localzone()

    if not args.nocaldav:
        caldav_conn = caldav_connect(args)

    ret = args.func(caldav_conn, args)
Example #51
0
def parse_args(argv):
    """Parse the command line.
    """
    parser = argparse.ArgumentParser(
        description='An interface to tarsnap to manage backups.')
    group = parser.add_mutually_exclusive_group()
    group.add_argument('-q', action='store_true', dest='quiet', help='be quiet')
    group.add_argument('-v', action='store_true', dest='verbose', help='be verbose')
    # We really want nargs=(1,2), but since this isn't available, we can
    # just as well support an arbitrary number of values for each -o.
    parser.add_argument('-o', metavar=('name', 'value'), nargs='+',
                        dest='tarsnap_options', default=[], action='append',
                        help='option to pass to tarsnap',)
    parser.add_argument('--config', '-c', help='use the given config file')

    group = parser.add_argument_group(
        description='Instead of using a configuration file, you may define '\
                    'a single job on the command line:')
    group.add_argument('--target', help='target filename for the backup')
    group.add_argument('--sources', nargs='+', help='paths to backup',
                        default=[])
    group.add_argument('--deltas', '-d', metavar='DELTA',
                        type=timedelta_string,
                        help='generation deltas', nargs='+')
    group.add_argument('--dateformat', '-f', help='dateformat')

    for plugin in PLUGINS:
        plugin.setup_arg_parser(parser)

    # This will allow the user to break out of an nargs='*' to start
    # with the subcommand. See http://bugs.python.org/issue9571.
    parser.add_argument('-', dest='__dummy', action="store_true",
                        help=argparse.SUPPRESS)

    subparsers = parser.add_subparsers(
        title="commands", description="commands may offer additional options")
    for cmd_name, cmd_klass in COMMANDS.iteritems():
        subparser = subparsers.add_parser(cmd_name, help=cmd_klass.help,
                                          description=cmd_klass.description,
                                          add_help=False)
        subparser.set_defaults(command=cmd_klass)
        group = subparser.add_argument_group(
            title="optional arguments for this command")
        # We manually add the --help option so that we can have a
        # custom group title, but only show a single group.
        group.add_argument('-h', '--help', action='help',
                           default=argparse.SUPPRESS,
                           help='show this help message and exit')
        cmd_klass.setup_arg_parser(group)

        # Unfortunately, we need to redefine the jobs argument for each
        # command, rather than simply having it once, globally.
        subparser.add_argument(
            'jobs', metavar='job', nargs='*',
            help='only process the given job as defined in the config file')

    # This would be in a group automatically, but it would be shown as
    # the very first thing, while it really should be the last (which
    # explicitly defining the group causes to happen).
    #
    # Also, note that we define this argument for each command as well,
    # and the command specific one will actually be parsed. This is
    # because while argparse allows us to *define* this argument globally,
    # and renders the usage syntax correctly as well, it isn't actually
    # able to parse it correctly (see
    # http://bugs.python.org/issue9540).
    group = parser.add_argument_group(title='positional arguments')
    group.add_argument(
        '__not_used', metavar='job', nargs='*',
        help='only process the given job as defined in the config file')

    args = parser.parse_args(argv)

    # Do some argument validation that would be too much to ask
    # argparse to handle internally.
    if args.config and (args.target or args.dateformat or args.deltas or
                        args.sources):
        raise ArgumentError('If --config is used, then --target, --deltas, '
                            '--sources and --dateformat are not available')
    if args.jobs and not args.config:
        raise ArgumentError(('Specific jobs (%s) can only be given if a '
                            'config file is used') % ", ".join(args.jobs))
    # The command may want to do some validation regarding its own options.
    args.command.validate_args(args)

    return args
Example #52
0
def ucare_argparser():
    parser = argparse.ArgumentParser()
    parser.add_argument('--version', action='version',
                        version='ucare {0}'.format(__version__))

    subparsers = parser.add_subparsers()

    # list
    subparser = subparsers.add_parser('list', help='list all files')
    subparser.set_defaults(func=list_files)
    subparser.add_argument('--since', help='show files uploaded since',
                           type=dateutil.parser.parse)
    subparser.add_argument('--until', help='show files uploaded until',
                           type=dateutil.parser.parse)
    subparser.add_argument('--limit', help='files to show', default=100,
                           type=int_or_none)
    subparser.add_argument('--stored', help='filter stored files',
                           choices=[True, False, None],
                           type=bool_or_none, default=None)
    subparser.add_argument('--removed', help='filter removed files',
                           choices=[True, False, None],
                           type=bool_or_none, default=False)

    # get
    subparser = subparsers.add_parser('get', help='get file info')
    subparser.set_defaults(func=get_file)
    subparser.add_argument('path', help='file path')

    # common store and delete args
    waiting_parent = argparse.ArgumentParser(add_help=False)
    waiting_parent.add_argument(
        '--timeout',
        type=int,
        dest='timeout',
        default=5,
        help='Set wait seconds until operation completed.'
             ' Default value is 5 seconds')
    group = waiting_parent.add_mutually_exclusive_group()
    group.add_argument(
        '--wait',
        action='store_true',
        default=True,
        dest='wait',
        help='Wait for operation to be completed'
    )
    group.add_argument(
        '--nowait',
        action='store_false',
        dest='wait',
        help='Do not wait for operation to be completed'
    )

    # store
    subparser = subparsers.add_parser('store',
                                      parents=[waiting_parent],
                                      help='store file')
    subparser.set_defaults(func=store_file)
    subparser.add_argument('path', help='file path')

    # delete
    subparser = subparsers.add_parser('delete',
                                      parents=[waiting_parent],
                                      help='request delete')
    subparser.set_defaults(func=delete_file)
    subparser.add_argument('path', help='file path')

    # common upload args
    upload_parent = argparse.ArgumentParser(add_help=False)
    group = upload_parent.add_mutually_exclusive_group()
    group.add_argument(
        '--store',
        action='store_true',
        default=False,
        dest='store',
        help='Store uploaded file')
    group.add_argument(
        '--nostore',
        action='store_false',
        dest='store',
        help='Do not store uploaded file')
    group = upload_parent.add_mutually_exclusive_group()
    group.add_argument(
        '--info',
        action='store_true',
        default=False,
        dest='info',
        help='Get uploaded file info')
    group.add_argument(
        '--noinfo',
        action='store_false',
        dest='info',
        help='Do not get uploaded file info')
    upload_parent.add_argument(
        '--cdnurl',
        action='store_true',
        help='Store file and get CDN url.')

    # upload from url
    subparser = subparsers.add_parser('upload_from_url',
                                      parents=[upload_parent],
                                      help='upload file from url')
    subparser.set_defaults(func=upload_from_url)
    subparser.add_argument('url', help='file url')
    subparser.add_argument(
        '--timeout',
        type=int,
        dest='timeout',
        default=30,
        help='Seconds to wait for the file to be uploaded from the url.'
             ' Default value is 30 seconds')
    group = subparser.add_mutually_exclusive_group()
    group.add_argument(
        '--wait',
        action='store_true',
        default=True,
        dest='wait',
        help='Wait for upload status')
    group.add_argument(
        '--nowait',
        action='store_false',
        dest='wait',
        help='Do not wait for upload status')

    # upload
    subparser = subparsers.add_parser('upload', parents=[upload_parent],
                                      help='upload file')
    subparser.set_defaults(func=upload)
    subparser.add_argument('filename', help='filename')

    # Create file group.
    subparser = subparsers.add_parser('create_group', help='create file group')
    subparser.set_defaults(func=create_group)
    subparser.add_argument('paths', nargs='+', help='file paths')

    # Sync files
    subparser = subparsers.add_parser('sync', help='sync files')
    subparser.set_defaults(func=sync_files)
    subparser.add_argument('path', nargs='?', help=(
        'Local path. It can contain special patterns like: {0} '
        'Default is {1}'.format(
            ' '.join(PATTERNS_MAPPING.keys()),
            DEFAULT_PATTERN_FILENAME)
    ), default='.')
    subparser.add_argument('--replace', help='replace existing files',
                           default=False, action='store_true')
    subparser.add_argument('--uuids', nargs='+',
                           help='list of file uuids to sync',)
    subparser.add_argument('--effects', help=(
        'apply effects to synced images. For more information look at: '
        'https://uploadcare.com/documentation/cdn/'
    ))

    # common arguments
    parser.add_argument(
        '--pub_key',
        help='API key; if not set, it is read from the uploadcare.ini'
             ' and ~/.uploadcare config files')
    parser.add_argument(
        '--secret',
        help='API secret; if not set, it is read from the uploadcare.ini'
             ' and ~/.uploadcare config files')
    parser.add_argument(
        '--api_base',
        help='API url, can be read from uploadcare.ini'
             ' and ~/.uploadcare config files.'
             ' Default value is {0}'.format(conf.api_base))
    parser.add_argument(
        '--upload_base',
        help='Upload API url, can be read from uploadcare.ini'
             ' and ~/.uploadcare config files.'
             ' Default value is {0}'.format(conf.upload_base))
    parser.add_argument(
        '--no_check_upload_certificate',
        action='store_true',
        help="Don't check the uploading API server certificate."
             ' Can be read from uploadcare.ini'
             ' and ~/.uploadcare config files.')
    parser.add_argument(
        '--no_check_api_certificate',
        action='store_true',
        help="Don't check the REST API server certificate."
             ' Can be read from uploadcare.ini'
             ' and ~/.uploadcare config files.')
    parser.add_argument(
        '--api_version',
        help='API version, can be read from uploadcare.ini'
             ' and ~/.uploadcare config files.'
             ' Default value is {0}'.format(conf.api_version))

    return parser
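A note on the pattern above: the store and delete subcommands share their --timeout/--wait/--nowait options through a parent parser created with add_help=False and passed via parents=[...]. A minimal standalone sketch of just that mechanism (hypothetical subcommand handling, not part of the original example):

import argparse

# Parent parser holding shared options; add_help=False so it can be reused
# through parents=[...] without clashing with each subcommand's own -h/--help.
waiting = argparse.ArgumentParser(add_help=False)
waiting.add_argument('--timeout', type=int, default=5)
flag = waiting.add_mutually_exclusive_group()
flag.add_argument('--wait', dest='wait', action='store_true', default=True)
flag.add_argument('--nowait', dest='wait', action='store_false')

parser = argparse.ArgumentParser()
subparsers = parser.add_subparsers()
for name in ('store', 'delete'):
    sub = subparsers.add_parser(name, parents=[waiting])
    sub.add_argument('path')
    sub.set_defaults(command=name)

print(parser.parse_args(['delete', '--nowait', 'some/file']))
# Namespace(command='delete', path='some/file', timeout=5, wait=False)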
Example #53
0
def main():
    """
    the main function does (almost) nothing but parse command line parameters
    """
    ## This boilerplate pattern is from
    ## http://stackoverflow.com/questions/3609852
    ## We want defaults for the command line options to be fetched from the config file

    # Parse any conf_file specification
    # We make this parser with add_help=False so that
    # it doesn't parse -h and print help.
    conf_parser = argparse.ArgumentParser(
        prog=__product__,
        description=__doc__, # printed with -h/--help
        # Don't mess with format of description
        formatter_class=argparse.RawDescriptionHelpFormatter,
        # Turn off help, so we print all options in response to -h
        add_help=False
        )
    conf_parser.add_argument("--config-file",
                             help="Specify config file", metavar="FILE", default=os.getenv('XDG_CONFIG_HOME', os.getenv('HOME', '~') + '/.config')+'/calendar.conf')
    conf_parser.add_argument("--config-section",
                             help="Specify config section; allows several caldav servers to be configured in the same config file",  default='default')
    conf_parser.add_argument("--interactive-config",
                             help="Interactively ask for configuration", action="store_true")
    args, remaining_argv = conf_parser.parse_known_args()
    conf_parser.add_argument("--version", action='version', version='%%(prog)s %s' % __version__)

    config = {}

    try:
        with open(args.config_file) as config_file:
            config = json.load(config_file)
    except IOError:
        ## File not found
        logging.info("no config file found")
    except ValueError:
        if args.interactive_config:
            logging.error("error in config file.  Be aware that the current config file will be ignored and overwritten", exc_info=True)
        else:
            logging.error("error in config file.  You may want to run --interactive-config or fix the config file", exc_info=True)

    if args.interactive_config:
        config = interactive_config(args, config, remaining_argv)
        if not remaining_argv:
            return

    ## Pull defaults from the chosen config section in both code paths, so
    ## `defaults` is always defined before parser.set_defaults(**defaults).
    defaults = config_section(config, args.config_section)
    if 'ssl_verify_cert' not in defaults:
        defaults['ssl_verify_cert'] = 'yes'
    if 'language' not in defaults:
        ## TODO: shouldn't this be lower case?
        defaults['language'] = 'EN'

    # Parse rest of arguments
    # Don't suppress add_help here so it will handle -h
    parser = argparse.ArgumentParser(
        description=__doc__,
        prog=__product__,
        # Inherit options from config_parser
        parents=[conf_parser]
        )
    parser.set_defaults(**defaults)

    ## Global options
    parser.add_argument("--nocaldav", help="Do not connect to CalDAV server, but read/write icalendar format from stdin/stdout", action="store_true")
    parser.add_argument("--icalendar", help="Read/write icalendar format from stdin/stdout", action="store_true")
    parser.add_argument("--timezone", help="Timezone to use")
    parser.add_argument('--language', help="language used")
    parser.add_argument("--caldav-url", help="Full URL to the caldav server", metavar="URL")
    parser.add_argument("--caldav-user", help="username to log into the caldav server", metavar="USER")
    parser.add_argument("--caldav-pass", help="password to log into the caldav server", metavar="PASS")
    parser.add_argument("--caldav-proxy", help="HTTP proxy server to use (if any)")
    parser.add_argument("--ssl-verify-cert", help="verification of the SSL cert - 'yes' to use the OS-provided CA-bundle, 'no' to trust any cert and the path to a CA-bundle")
    parser.add_argument("--debug-logging", help="turn on debug logging", action="store_true")
    parser.add_argument("--calendar-url", help="URL for calendar to be used (may be absolute or relative to caldav URL, or just the name of the calendar)")
    parser.add_argument("--ignoremethod", help="Ignores METHOD property if exists in the request. This violates RFC4791 but is sometimes appended by some calendar servers", action="store_true")

    ## TODO: check sys.argv[0] to find command
    ## TODO: set up logging
    subparsers = parser.add_subparsers(title='command')

    ## Tasks
    todo_parser = subparsers.add_parser('todo')
    todo_parser.add_argument('--top', '-1', action='count', default=0)
    todo_parser.add_argument('--offset', action='count', default=0)
    todo_parser.add_argument('--offsetn', type=int, default=0)
    todo_parser.add_argument('--limit', type=int, default=0)
    todo_parser.add_argument('--todo-uid')
    todo_parser.add_argument('--hide-parents', help='Hide the parent if you need to work on children tasks first (parent task depends on children tasks to be done first)', action='store_true')
    todo_parser.add_argument('--hide-children', help='Hide the children if you need to work on the parent tasks first', action='store_true')
    todo_parser.add_argument('--overdue', help='Only show overdue tasks', action='store_true')
    todo_parser.add_argument('--hide-future', help='Hide events with future dtstart', action='store_true')

    for attr in vtodo_txt_one + vtodo_txt_many:
        todo_parser.add_argument('--'+attr, help="for filtering tasks")

    for attr in vtodo_txt_one + vtodo_txt_many:
        todo_parser.add_argument('--no'+attr, help="for filtering tasks", action='store_true')

    #todo_parser.add_argument('--priority', ....)
    #todo_parser.add_argument('--sort-by', ....)
    #todo_parser.add_argument('--due-before', ....)
    todo_subparsers = todo_parser.add_subparsers(title='tasks subcommand')
    todo_add_parser = todo_subparsers.add_parser('add')
    todo_add_parser.add_argument('summaryline', nargs='+')
    todo_add_parser.add_argument('--set-due', default=date.today()+timedelta(365))
    todo_add_parser.add_argument('--set-dtstart', default=date.today()+timedelta(1))
    todo_add_parser.add_argument('--is-child', help="the new task is a child-task of the selected task(s)", action='store_true')
    for attr in vtodo_txt_one + vtodo_txt_many:
        if attr != 'summary':
            todo_add_parser.add_argument('--set-'+attr, help="Set "+attr)
    # TODO: we probably want to be able to set or delete alarms in other situations, yes?  generalize?
    todo_add_parser.add_argument('--alarm', metavar='DURATION_BEFORE',
        help="specifies a time at which a reminder should be presented for this task, " \
             "relative to the start time of the task (as a timestamp delta)")
    todo_add_parser.set_defaults(func=todo_add)

    todo_list_parser = todo_subparsers.add_parser('list')
    todo_list_parser.add_argument('--todo-template', help="Template for printing out the event", default="{dtstart}{dtstart_passed_mark} {due}{due_passed_mark} {summary}")
    todo_list_parser.add_argument('--default-due', help="Default number of days from when a task is submitted until it's considered due", type=int, default=365)
    todo_list_parser.add_argument('--list-categories', help="Instead of listing the todo-items, list the unique categories used", action='store_true')
    todo_list_parser.add_argument('--timestamp-format', help="strftime-style format string for the output timestamps", default="%Y-%m-%d (%a)")
    todo_list_parser.set_defaults(func=todo_list)

    todo_edit_parser = todo_subparsers.add_parser('edit')
    for attr in vtodo_txt_one + vtodo_txt_many:
        todo_edit_parser.add_argument('--set-'+attr, help="Set "+attr)
    for attr in vtodo_txt_many:
        todo_edit_parser.add_argument('--add-'+attr, help="Add an "+attr)
    todo_edit_parser.add_argument('--pdb', help='Allow interactive edit through the python debugger', action='store_true')
    todo_edit_parser.set_defaults(func=todo_edit)

    todo_postpone_parser = todo_subparsers.add_parser('postpone')
    todo_postpone_parser.add_argument('until', help="either a new date, or +interval to add some interval to the existing time, or e.g. 'in 3d' to set the time relative to the current time.  An interval is a number postfixed with a one-character unit (any of smhdwy).  If the todo-item has a dtstart, that field will be modified, else the due timestamp will be modified.  If both timestamps exist and dtstart would be moved beyond the due time, the due time will be set to dtime.")
    todo_postpone_parser.add_argument('--due', help="move the due, not the dtstart", action='store_true')
    todo_postpone_parser.set_defaults(func=todo_postpone)

    todo_complete_parser = todo_subparsers.add_parser('complete')
    todo_complete_parser.set_defaults(func=todo_complete)

    todo_delete_parser = todo_subparsers.add_parser('delete')
    todo_delete_parser.set_defaults(func=todo_delete)

    ## journal
    journal_parser = subparsers.add_parser('journal')
    journal_subparsers = journal_parser.add_subparsers(title='journal subcommand')
    journal_add_parser = journal_subparsers.add_parser('add')
    journal_add_parser.add_argument('summaryline', nargs='+')
    journal_add_parser.set_defaults(func=journal_add)

    calendar_parser = subparsers.add_parser('calendar')
    calendar_subparsers = calendar_parser.add_subparsers(title='cal subcommand')
    calendar_add_parser = calendar_subparsers.add_parser('add')
    calendar_add_parser.add_argument('event_time', help="Timestamp and duration of the event.  See the documentation for event_time specifications")
    calendar_add_parser.add_argument('summary', nargs='+')
    calendar_add_parser.set_defaults(func=calendar_add)
    calendar_add_parser.add_argument('--whole-day', help='Whole-day event', action='store_true', default=False)

    for attr in vcal_txt_one + vcal_txt_many:
        calendar_add_parser.add_argument('--set-'+attr, help='Set '+attr)

    calendar_addics_parser = calendar_subparsers.add_parser('addics')
    calendar_addics_parser.add_argument('--file', help="ICS file to upload", default='-')
    calendar_addics_parser.set_defaults(func=calendar_addics)

    calendar_agenda_parser = calendar_subparsers.add_parser('agenda')
    calendar_agenda_parser.add_argument('--from-time', help="Fetch calendar events from this timestamp.  See the documentation for time specifications.  Defaults to now")
    calendar_agenda_parser.add_argument('--to-time', help="Fetch calendar until this timestamp")
    calendar_agenda_parser.add_argument('--agenda-mins', help="Fetch calendar for so many minutes", type=int)
    calendar_agenda_parser.add_argument('--agenda-days', help="Fetch calendar for so many days", type=int, default=7)
    calendar_agenda_parser.add_argument('--event-template', help="Template for printing out the event", default="{dtstart} {summary}")
    calendar_agenda_parser.add_argument('--timestamp-format', help="strftime-style format string for the output timestamps", default="%Y-%m-%d %H:%M (%a)")
    calendar_agenda_parser.set_defaults(func=calendar_agenda)

    calendar_delete_parser = calendar_subparsers.add_parser('delete')
    calendar_delete_parser.add_argument('--event-uid')
    calendar_delete_parser.add_argument('--event-url')
    calendar_delete_parser.add_argument('--event-timestamp')
    calendar_delete_parser.set_defaults(func=calendar_delete)

    args = parser.parse_args(remaining_argv)

    if not args.nocaldav:
        caldav_conn = caldav_connect(args)
    else:
        caldav_conn = None
    
    if args.ssl_verify_cert == 'no':
        urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)

    ret = args.func(caldav_conn, args)
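The boilerplate at the top of main() above (a help-less conf_parser run through parse_known_args, whose result is fed into set_defaults of the real parser) is the standard two-stage trick for letting a config file provide argparse defaults while the command line still wins. A stripped-down sketch of just that mechanism; the --colour option and the JSON shape are hypothetical stand-ins:

import argparse
import json

conf_parser = argparse.ArgumentParser(add_help=False)
conf_parser.add_argument('--config-file')
args, remaining_argv = conf_parser.parse_known_args()

defaults = {}
if args.config_file:
    with open(args.config_file) as f:
        defaults = json.load(f)            # e.g. {"colour": "green"}

parser = argparse.ArgumentParser(parents=[conf_parser])
parser.add_argument('--colour', default='red')
parser.set_defaults(**defaults)            # config file overrides the hard-coded default
print(parser.parse_args(remaining_argv))   # explicit command line values still win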
Example #54
0
def get_parser():
    parser = argparse.ArgumentParser()
    subparsers = parser.add_subparsers(help='sub-command help', dest='subcommand')
    subparsers.required = True

    ht = "Run subsections of a DAG for a specified date range"
    parser_backfill = subparsers.add_parser('backfill', help=ht)
    parser_backfill.add_argument("dag_id", help="The id of the dag to run")
    parser_backfill.add_argument(
        "-t", "--task_regex",
        help="The regex to filter specific task_ids to backfill (optional)")
    parser_backfill.add_argument(
        "-s", "--start_date", help="Override start_date YYYY-MM-DD")
    parser_backfill.add_argument(
        "-e", "--end_date", help="Override end_date YYYY-MM-DD")
    parser_backfill.add_argument(
        "-m", "--mark_success",
        help=mark_success_help, action="store_true")
    parser_backfill.add_argument(
        "-l", "--local",
        help="Run the task using the LocalExecutor", action="store_true")
    parser_backfill.add_argument(
        "-x", "--donot_pickle",
        help=(
            "Do not attempt to pickle the DAG object to send over "
            "to the workers, just tell the workers to run their version "
            "of the code."),
        action="store_true")
    parser_backfill.add_argument(
        "-a", "--include_adhoc",
        help="Include dags with the adhoc parameter.", action="store_true")
    parser_backfill.add_argument(
        "-i", "--ignore_dependencies",
        help=(
            "Skip upstream tasks, run only the tasks "
            "matching the regexp. Only works in conjunction with task_regex"),
        action="store_true")
    parser_backfill.add_argument(
        "-sd", "--subdir", help=subdir_help,
        default=DAGS_FOLDER)
    parser_backfill.add_argument(
        "-p", "--pool", help="Pool to use to run the backfill")
    parser_backfill.add_argument(
        "-dr", "--dry_run", help="Perform a dry run", action="store_true")
    parser_backfill.set_defaults(func=backfill)

    ht = "Clear a set of task instance, as if they never ran"
    parser_clear = subparsers.add_parser('clear', help=ht)
    parser_clear.add_argument("dag_id", help="The id of the dag to run")
    parser_clear.add_argument(
        "-t", "--task_regex",
        help="The regex to filter specific task_ids to clear (optional)")
    parser_clear.add_argument(
        "-s", "--start_date", help="Override start_date YYYY-MM-DD")
    parser_clear.add_argument(
        "-e", "--end_date", help="Override end_date YYYY-MM-DD")
    ht = "Include upstream tasks"
    parser_clear.add_argument(
        "-u", "--upstream", help=ht, action="store_true")
    ht = "Only failed jobs"
    parser_clear.add_argument(
        "-f", "--only_failed", help=ht, action="store_true")
    ht = "Only running jobs"
    parser_clear.add_argument(
        "-r", "--only_running", help=ht, action="store_true")
    ht = "Include downstream tasks"
    parser_clear.add_argument(
        "-d", "--downstream", help=ht, action="store_true")
    parser_clear.add_argument(
        "-sd", "--subdir", help=subdir_help,
        default=DAGS_FOLDER)
    ht = "Do not request confirmation"
    parser_clear.add_argument(
        "-c", "--no_confirm", help=ht, action="store_true")
    parser_clear.set_defaults(func=clear)

    ht = "Trigger a DAG"
    parser_trigger_dag = subparsers.add_parser('trigger_dag', help=ht)
    parser_trigger_dag.add_argument("dag_id", help="The id of the dag to run")
    parser_trigger_dag.add_argument(
        "-r", "--run_id",
        help="Helps to indentify this run")
    ht = "json string that gets pickled into the DagRun's conf attribute"
    parser_trigger_dag.add_argument('-c', '--conf', help=ht)
    parser_trigger_dag.set_defaults(func=trigger_dag)

    ht = "Pause a DAG"
    parser_pause = subparsers.add_parser('pause', help=ht)
    parser_pause.add_argument("dag_id", help="The id of the dag to pause")
    parser_pause.add_argument(
        "-sd", "--subdir", help=subdir_help,
        default=DAGS_FOLDER)
    parser_pause.set_defaults(func=pause)

    ht = "Unpause a DAG"
    parser_unpause = subparsers.add_parser('unpause', help=ht)
    parser_unpause.add_argument("dag_id", help="The id of the dag to unpause")
    parser_unpause.add_argument(
        "-sd", "--subdir", help=subdir_help,
        default=DAGS_FOLDER)
    parser_unpause.set_defaults(func=unpause)

    ht = "Run a single task instance"
    parser_run = subparsers.add_parser('run', help=ht)
    parser_run.add_argument("dag_id", help="The id of the dag to run")
    parser_run.add_argument("task_id", help="The task_id to run")
    parser_run.add_argument(
        "execution_date", help="The execution date to run")
    parser_run.add_argument(
        "-sd", "--subdir", help=subdir_help,
        default=DAGS_FOLDER)
    parser_run.add_argument(
        "-s", "--task_start_date",
        help="Override the tasks's start_date (used internally)",)
    parser_run.add_argument(
        "-m", "--mark_success", help=mark_success_help, action="store_true")
    parser_run.add_argument(
        "-f", "--force",
        help="Force a run regardless or previous success",
        action="store_true")
    parser_run.add_argument(
        "-l", "--local",
        help="Runs the task locally, don't use the executor",
        action="store_true")
    parser_run.add_argument(
        "-r", "--raw",
        help=argparse.SUPPRESS,
        action="store_true")
    parser_run.add_argument(
        "--pool", help="Pool to use to run the task instance")
    parser_run.add_argument(
        "-i", "--ignore_dependencies",
        help="Ignore upstream and depends_on_past dependencies",
        action="store_true")
    parser_run.add_argument(
        "--ship_dag",
        help="Pickles (serializes) the DAG and ships it to the worker",
        action="store_true")
    parser_run.add_argument(
        "-p", "--pickle",
        help="Serialized pickle object of the entire dag (used internally)")
    parser_run.add_argument(
        "-j", "--job_id", help=argparse.SUPPRESS)
    parser_run.set_defaults(func=run)

    ht = (
        "Test a task instance. This will run a task without checking for "
        "dependencies or recording it's state in the database."
    )
    parser_test = subparsers.add_parser('test', help=ht)
    parser_test.add_argument("dag_id", help="The id of the dag to run")
    parser_test.add_argument("task_id", help="The task_id to run")
    parser_test.add_argument(
        "execution_date", help="The execution date to run")
    parser_test.add_argument(
        "-sd", "--subdir", help=subdir_help,
        default=DAGS_FOLDER)
    parser_test.add_argument(
        "-dr", "--dry_run", help="Perform a dry run", action="store_true")
    parser_test.add_argument(
        "-tp", "--task_params", help="Sends a JSON params dict to the task")
    parser_test.set_defaults(func=test)

    ht = "Get the status of a task instance."
    parser_task_state = subparsers.add_parser('task_state', help=ht)
    parser_task_state.add_argument("dag_id", help="The id of the dag to check")
    parser_task_state.add_argument("task_id", help="The task_id to check")
    parser_task_state.add_argument(
        "execution_date", help="The execution date to check")
    parser_task_state.add_argument(
        "-sd", "--subdir", help=subdir_help,
        default=DAGS_FOLDER)
    parser_task_state.set_defaults(func=task_state)

    ht = "Start a Airflow webserver instance"
    parser_webserver = subparsers.add_parser('webserver', help=ht)
    parser_webserver.add_argument(
        "-p", "--port",
        default=configuration.get('webserver', 'WEB_SERVER_PORT'),
        type=int,
        help="Set the port on which to run the web server")
    parser_webserver.add_argument(
        "-w", "--workers",
        default=configuration.get('webserver', 'WORKERS'),
        type=int,
        help="Number of workers to run the webserver on")
    parser_webserver.add_argument(
        "-k", "--workerclass",
        default=configuration.get('webserver', 'WORKER_CLASS'),
        choices=['sync', 'eventlet', 'gevent', 'tornado'],
        help="The worker class to use for gunicorn")
    parser_webserver.add_argument(
        "-hn", "--hostname",
        default=configuration.get('webserver', 'WEB_SERVER_HOST'),
        help="Set the hostname on which to run the web server")
    ht = "Use the server that ships with Flask in debug mode"
    parser_webserver.add_argument(
        "-d", "--debug", help=ht, action="store_true")
    parser_webserver.set_defaults(func=webserver)

    ht = "Start a scheduler scheduler instance"
    parser_scheduler = subparsers.add_parser('scheduler', help=ht)
    parser_scheduler.add_argument(
        "-d", "--dag_id", help="The id of the dag to run")
    parser_scheduler.add_argument(
        "-sd", "--subdir", help=subdir_help,
        default=DAGS_FOLDER)
    parser_scheduler.add_argument(
        "-n", "--num_runs",
        default=None,
        type=int,
        help="Set the number of runs to execute before exiting")
    parser_scheduler.add_argument(
        "-p", "--do_pickle",
        default=False,
        help=(
            "Attempt to pickle the DAG object to send over "
            "to the workers, instead of letting workers run their version "
            "of the code."),
        action="store_true")
    parser_scheduler.set_defaults(func=scheduler)

    ht = "Initialize the metadata database"
    parser_initdb = subparsers.add_parser('initdb', help=ht)
    parser_initdb.set_defaults(func=initdb)

    ht = "Burn down and rebuild the metadata database"
    parser_resetdb = subparsers.add_parser('resetdb', help=ht)
    parser_resetdb.add_argument(
            "-y", "--yes",
            default=False,
            help="Do not prompt to confirm reset. Use with care!",
            action="store_true")
    parser_resetdb.set_defaults(func=resetdb)

    ht = "Upgrade metadata database to latest version"
    parser_upgradedb = subparsers.add_parser('upgradedb', help=ht)
    parser_upgradedb.set_defaults(func=upgradedb)

    ht = "List the DAGs"
    parser_list_dags = subparsers.add_parser('list_dags', help=ht)
    parser_list_dags.add_argument(
        "-sd", "--subdir", help=subdir_help,
        default=DAGS_FOLDER)
    parser_list_dags.set_defaults(func=list_dags)

    ht = "List the tasks within a DAG"
    parser_list_tasks = subparsers.add_parser('list_tasks', help=ht)
    parser_list_tasks.add_argument(
        "-t", "--tree", help="Tree view", action="store_true")
    parser_list_tasks.add_argument(
        "dag_id", help="The id of the dag")
    parser_list_tasks.add_argument(
        "-sd", "--subdir", help=subdir_help,
        default=DAGS_FOLDER)
    parser_list_tasks.set_defaults(func=list_tasks)

    ht = "Start a Celery worker node"
    parser_worker = subparsers.add_parser('worker', help=ht)
    parser_worker.add_argument(
        "-q", "--queues",
        help="Comma delimited list of queues to serve",
        default=configuration.get('celery', 'DEFAULT_QUEUE'))
    parser_worker.add_argument(
        "-c", "--concurrency",
        type=int,
        help="The number of worker processes",
        default=configuration.get('celery', 'celeryd_concurrency'))
    parser_worker.set_defaults(func=worker)

    ht = "Serve logs generate by worker"
    parser_logs = subparsers.add_parser('serve_logs', help=ht)
    parser_logs.set_defaults(func=serve_logs)

    ht = "Start a Celery Flower"
    parser_flower = subparsers.add_parser('flower', help=ht)
    parser_flower.add_argument(
        "-p", "--port", help="The port")
    parser_flower.add_argument(
        "-a", "--broker_api", help="Broker api")
    parser_flower.set_defaults(func=flower)

    parser_version = subparsers.add_parser('version', help="Show version")
    parser_version.set_defaults(func=version)

    ht = "Start a kerberos ticket renewer"
    parser_kerberos = subparsers.add_parser('kerberos', help=ht)
    parser_kerberos.add_argument(
        "-kt", "--keytab", help="keytab",
        nargs='?', default=configuration.get('kerberos', 'keytab'))
    parser_kerberos.add_argument(
        "principal", help="kerberos principal",
        nargs='?', default=configuration.get('kerberos', 'principal'))
    parser_kerberos.set_defaults(func=kerberos)

    ht = "Render a task instance's template(s)"
    parser_render = subparsers.add_parser('render', help=ht)
    parser_render.add_argument("dag_id", help="The id of the dag to check")
    parser_render.add_argument("task_id", help="The task_id to check")
    parser_render.add_argument(
        "execution_date", help="The execution date to check")
    parser_render.add_argument(
        "-sd", "--subdir", help=subdir_help,
        default=DAGS_FOLDER)
    parser_render.set_defaults(func=render)

    return parser
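A typical entry point for the parser built above just parses argv and dispatches through the func default that every subparser sets; because the subparsers were created with dest='subcommand' and marked required, a subcommand is guaranteed to be present. A minimal sketch (not the project's actual main()):

def main_sketch():
    parser = get_parser()
    args = parser.parse_args()   # e.g. backfill my_dag -s 2016-01-01 (hypothetical argv)
    args.func(args)              # dispatches to backfill(), clear(), run(), ...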
Example #55
0
def main():
    glacier_settings=None
    try:
        import glacier_settings
    except ImportError:
        pass

    AWS_ACCESS_KEY = getattr(glacier_settings, "AWS_ACCESS_KEY", None) \
                        or os.environ.get("AWS_ACCESS_KEY_ID")
    AWS_SECRET_KEY = getattr(glacier_settings, "AWS_SECRET_KEY", None) \
                        or os.environ.get("AWS_SECRET_ACCESS_KEY")
    DEFAULT_REGION = getattr(glacier_settings, "REGION", None) \
                        or os.environ.get("GLACIER_DEFAULT_REGION") \
                        or "us-east-1"
    BOOKKEEPING = getattr(glacier_settings, "BOOKKEEPING", None) \
                        or os.environ.get("GLACIER_BOOKKEEPING") \
                        or False
    BOOKKEEPING_DOMAIN_NAME = getattr(glacier_settings, "BOOKKEEPING_DOMAIN_NAME", None) \
                        or os.environ.get("GLACIER_BOOKKEEPING_DOMAIN_NAME") \
                        or "amazon-glacier"

    program_description = u"""
	Command line interface for Amazon Glacier
	-----------------------------------------

	Required libraries are glaciercorecalls (temporarily, while we wait for glacier 
	support to land in boto's develop branch) and boto - at the moment you still 
	need to use development branch of boto (which you can get by
	 running `pip install --upgrade git+https://github.com/boto/boto.git`).

	To install simply execute:

	    >>> python setup.py install

	To run:

	    >>> glacier

	There are a couple of options on how to pass in the credentials. One is to set 
	`AWS_ACCESS_KEY_ID` and `AWS_SECRET_ACCESS_KEY` as environmental variables 
	(if you're using `boto` already, this is the usual method of configuration).

	While you can pass in your AWS Access and Secret key (`--aws-access-key` and `--aws-secret-key`), 
	it is recommended that you create `glacier_settings.py` file into which you put
	`AWS_ACCESS_KEY` and `AWS_SECRET_KEY` strings. You can also set these settings
	by exporting environment variables using `export AWS_ACCESS_KEY_ID=key` and
	`export AWS_SECRET_ACCESS_KEY=key`.

	You can also put `REGION` into `glacier_settings.py` to specify the default region 
	on which you will operate (default is `us-east-1`). When you want to operate on 
	a non-default region you can pass in the `--region` settings to the commands.
	You can also specify this setting by exporting `export GLACIER_DEFAULT_REGION=region`.

	It is recommended that you enable `BOOKKEEPING` in `glacier_settings.py` to allow
	for saving cache information into Amazon SimpleDB database. Again you can also
	export `GLACIER_BOOKKEEPING` and `GLACIER_BOOKKEEPING_DOMAIN_NAME` as environment
	variables.

	You have two options to retrieve an archive - first one is `download`, 
	second one is `getarchive`.

	If you use `download`, you will have to uniquely identify the file either by 
	its file name, its description, or limit the search by region and vault. 
	If that is not enough you should use `getarchive` and specify the archive ID of
	the archive you want to retrieve.
    """

    parser = argparse.ArgumentParser(formatter_class=argparse.RawDescriptionHelpFormatter,
                                    description=program_description)
    subparsers = parser.add_subparsers()

    help_msg_access_secret_key = u"Required if you haven't created a glacier_settings.py \
                                file with AWS_ACCESS_KEY and AWS_SECRET_KEY in it. \
                                Command line keys will override keys set in glacier_settings.py."
    parser.add_argument('--aws-access-key', required=not AWS_ACCESS_KEY,
                        default=AWS_ACCESS_KEY, help=help_msg_access_secret_key)
    parser.add_argument('--aws-secret-key', required=not AWS_SECRET_KEY,
                        default=AWS_SECRET_KEY, help=help_msg_access_secret_key)
    parser_lsvault = subparsers.add_parser("lsvault", help="List vaults")
    parser_lsvault.add_argument('--region', default=DEFAULT_REGION)
    parser_lsvault.set_defaults(func=lsvault)

    parser_mkvault = subparsers.add_parser("mkvault", help="Create a new vault")
    parser_mkvault.add_argument('vault')
    parser_mkvault.add_argument('--region', default=DEFAULT_REGION)
    parser_mkvault.set_defaults(func=mkvault)

    parser_rmvault = subparsers.add_parser('rmvault', help='Remove vault')
    parser_rmvault.add_argument('--region', default=DEFAULT_REGION)
    parser_rmvault.add_argument('vault')
    parser_rmvault.set_defaults(func=rmvault)

    parser_listjobs = subparsers.add_parser('listjobs', help='List jobs')
    parser_listjobs.add_argument('--region', default=DEFAULT_REGION)
    parser_listjobs.add_argument('vault')
    parser_listjobs.set_defaults(func=listjobs)

    parser_describejob = subparsers.add_parser('describejob', help='Describe job')
    parser_describejob.add_argument('--region', default=DEFAULT_REGION)
    parser_describejob.add_argument('vault')
    parser_describejob.add_argument('jobid')
    parser_describejob.set_defaults(func=describejob)

    parser_upload = subparsers.add_parser('upload', help='Upload an archive')
    parser_upload.add_argument('--region', default=DEFAULT_REGION)
    parser_upload.add_argument('vault')
    parser_upload.add_argument('filename')
    parser_upload.add_argument('description', nargs='*')
    parser_upload.set_defaults(func=putarchive)

    parser_getarchive = subparsers.add_parser('getarchive',
                help='Get a file by explicitly setting archive id.')
    parser_getarchive.add_argument('--region', default=DEFAULT_REGION)
    parser_getarchive.add_argument('vault')
    parser_getarchive.add_argument('archive')
    parser_getarchive.add_argument('filename', nargs='?')
    parser_getarchive.set_defaults(func=getarchive)

    if BOOKKEEPING:
        parser_download = subparsers.add_parser('download',
                help='Download a file by searching through SimpleDB cache for it.')
        parser_download.add_argument('--region', default=DEFAULT_REGION)
        parser_download.add_argument('--vault',
                help="Specify the vault in which archive is located.")
        parser_download.add_argument('--out-file')
        parser_download.add_argument('filename', nargs='?')
        parser_download.set_defaults(func=download)

    parser_rmarchive = subparsers.add_parser('rmarchive', help='Remove archive')
    parser_rmarchive.add_argument('--region', default=DEFAULT_REGION)
    parser_rmarchive.add_argument('vault')
    parser_rmarchive.add_argument('archive')
    parser_rmarchive.set_defaults(func=deletearchive)

    parser_search = subparsers.add_parser('search',
                help='Search SimpleDB database (if it was created)')
    parser_search.add_argument('--region')
    parser_search.add_argument('--vault')
    parser_search.add_argument('search_term')
    parser_search.set_defaults(func=search)

    parser_inventory = subparsers.add_parser('inventory',
                help='List inventory of a vault')
    parser_inventory.add_argument('--region', default=DEFAULT_REGION)
    parser_inventory.add_argument('--force')
    parser_inventory.add_argument('vault')
    parser_inventory.set_defaults(func=inventory)

    args = parser.parse_args(sys.argv[1:])

    # Only fall back to the glacier_settings/environment keys when they were
    # not supplied on the command line (command line keys take precedence).
    if not args.aws_access_key:
        args.aws_access_key = AWS_ACCESS_KEY
    if not args.aws_secret_key:
        args.aws_secret_key = AWS_SECRET_KEY

    args.func(args)
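The docstring above recommends keeping credentials in a glacier_settings.py module; main() only reads a handful of attributes from it via getattr(). A minimal sketch of such a file, with placeholder values:

# glacier_settings.py -- picked up by the optional `import glacier_settings` above.
# All values are placeholders.
AWS_ACCESS_KEY = "AKIA..."
AWS_SECRET_KEY = "..."
REGION = "us-east-1"                        # default region for all commands
BOOKKEEPING = True                          # enable the SimpleDB cache
BOOKKEEPING_DOMAIN_NAME = "amazon-glacier"  # SimpleDB domain used for bookkeeping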
Example #56
0
        message = '{}: {}'.format(', '.join(users), REMINDER)
    else:
        message = REMINDER
    print('  ' + message)
    message = comment_api.add_marker(message, BOT_NAME)
    comment_api.add_comment(request_id=request_id, comment=message)

def common_args_add(parser):
    parser.add_argument('--min-age', type=int, default=0, metavar='DAYS', help='min age of requests')
    parser.add_argument('--repeat-age', type=int, default=7, metavar='DAYS', help='age after which a new reminder will be sent')
    parser.add_argument('--remind', action='store_true', help='remind maintainers to review')


if __name__ == '__main__':
    parser = argparse.ArgumentParser(description='Operate on devel projects for a given project.')
    subparsers = parser.add_subparsers(title='subcommands')

    parser.add_argument('-A', '--apiurl', metavar='URL', help='API URL')
    parser.add_argument('-d', '--debug', action='store_true', help='print info useful for debugging')
    parser.add_argument('-p', '--project', default='openSUSE:Factory', metavar='PROJECT', help='project from which to source devel projects')

    parser_list = subparsers.add_parser('list', help='List devel projects.')
    parser_list.set_defaults(func=list)
    parser_list.add_argument('-w', '--write', action='store_true', help='write to pseudometa package')

    parser_maintainer = subparsers.add_parser('maintainer', help='Check for relevant groups as maintainer.')
    parser_maintainer.set_defaults(func=maintainer)
    parser_maintainer.add_argument('-g', '--group', action='append', help='group for which to check')

    parser_notify = subparsers.add_parser('notify', help='notify maintainers of their packages')
    parser_notify.set_defaults(func=notify)
Example #57
0
def main():
    program_description = u"""
    Command line interface for Amazon Glacier
    """

    # Config parser
    conf_parser = argparse.ArgumentParser(
                                formatter_class=argparse.ArgumentDefaultsHelpFormatter,
                                add_help=False)

    conf_parser.add_argument("-c", "--conf", default=".glacier",
                        help="Specify config file", metavar="FILE")
    args, remaining_argv = conf_parser.parse_known_args()

    # Here we parse config from files in the home folder or in the current folder.
    # We use separate sections for aws- and glacier-specific configs.
    aws = glacier = {}
    config = ConfigParser.SafeConfigParser()
    if config.read([args.conf, os.path.expanduser('~/.glacier')]):
        try:
            aws = dict(config.items("aws"))
        except ConfigParser.NoSectionError:
            pass
        try:
            glacier = dict(config.items("glacier"))
        except ConfigParser.NoSectionError:
            pass

    # Merge config options with environment variables (config values win)
    aws = dict(os.environ.items() + aws.items())
    glacier = dict(os.environ.items() + glacier.items())

    # Helper functions
    filt_s= lambda x: x.lower().replace("_","-")
    filt = lambda x,y="": dict(((y+"-" if y not in filt_s(k) else "") +
                             filt_s(k), v) for (k, v) in x.iteritems())
    a_required = lambda x: x not in filt(aws,"aws")
    required = lambda x: x not in filt(glacier)
    a_default = lambda x: filt(aws, "aws").get(x)
    default = lambda x: filt(glacier).get(x)

    # Main parser
    parser = argparse.ArgumentParser(parents=[conf_parser],
                                     formatter_class=argparse.ArgumentDefaultsHelpFormatter,
                                     description=program_description)
    subparsers = parser.add_subparsers(title='Subcommands',
                                       help=u"For subcommand help, use: glacier <subcommand> -h")

    group = parser.add_argument_group('aws')
    help_msg_config = u"(Required if you haven't created .glacier config file)"
    group.add_argument('--aws-access-key',
                        required= a_required("aws-access-key"),
                        default= a_default("aws-access-key"),
                        help="Your aws access key " + help_msg_config)
    group.add_argument('--aws-secret-key',
                        required=a_required("aws-secret-key"),
                        default=a_default("aws-secret-key"),
                        help="Your aws secret key " + help_msg_config)
    group = parser.add_argument_group('glacier')
    group.add_argument('--region',
                        required=required("region"),
                        default=default("region"),
                        help="Region where glacier should take action " + help_msg_config)
    group.add_argument('--bookkeeping',
                        required= False,
                        default= default("bookkeeping") and True,
                        action= "store_true",
                        help="Should we keep book of all creatated archives.\
                              This requires a SimpleDB account and it's \
                              bookkeeping domain name set")
    group.add_argument('--bookkeeping-domain-name',
                        required= False,
                        default= default("bookkeeping-domain-name"),
                        help="SimpleDB domain name for bookkeeping.")

    parser_lsvault = subparsers.add_parser("lsvault", help="List vaults")
    parser_lsvault.set_defaults(func=lsvault)

    parser_mkvault = subparsers.add_parser("mkvault", help="Create a new vault")
    parser_mkvault.add_argument('vault')
    parser_mkvault.set_defaults(func=mkvault)

    parser_rmvault = subparsers.add_parser('rmvault', help='Remove vault')
    parser_rmvault.add_argument('vault')
    parser_rmvault.set_defaults(func=rmvault)

    parser_listjobs = subparsers.add_parser('listjobs', help='List jobs')
    parser_listjobs.add_argument('vault')
    parser_listjobs.set_defaults(func=listjobs)

    parser_describejob = subparsers.add_parser('describejob', help='Describe job')
    parser_describejob.add_argument('vault')
    parser_describejob.add_argument('jobid')
    parser_describejob.set_defaults(func=describejob)

    parser_upload = subparsers.add_parser('upload', help='Upload an archive',
                               formatter_class=argparse.RawTextHelpFormatter)
    parser_upload.add_argument('vault')
    parser_upload.add_argument('filename')
    parser_upload.add_argument('--stdin',
                                help="Input data from stdin, instead of file",
                                action='store_true')
    parser_upload.add_argument('--name', default=None,
                                help='''\
Use the given name as the filename for bookkeeping
purposes. This option is useful in conjunction with
--stdin or when the file being uploaded is a
temporary file.''')
    parser_upload.add_argument('--partsize', type=int, default=-1,
                               help='''\
Part size to use for upload (in Mb). Must
be a power of 2 in the range:
    1 .. 4,294,967,296 (2^0 .. 2^32).
Values that are not a power of 2 will be
adjusted upwards to the next power of 2.

Amazon accepts up to 10,000 parts per upload.

Smaller parts result in more frequent progress
updates, and less bandwidth wasted if a part
needs to be re-transmitted. On the other hand,
smaller parts limit the size of the archive that
can be uploaded. Some examples:

partsize  MaxArchiveSize
    1        1*1024*1024*10000 ~= 10Gb
    4        4*1024*1024*10000 ~= 41Gb
   16       16*1024*1024*10000 ~= 137Gb
  128      128*1024*1024*10000 ~= 1.3Tb

By default, the smallest possible value is used
when the archive size is known ahead of time.
Otherwise (when reading from STDIN) a value of
128 is used.''')
    parser_upload.add_argument('description', nargs='*')
    parser_upload.set_defaults(func=putarchive)

    parser_getarchive = subparsers.add_parser('getarchive',
                help='Get a file by explicitly setting archive id')
    parser_getarchive.add_argument('vault')
    parser_getarchive.add_argument('archive')
    parser_getarchive.add_argument('filename', nargs='?')
    parser_getarchive.set_defaults(func=getarchive)

    parser_rmarchive = subparsers.add_parser('rmarchive', help='Remove archive')
    parser_rmarchive.add_argument('vault')
    parser_rmarchive.add_argument('archive')
    parser_rmarchive.set_defaults(func=deletearchive)

    parser_search = subparsers.add_parser('search',
                help='Search SimpleDB database (if it was created). \
                      By default returns contents of vault.')
    parser_search.add_argument('--vault')
    parser_search.add_argument('--search_term')
    parser_search.set_defaults(func=search)

    parser_inventory = subparsers.add_parser('inventory',
                help='List inventory of a vault')
    parser_inventory.add_argument('--force', action='store_true',
                                 help="Create a new inventory job")
    parser_inventory.add_argument('vault')
    parser_inventory.set_defaults(func=inventory)

    parser_describevault = subparsers.add_parser('describevault', help='Describe vault')
    parser_describevault.add_argument('vault')
    parser_describevault.set_defaults(func=describevault)

    parser_listmultiparts = subparsers.add_parser('listmultiparts', help='List multipart uploads')
    parser_listmultiparts.add_argument('vault')
    parser_listmultiparts.set_defaults(func=listmultiparts)

    parser_abortmultipart = subparsers.add_parser('abortmultipart', help='Abort multipart upload')
    parser_abortmultipart.add_argument('vault')
    parser_abortmultipart.add_argument('uploadId')
    parser_abortmultipart.set_defaults(func=abortmultipart)


    # bookkeeping required
    parser_download = subparsers.add_parser('download',
            help='Download a file by searching through SimpleDB cache for it.')
    parser_download.add_argument('--vault',
            help="Specify the vault in which archive is located.")
    parser_download.add_argument('--out-file')
    parser_download.add_argument('filename', nargs='?')
    parser_download.set_defaults(func=download)

    args = parser.parse_args(remaining_argv)
    args.func(args)
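This variant reads its defaults from an INI-style .glacier file with [aws] and [glacier] sections; the helper lambdas lower-case the option names, turn underscores into dashes, and prefix the aws-section keys with aws- before matching them against the command line options. A sketch, using the same Python 2 ConfigParser module as the example above, of writing a file with the expected layout (all values are placeholders):

import ConfigParser  # Python 2, matching the example above

config = ConfigParser.SafeConfigParser()
config.add_section("aws")
config.set("aws", "access_key", "AKIA...")    # matched against --aws-access-key
config.set("aws", "secret_key", "...")        # matched against --aws-secret-key
config.add_section("glacier")
config.set("glacier", "region", "us-east-1")  # matched against --region
config.set("glacier", "bookkeeping-domain-name", "amazon-glacier")

with open(".glacier", "w") as fp:
    config.write(fp)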
Example #58
0
    def contribute_to_subparser(self, parser):
        parser.set_defaults(function=lambda args: parser.print_usage())
        subparsers = parser.add_subparsers(title='commands')

        setup_parser = subparsers.add_parser('setup', help='setup Google Drive API access through OAuth')
        setup_parser.set_defaults(function=self.setup)
Example #59
0
def ucare_argparser():
    parser = argparse.ArgumentParser()
    parser.add_argument('--version', action='version',
                        version='ucare {0}'.format(__version__))

    subparsers = parser.add_subparsers()

    # files list
    subparser = subparsers.add_parser('list_files', help='list all files')
    subparser.set_defaults(func=list_files)
    subparser.add_argument(
        '--starting_point',
        help='a starting point for filtering files',
        action='store')
    subparser.add_argument(
        '--ordering',
        help='specify the way the files should be sorted',
        action='store')
    subparser.add_argument('--limit', help='files to show', default=100,
                           type=int_or_none)
    subparser.add_argument('--request_limit', help='files per request',
                           default=100, type=int_or_none)
    subparser.add_argument('--stored', help='filter stored files',
                           choices=[True, False, None],
                           type=bool_or_none, default=None)
    subparser.add_argument('--removed', help='filter removed files',
                           choices=[True, False, None],
                           type=bool_or_none, default=False)

    # groups list
    subparser = subparsers.add_parser('list_groups', help='list all groups')
    subparser.set_defaults(func=list_groups)
    subparser.add_argument(
        '--starting_point',
        help='a starting point for filtering groups',
        action='store')
    subparser.add_argument(
        '--ordering',
        help='specify the way the groups should be sorted',
        action='store')
    subparser.add_argument('--limit', help='groups to show', default=100,
                           type=int_or_none)
    subparser.add_argument('--request_limit', help='groups per request',
                           default=100, type=int_or_none)

    # get
    subparser = subparsers.add_parser('get', help='get file info')
    subparser.set_defaults(func=get_file)
    subparser.add_argument('path', help='file path')

    # common store and delete args
    waiting_parent = argparse.ArgumentParser(add_help=False)
    waiting_parent.add_argument(
        '--timeout',
        type=int,
        dest='timeout',
        default=5,
        help='Seconds to wait until the operation is completed.'
             ' Default value is 5 seconds')
    group = waiting_parent.add_mutually_exclusive_group()
    group.add_argument(
        '--wait',
        action='store_true',
        default=True,
        dest='wait',
        help='Wait for operation to be completed'
    )
    group.add_argument(
        '--nowait',
        action='store_false',
        dest='wait',
        help='Do not wait for operation to be completed'
    )

    # store
    subparser = subparsers.add_parser('store',
                                      parents=[waiting_parent],
                                      help='store file')
    subparser.set_defaults(func=store_files)
    subparser.add_argument('paths', nargs='+', help='file(s) path')

    # delete
    subparser = subparsers.add_parser('delete',
                                      parents=[waiting_parent],
                                      help='request delete')
    subparser.set_defaults(func=delete_files)
    subparser.add_argument('paths', nargs='+', help='file(s) path')

    # common upload args
    upload_parent = argparse.ArgumentParser(add_help=False)
    group = upload_parent.add_mutually_exclusive_group()
    group.add_argument(
        '--store',
        action='store_true',
        default=False,
        dest='store',
        help='Store uploaded file')
    group.add_argument(
        '--nostore',
        action='store_false',
        dest='store',
        help='Do not store uploaded file')
    group = upload_parent.add_mutually_exclusive_group()
    group.add_argument(
        '--info',
        action='store_true',
        default=False,
        dest='info',
        help='Get uploaded file info')
    group.add_argument(
        '--noinfo',
        action='store_false',
        dest='info',
        help='Do not get uploaded file info')
    upload_parent.add_argument(
        '--cdnurl',
        action='store_true',
        help='Store file and get CDN url.')

    # upload from url
    subparser = subparsers.add_parser('upload_from_url',
                                      parents=[upload_parent],
                                      help='upload file from url')
    subparser.set_defaults(func=upload_from_url)
    subparser.add_argument('url', help='file url')
    subparser.add_argument(
        '--timeout',
        type=int,
        dest='timeout',
        default=30,
        help='Seconds to wait for the file to be uploaded from the url.'
             ' Default value is 30 seconds')
    group = subparser.add_mutually_exclusive_group()
    group.add_argument(
        '--wait',
        action='store_true',
        default=True,
        dest='wait',
        help='Wait for upload status')
    group.add_argument(
        '--nowait',
        action='store_false',
        dest='wait',
        help='Do not wait for upload status')

    # upload
    subparser = subparsers.add_parser('upload', parents=[upload_parent],
                                      help='upload file')
    subparser.set_defaults(func=upload)
    subparser.add_argument('filename', help='filename')

    # Create file group.
    subparser = subparsers.add_parser('create_group', help='create file group')
    subparser.set_defaults(func=create_group)
    subparser.add_argument('paths', nargs='+', help='file paths')

    # Sync files
    add_sync_files_parser(subparsers)

    # common arguments
    parser.add_argument(
        '--pub_key',
        help='API key; if not set, it is read from the uploadcare.ini'
             ' and ~/.uploadcare config files')
    parser.add_argument(
        '--secret',
        help='API secret; if not set, it is read from the uploadcare.ini'
             ' and ~/.uploadcare config files')
    parser.add_argument(
        '--api_base',
        help='API url, can be read from uploadcare.ini'
             ' and ~/.uploadcare config files.'
             ' Default value is {0}'.format(conf.api_base))
    parser.add_argument(
        '--upload_base',
        help='Upload API url, can be read from uploadcare.ini'
             ' and ~/.uploadcare config files.'
             ' Default value is {0}'.format(conf.upload_base))
    parser.add_argument(
        '--no_check_upload_certificate',
        action='store_true',
        help="Don't check the uploading API server certificate."
             ' Can be read from uploadcare.ini'
             ' and ~/.uploadcare config files.')
    parser.add_argument(
        '--no_check_api_certificate',
        action='store_true',
        help="Don't check the REST API server certificate."
             ' Can be read from uploadcare.ini'
             ' and ~/.uploadcare config files.')
    parser.add_argument(
        '--api_version',
        help='API version, can be read from uploadcare.ini'
             ' and ~/.uploadcare config files.'
             ' Default value is {0}'.format(conf.api_version))

    return parser
Example #60
0
    global args

    parser = argparse.ArgumentParser(description='Re-Innovation Wind Data Analysis Tool')
    parser.add_argument('--database', dest='database_path', type=str, default='winda.db',
        help='specify the database file path')
    parser.add_argument('--debug', dest='logging_level', action='store_const',
        const=logging.DEBUG, default=logging.WARN,
        help='write debugging output in the log')
    parser.add_argument('--info', dest='logging_level', action='store_const',
        const=logging.INFO, help='write informational output in the log')
    parser.add_argument('--log-ts', dest='log_timestamps',
        action='store_const', const=True, default=False,
        help='include timestamps in the log output')
    parser.add_argument('--yes', dest='assume_yes', action='store_const', const=True, 
        default=False, help='Assume the answer to any confirmation prompt is YES')
    subparsers = parser.add_subparsers()

    # Reset command
    parser_reset = subparsers.add_parser('reset', help='Reset the database (delete everything!)')
    parser_reset.set_defaults(func=database_reset)

    # Info command
    parser_info = subparsers.add_parser('info', help='Print information about the database file and exit')
    parser_info.set_defaults(func=database_info)

    # Add command
    parser_add = subparsers.add_parser('add', help='Add data from CSV files into the database')
    parser_add.add_argument('files', metavar='filename', type=str, nargs='+',
                   help='file name or glob pattern to add to database')
    parser_add.set_defaults(func=add_files)