Example #1
def config_parser(parser):
    parser.add_argument('--auto-tags',
                        type=bool,
                        help='add automatically generated machine tags',
                        default=True)
    parser.add_argument('--tags',
                        type=str,
                        help='comma separated list of machine tags')
    parser.add_argument('--temp', type=str, help='temp folder')
    parser.add_argument('--interval',
                        type=float,
                        help='dequeue interval (in seconds)')
    parser.add_argument('--concurency',
                        type=int,
                        help='number of simultaneous tasks')
    parser.add_argument('--cpus', type=int, help='cpus limit')
    parser.add_argument('--memory', action=MemoryAction, help='memory limit')
    parser.add_argument('--pids', type=int, help='pids limit')
    parser.add_argument('--storage', action=MemoryAction, help='storage limit')
    parser.add_argument('--time', action=TimeAction, help='time limit')
    # Note: argparse's type=bool treats any non-empty string (including "False") as True.
    parser.add_argument('--network', type=bool, help='allow networking')

    def execute(args):
        kolejka_config(args=args)
        foreman()

    parser.set_defaults(execute=execute)
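The function above only registers options and stores an execute callback through set_defaults; the wiring that invokes it is not shown. A minimal sketch of how such a config_parser()/execute() pair is typically driven (the entry-point code below is an illustrative assumption, not part of the original source):

import argparse

def run_cli(config_parser):
    # Hypothetical entry point: build a parser, let config_parser() register
    # the options plus the execute callback, then dispatch after parsing.
    parser = argparse.ArgumentParser(description='worker daemon')
    config_parser(parser)
    args = parser.parse_args()
    args.execute(args)  # callback installed via parser.set_defaults(execute=execute)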
Example #2
    def __init__(self, parser):
        super().__init__(parser)

        parser.add_argument(
            '--cluster',
            action=store_env_override,
            envvar='ECS_CLUSTER_NAME',
            description="short name or full Amazon Resource Name (ARN) "
            "of the cluster that your service is running on",
            addendum='(default extended for staging with suffix: -staging)'
            if self._environmental else None,
        )
        parser.add_argument(
            '--service',
            action=store_env_override,
            envvar='ECS_SERVICE_NAME',
            description="name of the service to update",
            addendum='(default extended for staging with suffix: -staging)'
            if self._environmental else None,
        )

        parser.set_defaults(
            resolve_cluster=functools.lru_cache()(self.resolve_cluster),
            resolve_service=functools.lru_cache()(self.resolve_service),
        )
Example #3
    def __init__(self, parser):
        super().__init__(parser)

        parser.add_argument(
            '-n',
            '--name',
            default=self.DEFAULT_NAMETAG,
            help=f'Image name/tag (default: {self.DEFAULT_NAMETAG})',
        )
        parser.add_argument(
            '-b',
            '--build',
            action='store_true',
            help="(re-)build image before container creation",
        )
        parser.add_argument(
            '--net',
            choices=('host', ),
            help="specify a non-default networking mode",
        )
        parser.add_argument(
            '-e',
            '--env',
            action='append',
            help='set environment variables (as for docker)',
        )

        parser.set_defaults(resolve_environ=functools.lru_cache()(
            self.resolve_environ), )
Example #4
File: report.py  Project: suqingdong/lims
def parser_add_report(parser):

    parser.add_argument('filename',
                        help='the report file to upload',
                        nargs='?')

    parser.add_argument('-stage',
                        '--stage-code',
                        help='the stage code',
                        required=True)

    parser.add_argument('-t',
                        '--type',
                        help='the type of report, choose from [%(choices)s]',
                        choices=['qc', 'mapping', 'final'])

    parser.add_argument('-sop',
                        '--sop-method',
                        help='the sop method for the product')

    parser.add_argument('-count', '--sample-count', help='the count of sample')

    parser.add_argument('-data', '--data-size', help='the total data size')

    parser.add_argument('-msg',
                        '--message',
                        help='the message for this report')

    parser.add_argument('-d', '--delete', help='the report_id to delete')

    parser.set_defaults(func=main)
Example #5
    def contribute_to_subparser(self, parser):
        parser.set_defaults(function=lambda args: parser.print_usage())
        subparsers = parser.add_subparsers(title='commands')

        setup_parser = subparsers.add_parser(
            'setup', help='setup Google Drive API access through OAuth')
        setup_parser.set_defaults(function=self.setup)
Example #6
def configure_subparsers(subparsers):
    """Configure a new subparser ."""
    parser = subparsers.add_parser(
        'map-users-to-location',
        help=
        'Given a json file of users, maps them to a specific file based on the location type (state, country, county, ...).',
    )
    parser.add_argument(
        '--location-type',
        type=str,
        default='state',
        choices={
            'state', 'state_district', 'country', 'country_code', 'county',
            'town', 'municipality', 'postcode', 'village'
        },
        help=
        'The type of the locations that will be used to map the user into the right file [default: state]'
    )
    parser.add_argument(
        '--specify-country',
        type=str,
        default=None,
        help=
        'Optional parameter used to specify the country aside from the location type [default: None]'
    )
    parser.add_argument(
        '--specify-state',
        type=str,
        default=None,
        help=
        'Optional parameter used to specify the state aside from the location type [default: None]'
    )

    parser.set_defaults(func=main, which='map_users_to_location')
Example #7
def parse_args(parser: argparse.ArgumentParser):
    argument_parser = ArgumentParser(parser)

    parser.add_argument("--task_history_time_threshold", type=int, default=180, help="1履歴、何分以上を検知対象とするか。")
    parser.add_argument("--import_file_path", type=str, help="importするタスク履歴イベント全件ファイル,指定しない場合はタスク履歴イベント全件を新規取得する")

    argument_parser.add_output()
    parser.add_argument(
        "-p",
        "--project_id",
        type=str,
        required=True,
        nargs="+",
        help="対象のプロジェクトのproject_idを指定します。複数指定可、但しtask_idを指定した場合は1つしか指定できません。"
        "`file://`を先頭に付けると、project_idの一覧が記載されたファイルを指定できます。",
    )
    parser.add_argument(
        "-t",
        "--task_id",
        type=str,
        nargs="+",
        help="対象のプロジェクトのtask_idを指定します。複数指定可、但しtimeを指定した場合は1つしか指定できません。"
        "`file://`を先頭に付けると、task_idの一覧が記載されたファイルを指定できます。",
    )
    parser.add_argument(
        "--time",
        type=str,
        nargs="+",
        help="検索対象の時間を指定します。(%%Y/%%m/%%d %%H:%%i:%%s)",
    )
    parser.add_argument("--add", action="store_true", help="出力する際に追記で書き込む")

    parser.set_defaults(subcommand_func=main)
Example #8
    def __init__(self,
                 store,
                 command_name="cd",
                 disabled_commands=set(),
                 date_formatter=BabelDateFormatter(),
                 default_timezone=None,
                 **kwargs):
        super().__init__(command_name, **kwargs)
        self.store = store
        self.date_formatter = date_formatter

        subparsers = self.argparse.add_subparsers(
            dest="action", help="Choose the action to execute")

        self.disabled_commands = disabled_commands

        if self.CMD_ADD not in disabled_commands:
            # add command
            parser = subparsers.add_parser(
                "add", help="Store a new event in the system")
            parser.add_argument("name",
                                help="Descriptive name of the event to store")
            parser.add_argument("target_date", help="Date of the event")
            parser.add_argument(
                "timezone",
                nargs="?",
                default=default_timezone,
                type=pytz.timezone,
                help="Name of the timezone (e.g. Europe/Berlin). Overrides any"
                " timezone given in the target_date string.")
            parser.set_defaults(func=self._cmd_add)

        if self.CMD_MOVE not in disabled_commands:
            # move command
            parser = subparsers.add_parser("move",
                                           help="Rename an event",
                                           aliases={"mv", "rename"})
            parser.add_argument("oldname")
            parser.add_argument("newname")
            parser.set_defaults(func=self._cmd_rename)

        if self.CMD_DELETE not in disabled_commands:
            # delete command
            parser = subparsers.add_parser("rm",
                                           help="Remove an event",
                                           aliases={"delete", "remove"})
            parser.add_argument("names",
                                nargs="+",
                                help="Name of the information to remove")
            parser.set_defaults(func=self._cmd_delete)

        if self.CMD_SAVE not in disabled_commands:
            parser = subparsers.add_parser(
                "save", help="Save all data stored in the eventstore.")
            parser.set_defaults(func=self._cmd_save)

        if self.CMD_STATS not in disabled_commands:
            parser = subparsers.add_parser(
                "stats", help="Print memory usage statistics.")
            parser.set_defaults(func=self._cmd_stats)
Example #9
def main():
    parser = argparse.ArgumentParser('Download a Flickr Set')
    parser.add_argument('-k', '--api_key', type=str,
                        help='Flickr API key')
    parser.add_argument('-s', '--api_secret', type=str,
                        help='Flickr API secret')
    parser.add_argument('-t', '--user_auth', action='store_true',
                        help='Enable user authentication')
    parser.add_argument('-l', '--list', type=str, metavar='USER',
                        help='List photosets for a user')
    parser.add_argument('-d', '--download', type=str, metavar='SET_ID',
                        help='Download the given set')
    parser.add_argument('-q', '--quality', type=str, metavar='SIZE_LABEL',
                        default=None, help='Quality of the picture')
    parser.set_defaults(**_load_defaults())

    args = parser.parse_args()

    if not args.api_key or not args.api_secret:
        print('You need to pass in both "api_key" and "api_secret" arguments', file=sys.stderr)
        return 1

    ret = _init(args.api_key, args.api_secret, args.user_auth)
    if not ret:
        return 1

    if args.list:
        print_sets(args.list)
    elif args.download:
        download_set(args.download, args.quality)
    else:
        print('ERROR: Must pass either --list or --download\n', file=sys.stderr)
        parser.print_help()
        return 1
Example #10
def main():
    # Available CLI flags.
    parser = argparse.ArgumentParser(
        description="Convert YAML to JSON.",
    )

    parser.add_argument(
        "input",
        nargs="?",
        type=argparse.FileType("r"),
        default=sys.stdin,
        help="The YAML file to use as input.")

    parser.add_argument(
        "-o", "--output",
        help="The JSON file to use as output. If not set, will output to STDOUT.")

    parser.add_argument(
        "-p", "--pretty",
        dest='pretty',
        action='store_true',
        help="By default, the JSON is unprettified. This will enable prettification.")

    parser.set_defaults(pretty=False)

    flags = parser.parse_args()

    if flags.output:
        with open(flags.output, 'w') as outfile:
            if flags.pretty:
                json.dump(
                    yaml.load(flags.input.read()),
                    outfile,
                    # sort_keys=True,
                    indent=4,
                    separators=(',', ': '),
                    cls=JsonTimeEncoder
                )
            else:
                json.dump(
                    yaml.load(flags.input.read()),
                    outfile,
                    cls=JsonTimeEncoder
                )
    else:
        if flags.pretty:
            print(json.dumps(
                yaml.load(flags.input.read()),
                # sort_keys=True,
                indent=4,
                separators=(',', ': '),
                cls=JsonTimeEncoder
            ))
        else:
            print(json.dumps(
                yaml.load(flags.input.read()),
                cls=JsonTimeEncoder
            ))
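A small aside on the converter above: in PyYAML 5.1 and later, calling yaml.load() without an explicit Loader emits a warning, and yaml.safe_load() is the usual replacement when the input is not trusted. A hedged one-line sketch of that substitution (not a change the original author made):

    data = yaml.safe_load(flags.input.read())  # instead of yaml.load(flags.input.read())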
Example #11
def main():
    # get CLI args
    parser = argparse.ArgumentParser(
        description=
        "Generates a spreadsheet with student submission timestamps for each assignment. Late submissions are called out in red, while on time submissions are in blue.",
        formatter_class=argparse.ArgumentDefaultsHelpFormatter)
    parser.add_argument('course_id', help='Canvas Course ID.')
    parser.add_argument(
        '--student_identifier',
        choices=['huid', 'name'],
        default='huid',
        help="Choose to identify students by name or HUID in the results.")
    parser.add_argument(
        '--use_cache',
        dest='use_cache',
        action='store_true',
        help=
        "Use cached data rather than fetching from the API, if it is available."
    )
    parser.add_argument('--debug',
                        dest='debug',
                        action='store_true',
                        help="Log debugging information. ")
    parser.set_defaults(use_cache=False)
    parser.set_defaults(debug=False)
    args = parser.parse_args()

    # configure logging
    loggingConfig = {'filename': 'output.log', 'level': logging.INFO}
    if args.debug:
        loggingConfig['level'] = logging.DEBUG
    logging.basicConfig(**loggingConfig)
    requests_log = logging.getLogger("requests.packages.urllib3")
    requests_log.setLevel(loggingConfig['level'])
    requests_log.propagate = True
    console = logging.StreamHandler()
    console.setLevel(logging.INFO)  # only ever output info to the console
    logging.getLogger('').addHandler(console)  # add to root handler

    # configure output filenames
    today = datetime.date.today().isoformat().replace('-', '')
    cache_file = os.path.join(BASE_PATH, "%s-cache.json" % args.course_id)
    results_json_file = os.path.join(
        BASE_PATH, "%s-results-%s.json" % (args.course_id, today))
    results_xls_file = os.path.join(
        BASE_PATH, "%s-results-%s.xls" % (args.course_id, today))

    # fetch and process the data
    logging.debug("Begin.")
    data = load(args.course_id, cache_file, use_cache=args.use_cache)
    if len(data['students']) == 0:
        logging.info(
            "No students found in the course, so can't generate a report.")
    else:
        results = process(data, student_identifier=args.student_identifier)
        cache_write(results_json_file, results)
        create_spreadsheet(results_xls_file, data, results)
    logging.debug("Done.")
Example #12
def add_bool_arg(parser,
                 name,
                 help_text,
                 default=False):  # https://stackoverflow.com/a/31347222
    group = parser.add_mutually_exclusive_group(required=False)
    group.add_argument('--' + name,
                       dest=name,
                       action='store_true',
                       help=help_text)
    group.add_argument('--no-' + name, dest=name, action='store_false')
    parser.set_defaults(**{name: default})
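A short usage sketch for the helper above (it reuses the add_bool_arg just defined; the flag name 'verbose' and the default are illustrative): it registers a mutually exclusive --verbose/--no-verbose pair that write to the same destination, with the initial value supplied through set_defaults.

import argparse

parser = argparse.ArgumentParser()
add_bool_arg(parser, 'verbose', 'enable verbose output', default=False)

print(parser.parse_args([]).verbose)                 # False (the default)
print(parser.parse_args(['--verbose']).verbose)      # True
print(parser.parse_args(['--no-verbose']).verbose)   # False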
Example #13
def main():
    parser = argparse.ArgumentParser('Download a Flickr Set')
    parser.add_argument('-k', '--api_key', type=str, help='Flickr API key')
    parser.add_argument('-s',
                        '--api_secret',
                        type=str,
                        help='Flickr API secret')
    parser.add_argument('-t',
                        '--user_auth',
                        action='store_true',
                        help='Enable user authentication')
    parser.add_argument('-l',
                        '--list',
                        type=str,
                        metavar='USER',
                        help='List photosets for a user')
    parser.add_argument('-d',
                        '--download',
                        type=str,
                        metavar='SET_ID',
                        help='Download the given set')
    parser.add_argument('-q',
                        '--quality',
                        type=str,
                        metavar='SIZE_LABEL',
                        default=None,
                        help='Quality of the picture')
    parser.add_argument('-n',
                        '--naming',
                        type=str,
                        metavar='RENAME_MODE',
                        default=None,
                        help='Photo naming')
    parser.set_defaults(**_load_defaults())

    args = parser.parse_args()

    if not args.api_key or not args.api_secret:
        print('You need to pass in both "api_key" and "api_secret" arguments', file=sys.stderr)
        return 1

    ret = _init(args.api_key, args.api_secret, args.user_auth)
    if not ret:
        return 1

    if args.list:
        print_sets(args.list)
    elif args.download:
        download_set(args.download, args.quality, args.naming)
    else:
        print('ERROR: Must pass either --list or --download\n', file=sys.stderr)
        parser.print_help()
        return 1
Example #14
def register_args(subparsers):
    parser = subparsers.add_parser(
        name='metadata', help="Create PTOv3 metadata files from results")
    parser.add_argument("files",
                        nargs="*",
                        help="input files",
                        metavar="INPUTFILE")
    parser.add_argument("-t",
                        "--filetype",
                        help="filetype [ps-ndjson]",
                        metavar="FILETYPE",
                        default="ps-ndjson")
    # Set the command entry point
    parser.set_defaults(cmd=metadata)
Example #15
def cli_parser():
    parser = argparse.ArgumentParser()
    parser.add_argument('--aviris-ids', required=False, type=str, default=None)
    parser.add_argument('--aviris-scenes', required=True, type=str)
    parser.add_argument('--output', required=False, type=str, default=None)
    parser.add_argument('--planet-scenes', required=True, type=str)
    parser.add_argument('--timedelta', required=False, type=int, default=33)

    parser.add_argument('--ignore-year',
                        required=False,
                        dest='ignore_year',
                        action='store_true')
    parser.set_defaults(ignore_year=False)
    return parser
Example #16
def main():
    # Available CLI flags.
    parser = argparse.ArgumentParser(
        description="Convert YAML to JSON.",
    )

    parser.add_argument(
        "input",
        nargs="?",
        type=argparse.FileType("r"),
        default=sys.stdin,
        help="The YAML file to use as input.")

    parser.add_argument(
        "-o", "--output",
        help="The JSON file to use as output. If not set, will output to STDOUT.")

    parser.add_argument(
        "-p", "--pretty",
        dest='pretty',
        action='store_true',
        help="By default, the JSON is unprettified. This will enable prettification.")

    parser.set_defaults(pretty=False)

    flags = parser.parse_args()

    if flags.output:
        with open(flags.output, 'w') as outfile:
            if flags.pretty:
                json.dump(
                    yaml.load(flags.input.read()),
                    outfile,
                    # sort_keys=True,
                    indent=4,
                    separators=(',', ': '),
                    cls=JsonTimeEncoder
                )
            else:
                json.dump(
                    yaml.load(flags.input.read()),
                    outfile,
                    cls=JsonTimeEncoder
                )
    else:
        if flags.pretty:
            print(pretty_output(flags.input.read()))
        else:
            print(compressed_output(flags.input.read()))
Example #17
def cli_parser(subparsers, handlers, parsers):
    subcommand = 'fighter'
    handlers[subcommand] = main
    parser = subparsers.add_parser(
        subcommand,
        help='Get fighter data',
        description='Extract all data on a given fighter')
    parser.add_argument('-t', '--timeout', help='timeout in seconds')
    parser.add_argument('-u', '--user_agent', help='user agent string')
    parser.add_argument('-p', '--parser', help='bs4 parser')
    parser.add_argument('fighter_id', help='Sherdog fighter ID')

    parser.set_defaults(timeout=None, user_agent=USER_AGENT, parser='lxml')

    parsers[subcommand] = parser
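The registration function above stores its handler in handlers[subcommand] and its parser in parsers[subcommand], while set_defaults fills in timeout/user_agent/parser when the flags are omitted. A hedged sketch of the driver such a registry usually implies (the dest='subcommand' wiring and the dispatch code are assumptions, and it reuses cli_parser() and main from the example's module):

import argparse

def dispatch(argv):
    # Hypothetical top-level driver: collect subcommand handlers, then route
    # the parsed arguments to the handler registered under that name.
    handlers, parsers = {}, {}
    top = argparse.ArgumentParser()
    subparsers = top.add_subparsers(dest='subcommand')
    cli_parser(subparsers, handlers, parsers)   # registers 'fighter'
    args = top.parse_args(argv)
    return handlers[args.subcommand](args)      # e.g. main(args) for 'fighter'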
Example #18
def main():
    parser = argparse.ArgumentParser('Download a Flickr Set')
    parser.add_argument('-k', '--api_key', type=str,
                        help='Flickr API key')
    parser.add_argument('-s', '--api_secret', type=str,
                        help='Flickr API secret')
    parser.add_argument('-t', '--api_token', action='store_true',
                        help='Use OAuth token')
    parser.add_argument('-l', '--list', type=str,
                        help='List photosets for a user')
    parser.add_argument('-p', '--photos', type=str,
                        help='List photos of a user')
    parser.add_argument('-d', '--download', type=str,
                        help='Download the given set')
    parser.add_argument('-u', '--photostream', type=str,
                        help='Download photostream of user')
    parser.add_argument('-o', '--photosets', type=str,
                        help='Download all photosets of user')
    parser.add_argument('-x', '--all', type=str,
                        help='Download all photosets and photos of user')

    parser.set_defaults(**_load_defaults())

    args = parser.parse_args()

    if not args.api_key or not args.api_secret:
        print('You need to pass in both "api_key" and "api_secret" arguments', file=sys.stderr)
        return 1

    _init(args.api_key, args.api_secret, args.api_token)
    if args.list:
        print_sets(args.list)
    elif args.download:
        download_set(args.download)
    elif args.photos:
        list_photos(args.photos)
    elif args.photosets:
        download_sets(args.photosets)
    elif args.photostream:
        download_photos(args.photostream)
    elif args.all:
        download_all(args.all)
    else:
        print('ERROR: Must pass either --list or --download\n', file=sys.stderr)
        parser.print_help()
        return 1
Example #19
def config_parser(parser):
    parser.add_argument("task", type=str, help='task folder')
    parser.add_argument("result", type=str, help='result folder')
    parser.add_argument("--temp", type=str, help='temp folder')
    parser.add_argument("--consume", action="store_true", default=False, help='consume task folder') 
    parser.add_argument('--cpus', type=int, help='cpus limit')
    parser.add_argument('--memory', action=MemoryAction, help='memory limit')
    parser.add_argument('--pids', type=int, help='pids limit')
    parser.add_argument('--storage', action=MemoryAction, help='storage limit')
    parser.add_argument('--time', action=TimeAction, help='time limit')
    parser.add_argument('--network', type=bool, help='allow networking')
    def execute(args):
        kolejka_config(args=args)
        config = worker_config()
        stage0(args.task, args.result, temp_path=config.temp_path, consume_task_folder=args.consume)
    parser.set_defaults(execute=execute)
Example #20
def configure_subparsers(subparsers):
    """Configure a new subparser ."""
    parser = subparsers.add_parser(
        'aggregate-tweets',
        help=
        'Aggregates in a single file tweets of the same pool of days, year and language',
    )
    parser.add_argument(
        '--type',
        type=str,
        default='week',
        choices={'week'},
        help=
        'The method that will be used to aggregate tweets together [default: week]'
    )

    parser.set_defaults(func=main, which='aggregate_tweets')
Example #21
def main():
    parser = argparse.ArgumentParser('Download a Flickr Set')
    parser.add_argument('-k', '--api_key', type=str,
                        help='Flickr API key')
    parser.add_argument('-s', '--api_secret', type=str,
                        help='Flickr API secret')
    parser.add_argument('-t', '--user_auth', action='store_true',
                        help='Enable user authentication')
    parser.add_argument('-l', '--list', type=str, metavar='USER',
                        help='List photosets for a user')
    parser.add_argument('-d', '--download', type=str, metavar='SET_ID',
                        help='Download the given set')
    parser.add_argument('-u', '--download_user', type=str, metavar='USERNAME',
                        help='Download all sets for a given user')
    parser.add_argument('-q', '--quality', type=str, metavar='SIZE_LABEL',
                        default=None, help='Quality of the picture')
    parser.add_argument('-n', '--naming', type=str, metavar='NAMING_MODE',
                        default='title', help='Photo naming mode')
    parser.set_defaults(**_load_defaults())

    args = parser.parse_args()

    if not args.api_key or not args.api_secret:
        print ('You need to pass in both "api_key" and "api_secret" arguments', file=sys.stderr)
        return 1

    ret = _init(args.api_key, args.api_secret, args.user_auth)
    if not ret:
        return 1

    if args.list:
        print_sets(args.list)
    elif args.download or args.download_user:
        try:
            get_filename = get_filename_handler(args.naming)
            if args.download:
                download_set(args.download, get_filename, args.quality)
            else:
                download_user(args.download_user, get_filename, args.quality)
        except KeyboardInterrupt:
            print('Forcefully aborting. Last photo download might be partial :(', file=sys.stderr)
    else:
        print('ERROR: Must pass either --list or --download\n', file=sys.stderr)
        parser.print_help()
        return 1
Example #22
    def parse_args(self):
        parser = argparse.ArgumentParser(prog=self.prog)
        parser.add_argument("-q",
                            "--quiet",
                            action="store_true",
                            help="Be quiet on logging to stdout/stderr")
        parser.add_argument("--version",
                            action="store_const",
                            dest="verb",
                            const="version")
        parser.set_defaults(verb=None)
        subparsers = parser.add_subparsers()

        parser_backup = subparsers.add_parser("backup")
        parser_backup.set_defaults(verb="backup")

        parser_list = subparsers.add_parser("list")
        parser_list.set_defaults(verb="list")
        parser_list.add_argument("--before",
                                 dest="before",
                                 default=None,
                                 type=parse_simple_date)
        parser_list.add_argument("--after",
                                 dest="after",
                                 default=None,
                                 type=parse_simple_date)

        parser_restore = subparsers.add_parser("restore")
        parser_restore.set_defaults(verb="restore")
        parser_restore.add_argument("backup", metavar="BACKUPNAME", type=str)
        parser_restore.add_argument("backend", metavar="BACKENDNAME", type=str)
        parser_restore.add_argument("archive_spec", metavar="SPEC", type=str)
        parser_restore.add_argument("destination", metavar="DEST", type=str)

        parser_list_backups = subparsers.add_parser("list-configured-backups")
        parser_list_backups.set_defaults(verb="list-configured-backups")

        parser_list_backends = subparsers.add_parser("list-backends")
        parser_list_backends.set_defaults(verb="list-backends")

        parser_prune = subparsers.add_parser("prune")
        parser_prune.set_defaults(verb="prune")

        return parser.parse_args(self.argv)
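The namespace returned above always carries a verb attribute: --version sets it via store_const, each subparser sets it with set_defaults(verb=...), and the top-level set_defaults(verb=None) covers the no-command case. A hedged sketch of the dispatch step that typically follows (the run/do_* method names are illustrative, not from the source):

    def run(self):
        # Hypothetical dispatcher: map the resolved verb onto a handler method.
        args = self.parse_args()
        if args.verb is None:
            raise SystemExit('no command given')
        handler = getattr(self, 'do_' + args.verb.replace('-', '_'))
        return handler(args)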
Example #23
    def __init__(self, parser):
        super().__init__(parser)

        parser.add_argument(
            '--repository-uri',
            action=store_env_override,
            envvar='IMAGE_REPOSITORY_URI',
            description='image repository URI',
        )
        parser.add_argument(
            '--repository-name',
            action=store_env_override,
            envvar='IMAGE_REPOSITORY_NAME',
            description='image repository name',
            addendum='(default extended for staging to: …/staging)'
            if isinstance(self, EnvironmentMixin) else None,
        )
        parser.set_defaults(resolve_repository_name=functools.lru_cache()(
            self.resolve_repository_name), )
Example #24
def _setup():
    group = make_subcommand_group('rgw',
            help='low-level radosgw support')

    parser = group.add_parser('backup',
            help='back up radosgw bucket')
    parser.set_defaults(func=cmd_rgw_backup)
    parser.add_argument('bucket',
            help='bucket name')
    parser.add_argument('-A', '--scrub-acls', action='store_true',
            help='update ACLs for unmodified keys')
    parser.add_argument('-c', '--scrub', action='store_true',
            help='check backup data against original')

    parser = group.add_parser('restore',
            help='restore radosgw bucket')
    parser.set_defaults(func=cmd_rgw_restore)
    parser.add_argument('source', metavar='origin-bucket-or-path',
            help='origin bucket name or filesystem path')
    parser.add_argument('dest_bucket', metavar='dest-bucket',
            help='destination bucket for restore')
    parser.add_argument('-f', '--force', action='store_true',
            help='force restore to non-empty bucket')
Example #25
def main():

    def main_help(args):
        parser.print_help(sys.stderr)

    parser = argparse.ArgumentParser(
        description='CLI Tools',
        prog='tools.py')

    parser_user_parent = argparse.ArgumentParser()
    subparsers = parser.add_subparsers()

    parser_export_links = subparsers.add_parser(
        'export',
        description='Export as JSON'
    )
    parser_export_links.add_argument(
        '-u', '--user', type=str, required=True
    )
    parser_export_links.set_defaults(func=export_bookmarks)

    parser_import_links = subparsers.add_parser(
        'import',
        description='Import links from external sources')
    parser_import_links.add_argument(
        '-u', '--user', type=str, required=True)
    parser_import_links.add_argument('source', type=str)
    parser_import_links.set_defaults(func=import_bookmarks)

    create_user_parser = subparsers.add_parser('create_user', description='Create a user')
    create_user_parser.add_argument('-u', '--username', type=str, required=True)
    create_user_parser.add_argument('-p', '--password', type=str, required=True)
    create_user_parser.set_defaults(func=create_user)

    parser.set_defaults(func=main_help)
    args = parser.parse_args()
    args.func(args)
Example #26
def main():
    parser = argparse.ArgumentParser('Download a Flickr Set')
    parser.add_argument('-k', '--api_key', type=str,
                        help='Flickr API key')
    parser.add_argument('-s', '--api_secret', type=str,
                        help='Flickr API secret')
    parser.add_argument('-t', '--user_auth', action='store_true',
                        help='Enable user authentication')
    parser.add_argument('-l', '--list', type=str, metavar='USER',
                        help='List photosets for a user')
    parser.add_argument('-d', '--download', type=str, metavar='SET_ID',
                        help='Download the given set')
    parser.add_argument('-a', '--all_download', type=str, metavar='USER',
                        help='Download all sets for a user')
    parser.set_defaults(**_load_defaults())

    args = parser.parse_args()

    if not args.api_key or not args.api_secret:
        print('You need to pass in both "api_key" and "api_secret" arguments', file=sys.stderr)
        return 1

    ret = _init(args.api_key, args.api_secret, args.user_auth)
    if not ret:
        return 1

    if args.list:
        for photoset in get_photosets(args.list):
            print('{0} - {1}'.format(photoset.id, photoset.title))
    elif args.download:
        download_set(args.download)
    elif args.all_download:
        download_all_sets(args.all_download)
    else:
        print('ERROR: Must pass either --list or --download\n', file=sys.stderr)
        parser.print_help()
        return 1
Example #27
File: stage0.py  Project: Raalsky/kolejka
def config_parser(parser):
    parser.add_argument("task", type=str, help='task folder')
    parser.add_argument("result", type=str, help='result folder')
    parser.add_argument("--temp", type=str, help='temp folder')
    parser.add_argument(
        '--pull',
        action='store_true',
        help='always pull images, even if local version is present',
        default=False)
    parser.add_argument('--consume',
                        action='store_true',
                        default=False,
                        help='consume task folder')
    parser.add_argument('--cpus', type=int, help='cpus limit')
    parser.add_argument('--memory', action=MemoryAction, help='memory limit')
    parser.add_argument('--swap', action=MemoryAction, help='swap limit')
    parser.add_argument('--pids', type=int, help='pids limit')
    parser.add_argument('--storage', action=MemoryAction, help='storage limit')
    parser.add_argument('--image',
                        action=MemoryAction,
                        help='image size limit')
    parser.add_argument('--workspace',
                        action=MemoryAction,
                        help='workspace size limit')
    parser.add_argument('--time', action=TimeAction, help='time limit')
    parser.add_argument('--network', type=bool, help='allow networking')
    parser.add_argument('--gpus', type=int, help='gpus limit')

    def execute(args):
        kolejka_config(args=args)
        config = worker_config()
        stage0(args.task,
               args.result,
               temp_path=config.temp_path,
               consume_task_folder=args.consume)

    parser.set_defaults(execute=execute)
Example #28
def init():
	parser = argparse.ArgumentParser(description="Generates a dataset by compiling generated data properties using a certain dataset model")
	parser.add_argument('--model', type=str, default='matrix', help='The name of the dataset model to use. Defaults to matrix.')
	parser.add_argument('properties', type=str, default='openPrice,closePrice,gasPrice', help='A list of the names of the properties to use, separated by a comma.')
	parser.add_argument('targets', type=str, default='highPrice', help='A list of target property names, separated by a comma.')
	parser.add_argument('--start', type=str, default=None, help='The start date. YYYY-MM-DD-HH')
	parser.add_argument('--end', type=str, default=None, help='The end date. YYYY-MM-DD-HH')
	parser.add_argument('--filename', type=str, default=None, help='The target filename / dir to save the pickled dataset to. Defaults to "data/dataset.pickle"')
	parser.add_argument('--overwrite', dest='overwrite', action='store_true', help="If the filename already exists, overwrite it.")
	parser.add_argument('--ratio', type=str, default='1', help='On how many fragments to split the main dataset. For example, "1:2:3" will create three datasets with sizes proportional to what given.')
	parser.add_argument('--shuffle', dest='shuffle', action="store_true", help="Shuffle the generated dataset and labels.")
	parser.set_defaults(shuffle=False)
	parser.set_defaults(overwrite=False)

	args, _ = parser.parse_known_args()

	if len(_) != 0:
		raise ValueError("Provided flags %s cannot be understood." % str(_))

	if args.filename == None:
		filename = "data/dataset_" + str(args.start) + "-" + str(args.end) + ".pickle"
	else: filename = args.filename

	start = args.start
	end = args.end

	start = dateutil.parser.parse(start) if start is not None else None
	end = dateutil.parser.parse(end) if end is not None else None

	try:
		ratio = [int(x) for x in args.ratio.split(':')]
	except ValueError:
		print("Error while reading the given ratio. Did you format it in the correct way?")
		return

	run(args.model, args.properties.split(','), args.targets.split(','), filename, start=start, end=end, ratio=ratio, shuffle=args.shuffle, overwrite=args.overwrite)
Example #29
def cli_parser():
    parser = argparse.ArgumentParser()
    parser.add_argument(
        "--num-bands",
        type=int,
        default=4,
        choices=[4, 5],
        help="The desired number of bands (PSScene4Band or REOrthoTile)")
    parser.add_argument("--pipeline", type=str, help="JSON with instructions")
    parser.add_argument("--pipeline-uri",
                        type=str,
                        help="A URI to JSON with instructions")
    parser.add_argument("--planet-api-key",
                        type=str,
                        default=os.environ.get("PLANET_API_KEY", None))
    parser.add_argument(
        "--planet-api-uri",
        type=str,
        default="https://api.planet.com/data/v1/item-types/{}/items/{}/assets")
    parser.add_argument("--planet-id", type=str, help="Planet Image ID")
    parser.add_argument("--s3-bucket",
                        type=str,
                        default=os.environ.get("S3_BUCKET", "planet-data-hsi"))
    parser.add_argument("--s3-prefix",
                        type=str,
                        default=os.environ.get("S3_PREFIX",
                                               "planet-scene-cogs"))
    parser.add_argument("--stac-api-uri",
                        type=str,
                        default=os.environ.get("STAC_API_URI",
                                               "http://franklin:9090"))
    parser.add_argument("--temp-dir", type=str, default='/tmp')

    parser.add_argument('--no-download',
                        required=False,
                        dest='download',
                        action='store_false')
    parser.set_defaults(download=True)

    parser.add_argument('--no-upload',
                        required=False,
                        dest='upload',
                        action='store_false')
    parser.set_defaults(upload=True)

    parser.add_argument('--no-update',
                        required=False,
                        dest='update',
                        action='store_false')
    parser.set_defaults(update=True)

    return parser
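A short usage sketch for the parser above (it reuses the cli_parser() just defined; the --no-upload invocation is only an illustration): each --no-* option is a store_false flag aimed at the positive dest, and set_defaults keeps that dest True unless the negative flag appears.

args = cli_parser().parse_args(['--no-upload'])
print(args.download, args.upload, args.update)  # True False True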
Example #30
def main():
    parser = argparse.ArgumentParser(
        formatter_class=argparse.RawTextHelpFormatter,
        description='Downloads one or more Flickr photo sets.\n'
        '\n'
        'To use it you need to get your own Flickr API key here:\n'
        'https://www.flickr.com/services/api/misc.api_keys.html\n'
        '\n'
        'For more information see:\n'
        'https://github.com/beaufour/flickr-download\n'
        '\n'
        'You can store argument defaults in ' + CONFIG_FILE + '. API keys for example:\n'
        '  api_key: .....\n'
        '  api_secret: ...\n',
        epilog='examples:\n'
        '  list all sets for a user:\n'
        '  > {app} -k <api_key> -s <api_secret> -l beaufour\n'
        '\n'
        '  download a given set:\n'
        '  > {app} -k <api_key> -s <api_secret> -d 72157622764287329\n'
        '\n'
        '  download a given set, keeping duplicate names:\n'
        '  > {app} -k <api_key> -s <api_secret> -d 72157622764287329 -n title_increment\n'
        .format(app=sys.argv[0])
    )
    parser.add_argument('-k', '--api_key', type=str,
                        help='Flickr API key')
    parser.add_argument('-s', '--api_secret', type=str,
                        help='Flickr API secret')
    parser.add_argument('-t', '--user_auth', action='store_true',
                        help='Enable user authentication')
    parser.add_argument('-l', '--list', type=str, metavar='USER',
                        help='List photosets for a user')
    parser.add_argument('-d', '--download', type=str, metavar='SET_ID',
                        help='Download the given set')
    parser.add_argument('-p', '--download_user_photos', type=str, metavar='USERNAME',
                        help='Download all photos for a given user')
    parser.add_argument('-u', '--download_user', type=str, metavar='USERNAME',
                        help='Download all sets for a given user')
    parser.add_argument('-i', '--download_photo', type=str, metavar='PHOTO_ID',
                        help='Download one specific photo')
    parser.add_argument('-q', '--quality', type=str, metavar='SIZE_LABEL',
                        default=None, help='Quality of the picture')
    parser.add_argument('-n', '--naming', type=str, metavar='NAMING_MODE',
                        help='Photo naming mode')
    parser.add_argument('-m', '--list_naming', action='store_true',
                        help='List naming modes')
    parser.add_argument('-o', '--skip_download', action='store_true',
                        help='Skip the actual download of the photo')
    parser.add_argument('-j', '--save_json', action='store_true',
                        help='Save photo info like description and tags, one .json file per photo')
    parser.set_defaults(**_load_defaults())

    args = parser.parse_args()

    if args.list_naming:
        print(get_filename_handler_help())
        return 1

    if not args.api_key or not args.api_secret:
        print ('You need to pass in both "api_key" and "api_secret" arguments', file=sys.stderr)
        return 1

    ret = _init(args.api_key, args.api_secret, args.user_auth)
    if not ret:
        return 1

    # Replace stdout with a non-strict writer that replaces unknown characters instead of throwing
    # an exception. This "fixes" print issues on the standard Windows terminal, and when there is no
    # terminal at all.
    if sys.stdout.isatty():
        default_encoding = sys.stdout.encoding
    else:
        default_encoding = locale.getpreferredencoding()
    if default_encoding != 'utf-8':
        sys.stdout = codecs.getwriter(default_encoding)(sys.stdout, 'replace')

    if args.list:
        print_sets(args.list)
        return 0

    if args.skip_download:
        print('Will skip actual downloading of files')

    if args.save_json:
        print('Will save photo info in .json file with same basename as photo')

    if args.download or args.download_user or args.download_user_photos or args.download_photo:
        try:
            with Timer('total run'):
                get_filename = get_filename_handler(args.naming)
                if args.download:
                    download_set(args.download, get_filename, args.quality, args.skip_download,
                                 args.save_json)
                elif args.download_user:
                    download_user(args.download_user, get_filename, args.quality,
                                  args.skip_download, args.save_json)
                elif args.download_photo:
                    download_photo(args.download_photo, get_filename, args.quality,
                                   args.skip_download, args.save_json)
                else:
                    download_user_photos(args.download_user_photos, get_filename, args.quality,
                                         args.skip_download, args.save_json)
        except KeyboardInterrupt:
            print('Forcefully aborting. Last photo download might be partial :(', file=sys.stderr)
        return 0

    print('ERROR: Must pass either --list or --download\n', file=sys.stderr)
    parser.print_help()
    return 1
Example #31
def main():
    ## This boilerplate pattern is from
    ## http://stackoverflow.com/questions/3609852 
    ## We want defaults for the command line options to be fetched from the config file

    # Parse any conf_file specification
    # We make this parser with add_help=False so that
    # it doesn't parse -h and print help.
    conf_parser = argparse.ArgumentParser(
        description=__doc__, # printed with -h/--help
        # Don't mess with format of description
        formatter_class=argparse.RawDescriptionHelpFormatter,
        # Turn off help, so we print all options in response to -h
        add_help=False
        )
    conf_parser.add_argument("--config-file",
                             help="Specify config file", metavar="FILE", default=os.getenv('XDG_CONFIG_HOME', os.getenv('HOME', '~') + '/.config')+'/calendar.conf')
    conf_parser.add_argument("--config-section",
                             help="Specify config section; allows several caldav servers to be configured in the same config file",  default='default')
    args, remaining_argv = conf_parser.parse_known_args()

    config = {}
    try:
        with open(args.config_file) as config_file:
            config = json.load(config_file)
    except IOError:
        ## File not found
        logging.info("no config file found")
    except ValueError:
        logging.error("error in config file", exc_info=True)
        raise

    defaults = config.get(args.config_section, {})

    # Parse rest of arguments
    # Don't suppress add_help here so it will handle -h
    parser = argparse.ArgumentParser(
        # Inherit options from config_parser
        parents=[conf_parser]
        )
    parser.set_defaults(**defaults)

    ## Global options
    parser.add_argument("--nocaldav", help="Do not connect to CalDAV server, but read/write icalendar format from stdin/stdout", action="store_true")
    parser.add_argument("--icalendar", help="Read/write icalendar format from stdin/stdout", action="store_true")
    parser.add_argument("--timezone", help="Timezone to use")
    parser.add_argument('--language', help="language used", default="EN")
    parser.add_argument("--caldav-url", help="Full URL to the caldav server", metavar="URL")
    parser.add_argument("--caldav-user", help="username to log into the caldav server", metavar="USER")
    parser.add_argument("--caldav-pass", help="password to log into the caldav server", metavar="PASS")
    parser.add_argument("--debug-logging", help="turn on debug logging", action="store_true")

    ## TODO: check sys.argv[0] to find command
    ## TODO: set up logging
    subparsers = parser.add_subparsers(title='command')

    calendar_parser = subparsers.add_parser('calendar')
    calendar_parser.add_argument("--calendar-url", help="URL for calendar to be used (may be absolute or relative to caldav URL)")
    calendar_subparsers = calendar_parser.add_subparsers(title='subcommand')
    calendar_add_parser = calendar_subparsers.add_parser('add')
    calendar_add_parser.add_argument('event_time', help="Timestamp and duration of the event.  See the documentation for event_time specifications")
    calendar_add_parser.add_argument('description', nargs='+')
    calendar_add_parser.set_defaults(func=calendar_add)
    calendar_addics_parser = calendar_subparsers.add_parser('addics')
    calendar_addics_parser.add_argument('--file', help="ICS file to upload", default='-')
    calendar_addics_parser.set_defaults(func=calendar_addics)

    calendar_agenda_parser = calendar_subparsers.add_parser('agenda')
    calendar_agenda_parser.set_defaults(func=niy)
    todo_parser = subparsers.add_parser('todo')
    todo_parser.set_defaults(func=niy)
    args = parser.parse_args(remaining_argv)

    caldav_conn = None
    if not args.nocaldav:
        caldav_conn = caldav_connect(args)

    ret = args.func(caldav_conn, args)
Example #32
def create_argument_parser():
    parser = argparse.ArgumentParser(
        description='Extract cycle time analytics data from JIRA.')
    parser.add_argument('config',
                        metavar='config.yml',
                        help='Configuration file')
    parser.add_argument(
        'output',
        metavar='data.csv',
        nargs='?',
        help=
        'Output file. Contains all issues described by the configuration file, metadata, and dates of entry to each state in the cycle.'
    )
    parser.add_argument('-v',
                        dest='verbose',
                        action='store_true',
                        help='Verbose output')
    parser.add_argument('-n',
                        metavar='N',
                        dest='max_results',
                        type=int,
                        help='Only fetch N most recently updated issues',
                        default=500)
    parser.add_argument(
        '-b',
        dest='blankcredentials',
        action='store_true',
        help='Flag to set username and password to empty strings.')
    parser.add_argument(
        '--changelog',
        dest='changelog',
        action='store_true',
        help='Get issue history changelog. Default for all queries.')
    parser.add_argument(
        '--no-changelog',
        dest='changelog',
        action='store_false',
        help='DO NOT Get issue history changelog. Limit response size.')
    parser.set_defaults(changelog=True)
    parser.add_argument('--format',
                        metavar='csv|json|xlsx',
                        help="Output format for data (default CSV)")
    parser.add_argument(
        '--points',
        metavar="StoryPoints",
        help=
        "By default we use story count, now use given column and use Story Points size for analytics"
    )
    parser.add_argument(
        '--records',
        metavar='records.json',
        help="All the ouptut data for issues as JSON records instead of arrays."
    )
    parser.add_argument(
        '--cfd',
        metavar='cfd.csv',
        help=
        'Calculate data to draw a Cumulative Flow Diagram and write to file. Hint: Plot as a (non-stacked) area chart.'
    )
    parser.add_argument(
        '--scatterplot',
        metavar='scatterplot.csv',
        help=
        'Calculate data to draw a cycle time scatter plot and write to file. Hint: Plot as a scatter chart.'
    )
    parser.add_argument(
        '--histogram',
        metavar='histogram.csv',
        help=
        'Calculate data to draw a cycle time histogram and write to file. Hint: Plot as a column chart.'
    )
    parser.add_argument(
        '--throughput',
        metavar='throughput.csv',
        help=
        'Calculate daily throughput data and write to file. Hint: Plot as a column chart.'
    )
    parser.add_argument(
        '--percentiles',
        metavar='percentiles.csv',
        help='Calculate cycle time percentiles and write to file.')
    parser.add_argument(
        '--burnup-forecast',
        metavar='burnup_forecast.csv',
        help='Calculate forecasted dates percentiles and write to file.')
    parser.add_argument('--size-history',
                        metavar='size_history.csv',
                        help='Get Story Points history and write to file.')
    parser.add_argument(
        '--links',
        metavar='links_data.tsv',
        help='Write issue links and epic relationships to file.')

    parser.add_argument('--quantiles',
                        metavar='0.3,0.5,0.75,0.85,0.95',
                        help="Quantiles to use when calculating percentiles")
    parser.add_argument(
        '--backlog-column',
        metavar='<name>',
        help="Name of the backlog column. Defaults to the first column.")
    parser.add_argument(
        '--committed-column',
        metavar='<name>',
        help=
        "Name of the column from which work is considered committed. Defaults to the second column."
    )
    parser.add_argument(
        '--final-column',
        metavar='<name>',
        help=
        "Name of the final 'work' column. Defaults to the penultimate column.")
    parser.add_argument(
        '--done-column',
        metavar='<name>',
        help="Name of the 'done' column. Defaults to the last column.")
    parser.add_argument(
        '--throughput-window',
        metavar='60',
        type=int,
        default=60,
        help="How many days in the past to use for calculating throughput")
    parser.add_argument(
        '--throughput-window-end',
        metavar=datetime.date.today().isoformat(),
        help=
        "By default, the throughput window runs to today's date. Use this option to set an alternative end date for the window."
    )
    parser.add_argument(
        '--separator',
        metavar='tab|comma',
        help="Separator to be used when output format is csv (default tab)")

    if charting.HAVE_CHARTING:

        parser.add_argument(
            '--charts-from',
            metavar=(datetime.date.today() -
                     datetime.timedelta(days=30)).isoformat(),
            help="Limit time window when drawing charts to start from this date"
        )
        parser.add_argument(
            '--charts-to',
            metavar=datetime.date.today().isoformat(),
            help="Limit time window when drawing charts to end at this date")

        parser.add_argument('--charts-scatterplot',
                            metavar='scatterplot.png',
                            help="Draw cycle time scatter plot")
        parser.add_argument('--charts-scatterplot-title',
                            metavar='"Cycle time scatter plot"',
                            help="Title for cycle time scatter plot")

        parser.add_argument('--charts-histogram',
                            metavar='histogram.png',
                            help="Draw cycle time histogram")
        parser.add_argument('--charts-histogram-title',
                            metavar='"Cycle time histogram"',
                            help="Title for cycle time histogram")

        parser.add_argument('--charts-cfd',
                            metavar='cfd.png',
                            help="Draw Cumulative Flow Diagram")
        parser.add_argument('--charts-cfd-title',
                            metavar='"Cumulative Flow Diagram"',
                            help="Title for CFD")

        parser.add_argument(
            '--charts-throughput',
            metavar='throughput.png',
            help="Draw weekly throughput chart with trend line")
        parser.add_argument('--charts-throughput-title',
                            metavar='"Throughput trend"',
                            help="Title for throughput chart")

        parser.add_argument('--charts-burnup',
                            metavar='burnup.png',
                            help="Draw simple burn-up chart")
        parser.add_argument('--charts-burnup-title',
                            metavar='"Burn-up"',
                            help="Title for burn-up charts_scatterplot")

        parser.add_argument(
            '--charts-burnup-forecast',
            metavar='burnup-forecast.png',
            help=
            "Draw burn-up chart with Monte Carlo simulation forecast to completion"
        )
        parser.add_argument('--charts-burnup-forecast-title',
                            metavar='"Burn-up forecast"',
                            help="Title for burn-up forecast chart")
        parser.add_argument(
            '--charts-burnup-forecast-target',
            metavar='<num stories>',
            type=int,
            help=
            "Target completion scope for forecast. Defaults to current size of backlog."
        )
        parser.add_argument(
            '--charts-burnup-forecast-deadline',
            metavar=datetime.date.today().isoformat(),
            help=
            "Deadline date for completion of backlog. If set, it will be shown on the chart, and the forecast delta will also be shown."
        )
        parser.add_argument(
            '--charts-burnup-forecast-deadline-confidence',
            metavar=.85,
            type=float,
            help="Quantile to use when comparing deadline to forecast.")
        parser.add_argument(
            '--charts-burnup-forecast-trials',
            metavar='100',
            type=int,
            default=100,
            help="Number of iterations in Monte Carlo simulation.")

        parser.add_argument('--charts-wip',
                            metavar='wip',
                            help="Draw weekly WIP box plot")
        parser.add_argument('--charts-wip-title',
                            metavar='"Weekly WIP"',
                            help="Title for WIP chart")
        parser.add_argument(
            '--charts-wip-window',
            metavar='6',
            default=6,
            type=int,
            help=
            "Number of weeks in the past for which to draw weekly WIP chart")

        parser.add_argument('--charts-ageing-wip',
                            metavar='ageing-wip.png',
                            help="Draw current ageing WIP chart")
        parser.add_argument('--charts-ageing-wip-title',
                            metavar='"Ageing WIP"',
                            help="Title for ageing WIP chart")

        parser.add_argument('--charts-net-flow',
                            metavar='net-flow.png',
                            help="Draw weekly net flow bar chart")
        parser.add_argument('--charts-net-flow-title',
                            metavar='"Net flow"',
                            help="Title for net flow bar chart`")
        parser.add_argument(
            '--charts-net-flow-window',
            metavar='6',
            default=6,
            type=int,
            help="Number of weeks in the past for which to draw net flow chart"
        )
    return parser
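
A quick note on the pattern above: metavar only changes how the option's value is rendered in the generated --help text, so these examples use sample values such as cfd.png or "Cumulative Flow Diagram" as inline documentation. A minimal, self-contained sketch of that technique (the option names below are hypothetical, not taken from the example above):

import argparse

parser = argparse.ArgumentParser(prog='charts-demo')
# metavar is purely cosmetic: it is what --help prints in place of the value
parser.add_argument('--cfd', metavar='cfd.png',
                    help="Draw Cumulative Flow Diagram")
parser.add_argument('--wip-window', metavar='6', type=int, default=6,
                    help="Number of weeks of history to include")
parser.print_help()
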
Example #33
def main(args):
    """
    :type args: list
    """
    parser = ArgumentParser(
        description="Search for python packages using better metrics",
        formatter_class=ArgumentDefaultsHelpFormatter)
    parser.add_argument("search_term",
                        type=str,
                        help="The search term or phrase to query")
    parser.add_argument(
        "-S",
        "--disable-stats",
        dest="collect_stats",
        action="store_false",
        help="Disable extra stats collection (i.e. revert to old behavior)")
    parser.add_argument(
        "-s",
        "--enable-stats",
        dest="collect_stats",
        action="store_true",
        help="Enable extra stats collection (i.e. the default)")
    parser.set_defaults(collect_stats=True)
    parser.add_argument(
        "-B",
        "--disable-backup-search",
        dest="backup_search",
        action="store_false",
        help="Disable backup search for last update (i.e. the default)")
    parser.add_argument(
        "-b",
        "--enable-backup-search",
        dest="backup_search",
        action="store_true",
        help="Enable backup search for last update (can be slow!)")
    parser.set_defaults(backup_search=False)
    parser.add_argument(
        "-d",
        "--max-age-days",
        dest="max_age_days",
        type=float,
        help=
        "Max days to consider recent when downloading already-existing files")
    parser.set_defaults(max_age_days=0.5)
    parser.add_argument(
        "-p",
        "--path-to-aria2c",
        dest="aria2c_path",
        type=str,
        help="The path to aria2c(.exe) if not in current PATH environment")
    parser.set_defaults(aria2c_path=None)
    argcomplete.autocomplete(parser)
    parser_ns = parser.parse_args(args)

    out_obj = OutputFile(parser_ns.search_term)
    if out_obj.age < parser_ns.max_age_days:
        with open(out_obj.path, 'r') as f:
            csv_lines = f.read().splitlines()
        packages = [
            PypiSearchResult.from_csv(line, ref_date=out_obj.ref_date)
            for line in csv_lines
        ]
    else:
        packages = search_packages(parser_ns.search_term,
                                   parser_ns.collect_stats,
                                   parser_ns.backup_search,
                                   parser_ns.max_age_days,
                                   parser_ns.aria2c_path)
        packages.sort()
        logging.info("Saving CSV entries to %s", out_obj.path)
        with open(out_obj.path, "w") as f:
            for package in packages:
                f.write(package.to_csv())
                f.write(os.linesep)
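
Example #33 above pairs -S/--disable-stats with -s/--enable-stats (and -B/-b) by pointing both options at the same dest and then fixing the default with set_defaults(). A minimal sketch of that pattern, using a hypothetical --enable-cache/--disable-cache pair:

import argparse

parser = argparse.ArgumentParser(description="paired on/off flags")
# both options write to the same dest; set_defaults() decides which value wins
# when neither flag is given
parser.add_argument("--enable-cache", dest="cache", action="store_true",
                    help="turn the cache on (the default)")
parser.add_argument("--disable-cache", dest="cache", action="store_false",
                    help="turn the cache off")
parser.set_defaults(cache=True)

print(parser.parse_args([]).cache)                   # True
print(parser.parse_args(["--disable-cache"]).cache)  # False
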
Example #34
def main():
    parser = argparse.ArgumentParser(
        formatter_class=argparse.RawTextHelpFormatter,
        description='Downloads one or more Flickr photo sets.\n'
        '\n'
        'To use it you need to get your own Flickr API key here:\n'
        'https://www.flickr.com/services/api/misc.api_keys.html\n'
        '\n'
        'For more information see:\n'
        'https://github.com/beaufour/flickr-download',
        epilog='examples:\n'
        '  list all sets for a user:\n'
        '  > {app} -k <api_key> -s <api_secret> -l beaufour\n'
        '\n'
        '  download a given set:\n'
        '  > {app} -k <api_key> -s <api_secret> -d 72157622764287329\n'
        '\n'
        '  download a given set, keeping duplicate names:\n'
        '  > {app} -k <api_key> -s <api_secret> -d 72157622764287329 -n title_increment\n'
        .format(app=sys.argv[0])
    )
    parser.add_argument('-k', '--api_key', type=str,
                        help='Flickr API key')
    parser.add_argument('-s', '--api_secret', type=str,
                        help='Flickr API secret')
    parser.add_argument('-t', '--user_auth', action='store_true',
                        help='Enable user authentication')
    parser.add_argument('-l', '--list', type=str, metavar='USER',
                        help='List photosets for a user')
    parser.add_argument('-d', '--download', type=str, metavar='SET_ID',
                        help='Download the given set')
    parser.add_argument('-u', '--download_user', type=str, metavar='USERNAME',
                        help='Download all sets for a given user')
    parser.add_argument('-q', '--quality', type=str, metavar='SIZE_LABEL',
                        default=None, help='Quality of the picture')
    parser.add_argument('-n', '--naming', type=str, metavar='NAMING_MODE',
                        help='Photo naming mode')
    parser.add_argument('-m', '--list_naming', action='store_true',
                        help='List naming modes')
    parser.set_defaults(**_load_defaults())

    args = parser.parse_args()

    if args.list_naming:
        print(get_filename_handler_help())
        return 1

    if not args.api_key or not args.api_secret:
        print('You need to pass in both "api_key" and "api_secret" arguments', file=sys.stderr)
        return 1

    ret = _init(args.api_key, args.api_secret, args.user_auth)
    if not ret:
        return 1

    if args.list:
        print_sets(args.list)
        return 0

    if args.download or args.download_user:
        try:
            get_filename = get_filename_handler(args.naming)
            if args.download:
                download_set(args.download, get_filename, args.quality)
            else:
                download_user(args.download_user, get_filename, args.quality)
        except KeyboardInterrupt:
            print('Forcefully aborting. Last photo download might be partial :(', file=sys.stderr)
        return 0

    print('ERROR: Must pass either --list or --download\n', file=sys.stderr)
    parser.print_help()
    return 1
Example #35
    def __init__(self, store, command_name="cd",
                 disabled_commands=set(),
                 date_formatter=BabelDateFormatter(),
                 default_timezone=None,
                 **kwargs):
        super().__init__(command_name, **kwargs)
        self.store = store
        self.date_formatter = date_formatter

        subparsers = self.argparse.add_subparsers(
            dest="action",
            help="Choose the action to execute"
        )

        self.disabled_commands = disabled_commands

        if self.CMD_ADD not in disabled_commands:
            # add command
            parser = subparsers.add_parser(
                "add",
                help="Store a new event in the system")
            parser.add_argument(
                "name",
                help="Descriptive name of the event to store")
            parser.add_argument(
                "target_date",
                help="Date of the event")
            parser.add_argument(
                "timezone",
                nargs="?",
                default=default_timezone,
                type=pytz.timezone,
                help="Name of the timezone (e.g. Europe/Berlin). Overrides any"
                " timezone given in the target_date string.")
            parser.set_defaults(
                func=self._cmd_add)

        if self.CMD_MOVE not in disabled_commands:
            # move command
            parser = subparsers.add_parser(
                "move",
                help="Rename an event",
                aliases={"mv", "rename"})
            parser.add_argument("oldname")
            parser.add_argument("newname")
            parser.set_defaults(
                func=self._cmd_rename)

        if self.CMD_DELETE not in disabled_commands:
            # delete command
            parser = subparsers.add_parser(
                "rm",
                help="Remove an event",
                aliases={"delete", "remove"})
            parser.add_argument(
                "names",
                nargs="+",
                help="Name of the information to remove")
            parser.set_defaults(
                func=self._cmd_delete)

        if self.CMD_SAVE not in disabled_commands:
            parser = subparsers.add_parser(
                "save",
                help="Save all data stored in the eventstore.")
            parser.set_defaults(
                func=self._cmd_save)

        if self.CMD_STATS not in disabled_commands:
            parser = subparsers.add_parser(
                "stats",
                help="Print memory usage statistics.")
            parser.set_defaults(
                func=self._cmd_stats)
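
Example #35 wires each subcommand to its handler via set_defaults(func=...) on the subparser, so the caller only has to invoke args.func(...) after parsing. A minimal sketch of that dispatch pattern (the handlers here are hypothetical stand-ins):

import argparse

parser = argparse.ArgumentParser(prog="events")
subparsers = parser.add_subparsers(dest="action")

add_parser = subparsers.add_parser("add", help="store a new event")
add_parser.add_argument("name")
add_parser.set_defaults(func=lambda args: print("adding", args.name))

rm_parser = subparsers.add_parser("rm", help="remove an event", aliases=["delete"])
rm_parser.add_argument("name")
rm_parser.set_defaults(func=lambda args: print("removing", args.name))

args = parser.parse_args(["add", "meeting"])
args.func(args)  # calls the handler chosen by set_defaults on the selected subparser
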
Example #36
		plt.title('Target vs Predicted on %s' % name)
		plt.legend(loc='upper left')
	if not save:
		plt.show()
	else:
		filename = "data/results/%s_%s.svg" % (str(dt.now()), setType)
		plt.savefig(filename, dpi = 1500)
		print("Saved accuracy graph at %s." % filename)

if __name__ == "__main__": #if this is the main file, parse the command args
	parser = argparse.ArgumentParser(description="Module that loads given datasets and trains and evaluates one or more neural network models on that.")
	parser.add_argument('dataset', type=str, help="The filepath to the dataset/s.")
	parser.add_argument('--models', type=str, help="A list of the models that are going to be trained and evaluated. Default is all available.")
	parser.add_argument('--args', type=str, help="A list of arguments to be passed on to the models. In the format key1=value1,key2=value2.1;value2.2")
	parser.add_argument('--quiet', dest='quiet', action="store_true", help="Do not plot graphs, but save them as images.")
	parser.set_defaults(quiet=False)
	parser.add_argument('--shuffle', dest='shuffle', action="store_true", help="Shuffle the generated dataset and labels.")
	parser.set_defaults(shuffle=False)
	parser.add_argument('--trim-batch', dest='trim', action="store_true", help="Trim each dataset so that its length is divisible by the batch size.")
	parser.set_defaults(trim=False)

	args, _ = parser.parse_known_args()

	givenModels = args.models.split(',') if args.models else None

	modelArgs = {}
	pairs = args.args.split(',')
	for pair in pairs:
		key, value = pair.split('=')

		try:
Example #37
        'A list of the names of the properties to generate, separated by a comma.'
    )
    parser.add_argument('--start',
                        type=str,
                        default=None,
                        help='The start date. YYYY-MM-DD-HH')
    parser.add_argument('--end',
                        type=str,
                        default=None,
                        help='The end date. YYYY-MM-DD-HH')
    parser.add_argument(
        '--list',
        dest='list',
        action="store_true",
        help="List the available properties that can be generated.")
    parser.set_defaults(list=False)
    parser.add_argument('--relative',
                        dest='relative',
                        action="store_true",
                        help="Generate the properties with relative values.")
    parser.set_defaults(relative=False)

    args, _ = parser.parse_known_args()

    start = dateutil.parser.parse(
        args.start) if args.start is not None else None
    end = dateutil.parser.parse(args.end) if args.end is not None else None

    properties = args.properties.split(
        ',') if args.properties is not None else None
Example #38
def main():
    param0 = ["-y", "station", "-q", "format=text", "-q", "level=network"]
    param1 = ["-y", "station", "-q", "format=text", "-q", "level=channel"]
    param2 = ["-y", "dataselect", "-z"]
    times = {"starttime": datetime.datetime(1900, 1, 1), "endtime": datetime.datetime(2100, 1, 1)}
    nets = set()

    def add_param0(option, opt_str, value, parser):
        param0.append(opt_str)
        param0.append(value)

    def add_param1(option, opt_str, value, parser):
        param1.append(opt_str)
        param1.append(value)

    def add_param2(option, opt_str, value, parser):
        param2.append(opt_str)
        param2.append(value)

    def add_param(option, opt_str, value, parser):
        add_param0(option, opt_str, value, parser)
        add_param1(option, opt_str, value, parser)
        add_param2(option, opt_str, value, parser)

    def add_time(option, opt_str, value, parser):
        add_param1(option, opt_str, value, parser)

        try:
            t = dateutil.parser.parse(value)

        except ValueError as e:
            raise optparse.OptionValueError("option '%s': invalid time value: '%s'" % (opt_str, value))

        if t.tzinfo is not None:
            t = t.astimezone(dateutil.tz.tzutc()).replace(tzinfo=None)

        times[option.dest] = t

    parser = optparse.OptionParser(
            usage="Usage: %prog [-h|--help] [OPTIONS] -o directory",
            version="%prog " + VERSION)

    parser.set_defaults(
            url="http://geofon.gfz-potsdam.de/eidaws/routing/1/",
            timeout=600,
            retries=10,
            retry_wait=60,
            threads=5,
            max_lines=1000,
            max_timespan=1440)

    parser.add_option("-v", "--verbose", action="store_true", default=False,
                      help="verbose mode")

    parser.add_option("-u", "--url", type="string", action="callback",
                      callback=add_param,
                      help="URL of routing service (default %default)")

    parser.add_option("-N", "--network", type="string", action="callback",
                      callback=add_param1,
                      help="network code or pattern")

    parser.add_option("-S", "--station", type="string", action="callback",
                      callback=add_param1,
                      help="station code or pattern")

    parser.add_option("-L", "--location", type="string", action="callback",
                      callback=add_param1,
                      help="location code or pattern")

    parser.add_option("-C", "--channel", type="string", action="callback",
                      callback=add_param1,
                      help="channel code or pattern")

    parser.add_option("-s", "--starttime", type="string", action="callback",
                      callback=add_time,
                      help="start time")

    parser.add_option("-e", "--endtime", type="string", action="callback",
                      callback=add_time,
                      help="end time")

    parser.add_option("-t", "--timeout", type="int", action="callback",
                      callback=add_param,
                      help="request timeout in seconds (default %default)")

    parser.add_option("-r", "--retries", type="int", action="callback",
                      callback=add_param,
                      help="number of retries (default %default)")

    parser.add_option("-w", "--retry-wait", type="int", action="callback",
                      callback=add_param,
                      help="seconds to wait before each retry (default %default)")

    parser.add_option("-n", "--threads", type="int", action="callback",
                      callback=add_param,
                      help="maximum number of download threads (default %default)")

    parser.add_option("-c", "--credentials-file", type="string", action="callback",
                      callback=add_param2,
                      help="URL,user,password file (CSV format) for queryauth")

    parser.add_option("-a", "--auth-file", type="string", action="callback",
                      callback=add_param2,
                      help="file that contains the auth token")

    parser.add_option("-o", "--output-dir", type="string",
                      help="SDS directory where downloaded data is written")

    parser.add_option("-l", "--max-lines", type="int",
                      help="max lines per request (default %default)")

    parser.add_option("-m", "--max-timespan", type="int",
                      help="max timespan per request in minutes (default %default)")

    parser.add_option("-z", "--no-citation", action="store_true", default=False,
                      help="suppress network citation info")

    parser.add_option("-Z", "--no-check", action="store_true", default=False,
                      help="suppress checking received routes and data")

    (options, args) = parser.parse_args()

    if args or not options.output_dir:
        parser.print_usage(sys.stderr)
        return 1

    def log_alert(s):
        if sys.stderr.isatty():
            s = "\033[31m" + s + "\033[m"

        sys.stderr.write(s + '\n')
        sys.stderr.flush()

    def log_notice(s):
        if sys.stderr.isatty():
            s = "\033[32m" + s + "\033[m"

        sys.stderr.write(s + '\n')
        sys.stderr.flush()

    def log_verbose(s):
        sys.stderr.write(s + '\n')
        sys.stderr.flush()

    def log_silent(s):
        pass

    logs.error = log_alert
    logs.warning = log_alert
    logs.notice = log_notice
    logs.info = (log_silent, log_verbose)[options.verbose]
    logs.debug = log_silent

    try:
        try:
            proc = exec_fetch(param1, None, options.verbose, options.no_check)

        except OSError as e:
            logs.error(str(e))
            logs.error("error running fdsnws_fetch")
            return 1

        timespan = {}

        for line in proc.stdout:
            if isinstance(line, bytes):
                line = line.decode('utf-8')

            if not line or line.startswith('#'):
                continue

            starttime = max(dateutil.parser.parse(line.split('|')[15]), times['starttime'])

            try:
                endtime = min(dateutil.parser.parse(line.split('|')[16]), times['endtime'])

            except ValueError:
                # dateutil.parser.parse('') now causes ValueError instead of current time
                endtime = min(datetime.datetime.now(), times['endtime'])

            if starttime.tzinfo is not None:
                starttime = starttime.astimezone(dateutil.tz.tzutc()).replace(tzinfo=None)

            if endtime.tzinfo is not None:
                endtime = endtime.astimezone(dateutil.tz.tzutc()).replace(tzinfo=None)

            try:
                ts = timespan[tuple(line.split('|')[:4])]

                if ts.start > starttime:
                    ts.start = starttime
                    ts.current = starttime

                if ts.end < endtime:
                    ts.end = endtime

            except KeyError:
                timespan[tuple(line.split('|')[:4])] = Timespan(starttime, endtime)

        proc.stdout.close()
        proc.wait()

        if proc.returncode != 0:
            logs.error("error running fdsnws_fetch")
            return 1

        if os.path.exists(options.output_dir):
            scan_sds(options.output_dir, timespan, nets)

        while len(timespan) > 0:
            postdata = ""

            # random.sample() requires a sequence, so materialize the dict items first
            ts_used = random.sample(list(timespan.items()), min(len(timespan), options.max_lines))

            for ((net, sta, loc, cha), ts) in ts_used:
                te = min(ts.end, ts.start + datetime.timedelta(minutes=options.max_timespan))

                if loc == '':
                    loc = '--'

                postdata += "%s %s %s %s %sZ %sZ\n" \
                            % (net, sta, loc, cha, ts.start.isoformat(), te.isoformat())

            if not isinstance(postdata, bytes):
                postdata = postdata.encode('utf-8')

            try:
                proc = exec_fetch(param2, postdata, options.verbose, options.no_check)

            except OSError as e:
                logs.error(str(e))
                logs.error("error running fdsnws_fetch")
                return 1

            got_data = False

            try:
                for rec in mseedlite.Input(proc.stdout):
                    try:
                        ts = timespan[(rec.net, rec.sta, rec.loc, rec.cha)]

                    except KeyError:
                        logs.warning("unexpected data: %s.%s.%s.%s" % (rec.net, rec.sta, rec.loc, rec.cha))
                        continue

                    if rec.end_time <= ts.current:
                        continue

                    sds_dir = "%s/%d/%s/%s/%s.D" \
                              % (options.output_dir, rec.begin_time.year, rec.net, rec.sta, rec.cha)

                    sds_file = "%s.%s.%s.%s.D.%s" \
                              % (rec.net, rec.sta, rec.loc, rec.cha, rec.begin_time.strftime('%Y.%j'))

                    if not os.path.exists(sds_dir):
                        os.makedirs(sds_dir)

                    with open(sds_dir + '/' + sds_file, 'ab') as fd:
                        fd.write(rec.header + rec.data)

                    ts.current = rec.end_time
                    nets.add((rec.net, rec.begin_time.year))
                    got_data = True

            except mseedlite.MSeedError as e:
                logs.error(str(e))

            proc.stdout.close()
            proc.wait()

            if proc.returncode != 0:
                logs.error("error running fdsnws_fetch")
                return 1

            for ((net, sta, loc, cha), ts) in ts_used:
                if not got_data:
                    # no progress, skip to next segment
                    ts.start += datetime.timedelta(minutes=options.max_timespan)

                else:
                    # continue from current position
                    ts.start = ts.current

                if ts.start >= ts.end:
                    # timespan completed
                    del timespan[(net, sta, loc, cha)]

        if nets and not options.no_citation:
            logs.info("retrieving network citation info")
            get_citation(nets, param0, options.verbose)

    except (IOError, Error) as e:
        logs.error(str(e))
        return 1

    return 0
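
Example #38 relies on optparse callback actions to copy each raw option string and value into the param0/param1/param2 lists that are later handed to fdsnws_fetch. A minimal sketch of that forwarding technique (the option below is hypothetical):

import optparse

forwarded = []

def forward(option, opt_str, value, parser):
    # callbacks receive the raw option string and its parsed value,
    # so they can be collected and replayed verbatim for another tool
    forwarded.extend([opt_str, value])

parser = optparse.OptionParser()
parser.add_option("-N", "--network", type="string", action="callback",
                  callback=forward, help="network code or pattern")
options, args = parser.parse_args(["--network", "GE"])
print(forwarded)  # ['--network', 'GE']
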
Example #39
def main():
    """
    the main function does (almost) nothing but parsing command line parameters
    """
    ## This boilerplate pattern is from
    ## http://stackoverflow.com/questions/3609852
    ## We want defaults for the command line options to be fetched from the config file

    # Parse any conf_file specification
    # We make this parser with add_help=False so that
    # it doesn't parse -h and print help.
    conf_parser = argparse.ArgumentParser(
        prog=__product__,
        description=__doc__, # printed with -h/--help
        # Don't mess with format of description
        formatter_class=argparse.RawDescriptionHelpFormatter,
        # Turn off help, so we print all options in response to -h
        add_help=False
        )
    conf_parser.add_argument("--config-file",
                             help="Specify config file", metavar="FILE", default=os.getenv('XDG_CONFIG_HOME', os.getenv('HOME', '~') + '/.config')+'/calendar.conf')
    conf_parser.add_argument("--config-section",
                             help="Specify config section; allows several caldav servers to be configured in the same config file",  default='default')
    conf_parser.add_argument("--interactive-config",
                             help="Interactively ask for configuration", action="store_true")
    args, remaining_argv = conf_parser.parse_known_args()
    conf_parser.add_argument("--version", action='version', version='%%(prog)s %s' % __version__)

    config = {}

    try:
        with open(args.config_file) as config_file:
            config = json.load(config_file)
    except IOError:
        ## File not found
        logging.info("no config file found")
    except ValueError:
        if args.interactive_config:
            logging.error("error in config file.  Be aware that the current config file will be ignored and overwritten", exc_info=True)
        else:
            logging.error("error in config file.  You may want to run --interactive-config or fix the config file", exc_info=True)

    defaults = {}  # make sure 'defaults' is defined even when --interactive-config is used
    if args.interactive_config:
        config = interactive_config(args, config, remaining_argv)
        if not remaining_argv:
            return
    else:
        defaults = config_section(config, args.config_section)
        if 'ssl_verify_cert' not in defaults:
            defaults['ssl_verify_cert'] = 'yes'
        if 'language' not in defaults:
            ## TODO: shouldn't this be lower case?
            defaults['language'] = 'EN'

    # Parse rest of arguments
    # Don't suppress add_help here so it will handle -h
    parser = argparse.ArgumentParser(
        description=__doc__,
        prog=__product__,
        # Inherit options from config_parser
        parents=[conf_parser]
        )
    parser.set_defaults(**defaults)

    ## Global options
    parser.add_argument("--nocaldav", help="Do not connect to CalDAV server, but read/write icalendar format from stdin/stdout", action="store_true")
    parser.add_argument("--icalendar", help="Read/write icalendar format from stdin/stdout", action="store_true")
    parser.add_argument("--timezone", help="Timezone to use")
    parser.add_argument('--language', help="language used")
    parser.add_argument("--caldav-url", help="Full URL to the caldav server", metavar="URL")
    parser.add_argument("--caldav-user", help="username to log into the caldav server", metavar="USER")
    parser.add_argument("--caldav-pass", help="password to log into the caldav server", metavar="PASS")
    parser.add_argument("--caldav-proxy", help="HTTP proxy server to use (if any)")
    parser.add_argument("--ssl-verify-cert", help="verification of the SSL cert - 'yes' to use the OS-provided CA-bundle, 'no' to trust any cert and the path to a CA-bundle")
    parser.add_argument("--debug-logging", help="turn on debug logging", action="store_true")
    parser.add_argument("--calendar-url", help="URL for calendar to be used (may be absolute or relative to caldav URL, or just the name of the calendar)")
    parser.add_argument("--ignoremethod", help="Ignores METHOD property if exists in the request. This violates RFC4791 but is sometimes appended by some calendar servers", action="store_true")

    ## TODO: check sys.argv[0] to find command
    ## TODO: set up logging
    subparsers = parser.add_subparsers(title='command')

    ## Tasks
    todo_parser = subparsers.add_parser('todo')
    todo_parser.add_argument('--top', '-1', action='count', default=0)
    todo_parser.add_argument('--offset', action='count', default=0)
    todo_parser.add_argument('--offsetn', type=int, default=0)
    todo_parser.add_argument('--limit', type=int, default=0)
    todo_parser.add_argument('--todo-uid')
    todo_parser.add_argument('--hide-parents', help='Hide the parent if you need to work on children tasks first (parent task depends on children tasks to be done first)', action='store_true')
    todo_parser.add_argument('--hide-children', help='Hide the children if you need to work on the parent task first', action='store_true')
    todo_parser.add_argument('--overdue', help='Only show overdue tasks', action='store_true')
    todo_parser.add_argument('--hide-future', help='Hide events with future dtstart', action='store_true')

    for attr in vtodo_txt_one + vtodo_txt_many:
        todo_parser.add_argument('--'+attr, help="for filtering tasks")

    for attr in vtodo_txt_one + vtodo_txt_many:
        todo_parser.add_argument('--no'+attr, help="for filtering tasks", action='store_true')

    #todo_parser.add_argument('--priority', ....)
    #todo_parser.add_argument('--sort-by', ....)
    #todo_parser.add_argument('--due-before', ....)
    todo_subparsers = todo_parser.add_subparsers(title='tasks subcommand')
    todo_add_parser = todo_subparsers.add_parser('add')
    todo_add_parser.add_argument('summaryline', nargs='+')
    todo_add_parser.add_argument('--set-due', default=date.today()+timedelta(365))
    todo_add_parser.add_argument('--set-dtstart', default=date.today()+timedelta(1))
    todo_add_parser.add_argument('--is-child', help="the new task is a child-task of the selected task(s)", action='store_true')
    for attr in vtodo_txt_one + vtodo_txt_many:
        if attr != 'summary':
            todo_add_parser.add_argument('--set-'+attr, help="Set "+attr)
    # TODO: we probably want to be able to set or delete alarms in other situations, yes?  generalize?
    todo_add_parser.add_argument('--alarm', metavar='DURATION_BEFORE',
        help="specifies a time at which a reminder should be presented for this task, " \
             "relative to the start time of the task (as a timestamp delta)")
    todo_add_parser.set_defaults(func=todo_add)

    todo_list_parser = todo_subparsers.add_parser('list')
    todo_list_parser.add_argument('--todo-template', help="Template for printing out the event", default="{dtstart}{dtstart_passed_mark} {due}{due_passed_mark} {summary}")
    todo_list_parser.add_argument('--default-due', help="Default number of days from when a task is submitted until it's considered due", type=int, default=365)
    todo_list_parser.add_argument('--list-categories', help="Instead of listing the todo-items, list the unique categories used", action='store_true')
    todo_list_parser.add_argument('--timestamp-format', help="strftime-style format string for the output timestamps", default="%Y-%m-%d (%a)")
    todo_list_parser.set_defaults(func=todo_list)

    todo_edit_parser = todo_subparsers.add_parser('edit')
    for attr in vtodo_txt_one + vtodo_txt_many:
        todo_edit_parser.add_argument('--set-'+attr, help="Set "+attr)
    for attr in vtodo_txt_many:
        todo_edit_parser.add_argument('--add-'+attr, help="Add an "+attr)
    todo_edit_parser.add_argument('--pdb', help='Allow interactive edit through the python debugger', action='store_true')
    todo_edit_parser.set_defaults(func=todo_edit)

    todo_postpone_parser = todo_subparsers.add_parser('postpone')
    todo_postpone_parser.add_argument('until', help="either a new date, +interval to add some interval to the existing time, or e.g. 'in 3d' to set the time relative to the current time.  An interval is a number postfixed with a one-character unit (any of smhdwy).  If the todo-item has a dtstart, that field will be modified, otherwise the due timestamp will be modified.  If both timestamps exist and dtstart would be moved beyond the due time, the due time is moved to the new dtstart.")
    todo_postpone_parser.add_argument('--due', help="move the due, not the dtstart", action='store_true')
    todo_postpone_parser.set_defaults(func=todo_postpone)

    todo_complete_parser = todo_subparsers.add_parser('complete')
    todo_complete_parser.set_defaults(func=todo_complete)

    todo_delete_parser = todo_subparsers.add_parser('delete')
    todo_delete_parser.set_defaults(func=todo_delete)

    ## journal
    journal_parser = subparsers.add_parser('journal')
    journal_subparsers = journal_parser.add_subparsers(title='journal subcommand')
    journal_add_parser = journal_subparsers.add_parser('add')
    journal_add_parser.add_argument('summaryline', nargs='+')
    journal_add_parser.set_defaults(func=journal_add)

    calendar_parser = subparsers.add_parser('calendar')
    calendar_subparsers = calendar_parser.add_subparsers(title='cal subcommand')
    calendar_add_parser = calendar_subparsers.add_parser('add')
    calendar_add_parser.add_argument('event_time', help="Timestamp and duration of the event.  See the documentation for event_time specifications")
    calendar_add_parser.add_argument('summary', nargs='+')
    calendar_add_parser.set_defaults(func=calendar_add)
    calendar_add_parser.add_argument('--whole-day', help='Whole-day event', action='store_true', default=False)

    for attr in vcal_txt_one + vcal_txt_many:
        calendar_add_parser.add_argument('--set-'+attr, help='Set '+attr)

    calendar_addics_parser = calendar_subparsers.add_parser('addics')
    calendar_addics_parser.add_argument('--file', help="ICS file to upload", default='-')
    calendar_addics_parser.set_defaults(func=calendar_addics)

    calendar_agenda_parser = calendar_subparsers.add_parser('agenda')
    calendar_agenda_parser.add_argument('--from-time', help="Fetch calendar events from this timestamp.  See the documentation for time specifications.  Defaults to now")
    calendar_agenda_parser.add_argument('--to-time', help="Fetch calendar until this timestamp")
    calendar_agenda_parser.add_argument('--agenda-mins', help="Fetch calendar for so many minutes", type=int)
    calendar_agenda_parser.add_argument('--agenda-days', help="Fetch calendar for so many days", type=int, default=7)
    calendar_agenda_parser.add_argument('--event-template', help="Template for printing out the event", default="{dstart} {summary}")
    calendar_agenda_parser.add_argument('--timestamp-format', help="strftime-style format string for the output timestamps", default="%Y-%m-%d %H:%M (%a)")
    calendar_agenda_parser.set_defaults(func=calendar_agenda)

    calendar_delete_parser = calendar_subparsers.add_parser('delete')
    calendar_delete_parser.add_argument('--event-uid')
    calendar_delete_parser.add_argument('--event-url')
    calendar_delete_parser.add_argument('--event-timestamp')
    calendar_delete_parser.set_defaults(func=calendar_delete)

    args = parser.parse_args(remaining_argv)

    if not args.nocaldav:
        caldav_conn = caldav_connect(args)
    else:
        caldav_conn = None
    
    if args.ssl_verify_cert == 'no':
        urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)

    ret = args.func(caldav_conn, args)
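
Example #39 parses the command line in two passes: a stripped-down conf_parser (built with add_help=False) pulls out only the config-file options via parse_known_args(), the config file supplies defaults, and the full parser then inherits the config options through parents=[conf_parser] and applies the defaults with set_defaults(**defaults). A minimal sketch of that pattern, with a hypothetical --mode option and an in-memory stand-in for the config file:

import argparse

# first pass: only the config-related options; add_help=False leaves -h for the full parser
conf_parser = argparse.ArgumentParser(add_help=False)
conf_parser.add_argument("--config-file", metavar="FILE")
args, remaining_argv = conf_parser.parse_known_args(
    ["--config-file", "demo.conf", "--mode", "fast"])

# pretend these defaults were read from args.config_file
defaults = {"mode": "slow"}

# second pass: the real parser inherits --config-file and picks up the defaults
parser = argparse.ArgumentParser(parents=[conf_parser])
parser.add_argument("--mode")
parser.set_defaults(**defaults)

print(parser.parse_args(remaining_argv).mode)  # 'fast': the command line beats the config file
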
Example #40
    def contribute_to_subparser(self, parser):
        parser.set_defaults(function=lambda args: parser.print_usage())
        subparsers = parser.add_subparsers(title='commands')

        setup_parser = subparsers.add_parser('setup', help='setup Google Drive API access through OAuth')
        setup_parser.set_defaults(function=self.setup)
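
Example #40 sets a top-level default function that prints the usage string, so invoking the tool without a subcommand still produces helpful output instead of an AttributeError. A minimal sketch (the 'setup' command below is a hypothetical placeholder):

import argparse

parser = argparse.ArgumentParser(prog="drive")
# with no subcommand on the command line, fall back to printing the usage line
parser.set_defaults(function=lambda args: parser.print_usage())
subparsers = parser.add_subparsers(title="commands")

setup_parser = subparsers.add_parser("setup", help="set up API access")
setup_parser.set_defaults(function=lambda args: print("running setup"))

args = parser.parse_args([])
args.function(args)  # prints the usage line because no command was chosen
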
Example #41
File: enimport.py  Project: leingang/plg
args, remaining_argv = conf_parser.parse_known_args()

if args.conf:
	config = ConfigParser.SafeConfigParser()
	config.read([args.conf])
	defaults = dict(config.items("defaults"))
else:
	defaults = { }
# Parse rest of arguments
# Don't suppress add_help here so it will handle -h
parser = argparse.ArgumentParser(
	# Inherit options from config_parser
	parents=[conf_parser]
	)
# TODO: maybe move this next line down to make sure config file is processed AFTER option defaults?
parser.set_defaults(**defaults)
parser.add_argument('-d','--debug',
    help='Print lots of debugging statements',
    action="store_const",dest="loglevel",const=logging.DEBUG,
    default=logging.WARNING
)
parser.add_argument('-v','--verbose',
    help='Be verbose',
    action="store_const",dest="loglevel",const=logging.INFO
)
parser.add_argument('--dry-run',
	help='do not save any notes',
	action='store_true',dest='dry_run')
parser.add_argument('--auth-token',
	help='authentication token (visit https://sandbox.evernote.com/api/DeveloperToken.action)',
	action='store',dest='auth_token')
Example #42
def main():
    param0 = ["-y", "station", "-q", "format=text", "-q", "level=network"]
    param1 = ["-y", "station", "-q", "format=text", "-q", "level=channel"]
    param2 = ["-y", "dataselect", "-z"]
    times = {"starttime": datetime.datetime(1900, 1, 1), "endtime": datetime.datetime(2100, 1, 1)}
    nets = set()

    def add_param0(option, opt_str, value, parser):
        param0.append(opt_str)
        param0.append(value)

    def add_param1(option, opt_str, value, parser):
        param1.append(opt_str)
        param1.append(value)

    def add_param2(option, opt_str, value, parser):
        param2.append(opt_str)
        param2.append(value)

    def add_param(option, opt_str, value, parser):
        add_param0(option, opt_str, value, parser)
        add_param1(option, opt_str, value, parser)
        add_param2(option, opt_str, value, parser)

    def add_time(option, opt_str, value, parser):
        add_param1(option, opt_str, value, parser)

        try:
            t = dateutil.parser.parse(value)

        except ValueError as e:
            raise optparse.OptionValueError("option '%s': invalid time value: '%s'" % (opt_str, value))

        if t.tzinfo is not None:
            t = t.astimezone(dateutil.tz.tzutc()).replace(tzinfo=None)

        times[option.dest] = t

    parser = optparse.OptionParser(
            usage="Usage: %prog [-h|--help] [OPTIONS] -o directory",
            version="%prog " + VERSION)

    parser.set_defaults(
            url="http://geofon.gfz-potsdam.de/eidaws/routing/1/",
            timeout=600,
            retries=10,
            retry_wait=60,
            threads=5,
            max_lines=1000,
            max_timespan=1440)

    parser.add_option("-v", "--verbose", action="store_true", default=False,
                      help="verbose mode")

    parser.add_option("-u", "--url", type="string", action="callback",
                      callback=add_param,
                      help="URL of routing service (default %default)")

    parser.add_option("-N", "--network", type="string", action="callback",
                      callback=add_param1,
                      help="network code or pattern")

    parser.add_option("-S", "--station", type="string", action="callback",
                      callback=add_param1,
                      help="station code or pattern")

    parser.add_option("-L", "--location", type="string", action="callback",
                      callback=add_param1,
                      help="location code or pattern")

    parser.add_option("-C", "--channel", type="string", action="callback",
                      callback=add_param1,
                      help="channel code or pattern")

    parser.add_option("-s", "--starttime", type="string", action="callback",
                      callback=add_time,
                      help="start time")

    parser.add_option("-e", "--endtime", type="string", action="callback",
                      callback=add_time,
                      help="end time")

    parser.add_option("-t", "--timeout", type="int", action="callback",
                      callback=add_param,
                      help="request timeout in seconds (default %default)")

    parser.add_option("-r", "--retries", type="int", action="callback",
                      callback=add_param,
                      help="number of retries (default %default)")

    parser.add_option("-w", "--retry-wait", type="int", action="callback",
                      callback=add_param,
                      help="seconds to wait before each retry (default %default)")

    parser.add_option("-n", "--threads", type="int", action="callback",
                      callback=add_param,
                      help="maximum number of download threads (default %default)")

    parser.add_option("-c", "--credentials-file", type="string", action="callback",
                      callback=add_param2,
                      help="URL,user,password file (CSV format) for queryauth")

    parser.add_option("-a", "--auth-file", type="string", action="callback",
                      callback=add_param2,
                      help="file that contains the auth token")

    parser.add_option("-o", "--output-dir", type="string",
                      help="SDS directory where downloaded data is written")

    parser.add_option("-l", "--max-lines", type="int",
                      help="max lines per request (default %default)")

    parser.add_option("-m", "--max-timespan", type="int",
                      help="max timespan per request in minutes (default %default)")

    parser.add_option("-z", "--no-citation", action="store_true", default=False,
                      help="suppress network citation info")

    parser.add_option("-Z", "--no-check", action="store_true", default=False,
                      help="suppress checking received routes and data")

    (options, args) = parser.parse_args()

    if args or not options.output_dir:
        parser.print_usage(sys.stderr)
        return 1

    def log_alert(s):
        if sys.stderr.isatty():
            s = "\033[31m" + s + "\033[m"

        sys.stderr.write(s + '\n')
        sys.stderr.flush()

    def log_notice(s):
        if sys.stderr.isatty():
            s = "\033[32m" + s + "\033[m"

        sys.stderr.write(s + '\n')
        sys.stderr.flush()

    def log_verbose(s):
        sys.stderr.write(s + '\n')
        sys.stderr.flush()

    def log_silent(s):
        pass

    logs.error = log_alert
    logs.warning = log_alert
    logs.notice = log_notice
    logs.info = (log_silent, log_verbose)[options.verbose]
    logs.debug = log_silent

    try:
        try:
            proc = exec_fetch(param1, None, options.verbose, options.no_check)

        except OSError as e:
            logs.error(str(e))
            logs.error("error running fdsnws_fetch")
            return 1

        timespan = {}

        for line in proc.stdout:
            if isinstance(line, bytes):
                line = line.decode('utf-8')

            if not line or line.startswith('#'):
                continue

            starttime = max(dateutil.parser.parse(line.split('|')[15]), times['starttime'])
            endtime = min(dateutil.parser.parse(line.split('|')[16]), times['endtime'])

            if starttime.tzinfo is not None:
                starttime = starttime.astimezone(dateutil.tz.tzutc()).replace(tzinfo=None)

            if endtime.tzinfo is not None:
                endtime = endtime.astimezone(dateutil.tz.tzutc()).replace(tzinfo=None)

            try:
                ts = timespan[tuple(line.split('|')[:4])]

                if ts.start > starttime:
                    ts.start = starttime
                    ts.current = starttime

                if ts.end < endtime:
                    ts.end = endtime

            except KeyError:
                timespan[tuple(line.split('|')[:4])] = Timespan(starttime, endtime)

        proc.stdout.close()
        proc.wait()

        if proc.returncode != 0:
            logs.error("error running fdsnws_fetch")
            return 1

        if os.path.exists(options.output_dir):
            scan_sds(options.output_dir, timespan, nets)

        while len(timespan) > 0:
            postdata = ""

            # random.sample() requires a sequence, so materialize the dict items first
            ts_used = random.sample(list(timespan.items()), min(len(timespan), options.max_lines))

            for ((net, sta, loc, cha), ts) in ts_used:
                te = min(ts.end, ts.start + datetime.timedelta(minutes=options.max_timespan))

                if loc == '':
                    loc = '--'

                postdata += "%s %s %s %s %sZ %sZ\n" \
                            % (net, sta, loc, cha, ts.start.isoformat(), te.isoformat())

            if not isinstance(postdata, bytes):
                postdata = postdata.encode('utf-8')

            try:
                proc = exec_fetch(param2, postdata, options.verbose, options.no_check)

            except OSError as e:
                logs.error(str(e))
                logs.error("error running fdsnws_fetch")
                return 1

            got_data = False

            try:
                for rec in mseedlite.Input(proc.stdout):
                    try:
                        ts = timespan[(rec.net, rec.sta, rec.loc, rec.cha)]

                    except KeyError:
                        logs.warning("unexpected data: %s.%s.%s.%s" % (rec.net, rec.sta, rec.loc, rec.cha))
                        continue

                    if rec.end_time <= ts.current:
                        continue

                    sds_dir = "%s/%d/%s/%s/%s.D" \
                              % (options.output_dir, rec.begin_time.year, rec.net, rec.sta, rec.cha)

                    sds_file = "%s.%s.%s.%s.D.%s" \
                              % (rec.net, rec.sta, rec.loc, rec.cha, rec.begin_time.strftime('%Y.%j'))

                    if not os.path.exists(sds_dir):
                        os.makedirs(sds_dir)

                    with open(sds_dir + '/' + sds_file, 'ab') as fd:
                        fd.write(rec.header + rec.data)

                    ts.current = rec.end_time
                    nets.add((rec.net, rec.begin_time.year))
                    got_data = True

            except mseedlite.MSeedError as e:
                logs.error(str(e))

            proc.stdout.close()
            proc.wait()

            if proc.returncode != 0:
                logs.error("error running fdsnws_fetch")
                return 1

            for ((net, sta, loc, cha), ts) in ts_used:
                if not got_data:
                    # no progress, skip to next segment
                    ts.start += datetime.timedelta(minutes=options.max_timespan)

                else:
                    # continue from current position
                    ts.start = ts.current

                if ts.start >= ts.end:
                    # timespan completed
                    del timespan[(net, sta, loc, cha)]

        if nets and not options.no_citation:
            logs.info("retrieving network citation info")
            get_citation(nets, param0, options.verbose)

    except (IOError, Error) as e:
        logs.error(str(e))
        return 1

    return 0
Example #43
def main():
    """
    the main function does (almost) nothing but parsing command line parameters
    """
    ## This boilerplate pattern is from
    ## http://stackoverflow.com/questions/3609852
    ## We want defaults for the command line options to be fetched from the config file

    # Parse any conf_file specification
    # We make this parser with add_help=False so that
    # it doesn't parse -h and print help.
    conf_parser = argparse.ArgumentParser(
        description=__doc__,  # printed with -h/--help
        # Don't mess with format of description
        formatter_class=argparse.RawDescriptionHelpFormatter,
        # Turn off help, so we print all options in response to -h
        add_help=False,
    )
    conf_parser.add_argument(
        "--config-file",
        help="Specify config file",
        metavar="FILE",
        default=os.getenv("XDG_CONFIG_HOME", os.getenv("HOME", "~") + "/.config") + "/calendar.conf",
    )
    conf_parser.add_argument(
        "--config-section",
        help="Specify config section; allows several caldav servers to be configured in the same config file",
        default="default",
    )
    conf_parser.add_argument("--interactive-config", help="Interactively ask for configuration", action="store_true")
    args, remaining_argv = conf_parser.parse_known_args()

    config = {}

    try:
        with open(args.config_file) as config_file:
            config = json.load(config_file)
    except IOError:
        ## File not found
        logging.info("no config file found")
    except ValueError:
        if args.interactive_config:
            logging.error(
                "error in config file.  Be aware that the current config file will be ignored and overwritten",
                exc_info=True,
            )
        else:
            logging.error(
                "error in config file.  You may want to run --interactive-config or fix the config file", exc_info=True
            )

    defaults = {}  # make sure 'defaults' is defined even when --interactive-config is used
    if args.interactive_config:
        config = interactive_config(args, config, remaining_argv)
        if not remaining_argv:
            return
    else:
        defaults = config_section(config, args.config_section)

    # Parse rest of arguments
    # Don't suppress add_help here so it will handle -h
    parser = argparse.ArgumentParser(
        # Inherit options from config_parser
        parents=[conf_parser]
    )
    parser.set_defaults(**defaults)

    ## Global options
    parser.add_argument(
        "--nocaldav",
        help="Do not connect to CalDAV server, but read/write icalendar format from stdin/stdout",
        action="store_true",
    )
    parser.add_argument("--icalendar", help="Read/write icalendar format from stdin/stdout", action="store_true")
    parser.add_argument("--timezone", help="Timezone to use")
    parser.add_argument("--language", help="language used", default="EN")
    parser.add_argument("--caldav-url", help="Full URL to the caldav server", metavar="URL")
    parser.add_argument("--caldav-user", help="username to log into the caldav server", metavar="USER")
    parser.add_argument("--caldav-pass", help="password to log into the caldav server", metavar="PASS")
    parser.add_argument("--debug-logging", help="turn on debug logging", action="store_true")
    parser.add_argument(
        "--calendar-url",
        help="URL for calendar to be used (may be absolute or relative to caldav URL, or just the name of the calendar)",
    )

    ## TODO: check sys.argv[0] to find command
    ## TODO: set up logging
    subparsers = parser.add_subparsers(title="command")

    ## Tasks
    todo_parser = subparsers.add_parser("todo")
    todo_parser.add_argument("--top", "-1", action="count", default=0)
    todo_parser.add_argument("--offset", action="count", default=0)
    todo_parser.add_argument("--offsetn", type=int, default=0)
    todo_parser.add_argument("--limit", type=int, default=0)
    todo_parser.add_argument("--todo-uid")

    for attr in vtodo_txt_one + vtodo_txt_many:
        todo_parser.add_argument("--" + attr, help="for filtering tasks")

    # todo_parser.add_argument('--priority', ....)
    # todo_parser.add_argument('--sort-by', ....)
    # todo_parser.add_argument('--due-before', ....)
    todo_subparsers = todo_parser.add_subparsers(title="tasks subcommand")
    todo_add_parser = todo_subparsers.add_parser("add")
    todo_add_parser.add_argument("summaryline", nargs="+")
    todo_add_parser.add_argument("--set-due", default=date.today() + timedelta(7))
    todo_add_parser.add_argument("--set-dtstart", default=date.today() + timedelta(1))
    todo_add_parser.add_argument(
        "--is-child", help="the new task is a child-task of the selected task(s)", action="store_true"
    )
    for attr in vtodo_txt_one + vtodo_txt_many:
        if attr != "summary":
            todo_add_parser.add_argument("--set-" + attr, help="Set " + attr)
    todo_add_parser.set_defaults(func=todo_add)

    todo_list_parser = todo_subparsers.add_parser("list")
    todo_list_parser.add_argument(
        "--todo-template",
        help="Template for printing out the event",
        default="{dtstart}{dtstart_passed_mark} {due}{due_passed_mark} {summary}",
    )
    todo_list_parser.add_argument(
        "--default-due", help="Default number of days from when a task is submitted until it's considered due", default=14
    )
    todo_list_parser.add_argument(
        "--list-categories",
        help="Instead of listing the todo-items, list the unique categories used",
        action="store_true",
    )
    todo_list_parser.set_defaults(func=todo_list)

    todo_edit_parser = todo_subparsers.add_parser("edit")
    for attr in vtodo_txt_one + vtodo_txt_many:
        todo_edit_parser.add_argument("--set-" + attr, help="Set " + attr)
    for attr in vtodo_txt_many:
        todo_edit_parser.add_argument("--add-" + attr, help="Add an " + attr)
    todo_edit_parser.add_argument(
        "--pdb", help="Allow interactive edit through the python debugger", action="store_true"
    )
    todo_edit_parser.set_defaults(func=todo_edit)

    todo_postpone_parser = todo_subparsers.add_parser("postpone")
    todo_postpone_parser.add_argument(
        "until",
        help="either a new date, +interval to add some interval to the existing time, or e.g. 'in 3d' to set the time relative to the current time.  An interval is a number postfixed with a one-character unit (any of smhdwy).  If the todo-item has a dtstart, that field will be modified, otherwise the due timestamp will be modified.  If both timestamps exist and dtstart would be moved beyond the due time, the due time is moved to the new dtstart.",
    )
    todo_postpone_parser.add_argument("--due", help="move the due, not the dtstart", action="store_true")
    todo_postpone_parser.set_defaults(func=todo_postpone)

    todo_complete_parser = todo_subparsers.add_parser("complete")
    todo_complete_parser.set_defaults(func=todo_complete)

    todo_delete_parser = todo_subparsers.add_parser("delete")
    todo_delete_parser.set_defaults(func=todo_delete)

    calendar_parser = subparsers.add_parser("calendar")
    calendar_subparsers = calendar_parser.add_subparsers(title="cal subcommand")
    calendar_add_parser = calendar_subparsers.add_parser("add")
    calendar_add_parser.add_argument(
        "event_time", help="Timestamp and duration of the event.  See the documentation for event_time specifications"
    )
    calendar_add_parser.add_argument("summary", nargs="+")
    calendar_add_parser.set_defaults(func=calendar_add)
    calendar_addics_parser = calendar_subparsers.add_parser("addics")
    calendar_addics_parser.add_argument("--file", help="ICS file to upload", default="-")
    calendar_addics_parser.set_defaults(func=calendar_addics)

    calendar_agenda_parser = calendar_subparsers.add_parser("agenda")
    calendar_agenda_parser.add_argument(
        "--from-time",
        help="Fetch calendar events from this timestamp.  See the documentation for time specifications.  Defaults to now",
    )
    calendar_agenda_parser.add_argument("--to-time", help="Fetch calendar until this timestamp")
    calendar_agenda_parser.add_argument("--agenda-mins", help="Fetch calendar for so many minutes", type=int)
    calendar_agenda_parser.add_argument("--agenda-days", help="Fetch calendar for so many days", type=int, default=7)
    calendar_agenda_parser.add_argument(
        "--event-template", help="Template for printing out the event", default="{dstart} {summary}"
    )
    calendar_agenda_parser.add_argument(
        "--timestamp-format", help="strftime-style format string for the output timestamps", default="%F %H:%M (%a)"
    )
    calendar_agenda_parser.set_defaults(func=calendar_agenda)

    calendar_delete_parser = calendar_subparsers.add_parser("delete")
    calendar_delete_parser.add_argument("--event-uid")
    calendar_delete_parser.add_argument("--event-url")
    calendar_delete_parser.add_argument("--event-timestamp")
    calendar_delete_parser.set_defaults(func=calendar_delete)

    args = parser.parse_args(remaining_argv)

    if args.timezone:
        args.timezone = pytz.timezone(args.timezone)
    else:
        args.timezone = tzlocal.get_localzone()

    if not args.nocaldav:
        caldav_conn = caldav_connect(args)
    else:
        caldav_conn = None

    ret = args.func(caldav_conn, args)
Example #44
def main() -> None:

    parser = argparse.ArgumentParser(
        description="Main server process for a scheduled Speedtest tester.")
    parser.add_argument('--version',
                        action='version',
                        version=f'Speedtest Manager Client {__version__}',
                        help="Display the current version and exit")
    parser.add_argument(
        '-d',
        '--datadir',
        default=DEFAULT_DATADIR,
        type=Path,
        help=
        "The directory where data is stored, used only to calculate relative paths for Unix sockets"
    )

    logging_args = parser.add_argument_group(
        'Logging Settings',
        description="Arguments that controls logging output.")

    loglevel_group = logging_args.add_mutually_exclusive_group(required=False)
    loglevel_group.add_argument('-l',
                                '--loglevel',
                                type=str,
                                choices=LOGLEVELS,
                                dest='loglevel',
                                help="The level of logging to use.")
    loglevel_group.add_argument(
        '-v',
        '--verbose',
        action='store_const',
        const=VERBOSE_LOGLEVEL,
        dest='loglevel',
        help=
        f"Output all logging. This is equivalent to --loglevel {VERBOSE_LOGLEVEL}."
    )

    network_args = parser.add_argument_group(
        'Network Settings',
        description=
        "Arguments that control how the client connects to the manager.")

    family_group = network_args.add_mutually_exclusive_group(required=False)
    family_group.add_argument('-u',
                              '--unix',
                              action='store_const',
                              dest='family',
                              const=socket.AF_UNIX,
                              help="Uses a UNIX socket for connections")
    family_group.add_argument('-4',
                              '--ipv4',
                              action='store_const',
                              dest='family',
                              const=socket.AF_INET,
                              help="Uses an IPV4 socket for connections")
    family_group.add_argument('-6',
                              '--ipv6',
                              action='store_const',
                              dest='family',
                              const=socket.AF_INET6,
                              help="Uses an IPV6 socket for connections")

    network_args.add_argument('-a',
                              '--host',
                              default=None,
                              type=str,
                              help="The address to connect to")
    network_args.add_argument('-p',
                              '--port',
                              default=8090,
                              type=int,
                              help="The port to connect to")

    parser.set_defaults(family=socket.AF_UNIX, loglevel=DEFAULT_LOGLEVEL)
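    # The set_defaults call above supplies the fall-back family/loglevel for
    # the mutually exclusive store_const groups, which would otherwise leave
    # those attributes as None when no flag is passed.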

    subparsers = parser.add_subparsers(
        required=True,
        dest='operation',
        title="Operations",
        description="Operations that can be performed in the manager.")

    ##### New job

    new_job_parser = subparsers.add_parser(
        'new',
        help="Creates a new job",
        description="Creates a new job in the system with the given parameters."
    )

    new_job_parser.add_argument('id', type=str, help="The ID of the job")

    new_job_parser.add_argument(
        '-t',
        '--title',
        type=str,
        help="The title of the job (purely for readability purposes)")
    new_job_parser.add_argument(
        '-i',
        '--interval',
        type=parse_interval,
        default=None,
        help=
        "The interval between job executions. If not specified, the job is only ran once, and the end time is ignored."
    )
    new_job_parser.add_argument(
        '-s',
        '--start',
        type=parse_time,
        default=None,
        help="When to start the job. If not specified, starts immediately.")
    new_job_parser.add_argument(
        '-e',
        '--end',
        type=parse_time,
        default=None,
        help=
        "When to stop the job. If not specified, the job will run until manually stopped."
    )

    server_id_group = new_job_parser.add_mutually_exclusive_group(
        required=True)
    server_id_group.add_argument(
        '--server-id',
        type=int,
        default=None,
        help="The ID of the server to use for the job")
    server_id_group.add_argument(
        '--server-name',
        type=str,
        default=None,
        help="The hostname of the server to use for the job")

    def new_job(client: ManagerClient, args) -> None:

        job = Job(
            id=args.id,
            title=args.title,
            server_id=args.server_id,
            server_name=args.server_name,
            interval=args.interval,
            start=args.start,
            end=args.end,
        )
        id = client.new_job(job)

        print(f"Created job with ID '{id}'.")

    new_job_parser.set_defaults(func=new_job)

    ##### Get single job

    get_job_parser = subparsers.add_parser(
        'job',
        help="Describe a job",
        description="Retrieves information about a single job.")

    get_job_parser.add_argument('id', type=str, help='The ID of the job')

    def get_job(client: ManagerClient, args) -> None:

        job: Job = client.get_job(args.id)
        print(json.dumps(job.to_json()))

    get_job_parser.set_defaults(func=get_job)

    ##### Get job list

    get_jobs_parser = subparsers.add_parser(
        'jobs',
        help="List registered jobs",
        description="Retrieves information about all registered job.")

    get_job_filter_group = get_jobs_parser.add_mutually_exclusive_group(
        required=False)

    get_job_filter_group.add_argument('-r',
                                      '--running',
                                      action='store_const',
                                      const=True,
                                      dest='running',
                                      help="Only retrieve running jobs")
    get_job_filter_group.add_argument('-s',
                                      '--stopped',
                                      action='store_const',
                                      const=False,
                                      dest='running',
                                      help="Only retrieve stopped jobs")

    def get_jobs(client: ManagerClient, args) -> None:

        jobs: Set[Job] = client.get_jobs(args.running)
        print(json.dumps({'jobs': [job.to_json() for job in jobs]}))

    get_jobs_parser.set_defaults(func=get_jobs, running=None)

    ##### Stop job

    stop_job_parser = subparsers.add_parser(
        'stop',
        help="Stops a running job",
        description="Stops a running job before its scheduled finish date.")

    stop_job_parser.add_argument('id', type=str, help='The ID of the job')

    def stop_job(client: ManagerClient, args) -> None:

        job: Job = client.stop_job(args.id)
        print(json.dumps(job.to_json()))

    stop_job_parser.set_defaults(func=stop_job)

    ##### Delete job

    delete_job_parser = subparsers.add_parser(
        'delete',
        help="Deletes a job",
        description="Deletes a job from the system, including its results.")

    delete_job_parser.add_argument('id', type=str, help='The ID of the job')

    def delete_job(client: ManagerClient, args) -> None:

        job: Job = client.delete_job(args.id)
        print(json.dumps(job.to_json()))

    delete_job_parser.set_defaults(func=delete_job)

    ##### Get results

    get_results_parser = subparsers.add_parser(
        'results',
        help="Retrieves job results",
        description="Retrieves the results obtained so far by registered jobs."
    )

    get_results_parser.add_argument(
        'id',
        type=str,
        nargs='+',
        help='The ID of each job to get results for')

    def get_results(client: ManagerClient, args) -> None:

        results = client.get_results(args.id)
        print(json.dumps(results))

    get_results_parser.set_defaults(func=get_results)

    ##### Run program

    args = parser.parse_args()

    datadir: Path = args.datadir

    loglevel: int = getattr(logging, args.loglevel)
    setup_logging(True, logdir=None, level=loglevel)

    logging.info("Program starting.")

    family: int = args.family
    host: str = args.host
    port: int = args.port
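    # Resolve the connection address from the selected socket family: UNIX
    # sockets become paths under the data directory, while IPv4/IPv6 fall back
    # to the loopback address when no host is given.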
    if family == socket.AF_UNIX:
        address = str(datadir / (host if host is not None else 'server.sock'))
    elif family == socket.AF_INET:
        address = (host if host is not None else '127.0.0.1', port)
    elif family == socket.AF_INET6:
        address = (host if host is not None else '::1', port)
    else:
        raise ValueError("Unsupported address family.")

    client = ManagerClient(family, address)
    try:
        args.func(client, args)
    except SpeedtestError as e:
        print(f"Error: {e}")
Example #45
                dateThen = dateThen.replace(tzinfo=None)
                dateNow  = datetime.now()
                dateDiff = dateNow - dateThen
                return dateDiff.days
    except ValueError:
        return None

    return None
    

if __name__ == '__main__':

    parser = argparse.ArgumentParser()
    parser.add_argument('-d', '--domain', help='domain to analyze', type=str)
    parser.add_argument('-t', '--test', help='unit tests',  dest='tests', action='store_true')
    parser.set_defaults(tests=False)
    args = parser.parse_args()

    if args.tests:
        arryDomains  = ['nccgroup.trust','nccgroup.com','facebook.cn','facebook.com','facebook.fr','facebook.de','facebook.co.uk','facebook.net','facebook.gr','facebook.nl','facebook.se','facebook.fi','facebook.ae','facebook.cm','facebook.co'
                        ,'facebook.it','facebook.es','facebook.za','facebook.ca','facebook.pl','facebook.su','facebook.ru','facebook.tw','facebook.jp','facebook.au','facebook.nz','facebook.ar','facebook.mx','facebook.is','facebook.io']
        for strDomain in arryDomains:
            resAge = howold(strDomain)
            print("[i] " + strDomain + " - ", end="")
            if resAge:
                print(str(resAge) + " - pass")
            else:
                print(" unknown - fail")
    else:
        if args.domain and len(args.domain) > 0:
            resAge = howold(args.domain)
Example #46
def main():
    qp = {}
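    # qp collects query parameters from the option callbacks below: -q/--query
    # values are split on '=', while every other callback option stores its
    # value under the option's dest name.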

    def add_qp(option, opt_str, value, parser):
        if option.dest == 'query':
            try:
                (p, v) = value.split('=', 1)
                qp[p] = v

            except ValueError:
                raise optparse.OptionValueError("%s expects parameter=value"
                                                % opt_str)

        else:
            qp[option.dest] = value

    parser = optparse.OptionParser(
            usage="Usage: %prog [-h|--help] [OPTIONS] -o file",
            version="%prog " + VERSION,
            add_help_option=False)

    parser.set_defaults(
            url="http://geofon.gfz-potsdam.de/eidaws/routing/1/",
            timeout=600,
            retries=10,
            retry_wait=60,
            threads=5)

    parser.add_option("-h", "--help", action="store_true", default=False,
                      help="show help message and exit")

    parser.add_option("-l", "--longhelp", action="store_true", default=False,
                      help="show extended help message and exit")

    parser.add_option("-v", "--verbose", action="store_true", default=False,
                      help="verbose mode")

    parser.add_option("-u", "--url", type="string",
                      help="URL of routing service (default %default)")

    parser.add_option("-y", "--service", type="string", action="callback",
                      callback=add_qp,
                      help="target service (default dataselect)")

    parser.add_option("-N", "--network", type="string", action="callback",
                      callback=add_qp,
                      help="network code or pattern")

    parser.add_option("-S", "--station", type="string", action="callback",
                      callback=add_qp,
                      help="station code or pattern")

    parser.add_option("-L", "--location", type="string", action="callback",
                      callback=add_qp,
                      help="location code or pattern")

    parser.add_option("-C", "--channel", type="string", action="callback",
                      callback=add_qp,
                      help="channel code or pattern")

    parser.add_option("-s", "--starttime", type="string", action="callback",
                      callback=add_qp,
                      help="start time")

    parser.add_option("-e", "--endtime", type="string", action="callback",
                      callback=add_qp,
                      help="end time")

    parser.add_option("-q", "--query", type="string", action="callback",
                      callback=add_qp, metavar="PARAMETER=VALUE",
                      help="additional query parameter")

    parser.add_option("-t", "--timeout", type="int",
                      help="request timeout in seconds (default %default)")

    parser.add_option("-r", "--retries", type="int",
                      help="number of retries (default %default)")

    parser.add_option("-w", "--retry-wait", type="int",
                      help="seconds to wait before each retry "
                           "(default %default)")

    parser.add_option("-n", "--threads", type="int",
                      help="maximum number of download threads "
                           "(default %default)")

    parser.add_option("-c", "--credentials-file", type="string",
                      help="URL,user,password file (CSV format) for queryauth")

    parser.add_option("-a", "--auth-file", type="string",
                      help="file that contains the auth token")

    parser.add_option("-p", "--post-file", type="string",
                      help="request file in FDSNWS POST format")

    parser.add_option("-f", "--arclink-file", type="string",
                      help="request file in ArcLink format")

    parser.add_option("-b", "--breqfast-file", type="string",
                      help="request file in breq_fast format")

    parser.add_option("-o", "--output-file", type="string",
                      help="file where downloaded data is written")

    parser.add_option("-z", "--no-citation", action="store_true", default=False,
                      help="suppress network citation info")

    (options, args) = parser.parse_args()

    if options.help:
        print(__doc__.split("Usage Examples", 1)[0], end="")
        parser.print_help()
        return 0

    if options.longhelp:
        print(__doc__)
        parser.print_help()
        return 0

    if args or not options.output_file:
        parser.print_usage()
        return 1

    if bool(options.post_file) + bool(options.arclink_file) + \
            bool(options.breqfast_file) > 1:
        msg("only one of (--post-file, --arclink-file, --breqfast-file) "
            "can be used")
        return 1

    try:
        cred = {}
        authdata = None
        postdata = None

        if options.credentials_file:
            with open(options.credentials_file) as fd:
                try:
                    for (url, user, passwd) in csv.reader(fd):
                        cred[url] = (user, passwd)

                except (ValueError, csv.Error):
                    raise Error("error parsing %s" % options.credentials_file)

        if options.auth_file:
            with open(options.auth_file) as fd:
                authdata = fd.read()

        if options.post_file:
            with open(options.post_file) as fd:
                postdata = fd.read()

        else:
            parser = None

            if options.arclink_file:
                parser = ArclinkParser()
                parser.parse(options.arclink_file)

            elif options.breqfast_file:
                parser = BreqParser()
                parser.parse(options.breqfast_file)

            if parser is not None:
                if parser.failstr:
                    msg(parser.failstr)
                    return 1

                postdata = parser.postdata

        url = RoutingURL(urlparse.urlparse(options.url), qp)
        dest = open(options.output_file, 'wb')

        nets = route(url, cred, authdata, postdata, dest, options.timeout,
                     options.retries, options.retry_wait, options.threads,
                     options.verbose)

        if nets and not options.no_citation:
            msg("retrieving network citation info", options.verbose)
            get_citation(nets, options)

        else:
            msg("", options.verbose)

        msg("In case of problems with your request, plese use the contact "
            "form at\n\n"
            "    http://www.orfeus-eu.org/organization/contact/form/\n",
            options.verbose)

    except (IOError, Error) as e:
        msg(str(e))
        return 1

    return 0
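
Most options in this example funnel their values into the shared qp dict through optparse's callback action; a minimal, self-contained sketch of that pattern (option names here are arbitrary):

import optparse

params = {}

def collect(option, opt_str, value, parser):
    # Store each callback-backed option under its dest name.
    params[option.dest] = value

p = optparse.OptionParser()
p.add_option("-N", "--network", type="string", action="callback", callback=collect)
p.add_option("-S", "--station", type="string", action="callback", callback=collect)

opts, args = p.parse_args(["-N", "GE", "-S", "APE"])
print(params)  # {'network': 'GE', 'station': 'APE'}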
Example #47
#!/usr/bin/env python
import dateutil.parser
import datetime
from getpass import getpass
import sys
import logging
import argparse

from spiff import cli

parser = argparse.ArgumentParser()
cli.add_argument_group(parser)
parser.set_defaults(filter=['unpaid'])
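# The -a/-i/-U flags below append their constants to this 'filter' list;
# argparse appends to the default, so e.g. passing -a yields
# ['unpaid', 'active'] rather than replacing the default.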
parser.add_argument('-a', '--active', dest='filter', const='active', action='append_const')
parser.add_argument('-i', '--inactive', dest='filter', const='inactive', action='append_const')
parser.add_argument('-U', '--unpaid', dest='filter', const='unpaid', action='append_const')

args = parser.parse_args(sys.argv[1:])

api = cli.api_from_args(args)

subsTotal = 0
total = 0
inactive = []
active = []

for m in sorted(api.getList("member"), key=lambda x:x.outstandingBalance):
  if not m.activeMember:
    inactive.append(m)
  else:
    active.append(m)