def add_config_args(parser, arg_names):
    """Expose the named config_user.py options as command-line flags on *parser*."""
    for name in arg_names:
        doc, t, defval, choices = cfg_docs[name]
        flag = f'--{name.replace("_", "-")}'
        if t is bool:
            # Boolean options get a single toggle whose direction depends
            # on the documented default value.
            if defval:
                parser.add_argument(f'--no-{name.replace("_", "-")}',
                                    dest=name, help=doc,
                                    action='store_false')
            else:
                parser.add_argument(flag, dest=name, help=doc,
                                    action='store_true')
        elif t is int:
            parser.add_argument(flag, type=int, dest=name,
                                metavar='<cfg_val>', help=doc)
        else:
            opt = parser.add_argument(flag, dest=name, metavar='<cfg_val>',
                                      help=doc, choices=choices)
            # Directory-valued options get shell path completion.
            if name.endswith('dir'):
                opt.completer = DirectoriesCompleter()
def add_arguments(self, parser, cli_name):
    """Register positional arguments: keystore root, key name, policy file.

    :param parser: argparse parser to populate.
    :param cli_name: name of the CLI tool (unused here; required by the
        verb-extension interface).
    """
    arg = parser.add_argument('ROOT', help='root path of keystore')
    arg.completer = DirectoriesCompleter()
    parser.add_argument('NAME', help='key name, aka ROS enclave name')
    arg = parser.add_argument('POLICY_FILE_PATH',
                              help='path of the policy xml file')
    # Bug fix: allowednames=('xml') passed the bare string 'xml' (missing
    # trailing comma made it a parenthesized expression, not a tuple); it
    # only worked because FilesCompleter wraps bare strings in a list.
    arg.completer = FilesCompleter(allowednames=['xml'], directories=False)
def test_directory_completion(self):
    """DirectoriesCompleter must suggest directories only, never files."""
    completer = DirectoriesCompleter()

    def complete(prefix):
        return set(completer(prefix))

    with TempDir(prefix="test_dir", dir="."):
        # Lay out nested directories plus two plain files; the files
        # must never appear in the completion results.
        for parent, child in (("abc", "baz"), ("abb", "baz"),
                              ("abc", "faz"), ("def", "baz")):
            os.makedirs(os.path.join(parent, child))
        with open("abc1", "w") as fp1:
            with open("def1", "w") as fp2:
                fp1.write("A test")
                fp2.write("Another test")
        # Exercise completion at several prefixes.
        self.assertEqual(complete("a"), {"abb/", "abc/"})
        self.assertEqual(complete("ab"), {"abc/", "abb/"})
        self.assertEqual(complete("abc"), {"abc/"})
        self.assertEqual(complete("abc/"), {"abc/baz/", "abc/faz/"})
        self.assertEqual(complete("d"), {"def/"})
        self.assertEqual(complete("def/"), {"def/baz/"})
        self.assertEqual(complete("e"), set())
        self.assertEqual(complete("def/k"), set())
def add_args(self, parser):
    """Define command-line arguments for this report command."""
    target_arg = parser.add_argument(
        'target', metavar='TARGET', choices=self.targets,
        help=' | '.join(self.targets))
    rundirs_arg = parser.add_argument(
        'rundirs', nargs='+', metavar='RUNDIR', default=[],
        help='run directories to parse (results/run.XXX)')
    parser.add_argument(
        '-i', '--instances', nargs='+', metavar='INSTANCE', default=[],
        choices=self.instances, help=' | '.join(self.instances))
    parser.add_argument(
        '--no-cache', action='store_false', dest='cache',
        help='cached results in the bottom of log files')
    parser.add_argument(
        '--refresh', action='store_true',
        help='refresh cached results in logs')
    add_table_report_args(parser)
    # Shell completion support is optional: argcomplete may be missing.
    try:
        from argcomplete.completers import DirectoriesCompleter
    except ImportError:
        pass
    else:
        target_arg.completer = self.complete_package
        rundirs_arg.completer = DirectoriesCompleter()
def add_arguments(self, parser, cli_name):
    """Register CLI options: keystore root, node names, policy files.

    :param parser: argparse parser to populate.
    :param cli_name: name of the CLI tool (unused here; required by the
        verb-extension interface).
    """
    arg = parser.add_argument('-k', '--keystore-root-path',
                              help='root path of keystore')
    arg.completer = DirectoriesCompleter()
    parser.add_argument(
        '-n', '--node-names', nargs='*', default=[],
        help='list of identities, aka ROS node names')
    arg = parser.add_argument(
        '-p', '--policy-files', nargs='*', default=[],
        help='list of policy xml file paths')
    # Bug fix: allowednames=('xml') passed the bare string 'xml' (missing
    # trailing comma); it only worked because FilesCompleter wraps strings.
    arg.completer = FilesCompleter(allowednames=['xml'], directories=False)
def add_arguments(self, parser, cli_name) -> None:
    """Register CLI options: keystore root, enclave names, policy files.

    :param parser: argparse parser to populate.
    :param cli_name: name of the CLI tool (unused here; required by the
        verb-extension interface).
    """
    arg = parser.add_argument(
        '-k', '--keystore-root-path', type=pathlib.Path,
        help='root path of keystore')
    arg.completer = DirectoriesCompleter()
    parser.add_argument(
        '-e', '--enclaves', nargs='*', default=[],
        help='list of identities, aka ROS security enclave names')
    arg = parser.add_argument(
        '-p', '--policy-files', nargs='*', type=pathlib.Path, default=[],
        help='list of policy xml file paths')
    # Bug fix: allowednames=('xml') passed the bare string 'xml' (missing
    # trailing comma); it only worked because FilesCompleter wraps strings.
    arg.completer = FilesCompleter(allowednames=['xml'], directories=False)
def add_arguments(self, parser, cli_name) -> None:
    """Register positional arguments: keystore root and enclave name."""
    root = parser.add_argument('ROOT', type=pathlib.Path,
                               help='root path of keystore')
    root.completer = DirectoriesCompleter()
    parser.add_argument('NAME', help='enclave name')
def load_arguments(self, _):
    """Register argument types and per-command arguments for the
    ``sentinel`` command group.

    :param _: unused loader positional (required by the AzCommandsLoader
        interface).
    """
    # Reusable argument-type declarations shared by several subcommands.
    detections_directory_type = CLIArgumentType(
        options_list=['--detections-directory', '-d'],
        completer=DirectoriesCompleter(),
        type=file_type,
        help='Directory which contains the detection files')
    detection_file_type = CLIArgumentType(
        options_list=['--detection-file', '-f'],
        completer=FilesCompleter(allowednames=['json', 'yaml']),
        type=file_type,
        help="File path of the detection")
    detection_schema_type = CLIArgumentType(
        options_list=['--detection-schema', '-s'],
        completer=FilesCompleter(allowednames=['json', 'yaml'],
                                 directories=False),
        type=file_type,
        help="File path of the detection schema")
    data_sources_directory_type = CLIArgumentType(
        options_list=['--data-sources-directory', '-d'],
        completer=DirectoriesCompleter(),
        type=file_type,
        help='Directory which contains data source files')
    data_source_file_type = CLIArgumentType(
        options_list=['--data-source-file', '-f'],
        completer=FilesCompleter(allowednames=['json', 'yaml']),
        type=file_type,
        help="File path of the data source")
    data_source_schema_type = CLIArgumentType(
        options_list=['--data-source-schema', '-s'],
        completer=FilesCompleter(allowednames=['json', 'yaml'],
                                 directories=False),
        type=file_type,
        help="File path of the data source schema")

    with self.argument_context('sentinel') as c:
        c.argument('workspace_name', options_list=['--workspace-name', '-n'],
                   help='Name of the Sentinel Workspace')

    with self.argument_context('sentinel detection create') as c:
        c.argument('detections_directory', detections_directory_type)
        c.argument('detection_file', detection_file_type)
        c.argument('enable_validation', options_list=['--enable-validation'],
                   arg_type=get_three_state_flag(),
                   help='Enable/Disable detection validation before deploying it')
        c.argument('detection_schema', detection_schema_type)

    with self.argument_context('sentinel detection validate') as c:
        c.argument('detections_directory', detections_directory_type)
        c.argument('detection_file', detection_file_type)
        c.argument('detection_schema', detection_schema_type)

    with self.argument_context('sentinel detection generate') as c:
        c.argument('detections_directory', detections_directory_type)
        c.argument('skip_interactive', options_list=['--skip-interactive'],
                   arg_type=get_three_state_flag(),
                   help='Enable/Disable interactive detection creation')
        # TODO: Add all detection configurations as arguments here
        # Typo fix: added missing space before "(alphanumeric".
        c.argument('name', options_list=['--name', '-n'],
                   help='Name of your detection (alphanumeric without spaces)')
        c.argument('create_directory', options_list=['--create-dir'],
                   arg_type=get_three_state_flag(),
                   help='Enable/Disable creating new directory for the detection')
        c.argument('with_documentation',
                   options_list=['--with-documentation', '--doc'],
                   arg_type=get_three_state_flag(),
                   help='Enable/Disable detection documentation')

    with self.argument_context('sentinel data_source create') as c:
        c.argument('data_sources_directory', data_sources_directory_type)
        c.argument('data_source_file', data_source_file_type)
        c.argument('enable_validation', options_list=['--enable-validation'],
                   arg_type=get_three_state_flag(),
                   help='Enable/Disable data source validation before deploying it')
        c.argument('data_source_schema', data_source_schema_type)

    with self.argument_context('sentinel data_source validate') as c:
        c.argument('data_sources_directory', data_sources_directory_type)
        c.argument('data_source_file', data_source_file_type)
        c.argument('data_source_schema', data_source_schema_type)

    with self.argument_context('sentinel data_source generate') as c:
        c.argument('data_sources_directory', data_sources_directory_type)
        # Typo fix: help text said "data siyrce creation".
        c.argument('skip_interactive', options_list=['--skip-interactive'],
                   arg_type=get_three_state_flag(),
                   help='Enable/Disable interactive data source creation')
        # TODO: Add all detection configurations as arguments here
        # Typo fix: added missing space before "(alphanumeric".
        c.argument('name', options_list=['--name', '-n'],
                   help='Name of your data source (alphanumeric without spaces)')
        c.argument('create_directory', options_list=['--create-dir'],
                   arg_type=get_three_state_flag(),
                   help='Enable/Disable creating new directory for the data source')
        c.argument('with_documentation',
                   options_list=['--with-documentation', '--doc'],
                   arg_type=get_three_state_flag(),
                   help='Enable/Disable data source documentation')
def _load_transformed_arguments(self, handler):
    """Load all the command line arguments from the request parameters.

    Yields (name, CliCommandArgument) pairs built from *handler*'s
    signature, flattening complex request objects into individual flags.

    :param func handler: The operation function.
    """
    from azure.cli.core.commands.parameters import file_type
    from argcomplete.completers import FilesCompleter, DirectoriesCompleter
    self.parser = BatchArgumentTree(self.validator, self.silent)
    self._load_options_model(handler)
    for arg in extract_args_from_signature(handler):
        arg_type = find_param_type(handler, arg[0])
        if arg[0] == self._options_param:
            # The special "options" parameter expands into several args.
            for option_arg in self._process_options():
                yield option_arg
        elif arg_type.startswith("str or"):
            docstring = find_param_help(handler, arg[0])
            choices = []
            # Harvest enum-like choices embedded in the docstring text;
            # 25 == len(' Possible values include'), so slicing there
            # keeps only the comma-separated value list.
            values_index = docstring.find(' Possible values include')
            if values_index >= 0:
                choices = docstring[values_index + 25:].split(', ')
                choices = [c for c in choices if c != "'unmapped'"]
                docstring = docstring[0:values_index]
            yield (arg[0], CliCommandArgument(arg[0],
                                              options_list=[arg_name(arg[0])],
                                              required=False,
                                              default=None,
                                              choices=choices,
                                              help=docstring))
        elif arg_type.startswith(
                ":class:"):  # TODO: could add handling for enums
            # Complex request object: flatten its fields into individual
            # arguments, plus a json_file alternative that overrides them.
            param_type = class_name(arg_type)
            self.parser.set_request_param(arg[0], param_type)
            param_model = _load_model(param_type)
            self._flatten_object(arg[0], param_model)
            for flattened_arg in self.parser.compile_args():
                yield flattened_arg
            param = 'json_file'
            docstring = "A file containing the {} specification in JSON format. " \
                        "If this parameter is specified, all '{} Arguments'" \
                        " are ignored.".format(arg[0].replace('_', ' '), group_title(arg[0]))
            yield (param, CliCommandArgument(param,
                                             options_list=[arg_name(param)],
                                             required=False,
                                             default=None,
                                             type=file_type,
                                             completer=FilesCompleter(),
                                             help=docstring))
        elif arg[0] not in self.ignore:
            # Plain argument: pass through unchanged.
            yield arg
    return_type = find_return_type(handler)
    if return_type == 'Generator':
        # Streaming (download) operations need a destination path.
        param = 'destination'
        docstring = "The path to the destination file or directory."
        yield (param, CliCommandArgument(param,
                                         options_list=[arg_name(param)],
                                         required=True,
                                         default=None,
                                         completer=DirectoriesCompleter(),
                                         type=file_type,
                                         validator=validators.validate_file_destination,
                                         help=docstring))
    if return_type == 'None' and handler.__name__.startswith('get'):
        # get-* operations returning nothing are exposed as HEAD commands.
        self.head_cmd = True
    if self.confirmation:
        param = CONFIRM_PARAM_NAME
        docstring = 'Do not prompt for confirmation.'
        yield (param, CliCommandArgument(param,
                                         options_list=['--yes', '-y'],
                                         required=False,
                                         action='store_true',
                                         help=docstring))
def parse_arguments():
    """Build the complete zkay argparse CLI and return the parsed arguments.

    Defines the main parser, a shared configuration parser (exposing
    config_user.py options), and one subparser per zkay action
    (compile/check/solify/export/import/run/deploy/connect/deploy-pki/
    deploy-crypto-libs/version/update-solc), then runs argcomplete and
    parses sys.argv.
    """
    class ShowSuppressedInHelpFormatter(argparse.RawTextHelpFormatter):
        # Hide the auto-generated config flags (metavar '<cfg_val>') from
        # usage lines so they don't drown out the action-specific options.
        def add_usage(self, usage, actions, groups, prefix=None):
            if usage is not argparse.SUPPRESS:
                actions = [action for action in actions
                           if action.metavar != '<cfg_val>']
                args = usage, actions, groups, prefix
                self._add_item(self._format_usage, args)

    main_parser = argparse.ArgumentParser(prog='zkay')
    # File-extension groups used for shell completion below.
    zkay_files = ('zkay', 'sol')
    zkay_package_files = ('zkp', )
    config_files = ('json', )

    msg = 'Path to local configuration file (defaults to "config.json" in cwd). ' \
          'This file (if it exists), overrides settings defined in the global configuration.'
    main_parser.add_argument('--config-file', default='config.json',
                             metavar='<config_file>',
                             help=msg).completer = FilesCompleter(config_files)

    # Shared 'config' parser
    config_parser = argparse.ArgumentParser(add_help=False)
    msg = 'These parameters can be used to override settings defined (and documented) in config_user.py'
    cfg_group = config_parser.add_argument_group(title='Configuration Options',
                                                 description=msg)

    # Expose config_user.py options via command line arguments, they are supported in all parsers
    cfg_docs = parse_config_doc()

    def add_config_args(parser, arg_names):
        # Turn each documented config option into a CLI flag whose form
        # depends on the option's type (bool toggle / int / choice).
        for name in arg_names:
            doc, t, defval, choices = cfg_docs[name]
            if t is bool:
                if defval:
                    parser.add_argument(f'--no-{name.replace("_", "-")}',
                                        dest=name, help=doc,
                                        action='store_false')
                else:
                    parser.add_argument(f'--{name.replace("_", "-")}',
                                        dest=name, help=doc,
                                        action='store_true')
            elif t is int:
                parser.add_argument(f'--{name.replace("_", "-")}', type=int,
                                    dest=name, metavar='<cfg_val>', help=doc)
            else:
                arg = parser.add_argument(f'--{name.replace("_", "-")}',
                                          dest=name, metavar='<cfg_val>',
                                          help=doc, choices=choices)
                # Directory-valued config options get path completion.
                if name.endswith('dir'):
                    arg.completer = DirectoriesCompleter()

    add_config_args(cfg_group, cfg_docs.keys())

    solc_version_help = 'zkay defaults to the latest installed\n' \
                        'solidity version supported by the current zkay version.\n\n' \
                        'If you need to use a particular minor release (e.g. because \n' \
                        'the latest release is broken or you need determinism for testing)\n' \
                        'you can specify a particular solc version (e.g. v0.5.12) via this argument.\n' \
                        'Note: An internet connection is required if the selected version is not installed'

    subparsers = main_parser.add_subparsers(title='actions', dest='cmd',
                                            required=True)

    # 'compile' parser
    compile_parser = subparsers.add_parser(
        'compile', parents=[config_parser], help='Compile a zkay contract.',
        formatter_class=ShowSuppressedInHelpFormatter)
    msg = 'The directory to output the compiled contract to. Default: Current directory'
    compile_parser.add_argument(
        '-o', '--output', default=os.getcwd(), help=msg,
        metavar='<output_directory>').completer = DirectoriesCompleter()
    compile_parser.add_argument(
        'input', help='The zkay source file',
        metavar='<zkay_file>').completer = FilesCompleter(zkay_files)
    compile_parser.add_argument('--log', action='store_true',
                                help='enable logging')
    compile_parser.add_argument('--solc-version', help=solc_version_help,
                                metavar='<cfg_val>')

    # 'check' parser
    typecheck_parser = subparsers.add_parser(
        'check', parents=[config_parser],
        help='Only type-check, do not compile.',
        formatter_class=ShowSuppressedInHelpFormatter)
    typecheck_parser.add_argument(
        'input', help='The zkay source file',
        metavar='<zkay_file>').completer = FilesCompleter(zkay_files)
    typecheck_parser.add_argument('--solc-version', help=solc_version_help,
                                  metavar='<cfg_val>')

    # 'solify' parser
    # NOTE(review): this help text ends with a stray ')' -- looks like a
    # typo in the original message; left unchanged here.
    msg = 'Output solidity code which corresponds to zkay code with all privacy features and comments removed, ' \
          'useful in conjunction with analysis tools which operate on solidity code.)'
    solify_parser = subparsers.add_parser(
        'solify', parents=[config_parser], help=msg,
        formatter_class=ShowSuppressedInHelpFormatter)
    solify_parser.add_argument(
        'input', help='The zkay source file',
        metavar='<zkay_file>').completer = FilesCompleter(zkay_files)

    # 'export' parser
    export_parser = subparsers.add_parser(
        'export', parents=[config_parser],
        help='Package a compiled zkay contract.',
        formatter_class=ShowSuppressedInHelpFormatter)
    msg = 'Output filename. Default: ./contract.zkp'
    export_parser.add_argument(
        '-o', '--output', default='contract.zkp', help=msg,
        metavar='<output_filename>').completer = FilesCompleter(zkay_package_files)
    msg = 'Directory with the compilation output of the contract which should be packaged.'
    export_parser.add_argument(
        'input', help=msg,
        metavar='<zkay_compilation_output_dir>').completer = DirectoriesCompleter()

    # 'import' parser
    msg = 'Unpack a packaged zkay contract.\n' \
          'Note: An internet connection is required if the packaged contract used a solc version which is not currently installed.'
    import_parser = subparsers.add_parser(
        'import', parents=[config_parser], help=msg,
        formatter_class=ShowSuppressedInHelpFormatter)
    msg = 'Directory where the contract should be unpacked to. Default: Current Directory'
    import_parser.add_argument(
        '-o', '--output', default=os.getcwd(), help=msg,
        metavar='<target_directory>').completer = DirectoriesCompleter()
    msg = 'Contract package to unpack.'
    import_parser.add_argument(
        'input', help=msg,
        metavar='<zkay_package_file>').completer = FilesCompleter(zkay_package_files)

    # 'run, deploy and connect' parsers
    # interact_parser is a shared parent for the three interaction verbs.
    interact_parser = argparse.ArgumentParser(add_help=False)
    msg = 'Directory with the compilation output of the contract with which you want to interact.'
    interact_parser.add_argument(
        'input', help=msg,
        metavar='<zkay_compilation_output_dir>').completer = DirectoriesCompleter()
    interact_parser.add_argument('--log', action='store_true',
                                 help='enable logging')
    interact_parser.add_argument('--account',
                                 help='Sender blockchain address',
                                 metavar='<address>')

    subparsers.add_parser(
        'run', parents=[interact_parser, config_parser],
        help='Enter transaction shell for a compiled zkay contract.',
        formatter_class=ShowSuppressedInHelpFormatter)

    deploy_parser = subparsers.add_parser(
        'deploy', parents=[interact_parser, config_parser],
        help='Deploy contract with given constructor arguments',
        formatter_class=ShowSuppressedInHelpFormatter)
    deploy_parser.add_argument('constructor_args', nargs='*',
                               help='Constructor arguments',
                               metavar='<args>...')

    connect_parser = subparsers.add_parser(
        'connect', parents=[interact_parser, config_parser],
        help='Connect to contract at address and enter shell.',
        formatter_class=ShowSuppressedInHelpFormatter)
    connect_parser.add_argument('address',
                                help='Blockchain address of deployed contract',
                                metavar='<address>')

    # Common deploy libs parameters
    deploy_libs_parser = argparse.ArgumentParser(add_help=False)
    msg = 'Address of the account to use for deploying the library contracts. ' \
          'Its ethereum keys must be hosted in the specified node and sufficient funds ' \
          'to cover the deployment costs must be available. ' \
          'WARNING: This account will be charged with the deployment costs.'
    deploy_libs_parser.add_argument('account',
                                    metavar='<deployer account ethereum address>',
                                    help=msg)

    # 'deploy-pki' parser
    dpki_parser = subparsers.add_parser(
        'deploy-pki', parents=[deploy_libs_parser],
        help='Manually deploy global pki contract compatible with a particular crypto backend to a blockchain')
    add_config_args(dpki_parser, {'crypto_backend', 'blockchain_backend',
                                  'blockchain_node_uri'})

    # 'deploy-crypto-libs' parser
    dclibs_parser = subparsers.add_parser(
        'deploy-crypto-libs', parents=[deploy_libs_parser],
        help='Manually deploy proving-scheme specific crypto libraries (if any needed) to a blockchain')
    add_config_args(dclibs_parser, {'proving_scheme', 'blockchain_backend',
                                    'blockchain_node_uri'})

    subparsers.add_parser('version',
                          help='Display zkay version information')
    subparsers.add_parser(
        'update-solc',
        help='Install latest compatible solc version (requires internet connection)')

    # parse
    argcomplete.autocomplete(main_parser, always_complete_options=False)
    a = main_parser.parse_args()
    return a
def wrapper(parsed_args, **kwds):
    """Invoke the wrapped completion function, warning instead of raising.

    NOTE(review): this is the inner closure of a decorator factory whose
    `def` is outside this view; `f` is the decorated function, and the
    `return wrapper` below belongs to that enclosing scope.
    """
    import argcomplete
    try:
        # Run inside the directory given by -C (if present); otherwise use
        # a no-op context manager.
        with cd(parsed_args.C) if getattr(
                parsed_args, 'C', None) else DummyContextManager() as _:
            return f(parsed_args=parsed_args, **kwds)
    except Exception as e:
        # Broad catch is deliberate: a failure during shell completion
        # should surface as an argcomplete warning, not a crash.
        argcomplete.warn(
            'An error occurred during argument completion: {}'.format(e))
return wrapper

# argcomplete is an optional dependency: fall back to no completer.
try:
    from argcomplete.completers import DirectoriesCompleter
    directory_completer = DirectoriesCompleter()
except ImportError:
    directory_completer = None

@completer
def library_completer(prefix, parsed_args, **kwargs):
    """Complete library names configured for the current needy target."""
    with ConfiguredNeedy('.', parsed_args) as needy:
        # Prefer an explicit --universal-binary; otherwise fall back to
        # the selected target (default 'host').
        target_or_universal_binary = parsed_args.universal_binary if getattr(
            parsed_args, 'universal_binary', None) else needy.target(
                getattr(parsed_args, 'target', 'host'))
        return [
            name
            for name in needy.libraries(target_or_universal_binary).keys()
            if name.startswith(prefix)
        ]
def add_args(self, parser):
    """Build one argument subparser per report target."""
    subparsers = parser.add_subparsers(
        title='target', metavar='TARGET', dest='target',
        help=' | '.join(self.targets))
    subparsers.required = True

    for name, target in self.targets.items():
        tparser = subparsers.add_parser(name)
        rundirsarg = tparser.add_argument(
            'rundirs', nargs='+', metavar='RUNDIR', default=[],
            help='run directories to parse (results/run.XXX)')
        tparser.add_argument(
            '-i', '--instances', nargs='+', metavar='INSTANCE', default=[],
            choices=self.instances, help=' | '.join(self.instances))
        tparser.add_argument(
            '--no-cache', action='store_false', dest='cache',
            help='cached results in the bottom of log files')
        tparser.add_argument(
            '--refresh', action='store_true',
            help='refresh cached results in logs')
        add_table_report_args(tparser)
        # --raw and --overhead are mutually exclusive output modes.
        report_modes = tparser.add_mutually_exclusive_group()
        report_modes.add_argument(
            '--raw', action='store_true',
            help='output all data points instead of aggregates')
        report_modes.add_argument(
            '--overhead', metavar='INSTANCE', choices=self.instances,
            help='report each field as overhead relative to this baseline')
        tparser.add_argument(
            '--groupby', metavar='FIELD',
            choices=_reportable_fields(target),
            default=target.aggregation_field,
            help='field to group by when aggregating results '
                 '(default %s)' % target.aggregation_field)
        tparser.add_argument(
            '--filter', nargs='+', default=[],
            help='only report these values of the --groupby field')
        # NOTE(review): the internal line breaks of this triple-quoted help
        # string could not be recovered from the collapsed source; the text
        # content is preserved verbatim.
        fieldarg = tparser.add_argument(
            '-f', '--field', nargs='+', action='append',
            metavar='FIELD [AGGR...]', default=[],
            help=''' add reported field, followed by aggregation methods (unless --raw is true) separated by colons. Valid aggregations are mean|median|stdev|stdev_percent|mad|min|max| sum|count|first|same|one|all|geomean (stdev_percent = 100*stdev/mean, mad = median absolute deviation, same = asserts each value is the same, all = join values by space, sort = join sorted values by space)''')
        tparser.add_argument(
            '--help-fields', action='store_true',
            help='print valid values for --field')
        tparser.add_argument(
            '--aggregate', choices=_aggregate_fns,
            help='aggregation method for entire columns')
        # Shell completion support is optional (argcomplete may be absent).
        try:
            from argcomplete.completers import DirectoriesCompleter
            rundirsarg.completer = DirectoriesCompleter()
            fieldarg.completer = _FieldCompleter(target)
        except ImportError:
            pass
def add_arguments(self, parser, cli_name):
    """Register the single positional argument: the keystore root path."""
    root = parser.add_argument('ROOT', help='root path of keystore')
    root.completer = DirectoriesCompleter()
def _load_transformed_arguments(self, handler):
    """Load all the command line arguments from the request parameters.

    Returns a list of (name, CLICommandArgument) pairs built from
    *handler*'s signature, flattening complex request objects and
    appending the shared Batch account authentication arguments.

    :param func handler: The operation function.
    """
    from azure.cli.core.commands.parameters import file_type
    from argcomplete.completers import FilesCompleter, DirectoriesCompleter
    self.parser = BatchArgumentTree(self.validator)
    self._load_options_model(handler)
    args = []
    for arg in extract_args_from_signature(
            handler, excluded_params=EXCLUDED_PARAMS):
        arg_type = find_param_type(handler, arg[0])
        if arg[0] == self._options_param:
            # The special "options" parameter expands into several args.
            for option_arg in self._process_options():
                args.append(option_arg)
        elif arg_type.startswith("str or"):
            docstring = find_param_help(handler, arg[0])
            choices = []
            # Harvest enum-like choices embedded in the docstring text;
            # 25 == len(' Possible values include'), so the slice keeps
            # only the comma-separated value list.
            values_index = docstring.find(' Possible values include')
            if values_index >= 0:
                choices = docstring[values_index + 25:].split(', ')
                choices = [
                    enum_value(c) for c in choices
                    if enum_value(c) != "'unmapped'"
                ]
                docstring = docstring[0:values_index]
            args.append(((arg[0],
                          CLICommandArgument(arg[0],
                                             options_list=[arg_name(arg[0])],
                                             required=False,
                                             default=None,
                                             choices=choices,
                                             help=docstring))))
        elif arg_type.startswith(
                "~"):  # TODO: could add handling for enums
            # Complex request object: flatten its fields into individual
            # arguments, plus a json_file alternative that overrides them.
            param_type = class_name(arg_type)
            self.parser.set_request_param(arg[0], param_type)
            param_model = _load_model(param_type)
            self._flatten_object(arg[0], param_model)
            for flattened_arg in self.parser.compile_args():
                args.append(flattened_arg)
            param = 'json_file'
            docstring = "A file containing the {} specification in JSON " \
                        "(formatted to match the respective REST API body). " \
                        "If this parameter is specified, all '{} Arguments'" \
                        " are ignored.".format(arg[0].replace('_', ' '), group_title(arg[0]))
            args.append((param,
                         CLICommandArgument(param,
                                            options_list=[arg_name(param)],
                                            required=False,
                                            default=None,
                                            type=file_type,
                                            completer=FilesCompleter(),
                                            help=docstring)))
        elif arg[0] not in pformat.IGNORE_PARAMETERS:
            # Plain argument: pass through unchanged.
            args.append(arg)
    return_type = find_return_type(handler)
    if return_type and return_type.startswith('Generator'):
        # Streaming (download) operations need a destination path.
        param = 'destination'
        docstring = "The path to the destination file or directory."
        args.append((param,
                     CLICommandArgument(param,
                                        options_list=[arg_name(param)],
                                        required=True,
                                        default=None,
                                        completer=DirectoriesCompleter(),
                                        type=file_type,
                                        validator=validators.validate_file_destination,
                                        help=docstring)))
    if return_type == 'None' and handler.__name__.startswith('get'):
        # get-* operations returning nothing are exposed as HEAD commands.
        self._head_cmd = True
    if self.confirmation:
        param = CONFIRM_PARAM_NAME
        docstring = 'Do not prompt for confirmation.'
        args.append((param,
                     CLICommandArgument(param,
                                        options_list=['--yes', '-y'],
                                        required=False,
                                        action='store_true',
                                        help=docstring)))
    # Shared Batch account authentication arguments appended to every
    # command in the group.
    auth_group_name = 'Batch Account'
    args.append(('cmd', CLICommandArgument('cmd', action=IgnoreAction)))
    args.append((
        'account_name',
        CLICommandArgument(
            'account_name',
            options_list=['--account-name'],
            required=False,
            default=None,
            validator=validators.validate_client_parameters,
            arg_group=auth_group_name,
            help='Batch account name. Alternatively, set by environment variable: AZURE_BATCH_ACCOUNT')))
    args.append((
        'account_key',
        CLICommandArgument(
            'account_key',
            options_list=['--account-key'],
            required=False,
            default=None,
            arg_group=auth_group_name,
            help='Batch account key. Alternatively, set by environment variable: AZURE_BATCH_ACCESS_KEY')))
    args.append((
        'account_endpoint',
        CLICommandArgument(
            'account_endpoint',
            options_list=['--account-endpoint'],
            required=False,
            default=None,
            arg_group=auth_group_name,
            help='Batch service endpoint. Alternatively, set by environment variable: AZURE_BATCH_ENDPOINT')))
    return args
filename = f"/tmp/rcbuildvalidator_{date}.txt" if DEBUG: print(f"Writtig log to {colored(filename, 'green')} ...") with open(filename, 'w') as output_file: output_file.write(command_output.decode('utf-8')) return filename else: return "" if __name__ == '__main__': import argparse parser = argparse.ArgumentParser(description='rcbuildvalidator makes easy to test the installation and build of the Robocomp core in many different Ubuntu versions.') parser.add_argument('-b', '--branch', type=str, default='development', help="Set the Robocomp branch to be tested") parser.add_argument('-v', '--version', type=str, default='18.04', help="Set the ubuntu version to be used").completer = ubuntu_images_completer parser.add_argument('--manual-mode', action='store_true', help="Open an interactive terminal of ubuntu with the version set by -v") parser.add_argument('-m', '--mount', nargs='*', type=str, action='append', help="Accept a list of paths to be mountes in the home directory of the launched machine").completer = DirectoriesCompleter() parser.add_argument('-d', '--debug', action='store_true', help="Shows some debugging messages") argcomplete.autocomplete(parser) args = parser.parse_args() if args.debug: DEBUG = True try: cache = apt.cache.Cache() pkg = cache[pkg_name] if pkg.is_installed: cprint(f"{pkg_name} already installed", 'green') else: output = docker_install() except KeyError: output = docker_install()
def add_arguments(self, parser, cli_name):
    """Register positional arguments: keystore root and key name."""
    root = parser.add_argument('ROOT', help='root path of keystore')
    root.completer = DirectoriesCompleter()
    parser.add_argument('NAME', help='key name, aka ROS enclave name')
def add_arguments(subparser):
    """Attach contest-related arguments, each wired to a shell completer."""
    contest = subparser.add_argument("contest_id", type=str)
    contest.completer = get_recent_contest_ids
    alphabets = subparser.add_argument("alphabets", type=str, nargs="*",
                                       default=[])
    alphabets.completer = get_alphabets_in_contest
    contests_dir = subparser.add_argument("--contests-dir", type=Path,
                                          default=Path("./contests"))
    contests_dir.completer = DirectoriesCompleter()
    template = subparser.add_argument("--template-file", type=Path)
    template.completer = FilesCompleter()
def load_arguments(self, _):
    """Declare sentinel CLI argument types and attach them per command."""

    def _file_arg(options, help_text, **extra):
        # Local shorthand for file-/directory-valued argument declarations.
        return CLIArgumentType(options_list=options, type=file_type,
                               help=help_text, **extra)

    resources_directory_type = _file_arg(
        ["--resources-directory", "-d"],
        "Directory which contains the resources",
        completer=DirectoriesCompleter(),
    )
    resource_file_type = _file_arg(
        ["--resource-file", "-f"],
        "Resource file path",
        completer=FilesCompleter(allowednames=["json", "yaml"]),
    )
    resource_schema_type = _file_arg(
        ["--resource-schema", "-s"],
        "Resource schema file path",
        completer=FilesCompleter(allowednames=["json", "yaml"],
                                 directories=False),
    )
    resource_type = CLIArgumentType(
        options_list=["--resource-type", "-t"],
        choices=["scheduled_detection", "microsoft_security_detection",
                 "data_source"],
        help="Resource type",
    )
    aux_subscription_type = CLIArgumentType(
        options_list=["--aux-subscriptions"],
        help="Auxiliary subscriptions for multi-tenant resource deployment such as cross tenant Logic App linking",
    )

    with self.argument_context("sentinel") as c:
        c.argument("workspace_name",
                   options_list=["--workspace-name", "-n"],
                   help="Name of the Sentinel Workspace")

    with self.argument_context("sentinel create") as c:
        c.argument("aux_subscriptions", aux_subscription_type)
        c.argument("resource_type", resource_type)
        c.argument("resources_directory", resources_directory_type)
        c.argument("resource_file", resource_file_type)
        c.argument("enable_validation",
                   options_list=["--enable-validation"],
                   arg_type=get_three_state_flag(),
                   help="Enable/Disable resource validation before deploying it")
        c.argument("resource_schema", resource_schema_type)

    with self.argument_context("sentinel validate") as c:
        c.argument("resource_type", resource_type)
        c.argument("resources_directory", resources_directory_type)
        c.argument("resource_file", resource_file_type)
        c.argument("resource_schema", resource_schema_type)

    with self.argument_context("sentinel generate") as c:
        c.argument("resource_type", resource_type)
        c.argument("resources_directory", resources_directory_type)
        c.argument("skip_interactive",
                   options_list=["--skip-interactive"],
                   arg_type=get_three_state_flag(),
                   help="Enable/Disable interactive resource generation")
        # TODO: Add all detection configurations as arguments here
        c.argument("name",
                   options_list=["--name", "-n"],
                   help="Name of your resource(alphanumeric without spaces)")
        c.argument("create_directory",
                   options_list=["--create-dir"],
                   arg_type=get_three_state_flag(),
                   help="Enable/Disable creating new directory for the resource")
        c.argument("with_documentation",
                   options_list=["--with-documentation", "--doc"],
                   arg_type=get_three_state_flag(),
                   help="Enable/Disable resource documentation")