Example #1
def cli():
    common_args = ArgumentParser(add_help=False, description=__doc__)
    common_args.add_argument('--profile', help='AWS client profile')
    common_args.add_argument('--region', help='AWS region')
    common_args.add_argument('-c', '--encryption-context',
                             help='key=val,key=val')

    ap = ArgumentParser()
    sp = ap.add_subparsers()

    pack_ap = sp.add_parser('pack', help='Store KMS-encrypted data',
                            parents=(common_args,))
    pack_ap.add_argument('key',
                         help='The master key to use. Pass a key ID or '
                         'alias/<alias-name>.')
    pack_ap.add_argument('source_path')
    pack_ap.set_defaults(func=pack)

    unpack_ap = sp.add_parser('unpack', help='Retrieve KMS-encrypted data',
                              parents=(common_args,))
    unpack_ap.add_argument('source_path')
    unpack_ap.set_defaults(func=unpack)

    args = ap.parse_args()
    if not os.path.exists(args.source_path):
        ap.exit(1, 'File not found: {}\n'.format(args.source_path))
    if args.source_path.endswith("/"):
        args.source_path = args.source_path[:-1]

    args.func(args)
Example #2
def main():
    """main operation of script."""
    parser = ArgumentParser(usage='%(prog)s [options] ecommonsMetadata.csv')
    parser.add_argument("-d",
                        "--date",
                        dest="date",
                        help="Date on or after that an ETD was published for \
                        creating DOIs. Put in format YYYY-MM")
    parser.add_argument("-w",
                        "--withdrawn",
                        dest="withdrawn",
                        help="Withdrawn item file with item ids")
    parser.add_argument("datafile", help="eCommons metadata worked from.")

    args = parser.parse_args()

    if len(sys.argv) == 1:
        parser.print_help()
        parser.exit()

    if not args.withdrawn:
        workingdir = csvparse(args.datafile, args.date)
        doiparse(workingdir)
        print('ANVL files available in: ' + workingdir)
    else:
        workingdir = csvparse(args.datafile, args.date, False, args.withdrawn)
        doiparse(workingdir)
        print('ANVL files available in: ' + workingdir)
Example #3
def main():
    parser = ArgumentParser(description=usage)
    parser.add_argument('--ref_len', dest='ref_len', type=int, help='length in bp of the sequenced genome to calculate --split parameter for savage')
    parser.add_argument('-s', dest='input_s', type=str, help='path to input fastq containing single-end reads')
    parser.add_argument('-p1', dest='input_p1', type=str, help='path to input fastq containing paired-end reads (/1)')
    parser.add_argument('-p2', dest='input_p2', type=str, help='path to input fastq containing paired-end reads (/2)')

    if len(sys.argv[1:])==0:
#        print usage
#        parser.print_usage()
        parser.print_help()
        parser.exit()
    args = parser.parse_args()

    # analyze single-end input reads
    if args.input_s:
        [s_total_len] = analyze_fastq(args.input_s)
    else:
        s_total_len = 0

    # analyze paired-end input reads
    if args.input_p1:
        [p1_total_len] = analyze_fastq(args.input_p1)
        [p2_total_len] = analyze_fastq(args.input_p2)
        p_total_len = p1_total_len + p2_total_len
    else:
        p_total_len = 0

    total_seq_len = s_total_len + p_total_len
    assert total_seq_len > 0, "Total length of input sequences is zero."

    split_into = calculate_split(total_seq_len, args.ref_len)
    filename = args.input_p1
    print(split_into)
Example #4
    def parse(self, args) -> Namespace:
        parser = ArgumentParser('Estimark')
        subparsers = parser.add_subparsers(dest='action')

        # Initialize
        init_parser = subparsers.add_parser('init')
        init_parser.set_defaults(func=self.init)

        # Estimate
        estimate_parser = subparsers.add_parser('estimate')
        estimate_parser.add_argument('-s', '--states', default='')
        estimate_parser.set_defaults(func=self.estimate)

        # Show
        show_parser = subparsers.add_parser('show')
        show_parser.add_argument('-m', '--model', default='task')
        show_parser.set_defaults(func=self.show)

        # Plot
        plot_parser = subparsers.add_parser('plot')
        plot_parser.add_argument('-t', '--type', default='gantt')
        plot_parser.add_argument('-c', '--context', default='{}',
                                 help='JSON plot parameters context.')
        plot_parser.set_defaults(func=self.plot)

        # Version
        version_parser = subparsers.add_parser('version')
        version_parser.set_defaults(func=self.version)

        if len(args) == 0:
            parser.print_help()
            parser.exit()

        return parser.parse_args(args)
Example #5
def main():
    parser = ArgumentParser(description='Meeting Room Genie CLI')
    parser.add_argument('service', choices=SERVICES.keys())
    parser.add_argument('cmd', choices=COMMANDS)
    parser.add_argument('--room-id', '-r')

    args = parser.parse_args()

    service = SERVICES[args.service]

    if args.cmd == CMD_ROOMS:
        print_rooms(service)
    elif args.cmd == CMD_RESERVATIONS:
        print_reservations(service, args.room_id)
    elif args.cmd == CMD_STATUS:
        if args.room_id:
            print_status(service, args.room_id)
        else:
            print_all_status(service)
    elif args.cmd == CMD_MAKE_RESERVATION:
        if args.room_id:
            make_reservation(service, args.room_id)
        else:
            parser.exit(1, 'room id is required to make a reservation\n')
    elif args.cmd == CMD_CANCEL_RESERVATION:
        if args.room_id:
            cancel_reservation(service, args.room_id)
        else:
            parser.exit(1, 'room id is required to cancel a reservation\n')
Example #6
        def __call__(
            self,
            parser: ArgumentParser,
            namespace: Namespace,
            values: Union[Text, Sequence[Any], None],
            option_string: Optional[Text] = None,
        ) -> None:
            import platform

            def print_table(version_rows: Sequence[Tuple[str, str]]) -> None:
                row_format = "{:12} | {}"
                print(row_format.format("module", "version"))
                print(row_format.format("------", "-------"))
                for module, version in version_rows:
                    # Some version strings have multiple lines and need to be squashed
                    print(row_format.format(module, version.replace("\n", " ")))

            version_rows = [
                ("platform", platform.platform()),
                ("Python", sys.version),
                ("dicognito", dicognito.__version__),
                ("pydicom", pydicom.__version__),
            ]

            print_table(version_rows)
            parser.exit()
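The `__call__` above belongs to a custom argparse action that prints a version table and exits. A minimal, self-contained sketch of how such an action could be registered on a parser; the `_VersionTableAction` class name and `--versions` flag are assumptions for illustration, not part of the original project:

import argparse

class _VersionTableAction(argparse.Action):
    """Hypothetical flag-style action standing in for the __call__ shown above."""

    def __init__(self, option_strings, dest, **kwargs):
        # nargs=0 makes the option behave like --help/--version: no value is consumed.
        super().__init__(option_strings, dest, nargs=0, **kwargs)

    def __call__(self, parser, namespace, values, option_string=None):
        print("module       | version")  # stand-in for the real table body
        parser.exit()

parser = argparse.ArgumentParser(prog="example")
parser.add_argument("--versions", action=_VersionTableAction,
                    help="print module versions and exit")
parser.parse_args(["--versions"])  # prints the table, then raises SystemExit(0)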
Example #7
def main() -> None:
    parser = ArgumentParser(prog="python main.py",
                            description="Potential field method runner")
    parser.add_argument(
        "--space",
        dest="space",
        help=("Path to the JSON file describing the configuration space, "
              "default: %(default)r"),
        default=DIR / "data" / "normally.json",
    )
    parser.add_argument(
        "--solution",
        dest="solution",
        help=("The path to the file where the solution will be written, "
              "default: %(default)r"),
        default=DIR / "solution.json",
    )
    args = parser.parse_args()

    input_path = Path(args.space).resolve()
    output_path = Path(args.solution).resolve()

    space = Space.form_file(input_path)
    algorithm = PotentialFieldMethod()

    plan = algorithm.solve(space)
    plan.dump(output_path)

    parser.exit()
Example #8
def main():
    """main operation of script."""
    parser = ArgumentParser(usage='%(prog)s [options] ecommonsMetadata.csv')
    parser.add_argument("-d",
                        "--date",
                        dest="date",
                        help="Date on or after that an ETD was published for \
                        creating DOIs. Put in format YYYY-MM")
    parser.add_argument("-u",
                        "--username",
                        dest="username",
                        help="EZID creation username")
    parser.add_argument("-p",
                        "--password",
                        dest="password",
                        help="EZID creation password.")
    parser.add_argument("-s",
                        "--shoulder",
                        dest="shoulder",
                        default="10.5072/FK2",
                        help="DOI shoulder to use. Format 10.5072/FK2.")
    parser.add_argument("datafile", help="eCommons metadata worked from.")

    args = parser.parse_args()

    if len(sys.argv) == 1:
        parser.print_help()
        parser.exit()

    workingdir = csvparse(args.datafile, args.date)
    output = doiparse(workingdir)
    mintdoi(output, workingdir, args)
Example #9
    def __call__(self,
                 parser: ArgumentParser,
                 namespace: Namespace,
                 values: List[Any],  # ?
                 option_string: str = None) -> None:
        # 1. Print top-level help
        parser.print_help()
        sep = "=" * 79  # "-" less helpful when using grep for "--option"!

        # 2. Print help for all subparsers
        # noinspection PyProtectedMember
        subparsers_actions = [
            action for action in parser._actions
            if isinstance(action, _SubParsersAction)
        ]  # type: List[_SubParsersAction]
        messages = [""]  # type: List[str]
        for subparsers_action in subparsers_actions:
            for choice, subparser in subparsers_action.choices.items():
                messages.append(sep)
                messages.append(f"Help for command '{choice}'")
                messages.append(sep)
                messages.append(subparser.format_help())
        print("\n".join(messages))

        parser.exit()
Example #10
def main():
    parser = ArgumentParser(prog='waitforem',
                            formatter_class=lambda prog: HelpFormatter(prog, max_help_position=30))
    parser.add_argument('command', nargs=REMAINDER, help='Command to execute after waiting is done')
    parser.add_argument('-t', '--timeout', metavar='SECS', type=int, default=10,
                        help='Max number of seconds to wait before aborting with non-zero exit '
                             'code. Default: 10')
    parser.add_argument('-s', '--socket', action='append', metavar='H:P', type=host_pairs,
                        help='Network socket to wait for, specified as HOST:PORT')

    if len(sys.argv[1:]) == 0:
        parser.print_help()
        parser.exit()

    args = parser.parse_args()

    if args.socket:
        host = port = None
        try:
            with timeout(args.timeout):
                for host, port in args.socket:
                    wait_for_server_socket(host, port)
            if len(args.command) > 0:
                do_exec(args.command)
        except WaitTimeout:
            sys.stderr.write("Timed out after {} seconds, waiting for {}:{}\n".format(
                args.timeout, host, port))
Example #11
def main(argv):
    arg_parser = ArgumentParser(description='aiohttp.web Application server', prog='aiohttp.web')
    arg_parser.add_argument('entry_func', help="Callable returning the `aiohttp.web.Application` instance to run. Should be specified in the 'module:function' syntax.", metavar='entry-func')
    arg_parser.add_argument('-H', '--hostname', help='TCP/IP hostname to serve on (default: %(default)r)', default='localhost')
    arg_parser.add_argument('-P', '--port', help='TCP/IP port to serve on (default: %(default)r)', type=int, default='8080')
    arg_parser.add_argument('-U', '--path', help='Unix file system path to serve on. Specifying a path will cause hostname and port arguments to be ignored.')
    args, extra_argv = arg_parser.parse_known_args(argv)
    mod_str, _, func_str = args.entry_func.partition(':')
    if not func_str or not mod_str:
        arg_parser.error("'entry-func' not in 'module:function' syntax")
    if mod_str.startswith('.'):
        arg_parser.error('relative module names not supported')
    try:
        module = import_module(mod_str)
    except ImportError as ex:
        arg_parser.error('unable to import %s: %s' % (mod_str, ex))
    try:
        func = getattr(module, func_str)
    except AttributeError:
        arg_parser.error('module %r has no attribute %r' % (mod_str, func_str))
    if args.path is not None and not hasattr(socket, 'AF_UNIX'):
        arg_parser.error('file system paths not supported by your operating environment')
    app = func(extra_argv)
    run_app(app, host=args.hostname, port=args.port, path=args.path)
    arg_parser.exit(message='Stopped\n')
Example #12
    def parse(self, args) -> Namespace:
        parser = ArgumentParser('Estimark')
        subparsers = parser.add_subparsers(dest='action')

        # Estimate
        estimate_parser = subparsers.add_parser('estimate')
        estimate_parser.add_argument('-s', '--state')
        estimate_parser.set_defaults(func=self.estimate)

        # Show
        show_parser = subparsers.add_parser('show')
        show_parser.add_argument('-t', '--tasks', action='store_true')
        show_parser.add_argument('-l', '--links', action='store_true')
        show_parser.add_argument('-c', '--classifiers', action='store_true')
        show_parser.add_argument('-s', '--schedules', action='store_true')
        show_parser.add_argument('-o', '--slots', action='store_true')
        show_parser.set_defaults(func=self.show)

        # Plot
        plot_parser = subparsers.add_parser('plot')
        plot_parser.set_defaults(func=self.plot)

        # Version
        version_parser = subparsers.add_parser('version')
        version_parser.set_defaults(func=self.version)

        if len(args) == 0:
            parser.print_help()
            parser.exit()

        return parser.parse_args(args)
Example #13
def send_command():
    parser = ArgumentParser("Send command to the AC unit")
    _add_ac_arguments(parser)
    parser.add_argument(
        "--ac-mode",
        default=None,
        help="set the AC mode",
        choices=["STBY", "COOL", "FAN", "DRY", "HEAT", "AUTO"],
    )  # TODO: take from constants module
    parser.add_argument(
        "--fan-speed",
        default=None,
        help="set the fan speed",
        choices=["LOW", "MED", "HIGH", "AUTO"],
    )
    parser.add_argument("--temperature",
                        default=None,
                        type=int,
                        help="set the target temperature")
    args = parser.parse_args()
    oper_kwargs = {}
    if args.ac_mode is not None:
        oper_kwargs["ac_mode"] = args.ac_mode
    if args.fan_speed is not None:
        oper_kwargs["fan_speed"] = args.fan_speed
    if args.temperature is not None:
        oper_kwargs["temperature"] = args.temperature
    if not oper_kwargs:
        parser.exit(message="no change was requested, aborting")
    ac = AC(args.imei, args.token, args.ac_id)
    ac.renew_sid()
    ac.modify_oper(**oper_kwargs)
Example #14
def main():
    """ The main entrypoint for the Opentrons robot API server stack.

    This function
    - creates and starts the server for both the RPC routes
      handled by :py:mod:`opentrons.server.rpc` and the HTTP routes handled
      by :py:mod:`opentrons.server.http`
    - initializes the hardware interaction handled by either
      :py:mod:`opentrons.legacy_api` or :py:mod:`opentrons.hardware_control`

    This function does not return until the server is brought down.
    """

    arg_parser = ArgumentParser(description="Opentrons robot software",
                                parents=[build_arg_parser()])
    arg_parser.add_argument('--hardware-server',
                            action='store_true',
                            help='Run a jsonrpc server allowing rpc to the'
                            ' hardware controller. Only works on buildroot '
                            'because extra dependencies are required.')
    arg_parser.add_argument('--hardware-server-socket',
                            action='store',
                            default='/var/run/opentrons-hardware.sock',
                            help='Override for the hardware server socket')
    args = arg_parser.parse_args()

    if ff.use_protocol_api_v2():
        checked_hardware = adapters.SingletonAdapter(asyncio.get_event_loop())
    else:
        checked_hardware = opentrons.hardware
    run(checked_hardware, **vars(args))
    arg_parser.exit(message="Stopped\n")
Example #15
def main():
    """main operation of script."""
    parser = ArgumentParser(usage='%(prog)s [options] ecommonsMetadata.csv')
    parser.add_argument("-u",
                        "--username",
                        dest="username",
                        help="EZID creation username")
    parser.add_argument("-p",
                        "--password",
                        dest="password",
                        help="EZID creation password.")
    parser.add_argument("-s",
                        "--shoulder",
                        dest="shoulder",
                        default="10.5072/FK2",
                        help="DOI shoulder to use. Format 10.5072/FK2.")
    parser.add_argument("workingdir",
                        help="Working directory containing ready \
                        ANVL files.")

    args = parser.parse_args()

    if len(sys.argv) == 1:
        parser.print_help()
        parser.exit()

    with open(args.workingdir + 'EC.csv', 'r') as ECdata:
        reader = csv.DictReader(ECdata)
        data = [x for x in reader]
    mintdoi(data, args.workingdir, args)
Example #16
def main():
	"""Command line entry point."""

	init_globexc()

	parser = ArgumentParser()
	parser.add_argument('--no-error',
						action='store_false',
						help='Do not throw an error')
	parser.add_argument('--no-init-log',
						action='store_false',
						help='Do not initialise logging system')
	parser.add_argument('--basic-log',
						action='store_true',
						help='Use basic log config even if themelog available')

	args = parser.parse_args()

	if args.no_init_log is not False:
		if args.basic_log is not True:
			try:
				import themelog
				themelog.init_log()

			except ImportError:
				logging.basicConfig()

		else:
			logging.basicConfig()

	if args.no_error is not False:
		error()

	parser.exit('The end')
Example #17
class Shell:
    def __init__(self, config: Config, injector: Injectark) -> None:
        self.config = config
        self.injector = injector
        self.parser = ArgumentParser('Integrark')

    async def run(self, argv: List[str]):
        args = await self.parse(argv)
        await args.func(args)

    async def parse(self, argv: List[str]) -> Namespace:
        subparsers = self.parser.add_subparsers()

        # Serve
        serve_parser = subparsers.add_parser('serve',
                                             help='Start HTTP server.')
        serve_parser.add_argument('-p', '--port')
        serve_parser.set_defaults(func=self.serve)

        if len(argv) == 0:
            self.parser.print_help()
            self.parser.exit()

        return self.parser.parse_args(argv)

    async def serve(self, args: Namespace) -> None:
        logger.info('SERVE')
        port = args.port or self.config['port']
        app = create_app(self.config, self.injector)
        await run_app(app, port)
Example #18
def run():
    defaultmech = "%s/mapping/cb05cl_ae6_aq.csv" % os.path.dirname(__file__)
    parser = ArgumentParser(description = "Usage: %prog [-tq] \n"+(" "*16)+" [-i <init name>] [-f <final name>] <yamlfile>")
    parser.add_argument("-t", "--template", dest = "template", action = "store_true", default = False, help="Output template on standard out (configurable with -m and -c")

    parser.add_argument("-v", "--verbose", dest = "verbose", action = "count", default = 0, help = "extra output for debugging")
    
    paths = glob(os.path.join(os.path.dirname(__file__), 'mapping', '*_*.csv'))
    mechanisms = ', '.join(['_'.join(path.split('/')[-1].split('_')[:])[:-4] for path in paths])
    parser.add_argument("-c", "--configuration", dest="configuration", default = None,
                        help = "Chemical mechanisms: %s (for use with -t)" % mechanisms)
    parser.add_argument('configfile')
    options = parser.parse_args()
    args = [options.configfile]
    if options.template:
        from template import template
        if options.configuration is None:
            warn("Using default mechanism: %s" % defaultmech)
            options.configuration = defaultmech
        else:
            if os.path.exists(options.configuration):
                pass
            else:
                options.configuration = "%s/mapping/%s.csv" % (os.path.dirname(__file__), options.configuration)
                if not os.path.exists(options.configuration):
                    raise IOError('Cannot find file %s; must be either you own file or in %s' % (options.configuration, mechanisms))
        print(template(options.configuration))
        parser.exit()
    if len(args) < 1:
        parser.error("Requires a yaml file as an argument.  For a template use the -t option.  The template will be output to the stdout.")
    else:
        yamlpath = args[0]
        from load import loader
        from process import process
        outf = process(config=loader(yamlpath), verbose=options.verbose)
Example #19
def main(argv: List[str]) -> None:
    arg_parser = ArgumentParser(
        description="aiohttp.web Application server", prog="aiohttp.web"
    )
    arg_parser.add_argument(
        "entry_func",
        help=(
            "Callable returning the `aiohttp.web.Application` instance to "
            "run. Should be specified in the 'module:function' syntax."
        ),
        metavar="entry-func",
    )
    arg_parser.add_argument(
        "-H",
        "--hostname",
        help="TCP/IP hostname to serve on (default: %(default)r)",
        default="localhost",
    )
    arg_parser.add_argument(
        "-P",
        "--port",
        help="TCP/IP port to serve on (default: %(default)r)",
        type=int,
        default="8080",
    )
    arg_parser.add_argument(
        "-U",
        "--path",
        help="Unix file system path to serve on. Specifying a path will cause "
        "hostname and port arguments to be ignored.",
    )
    args, extra_argv = arg_parser.parse_known_args(argv)

    # Import logic
    mod_str, _, func_str = args.entry_func.partition(":")
    if not func_str or not mod_str:
        arg_parser.error("'entry-func' not in 'module:function' syntax")
    if mod_str.startswith("."):
        arg_parser.error("relative module names not supported")
    try:
        module = import_module(mod_str)
    except ImportError as ex:
        arg_parser.error(f"unable to import {mod_str}: {ex}")
    try:
        func = getattr(module, func_str)
    except AttributeError:
        arg_parser.error(f"module {mod_str!r} has no attribute {func_str!r}")

    # Compatibility logic
    if args.path is not None and not hasattr(socket, "AF_UNIX"):
        arg_parser.error(
            "file system paths not supported by your operating" " environment"
        )

    logging.basicConfig(level=logging.DEBUG)

    app = func(extra_argv)
    run_app(app, host=args.hostname, port=args.port, path=args.path)
    arg_parser.exit(message="Stopped\n")
Example #20
def main():
    global _logger
    parser = ArgumentParser(usage='%(prog)s [options]')
    parser.add_argument('-c', '--config',
                        dest='config_file', required=True,
                        metavar='CONFIG_FILE', help='The full path to the config file to open')
    parser.add_argument('-l', '--log-config',
                        default='logger.conf', dest='log_config_file',
                        metavar='LOG_CONFIG_FILE', help='The full path to the log config file to open')
    parser.add_argument('--start-frame-time', default=-1, type=int, dest='start_frame_pos',
                        help='Start frame time in seconds')
    parser.add_argument('--end-frame-time', default=-1, type=int, dest='end_frame_pos',
                        help='End frame time in seconds')
    parser.add_argument('--smooth-filter-size', default=3, type=int, dest='gaussian_filter_size',
                        help='Gaussian filter kernel size')
    parser.add_argument('--smooth-filter-sigma', default=0, type=int, dest='gaussian_filter_sigma',
                        help='Gaussian filter sigma')
    parser.add_argument('--nthreads', default=1, type=int, dest='nthreads')
    parser.add_argument('--nprocesses', default=1, type=int, dest='nprocesses',
                        help='Number of processes to run in parallel')

    args = parser.parse_args()

    # setup logger
    logging.config.fileConfig(args.log_config_file)
    _logger = logging.getLogger('tracker')

    if args.config_file is None:
        _logger.warning('Missing config file')
        parser.exit(1, 'Missing config file\n')

    # load config file
    config, errors = load_config(args.config_file)
    errors |= set(config.validate())

    if len(errors) == 0:
        if args.nprocesses > 1:
            source = MovieFile(config.get_source(),
                                     start_msecs=args.start_frame_pos * 1000,
                                     end_msecs=args.end_frame_pos * 1000,
                                     resolution=config.get_image_size())
            if not source.is_opened():
                _logger.error('Error opening %s' % config.get_source())
                return
            start_frame_pos = int(source.get_start_time_in_seconds())
            end_frame_pos = int(source.get_end_time_in_seconds())
            frame_interval = int((end_frame_pos - start_frame_pos) / args.nprocesses)
            tracker_args = [(config, s * 1000, (s + frame_interval) * 1000,
                             args.gaussian_filter_size, args.gaussian_filter_sigma,
                             args.nthreads, _get_run_interval(s, s + frame_interval)[1]) for s in
                            range(start_frame_pos, end_frame_pos, frame_interval)
                            ]
            with Pool(args.nprocesses) as p:
                p.starmap(_run_tracker, tracker_args)
        else:
            _run_tracker(config, args.start_frame_pos * 1000, args.end_frame_pos * 1000,
                         args.gaussian_filter_size, args.gaussian_filter_sigma, args.nthreads)
    else:
        _logger.error('Config load error: %r' % errors)
Example #21
def parse_args_or_environment(
    parser: argparse.ArgumentParser,
    env_variable_names: Dict[str, str],
    argv: Optional[Sequence[str]] = None,
) -> argparse.Namespace:
    """Parse CLI arguments with some defaults specified in the environment.

    Sometimes we would like to specify the default arguments for some CLI
    parameters in the environment. This can save typing out long parameters
    in the command line. If present, the ".env" file will be loaded, and the
    order of precedence is the following

        1. Command line arguments
        2. The .env file.
        3. Environment variables

    Parameters
    ----------
    parser
        An instance of `argparse.ArgumentParser`.
    env_variable_names
        The parameter names that should be looked up in the environment. The
        values of this mapping are the names as they appear in the environment,
        the keys are the names under which the values will be saved and
        returned.
    argv
        An optional iterable of command line arguments. It's used in the
        `parser.parse_args(argv)` call and is useful for testing.

    Returns
    -------
    args : argparse.Namespace
        A map of parsed argument names to their values.
    """
    # Parse CLI arguments
    cli_args = vars(parser.parse_args(args=argv))

    # Parse environment
    load_dotenv(override=True)
    environment_args = {}
    for arg_name, value_name in env_variable_names.items():
        value = os.environ.get(value_name)
        if value is not None:
            environment_args[arg_name] = value

    # Combine CLI and environment variables
    args = collections.ChainMap(cli_args, environment_args)

    # Check if all arguments were supplied
    for arg_name in env_variable_names:
        if arg_name not in args:
            parser.print_usage()
            parser.exit(
                status=1,
                message=("The following arguments are required: "
                         f"--{arg_name.replace('_', '-')}\n"),
            )

    return argparse.Namespace(**args)
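A short usage sketch for `parse_args_or_environment`, assuming the function above is in scope; the `--api-key` option and `API_KEY` variable are hypothetical names used only for illustration:

import argparse

parser = argparse.ArgumentParser()
# default=argparse.SUPPRESS keeps an omitted option out of the parsed namespace,
# so the environment (or .env) value can fill it in instead of an explicit None.
parser.add_argument("--api-key", default=argparse.SUPPRESS)

args = parse_args_or_environment(
    parser,
    env_variable_names={"api_key": "API_KEY"},
    argv=["--api-key", "secret"],  # use argv=[] to exercise the environment fallback
)
print(args.api_key)  # "secret"; falls back to API_KEY when the flag is omitted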
Example #22
 def __call__(self, parser: ArgumentParser, namespace: Namespace, values: list, option_string: str=None):
     fn = values
     if fn:
         if os.path.isfile(fn):
             parsed_scen_args["train_insts"] = in_reader.read_instance_file(fn)
         else:
             parser.exit(1, "Could not find instance file: {}".format(fn))
     setattr(namespace, self.dest, values)
Example #23
 def __call__(self,
              parser: argparse.ArgumentParser,
              namespace: argparse.Namespace,
              values: typing.Any,
              option_string: typing.Optional[str] = None) -> None:
     from nunavut.version import __version__
     parser._print_message(__version__, sys.stdout)
     parser.exit()
Example #24
def args_parse(arg_def):
    """Passing arguments from command line """
    # create parameter process objects
    parser = ArgumentParser(description=arg_def['description'],
                            prog=arg_def['prog'],
                            epilog=arg_def['epilog'],
                            formatter_class=CustomFormatter)

    for arg_ in arg_def['args']:
        if "choices" in arg_.keys():
            parser.add_argument("-%s" % arg_['short'],
                                "--%s" % arg_['para'],
                                help="%s" % arg_['help'],
                                choices=arg_['choices'],
                                default=arg_['choices'][0])
        elif 'default' in arg_.keys():
            parser.add_argument("-%s" % arg_['short'],
                                "--%s" % arg_['para'],
                                default="%s" % arg_['default'],
                                help="%s" % arg_['help'])
        elif 'type' in arg_.keys():
            if arg_['type'] == "file":
                parser.add_argument("-%s" % arg_['short'],
                                    "--%s" % arg_['para'],
                                    help="%s" % arg_['help'],
                                    type=file)  # pylint: disable=E0602
        else:
            if "require" in arg_.keys():
                parser.add_argument("-%s" % arg_['short'],
                                    "--%s" % arg_['para'],
                                    help="%s" % arg_['help'])
            else:
                parser.add_argument("-%s" % arg_['short'],
                                    "--%s" % arg_['para'],
                                    help="%s" % arg_['help'],
                                    action='store_true')

    # if there is not parameter passed
    if len(sys.argv) == 1:
        if arg_def["gui_define"] is None:
            parser.print_help()
            parser.exit()
        else:
            if os.path.exists(arg_def['gui_define']['defaultconfig']):
                with open(arg_def['gui_define']['defaultconfig'], 'r') as \
                filehandleobj:
                    filecontent = filehandleobj.read()
                args_ = args_parseconfigfile(parser, filecontent)
                return args_
    # if there are parameter passed from command line
    else:
        args_ = parser.parse_args(sys.argv[1:])
        try:
            filecontent = args_.configfile.read()
        except:  # pylint: disable=W9702
            return args_
        args_ = args_parseconfigfile(parser, filecontent)
        return args_
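For reference, a hypothetical `arg_def` mapping that `args_parse` above would accept, inferred from the keys the function looks up (`description`, `prog`, `epilog`, `gui_define`, and per-argument `short`/`para`/`help` plus optional `choices`, `default`, or `require`); all names and values here are illustrative only:

arg_def = {
    "description": "Example tool driven by a declarative argument table.",
    "prog": "exampletool",
    "epilog": "See the project documentation for details.",
    "gui_define": None,  # or {"defaultconfig": "default.cfg"} to load a config file when run without arguments
    "args": [
        # "choices" -> add_argument(..., choices=..., default=choices[0])
        {"short": "m", "para": "mode", "help": "operating mode", "choices": ["fast", "safe"]},
        # "default" -> add_argument(..., default=...)
        {"short": "n", "para": "count", "help": "number of repetitions", "default": "1"},
        # "require" -> plain string option; without it the argument becomes a store_true flag
        {"short": "o", "para": "output", "help": "output file", "require": True},
        {"short": "v", "para": "verbose", "help": "enable verbose output"},
    ],
}

# args = args_parse(arg_def)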
Example #25
 def __call__(self, parser: ArgumentParser, namespace: Namespace, values: list, option_string: str=None):
     fn = values
     if fn:
         if os.path.isfile(fn):
             parsed_scen_args['cs'] = in_reader.read_pcs_file(fn)
             parsed_scen_args["cs"].seed(42)
         else:
             parser.exit(1, "Could not find pcs file: {}".format(fn))
     setattr(namespace, self.dest, values)
Example #26
def main() -> None:
    help_factory = (lambda prog: RawDescriptionHelpFormatter(prog=prog, max_help_position=32))
    parser = ArgumentParser(
        prog='incremental_checker',
        description=__doc__,
        formatter_class=help_factory)

    parser.add_argument("range_type", metavar="START_TYPE", choices=["last", "commit"],
                        help="must be one of 'last' or 'commit'")
    parser.add_argument("range_start", metavar="COMMIT_ID_OR_NUMBER",
                        help="the commit id to start from, or the number of "
                        "commits to move back (see above)")
    parser.add_argument("-r", "--repo_url", default=MYPY_REPO_URL, metavar="URL",
                        help="the repo to clone and run tests on")
    parser.add_argument("-f", "--file-path", default=MYPY_TARGET_FILE, metavar="FILE",
                        help="the name of the file or directory to typecheck")
    parser.add_argument("--cache-path", default=CACHE_PATH, metavar="DIR",
                        help="sets a custom location to store cache data")
    parser.add_argument("--branch", default=None, metavar="NAME",
                        help="check out and test a custom branch"
                        "uses the default if not specified")

    if len(sys.argv[1:]) == 0:
        parser.print_help()
        parser.exit()

    params = parser.parse_args(sys.argv[1:])

    # Make all paths absolute so we avoid having to worry about being in the right folder

    # The path to this specific script (incremental_checker.py).
    script_path = os.path.abspath(sys.argv[0])

    # The path to the mypy repo.
    mypy_path = os.path.abspath(os.path.dirname(os.path.dirname(script_path)))

    # The folder the cloned repo will reside in.
    temp_repo_path = os.path.abspath(os.path.join(mypy_path, "tmp_repo"))

    # The particular file or package to typecheck inside the repo.
    target_file_path = os.path.abspath(os.path.join(temp_repo_path, params.file_path))

    # The path to where the incremental checker cache data is stored.
    incremental_cache_path = os.path.abspath(params.cache_path)

    # The path to store the mypy incremental mode cache data
    mypy_cache_path = os.path.abspath(os.path.join(mypy_path, "misc", ".mypy_cache"))

    print("Assuming mypy is located at {0}".format(mypy_path))
    print("Temp repo will be cloned at {0}".format(temp_repo_path))
    print("Testing file/dir located at {0}".format(target_file_path))
    print("Using cache data located at {0}".format(incremental_cache_path))
    print()

    test_repo(params.repo_url, temp_repo_path, target_file_path,
              mypy_path, incremental_cache_path, mypy_cache_path,
              params.range_type, params.range_start, params.branch)
Example #28
 def __call__(self,
              parser: argparse.ArgumentParser,
              namespace: argparse.Namespace,
              values: Union[str, Sequence[Any], None],
              option_string: Optional[str] = None) -> NoReturn:
     formatter = parser._get_formatter()
     formatter.add_text(self.version)
     parser._print_message(formatter.format_help(), self.stdout)
     parser.exit()
Example #29
    def exit_cli(self,
                 arg_parser: argparse.ArgumentParser,
                 print_usage: bool = False) -> typing.NoReturn:
        assert isinstance(self, EncodedErrorMixin)

        if print_usage:
            arg_parser.print_usage(sys.stderr)

        arg_parser.exit(self.get_error_status(), str(self))
Example #30
 def __call__(
     self,
     parser: argparse.ArgumentParser,
     namespace: argparse.Namespace,
     values,
     option_string: Optional[str] = None,
 ) -> None:
     self.print_parameters()
     parser.exit()
Example #31
 def __call__(
     self,
     parser: ArgumentParser,
     namespace: Namespace,
     values: Union[Text, Sequence[Any], None],
     option_string: Optional[Text] = ...,
 ) -> None:
     parser.print_help()
     parser.exit()
Example #32
def main():
    parser = ArgumentParser(
        prog='plot_trainSets_stats.py',
        description='Plots PhiSpy\'s training/test sets statistics.',
        epilog=
        'Example usage:\npython3 scripts/plot_trainSets_stats.py -d PhiSpyModules/data -o PhiSpyModules/data/trainSets_stats ',
        formatter_class=RawDescriptionHelpFormatter)

    parser.add_argument('-i',
                        '--infile',
                        type=str,
                        help='Path to input GenBank file.')

    parser.add_argument(
        '-d',
        '--indir',
        type=str,
        help='Path to input directory with multiple GenBank files.')

    parser.add_argument('-s',
                        '--suffix',
                        type=str,
                        help='Suffix that will be added to input file name.')

    parser.add_argument('-o',
                        '--outdir',
                        type=str,
                        help='Path to output directory.',
                        required=True)

    if len(argv[1:]) == 0:
        parser.print_help()
        parser.exit()

    try:
        args = parser.parse_args()
    except:
        parser.exit()

    if not args.infile and not args.indir:
        print('You have to provide input data by either --infile or --indir.')
        exit(1)
    elif args.indir:
        infiles = glob(path.join(args.indir, '*.txt'))
    else:
        infiles = [args.infile]

    # Create output directory
    if not path.isdir(args.outdir): makedirs(args.outdir)

    # Process all input files
    for infile in infiles:
        plot_file_name = f'{path.basename(infile).rsplit(".", 1)[0]}.{args.suffix}.png'
        plot_file = path.join(args.outdir, plot_file_name)
        plot_stats(infile, plot_file)
        print(f'Done with plot: {plot_file}')
Example #33
def main(argv):
    arg_parser = ArgumentParser(
        description="aiohttp.web Application server",
        prog="aiohttp.web"
    )
    arg_parser.add_argument(
        "entry_func",
        help=("Callable returning the `aiohttp.web.Application` instance to "
              "run. Should be specified in the 'module:function' syntax."),
        metavar="entry-func"
    )
    arg_parser.add_argument(
        "-H", "--hostname",
        help="TCP/IP hostname to serve on (default: %(default)r)",
        default="localhost"
    )
    arg_parser.add_argument(
        "-P", "--port",
        help="TCP/IP port to serve on (default: %(default)r)",
        type=int,
        default="8080"
    )
    arg_parser.add_argument(
        "-r", "--autoreload",
        help="Autoreload on codechange (default: %(default)r)",
        dest="autoreload",
        action='store_true',
        default=False
    )
    args, extra_argv = arg_parser.parse_known_args(argv)

    # Import logic
    mod_str, _, func_str = args.entry_func.partition(":")
    if not func_str or not mod_str:
        arg_parser.error(
            "'entry-func' not in 'module:function' syntax"
        )
    if mod_str.startswith("."):
        arg_parser.error("relative module names not supported")
    try:
        module = import_module(mod_str)
    except ImportError:
        arg_parser.error("module %r not found" % mod_str)
    try:
        func = getattr(module, func_str)
    except AttributeError:
        arg_parser.error("module %r has no attribute %r" % (mod_str, func_str))

    app = func(extra_argv)
    run_app(
        app,
        host=args.hostname,
        port=args.port,
        autoreload=args.autoreload
    )
    arg_parser.exit(message="Stopped\n")
Example #34
def main():
    """Main method for performing recon on a field or entity type."""
    parser = ArgumentParser(usage='%(prog)s [options]')
    parser.add_argument("-i",
                        "--input",
                        dest="input",
                        default="data/output.json",
                        help="Original Google-sourced JSON data file.")
    parser.add_argument("-o",
                        "--output",
                        dest="output",
                        default="data/matched_output.json",
                        help="Matched JSON output data file.")
    parser.add_argument("-f",
                        "--field",
                        dest="field",
                        help="Field for reconciliation.")
    parser.add_argument("-e",
                        "--entity",
                        dest="entity",
                        help="Entity type for reconciliation. See README.md")
    args = parser.parse_args()
    if not args.field and not args.entity:
        parser.print_help()
        parser.exit()
    """Pull in Google-sourced JSON data."""
    with open(args.input, 'r') as fout:
        data = json.load(fout)
    labels = []
    """Match against field or entity type field set."""
    for record in data:
        if args.field:
            if data[record][args.field]:
                val = data[record][args.field]
                if any(delim in val for delim in s_delimiters):
                    labels = field_split(val)
                else:
                    labels = [val.strip()]
        elif args.entity == "Agent":
            for field in s_agent_fields:
                if data[record][field]:
                    val = data[record][field]
                    if any(delim in val for delim in s_delimiters):
                        labels = field_split(val)
                    else:
                        labels = [val.strip()]
        pre_queries = {}
        for label in labels:
            key = "q" + str(labels.index(label))
            pre_queries[key] = {}
            pre_queries[key]["query"] = label
            pre_queries[key]["type"] = "Names"
        queries = {}
        queries["queries"] = json.dumps(pre_queries)
        resp = requests.post("http://localhost:5000/", data=queries).json()
        print(resp["q0"])
Example #35
def main():
    print('')
    parser = ArgumentParser(
        description=
        "Removes the domain permission from all files and folders in a domain user's Google Drive"
    )
    parser.add_argument('-e',
                        '--email',
                        dest='email',
                        required=True,
                        help='email of the user')
    parser.add_argument(
        '-d',
        '--driveId',
        dest='drive_id',
        required=False,
        help='drive id if you want to apply it on a shared drive')

    if len(sys.argv) == 1:
        parser.print_help()
        parser.exit()

    args = parser.parse_args()
    EMAIL = args.email
    DRIVE_ID = args.drive_id

    print('Recursive checking all files / folders of %s ...' % (EMAIL))

    drive_service = get_drive_service(EMAIL)
    deleted_permissions = []
    page_token = None
    index = 1

    while True:
        response = get_files(drive_service, EMAIL, page_token, DRIVE_ID)
        for file in response.get('files', []):
            status = '.'
            res = get_permissions(drive_service, file.get('id'))
            permissions = res['permissions']

            for p in permissions:
                if p['type'] == "domain":
                    delete_permission(drive_service, file.get('id'), p['id'])
                    deleted_permissions.append(file)
                    status = 'd'
            print_status(index, status)
            index += 1

        page_token = response.get('nextPageToken', None)
        if page_token is None:
            break

    print('Deleted permissions of %s files / folders:' %
          len(deleted_permissions))
    for item in deleted_permissions:
        print('- %s (%s)' % (item.get('name'), item.get('id')))
Example #36
def main(argv):
    arg_parser = ArgumentParser(
        description="aiohttp.web Application server",
        prog="aiohttp.web"
    )
    arg_parser.add_argument(
        "entry_func",
        help=("Callable returning the `aiohttp.web.Application` instance to "
              "run. Should be specified in the 'module:function' syntax."),
        metavar="entry-func"
    )
    arg_parser.add_argument(
        "-H", "--hostname",
        help="TCP/IP hostname to serve on (default: %(default)r)",
        default="localhost"
    )
    arg_parser.add_argument(
        "-P", "--port",
        help="TCP/IP port to serve on (default: %(default)r)",
        type=int,
        default="8080"
    )
    arg_parser.add_argument(
        "-U", "--path",
        help="Unix file system path to serve on. Specifying a path will cause "
             "hostname and port arguments to be ignored.",
    )
    args, extra_argv = arg_parser.parse_known_args(argv)

    # Import logic
    mod_str, _, func_str = args.entry_func.partition(":")
    if not func_str or not mod_str:
        arg_parser.error(
            "'entry-func' not in 'module:function' syntax"
        )
    if mod_str.startswith("."):
        arg_parser.error("relative module names not supported")
    try:
        module = import_module(mod_str)
    except ImportError as ex:
        arg_parser.error("unable to import %s: %s" % (mod_str, ex))
    try:
        func = getattr(module, func_str)
    except AttributeError:
        arg_parser.error("module %r has no attribute %r" % (mod_str, func_str))

    # Compatibility logic
    if args.path is not None and not hasattr(socket, 'AF_UNIX'):
        arg_parser.error("file system paths not supported by your operating"
                         " environment")

    logging.basicConfig(level=logging.DEBUG)

    app = func(extra_argv)
    run_app(app, host=args.hostname, port=args.port, path=args.path)
    arg_parser.exit(message="Stopped\n")
Example #37
class Shell:
    def __init__(self, config: Config, injector: Injectark) -> None:
        self.config = config
        self.injector = injector
        self.parser = ArgumentParser('Authark')

    async def run(self, argv: List[str]):
        args = await self.parse(argv)
        await args.func(vars(args))

    async def parse(self, argv: List[str]) -> Namespace:
        subparsers = self.parser.add_subparsers()

        # Provision
        provision_parser = subparsers.add_parser('provision',
                                                 help='Provision new tenants.')
        provision_parser.add_argument('data', help='JSON encoded tenant.')
        provision_parser.set_defaults(func=self.provision)

        # Serve
        serve_parser = subparsers.add_parser('serve',
                                             help='Start HTTP server.')
        serve_parser.add_argument('-p', '--port')
        serve_parser.set_defaults(func=self.serve)

        # Migrate
        # migrate_parser = subparsers.add_parser(
        # 'migrate', help='Upgrade tenant schema version.')
        # migrate_parser.set_defaults(func=self.migrate)
        # migrate_parser.add_argument(
        # "-t", "--tenant", help="Target tenant to upgrade",
        # required=True)
        # migrate_parser.add_argument(
        # "-v", "--version", help="Migration version to upgrade",
        # default='999')

        if len(argv) == 0:
            self.parser.print_help()
            self.parser.exit()

        return self.parser.parse_args(argv)

    async def serve(self, options: Dict[str, str]) -> None:
        logger.info('SERVE')
        port = int(options.get('port') or self.config['port'])
        app = RestApplication(self.config, self.injector)
        await RestApplication.run(app, port)
        logger.info('END SERVE')

    async def provision(self, options: Dict[str, str]) -> None:
        logger.info('PROVISION')
        tenant_supplier = self.injector.resolve('TenantSupplier')
        tenant_dict = json.loads(options['data'])
        logger.info("Creating tenant:", tenant_dict)
        tenant_supplier.ensure_tenant(tenant_dict)
        logger.info('END PROVISION')
Example #38
def main():
    stats_aggregate = {
        "record_count": 0,
        "field_info": {}
    }

    parser = ArgumentParser(usage='%(prog)s [options] data_filename.xml')
    parser.add_argument("-x", "--xpath", dest="xpath",
                        help="get response of xpath expression on record")
    parser.add_argument("-i", "--id", action="store_true", dest="id",
                        default=False, help="prepend meta_id to line")
    parser.add_argument("-s", "--stats", action="store_true", dest="stats",
                        default=False, help="only print stats for repository")
    parser.add_argument("-p", "--present", action="store_true", dest="present",
                        default=False, help="if there is that value in record")
    parser.add_argument("datafile", help="the datafile you want analyzed")

    args = parser.parse_args()

    if len(sys.argv) == 1:
        parser.print_help()
        parser.exit()

    if args.xpath is None:
        args.stats = True

    s = 0
    for event, elem in etree.iterparse(args.datafile):
        if elem.tag == OAI_NS + "record":
            r = Record(elem, args)
            record_id = r.get_record_id()

            if args.stats is False and args.present is False and args.xpath is not None:
                if r.get_xpath() is not None:
                    for i in r.get_xpath():
                        if args.id:
                            print("\t".join([record_id, i]))
                        else:
                            print(i)

            if args.stats is False and args.xpath is not None and args.present is True:
                print("%s %s" % (record_id, r.has_xpath()))

            if args.stats is True:
                if (s % 1000) == 0 and s != 0:
                    print("%d records processed" % s)
                s += 1
                collect_stats(stats_aggregate, r.get_stats())
            elem.clear()

    if args.stats is True:
        stats_averages = create_stats_averages(stats_aggregate)
        pretty_print_stats(stats_averages)
Example #39
def main():
    parser = ArgumentParser(description="Help with calculating and ordering required LEGO Mindstorms EV3 spare parts.")
    commands = parser.add_subparsers(metavar='command', dest='command')
    commands.required = True

    cmd = commands.add_parser(
        'parse', help="Parse 3 inventory data files and combine them into a single data list."
                      " You can redirect the output into a text file on the command line.")
    cmd.add_argument('datafiles', nargs=3, help="3 inventory data files for the 3 LEGO sets")

    cmd = commands.add_parser(
        'missing', help="Calculate the LEGO pieces missing in the combination of the Edu"
                        " Expansion set + Home or Edu Core, that only the other (omitted)"
                        " set would have.")
    cmd.add_argument('omitted_set', choices=[SET_EV3HOME, SET_EDUCORE],
                     help="The LEGO set you did *not* buy, which you need the bricks from."
                          " 31313 = Mindstorms EV3, 45544 = Edu Core, 45560 = Edu Expansion.")
    datafile_default = os.path.join('raw-data', 'Lego Mindstorms EV3 combined list.csv')
    cmd.add_argument('--datafile', '-f', default=datafile_default,
                     help="The combined list data file. Default: {}".format(datafile_default))

    cmd = commands.add_parser(
        'order', help="Add the LEGO parts you need to the shopping bag on LEGO's customer service platform.")
    cmd.add_argument('--shop', '-s', default='en-us',
                     choices=['nl-be', 'fr-be', 'cs-cz', 'da-dk', 'de-de', 'es-es', 'fr-fr',
                              'it-it', 'es-ar', 'hu-hu', 'nl-nl', 'nb-no', 'pl-pl', 'fi-fi',
                              'sv-se', 'en-gb', 'en-us', 'ru-ru', 'ko-kr', 'zh-cn', 'ja-jp'],
                     help="<language-country> identifier of the LEGO shop (language and geographic region)"
                          " you want to use for ordering. Default: en-us")
    cmd.add_argument('--browser', '-b', default='firefox', choices=['chrome', 'firefox'],
                     help="Web browser that will be used to open the LEGO shop. Default: firefox")
    cmd.add_argument('--username', '-u', help="User name for your LEGO ID account")
    cmd.add_argument('--password', '-p', help="Password for your LEGO ID account")
    cmd.add_argument('--lego-set', '-l', default=SET_EDUCORE, choices=[SET_EV3HOME, SET_EDUCORE, SET_EDUEXPA],
                     help="The LEGO set you did *not* buy, which you need the bricks from."
                          " 31313 = Mindstorms EV3, 45544 = Edu Core, 45560 = Edu Expansion."
                          " Default: 45544 (Edu Core)")
    cmd.add_argument('order_list',
                     help="A list of LEGO part_number:quantity you want to buy, separated by comma signs."
                          " Example: 370526:4,370726:2,4107085:4,4107767:2")

    # avoid intimidating the user ("error: ... required") with no arguments
    if len(sys.argv) == 1:
        parser.print_help()
        parser.exit()

    args = parser.parse_args()
    kwargs = vars(args).copy()
    kwargs.pop('command', None)

    function = globals()[args.command]
    function(**kwargs)
Example #40
def main(args=None):
    """Entry-point for ``python -m mpi4py.bench``."""
    from argparse import ArgumentParser, REMAINDER
    parser = ArgumentParser(prog=__package__ + ".bench",
                            usage="%(prog)s [options] <command> [args]")
    parser.add_argument("--threads",
                        action="store_true", dest="threads", default=None,
                        help="initialize MPI with thread support")
    parser.add_argument("--no-threads",
                        action="store_false", dest="threads", default=None,
                        help="initialize MPI without thread support")
    parser.add_argument("--thread-level",
                        dest="thread_level", default=None,
                        action="store", metavar="LEVEL",
                        choices="single funneled serialized multiple".split(),
                        help="initialize MPI with required thread level")
    parser.add_argument("--mpe",
                        action="store_true", dest="mpe", default=False,
                        help="use MPE for MPI profiling")
    parser.add_argument("--vt",
                        action="store_true", dest="vt", default=False,
                        help="use VampirTrace for MPI profiling")
    parser.add_argument("command",
                        action="store", metavar="<command>",
                        help="benchmark command to run")
    parser.add_argument("args",
                        nargs=REMAINDER, metavar="[args]",
                        help="arguments for benchmark command")
    options = parser.parse_args(args)

    from . import rc, profile
    if options.threads is not None:
        rc.threads = options.threads
    if options.thread_level is not None:
        rc.thread_level = options.thread_level
    if options.mpe:
        profile('mpe', logfile='mpi4py')
    if options.vt:
        profile('vt', logfile='mpi4py')

    from . import MPI
    comm = MPI.COMM_WORLD
    if options.command not in main.commands:
        if comm.rank == 0:
            parser.error("unknown command '%s'" % options.command)
        parser.exit(2)
    command = main.commands[options.command]
    command(comm, options.args)
    parser.exit()
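
For context, nargs=REMAINDER as used for the trailing args captures everything after the <command> positional, including strings that look like options, so the benchmark command receives its own flags untouched. A small illustration, not tied to mpi4py:

from argparse import ArgumentParser, REMAINDER

p = ArgumentParser()
p.add_argument("command")
p.add_argument("args", nargs=REMAINDER)

ns = p.parse_args(["pingpong", "-n", "1000", "--skip", "10"])
print(ns.command)   # 'pingpong'
print(ns.args)      # ['-n', '1000', '--skip', '10'] -- passed through verbatim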
Example #41
0
def main(bot_class=TelegramBot):
    parser = ArgumentParser(description='An easily extensible Telegram bot.')
    parser.add_argument('--bot-id', default=environ.get('TELEGRAM_BOT_ID'),
                        help='can also be set via an environment variable '
                             'called TELEGRAM_BOT_ID')
    parser.add_argument('-v', dest='verbose', action='store_true',
                        help='verbose logging')
    args = parser.parse_args()

    # set up logging apparatus
    logging.captureWarnings(True)
    logging_config = dict(
        level=logging.DEBUG if args.verbose else logging.INFO,
        datefmt='%Y-%m-%d %H:%M:%S',
        format='%(asctime)-15s %(name)s: %(levelname)s %(message)s',
    )
    logging.basicConfig(**logging_config)
    # suppress logs from the requests library connection pool (they're noisy)
    noise = logging.getLogger('requests.packages.urllib3.connectionpool')
    noise.disabled = True

    logger.info('Starting Telegram bot..')
    try:
        bot = bot_class(args.bot_id)
    except TelegramBotException as e:
        logger.error(e)
        parser.exit(1)

    # handle exit conditions gracefully
    def was_force_stopped(signo, stackframe):
        if signo == SIGINT:
            print()
            logger.debug('Bot interrupted via keypress!')
        if signo == SIGTERM:
            logger.debug('Bot was asked to shutdown..')
        logger.info('Shutting down..')
        bot.exiting = True
    signal(SIGINT, was_force_stopped)
    signal(SIGTERM, was_force_stopped)

    while True:
        if bot.exiting:
            break
        sleep(2)
        bot.get_updates()
    parser.exit()
Example #42
0
def main():
    parser = ArgumentParser(prog='benchmark.py', description='Test perfomance.')
    parser.add_argument('--only-bench', action='store_true')
    parser.add_argument('--only-show', action='store_true')

    args = parser.parse_args()

    if args.only_bench and args.only_show:
        parser.print_help()
        parser.exit()

    if args.only_bench:
        bench_num_nodes()
        bench_num_iters()
        bench_orders()

    if args.only_show:
        visualize()
Example #43
0
def main():
    parser = ArgumentParser(description="Bogobogosort")
    parser.add_argument("--value", "-v", type=int, nargs="+", help="Values to sort", action="append")
    parser.add_argument("--nargs", "-n", type=int, help="Number of values to generate to sort")
    parser.add_argument("--test", "-t", type=int, metavar="maxN", help="Test with list lengths up to maxN")
    parser.add_argument("--count", "-c", type=int, help="Number of trials to test. Only applies when running a test.", default=5)
    args = parser.parse_args()

    if (args.value is None) and (args.nargs is None) and (args.test is None):
        parser.print_help()
        parser.exit(0, "value, nargs, or test is required.\n")
    elif (args.test is not None) and ((args.value is not None) or (args.nargs is not None)):
        parser.print_help()
        parser.exit(0, "Invalid combination of arguments.\n")
    elif (args.value is not None) and (args.nargs is not None):
        parser.print_help()
        parser.exit(0, "Provide a list of numbers, or a number of values to generate, not both.\n")

    if args.test is not None:
        bbsTest(args.test+1, args.count)
    else:
        if args.value is not None:
            vals = [item for sublist in args.value for item in sublist]
        else:
            vals = randlist(0, 10000, args.nargs)

        print("Sorting {}...".format(vals))
        pt = PerfTimer()
        srtd = bogobogosort(vals)
        elapsed = pt.elapsedf()
        print("Sorted {} in {}".format(srtd, elapsed))
Example #44
0
def main():
    parser = ArgumentParser(usage='%(prog)s [options] data_filename.xml')
    parser.add_argument("-f", "--full", action="store_true", dest="full",
                        default=False, help="full CSV report")
    parser.add_argument("-o", "--out", action="store_true",
                        dest="output", default=False, help="output file")
    parser.add_argument("-x", "--xml", action="store_true",
                        dest="xml", default=False, help="xml? default MARC21")
    parser.add_argument("datafile", help="put the datafile you want analyzed \
                        here")

    if len(sys.argv) == 1:
        parser.print_help()
        parser.exit()

    args = parser.parse_args()

    if args.full and args.xml:
        createFullCSV(args.datafile, args.output)

    if args.full and not args.xml:
        print(getMRCHeader(args.datafile))
Example #45
0
def main():
    try:
        parser = ArgumentParser()

        subparsers = parser.add_subparsers(title='subcommands')
        start_parser = subparsers.add_parser('start')
        start_parser.add_argument('--name', default='default')
        start_parser.add_argument('--no-edit', action='store_false',
                                  dest='edit')
        start_parser.add_argument('files', nargs='+')
        start_parser.set_defaults(func=main_start, edit=True)

        edit_parser = subparsers.add_parser('edit')
        edit_parser.add_argument('--name', default='default')
        edit_parser.set_defaults(func=main_edit)

        move_parser = subparsers.add_parser('move')
        move_parser.add_argument('--name', default='default')
        move_parser.add_argument('-n', '--dry-run', action='store_true',
                                 dest='dry_run')
        move_parser.set_defaults(func=main_move)

        copy_parser = subparsers.add_parser('copy')
        copy_parser.add_argument('--name', default='default')
        copy_parser.add_argument('-n', '--dry-run', action='store_true',
                                 dest='dry_run')
        copy_parser.set_defaults(func=main_copy)

        args = parser.parse_args()
        if not hasattr(args, "func"):
            parser.print_help()
            parser.exit(1)
        args.func(args)
    except Exception:
        traceback.print_exc(file=sys.stderr)
        sys.exit(EXIT_UNEXPECTED_FAIL)
Example #46
0
    def __init__(self, name, loader, *largs, **kwargs):
        kwargs['formatter_class'] = RawTextHelpFormatter
        kwargs['fromfile_prefix_chars']="@"
        args = kwargs.pop('args', None)
            
        preparser = ArgumentParser(add_help=False, 
                fromfile_prefix_chars=kwargs['fromfile_prefix_chars'])
        preparser.add_argument("-X", type=loader, action="append")
        # Process the plugins
        preparser.exit = lambda a, b: None
#        preparser.convert_arg_line_to_args = self.convert_arg_line_to_args
        preparser._read_args_from_files = PluginArgumentParser._read_args_from_files.__get__(preparser)         
        preparser._yield_args_from_files = PluginArgumentParser._yield_args_from_files.__get__(preparser)         
        preparser.convert_args_file_to_args = PluginArgumentParser.convert_args_file_to_args.__get__(preparser)         

        self.ns, unknown = preparser.parse_known_args(args) 

        ArgumentParser.__init__(self, name, *largs, **kwargs)

        self.add_argument("-X", action='append', help='path of additional plugins to be loaded' )
Example #47
0
import sys
import subprocess
from jinja2 import Environment
from argparse import ArgumentParser

parser = ArgumentParser()
parser.add_argument("--name", help="name of the job", type=str, required=True)
parser.add_argument("--n_gpus", help="number of gpus", type=int, default=6)
parser.add_argument("--repeats", help="number of  repeats", type=int, default=10)
parser.add_argument("--dry_run", help="dont actually submit", type=bool, default=False)
parser.add_argument("--time", help="time", type=str, default="99:59:59")

args = parser.parse_args()

if args.n_gpus <= 0:
    parser.exit("n_gpus must be positive")
if args.repeats <= 0:
    parser.exit("n_gpus must be positive")


template = """#!/bin/bash
#MSUB -N msmaccelerator-{{name}}
#MSUB -l nodes=1:ppn={{n_gpus}}:gpus={{n_gpus}}
#MSUB -l walltime={{time}}
#MSUB -q longq
#MSUB -d /home/rmcgibbo/projects/msmaccelerator2.villin/project/

cd /home/rmcgibbo/projects/msmaccelerator2.villin/project/
mkdir -p logs
nvidia-smi &>logs/msmaccelerator-{{name}}.out
Example #48
0
def main():
  cut = { 'x':None, 'y':None, 'z':None }
  remesh = { 'vertex_perc': None, 'treshold': 0.3, 'boundaryweight': 1.0, 'xml_fmt': """<!DOCTYPE FilterScript>
<FilterScript>
 <filter name="Quadric Edge Collapse Decimation">
  <Param type="RichInt" value="%d" name="TargetFaceNum"/>
  <Param type="RichFloat" value="0" name="TargetPerc"/>
  <Param type="RichFloat" value="%.2f" name="QualityThr"/>
  <Param type="RichBool" value="true" name="PreserveBoundary"/>
  <Param type="RichFloat" value="%.2f" name="BoundaryWeight"/>
  <Param type="RichBool" value="true" name="PreserveNormal"/>
  <Param type="RichBool" value="false" name="PreserveTopology"/>
  <Param type="RichBool" value="true" name="OptimalPlacement"/>
  <Param type="RichBool" value="false" name="PlanarQuadric"/>
  <Param type="RichBool" value="false" name="QualityWeight"/>
  <Param type="RichBool" value="true" name="AutoClean"/>
  <Param type="RichBool" value="false" name="Selected"/>
 </filter>
</FilterScript>""" }

  parser = ArgumentParser(epilog="Version "+__VERSION__+"\n -- Written by "+__AUTHOR__+"\n -- Example (3 parts): stlcut LFS_Elephant.STL -x 33.4%+ -e scad", description="Cut STL objects into pieces. Call without options to open an STL viewer and get the bounding box printed out.")
  parser.add_argument("-x", metavar='XPOS', help="cut at given X-coordinate, parallel to yz plane. Use '%%' with any value for a relative dimension. E.g. '-x 50%%' cuts the object in two equal halves. Suffix with '-' to create only the first part; Suffix with '+' to make multiple equally spaced cuts. Default unit is mm.")
  # Not implemented: Prefix with '-' to measure from the high coordinates downward. Use units '%%', 'mm' or 'cm'.
  parser.add_argument("-y", metavar='YPOS', help="cut at given Y-coordinate")
  parser.add_argument("-z", metavar='ZPOS', help="cut at given Z-coordinate")
  parser.add_argument("-xy", metavar='POS', help="cut into vertical columns")
  parser.add_argument("-xz", metavar='POS', help="cut into horizontal columns")
  parser.add_argument("-yz", metavar='POS', help="cut into horizontal columns (other direction)")
  parser.add_argument("-d", "--xyz", "--dice", metavar='POS', help="cut into equal sided dices ")
  parser.add_argument("-e", "--engine", help="select the CSG engine. Try 'blender' or 'scad' or check 'pydoc trimesh.boolean.intersection' for more valid values. The openSCAD engine may work better for objects with disconnected parts.")
  parser.add_argument("-f", "--fix", action='store_true', help="try to fix defects in STL: normals, holes, ...")
  parser.add_argument("-r", "--remesh-filter", metavar='VERTEX_PERCENTAGE', help="Apply the 'Quadratic Edge Collapse Decimation' filter from meshlab. This fixes all defects, but slightly changes the geometry. Optionally, you can append a trehshold value to the percentage like e.g. '-F 50%%,0.1' - The default treshold is "+str(remesh['treshold']))
  parser.add_argument("-rx", metavar='XDEG', help="rotate about the X-axis")
  parser.add_argument("-ry", metavar='YDEG', help="rotate about the Y-axis")
  parser.add_argument("-rz", metavar='ZDEG', help="rotate about the Z-axis")
  parser.add_argument("-s", "--scale", metavar='SCALE', help="uniformly scale the object about the origin")
  parser.add_argument("-S", "--save-transformed", metavar='STLFILE', help="save an stl file after applying scale and rotation. No cut operation performed.")
  parser.add_argument("infile", metavar="STLFILE", help="The STL input file")

  args = parser.parse_args()      # --help is automatic

  if args.infile is None:
    parser.exit("No input file given")

  if args.remesh_filter is not None:
    f = args.remesh_filter.split(',')
    f[0] = float(f[0].split('%')[0])
    remesh['vertex_perc'] = f[0]
    if len(f) > 1:
      remesh['treshold'] = float(f[1])
    print "remesh not impl. ", remesh


  if args.xyz is not None: cut['x']=cut['y']=cut['z']=args.xyz
  if args.xy is not None: cut['x']=cut['y']=args.xy
  if args.xz is not None: cut['x']=cut['z']=args.xz
  if args.yz is not None: cut['y']=cut['z']=args.yz
  if args.x is not None: cut['x']=args.x
  if args.y is not None: cut['y']=args.y
  if args.z is not None: cut['z']=args.z

  Re = euler_rotation_matrix(args.rx, args.ry, args.rz)
  if args.scale is not None:
    Sc = trimesh.transformations.scale_matrix(float(args.scale), [0,0,0])
    RS = numpy.dot(Re,Sc)
  else:
    RS = Re

  if args.save_transformed: cut['x'] = cut['y'] = cut['z'] = None

  if cut['x'] is None and cut['y'] is None and cut['z'] is None:
    print "loading "+args.infile+" ..."
    m = trimesh.load_mesh(args.infile)
    m.process()
    m.transform(RS)
    if args.fix:
      print "is watertight: ", m.fill_holes()
      m.fix_normals()
      m.process()
    print "vertices: ", len(m.vertices)
    if args.save_transformed:
      open(args.save_transformed, "wb+").write(trimesh.io.export.export_stl(m))
      parser.exit(args.save_transformed + " written.\nSpecify one of the -x, -y, -z options instead of -S to cut something.")
    else:
      print m.bounds
      bb = m.bounding_box			# oriented parallel to the axis
      # bb = m.bounding_box_oriented	# rotated for minimum size, slow!
      for f in bb.facets():
        bb.visual.face_colors[f] = trimesh.visual.to_rgba([255,255,0,127])
      # FIXME: transparency and color does not work.
      # (m+bb).show(block=False)
      m.show(block=False)

      print """View mode:
     drag rotates the view,
     CTRL + drag pans,
     SHIFT + drag zooms (mouse wheel scrolls),
     'z' returns to the base view,
     'w' toggles wireframe mode, and
     'c' toggles backface culling.
     'q' quit."""
      parser.exit('Specify one of the -x, -y, -z options for cut mode.')

  svg = [ args.infile ]

  print "x,y,z: ", cut['x'], cut['y'], cut['z']

  for dim in ('x','y','z'):
    if cut[dim] is not None:
      done = []
      for f in svg:
        done.extend(do_cut(dim, cut[dim], f, engine=args.engine, mat=RS, fix=args.fix))
        RS = None  # only rotate the original input file. The temp files are saved rotated.
        if f != args.infile:
          print "... removing "+f
          os.remove(f)
      svg = done

  print "files generated: ", svg
Example #49
0
    cmd = ['msub', os.path.abspath(fn)]
    print ' '.join(cmd)

    if not args.dry_run:
        print 'SUBMITTING JOB'
        code = subprocess.check_output(cmd)
        print 'JOB_ID: %s' % code

if __name__ == "__main__":
    parser = ArgumentParser()
    parser.add_argument('--name', help='name of the job', type=str, required=True)
    parser.add_argument('--n_gpus', help='number of gpus', type=int, default=7)
    parser.add_argument('--n_nodes', help='number of nodes', type=int, default=7)
    parser.add_argument('--repeats', help='number of repeats', type=int, default=10)
    parser.add_argument('--dry_run', help="don't actually submit", action='store_true')
    parser.add_argument('--dir', help='Directory to store the job file', default='jobs')
    parser.add_argument('--queue', help='The queue to submit to', default='longq')

    args = parser.parse_args()

    if args.n_gpus <= 0:
        parser.exit('n_gpus must be positive')
    if args.repeats <= 0:
        parser.exit('repeats must be positive')
    if args.queue not in queues.keys():
        parser.exit('please supply a valid queue')

    args.time = queues[args.queue]

    submit_round_to_pbs(args)
Example #50
0
class Application(object):
    def __init__(self):
        self.version = "0.3"

    def run(self):
        options = self._parse_args()
        self._init_verbosity(options.verbosity)
        self._load_files(options.auto_path)
        dfa = self._make_dfa(options.desired_state)
        self._execute_path(dfa, options.exact)

    @property
    def persisted_data(self):
        """Dictionary that is persisted between runs.
        
        Keys starting with '#' are reserved and should not be used"""
        return self._data["last_state"]

    def _init_verbosity(self, level):
        pyautomate.verbosity.init(level)

    def _parse_args(self):
        self.parser = ArgumentParser(
            description="Automation tool", prog="auto", epilog="For more information see TODO github link readme"
        )
        self.parser.add_argument("desired_state", metavar="S", nargs="+", help="a state to reach")
        self.parser.add_argument(
            "--file", "-f", dest="auto_path", default="auto.py", help="the pyautomate config file (default: ./auto.py)"
        )
        self.parser.add_argument(
            "--exact",
            "-e",
            default=False,
            action="store_true",
            help="when specified desired state must be matched " + "exactly (default: partial match)",
        )
        self.parser.add_argument(
            "--verbosity",
            "-v",
            metavar="V",
            default=1,
            type=int,
            help="verbosity of output. 0 for no output, 1 for "
            + "listing actions, 2 for listing state switches and "
            + "actions (default: 1)",
        )
        self.parser.add_argument("--version", action="version", version="%(prog)s " + self.version)
        return self.parser.parse_args()

    def _load_files(self, auto_path):
        auto_path = os.path.abspath(auto_path)
        auto_dir, auto_file = os.path.split(auto_path)
        if not os.path.exists(auto_path):
            self.parser.error("Could not find auto file at: %s" % auto_path)
        os.chdir(auto_dir)

        self._data = Data()
        self._config = self._load_auto_file(auto_dir, auto_file)

    def _load_auto_file(self, auto_dir, auto_file):
        sys.path.insert(0, auto_dir)  # allow importing

        auto_module_name = os.path.splitext(auto_file)[0]
        try:
            config = import_module(auto_module_name)
        except ImportError as ex:
            self.parser.error("Failed to import auto file: %s" % ex)

        weights = defaultdict(lambda: 1000)
        if hasattr(config, "weights"):
            weights.update(config.weights)
        config.weights = weights

        return config

    def _make_dfa(self, desired_state):
        from pyautomate.automata import GuardedState, StateDict, NFA, NFAAsDFA, UnknownStatesException

        raw_states = yaml.load(self._config.states)
        if not raw_states:
            raw_states = {}

        states = StateDict()
        for raw_state in raw_states:
            state = GuardedState(raw_state)
            states[state.name] = state

        start_state = self._config.get_initial_state()
        self._data.commit()
        if isinstance(start_state, str):
            start_state = (start_state,)
        elif not isinstance(start_state, tuple):
            self.parser.error(
                "get_initial_state must return a str or a tuple of " + "states, got: {0}".format(start_state)
            )

        try:
            nfa = NFA(states=states, raw_start_states=start_state, raw_end_states=desired_state)
        except UnknownStatesException as ex:
            self.parser.error("Unknown state(s) in desired state: {0}".format(", ".join(ex.states)))
        return NFAAsDFA(nfa)

    def _execute_path(self, dfa, exact):
        from pyautomate.automata import EndUnreachableException
        from pyautomate.verbosity import print1, print1e, print2, print2e, level as verbosity_level

        try:
            for from_, action, to in dfa.get_shortest_path(exact, self._config.weights):

                print1e(action)

                if verbosity_level == 2:

                    for i, right in enumerate(to):

                        if i == 0:
                            fill = "-"
                            right = "> " + right
                            center = action
                        else:
                            fill = " "
                            center = ""
                        center_len = 79 - len(right)

                        print("{0:{1}^{3}}{2}".format(center, fill, right, center_len))

                    print()

                try:
                    eval(action, vars(self._config))
                    self._data.commit()
                except:
                    print("Failed to execute action:", action, file=sys.stderr)
                    raise

        except EndUnreachableException:
            print(
                "Desired state (%s) is unreachable from (%s)" % (", ".join(dfa.end_state), ", ".join(dfa.start_state))
            )
            self.parser.exit(1)
        finally:
            self._data.save()
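
A note on the two exit paths used throughout this class: parser.error() prints the usage line plus the message to stderr and exits with status 2, whereas parser.exit() takes an explicit status and an optional message. A quick sketch of the difference:

from argparse import ArgumentParser

parser = ArgumentParser(prog="auto")
try:
    parser.error("Could not find auto file")    # prints usage + message, raises SystemExit(2)
except SystemExit as e:
    print("error() exited with", e.code)        # 2

try:
    parser.exit(1, "giving up\n")               # prints only the message, raises SystemExit(1)
except SystemExit as e:
    print("exit() exited with", e.code)         # 1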
Example #51
0
def main() -> None:
    help_factory = (lambda prog: RawDescriptionHelpFormatter(prog=prog, max_help_position=32))  # type: Any
    parser = ArgumentParser(
        prog='incremental_checker',
        description=__doc__,
        formatter_class=help_factory)

    parser.add_argument("range_type", metavar="START_TYPE", choices=["last", "commit"],
                        help="must be one of 'last' or 'commit'")
    parser.add_argument("range_start", metavar="COMMIT_ID_OR_NUMBER",
                        help="the commit id to start from, or the number of "
                        "commits to move back (see above)")
    parser.add_argument("-r", "--repo_url", default=MYPY_REPO_URL, metavar="URL",
                        help="the repo to clone and run tests on")
    parser.add_argument("-f", "--file-path", default=MYPY_TARGET_FILE, metavar="FILE",
                        help="the name of the file or directory to typecheck")
    parser.add_argument("-x", "--exit-on-error", action='store_true',
                        help="Exits as soon as an error occurs")
    parser.add_argument("--keep-temporary-files", action='store_true',
                        help="Keep temporary files on exit")
    parser.add_argument("--cache-path", default=CACHE_PATH, metavar="DIR",
                        help="sets a custom location to store cache data")
    parser.add_argument("--branch", default=None, metavar="NAME",
                        help="check out and test a custom branch"
                        "uses the default if not specified")
    parser.add_argument("--sample", type=int, help="use a random sample of size SAMPLE")
    parser.add_argument("--seed", type=str, help="random seed")
    parser.add_argument("--limit", type=int,
                        help="maximum number of commits to use (default until end)")
    parser.add_argument("--mypy-script", type=str, help="alternate mypy script to run")
    parser.add_argument("--daemon", action='store_true',
                        help="use mypy daemon instead of incremental (highly experimental)")

    if len(sys.argv[1:]) == 0:
        parser.print_help()
        parser.exit()

    params = parser.parse_args(sys.argv[1:])

    # Make all paths absolute so we avoid having to worry about being in the right folder

    # The path to this specific script (incremental_checker.py).
    script_path = os.path.abspath(sys.argv[0])

    # The path to the mypy repo.
    mypy_path = os.path.abspath(os.path.dirname(os.path.dirname(script_path)))

    # The folder the cloned repo will reside in.
    temp_repo_path = os.path.abspath(os.path.join(mypy_path, "tmp_repo"))

    # The particular file or package to typecheck inside the repo.
    if params.file_path:
        target_file_path = os.path.abspath(os.path.join(temp_repo_path, params.file_path))
    else:
        # Allow `-f ''` to clear target_file_path.
        target_file_path = None

    # The path to where the incremental checker cache data is stored.
    incremental_cache_path = os.path.abspath(params.cache_path)

    # The path to store the mypy incremental mode cache data
    mypy_cache_path = os.path.abspath(os.path.join(mypy_path, "misc", ".mypy_cache"))

    print("Assuming mypy is located at {0}".format(mypy_path))
    print("Temp repo will be cloned at {0}".format(temp_repo_path))
    print("Testing file/dir located at {0}".format(target_file_path))
    print("Using cache data located at {0}".format(incremental_cache_path))
    print()

    test_repo(params.repo_url, temp_repo_path, target_file_path,
              mypy_path, incremental_cache_path, mypy_cache_path,
              params.range_type, params.range_start, params.branch,
              params)
Example #52
0
 def exit(self, status=0, message=None):
     try:
         return ArgumentParser.exit(self, status, message)
     except SystemExit:
         raise FakeSystemExit(status)
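
The override above is typically used so tests can catch a dedicated exception instead of SystemExit. A self-contained sketch of how such a parser might be wired up (FakeSystemExit and TestableParser here are hypothetical stand-ins, not part of the original code):

from argparse import ArgumentParser

class FakeSystemExit(Exception):
    def __init__(self, status):
        super().__init__(status)
        self.status = status

class TestableParser(ArgumentParser):
    def exit(self, status=0, message=None):
        try:
            return ArgumentParser.exit(self, status, message)
        except SystemExit:
            raise FakeSystemExit(status)

parser = TestableParser(prog="demo")
parser.add_argument("--count", type=int, required=True)
try:
    parser.parse_args([])              # missing --count triggers error() -> exit(2)
except FakeSystemExit as e:
    print("caught status", e.status)   # caught status 2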
Example #53
0
def main():
    from argparse import ArgumentParser
    parser = ArgumentParser(prog="arcomm")
    arg = parser.add_argument

    arg("endpoints", nargs="*")

    arg("-v", "--version", action="store_true", help="display version info")

    arg("--protocol", help=("set the protocol. By default 'eapi' is used."),
        choices=["eapi", "eapi+https", "mock", "ssh"])

    arg("-u", "--username", default="admin",
        help="specifies the username on the switch")

    arg("-p", "--password", default=None, help="specifies users password")

    arg("--no-password", action="store_true",
        help="specifies no password required for user")

    arg("-s", "--secret-file", help="read passwords from file")

    arg("--authorize", action="store_true")

    arg("-a", "--authorize-password", default=None,
        help=("use if a password is needed for elevated prvilges"))

    arg("-t", "--timeout", type=int, default=30,
        help=("change the timeout from the default of 30 seconds"))

    arg("--hosts-file", help="path to file containing list of hosts")

    arg("--script", help=("path to a script file containing commands to "
                          "execute. template variables will be processed if "
                          "Jinja2 is installed and `--variables` is also "
                          "supplied on the command line"))

    arg("--variables", help=("replacements for template variables in script "
                             "file (must be JSON formatted)"))



    ssh_group = parser.add_argument_group('SSH Options', '')
    ssh_arg = ssh_group.add_argument

    ssh_arg("--identity-file", help="specifies identity file")

    eapi_group = parser.add_argument_group('eAPI Options', '')

    eapi_arg = eapi_group.add_argument

    eapi_arg("--encoding", default="text", choices=["json", "text"],
             help="control output formatting")

    eapi_arg("--no-verify", action="store_true",
             help="when using eAPI over HTTPS, don't verify certificate")

    eapi_arg("--private-key", help="specifies private key file")
    eapi_arg("--certificate", help="specifies client certificate file")

    args = parser.parse_args()

    options = {}
    endpoints = []

    if args.version:
        parser.exit(0, arcomm.__version__ + "\n")

    if args.hosts_file:
        endpoints = arcomm.util.load_endpoints(args.hosts_file)
    else:
        endpoints = args.endpoints

    if not endpoints:
        raise ValueError('no endpoints')

    if args.authorize_password:
        options['authorize'] = args.authorize_password
    elif args.authorize:
        options['authorize'] = ''

    username = args.username

    if args.no_password:
        password = ""
    else:
        password = args.password

    if not username:
        username = getpass.getuser()

    if args.secret_file:
        with open(args.secret_file, "r") as stream:
            secrets = yaml.load(stream)
            password = secrets.get(username)

    if password is None:
        password = getpass.getpass("password for {}: ".format(username))

    options['creds'] = arcomm.BasicCreds(username, password)

    if args.protocol:
        options['protocol'] = args.protocol

    options['timeout'] = args.timeout

    options['encoding'] = args.encoding

    options['verify'] = not args.no_verify

    script = []

    if args.script:
        with open(args.script, 'r') as fh:
            script = fh.read()
            script = script.splitlines()
    elif not sys.stdin.isatty():
        for line in sys.stdin:
            script.append(line)
    else:
        isatty = sys.stdin.isatty()
        if isatty:
            print("Enter commands (one per line).")
            print("Enter '.' alone to send or 'Crtl-C' to quit.")
            try:
                while True:
                    line = input('> ')
                    if line == ".":
                        break
                    script.append(line)
            except (KeyboardInterrupt, SystemExit):
                print("Commands aborted.")
                sys.exit()
        else:
            for line in sys.stdin:
                script.append(line)

    if args.variables:
        import jinja2
        replacements = json.loads(args.variables)
        script = "\n".join(script)
        template = jinja2.Template(script)
        script = template.render(replacements)
        script = script.splitlines()

    for res in arcomm.batch(endpoints, script, **options):
        print('---')
        if options['encoding'] == 'json':
            print(res.to_json())
        else:
            print(res.to_yaml())
    print('...')
Example #54
0
    results['final_template'] = lct

    pickle.dump(
        results,
        open('results_%s.pickle' % name,'w')
        )



if __name__ == '__main__':

    parser = ArgumentParser()
    parser.add_argument("--pwndata", required=True)
    parser.add_argument("-n", "--name", required=True, help="Name of the pulsar")
    parser.add_argument("--pwnphase", required=True)
    parser.add_argument("--pwnpeaks", required=True)
    parser.add_argument("--rad", default=1)
    args=parser.parse_args()

    name=args.name
    ft1=yaml.load(open(args.pwndata))[name]['ft1']

    pwncat1phase=PhaseRange(*yaml.load(open(args.pwnphase))[name]['phase'])

    peaks=yaml.load(open(args.pwnpeaks))[name]['peaks']

    print peaks
    if peaks is None: parser.exit('no peaks')

    find_offpeak(ft1,name,rad=args.rad,peaks=peaks,pwncat1phase=pwncat1phase)
Example #55
0
def start():
    description = '''\
Another great statusline generator for i3wm.\
    '''

    epilog = '''\
available generators:
  battery           - show battery information
  clipboard         - show the contents of the clipboard
  clock             - show the current time
  disk              - show disk usage statistics
  ethernet          - show information about a network device
  focused-window    - show the currently focused window
  mediaplayer       - show artist/title of the currently playing track
  other-statuslines - show the output of another statusline
  scratchpad        - show what windows are in the i3 scratchpad

example usage:
  i3-dstatus focused-window ethernet clock
    '''

    parser = ArgumentParser(prog='i3-dstatus',
                            description=description,
                            epilog=epilog,
                            formatter_class=RawDescriptionHelpFormatter)

    parser.add_argument('-c',
                        metavar='CONFIG',
                        dest='config',
                        nargs=1,
                        help='The location of your i3-dstatus config')

    parser.add_argument('generators',
                        metavar='Generators',
                        nargs=argparse.REMAINDER,
                        help='''Pass a list of generators as arguments to
                        appear in the statusline. See below for a list of
                        generators.''')

    args = parser.parse_args()

    # load the config
    config = {}

    try:
        config_path = args.config[0] if args.config else '~/.i3-dstatus.conf'
        with open(os.path.expanduser(config_path)) as f:
            config = yaml.safe_load(f)
    except FileNotFoundError:
        if args.config:
            message = 'Could not find config at {}\n'.format(args.config[0])
            parser.exit(status=1, message=message)
        else:
            pass

    try:
        service = DStatusService(args.generators, config=config)
        service.main()
    except Exception as e:
        with open('/tmp/i3-dstatus-error.log', 'a') as f:
            f.write("ERROR - " + str(datetime.datetime.now()) + "\n")
            f.write(traceback.format_exc())
            f.write('\n')
        raise e
ap.add_argument("-D", "--dockerfile", default=False, action="store_true", help="Output dockerfile to stdout. Default: create an image")
ap.add_argument("-S", "--shell-script", "--script", default=False, action="store_true", help="Print out a shell script. Similar to the --dockerfile output.")
ap.add_argument("-R", "--rm", default=False, action="store_true", help="Remove intermediate docker containers after a successful build")
ap.add_argument("--no-cache", default=False, action="store_true", help="Do not use cache when building the image. Default: use docker cache as available")
ap.add_argument("--nogpgcheck", default=False, action="store_true", help="Ignore broken or missing keys. Default: yum check, zypper auto-import")
ap.add_argument("-X", "--xauth", default=False, action="store_true", help="Prepare a docker image that can connect to your X-Server.")
ap.add_argument("-C", "--cleanup", default=False, action="store_true", help="Run suggested docker cleanup.")
ap.add_argument("--ssh-server", default=False, action="store_true", help="Start an ssh login server with the default docker run CMD and start.sh script.")
ap.add_argument("project", metavar="PROJECT", nargs="?", help="obs project name. Alternate syntax to PROJ/PACK")
ap.add_argument("package", metavar="PACKAGE",  nargs="?", help="obs package name, or PROJ/PACK")
ap.add_argument("platform",metavar="PLATFORM", nargs="?", help="obs build target name. Alternate syntax to -p. Default: "+target)
args,run_args = ap.parse_known_args()  # --help is automatic
if len(run_args) and run_args[0] == '--':
  run_args = run_args[1:]

if args.version: ap.exit(__VERSION__)
if args.print_image_name_only or args.dockerfile or args.shell_script:
  args.quiet=True
  args.no_operation=True
if args.quiet: run.verbose=0

context_dir = tempfile.mkdtemp(prefix="obs_docker_install_context_")
docker_cmd_cmd="/bin/bash"
extra_docker_cmd = []
extra_packages = []
if args.extra_packages: extra_packages = re.split(r"[\s,]", args.extra_packages)

if args.cleanup:
  run.verbose = 2
  run(['sh', '-c', docker_cmd_clean_c], redirect=False)
  run(['sh', '-c', docker_cmd_clean_i], redirect=False)
Example #57
0
 def parseCmd(self):
     parser = ArgumentParser(add_help=False)
     parser.add_argument('glob', nargs='*', help='File(s)/Dir(s)')
     # Generic Program Information
     parser.add_argument(
         '--help',
         dest='help',
         action='store_true',
         default=False,
         help=
         'Print a usage message briefly summarizing the command-line options and the bug-reporting address, then exit.'
     )
     parser.add_argument(
         '-V',
         '--version',
         dest='version',
         action='store_true',
         default=False,
         help=
         'Print the version number of eg to the standard output stream.')
     # Matching Control
     parser.add_argument(
         '-e',
         '--regexp',
         dest='regexp',
         action='append',
         default=[],
         help='Search patterns')
     parser.add_argument(
         '-f',
         '--file',
         dest='file',
         action='store',
         help=
         'Obtain patterns from file, one per line. The empty file contains zero patterns, and therefore matches nothing.'
     )
     parser.add_argument(
         '-i',
         '--ignore-case',
         dest='ignore_case',
         action='store_true',
         default=False,
         help=
         'Ignore case distinctions in both the patterns and the input files.'
     )
     parser.add_argument(
         '-v',
         '--invert-match',
         dest='invert_match',
         action='store_true',
         default=False,
         help='Invert the sense of matching, to select non-matching lines.')
     parser.add_argument(
         '-w',
         '--word-regexp',
         dest='word_regexp',
         action='store_true',
         default=False,
         help=
         'Select only those lines containing matches that form whole words.')
     parser.add_argument(
         '-x',
         '--line-regexp',
         dest='line_regexp',
         action='store_true',
         default=False,
         help=
         'Select only those matches that exactly match the whole line.')
     # General Output Control
     parser.add_argument(
         '-c',
         '--count',
         dest='count',
         action='store_true',
         default=False,
         help=
         'Suppress normal output; instead print a count of matching lines for each input file. With the ‘-v’, ‘--invert-match’ option, count non-matching lines.'
     )
     parser.add_argument(
         '--color',
         dest='color',
         action='store',
         default='never',
         help=
         'Surround the matched (non-empty) strings, matching lines, context lines, file names, line numbers, byte offsets, and separators (for fields and groups of context lines) with escape sequences to display them in color on the terminal. The colors are defined by the environment variable GREP_COLORS and default to ‘ms=01;31:mc=01;31:sl=:cx=:fn=35:ln=32:bn=32:se=36’ for bold red matched text, magenta file names, green line numbers, green byte offsets, cyan separators, and default terminal colors otherwise. COLOR is ‘never’, ‘always’, or ‘auto’.'
     )
     parser.add_argument(
         '-L',
         '--files-without-match',
         dest='files_without_match',
         action='store_true',
         default=False,
         help=
         'Suppress normal output; instead print the name of each input file from which no output would normally have been printed. The scanning of every file will stop on the first match.'
     )
     parser.add_argument(
         '-l',
         '--files-with-matches',
         dest='files_with_matches',
         action='store_true',
         default=False,
         help=
         'Suppress normal output; instead print the name of each input file from which output would normally have been printed. The scanning of every file will stop on the first match.'
     )
     parser.add_argument(
         '-m',
         '--max-count',
         dest='max_count',
         action='store',
         type=int,
         default=-1,
         help=
         'Stop reading a file after num matching lines. If the input is standard input from a regular file, and num matching lines are output, grep ensures that the standard input is positioned just after the last matching line before exiting, regardless of the presence of trailing context lines. This enables a calling process to resume a search.'
     )
     parser.add_argument(
         '-o',
         '--only-matching',
         dest='only_matching',
         action='store_true',
         default=False,
         help=
         'Print only the matched (non-empty) parts of matching lines, with each such part on a separate output line.'
     )
     parser.add_argument(
         '-q',
         '--quiet',
         dest='quiet',
         action='store_true',
         default=False,
         help=
         'Quiet; do not write anything to standard output. Exit immediately with zero status if any match is found, even if an error was detected.'
     )
     parser.add_argument(
         '-s',
         '--no-message',
         dest='no_message',
         action='store_true',
         default=False,
         help=
         'Suppress error messages about nonexistent or unreadable files.')
     # Output Line Prefix
     parser.add_argument(
         '-b',
         '--byte-offset',
         dest='byte_offset',
         action='store_true',
         default=False,
         help=
         'Print the 0-based byte offset within the input file before each line of output. If ‘-o’ (‘--only-matching’) is specified, print the offset of the matching part itself. '
     )
     parser.add_argument(
         '-H',
         '--with-filename',
         dest='with_filename',
         action='store_true',
         default=False,
         help=
         'Print the file name for each match. This is the default when there is more than one file to search.'
     )
     parser.add_argument(
         '-h',
         '--no-filename',
         dest='no_filename',
         action='store_true',
         default=False,
         help=
         'Suppress the prefixing of file names on output. This is the default when there is only one file (or only standard input) to search.'
     )
     parser.add_argument(
         '--label',
         dest='label',
         action='store',
         help=
         'Display input actually coming from standard input as input coming from file LABEL.'
     )
     parser.add_argument(
         '-n',
         '--line-number',
         dest='line_number',
         action='store_true',
         default=False,
         help=
         'Prefix each line of output with the 1-based line number within its input file.'
     )
     parser.add_argument(
         '-T',
         '--initial-tab',
         dest='initial_tab',
         action='store_true',
         default=False,
         help=
         'Make sure that the first character of actual line content lies on a tab stop, so that the alignment of tabs looks normal. This is useful with options that prefix their output to the actual content: ‘-H’, ‘-n’, and ‘-b’. In order to improve the probability that lines from a single file will all start at the same column, this also causes the line number and byte offset (if present) to be printed in a minimum-size field width. '
     )
     parser.add_argument(
         '-u',
         '--unix-byte-offsets',
         dest='unix_byte_offsets',
         action='store_true',
         default=False,
         help='Report Unix-style byte offsets.')
     parser.add_argument(
         '-Z',
         '--null',
         dest='null',
         action='store_true',
         default=False,
         help=
         'Output a zero byte (the ASCII NUL character) instead of the character that normally follows a file name.'
     )
     # Context Line Control
     parser.add_argument(
         '-A',
         '--after-context',
         dest='after_context',
         action='store',
         default=0,
         type=int,
         help='Print num lines of trailing context after matching lines.')
     parser.add_argument(
         '-B',
         '--before-context',
         dest='before_context',
         action='store',
         default=0,
         type=int,
         help='Print num lines of leading context before matching lines.')
     parser.add_argument(
         '-C',
         '--context',
         dest='context',
         action='store',
         default=0,
         type=int,
         help='Print num lines of leading and trailing output context.')
     # File and Directory Selection
     parser.add_argument(
         '-a',
         '--text',
         dest='text',
         action='store_true',
         default=False,
         help=
         'Process a binary file as if it were text; this is equivalent to the ‘--binary-files=text’ option. '
     )
     parser.add_argument(
         '--binary-files',
         dest='binary_files',
         action='store',
         help=
         'If the first few bytes of a file indicate that the file contains binary data, assume that the file is of type type. By default, type is ‘binary’, and grep normally outputs either a one-line message saying that a binary file matches, or no message if there is no match. If type is ‘without-match’, grep assumes that a binary file does not match; this is equivalent to the ‘-I’ option. If type is ‘text’, grep processes a binary file as if it were text; this is equivalent to the ‘-a’ option. Warning: ‘--binary-files=text’ might output binary garbage, which can have nasty side effects if the output is a terminal and if the terminal driver interprets some of it as commands. '
     )
     parser.add_argument(
         '-D',
         '--devices',
         dest='devices',
         action='store',
         help=
         'If an input file is a device, FIFO, or socket, use action to process it. By default, action is ‘read’, which means that devices are read just as if they were ordinary files. If action is ‘skip’, devices, FIFOs, and sockets are silently skipped. '
     )
     parser.add_argument(
         '-d',
         '--directories',
         dest='directories',
         action='store',
         help=
         'If an input file is a directory, use action to process it. By default, action is ‘read’, which means that directories are read just as if they were ordinary files (some operating systems and file systems disallow this, and will cause grep to print error messages for every directory or silently skip them). If action is ‘skip’, directories are silently skipped. If action is ‘recurse’, grep reads all files under each directory, recursively; this is equivalent to the ‘-r’ option. '
     )
     parser.add_argument(
         '--exclude',
         dest='exclude',
         action='append',
         default=[],
         help=
         'Skip files whose base name matches glob (using wildcard matching). A file-name glob can use ‘*’, ‘?’, and ‘[’...‘]’ as wildcards, and \ to quote a wildcard or backslash character literally. '
     )
     parser.add_argument(
         '--exclude-from',
         dest='exclude_from',
         action='append',
         default=[],
         help=
         'Skip files whose base name matches any of the file-name globs read from file (using wildcard matching as described under ‘--exclude’). '
     )
     parser.add_argument(
         '--exclude-dir',
         dest='exclude_dir',
         action='append',
         default=[],
         help=
         'Exclude directories matching the pattern dir from recursive directory searches. '
     )
     parser.add_argument(
         '-I',
         dest='I',
         action='store_true',
         default=False,
         help=
         'Process a binary file as if it did not contain matching data; this is equivalent to the ‘--binary-files=without-match’ option. '
     )
     parser.add_argument(
         '--include',
         dest='include',
         action='append',
         default=[],
         help=
         'Search only files whose base name matches glob (using wildcard matching as described under ‘--exclude’). '
     )
     parser.add_argument(
         '-r',
         '--recursive',
         dest='recursive',
         action='store_true',
         default=False,
         help=
         'For each directory mentioned on the command line, read and process all files in that directory, recursively. This is the same as the ‘--directories=recurse’ option. '
     )
     # Other Options
     parser.add_argument(
         '--line-buffered',
         dest='line_buffered',
         action='store_true',
         default=False,
         help=
         'Use line buffering on output. This can cause a performance penalty. '
     )
     parser.add_argument(
         '--mmap',
         dest='mmap',
         action='store_true',
         default=False,
         help=
         'If possible, use the mmap system call to read input, instead of the default read system call. In some situations, ‘--mmap’ yields better performance. However, ‘--mmap’ can cause undefined behavior (including core dumps) if an input file shrinks while grep is operating, or if an I/O error occurs. '
     )
     parser.add_argument(
         '-U',
         '--binary',
         dest='binary',
         action='store_true',
         default=False,
         help=
         'Treat the file(s) as binary. By default, under MS-DOS and MS-Windows, grep guesses the file type by looking at the contents of the first 32kB read from the file. If grep decides the file is a text file, it strips the CR characters from the original file contents (to make regular expressions with ^ and $ work correctly). Specifying ‘-U’ overrules this guesswork, causing all files to be read and passed to the matching mechanism verbatim; if the file is a text file with CR/LF pairs at the end of each line, this will cause some regular expressions to fail. This option has no effect on platforms other than MS-DOS and MS-Windows. '
     )
     parser.add_argument(
         '-z',
         '--null-data',
         dest='null_data',
         action='store_true',
         default=False,
         help=
         'Treat the input as a set of lines, each terminated by a zero byte (the ASCII NUL character) instead of a newline. Like the ‘-Z’ or ‘--null’ option, this option can be used with commands like ‘sort -z’ to process arbitrary file names. '
     )
     # Encoding Options
     parser.add_argument(
         '-E',
         '--encoding',
         dest='encoding',
         action='append',
         default=[],
         help='Encoding for reading text files.')
     parser.add_argument(
         '-G',
         '--guess',
         dest='guess',
         action='store_true',
         default=False,
         help='Guess encoding for text files.')
     parser.add_argument(
         '-S',
         '--stdout',
         dest='stdout',
         action='store_true',
         default=False,
         help='Encoding output text with system default encoding.')
     options = parser.parse_args()
     if options.help:
         parser.print_help()
         parser.exit()
     elif options.version:
         print('Version: 0.1')
         parser.exit()
     else:
         return options
Example #58
0

if __name__ == "__main__":

    parser = ArgumentParser()
    parser.add_argument("--pwndata", required=True)
    parser.add_argument("-n", "--name", required=True, help="Name of the pulsar")
    parser.add_argument("--pwnphase", required=True)
    parser.add_argument("--pwnpeaks", required=True)
    parser.add_argument("--rad", default=1)
    args = parser.parse_args()

    name = args.name
    pwndata = args.pwndata

    ft1 = yaml.load(open(pwndata))[name]["ft1"]

    pwncat1phase = PhaseRange(*yaml.load(open(args.pwnphase))[name]["phase"])

    peaks = yaml.load(open(args.pwnpeaks))[name]["peaks"]

    print peaks
    if peaks is None:
        parser.exit("no peaks")

    TSdc = find_TSdc(name, pwndata)

    peaks = yaml.load(open(args.pwnpeaks))[name]["peaks"]

    find_offpeak(ft1, name, rad=args.rad, peaks=peaks, pwncat1phase=pwncat1phase, TSdc=TSdc)
Example #59
0

def main(img1_path, img2_path, output_path, show=False):
    img1, img2 = map(cv2.imread, [img1_path, img2_path])
    output_img = swap_faces(img1, img2)

    if show:
        cv2.imshow('output_img', output_img)
        if cv2.waitKey(0) > 30:
            pass

    cv2.imwrite(output_path, output_img)
    print 'Output image stored to %s' % output_path
    print 'Completed'

if __name__ == '__main__':
    parser = ArgumentParser(description='faceswap')
    parser.add_argument('img1_path', help='path to first image (head)')
    parser.add_argument('img2_path', help='path to second image (face)')
    parser.add_argument('-o', '--output', help='path for output image', default='output.jpg')
    parser.add_argument('--show', help='display output image', action="store_true")

    args = parser.parse_args()

    if not os.path.isfile(args.img1_path) or not os.path.isfile(args.img2_path):
        print 'Arguments not valid'
        parser.print_help()
        parser.exit()

    main(args.img1_path, args.img2_path, args.output, args.show)