def create_console(self, script):
    """Build a console-description dict for *script* from its docopt docstring.

    :param script: a module-like object exposing __doc__, __name__, __file__.
    :returns: dict with the script's name, file, doc, and a one-element
        ``paths`` list categorizing its options and arguments.
    :raises DocoptLanguageError: when the docstring has zero or more than
        one "usage:" section.
    """
    usage_sections = parse_section('usage:', script.__doc__)
    if len(usage_sections) == 0:
        raise DocoptLanguageError('"usage:" (case-insensitive) not found.')
    if len(usage_sections) > 1:
        raise DocoptLanguageError('More than one "usage:" (case-insensitive).')
    DocoptExit.usage = usage_sections[0]
    # Fix: parse the option defaults ONCE and reuse the list; the original
    # re-parsed the whole docstring four times (once per category below).
    defaults = parse_defaults(script.__doc__)
    pattern = parse_pattern(formal_usage(DocoptExit.usage), defaults)
    paths = [{
        # boolean flags, excluding the built-in --help/--version extras
        "flags": [opt for opt in defaults
                  if not opt.argcount and opt.name not in ["--help", "--version"]],
        # the built-in extras themselves
        "extras": [opt for opt in defaults
                   if not opt.argcount and opt.name in ["--help", "--version"]],
        # value-taking options with no default value
        "options": [opt for opt in defaults if opt.argcount and not opt.value],
        # options that declare a default value
        "default_options": [opt for opt in defaults if opt.value],
        # positional argument names from the usage pattern
        "arguments": [arg.name for arg in pattern.flat(Argument)],
    }]
    return {
        "name": script.__name__,
        "file": script.__file__,
        "doc": script.__doc__,
        "paths": paths,
    }
def docopt(doc, argv=None, help=True, version=None, options_first=False):  # @ReservedAssignment help
    """Re-implementation of docopt.docopt() function to parse ANYTHING at
    the end (for proxying django options).

    Matched pattern values are returned as a Dict; anything left over is
    appended to result["DJANGO_OPTIONS"]. Raises DocoptExit when nothing
    is collected or when kalite options are mixed into the django tail.
    """
    if argv is None:
        argv = sys.argv[1:]
    DocoptExit.usage = printable_usage(doc)
    options = parse_defaults(doc)
    pattern = parse_pattern(formal_usage(DocoptExit.usage), options)
    argv = parse_argv(TokenStream(argv, DocoptExit), list(options), options_first)
    pattern_options = set(pattern.flat(Option))
    # Expand each [options] shortcut to the doc-declared options that are
    # not already spelled out in the usage pattern.
    for ao in pattern.flat(AnyOptions):
        doc_options = parse_defaults(doc)
        ao.children = list(set(doc_options) - pattern_options)
    extras(help, version, argv, doc)
    __matched, __left, collected = pattern.fix().match(argv)
    # if matched and left == []: # better error message if left?
    if collected:  # better error message if left?
        result = Dict((a.name, a.value) for a in (pattern.flat() + collected))
        collected_django_options = len(result.get("DJANGO_OPTIONS", []))
        # NOTE(review): this slices sys.argv, not the local argv — assumes
        # argv == sys.argv[1:]; confirm behavior when argv is passed explicitly.
        result["DJANGO_OPTIONS"] = (
            result.get("DJANGO_OPTIONS", [])
            + sys.argv[len(collected) + (collected_django_options or 1):]
        )
        # If any of the collected arguments are also in the DJANGO_OPTIONS,
        # then exit because we don't want users to have put options for kalite
        # at the end of the command
        if any(map(lambda x: x.name in map(lambda x: x.split("=")[0],
                                           result["DJANGO_OPTIONS"]), collected)):
            sys.stderr.write(
                "Cannot mix django manage command options with kalite options. "
                "Always put django management options last.\n\n"
            )
            raise DocoptExit()
        return result
    raise DocoptExit()
def __init__(self, docopt_str, base_descriptor):
    """Load a base Boutiques descriptor and parse the docopt docstring.

    :param docopt_str: the docopt-formatted docstring to convert.
    :param base_descriptor: path to a JSON descriptor file; its 'groups',
        'inputs' and 'output-files' keys are stripped out.

    On any parse failure the base descriptor file is DELETED and an
    ImportError is raised via raise_error.
    """
    with open(base_descriptor, "r") as base_desc:
        self.descriptor = collections.OrderedDict(json.load(base_desc))
    # These sections will be regenerated from the docopt pattern.
    del self.descriptor['groups']
    del self.descriptor['inputs']
    del self.descriptor['output-files']
    self.docopt_str = docopt_str
    self.dependencies = collections.OrderedDict()
    self.all_desc_and_type = collections.OrderedDict()
    self.unique_ids = []
    try:
        # docopt code snippet to extract args tree (pattern)
        # should run if docopt script is valid
        options = parse_defaults(docopt_str)
        self.pattern = parse_pattern(
            formal_usage(self._parse_section('usage:', docopt_str)[0]),
            options)
        argv = parse_argv(TokenStream(sys.argv[1:], DocoptLanguageError),
                          list(options), False)
        pattern_options = set(self.pattern.flat(Option))
        # Expand [options] shortcuts with doc-declared options not already
        # present in the usage pattern.
        for options_shortcut in self.pattern.flat(AnyOptions):
            doc_options = parse_defaults(docopt_str)
            options_shortcut.children = list(
                set(doc_options) - pattern_options)
        matched, left, collected = self.pattern.fix().match(argv)
    except Exception:
        # Invalid docopt script: remove the half-initialized descriptor file.
        os.remove(base_descriptor)
        raise_error(ImportError, "Invalid docopt script")
def usage(self):
    """Map each option of this command's docstring to whether it takes
    an argument (short form preferred as the key).

    Useful when usage is not defined in a subclass, although defining it
    there is recommended.
    """
    result = {}
    for option in docopt.parse_defaults(self.__doc__):
        result[option.short or option.long] = bool(option.argcount)
    return result
def main():
    """ontree CLI entry point: configure SciGraph clients from CLI args,
    then run the test suite, the web server, or a one-off render."""
    from docopt import docopt, parse_defaults
    args = docopt(__doc__, version='ontree 0.0.0')
    # option name -> default value (None for flags)
    # NOTE(review): `defaults` is never read in this function — confirm
    # whether it is needed or can be dropped.
    defaults = {o.name: o.value if o.argcount else None
                for o in parse_defaults(__doc__)}
    verbose = args['--verbose']
    # Propagate verbosity to all shared SciGraph client singletons.
    sgg._verbose = verbose
    sgv._verbose = verbose
    sgc._verbose = verbose
    sgd._verbose = verbose
    if verbose:
        log.setLevel('DEBUG')
    if args['--test']:
        test()
    elif args['server']:
        api = args['--api']
        if api is not None:
            # Point every client at the requested API endpoint.
            scigraph.scigraph_client.BASEPATH = api
            sgg._basePath = api
            sgv._basePath = api
            sgc._basePath = api
            # reinit curies state
            sgc.__init__(cache=sgc._get == sgc._cache_get, verbose=sgc._verbose)
            sgd._basePath = api
        api_key = args['--key']
        if api_key:
            sgg.api_key = api_key
            sgv.api_key = api_key
            sgc.api_key = api_key
            sgd.api_key = api_key
            scs = OntTerm.query.services[0]
            scs.api_key = api_key
            scs.setup(instrumented=OntTerm)
        _data_endpoint = args['--data-api']
        data_endpoint = (_data_endpoint if _data_endpoint
                         else scigraph.scigraph_client.BASEPATH)
        app = server(verbose=verbose, data_endpoint=data_endpoint)
        # app.debug = False
        # app.run(host='localhost', port=args['--port'], threaded=True)  # nginxwoo
        # app.debug = True
        app.run(host='0.0.0.0', port=args['--port'], threaded=True)  # nginxwoo
        # FIXME pypy3 has some serious issues yielding when threaded=True, gil issues?
        os.sys.exit()
    else:
        # NOTE(review): '--incoming' selects `out` here while the final
        # fallback is `inc` — the mapping looks inverted; confirm intent.
        direction = both if args[
            '--both'] else out if args['--incoming'] else inc
        # TODO default direction table to match to expected query behavior based on rdf direction
        pred = args['<predicate-curie>']
        root = args['<root-curie>']
        render(pred, root, direction)
def cli():
    """CLI entry point: parse arguments with docopt, run main(), and exit
    with status 1 when main() raises RuntimeWarning."""
    parsed = docopt(__doc__)
    # long option name -> default value, taken from the module docstring
    option_defaults = {opt.long: opt.value for opt in parse_defaults(__doc__)}
    try:
        main(parsed, option_defaults)
    except RuntimeWarning as warning:
        print(" Warning: %s" % warning, file=sys.stderr)
        sys.exit(1)
def setup(cls, doc, argv=None, **kwargs):
    """Parse *doc* with docopt and construct an options instance.

    kwargs are passed to docopt. Returns (options, args, defaults).
    """
    from docopt import docopt, parse_defaults
    parsed = docopt(doc, argv=argv, **kwargs)
    # option name -> default value (None for boolean flags)
    default_values = {}
    for opt in parse_defaults(doc):
        default_values[opt.name] = opt.value if opt.argcount else None
    instance = cls(parsed, default_values, argv=argv)
    return instance, parsed, default_values
def main():
    """slimgen entry point: parse CLI args, build Options, and run Main."""
    from docopt import docopt, parse_defaults
    args = docopt(__doc__, version='slimgen 0.0.0')
    # option name -> default value (None for boolean flags)
    defaults = {}
    for opt in parse_defaults(__doc__):
        defaults[opt.name] = opt.value if opt.argcount else None
    runner = Main(Options(args, defaults))
    if runner.options.debug:
        print(runner.options)
    runner()
def docopt(doc, argv=None, help=True, version=None, options_first=False):  # @ReservedAssignment help
    """Re-implementation of docopt.docopt() function to parse ANYTHING at
    the end (for proxying django options).

    Leftover argv items are appended to result['DJANGO_OPTIONS']. Raises
    DocoptExit when nothing matches or when kalite options are mixed into
    the django tail.
    """
    if argv is None:
        argv = sys.argv[1:]
    DocoptExit.usage = printable_usage(doc)
    options = parse_defaults(doc)
    pattern = parse_pattern(formal_usage(DocoptExit.usage), options)
    argv = parse_argv(TokenStream(argv, DocoptExit), list(options), options_first)
    pattern_options = set(pattern.flat(Option))
    # Expand each [options] shortcut with doc-declared options that are
    # not already in the usage pattern.
    for ao in pattern.flat(AnyOptions):
        doc_options = parse_defaults(doc)
        ao.children = list(set(doc_options) - pattern_options)
    extras(help, version, argv, doc)
    __matched, __left, collected = pattern.fix().match(argv)
    # if matched and left == []: # better error message if left?
    if collected:  # better error message if left?
        result = Dict((a.name, a.value) for a in (pattern.flat() + collected))
        collected_django_options = len(result.get('DJANGO_OPTIONS', []))
        # NOTE(review): slices sys.argv rather than the local argv — assumes
        # argv == sys.argv[1:]; confirm when argv is passed explicitly.
        result['DJANGO_OPTIONS'] = (result.get('DJANGO_OPTIONS', []) +
                                    sys.argv[len(collected) + (collected_django_options or 1):])
        # If any of the collected arguments are also in the DJANGO_OPTIONS,
        # then exit because we don't want users to have put options for kalite
        # at the end of the command
        if any(
                map(
                    lambda x: x.name in map(lambda x: x.split("=")[0], result[
                        'DJANGO_OPTIONS']), collected)):
            sys.stderr.write(
                "Cannot mix django manage command options with kalite options. "
                "Always put django management options last.\n\n")
            raise DocoptExit()
        return result
    raise DocoptExit()
def main():
    """googapis entry point: parse CLI args, build Options, and run Main."""
    from docopt import docopt, parse_defaults
    cli_args = docopt(__doc__, version='googapis 0.0.0')
    # option name -> default value (None for boolean flags)
    option_defaults = {}
    for opt in parse_defaults(__doc__):
        option_defaults[opt.name] = opt.value if opt.argcount else None
    app = Main(Options(cli_args, option_defaults))
    if app.options.debug:
        log.setLevel('DEBUG')
        print(app.options)
    app()
def main():
    """Entry point: parse CLI arguments, build Options, and run Main.

    Fixes: removed the unused local ``debug`` (args['--debug'] was read
    but never used) and stopped rebinding the name ``main`` to the Main
    instance, which shadowed this function.
    """
    from docopt import docopt, parse_defaults
    args = docopt(__doc__)
    # option name -> default value (None for boolean flags)
    defaults = {o.name: o.value if o.argcount else None
                for o in parse_defaults(__doc__)}
    runner = Main(Options(args, defaults))
    runner()
def parse_params(cmd):
    """Build the parameter tree (CommandParams) for the target docopt tool.

    Returns (param_tree, option_help) where option_help maps each option
    to its help string.
    """
    from docopt import parse_defaults, parse_pattern, formal_usage, printable_usage
    usage_text = get_usage(cmd)
    option_defaults = parse_defaults(usage_text)
    usage_pattern = parse_pattern(formal_usage(printable_usage(usage_text)),
                                  option_defaults)
    tree = CommandParams()
    build_command_tree(usage_pattern, tree)
    descriptions = dict(list(get_options_descriptions(usage_text)))
    return tree, descriptions
def parse_params(cmd):
    """Create the CommandParams tree for the target docopt tool.

    Also returns a dict mapping each option to its help string.
    """
    from docopt import parse_defaults, parse_pattern, formal_usage, printable_usage
    doc = get_usage(cmd)
    pattern = parse_pattern(formal_usage(printable_usage(doc)), parse_defaults(doc))
    tree = CommandParams()
    build_command_tree(pattern, tree)
    return tree, dict(list(get_options_descriptions(doc)))
def parse_params(cmd, given_usage=None):
    """Build the CommandParams tree for *cmd*, or for *given_usage* when
    supplied, plus a dict of option -> help string."""
    from docopt import parse_defaults, parse_pattern, formal_usage, printable_usage
    if given_usage is None:
        usage = get_usage(cmd)
    else:
        usage = given_usage
    opts = parse_defaults(usage)
    tree = CommandParams()
    build_command_tree(parse_pattern(formal_usage(printable_usage(usage)), opts),
                       tree)
    return tree, dict(list(get_options_descriptions(usage)))
def pattern(self):
    """Return the docopt match pattern for this command's usage text,
    or None when docopt is not importable."""
    try:
        from docopt import parse_defaults, formal_usage, parse_pattern
    except ImportError:
        # Degrade gracefully: help-pattern generation is optional.
        LOGGER.warning('docopt not installed.')
        LOGGER.warning('Command help pattern generation unavailable.')
        return None
    usage_text = self.usage
    return parse_pattern(formal_usage(usage_text), parse_defaults(usage_text))
def dochelper(doc):
    """Inject [default: ...] values taken from environment variables into
    a docopt docstring, for options that do not already declare one."""
    result = doc
    for opt in docopt.parse_defaults(doc):
        env_name = EnvOption.prefix() + opt.name.strip('-').replace('-', '_').upper()
        env_value = os.getenv(env_name)
        if env_value is None:
            continue
        # Match the option's help line by short name, long name, or both.
        pattern = r"(^ *(%s|%s|%s %s) .*?$)" \
            % (opt.short, opt.long, opt.short, opt.long)
        for hit in re.findall(pattern, doc, re.MULTILINE):
            line = hit[0]
            if re.search(r"\[default: .*?\]", line):
                continue
            result = result.replace(line, "%s [default: %s]" % (line, env_value))
    return result
def settable_options(doc, argv, ignore, options_first):
    """Determine which options we can set, which ones are boolean, and which ones are repeatable.

    All set items are option long names.

    :param str doc: Docstring from docoptcfg().
    :param iter argv: CLI arguments from docoptcfg().
    :param iter ignore: Options to ignore from docoptcfg().
    :param bool options_first: docopt argument from docoptcfg().

    :return: Settable options, boolean options, repeatable options, and short to long option name mapping.
    :rtype: tuple
    """
    settable, booleans, repeatable, short_map = set(), set(), set(), dict()

    # Determine which options are settable by docoptcfg and which ones are flags/booleans.
    options = docopt.parse_defaults(doc)
    short_map.update((o.short, o.long) for o in options)
    parsed_argv = docopt.parse_argv(
        docopt.TokenStream(argv, docopt.DocoptExit), list(options),
        options_first)
    # Options given explicitly on the command line take precedence.
    overridden = [o.long for o in parsed_argv if hasattr(o, 'long')]
    for option in options:
        # Skip overridden, ignored, and short-only (long is None) options.
        if option.long in overridden or (option.long in ignore
                                         or option.short in ignore) or option.long is None:
            continue
        if option.argcount == 0:
            booleans.add(option.long)
        settable.add(option.long)

    # Determine which options are repeatable.
    # NOTE(review): relies on docopt.DocoptExit.usage having been set by an
    # earlier docopt() call — confirm call order in docoptcfg().
    if settable and '...' in doc:
        pattern = docopt.parse_pattern(
            docopt.formal_usage(docopt.DocoptExit.usage), options)
        for option in pattern.fix().flat():
            if not hasattr(option, 'long'):
                continue  # Positional argument or sub-command.
            if getattr(option, 'long') not in settable:
                continue  # Don't care about this if we can't set it.
            if getattr(option, 'long') in booleans and getattr(
                    option, 'value') == 0:
                repeatable.add(getattr(option, 'long'))
            elif hasattr(getattr(option, 'value'), '__iter__'):
                repeatable.add(getattr(option, 'long'))
    return settable, booleans, repeatable, short_map
def dochelper(doc):
    """Rewrite a docopt docstring so that options whose matching environment
    variable is set gain a [default: <value>] annotation, unless one is
    already present."""
    updated = doc
    for default in docopt.parse_defaults(doc):
        key = default.name.strip('-').replace('-', '_').upper()
        value = os.getenv(EnvOption.prefix() + key)
        if value is None:
            continue
        # Help line may name the short form, the long form, or both.
        regex = r"(^ *({0}|{1}|{0} {1}) .*?$)".format(default.short, default.long)
        for found in re.findall(regex, doc, re.MULTILINE):
            text = found[0]
            if not re.search(r"\[default: .*?\]", text):
                updated = updated.replace(
                    text, "{} [default: {}]".format(text, value))
    return updated
def main():
    """scigraph-codegen entry point: resolve the API endpoint, generate
    client code, write it to --output-file, and run the result."""
    from docopt import docopt
    from docopt import parse_defaults
    # option name -> default value (None for flags)
    defaults = {o.name: o.value if o.argcount else None
                for o in parse_defaults(__doc__)}
    args = docopt(__doc__, version='scigraph-codegen 1.0.0')
    ssd = 'https://scicrunch.org/swagger-docs'
    # Redirect the default and legacy scicrunch endpoints to swagger-docs.
    if args['--api'] == defaults['--basepath']:
        args['--api'] = ssd
    if args['--api'] == 'https://scicrunch.org/api/1/scigraph':
        args['--api'] = ssd
    if args['--api'] == ssd:
        State2.path_prefix = '/scigraph'
    output_file, api, version, basepath = (
        args['--' + k]
        for k in ('output-file', 'api', 'scigraph-version', 'basepath'))
    version = int(version)
    basepath = None if basepath == 'default' else basepath
    # SciGraph v1 serves api-docs; v2+ serves swagger.json.
    if version < 2:
        state = State
        docs_path = 'api-docs'
    else:
        state = State2
        docs_path = 'swagger.json'
    api_url = f'{api}/{docs_path}'
    print(api_url)
    dynamics = [f'{d}/swagger.json' for d in args['--dynamic']]
    if dynamics:
        print('dynamics:', dynamics)
    s = state(api_url, basepath, dynamics=dynamics)
    code = s.code()
    with open(output_file, 'wt') as f:
        f.write(code)
    import os
    # NOTE(review): shell-interpolates the user-supplied output path;
    # a path with spaces or shell metacharacters breaks/abuses this.
    os.system(f'python {output_file}')
def settable_options(doc, argv, ignore, options_first):
    """Determine which options we can set, which ones are boolean, and which ones are repeatable.

    All set items are option long names.

    :param str doc: Docstring from docoptcfg().
    :param iter argv: CLI arguments from docoptcfg().
    :param iter ignore: Options to ignore from docoptcfg().
    :param bool options_first: docopt argument from docoptcfg().

    :return: Settable options, boolean options, repeatable options, and short to long option name mapping.
    :rtype: tuple
    """
    settable, booleans, repeatable, short_map = set(), set(), set(), dict()

    # Determine which options are settable by docoptcfg and which ones are flags/booleans.
    options = docopt.parse_defaults(doc)
    short_map.update((o.short, o.long) for o in options)
    parsed_argv = docopt.parse_argv(docopt.TokenStream(argv, docopt.DocoptExit), list(options), options_first)
    overridden = [o.long for o in parsed_argv if hasattr(o, 'long')]
    for option in options:
        # Fix: also skip short-only options (option.long is None); without
        # this guard, None leaked into `settable`.
        if option.long in overridden or (option.long in ignore or option.short in ignore) or option.long is None:
            continue
        if option.argcount == 0:
            booleans.add(option.long)
        settable.add(option.long)

    # Determine which options are repeatable.
    if settable and '...' in doc:
        pattern = docopt.parse_pattern(docopt.formal_usage(docopt.DocoptExit.usage), options)
        for option in pattern.fix().flat():
            if not hasattr(option, 'long'):
                continue  # Positional argument or sub-command.
            if getattr(option, 'long') not in settable:
                continue  # Don't care about this if we can't set it.
            if getattr(option, 'long') in booleans and getattr(option, 'value') == 0:
                repeatable.add(getattr(option, 'long'))
            elif hasattr(getattr(option, 'value'), '__iter__'):
                repeatable.add(getattr(option, 'long'))
    return settable, booleans, repeatable, short_map
def main(args=None):
    """Fetch (or load from cache) Allen Brain Atlas mouse cell-type
    specimens and build the AllenCellTypes configuration.

    :param args: docopt-style argument dict; defaults to the option
        defaults parsed from the module docstring.
    :returns: the built AllenCellTypes config.

    Fixes: the original evaluated a dict default argument at import time
    (mutable-default pitfall — one shared dict across calls) and wrote
    the cache file even when --input was not provided; also renamed the
    local `input` which shadowed the builtin.
    """
    if args is None:
        args = {o.name: o.value for o in parse_defaults(__doc__)}
    #print(args)
    if not args['--refresh'] and args['--input'] and Path(
            args['--input']).exists():
        # Use the cached query result.
        with open(args['--input'], 'rt') as f:
            data = json.load(f)
    else:
        # Query the Allen Brain Atlas RMA API for all mouse cell specimens.
        response = requests.get(
            'http://api.brain-map.org/api/v2/data/query.json?criteria='
            'model::Specimen,rma::criteria,[is_cell_specimen$eq%27true%27],'
            'products[name$eq%27Mouse%20Cell%20Types%27],'
            'rma::include,structure,donor(transgenic_lines),'
            'specimen_tags,cell_soma_locations,rma::options[num_rows$eqall]')
        data = response.json()['msg']
        # Only cache when a path was actually provided (the original
        # crashed on open(None) here).
        if args['--input']:
            with open(args['--input'], 'wt') as f:
                json.dump(data, f, indent=4)
    act = AllenCellTypes(data, args['--output'])
    act.build_transgenic_lines()
    act.build_neurons()
    return act.config
def docopt_cmd_completion(func, **kwargs):
    """Build a cmd.Cmd ``complete_*`` method from *func*'s docopt docstring.

    The returned wrapper walks the usage pattern with the words already
    typed and offers the next possible tokens as completions.
    """
    options = parse_defaults(func.__doc__)
    pattern = parse_pattern(formal_usage(printable_usage(func.__doc__)),
                            options).children[0]

    def get_state(it, pattern):
        # Recursively consume typed words, narrowing the candidate branches;
        # returns the remaining branches once the input is exhausted.
        try:
            value = next(it)
        except StopIteration:
            return pattern
        res = []
        for x in pattern:
            if ((type(x[0]) == list and value in flatten(x[0]))
                    or value == x[0]):
                res.append(x[1:])
        if res:
            return get_state(it, res)
        return []

    def wrapper(self, text, line, begidx, endidx):
        # cmd.Cmd completion hook signature.
        argv = shlex.split(line[:endidx])[1:]
        if not line[endidx - 1].isspace():
            # Mid-word: last token is the prefix being completed.
            target = argv[-1]
            argv = argv[:-1]
        else:
            target = ''
        state = get_state(iter(argv), pattern.noflat())
        res = []
        for x in state:
            if type(x[0]) == list:
                res.extend(flatten(x[0]))
            else:
                res.append(x[0])
        return list(set(x for x in res if x.startswith(target)))

    # Register as complete_<command> (strip the do_ prefix from func).
    wrapper.__name__ = str('complete_' + func.__name__[3:])
    wrapper.__module__ = func.__module__
    wrapper.__doc__ = func.__doc__
    return wrapper
def main():
    """Read a docopt docstring (file argument or stdin), build its pattern,
    and emit a Ragel FSM C source file from template.rl."""
    args = docopt.docopt(__doc__)
    try:
        if args['<docopt>'] is not None:
            with open(args['<docopt>'], 'r') as f:
                args['<docopt>'] = f.read()
        elif args['<docopt>'] is None and sys.stdin.isatty():
            # No file and no piped input: show help and bail.
            print(__doc__.strip("\n"))
            sys.exit("")
        else:
            args['<docopt>'] = sys.stdin.read()
    except IOError as e:
        sys.exit(e)
    doc = args['<docopt>']
    usage = parse_section('usage:', doc)
    # Exactly one usage section is required; anything else -> error message.
    s = ['More than one ', '"usage:" (case-insensitive)', ' not found.']
    usage = {0: s[1:], 1: usage[0] if usage else None}.get(len(usage), s[:2])
    if isinstance(usage, list):
        raise docopt.DocoptLanguageError(''.join(usage))
    options = docopt.parse_defaults(doc)
    pattern = docopt.parse_pattern(docopt.formal_usage(usage), options)
    fsm = ragel_ast(pattern)
    leafs, commands, arguments, flags, options = parse_leafs(pattern)
    # Generate the C struct fields for each leaf category.
    command_fields = '\n    '.join(
        map(lambda c: 'int {0};'.format(clean_name(c)), commands))
    flag_fields = '\n    '.join(
        map(lambda c: 'int {0};'.format(clean_name(c)), flags))
    option_fields = '\n    '.join(
        map(lambda c: 'char* {0};'.format(clean_name(c)), options))
    argument_fields = '\n    '.join(
        map(lambda c: 'char* {0};'.format(clean_name(c)), arguments))
    # Generate the Ragel actions that populate those fields.
    command_actions = '\n    '.join(
        map(
            lambda c: 'action command_{0}{{ fsm->opt->{0} = 1; }}'.format(
                clean_name(c)), commands))
    flag_actions = '\n    '.join(
        map(
            lambda c: 'action option_{0}{{ fsm->opt->{0} = 1; }}'.format(
                clean_name(c)), flags))
    option_actions = '\n    '.join(
        map(
            lambda c:
            'action option_{0}{{ fsm->opt->{0} = strdup(fsm->buffer); }}'.
            format(clean_name(c)), options))
    argument_actions = '\n    '.join(
        map(
            lambda c:
            'action argument_{0}{{ fsm->opt->{0} = strdup(fsm->buffer); }}'.
            format(clean_name(c)), arguments))
    options_with_defaults = filter(lambda x: x.value is not None, options)
    option_defaults = '\n    '.join(
        map(
            lambda c: 'fsm->opt->{0} = strdup("{1}");'.format(
                clean_name(c), c.value), options_with_defaults))
    # Turn the docstring into fprintf lines for the generated usage output.
    usage = '\n    '.join(
        map(lambda l: 'fprintf(stdout, "{0}\\n");'.format(l), doc.split('\n')))
    file = os.path.join(os.path.dirname(os.path.realpath(__file__)),
                        "template.rl")
    print(
        Template(open(file).read()).safe_substitute(
            fsm=fsm,
            usage=usage,
            command_fields=command_fields,
            flag_fields=flag_fields,
            option_fields=option_fields,
            argument_fields=argument_fields,
            command_actions=command_actions,
            flag_actions=flag_actions,
            option_actions=option_actions,
            argument_actions=argument_actions,
            option_defaults=option_defaults,
        ))
def _parse_docopt(doc):
    """Parse a docopt docstring into a docopt pattern object."""
    defaults = docopt.parse_defaults(doc)
    usage = docopt.formal_usage(docopt.printable_usage(doc))
    return docopt.parse_pattern(usage, defaults)
def counterfactual():
    """Run a full counterfactual experiment: train a manifold, render a
    "dream" video, then render counterfactual videos, saving all results
    under ~/results/<experiment_name>."""
    arguments = docopt.docopt(__doc__)
    dataset = arguments['--dataset']
    thought_vector_size = int(arguments['--thought-vector'])
    encoder_model = arguments['--encoder']
    decoder_model = arguments['--decoder']
    classifier_model = arguments['--classifier']
    epochs = int(arguments['--epochs'])
    decay = float(arguments['--decay'])
    learning_rate = float(arguments['--learning-rate'])
    gan_type = arguments['--gan-type']
    perceptual_layers = int(arguments['--perceptual-layers'])
    img_width = int(arguments['--img-width'])
    try:
        # Reuse a caller-supplied timestamp so re-runs land in the same dir.
        experiment_timestamp = int(arguments['--timestamp'])
    except ValueError:
        experiment_timestamp = int(time.time())
    experiment_name = '-'.join(
        str(x) for x in [
            dataset,
            thought_vector_size,
            encoder_model,
            decoder_model,
            classifier_model,
            experiment_timestamp,
        ])
    timestamp = int(time.time())

    # Download the dataset if it doesn't already exist
    # TODO: security lol  (shell-interpolates the --dataset value)
    os.system('scripts/download_{}.py'.format(dataset))
    train_dataset = os.path.expanduser(
        '~/data/{}_train.dataset'.format(dataset))
    test_dataset = os.path.expanduser('~/data/{}_test.dataset'.format(dataset))

    # Fill params with defaults taken from main.py's docopt docstring.
    import main
    defaults = {
        opt.long: opt.value
        for opt in docopt.parse_defaults(main.__doc__)
    }
    params = {main.argname(k): main.argval(defaults[k]) for k in defaults}
    # TODO: Merge main.py params with these params in some nice elegant way
    # Like, imagine if each experiment could inherit all the main.py params
    # But it would also have its own params which override the inherited ones
    # This is beginning to sound dangerously object-oriented
    params['experiment_name'] = experiment_name
    params['encoder_input_filename'] = test_dataset
    params['decoder_input_filename'] = test_dataset
    params['classifier_input_filename'] = test_dataset
    params['vocabulary_filename'] = train_dataset
    params['encoder_datatype'] = 'img'
    params['decoder_datatype'] = 'img'
    params['classifier_datatype'] = 'lab'
    params['encoder_model'] = encoder_model
    params['decoder_model'] = decoder_model
    # The discriminator reuses the encoder architecture.
    params['discriminator_model'] = encoder_model
    params['classifier_model'] = classifier_model
    params['thought_vector_size'] = thought_vector_size
    params['img_width_encoder'] = img_width
    params['img_width_decoder'] = img_width
    params['epochs'] = epochs
    params['learning_rate'] = learning_rate
    params['decay'] = decay
    params['perceptual_loss_layers'] = perceptual_layers
    params['batches_per_epoch'] = 50
    params['enable_classifier'] = True
    params['enable_discriminator'] = True
    params['enable_perceptual_loss'] = True
    params['encoder_weights'] = 'encoder_{}.h5'.format(encoder_model)
    params['decoder_weights'] = 'decoder_{}.h5'.format(decoder_model)
    params['classifier_weights'] = 'classifier_{}.h5'.format(classifier_model)
    params['discriminator_weights'] = 'discriminator_{}.h5'.format(
        encoder_model)
    params['gan_type'] = gan_type
    # Discriminator/generator learn 10x slower than the main model.
    params['learning_rate_disc'] = learning_rate * .1
    params['learning_rate_generator'] = learning_rate * .1

    # TODO: security lol  (shell-interpolates experiment_name)
    os.system('mkdir ~/results/{}'.format(experiment_name))
    save_params(experiment_name, params)

    import transcoder
    # First train a manifold on the training split.
    train_params = params.copy()
    train_params['stdout_filename'] = 'stdout_train_{}.txt'.format(timestamp)
    train_params['encoder_input_filename'] = train_dataset
    train_params['decoder_input_filename'] = train_dataset
    train_params['classifier_input_filename'] = train_dataset
    train_params['mode'] = 'train'
    if train_params['epochs'] > 0:
        transcoder.main(**train_params)

    # Evaluate the classifier
    """
    eval_params = params.copy()
    eval_params['decoder_model'] = params['classifier_model']
    eval_params['decoder_datatype'] = params['classifier_datatype']
    eval_params['decoder_weights'] = params['classifier_weights']
    eval_params['enable_classifier'] = False
    eval_params['enable_discriminator'] = False
    eval_params['stdout_filename'] = 'stdout_eval_{}.txt'.format(timestamp)
    eval_params['mode'] = 'evaluate'
    transcoder.main(**eval_params)
    """

    # Generate a "dream" video
    dream_params = params.copy()
    dream_params['video_filename'] = 'dream_output_{}.mjpeg'.format(timestamp)
    dream_params['stdout_filename'] = 'stdout_dream_{}.txt'.format(timestamp)
    dream_params['enable_discriminator'] = False
    dream_params['mode'] = 'dream'
    transcoder.main(**dream_params)
    # Re-encode the video to mp4 for storage
    encode_video(experiment_name, dream_params['video_filename'])

    # Add counterfactuals
    counter_params = params.copy()
    counter_params['video_filename'] = 'counterfactual_output_{}.mjpeg'.format(
        timestamp)
    counter_params['stdout_filename'] = 'stdout_counterfactual_{}.txt'.format(
        timestamp)
    counter_params['enable_discriminator'] = False
    counter_params['mode'] = 'counterfactual'
    for _ in range(3):
        transcoder.main(**counter_params)
    # Re-encode the video to mp4 for storage
    encode_video(experiment_name, counter_params['video_filename'])

    # Touch a file to mark the experiment as finished
    filename = os.path.expanduser(
        '~/results/{}/finished'.format(experiment_name))
    open(filename, 'w').write('OK')
from docopt import parse_defaults
from joblib import Parallel, delayed
from ttlser import CustomTurtleSerializer
from pyontutils.core import OntGraph
from pyontutils.utils import noneMembers, TODAY, setPS1, refile, TermColors as tc, log
from pyontutils.namespaces import getCuries, OntCuries
from pyontutils.hierarchies import creatTree
from pyontutils.closed_namespaces import rdf, rdfs, owl, skos, oboInOwl, dc

# Python < 3.7 compatibility: fall back to IPython's embed as breakpoint.
try:
    breakpoint
except NameError:
    from IPython import embed as breakpoint

# option name -> default value (None for boolean flags), from the module docstring
defaults = {o.name: o.value if o.argcount else None
            for o in parse_defaults(__doc__)}

# Number of leading hex digits kept when abbreviating hashes.
COMMIT_HASH_HEAD_LEN = 8
CONFIG_HASH_HEAD_LEN = 8

# Ontology files known to be large leaf nodes of the import tree.
bigleaves = 'go.owl', 'uberon.owl', 'pr.owl', 'doid.owl', 'taxslim.owl', 'chebislim.ttl', 'ero.owl'

Query = namedtuple('Query', ['root', 'relationshipType', 'direction', 'depth'])


class NotBuiltError(FileNotFoundError):
    """Raised when a requested ontology artifact has not been built yet."""
    pass


def identity_json(blob,
        # Fragment continues an enclosing try: read the docopt input from
        # stdin and load the C template file.
        args["<docopt>"] = sys.stdin.read()
        if args["--template"] is None:
            args["--template"] = os.path.join(
                os.path.dirname(os.path.realpath(__file__)), "template.c")
        with open(args["--template"], "r") as f:
            args["--template"] = f.read()
    except IOError as e:
        sys.exit(e)
    doc = args["<docopt>"]
    usage = docopt.parse_section("usage:", doc)
    # Exactly one usage section is required; build the error text otherwise.
    s = ["More than one ", '"usage:" (case-insensitive)', " not found."]
    usage = {0: s[1:], 1: usage[0] if usage else None}.get(len(usage), s[:2])
    if isinstance(usage, list):
        raise docopt.DocoptLanguageError("".join(usage))
    all_options = docopt.parse_defaults(doc)
    pattern = docopt.parse_pattern(docopt.formal_usage(usage), all_options)
    leafs, commands, arguments, flags, options = parse_leafs(pattern, all_options)
    # Emit C struct member declarations per leaf category; each block is
    # prefixed with a comment and omitted entirely when empty.
    t_commands = ";\n    ".join("int %s" % c_name(cmd.name) for cmd in commands)
    t_commands = ("\n    /* commands */\n    " + t_commands + ";") if t_commands != "" else ""
    t_arguments = ";\n    ".join("char *%s" % c_name(arg.name) for arg in arguments)
    t_arguments = ("\n    /* arguments */\n    " + t_arguments + ";") if t_arguments != "" else ""
    t_flags = ";\n    ".join("int %s" % c_name(flag.long or flag.short) for flag in flags)
    t_flags = ("\n    /* options without arguments */\n    " + t_flags + ";") if t_flags != "" else ""
    t_options = ";\n    ".join("char *%s" % c_name(opt.long or opt.short) for opt in options)
    t_options = ("\n    /* options with arguments */\n    " + t_options + ";") if t_options != "" else ""
    # Default values as C initializers; string literals get a (char*) cast,
    # and the whole list is wrapped to 72 columns.
    t_defaults = ", ".join(to_c(leaf.value) for leaf in leafs)
    t_defaults = re.sub(r'"(.*?)"', r'(char*) "\1"', t_defaults)
    t_defaults = "\n        ".join(textwrap.wrap(t_defaults, 72))
    t_defaults = ("\n        " + t_defaults + ",") if t_defaults != "" else ""
def main(local=False):
    """Build the scibot bookmarklet Flask app.

    :param local: when True, serve immediately over a self-signed-TLS
        wsgiref server on localhost; otherwise return the app object.
    """
    from scibot.config import api_token, username, group, group2
    print(username, group, group2)  # sanity check
    from scibot.sync import __doc__ as sync__doc__, Locker, client
    from scibot.config import syncword
    if syncword is None:
        raise KeyError('Please set the SCIBOT_SYNC environment variable')

    from docopt import docopt, parse_defaults
    # Fallback sync port comes from the sync module's docopt defaults.
    _sdefaults = {o.name: o.value if o.argcount else None
                  for o in parse_defaults(sync__doc__)}
    _backup_sync_port = int(_sdefaults['--port'])

    app = Flask('scibot bookmarklet server')

    # Only honor --sync-port when run as a script; otherwise use the default.
    if __name__ == '__main__':
        args = docopt(__doc__)
        _sync_port = args['--sync-port']
        if _sync_port:
            sync_port = int(_sync_port)
        else:
            sync_port = _backup_sync_port
    else:
        sync_port = _backup_sync_port

    chan = 'localhost', sync_port
    # TODO
    #try:
    #except AuthenticationError as e:
        #raise e
    send = run(client, chan, syncword)
    URL_LOCK = Locker(send)
    app.URL_LOCK = URL_LOCK

    #@app.route('/synctest', methods=['GET'])
    def synctest():
        # Smoke-test the URL lock round trip at startup.
        URL_LOCK.start_uri('a-test-uri')
        URL_LOCK.stop_uri('a-test-uri')
        return 'test-passed?'
    synctest()

    @app.route('/controlled-tags', methods=['GET'])
    def route_controlled_tags():
        curator_tags = curatorTags()
        # TODO need client support for workflow:RRID -> * here
        return '\n'.join(curator_tags), 200, {'Content-Type':'text/plain; charset=utf-8'}

    @app.route('/rrid', methods=['POST', 'OPTIONS'])
    def rrid():
        return rrid_wrapper(request, username, api_token, group, 'logs/rrid/', URL_LOCK)

    @app.route('/validaterrid', methods=['POST', 'OPTIONS'])
    def validaterrid(request):
        # NOTE(review): this view declares a `request` parameter, which
        # Flask will not supply and which shadows the global proxy —
        # looks like a bug; confirm this route ever worked.
        return rrid_wrapper(request, username, api_token, group2, 'logs/validaterrid/', URL_LOCK)

    @app.route('/bookmarklet', methods=['GET'])
    def bookmarklet():
        return bookmarklet_wrapper(request, 'rrid')

    @app.route('/validatebookmarklet', methods=['GET'])
    def validatebookmarklet():
        return bookmarklet_wrapper(request, 'validaterrid')

    @app.route('/export', methods=['GET'])
    def export():
        # Stream all annotations as a gzipped CSV attachment.
        print('starting csv export')
        output_rows, DATE = export_impl()
        data = StringIO()
        writer = csv.writer(data)
        writer.writerows(sorted(output_rows))
        return gzip.compress(data.getvalue().encode()), 200, {
            'Content-Type': 'text/csv',
            'Content-Disposition': 'attachment;filename = RRID-data-%s.csv' % DATE,
            'Content-Encoding': 'gzip'}

    @app.route('/export.json', methods=['GET'])
    def export_json():
        # Same export, but as gzipped pretty-printed JSON.
        print('starting json export')
        output_json, DATE = export_json_impl()
        data = json.dumps(output_json, sort_keys=True, indent=4)
        return gzip.compress(data.encode()), 200, {
            'Content-Type': 'application/json',
            'Content-Encoding': 'gzip'}

    if not local:
        return app
    else:
        from os.path import expanduser
        from wsgiref.simple_server import make_server
        from scibot.config import test_host, port_bookmarklet
        print('no login detected, running on localhost only')
        host = test_host
        port = port_bookmarklet
        print('host: %s, port %s' % ( host, port ))
        server = make_server(host, port, app)
        # openssl req -new -x509 -keyout scibot-self-sign-temp.pem -out scibot-self-sign-temp.pem -days 365 -nodes
        #server.socket = ssl.wrap_socket(server.socket,
                                        #keyfile='/etc/letsencrypt/live/scibot.scicrunch.io/privkey.pem',
                                        #certfile='/etc/letsencrypt/live/scibot.scicrunch.io/fullchain.pem',
                                        #server_side=True)
        server.socket = ssl.wrap_socket(
            server.socket,
            keyfile=expanduser('~/files/certs/scibot_test/tmp-nginx.key'),
            certfile=expanduser('~/files/certs/scibot_test/tmp-nginx.crt'),
            server_side=True)
        server.serve_forever()
def main():
    """Entry point for the Docopt μC code generator.

    Reads the docopt usage file and the C/H templates named on the command
    line, parses the usage text into commands/tokens, validates the limits
    of the μC target (max 64 unique tokens, max 6 parts per command), and
    renders the output .c/.h files into --output_dir.

    Raises:
        docopt.DocoptExit: when the usage exceeds the μC limits.
    """
    args = docopt.docopt(__doc__, version='none')
    with open(args['<docopt_file>'], 'r') as f:
        args['<docopt_file>'] = f.read()
    # NOTE: the unused locals template_h_name/template_c_name from the
    # original were removed; the template paths are read from args directly.
    args['template_h_obj'] = read_template_file_contents(args['--template_h'])
    args['template_c_obj'] = read_template_file_contents(args['--template_c'])
    doc = args['<docopt_file>']
    usage = docopt.printable_usage(doc)
    all_options = docopt.parse_defaults(doc)
    pattern = docopt.parse_pattern(docopt.formal_usage(usage), all_options)
    # the program name (prompt) is the second whitespace-separated token of
    # the usage section ("Usage: <prompt> ...")
    prompt = usage.split()[1].strip()
    usage_lines = [x.replace(prompt, "") for x in usage.split('\n')[1:]]
    tokens = []
    commands = []
    # I'm not sure why we have to reach in here, but it "works"
    required_commands = pattern.children[0].children
    for idx, required in enumerate(required_commands):
        parts = [
            o.name for o in required.children if isinstance(o, docopt.Command)
        ]
        if not parts:
            continue
        # "help" is a special case? So exclude?
        if "help" in parts:
            continue
        tokens.extend(parts)
        # pair the command with its original usage line when one exists
        docopt_text = usage_lines[idx].strip(
        ) if idx < len(usage_lines) else None
        commands.append(Command(parts, docopt_text))
    if args['--short'] is not None:
        # substitute the short program name throughout the doc text
        doc = doc.replace(prompt + " ", args['--short'] + " ")
    rendering = Rendering(args['<module_name>'], commands, prompt, doc,
                          args['--multithreaded'])
    if len(rendering.tokens) > 64:
        raise docopt.DocoptExit(
            'Too many unique tokens ({}) for Docopt μC (max:64)'.format(
                len(rendering.tokens)))
    too_long_commands = []
    for cmd in rendering.commands:
        if len(cmd.parts) > 6:
            too_long_commands.append(cmd)
    if too_long_commands:
        summaries = [
            ' > {}'.format(" ".join(p for p in c.parts))
            for c in too_long_commands
        ]
        raise docopt.DocoptExit('\n'.join([
            'The following commands are too long for Docopt μC (max: 6 long):'
        ] + summaries))
    # derive output filenames from the template names by swapping in the
    # include name for the template prefix
    output_h_filename = args['--template_h'].replace(args['--template_prefix'],
                                                    rendering.include_name)
    output_c_filename = args['--template_c'].replace(args['--template_prefix'],
                                                    rendering.include_name)
    output_h_filename = os.path.join(args['--output_dir'], output_h_filename)
    output_c_filename = os.path.join(args['--output_dir'], output_c_filename)
    with open(output_h_filename, 'w') as f:
        f.write(args['template_h_obj'].render(rendering=rendering))
    with open(output_c_filename, 'w') as f:
        f.write(args['template_c_obj'].render(rendering=rendering))
    if args["--no-docopt-args-h"] is False:
        # copy the docopt header file to the output directory
        docopt_args = pkg_resources.resource_filename(
            __name__, 'templates/docopt_args.h')
        shutil.copy2(docopt_args, args['--output_dir'])
def test_issue_126_defaults_not_parsed_correctly_when_tabs():
    """Regression test for docopt issue #126.

    A tab (instead of spaces) between an option and its description must
    not prevent the [default: ...] annotation from being parsed.
    """
    tab_indented_section = 'Options:\n\t--foo=<arg> [default: bar]'
    expected = [Option(None, '--foo', 1, 'bar')]
    assert parse_defaults(tab_indented_section) == expected
-p --profile enable profiling on parsing and serialization -d --debug launch debugger after parsing and before serialization """ import os import sys from io import StringIO, TextIOWrapper from json.decoder import JSONDecodeError from concurrent.futures import ProcessPoolExecutor from docopt import docopt, parse_defaults import rdflib from rdflib.plugins.parsers.notation3 import BadSyntax from ttlser.utils import regjsonld defaults = {o.name:o.value if o.argcount else None for o in parse_defaults(__doc__)} #GRAPHCLASS = rdflib.Graph GRAPHCLASS = rdflib.ConjunctiveGraph def getVersion(): ttlser = rdflib.plugin.get('nifttl', rdflib.serializer.Serializer) ttlser_version = ttlser._CustomTurtleSerializer__version # FIXME version = "ttlfmt v0.0.2\nttlser {}".format(ttlser_version) return version def prepare(filepath_or_stream, outpath=None, stream=False): if stream: infmt_guess = 'turtle' if outpath is None:
from docopt import parse_defaults
from sparcur import exceptions as exc
from sparcur.cli import Report, Options, __doc__ as clidoc
from sparcur.paths import Path, BlackfynnCache
from sparcur.config import auth
from sparcur.server import make_app
from sparcur.backends import BlackfynnRemote
from sparcur.curation import Summary

# This module must be imported from inside a sparcur project directory:
# the cwd has to contain the cache class's local data directory, otherwise
# importing fails fast with NotInProjectError.
project_path = Path.cwd()
if not (project_path / Path._cache_class._local_data_dir).exists():
    raise exc.NotInProjectError(f'{project_path}')

# Option defaults harvested from the sparcur.cli docstring; options that
# take no argument get None instead of their docopt value.
defaults = {o.name: o.value if o.argcount else None
            for o in parse_defaults(clidoc)}

# Hardwired argument set emulating a `spc server` invocation of the cli.
args = {'server': True,
        '--raw': False,
        '--latest': True,
        '--sort-count-desc': True,
        '--project-path': project_path,
        '--tab-table': False,
        '<path>': [],
        '--verbose': False,
        '--export-path': auth.get_path('export-path'),
        '--partial': False,
        '--open': False,
        }
with open(args['--template'], 'r') as f: args['--template'] = f.read() if args['--header']: with open(template_h_path, 'r') as f: template_h = f.read() except IOError as e: sys.exit(e) doc = args['<docopt>'] usage = docopt.parse_section('usage:', doc) s = ['More than one ', '"usage:" (case-insensitive)', ' not found.'] usage = {0: s[1:], 1: usage[0] if usage else None}.get(len(usage), s[:2]) if isinstance(usage, list): raise docopt.DocoptLanguageError(''.join(usage)) all_options = docopt.parse_defaults(doc) pattern = docopt.parse_pattern(docopt.formal_usage(usage), all_options) leafs, commands, arguments, flags, options = parse_leafs( pattern, all_options) t_commands = ';\n '.join('int %s' % c_name(cmd.name) for cmd in commands) t_commands = (('\n /* commands */\n ' + t_commands + ';') if t_commands != '' else '') t_arguments = ';\n '.join('char *%s' % c_name(arg.name) for arg in arguments) t_arguments = (('\n /* arguments */\n ' + t_arguments + ';') if t_arguments != '' else '') t_flags = ';\n '.join('int %s' % c_name(flag.long or flag.short) for flag in flags) t_flags = (('\n /* options without arguments */\n ' + t_flags +
def main():
    """Command line entry point for ontutils.

    Parses the module docstring with docopt, extracts shared settings
    (verbosity, repo, curies, file list), then dispatches to exactly one
    subcommand implementation via the elif chain below.
    """
    from docopt import docopt, parse_defaults
    args = docopt(__doc__, version='ontutils 0.0.1')
    # map each option name to its docopt default; options that take no
    # argument get None instead of their toggle value
    defaults = {o.name: o.value if o.argcount else None
                for o in parse_defaults(__doc__)}
    verbose = args['--verbose']
    debug = args['--debug']
    repo_name = args['<repo>']
    git_local = os.path.expanduser(args['--git-local'])
    epoch = args['--epoch']
    curies_location = args['--curies']
    curies = getCuries(curies_location)
    curie_prefixes = set(curies.values())
    filenames = args['<file>']
    filenames.sort(key=lambda f: os.path.getsize(f), reverse=True)  # make sure the big boys go first
    # files the refactoring subcommands must never touch
    refactor_skip = ('nif.ttl',
                     'resources.ttl',
                     'generated/chebislim.ttl',
                     'unused/ro_bfo_bridge.ttl',
                     'generated/ncbigeneslim.ttl',
                     'generated/NIF-NIFSTD-mapping.ttl')
    rfilenames = [f for f in filenames if f not in refactor_skip]

    if args['devconfig']:
        if args['--write']:
            file = devconfig.write(args['--output-file'])
            print(f'config written to {file}')
        elif args['<field>']:
            # print each requested devconfig field, empty string if missing
            for f in args['<field>']:
                print(getattr(devconfig, f, ''))
        else:
            print(devconfig)
    elif args['catalog-extras']:
        catalog_extras(args['--fetch'])
    elif args['version-iri']:
        version_iris(*filenames, epoch=epoch)
    elif args['scigraph-stress']:
        scigraph_stress(int(args['--rate']), int(args['--timeout']),
                        verbose, debug)
    elif args['deadlinks']:
        deadlinks(filenames, int(args['--rate']), int(args['--timeout']),
                  verbose, debug)
    elif args['spell']:
        spell(filenames, debug)
    elif args['iri-commit']:
        make_git_commit_command(git_local, repo_name)
    elif args['uri-switch']:
        uri_switch(rfilenames, uri_switch_values)
    elif args['backend-refactor']:
        backend_refactor(rfilenames, backend_refactor_values)
    elif args['todo']:
        graph = loadall(git_local, repo_name, local=True)
        graph_todo(graph, curie_prefixes, uri_switch_values)
        # NOTE(review): presumably IPython.embed imported at module level
        # for interactive inspection — confirm
        embed()
    elif args['expand']:
        # expand each CURIE to its full iri and print it
        curies['NLXWIKI'] = 'http://legacy.neurolex.org/wiki/'
        for curie in args['<curie>']:
            prefix, suffix = curie.split(':')
            print(curies[prefix] + suffix)
from subprocess import check_output
from collections import OrderedDict
from docopt import docopt, printable_usage, parse_defaults, formal_usage

if __name__ == '__main__':
    # run the target tool's help invocation and parse ITS docopt text
    args = docopt(__doc__, help=True, version=1.0)
    base_command = args.get('<tool_help_call>').split(' ')
    doc = check_output(base_command)

    # Load options and arguments
    # NOTE(review): `optdoc` is not defined in this view — presumably a
    # module-level import of a modified docopt; confirm it exists.
    doc_options, doc_args = optdoc.parse_defaults(doc)

    # Load list
    usage = printable_usage(doc)
    options = parse_defaults(doc)
    pattern, arg_list, cmd_list, ids = optdoc.parse_pattern(formal_usage(usage),
                                                            options)
    # print pattern

    # Print args, options, cmds, and lists
    # print 'ARGS \n' + str(args) + '\n'
    # print 'OPTIONS \n' + str(doc_options) + '\n'
    # print 'ARGUMENTS \n' + str(doc_args) + '\n'
    # print 'LIST OPT/ARGS \n' + str(arg_list)
    # print 'CMD \n' + str(cmd_list)

    # remove -h/--help from base_command
    if '-h' in base_command:
        base_command.remove('-h')
    elif '--help' in base_command:
        base_command.remove('--help')
def main():
    """CLI entry point for the clifun google-api demo.

    Defines the Options/Main dispatcher classes, strips oauth passthrough
    flags out of sys.argv, then dispatches on the parsed docopt args.
    """
    import sys
    from pyontutils.clifun import Dispatcher, Options as BaseOptions

    class Options(BaseOptions):
        # valid suffixes accepted by --drive-scope (appended to 'drive.')
        drive_scopes = (
            'appdata',
            'file',
            'metadata',
            'metadata.readonly',
            'photos.readonly',
            'readonly',
            'scripts',)

        def __new__(cls, args, defaults):
            # validate --drive-scope values up front and bail on any
            # unknown scope before doing real work
            bads = []
            for scope in args['--drive-scope']:
                if scope not in cls.drive_scopes:
                    bads.append(scope)

            if bads:
                log.error(f'Invalid scopes! {bads}')
                sys.exit(1)

            return super().__new__(cls, args, defaults)

    class Main(Dispatcher):
        @property
        def _scopes(self):
            """Yield the full OAuth scope urls implied by the cli options."""
            base = 'https://www.googleapis.com/auth/'
            suffix = '.readonly' if self.options.readonly else ''
            if self.options.sheets:
                yield base + 'spreadsheets' + suffix

            if self.options.docs:
                # BUGFIX: the Google Docs scope is .../auth/documents —
                # it was previously misspelled 'doccuments', which Google
                # rejects as an invalid scope
                yield base + 'documents' + suffix

            if self.options.drive:
                suffixes = []
                if suffix:
                    suffixes.append(suffix)

                suffixes += ['.' + s for s in self.options.drive_scope]
                if not suffixes:
                    # no suffix at all means full read/write drive scope
                    suffixes = '',

                for suffix in suffixes:
                    yield base + 'drive' + suffix

        def auth(self):
            newline = '\n'
            scopes = list(self._scopes)
            if self.options.debug:
                log.debug(f'requesting for scopes:\n{newline.join(scopes)}')

            service = get_oauth_service(readonly=self.options.readonly,
                                        SCOPES=scopes)
            # FIXME decouple this ...
            log.info(f'Auth finished successfully for scopes:\n{newline.join(scopes)}')

    from docopt import docopt, parse_defaults
    args = docopt(__doc__, version='clifun-demo 0.0.0')
    # flags that must be left in sys.argv for the oauth flow's own argument
    # parser; everything else (beyond argv[0]) is popped out
    passthrough = ('--noauth_local_webserver',
                   '--auth_host_name',
                   '--auth_host_port',
                   '--logging_level')
    to_pop = [arg for i, arg in enumerate(sys.argv)
              if i and not [None for pt in passthrough if pt in arg]]
    for arg in to_pop:
        sys.argv.pop(sys.argv.index(arg))

    defaults = {o.name: o.value if o.argcount else None
                for o in parse_defaults(__doc__)}
    options = Options(args, defaults)
    # NOTE: rebinding `main` to the Dispatcher instance is intentional;
    # calling it dispatches to the selected subcommand
    main = Main(options)
    if main.options.debug:
        log.setLevel('DEBUG')

    print(main.options)
    main()
def test_issue_126_defaults_not_parsed_correctly_when_tabs():
    """docopt issue #126: a tab after the option must not break [default: ...]."""
    tabbed_section = "Options:\n\t--foo=<arg> [default: bar]"
    parsed = parse_defaults(tabbed_section)
    assert parsed == [Option(None, "--foo", 1, "bar", "<arg>")]
def main():
    """Command line entry point for ontutils (config-writing variant).

    Parses the module docstring with docopt, extracts shared settings, then
    dispatches via the elif chain: the `set` branch mutates the user config
    (and for scigraph-api-key, the secrets file); the remaining branches
    call the subcommand implementations.
    """
    from docopt import docopt, parse_defaults
    args = docopt(__doc__, version='ontutils 0.0.1')
    # map each option name to its docopt default; options that take no
    # argument get None instead of their toggle value
    defaults = {o.name: o.value if o.argcount else None
                for o in parse_defaults(__doc__)}
    verbose = args['--verbose']
    debug = args['--debug']
    repo_name = args['<repo>']
    git_local = os.path.expanduser(args['--git-local'])
    epoch = args['--epoch']
    curies_location = args['--curies']
    curies = getCuries(curies_location)
    curie_prefixes = set(curies.values())
    filenames = args['<file>']
    filenames.sort(key=lambda f: os.path.getsize(f), reverse=True)  # make sure the big boys go first
    # files the refactoring subcommands must never touch
    refactor_skip = ('nif.ttl',
                     'resources.ttl',
                     'generated/chebislim.ttl',
                     'unused/ro_bfo_bridge.ttl',
                     'generated/ncbigeneslim.ttl',
                     'generated/NIF-NIFSTD-mapping.ttl')
    rfilenames = [f for f in filenames if f not in refactor_skip]

    if args['set']:
        from pyontutils.config import auth
        uc = auth.user_config

        def set_uc(var, value):
            # Write a single auth-variable into the user config, refusing
            # to touch a config that contains comments (dumping would
            # destroy them — see the warning below).
            with open(uc._path, 'rt') as f:
                text = f.read()

            if '#' in text:
                msg = f'Comments detected! Not writing config! {uc._path}'
                raise ValueError(msg)

            blob = uc.load()
            # XXX NEVER DUMP A CONFIG THIS YOU _WILL_ KLOBBER IT
            # BY ACCIDENT AT SOME POINT AND WILL ERASE ANY/ALL COMMENTS
            # THERE IS NO SAFETY WITH THIS IMPLEMENTATION
            # USERS SHOULD EDIT THEIR CONFIGS DIRECTLY
            # except that it makes giving instructions for
            # setting values a bit more complicated
            blob['auth-variables'][var] = value
            uc.dump(blob)

        if args['ontology-local-repo']:
            # point the ontology-local-repo config var at <path>
            var = 'ontology-local-repo'
            olr = Path(args['<path>']).expanduser().resolve()
            olr_string = olr.as_posix()
            set_uc(var, olr_string)
            value2 = auth.get_path(var)
            if not value2.exists():
                # warn but still record the path; it may be created later
                msg = f'{var} path does not exist! {value2}'
                print(tc.red('WARNING'), msg)

            msg = f'{var} path {value2} written to {uc._path}'
            print(msg)
            assert olr == value2
        elif args['scigraph-api-key']:
            # FIXME this is a hack on top of orthauth, which will not
            # # check the secrets path first to make sure it is ok
            # be implementing programmtic modification of user config
            # files any time soon, though it might make sense to have a
            # "machine config path" in addition to auth and user config
            path = ['scigraph', 'api', 'key']
            spath = auth._pathit(uc.get_blob('auth-stores', 'secrets')['path'])
            if not spath.parent.exists():
                # secrets directory must be private to the user
                spath.parent.mkdir(parents=True)
                spath.parent.chmod(0o0700)

            if spath.suffix != '.yaml':
                # only yaml secrets files are supported for appending
                msg = f"Can't write secrets file of type {spath.suffix}"
                args = None
                raise NotImplementedError(msg)

            # probe whether the key already exists in secrets
            # NOTE(review): the bare except silently swallows ALL errors
            # from the secrets lookup, not just "missing key" — confirm
            # that is intended
            v = None
            try:
                s = uc.secrets
                v = s(*path)
            except:
                pass

            if v is not None:
                v = None
                raise ValueError(f'Path already in secrets! {path} in {spath}')

            # safely append to the secrets file
            # NOTE(review): verify the yaml indentation here actually nests
            # key under api under scigraph as ['scigraph']['api']['key']
            # expects — the single-space indents look suspicious
            key = args['<key>']
            path_key = f'\nscigraph:\n api:\n key: {key}'
            if not spath.exists():
                # create the secrets file with user-only permissions
                spath.touch()
                spath.chmod(0o0600)

            with open(spath, 'a+') as f:
                f.write(path_key)

            # set the config var
            var = 'scigraph-api-key'
            value = {'path': ' '.join(path)}
            set_uc(var, value)  # set the path
            # XXX NOTE yes, it is correct to do this only after secrets succeeds
            # otherwise it is possible to get into a state where secrets does
            # not exist but there is a path pointing to it, so load this
            # ontutils file will fail during import time

            # test that we got the value we expected
            value2 = auth.get(var)
            msg = (f'Key written to secrets. {spath} and path to '
                   f'key was written to config {uc._path}')
            print(msg)
            assert key == value2, 'Key retrieved does not match key set!'
    elif args['devconfig']:
        if args['--write']:
            file = devconfig.write(args['--output-file'])
            print(f'config written to {file}')
        elif args['<field>']:
            # print each requested devconfig field, empty string if missing
            for f in args['<field>']:
                print(getattr(devconfig, f, ''))
        else:
            print(devconfig)
    elif args['catalog-extras']:
        catalog_extras(args['--fetch'])
    elif args['version-iri']:
        version_iris(*filenames, epoch=epoch)
    elif args['scigraph-stress']:
        scigraph_stress(int(args['--rate']), int(args['--timeout']),
                        verbose, debug)
    elif args['deadlinks']:
        deadlinks(filenames, int(args['--rate']), int(args['--timeout']),
                  verbose, debug)
    elif args['spell']:
        spell(filenames, debug)
    elif args['iri-commit']:
        make_git_commit_command(git_local, repo_name)
    elif args['uri-switch']:
        uri_switch(rfilenames, uri_switch_values)
    elif args['backend-refactor']:
        backend_refactor(rfilenames, backend_refactor_values)
    elif args['todo']:
        graph = loadall(git_local, repo_name, local=True)
        graph_todo(graph, curie_prefixes, uri_switch_values)
        breakpoint()
    elif args['expand']:
        # expand each CURIE to its full iri and print it
        curies['NLXWIKI'] = 'http://legacy.neurolex.org/wiki/'
        for curie in args['<curie>']:
            prefix, suffix = curie.split(':')
            print(curies[prefix] + suffix)