def main():
    """Entry point for the giza CLI.

    Registers each command group (top-level plus the git/generate/includes/
    package/tx namespaces) on the base parser, then dispatches with a
    RuntimeStateConfig instance as the argparse namespace so parsed options
    land on the config object.
    """
    parser = get_base_parser()

    # Ordered (namespace, entry points) pairs; None means top-level commands.
    # Registration order is preserved exactly as before.
    command_groups = [
        (None, [render_config, clean, sphinx, deploy, push]),
        ('git', [giza.operations.git.apply_patch,
                 giza.operations.git.pull_rebase,
                 giza.operations.git.cherry_pick]),
        ('generate', [giza.operations.generate.api,
                      giza.operations.generate.assets,
                      giza.operations.generate.images,
                      giza.operations.generate.intersphinx,
                      giza.operations.generate.options,
                      giza.operations.generate.primer,
                      giza.operations.generate.steps,
                      giza.operations.generate.tables,
                      giza.operations.generate.toc]),
        ('includes', [giza.operations.includes.recursive,
                      giza.operations.includes.changed,
                      giza.operations.includes.once,
                      giza.operations.includes.unused,
                      giza.operations.includes.list,
                      giza.operations.includes.graph,
                      giza.operations.includes.clean]),
        ('package', [giza.operations.packaging.fetch,
                     giza.operations.packaging.unwind,
                     giza.operations.packaging.create,
                     giza.operations.packaging.deploy]),
        ('tx', [giza.operations.tx.check_orphaned,
                giza.operations.tx.update_translations,
                giza.operations.tx.pull_translations,
                giza.operations.tx.push_translations]),
    ]

    for group_namespace, entry_points in command_groups:
        if group_namespace is None:
            argh.add_commands(parser, entry_points)
        else:
            argh.add_commands(parser, entry_points, namespace=group_namespace)

    args = RuntimeStateConfig()
    argh.dispatch(parser, namespace=args)
def main():
    """Log the exact invocation, then register and dispatch the CLI commands."""
    log.debug(' '.join(sys.argv))

    commands = [
        print_installation_commands,
        install,
        install_full,
        list_data_sources,
        list_output,
        log_info,
        copy_files,
        version,
    ]

    parser = argh.helpers.ArghParser()
    argh.add_commands(parser, commands)
    argh.dispatch(parser)
def main():
    """CLI entry point: host tools plus git subcommands."""
    description = 'Tools for %s.' % ssh.HOSTNAME
    parser = argparse.ArgumentParser(description=description)
    argh.add_commands(parser, [update, proxy])
    # git subcommands are registered by the git helper, not via argh.
    git.namespace.add_subcommands(parser)
    argh.dispatch(parser)
def main():
    """
    The main entry point, as specified in the ``setup.py`` file. Adds commands
    from other subsidiary entry points (specified in the ``commands`` variable
    above,) and then uses ``argh.dispatch()`` to start the process.

    The ``RuntimeStateConfig()`` object is created here and handed to the
    parser as the object that will receive all command line data, rather than
    using a standard argparse namespace object. This allows all runtime
    argument parsing to happen inside of these config objects rather than
    spread among all of the entry points.

    This function catches and recovers from :exc:`KeyboardInterrupt`, which
    means that it doesn't dump a stack trace following a Control-C.
    """
    parser = get_base_parser()

    # 'main' holds the top-level commands; every other key is an argh
    # subcommand namespace.
    for namespace, entry_points in commands.items():
        if namespace == 'main':
            argh.add_commands(parser, entry_points)
        else:
            argh.add_commands(parser, entry_points, namespace=namespace)

    args = RuntimeStateConfig()

    try:
        argh.dispatch(parser, namespace=args)
    except KeyboardInterrupt:
        logger.error('operation interrupted by user.')
def main():
    """
    Set up the context and connectors, auto-register every function in this
    module as a CLI command, and dispatch.

    Initialization errors trigger (re)configuration; unexpected runtime errors
    are reported to rollbar when the user has error reporting enabled.
    """
    try:
        init()
    except custom_exceptions.NotConfigured:
        configure()
        init()
    # Adding this in case users are trying to run without adding a jira url.
    # I would like to take this out in a release or two.
    # TODO: REMOVE
    except (AttributeError, ConfigParser.NoOptionError):
        logging.error('It appears that your configuration is invalid, please reconfigure the app and try again.')
        configure()
        init()

    parser = argparse.ArgumentParser()

    # Now simply auto-discovering the methods listed in this module
    current_module = sys.modules[__name__]
    module_methods = [getattr(current_module, a, None) for a in dir(current_module)
                      if isinstance(getattr(current_module, a, None), types.FunctionType)
                      and a != 'main']
    argh.add_commands(parser, module_methods)

    # Putting the error logging after the app is initialized because
    # we want to adhere to the user's preferences
    try:
        argh.dispatch(parser)
    # We don't want to report keyboard interrupts to rollbar
    except (KeyboardInterrupt, SystemExit):
        raise
    except Exception as e:
        # FIX: the membership test must run against the exception's message,
        # not the exception object itself ("HTTP 400" in e raised TypeError).
        if isinstance(e, jira.exceptions.JIRAError) and "HTTP 400" in str(e):
            # NOTE(review): `configuration.load_config` is subscripted here but
            # looks like a function elsewhere — confirm this should not be
            # `configuration.load_config()['jira']`.
            logging.warning('It appears that your authentication with {0} is invalid. Please re-configure jtime: `jtime configure` with the correct credentials'.format(configuration.load_config['jira'].get('url')))
        elif configured.get('jira').get('error_reporting', True):
            # Configure rollbar so that we report errors
            import rollbar
            from . import __version__ as version
            root_path = os.path.dirname(os.path.realpath(__file__))
            rollbar.init('7541b8e188044831b6728fa8475eab9f', 'v%s' % version, root=root_path)
            logging.error('Sorry. It appears that there was an error when handling your command. '
                          'This error has been reported to our error tracking system. To disable '
                          'this reporting, please re-configure the app: `jtime config`.')
            extra_data = {
                # grab the command that we're running
                'cmd': sys.argv[1],
                # we really don't want to see jtime in the args
                'args': sys.argv[2:],
                # lets grab anything useful, python version?
                'python': str(sys.version),
            }
            # We really shouldn't hit this line of code when running tests, so let's not cover it.
            rollbar.report_exc_info(extra_data=extra_data)  # pragma: no cover
        else:
            logging.error('It appears that there was an error when handling your command.')
        raise
def run(self):
    """Run the command-line interpreter.

    Ensures the site output directory and the sass destination directory
    exist before dispatching the parser.
    """
    init_colors()
    for dirname in (self.site.outpath, os.path.dirname(self.sass_dest_path)):
        if not os.path.exists(dirname):
            os.makedirs(dirname)
    argh.dispatch(self.parser)
def main():
    # Entry point: registers COMMANDS plus two global flags, then dispatches.
    # NOTE: uses Python 2 `except ExcType, name` syntax — this module is
    # Python 2 only as written.
    parser = argh.ArghParser()
    parser.add_commands(COMMANDS)
    # Global flags shared by every subcommand.
    parser.add_argument('-c', '--show-commands', action='store_true',
                        help="display virsh/shell commands used")
    parser.add_argument('-v', '--verbose', action='count',
                        help="increase output verbosity", default=0)
    try:
        # process_argv/parse_global_args pre-process global flags before the
        # selected command runs.
        argh.dispatch(parser, argv=process_argv(), pre_call=parse_global_args)
    except exception.CommandFailed, ex:
        cmd.log_cmd_fail(ex)
def main():
    """Register the translation commands on the base parser and dispatch."""
    translation_commands = [
        mongo_to_po,
        po_to_mongo,
        verifier,
    ]
    parser = get_base_parser()
    argh.add_commands(parser, translation_commands)
    argh.dispatch(parser, namespace=RuntimeStateConfig())
def main():
    """Entry point for the jeerah CLI: register commands and dispatch."""
    parser = get_base_parser()
    argh.add_commands(parser, [setup, setup_credential_file, config, progress,
                               triage, make_versions, mirror_version, release])

    args = JeerahRuntimeStateConfig()
    # Demote the default 'info' logging level to 'warning'.
    if args.level == "info":
        args.level = "warning"

    argh.dispatch(parser, namespace=args)
def main():
    """Entry point for the github tools CLI."""
    parser = get_base_parser()
    argh.add_commands(parser, [mine, stats, actions, setup])

    args = GithubRuntimeConfig()
    # Demote the default 'info' logging level to 'warning'.
    if args.level == 'info':
        args.level = 'warning'

    argh.dispatch(parser, namespace=args)
def main():
    """Register every command group from ``commands`` and dispatch.

    Catches Control-C so the user does not see a stack trace.
    """
    parser = get_base_parser()

    for namespace, entry_points in commands.items():
        # 'main' is the sentinel for top-level (un-namespaced) commands.
        kwargs = {} if namespace == 'main' else {'namespace': namespace}
        argh.add_commands(parser, entry_points, **kwargs)

    args = RuntimeStateConfig()
    try:
        argh.dispatch(parser, namespace=args)
    except KeyboardInterrupt:
        logger.error('operation interrupted by user.')
def main():
    """
    The main entry point, as specified in the ``setup.py`` file. Adds commands
    from other subsidiary entry points (specified in the ``commands`` variable
    above,) and then uses ``argh.dispatch()`` to start the process.

    The "DaggerConfig" object allows the application to delegate all argument
    parsing validation using setters and getters in the object where
    arg(h)parse stores the configuration data.

    This function catches and recovers from :exc:`KeyboardInterrupt`, which
    means that it doesn't dump a stack trace following a Control-C.
    """
    parser = get_base_parser()

    # 'main' holds the top-level commands; every other key is an argh
    # subcommand namespace.
    for namespace, entry_points in commands.items():
        if namespace == 'main':
            argh.add_commands(parser, entry_points)
        else:
            argh.add_commands(parser, entry_points, namespace=namespace)

    args = dagger.config.cli.DaggerCliConfig()

    # set the logging level early to ensure logging is configured during
    # argument parsing.
    args.level = "info"

    # run the command, catching user-interruption and common error types directly.
    try:
        argh.dispatch(parser, namespace=args)
    except KeyboardInterrupt:
        logger.error('operation interrupted by user.')
        exit(1)
    except RuntimeError:
        logger.error("exiting due to a runtime error")
        exit(1)
    except (ValueError, TypeError, AttributeError) as e:
        # catch data access and validation errors, and, in common operation,
        # suppress the traceback, unless logging at debug level.
        logger.info("error: {0}, type: {1}".format(e, type(e)))
        tb = traceback.format_exc()
        err = libgiza.error.Error(message=("encountered data validation or access "
                                           "error during normal operation."),
                                  fatal=True,
                                  include_trace=True)
        err.payload = {"type": type(e), "error": e, "trace": tb}
        logger.debug(err.render_output())
        logger.debug("exception traceback: \n" + tb)
        exit(1)
def main():
    """Entry point for the github tools CLI.

    Forces threaded execution because these operations do not support
    multiprocessing.
    """
    parser = get_base_parser()
    argh.add_commands(parser, [mine, stats, actions, setup])

    args = GithubRuntimeConfig()
    # Demote the default 'info' logging level to 'warning'.
    if args.level == 'info':
        args.level = 'warning'
    # The process runner is unsupported here; fall back to threads.
    if args.runner == 'process':
        logger.warning('this operation does not support multiprocessing, falling back to threads')
        args.runner = 'thread'

    argh.dispatch(parser, namespace=args)
def main():
    # Entry point: a single default command ('connect') with a descriptive
    # help epilog rendered verbatim via RawDescriptionHelpFormatter.
    # NOTE(review): this epilog was almost certainly multi-line originally;
    # the exact line breaks are not recoverable from this copy of the file.
    epilog = """The 'instance' argument can be one or multiple specifiers separated by commas. valid specifiers: Tag Name (or altName): tt-api-stage Instance ID: i-1a2b3c4d5e Private IP: 12.34.567.89 Private DNS: domU-12-45-56-AB-CD-EF Wildcard on tag Name: tt-api-*"""
    parser = argh.ArghParser(
        epilog=epilog,
        formatter_class=argparse.RawDescriptionHelpFormatter,
    )
    # 'connect' runs when no subcommand is named.
    argh.set_default_command(parser, connect)
    argh.dispatch(parser, completion=False)
def script(steps, optional, default_cfg_fnames=None, default_section='DEFAULT'):
    """
    Run a processing pipeline as a command line script.

    :param steps: required pipeline step functions, in execution order
    :param optional: additional optional command functions
    :param default_cfg_fnames: default config filenames; defaults to an empty
        list. (Fixed: this was previously a mutable default argument ``[]``,
        which is shared across calls — a classic Python pitfall.)
    :param default_section: name of the default config section
    """
    if default_cfg_fnames is None:
        default_cfg_fnames = []

    arg_parser, config, section, left_args = setup(default_cfg_fnames, default_section)

    basicConfig(level=config.get(section, "LOG_LEVEL", fallback="WARNING"))

    def run_all():
        run_commands(steps)

    # Give run_all a docstring showing the whole pipeline, e.g. "a --> b --> c",
    # so it reads well in --help output.
    run_all.__doc__ = "Run complete pipeline: {}".format(
        " --> ".join(step.__name__.replace('_', '-') for step in steps))

    functions = steps + optional + [run_all]
    add_commands(arg_parser, functions, config, section, prefix=True)
    dispatch(arg_parser, argv=left_args)
def main():
    """Register the CLI commands, dispatch, and exit non-zero if argh
    collected any errors.

    argh writes dispatch errors into ``errors``; if anything accumulated,
    the process exits with that text as the status message.
    """
    errors = StringIO()
    parser = argh.ArghParser()
    argh.add_commands(parser, [
        source,
        run,
        run_piped,
        list_registered,
        source_registered,
        source_named,
        source_def,
        source_inline,
        update_env
    ],
    )
    argh.add_commands(parser,
                      functions=DaemonCommands.commands(),
                      namespace=DaemonCommands.namespace,
                      title=DaemonCommands.__doc__)
    argh.dispatch(parser, completion=False, errors_file=errors)
    # FIX: ``errors.len`` only exists on Python 2's StringIO.StringIO class;
    # measuring the accumulated text is portable and equivalent.
    collected = errors.getvalue()
    if len(collected) > 0:
        sys.exit(collected.strip())
def main():
    """Register the 'github' and 'ow2' migration command groups and dispatch."""
    parser = argparse.ArgumentParser()

    # GitHub-side migration operations.
    argh.add_commands(parser, [
        migration.create_repositories,
        migration.delete_repositories,
        migration.edit_repositories,
        migration.import_repositories,
        migration.import_attachments,
        migration.import_issues,
        migration.import_issues_for_project,
    ], namespace='github')

    # OW2-side repository maintenance operations.
    argh.add_commands(parser, [
        migration.clone_repositories,
        migration.gc_repositories,
        migration.prune_repositories,
    ], namespace='ow2')

    argh.dispatch(parser)
def main(root_pkg, argv=None):
    """Invokes module functions in :mod:`pykern.pykern_cli`

    Looks in ``<root_pkg>.pykern_cli`` for the ``argv[1]`` module. It
    then invokes the ``argv[2]`` method of that module.

    Args:
        root_pkg (str): top level package name
        argv (list of str): Defaults to `sys.argv`. Only used for testing.

    Returns:
        int: 0 if ok. 1 if error (missing command, etc.)
    """
    pkconfig.append_load_path(root_pkg)
    if not argv:
        # copy so the pops below never mutate sys.argv itself
        argv = list(sys.argv)
    prog = os.path.basename(argv.pop(0))
    if _is_help(argv):
        return _list_all(root_pkg, prog)
    module_name = argv.pop(0)
    cli = _module(root_pkg, module_name)
    if not cli:
        return 1
    # include the module name in the program name shown in help output
    prog = prog + ' ' + module_name
    parser = argparse.ArgumentParser(
        prog=prog, formatter_class=argh.PARSER_FORMATTER)
    cmds = _commands(cli)
    dc = _default_command(cmds, argv)
    if dc:
        argh.set_default_command(parser, dc)
    else:
        argh.add_commands(parser, cmds)
    if len(argv) < 1:
        # Python 3: parser doesn't exit if not enough commands
        parser.error('too few arguments')
    if argv[0][0] != '-':
        # subcommand names use dashes on the command line, underscores in code
        argv[0] = argv[0].replace('_', '-')
    argh.dispatch(parser, argv=argv)
    return 0
# will get called within a graph context containing our model graph. self.summary_writer = SummaryWriterCache.get(self.working_dir) self.weight_tensors = tf.trainable_variables() self.global_step = tf.train.get_or_create_global_step() def before_run(self, run_context): global_step = run_context.session.run(self.global_step) if global_step % self.every_n_steps == 0: self.before_weights = run_context.session.run(self.weight_tensors) def after_run(self, run_context, run_values): global_step = run_context.session.run(self.global_step) if self.before_weights is not None: after_weights = run_context.session.run(self.weight_tensors) weight_update_summaries = compute_update_ratio( self.weight_tensors, self.before_weights, after_weights) self.summary_writer.add_summary(weight_update_summaries, global_step) self.before_weights = None parser = argparse.ArgumentParser() argh.add_commands(parser, [train]) if __name__ == '__main__': # Let absl.flags parse known flags from argv, then pass the remaining flags # into argh for dispatching. remaining_argv = flags.FLAGS(sys.argv, known_only=True) argh.dispatch(parser, argv=remaining_argv[1:])
def main():
    """CLI entry point: host tools (including the https proxy) plus git subcommands."""
    parser = argparse.ArgumentParser(
        description='Tools for %s.' % ssh.HOSTNAME,
    )
    commands = [update, proxy, https_proxy]
    argh.add_commands(parser, commands)
    # git subcommands are registered by the git helper, not via argh.
    git.namespace.add_subcommands(parser)
    argh.dispatch(parser)
def main():
    """Entry point for the giza CLI.

    Registers each command group (top-level plus the git/generate/includes/
    package/translate/tx namespaces) on the base parser, then dispatches with
    a RuntimeStateConfig instance as the argparse namespace so parsed options
    land on the config object.
    """
    parser = get_base_parser()

    # Ordered (namespace, entry points) pairs; None means top-level commands.
    # Registration order is preserved exactly as before.
    command_groups = [
        (None, [make_project, render_config, clean, sphinx, deploy, push]),
        ('git', [giza.operations.git.apply_patch,
                 giza.operations.git.pull_rebase,
                 giza.operations.git.cherry_pick,
                 giza.operations.git.merge]),
        ('generate', [giza.operations.generate.api,
                      giza.operations.generate.assets,
                      giza.operations.generate.images,
                      giza.operations.generate.intersphinx,
                      giza.operations.generate.options,
                      giza.operations.generate.primer,
                      giza.operations.generate.steps,
                      giza.operations.generate.tables,
                      giza.operations.generate.toc,
                      giza.operations.generate.examples,
                      giza.operations.generate.redirects]),
        ('includes', [giza.operations.includes.recursive,
                      giza.operations.includes.changed,
                      giza.operations.includes.once,
                      giza.operations.includes.unused,
                      giza.operations.includes.list,
                      giza.operations.includes.graph,
                      giza.operations.includes.clean]),
        ('package', [giza.operations.packaging.fetch,
                     giza.operations.packaging.unwind,
                     giza.operations.packaging.create,
                     giza.operations.packaging.deploy]),
        ('translate', [giza.operations.translate.create_corpora,
                       giza.operations.translate.build_translation_model,
                       giza.operations.translate.model_results,
                       giza.operations.translate.merge_translations,
                       giza.operations.translate.po_to_corpus,
                       giza.operations.translate.dict_to_corpus,
                       giza.operations.translate.translate_po,
                       giza.operations.translate.translate_text_doc,
                       giza.operations.translate.flip_text,
                       giza.operations.translate.auto_approve_obvious_po]),
        ('tx', [giza.operations.tx.check_orphaned,
                giza.operations.tx.update_translations,
                giza.operations.tx.pull_translations,
                giza.operations.tx.push_translations]),
    ]

    for group_namespace, entry_points in command_groups:
        if group_namespace is None:
            argh.add_commands(parser, entry_points)
        else:
            argh.add_commands(parser, entry_points, namespace=group_namespace)

    args = RuntimeStateConfig()
    argh.dispatch(parser, namespace=args)
already_processed = set() num_already_processed = len(already_processed) for model_name, record_files in sorted(model_gamedata.items()): if set(record_files) <= already_processed: continue print("Gathering files for %s:" % model_name) for i, example_batch in enumerate( tqdm(preprocessing.shuffle_tf_examples(examples_per_record, record_files))): output_record = os.path.join(output_directory, '{}-{}.tfrecord.zz'.format(model_name, str(i))) preprocessing.write_tf_examples( output_record, example_batch, serialize=False) already_processed.update(record_files) print("Processed %s new files" % (len(already_processed) - num_already_processed)) with gfile.GFile(meta_file, 'w') as f: f.write('\n'.join(sorted(already_processed))) qmeas.stop_time('gather') parser = argparse.ArgumentParser() argh.add_commands(parser, [gtp, bootstrap, train, selfplay, gather, evaluate, validate]) if __name__ == '__main__': cloud_logging.configure() argh.dispatch(parser)
def main():
    """CLI entry point: registers the repository commands and dispatches."""
    commands = [mirror, add, empty, merge, init]
    parser = ArghParser(description='todo')
    parser.add_commands(commands)
    dispatch(parser)
def z():
    """Optimize atomic energies (zpe) with a ReaxFF model.

    Trains only the 'atomic' parameters (``opt=['atomic']``) for 1000 steps.
    Relies on the module-level ``direcs`` and ``batch`` globals.

    Note: the original body assigned an unused local ``opt`` with a stray
    trailing comma (binding a 1-tuple that was never read); that dead
    statement has been removed — ReaxFF receives ``opt=['atomic']`` exactly
    as before.
    """
    rn = ReaxFF(libfile='ffield',
                direcs=direcs,
                dft='siesta',
                atomic=True,
                optword='nocoul',
                opt=['atomic'],
                nn=False,
                cons=None,
                pkl=True,
                batch_size=batch,
                losFunc='n2',
                bo_penalty=10000.0)
    # Alternative optimizers (not used): GradientDescentOptimizer,
    # AdamOptimizer, AdagradOptimizer, RMSPropOptimizer
    # tc.session(learning_rate=1.0e-4, method='AdamOptimizer')
    rn.run(learning_rate=100,
           step=1000,
           print_step=10,
           writelib=1000)
    rn.close()


if __name__ == '__main__':
    # Use a command like ./bp.py <t> to run it:
    #   z: optimize zpe
    #   t: train the whole net
    parser = argparse.ArgumentParser()
    argh.add_commands(parser, [t, r, z])
    argh.dispatch(parser)
def call_main():
    """Run ``main`` as the parser's sole default command."""
    parser = argh.ArghParser()
    argh.set_default_command(parser, main)
    argh.dispatch(parser)
exit(1) t1 = time.time() time_taken = (t1 - t0) / 60 logging.info("TIME TAKEN: {0:.2f} minutes".format(time_taken)) send_email(url, email, file_loc, plugin, no_stdout, time_taken, format) def generate_output_name(file_format, plugins): """Generate a filename with the format sectool-report-PLUGINS-DATE.FILEFORMAT. """ date_frmt = "%y-%m-%d-%H%M" current_date = datetime.datetime.now().strftime(date_frmt) return OUTPUT_FILE.format('{0}{1}'.join(plugins), current_date, file_format) def send_email(url, e_mail, file_loc, plugin, no_stdout, time_taken, output_format): """Send an e-mail with a report. """ email_obj = Email(target_url=url, users_email_address=e_mail, input_file=file_loc, plugin_name=plugin, show_std_out=not no_stdout, duration=time_taken) email_obj.trigger_email_alert(output_format) if __name__ == '__main__': parser = ArgumentParser() set_default_command(parser, sectool) dispatch(parser)