def parse_args(args):
    """
    Create the parser, parse the arguments and set up logging.

    :param list args: command line arguments to parse (e.g. sys.argv[1:]).
    :returns: tuple of (parser, parsed arguments namespace).
    """
    # Implicit string concatenation replaces the original '\' line
    # continuation inside the literal, which leaked a stray backslash and
    # source indentation into the displayed help text.
    parser = argparse.ArgumentParser(
        description='Create a connection to the remote server. '
        'For instructions on setting up dsi locally')
    parser.add_argument('-d', '--debug', action='store_true', help='enable debug output')
    parser.add_argument('--dryrun',
                        action='store_true',
                        default=False,
                        help='Do not run the command, just evaluate it.')
    parser.add_argument('--log-file', help='path to log file')
    parser.add_argument('-s', '--ssh', default='ssh', help='the ssh location')
    parser.add_argument('host',
                        metavar='host',
                        nargs='+',
                        type=str,
                        help='the path of the host to connect to')
    # action='append' with nargs='?' collects each -c occurrence into a list;
    # a fresh default list is created per call, so no cross-call accumulation.
    parser.add_argument('-c',
                        '--command',
                        metavar='command',
                        nargs='?',
                        action='append',
                        default=[],
                        help='the remote command to run')
    arguments = parser.parse_args(args)
    setup_logging(arguments.debug, arguments.log_file)
    return parser, arguments
def parse_args(args):
    """
    Build the argument parser, parse ``args``, and configure logging.

    :param list args: command line arguments to parse (e.g. sys.argv[1:]).
    :returns: the parsed arguments namespace.
    """
    parser = argparse.ArgumentParser(description='Expand an alias to a server ip address.')
    parser.add_argument('-d', '--debug', action='store_true', help='enable debug output')
    parser.add_argument('-e', '--export', action='store_true',
                        help='enable output as shell export')
    parser.add_argument('--log-file', default='/tmp/expand.log', help='path to log file')
    parser.add_argument('host', metavar='host', type=str,
                        help='the alias to expand and convert to an ip')
    parsed = parser.parse_args(args)
    setup_logging(parsed.debug, parsed.log_file)
    return parsed
def main():
    """Entry point: parse options, configure logging, and provision resources."""
    cli = parse_command_line()
    setup_logging(cli.debug, cli.log_file)

    conf = ConfigDict('infrastructure_provisioning')
    conf.load()

    # Verbose provisioning output is tied to the same flag as debug logging.
    Provisioner(conf, verbose=cli.debug).provision_resources()
def main():
    """Index CCDB complaint data into Elasticsearch per the parsed configuration."""
    arg_parser = build_arg_parser()
    opts = arg_parser.parse_args()
    logger = setup_logging(opts.doc_type)

    if opts.dump_config:
        logger.info('Running index_ccdb with')
        logger.info(arg_parser.format_values())

    # NOTE(review): the -v1/-v2 names look like a primary/backup pair behind
    # the alias for index swapping — confirm against index_json_data.
    index_alias = opts.index_name
    index_name = "{}-v1".format(index_alias)
    backup_index_name = "{}-v2".format(index_alias)

    logger.info("Creating Elasticsearch Connection")
    if opts.is_aws_host:
        es = get_aws_es_connection(opts)
        logger.info('AWS configured as Elasticsearch host')
    else:
        es = get_es_connection(opts)

    qas_timestamp = get_qa_timestamp(opts, logger)

    logger.info("Begin indexing data in Elasticsearch")
    index_json_data(es, logger, opts.doc_type, opts.settings, opts.mapping,
                    opts.dataset, index_name, backup_index_name, index_alias,
                    qas_timestamp=qas_timestamp)
def main():
    """Parse args, set up logging, then start the mongodb cluster."""
    cli_args = parse_command_line()
    setup_logging(cli_args.debug, cli_args.log_file)

    conf = ConfigDict('mongodb_setup')
    conf.load()

    # Delays should be unset at the end of each test_control.py run, but if it didn't complete...
    safe_reset_all_delays(conf)

    # Start MongoDB cluster(s) using config given in mongodb_setup.topology (if any).
    # Note: This also installs mongo client binary onto workload client.
    cluster = MongodbSetup(config=conf)
    start_cluster(cluster, conf)
def main(argv):
    """Parse command-line args and run the workload_setup.yml operations."""
    arg_parser = argparse.ArgumentParser(description='Workload Setup')
    arg_parser.add_argument('-d', '--debug', action='store_true', help='enable debug output')
    arg_parser.add_argument('--log-file', help='path to log file')
    options = arg_parser.parse_args(argv)
    setup_logging(options.debug, options.log_file)

    conf = ConfigDict('workload_setup')
    conf.load()

    # Delays should be unset at the end of each test_control.py run, but if it didn't complete...
    safe_reset_all_delays(conf)

    WorkloadSetupRunner(conf).setup_workloads()
def main():
    """Index taxonomy data into Elasticsearch per the parsed configuration."""
    arg_parser = build_arg_parser()
    options = arg_parser.parse_args()
    logger = setup_logging('taxonomy')

    if options.dump_config:
        logger.info('Running index_taxonomy with')
        logger.info(arg_parser.format_values())

    es_conn = get_es_connection(options)
    logger.info("Begin indexing taxonomy data in Elasticsearch")
    index_taxonomy(es_conn, logger, options.taxonomy, options.index_name)
def main(argv):
    """
    Main function. Parse command line options, and run analysis.

    Note that the return value here determines whether Evergreen considers
    the entire task passed or failed. Non-zero return value means failure.

    :returns: int the exit status to return to the caller (0 for OK)
    """
    arg_parser = argparse.ArgumentParser(description='Analyze DSI test results.')
    arg_parser.add_argument('-d', '--debug', action='store_true', help='enable debug output')
    arg_parser.add_argument('--log-file', help='path to log file')
    options = arg_parser.parse_args(argv)
    setup_logging(options.debug, options.log_file)

    conf = ConfigDict('analysis')
    conf.load()

    analyzer = ResultsAnalyzer(conf)
    analyzer.analyze_all()
    # Any recorded failure maps to a non-zero exit status.
    return 0 if analyzer.failures == 0 else 1
dest='doc_type', default='complaint', help='Elasticsearch document type') group = p.add_argument_group('Files') group.add('--dataset', dest='dataset', required=True, help="Complaint data in NDJSON format") return p if __name__ == '__main__': p = build_arg_parser() cfg = p.parse_args() logger = setup_logging(cfg.doc_type) if cfg.dump_config: logger.info('Running index_ccdb with') logger.info(p.format_values()) index_alias = cfg.index_name index_name = "{}-v1".format(index_alias) backup_index_name = "{}-v2".format(index_alias) logger.info("Creating Elasticsearch Connection") es = get_es_connection(cfg) logger.info("Begin indexing data in Elasticsearch") index_json_data(es, logger, cfg.doc_type, cfg.settings, cfg.mapping, cfg.dataset, index_name, backup_index_name, index_alias)
def test_setup_logging(self):
    """setup_logging('foo') should return a usable (non-None) logger."""
    logger = sut.setup_logging('foo')
    self.assertIsNotNone(logger)
def parse_command_line(config, args=None):
    """
    Parse the command line options for setting up a working directory.

    :param dict config: The bootstrap.py config, populated from cli options. (NOT ConfigDict.)
    :param list args: Command line arguments to pass to argparse.
    :returns: the updated ``config`` dict.
    """
    # Implicit string concatenation replaces the original '\' line
    # continuations inside the literal, which leaked stray backslashes and
    # source indentation into the displayed help text.
    parser = argparse.ArgumentParser(
        description='Setup DSI working environment. For instructions '
        'on setting up dsi locally, see '
        'https://drive.google.com/open?id=14QXOmo-ia8w72pW5zqQ2fCWfXEwiVQ8_1EoMCkB4baY')
    parser.add_argument('-b',
                        '--bootstrap-file',
                        help='Specify the bootstrap file. If not specified, will look for '
                        'bootstrap.yml in the current directory. ')
    parser.add_argument('-d', '--debug', action='store_true', help='enable debug output')
    parser.add_argument('-D',
                        '--directory',
                        default='.',
                        help="Directory to setup. Defaults to current directory")
    parser.add_argument('--log-file', help='path to log file')
    # These options are ignored but allowed for backward compatibility
    parser.add_argument('--production', action='store_true', default=False, help='(Ignored)')
    parser.add_argument('-v', '--verbose', action='store_true', help='(Ignored, use -d instead.)')
    parser.add_argument('-l',
                        '--symlink',
                        action='store_true',
                        default=False,
                        help='Symlink files instead of copying them.')
    parser.add_argument(
        '--list',
        action='store_true',
        default=False,
        help='List available canned configurations that you can use in bootstrap.yml.')
    args = parser.parse_args(args)
    setup_logging(args.debug, args.log_file)  # pylint: disable=no-member

    # Copy only the options that were actually supplied (or always truthy,
    # like --directory's '.' default) into the bootstrap config dict.
    if args.bootstrap_file:
        config['bootstrap_file'] = args.bootstrap_file
    if args.directory:
        config['directory'] = args.directory
    if args.symlink:
        config['symlink'] = args.symlink
    if args.list:
        config['list'] = args.list
    return config