def test_query_cluster_should_gracefully_handle_json_parsing_failures(self):
    """query_cluster should return an empty result, not raise, when the scheduler responds with a non-JSON body."""
    http.configure({'http': {'modules': {'session-module': 'requests',
                                         'adapters-module': 'requests.adapters'}}})
    cluster = {'url': 'http://localhost'}
    uuids = [uuid.uuid4()]
    with requests_mock.mock() as mock_server:
        # Scheduler replies 200 with an unparseable body.
        mock_server.get('http://localhost/rawscheduler', text='this is not json')
        result = query_cluster(cluster, uuids, None, None, None, make_job_request, 'job')
        self.assertEqual([], result)
def run(args):
    """
    Main entrypoint to the cook scheduler CLI. Loads configuration files,
    processes global command line arguments, and calls other command line
    sub-commands (actions) if necessary.

    :param args: raw command-line argument list (without the program name)
    :return: the sub-command's result, 0 for --version, or None when no action was given
    """
    args = vars(parser.parse_args(args))
    print_version = args.pop('version')
    if print_version:
        print(f'cs version {version.VERSION}')
        return 0

    util.silent = args.pop('silent')
    verbose = args.pop('verbose') and not util.silent

    log_format = '%(asctime)s [%(levelname)s] [%(name)s] %(message)s'
    if verbose:
        # Reset any pre-existing handlers so basicConfig takes effect.
        logging.getLogger('').handlers = []
        logging.basicConfig(format=log_format, level=logging.DEBUG)
    else:
        logging.disable(logging.FATAL)

    # Lazy %-args: the message is only formatted if DEBUG logging is enabled.
    logging.debug('args: %s', args)

    action = args.pop('action')
    config_path = args.pop('config')
    cluster = args.pop('cluster')
    url = args.pop('url')

    if action is None:
        parser.print_help()
    else:
        config_map = configuration.load_config_with_defaults(config_path)
        try:
            metrics.initialize(config_map)
            metrics.inc('command.%s.runs' % action)
            clusters = load_target_clusters(config_map, url, cluster)
            http.configure(config_map)
            # Drop unset CLI options so configured action defaults can fill them in.
            args = {k: v for k, v in args.items() if v is not None}
            defaults = config_map.get('defaults')
            action_defaults = (defaults.get(action) if defaults else None) or {}
            result = actions[action](clusters, deep_merge(action_defaults, args), config_path)
            logging.debug('result: %s', result)
            return result
        finally:
            metrics.close()

    return None
def test_query_cluster_should_gracefully_handle_json_parsing_failures(self):
    """query_cluster should yield an empty list, not raise, when the scheduler body is not valid JSON."""
    plugin_map = {
        'http-adapter-factory': requests.adapters.HTTPAdapter,
        'http-session-factory': requests.Session,
    }
    http.configure(config={}, plugins=plugin_map)
    cluster = {'url': 'http://localhost'}
    uuids = [uuid.uuid4()]
    with requests_mock.mock() as mock_server:
        # Scheduler replies 200 with an unparseable body.
        mock_server.get('http://localhost/rawscheduler', text='this is not json')
        result = query_cluster(cluster, uuids, None, None, None, make_job_request, 'job')
        self.assertEqual([], result)
def run(args, plugins):
    """
    Main entrypoint to the cook scheduler CLI. Loads configuration files,
    processes global command line arguments, and calls other command line
    sub-commands (actions) if necessary.

    :param args: raw command-line argument list (without the program name)
    :param plugins: a map from plugin-name -> function or Class.SubCommandPlugin
    :return: the sub-command's result, or None when no action was given
    """
    # SubCommandPlugins must register their subparsers BEFORE parse_args runs,
    # otherwise the parser rejects the plugin's subcommand as unknown.
    for name, instance in plugins.items():
        if isinstance(instance, SubCommandPlugin):
            logging.debug('Adding SubCommandPlugin %s', name)
            try:
                instance.register(subparsers.add_parser, configuration.add_defaults)
                logging.debug('Done adding SubCommandPlugin %s', name)
                name = instance.name()
                if name in actions:
                    raise Exception('SubCommandPlugin %s clashes with an existing subcommand.' % name)
                actions[name] = instance.run
            except Exception as e:
                # Best-effort: a broken plugin must not prevent the CLI from starting.
                print('Failed to load SubCommandPlugin %s: %s' % (name, e), file=sys.stderr)

    args = vars(parser.parse_args(args))

    util.silent = args.pop('silent')
    verbose = args.pop('verbose') and not util.silent

    log_format = '%(asctime)s [%(levelname)s] [%(name)s] %(message)s'
    if verbose:
        # Reset any pre-existing handlers so basicConfig takes effect.
        logging.getLogger('').handlers = []
        logging.basicConfig(format=log_format, level=logging.DEBUG)
    else:
        logging.disable(logging.FATAL)

    logging.debug('args: %s', args)

    action = args.pop('action')
    config_path = args.pop('config')
    cluster = args.pop('cluster')
    url = args.pop('url')

    if action is None:
        parser.print_help()
    else:
        _, config_map = configuration.load_config_with_defaults(config_path)
        try:
            metrics.initialize(config_map)
            metrics.inc('command.%s.runs' % action)
            clusters = load_target_clusters(config_map, url, cluster)
            http.configure(config_map, plugins)
            cook.plugins.configure(plugins)
            # Drop unset CLI options so configured action defaults can fill them in.
            args = {k: v for k, v in args.items() if v is not None}
            defaults = config_map.get('defaults')
            action_defaults = (defaults.get(action) if defaults else None) or {}
            # BUG FIX: original was "'going to execute % action' % action", which
            # raises ValueError (unsupported format character 'a') whenever DEBUG
            # logging is enabled. Use a proper lazy %s argument instead.
            logging.debug('going to execute action %s', action)
            result = actions[action](clusters, deep_merge(action_defaults, args), config_path)
            logging.debug('result: %s', result)
            return result
        finally:
            metrics.close()

    return None