def main(parser, options, reg):
    """Dump the parsed configuration of a suite.

    Depending on the options this prints task names only, the requested
    runtime items per task, or the explicitly requested config items.
    """
    suite, suiterc = parse_suite_arg(options, reg)
    # Optional '!cylc!' markup prefix on every output line.
    prefix = '!cylc!' if options.markup else ''
    template_vars = load_template_vars(
        options.templatevars, options.templatevars_file)
    config = SuiteConfig(suite, suiterc, options, template_vars)
    if options.tasks:
        # Task names only, one per line.
        for task in config.get_task_name_list():
            print(prefix + task)
    elif options.alltasks:
        # Dump the requested runtime items for every task.
        for task in config.get_task_name_list():
            items = [f'[runtime][{task}]{item}' for item in options.item]
            print(prefix + task, end=' ')
            config.pcfg.idump(
                items, options.sparse, options.pnative, prefix,
                options.oneline, none_str=options.none_str)
    else:
        # Dump only the explicitly requested items.
        config.pcfg.idump(
            options.item, options.sparse, options.pnative, prefix,
            options.oneline, none_str=options.none_str)
def main(_, options: 'Values', *ids) -> None: workflow_id, _, flow_file = parse_id( *ids, src=True, constraint='workflows', ) # extract task host platforms from the workflow_id config = WorkflowConfig( workflow_id, flow_file, options, load_template_vars(options.templatevars, options.templatevars_file)) platforms = { config.get_config(['runtime', name, 'platform']) for name in config.get_namespace_list('all tasks') } - {None, 'localhost'} # When "workflow run hosts" are formalised as "flow platforms" # we can substitute `localhost` for this, in the mean time # we will have to assume that flow hosts are configured correctly. if not platforms: sys.exit(0) verbose = cylc.flow.flags.verbosity > 0 # get the cylc version on each platform versions = {} for platform_name in sorted(platforms): platform = get_platform(platform_name) host = get_host_from_platform(platform, bad_hosts=None) cmd = construct_ssh_cmd(['version'], platform, host) if verbose: print(cmd) proc = procopen(cmd, stdin=DEVNULL, stdout=PIPE, stderr=PIPE) out, err = proc.communicate() out = out.decode() err = err.decode() if proc.wait() == 0: if verbose: print(" %s" % out) versions[platform_name] = out.strip() else: versions[platform_name] = f'ERROR: {err.strip()}' # report results max_len = max((len(platform_name) for platform_name in platforms)) print(f'{"platform".rjust(max_len)}: cylc version') print('-' * (max_len + 14)) for platform_name, result in versions.items(): print(f'{platform_name.rjust(max_len)}: {result}') if all((version == CYLC_VERSION for version in versions.values())): ret_code = 0 elif options.error: ret_code = 1 else: ret_code = 0 sys.exit(ret_code)
def main(_, options, *args):
    """Report the cylc version found on each task platform of a suite.

    Exits 0 when there are no remote platforms or every platform reports
    the running CYLC_VERSION; exits 1 on a version mismatch only when
    ``--error`` was given.
    """
    # suite name or file path
    suite, flow_file = parse_suite_arg(options, args[0])

    # extract task host platforms from the suite
    config = SuiteConfig(
        suite, flow_file, options,
        load_template_vars(options.templatevars, options.templatevars_file))

    # Collect distinct platforms; localhost (and unset) need no check.
    platforms = {
        config.get_config(['runtime', name, 'platform'])
        for name in config.get_namespace_list('all tasks')
    } - {None, 'localhost'}
    # When "suite run hosts" are formalised as "flow platforms"
    # we can substitute `localhost` for this, in the mean time
    # we will have to assume that flow hosts are configured correctly.

    if not platforms:
        # Nothing to interrogate.
        sys.exit(0)

    verbose = cylc.flow.flags.verbose

    # get the cylc version on each platform
    versions = {}
    for platform_name in sorted(platforms):
        platform = get_platform(platform_name)
        cmd = construct_platform_ssh_cmd(['version'], platform)
        if verbose:
            print(cmd)
        proc = procopen(cmd, stdin=DEVNULL, stdout=PIPE, stderr=PIPE)
        out, err = proc.communicate()
        out = out.decode()
        err = err.decode()
        if proc.wait() == 0:
            if verbose:
                print(" %s" % out)
            versions[platform_name] = out.strip()
        else:
            # Record the failure inline so it shows up in the report table.
            versions[platform_name] = f'ERROR: {err.strip()}'

    # report results
    max_len = max((len(platform_name) for platform_name in platforms))
    print(f'{"platform".rjust(max_len)}: cylc version')
    print('-' * (max_len + 14))
    for platform_name, result in versions.items():
        print(f'{platform_name.rjust(max_len)}: {result}')

    # FIX: previously the exit code variable was named `exit`, shadowing
    # the `exit` builtin; renamed to `ret_code`.
    if all((version == CYLC_VERSION for version in versions.values())):
        ret_code = 0
    elif options.error:
        # Mismatch is only fatal when requested via --error.
        ret_code = 1
    else:
        ret_code = 0
    sys.exit(ret_code)
def main(parser: COP, options: 'Values', workflow_id1: str, workflow_id2: str):
    """Compare two workflow definitions and report their differences.

    NOTE(review): the counters ``n_oone``/``n_otwo``/``n_diff`` read below
    are module-level names, presumably updated as a side effect of
    ``diffdict()`` — not visible in this block.
    """
    workflow_id_1, _, workflow_file_1_ = parse_id(
        workflow_id1,
        src=True,
        constraint='workflows',
    )
    workflow_id_2, _, workflow_file_2_ = parse_id(
        workflow_id2,
        src=True,
        constraint='workflows',
    )
    # Same source file means there is nothing to diff.
    if workflow_file_1_ == workflow_file_2_:
        parser.error("You can't diff a single workflow.")
    print(f"Parsing {workflow_id_1} ({workflow_file_1_})")
    template_vars = load_template_vars(
        options.templatevars, options.templatevars_file
    )
    config1 = WorkflowConfig(
        workflow_id_1, workflow_file_1_, options, template_vars
    ).cfg
    print(f"Parsing {workflow_id_2} ({workflow_file_2_})")
    # is_reload=True for the second parse to avoid clashing with the first.
    config2 = WorkflowConfig(
        workflow_id_2, workflow_file_2_, options, template_vars,
        is_reload=True
    ).cfg

    if config1 == config2:
        print(
            f"Workflow definitions {workflow_id_1} and {workflow_id_2} are "
            f"identical"
        )
        sys.exit(0)

    print(f"Workflow definitions {workflow_id_1} and {workflow_id_2} differ")

    workflow1_only = {}  # type: ignore
    workflow2_only = {}  # type: ignore
    diff_1_2 = {}  # type: ignore
    # TODO: this whole file could do with refactoring at some point

    # Populates the three dicts above (and the module-level counters).
    diffdict(config1, config2, workflow1_only, workflow2_only, diff_1_2)

    if n_oone > 0:
        print(f'\n{n_oone} items only in {workflow_id_1} (<)')
        prdict(workflow1_only, '<', nested=options.nested)

    if n_otwo > 0:
        print(f'\n{n_otwo} items only in {workflow_id_2} (>)')
        prdict(workflow2_only, '>', nested=options.nested)

    if n_diff > 0:
        print(f'\n{n_diff} common items differ {workflow_id_1}(<) '
              f'{workflow_id_2}(>)')
        prdict(diff_1_2, '', diff=True, nested=options.nested)
def test_load_template_vars_from_string_and_file_2(self):
    """Text pair variables take precedence over file."""
    # Each KEY=VALUE pair should have its value parsed into the
    # corresponding Python type (str, int, float, bool, None).
    raw_pairs = ["str='str'", "int=12", "float=12.3", "bool=True",
                 "none=None"]
    result = load_template_vars(template_vars=raw_pairs)
    self.assertEqual(
        {
            'str': 'str',
            'int': 12,
            'float': 12.3,
            'bool': True,
            'none': None,
        },
        result,
    )
def test_load_template_vars_from_string(self):
    """KEY=VALUE pairs are parsed into a dict (values kept as strings)."""
    raw_pairs = [
        "name=John",
        "type=Human",
        "age=12"
    ]
    result = load_template_vars(template_vars=raw_pairs)
    self.assertEqual(
        {"name": "John", "type": "Human", "age": "12"},
        result,
    )
def test_load_template_vars_from_file(self):
    """Variables are read from a file; '#' comment lines are ignored."""
    with tempfile.NamedTemporaryFile() as tf:
        # NOTE(review): the exact indentation inside this fixture string
        # is assumed not to matter to the parser — confirm against
        # load_template_vars' file handling.
        tf.write("""
        name=John
        type=Human
        # a comment
        # type=Test
        age=12
        """.encode())
        tf.flush()  # ensure content is on disk before it is re-read
        expected = {"name": "John", "type": "Human", "age": "12"}
        self.assertEqual(
            expected,
            load_template_vars(template_vars=None,
                               template_vars_file=tf.name))
def main(parser, opts, suite=None, start=None, stop=None):
    """Implement ``cylc graph``."""
    # Reject unsupported option combinations up front.
    if opts.ungrouped and opts.namespaces:
        raise UserInputError('Cannot combine --ungrouped and --namespaces.')
    if not opts.reference:
        raise UserInputError('Only the --reference use cases are supported')

    config = get_config(
        suite,
        opts,
        template_vars=load_template_vars(
            opts.templatevars, opts.templatevars_file),
    )
    if opts.namespaces:
        # Plot the runtime inheritance hierarchy.
        graph_inheritance(config)
    else:
        # Plot the dependency graph over the requested cycle window.
        graph_workflow(
            config,
            start,
            stop,
            ungrouped=opts.ungrouped,
            show_suicide=opts.show_suicide,
        )
def test_load_template_vars_from_string_and_file(self):
    """Text pair variables take precedence over file."""
    # These pairs should override the values read from the file below.
    pairs = ["name=John", "age=12"]
    with tempfile.NamedTemporaryFile() as tf:
        # NOTE(review): the exact indentation inside this fixture string
        # is assumed not to matter to the parser — confirm against
        # load_template_vars' file handling.
        tf.write("""
        name=Mariah
        type=Human
        # a comment
        # type=Test
        age=70
        """.encode())
        tf.flush()  # ensure content is on disk before it is re-read
        # 'type' comes from the file; 'name'/'age' from the pairs.
        expected = {"name": "John", "type": "Human", "age": "12"}
        self.assertEqual(
            expected,
            load_template_vars(template_vars=pairs,
                               template_vars_file=tf.name))
def main(parser, opts, suite=None, start=None, stop=None):
    """Implement ``cylc graph``."""
    # Parse the suite with any template variables supplied on the CLI.
    config = get_config(
        suite,
        opts,
        template_vars=load_template_vars(
            opts.templatevars, opts.templatevars_file),
    )
    # Metrics are always computed on the fully expanded graph:
    # families ungrouped and suicide triggers included.
    metrics = get_metrics(
        config,
        start,
        stop,
        ungrouped=True,
        show_suicide=True,
    )
    print(json.dumps(metrics, indent=4))
def main(parser, options, *args):
    """Compare two suite definitions and report their differences.

    NOTE(review): the counters ``n_oone``/``n_otwo``/``n_diff`` read below
    are module-level names, presumably updated as a side effect of
    ``diffdict()`` — not visible in this block.
    """
    suite1, suite1rc = parse_suite_arg(options, args[0])
    suite2, suite2rc = parse_suite_arg(options, args[1])
    # Same suite name means there is nothing to diff.
    if suite1 == suite2:
        parser.error("You can't diff a single suite.")
    print("Parsing %s (%s)" % (suite1, suite1rc))
    template_vars = load_template_vars(
        options.templatevars, options.templatevars_file)
    config1 = SuiteConfig(suite1, suite1rc, options, template_vars).cfg
    print("Parsing %s (%s)" % (suite2, suite2rc))
    # is_reload=True for the second parse to avoid clashing with the first.
    config2 = SuiteConfig(
        suite2, suite2rc, options, template_vars, is_reload=True).cfg

    if config1 == config2:
        print("Suite definitions %s and %s are identical" % (suite1, suite2))
        sys.exit(0)

    print("Suite definitions %s and %s differ" % (suite1, suite2))

    suite1_only = {}
    suite2_only = {}
    diff_1_2 = {}

    # Populates the three dicts above (and the module-level counters).
    diffdict(config1, config2, suite1_only, suite2_only, diff_1_2)

    if n_oone > 0:
        print()
        msg = str(n_oone) + ' items only in ' + suite1 + ' (<)'
        print(msg)
        prdict(suite1_only, '<', nested=options.nested)

    if n_otwo > 0:
        print()
        msg = str(n_otwo) + ' items only in ' + suite2 + ' (>)'
        print(msg)
        prdict(suite2_only, '>', nested=options.nested)

    if n_diff > 0:
        print()
        msg = (str(n_diff) + ' common items differ ' + suite1 +
               '(<) ' + suite2 + '(>)')
        print(msg)
        prdict(diff_1_2, '', diff=True, nested=options.nested)
def main(parser, options, *args):
    """Compare two workflow definitions and report their differences.

    NOTE(review): the counters ``n_oone``/``n_otwo``/``n_diff`` read below
    are module-level names, presumably updated as a side effect of
    ``diffdict()`` — not visible in this block.
    """
    workflow1_name, workflow1_config = parse_workflow_arg(options, args[0])
    workflow2_name, workflow2_config = parse_workflow_arg(options, args[1])
    # Same workflow name means there is nothing to diff.
    if workflow1_name == workflow2_name:
        parser.error("You can't diff a single workflow.")
    print(f"Parsing {workflow1_name} ({workflow1_config})")
    template_vars = load_template_vars(
        options.templatevars, options.templatevars_file)
    config1 = WorkflowConfig(
        workflow1_name, workflow1_config, options, template_vars).cfg
    print(f"Parsing {workflow2_name} ({workflow2_config})")
    # is_reload=True for the second parse to avoid clashing with the first.
    config2 = WorkflowConfig(
        workflow2_name, workflow2_config, options, template_vars,
        is_reload=True).cfg

    if config1 == config2:
        print(
            f"Workflow definitions {workflow1_name} and {workflow2_name} are "
            f"identical"
        )
        sys.exit(0)

    print(f"Workflow definitions {workflow1_name} and {workflow2_name} differ")

    workflow1_only = {}
    workflow2_only = {}
    diff_1_2 = {}

    # Populates the three dicts above (and the module-level counters).
    diffdict(config1, config2, workflow1_only, workflow2_only, diff_1_2)

    if n_oone > 0:
        print(f'\n{n_oone} items only in {workflow1_name} (<)')
        prdict(workflow1_only, '<', nested=options.nested)

    if n_otwo > 0:
        print(f'\n{n_otwo} items only in {workflow2_name} (>)')
        prdict(workflow2_only, '>', nested=options.nested)

    if n_diff > 0:
        print(f'\n{n_diff} common items differ {workflow1_name}(<) '
              f'{workflow2_name}(>)')
        prdict(diff_1_2, '', diff=True, nested=options.nested)
def main(parser, options, *args):
    """Compare two suite definitions and report their differences.

    NOTE(review): the counters ``n_oone``/``n_otwo``/``n_diff`` read below
    are module-level names, presumably updated as a side effect of
    ``diffdict()`` — not visible in this block.
    """
    suite1_name, suite1_config = parse_suite_arg(options, args[0])
    suite2_name, suite2_config = parse_suite_arg(options, args[1])
    # Same suite name means there is nothing to diff.
    if suite1_name == suite2_name:
        parser.error("You can't diff a single suite.")
    print(f"Parsing {suite1_name} ({suite1_config})")
    template_vars = load_template_vars(options.templatevars,
                                       options.templatevars_file)
    config1 = SuiteConfig(suite1_name, suite1_config, options,
                          template_vars).cfg
    print(f"Parsing {suite2_name} ({suite2_config})")
    # is_reload=True for the second parse to avoid clashing with the first.
    config2 = SuiteConfig(suite2_name, suite2_config, options,
                          template_vars, is_reload=True).cfg

    if config1 == config2:
        print(f"Suite definitions {suite1_name} and {suite2_name} are "
              f"identical")
        sys.exit(0)

    print(f"Suite definitions {suite1_name} and {suite2_name} differ")

    suite1_only = {}
    suite2_only = {}
    diff_1_2 = {}

    # Populates the three dicts above (and the module-level counters).
    diffdict(config1, config2, suite1_only, suite2_only, diff_1_2)

    if n_oone > 0:
        print(f'\n{n_oone} items only in {suite1_name} (<)')
        prdict(suite1_only, '<', nested=options.nested)

    if n_otwo > 0:
        print(f'\n{n_otwo} items only in {suite2_name} (>)')
        prdict(suite2_only, '>', nested=options.nested)

    if n_diff > 0:
        print(f'\n{n_diff} common items differ {suite1_name}(<) '
              f'{suite2_name}(>)')
        prdict(diff_1_2, '', diff=True, nested=options.nested)
def main(parser, opts, workflow=None, start=None, stop=None):
    """Implement ``cylc graph``.

    Supports --reference output and --diff mode, which graphs two
    workflows and prints a unified diff of the results (exiting 1 when
    they differ).
    """
    if opts.ungrouped and opts.namespaces:
        raise UserInputError('Cannot combine --ungrouped and --namespaces.')
    if not (opts.reference or opts.diff):
        raise UserInputError(
            'Only the --reference and --diff use cases are supported')

    template_vars = load_template_vars(
        opts.templatevars, opts.templatevars_file)

    # `write` is print by default; in --diff mode it is rebound to each
    # flow's list.append so output is captured instead of printed.
    write = print
    flows = [(workflow, [])]
    if opts.diff:
        flows.append((opts.diff, []))

    for flow, graph in flows:
        if opts.diff:
            write = graph.append
        config = get_config(flow, opts, template_vars=template_vars)
        if opts.namespaces:
            graph_inheritance(config, write=write)
        else:
            graph_workflow(config, start, stop, ungrouped=opts.ungrouped,
                           show_suicide=opts.show_suicide, write=write)

    if opts.diff:
        # Compare the two captured graphs line by line.
        lines = list(
            unified_diff([f'{line}\n' for line in flows[0][1]],
                         [f'{line}\n' for line in flows[1][1]],
                         fromfile=flows[0][0],
                         tofile=flows[1][0]))
        if lines:
            sys.stdout.writelines(lines)
            # Non-zero exit signals the graphs differ.
            sys.exit(1)
def main(parser, options, reg=None):
    """Dump global or suite configuration items.

    With no suite argument the global configuration is dumped;
    otherwise the parsed configuration of the named suite is dumped.
    """
    if options.print_hierarchy:
        # Show the config file load hierarchy and stop.
        print("\n".join(get_config_file_hierarchy(reg)))
        return

    if reg is None:
        # No suite given: dump the global configuration.
        glbl_cfg().idump(
            options.item,
            sparse=options.sparse,
            oneline=options.oneline,
            none_str=options.none_str,
        )
        return

    suite, flow_file = parse_suite_arg(options, reg)
    template_vars = load_template_vars(
        options.templatevars, options.templatevars_file)
    config = SuiteConfig(suite, flow_file, options, template_vars)
    config.pcfg.idump(
        options.item,
        options.sparse,
        oneline=options.oneline,
        none_str=options.none_str,
    )
def main(parser, options, reg):
    """View a processed suite.rc, either on stdout or in an editor.

    The processed file is written to a read-only temporary copy; if the
    user edits that copy anyway a warning is printed afterwards.
    """
    suite, suiterc = parse_suite_arg(options, reg)

    # Pick the GUI or terminal editor from the global config.
    if options.geditor:
        editor = glbl_cfg().get(['editors', 'gui'])
    else:
        editor = glbl_cfg().get(['editors', 'terminal'])

    # read in the suite.rc file
    viewcfg = {
        'mark': options.mark,
        'single': options.single,
        'label': options.label,
        'empy': options.empy or options.process,
        'jinja2': options.jinja2 or options.process,
        'contin': options.cat or options.process,
        'inline': (options.inline or options.jinja2 or options.empy
                   or options.process),
    }
    lines = read_and_proc(
        suiterc,
        load_template_vars(options.templatevars, options.templatevars_file),
        viewcfg=viewcfg, asedit=options.asedit)

    if options.stdout:
        for line in lines:
            print(line)
        sys.exit(0)

    # write to a temporary file
    viewfile = NamedTemporaryFile(
        suffix=".suite.rc", prefix=suite.replace('/', '_') + '.',
    )
    for line in lines:
        viewfile.write((line + '\n').encode())
    viewfile.seek(0, 0)

    # set the file to be read only
    os.chmod(viewfile.name, 0o400)

    # capture the temp file's mod time in case the user edits it
    # and overrides the readonly mode.
    modtime1 = os.stat(viewfile.name).st_mtime

    # in case editor has options, e.g. 'emacs -nw':
    command_list = shlex.split(editor)
    command_list.append(viewfile.name)
    command = ' '.join(command_list)
    # THIS BLOCKS UNTIL THE COMMAND COMPLETES
    retcode = call(command_list)
    if retcode != 0:
        # the command returned non-zero exit status
        raise CylcError(f'{command} failed: {retcode}')

    # !!!VIEWING FINISHED!!!

    # Did the user edit the file (mtime changed despite read-only mode)?
    modtime2 = os.stat(viewfile.name).st_mtime

    if modtime2 > modtime1:
        print()
        print('WARNING: YOU HAVE EDITED A TEMPORARY READ-ONLY SUITE COPY:',
              file=sys.stderr)
        print(viewfile.name, file=sys.stderr)
        print('In future use \'cylc [prep] edit\' to edit a suite.',
              file=sys.stderr)
        print()

    # DONE; closing the NamedTemporaryFile also deletes it.
    viewfile.close()
def main(parser: COP, options: 'Values', reg: str) -> None:
    """View a processed flow.cylc, either on stdout or in an editor.

    The processed file is written to a read-only temporary copy; if the
    user edits that copy anyway a warning is printed afterwards.
    """
    workflow, flow_file = parse_reg(reg, src=True)

    # Pick the GUI or terminal editor from the global config.
    if options.geditor:
        editor = glbl_cfg().get(['editors', 'gui'])
    else:
        editor = glbl_cfg().get(['editors', 'terminal'])

    # read in the flow.cylc file
    viewcfg = {
        'mark': options.mark,
        'single': options.single,
        'label': options.label,
        'empy': options.empy or options.process,
        'jinja2': options.jinja2 or options.process,
        'contin': options.cat or options.process,
        'inline': (options.inline or options.jinja2 or options.empy
                   or options.process),
    }
    lines = read_and_proc(
        flow_file,
        load_template_vars(options.templatevars, options.templatevars_file),
        viewcfg=viewcfg)

    if options.stdout:
        for line in lines:
            print(line)
        sys.exit(0)

    # write to a temporary file
    viewfile = NamedTemporaryFile(
        suffix=".flow.cylc", prefix=workflow.replace('/', '_') + '.',
    )
    for line in lines:
        viewfile.write((line + '\n').encode())
    viewfile.seek(0, 0)

    # set the file to be read only
    os.chmod(viewfile.name, 0o400)

    # capture the temp file's mod time in case the user edits it
    # and overrides the readonly mode.
    modtime1 = os.stat(viewfile.name).st_mtime

    # in case editor has options, e.g. 'emacs -nw':
    command_list = shlex.split(editor)
    command_list.append(viewfile.name)
    command = ' '.join(command_list)
    # THIS BLOCKS UNTIL THE COMMAND COMPLETES
    retcode = call(command_list)  # nosec (editor command is user configurable)
    if retcode != 0:
        # the command returned non-zero exit status
        raise CylcError(f'{command} failed: {retcode}')

    # !!!VIEWING FINISHED!!!

    # Did the user edit the file (mtime changed despite read-only mode)?
    modtime2 = os.stat(viewfile.name).st_mtime

    if modtime2 > modtime1:
        print(
            "\nWARNING: YOU HAVE EDITED A TEMPORARY READ-ONLY COPY "
            f"OF THE WORKFLOW:\n {viewfile.name}\n",
            file=sys.stderr)

    # DONE; closing the NamedTemporaryFile also deletes it.
    viewfile.close()
def main(_, options, reg):
    """cylc validate CLI.

    Parses the suite, warns about sequences that fall outside the initial
    cycle point, then instantiates every task at the initial cycle point
    to force evaluation of trigger expressions.
    """
    profiler = Profiler(None, options.profile_mode)
    profiler.start()

    if not cylc.flow.flags.debug:
        # for readability omit timestamps from logging unless in debug mode
        for handler in LOG.handlers:
            if isinstance(handler.formatter, CylcLogFormatter):
                handler.formatter.configure(timestamp=False)

    suite, flow_file = parse_suite_arg(options, reg)
    cfg = SuiteConfig(
        suite, flow_file, options,
        load_template_vars(options.templatevars, options.templatevars_file),
        output_fname=options.output, mem_log_func=profiler.log_memory)

    # Check bounds of sequences
    out_of_bounds = [str(seq) for seq in cfg.sequences
                     if seq.get_first_point(cfg.start_point) is None]
    if out_of_bounds:
        if len(out_of_bounds) > 1:
            # avoid spamming users with multiple warnings
            msg = ('multiple sequences out of bounds for initial cycle point '
                   '%s:\n%s' % (
                       cfg.start_point,
                       '\n'.join(
                           textwrap.wrap(', '.join(out_of_bounds), 70))))
        else:
            msg = '%s: sequence out of bounds for initial cycle point %s' % (
                out_of_bounds[0], cfg.start_point)
        if options.strict:
            LOG.warning(msg)
        elif cylc.flow.flags.verbose:
            sys.stderr.write(' + %s\n' % msg)

    # Instantiate tasks and force evaluation of trigger expressions.
    # (Taken from config.py to avoid circular import problems.)
    # TODO - This is not exhaustive, it only uses the initial cycle point.
    if cylc.flow.flags.verbose:
        print('Instantiating tasks to check trigger expressions')
    flow_label = FlowLabelMgr().get_new_label()
    for name, taskdef in cfg.taskdefs.items():
        try:
            itask = TaskProxy(taskdef, cfg.start_point, flow_label)
        except TaskProxySequenceBoundsError:
            # Should have already failed above in strict mode.
            mesg = 'Task out of bounds for %s: %s\n' % (cfg.start_point, name)
            if cylc.flow.flags.verbose:
                sys.stderr.write(' + %s\n' % mesg)
            continue
        except Exception as exc:
            raise SuiteConfigError(
                'failed to instantiate task %s: %s' % (name, exc))

        # force trigger evaluation now
        try:
            itask.state.prerequisites_eval_all()
        except TriggerExpressionError as exc:
            err = str(exc)
            if '@' in err:
                # Special-case message for xtriggers in conditionals.
                print(f"ERROR, {name}: xtriggers can't be in conditional"
                      f" expressions: {err}", file=sys.stderr)
            else:
                print('ERROR, %s: bad trigger: %s' % (name, err),
                      file=sys.stderr)
            raise SuiteConfigError("ERROR: bad trigger")
        except Exception as exc:
            print(str(exc), file=sys.stderr)
            raise SuiteConfigError(
                '%s: failed to evaluate triggers.' % name)
        if cylc.flow.flags.verbose:
            print(' + %s ok' % itask.identity)

    print(cparse('<green>Valid for cylc-%s</green>' % CYLC_VERSION))
    profiler.stop()
def main(parser, options, suite, *task_ids):
    """cylc submit CLI.

    No TASK EVENT HOOKS are set for the submit command because there is
    no scheduler instance watching for task failure etc.

    Note: a suite contact env file is not written by this command (it
    would overwrite the real one if the suite is running).
    """
    if not options.verbose and not options.debug:
        LOG.setLevel(WARNING)
    for task_id in task_ids:
        if not TaskID.is_valid_id(task_id):
            raise UserInputError("Invalid task ID %s" % task_id)
    suiterc = get_suite_rc(suite)
    suite_dir = os.path.dirname(suiterc)
    # For user-defined batch system handlers
    sys.path.append(os.path.join(suite_dir, 'python'))

    # Load suite config and tasks
    config = SuiteConfig(
        suite, suiterc, options,
        load_template_vars(options.templatevars, options.templatevars_file))
    itasks = []
    for task_id in task_ids:
        name_str, point_str = TaskID.split(task_id)
        taskdefs = config.find_taskdefs(name_str)
        if not taskdefs:
            raise UserInputError("No task found for %s" % task_id)
        for taskdef in taskdefs:
            itasks.append(
                TaskProxy(taskdef, get_point(point_str).standardise(),
                          is_startup=True))

    # Initialise job submit environment
    make_suite_run_tree(suite)
    # Extract job.sh from library, for use in job scripts.
    extract_resources(get_suite_srv_dir(suite), ['etc/job.sh'])
    pool = SubProcPool()
    owner = get_user()
    job_pool = JobPool(suite, owner)
    db_mgr = SuiteDatabaseManager()
    task_job_mgr = TaskJobManager(
        suite, pool, db_mgr,
        TaskEventsManager(suite, pool, db_mgr, BroadcastMgr(db_mgr),
                          job_pool),
        job_pool)
    task_job_mgr.task_remote_mgr.single_task_mode = True
    task_job_mgr.job_file_writer.set_suite_env({
        'CYLC_UTC': str(config.cfg['cylc']['UTC mode']),
        'CYLC_DEBUG': str(cylc.flow.flags.debug).lower(),
        'CYLC_VERBOSE': str(cylc.flow.flags.verbose).lower(),
        'CYLC_SUITE_NAME': suite,
        'CYLC_CYCLING_MODE': str(config.cfg['scheduling']['cycling mode']),
        'CYLC_SUITE_INITIAL_CYCLE_POINT':
            str(config.cfg['scheduling']['initial cycle point']),
        'CYLC_SUITE_FINAL_CYCLE_POINT':
            str(config.cfg['scheduling']['final cycle point']),
    })
    ret_code = 0
    waiting_tasks = list(itasks)
    if options.dry_run:
        # Prepare job files only; poll until every task is handled.
        while waiting_tasks:
            prep_tasks, bad_tasks = task_job_mgr.prep_submit_task_jobs(
                suite, waiting_tasks, dry_run=True)
            for itask in prep_tasks + bad_tasks:
                waiting_tasks.remove(itask)
            if waiting_tasks:
                task_job_mgr.proc_pool.process()
                sleep(1.0)
        for itask in itasks:
            if itask.local_job_file_path:
                print(('JOB SCRIPT=%s' % itask.local_job_file_path))
            else:
                print(('Unable to prepare job file for %s' % itask.identity),
                      file=sys.stderr)
                ret_code = 1
    else:
        # Really submit; poll until every task has been submitted.
        while waiting_tasks:
            for itask in task_job_mgr.submit_task_jobs(suite, waiting_tasks):
                waiting_tasks.remove(itask)
            if waiting_tasks:
                task_job_mgr.proc_pool.process()
                sleep(1.0)
        # Drain the subprocess pool before reporting.
        while task_job_mgr.proc_pool.is_not_done():
            task_job_mgr.proc_pool.process()
        for itask in itasks:
            if itask.summary.get('submit_method_id') is not None:
                print(('[%s] Job ID: %s' % (
                    itask.identity, itask.summary['submit_method_id'])))
            if itask.state(TASK_STATUS_SUBMIT_FAILED):
                ret_code = 1
    sys.exit(ret_code)
def main(parser, options, reg):
    """List workflow tasks/namespaces: as a tree, by cycle-point range,
    or flat (optionally with MRO or title columns).
    """
    workflow, flow_file = parse_workflow_arg(options, reg)
    if options.all_tasks and options.all_namespaces:
        parser.error("Choose either -a or -n")
    if options.all_tasks:
        which = "all tasks"
    elif options.all_namespaces:
        which = "all namespaces"
    elif options.crange:
        which = "crange"
        try:
            tr_start, tr_stop = options.crange.split(',')
        except ValueError:
            # A single point means start == stop.
            tr_start = tr_stop = options.crange
    else:
        which = "graphed tasks"

    if options.tree:
        # FIX: use os.environ.get() — indexing raised KeyError when
        # $LANG was not set in the environment.
        if os.environ.get('LANG') == 'C' and options.box:
            print("WARNING, ignoring -t/--tree: $LANG=C", file=sys.stderr)
            options.tree = False

    if options.titles and options.mro:
        parser.error("Please choose --mro or --title, not both")

    if options.tree and any(
            [options.all_tasks, options.all_namespaces, options.mro]):
        print("WARNING: -t chosen, ignoring non-tree options.",
              file=sys.stderr)

    config = WorkflowConfig(
        workflow, flow_file, options,
        load_template_vars(options.templatevars, options.templatevars_file))
    if options.tree:
        config.print_first_parent_tree(
            pretty=options.box, titles=options.titles)
    elif options.crange:
        for node in sorted(config.get_node_labels(tr_start, tr_stop)):
            print(node)
    else:
        result = config.get_namespace_list(which)
        namespaces = list(result)
        namespaces.sort()

        if (options.mro or options.titles):
            # compute padding needed to align the second column
            maxlen = 0
            for ns in namespaces:
                if len(ns) > maxlen:
                    maxlen = len(ns)
            padding = maxlen * ' '

        for ns in namespaces:
            if options.mro:
                print(ns, padding[0:len(padding) - len(ns)], end=' ')
                print(' '.join(config.get_mro(ns)))
            elif options.titles:
                print(ns, padding[0:len(padding) - len(ns)], end=' ')
                print(result[ns])
            else:
                print(ns)
def test_load_template_vars_no_params(self):
    """Calling with no arguments yields an empty (falsy) result."""
    result = load_template_vars()
    self.assertFalse(result)
def main(_, options, *args):
    """Check the cylc version installed on each remote task account.

    Resolves each task's (owner, host) account, runs ``cylc version``
    there, and warns about any account whose version differs from ours.
    Exits 1 only when there are warnings and ``--error`` was given.
    """
    # suite name or file path
    suite, suiterc = parse_suite_arg(options, args[0])
    # extract task host accounts from the suite
    config = SuiteConfig(
        suite, suiterc, options,
        load_template_vars(options.templatevars, options.templatevars_file))
    account_set = set()
    for name in config.get_namespace_list('all tasks'):
        account_set.add(
            (config.get_config(['runtime', name, 'remote', 'owner']),
             config.get_config(['runtime', name, 'remote', 'host'])))
    task_remote_mgr = TaskRemoteMgr(suite, SubProcPool())
    # Kick off asynchronous host selection for every account first...
    for _, host_str in account_set:
        task_remote_mgr.remote_host_select(host_str)
    accounts = []
    # ...then poll until every selection has resolved.
    while account_set:
        for user, host_str in account_set.copy():
            res = task_remote_mgr.remote_host_select(host_str)
            if res:
                account_set.remove((user, host_str))
                accounts.append((user, res))
        if account_set:
            task_remote_mgr.proc_pool.process()
            sleep(1.0)

    # Interrogate each remote account with CYLC_VERSION set to our
    # version. Post backward compatibility concerns to do this we can
    # just run:
    #   cylc version --host=HOST --user=USER
    # but this command only exists for version > 6.3.0.
    # So for the moment generate an actual remote invocation command
    # string for "cylc --version".

    # (save verbose flag as gets reset in remrun)
    verbose = cylc.flow.flags.verbose

    warn = {}
    contacted = 0
    for user, host in sorted(accounts):
        argv = ["cylc", "version"]
        if user and host:
            argv += ["--user=%s" % user, "--host=%s" % host]
            user_at_host = "%s@%s" % (user, host)
        elif user:
            argv += ["--user=%s" % user]
            user_at_host = "%s@localhost" % user
        elif host:
            argv += ["--host=%s" % host]
            user_at_host = host
        if verbose:
            print("%s: %s" % (user_at_host, ' '.join(argv)))
        proc = procopen(argv, stdin=open(os.devnull),
                        stdoutpipe=True, stderrpipe=True)
        out, err = proc.communicate()
        out = out.decode()
        err = err.decode()
        if proc.wait() == 0:
            if verbose:
                print(" %s" % out)
            contacted += 1
            out = out.strip()
            if out != CYLC_VERSION:
                # Remember the mismatching version for the report.
                warn[user_at_host] = out
        else:
            print('ERROR ' + user_at_host + ':', file=sys.stderr)
            print(err, file=sys.stderr)

    # report results
    if not warn:
        if contacted:
            print("All", contacted, "accounts have cylc-" + CYLC_VERSION)
    else:
        print("WARNING: failed to invoke cylc-%s on %d accounts:" %
              (CYLC_VERSION, len(warn)))
        # Pad account names so versions line up in a column.
        m = max(len(ac) for ac in warn)
        for ac, warning in warn.items():
            print(' ', ac.ljust(m), warning)
        if options.error:
            sys.exit(1)