def main(parser, options, *args):
    """Compare two parsed workflow configurations and report differences."""
    name1, path1 = parse_workflow_arg(options, args[0])
    name2, path2 = parse_workflow_arg(options, args[1])
    if name1 == name2:
        parser.error("You can't diff a single workflow.")

    print(f"Parsing {name1} ({path1})")
    template_vars = load_template_vars(
        options.templatevars, options.templatevars_file)
    cfg_one = WorkflowConfig(name1, path1, options, template_vars).cfg

    print(f"Parsing {name2} ({path2})")
    cfg_two = WorkflowConfig(
        name2, path2, options, template_vars, is_reload=True).cfg

    if cfg_one == cfg_two:
        print(
            f"Workflow definitions {name1} and {name2} are "
            f"identical"
        )
        sys.exit(0)

    print(f"Workflow definitions {name1} and {name2} differ")

    # Accumulators filled in-place by diffdict().
    only_in_one = {}
    only_in_two = {}
    common_diffs = {}
    diffdict(cfg_one, cfg_two, only_in_one, only_in_two, common_diffs)

    # NOTE(review): n_oone / n_otwo / n_diff are not defined in this
    # function — presumably module-level counters updated by diffdict();
    # confirm against the module globals.
    if n_oone > 0:
        print(f'\n{n_oone} items only in {name1} (<)')
        prdict(only_in_one, '<', nested=options.nested)

    if n_otwo > 0:
        print(f'\n{n_otwo} items only in {name2} (>)')
        prdict(only_in_two, '>', nested=options.nested)

    if n_diff > 0:
        print(f'\n{n_diff} common items differ {name1}(<) '
              f'{name2}(>)')
        prdict(common_diffs, '', diff=True, nested=options.nested)
def main(_, options, *args):
    """Report the cylc version installed on each task platform of a workflow.

    Parses the workflow, collects the distinct (non-local) task platforms,
    runs ``cylc version`` on each over SSH and prints a table of results.
    Exits non-zero only when ``--error`` is set and any remote version
    differs from the local one (or the remote call failed).
    """
    # workflow name or file path
    workflow, flow_file = parse_workflow_arg(options, args[0])

    # extract task host platforms from the workflow
    config = WorkflowConfig(
        workflow, flow_file, options,
        load_template_vars(options.templatevars, options.templatevars_file))

    platforms = {
        config.get_config(['runtime', name, 'platform'])
        for name in config.get_namespace_list('all tasks')
    } - {None, 'localhost'}

    # When "workflow run hosts" are formalised as "flow platforms"
    # we can substitute `localhost` for this, in the mean time
    # we will have to assume that flow hosts are configured correctly.

    if not platforms:
        # no remote platforms to check
        sys.exit(0)

    verbose = cylc.flow.flags.verbosity > 0

    # get the cylc version on each platform
    versions = {}
    for platform_name in sorted(platforms):
        platform = get_platform(platform_name)
        cmd = construct_ssh_cmd(['version'], platform)
        if verbose:
            print(cmd)
        proc = procopen(cmd, stdin=DEVNULL, stdout=PIPE, stderr=PIPE)
        out, err = proc.communicate()
        out = out.decode()
        err = err.decode()
        if proc.wait() == 0:
            if verbose:
                print(" %s" % out)
            versions[platform_name] = out.strip()
        else:
            versions[platform_name] = f'ERROR: {err.strip()}'

    # report results as an aligned table
    max_len = max(len(platform_name) for platform_name in platforms)
    print(f'{"platform".rjust(max_len)}: cylc version')
    print('-' * (max_len + 14))
    for platform_name, result in versions.items():
        print(f'{platform_name.rjust(max_len)}: {result}')

    # Fix: don't shadow the builtin `exit`; the previous
    # if/elif/else collapses to a single condition.
    if options.error and any(
            version != CYLC_VERSION for version in versions.values()):
        exit_code = 1
    else:
        exit_code = 0
    sys.exit(exit_code)
def main(parser, options, reg=None):
    """Dump the global configuration, or a workflow's parsed configuration."""
    if options.print_hierarchy:
        print("\n".join(get_config_file_hierarchy(reg)))
        return

    if reg is None:
        # no workflow given: dump the global configuration
        glbl_cfg().idump(
            options.item,
            sparse=options.sparse,
            oneline=options.oneline,
            none_str=options.none_str)
        return

    workflow, flow_file = parse_workflow_arg(options, reg)
    template_vars = load_template_vars(
        options.templatevars, options.templatevars_file)
    config = WorkflowConfig(workflow, flow_file, options, template_vars)
    config.pcfg.idump(
        options.item,
        options.sparse,
        oneline=options.oneline,
        none_str=options.none_str)
def main(parser, options, reg, *patterns):
    """Search for pattern matches in a workflow definition.

    Greps the (inlined) flow.cylc file, reporting each match with its source
    file, nested section heading and line number; with ``--search-bin`` also
    greps scripts in the workflow's ``bin`` directory.
    """
    workflow, flow_file = parse_workflow_arg(options, reg)

    # cylc search WORKFLOW PATTERN
    pattern = '|'.join(patterns)

    workflowdir = os.path.dirname(flow_file)

    if os.path.isfile(flow_file):
        # context manager guarantees the file handle is closed
        with open(flow_file, 'r') as handle:
            lines = handle.readlines()
        lines = inline(lines, workflowdir, flow_file, for_grep=True)
    else:
        parser.error(f"File not found: {flow_file}")

    # compile the fixed patterns once, outside the per-line loop
    pattern_re = re.compile(pattern)
    start_inc_re = re.compile(
        r'^#\+\+\+\+ START INLINED INCLUDE FILE ([\w/\.\-]+)')
    section_re = re.compile(r'\s*(\[+\s*.+\s*\]+)')

    sections = deque(['(top)'])

    line_count = 1
    inc_file = None
    in_include_file = False
    prev_section_key = None
    prev_file = None

    for line in lines:
        m = start_inc_re.match(line)
        if m:
            # entering an inlined include-file: track its own line numbers
            inc_file = m.groups()[0]
            in_include_file = True
            inc_line_count = 0
            continue

        if not in_include_file:
            line_count += 1
        else:
            inc_line_count += 1
            m = re.match(
                r'^#\+\+\+\+ END INLINED INCLUDE FILE ' + inc_file, line)
            if m:
                in_include_file = False
                inc_file = None
                continue

        m = section_re.match(line)
        if m:
            # new section heading detected
            heading = m.groups()[0]
            level = section_level(heading)
            # unwind to the current section level
            while len(sections) > level - 1:
                sections.pop()
            sections.append(heading)
            continue

        if pattern_re.search(line):
            # Found a pattern match.
            # Print the file name
            if in_include_file:
                curr_file = os.path.join(workflowdir, inc_file)
                line_no = inc_line_count
            else:
                curr_file = flow_file
                line_no = line_count

            if curr_file != prev_file:
                prev_file = curr_file
                print("\nFILE:", curr_file)

            # Print the nested section headings
            section_key = '->'.join(sections)
            if section_key != prev_section_key:
                prev_section_key = section_key
                print(' SECTION:', section_key)

            # Print the pattern match, with line number
            print(' (' + str(line_no) + '):', line.rstrip('\n'))

    if not options.search_bin:
        sys.exit(0)

    # search files in workflow bin directory
    bin_ = os.path.join(workflowdir, 'bin')

    if not os.path.isdir(bin_):
        print("\nWorkflow " + workflow + " has no bin directory",
              file=sys.stderr)
        sys.exit(0)

    for name in os.listdir(bin_):
        if name.startswith('.'):
            # skip hidden dot-files
            # (e.g. vim editor temporary files)
            continue
        new_file = True
        try:
            # with-block closes the handle on all paths (the original
            # leaked the handle if readlines() raised)
            with open(os.path.join(bin_, name), 'r') as handle:
                contents = handle.readlines()
        except IOError as exc:
            # e.g. there's a sub-directory under bin; ignore it.
            print('Unable to open file ' + os.path.join(bin_, name),
                  file=sys.stderr)
            print(exc, file=sys.stderr)
            continue
        count = 0
        for line in contents:
            line = line.rstrip('\n')
            count += 1
            if pattern_re.search(line):
                if new_file:
                    print('\nFILE:', os.path.join(bin_, name))
                    new_file = False
                print(' (' + str(count) + '): ' + line)
def main(parser, options, reg):
    """Implement "cylc list": print a workflow's tasks/namespaces.

    Output mode is chosen from the options: first-parent tree (``-t``),
    node labels over a cycle-point range (``--crange``), or a flat namespace
    listing, optionally annotated with MRO or titles.
    """
    workflow, flow_file = parse_workflow_arg(options, reg)
    if options.all_tasks and options.all_namespaces:
        parser.error("Choose either -a or -n")
    if options.all_tasks:
        which = "all tasks"
    elif options.all_namespaces:
        which = "all namespaces"
    elif options.crange:
        which = "crange"
        try:
            tr_start, tr_stop = options.crange.split(',')
        except ValueError:
            # a single point means start == stop
            tr_start = tr_stop = options.crange
    else:
        which = "graphed tasks"

    if options.tree:
        # Use .get(): $LANG may be unset (e.g. cron / minimal environments),
        # in which case os.environ['LANG'] raised KeyError here.
        if os.environ.get('LANG') == 'C' and options.box:
            print("WARNING, ignoring -t/--tree: $LANG=C", file=sys.stderr)
            options.tree = False

    if options.titles and options.mro:
        parser.error("Please choose --mro or --title, not both")

    if options.tree and any(
            [options.all_tasks, options.all_namespaces, options.mro]):
        print("WARNING: -t chosen, ignoring non-tree options.",
              file=sys.stderr)

    config = WorkflowConfig(
        workflow, flow_file, options,
        load_template_vars(options.templatevars, options.templatevars_file))

    if options.tree:
        config.print_first_parent_tree(
            pretty=options.box, titles=options.titles)
    elif options.crange:
        for node in sorted(config.get_node_labels(tr_start, tr_stop)):
            print(node)
    else:
        result = config.get_namespace_list(which)
        namespaces = sorted(result)
        if options.mro or options.titles:
            # compute padding so the annotation columns line up
            # (default=0 guards against an empty namespace list)
            maxlen = max((len(ns) for ns in namespaces), default=0)
            padding = maxlen * ' '
        for ns in namespaces:
            if options.mro:
                print(ns, padding[0:len(padding) - len(ns)], end=' ')
                print(' '.join(config.get_mro(ns)))
            elif options.titles:
                print(ns, padding[0:len(padding) - len(ns)], end=' ')
                print(result[ns])
            else:
                print(ns)
def get_config(flow, opts, template_vars=None):
    """Return a WorkflowConfig object for the provided reg / path.

    Resolves *flow* (a registered name or a file path) to a concrete
    workflow name and flow file before constructing the config.
    """
    name, flow_file = parse_workflow_arg(opts, flow)
    return WorkflowConfig(name, flow_file, opts, template_vars=template_vars)
def main(_, options, reg):
    """cylc validate CLI.

    Parse and validate a workflow definition: warn about sequences out of
    bounds of the initial cycle point, then instantiate every task at the
    initial point to force evaluation of its trigger expressions.
    Raises WorkflowConfigError on any task/trigger failure.
    """
    profiler = Profiler(None, options.profile_mode)
    profiler.start()

    if cylc.flow.flags.verbosity < 2:
        # for readability omit timestamps from logging unless in debug mode
        for handler in LOG.handlers:
            if isinstance(handler.formatter, CylcLogFormatter):
                handler.formatter.configure(timestamp=False)

    workflow, flow_file = parse_workflow_arg(options, reg)
    cfg = WorkflowConfig(
        workflow, flow_file, options,
        load_template_vars(options.templatevars, options.templatevars_file),
        output_fname=options.output, mem_log_func=profiler.log_memory)

    # Check bounds of sequences
    out_of_bounds = [
        str(seq) for seq in cfg.sequences
        if seq.get_first_point(cfg.start_point) is None
    ]
    if out_of_bounds:
        if len(out_of_bounds) > 1:
            # avoid spamming users with multiple warnings
            out_of_bounds_str = '\n'.join(
                textwrap.wrap(', '.join(out_of_bounds), 70))
            msg = ("multiple sequences out of bounds for initial cycle point "
                   f"{cfg.start_point}:\n{out_of_bounds_str}")
        else:
            msg = (f"{out_of_bounds[0]}: sequence out of bounds for "
                   f"initial cycle point {cfg.start_point}")
        LOG.warning(msg)

    # Instantiate tasks and force evaluation of trigger expressions.
    # (Taken from config.py to avoid circular import problems.)
    # TODO - This is not exhaustive, it only uses the initial cycle point.
    if cylc.flow.flags.verbosity > 0:
        print('Instantiating tasks to check trigger expressions')
    flow_label = FlowLabelMgr().get_new_label()
    for name, taskdef in cfg.taskdefs.items():
        try:
            itask = TaskProxy(taskdef, cfg.start_point, flow_label)
        except TaskProxySequenceBoundsError:
            # Should have already been reported by the bounds check above.
            mesg = 'Task out of bounds for %s: %s\n' % (cfg.start_point, name)
            if cylc.flow.flags.verbosity > 0:
                sys.stderr.write(' + %s\n' % mesg)
            continue
        except Exception as exc:
            # Chain the cause so the original traceback is not lost.
            raise WorkflowConfigError(
                'failed to instantiate task %s: %s' % (name, exc)) from exc

        # force trigger evaluation now
        try:
            itask.state.prerequisites_eval_all()
        except TriggerExpressionError as exc:
            err = str(exc)
            if '@' in err:
                print(
                    f"ERROR, {name}: xtriggers can't be in conditional"
                    f" expressions: {err}", file=sys.stderr)
            else:
                print('ERROR, %s: bad trigger: %s' % (name, err),
                      file=sys.stderr)
            raise WorkflowConfigError("ERROR: bad trigger") from exc
        except Exception as exc:
            print(str(exc), file=sys.stderr)
            raise WorkflowConfigError(
                '%s: failed to evaluate triggers.' % name) from exc
        if cylc.flow.flags.verbosity > 0:
            print(' + %s ok' % itask.identity)

    print(cparse('<green>Valid for cylc-%s</green>' % CYLC_VERSION))
    profiler.stop()
def _run_editor(editor, target):
    """Run the user's editor on *target*, blocking until it exits.

    Raises CylcError if the editor returns a non-zero exit status.
    """
    # shlex.split handles editors invoked with options, e.g. 'emacs -nw',
    # including quoted arguments — unlike the previous re.split(' ', ...) —
    # and matches how the view command invokes the editor.
    command_list = shlex.split(editor)
    command_list.append(target)
    command = ' '.join(command_list)
    # THIS BLOCKS UNTIL THE COMMAND COMPLETES
    retcode = call(command_list)
    if retcode != 0:
        # the command returned non-zero exit status
        raise CylcError(f'{command} failed: {retcode}')


def main(parser, options, *args):
    """Implement "cylc edit": edit a workflow definition.

    Without ``--inline``, simply opens the flow.cylc file in the configured
    editor. With ``--inline``, inlines all include-files into one temporary
    view for editing, then splits the edits back out to the include-files,
    keeping backups at every step.
    """
    flow_file = parse_workflow_arg(options, args[0])[1]

    if options.geditor:
        editor = glbl_cfg().get(['editors', 'gui'])
    else:
        editor = glbl_cfg().get(['editors', 'terminal'])

    workflowdir = os.path.dirname(flow_file)

    if options.cleanup:
        # remove backup files left by inlined editing sessions
        cleanup(workflowdir)
        sys.exit(0)

    if not options.inline:
        # plain old editing.
        # move to workflow def dir
        os.chdir(workflowdir)

        # edit the flow.cylc file
        if not os.path.isfile(flow_file):
            raise UserInputError(f'file not found: {flow_file}')

        _run_editor(editor, flow_file)
        # !!!EDITING FINISHED!!!
        sys.exit(0)

    # read the flow.cylc file
    if os.path.isfile(flow_file):
        # back up the original
        backup(flow_file)
        # record original modtime
        modtimes[flow_file] = os.stat(flow_file).st_mtime
        # read the file (context manager guarantees closure)
        with open(flow_file, 'r') as handle:
            lines0 = handle.readlines()

        if lines0[0].startswith('# !WARNING! CYLC EDIT INLINED'):
            # a previous inlined-edit session was interrupted
            print('WARNING: RECOVERING A PREVIOUSLY INLINED FILE')
            recovery = True
            lines = lines0
        else:
            recovery = False
            lines = inline(lines0, workflowdir, flow_file, for_edit=True)
    else:
        parser.error(f"File not found: {flow_file}")

    lines = [i.rstrip() for i in lines]

    # overwrite the (now backed up) original with the inlined file:
    with open(flow_file, 'wb') as handle:
        for line in lines:
            handle.write((line + '\n').encode())

    print('PRE-EDIT BACKUPS:')
    for file in backups:
        src = re.sub(workflowdir + '/', '', file)
        dst = re.sub(workflowdir + '/', '', backups[file])
        print(' + ' + src + ' ---> ' + dst)

    _run_editor(editor, flow_file)
    print('EDITING DONE')

    # Now back up the inlined file in case of absolute disaster, so as the
    # user or his editor corrupting the inlined-include-file marker lines.
    inlined_flow_file_backup = (
        workflowdir + '/flow.cylc.INLINED.EDIT.' +
        get_current_time_string(override_use_utc=True, use_basic_format=True)
    )
    copy(flow_file, inlined_flow_file_backup)

    # read in the edited inlined file
    with open(flow_file, 'r') as handle:
        lines = handle.readlines()

    # split it back into separate files
    split_file(workflowdir, lines, flow_file, recovery)

    print(f' + edited: {flow_file}')
    print(f' + backup: {inlined_flow_file_backup}')
    print('INCLUDE-FILES WRITTEN:')
    for file in newfiles:
        f = re.sub(workflowdir + '/', '', file)
        if re.search(r'\.EDIT\.NEW\.', f):
            print(' + ' + f + ' (!!! WARNING: original changed on disk !!!)')
        else:
            print(' + ' + f)
def main(parser, options, reg):
    """Implement "cylc view": show a processed workflow definition.

    Processes the flow.cylc file per the view options (inlining,
    Jinja2/EmPy expansion, line continuation) and either prints it to
    stdout or opens a read-only temporary copy in the configured editor.
    """
    workflow, flow_file = parse_workflow_arg(options, reg)
    if options.geditor:
        editor = glbl_cfg().get(['editors', 'gui'])
    else:
        editor = glbl_cfg().get(['editors', 'terminal'])
    # read in the flow.cylc file
    # (map view options onto the read_and_proc() view configuration)
    viewcfg = {
        'mark': options.mark,
        'single': options.single,
        'label': options.label,
        'empy': options.empy or options.process,
        'jinja2': options.jinja2 or options.process,
        'contin': options.cat or options.process,
        'inline': (options.inline or options.jinja2 or options.empy
                   or options.process),
    }
    lines = read_and_proc(
        flow_file,
        load_template_vars(options.templatevars, options.templatevars_file),
        viewcfg=viewcfg, asedit=options.asedit)
    if options.stdout:
        # print the processed file and stop — no editor involved
        for line in lines:
            print(line)
        sys.exit(0)
    # write to a temporary file
    viewfile = NamedTemporaryFile(
        suffix=".flow.cylc", prefix=workflow.replace('/', '_') + '.',
    )
    for line in lines:
        viewfile.write((line + '\n').encode())
    viewfile.seek(0, 0)
    # set the file to be read only
    os.chmod(viewfile.name, 0o400)
    # capture the temp file's mod time in case the user edits it
    # and overrides the readonly mode.
    modtime1 = os.stat(viewfile.name).st_mtime
    # in case editor has options, e.g. 'emacs -nw':
    command_list = shlex.split(editor)
    command_list.append(viewfile.name)
    command = ' '.join(command_list)
    # THIS BLOCKS UNTIL THE COMMAND COMPLETES
    retcode = call(command_list)
    if retcode != 0:
        # the command returned non-zero exit status
        raise CylcError(f'{command} failed: {retcode}')
    # !!!VIEWING FINISHED!!!
    # Did the user edit the file? (mod time changed despite read-only mode)
    modtime2 = os.stat(viewfile.name).st_mtime
    if modtime2 > modtime1:
        print()
        print('WARNING: YOU HAVE EDITED A TEMPORARY READ-ONLY WORKFLOW COPY:',
              file=sys.stderr)
        print(viewfile.name, file=sys.stderr)
        print('In future use \'cylc edit\' to edit a workflow.',
              file=sys.stderr)
        print()
    # DONE — closing the NamedTemporaryFile deletes it
    viewfile.close()