def get_wf_steps(self):
    """Merge every workflow step into one CommentedMap keyed by step id.

    Each step's ``to_dict_with_id_key()`` result is folded into a single
    mapping, preserving insertion order.
    """
    merged = CommentedMap()
    for entry in self.steps:
        merged.update(entry.to_dict_with_id_key())
    return merged
def get_wf_inputs(self):
    """Merge every workflow input into one CommentedMap keyed by input id.

    Returns:
        CommentedMap: mapping built by folding in each input's
        ``to_dict_with_id_key()`` result, preserving insertion order.
    """
    inputs_dict = CommentedMap()
    # Renamed loop variable: the original used `input`, shadowing the builtin.
    for wf_input in self.inputs:
        inputs_dict.update(wf_input.to_dict_with_id_key())
    return inputs_dict
def get_wf_outputs(self):
    """Merge every workflow output into one CommentedMap keyed by output id.

    Each output is an instance of WorkflowOutputParameter; its
    ``to_dict_with_id_key()`` result is folded into the returned mapping.
    """
    collected = CommentedMap()
    for out_param in self.outputs:
        collected.update(out_param.to_dict_with_id_key())
    return collected
def create(self, args, tccConfigObject):
    """Create a config element from CLI args and record it in the YAML config.

    Validates args (exiting the process on failure), copies mandatory
    attributes onto self, maintains back-references on any referenced
    element, ensures the element-category list exists, invokes the
    element-specific create method, then appends the element's attributes
    and persists the YAML.

    Returns self on success, or False when the create call reports an error.
    """
    result = self.check(args)
    if not result:
        sys.exit()
    # Copy each mandatory attribute from the parsed args onto this element.
    for attribute in self.mandatoryAttributes:
        setattr(self,attribute,getattr(args,attribute))
        # A 'ref' attribute points at another element: record a back-ref
        # from that element to this one, both on the object and in the config.
        if self.mandatoryAttributes[attribute] == 'ref':
            refElementCategory = element_lookup[attribute]
            refElement = tcc.get(refElementCategory,getattr(args, attribute))
            back_refs = refElement.back_refs
            back_refs.append(args.name)
            for element in tccConfigObject[refElementCategory]:
                if element['name'] == refElement.name:
                    element['back_refs'] = back_refs
    # Make sure the config has a (possibly empty) list for this category.
    if not tccConfigObject:
        tccConfigObject = CommentedMap([(self.elementCategory, None)])
    elif self.elementCategory not in tccConfigObject:
        tccConfigObject.update(CommentedMap([(self.elementCategory, None)]))
    if not tccConfigObject[self.elementCategory]:
        tccConfigObject[self.elementCategory] = []
    # Snapshot the attribute values that will be written to the config.
    attributes = copy.deepcopy(self.mandatoryAttributes)
    for element in self.mandatoryAttributes:
        attributes[element] = getattr(self, element)
    if hasattr(self,'back_refs'):
        attributes['back_refs'] = self.back_refs
    del self.mandatoryAttributes
    # Network settings copied onto self — presumably consumed by the
    # element-specific create method below; TODO confirm.
    self.mgmtNetmask = tccConfigObject['Network']['mgmt']['netmask']
    self.mgmtGateway = tccConfigObject['Network']['mgmt']['gateway']
    self.mgmtDns = tccConfigObject['Network']['mgmt']['dns']
    self.vxlanNetmask = tccConfigObject['Network']['vxlan']['netmask']
    # Dispatch to the element-specific creation routine named by createMethod.
    createFunction = getattr(self, self.createMethod)
    result = createFunction()
    if 'Error' in result:
        print result['Error']
        return False
    # Service elements store their terminal as a [{terminal: Id}] list;
    # other elements just record their own Id (when one was assigned).
    if args.type == 'service':
        terminalDict = { attributes['terminal']:self.Id }
        attributes['terminal'] = []
        attributes['terminal'].append(terminalDict)
    else:
        if hasattr(self,'Id'):
            attributes['Id'] = self.Id
    tccConfigObject[self.elementCategory].append(attributes)
    self.updateYaml(tccConfigObject)
    return self
def create(self, args, tccConfigObject):
    """Create a config element from CLI args and record it in the YAML config.

    Same flow as the sibling variant: validate args (exiting on failure),
    copy mandatory attributes, maintain back-refs on referenced elements,
    ensure the category list exists, run the element-specific create method,
    then append the attributes and persist the YAML.

    Returns self on success, or False when the create call reports an error.
    """
    result = self.check(args)
    if not result:
        sys.exit()
    # Copy each mandatory attribute from the parsed args onto this element.
    for attribute in self.mandatoryAttributes:
        setattr(self, attribute, getattr(args, attribute))
        # A 'ref' attribute points at another element: record a back-ref
        # from that element to this one, both on the object and in the config.
        if self.mandatoryAttributes[attribute] == "ref":
            refElementCategory = element_lookup[attribute]
            refElement = tcc.get(refElementCategory, getattr(args, attribute))
            back_refs = refElement.back_refs
            back_refs.append(args.name)
            for element in tccConfigObject[refElementCategory]:
                if element["name"] == refElement.name:
                    element["back_refs"] = back_refs
    # Make sure the config has a (possibly empty) list for this category.
    if not tccConfigObject:
        tccConfigObject = CommentedMap([(self.elementCategory, None)])
    elif self.elementCategory not in tccConfigObject:
        tccConfigObject.update(CommentedMap([(self.elementCategory, None)]))
    if not tccConfigObject[self.elementCategory]:
        tccConfigObject[self.elementCategory] = []
    # Snapshot the attribute values that will be written to the config.
    attributes = copy.deepcopy(self.mandatoryAttributes)
    for element in self.mandatoryAttributes:
        attributes[element] = getattr(self, element)
    if hasattr(self, "back_refs"):
        attributes["back_refs"] = self.back_refs
    del self.mandatoryAttributes
    # Network settings copied onto self — presumably consumed by the
    # element-specific create method below; TODO confirm.
    self.mgmtNetmask = tccConfigObject["Network"]["mgmt"]["netmask"]
    self.mgmtGateway = tccConfigObject["Network"]["mgmt"]["gateway"]
    self.mgmtDns = tccConfigObject["Network"]["mgmt"]["dns"]
    self.vxlanNetmask = tccConfigObject["Network"]["vxlan"]["netmask"]
    # Dispatch to the element-specific creation routine named by createMethod.
    createFunction = getattr(self, self.createMethod)
    result = createFunction()
    if "Error" in result:
        print result["Error"]
        return False
    # Service elements store their terminal as a [{terminal: Id}] list;
    # other elements just record their own Id (when one was assigned).
    if args.type == "service":
        terminalDict = {attributes["terminal"]: self.Id}
        attributes["terminal"] = []
        attributes["terminal"].append(terminalDict)
    else:
        if hasattr(self, "Id"):
            attributes["Id"] = self.Id
    tccConfigObject[self.elementCategory].append(attributes)
    self.updateYaml(tccConfigObject)
    return self
def _commit_head(self, tag: AnyStr = None, comment: AnyStr = None):
    """Snapshot the pending edits into a version entry near the head.

    Copies the working data (``_curr_data``, falling back to ``data``) into a
    fresh CommentedMap.  When *tag* is given, that entry is overwritten;
    otherwise a new tag is generated and inserted at position 2, right after
    the '__current'/'__type' header keys.  '__current' is then pointed at the
    written tag and the working state is reset.  Returns self for chaining.
    """
    snapshot = CommentedMap()
    snapshot.update(self._curr_data or self.data)
    if not tag:
        tag = self._make_tag()
        self._raw.insert(2, tag, snapshot, comment=comment)
    else:
        self._raw[tag] = snapshot
    self._raw['__current'] = tag
    self._curr_version = None
    self._curr_data = None
    return self
def save_task(task):
    """Serialize *task* into a CommentedMap suitable for YAML or a Changeset.

    Keys are emitted in a fixed order: changeId, previousId, target, the
    status fields, implementation, inputs, changes, messages, dependencies,
    outputs/result (or the literal string "skipped" when there is no
    result), the configurator's digest, and finally summary.  Falsy
    optional fields are omitted.

    .. code-block:: YAML

      changeId:
      target:
      implementation:
      inputs:
      changes:
      dependencies:
      messages:
      outputs:
      result:  # an object or "skipped"
    """
    doc = CommentedMap()
    doc["changeId"] = task.changeId
    if task.previousId:
        doc["previousId"] = task.previousId
    if task.target:
        doc["target"] = task.target.key
    save_status(task, doc)
    doc["implementation"] = save_config_spec(task.configSpec)
    # only serialize resolved inputs
    if task._resolved_inputs:
        doc["inputs"] = serialize_value(task._resolved_inputs)
    resource_changes = save_resource_changes(task._resourceChanges)
    if resource_changes:
        doc["changes"] = resource_changes
    if task.messages:
        doc["messages"] = task.messages
    deps = [save_dependency(dep) for dep in task.dependencies]
    if deps:
        doc["dependencies"] = deps
    result = task.result
    if not result:
        doc["result"] = "skipped"
    else:
        if result.outputs:
            doc["outputs"] = save_result(result.outputs)
        if result.result:
            doc["result"] = save_result(result.result)
    doc.update(task.configurator.save_digest(task))
    doc["summary"] = task.summary()
    return doc
def test_section_yaml_obj():
    """Section should register itself (with a None value) in the parent map."""
    commented_map = CommentedMap(
        {"section1": {"subsection1": {"subsection2": "item"}}})
    sec = Section("MAIN_SECTION", parent_yaml=commented_map)
    # Build the expected mapping explicitly.  The original asserted against
    # `commented_map.update({...})`, but dict.update() returns None, so the
    # test was really comparing sec.yaml_obj to None.
    expected = CommentedMap(
        {"section1": {"subsection1": {"subsection2": "item"}}})
    expected.update({"MAIN_SECTION": None})
    assert sec.yaml_obj == expected
def create(self, args, tccConfigObject):
    """Create a config element from CLI args and record it in the YAML config.

    Validates args (exiting the process on failure), copies mandatory
    attributes onto self, maintains back-references on any referenced
    element, ensures the element-category list exists, invokes the
    element-specific create method, then appends the element's attributes
    and persists the YAML.

    Returns self on success, or False when the create call reports an error.
    """
    result = self.check(args)
    if not result:
        sys.exit()
    # Copy each mandatory attribute from the parsed args onto this element.
    for attribute in self.mandatoryAttributes:
        setattr(self,attribute,getattr(args,attribute))
        # A 'ref' attribute points at another element: record a back-ref
        # from that element to this one, both on the object and in the config.
        if self.mandatoryAttributes[attribute] == 'ref':
            refElementCategory = element_lookup[attribute]
            refElement = tcc.get(refElementCategory,getattr(args, attribute))
            back_refs = refElement.back_refs
            back_refs.append(args.name)
            for element in tccConfigObject[refElementCategory]:
                if element['name'] == refElement.name:
                    element['back_refs'] = back_refs
    # Make sure the config has a (possibly empty) list for this category.
    if not tccConfigObject:
        tccConfigObject = CommentedMap([(self.elementCategory, None)])
    elif self.elementCategory not in tccConfigObject:
        tccConfigObject.update(CommentedMap([(self.elementCategory, None)]))
    if not tccConfigObject[self.elementCategory]:
        tccConfigObject[self.elementCategory] = []
    # Snapshot the attribute values that will be written to the config.
    attributes = copy.deepcopy(self.mandatoryAttributes)
    for element in self.mandatoryAttributes:
        attributes[element] = getattr(self, element)
    if hasattr(self,'back_refs'):
        attributes['back_refs'] = self.back_refs
    del self.mandatoryAttributes
    # Network settings copied onto self — presumably consumed by the
    # element-specific create method below; TODO confirm.
    self.mgmtNetmask = tccConfigObject['Network']['mgmt']['netmask']
    self.mgmtGateway = tccConfigObject['Network']['mgmt']['gateway']
    self.mgmtDns = tccConfigObject['Network']['mgmt']['dns']
    self.vxlanNetmask = tccConfigObject['Network']['vxlan']['netmask']
    # Dispatch to the element-specific creation routine named by createMethod.
    createFunction = getattr(self, self.createMethod)
    result = createFunction()
    if 'Error' in result:
        print result['Error']
        return False
    if hasattr(self,'Id'):
        attributes['Id'] = self.Id
    tccConfigObject[self.elementCategory].append(attributes)
    self.updateYaml(tccConfigObject)
    return self
def render(container_names, version=3):
    """Render a compose-style YAML document for the given container names.

    version 1 dumps the bare merged services map; version 3 wraps it in
    {'version': '3', 'services': ...} plus an optional 'networks' key.
    Raises NotImplementedError for any other version.  Returns the YAML text.
    """
    struct = {}
    networks = []
    for cname in container_names:
        # NOTE(review): `networks` is reassigned on every iteration, so only
        # the last container's networks survive — confirm this is intended.
        cfile, networks = _generate(cname)
        struct.update(cfile)
    with io.StringIO() as buffer:
        if version == 1:
            y.dump(CommentedMap(struct), buffer)
        elif version == 3:
            d = CommentedMap({'version': '3', 'services': struct})
            if networks:
                d.update({'networks': networks})
            y.dump(d, buffer)
        else:
            raise NotImplementedError(
                'Not implemented for version {}'.format(version))
        # getvalue() must run before the context manager closes the buffer.
        return buffer.getvalue()
def workflow_clean(document):  # type: (MutableMapping[Text, Any]) -> None
    """Transform draft-3 style Workflows to more idiomatic v1.0.

    Rewrites output `source` fields to `outputSource`, converts each step's
    `inputs`/`outputs` lists to v1.0 `in`/`out` (stripping the step-id
    prefix from identifiers), normalizes `scatter`, renames `description`
    to `doc`, and replaces the steps list with an id-keyed map.  Mutates
    `document` in place.
    """
    input_output_clean(document)
    hints_and_requirements_clean(document)
    outputs = document['outputs']
    for output_id in outputs:
        outputs[output_id]["outputSource"] = \
            outputs[output_id].pop("source").lstrip('#').replace(".", "/")
    new_steps = CommentedMap()
    for step in document["steps"]:
        new_step = CommentedMap()
        new_step.update(step)
        step = new_step
        step_id = step.pop("id")
        step_id_len = len(step_id) + 1  # +1 for the separator after the id
        # outputs -> out: bare identifiers with the step-id prefix removed.
        step["out"] = []
        for outp in step["outputs"]:
            clean_outp_id = outp["id"]
            if clean_outp_id.startswith(step_id):
                clean_outp_id = clean_outp_id[step_id_len:]
            step["out"].append(clean_outp_id)
        del step["outputs"]
        # inputs -> in: ident mapped to its source alone when that is the
        # only remaining field, otherwise to the whole (cleaned) entry.
        ins = CommentedMap()
        for inp in step["inputs"]:
            ident = inp["id"]
            if ident.startswith(step_id):
                ident = ident[step_id_len:]
            if 'source' in inp:
                inp["source"] = inp["source"].lstrip('#').replace(".", "/")
            del inp["id"]
            if len(inp) > 1:
                ins[ident] = inp
            elif len(inp) == 1:
                if "source" in inp:
                    ins[ident] = inp.popitem()[1]
                else:
                    ins[ident] = inp
            else:
                ins[ident] = {}
        step["in"] = ins
        del step["inputs"]
        if "scatter" in step:
            # FIX: use the isinstance() result directly instead of `== 1`.
            if isinstance(step["scatter"], (str, Text)):
                source = step["scatter"]
                if source.startswith(step_id):
                    source = source[step_id_len:]
                step["scatter"] = source
            elif isinstance(step["scatter"], list) and len(step["scatter"]) > 1:
                # FIX: build the cleaned list in a temporary.  The original
                # assigned step["scatter"] = [] and then iterated
                # step["scatter"], so the loop never ran and every scatter
                # source was lost.
                cleaned = []
                for source in step["scatter"]:
                    if source.startswith(step_id):
                        source = source[step_id_len:]
                    cleaned.append(source)
                step["scatter"] = cleaned
            else:
                source = step["scatter"][0]
                if source.startswith(step_id):
                    source = source[step_id_len:]
                step["scatter"] = source
        if "description" in step:
            step["doc"] = step.pop("description")
        new_steps[step_id.lstrip('#')] = step
    document["steps"] = new_steps
def __init__(self,
             toolpath_object,  # type: Dict[Text, Any]
             pos,  # type: int
             loadingContext,  # type: LoadingContext
             parentworkflowProv=None  # type: Optional[CreateProvProfile]
            ):  # type: (...) -> None
    """Initialize a WorkflowStep from its CWL description.

    Loads/constructs the embedded tool, binds the step's in/out entries to
    the tool's inputs/outputs (collecting validation errors), checks the
    feature requirements for subworkflows and scatter, wraps scattered
    parameter types in arrays, and wires up provenance bookkeeping.
    """
    if "id" in toolpath_object:
        self.id = toolpath_object["id"]
    else:
        self.id = "#step" + Text(pos)
    loadingContext = loadingContext.copy()
    # Requirements/hints accumulate from context, step, and overrides.
    loadingContext.requirements = (getdefault(loadingContext.requirements, [])
                                   + toolpath_object.get("requirements", [])
                                   + get_overrides(getdefault(loadingContext.overrides_list, []),
                                                   self.id).get("requirements", []))
    loadingContext.hints = getdefault(loadingContext.hints, []) + toolpath_object.get("hints", [])
    try:
        if isinstance(toolpath_object["run"], dict):
            self.embedded_tool = loadingContext.construct_tool_object(toolpath_object["run"], loadingContext)
        else:
            self.embedded_tool = load_tool(
                toolpath_object["run"], loadingContext)
    except validate.ValidationException as vexc:
        if loadingContext.debug:
            _logger.exception("Validation exception")
        raise WorkflowException(
            u"Tool definition %s failed validation:\n%s" %
            (toolpath_object["run"], validate.indent(str(vexc))))
    validation_errors = []
    self.tool = toolpath_object = copy.deepcopy(toolpath_object)
    bound = set()
    # Bind each step "in"/"out" entry to the matching tool input/output.
    for stepfield, toolfield in (("in", "inputs"), ("out", "outputs")):
        toolpath_object[toolfield] = []
        for index, step_entry in enumerate(toolpath_object[stepfield]):
            if isinstance(step_entry, string_types):
                param = CommentedMap()  # type: CommentedMap
                inputid = step_entry
            else:
                param = CommentedMap(six.iteritems(step_entry))
                inputid = step_entry["id"]
            shortinputid = shortname(inputid)
            found = False
            for tool_entry in self.embedded_tool.tool[toolfield]:
                frag = shortname(tool_entry["id"])
                if frag == shortinputid:
                    # If the step has a default for a parameter, we do not
                    # want the default of the tool to override it.
                    step_default = None
                    if "default" in param and "default" in tool_entry:
                        step_default = param["default"]
                    param.update(tool_entry)
                    param["_tool_entry"] = tool_entry
                    if step_default is not None:
                        param["default"] = step_default
                    found = True
                    bound.add(frag)
                    break
            if not found:
                if stepfield == "in":
                    # Unmatched inputs are kept but flagged as not connected.
                    param["type"] = "Any"
                    param["not_connected"] = True
                else:
                    validation_errors.append(
                        SourceLine(self.tool["out"], index).makeError(
                            "Workflow step output '%s' does not correspond to" % shortname(step_entry))
                        + "\n" + SourceLine(self.embedded_tool.tool, "outputs").makeError(
                            " tool output (expected '%s')" % (
                                "', '".join(
                                    [shortname(tool_entry["id"]) for tool_entry in
                                     self.embedded_tool.tool[toolfield]]))))
            param["id"] = inputid
            # Preserve source-location info for error reporting.
            param.lc.line = toolpath_object[stepfield].lc.data[index][0]
            param.lc.col = toolpath_object[stepfield].lc.data[index][1]
            param.lc.filename = toolpath_object[stepfield].lc.filename
            toolpath_object[toolfield].append(param)
    # Any required tool input (non-null type, no default) left unbound is an error.
    missing = []
    for i, tool_entry in enumerate(self.embedded_tool.tool["inputs"]):
        if shortname(tool_entry["id"]) not in bound:
            if "null" not in tool_entry["type"] and "default" not in tool_entry:
                missing.append(shortname(tool_entry["id"]))
    if missing:
        validation_errors.append(SourceLine(self.tool, "in").makeError(
            "Step is missing required parameter%s '%s'" %
            ("s" if len(missing) > 1 else "", "', '".join(missing))))
    if validation_errors:
        raise validate.ValidationException("\n".join(validation_errors))
    super(WorkflowStep, self).__init__(toolpath_object, loadingContext)
    if self.embedded_tool.tool["class"] == "Workflow":
        (feature, _) = self.get_requirement("SubworkflowFeatureRequirement")
        if not feature:
            raise WorkflowException(
                "Workflow contains embedded workflow but "
                "SubworkflowFeatureRequirement not in requirements")
    if "scatter" in self.tool:
        (feature, _) = self.get_requirement("ScatterFeatureRequirement")
        if not feature:
            raise WorkflowException(
                "Workflow contains scatter but ScatterFeatureRequirement "
                "not in requirements")
        inputparms = copy.deepcopy(self.tool["inputs"])
        outputparms = copy.deepcopy(self.tool["outputs"])
        scatter = aslist(self.tool["scatter"])
        method = self.tool.get("scatterMethod")
        if method is None and len(scatter) != 1:
            raise validate.ValidationException(
                "Must specify scatterMethod when scattering over multiple inputs")
        inp_map = {i["id"]: i for i in inputparms}
        for inp in scatter:
            if inp not in inp_map:
                raise validate.ValidationException(
                    SourceLine(self.tool, "scatter").makeError(
                        "Scatter parameter '%s' does not correspond to "
                        "an input parameter of this step, expecting '%s'"
                        % (shortname(inp), "', '".join(
                            shortname(k) for k in inp_map.keys()))))
            # Scattered inputs become arrays of their original type.
            inp_map[inp]["type"] = {"type": "array", "items": inp_map[inp]["type"]}
        if self.tool.get("scatterMethod") == "nested_crossproduct":
            nesting = len(scatter)
        else:
            nesting = 1
        # Outputs gain one array level per nesting level.
        for index in range(0, nesting):
            for oparam in outputparms:
                oparam["type"] = {"type": "array", "items": oparam["type"]}
        self.tool["inputs"] = inputparms
        self.tool["outputs"] = outputparms
    self.prov_obj = None  # type: Optional[CreateProvProfile]
    if loadingContext.research_obj:
        self.prov_obj = parentworkflowProv
        if self.embedded_tool.tool["class"] == "Workflow":
            self.parent_wf = self.embedded_tool.parent_wf
        else:
            self.parent_wf = self.prov_obj
def __init__(self, toolpath_object, pos, **kwargs):
    # type: (Dict[Text, Any], int, **Any) -> None
    """Initialize a WorkflowStep (legacy kwargs-based variant, Python 2).

    Loads/constructs the embedded tool via the `makeTool` factory, binds the
    step's in/out entries to the tool's inputs/outputs (collecting validation
    errors), checks subworkflow/scatter feature requirements, and wraps
    scattered parameter types in arrays.
    """
    if "id" in toolpath_object:
        self.id = toolpath_object["id"]
    else:
        self.id = "#step" + Text(pos)
    # Requirements/hints accumulate from the call kwargs plus the step itself.
    kwargs["requirements"] = kwargs.get("requirements", []) + toolpath_object.get("requirements", [])
    kwargs["hints"] = kwargs.get("hints", []) + toolpath_object.get("hints", [])
    try:
        if isinstance(toolpath_object["run"], dict):
            self.embedded_tool = kwargs.get("makeTool")(toolpath_object["run"], **kwargs)
        else:
            self.embedded_tool = load_tool(
                toolpath_object["run"], kwargs.get("makeTool"), kwargs,
                enable_dev=kwargs.get("enable_dev"),
                strict=kwargs.get("strict"),
                fetcher_constructor=kwargs.get("fetcher_constructor"))
    except validate.ValidationException as v:
        raise WorkflowException(
            u"Tool definition %s failed validation:\n%s" %
            (toolpath_object["run"], validate.indent(str(v))))
    validation_errors = []
    self.tool = toolpath_object = copy.deepcopy(toolpath_object)
    bound = set()
    # Bind each step "in"/"out" entry to the matching tool input/output.
    for stepfield, toolfield in (("in", "inputs"), ("out", "outputs")):
        toolpath_object[toolfield] = []
        for n, step_entry in enumerate(toolpath_object[stepfield]):
            if isinstance(step_entry, (str, unicode)):
                param = CommentedMap()  # type: CommentedMap
                inputid = step_entry
            else:
                param = CommentedMap(step_entry.iteritems())
                inputid = step_entry["id"]
            shortinputid = shortname(inputid)
            found = False
            for tool_entry in self.embedded_tool.tool[toolfield]:
                frag = shortname(tool_entry["id"])
                if frag == shortinputid:
                    param.update(tool_entry)  # type: ignore
                    found = True
                    bound.add(frag)
                    break
            if not found:
                if stepfield == "in":
                    # Unmatched inputs are kept with a permissive type.
                    param["type"] = "Any"
                else:
                    validation_errors.append(
                        SourceLine(self.tool["out"], n).makeError(
                            "Workflow step output '%s' does not correspond to" % shortname(step_entry))
                        + "\n" + SourceLine(self.embedded_tool.tool, "outputs").makeError(
                            " tool output (expected '%s')" % (
                                "', '".join(
                                    [shortname(tool_entry["id"]) for tool_entry in
                                     self.embedded_tool.tool[toolfield]]))))
            param["id"] = inputid
            # Preserve source-location info for error reporting.
            param.lc.line = toolpath_object[stepfield].lc.data[n][0]
            param.lc.col = toolpath_object[stepfield].lc.data[n][1]
            param.lc.filename = toolpath_object[stepfield].lc.filename
            toolpath_object[toolfield].append(param)
    # Any required tool input (non-null type, no default) left unbound is an error.
    missing = []
    for i, tool_entry in enumerate(self.embedded_tool.tool["inputs"]):
        if shortname(tool_entry["id"]) not in bound:
            if "null" not in tool_entry["type"] and "default" not in tool_entry:
                missing.append(shortname(tool_entry["id"]))
    if missing:
        validation_errors.append(SourceLine(self.tool, "in").makeError(
            "Step is missing required parameter%s '%s'" %
            ("s" if len(missing) > 1 else "", "', '".join(missing))))
    if validation_errors:
        raise validate.ValidationException("\n".join(validation_errors))
    super(WorkflowStep, self).__init__(toolpath_object, **kwargs)
    if self.embedded_tool.tool["class"] == "Workflow":
        (feature, _) = self.get_requirement("SubworkflowFeatureRequirement")
        if not feature:
            raise WorkflowException(
                "Workflow contains embedded workflow but SubworkflowFeatureRequirement not in requirements")
    if "scatter" in self.tool:
        (feature, _) = self.get_requirement("ScatterFeatureRequirement")
        if not feature:
            raise WorkflowException("Workflow contains scatter but ScatterFeatureRequirement not in requirements")
        inputparms = copy.deepcopy(self.tool["inputs"])
        outputparms = copy.deepcopy(self.tool["outputs"])
        scatter = aslist(self.tool["scatter"])
        method = self.tool.get("scatterMethod")
        if method is None and len(scatter) != 1:
            raise validate.ValidationException("Must specify scatterMethod when scattering over multiple inputs")
        inp_map = {i["id"]: i for i in inputparms}
        for s in scatter:
            if s not in inp_map:
                raise validate.ValidationException(
                    SourceLine(self.tool, "scatter").makeError(u"Scatter parameter '%s' does not correspond to an input parameter of this "
                                                               u"step, expecting '%s'" % (shortname(s), "', '".join(shortname(k) for k in inp_map.keys()))))
            # Scattered inputs become arrays of their original type.
            inp_map[s]["type"] = {"type": "array", "items": inp_map[s]["type"]}
        if self.tool.get("scatterMethod") == "nested_crossproduct":
            nesting = len(scatter)
        else:
            nesting = 1
        # Outputs gain one array level per nesting level.
        for r in xrange(0, nesting):
            for op in outputparms:
                op["type"] = {"type": "array", "items": op["type"]}
        self.tool["inputs"] = inputparms
        self.tool["outputs"] = outputparms
def __init__(
    self,
    toolpath_object: CommentedMap,
    pos: int,
    loadingContext: LoadingContext,
    parentworkflowProv: Optional[ProvenanceProfile] = None,
) -> None:
    """Initialize this WorkflowStep.

    Loads/constructs the embedded tool, propagates requirements/hints
    (including overrides and the tool's SchemaDefRequirement), binds the
    step's in/out entries to the tool's inputs/outputs while collecting
    validation errors, verifies subworkflow/scatter feature requirements,
    wraps scattered parameter types in arrays, and records provenance.

    Raises WorkflowException or ValidationException on invalid input.
    """
    debug = loadingContext.debug
    if "id" in toolpath_object:
        self.id = toolpath_object["id"]
    else:
        self.id = "#step" + str(pos)
    loadingContext = loadingContext.copy()
    # Requirements accumulate from context, step, and per-step overrides.
    loadingContext.requirements = copy.deepcopy(
        getdefault(loadingContext.requirements, []))
    assert loadingContext.requirements is not None  # nosec
    loadingContext.requirements.extend(
        toolpath_object.get("requirements", []))
    loadingContext.requirements.extend(
        cast(
            List[CWLObjectType],
            get_overrides(getdefault(loadingContext.overrides_list, []),
                          self.id).get("requirements", []),
        ))
    hints = copy.deepcopy(getdefault(loadingContext.hints, []))
    hints.extend(toolpath_object.get("hints", []))
    loadingContext.hints = hints
    try:
        if isinstance(toolpath_object["run"], CommentedMap):
            self.embedded_tool = loadingContext.construct_tool_object(
                toolpath_object["run"], loadingContext)  # type: Process
        else:
            loadingContext.metadata = {}
            self.embedded_tool = load_tool(toolpath_object["run"],
                                           loadingContext)
    except ValidationException as vexc:
        if loadingContext.debug:
            _logger.exception("Validation exception")
        raise WorkflowException(
            "Tool definition %s failed validation:\n%s" %
            (toolpath_object["run"], indent(str(vexc)))) from vexc
    validation_errors = []
    self.tool = toolpath_object = copy.deepcopy(toolpath_object)
    bound = set()
    # Propagate the embedded tool's schema definitions to the step.
    if self.embedded_tool.get_requirement("SchemaDefRequirement")[0]:
        if "requirements" not in toolpath_object:
            toolpath_object["requirements"] = []
        toolpath_object["requirements"].append(
            self.embedded_tool.get_requirement("SchemaDefRequirement")[0])
    # Bind each step "in"/"out" entry to the matching tool input/output.
    for stepfield, toolfield in (("in", "inputs"), ("out", "outputs")):
        toolpath_object[toolfield] = []
        for index, step_entry in enumerate(toolpath_object[stepfield]):
            if isinstance(step_entry, str):
                param = CommentedMap()  # type: CommentedMap
                inputid = step_entry
            else:
                param = CommentedMap(step_entry.items())
                inputid = step_entry["id"]
            shortinputid = shortname(inputid)
            found = False
            for tool_entry in self.embedded_tool.tool[toolfield]:
                frag = shortname(tool_entry["id"])
                if frag == shortinputid:
                    # If the step has a default for a parameter, we do not
                    # want the default of the tool to override it.
                    step_default = None
                    if "default" in param and "default" in tool_entry:
                        step_default = param["default"]
                    param.update(tool_entry)
                    param["_tool_entry"] = tool_entry
                    if step_default is not None:
                        param["default"] = step_default
                    found = True
                    bound.add(frag)
                    break
            if not found:
                if stepfield == "in":
                    # Unmatched inputs are kept but flagged as not connected.
                    param["type"] = "Any"
                    param["used_by_step"] = used_by_step(
                        self.tool, shortinputid)
                    param["not_connected"] = True
                else:
                    if isinstance(step_entry, Mapping):
                        step_entry_name = step_entry["id"]
                    else:
                        step_entry_name = step_entry
                    validation_errors.append(
                        SourceLine(self.tool["out"], index,
                                   include_traceback=debug).makeError(
                            "Workflow step output '%s' does not correspond to"
                            % shortname(step_entry_name))
                        + "\n" + SourceLine(
                            self.embedded_tool.tool,
                            "outputs",
                            include_traceback=debug,
                        ).makeError(" tool output (expected '%s')" % (
                            "', '".join([
                                shortname(tool_entry["id"])
                                for tool_entry in
                                self.embedded_tool.tool["outputs"]
                            ]))))
            param["id"] = inputid
            # Preserve source-location info for error reporting.
            param.lc.line = toolpath_object[stepfield].lc.data[index][0]
            param.lc.col = toolpath_object[stepfield].lc.data[index][1]
            param.lc.filename = toolpath_object[stepfield].lc.filename
            toolpath_object[toolfield].append(param)
    # Any required tool input (non-null type, no default) left unbound is an error.
    missing_values = []
    for _, tool_entry in enumerate(self.embedded_tool.tool["inputs"]):
        if shortname(tool_entry["id"]) not in bound:
            if "null" not in tool_entry[
                    "type"] and "default" not in tool_entry:
                missing_values.append(shortname(tool_entry["id"]))
    if missing_values:
        validation_errors.append(
            SourceLine(self.tool, "in", include_traceback=debug).makeError(
                "Step is missing required parameter%s '%s'" % (
                    "s" if len(missing_values) > 1 else "",
                    "', '".join(missing_values),
                )))
    if validation_errors:
        raise ValidationException("\n".join(validation_errors))
    super().__init__(toolpath_object, loadingContext)
    if self.embedded_tool.tool["class"] == "Workflow":
        (feature, _) = self.get_requirement("SubworkflowFeatureRequirement")
        if not feature:
            raise WorkflowException(
                "Workflow contains embedded workflow but "
                "SubworkflowFeatureRequirement not in requirements")
    if "scatter" in self.tool:
        (feature, _) = self.get_requirement("ScatterFeatureRequirement")
        if not feature:
            raise WorkflowException(
                "Workflow contains scatter but ScatterFeatureRequirement "
                "not in requirements")
        inputparms = copy.deepcopy(self.tool["inputs"])
        outputparms = copy.deepcopy(self.tool["outputs"])
        scatter = aslist(self.tool["scatter"])
        method = self.tool.get("scatterMethod")
        if method is None and len(scatter) != 1:
            raise ValidationException(
                "Must specify scatterMethod when scattering over multiple inputs"
            )
        inp_map = {i["id"]: i for i in inputparms}
        for inp in scatter:
            if inp not in inp_map:
                # BUG FIX: the original constructed this error and silently
                # discarded it, accepting invalid scatter parameters.
                raise SourceLine(
                    self.tool, "scatter", ValidationException,
                    debug).makeError(
                        "Scatter parameter '%s' does not correspond to "
                        "an input parameter of this step, expecting '%s'" % (
                            shortname(inp),
                            "', '".join(
                                shortname(k) for k in inp_map.keys()),
                        ))
            # Scattered inputs become arrays of their original type.
            inp_map[inp]["type"] = {
                "type": "array",
                "items": inp_map[inp]["type"]
            }
        if self.tool.get("scatterMethod") == "nested_crossproduct":
            nesting = len(scatter)
        else:
            nesting = 1
        # Outputs gain one array level per nesting level.
        for _ in range(0, nesting):
            for oparam in outputparms:
                oparam["type"] = {"type": "array", "items": oparam["type"]}
        self.tool["inputs"] = inputparms
        self.tool["outputs"] = outputparms
    self.prov_obj = None  # type: Optional[ProvenanceProfile]
    if loadingContext.research_obj is not None:
        self.prov_obj = parentworkflowProv
        if self.embedded_tool.tool["class"] == "Workflow":
            self.parent_wf = self.embedded_tool.parent_wf
        else:
            self.parent_wf = self.prov_obj
def dump(self):
    """Serialize this object: its 'id' first, then the io_object's fields."""
    serialized = CommentedMap()
    serialized['id'] = getattr(self, 'id')
    serialized.update(self.io_object.dump())
    return serialized
def _to_doc(d: DatasetDoc, with_formatting: bool):
    """Serialize a DatasetDoc into a mapping for YAML output.

    When `with_formatting` is true the result is a ruamel CommentedMap with
    comments, spacing, and compact numeric formatting applied; otherwise a
    plain dict.  Default-valued and None-valued fields are omitted.
    """
    if with_formatting:
        doc = CommentedMap()
        doc.yaml_set_comment_before_after_key("$schema", before="Dataset")
    else:
        doc = {}
    doc["$schema"] = ODC_DATASET_SCHEMA_URL
    doc.update(
        attr.asdict(
            d,
            recurse=True,
            dict_factory=CommentedMap if with_formatting else dict,
            # Exclude fields that are the default.
            # NOTE: the lambda's `attr` parameter shadows the attrs module here.
            filter=lambda attr, value: "doc_exclude" not in attr.metadata
            and value != attr.default
            # Exclude any fields set to None. The distinction should never matter in our docs.
            and value is not None,
            retain_collection_types=False,
        ))
    # Sort properties for readability.
    # PyCharm '19 misunderstands the type of a `sorted(dict.items())`
    # noinspection PyTypeChecker
    doc["properties"] = CommentedMap(
        sorted(doc["properties"].items(), key=_stac_key_order))
    if d.geometry is not None:
        doc["geometry"] = shapely.geometry.mapping(d.geometry)
    doc["id"] = str(d.id)
    if with_formatting:
        if "geometry" in doc:
            # Set some numeric fields to be compact yaml format.
            _use_compact_format(doc["geometry"], "coordinates")
        if "grids" in doc:
            for grid in doc["grids"].values():
                _use_compact_format(grid, "shape", "transform")
        # Add user-readable names for measurements as a comment if present.
        if d.measurements:
            for band_name, band_doc in d.measurements.items():
                if band_doc.alias and band_name.lower(
                ) != band_doc.alias.lower():
                    doc["measurements"].yaml_add_eol_comment(
                        band_doc.alias, band_name)
        # Blank lines between the major top-level sections.
        _add_space_before(
            doc,
            "label" if "label" in doc else "id",
            "crs",
            "properties",
            "measurements",
            "accessories",
            "lineage",
        )
        p: CommentedMap = doc["properties"]
        p.yaml_add_eol_comment("# Ground sample distance (m)", "eo:gsd")
    return doc
class MVYaml(object):
    """A multi-version YAML document.

    The underlying mapping holds two header keys ('__current', '__type')
    plus one entry per version, keyed by an ISO-timestamp tag.  Edits are
    made against a working copy (`_curr_data`) and committed as new
    versions; '__current' names the active version.
    """

    # Header keys that are never treated as version tags.
    protected_keys = (
        '__current',
        '__type',
    )

    def __init__(self, base64=False):
        self._b64 = base64
        self._raw = CommentedMap()
        self._yaml = YAML()
        self._curr_version = None  # explicitly selected version, if any
        self._curr_data = None  # working copy of the selected version
        self._create()

    def _create(self):
        """Build the initial document: header keys plus an empty first version."""
        tag = self._make_tag()
        self._raw[tag] = CommentedMap()
        self._raw.insert(0, '__current', tag, 'current version')
        self._raw.insert(1, '__type', None, 'base64 if value are base64')
        self._commit(tag=tag, comment='Initial version')

    def import_yaml(self, file: AnyStr = None, stream: AnyStr = None):
        """Import a plain YAML document (from file path or string) as a new version."""
        data = None
        if file:
            with open(file, 'r') as fp:
                data = fp.read()
        imported_data = self._yaml.load(data or stream)
        self.override(imported_data)
        return self

    def load(self, file_handler: AnyStr = None, stream_data: AnyStr = None):
        """Load an existing mvyaml document from a file path or string.

        Raises MVYamlFileException when the document lacks the mvyaml
        header keys.
        """
        data = None
        if file_handler:
            with open(file_handler, 'r') as fp:
                data = fp.read()
        self._raw = self._yaml.load(data or stream_data)
        # BUG FIX: the original checked `self.protected_keys not in
        # self._raw.keys()`, which asks whether the tuple itself is a key —
        # always true — so every load raised.  Verify each header key instead.
        if any(key not in self._raw.keys() for key in self.protected_keys):
            raise MVYamlFileException(
                f'Not a valid mvyaml file. Perhaps is a yaml you want to import with '
                f'import_yaml()?')
        return self

    def write(self, file_handler: IO = None, comment: AnyStr = None) -> [AnyStr, None]:
        """Commit pending changes and dump the document.

        Writes to *file_handler* when given (returning None), otherwise
        returns the YAML text.
        """
        if not self._raw:
            return
        if self._has_changes():
            self._commit(comment=comment)
        output = file_handler or StringIO()
        self._yaml.dump(self._raw, output)
        return output.getvalue() if not file_handler else None

    @property
    def versions(self):
        """All version tags, excluding the protected header keys."""
        if not self._raw:
            return []
        return [k for k in self._raw.keys() if k not in self.protected_keys]

    @property
    def current(self):
        """The tag named by the '__current' header key."""
        return self._raw['__current']

    @property
    def data(self):
        """A lazily-created working copy of the selected (or current) version."""
        if not self._curr_data:
            self._curr_data = deepcopy(
                self._raw[self._curr_version or self.current])
        return self._curr_data

    def with_version(self, version: str = '__current'):
        """Select *version* as the working version, discarding pending edits."""
        if version not in self.versions:
            raise MVYamlVersionNotFoundException(
                f'version {version} not found')
        self._curr_version = version
        self._curr_data = None
        return self

    @staticmethod
    def _make_tag() -> str:
        # Version tags are UTC ISO-8601 timestamps.
        d = datetime.utcnow().isoformat()
        return d

    def override(self, data: [Iterable]):
        """Replace the working data wholesale and commit it as a new version."""
        self._curr_data = CommentedMap()
        self._curr_data.update(data)
        self._commit(comment='Overridden')
        return self

    def _commit(self, *args, **kwargs):
        # Commit strategy: new versions are inserted at the head.
        return self._commit_head(*args, **kwargs)

    def _commit_head(self, tag: AnyStr = None, comment: AnyStr = None):
        """Apply the modifications on curr_data to the underlying opened
        version and create a new tag (inserted right after the header keys)."""
        commented_map = CommentedMap()
        commented_map.update(self._curr_data or self.data)
        if tag:
            self._raw[tag] = commented_map
            self._raw['__current'] = tag
        else:
            new_tag = self._make_tag()
            self._raw.insert(2, new_tag, commented_map, comment=comment)
            self._raw['__current'] = new_tag
        self._curr_version = None
        self._curr_data = None
        return self

    def _commit_tail(self, tag: AnyStr = None, comment: AnyStr = None):
        """Apply the modifications on curr_data to the underlying opened
        version and create a new tag (appended at the end of the document)."""
        commented_map = CommentedMap()
        commented_map.update(self._curr_data or self.data)
        if tag:
            self._raw[tag] = commented_map
            self._raw['__current'] = tag
        else:
            new_tag = self._make_tag()
            self._raw.insert(len(self._raw.keys()), new_tag, commented_map,
                             comment=comment)
            self._raw['__current'] = new_tag
        self._curr_version = None
        self._curr_data = None
        return self

    def _has_changes(self):
        """True when the working copy differs from the stored version."""
        orig = self._raw[self._curr_version or self.current]
        current = self._curr_data or self.data
        try:
            assert_equal(orig, current)
        except AssertionError:
            return True
        return False

    @property
    def changes(self) -> AnyStr:
        """A unified-style diff between the stored version and the working copy."""
        if not self._has_changes():
            return ''
        yaml_orig = as_yaml(self._raw[self._curr_version or self.current])
        yaml_curr = as_yaml(self._curr_data)
        differ = Differ()
        result = list(
            differ.compare(yaml_orig.splitlines(), yaml_curr.splitlines()))
        return '\n'.join(result)

    def set_current(self, version_label: AnyStr):
        """Point '__current' at *version_label* and select it as working version."""
        if version_label not in self.versions:
            raise MVYamlVersionNotFoundException(
                f'request version [{version_label}] not found')
        self._raw['__current'] = version_label
        self.with_version(version_label)
        return self
class YAMLRoundtripConfig(MutableConfigFile, MutableAbstractItemAccessMixin,
                          MutableAbstractDictFunctionsMixin):
    """
    Class for YAML-based (roundtrip) configurations
    """

    def __init__(self, owner: Any, manager: "m.StorageManager", path: str,
                 *args: List[Any], **kwargs: Dict[Any, Any]):
        self.data = CommentedMap()

        super().__init__(owner, manager, path, *args, **kwargs)

    def load(self):
        # Round-trip load keeps comments and key order; parse as YAML 1.2
        with open(self.path, "r") as fh:
            self.data = yaml.round_trip_load(fh, version=(1, 2))

    def reload(self):
        self.unload()
        self.load()

    def unload(self):
        self.data.clear()

    def save(self):
        if not self.mutable:
            raise RuntimeError("You may not modify a defaults file at runtime - check the mutable attribute!")

        with open(self.path, "w") as fh:
            yaml.round_trip_dump(self.data, fh)

    # region: CommentedMap functions

    def insert(self, pos, key, value, *, comment=None):
        """
        Insert a `key: value` pair at the given position, attaching a comment if provided

        Wrapper for `CommentedMap.insert()`
        """
        return self.data.insert(pos, key, value, comment)

    def add_eol_comment(self, comment, *, key=NoComment, column=30):
        """
        Add an end-of-line comment for a key at a particular column (30 by default)

        Wrapper for `CommentedMap.yaml_add_eol_comment()`
        """
        # Setting the column to None as the API actually defaults to will raise an exception, so we have to
        # specify one unfortunately
        return self.data.yaml_add_eol_comment(comment, key=key, column=column)

    def set_comment_before_key(self, key, comment, *, indent=0):
        """
        Set a comment before a given key

        Wrapper for `CommentedMap.yaml_set_comment_before_after_key()`
        """
        return self.data.yaml_set_comment_before_after_key(
            key, before=comment, indent=indent, after=None, after_indent=None
        )

    def set_start_comment(self, comment, indent=0):
        """
        Set the starting comment

        Wrapper for `CommentedMap.yaml_set_start_comment()`
        """
        return self.data.yaml_set_start_comment(comment, indent=indent)

    # endregion

    # region: Dict functions

    def clear(self):
        return self.data.clear()

    def copy(self):
        return self.data.copy()

    def get(self, key, default=None):
        return self.data.get(key, default)

    def items(self):
        return self.data.items()

    def keys(self):
        return self.data.keys()

    def pop(self, key, default=None):
        return self.data.pop(key, default)

    def popitem(self):
        return self.data.popitem()

    def setdefault(self, key, default=None):
        # Delegate to the underlying mapping; dict.setdefault already
        # implements the insert-if-missing-then-return semantics the
        # previous hand-rolled version reproduced.
        return self.data.setdefault(key, default)

    def update(self, other):
        return self.data.update(other)

    def values(self):
        return self.data.values()

    # endregion

    # Item access functions

    def __contains__(self, key):
        """
        Wrapper for `dict.__contains__()`
        """
        return self.data.__contains__(key)

    def __delitem__(self, key):
        """
        Wrapper for `dict.__delitem__()`
        """
        del self.data[key]

    def __getitem__(self, key):
        """
        Wrapper for `dict.__getitem__()`
        """
        return self.data.__getitem__(key)

    def __iter__(self):
        """
        Wrapper for `dict.__iter__()`
        """
        return self.data.__iter__()

    def __len__(self):
        """
        Wrapper for `dict.__len__()`
        """
        return self.data.__len__()

    def __setitem__(self, key, value):
        """
        Wrapper for `dict.__setitem__()`
        """
        # BUG FIX: the docstring previously claimed this wraps
        # `dict.__getitem__()`, which was incorrect.
        return self.data.__setitem__(key, value)
def main(docname):
    """Render a jinja-templated recipe file into plain YAML on stdout.

    ``{% set x = ... %}`` lines are executed to populate a ``context``
    mapping, bracketed ``[selector]`` suffixes on list items are rewritten
    as ``sel(...)`` mappings, jinja ``{{ ... }}`` values are quoted,
    ``skip: true  [selector]`` lines are collected into ``build.skip``,
    and each entry in ``outputs`` is normalized into package/build keys.
    """
    with open(docname, "r") as fi:
        lines = fi.readlines()

    context = {}
    rest_lines = []
    for line in lines:
        if "{%" in line:
            set_expr = re.search("{%(.*)%}", line)
            set_expr = set_expr.group(1)
            set_expr = set_expr.replace("set", "", 1).strip()
            # SECURITY: exec() runs arbitrary code taken from the input
            # document — only feed trusted recipe files to this tool.
            exec(set_expr, globals(), context)
        else:
            rest_lines.append(line)

    yaml = YAML(typ="rt")
    yaml.preserve_quotes = True
    yaml.default_flow_style = False
    yaml.indent(sequence=4, offset=2)
    yaml.width = 1000
    yaml.Representer = MyRepresenter
    yaml.Loader = ruamel.yaml.RoundTripLoader

    result_yaml = CommentedMap()
    result_yaml["context"] = context

    def has_selector(s):
        # a trailing "]" marks a conda-style line selector, e.g. "[win]"
        return s.strip().endswith("]")

    quoted_lines = []
    for line in rest_lines:
        if has_selector(line):
            selector_start = line.rfind("[")
            selector_end = line.rfind("]")
            selector_content = line[selector_start + 1 : selector_end]

            if line.strip().startswith("-"):
                # Cut the item value at the comment marker or the selector,
                # whichever comes first.  BUG FIX: rfind() returns -1 for a
                # missing "#", which previously poisoned min() and sliced
                # the line at position -1.
                markers = [i for i in (line.rfind("#"), line.rfind("[")) if i != -1]
                cut = min(markers) if markers else len(line)
                line = (
                    line[: line.find("-") + 1]
                    + f" sel({selector_content}): "
                    + line[line.find("-") + 1 : cut].strip()
                    + "\n"
                )
        quoted_lines.append(line)
    rest_lines = quoted_lines

    def check_if_quoted(s):
        s = s.strip()
        return s.startswith('"') or s.startswith("'")

    quoted_lines = []
    for line in rest_lines:
        if "{{" in line:
            # make sure that jinja stuff is quoted
            if line.find(":") != -1:
                idx = line.find(":")
            elif line.strip().startswith("-"):
                idx = line.find("-")
            else:
                # BUG FIX: previously `idx` leaked from an earlier loop
                # iteration here and the line was quoted at a stale
                # position; leave such lines untouched instead.
                quoted_lines.append(line)
                continue
            rest = line[idx + 1 :]

            if not check_if_quoted(rest):
                if "'" in rest:
                    rest = rest.replace("'", '"')
                line = line[: idx + 1] + f" '{rest.strip()}'\n"
        quoted_lines.append(line)
    rest_lines = quoted_lines

    skips, wo_skip_lines = [], []
    for line in rest_lines:
        if line.strip().startswith("skip"):
            parts = line.split(":")
            rhs = parts[1].strip()
            if rhs.startswith("true"):
                selector_start = line.rfind("[")
                selector_end = line.rfind("]")
                selector_content = line[selector_start + 1 : selector_end]
                skips.append(selector_content)
            else:
                print("ATTENTION skip: false not handled!")
        else:
            wo_skip_lines.append(line)
    rest_lines = wo_skip_lines

    result_yaml.update(
        ruamel.yaml.load("".join(rest_lines), ruamel.yaml.RoundTripLoader)
    )

    if len(skips) != 0:
        result_yaml["build"]["skip"] = skips

    if result_yaml.get("outputs"):
        for o in result_yaml["outputs"]:
            # hoist name/version/script into nested package/build mappings
            name = o["name"]
            package = {"name": name}
            del o["name"]
            if o.get("version"):
                package["version"] = o["version"]
                del o["version"]
            build = {}
            if o.get("script"):
                build["script"] = o["script"]
                del o["script"]
            o["package"] = package
            o["build"] = build

        for d in result_yaml["outputs"]:
            print(order_output_dict(d))
        result_yaml["outputs"] = [order_output_dict(d) for d in result_yaml["outputs"]]

    from io import StringIO

    output = StringIO()
    yaml.dump(result_yaml, output)

    # Hacky way to insert an empty line after the context-key-object
    context_output = StringIO()
    yaml.dump(context, context_output)
    context_output = context_output.getvalue()
    context_output_len = len(context_output.split("\n"))

    final_result = output.getvalue()
    final_result_lines = final_result.split("\n")
    final_result_lines.insert(context_output_len, "")

    print("\n".join(final_result_lines))
def __init__(self, toolpath_object, pos, **kwargs):
    # type: (Dict[Text, Any], int, **Any) -> None
    """Build a workflow step from its CWL description.

    Loads/instantiates the step's embedded tool, matches the step's
    ``in``/``out`` entries against the tool's ``inputs``/``outputs``,
    accumulates validation errors, and rewrites input/output types when
    the step scatters.  Raises WorkflowException or
    validate.ValidationException on invalid definitions.
    """
    # Use the declared id if present, otherwise synthesize one from the
    # step's position in the workflow.
    if "id" in toolpath_object:
        self.id = toolpath_object["id"]
    else:
        self.id = "#step" + Text(pos)

    # Step-level requirements/hints are prepended to whatever the caller
    # passed down, so they also apply to the embedded tool.
    kwargs["requirements"] = kwargs.get(
        "requirements", []) + toolpath_object.get("requirements", [])
    kwargs["hints"] = kwargs.get("hints", []) + toolpath_object.get(
        "hints", [])

    try:
        # "run" may be an inline tool definition (dict) or a reference to
        # an external document that must be loaded.
        if isinstance(toolpath_object["run"], dict):
            self.embedded_tool = kwargs.get("makeTool")(
                toolpath_object["run"], **kwargs)
        else:
            self.embedded_tool = load_tool(
                toolpath_object["run"], kwargs.get("makeTool"), kwargs,
                enable_dev=kwargs.get("enable_dev"),
                strict=kwargs.get("strict"),
                fetcher_constructor=kwargs.get("fetcher_constructor"))
    except validate.ValidationException as v:
        raise WorkflowException(
            u"Tool definition %s failed validation:\n%s" %
            (toolpath_object["run"], validate.indent(str(v))))

    validation_errors = []
    # Deep-copy so the matching below can mutate the step description
    # without touching the caller's document.
    self.tool = toolpath_object = copy.deepcopy(toolpath_object)
    bound = set()  # tool-input fragments that a step "in" entry binds
    for stepfield, toolfield in (("in", "inputs"), ("out", "outputs")):
        toolpath_object[toolfield] = []
        for n, step_entry in enumerate(toolpath_object[stepfield]):
            # An entry is either a bare id string or a mapping with "id".
            if isinstance(step_entry, (str, unicode)):
                param = CommentedMap()  # type: CommentedMap
                inputid = step_entry
            else:
                param = CommentedMap(step_entry.iteritems())
                inputid = step_entry["id"]

            shortinputid = shortname(inputid)
            found = False
            # Match the step entry to the embedded tool's parameter with
            # the same short name, merging the tool's fields into param.
            for tool_entry in self.embedded_tool.tool[toolfield]:
                frag = shortname(tool_entry["id"])
                if frag == shortinputid:
                    param.update(tool_entry)  # type: ignore
                    found = True
                    bound.add(frag)
                    break
            if not found:
                if stepfield == "in":
                    # Unmatched step inputs are allowed; type defaults to Any.
                    param["type"] = "Any"
                else:
                    # Unmatched step outputs are validation errors.
                    validation_errors.append(
                        SourceLine(self.tool["out"], n).makeError(
                            "Workflow step output '%s' does not correspond to"
                            % shortname(step_entry))
                        + "\n" + SourceLine(self.embedded_tool.tool, "outputs").
                        makeError(" tool output (expected '%s')" % (
                            "', '".join(
                                [shortname(tool_entry["id"])
                                 for tool_entry in
                                 self.embedded_tool.tool[toolfield]]))))
            param["id"] = inputid
            # Preserve source line/column info for later error reporting.
            param.lc.line = toolpath_object[stepfield].lc.data[n][0]
            param.lc.col = toolpath_object[stepfield].lc.data[n][1]
            param.lc.filename = toolpath_object[stepfield].lc.filename
            toolpath_object[toolfield].append(param)

    # Any tool input that is neither bound by the step nor optional
    # (nullable or defaulted) is a missing required parameter.
    missing = []
    for i, tool_entry in enumerate(self.embedded_tool.tool["inputs"]):
        if shortname(tool_entry["id"]) not in bound:
            if "null" not in tool_entry[
                    "type"] and "default" not in tool_entry:
                missing.append(shortname(tool_entry["id"]))

    if missing:
        validation_errors.append(
            SourceLine(self.tool, "in").makeError(
                "Step is missing required parameter%s '%s'" %
                ("s" if len(missing) > 1 else "", "', '".join(missing))))

    if validation_errors:
        raise validate.ValidationException("\n".join(validation_errors))

    super(WorkflowStep, self).__init__(toolpath_object, **kwargs)

    # Embedded sub-workflows require an explicit feature requirement.
    if self.embedded_tool.tool["class"] == "Workflow":
        (feature,
         _) = self.get_requirement("SubworkflowFeatureRequirement")
        if not feature:
            raise WorkflowException(
                "Workflow contains embedded workflow but SubworkflowFeatureRequirement not in requirements"
            )

    if "scatter" in self.tool:
        (feature, _) = self.get_requirement("ScatterFeatureRequirement")
        if not feature:
            raise WorkflowException(
                "Workflow contains scatter but ScatterFeatureRequirement not in requirements"
            )

        inputparms = copy.deepcopy(self.tool["inputs"])
        outputparms = copy.deepcopy(self.tool["outputs"])
        scatter = aslist(self.tool["scatter"])

        method = self.tool.get("scatterMethod")
        if method is None and len(scatter) != 1:
            raise validate.ValidationException(
                "Must specify scatterMethod when scattering over multiple inputs"
            )

        inp_map = {i["id"]: i for i in inputparms}
        for s in scatter:
            if s not in inp_map:
                raise validate.ValidationException(
                    SourceLine(self.tool, "scatter").makeError(
                        u"Scatter parameter '%s' does not correspond to an input parameter of this "
                        u"step, expecting '%s'" % (shortname(s), "', '".join(
                            shortname(k) for k in inp_map.keys()))))

            # Each scattered input becomes an array of its original type.
            inp_map[s]["type"] = {
                "type": "array",
                "items": inp_map[s]["type"]
            }

        # nested_crossproduct nests the output arrays once per scatter
        # variable; every other method wraps outputs a single time.
        if self.tool.get("scatterMethod") == "nested_crossproduct":
            nesting = len(scatter)
        else:
            nesting = 1

        for r in xrange(0, nesting):
            for op in outputparms:
                op["type"] = {"type": "array", "items": op["type"]}

        self.tool["inputs"] = inputparms
        self.tool["outputs"] = outputparms
class RootManifest:
    """
    Manifest: defaults to the "vault.yml" file in the current directory.
    """

    def __init__(self, path: str = None, load: bool = True):
        self.path = os.path.abspath(path) if path else None
        self._backing = CommentedMap()
        # BUG FIX: initialize the header so yaml() works even when
        # set_header() was never called (previously AttributeError).
        self._header = None
        if load:
            self._load()
        # _changes is the mutable working copy; _backing stays as-loaded.
        self._changes = deepcopy(self._backing)

    def _load(self):
        # BUG FIX: guard path=None (os.path.exists(None) raises TypeError)
        # and empty files (safe_load returns None, update(None) raises).
        if self.path and os.path.exists(self.path):
            with open(self.path, mode="r") as f:
                loaded = yaml.safe_load(f)
            if loaded:
                self._backing.update(loaded)

    def set_header(self, header: str) -> None:
        """Set the comment header emitted at the top of yaml() output."""
        self._header = header

    def create_secrets_backend_section(self) -> None:
        """Create the secrets_backends section (with comment) if absent."""
        if "secrets_backends" not in self._changes:
            self._changes["secrets_backends"] = {}
            self._changes.yaml_set_comment_before_after_key(
                "secrets_backends",
                before=
                "Secrets backends. Each key is the mount to a secrets engine.")

    def add_secrets_backend(self, name: str, manifest: ManifestItem) -> None:
        """Merge *manifest* into the secrets backend entry for *name*."""
        converted = manifest.convert()
        if not converted:
            return
        # BUG FIX: create the section on demand, consistent with
        # add_auth_method() — previously this raised KeyError when
        # create_secrets_backend_section() had not been called.
        if "secrets_backends" not in self._changes:
            self._changes["secrets_backends"] = {}
        name = name.strip("/")
        new_dict = self._changes["secrets_backends"].get(name, {})
        new_dict.update(converted)
        self._changes["secrets_backends"][name] = new_dict

    def delete_secrets_backend(self, name: str) -> None:
        """Remove the secrets backend entry for *name*, if present."""
        name = name.strip("/")
        if "secrets_backends" in self._changes and name in self._changes[
                "secrets_backends"]:
            del self._changes["secrets_backends"][name]

    def list_secrets_backend_names(self) -> List[str]:
        """Names of all secrets backends in the loaded (backing) manifest."""
        return [
            name.strip("/")
            for name in self._backing.get("secrets_backends", {})
        ]

    def create_auth_method_section(self) -> None:
        """Create the auth_methods section (with comment) if absent."""
        if "auth_methods" not in self._changes:
            self._changes["auth_methods"] = {}
            self._changes.yaml_set_comment_before_after_key(
                "auth_methods",
                before=
                "Authentication methods. Each key is the name of the auth method."
            )

    def add_auth_method(self, name: str, manifest: ManifestItem) -> None:
        """Merge *manifest* into the auth method entry for *name*."""
        converted = manifest.convert()
        if not converted:
            return
        if "auth_methods" not in self._changes:
            self._changes["auth_methods"] = {}
        name = name.strip("/")
        new_dict = self._changes["auth_methods"].get(name, {})
        new_dict.update(converted)
        self._changes["auth_methods"][name] = new_dict

    def yaml(self) -> str:
        """Serialize the working manifest, prefixed by the comment header."""
        output = ""
        if self._header:
            for line in self._header.split("\n"):
                output += f"# {line}\n"
            output += "\n"
        output += yaml.round_trip_dump(self._changes)
        return output

    def save(self) -> None:
        """Write the serialized manifest to self.path."""
        with open(self.path, "w") as f:
            f.write(self.yaml())