def _resolve_dsl(
    self,
    document: CommentedMap,
    loader: "Loader",
) -> None:
    fields = list(loader.type_dsl_fields)
    fields.extend(loader.secondaryFile_dsl_fields)

    for d in fields:
        if d in document:
            datum2 = datum = document[d]
            if isinstance(datum, str):
                datum2 = self._apply_dsl(
                    datum,
                    d,
                    loader,
                    document.lc.data[d] if document.lc.data else document.lc,
                    getattr(document.lc, "filename", ""),
                )
            elif isinstance(datum, CommentedSeq):
                datum2 = CommentedSeq()
                for n, t in enumerate(datum):
                    if datum.lc and datum.lc.data:
                        datum2.lc.add_kv_line_col(len(datum2), datum.lc.data[n])
                        datum2.append(
                            self._apply_dsl(
                                t, d, loader, datum.lc.data[n], document.lc.filename
                            )
                        )
                    else:
                        datum2.append(self._apply_dsl(t, d, loader, LineCol(), ""))
            if isinstance(datum2, CommentedSeq):
                datum3 = CommentedSeq()
                seen = []  # type: List[str]
                for i, item in enumerate(datum2):
                    if isinstance(item, CommentedSeq):
                        for j, v in enumerate(item):
                            if v not in seen:
                                datum3.lc.add_kv_line_col(
                                    len(datum3), item.lc.data[j]
                                )
                                datum3.append(v)
                                seen.append(v)
                    else:
                        if item not in seen:
                            if datum2.lc and datum2.lc.data:
                                datum3.lc.add_kv_line_col(
                                    len(datum3), datum2.lc.data[i]
                                )
                            datum3.append(item)
                            seen.append(item)
                document[d] = datum3
            else:
                document[d] = datum2
def update_environment_yml():
    """Update conda_dev_env.yml file for conda."""
    import re

    from ruamel.yaml import YAML
    from ruamel.yaml.comments import CommentedMap, CommentedSeq

    environment_filename = "conda_dev_env.yml"

    cmap = CommentedMap()
    cmap.yaml_set_start_comment(
        ("Usage: conda env create -n myenvname -f {} python=3.6\n"
         " conda activate myenvname\n"
         " pip install --no-deps -e .".format(environment_filename)))
    cmap["name"] = "aiida_crystal17"
    cmap["channels"] = CommentedSeq(["conda-forge", "cjs14"])
    cmap["channels"].yaml_add_eol_comment("for sqlalchemy-diff and pgtest", 1)
    cmap["dependencies"] = dmap = CommentedSeq()

    # additional packages
    dmap.append("pip")
    dmap.append("aiida-core.services")

    # fix incompatibilities between conda and pypi
    replacements = {"pre-commit": "pre_commit"}
    setup_json = get_setup_json()

    for base, key in [
        (None, "install_requires"),
        ("extras_require", "testing"),
        ("extras_require", "code_style"),
        ("extras_require", "docs"),
    ]:
        requirements = setup_json.get(base, setup_json)[key]
        count = 0
        for req in sorted(requirements, key=lambda x: x.lower()):
            # skip packages required for specific python versions < 3
            if re.findall("python_version\\s*\\<\\s*\\'?3", req):
                continue
            req = req.split(";")[0]
            for (regex, replacement) in iter(replacements.items()):
                req = re.sub(regex, replacement, req)
            count += 1
            dmap.append(req.lower())
        dmap.yaml_set_comment_before_after_key(len(dmap) - count, before=key)

    yaml = YAML(typ="rt")
    yaml.default_flow_style = False
    yaml.encoding = "utf-8"
    yaml.allow_unicode = True
    file_path = os.path.join(ROOT_DIR, environment_filename)
    with open(file_path, "w") as env_file:
        yaml.dump(cmap, env_file)
def create_default_seq(config):
    if not config.get('config'):
        config['config'] = CommentedSeq()
    if not config.get('package'):
        config['package'] = CommentedSeq()
    if not config.get('ignore'):
        config['ignore'] = CommentedSeq()
    if not isinstance(config['config'], list):
        config['config'] = list_to_seq(config['config'])
    if not isinstance(config['package'], list):
        config['package'] = list_to_seq(config['package'])
    if not isinstance(config['ignore'], list):
        config['ignore'] = list_to_seq(config['ignore'])
    return config
def update_secondaryFiles(t, top=False):
    # type: (Any, bool) -> Union[MutableSequence[MutableMapping[str, str]], MutableMapping[str, str]]
    if isinstance(t, CommentedSeq):
        new_seq = copy.deepcopy(t)
        for index, entry in enumerate(t):
            new_seq[index] = update_secondaryFiles(entry)
        return new_seq
    elif isinstance(t, MutableSequence):
        return CommentedSeq([update_secondaryFiles(p) for p in t])
    elif isinstance(t, MutableMapping):
        return t
    elif top:
        return CommentedSeq([CommentedMap([("pattern", t)])])
    else:
        return CommentedMap([("pattern", t)])
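# A small illustrative check for update_secondaryFiles (hypothetical usage, not
# part of the original module; it assumes copy, MutableSequence/MutableMapping,
# and the ruamel comment classes are imported at module level, as the function
# requires). A bare pattern string is normalized to {"pattern": ...}, and with
# top=True it is additionally wrapped in a single-element sequence.
def _demo_update_secondaryFiles():
    assert update_secondaryFiles(".bai") == CommentedMap([("pattern", ".bai")])
    assert update_secondaryFiles(".bai", top=True) == CommentedSeq(
        [CommentedMap([("pattern", ".bai")])]
    )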
def _migrate_decorators(self, element, old_key, new_key):
    self.log.debug("Converting display_element blink decorator to widget "
                   "animation")
    decorator = element[old_key]
    element[new_key] = CommentedMap()
    element[new_key]['show_slide'] = CommentedSeq()
    on_dict = CommentedMap()
    on_dict['property'] = 'opacity'
    on_dict['value'] = 1
    on_dict['duration'] = str(decorator.get('on_secs', .5)) + 's'
    element[new_key]['show_slide'].append(on_dict)
    off_dict = CommentedMap()
    off_dict['property'] = 'opacity'
    off_dict['value'] = 0
    off_dict['duration'] = str(decorator.get('off_secs', .5)) + 's'
    off_dict['repeat'] = True
    element[new_key]['show_slide'].append(off_dict)
    del element[old_key]
    return element
def _set_subcomponents(self, match_categories):
    """Set subcomponents for the top component from the match categories."""
    data = CommentedMap(self.top_component.as_yaml())
    data.yaml_set_start_comment(TOP_LEVEL_COMMENT)
    temp_list = CommentedSeq()

    # Set the subcomponents and comments
    for top_comment, start_index, matches in match_categories:
        components = self._matches_to_components(matches)
        for subcomponent in components:
            try:
                # Extract inline comment before it's removed
                inline_comment = subcomponent.inline_comment
            except AttributeError:
                inline_comment = None
            d2 = CommentedMap(subcomponent.as_yaml())
            if inline_comment:
                # Apply inline comment to data
                d2.yaml_add_eol_comment(comment=inline_comment, key='name')
            temp_list.append(d2)
        temp_list.yaml_set_comment_before_after_key(key=start_index,
                                                    before=top_comment,
                                                    indent=OFFSET)
    data['subcomponents'] = temp_list
    return data
def _type_dsl(
        self,
        t,  # type: Union[Text, Dict, List]
        lc,
        filename):
    # type: (...) -> Union[Text, Dict[Text, Text], List[Union[Text, Dict[Text, Text]]]]
    if not isinstance(t, (str, six.text_type)):
        return t
    m = Loader.typeDSLregex.match(t)
    if not m:
        return t
    first = m.group(1)
    second = third = None
    if bool(m.group(2)):
        second = CommentedMap((("type", "array"), ("items", first)))
        second.lc.add_kv_line_col("type", lc)
        second.lc.add_kv_line_col("items", lc)
        second.lc.filename = filename
    if bool(m.group(3)):
        third = CommentedSeq([u"null", second or first])
        third.lc.add_kv_line_col(0, lc)
        third.lc.add_kv_line_col(1, lc)
        third.lc.filename = filename
    return third or second or first
def get_pvc_tasks(self, tags=[]):
    module_name = 'k8s_v1_persistent_volume_claim'
    tasks = CommentedSeq()
    for template in self.get_pvc_templates():
        task = CommentedMap()
        task['name'] = 'Create PVC'
        task[module_name] = CommentedMap()
        task[module_name]['state'] = 'present'
        if self._auth:
            for key in self._auth:
                task[module_name][key] = self._auth[key]
        task[module_name]['force'] = template.pop('force', False)
        task[module_name]['resource_definition'] = template
        if tags:
            task['tags'] = copy.copy(tags)
        tasks.append(task)
    if self._volumes:
        # Remove any volumes where state is 'absent'
        for volname, vol_config in iteritems(self._volumes):
            if self.CONFIG_KEY in vol_config:
                if vol_config[self.CONFIG_KEY].get('state', 'present') == 'absent':
                    task = CommentedMap()
                    task['name'] = 'Remove PVC'
                    task[module_name] = CommentedMap()
                    task[module_name]['name'] = volname
                    task[module_name]['namespace'] = self._namespace_name
                    task[module_name]['state'] = 'absent'
                    if self._auth:
                        for key in self._auth:
                            task[module_name][key] = self._auth[key]
                    if tags:
                        task['tags'] = copy.copy(tags)
                    tasks.append(task)
    return tasks
def test_rdf_datetime() -> None:
    """Affirm that datetime objects can be serialized in makerdf()."""
    ldr = Loader({})
    ctx: ContextType = {
        "id": "@id",
        "location": {"@id": "@id", "@type": "@id"},
        "bar": "http://example.com/bar",
        "ex": "http://example.com/",
    }
    ldr.add_context(ctx)

    ra: CommentedMap = cast(
        CommentedMap,
        ldr.resolve_all(
            cmap(
                {
                    "id": "foo",
                    "bar": {"id": "baz"},
                }
            ),
            "http://example.com",
        )[0],
    )
    ra["s:dateCreated"] = datetime.datetime(2020, 10, 8)

    g = makerdf(None, ra, ctx)
    g.serialize(destination=stdout(), format="n3")
    g2 = makerdf(None, CommentedSeq([ra]), ctx)
    g2.serialize(destination=stdout(), format="n3")
def get_service_tasks(self, tags=[]):
    module_name = 'k8s_v1_service'
    tasks = CommentedSeq()
    for template in self.get_services_templates():
        task = CommentedMap()
        task['name'] = 'Create service'
        task[module_name] = CommentedMap()
        task[module_name]['state'] = 'present'
        if self._auth:
            for key in self._auth:
                task[module_name][key] = self._auth[key]
        task[module_name]['force'] = template.pop('force', False)
        task[module_name]['resource_definition'] = template
        if tags:
            task['tags'] = copy.copy(tags)
        tasks.append(task)
    if self._services:
        # Remove any services where state is 'absent'
        for name, service in iteritems(self._services):
            if service.get(self.CONFIG_KEY, {}).get('state', 'present') == 'absent':
                task = CommentedMap()
                task['name'] = 'Remove service'
                task[module_name] = CommentedMap()
                task[module_name]['state'] = 'absent'
                task[module_name]['name'] = name
                task[module_name]['namespace'] = self._namespace_name
                if self._auth:
                    for key in self._auth:
                        task[module_name][key] = self._auth[key]
                if tags:
                    task['tags'] = copy.copy(tags)
                tasks.append(task)
    return tasks
def FSlist(l):
    # convert a list into flow style (default is block style)
    from ruamel.yaml.comments import CommentedSeq
    double_quoted_list = [DoubleQuotedScalarString(x) for x in l]
    cs = CommentedSeq(double_quoted_list)
    cs.fa.set_flow_style()
    return cs
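# A minimal usage sketch for FSlist (an assumption-laden demo, not part of the
# original module): it presumes ruamel.yaml is installed and that
# DoubleQuotedScalarString is already imported at module level, as FSlist
# requires. The flow-style hint set via .fa makes the sequence dump on one
# line, e.g. fruits: ["apple", "pear"].
def _demo_FSlist():
    import sys
    from ruamel.yaml import YAML

    yaml = YAML()  # the round-trip dumper honours the per-node flow-style hint
    yaml.dump({"fruits": FSlist(["apple", "pear"])}, sys.stdout)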
def saveenroll():
    """Save and Enroll"""
    global dir
    try:
        yaml = YAML()
        with open(cmd + "/client-config.yaml") as fp:
            data = yaml.load(fp)
        data['url'] = "http://" + addr
        data['mspdir'] = dir + "/msp"
        data['csr']['cn'] = t5.get()
        f = CommentedSeq([
            CommentedMap([('C', t6.get()), ('ST', t7.get()), ('L', t8.get()),
                          ('O', t9.get()), ('OU', t10.get())])
        ])
        data['csr']['names'] = f
        fp = open(cmd + "/client-config.yaml", "w")
        yaml.dump(data, fp)
        res = check_output(
            "cd " + cmd + "; export FABRIC_CA_CLIENT_HOME=" + dir +
            "; fabric-ca-client enroll -c client-config.yaml -u http://" +
            t11.get() + ":" + t12.get() + "@" + addr,
            shell=True)
    except Exception as e:
        error(str(e))
    else:
        # only report success when the enroll command actually ran
        print(res)
        tkMessageBox.showinfo(message="Successfully Enrolled")
def build_next_node(yaml_path: YAMLPath, depth: int, value: Any = None) -> Any:
    """
    Get the best default value for the next entry in a YAML Path.

    Parameters:
    1. yaml_path (deque) The pre-parsed YAML Path to follow
    2. depth (int) Index of the YAML Path segment to evaluate
    3. value (Any) The expected value for the final YAML Path entry

    Returns:  (Any) The most appropriate default value

    Raises:  N/A
    """
    default_value = Nodes.wrap_type(value)
    segments = yaml_path.escaped
    if not (segments and len(segments) > depth):
        return default_value

    typ = segments[depth][0]
    if typ == PathSegmentTypes.INDEX:
        default_value = CommentedSeq()
    elif typ == PathSegmentTypes.KEY:
        default_value = CommentedMap()

    return default_value
def _type_dsl(
    self,
    t: Union[str, CommentedMap, CommentedSeq],
    lc: LineCol,
    filename: str,
) -> Union[str, CommentedMap, CommentedSeq]:
    if not isinstance(t, str):
        return t
    m = typeDSLregex.match(t)
    if not m:
        return t
    first = m.group(1)
    assert first
    second = third = None
    if bool(m.group(2)):
        second = CommentedMap((("type", "array"), ("items", first)))
        second.lc.add_kv_line_col("type", lc)
        second.lc.add_kv_line_col("items", lc)
        second.lc.filename = filename
    if bool(m.group(3)):
        third = CommentedSeq(["null", second or first])
        third.lc.add_kv_line_col(0, lc)
        third.lc.add_kv_line_col(1, lc)
        third.lc.filename = filename
    return third or second or first
def wrap_type(value: Any) -> Any:
    """
    Wrap a value in one of the ruamel.yaml wrapper types.

    Parameters:
    1. value (Any) The value to wrap.

    Returns: (Any) The wrapped value or the original value when a better
       wrapper could not be identified.

    Raises:  N/A
    """
    wrapped_value = value
    ast_value = Nodes.typed_value(value)
    typ = type(ast_value)
    if typ is list:
        wrapped_value = CommentedSeq(value)
    elif typ is dict:
        wrapped_value = CommentedMap(value)
    elif typ is str:
        wrapped_value = PlainScalarString(value)
    elif typ is int:
        wrapped_value = ScalarInt(value)
    elif typ is float:
        wrapped_value = Nodes.make_float_node(ast_value)
    elif typ is bool:
        wrapped_value = ScalarBoolean(bool(value))

    return wrapped_value
def seq(self, *args):
    # type: (Any) -> Any
    if self.typ == 'rt':
        from ruamel.yaml.comments import CommentedSeq
        return CommentedSeq(*args)
    else:
        return list(*args)
def to_yaml(self, data):
    self._should_be_list(data)
    # TODO : Different length string
    return CommentedSeq([
        validator.to_yaml(item)
        for item, validator in zip(data, self._validators)
    ])
def ruamel_structure(data, validator=None):
    """
    Take dicts and lists and return a ruamel.yaml style structure
    of CommentedMaps, CommentedSeqs and data.

    If a validator is provided and the type is unknown, it is checked against
    the validator to see whether it can be turned back into YAML.
    """
    if isinstance(data, dict):
        if len(data) == 0:
            raise exceptions.CannotBuildDocumentsFromEmptyDictOrList(
                "Document must be built with non-empty dicts and lists")
        return CommentedMap([(ruamel_structure(key), ruamel_structure(value))
                             for key, value in data.items()])
    elif isinstance(data, list):
        if len(data) == 0:
            raise exceptions.CannotBuildDocumentsFromEmptyDictOrList(
                "Document must be built with non-empty dicts and lists")
        return CommentedSeq([ruamel_structure(item) for item in data])
    elif isinstance(data, bool):
        return u"yes" if data else u"no"
    elif isinstance(data, (int, float)):
        return str(data)
    else:
        if not is_string(data):
            raise exceptions.CannotBuildDocumentFromInvalidData(
                ("Document must be built from a combination of:\n"
                 "string, int, float, bool or nonempty list/dict\n\n"
                 "Instead, found variable with type '{}': '{}'").format(
                     type(data).__name__, data))
        return data
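# A minimal usage sketch for ruamel_structure (illustrative only; it assumes
# ruamel.yaml is installed and that the surrounding module's exceptions and
# is_string helpers are importable as written). Every scalar comes back as a
# string ("yes"/"no" for bools), so the dumped YAML contains only string leaves,
# e.g. the ints 80 and 443 below become the strings '80' and '443'.
def _demo_ruamel_structure():
    import sys
    from ruamel.yaml import YAML

    doc = ruamel_structure({"name": "demo", "enabled": True, "ports": [80, 443]})
    YAML().dump(doc, sys.stdout)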
def get_secret_templates(self):
    def _secret(secret_name, secret):
        template = CommentedMap()
        # carry 'force' on the template, mirroring the other template builders
        template['force'] = secret.get('force', False)
        template['apiVersion'] = self.DEFAULT_API_VERSION
        template['kind'] = "Secret"
        template['metadata'] = CommentedMap([('name', secret_name),
                                             ('namespace', self._namespace_name)])
        template['type'] = 'Opaque'
        template['data'] = {}
        for key, vault_variable in iteritems(secret):
            template['data'][key] = "{{ %s | b64encode }}" % vault_variable
        return template

    templates = CommentedSeq()
    if self._secrets:
        for secret_name, secret_config in iteritems(self._secrets):
            secret = _secret(secret_name, secret_config)
            templates.append(secret)
    return templates
def update_environment_yml():
    """ Updates conda_dev_env.yml file for conda. """
    import re

    from ruamel.yaml.comments import CommentedMap, CommentedSeq
    from ruamel.yaml import YAML

    environment_filename = 'conda_dev_env.yml'

    cmap = CommentedMap()
    cmap.yaml_set_start_comment(
        'Usage: conda env create -n myenvname -f {} python=3.6'.format(
            environment_filename))
    cmap['name'] = 'aiida_icl'
    cmap['channels'] = CommentedSeq(['conda-forge', 'cjs'])
    cmap['channels'].yaml_add_eol_comment('for sqlalchemy-diff and pgtest', 1)
    cmap['dependencies'] = dmap = CommentedSeq()

    # fix incompatibilities between conda and pypi
    replacements = {}
    setup_json = get_setup_json()

    for base, key in [(None, 'install_requires'),
                      ('extras_require', 'testing'),
                      ('extras_require', 'code_style')]:
        requirements = setup_json.get(base, setup_json)[key]
        count = 0
        for req in sorted(requirements, key=lambda x: x.lower()):
            # skip packages required for specific python versions < 3
            if re.findall("python_version\\s*\\<\\s*\\'?3", req):
                continue
            req = req.split(';')[0]
            for (regex, replacement) in iter(replacements.items()):
                req = re.sub(regex, replacement, req)
            count += 1
            dmap.append(req.lower())
        dmap.yaml_set_comment_before_after_key(len(dmap) - count, before=key)

    yaml = YAML(typ='rt')
    yaml.default_flow_style = False
    yaml.encoding = 'utf-8'
    yaml.allow_unicode = True
    file_path = os.path.join(ROOT_DIR, environment_filename)
    with open(file_path, 'w') as env_file:
        yaml.dump(cmap, env_file)
def get_step(tool: Workflow, step_id: str, loading_context: LoadingContext) -> CommentedMap:
    """Extract a single WorkflowStep for the given step_id."""
    extracted = CommentedMap()

    step = find_step(tool.steps, step_id, loading_context)[0]
    if step is None:
        raise Exception(f"Step {step_id} was not found")

    new_id, step_name = cast(str, step["id"]).rsplit("#")

    extracted["steps"] = CommentedSeq([step])
    extracted["inputs"] = CommentedSeq()
    extracted["outputs"] = CommentedSeq()

    for in_port in cast(List[CWLObjectType], step["in"]):
        name = "#" + cast(str, in_port["id"]).split("#")[-1].split("/")[-1]
        inp: CWLObjectType = {"id": name, "type": "Any"}
        if "default" in in_port:
            inp["default"] = in_port["default"]
        extracted["inputs"].append(CommentedMap(inp))
        in_port["source"] = name
        if "linkMerge" in in_port:
            del in_port["linkMerge"]

    for outport in cast(List[Union[str, Mapping[str, Any]]], step["out"]):
        if isinstance(outport, Mapping):
            outport_id = cast(str, outport["id"])
        else:
            outport_id = outport
        name = outport_id.split("#")[-1].split("/")[-1]
        extracted["outputs"].append({
            "id": name,
            "type": "Any",
            "outputSource": f"{new_id}#{step_name}/{name}",
        })

    for f in tool.tool:
        if f not in ("steps", "inputs", "outputs"):
            extracted[f] = tool.tool[f]

    extracted["id"] = new_id
    if "cwlVersion" not in extracted:
        extracted["cwlVersion"] = tool.metadata["cwlVersion"]
    return extracted
def _merge_simple_lists(
    self, lhs: CommentedSeq, rhs: CommentedSeq, path: YAMLPath,
    node_coord: NodeCoords
) -> CommentedSeq:
    """
    Merge two CommentedSeq-wrapped lists of Scalars or CommentedSeqs.

    Parameters:
    1. lhs (CommentedSeq) The merge target.
    2. rhs (CommentedSeq) The merge source.
    3. path (YAMLPath) Location within the DOM where this merge is taking
       place.
    4. node_coord (NodeCoords) The RHS root node, its parent, and reference
       within its parent; used for config lookups.

    Returns: (list) The merged result.

    Raises:
    - `MergeException` when a clean merge is impossible.
    """
    if not isinstance(lhs, CommentedSeq):
        raise MergeException(
            "Impossible to add Array data to non-Array destination.", path)

    merge_mode = self.config.array_merge_mode(node_coord)
    if merge_mode is ArrayMergeOpts.LEFT:
        return lhs
    if merge_mode is ArrayMergeOpts.RIGHT:
        return rhs

    tagless_lhs = Nodes.tagless_elements(lhs)
    for idx, ele in enumerate(rhs):
        path_next = path + "[{}]".format(idx)
        self.logger.debug(
            "Processing element {} at {}.".format(idx, path_next),
            prefix="Merger::_merge_simple_lists: ",
            data=ele)

        if merge_mode is ArrayMergeOpts.UNIQUE:
            cmp_val = ele
            if isinstance(ele, TaggedScalar):
                cmp_val = ele.value

            self.logger.debug(
                "Looking for comparison value, {}, in:".format(cmp_val),
                prefix="Merger::_merge_simple_lists: ",
                data=tagless_lhs)

            if cmp_val in tagless_lhs:
                lhs = CommentedSeq([
                    ele if (e == cmp_val
                            or (isinstance(e, TaggedScalar)
                                and e.value == cmp_val))
                    else e
                    for e in lhs
                ])
            else:
                lhs.append(ele)
            continue
        lhs.append(ele)
    return lhs
def add_item(self, item: Union[str, int]):
    """Add a new item to the current section

    :param item: Receive the value for the current item
    """
    if not isinstance(self.yaml_obj, CommentedSeq):
        self._get_parent()[self.section_name] = CommentedSeq()
    RecipeItem(len(self.yaml_obj), self.yaml_obj, item)
def __setitem__(self, key: str, value: Any):
    if key not in self.yaml_obj:
        self.add_subsection(key)
    if isinstance(value, (str, int)):
        self.yaml_obj[key] = CommentedSeq()
        self[key].add_item(value)
    elif isinstance(value, dict):
        Section(key, self.yaml_obj)
def test_datatype_is_CommentedSeq(self):
    c = CommentedSeq()
    c.insert(0, "key")
    c.insert(1, "to")
    c2 = CommentedMap()
    c2.insert(0, "to", "from")
    c2.insert(1, "__from__", "to")
    c.insert(2, c2)
    result = CommentedSeq()
    result.append("key")
    result.append("to")
    result.append("to")
    self.assertEqual(result, parse_for_variable_hierarchies(c, "__from__"))
def get_services_templates(self):
    """ Generate a service configuration """
    def _create_service(name, service):
        template = CommentedMap()
        state = service.get(self.CONFIG_KEY, {}).get('state', 'present')
        if state == 'present':
            ports = self.get_service_ports(service)
            if ports:
                template['apiVersion'] = self.DEFAULT_API_VERSION
                template['kind'] = 'Service'
                template['force'] = service.get(self.CONFIG_KEY, {}).get(
                    'service', {}).get('force', False)
                labels = CommentedMap([
                    ('app', self._namespace_name),
                    ('service', name)
                ])
                template['metadata'] = CommentedMap([
                    ('name', name),
                    ('namespace', self._namespace_name),
                    ('labels', copy.deepcopy(labels))
                ])
                template['spec'] = CommentedMap([
                    ('selector', copy.deepcopy(labels)),
                    ('ports', ports)
                ])
                # Translate options:
                if service.get(self.CONFIG_KEY):
                    for key, value in service[self.CONFIG_KEY].items():
                        if key == 'service':
                            for service_key, service_value in value.items():
                                if service_key == 'force':
                                    continue
                                elif service_key == 'metadata':
                                    self.copy_attribute(template, service_key, service_value)
                                else:
                                    self.copy_attribute(template['spec'], service_key, service_value)
        return template

    templates = CommentedSeq()
    if self._services:
        for name, service in self._services.items():
            template = _create_service(name, service)
            if template:
                templates.append(template)
            if service.get('links'):
                # create services for aliased links
                for link in service['links']:
                    if ':' in link:
                        service_name, alias = link.split(':')
                        alias_config = self._services.get(service_name)
                        if alias_config:
                            new_service = _create_service(alias, alias_config)
                            if new_service:
                                templates.append(new_service)
    return templates
def generate_orchestration_playbook(self, url=None, namespace=None, local_images=True, **kwargs):
    """
    Generate an Ansible playbook to orchestrate services.
    :param url: registry URL where images will be pulled from
    :param namespace: registry namespace
    :param local_images: bypass pulling images, and use local copies
    :return: playbook dict
    """
    for service_name in self.services:
        image = self.get_latest_image_for_service(service_name)
        if local_images:
            self.services[service_name]['image'] = image.tags[0]
        else:
            self.services[service_name]['image'] = urljoin(urljoin(url, namespace), image.tags[0])

    if kwargs.get('k8s_auth'):
        self.k8s_client.set_authorization(kwargs['k8s_auth'])

    play = CommentedMap()
    play['name'] = 'Manage the lifecycle of {} on {}'.format(self.project_name, self.display_name)
    play['hosts'] = 'localhost'
    play['gather_facts'] = 'no'
    play['connection'] = 'local'
    play['roles'] = CommentedSeq()
    play['tasks'] = CommentedSeq()

    role = CommentedMap([
        ('role', 'kubernetes-modules')
    ])
    play['roles'].append(role)
    play.yaml_set_comment_before_after_key(
        'roles', before='Include Ansible Kubernetes and OpenShift modules', indent=4)
    play.yaml_set_comment_before_after_key(
        'tasks',
        before='Tasks for setting the application state. '
               'Valid tags include: start, stop, restart, destroy',
        indent=4)

    play['tasks'].append(self.deploy.get_namespace_task(state='present', tags=['start']))
    play['tasks'].append(self.deploy.get_namespace_task(state='absent', tags=['destroy']))
    play['tasks'].extend(self.deploy.get_service_tasks(tags=['start']))
    play['tasks'].extend(self.deploy.get_deployment_tasks(engine_state='stop', tags=['stop', 'restart']))
    play['tasks'].extend(self.deploy.get_deployment_tasks(tags=['start', 'restart']))
    play['tasks'].extend(self.deploy.get_pvc_tasks(tags=['start']))

    playbook = CommentedSeq()
    playbook.append(play)

    logger.debug(u'Created playbook to run project', playbook=playbook)
    return playbook
def to_yaml(self, data):
    self._should_be_list(data)
    if len(set(data)) < len(data):
        raise YAMLSerializationError(
            ("Expecting all unique items, "
             "but duplicates were found in '{}'.".format(data)))
    return CommentedSeq([self._validator.to_yaml(item) for item in data])
def fill_config(data_obj):
    """ Make sample config """
    data_obj.insert(len(data_obj), "url", "https://jira.example.com", comment="Jira URL")
    data_obj.insert(len(data_obj), "username", "some_username", comment="Jira login")
    data_obj.insert(len(data_obj), "password", "SomeSecurePassword", comment="Jira password")
    data_obj.insert(len(data_obj), "project", "SOME-PROJECT", comment="Jira project")
    data_obj.insert(len(data_obj), "fields", CommentedMap(), comment="Fields for created tickets")
    fields_obj = data_obj["fields"]
    fields_obj.insert(len(fields_obj), "Issue Type", "Bug", comment="(field) Ticket type")
    fields_obj.insert(len(fields_obj), "Assignee", "Ticket_Assignee", comment="(field) Assignee")
    fields_obj.insert(len(fields_obj), "Epic Link", "SOMEPROJECT-1234", comment="(field) Epic")
    fields_obj.insert(len(fields_obj), "Security Level", "SOME_LEVEL", comment="(field) Security level")
    fields_obj.insert(len(fields_obj), "Components/s", CommentedSeq(), comment="(field) Component/s")
    components_obj = fields_obj["Components/s"]
    component_obj = CommentedMap()
    component_obj.insert(len(component_obj), "name", "Component Name")
    components_obj.append(component_obj)
    data_obj.insert(len(data_obj), "custom_mapping", CommentedMap(), comment="Custom priority mapping")
    mapping_obj = data_obj["custom_mapping"]
    mapping_obj.insert(len(mapping_obj), "Critical", "Very High")
    mapping_obj.insert(len(mapping_obj), "Major", "High")
    mapping_obj.insert(len(mapping_obj), "Medium", "Medium")
    mapping_obj.insert(len(mapping_obj), "Minor", "Low")
    mapping_obj.insert(len(mapping_obj), "Trivial", "Low")
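# A minimal sketch of how fill_config might be used (hypothetical demo; it only
# assumes ruamel.yaml is installed). CommentedMap.insert() attaches each
# comment= argument as an end-of-line comment, so the dumped YAML documents
# itself, e.g. "url: https://jira.example.com  # Jira URL".
def _demo_fill_config():
    import sys
    from ruamel.yaml import YAML
    from ruamel.yaml.comments import CommentedMap

    config = CommentedMap()
    fill_config(config)
    YAML().dump(config, sys.stdout)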
def _insert_dict(
    self, insert_at: YAMLPath,
    lhs: Union[CommentedMap, CommentedSeq, CommentedSet],
    rhs: CommentedMap
) -> bool:
    """Insert an RHS dict merge result into the LHS document."""
    merge_performed = False
    merged_data: Union[
        CommentedMap, CommentedSeq, CommentedSet
    ] = CommentedMap()

    if isinstance(lhs, CommentedSeq):
        # Merge a dict into a list
        self.logger.debug(
            "Merger::_insert_dict: Merging a dict into a list.")
        merged_data = self._merge_lists(
            lhs, CommentedSeq([rhs]), insert_at)
        merge_performed = True
    elif isinstance(lhs, CommentedSet):
        # Merge a dict into a set; this is destructive
        raise MergeException(
            "Merging a Hash into a Set is destructive to the"
            " source Hash because only the keys would be"
            " preserved. Please adjust your merge to target a"
            " suitable node.",
            insert_at)
    else:
        # Merge a dict into a dict
        self.logger.debug(
            "Merger::_insert_dict: Merging a dict into a dict.")
        merge_mode = self.config.hash_merge_mode(
            NodeCoords(rhs, None, None))
        if merge_mode is HashMergeOpts.LEFT:
            self.logger.debug(
                "Configured mode short-circuits the merge; returning LHS:",
                prefix="Merger::_insert_dict: ",
                data=lhs)
            merged_data = lhs
        elif merge_mode is HashMergeOpts.RIGHT:
            self.logger.debug(
                "Configured mode short-circuits the merge; returning RHS:",
                prefix="Merger::_insert_dict: ",
                data=rhs)
            merged_data = rhs
        else:
            merged_data = self._merge_dicts(lhs, rhs, insert_at)
            merge_performed = True

    # Synchronize YAML Tags
    self.logger.debug(
        "Merger::_insert_dict: Setting LHS tag from {} to {}."
        .format(lhs.tag.value, rhs.tag.value))
    lhs.yaml_set_tag(rhs.tag.value)

    if insert_at.is_root:
        self.data = merged_data
    return merge_performed