def main(): opts = getArgs() for o, a in opts: if o in ("-h", "--help"): usage() sys.exit() elif o in ("-k", "--key"): key_to_update = a elif o in ("-v", "--value"): value_to_update = a else: assert False, "unhandled option" yaml = YAML() yaml.explicit_start = True yaml.allow_unicode = True yaml.width = 300 data_list = key_to_update.split(".") data_to_refer = value_to_update for k in data_list[::-1]: data_to_refer = {k: data_to_refer} result = [] for data in list(yaml.load_all(sys.stdin)): if data is not None: data = update_data(data, data_to_refer) result.append(data) yaml.dump_all(result, sys.stdout)
def main(): opts = getArgs() for o, a in opts: if o in ("-h", "--help"): usage() sys.exit() elif o in ("-k", "--key"): d_key = a elif o in ("-v", "--value"): d_value = a else: assert False, "unhandled option" yaml = YAML() yaml.explicit_start = True yaml.allow_unicode = True yaml.width = 300 result = [] for data in list(yaml.load_all(sys.stdin)): if data is not None: if (data['kind'] == "ConfigMap") or \ (data['kind'] == "Secret"): # update data: key=value data['data'][d_key] = d_value result.append(data) elif 'kind' in data.keys(): result.append(data) yaml.dump_all(result, sys.stdout)
def dump_all(self, data, stream=None, *args, **kwargs):
    inefficient = False
    if stream is None:
        inefficient = True
        stream = StringIO()
    YAML.dump_all(self, data, stream, *args, **kwargs)
    if inefficient:
        return stream.getvalue()
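# A minimal, self-contained usage sketch (an assumption, not taken from the
# source) for the dump_all wrapper above: it is shown here on a hypothetical
# subclass of ruamel.yaml's YAML named MyYAML, with made-up sample documents.
from io import StringIO
from ruamel.yaml import YAML


class MyYAML(YAML):
    def dump_all(self, data, stream=None, *args, **kwargs):
        # When no stream is supplied, dump into a StringIO and return the text.
        inefficient = False
        if stream is None:
            inefficient = True
            stream = StringIO()
        YAML.dump_all(self, data, stream, *args, **kwargs)
        if inefficient:
            return stream.getvalue()


text = MyYAML().dump_all([{"a": 1}, {"b": 2}])  # no stream, so YAML text is returned
print(text)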
def dump_all(docs, stream):
    yaml = YAML()
    yaml.default_flow_style = False
    conv_docs = []
    for doc in docs:
        conv_docs.append(objdict_to_dict(doc))
    yaml.dump_all(conv_docs, stream)
def _join_manifest(self, resources):
    try:
        yaml = YAML()
        s = StringIO()
        yaml.dump_all(resources, s)
    except Exception as e:
        raise ParseError(f'join manifest failed: {e}')
    else:
        return s.getvalue()
async def patch(self, full_path: str, patch_set: Mapping) -> None:
    si = SmartInput(full_path)
    content = await si.content_async()
    yaml = YAML()
    dict_content = yaml.load_all(content)
    new_content = patch(patch_set, list(dict_content))
    with open(full_path, "w") as f:
        yaml.dump_all(new_content, f)
def write_output_document(
    args: argparse.Namespace, log: ConsolePrinter, yaml_editor: YAML,
    docs: List[Merger]
) -> None:
    """Write the final document(s) to the output target, backing up the overwrite file first if requested."""
    if args.backup:
        backup_file = args.overwrite + ".bak"
        log.verbose(
            "Saving a backup of {} to {}."
            .format(args.overwrite, backup_file))
        if exists(backup_file):
            remove(backup_file)
        copy2(args.overwrite, backup_file)

    document_is_json = (
        docs[0].prepare_for_dump(yaml_editor, args.output)
        is OutputDocTypes.JSON)

    dumps = []
    for doc in docs:
        doc.prepare_for_dump(yaml_editor, args.output)
        dumps.append(doc.data)

    if args.output:
        with open(args.output, 'w', encoding='utf-8') as out_fhnd:
            if document_is_json:
                if len(dumps) > 1:
                    for dump in dumps:
                        print(
                            json.dumps(Parsers.jsonify_yaml_data(dump)),
                            file=out_fhnd)
                else:
                    json.dump(Parsers.jsonify_yaml_data(dumps[0]), out_fhnd)
            else:
                if len(dumps) > 1:
                    yaml_editor.explicit_end = True  # type: ignore
                    yaml_editor.dump_all(dumps, out_fhnd)
                else:
                    yaml_editor.dump(dumps[0], out_fhnd)
    else:
        if document_is_json:
            if len(dumps) > 1:
                for dump in dumps:
                    print(json.dumps(Parsers.jsonify_yaml_data(dump)))
            else:
                json.dump(Parsers.jsonify_yaml_data(dumps[0]), sys.stdout)
        else:
            if len(dumps) > 1:
                yaml_editor.explicit_end = True  # type: ignore
                yaml_editor.dump_all(dumps, sys.stdout)
            else:
                yaml_editor.dump(dumps[0], sys.stdout)
def save_dicts_to_yaml(in_dicts, out_f, encoding='utf-8', flow_style=False):
    """
    Save a list of dictionaries to a YAML file.

    :param in_dicts: list of dictionaries
    :param out_f: YAML file
    :param encoding: encoding of the YAML file
    :param flow_style: True or False
    """
    yaml = YAML(pure=True)
    yaml.default_flow_style = flow_style
    Path(out_f).parent.mkdir(parents=True, exist_ok=True)
    with open(Path(out_f), 'w', encoding=encoding) as f_out:
        yaml.dump_all(in_dicts, f_out)
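# A brief usage sketch for save_dicts_to_yaml as defined above (assumed to be in
# scope); the output path out/sample.yaml and the sample documents are made up
# for illustration.
docs = [
    {"name": "first", "replicas": 1},
    {"name": "second", "replicas": 2},
]
save_dicts_to_yaml(docs, "out/sample.yaml")
# out/sample.yaml now holds two block-style YAML documents separated by "---".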
def main():
    yaml = YAML()
    yaml.explicit_start = True
    yaml.allow_unicode = True
    yaml.width = 300
    result = []
    for data in list(yaml.load_all(sys.stdin)):
        if data is not None:
            if data['kind'] == 'Secret':
                for k, v in data['data'].items():
                    data['data'][k] = base64.b64encode(
                        v.encode('utf-8')).decode('utf-8')
                result.append(data)
            elif 'kind' in data.keys():
                result.append(data)
    yaml.dump_all(result, sys.stdout)
def main(): opts = getArgs() for o, a in opts: if o in ("-h", "--help"): usage() sys.exit() elif o in ("-t", "--type"): c_type = a elif o in ("-n", "--name"): c_name = a elif o in ("-i", "--image"): c_image = a else: assert False, "unhandled option" yaml = YAML() yaml.explicit_start = True yaml.allow_unicode = True yaml.width = 300 result = [] for data in list(yaml.load_all(sys.stdin)): if data is not None: if (data['kind'] == "Deployment") or \ (data['kind'] == "DaemonSet") or \ (data['kind'] == "StatefulSet"): # update image container_ls = [ c['name'] for c in data['spec']['template']['spec'][c_type] ] for n, c in enumerate(container_ls): if c == c_name: data['spec']['template']['spec'][c_type][n][ 'image'] = c_image result.append(data) elif data['kind'] == "CronJob": # update image container_ls = [ c['name'] for c in data['spec']['jobTemplate']['spec'] ['template']['spec'][c_type] ] for n, c in enumerate(container_ls): if c == c_name: data['spec']['jobTemplate']['spec']['template'][ 'spec'][c_type][n]['image'] = c_image result.append(data) elif 'kind' in data.keys(): result.append(data) yaml.dump_all(result, sys.stdout)
def main(): opts = getArgs() for o, a in opts: if o in ("-h", "--help"): usage() sys.exit() elif o in ("-t", "--tag"): tag = a else: assert False, "unhandled option" yaml = YAML() yaml.explicit_start = True yaml.allow_unicode = True yaml.width = 300 result = [] for data in list(yaml.load_all(sys.stdin)): if data is not None: if (data['kind'] == "Deployment") or \ (data['kind'] == "DaemonSet") or \ (data['kind'] == "StatefulSet"): # add label: tag data['spec']['template']['metadata']['labels']['tag'] = tag # add label: imageName.n, imageVersion.n image_ls = [c['image'] for c in data['spec']['template']['spec']['containers']] for n,i in enumerate(image_ls): i_name = i.split(':')[0][i.split(':')[0].rfind('/')+1:] i_version = i.split(':')[1] data['spec']['template']['metadata']['labels']['imageName.' + str(n)] = i_name data['spec']['template']['metadata']['labels']['imageVersion.' + str(n)] = i_version result.append(data) elif data['kind'] == "CronJob": # add label: tag data['spec']['jobTemplate']['spec']['template']['metadata']['labels']['tag'] = tag # add label: imageName.n, imageVersion.n image_ls = [c['image'] for c in data['spec']['jobTemplate']['spec']['template']['spec']['containers']] for n,i in enumerate(image_ls): i_name = i.split(':')[0][i.split(':')[0].rfind('/')+1:] i_version = i.split(':')[1] data['spec']['jobTemplate']['spec']['template']['metadata']['labels']['imageName.' + str(n)] = i_name data['spec']['jobTemplate']['spec']['template']['metadata']['labels']['imageVersion.' + str(n)] = i_version result.append(data) elif 'kind' in data.keys(): result.append(data) yaml.dump_all(result, sys.stdout)
def run():
    args = _parse()
    keys = args.keys
    if not keys:
        keys = ["kind", "metadata.name"]

    yaml = YAML()
    input_docs = yaml.load_all(sys.stdin)
    unique_docs = dict()
    for doc in input_docs:
        # Determine the ID of each supplied document based on the keys
        ids = []
        for key in keys:
            # Split each key into its hierarchy of subkeys
            key_parts = key.split(".")
            try:
                # Start with the top-most subkey
                value = doc[key_parts[0]]
                # Descend through the nested subkeys until the value is obtained
                for key_part in key_parts[1:]:
                    value = value[key_part]
                ids.append(str(value))
            except KeyError as ke:
                print(
                    f"Supplied document does not have required key {ke}",
                    file=sys.stderr,
                )
                print("Failed document is:", file=sys.stderr)
                yaml.dump(doc, sys.stderr)
                exit(1)
        # If a document with this ID does not yet exist in our
        # collection, add it
        id = "-".join(ids)
        if id not in unique_docs:
            unique_docs[id] = doc
    yaml.dump_all(unique_docs.values(), sys.stdout)
from ruamel.yaml import YAML

if __name__ == "__main__":
    # Parse the YAML file
    with open('deployments.yaml') as fp:
        content = fp.read()
    yaml = YAML()
    print(content)
    content = yaml.load_all(content)
    print(type(content))
    data = []
    for c in content:
        data.append(c)
    print(data[0])
    c = data[0]
    tmp = c['spec']['template']['spec']['containers'][0]['args'][2]
    c['spec']['template']['spec']['containers'][0]['args'][2] = tmp.format('http')
    data[0] = c
    content = (d for d in data)
    print(content)
    with open('new.yaml', 'w') as f:
        yaml.dump_all(content, f)
class MultiYamlUpdater(object):
    def __init__(self, path: str, remove_empty: bool = False):
        self.yaml = YAML()
        self.path = path
        self.remove_empty = remove_empty
        self._preamble: List[str] = []
        self._dirpath = os.path.dirname(path)
        self._force_rewrite = False

    def __getitem__(self, i):
        return self._code[i]

    def __setitem__(self, i, val):
        self._code[i] = val

    def __iter__(self):
        for m in self._code:
            yield m

    def __delitem__(self, i):
        del self._code[i]

    def __len__(self):
        return len(self._code)

    def __enter__(self):
        try:
            with open(self.path, "r") as f:
                inp = list(f)
        except FileNotFoundError:
            self._orig = {}
            self._orig_text = ""
        else:
            for line in inp:
                if line != "\n" and not line.startswith("#"):
                    break
                self._preamble.append(line)
            self._orig_text = "".join(inp)
            self._orig = list(self.yaml.load_all(self._orig_text))
        self._code = self._orig.copy()
        return self

    def _get_code(self):
        return self._code

    def _set_code(self, v):
        self._code = v

    code = property(_get_code, _set_code)

    def __exit__(self, exc_type, exc_val, exc_tb):
        if not exc_type:
            if not self._code and self.remove_empty:
                if os.path.exists(self.path):
                    os.unlink(self.path)
                    if self._dirpath and not os.listdir(self._dirpath):
                        os.rmdir(self._dirpath)
            else:
                if self._force_rewrite or self._code != self._orig:
                    if not os.path.exists(self._dirpath) and self._dirpath:
                        os.mkdir(self._dirpath)
                    with open(self.path, "w") as f:
                        f.writelines(self._preamble)
                        self.yaml.dump_all(self._code, f)
        return False
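# A brief usage sketch of MultiYamlUpdater as a context manager, assuming the
# class above is in scope and that stack.yaml (a made-up file name) already
# contains one or more YAML documents.
with MultiYamlUpdater("stack.yaml") as updater:
    # The `code` property exposes the file's documents as a plain list.
    updater.code.append({
        "kind": "ConfigMap",
        "metadata": {"name": "extra"},
        "data": {"added": "true"},
    })
# On a clean exit the file is rewritten, keeping the leading comment/blank-line
# preamble, because the document list now differs from what was loaded.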
def save_dicts_to_yaml(in_dicts, f_name, encoding='utf-8', flow_style=True):
    yaml = YAML(pure=True)
    yaml.default_flow_style = flow_style
    Path(f_name).parent.mkdir(parents=True, exist_ok=True)
    with open(Path(f_name), 'w', encoding=encoding) as f_out:
        yaml.dump_all(in_dicts, f_out)
class Parser(dict):
    def __init__(self, filename, check_value=None, check_value_name=None,
                 check_key='kind'):
        super().__init__()
        self.filename = Path(filename)
        self.yaml = YAML()
        self.yaml.preserve_quotes = True
        self.manifests_dict_list = []
        self.modify_dict = {}
        self.tmp_yaml_file = Path("./tmp.yaml")
        if check_value:
            if self.filename.exists():
                with open(filename) as file:
                    manifests_dicts = self.yaml.load_all(file)
                    for manifest in manifests_dicts:
                        try:
                            if manifest[check_key] == check_value:
                                if check_value_name:
                                    if manifest['metadata']['name'] == check_value_name:
                                        self.modify_dict = manifest
                                    else:
                                        self.manifests_dict_list.append(manifest)
                                else:
                                    self.modify_dict = manifest
                            else:
                                self.manifests_dict_list.append(manifest)
                        except KeyError:
                            # No 'kind' key, so this is a Helm values.yaml,
                            # which holds only a single document.
                            self.modify_dict = manifest
            with open(self.tmp_yaml_file, 'w') as file:
                self.yaml.dump(self.modify_dict, file)
            with open(self.tmp_yaml_file) as f:
                super(Parser, self).update(self.yaml.load(f) or {})

    @property
    def return_manifests_dict(self):
        if self.filename.exists():
            with open(self.filename) as file:
                manifests_dicts = self.yaml.load_all(file)
                for manifest in manifests_dicts:
                    self.manifests_dict_list.append(manifest)
        return self.manifests_dict_list

    def __setitem__(self, key, value):
        super(Parser, self).__setitem__(key, value)

    def dump_it(self):
        d = self.analyze_ordered_dict_object(self)
        final_manifest_dict_list = self.manifests_dict_list + [d]
        with open(self.filename, "w+") as f:
            self.yaml.dump_all(final_manifest_dict_list, f)
        with contextlib.suppress(FileNotFoundError):
            os.remove(self.tmp_yaml_file)

    def analyze_ordered_dict_object(self, data):
        if isinstance(data, OrderedDict) or isinstance(data, dict):
            commented_map = CommentedMap()
            for k, v in data.items():
                commented_map[k] = self.analyze_ordered_dict_object(v)
            return commented_map
        return data

    def __delitem__(self, key):
        try:
            super(Parser, self).__delitem__(key)
        except KeyError as e:
            logger.error(e)

    def update(self, other=None, **kwargs):
        if other is not None:
            for k, v in other.items() if isinstance(other, Mapping) else other:
                self[k] = v
        for k, v in kwargs.items():
            self[k] = v
        super(Parser, self).update(self)