def assert_graphql_resp_expected(resp_orig, exp_response_orig, query, resp_hdrs=None,
                                 skip_if_err_msg=False, skip_assertion=False,
                                 exp_resp_hdrs=None):
    """Assert a GraphQL response (and selected headers) matches the expected one.

    Key ordering outside the query's selection sets is ignored.  Returns
    ``(resp, matched)``.  Behavior switches:
      * ``--accept`` pytest option: skip the assertion entirely.
      * ``skip_assertion``: report a mismatch via the return value instead of
        raising.
      * ``skip_if_err_msg``: when both actual and expected are error
        responses, a mismatch only warns instead of failing.
    """
    # Fix: the defaults were mutable dicts ({}), which Python shares across
    # calls; use None sentinels and materialize fresh dicts here instead.
    resp_hdrs = {} if resp_hdrs is None else resp_hdrs
    exp_resp_hdrs = {} if exp_resp_hdrs is None else exp_resp_hdrs
    print('Response Headers: ', resp_hdrs)  # fix: typo "Reponse"
    print(exp_resp_hdrs)
    # Prepare actual and expected responses so comparison takes into
    # consideration only the ordering that we care about:
    resp = collapse_order_not_selset(resp_orig, query)
    exp_response = collapse_order_not_selset(exp_response_orig, query)
    # Bodies must be equal, and every expected header must appear (with the
    # same value) in the actual headers (dict-items subset test).
    matched = equal_CommentedMap(resp, exp_response) and \
        (exp_resp_hdrs or {}).items() <= resp_hdrs.items()

    if PytestConf.config.getoption("--accept"):
        print('skipping assertion since we chose to --accept new output')
    else:
        yml = yaml.YAML()
        # https://yaml.readthedocs.io/en/latest/example.html#output-of-dump-as-a-string :
        dump_str = StringIO()
        test_output = {
            # Keep strict received order when displaying errors:
            'response': resp_orig,
            'expected': exp_response_orig,
            'diff': (lambda diff: "(results differ only in their order of keys)"
                     if diff == {} else diff)
                    (stringify_keys(jsondiff.diff(exp_response, resp))),
            'query': query
        }
        if 'x-request-id' in resp_hdrs:
            test_output['request id'] = resp_hdrs['x-request-id']
        if exp_resp_hdrs:
            # Only show the actual values of headers we actually expected.
            diff_hdrs = {key: val for key, val in resp_hdrs.items()
                         if key in exp_resp_hdrs}
            test_output['headers'] = {
                'actual': dict(resp_hdrs),
                'expected': exp_resp_hdrs,
                'diff': (stringify_keys(jsondiff.diff(exp_resp_hdrs, diff_hdrs)))
            }
        yml.dump(test_output, stream=dump_str)
        if not skip_if_err_msg:
            if skip_assertion:
                return resp, matched
            else:
                assert matched, '\n' + dump_str.getvalue()
        elif matched:
            return resp, matched
        else:
            def is_err_msg(msg):
                return any(msg.get(x) for x in ['error', 'errors'])

            def as_list(x):
                return x if isinstance(x, list) else [x]

            # If it is a batch GraphQL query, compare each individual
            # response separately.
            for (exp, out) in zip(as_list(exp_response), as_list(resp)):
                matched_ = equal_CommentedMap(exp, out)
                if is_err_msg(exp) and is_err_msg(out):
                    if not matched_:
                        warnings.warn("Response does not have the expected error message\n"
                                      + dump_str.getvalue())
                        return resp, matched
                else:
                    if skip_assertion:
                        return resp, matched_
                    else:
                        assert matched_, '\n' + dump_str.getvalue()
    return resp, matched  # matched always True unless --accept
def prettier(yaml_input_dict, check_type=True):
    """Render a YAML Komodo configuration mapping as a prettified YAML string.

    Keys are sorted case-insensitively and the comments attached to the input
    mapping are carried over to the output.
    """
    dumper = ruamel.yaml.YAML()
    # Komodo style: two-space mapping indent, four-space sequences.
    dumper.indent(mapping=2, sequence=4, offset=2)
    dumper.width = 1000  # keep long lines unwrapped

    is_komodo_repo = check_type and is_repository(yaml_input_dict)

    # CommentedMap preserves insertion order (and can carry comments) on
    # every supported interpreter, so sort once while copying.
    ordered = ruamel.yaml.comments.CommentedMap()
    for name in sorted(yaml_input_dict, key=str.lower):
        ordered[name] = yaml_input_dict[name]
    # Re-attach the comments from the original mapping.
    setattr(ordered, ruamel.yaml.comments.comment_attrib, yaml_input_dict.ca)

    buffer = StringIO()
    dumper.dump(
        ordered,
        buffer,
        transform=functools.partial(repository_specific_formatting, is_komodo_repo),
    )

    if sys.version_info < (3, 0):
        # Python 2 callers expect an encoded byte-string.
        return buffer.getvalue().encode("utf-8")
    return buffer.getvalue()
def diff(redash_url, api_key, in_file):
    """Write an HTML diff between the server's queries and *in_file* to stdout."""
    if in_file is None:
        click.echo('No file provided')
        return

    server = redash.Redash(redash_url, api_key)

    # Snapshot of what is currently on the server, sorted and YAML-dumped.
    remote_sorted = sort_queries(server.Get_Full_Queries(server.Get_Queries()))
    remote_stream = StringIO()
    yaml.scalarstring.walk_tree(remote_sorted)
    yaml.dump(remote_sorted, remote_stream, Dumper=yaml.RoundTripDumper)

    # The local file, normalized the same way so the diff is meaningful.
    local_sorted = sort_queries(read_yaml(in_file))
    local_stream = StringIO()
    yaml.scalarstring.walk_tree(local_sorted)
    yaml.dump(local_sorted, local_stream, Dumper=yaml.RoundTripDumper)

    # NOTE(review): "test.html" is passed as HtmlDiff.make_file()'s *fromdesc*
    # (a table column caption), not an output filename — confirm intended.
    html_diff = difflib.HtmlDiff().make_file(
        remote_stream.getvalue().strip().splitlines(),
        local_stream.getvalue().strip().splitlines(),
        "test.html")
    sys.stdout.writelines(html_diff)
def test_file_output(unicode_filename, verbose=False):
    """Dump the same unicode text via four routes; all must produce equal text."""
    yaml = YAML(typ='safe', pure=True)
    with open(unicode_filename, 'rb') as fp:
        data = fp.read().decode('utf-8')

    # Throwaway temp file for the on-disk round-trips below.
    handle, filename = tempfile.mkstemp()
    os.close(handle)
    try:
        # 1) text stream
        text_buf = StringIO()
        yaml.dump(data, text_buf, allow_unicode=True)
        data1 = text_buf.getvalue()

        # 2) byte stream as UTF-16-LE; [1:] drops the BOM character
        byte_buf = BytesIO()
        yaml.dump(data, byte_buf, encoding='utf-16-le', allow_unicode=True)
        data2 = byte_buf.getvalue().decode('utf-16-le')[1:]

        # 3) UTF-16-LE text file
        with open(filename, 'w', encoding='utf-16-le') as stream:
            yaml.dump(data, stream, allow_unicode=True)
        with open(filename, 'r', encoding='utf-16-le') as fp0:
            data3 = fp0.read()

        # 4) binary file, encoded as UTF-8 by the dumper
        with open(filename, 'wb') as stream:
            yaml.dump(data, stream, encoding='utf-8', allow_unicode=True)
        with open(filename, 'r', encoding='utf-8') as fp0:
            data4 = fp0.read()

        # All four routes must agree.
        assert data1 == data2, (data1, data2)
        assert data1 == data3, (data1, data3)
        assert data1 == data4, (data1, data4)
    finally:
        if os.path.exists(filename):
            os.unlink(filename)
def test_unicode_output(unicode_filename, verbose=False):
    """Round-trip unicode text through string, text-stream and byte-stream dumps."""
    yaml = YAML(typ='safe', pure=True)
    with open(unicode_filename, 'rb') as fp:
        data = fp.read().decode('utf-8')
    value = ' '.join(data.split())

    for allow_unicode in [False, True]:
        # data1: dump straight to a returned string.
        data1 = yaml.dump(value, allow_unicode=allow_unicode)
        for encoding in [None, 'utf-8', 'utf-16-be', 'utf-16-le']:
            # data2: dump into a text stream.
            text_stream = StringIO()
            yaml.dump(value, text_stream, encoding=encoding,
                      allow_unicode=allow_unicode)
            data2 = text_stream.getvalue()

            # data3: returned value; decode when an encoding produced bytes.
            data3 = yaml.dump(value, encoding=encoding,
                              allow_unicode=allow_unicode)
            if encoding is not None:
                assert isinstance(data3, bytes)
                data3 = data3.decode(encoding)

            # data4: byte stream; without an encoding this must raise.
            byte_stream = BytesIO()
            if encoding is None:
                try:
                    yaml.dump(value, byte_stream, encoding=encoding,
                              allow_unicode=allow_unicode)
                except TypeError as exc:
                    if verbose:
                        print(exc)
                    data4 = None
                else:
                    raise AssertionError('expected an exception')
            else:
                yaml.dump(value, byte_stream, encoding=encoding,
                          allow_unicode=allow_unicode)
                data4 = byte_stream.getvalue()
                if verbose:
                    print('BYTES:', data4[:50])
                data4 = data4.decode(encoding)

            # Non-ASCII content may only pass through when allow_unicode is on.
            for copy in [data1, data2, data3, data4]:
                if copy is None:
                    continue
                assert isinstance(copy, str)
                if allow_unicode:
                    try:
                        copy[4:].encode('ascii')
                    except UnicodeEncodeError as exc:
                        if verbose:
                            print(exc)
                    else:
                        raise AssertionError('expected an exception')
                else:
                    copy[4:].encode('ascii')
            assert isinstance(data1, str), (type(data1), encoding)
            assert isinstance(data2, str), (type(data2), encoding)
def test_unicode_output(unicode_filename, verbose=False):
    """Round-trip unicode text through several dump routes.

    Python 2-only variant: it calls ``.decode`` on ``str`` results and checks
    ``unicode`` types, which do not exist on Python 3.
    """
    with open(unicode_filename, 'rb') as fp:
        data = fp.read().decode('utf-8')
    value = ' '.join(data.split())
    for allow_unicode in [False, True]:
        # data1: dump straight to a returned string.
        data1 = yaml.dump(value, allow_unicode=allow_unicode)
        for encoding in [None, 'utf-8', 'utf-16-be', 'utf-16-le']:
            # data2: dump through a wrapped UTF-8 writer around a StringIO.
            stream = StringIO()
            yaml.dump(
                value,
                _unicode_open(stream, 'utf-8'),
                encoding=encoding,
                allow_unicode=allow_unicode,
            )
            data2 = stream.getvalue()
            # data3: dump to a returned value (bytes or unicode depending on
            # whether an encoding was requested).
            data3 = yaml.dump(value, encoding=encoding, allow_unicode=allow_unicode)
            # data4: dump into a plain stream.
            stream = StringIO()
            yaml.dump(value, stream, encoding=encoding, allow_unicode=allow_unicode)
            data4 = stream.getvalue()
            # Non-ASCII content may only pass through when allow_unicode is on.
            for copy in [data1, data2, data3, data4]:
                if allow_unicode:
                    try:
                        copy[4:].encode('ascii')
                    except (UnicodeDecodeError, UnicodeEncodeError) as exc:
                        if verbose:
                            print(exc)
                    else:
                        raise AssertionError('expected an exception')
                else:
                    copy[4:].encode('ascii')
            # data1/data2 are always byte-strings decodable as UTF-8.
            assert isinstance(data1, str), (type(data1), encoding)
            data1.decode('utf-8')
            assert isinstance(data2, str), (type(data2), encoding)
            data2.decode('utf-8')
            if encoding is None:
                # No encoding requested: the dumper returns unicode objects.
                assert isinstance(data3, unicode), (type(data3), encoding)  # NOQA
                assert isinstance(data4, unicode), (type(data4), encoding)  # NOQA
            else:
                # Encoding requested: byte-strings decodable with it.
                assert isinstance(data3, str), (type(data3), encoding)
                data3.decode(encoding)
                assert isinstance(data4, str), (type(data4), encoding)
                data4.decode(encoding)
def run(files, template_args):
    """Merge the given YAML files into one document and emit the result.

    Python 2 only (uses a ``print`` statement).  With ``template_args`` of
    ``None`` the merged YAML goes straight to stdout; otherwise the dumped
    YAML is rendered through ``render`` with those arguments first.
    """
    yaml = ruamel.yaml.YAML()
    data = {}

    def merge(source, dest):
        # Recursively fold *source* into *dest*: dicts merge key-wise,
        # lists concatenate, scalars overwrite.
        for key, value in source.items():
            if isinstance(value, dict):
                node = dest.setdefault(key, {})
                merge(value, node)
            elif isinstance(value, list):
                dest[key] = dest.setdefault(key, []) + value
            else:
                dest[key] = value

    for f in files:
        with open(f) as fp:
            merge(yaml.load(fp), data)

    if template_args is None:
        yaml.dump(data, sys.stdout)
    else:
        stream = StringIO()
        yaml.dump(data, stream)
        print render(stream.getvalue(), template_args)
def test_issue_290a(self):
    """Round-trip a document with comments on folded/literal/plain aliases.

    The dump of the loaded document must reproduce the input byte-for-byte.
    NOTE(review): the exact internal line layout of the YAML literal was
    reconstructed from a whitespace-mangled source — verify against the
    original test before relying on it.
    """
    import sys
    from ruamel.yaml.compat import StringIO
    from ruamel.yaml import YAML

    yamldoc = dedent("""\
    ---
    aliases:
      # Folded-element comment
      # for a multi-line value
      - &FoldedEntry >
        THIS IS A
        FOLDED, MULTI-LINE
        VALUE

      # Literal-element comment
      # for a multi-line value
      - &literalEntry |
        THIS IS A
        LITERAL, MULTI-LINE
        VALUE

      # Plain-element comment
      - &plainEntry Plain entry
    """)
    yaml = YAML()
    yaml.indent(mapping=2, sequence=4, offset=2)
    yaml.explicit_start = True
    yaml.preserve_quotes = True
    yaml.width = sys.maxsize  # never wrap long lines
    data = yaml.load(yamldoc)
    buf = StringIO()
    yaml.dump(data, buf)
    assert buf.getvalue() == yamldoc
def test_issue_288a(self):
    """Round-trip a document with per-element and EOF comments.

    The dump of the loaded document must reproduce the input byte-for-byte.
    NOTE(review): the exact internal line layout of the YAML literal was
    reconstructed from a whitespace-mangled source — verify against the
    original test before relying on it.
    """
    import sys
    from ruamel.yaml.compat import StringIO
    from ruamel.yaml import YAML

    yamldoc = dedent("""\
    ---
    # Reusable values
    aliases:
      # First-element comment
      - &firstEntry First entry
      # Second-element comment
      - &secondEntry Second entry

      # Third-element comment is
      # a multi-line value
      - &thirdEntry Third entry

    # EOF Comment
    """)
    yaml = YAML()
    yaml.indent(mapping=2, sequence=4, offset=2)
    yaml.explicit_start = True
    yaml.preserve_quotes = True
    yaml.width = sys.maxsize  # never wrap long lines
    data = yaml.load(yamldoc)
    buf = StringIO()
    yaml.dump(data, buf)
    assert buf.getvalue() == yamldoc
def send(self, value):
    """Serialize, compress and frame *value*, then stream it over the TCP socket.

    :param value: The message to send.
    :raises RuntimeError: if the socket is not connected.
    """
    if not self.is_connected():
        raise RuntimeError('Try to send on unconnected socket.')
    logger.debug('socket send: %s', value)

    # YAML-serialize the message to a string.
    buffer = StringIO()
    yaml.dump(value, buffer)
    # Compress the UTF-8 encoded payload.
    payload = zlib.compress(buffer.getvalue().encode())
    # Frame in a QByteArray and write through a versioned data stream.
    wrapped = QtCore.QByteArray(payload)
    writer = QtCore.QDataStream(self.socket)
    writer.setVersion(QtCore.QDataStream.Qt_5_5)
    writer << wrapped
def remove_db_from_config(self):
    """Strip RDS settings from the saved environment configuration template.

    Downloads the template from S3, removes all RDS configuration sections,
    uploads the stripped copy next to it ("-updated" suffix), and creates a
    new Elastic Beanstalk environment (without an attached RDS DB) from it.
    """
    s3 = boto3.resource('s3')
    template_key = ('resources/templates/' + self._app_name + '/' +
                    self._template_name + "-" + self._oenv_id)
    raw = s3.Object(self._s3_bucket, template_key).get()['Body'].read().decode('utf-8')
    self._updated_template = self._template_name + "-" + self._oenv_id + '-updated'

    yaml = YAML()
    template = yaml.load(raw)
    # Drop every RDS-related section from the template.
    del template["OptionSettings"]["aws:rds:dbinstance"]
    del template["Extensions"]["RDS.EBConsoleSnippet"]

    out = StringIO()
    yaml.dump(template, out)
    # Upload the stripped template under the "-updated" key.
    obj = s3.Object(self._s3_bucket, template_key + '-updated').put(Body=out.getvalue())

    try:
        client = self.client_create('elasticbeanstalk')
        response = client.create_environment(
            ApplicationName=self._app_name,
            EnvironmentName=self._new_env,
            Description='decoupled Env without RDS',
            TemplateName=self._updated_template)
    except Exception as e:
        # NOTE(review): the exception object is returned rather than raised;
        # callers apparently inspect the return value — confirm before changing.
        return e
def dump(self, data, stream=None, **kwargs):
    """Dump *data* as YAML, returning a string when no stream is given.

    The upstream YAML parser only serializes to streams; this wrapper buffers
    through a StringIO when the caller wants a string back.  See
    https://yaml.readthedocs.io/en/latest/example.html#output-of-dump-as-a-string.

    Note: not meant to be called directly; it backs the yaml convenience
    methods below.

    Args:
        data (`object`): An object, usually dict-like.
        stream (`None` | stream, optional): A stream object to write the YAML.
            If default `None`, return value as string.
        **kwargs: Keywords passed to the `dump` function.

    Returns:
        `str`: The serialized object string (only when *stream* is `None`).
    """
    to_string = stream is None
    if to_string:
        stream = StringIO()
    yaml = YAML()
    yaml.dump(data, stream, **kwargs)
    if to_string:
        return stream.getvalue()
def base64_decode_secrets(content: str) -> str:
    """Base64 decode the ``data`` values of a Kubernetes Secret yaml file.

    :param content: The content of the yaml file
    :return str: The base64 decoded version of the yaml file
    """
    ruamel_yaml = YAML()
    secrets = ruamel_yaml.load(content)
    data = secrets["data"]
    for key in data:
        encoded = data[key]
        if encoded is None:
            continue
        decoded = normalize_line_endings(base64decode(encoded))
        # Multi-line values (e.g. PEM keys) are emitted in literal block
        # style (|) so they appear on multiple lines in the final file
        # rather than as one long string containing "\n".
        data[key] = LiteralScalarString(decoded) if "\n" in decoded else decoded
    out = StringIO()
    ruamel_yaml.dump(secrets, out)
    # Normalize trailing whitespace to exactly one newline.
    return out.getvalue().rstrip() + "\n"
def test_issue_222(self):
    """safe_dump must keep a numeric-looking string quoted."""
    import ruamel.yaml
    from ruamel.yaml.compat import StringIO

    out = StringIO()
    ruamel.yaml.safe_dump(['012923'], out)
    assert out.getvalue() == "['012923']\n"
def get_config(self, mode="typed"):
    """Return this context's config in one of several representations.

    Supported modes: "typed" (the config object), "commented_map",
    "dict", and "yaml" (serialized string).  Raises ValueError otherwise.
    """
    config = super().get_config()
    if mode == "typed":
        return config
    if mode == "commented_map":
        return config.commented_map
    if mode == "dict":
        return dict(config.commented_map)
    if mode == "yaml":
        # Work on a deep copy so the live commented map is untouched.
        commented_map = copy.deepcopy(config.commented_map)
        commented_map.update(dataContextConfigSchema.dump(config))
        stream = StringIO()
        yaml.dump(commented_map, stream)
        return stream.getvalue()
    raise ValueError(f"Unknown config mode {mode}")
def dump(self, data):
    """Return a YAML string serialization of *data* (usually a dict).

    Uses ``ruamel.yaml.safe_dump`` with block style
    (``default_flow_style=False``).
    """
    # FIX: the original built a configured ``YAML()`` instance
    # (indent(mapping=4, sequence=6, offset=3)) but then serialized with
    # ``ruamel.yaml.safe_dump``, so the configured indentation was silently
    # ignored (dead code).  The unused instance is removed here; the output
    # is unchanged.
    # TODO(review): if the custom indentation was actually intended, switch
    # to ``y = YAML(); y.indent(mapping=4, sequence=6, offset=3);
    # y.dump(data, stream)`` instead.
    stream = StringIO()
    ruamel.yaml.safe_dump(data, stream, default_flow_style=False)
    return stream.getvalue()
def dump(obj: dict, default_flow_style=False) -> str:
    """Serialize *obj* to a YAML string, preserving quotes."""
    dumper = YAML()
    dumper.default_flow_style = default_flow_style
    dumper.preserve_quotes = True
    buf = StringIO()
    dumper.dump(obj, buf)
    return buf.getvalue()
def ruamel_yaml_dump(yaml_obj):
    """Dump *yaml_obj* to a YAML string via ruamel.yaml.

    NOTE: be careful — ruamel.yaml doesn't work well with dpath.
    """
    buf = StringIO()
    YAML().dump(yaml_obj, stream=buf)
    return buf.getvalue()
def assert_graphql_resp_expected(resp_orig, exp_response_orig, query):
    """Assert a GraphQL response matches the expected one, ignoring key
    ordering outside the query's selection sets.

    Returns ``(resp, matched)``; the assertion (and thus a False ``matched``)
    is skipped entirely when pytest runs with ``--accept``.
    """
    # Prepare actual and expected responses so comparison takes into
    # consideration only the ordering that we care about:
    resp = collapse_order_not_selset(resp_orig, query)
    exp_response = collapse_order_not_selset(exp_response_orig, query)
    matched = equal_CommentedMap(resp, exp_response)
    if PytestConf.config.getoption("--accept"):
        print('skipping assertion since we chose to --accept new output')
    else:
        yml = yaml.YAML()
        # https://yaml.readthedocs.io/en/latest/example.html#output-of-dump-as-a-string :
        dump_str = StringIO()
        yml.dump(
            {
                # Keep strict received order when displaying errors:
                'response': resp_orig,
                'expected': exp_response_orig,
                'diff': (lambda diff: "(results differ only in their order of keys)"
                         if diff == {} else diff)(stringify_keys(
                             jsondiff.diff(exp_response, resp)))
            },
            stream=dump_str)
        assert matched, dump_str.getvalue()
    return resp, matched  # matched always True unless --accept
def round_trip(self, input, output=None, yaml_version=None):
    """Load *input*, dump it again, and assert the dump matches.

    When *output* is given, the dump is compared against it instead of the
    original input.
    """
    from ruamel.yaml.compat import StringIO

    yaml, data = self.yaml_load(input.value, yaml_version=yaml_version)
    out = StringIO()
    yaml.dump(data, out)
    expected = output.value if output is not None else input.value
    assert out.getvalue() == expected
def write_compose(config, fname='docker-compose.yml'):  # pragma: no cover
    """Write *config* to a compose file, previewing it on dry-run/verbose."""
    opts = ctx_opts()
    if opts.dry_run or opts.verbose:
        # Render to a string first so the preview matches what gets written.
        preview = StringIO()
        yaml.dump(config, preview)
        show_data(fname, preview.getvalue())
    if not opts.dry_run:
        yaml.dump(config, Path(fname))
def round_trip(ge):
    """Dump *ge*, reload it, dump again; report and return whether both agree."""
    first = StringIO()
    Y.dump(ge, first)
    once = first.getvalue()

    second = StringIO()
    Y.dump(Y.load(once), second)
    twice = second.getvalue()

    if once == twice:
        print("Round-trip test passed.")
    else:
        print("TMP1: \n" + once)
        print("TMP2: \n" + twice)
    return once == twice
def dump(self, data, stream=None, **kw):
    """Dump *data*; when no stream is given, return the YAML as a string."""
    to_string = stream is None
    if to_string:
        # Buffering through StringIO is less efficient than a real stream,
        # but it lets us hand back a plain string.
        stream = StringIO()
    YAML.dump(self, data, stream, **kw)
    if to_string:
        return stream.getvalue()
def test_issue_222(self):
    """The safe dumper must keep a numeric-looking string quoted."""
    import ruamel.yaml
    from ruamel.yaml.compat import StringIO

    yaml = ruamel.yaml.YAML(typ='safe')
    out = StringIO()
    yaml.dump(['012923'], out)
    assert out.getvalue() == "['012923']\n"
def update(directory, project_descriptor, params):
    """Merge *params* into the project descriptor and write it back to disk."""
    project_descriptor.yaml_document.update(params)
    buf = StringIO()
    YAML().dump(project_descriptor.yaml_document, stream=buf)
    filesystem.write_file(f'{directory}/{PROJECT_DESCRIPTOR_FILE}', buf.getvalue())
def dump(self, data, stream=None, **kw):
    """Like ``YAML.dump``, but returns a string when *stream* is omitted."""
    return_string = stream is None
    if return_string:
        stream = StringIO()
    YAML.dump(self, data, stream, **kw)
    if return_string:
        return stream.getvalue()
def dump(self, data, stream=None, **kw):  # pylint: disable=arguments-differ
    """Serialize *data*; hand back a string when no *stream* is supplied."""
    buffered = stream is None
    stream = StringIO() if buffered else stream
    YAML.dump(self, data, stream, **kw)
    if buffered:
        return stream.getvalue()
def dump(self, data, stream=None, **kw):
    """Dump *data* to *stream*, or return the YAML text if *stream* is None."""
    to_str = stream is None
    if to_str:
        stream = StringIO()
    ruamel.yaml.YAML.dump(self, data, stream, **kw)
    if to_str:
        return stream.getvalue()
def build_toc(content_folder, filename_split_char='_'):
    """Auto-generate a Table of Contents from files/folders.

    Parameters
    ----------
    content_folder : str
        Path to the folder where content exists. The TOC will be generated
        according to the alphanumeric sort of these files/folders.
    filename_split_char : str
        The character used in inferring spaces in page names from filenames.
    """
    content_folder = Path(content_folder)
    if not content_folder.is_dir():
        raise ValueError(
            f"Could not find the provided content folder\n{content_folder}")

    def _page_entry(path):
        # Map a content file to a TOC page dict (title inferred from name).
        stem = path.with_suffix('')
        return {'title': _filename_to_title(stem.name, filename_split_char),
                'url': str(Path(*stem.parts[1:]))}

    # Top-level pages first, in the order _list_supported_files yields them.
    toc_pages = [_page_entry(p) for p in _list_supported_files(content_folder)]

    # Each top-level subdirectory becomes a section with its own pages.
    subdirectories = sorted(
        sub for sub in content_folder.glob('*')
        if sub.is_dir() and '.ipynb_checkpoints' not in sub.name)
    for subdir in subdirectories:
        ipaths = _list_supported_files(subdir, rglob=True)
        if len(ipaths) == 0:
            continue
        # Sentinel marking a section break; stripped from the YAML below.
        toc_pages.append("## REPLACE ##")
        toc_pages.append(
            {'header': _filename_to_title(subdir.name, filename_split_char)})
        toc_pages.extend(_page_entry(p) for p in ipaths)

    # Serialize the page list to YAML and splice it below the header blocks.
    out = [YAML_TOP, YAML_WARN]
    yaml = YAML()
    string = StringIO()
    yaml.dump(toc_pages, string)
    out.append(string.getvalue().replace("- '## REPLACE ##'", ''))
    return '\n'.join(out)
def dump(self, data, stream=None, **kw):
    """Dump with sequence indent 4 / offset 2; return a string if no stream."""
    buffered = stream is None
    if buffered:
        stream = StringIO()
    self.indent(sequence=4, offset=2)
    YAML.dump(self, data, stream, **kw)
    if buffered:
        return stream.getvalue()
def object_to_yaml(data: JSON_TYPE) -> str:
    """Create yaml string from object."""
    dumper = YAML(typ='rt')
    dumper.indent(sequence=4, offset=2)
    buf = StringIO()
    try:
        dumper.dump(data, buf)
    except YAMLError as exc:
        # Surface serialization problems as the project's own error type.
        _LOGGER.error("YAML error: %s", exc)
        raise HomeAssistantError(exc)
    return buf.getvalue()
def discover_files(walk_dir):
    """Walk *walk_dir* and rewrite the YAML front matter of .html/.md files.

    For each eligible file the front matter is loaded, pruned
    (keys/tags/categories), re-dumped, and the file is rewritten atomically
    via a ``.bak`` sibling which replaces the original on success.
    Generated/vendored directories are skipped entirely.
    """
    print('walk_dir = ' + walk_dir)
    print('walk_dir (absolute) = ' + os.path.abspath(walk_dir))
    for root, subdirs, files in os.walk(walk_dir):
        # Skip build output, assets and vendored/tooling directories.
        if (root.startswith(walk_dir + 'images') or
                root.startswith(walk_dir + '_site') or
                root.startswith(walk_dir + 'downloads') or
                root.startswith(walk_dir + '.grunt') or
                root.startswith(walk_dir + 'scripts') or
                root.startswith(walk_dir + '_sass') or
                root.startswith(walk_dir + '_plugins') or
                root.startswith(walk_dir + 'bower_components') or
                root.startswith(walk_dir + 'node_modules') or
                root.startswith(walk_dir + '.github')):
            continue
        for filename in files:
            _, extension = os.path.splitext(filename)
            if extension is None:
                continue
            # Only process HTML and Markdown content.
            if extension.lower() not in ['.html', '.md']:
                continue
            if filename.lower() in ['readme.md', 'yaml']:
                continue
            file_path = os.path.join(root, filename)
            # print('\t- file %s (full path: %s)' % (filename, file_path))
            # Write the rewritten file to a .bak sibling first, then swap.
            with open(file_path, 'r') as original, open(file_path + '.bak', 'w') as updated:
                try:
                    front_matter = yaml.load(pluck_yaml(original))
                except Exception as e:
                    # Unparseable front matter: report, discard .bak, move on.
                    print(e)
                    os.remove(file_path + '.bak')
                    continue
                if front_matter is None:
                    # No front matter found: leave the file untouched.
                    print('skipping {}'.format(original))
                    # os.rename(file_path + '.bak', file_path)
                    os.remove(file_path + '.bak')
                    continue
                # Prune unwanted keys, then normalize tags and categories.
                front_matter = prune_keys(front_matter)
                tags = prune_tags(front_matter)
                if tags is not None:
                    front_matter['tags'] = tags
                categories = prune_categories(front_matter)
                if categories is not None:
                    front_matter['categories'] = categories
                # Re-serialize the pruned front matter to a YAML string.
                stream = StringIO()
                yaml.dump((front_matter), stream)
                front_matter = stream.getvalue()
                content = pluck_content(original)
                # Emit the standard front-matter fences around the new YAML.
                updated.write('---\n')
                updated.write(front_matter)
                updated.write('---\n')
                updated.write(content)
            # Replace the original with the rewritten copy.
            os.rename(file_path + '.bak', file_path)