def diff(redash_url, api_key, in_file):
    """Write an HTML diff between the server's queries and a local YAML file.

    Both sides are normalized (sorted, scalar styles walked) and dumped as
    round-trip YAML before being compared; the difflib HTML table goes to
    stdout.
    """
    if in_file is None:
        click.echo('No file provided')
        return

    server = redash.Redash(redash_url, api_key)

    def dump_sorted(queries):
        # Normalize ordering and scalar style so the diff only reflects
        # real content differences.
        ordered = sort_queries(queries)
        yaml.scalarstring.walk_tree(ordered)
        buf = StringIO()
        yaml.dump(ordered, buf, Dumper=yaml.RoundTripDumper)
        return buf.getvalue().strip().splitlines()

    old_lines = dump_sorted(server.Get_Full_Queries(server.Get_Queries()))
    new_lines = dump_sorted(read_yaml(in_file))

    # NOTE(review): "test.html" is passed as HtmlDiff.make_file's *fromdesc*
    # (a column header label), not an output filename — confirm intent.
    html = difflib.HtmlDiff().make_file(old_lines, new_lines, "test.html")
    sys.stdout.writelines(html)
def test_update_image_apply(docs, data):
    """Property test: update_image changes exactly the drawn container's
    image and leaves every other manifest untouched."""
    # Flatten docs (which may contain List manifests) into individual manifests.
    originals = [man for doc in docs for man in kubeyaml.manifests(doc)]
    workloads = [wl for wl in originals if wl['kind'] in workload_kinds]
    assume(len(workloads) > 0)
    # Make sure we got workloads with different IDs
    assume(len(workloads) == len(set(map(resource_id, workloads))))
    # Draw one workload, then one of its containers, to target.
    ind = data.draw(strats.integers(min_value=0, max_value=len(workloads) - 1))
    workload = workloads[ind]
    containers = kubeyaml.containers(workload)
    assume(len(containers) > 0)
    yaml = kubeyaml.yaml()
    original = StringIO()
    for d in docs:
        yaml.dump(d, original)
    # Inject random comments so comment round-tripping is exercised too.
    originalstr = comment_yaml(data.draw, original.getvalue())
    note('Original:\n%s\n' % originalstr)
    indc = data.draw(
        strats.integers(min_value=0, max_value=len(containers) - 1))
    spec = Spec.from_resource(workload)
    spec.container = containers[indc]['name']
    spec.image = data.draw(images_with_tag)
    note('Spec: %r' % spec)
    infile, outfile = StringIO(originalstr), StringIO()
    kubeyaml.apply_to_yaml(lambda ds: kubeyaml.update_image(spec, ds),
                           infile, outfile)
    # A rough check that the docs are in the same grouping into Lists,
    # since we'll look at individual manifests, ignoring whether they
    # are in Lists, after this.
    updateddocs = list(yaml.load_all(outfile.getvalue()))
    assert (len(docs) == len(updateddocs))
    for i in range(len(docs)):
        assert (updateddocs[i]['kind'] == docs[i]['kind'])
    # check that the selected manifest->container has the updated
    # image; and, the rest are unchanged.
    updateds = [man for doc in updateddocs for man in kubeyaml.manifests(doc)]
    assert (len(originals) == len(updateds))
    found = False
    for i in range(len(originals)):
        if kubeyaml.match_manifest(spec, updateds[i]):
            assert not found, "spec matched more than one manifest"
            c = kubeyaml.find_container(spec, updateds[i])
            assert c is not None
            assert c['image'] == spec.image
            found = True
        else:
            assert manifests_equiv(originals[i], updateds[i])
    assert found
def remove_updater_creator_from_manifest(manifest: str) -> str:
    """Strip the platform-injected updater/creator annotation lines.

    :param manifest: YAML content of the resource
    :return: the content with creator/updater annotation lines removed
    """
    kept_lines = [
        line
        for line in StringIO(manifest).readlines()
        if "io.tencent.paas.creator" not in line
        and "io.tencent.paas.updator" not in line
    ]
    return "".join(kept_lines)
def test_unicode_output(unicode_filename, verbose=False):
    # NOTE(review): this test relies on Python 2 semantics — it calls
    # .decode() on `str` dump results and references the `unicode` type.
    with open(unicode_filename, 'rb') as fp:
        data = fp.read().decode('utf-8')
    # Collapse all whitespace into single spaces to get one scalar value.
    value = ' '.join(data.split())
    for allow_unicode in [False, True]:
        data1 = yaml.dump(value, allow_unicode=allow_unicode)
        for encoding in [None, 'utf-8', 'utf-16-be', 'utf-16-le']:
            # Dump the same value through four different output paths.
            stream = StringIO()
            yaml.dump(
                value,
                _unicode_open(stream, 'utf-8'),
                encoding=encoding,
                allow_unicode=allow_unicode,
            )
            data2 = stream.getvalue()
            data3 = yaml.dump(value, encoding=encoding, allow_unicode=allow_unicode)
            stream = StringIO()
            yaml.dump(value, stream, encoding=encoding, allow_unicode=allow_unicode)
            data4 = stream.getvalue()
            for copy in [data1, data2, data3, data4]:
                if allow_unicode:
                    # With allow_unicode the output must contain non-ASCII
                    # characters, so encoding to ASCII must fail.
                    try:
                        copy[4:].encode('ascii')
                    except (UnicodeDecodeError, UnicodeEncodeError) as exc:
                        if verbose:
                            print(exc)
                    else:
                        raise AssertionError('expected an exception')
                else:
                    copy[4:].encode('ascii')
            assert isinstance(data1, str), (type(data1), encoding)
            data1.decode('utf-8')
            assert isinstance(data2, str), (type(data2), encoding)
            data2.decode('utf-8')
            if encoding is None:
                # No encoding requested: dump returns unicode text.
                assert isinstance(data3, unicode), (type(data3), encoding)  # NOQA
                assert isinstance(data4, unicode), (type(data4), encoding)  # NOQA
            else:
                assert isinstance(data3, str), (type(data3), encoding)
                data3.decode(encoding)
                assert isinstance(data4, str), (type(data4), encoding)
                data4.decode(encoding)
def prettier(yaml_input_dict, check_type=True):
    """Return a prettified YAML string for a Komodo configuration mapping.

    Keys are emitted sorted case-insensitively and the original ruamel
    comment attachments are carried over.
    """
    dumper = ruamel.yaml.YAML()
    # Komodo prefers two space indentation.
    dumper.indent(mapping=2, sequence=4, offset=2)
    dumper.width = 1000  # Avoid ruamel wrapping long lines

    is_repo = check_type and is_repository(yaml_input_dict)

    # CommentedMap preserves insertion order and can carry comments;
    # on Python 3.6+ an ordinary dict would also keep insertion order.
    ordered = ruamel.yaml.comments.CommentedMap()
    for key in sorted(yaml_input_dict, key=str.lower):
        ordered[key] = yaml_input_dict[key]
    setattr(ordered, ruamel.yaml.comments.comment_attrib, yaml_input_dict.ca)

    out = StringIO()
    dumper.dump(
        ordered,
        out,
        transform=functools.partial(repository_specific_formatting, is_repo),
    )
    if sys.version_info < (3, 0):
        # Python 2 callers expect an encoded byte-string.
        return out.getvalue().encode("utf-8")
    return out.getvalue()
def dump(obj: dict, default_flow_style=False) -> str:
    """Serialize *obj* to a YAML string, preserving quotes."""
    dumper = YAML()
    dumper.preserve_quotes = True
    dumper.default_flow_style = default_flow_style
    buffer = StringIO()
    dumper.dump(obj, buffer)
    return buffer.getvalue()
def remove_db_from_config(self):
    # Uses ruamel.yaml to remove all RDS configuration settings from the saved
    # environment configuration template in S3 and uploads the new version back
    # (under an '-updated' suffix) to the same prefix in S3. This newer saved
    # configuration template is then used to create a new Elastic Beanstalk
    # environment without an attached RDS dB.
    s3 = boto3.resource('s3')
    # Fetch the saved configuration template for the original environment.
    obj = s3.Object(self._s3_bucket, ('resources/templates/' + self._app_name + '/' + self._template_name + "-" + self._oenv_id)).get()['Body'].read().decode('utf-8')
    self._updated_template = self._template_name + "-" + self._oenv_id + '-updated'
    yaml = YAML()
    test = yaml.load(obj)
    # Drop the RDS-related sections from the template.
    del test["OptionSettings"]["aws:rds:dbinstance"]
    del test["Extensions"]["RDS.EBConsoleSnippet"]
    stream = StringIO()
    yaml.dump(test, stream)
    new = stream.getvalue()
    # Upload the stripped template alongside the original.
    obj = s3.Object(self._s3_bucket, ('resources/templates/' + self._app_name + '/' + self._template_name + "-" + self._oenv_id + '-updated')).put(Body=new)
    try:
        client = self.client_create('elasticbeanstalk')
        response = client.create_environment(
            ApplicationName=self._app_name,
            EnvironmentName=self._new_env,
            Description='decoupled Env without RDS',
            TemplateName=self._updated_template)
    except Exception as e:
        # NOTE(review): the exception is returned rather than raised, and
        # success returns None implicitly — callers must check the result.
        return e
def run(files, template_args):
    """Deep-merge the given YAML files and emit the result.

    With no *template_args* the merged document is dumped to stdout;
    otherwise it is rendered through ``render`` with those arguments
    and printed.
    """
    yaml = ruamel.yaml.YAML()
    data = {}

    def merge(source, dest):
        # Recursively fold `source` into `dest`: dicts merge key-wise,
        # lists concatenate, scalars overwrite.
        for key, value in source.items():
            if isinstance(value, dict):
                node = dest.setdefault(key, {})
                merge(value, node)
            elif isinstance(value, list):
                dest[key] = dest.setdefault(key, []) + value
            else:
                dest[key] = value

    for f in files:
        with open(f) as fp:
            merge(yaml.load(fp), data)

    if template_args is None:
        yaml.dump(data, sys.stdout)
    else:
        stream = StringIO()
        yaml.dump(data, stream)
        # Bug fix: the original used the Python 2 `print x` statement form,
        # a SyntaxError on Python 3. The call form behaves identically on
        # Python 2 for a single argument.
        print(render(stream.getvalue(), template_args))
def test_issue_290a(self):
    """Round-trip preservation of comments attached to folded, literal and
    plain alias entries (ruamel.yaml issue 290)."""
    import sys
    from ruamel.yaml.compat import StringIO
    from ruamel.yaml import YAML

    yamldoc = dedent("""\
    ---
    aliases:
      # Folded-element comment
      # for a multi-line value
      - &FoldedEntry >
        THIS IS A
        FOLDED, MULTI-LINE
        VALUE

      # Literal-element comment
      # for a multi-line value
      - &literalEntry |
        THIS IS A
        LITERAL, MULTI-LINE
        VALUE

      # Plain-element comment
      - &plainEntry Plain entry
    """)
    yaml = YAML()
    yaml.indent(mapping=2, sequence=4, offset=2)
    yaml.explicit_start = True
    yaml.preserve_quotes = True
    # Disable line wrapping so long scalars round-trip unchanged.
    yaml.width = sys.maxsize
    data = yaml.load(yamldoc)
    buf = StringIO()
    yaml.dump(data, buf)
    # Dumping must reproduce the input document byte-for-byte.
    assert buf.getvalue() == yamldoc
def test_issue_288a(self):
    """Round-trip preservation of element comments and the EOF comment in a
    commented alias sequence (ruamel.yaml issue 288)."""
    import sys
    from ruamel.yaml.compat import StringIO
    from ruamel.yaml import YAML

    yamldoc = dedent("""\
    ---
    # Reusable values
    aliases:
      # First-element comment
      - &firstEntry First entry
      # Second-element comment
      - &secondEntry Second entry

      # Third-element comment is
      # a multi-line value
      - &thirdEntry Third entry

    # EOF Comment
    """)
    yaml = YAML()
    yaml.indent(mapping=2, sequence=4, offset=2)
    yaml.explicit_start = True
    yaml.preserve_quotes = True
    # Disable line wrapping so long scalars round-trip unchanged.
    yaml.width = sys.maxsize
    data = yaml.load(yamldoc)
    buf = StringIO()
    yaml.dump(data, buf)
    # Dumping must reproduce the input document byte-for-byte.
    assert buf.getvalue() == yamldoc
def test_registrable_factory_roundtrip_alias(make_aliased_classes):
    """A config built from aliased factory tags loads correctly and dumps
    back using each class's default alias."""
    A, B = make_aliased_classes
    txt = """a: !a_class.some_factory
  akw1: 8
  akw2: !b_
    bkw1: 2
    bkw2: hello world
"""
    # NOTE(review): currently identical to `txt`; kept separate in case the
    # default alias ever differs from the tag used on input.
    txt_default_alias = """a: !a_class.some_factory
  akw1: 8
  akw2: !b_
    bkw1: 2
    bkw2: hello world
"""
    config = yaml.load(txt)
    a = config['a']
    assert a.akw1 == 8
    assert a.akw2 is not None
    assert hasattr(a.akw2, "bkw1")
    assert a.akw2.bkw1 == 2
    assert isinstance(a, A)
    with StringIO() as s:
        yaml.dump(config, s)
        # Dump must use the default alias form.
        assert s.getvalue() == txt_default_alias
def test_file_output(unicode_filename, verbose=False):
    """All four output paths (StringIO, BytesIO, text file, binary file)
    must produce identical YAML for the same unicode input."""
    yaml = YAML(typ='safe', pure=True)
    with open(unicode_filename, 'rb') as src:
        data = src.read().decode('utf-8')
    handle, tmppath = tempfile.mkstemp()
    os.close(handle)
    try:
        text_buf = StringIO()
        yaml.dump(data, text_buf, allow_unicode=True)
        from_str = text_buf.getvalue()

        byte_buf = BytesIO()
        yaml.dump(data, byte_buf, encoding='utf-16-le', allow_unicode=True)
        # [1:] strips the BOM the utf-16 encoder prepends.
        from_bytes = byte_buf.getvalue().decode('utf-16-le')[1:]

        with open(tmppath, 'w', encoding='utf-16-le') as out:
            yaml.dump(data, out, allow_unicode=True)
        with open(tmppath, 'r', encoding='utf-16-le') as back:
            from_text_file = back.read()

        with open(tmppath, 'wb') as out:
            yaml.dump(data, out, encoding='utf-8', allow_unicode=True)
        with open(tmppath, 'r', encoding='utf-8') as back:
            from_binary_file = back.read()

        assert from_str == from_bytes, (from_str, from_bytes)
        assert from_str == from_text_file, (from_str, from_text_file)
        assert from_str == from_binary_file, (from_str, from_binary_file)
    finally:
        if os.path.exists(tmppath):
            os.unlink(tmppath)
def emit(events, stream=None, Dumper=Dumper, canonical=None, indent=None,
         width=None, allow_unicode=None, line_break=None):
    # type: (Any, StreamType, Any, bool, Union[int, None], int, bool, Any) -> Any
    """
    Emit YAML parsing events into a stream.
    If stream is None, return the produced string instead.
    """
    to_string = stream is None
    if to_string:
        stream = StringIO()
    dumper = Dumper(
        stream,
        canonical=canonical,
        indent=indent,
        width=width,
        allow_unicode=allow_unicode,
        line_break=line_break,
    )
    try:
        for event in events:
            dumper.emit(event)
    finally:
        # Always release the emitter, even if an event fails to emit.
        dumper._emitter.dispose()
    if to_string:
        return stream.getvalue()
def dump(self, data):
    """Return a YAML string serialized from a python dict.

    :param data: the mapping to serialize
    :return: the YAML document as a string
    """
    yaml = YAML()
    # House style: 4-space mapping indent, 6/3 for sequences.
    yaml.indent(mapping=4, sequence=6, offset=3)
    stream = StringIO()
    # Bug fix: this previously called ruamel.yaml.safe_dump(), which silently
    # ignored the indentation configured on the YAML instance above. Dump via
    # the configured instance so the indent settings actually take effect.
    yaml.dump(data, stream)
    return stream.getvalue()
def test_issue_222(self):
    """A numeric-looking string must stay quoted through safe_dump."""
    import ruamel.yaml
    from ruamel.yaml.compat import StringIO

    out = StringIO()
    ruamel.yaml.safe_dump(['012923'], out)
    assert out.getvalue() == "['012923']\n"
def test_unicode_input_errors(unicode_filename, verbose=False):
    """Mis-encoded or mislabeled byte inputs must raise YAMLError, both as
    raw bytes and when wrapped in a stream."""
    with open(unicode_filename, 'rb') as fp:
        data = fp.read().decode('utf-8')
    bad_inputs = [
        data.encode('latin1', 'ignore'),
        data.encode('utf-16-be'),
        data.encode('utf-16-le'),
        codecs.BOM_UTF8 + data.encode('utf-16-be'),
        codecs.BOM_UTF16_BE + data.encode('utf-16-le'),
        codecs.BOM_UTF16_LE + data.encode('utf-8') + b'!',
    ]
    for blob in bad_inputs:
        try:
            yaml.load(blob)
        except yaml.YAMLError as exc:
            if verbose:
                print(exc)
        else:
            raise AssertionError('expected an exception')
        try:
            yaml.load(BytesIO(blob) if PY3 else StringIO(blob))
        except yaml.YAMLError as exc:
            if verbose:
                print(exc)
        else:
            raise AssertionError('expected an exception')
def ruamel_yaml_dump(yaml_obj):
    """Serialize *yaml_obj* with ruamel.yaml and return the YAML text.

    Be careful: ruamel.yaml objects don't always play well with dpath.
    """
    buffer = StringIO()
    YAML().dump(yaml_obj, stream=buffer)
    return buffer.getvalue()
def serialize_all(nodes, stream=None, Dumper=Dumper, canonical=None, indent=None,
                  width=None, allow_unicode=None, line_break=None, encoding=enc,
                  explicit_start=None, explicit_end=None, version=None, tags=None):
    # type: (Any, StreamType, Any, Any, Union[None, int], Union[None, int], bool, Any, Any, Union[None, bool], Union[None, bool], VersionType, Any) -> Any  # NOQA
    """
    Serialize a sequence of representation trees into a YAML stream.
    If stream is None, return the produced string instead.
    """
    getvalue = None
    if stream is None:
        # Pick a text or byte buffer depending on whether output is encoded.
        if encoding is None:
            stream = StringIO()
        else:
            stream = BytesIO()
        getvalue = stream.getvalue
    dumper = Dumper(stream, canonical=canonical, indent=indent, width=width,
                    allow_unicode=allow_unicode, line_break=line_break,
                    encoding=encoding, version=version, tags=tags,
                    explicit_start=explicit_start, explicit_end=explicit_end)
    try:
        dumper._serializer.open()
        for node in nodes:
            dumper.serialize(node)
        dumper._serializer.close()
    finally:
        try:
            dumper._emitter.dispose()
        except AttributeError:
            # NOTE(review): the bare raise makes the cyaml dispose() call
            # below unreachable — presumably intentional while debugging.
            raise
            dumper.dispose()  # cyaml
    if getvalue is not None:
        return getvalue()
def assert_graphql_resp_expected(resp_orig, exp_response_orig, query):
    """Assert a GraphQL response matches the expected one, ignoring any key
    ordering the query's selection set does not pin down.

    Returns ``(resp, matched)``; with ``--accept`` the assertion is skipped.
    """
    # Normalize both sides so only the ordering we care about is compared.
    resp = collapse_order_not_selset(resp_orig, query)
    exp_response = collapse_order_not_selset(exp_response_orig, query)
    matched = equal_CommentedMap(resp, exp_response)

    if PytestConf.config.getoption("--accept"):
        print('skipping assertion since we chose to --accept new output')
    else:
        yml = yaml.YAML()
        # https://yaml.readthedocs.io/en/latest/example.html#output-of-dump-as-a-string :
        dump_str = StringIO()
        diff = stringify_keys(jsondiff.diff(exp_response, resp))
        if diff == {}:
            diff = "(results differ only in their order of keys)"
        yml.dump(
            {
                # Keep strict received order when displaying errors:
                'response': resp_orig,
                'expected': exp_response_orig,
                'diff': diff,
            },
            stream=dump_str)
        assert matched, dump_str.getvalue()
    return resp, matched  # matched always True unless --accept
def assert_graphql_resp_expected(resp_orig, exp_response_orig, query, resp_hdrs={},
                                 skip_if_err_msg=False, skip_assertion=False,
                                 exp_resp_hdrs={}):
    # NOTE(review): mutable default arguments ({}) are shared across calls;
    # safe only while no caller mutates them.
    print('Reponse Headers: ', resp_hdrs)
    print(exp_resp_hdrs)
    # Prepare actual and expected responses so comparison takes into
    # consideration only the ordering that we care about:
    resp = collapse_order_not_selset(resp_orig, query)
    exp_response = collapse_order_not_selset(exp_response_orig, query)
    # Bodies must match, and every expected header must be present verbatim.
    matched = equal_CommentedMap(resp, exp_response) and (exp_resp_hdrs or {}).items() <= resp_hdrs.items()
    if PytestConf.config.getoption("--accept"):
        print('skipping assertion since we chose to --accept new output')
    else:
        yml = yaml.YAML()
        # https://yaml.readthedocs.io/en/latest/example.html#output-of-dump-as-a-string :
        dump_str = StringIO()
        test_output = {
            # Keep strict received order when displaying errors:
            'response': resp_orig,
            'expected': exp_response_orig,
            'diff': (lambda diff: "(results differ only in their order of keys)"
                     if diff == {} else diff)
                    (stringify_keys(jsondiff.diff(exp_response, resp))),
            'query': query
        }
        if 'x-request-id' in resp_hdrs:
            test_output['request id'] = resp_hdrs['x-request-id']
        if exp_resp_hdrs:
            # Compare only the headers the expectation mentions.
            diff_hdrs = {key: val for key, val in resp_hdrs.items() if key in exp_resp_hdrs}
            test_output['headers'] = {
                'actual': dict(resp_hdrs),
                'expected': exp_resp_hdrs,
                'diff': (stringify_keys(jsondiff.diff(exp_resp_hdrs, diff_hdrs)))
            }
        yml.dump(test_output, stream=dump_str)
        if not skip_if_err_msg:
            if skip_assertion:
                return resp, matched
            else:
                assert matched, '\n' + dump_str.getvalue()
        elif matched:
            return resp, matched
        else:
            def is_err_msg(msg):
                # An "error response" is any dict with a truthy error/errors key.
                return any(msg.get(x) for x in ['error','errors'])

            def as_list(x):
                return x if isinstance(x, list) else [x]

            # If it is a batch GraphQL query, compare each individual response separately
            for (exp, out) in zip(as_list(exp_response), as_list(resp)):
                matched_ = equal_CommentedMap(exp, out)
                if is_err_msg(exp) and is_err_msg(out):
                    if not matched_:
                        warnings.warn("Response does not have the expected error message\n" + dump_str.getvalue())
                        return resp, matched
                else:
                    if skip_assertion:
                        return resp, matched_
                    else:
                        assert matched_, '\n' + dump_str.getvalue()
    return resp, matched  # matched always True unless --accept
def dump(self, data, stream=None, **kwargs):
    """Serialize *data* as YAML, optionally to a string.

    ruamel's parser only writes to streams; this wrapper buffers into a
    StringIO when no stream is supplied and returns the result as text.
    See https://yaml.readthedocs.io/en/latest/example.html#output-of-dump-as-a-string.

    Note: This class should not be used directly but instead is instantiated
    as part of the yaml convenience methods below.

    Args:
        data (`object`): An object, usually dict-like.
        stream (`None` | stream, optional): A stream object to write the YAML.
            If default `None`, return value as string.
        **kwargs: Keywords passed to the `dump` function.

    Returns:
        `str`: The serialized object string.
    """
    buffered = stream is None
    if buffered:
        stream = StringIO()
    yaml = YAML()
    yaml.dump(data, stream, **kwargs)
    if buffered:
        return stream.getvalue()
def base64_decode_secrets(content: str) -> str:
    """Base64 decode the data values of a Kubernetes Secret yaml file.

    :param content: The content of the yaml file
    :return str: The base64 decoded version of the yaml file
    """
    ruamel_yaml = YAML()
    secrets = ruamel_yaml.load(content)
    data = secrets["data"]
    for key in data:
        raw = data[key]
        if raw is None:
            continue
        decoded = normalize_line_endings(base64decode(raw))
        if "\n" in decoded:
            # Values containing line breaks (e.g. PEM keys) are emitted in
            # literal block style (|) rather than as one long "\n" string.
            decoded = LiteralScalarString(decoded)
        data[key] = decoded
    stream = StringIO()
    ruamel_yaml.dump(secrets, stream)
    return stream.getvalue().rstrip() + "\n"
def get_config(self, mode="typed"):
    """Return this context's configuration in the requested representation.

    Supported modes: "typed" (config object), "commented_map", "dict",
    and "yaml" (serialized string). Raises ValueError otherwise.
    """
    config = super().get_config()
    if mode == "typed":
        return config
    if mode == "commented_map":
        return config.commented_map
    if mode == "dict":
        return dict(config.commented_map)
    if mode == "yaml":
        # Work on a deep copy so the schema dump cannot mutate the live map.
        commented_map = copy.deepcopy(config.commented_map)
        commented_map.update(dataContextConfigSchema.dump(config))
        stream = StringIO()
        yaml.dump(commented_map, stream)
        return stream.getvalue()
    raise ValueError(f"Unknown config mode {mode}")
def send(self, value):
    """
    Sends a message by serializing, compressing and wrapping to a QByteArray,
    then streaming over the TCP socket.

    :param value: The message to send.
    :raises RuntimeError: if the socket is not connected.
    """
    if not self.is_connected():
        raise RuntimeError('Try to send on unconnected socket.')
    logger.debug('socket send: %s', value)
    # serialize value to yaml
    stream = StringIO()
    yaml.dump(value, stream)
    serialized = stream.getvalue()
    # encode to utf-8 bytes and compress
    compressed = zlib.compress(serialized.encode())
    # wrap in QByteArray (fix: local was named `bytearray`, shadowing the builtin)
    payload = QtCore.QByteArray(compressed)
    # write using a data stream
    writer = QtCore.QDataStream(self.socket)
    writer.setVersion(QtCore.QDataStream.Qt_5_5)
    writer << payload
def get_batch_data_and_markers(
        self, batch_spec: BatchSpec) -> Tuple[Any, BatchMarkers]:
    """Load the batch data described by *batch_spec* and build its markers.

    Dispatches on the concrete BatchSpec type (runtime data, local path,
    or S3 object), then applies splitting/sampling and type wrapping.
    """
    # batch_data
    # We need to build a batch_markers to be used in the dataframe
    batch_markers: BatchMarkers = BatchMarkers({
        "ge_load_time": datetime.datetime.now(
            datetime.timezone.utc).strftime("%Y%m%dT%H%M%S.%fZ")
    })
    if isinstance(batch_spec, RuntimeDataBatchSpec):
        # batch_data != None is already checked when RuntimeDataBatchSpec is instantiated
        batch_data = batch_spec.batch_data
    elif isinstance(batch_spec, PathBatchSpec):
        # Local file: resolve the reader function from the method name/path.
        reader_method: str = batch_spec.get("reader_method")
        reader_options: dict = batch_spec.get("reader_options") or {}
        path: str = batch_spec["path"]
        reader_fn: Callable = self._get_reader_fn(reader_method, path)
        batch_data = reader_fn(path, **reader_options)
    elif isinstance(batch_spec, S3BatchSpec):
        if self._s3 is None:
            raise ge_exceptions.ExecutionEngineError(
                f"""PandasExecutionEngine has been passed a S3BatchSpec,
                        but the ExecutionEngine does not have a boto3 client configured. Please check your config."""
            )
        s3_engine = self._s3
        s3_url = S3Url(batch_spec.get("s3"))
        reader_method: str = batch_spec.get("reader_method")
        reader_options: dict = batch_spec.get("reader_options") or {}
        s3_object = s3_engine.get_object(Bucket=s3_url.bucket, Key=s3_url.key)
        logger.debug("Fetching s3 object. Bucket: {} Key: {}".format(
            s3_url.bucket, s3_url.key))
        reader_fn = self._get_reader_fn(reader_method, s3_url.key)
        # Decode the object body using its advertised encoding (utf-8 default)
        # and hand the text to the reader as an in-memory stream.
        batch_data = reader_fn(
            StringIO(s3_object["Body"].read().decode(
                s3_object.get("ContentEncoding", "utf-8"))),
            **reader_options,
        )
    else:
        raise BatchSpecError(
            f"batch_spec must be of type RuntimeDataBatchSpec, PathBatchSpec, or S3BatchSpec, not {batch_spec.__class__.__name__}"
        )
    batch_data = self._apply_splitting_and_sampling_methods(
        batch_spec, batch_data)
    if batch_data.memory_usage().sum() < HASH_THRESHOLD:
        # Only fingerprint small frames; hashing a large frame is expensive.
        batch_markers["pandas_data_fingerprint"] = hash_pandas_dataframe(
            batch_data)
    typed_batch_data = self._get_typed_batch_data(batch_data)
    return typed_batch_data, batch_markers
def dump(self, data, stream=None, **kw):
    """Dump *data* as YAML; return the text when no stream is given."""
    to_string = stream is None
    if to_string:
        stream = StringIO()
    ruamel.yaml.YAML.dump(self, data, stream, **kw)
    return stream.getvalue() if to_string else None
def dump(self, data, stream=None, **kw):
    """Serialize *data*; buffers into a string when no stream is supplied."""
    if stream is not None:
        YAML.dump(self, data, stream, **kw)
        return None
    buf = StringIO()
    YAML.dump(self, data, buf, **kw)
    return buf.getvalue()
def dump(self, data, stream=None, **kw):  # pylint: disable=arguments-differ
    """Dump YAML to *stream*, or return the text when *stream* is None."""
    buffer_output = stream is None
    if buffer_output:
        stream = StringIO()
    YAML.dump(self, data, stream, **kw)
    if buffer_output:
        return stream.getvalue()
def dump(self, data, stream=None, **kw):
    """Wrapper around YAML.dump that can also return a string.

    ruamel insists on a stream; when none is given we dump into a
    temporary StringIO and hand back its contents.
    """
    target = stream if stream is not None else StringIO()
    YAML.dump(self, data, target, **kw)
    if stream is None:
        return target.getvalue()
def round_trip(ge):
    """Dump *ge*, reload it, dump again, and report whether both dumps match."""
    first_buf = StringIO()
    Y.dump(ge, first_buf)
    first = first_buf.getvalue()

    second_buf = StringIO()
    Y.dump(Y.load(first), second_buf)
    second = second_buf.getvalue()

    if first == second:
        print("Round-trip test passed.")
    else:
        print("TMP1: \n" + first)
        print("TMP2: \n" + second)
    return first == second