import numpy as np
from ruamel.yaml import YAML


def main():
    # build a small data dictionary
    a = np.array([1.0, 1.244840490])
    b = np.array([2.0, 4.56654])
    c = {0: a.tolist(), 1: b.tolist()}

    # wrap it in an object instance
    my_instance = MyClass(c)

    # register the class with a round-trip YAML instance and dump the object
    yaml = YAML()
    yaml.register_class(MyClass)
    with open("test.yml", "w") as fid:
        yaml.dump(my_instance, fid)

    # load the just-created file with a safe, pure-Python loader
    yaml1 = YAML(typ="safe", pure=True)
    yaml1.register_class(MyClass)
    with open("test.yml", "r") as fid:
        read_out_obj = yaml1.load(fid)

    # check whether the attributes survived the round trip
    are_they_the_same = my_instance.__dict__ == read_out_obj.__dict__
    return are_they_the_same
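# The round trip above assumes a user-defined MyClass; a minimal sketch that
# would work with register_class might look like this (illustrative only, not
# the original author's definition):
class MyClass:
    def __init__(self, data):
        self.data = data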
def parse(content: str) -> dict:
    yaml = YAML(typ='safe')
    yaml.register_class(SimpleRegex)
    yaml.register_class(NestedRegex)
    yaml.register_class(ParamsRegex)
    yaml.register_class(Dictionary)
    yaml.register_class(List)
    yaml.register_class(Char)
    yaml.register_class(Bool)
    return yaml.load(content)
def write_ybe_string(ybe_exam, minimal=True):
    """Dump the provided YbeExam as a .ybe formatted string.

    Args:
        ybe_exam (ybe.lib.ybe_contents.YbeExam): the ybe file contents to dump
        minimal (boolean): if set to False we print all available fields.
            If set to True (the default) we print only fields with non-default values.

    Returns:
        str: an .ybe (Yaml) formatted string
    """
    visitor = YbeConversionVisitor(minimal=minimal)
    content = {'ybe_version': __version__}
    content.update(visitor.convert(ybe_exam))

    yaml = YAML(typ='rt')
    yaml.register_class(TextHTML)
    yaml.register_class(TextMarkdown)
    yaml.default_flow_style = False
    yaml.allow_unicode = True
    yaml.width = float('inf')
    yaml.indent(mapping=4, offset=4, sequence=4)

    def beautify_line_spacings(s):
        ret_val = ''
        previous_new_line = ''
        in_questions_block = False
        for line in s.splitlines(True):
            new_line = line

            if in_questions_block:
                if line.startswith('    '):
                    new_line = line[4:]
                elif line.startswith('\n'):
                    pass
                else:
                    in_questions_block = False
            else:
                if line.startswith('questions:'):
                    in_questions_block = True

            if any(new_line.startswith(el) for el in
                   ['info', 'questions:', '- multiple_choice:', '- open:',
                    '- multiple_response:', '- text_only:']) \
                    and not previous_new_line.startswith('\nquestions:'):
                new_line = '\n' + new_line

            previous_new_line = new_line
            ret_val += new_line
        return ret_val

    yaml.dump(content, result := StringIO(), transform=beautify_line_spacings)
    return result.getvalue()
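# Hedged usage sketch for the function above: YbeExam is assumed to be the
# argument-less constructible class from ybe.lib.ybe_contents named in the
# docstring; the exact output depends on the installed ybe version.
exam = YbeExam()
ybe_text = write_ybe_string(exam)
print(ybe_text)  # expected to begin with a "ybe_version: ..." line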
def save(self):
    """Save the data about Stylesheet Assets and the final CSS."""
    yaml = YAML(typ="safe")
    yaml.register_class(StylesheetData)
    yaml.register_class(StylesheetImageList)
    yaml.register_class(LocalStylesheetImage)
    yaml.register_class(StoredStylesheetImage)
    yaml.register_class(RemoteStylesheetImage)

    logger.debug("Saving serialized StylesheetData class to: '{}'".format(
        self.config["data_file"],
    ))
    try:
        with open(self.config["data_file"], "w") as yaml_stream:
            yaml.dump(self.stylesheet_data, yaml_stream)
    except OSError as error:
        raise FileSavingException(
            error,
            "the Stylesheet Data file",
        ) from error

    css_file = self.assets.css_file_path
    logger.debug("Saving the CSS content to: '{}'".format(css_file))
    try:
        with open(css_file, "w", encoding="utf-8") as css_stream:
            css_stream.write(self.assets.adapted_css_content)
    except OSError as error:
        raise FileSavingException(
            error,
            "the CSS file",
        ) from error
def load(self):
    """Load the data about Stylesheet Assets and the new CSS content."""
    yaml = YAML(typ="safe")
    yaml.register_class(StylesheetData)
    yaml.register_class(StylesheetImageList)
    yaml.register_class(LocalStylesheetImage)
    yaml.register_class(StoredStylesheetImage)
    yaml.register_class(RemoteStylesheetImage)

    logger.debug("Loading serialized StylesheetData class from: "
                 "'{}'".format(self.config["data_file"]))
    try:
        with open(self.config["data_file"], "r") as yaml_stream:
            self.stylesheet_data = yaml.load(yaml_stream)
    except OSError as error:
        raise FileReadingException(
            error,
            "the Stylesheet Data file",
        ) from error

    logger.debug("Loading CSS content from: '{}'".format(
        self.stylesheet_data.css_file,
    ))
    try:
        with open(self.stylesheet_data.css_file, "r", encoding="utf-8") \
                as css_stream:
            self.css_content = css_stream.read()
    except OSError as error:
        raise FileReadingException(
            error,
            "the CSS file",
        ) from error
def do_main(args):
    yaml = YAML(typ='rt')
    yaml.register_class(YamlJinjaTemplate)
    yaml.register_class(YamlPluralString)
    data = yaml.load(args.yaml)

    if 'calendar' not in data:
        raise FormatError("missing top-level key 'calendar'")
    if 'events' not in data:
        raise FormatError("missing top-level key 'events'")

    langs = data['calendar'].get('languages', [])
    multilingual = len(langs) > 1
    if args.lang:
        langs = [args.lang]
    if len(langs) > 1 and args.ical and '{lang}' not in args.ical:
        print("multiple languages requested, but output filename is static")
        raise RuntimeError

    for lang in langs:
        if args.ical is None:
            if multilingual:
                name = "{}.{}.ics".format(args.yaml.stem, lang)
                ical = args.yaml.parent / name
            else:
                name = "{}.ics".format(args.yaml.stem)
                ical = args.yaml.parent / name
        else:
            ical = args.ical.format(lang=lang)

        try:
            with ical.open('rb') as f:
                old = get_events(Calendar.from_ical(f.read()))
        except FileNotFoundError:
            old = None

        cal = make_ical(data, lang, old)
        with ical.open('wb') as f:
            f.write(cal.to_ical())

    # This is probably broken
    if args.update_yaml:
        yaml.dump(data, args.yaml)

    # This should probably be different too
    if args.print_uids:
        for evdata in data['events']:
            print(evdata['uid'])
def yaml_dump(cls, stream=sys.stdout):
    class MyRepresenter(SafeRepresenter):
        def ignore_aliases(self, data):
            return True

    yaml = YAML(typ="safe")
    yaml.default_flow_style = False
    yaml.Representer = MyRepresenter

    types = EntityTypeBase.get_entity_types()
    for _, t in types.items():
        yaml.register_class(t)

    yaml.indent(mapping=2, sequence=4, offset=2)
    yaml.dump(cls, stream=stream)
def _read_file() -> Any:
    path = DEFAULT_FILENAME
    if not os.path.exists(path):
        log.critical(
            f"Config file {os.path.relpath(path)} is missing. "
            f"Example config file is located at "
            f"{os.path.join(os.path.relpath('.'), 'config.example.yaml')}"
        )
        sys.exit(1)

    yaml = YAML(typ="safe")
    yaml.register_class(EnvTag)
    with open(path, "r") as f:
        return yaml.load(f)
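# EnvTag is project-specific and not shown above. Purely as an illustration of
# the kind of class register_class accepts, a hypothetical environment-variable
# tag could look like this (not the project's actual implementation):
import os


class EnvTag:
    yaml_tag = "!ENV"  # hypothetical tag name

    def __init__(self, var_name):
        self.var_name = var_name

    @classmethod
    def from_yaml(cls, constructor, node):
        # resolve the environment variable at load time
        return os.environ.get(node.value, "")

    @classmethod
    def to_yaml(cls, representer, instance):
        return representer.represent_scalar(cls.yaml_tag, instance.var_name)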
async def test_run_acrawler(command_options, capsys):
    argv = command_options + ["https://example.com"]
    await acrawler.main(argv)
    captured = capsys.readouterr()
    assert captured.out == """\
- !Tag
  name: a
  url: https://www.iana.org/domains/example
  attrs:
    href: https://www.iana.org/domains/example
"""
    yaml = YAML()
    yaml.register_class(Tag)
    sitemap = yaml.load(captured.out)
    assert sitemap == [
        Tag("a", "https://www.iana.org/domains/example",
            {"href": "https://www.iana.org/domains/example"})
    ]
async def main(argv):
    """Runs a crawler under an event loop"""
    args = parse_args(argv)

    yaml = YAML()
    yaml.register_class(Tag)

    def serializer(objs):
        # NOTE: outputting a list takes advantage of YAML's
        # serialization for lists, which is both concatenable and
        # tailable
        yaml.dump(objs, sys.stdout)

    if args.redis:
        scheduler = RedisScheduler(args.redis)
    else:
        scheduler = SimpleScheduler()

    async with scheduler as open_scheduler:
        crawler = Crawler(open_scheduler, aiohttp.ClientSession, serializer,
                          args.max_pages, args.num_workers)
        await crawler.crawl(args.roots)
def save(self):
    """Export forcing data for later use."""
    yaml = YAML()
    yaml.register_class(self.__class__)
    target = self.directory / FORCING_YAML

    # We want to make the yaml and its parent movable,
    # so the directory and shape should not be included in the yaml file
    clone = copy(self)
    del clone.directory

    if clone.shape:
        try:
            clone.shape = str(clone.shape.relative_to(self.directory))
        except ValueError:
            clone.shape = None
            logger.info(
                f"Shapefile {self.shape} is not in forcing directory "
                f"{self.directory}. So, it won't be saved in {target}."
            )

    with open(target, "w") as f:
        yaml.dump(clone, f)
    return target
def docs(self, path):
    from ruamel.yaml import YAML

    tyaml = YAML(typ='safe', pure=True)
    tyaml.register_class(YAMLData)
    tyaml.register_class(Python)
    tyaml.register_class(Output)
    tyaml.register_class(Assert)
    return list(tyaml.load_all(path))
def load(directory: str):
    """Load previously generated or imported forcing data.

    Args:
        directory: forcing data directory; must contain
            `ewatercycle_forcing.yaml` file

    Returns:
        Forcing object
    """
    yaml = YAML()
    source = to_absolute_path(directory)
    # TODO give nicer error
    yaml.register_class(DefaultForcing)
    for forcing_cls in FORCING_CLASSES.values():
        yaml.register_class(forcing_cls)

    # Set directory in yaml string to parent of yaml file
    # Because in DefaultForcing.save the directory was removed
    forcing_info = yaml.load(source / FORCING_YAML)
    forcing_info.directory = source
    if forcing_info.shape:
        forcing_info.shape = to_absolute_path(forcing_info.shape, parent=source)
    return forcing_info
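# Hedged usage sketch: "./my_forcing" is a hypothetical directory assumed to
# contain the ewatercycle_forcing.yaml file written earlier by save().
forcing = load("./my_forcing")
print(forcing.directory)  # absolute path of the forcing directory
print(forcing.shape)      # absolute path of the shapefile, if any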
class Factory:
    """Helper class to load HermesPy simulation scenarios from YAML configuration files."""

    extensions: Set[str] = ['.yml', '.yaml', '.cfg']
    """List of recognized filename extensions for serialization files."""

    __yaml: YAML
    __clean: bool
    __purge_regex_alpha: Pattern
    __purge_regex_beta: Pattern
    __db_regex: Pattern
    __restore_regex_alpha: Pattern
    __registered_classes: Set[Type[Serializable]]
    __registered_tags: Set[str]

    def __init__(self) -> None:

        # YAML dumper configuration
        self.__yaml = YAML(typ='safe', pure=True)
        self.__yaml.default_flow_style = False
        self.__yaml.compact(seq_seq=False, seq_map=False)
        self.__yaml.encoding = None
        self.__yaml.indent(mapping=4, sequence=4, offset=2)
        self.__clean = True
        self.__registered_classes = set()
        self.__registered_tags = set()

        # Browse the current environment for packages within the 'hermespy' namespace
        for finder, name, ispkg in iter_modules(hermes.__path__, "hermespy."):

            module = import_module(name)

            for _, serializable_class in getmembers(module):

                if not isclass(serializable_class) or not issubclass(serializable_class, Serializable):
                    continue

                self.__registered_classes.add(serializable_class)
                self.__yaml.register_class(serializable_class)

                if serializable_class.yaml_tag is not None:

                    self.__registered_tags.add(serializable_class.yaml_tag)

                    if issubclass(serializable_class, SerializableArray):

                        array_constructor = partial(Factory.__construct_matrix, serializable_class)
                        self.__yaml.constructor.add_multi_constructor(serializable_class.yaml_tag,
                                                                      array_constructor)

        # Add constructors for untagged classes
        self.__yaml.constructor.add_constructor('tag:yaml.org,2002:map', self.__construct_map)
        # self.__yaml.constructor.add_constructor('tag:yaml.org,2002:seq', self.__construct_sequence)

        # Construct regular expressions for purging
        self.__purge_regex_alpha = compile(r': !<.*')
        self.__purge_regex_beta = compile(r"- !<([^']+)>")
        self.__restore_regex_alpha = compile(r"([ ]*)([a-zA-Z]+):\n$")
        self.__restore_regex_beta = compile(r"([ ]*)- ([^\s]+)([^']*)\n$")
        self.__range_regex = compile(r'([0-9.e-]*)[ ]*,[ ]*([0-9.e-]*)[ ]*,[ ]*\.\.\.[ ]*,[ ]*([0-9.e-]*)')
        self.__db_regex = compile(r"\[([ 0-9.,-]*)\][ ]*dB")

    @property
    def clean(self) -> bool:
        """Access clean flag.

        Returns:
            bool: Clean flag.
        """
        return self.__clean

    @clean.setter
    def clean(self, flag: bool) -> None:
        """Modify clean flag.

        Args:
            flag (bool): New clean flag.
        """
        self.__clean = flag

    @property
    def registered_classes(self) -> Set[Type[Serializable]]:
        """Classes registered for serialization within the factory."""
        return self.__registered_classes.copy()

    @property
    def registered_tags(self) -> Set[str]:
        """Read registered YAML tags.

        Returns:
            Set[str]: Set of registered YAML tags.
        """
        return self.__registered_tags

    def load(self, path: str) -> List[Serializable]:
        """Load a serialized executable configuration from a filesystem location.

        Args:
            path (str): Path to a file or a folder featuring serialization files.

        Returns:
            executables (List[Serializable]): Serializable HermesPy objects.

        Raises:
            RuntimeError: If `path` does not contain an executable object.
            RuntimeError: If `path` contains more than one executable object.
        """

        # Recover serialized objects
        hermes_objects: List[Any] = self.from_path(path)

        executables: List[Serializable] = []
        for hermes_object in hermes_objects:
            if isinstance(hermes_object, Serializable):
                executables.append(hermes_object)

        # Return fully configured executable
        return executables

    @staticmethod
    def __construct_matrix(cls: Any, constructor: SafeConstructor, tag_suffix: str, node: Any) \
            -> Tuple[Any, Tuple[int, ...]]:
        """Construct a matrix node from YAML.

        Args:
            cls (Any):
                The type of class to be constructed. This argument will be managed by ruamel.
                The class `cls` must define a `from_yaml` routine.

            constructor (SafeConstructor):
                A handle to the constructor extracting the YAML information.

            tag_suffix (str):
                Tag suffix in the YAML config describing the channel position within the matrix.

            node (Node):
                YAML node representing the `cls` serialization.

        Returns:
            cls: Newly created `cls` instance.
            int: First dimension position within the matrix.
            int: Second dimension within the matrix.
        """

        indices: List[str] = re.split(' |_', tag_suffix)
        if indices[0] == '':
            indices.pop(0)

        indices: Tuple[int] = tuple([int(idx) for idx in indices])
        return cls.from_yaml(constructor, node), indices

    @staticmethod
    def __construct_map(constructor: SafeConstructor, node: MappingNode) -> Mapping[MappingNode, Any]:
        """A custom map generator.

        Hacks ruamel to accept node names as tags.

        Args:
            constructor (SafeConstructor): Handle to the constructor.
            node (MappingNode): A YAML map node.

        Returns:
            Mapping[MappingNode, Any]: A sequence of objects created from `node`.
        """

        tag = node.value[0][0].value

        if tag in constructor.yaml_constructors:
            return constructor.yaml_constructors[tag](constructor, node.value[0][1])
        else:
            return constructor.construct_mapping(node, deep=True)

    @staticmethod
    def __construct_sequence(constructor: SafeConstructor, node: SequenceNode) -> Sequence[Any]:
        """A custom sequence generator.

        Hacks ruamel to accept node names as tags.

        Args:
            constructor (SafeConstructor): Handle to the constructor.
            node (SequenceNode): A YAML sequence node.

        Returns:
            Sequence[Any]: A sequence of objects created from `node`.
        """

        sequence = []
        for node in node.value:

            if node.tag in constructor.yaml_constructors:
                sequence.append(constructor.yaml_constructors[node.tag](constructor, node))
            else:
                sequence.append(constructor.construct_non_recursive_object(node))

        return sequence

    def __purge_tags(self, serialization: str) -> str:
        """Callback to remove explicit YAML tags from serialization stream.

        Args:
            serialization (str): The serialization sequence to be purged.

        Returns:
            str: The purged sequence.
        """

        cleaned_sequence = ''
        for line in serialization.splitlines(True):

            cleaned_line = self.__purge_regex_alpha.sub(r':', line)
            cleaned_line = self.__purge_regex_beta.sub(r'- \1', cleaned_line)
            cleaned_line = cleaned_line.replace('%20', " ")
            cleaned_sequence += cleaned_line

        return cleaned_sequence

    def refurbish_tags(self, serialization: str) -> str:
        """Callback to restore explicit YAML tags to serialization streams."""
        pass

    @staticmethod
    def __decibel_conversion(match: re.Match) -> str:
        """Convert linear series to decibel series.

        Args:
            match (re.Match): The serialization sequence to be converted.

        Returns:
            str: The purged sequence.
        """

        linear_values = [
            db2lin(float(str_rep))
            for str_rep in match[1].replace(' ', '').split(',')
        ]

        string_replacement = "["
        for linear_value in linear_values:
            string_replacement += str(linear_value) + ', '

        string_replacement += "]"
        return string_replacement

    def from_path(self, paths: Union[str, Set[str]]) -> List[Any]:
        """Load a configuration from an arbitrary file system path.

        Args:
            paths (Union[str, Set[str]]): Paths to a file or a folder featuring .yml config files.

        Returns:
            List[Any]: List of serializable objects recalled from `paths`.

        Raises:
            ValueError: If the provided `path` does not exist on the filesystem.
        """

        # Convert single path to a set if required
        if isinstance(paths, str):
            paths = [paths]

        hermes_objects = []
        for path in paths:

            if not os.path.exists(path):
                raise ValueError(f"Lookup path '{path}' not found")

            if os.path.isdir(path):
                hermes_objects += self.from_folder(path)
            elif os.path.isfile(path):
                hermes_objects += self.from_file(path)
            else:
                raise ValueError("Lookup location '{}' not recognized".format(path))

        return hermes_objects

    def from_folder(self, path: str, recurse: bool = True, follow_links: bool = False) -> List[Any]:
        """Load a configuration from a folder.

        Args:
            path (str): Path to the folder configuration.
            recurse (bool, optional): Recurse into sub-folders within `path`.
            follow_links (bool, optional): Follow links within `path`.

        Returns:
            List[Any]: List of serializable objects recalled from `path`.

        Raises:
            ValueError: If `path` is not a directory.
        """

        if not os.path.exists(path):
            raise ValueError("Lookup path '{}' not found".format(path))

        if not os.path.isdir(path):
            raise ValueError("Lookup path '{}' is not a directory".format(path))

        hermes_objects: List[Any] = []

        for directory, _, files in os.walk(path, followlinks=follow_links):
            for file in files:

                _, extension = os.path.splitext(file)
                if extension in self.extensions:
                    hermes_objects += self.from_file(os.path.join(directory, file))

            if not recurse:
                break

        return hermes_objects

    def to_folder(self, path: str, *args: Any) -> None:
        """Dump a configuration to a folder.

        Args:
            path (str): Path to the folder configuration.
            *args (Any): Configuration objects to be dumped.
        """
        pass

    def from_str(self, config: str) -> List[Any]:
        """Load a configuration from a string object.

        Args:
            config (str): The configuration to be loaded.

        Returns:
            List[Any]: List of serialized objects within `path`.
        """

        stream = StringIO(config)
        return self.from_stream(stream)

    def to_str(self, *args: Any) -> str:
        """Dump a configuration to a folder.

        Args:
            *args (Any): Configuration objects to be dumped.

        Returns:
            str: String containing full YAML configuration.

        Raises:
            RepresenterError: If objects in ``*args`` are unregistered classes.
        """

        stream = StringIO()
        self.to_stream(stream, args)
        return stream.getvalue()

    def from_file(self, file: str) -> List[Any]:
        """Load a configuration from a single YAML file.

        Args:
            file (str): Path to the folder configuration.

        Returns:
            List[Any]: List of serialized objects within `path`.
        """

        with open(file, mode='r') as file_stream:

            try:
                return self.from_stream(file_stream)

            # Re-raise constructor errors with the correct file name
            except ConstructorError as constructor_error:

                constructor_error.problem_mark.name = file
                raise constructor_error

    def to_file(self, path: str, *args: Any) -> None:
        """Dump a configuration to a single YML file.

        Args:
            path (str): Path to the configuration file.
            *args (Any): Configuration objects to be dumped.

        Raises:
            RepresenterError: If objects in ``*args`` are unregistered classes.
        """
        pass

    def __restore_callback_alpha(self, m: Match) -> str:
        """Internal regular expression callback.

        Args:
            m (Match): Regular expression match.

        Returns:
            str: The processed match line.
        """

        if m.group(2) in self.registered_tags:
            return m.group(1) + m.group(2) + ": !<" + m.group(2) + ">\n"
        else:
            return m.string

    def __restore_callback_beta(self, m: Match) -> str:
        """Internal regular expression callback.

        Args:
            m (Match): Regular expression match.

        Returns:
            str: The processed match line.
        """

        if m.group(2) in self.registered_tags:

            indices = m.group(3).replace(" ", "%20")
            return m.group(1) + "- !<" + m.group(2) + indices + ">\n"
        else:
            return m.string

    @staticmethod
    def __range_restore_callback(m: Match) -> str:
        """Internal regular expression callback.

        Args:
            m (Match): Regular expression match.

        Returns:
            str: The processed match line.
        """

        # Extract range parameters
        start = float(m.group(1))
        step = float(m.group(2)) - start
        stop = float(m.group(3)) + step

        range = np.arange(start=start, stop=stop, step=step)
        replacement = ''

        for step in range[:-1]:
            replacement += str(step) + ', '

        replacement += str(range[-1])
        return replacement

    def from_stream(self, stream: TextIOBase) -> List[Any]:
        """Load a configuration from an arbitrary text stream.

        Args:
            stream (TextIOBase): Text stream containing the configuration.

        Returns:
            List[Any]: List of serialized objects within `stream`.

        Raises:
            ConstructorError: If YAML parsing fails.
        """

        if not self.__clean:
            return self.__yaml.load(stream)

        clean_stream = ''
        for line in stream.readlines():

            clean_line = self.__range_regex.sub(self.__range_restore_callback, line)
            clean_line = self.__db_regex.sub(self.__decibel_conversion, clean_line)
            clean_stream += clean_line

        hermes_objects = self.__yaml.load(StringIO(clean_stream))

        if hermes_objects is None:
            return []

        if isinstance(hermes_objects, Iterable):
            return hermes_objects
        else:
            return [hermes_objects]

    def to_stream(self, stream: TextIOBase, *args: Any) -> None:
        """Dump a configuration to an arbitrary text stream.

        Args:
            stream (TextIOBase): Text stream to the configuration.
            *args (Any): Configuration objects to be dumped.

        Raises:
            RepresenterError: If objects in ``*args`` are unregistered classes.
        """

        for serializable_object in args:

            if self.__clean:
                self.__yaml.dump(*serializable_object, stream, transform=self.__purge_tags)
            else:
                self.__yaml.dump(*serializable_object, stream)
def _execute(self, browser):
    transactions = []
    for extractor_config in self.config.extractors:
        extracted_transaction = extract(extractor_config, browser,
                                        self.last_transaction, self.config.options)
        transactions += convert(extracted_transaction,
                                extractor_config.transaction.converters)
    if len(transactions) > 0:
        write(self.config.writer, transactions)


try:
    yaml = YAML(typ="safe", pure=True)
    yaml.register_class(Struct)

    chrome_driver_path = os.path.join(os.getcwd(), 'chromedriver.exe')

    with io.open('config.yml', mode='r', encoding='utf-8') as _f:
        _config = Struct(**yaml.load(_f))
    with io.open('last-transaction.yml', mode='r', encoding='utf-8') as _f:
        _last_transaction = yaml.load(_f)
    if not isinstance(_last_transaction, Struct):
        _last_transaction = Struct(**_last_transaction)

    Main(_config, _last_transaction).execute()

    if _config.options.rememberLastTransaction:
        with io.open('last-transaction.yml', mode='w', encoding='utf-8') as _f:
            yaml.dump(_last_transaction, _f)
        fields = att_ref.split(".")
        if len(fields) != 2:
            raise ValueError(
                '!GetAtt {} does not match "<resource>.<attribute>"'.format(att_ref)
            )
        self.reference = fields[0]
        self.attribute = fields[1]

    @classmethod
    def to_yaml(cls, representer, node):
        return representer.represent_scalar(
            cls.yaml_tag, "{0.reference}.{0.attribute}".format(node)
        )

    @classmethod
    def from_yaml(cls, constructor, node):
        return cls(node.value)


yaml = YAML(typ="safe", pure=True)
yaml.register_class(Ref)
yaml.register_class(GetAtt)


def load_template(stream):
    return yaml.load(stream)


def template_to_string(o):
    return json.dumps(o, cls=TemplateEncoder)
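# Hedged usage sketch: a one-line template exercising the registered tag,
# assuming GetAtt's yaml_tag is '!GetAtt' as the error message above suggests;
# MyBucket/Arn are made-up names for illustration only.
template = load_template("BucketArn: !GetAtt MyBucket.Arn")
att = template["BucketArn"]
print(att.reference, att.attribute)  # -> MyBucket Arn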
    t_modified: the last time the file was modified (epoch timestamp)
        as reported by the filesystem.
    linesize: total number of lines
    member_tokens: the members this file contains
    """

    def __init__(
        self,
        filename=None,
        cachedfile=None,
        t_modified=None,
        linesize=None,
        member_tokens=None,
    ):
        """Create new CppFile object."""
        self.filename = filename
        self.cachedfile = cachedfile
        self.t_modified = t_modified
        self.linesize = linesize
        self.member_tokens = member_tokens
        self.__loadall__()

    def __loadall__(self):
        """Load the file from the filecache or from the absolute path."""
        with open(self.filename, "r", errors="ignore") as file:
            self.linestring = file.read()


# class must be registered to be loadable/dumpable by the C implementation
# of the SafeLoader and SafeParser
YA_ML.register_class(CppFile)
def get_store_config_file_parser(self):
    yaml = YAML(typ='unsafe')
    yaml.register_class(StoreConfig)
    yaml.register_class(BackendConfig)
    return yaml
    @property
    def name(self):
        return fp.basename(self.path)

    @property
    def quoted_url(self):
        return qut(qut(self.path))

    @property
    def suffix(self):
        return fp.splitext(self.path)[1]

    def __eq__(self, path):
        return self.path == Path(path).path

    @classmethod
    def to_yaml(cls, representer, obj):
        return representer.represent_scalar(cls.yaml_tag, obj.abs)

    @classmethod
    def from_yaml(cls, constructor, node):
        return Path(constructor.construct_scalar(node))

    def __hash__(self):
        return hash(self.path)


yaml = YAML()
yaml.register_class(Path)
from ruamel.yaml import YAML

from paradrop.core.auth.user import User
from paradrop.core.chute.chute import Chute
from paradrop.core.chute.service import Service


yaml = YAML(typ='safe')
yaml.default_flow_style = False
yaml.register_class(User)
yaml.register_class(Chute)
yaml.register_class(Service)
        self.branch = None  # filled after adding to a queue

    def render_files(self):
        with StringIO() as buf:
            yaml.dump(self, buf)
            content = buf.getvalue()
        return {'job.yml': content}

    @property
    def email(self):
        return os.environ.get('CROSSBOW_EMAIL', self.target.email)


# configure yaml serializer
yaml = YAML()
yaml.register_class(Job)
yaml.register_class(Task)
yaml.register_class(Target)

# state color mapping to highlight console output
COLORS = {'ok': 'green',
          'error': 'red',
          'missing': 'red',
          'failure': 'red',
          'pending': 'yellow',
          'success': 'green'}

# define default paths
DEFAULT_CONFIG_PATH = CWD / 'tasks.yml'
DEFAULT_ARROW_PATH = CWD.parents[1]
DEFAULT_QUEUE_PATH = CWD.parents[2] / 'crossbow'
from buildtools import os_utils
from buildtools.bt_logging import NullIndenter, log
from buildtools.maestro.base_target import BuildTarget
from buildtools.maestro.fileio import (ConcatenateBuildTarget, CopyFilesTarget,
                                       CopyFileTarget, MoveFileTarget,
                                       ReplaceTextTarget)
from buildtools.maestro.utils import (SerializableFileLambda,
                                      SerializableLambda, callLambda)
from ruamel.yaml import YAML
from typing import List
from tqdm import tqdm

yaml = YAML(typ='safe', pure=True)
yaml.register_class(SerializableLambda)
yaml.register_class(SerializableFileLambda)


class TarjanGraphVertex(object):
    def __init__(self, ID: int, refs: List[int]):
        self.ID = ID
        self.refs = refs

        self.disc = -1
        self.low = -1
        self.stackMember = False


class TarjanGraph(object):
    def __init__(self):
        self.cur_id = 0  # Used for populating the graph.
class Yaml(Reporter):
    r"""
    Writes results to a YAML file.

    EXAMPLES::

        >>> from flatsurvey.surfaces import Ngon
        >>> surface = Ngon((1, 1, 1))
        >>> log = Yaml(surface)

        >>> import asyncio
        >>> from flatsurvey.jobs import FlowDecompositions, SaddleConnectionOrientations, SaddleConnections, CompletelyCylinderPeriodic
        >>> from flatsurvey.cache import Cache
        >>> from flatsurvey.reporting import Report
        >>> flow_decompositions = FlowDecompositions(surface=surface, report=Report([]), saddle_connection_orientations=SaddleConnectionOrientations(SaddleConnections(surface)))
        >>> ccp = CompletelyCylinderPeriodic(report=Report([log]), flow_decompositions=flow_decompositions, cache=Cache())
        >>> report = ccp.report()
        >>> asyncio.run(report)

        >>> log.flush()  # doctest: +ELLIPSIS
        surface: ...
        completely-cylinder-periodic:
        - {cylinder_periodic_directions: 0, undetermined_directions: 0, value: null}

    """

    @copy_args_to_internal_fields
    def __init__(self, surface, stream=None):
        import sys

        self._stream = stream or sys.stdout
        self._data = {"surface": surface}

        from ruamel.yaml import YAML

        self._yaml = YAML()
        self._yaml.width = 2**16
        self._yaml.representer.default_flow_style = None
        self._yaml.representer.add_representer(None, Yaml._represent_undefined)
        self._yaml.register_class(type(self._data["surface"]))
        self._yaml.register_class(Pickle)

    @classmethod
    def _represent_undefined(cls, representer, data):
        import pickle

        return representer.represent_data({
            "pickle": Pickle(pickle.dumps(data)),
            "repr": repr(data),
        })

    def _render(self, *args, **kwargs):
        if len(args) == 0:
            return self._render(kwargs)
        if len(args) > 1:
            return self._render(args, **kwargs)

        value = args[0]

        if not kwargs:
            from sage.all import ZZ

            if type(value) is type(ZZ()):
                value = int(value)
            if hasattr(type(value), "to_yaml"):
                self._yaml.representer.add_representer(type(value), type(value).to_yaml)
            return value

        value = self._render(value)
        ret = self._render(kwargs)
        if isinstance(value, dict):
            ret.update(value)
        else:
            ret["value"] = value

        from pickle import dumps

        try:
            dumps(value)
        except Exception as e:
            ret["value"] = "Failed: " + str(e)

        return ret

    async def result(self, source, result, **kwargs):
        r"""
        Report that computation ``source`` concluded with ``result``.

        EXAMPLES::

            >>> from flatsurvey.surfaces import Ngon
            >>> surface = Ngon((1, 1, 1))
            >>> log = Yaml(surface)

            >>> from flatsurvey.jobs import FlowDecompositions, SaddleConnectionOrientations, SaddleConnections, CompletelyCylinderPeriodic
            >>> from flatsurvey.reporting import Report
            >>> flow_decompositions = FlowDecompositions(surface=surface, report=Report([log]), saddle_connection_orientations=SaddleConnectionOrientations(SaddleConnections(surface)))

        Write the first two flow decompositions to the YAML output:

            >>> import asyncio
            >>> produce = flow_decompositions.produce()
            >>> asyncio.run(produce)
            True
            >>> produce = flow_decompositions.produce()
            >>> asyncio.run(produce)
            True

            >>> log.flush()  # doctest: +ELLIPSIS
            surface: ...
            flow-decompositions:
            - orientation: ...
            - orientation: ...

        """
        self._data.setdefault(str(source), [])
        self._data[str(source)].append(self._render(result, **kwargs))

    def flush(self):
        r"""
        Write out the full YAML document.

            >>> from flatsurvey.surfaces import Ngon
            >>> surface = Ngon((1, 1, 1))
            >>> log = Yaml(surface)

            >>> log.flush()  # doctest: +ELLIPSIS
            surface: ...

        """
        self._yaml.dump(self._data, self._stream)
        self._stream.flush()

    @classmethod
    @click.command(
        name="yaml",
        cls=GroupedCommand,
        group="Reports",
        help=__doc__.split("EXAMPLES")[0],
    )
    @click.option(
        "--output",
        type=click.File("w"),
        default=None,
        help="[default: derived from surface name]",
    )
    def click(output):
        return {
            "bindings": [
                FactoryBindingSpec(
                    "yaml",
                    lambda surface: Yaml(
                        surface,
                        stream=output or open(f"{surface.basename()}.yaml", "w"),
                    ),
                )
            ],
            "reporters": [Yaml],
        }

    def command(self):
        import sys

        command = ["yaml"]
        if self._stream is not sys.stdout:
            command.append(f"--output={self._stream.name}")
        return command
def load_f(self, name, path):
    # yaml = YAML(typ='unsafe')
    yaml = YAML()
    yaml.register_class(SemanticVersion)
    yaml.register_class(ForceSemanticVer)
    self.data[name] = yaml.load(path)
    #     state = data.__dict__.copy()
    #     return self.represent_mapping(tag, state, flow_style=flow_style)

    @classmethod
    def from_yaml(cls, constructor, node):
        data = CommentedMap()
        # constructor.construct_mapping(node, data, deep=True)
        constructor.construct_scalar(node)
        return cls(**data)

    def __str__(self):
        return f'vault({data})'


# def yaml_hell():
yaml.register_class(VaultVar)

# class VaultTag(yaml.YAMLObject):
#     yaml_tag = u'!vault'
#
#     def __init__(self, value):
#         self.value = value
#
#     def __repr__(self:
#         return value
#
#     @staticmethod
#     def yaml_constructor(loader, node):
#         return VaultTag(loader.constr

# def Custom_Constructor(loader, tag_suffix, node):
from ruamel.yaml import YAML

from kaex.models.application import Application
from kaex.models.resource import Resource
from kaex.models.deployment import Deployment
from kaex.models.service import Service
from kaex.models.ingress import Ingress
from kaex.models.pvc import PersistentVolumeClaim

yaml = YAML(typ='safe')
yaml.register_class(Deployment)
yaml.register_class(Service)
yaml.register_class(Ingress)
yaml.register_class(PersistentVolumeClaim)
# -*- coding: utf-8 -*-
import sys, os, glob, re

from wolfrpg import commands, maps, databases, gamedats, common_events, route
from wolfrpg.service_fn import write_csv_list
from ruamel.yaml import YAML

# NOTE: for debug and string search purposes
DUMP_YAML = True

yaml = YAML()
yaml.register_class(maps.Map)
yaml.register_class(maps.Map.Event)
yaml.register_class(maps.Map.Event.Page)
yaml.register_class(databases.Database)
yaml.register_class(databases.Database.Type)
yaml.register_class(databases.Database.Field)
yaml.register_class(databases.Database.Data)
yaml.register_class(common_events.CommonEvents)
yaml.register_class(common_events.CommonEvents.Event)
yaml.register_class(commands.Command)
yaml.register_class(commands.Blank)
yaml.register_class(commands.Checkpoint)
yaml.register_class(commands.Message)
yaml.register_class(commands.Choices)
yaml.register_class(commands.Comment)
yaml.register_class(commands.ForceStopMessage)
yaml.register_class(commands.DebugMessage)
yaml.register_class(commands.ClearDebugText)
yaml.register_class(commands.VariableCondition)
yaml.register_class(commands.StringCondition)
yaml.register_class(commands.SetVariable)
yaml.register_class(commands.SetString)
yaml.register_class(commands.InputKey)
class PlayerList(Player, Indexable, Importable, Exportable, JsonSerializable):
    errors = []
    id_list = []

    # Tuple description
    # (field, unique, optional, sub-key-settings)
    # field: indexed keys
    # unique: check for duplicates here
    # optional: Don't throw an exception for these keys if they are missing
    # sub-key-settings: settings for contained sub-keys
    IndexSetting = namedtuple('IndexSetting', 'key unique optional sub_key_settings')
    index_key_settings = [
        IndexSetting('id', True, False, None),
        IndexSetting('name', True, False, None),
        IndexSetting('aoeelo', True, True, None),
        IndexSetting('esportsearnings', True, True, None),
        IndexSetting('platforms', True, True, [
            IndexSetting('de', True, True, None),
            IndexSetting('voobly', True, True, None)
        ]),
        IndexSetting('liquipedia', True, True, None),
        IndexSetting('country', False, True, None)
    ]

    def __init__(self):
        self.players = {}
        self.yaml = YAML()
        self.yaml.register_class(Player)
        self.yaml.preserve_quotes = True

    def add_player_to_list(self, player):
        """ Append a player to the players list

        Args:
            player (Player): Player object
        """
        self.players.append(player)

    def check_country_names_being_valid(self):
        """ Iterates through the list of country names parsed from the
        'players.yaml' and checks if they are valid

        Returns:
            InvalidCountryCodeError: Contains an error with a recommendation
                for a possible country code
        """
        errors = []
        LOGGER.debug("Validating country codes ...")
        attribute_name = f"country_{INDEX_LIST_SUFFIX}"
        merged_country_codes = {
            c.upper() for c in getattr(self, attribute_name)
        }
        for country in merged_country_codes:
            if pycountry.countries.get(alpha_2=country) is None:
                suggestion = (pycountry.countries.lookup(country)).alpha_2
                LOGGER.debug(f"Invalid country code detected: '{country}', "
                             f"try '{suggestion}'")
                errors.append(
                    InvalidCountryCodeError(
                        f"Country '{country}' is invalid, "
                        f"did you mean '{suggestion}'?"))
        LOGGER.debug("Country codes validated.")

        err_len = len(errors)
        if err_len == 0:
            LOGGER.debug("Country codes validated.")
            return None
        elif err_len > 0:
            LOGGER.error(f"Country codes validated with {err_len} error(s).")
            return errors
    (None, vault_pw)
])


class VaultSecret:
    yaml_tag = u'!vault'

    def __init__(self, secret):
        self.secret = secret

    @classmethod
    def from_yaml(cls, constructor, node):
        return VaultSecret(vl.decrypt(node.value)).secret.decode('utf-8')


yaml = YAML()
yaml.indent(mapping=2, sequence=4, offset=2)
yaml.register_class(VaultSecret)

try:
    with open(sys.argv[1], 'r') as orig:
        try:
            y = yaml.load(orig)
        except AnsibleVaultError as e:
            print("Failed to decrypt")
            print(e)
            sys.exit(1)
except FileExistsError:
    print(f"Failed to open {sys.argv[2]}")

with open(sys.argv[2], 'w') as dest:
    yaml.dump(y, dest)
def get_pycred_config_file_parser(self):
    yaml = YAML(typ='unsafe')
    yaml.register_class(PyCredConfig)
    yaml.register_class(PyCredBackendDefaultConfig)
    yaml.register_class(BackendConfig)
    return yaml
def read_ybe_string(ybe_str):
    """Load the data from the provided Ybe formatted string and return an
    :class:`ybe.lib.ybe_contents.YbeExam` object.

    Args:
        ybe_str (str): an .ybe formatted string to load

    Returns:
        ybe.lib.ybe_contents.YbeExam: the contents of the .ybe file.

    Raises:
        ybe.lib.errors.YbeLoadingError: if the file could not be loaded due to syntax errors
    """
    yaml = YAML(typ='safe')
    yaml.register_class(TextHTML)
    yaml.register_class(TextMarkdown)
    data = yaml.load(ybe_str)

    if not len(data):
        return YbeExam()

    if 'ybe_version' not in data:
        raise YbeLoadingError('Missing "ybe_version" specifier.')

    def _convert_text(input_data):
        if isinstance(input_data, TextData):
            return input_data
        return TextPlain(input_data)

    def _convert_answer_options(input_data):
        result = []
        for item in input_data:
            if 'answer' in item:
                result.append(
                    dacite.from_dict(
                        AnswerOption, item['answer'],
                        config=dacite.Config(type_hooks={TextData: _convert_text})))
        return result

    def _convert_analytics(input_data):
        analytic_types = {
            'exam': QuestionUsedInExam,
        }

        result = []
        for usage in input_data:
            usage_type, usage_value = list(usage.items())[0]
            result.append(
                dacite.from_dict(analytic_types[usage_type], usage_value,
                                 config=dacite.Config(type_hooks={TextData: _convert_text})))
        return result

    def _convert_questions(input_data):
        question_types = {
            'multiple_choice': MultipleChoice,
            'multiple_response': MultipleResponse,
            'open': OpenQuestion,
            'text_only': TextOnly
        }

        result = []
        for question in input_data:
            q_type, q_value = list(question.items())[0]
            result.append(
                dacite.from_dict(
                    question_types[q_type], q_value,
                    config=dacite.Config(
                        type_hooks={
                            TextData: _convert_text,
                            List[AnswerOption]: _convert_answer_options,
                            List[AnalyticsQuestionUsage]: _convert_analytics
                        })))
        return result

    return dacite.from_dict(
        YbeExam, data,
        config=dacite.Config(type_hooks={
            List[Question]: _convert_questions,
            TextData: _convert_text
        }))
                                'is: `{}`'.format(task_name, str(e)))

        # validate that the defined tasks are renderable; in order to do that
        # define the required object with dummy data
        target = Target(head='e279a7e06e61c14868ca7d71dea795420aea6539',
                        branch='master',
                        remote='https://github.com/apache/arrow',
                        version='1.0.0dev123',
                        email='*****@*****.**')

        for task_name, task in self['tasks'].items():
            task = Task(**task)
            files = task.render_files(
                self.template_searchpath,
                params=dict(
                    arrow=target,
                    queue_remote_url='https://github.com/org/crossbow'))
            if not files:
                raise CrossbowError(
                    'No files have been rendered for task `{}`'.format(task_name))


# configure yaml serializer
yaml = YAML()
yaml.register_class(Job)
yaml.register_class(Task)
yaml.register_class(Target)
yaml.register_class(Queue)
yaml.register_class(TaskStatus)
        self.branch = None  # filled after adding to a queue

    def render_files(self):
        with StringIO() as buf:
            yaml.dump(self, buf)
            content = buf.getvalue()
        return {'job.yml': content}

    @property
    def email(self):
        return os.environ.get('CROSSBOW_EMAIL', self.target.email)


# configure yaml serializer
yaml = YAML()
yaml.register_class(Job)
yaml.register_class(Task)
yaml.register_class(Target)

# state color mapping to highlight console output
COLORS = {
    'ok': 'green',
    'error': 'red',
    'missing': 'red',
    'failure': 'red',
    'pending': 'yellow',
    'success': 'green'
}

# define default paths
DEFAULT_CONFIG_PATH = CWD / 'tasks.yml'
from ruamel.yaml import YAML, yaml_object

from model.fat32model import *
from util.enums import *
from workflow.workflow import WorkflowStep, WorkflowValidationException, WorkflowLog
from filesystem.fat32 import *

yaml = YAML()
yaml.register_class(FAT32Parameter)
yaml.register_class(FAT32BootParameter)
yaml.register_class(FAT32FsInfoParameter)


@yaml_object(yaml)
class FAT32CreateBootSectorStep(WorkflowStep):
    yaml_tag = u'!FAT32CreateBootSector'

    def __init__(self, workflow, pathToConfig=None, description='Write FAT32 BootSector'):
        super().__init__(workflow, description)
        self.pathToConfig = pathToConfig

    def validate(self):
        if self.pathToConfig is None:
            return WorkflowLog(self, "validate", WorkflowStatus.FAILED,
                               LogType.VALIDATIONERROR,
                               reason="config path not defined")