Example #1
def read_yaml(content):
    fix_yaml_loader()
    replace_environment_variables()

    yaml_parser = yaml.YAML(typ="safe")
    yaml_parser.version = "1.2"
    yaml_parser.unicode_supplementary = True

    return yaml_parser.load(content)
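For reference, a minimal standalone sketch of the same safe-parser setup with ruamel.yaml (only the ruamel calls are reproduced; fix_yaml_loader and replace_environment_variables are helpers from the surrounding module and the sample document below is made up):

from ruamel import yaml

yaml_parser = yaml.YAML(typ="safe")
yaml_parser.version = "1.2"  # resolve scalars per YAML 1.2 (so 'yes' stays a string)
print(yaml_parser.load("greeting: hello\ncount: 3\n"))  # {'greeting': 'hello', 'count': 3}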
Example #2
def assert_graphql_resp_expected(resp_orig,
                                 exp_response_orig,
                                 query,
                                 resp_hdrs={},
                                 skip_if_err_msg=False):
    # Prepare actual and expected responses so the comparison takes into
    # consideration only the ordering that we care about:
    resp = collapse_order_not_selset(resp_orig, query)
    exp_response = collapse_order_not_selset(exp_response_orig, query)
    matched = equal_CommentedMap(resp, exp_response)

    if PytestConf.config.getoption("--accept"):
        print('skipping assertion since we chose to --accept new output')
    else:
        yml = yaml.YAML()
        # https://yaml.readthedocs.io/en/latest/example.html#output-of-dump-as-a-string  :
        dump_str = StringIO()
        test_output = {
            # Keep strict received order when displaying errors:
            'response': resp_orig,
            'expected': exp_response_orig,
            'diff': (lambda diff: "(results differ only in their order of keys)"
                     if diff == {} else diff)(
                         stringify_keys(jsondiff.diff(exp_response, resp))),
            'query': query
        }
        if 'x-request-id' in resp_hdrs:
            test_output['request id'] = resp_hdrs['x-request-id']
        yml.dump(test_output, stream=dump_str)
        if not skip_if_err_msg:
            assert matched, '\n' + dump_str.getvalue()
        elif matched:
            return resp, matched
        else:

            def is_err_msg(msg):
                return any(msg.get(x) for x in ['error', 'errors'])

            def as_list(x):
                return x if isinstance(x, list) else [x]

            # If it is a batch GraphQL query, compare each individual response separately
            for (exp, out) in zip(as_list(exp_response), as_list(resp)):
                matched_ = equal_CommentedMap(exp, out)
                if is_err_msg(exp):
                    if not matched_:
                        warnings.warn(
                            "Response does not have the expected error message\n"
                            + dump_str.getvalue())
                        return resp, matched
                else:
                    assert matched_, '\n' + dump_str.getvalue()
    return resp, matched  # matched always True unless --accept
Example #3
    def loads(self, data: str) -> MutableMapping[str, Any]:
        """
		Deserialize data using this :class:`~.Serializer`.

		:param data:
		"""

        y = yaml.YAML(typ="safe", pure=True)
        return y.load(str(data))
Example #4
def run_local_topology(name=None, env_name=None, time=0, options=None):
    """Run a topology locally using Flux and `storm jar`."""
    name, topology_file = get_topology_definition(name)
    config = get_config()
    env_name, env_config = get_env_config(env_name)
    topology_class = get_topology_from_file(topology_file)

    set_topology_serializer(env_config, config, topology_class)

    storm_options = resolve_options(options,
                                    env_config,
                                    topology_class,
                                    name,
                                    local_only=True)
    if storm_options['topology.acker.executors'] != 0:
        storm_options['topology.acker.executors'] = 1
    storm_options['topology.workers'] = 1

    # Set parallelism based on env_name if necessary
    for spec in topology_class.specs:
        if isinstance(spec.par, dict):
            spec.par = spec.par.get(env_name)

    # Check Storm version is the same
    local_version = local_storm_version()
    project_version = storm_lib_version()
    if local_version != project_version:
        raise ValueError('Local Storm version, {}, is not the same as the '
                         'version in your project.clj, {}. The versions must '
                         'match.'.format(local_version, project_version))

    # Prepare a JAR that has Storm dependencies packaged
    topology_jar = jar_for_deploy(simple_jar=False)

    if time <= 0:
        time = 9223372036854775807  # Max long value in Java

    # Write YAML file
    with show('output'):
        with NamedTemporaryFile(mode='w', suffix='.yaml',
                                delete=False) as yaml_file:
            topology_flux_dict = topology_class.to_flux_dict(name)
            topology_flux_dict['config'] = storm_options
            if yaml.version_info < (0, 15):
                yaml.safe_dump(topology_flux_dict,
                               yaml_file,
                               default_flow_style=False)
            else:
                yml = yaml.YAML(typ='safe', pure=True)
                yml.default_flow_style = False
                yml.dump(topology_flux_dict, yaml_file)
            cmd = (
                'storm jar {jar} org.apache.storm.flux.Flux --local --no-splash '
                '--sleep {time} {yaml}'.format(jar=topology_jar,
                                               time=time,
                                               yaml=yaml_file.name))
            local(cmd)
Example #5
def preview_search(args: Namespace) -> None:
    experiment_config = yaml.safe_load(args.config_file.read())
    args.config_file.close()

    if "searcher" not in experiment_config:
        print("Experiment configuration must have 'searcher' section")
        sys.exit(1)
    r = api.post(args.master, "searcher/preview", body=experiment_config)
    j = r.json()

    def count_steps(sequence: str) -> int:
        return sum(1 for k in sequence if k == "S")

    def to_full_name(kind: str) -> str:
        if kind == "S":
            return "step"
        elif kind == "V":
            return "validation"
        elif kind == "C":
            return "checkpoint"
        else:
            raise ValueError("unexpected kind: {}".format(kind))

    def render_sequence(sequence: str) -> str:
        if not sequence:
            return "N/A"
        instructions = []
        current = sequence[0]
        count = 0
        for k in sequence:
            if k != current:
                instructions.append("{} x {}".format(count, to_full_name(current)))
                current = k
                count = 1
            else:
                count += 1
        instructions.append("{} x {}".format(count, to_full_name(current)))
        return ", ".join(instructions)

    headers = ["Trials", "Steps", "Breakdown"]
    values = [
        (count, count_steps(workloads), render_sequence(workloads))
        for workloads, count in j["results"].items()
    ]

    print(colored("Using search configuration:", "green"))
    yml = yaml.YAML()
    yml.indent(mapping=2, sequence=4, offset=2)
    yml.dump(experiment_config["searcher"], sys.stdout)
    print()
    print(
        "This search will create a total of {} trial(s) and run {} steps".format(
            sum(j["results"].values()),
            sum(count_steps(workloads) * cnt for workloads, cnt in j["results"].items()),
        )
    )
    print(tabulate.tabulate(values, headers, tablefmt="presto"), flush=False)
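For intuition, render_sequence run-length encodes the workload string; a standalone sketch of the same collapsing logic using itertools.groupby (the input sequence below is made up):

from itertools import groupby

def render_sequence(sequence: str) -> str:
    # Collapse consecutive identical workload kinds into "count x name" chunks.
    names = {"S": "step", "V": "validation", "C": "checkpoint"}
    if not sequence:
        return "N/A"
    return ", ".join(
        "{} x {}".format(len(list(group)), names[kind])
        for kind, group in groupby(sequence)
    )

print(render_sequence("SSSVSSSC"))  # 3 x step, 1 x validation, 3 x step, 1 x checkpoint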
Example #6
def process():
    with open("../calliope/config/defaults.yaml", "r") as f:
        defaults = yaml.round_trip_load(f)

    write_csv(
        "./user/includes/default_essentials.csv",
        get_section(defaults["techs"]["default_tech"]["essentials"]),
    )
    write_csv(
        "./user/includes/default_constraints.csv",
        get_section(defaults["techs"]["default_tech"]["constraints"]),
    )
    write_csv(
        "./user/includes/default_costs.csv",
        get_section(
            defaults["techs"]["default_tech"]["costs"]["default_cost"]),
    )

    write_csv("./user/includes/model_settings.csv",
              get_section(defaults["model"]))
    write_csv("./user/includes/run_settings.csv", get_section(defaults["run"]))

    y = yaml.YAML()

    for tech_group in defaults["tech_groups"]:
        this_group_defaults = {
            "essentials": defaults["tech_groups"][tech_group].get("essentials", {}),
            "constraints": defaults["tech_groups"][tech_group].get("constraints", {}),
            "costs": defaults["tech_groups"][tech_group].get("costs", {}),
        }
        with open("./user/includes/basetech_{}.yaml".format(tech_group),
                  "w") as f:
            f.write(yaml.dump(this_group_defaults,
                              Dumper=yaml.RoundTripDumper))

        required_allowed = {
            "required_constraints": y.seq(
                defaults["tech_groups"][tech_group].get("required_constraints", [])),
            "allowed_constraints": y.seq(
                defaults["tech_groups"][tech_group].get("allowed_constraints", [])),
            "allowed_costs": y.seq(
                defaults["tech_groups"][tech_group].get("allowed_costs", [])),
        }
        with open(
                "./user/includes/required_allowed_{}.yaml".format(tech_group),
                "w") as f:
            f.write(
                yaml.dump(required_allowed,
                          indent=4,
                          Dumper=yaml.RoundTripDumper))
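As a side note, y.seq() wraps a plain list in ruamel's CommentedSeq (the round-trip sequence type), so the resulting nodes can later carry per-item comments and formatting; a minimal hedged sketch with made-up constraint names:

import sys
from ruamel import yaml

y = yaml.YAML()
# Constraint names below are illustrative, not Calliope's real defaults.
required_allowed = {
    "required_constraints": y.seq(["energy_cap_max"]),
    "allowed_costs": y.seq(["monetary"]),
}
y.dump(required_allowed, sys.stdout)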
Example #7
def test_fileformatyaml_pass_with_substitutions():
    """Relative path to file should succeed.

     Strictly speaking not a unit test.
    """
    context = Context({
        'k1': 'v1',
        'k2': 'v2',
        'k3': 'v3',
        'k4': 'v4',
        'k5': 'v5',
        'fileFormatYamlIn': './tests/testfiles/testsubst.yaml',
        'fileFormatYamlOut': './tests/testfiles/out/outsubst.yaml'
    })

    fileformat.run_step(context)

    assert context, "context shouldn't be None"
    assert len(context) == 7, "context should have 7 items"
    assert context['k1'] == 'v1'
    assert context['fileFormatYamlIn'] == './tests/testfiles/testsubst.yaml'
    assert context['fileFormatYamlOut'] == ('./tests/testfiles/out/'
                                            'outsubst.yaml')

    with open('./tests/testfiles/out/outsubst.yaml') as outfile:
        yaml_loader = yaml.YAML(typ='rt', pure=True)
        outcontents = yaml_loader.load(outfile)

    expected = {
        'key': 'v1value1 !£$%# *',
        'key2v2': 'blah',
        # there is a comment here
        'key3': [
            'l1',
            # and another
            '!£$% * v3',
            'l2',
            ['l31v4', {'l32': ['l321', 'l322v5']}]
        ]
    }

    assert outcontents == expected

    # atrociously lazy test clean-up
    os.remove('./tests/testfiles/out/outsubst.yaml')
Example #8
def render_configuration(config):
    '''
    Given a config data structure of nested OrderedDicts, render the config as YAML and return it.
    '''
    dumper = yaml.YAML()
    dumper.indent(mapping=INDENT, sequence=INDENT + SEQUENCE_INDENT, offset=INDENT)
    rendered = io.StringIO()
    dumper.dump(config, rendered)

    return rendered.getvalue()
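A self-contained sketch of the same render-to-string pattern, with the indentation constants filled in with illustrative values (the real INDENT and SEQUENCE_INDENT are defined elsewhere in the module, and the config fragment is made up):

import io
from ruamel import yaml

INDENT = 4            # assumed values for illustration
SEQUENCE_INDENT = 2

dumper = yaml.YAML()
dumper.indent(mapping=INDENT, sequence=INDENT + SEQUENCE_INDENT, offset=INDENT)
rendered = io.StringIO()
# A made-up config fragment standing in for the real nested OrderedDicts.
dumper.dump({'location': {'repositories': ['user@host:repo.borg']}}, rendered)
print(rendered.getvalue())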
Example #9
    def test_standard_ruamel(self):
        """
		Standard yaml_tag method for ruamel.
		"""
        yaml_this = yamlr.YAML()
        # in ruamel we register classes to use yaml_tags
        yaml_this.register_class(StandardYAMLTag)
        loaded = yaml_this.load(text_has_anchors_tags)
        cleaned = recursive_clean(loaded)
        self.assertEqual(cleaned, {'greeting': 'hello mary'})
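For context, register_class is how ruamel's new-style API hooks a yaml_tag class into both its constructor and representer; a minimal hedged sketch with a made-up class (StandardYAMLTag and recursive_clean belong to that test suite and are not reproduced here):

import sys
from ruamel import yaml

class Greeting:
    # Made-up example class; any class with a yaml_tag attribute works the same way.
    yaml_tag = '!Greeting'

    def __init__(self, text):
        self.text = text

yml = yaml.YAML()
yml.register_class(Greeting)
loaded = yml.load("!Greeting\ntext: hello mary\n")
print(loaded.text)            # hello mary
yml.dump(loaded, sys.stdout)  # round-trips with the !Greeting tag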
Example #10
def object_to_yaml_str(obj, options=None):
    if options is None:
        options = {}
    string_stream = StringIO()
    y = yaml.YAML()
    y.indent(mapping=2, sequence=4, offset=4)
    y.dump(obj, string_stream, **options)
    output_str = string_stream.getvalue()
    string_stream.close()
    return output_str
Example #11
def get_yaml_parser_safe():
    """Create the safe yaml parser object with this factory method.

    The safe yaml parser does NOT resolve unknown tags.

    Returns:
        ruamel.yaml.YAML object with safe loader

    """
    return yamler.YAML(typ='safe', pure=True)
Example #12
    def load_config_file(self, filename: str):
        with open(filename) as file:
            y = yaml.YAML()
            data = y.load(file)

        if data is None:
            raise SetupError(
                'No setup configuration found in {}.'.format(filename))

        self.load_config(data)
Example #13
    def setUp(self):
        self.specs_path = 'test_load_namespace.specs.yaml'
        self.namespace_path = 'test_load_namespace.namespace.yaml'

        # write basically empty specs file
        to_dump = {'groups': []}
        with open(self.specs_path, 'w') as tmp:
            yaml_obj = yaml.YAML(typ='safe', pure=True)
            yaml_obj.default_flow_style = False
            yaml_obj.dump(json.loads(json.dumps(to_dump)), tmp)
Example #14
def person_list_yaml_dumper(f, person_list, use_omap=False):
    y = yaml.YAML()
    if not use_omap:
        y.representer.add_representer(
            OrderedDict, yaml.representer.RoundTripRepresenter.represent_dict
        )
    p_list = serializers.person_list_serializer(person_list)
    for name, person in p_list.items():
        y.dump({name: person}, f)
        print("", file=f)
Example #15
 def __init__(self):
     if yaml.version_info >= (0,15):
         self.yaml = yaml.YAML(typ='rt')
         self.yaml.default_flow_style = False
         self._load = self.yaml.load
         self._dump = self.yaml.dump
     else:
         self.yaml = yaml
         self._load = self.yaml.round_trip_load
         self._dump = self.yaml.round_trip_dump
Example #16
def load_schema(schema_path):
    """Load the schema into a dictionary.

    This function allows the schema, like BIDS itself, to be specified in
    a hierarchy of directories and files.
    File names (minus extensions) and directory names become keys
    in the associative array (dict) of entries composed from content
    of files and entire directories.

    Parameters
    ----------
    schema_path : str
        Folder containing yaml files or yaml file.

    Returns
    -------
    dict
        Schema in dictionary form.
    """
    _yaml = yaml.YAML(typ="safe", pure=True)

    schema_path = Path(schema_path)
    objects_dir = schema_path / "objects/"
    rules_dir = schema_path / "rules/"

    if not objects_dir.is_dir() or not rules_dir.is_dir():
        raise ValueError(
            f"Schema path or paths do not exist:\n\t{str(objects_dir)}\n\t{str(rules_dir)}"
        )

    schema = {}
    schema["objects"] = {}
    schema["rules"] = {}

    # Load object definitions. All are present in single files.
    for object_group_file in sorted(objects_dir.glob("*.yaml")):
        lgr.debug(f"Loading {object_group_file.stem} objects.")
        dict_ = _yaml.load(object_group_file.read_text())
        schema["objects"][object_group_file.stem] = dereference_yaml(
            dict_, dict_)

    # Grab single-file rule groups
    for rule_group_file in sorted(rules_dir.glob("*.yaml")):
        lgr.debug(f"Loading {rule_group_file.stem} rules.")
        dict_ = _yaml.load(rule_group_file.read_text())
        schema["rules"][rule_group_file.stem] = dereference_yaml(dict_, dict_)

    # Load folders of rule subgroups.
    for rule_group_file in sorted(rules_dir.glob("*/*.yaml")):
        rule = schema["rules"].setdefault(rule_group_file.parent.name, {})
        lgr.debug(f"Loading {rule_group_file.stem} rules.")
        dict_ = _yaml.load(rule_group_file.read_text())
        rule[rule_group_file.stem] = dereference_yaml(dict_, dict_)

    return schema
Example #17
 def serialize(cls, data):
     if settings.INDENT_YAML_BLOCKS:
         f = StringIO()
         y = yaml.YAML()
         y.indent(mapping=2, sequence=4, offset=2)
         y.dump(data, f)
         text = f.getvalue().strip() + '\n'
     else:
         text = yaml.round_trip_dump(data) or ""
         text = text.replace('- \n', '-\n')
     return "" if text == "{}\n" else text
Example #18
 def load_settings():
     conandir = os.path.expanduser("~/.conan/")
     if not os.path.exists(conandir):
         return
     settings_file = os.path.join(conandir, 'settings.yml')
     with open(settings_file) as f:
         txt = f.read()
         YAML = yaml.YAML()
         data = YAML.load(txt)
         settings = odict(data)
         return settings
Example #19
def load_yaml(yml_file):
    # Load YAML data from file using the "safe" loading option.
    try:
        yaml_ = yaml.YAML(typ="safe")
        with open(yml_file, "rt", encoding="utf-8") as stream:
            return yaml_.load(stream)
    except yaml.constructor.ConstructorError:
        with open(yml_file, "rt", encoding="utf-8") as stream:
            # Ensure that the loader remains backward-compatible with legacy
            # ruamel.yaml versions (prior to 0.17.0).
            return yaml.safe_load(stream)
Example #20
def parse_config(path):
    yaml_path = path
    ext = os.path.splitext(yaml_path)[1].lstrip('.')
    ext = ext.rstrip(' ')
    assert ext == 'yaml'
    with open(yaml_path) as file:
        yml = yaml.YAML()
        yml.allow_duplicate_keys = True
        doc = yml.load(file)
    usr_config = UsrConfigs(doc)
    return usr_config
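For reference, allow_duplicate_keys is the switch that stops ruamel from raising DuplicateKeyError when a mapping repeats a key; a small hedged sketch with a made-up document:

from ruamel import yaml

yml = yaml.YAML()
yml.allow_duplicate_keys = True  # without this line, the load below raises DuplicateKeyError
doc = yml.load("retries: 1\nretries: 2\n")  # illustrative document with a duplicated key
print(doc["retries"])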
Example #21
  def __init__(self, *args, **kwargs):
    super(YamlPrinter, self).__init__(*args, retain_none_values=True, **kwargs)
    # pylint:disable=g-import-not-at-top, Delay import for performance.
    from ruamel import yaml
    # Use pure=True to only use python implementations. Otherwise, it can load
    # the _ruamel_yaml C extension from site packages if
    # CLOUDSDK_PYTHON_SITEPACKAGES=1 is set. There is no guarantee that the C
    # extension is compatible with our vendored ruamel.yaml and the python
    # runtime.
    self._yaml = yaml.YAML(typ='safe', pure=True)
    self._yaml.default_flow_style = False
    self._yaml.old_indent = resource_printer_base.STRUCTURED_INDENTATION
    self._yaml.allow_unicode = True
    self._yaml.encoding = log.LOG_FILE_ENCODING

    null = self.attributes.get('null')
    version = self.attributes.get('version')
    # If no version specified, uses ruamel's default (1.2)
    if version:
      self._yaml.version = str(version)

    def _FloatPresenter(unused_dumper, data):
      return yaml.nodes.ScalarNode(
          'tag:yaml.org,2002:float', resource_transform.TransformFloat(data))

    def _LiteralLinesPresenter(dumper, data):
      return dumper.represent_scalar('tag:yaml.org,2002:str', data, style='|')

    def _NullPresenter(dumper, unused_data):
      if null in ('null', None):
        return dumper.represent_scalar('tag:yaml.org,2002:null', 'null')
      return dumper.represent_scalar('tag:yaml.org,2002:str', null)

    def _OrderedDictPresenter(dumper, data):
      return dumper.represent_mapping('tag:yaml.org,2002:map', data.items())

    def _UndefinedPresenter(dumper, data):
      r = repr(data)
      if r == '[]':
        return dumper.represent_list([])
      if r == '{}':
        return dumper.represent_dict({})
      dumper.represent_undefined(data)

    self._yaml.representer.add_representer(float,
                                           _FloatPresenter)
    self._yaml.representer.add_representer(YamlPrinter._LiteralLines,
                                           _LiteralLinesPresenter)
    self._yaml.representer.add_representer(None,
                                           _UndefinedPresenter)
    self._yaml.representer.add_representer(type(None),
                                           _NullPresenter)
    self._yaml.representer.add_representer(collections.OrderedDict,
                                           _OrderedDictPresenter)
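Setting aside the gcloud-specific plumbing, the core mechanism above is representer.add_representer(type, presenter); a minimal hedged sketch that emits multi-line strings in literal block style:

import sys
from ruamel import yaml

yml = yaml.YAML(typ='safe', pure=True)
yml.default_flow_style = False

def _literal_str_presenter(dumper, data):
    # Illustrative presenter: use "|" literal block style for multi-line strings.
    style = '|' if '\n' in data else None
    return dumper.represent_scalar('tag:yaml.org,2002:str', data, style=style)

yml.representer.add_representer(str, _literal_str_presenter)
yml.dump({'message': 'first line\nsecond line\n'}, sys.stdout)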
Example #22
    def save_commented_model_yaml(self, path):
        """
        Save a fully built and commented version of the model to a YAML file
        at the given ``path``. Comments in the file indicate where values
        were overridden. This is Calliope's internal representation of
        a model directly before the model_data xarray.Dataset is built,
        and can be useful for debugging possible issues in the model
        formulation.

        """
        if not self._model_run or not self._debug_data:
            raise KeyError(
                'This model does not have the fully built model attached, '
                'so `save_commented_model_yaml` is not available. Likely '
                'reason is that the model was built with a version of Calliope '
                'prior to 0.6.5.'
            )

        yaml = ruamel_yaml.YAML()

        model_run_debug = self._model_run.copy()
        try:
            del model_run_debug['timeseries_data']  # Can't be serialised!
        except KeyError:
            # Possible that timeseries_data is already gone if the model
            # was read from a NetCDF file
            pass

        # Turn sets in model_run into lists for YAML serialization
        for k, v in model_run_debug.sets.items():
            model_run_debug.sets[k] = list(v)

        debug_comments = self._debug_data['comments']

        stream = StringIO()
        yaml.dump(model_run_debug.as_dict(), stream=stream)
        debug_yaml = yaml.load(stream.getvalue())

        for k in debug_comments.model_run.keys_nested():
            v = debug_comments.model_run.get_key(k)
            if v:
                keys = k.split('.')
                apply_to_dict(debug_yaml, keys[:-1], 'yaml_add_eol_comment', (v, keys[-1]))

        dumper = ruamel_yaml.dumper.RoundTripDumper
        dumper.ignore_aliases = lambda self, data: True

        with open(path, 'w') as f:
            ruamel_yaml.dump(
                debug_yaml,
                stream=f,
                Dumper=dumper,
                default_flow_style=False
            )
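The yaml_add_eol_comment call routed through apply_to_dict above is part of ruamel's round-trip API; a small hedged sketch on a toy document (the keys and comment text are illustrative, not real Calliope output):

import sys
from ruamel import yaml as ruamel_yaml

yml = ruamel_yaml.YAML()
data = yml.load("techs:\n  ccgt:\n    energy_cap_max: 50\n")
# Attach an end-of-line comment to one key, as the debug dump does for overrides.
data['techs']['ccgt'].yaml_add_eol_comment('overridden by scenario', 'energy_cap_max')
yml.dump(data, sys.stdout)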
Example #23
 def _load_yml(self, yml):
     YAML = yaml.YAML()
     dump = YAML.load(yml)
     dump = odict(dump)
     for i in ('project', 'config', 'flag_aliases'):
         if dump.get(i) is None:
             dump[i] = CommentedMap()
         setattr(self, i, dump[i])
     self._dump = dump
     from . import flags as c4flags
     self.flag_aliases = c4flags.FlagAliases(
         yml=dump.get('flag_aliases', CommentedMap()))
Example #24
 def __call__(self, parser, namespace, values, option_string=None):
     if getattr(namespace, self.dest, None) is None:
         setattr(namespace, self.dest, {})
     # Only doing a copy here because that's what _AppendAction does
     items = copy.copy(getattr(namespace, self.dest))
     key, val = values.split("=", 1)
     if yaml.version_info < (0, 15):
         items[key] = yaml.safe_load(val)
     else:
         yml = yaml.YAML(typ="safe", pure=True)
         items[key] = yml.load(val)
     setattr(namespace, self.dest, items)
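In context, this __call__ belongs to an argparse.Action subclass that parses --option key=value pairs as YAML scalars; a condensed standalone sketch (the class and option names are made up):

import argparse
import copy
from ruamel import yaml

class YamlKeyValueAction(argparse.Action):
    # Hypothetical name; condenses the __call__ logic shown above.
    def __call__(self, parser, namespace, values, option_string=None):
        items = copy.copy(getattr(namespace, self.dest, None) or {})
        key, val = values.split("=", 1)
        # YAML-parse the value so "4" becomes an int and "true" a bool.
        items[key] = yaml.YAML(typ="safe", pure=True).load(val)
        setattr(namespace, self.dest, items)

parser = argparse.ArgumentParser()
parser.add_argument("--set", dest="options", action=YamlKeyValueAction)
args = parser.parse_args(["--set", "workers=4", "--set", "debug=true"])
print(args.options)  # {'workers': 4, 'debug': True}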
Example #25
def get_config():
    with open('config.yaml') as file:
        config = yaml.YAML().load(file)

    def convert(item):
        if isinstance(item, dict):
            for key, value in item.items():
                item[key] = convert(value)
            return types.SimpleNamespace(**item)
        return item

    return convert(config)
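The convert helper gives attribute-style access to the loaded config; a tiny standalone sketch of the same recursion on an in-memory dict (the keys are made up):

import types

def convert(item):
    # Recursively turn nested dicts into SimpleNamespace objects.
    if isinstance(item, dict):
        return types.SimpleNamespace(**{k: convert(v) for k, v in item.items()})
    return item

cfg = convert({"database": {"host": "localhost", "port": 5432}})
print(cfg.database.host, cfg.database.port)  # localhost 5432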
Example #26
def _get_base_image_name(name: str) -> str:
    """
    Get the name of the docker image to use for the project.

    Args:
        name (str): Project name.
    """
    yaml_obj = ryml.YAML()
    with open(_get_project_folder(name) + '/config.yml', 'r') as f:
        doc = yaml_obj.load(f)
    base_docker_image = doc['base-image']
    return base_docker_image
Example #27
def load_config():
    from ruamel import yaml

    with open(OORT_CONFIG, 'r') as fp:
        yaml = yaml.YAML(typ='safe')
        config = yaml.load(fp)

    config['verbose'] = config.get('verbose') or 0
    config['port'] = config.get('port') or 51826
    config['bridge_name'] = config.get('bridge_name') or 'oort'

    return config
Example #28
    def dump(self, configs, path):
        ya_data: CommentedMap = CommentedMap()
        root_comment = getattr(configs, '__doc__', None)
        if root_comment:
            ya_data.yaml_set_start_comment(root_comment)

        ya_data = _to_yaml(ya_data, configs)

        yml = yaml.YAML()

        with open(path, 'w') as f:
            yml.dump(ya_data, f)
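yaml_set_start_comment places a comment block above the mapping it is called on, which is how dump() turns the configs docstring into a leading comment; a short hedged sketch with made-up content:

import sys
from ruamel import yaml
from ruamel.yaml.comments import CommentedMap

data = CommentedMap([("host", "localhost"), ("port", 8080)])
data.yaml_set_start_comment("Service connection settings")  # illustrative comment text
yaml.YAML().dump(data, sys.stdout)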
Example #29
    def __init__(self, *args, **kwargs):
        super(YamlPrinter, self).__init__(*args,
                                          retain_none_values=True,
                                          **kwargs)
        # pylint:disable=g-import-not-at-top, Delay import for performance.
        from ruamel import yaml
        self._yaml = yaml.YAML(typ='safe')
        self._yaml.default_flow_style = False
        self._yaml.old_indent = resource_printer_base.STRUCTURED_INDENTATION
        self._yaml.allow_unicode = True
        self._yaml.encoding = log.LOG_FILE_ENCODING

        null = self.attributes.get('null')
        version = self.attributes.get('version')
        # If no version specified, uses ruamel's default (1.2)
        if version:
            self._yaml.version = str(version)

        def _FloatPresenter(unused_dumper, data):
            return yaml.nodes.ScalarNode(
                'tag:yaml.org,2002:float',
                resource_transform.TransformFloat(data))

        def _LiteralLinesPresenter(dumper, data):
            return dumper.represent_scalar('tag:yaml.org,2002:str',
                                           data,
                                           style='|')

        def _NullPresenter(dumper, unused_data):
            if null in ('null', None):
                return dumper.represent_scalar('tag:yaml.org,2002:null',
                                               'null')
            return dumper.represent_scalar('tag:yaml.org,2002:str', null)

        def _OrderedDictPresenter(dumper, data):
            return dumper.represent_mapping('tag:yaml.org,2002:map',
                                            data.items())

        def _UndefinedPresenter(dumper, data):
            r = repr(data)
            if r == '[]':
                return dumper.represent_list([])
            if r == '{}':
                return dumper.represent_dict({})
            dumper.represent_undefined(data)

        self._yaml.representer.add_representer(float, _FloatPresenter)
        self._yaml.representer.add_representer(YamlPrinter._LiteralLines,
                                               _LiteralLinesPresenter)
        self._yaml.representer.add_representer(None, _UndefinedPresenter)
        self._yaml.representer.add_representer(type(None), _NullPresenter)
        self._yaml.representer.add_representer(collections.OrderedDict,
                                               _OrderedDictPresenter)

    def _edit_yaml(self, yaml_file, field, value):
        """  """

        yaml_parser = yaml.YAML()
        loaded = self._load_yaml(yaml_parser=yaml_parser, yaml_file=yaml_file)

        field_name = field.split("/")[-1]
        self._get_inner_dict_recursively(loaded, field)[field_name] = value

        self._edit_file(yaml_file=yaml_file,
                        yaml_parser=yaml_parser,
                        data=loaded)