Example #1
def test_yaml_extension():

    txt = """
        name: "Model"

        symbols:
            controls: [alpha, beta]
            states: [hei, ho]

        equations:
            arbitrage: |

                β*(c[t+1]/c[t])^(-γ)*r - 1
                β*(c[t+1]/c[t])^(-γ)*(r_2[t+1]-r_1[t+1])
        
        calibration:
            a: 0.1
            b: 10
    """

    import dolang  # monkey-patch yaml

    import yaml

    data = yaml.compose(txt)

    assert "name" in data
    assert "equation" not in data

    assert data["name"].value == "Model"

    assert [e.value for e in data["symbols"]["controls"]] == ["alpha", "beta"]

    assert [*data["symbols"].keys()] == ["controls", "states"]
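Note: the test above works only because importing dolang monkey-patches PyYAML's node classes with dict-style access. With stock PyYAML, a MappingNode exposes its entries only through .value, a list of (key_node, value_node) pairs. A minimal sketch of the unpatched equivalent (the mapping_get helper is illustrative, not part of dolang):

import yaml

def mapping_get(node, key):
    # node.value is a list of (key_node, value_node) tuples
    for k, v in node.value:
        if k.value == key:
            return v
    raise KeyError(key)

root = yaml.compose('name: "Model"')
assert mapping_get(root, "name").value == "Model"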
Example #2
 def do_it(self, istream):
     node = compose(istream)
     self.process_node(node)
     node = self.crop(node)
     if node is not None:
         self.decorate(node)
         return self.serialize(node)
Example #3
    def _lazy_load(self):
        if self._loaded:
            return

        with open(self._name) as fp:
            self._root = yaml.compose(fp)

        # Root value is a list of 2-tuples for name/value of top-level
        # items in yaml file.
        for item in self._root.value:
            if item[0].value == 'env_variables':
                self._env_vars = item[1].value
            if item[0].value == 'libraries':
                self._library_list = item[1].value
            if item[0].value == 'application':
                self._application = item[1].value

        # Libraries item is a list of name/value 2-tuples.
        # Extract name and version for each library.
        self._lib_versions = {}
        for lib_spec in self._library_list:
            name = None
            vers = None
            for lib_item in lib_spec.value:
                if lib_item[0].value == 'name':
                    name = lib_item[1].value
                elif lib_item[0].value == 'version':
                    vers = lib_item[1].value
            if name and vers:
                self._lib_versions[name] = vers
        self._loaded = True
Example #4
    def load_and_merge_partial_documents(input_list):
        """Classify input yamls into their kinds and package into resource_document_map"""
        # load all yamls inputs
        partial_resource_document_map = {}
        for input_file in input_list:
            with open(input_file) as input_file_handle:
                my_dict = yaml.safe_load(input_file_handle)

            with open(input_file) as input_file_handle:
                mapping_node = yaml.compose(input_file_handle)

            kind = list(my_dict.keys())[0]
            name = my_dict.get(kind).get(Constants.PrimaryPropertyName)
            key = (kind, name)

            yaml_document = PartialDocument(mapping_node, input_file)

            # create a map of inputs, grouped by kind and name
            partial_resource_document_map.setdefault(key, []).append(yaml_document)

        merged_document_map = {}
        for key in partial_resource_document_map:
            merged_document = YamlMerge.Merge(
                partial_resource_document_map[key], key[0],
                Constants.PrimaryPropertyName)
            merged_document_map[key] = merged_document

        return merged_document_map
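Note: the file is opened twice here, once for yaml.load and once for yaml.compose. Both APIs also accept a string, so a single read would do; a minimal alternative sketch (load_both is a hypothetical helper name):

import yaml

def load_both(path):
    # read once; feed the same text to both APIs
    with open(path) as fh:
        text = fh.read()
    return yaml.safe_load(text), yaml.compose(text)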
Example #5
def _parse(stream, error_file_name):
    # type: (Any, unicode) -> syntax.IDLParsedSpec
    """
    Parse a YAML document into an idl.syntax tree.

    stream: an io.Stream to parse.
    error_file_name: the file name to use in error messages.
    """
    # pylint: disable=too-many-branches

    # This will raise an exception if the YAML parse fails
    root_node = yaml.compose(stream)

    ctxt = errors.ParserContext(error_file_name,
                                errors.ParserErrorCollection())

    spec = syntax.IDLSpec()

    # If the document is empty, we are done
    if not root_node:
        return syntax.IDLParsedSpec(spec, None)

    if root_node.id != "mapping":
        raise errors.IDLError(
            "Expected a YAML mapping node as root node of IDL document, got '%s' instead"
            % root_node.id)

    field_name_set = set()  # type: Set[str]

    for [first_node, second_node] in root_node.value:

        first_name = first_node.value

        if first_name in field_name_set:
            ctxt.add_duplicate_error(first_node, first_name)
            continue

        if first_name == "global":
            _parse_global(ctxt, spec, second_node)
        elif first_name == "imports":
            _parse_imports(ctxt, spec, second_node)
        elif first_name == "enums":
            _parse_mapping(ctxt, spec, second_node, 'enums', _parse_enum)
        elif first_name == "types":
            _parse_mapping(ctxt, spec, second_node, 'types', _parse_type)
        elif first_name == "structs":
            _parse_mapping(ctxt, spec, second_node, 'structs', _parse_struct)
        elif first_name == "commands":
            _parse_mapping(ctxt, spec, second_node, 'commands', _parse_command)
        else:
            ctxt.add_unknown_root_node_error(first_node)

        field_name_set.add(first_name)

    if ctxt.errors.has_errors():
        return syntax.IDLParsedSpec(None, ctxt.errors)
    else:
        _propagate_globals(spec)

        return syntax.IDLParsedSpec(spec, None)
Example #6
def test_variable_definitions_errors():

    from dolang.grammar import parse_string
    from lark.exceptions import UnexpectedCharacters
    import os
    import yaml

    DIR_PATH, this_filename = os.path.split(__file__)
    DATA_PATH = os.path.join(DIR_PATH, "syntax_errors.yaml")

    txt = open(DATA_PATH, "rt", encoding="utf-8").read()
    data = yaml.compose(txt)

    for v in (data["definitions"]).value:
        k = v[0]
        v = v[1].value
        if v == "None":
            parse_string(k, start="variable")  # no problem
        else:
            line = int(v[0].value)
            column = int(v[1].value)
            try:
                parse_string(k, start="variable")  # expected to raise
            except UnexpectedCharacters as e:
                assert e.column == column
                assert e.line == line
Example #7
def yaml_import(fname, check=True, check_only=False):

    txt = read_file_or_url(fname)

    try:
        data = yaml.compose(txt)
        # print(data)
        # return data
    except Exception:
        print(
            "Error while parsing YAML file. Probable YAML syntax error in file:",
            fname,
        )
        raise

    # if check:
    #     from dolo.linter import lint
    #     data = ry.load(txt, ry.RoundTripLoader)
    #     output = lint(data, source=fname)
    #     if len(output) > 0:
    #         print(output)

    # if check_only:
    #     return output

    data["filename"] = fname

    from dolo.compiler.model import Model

    return Model(data, check=check)
Example #8
def test_syntax_errors():

    from dolang.grammar import parse_string
    import os
    import yaml

    DIR_PATH, this_filename = os.path.split(__file__)
    DATA_PATH = os.path.join(DIR_PATH, "syntax_errors.yaml")

    txt = open(DATA_PATH, "rt", encoding="utf-8").read()
    data = yaml.compose(txt)

    try:
        parse_string(data["equations"]["list"][1], start="equation")
    except Exception as e:
        assert e.line == 13
        assert e.column == 29

    try:
        parse_string(data["equations"]["block"], start="equation_block")
    except Exception as e:
        assert e.line == 17
        assert e.column == 29

    try:
        parse_string(data["equations"]["block2"], start="equation_block")
    except Exception as e:
        assert e.line == 25
        assert e.column == 29

    try:
        parse_string(data["equations"]["inline"][1], start="equation")
    except Exception as e:
        assert e.line == 27
        assert e.column == 26
Example #9
def _parse(stream, error_file_name):
    # type: (Any, unicode) -> syntax.IDLParsedSpec
    """
    Parse a YAML document into an idl.syntax tree.

    stream: an io.Stream to parse.
    error_file_name: the file name to use in error messages.
    """
    # pylint: disable=too-many-branches

    # This will raise an exception if the YAML parse fails
    root_node = yaml.compose(stream)

    ctxt = errors.ParserContext(error_file_name, errors.ParserErrorCollection())

    spec = syntax.IDLSpec()

    # If the document is empty, we are done
    if not root_node:
        return syntax.IDLParsedSpec(spec, None)

    if root_node.id != "mapping":
        raise errors.IDLError(
            "Expected a YAML mapping node as root node of IDL document, got '%s' instead" %
            root_node.id)

    field_name_set = set()  # type: Set[str]

    for [first_node, second_node] in root_node.value:

        first_name = first_node.value

        if first_name in field_name_set:
            ctxt.add_duplicate_error(first_node, first_name)
            continue

        if first_name == "global":
            _parse_global(ctxt, spec, second_node)
        elif first_name == "imports":
            _parse_imports(ctxt, spec, second_node)
        elif first_name == "enums":
            _parse_mapping(ctxt, spec, second_node, 'enums', _parse_enum)
        elif first_name == "types":
            _parse_mapping(ctxt, spec, second_node, 'types', _parse_type)
        elif first_name == "structs":
            _parse_mapping(ctxt, spec, second_node, 'structs', _parse_struct)
        elif first_name == "commands":
            _parse_mapping(ctxt, spec, second_node, 'commands', _parse_command)
        else:
            ctxt.add_unknown_root_node_error(first_node)

        field_name_set.add(first_name)

    if ctxt.errors.has_errors():
        return syntax.IDLParsedSpec(None, ctxt.errors)

    _propagate_globals(spec)

    return syntax.IDLParsedSpec(spec, None)
Example #10
 def from_file(self, filename):
     # compose the file contents; passing the path string would parse the path itself as YAML
     with open(filename) as f:
         data = yaml.compose(f, ProcessLoader)
     data = yaml_to_node(data)
     root = {}
     root['filename'] = filename
     pack = self.builder.common_build(self, root, self.Root,
                                      StrNode(filename), data)
     return pack
Example #11
from enum import Enum

from yaml import compose


class Tag(Enum):
    SEQUENCE = compose("[]").tag
    MAPPING = compose("{}").tag
    STRING = compose("hello").tag
    INT = compose("3").tag
    FLOAT = compose("3.14159265359").tag
    BOOL = compose("true").tag
    NULL = compose("null").tag
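Note: this works because compose runs the implicit resolver, so composing a representative literal is a way to obtain a canonical tag string without hard-coding it. A self-contained check of the same idea with plain PyYAML:

import yaml

FLOAT_TAG = yaml.compose("0.0").tag
assert FLOAT_TAG == "tag:yaml.org,2002:float"
assert yaml.compose("3.14159265359").tag == FLOAT_TAG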
Example #12
 def test_resolver(self):
   composed = yaml.compose(self.yaml_src)
   self.assertEqual(
     composed.value[0].tag,
     QuoteAsStrings.yaml_tag
   )
   self.assertNotEqual(
     composed.value[1].tag,
     QuoteAsStrings.yaml_tag
   )
Example #13
def parse(stream, error_file_name="unknown"):
    # type: (Any, unicode) -> syntax.IDLParsedSpec
    """
    Parse a YAML document into an idl.syntax tree.

    stream: an io.Stream to parse.
    error_file_name: the file name to use in error messages.
    """

    # This will raise an exception if the YAML parse fails
    root_node = yaml.compose(stream)

    ctxt = errors.ParserContext(error_file_name, errors.ParserErrorCollection())

    spec = syntax.IDLSpec()

    # If the document is empty, we are done
    if not root_node:
        return syntax.IDLParsedSpec(spec, None)

    if root_node.id != "mapping":
        raise errors.IDLError(
            "Expected a YAML mapping node as root node of IDL document, got '%s' instead" %
            root_node.id)

    field_name_set = set()  # type: Set[str]

    for node_pair in root_node.value:
        first_node = node_pair[0]
        second_node = node_pair[1]

        first_name = first_node.value

        if first_name in field_name_set:
            ctxt.add_duplicate_error(first_node, first_name)
            continue

        if first_name == "global":
            _parse_global(ctxt, spec, second_node)
        elif first_name == "types":
            _parse_types(ctxt, spec, second_node)
        elif first_name == "structs":
            _parse_structs(ctxt, spec, second_node)
        else:
            ctxt.add_unknown_root_node_error(first_node)

        field_name_set.add(first_name)

    if ctxt.errors.has_errors():
        return syntax.IDLParsedSpec(None, ctxt.errors)
    else:
        return syntax.IDLParsedSpec(spec, None)
Example #14
 def _load_file(context: ILoadingContext, path: Path) -> Optional[Node]:
     """Load a YAML document, emit a MarshPyError on ParseError."""
     with open(path, "r", encoding="utf-8") as yaml_file:
         try:
             return cast(Node, compose(yaml_file))  # type: ignore
         except ParserError as error:
             context.error(
                 ErrorCode.VALUE_ERROR,
                 _("Parse error while loading {} : {}"),
                 path,
                 error,
             )
     return None
Example #15
 def test_resolver(self):
   composed = yaml.compose(self.yaml_src)
   self.assertEqual(
     composed.value[0].tag,
     EscapedDollar.yaml_tag
   )
   self.assertNotEqual(
     composed.tag,
     EscapedDollar.yaml_tag
   )
   self.assertNotEqual(
     composed.value[1].tag,
     EscapedDollar.yaml_tag
   )
Example #16
    def replace_parameter_values(merged_resource_documents,
                                 parameters_to_replace):
        """Replace parameter values with values in parameters_to_replace"""
        yaml_string = yaml.serialize(merged_resource_documents)

        for parameter_iter in parameters_to_replace:
            if yaml_string.find(
                    "'[parameters(''{0}'')]'".format(parameter_iter)) >= 0:
                yaml_string = yaml_string.replace(
                    "'[parameters(''{0}'')]'".format(parameter_iter),
                    parameters_to_replace[parameter_iter])

        yaml_dict = yaml.compose(yaml_string)
        return yaml_dict
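Note: the serialize/compose round trip above relies on yaml.serialize rendering a node tree back to YAML text, which can then be recomposed (note that the function returns a node tree, despite the yaml_dict name). A minimal self-contained illustration:

import yaml

node = yaml.compose("a: 1")
text = yaml.serialize(node)   # node tree back to YAML text
again = yaml.compose(text)    # and back to a node tree
assert again.value[0][0].value == "a"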
Example #17
def test_implicit_resolver(data_filename, detect_filename, verbose=False):
    correct_tag = None
    node = None
    try:
        correct_tag = open(detect_filename, "rb").read().strip()
        node = yaml.compose(open(data_filename, "rb"))
        assert isinstance(node, yaml.SequenceNode), node
        for scalar in node.value:
            assert isinstance(scalar, yaml.ScalarNode), scalar
            assert scalar.tag == correct_tag, (scalar.tag, correct_tag)
    finally:
        if verbose:
            print "CORRECT TAG:", correct_tag
            if hasattr(node, "value"):
                print "CHILDREN:"
                pprint.pprint(node.value)
Example #18
def test_implicit_resolver(data_filename, detect_filename, verbose=False):
    correct_tag = None
    node = None
    try:
        correct_tag = open(detect_filename, 'r').read().strip()
        node = yaml.compose(open(data_filename, 'rb'))
        assert isinstance(node, yaml.SequenceNode), node
        for scalar in node.value:
            assert isinstance(scalar, yaml.ScalarNode), scalar
            assert scalar.tag == correct_tag, (scalar.tag, correct_tag)
    finally:
        if verbose:
            print("CORRECT TAG:", correct_tag)
            if hasattr(node, 'value'):
                print("CHILDREN:")
                pprint.pprint(node.value)
Example #19
def html_tree(stream, loader=yaml.SafeLoader):
    body = visit(yaml.compose(stream, loader))
    return (
        "<!DOCTYPE html>"
        "<html>"
        "<head>"
        '  <meta charset="utf-8">'
        "  <title>YAML Tree Preview</title>"
        '  <link href="https://fonts.googleapis.com/css2'
        '?family=Roboto+Condensed&display=swap" rel="stylesheet">'
        "  <style>"
        "    * { font-family: 'Roboto Condensed', sans-serif; }"
        "    ul { list-style: none; }"
        "    ul.sequence { list-style: '- '; }"
        "    .key { font-weight: bold; }"
        "    .multiline { white-space: pre; }"
        "  </style>"
        "</head>"
        f"<body>{body}</body></html>"
    )
Example #20
 def read_configuration(cls, config_file):
     """read YAML configuration file"""
     # load YAML events/measurements definition
     with open(config_file, 'r') as f:
         doc_yaml = yaml.compose(f)
     # split events & measurements definitions
     measurements, events = list(), list()
     for key, value in doc_yaml.value:
         if value.tag == Measurements.yaml_tag:
             measurements.append((key, value))
         if value.tag == Events.yaml_tag:
             events.append((key, value))
     measurements_yaml = yaml.MappingNode(u'tag:yaml.org,2002:map',
                                          measurements)
     measurements_stream = yaml.serialize(measurements_yaml)
     events_yaml = yaml.MappingNode(u'tag:yaml.org,2002:map', events)
     events_stream = yaml.serialize(events_yaml)
     # return event & measurements definition
     return events_stream, measurements_stream
Example #21
def load(file_obj):
    defs = yaml.compose(file_obj)
    assert isinstance(defs, yaml.nodes.MappingNode)
    file_obj.close()

    kinds = {
        'include': KindInclude,
        'struct': KindStruct,
        'const': KindConst,
        'define': KindDefine,
        'enum': KindEnum,
    }

    defines = []
    for kind, kind_def in defs.value:
        cls = kinds[kind.value]
        for data_name, data_def in kind_def.value:
            data_name = data_name.value
            defines.append(cls(data_name, data_def.value))
    return defines
Example #22
    def transform_yaml_source(self, source, key, add_new_nodes=True):
        """Transform the given yaml source so its value of key matches the binding.

    Has no effect if key is not among the bindings.
    But raises a KeyError if it is in the bindings but not in the source.

    Args:
      source [string]: A YAML document
      key [string]: A key into the bindings.
      add_new_nodes [boolean]: If true, add node for key if not already present.
           Otherwise raise a KeyError.

    Returns:
      Transformed source with value of key replaced to match the bindings.
    """
        try:
            value = self[key]
        except KeyError:
            return source

        if isinstance(value, basestring) and re.search('{[^}]*{', value):
            # Quote strings with nested {} yaml flows
            value = '"{0}"'.format(value)

        # yaml doesn't understand capital letter boolean values.
        if isinstance(value, bool):
            value = str(value).lower()

        yaml_root = yaml.compose(source)
        context, span = self.find_yaml_context(source, yaml_root, key,
                                               not add_new_nodes)

        text_before = context[0]
        text_after = context[1]
        start_cut = span[0]
        end_cut = span[1]
        return ''.join([
            source[0:start_cut], text_before, '{value}'.format(value=value),
            text_after, source[end_cut:]
        ])
Example #23
def value_from_event_stream(content_events, *, safe_loading=True):
    """Convert an iterable of YAML events to a Pythonic value
    
    The *content_events* MUST NOT include stream or document events.
    """
    content_events = iter(content_events)
    events = [yaml.StreamStartEvent(), yaml.DocumentStartEvent()]
    depth = 0
    while True:
        events.append(next(content_events))
        if isinstance(events[-1], yaml.CollectionStartEvent):
            depth += 1
        elif isinstance(events[-1], yaml.CollectionEndEvent):
            depth -= 1

        if depth == 0:
            break
    events.extend([yaml.DocumentEndEvent(), yaml.StreamEndEvent()])
    node = yaml.compose(events, Loader=EventsToNodes)
    node_constructor = (yaml.constructor.SafeConstructor
                        if safe_loading else yaml.constructor.Constructor)()
    return node_constructor.construct_object(node, True)
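Note: the framing events the docstring warns about are the first two and the last two events PyYAML emits for a document. A quick self-contained check (the EventsToNodes loader itself is not shown here):

import yaml

kinds = [type(e).__name__ for e in yaml.parse("a: [1, 2]")]
assert kinds[:2] == ["StreamStartEvent", "DocumentStartEvent"]
assert kinds[-2:] == ["DocumentEndEvent", "StreamEndEvent"]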
Example #24
 def test_resolver(self):
   composed = yaml.compose(self.yaml_src)
   self.assertEqual(
     composed.value[0].tag,
     Symbol.yaml_tag
   )
   self.assertEqual(
     composed.value[1].value[0][0].tag,
     Symbol.yaml_tag
   )
   self.assertEqual(
     composed.value[1].value[0][1].value[1].tag,
     Symbol.yaml_tag
   )
   self.assertNotEqual(
     composed.value[2].tag,
     Symbol.yaml_tag
   )
   self.assertNotEqual(
     composed.value[1].value[0][1].value[0].tag,
     Symbol.yaml_tag
   )
Example #25
    def load(self):
        """
        - should not be empty
        - yaml itself should be valid (should we grab more than 1 yaml error at a time?)
        - we should have a whitelist of fields that can be set at each level, and
          start getting objects out of those, processing grammar if necessary and
          validating other settings, with errors coming all the way back up
        """
        if not isinstance(self.config_text, str):
            self.config_text = self.config_text.decode('utf-8')

        if not self.config_text.strip():
            self._errors.append(
                ConfigError(
                    title='YAML is empty',
                    description='Your configuration file appears to be empty.')
            )
            return

        # simple way to check that yaml itself is valid
        try:
            self.config_dict = yaml.safe_load(self.config_text)
        except yaml.YAMLError as e:
            error = ConfigError.create_from_yaml_error(e)
            self._errors.append(error)
            # could have more than 1 line, keep going

        if self.config_dict:
            # we have valid yaml with data, so start checking the components
            node_tree = yaml.compose(self.config_text)
            # give it the parsed settings, and the node info
            self.config_root = self.config_root_class(
                value=self.config_dict,
                value_node=node_tree,
                context=self.context,
                variables=self.variables,
            )
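Note: compose is used here in addition to safe_load, presumably because nodes carry source positions via start_mark while safe_load discards them. A self-contained peek at the marks:

import yaml

node = yaml.compose("a: 1\nb: 2")
key_b = node.value[1][0]              # scalar node for the key 'b'
print(key_b.start_mark.line, key_b.start_mark.column)  # 1 0 (zero-based)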
Example #26
    def get_parameters(parameters_arr):
        """Get Parameters from the parameters if passed"""
        params_store = {}
        json_object = None
        if parameters_arr:
            for parameters_i in parameters_arr:
                parameters = parameters_i[0]
                if os.path.isfile(parameters):
                    extension = os.path.splitext(parameters)[1]
                    if extension.lower() == ".yaml":
                        # load yaml
                        yaml_file = yaml.compose(open(parameters))
                        json_object = YamlToJson.to_ordered_dict(
                            yaml_file, None, None)
                        params_store.update(json_object)

                    elif extension.lower() == ".json":
                        # load json
                        with open(parameters, 'r') as parameter_file_fp:
                            json_object = json.load(parameter_file_fp)
                        params_store.update(
                            SFMergeUtility.transform_to_kv(
                                json_object['parameters']))
                else:
                    try:
                        # load direct json
                        parameters = parameters.replace("\'", "\"")
                        params = json.loads(parameters)
                        params_store.update(
                            SFMergeUtility.transform_to_kv(params))
                    except json.decoder.JSONDecodeError:
                        print(
                            "Could not load parameters as a file or a direct JSON object."
                        )
        return params_store
Example #27
def check_load(
    source: Union[str, IO[str]],
    object_class: Optional[Type[Any]] = None,
    expected_error: Optional[ErrorCode] = None,
    field: Optional[BaseField] = None,
    location: Optional[str] = None,
    tag_handlers: Optional[List[TagHandler]] = None,
    config: Optional[List[Any]] = None,
) -> Any:
    """Load a yaml document, given the specified parameters."""
    if tag_handlers is None:
        tag_handlers = []

    tag_handlers.append(FailTagHandler())

    handler_called = False

    def _handler(__: Node, code: ErrorCode, ___: str) -> None:
        nonlocal handler_called
        handler_called = True
        assert expected_error is not None
        assert code == expected_error

    context = LoadingContext(_handler, tag_handlers, config=config)
    node = compose(source)

    if field is None:
        assert object_class is not None
        field = ObjectField(object_class=object_class)

    result = context.load(field, node, str(location))

    if expected_error is not None:
        assert handler_called

    return result
Example #28
 def load(self, fd):
     return self._parse(yaml.compose(fd))
Example #29
def canonical_compose(stream):
    return yaml.compose(stream, Loader=CanonicalLoader)
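Note: compose forwards the Loader keyword, so any loader class works the same way; with a stock loader, a one-line self-contained check:

import yaml

node = yaml.compose("x: 1", Loader=yaml.SafeLoader)
assert node.tag == "tag:yaml.org,2002:map"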
Example #30
def load_and_send_flashcards(filename):
    with open(filename) as yaml_input_file:
        log.info("\nSending file '{}' to Anki...\n".format(filename))
        nodes = yaml.compose(yaml_input_file)
        data = yaml.safe_load(yaml.serialize(nodes))
        defaults = data.get('defaults') or {}
        log.debug("defaults: {}".format(defaults))

        def_tags = defaults.get("extraTags", list())
        def_deckName = defaults.get("deckName", "Default")
        def_modelName = defaults.get("modelName", "BasicMathJax")
        def_fields = defaults.get("fields", dict())
        def_useMarkdown = defaults.get("useMarkdown", True)
        def_markdownStyle = defaults.get("markdownStyle", "tango")
        def_markdownLineNums = defaults.get("markdownLineNums", False)
        def_markdownTabLength = defaults.get("markdownTabLength", 4)

        # Extract notes_node
        top_map = {k.value: v.value for k, v in nodes.value}
        note_nodes = top_map['notes']

        connection = AnkiConnectClient()

        # For each note_node in notes_node:
        new_notes_were_created = False
        for note_node in note_nodes:
            # Convert to note_dict
            note = yaml.safe_load(yaml.serialize(note_node))

            tags = note.get('extraTags', def_tags).copy()
            tags.extend(note.get('tags', list()))
            tags = sorted(tags)
            deckName = note.get('deckName', def_deckName)
            modelName = note.get('modelName', def_modelName)

            # Set note's fields to defaults, if not already set.
            fields = dict(def_fields)
            fields.update(note.get("fields", dict()))
            # Convert each field from Markdown (if `useMarkdown` is True).
            fields = {
                k: format_text(
                    str(v),
                    note.get('useMarkdown', def_useMarkdown),
                    note.get('markdownStyle', def_markdownStyle),
                    note.get('markdownLineNums', def_markdownLineNums),
                    note.get('markdownTabLength', def_markdownTabLength),
                )
                for (k, v) in fields.items()
            }

            should_create_new_note = True
            must_replace_existing_note_id = False

            if 'id' in note:
                # Check for note with given ID.
                log.debug("Checking for existing note...")
                note_id = note['id']

                # Get info for existing note.
                response, result = connection.send_as_json(
                    action="notesInfo", params=dict(notes=[note_id]))
                if result.get("error", None) or not result['result'][0]:
                    report_anki_error(result, "\nCan't find note with ID: %s.",
                                      note_id)
                    log.info("The ID will be ignored, and a new note created.")
                    must_replace_existing_note_id = True
                else:
                    should_create_new_note = False

            if should_create_new_note:
                # No provided ID; assume new note should be created.
                log.debug("Creating new note...")

                # Create, obtaining returned ID
                response, result = connection.send_as_json(
                    action="addNote",
                    params=dict(note=dict(
                        deckName=deckName,
                        modelName=modelName,
                        fields=fields,
                        tags=tags,
                    )))
                if result.get("error", None):
                    report_anki_error(result, "Can't create note: %s", note)
                else:
                    # Add ID to note_node
                    note_id = result['result']
                    if must_replace_existing_note_id:
                        prev_id = None
                        for k, v in note_node.value:
                            if k.value == 'id':
                                prev_id, v.value = v.value, str(note_id)
                        if prev_id:
                            log.info("ID %s replaced with %s.", prev_id,
                                     note_id)
                        else:
                            log.warn("Failed to assign new note ID!")
                    else:
                        note_node.value.insert(0, (
                            yaml.ScalarNode(tag='tag:yaml.org,2002:str',
                                            value='id'),
                            yaml.ScalarNode(tag='tag:yaml.org,2002:int',
                                            value=str(note_id)),
                        ))
                    new_notes_were_created = True

            else:
                # Assume provided ID is valid for existing note to be updated.
                log.debug("Updating existing note...")

                # Update note fields...
                params = dict(note=dict(id=note_id, fields=fields))
                log.debug("params: {}".format(params))
                response, result = connection.send_as_json(
                    action="updateNoteFields",
                    params=params,
                )
                if result.get("error", None):
                    report_anki_error(result, "Can't update note: %s", note)
                    continue

                # Update note tags...
                ## First get existing note tags.
                response, result = connection.send_as_json(
                    action="notesInfo", params=dict(notes=[note_id]))
                if result.get("error", None):
                    report_anki_error(result, "Can't get tags for note: %s",
                                      note)
                    continue
                current_tags = sorted(result['result'][0]['tags'])

                # log.debug("current tags: %s", current_tags)
                # log.debug("new tags: %s", tags)
                # log.debug("equal?: %s", current_tags == tags)
                if current_tags != tags:
                    # log.debug("updating tags.")

                    ## Remove existing note tags.
                    response, result = connection.send_as_json(
                        action="removeTags",
                        params=dict(notes=[note_id],
                                    tags=" ".join(current_tags)))
                    if result.get("error", None):
                        report_anki_error(result,
                                          "Can't remove tags for note: %s",
                                          note)

                    ## Add new note tags.
                    response, result = connection.send_as_json(
                        action="addTags",
                        params=dict(notes=[note_id], tags=" ".join(tags)))
                    if result.get("error", None):
                        report_anki_error(result,
                                          "Can't add tags for note: %s", note)

    if new_notes_were_created:
        # If any new notes were created, their IDs must be added to YAML file.
        with open(filename, mode='w') as yaml_output_file:
            log.info(
                "\nUpdating file '{}' with new note IDs...".format(filename))
            yaml_output_file.write(yaml.serialize(nodes))
Example #31
from yaml import compose


def as_node(_):
    return compose("null")
Example #32
 def create_root_node(cls, stream):
     try:
         return yaml.compose(stream)
     except yaml.YAMLError as e:
         raise YamlFormatError(str(e))
Example #33
def load(  # pylint: disable=too-many-locals
    source: Union[str, IO[str]],
    object_class: Optional[Type[ObjectType]] = None,
    tag_handlers: Optional[Iterable[TagHandler]] = None,
    error_handler: Optional[ErrorHandler] = None,
    root_field: Optional[BaseField] = None,
    config: Optional[List[Any]] = None,
) -> LoadResult[ObjectType]:
    """Deserialize a YAML file, stream or string into an object.

    Args:
        source :            Either a string containing YAML, or a stream to a
                            YAML source.
        object_class :      Class of the object to create. It will infer the
                            root field to use from this type (Scalar, list,
                            dictionary, or object).
        tag_handlers :      Custom TagHandlers.
        error_handler :     Called with arguments (node, error_message) when an
                            error occurs. If it's not specified, a MarshPyError
                            will be raised when an error occurs.
        root_field:         The field to use to load the root node. You can
                            specify a type (list, dict, one of the scalar types,
                            or an object type) as the object_class parameter to
                            have it inferred.
        config:             List of objects used to eventually configure custom
                            fields, that will be retrievable through the
                            get_config method.

    """
    # This fails with pyfakefs, no simple way to check this, so disable it for
    # now
    # assert isinstance(source, (str, TextIOBase)), \
    #     _('source parameter must be a string or Text I/O.')

    all_tag_handlers: List[TagHandler] = [
        ImportHandler(),
        GlobHandler(),
        EnvHandler(),
        IfHandler(),
    ]

    if tag_handlers is not None:
        for handler_it in tag_handlers:
            assert isinstance(handler_it, TagHandler), _(
                "tag_handlers items should be subclasses of TagHandler"
            )
        all_tag_handlers.extend(tag_handlers)

    if error_handler is not None:
        assert callable(error_handler), _("error_handler must be a callable object.")

    context = LoadingContext(
        error_handler=error_handler, tag_handlers=all_tag_handlers, config=config
    )

    if root_field is None:
        assert object_class is not None
        assert isclass(object_class), _("object_class must be a type")
        root_field = _ROOT_FIELDS_MAPPING.get(object_class)

    if root_field is None:
        assert object_class is not None
        assert isclass(object_class), _("object_class must be a type")
        root_field = ObjectField(object_class=object_class)

    node = compose(source)  # type: ignore
    node_path = None
    if isinstance(source, TextIOBase) and hasattr(source, "name"):
        node_path = source.name

    result = context.load(root_field, node, node_path)
    if result is UNDEFINED:
        return UNDEFINED

    return cast(ObjectType, result)