Example #1
    def test_yaml_data(self, yaml, tmpdir):
        from collections.abc import Mapping

        idx = 0
        typ = None
        yaml_version = None

        docs = self.docs(yaml)
        if isinstance(docs[0], Mapping):
            d = docs[0]
            typ = d.get('type')
            yaml_version = d.get('yaml_version')
            if 'python' in d:
                if not check_python_version(d['python']):
                    pytest.skip('unsupported version')
            idx += 1
        data = output = confirm = python = None
        for doc in docs[idx:]:
            if isinstance(doc, Output):
                output = doc
            elif isinstance(doc, Assert):
                confirm = doc
            elif isinstance(doc, Python):
                python = doc
                if typ is None:
                    typ = 'python_run'
            elif isinstance(doc, YAMLData):
                data = doc
            else:
                print('no handler for type:', type(doc), repr(doc))
                raise AssertionError()
        if typ is None:
            if data is not None and output is not None:
                typ = 'rt'
            elif data is not None and confirm is not None:
                typ = 'load_assert'
            else:
                assert data is not None
                typ = 'rt'
        print('type:', typ)
        if data is not None:
            print('data:', data.value, end='')
        print('output:', output.value if output is not None else output)
        if typ == 'rt':
            self.round_trip(data, output, yaml_version=yaml_version)
        elif typ == 'python_run':
            inp = None if output is None or data is None else data
            self.run_python(python,
                            output if output is not None else data,
                            tmpdir,
                            input=inp)
        elif typ == 'load_assert':
            self.load_assert(data, confirm, yaml_version=yaml_version)
        elif typ == 'comment':
            actions = []
            self.insert_comments(data, actions)
        else:
            print(_F('\n>>>>>> run type unknown: "{typ}" <<<<<<\n', typ=typ))
            raise AssertionError()
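The excerpts on this page use ruamel.yaml's internal `_F` helper (from `ruamel.yaml.compat`) for string formatting. To run an excerpt in isolation, a minimal stand-in is enough; this sketch assumes `_F` simply forwards to `str.format`:

def _F(msg, *args, **kwargs):
    # Stand-in for ruamel.yaml.compat._F: format `msg` with the given
    # positional and keyword arguments (assumption: the real helper is a
    # thin wrapper around str.format).
    return msg.format(*args, **kwargs)

print(_F('type: {typ!s}', typ='rt'))  # -> type: rt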
Example #2
 def add_path_resolver(cls, tag, path, kind=None):
     # type: (Any, Any, Any) -> None
     # Note: `add_path_resolver` is experimental.  The API could be changed.
     # `path` is a pattern that is matched against the path from the
     # root to the node that is being considered.  `path` elements are
     # tuples `(node_check, index_check)`.  `node_check` is a node class:
     # `ScalarNode`, `SequenceNode`, `MappingNode` or `None`.  `None`
     # matches any kind of node.  `index_check` can be `None`, a boolean
     # value, a string value, or a number.  `None` and `False` match against
     # any _value_ of sequence and mapping nodes.  `True` matches against
     # any _key_ of a mapping node.  A string `index_check` matches against
     # a mapping value whose corresponding scalar key equals the
     # `index_check` value.  An integer `index_check` matches against
     # a sequence value whose index equals `index_check`.
     if 'yaml_path_resolvers' not in cls.__dict__:
         cls.yaml_path_resolvers = cls.yaml_path_resolvers.copy()
     new_path = []  # type: List[Any]
     for element in path:
         if isinstance(element, (list, tuple)):
             if len(element) == 2:
                 node_check, index_check = element
             elif len(element) == 1:
                 node_check = element[0]
                 index_check = True
             else:
                 raise ResolverError(
                     _F('Invalid path element: {element!s}',
                        element=element))
         else:
             node_check = None
             index_check = element
         if node_check is str:
             node_check = ScalarNode
         elif node_check is list:
             node_check = SequenceNode
         elif node_check is dict:
             node_check = MappingNode
         elif (node_check not in [ScalarNode, SequenceNode, MappingNode]
               and not isinstance(node_check, str)
               and node_check is not None):
             raise ResolverError(
                 _F('Invalid node checker: {node_check!s}',
                    node_check=node_check))
         if not isinstance(index_check,
                           (str, int)) and index_check is not None:
             raise ResolverError(
                 _F('Invalid index checker: {index_check!s}',
                    index_check=index_check))
         new_path.append((node_check, index_check))
     if kind is str:
         kind = ScalarNode
     elif kind is list:
         kind = SequenceNode
     elif kind is dict:
         kind = MappingNode
     elif kind not in [ScalarNode, SequenceNode, MappingNode
                       ] and kind is not None:
         raise ResolverError(_F('Invalid node kind: {kind!s}', kind=kind))
     cls.yaml_path_resolvers[tuple(new_path), kind] = tag
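A minimal usage sketch for the path-resolver registration above. It assumes `add_path_resolver` is exposed as a classmethod (as in PyYAML) and that `Resolver` and the node classes live in their usual ruamel.yaml modules; the `!port` tag and the `['server', 'port']` path are made up for illustration:

from ruamel.yaml.nodes import ScalarNode
from ruamel.yaml.resolver import Resolver

class MyResolver(Resolver):
    # subclass so the registration does not touch the shared base class
    pass

# Resolve the scalar reached via the top-level key 'server' and then the
# key 'port' to the tag '!port'. Plain strings in the path become
# (None, <key string>) checks, per the normalization above.
MyResolver.add_path_resolver('!port', ['server', 'port'], kind=str)

# The pattern is stored keyed by (normalized path, kind):
key = (((None, 'server'), (None, 'port')), ScalarNode)
print(MyResolver.yaml_path_resolvers[key])  # -> !port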
Example #3
 def represent_name(self, data):
     # type: (Any) -> Any
     try:
         name = _F(
             '{modname!s}.{qualname!s}', modname=data.__module__, qualname=data.__qualname__
         )
     except AttributeError:
         # ToDo: check if this can be reached in Py3
         name = _F('{modname!s}.{name!s}', modname=data.__module__, name=data.__name__)
     return self.represent_scalar('tag:yaml.org,2002:python/name:' + name, "")
Example #4
 def represent_complex(self, data):
     # type: (Any) -> Any
     if data.imag == 0.0:
         data = repr(data.real)
     elif data.real == 0.0:
         data = _F('{data_imag!r}j', data_imag=data.imag)
     elif data.imag > 0:
         data = _F('{data_real!r}+{data_imag!r}j', data_real=data.real, data_imag=data.imag)
     else:
         data = _F('{data_real!r}{data_imag!r}j', data_real=data.real, data_imag=data.imag)
     return self.represent_scalar('tag:yaml.org,2002:python/complex', data)
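A standalone rendition of the branch logic above, to show the scalar text each case produces (the function name is local to this sketch; f-strings stand in for the `_F` calls):

def complex_scalar(value):
    # mirrors the four branches of represent_complex
    if value.imag == 0.0:
        return repr(value.real)
    if value.real == 0.0:
        return f'{value.imag!r}j'
    if value.imag > 0:
        return f'{value.real!r}+{value.imag!r}j'
    return f'{value.real!r}{value.imag!r}j'

for z in (complex(3, 0), complex(0, 2), complex(1, 2), complex(1, -2)):
    print(z, '->', complex_scalar(z))
# (3+0j) -> 3.0
# 2j -> 2.0j
# (1+2j) -> 1.0+2.0j
# (1-2j) -> 1.0-2.0j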
Example #5
 def move_old_comment(self, target, empty=False):
     # type: (Any, bool) -> Any
     """move a comment from this token to target (normally next token)
     used to combine e.g. comments before a BlockEntryToken to the
     ScalarToken that follows it
      empty is a special case for empty values -> comment after key
     """
     c = self.comment
     if c is None:
         return
     # don't push beyond last element
     if isinstance(target, (StreamEndToken, DocumentStartToken)):
         return
     delattr(self, '_comment')
     tc = target.comment
     if not tc:  # target comment, just insert
         # special for empty value in key: value issue 25
         if empty:
             c = [c[0], c[1], None, None, c[0]]
         target._comment = c
         # nprint('mco2:', self, target, target.comment, empty)
         return self
     if c[0] and tc[0] or c[1] and tc[1]:
         raise NotImplementedError(
             _F('overlap in comment {c!r} {tc!r}', c=c, tc=tc))
     if c[0]:
         tc[0] = c[0]
     if c[1]:
         tc[1] = c[1]
     return self
Example #6
 def parse_block_mapping_key(self):
     # type: () -> Any
     if self.scanner.check_token(KeyToken):
         token = self.scanner.get_token()
         self.move_token_comment(token)
         if not self.scanner.check_token(KeyToken, ValueToken,
                                         BlockEndToken):
             self.states.append(self.parse_block_mapping_value)
             return self.parse_block_node_or_indentless_sequence()
         else:
             self.state = self.parse_block_mapping_value
             return self.process_empty_scalar(token.end_mark)
     if self.resolver.processing_version > (
             1, 1) and self.scanner.check_token(ValueToken):
         self.state = self.parse_block_mapping_value
         return self.process_empty_scalar(
             self.scanner.peek_token().start_mark)
     if not self.scanner.check_token(BlockEndToken):
         token = self.scanner.peek_token()
         raise ParserError(
             'while parsing a block mapping',
             self.marks[-1],
             _F('expected <block end>, but found {token_id!r}',
                token_id=token.id),
             token.start_mark,
         )
     token = self.scanner.get_token()
     self.move_token_comment(token)
     event = MappingEndEvent(token.start_mark,
                             token.end_mark,
                             comment=token.comment)
     self.state = self.states.pop()
     self.marks.pop()
     return event
Example #7
 def parse_block_sequence_entry(self):
     # type: () -> Any
     if self.scanner.check_token(BlockEntryToken):
         token = self.scanner.get_token()
         self.move_token_comment(token)
         if not self.scanner.check_token(BlockEntryToken, BlockEndToken):
             self.states.append(self.parse_block_sequence_entry)
             return self.parse_block_node()
         else:
             self.state = self.parse_block_sequence_entry
             return self.process_empty_scalar(token.end_mark)
     if not self.scanner.check_token(BlockEndToken):
         token = self.scanner.peek_token()
         raise ParserError(
             'while parsing a block collection',
             self.marks[-1],
             _F('expected <block end>, but found {token_id!r}',
                token_id=token.id),
             token.start_mark,
         )
     token = self.scanner.get_token()  # BlockEndToken
     event = SequenceEndEvent(token.start_mark,
                              token.end_mark,
                              comment=token.comment)
     self.state = self.states.pop()
     self.marks.pop()
     return event
Example #8
 def move_new_comment(self, target, empty=False):
     # type: (Any, bool) -> Any
     """move a comment from this token to target (normally next token)
     used to combine e.g. comments before a BlockEntryToken to the
     ScalarToken that follows it
      empty is a special case for empty values -> comment after key
     """
     c = self.comment
     if c is None:
         return
     # don't push beyond last element
     if isinstance(target, (StreamEndToken, DocumentStartToken)):
         return
     delattr(self, '_comment')
     tc = target.comment
     if not tc:  # target comment, just insert
         # special for empty value in key: value issue 25
         if empty:
             c = [c[0], c[1], c[2]]
         target._comment = c
         # nprint('mco2:', self, target, target.comment, empty)
         return self
     # if self and target have both pre, eol or post comments, something seems wrong
     for idx in range(3):
         if c[idx] is not None and tc[idx] is not None:
             raise NotImplementedError(
                 _F('overlap in comment {c!r} {tc!r}', c=c, tc=tc))
     # move the comment parts
     for idx in range(3):
         if c[idx]:
             tc[idx] = c[idx]
     return self
Example #9
 def __repr__(self):
     # type: () -> Any
     # attributes = [key for key in self.__slots__ if not key.endswith('_mark') and
     #               hasattr('self', key)]
     attributes = [
         key for key in self.__slots__ if not key.endswith('_mark')
     ]
     attributes.sort()
     # arguments = ', '.join(
     #  [_F('{key!s}={gattr!r})', key=key, gattr=getattr(self, key)) for key in attributes]
     # )
     arguments = [
         _F('{key!s}={gattr!r}', key=key, gattr=getattr(self, key))
         for key in attributes
     ]
     if SHOW_LINES:
         try:
             arguments.append('line: ' + str(self.start_mark.line))
         except:  # NOQA
             pass
     try:
         arguments.append('comment: ' + str(self._comment))
     except:  # NOQA
         pass
     return '{}({})'.format(self.__class__.__name__, ', '.join(arguments))
Example #10
 def compose_node(self, parent, index):
     # type: (Any, Any) -> Any
     if self.parser.check_event(AliasEvent):
         event = self.parser.get_event()
         alias = event.anchor
         if alias not in self.anchors:
             raise ComposerError(
                 None,
                 None,
                 _F('found undefined alias {alias!r}', alias=alias),
                 event.start_mark,
             )
         return self.return_alias(self.anchors[alias])
     event = self.parser.peek_event()
     anchor = event.anchor
     if anchor is not None:  # have an anchor
         if anchor in self.anchors:
             # raise ComposerError(
             #     "found duplicate anchor %r; first occurrence"
             #     % (anchor), self.anchors[anchor].start_mark,
             #     "second occurrence", event.start_mark)
             ws = (
                 '\nfound duplicate anchor {!r}\nfirst occurrence {}\nsecond occurrence '
                 '{}'.format((anchor), self.anchors[anchor].start_mark,
                             event.start_mark))
             warnings.warn(ws, ReusedAnchorWarning)
     self.resolver.descend_resolver(parent, index)
     if self.parser.check_event(ScalarEvent):
         node = self.compose_scalar_node(anchor)
     elif self.parser.check_event(SequenceStartEvent):
         node = self.compose_sequence_node(anchor)
     elif self.parser.check_event(MappingStartEvent):
         node = self.compose_mapping_node(anchor)
     self.resolver.ascend_resolver()
     return node
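A small check of the alias handling above, assuming the usual ruamel.yaml entry point (`YAML().load`) and the `ReusedAnchorWarning` class referenced in the code; the document text is made up:

import io
import warnings
from ruamel.yaml import YAML

yaml = YAML()
# An alias (*defaults) resolves to the node stored under its anchor (&defaults).
data = yaml.load(io.StringIO('defaults: &defaults {a: 1}\ncopy: *defaults\n'))
print(data['copy']['a'])  # -> 1

# Re-using an anchor name is not an error; compose_node only warns.
with warnings.catch_warnings(record=True) as caught:
    warnings.simplefilter('always')
    yaml.load(io.StringIO('a: &x 1\nb: &x 2\n'))
print([w.category.__name__ for w in caught])  # expected: ['ReusedAnchorWarning']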
Example #11
 def __repr__(self):
     # type: () -> Any
     attributes = [
         key for key in
         ['anchor', 'tag', 'implicit', 'value', 'flow_style', 'style']
         if hasattr(self, key)
     ]
     arguments = ', '.join([
         _F('{key!s}={attr!r}', key=key, attr=getattr(self, key))
         for key in attributes
     ])
     if self.comment not in [None, CommentCheck]:
         arguments += ', comment={!r}'.format(self.comment)
     return _F(
          '{self_class_name!s}({arguments!s})',
         self_class_name=self.__class__.__name__,
         arguments=arguments,
     )
Example #12
 def __str__(self):
     # type: () -> Any
     where = _F(
         '  in "{sname!s}", line {sline1:d}, column {scolumn1:d}',
         sname=self.name,
         sline1=self.line + 1,
         scolumn1=self.column + 1,
     )
     return where
Example #13
 def __repr__(self):
     # type: () -> Any
     snippet = self.get_snippet()
     where = _F(
         '  in "{sname!s}", line {sline1:d}, column {scolumn1:d}',
         sname=self.name,
         sline1=self.line + 1,
         scolumn1=self.column + 1,
     )
     if snippet is not None:
         where += ':\n' + snippet
     return where
Example #14
 def __repr__(self):
     # type: () -> Any
     if True:
         arguments = []
         if hasattr(self, 'value'):
             # if you use repr(getattr(self, 'value')) then flake8 complains about
             # abuse of getattr with a constant. When you change to self.value
             # then mypy throws an error
             arguments.append(repr(self.value))  # type: ignore
         for key in ['anchor', 'tag', 'implicit', 'flow_style', 'style']:
             v = getattr(self, key, None)
             if v is not None:
                 arguments.append(_F('{key!s}={v!r}', key=key, v=v))
         if self.comment not in [None, CommentCheck]:
             arguments.append('comment={!r}'.format(self.comment))
         if SHOW_LINES:
             arguments.append('({}:{}/{}:{})'.format(
                 self.start_mark.line,
                 self.start_mark.column,
                 self.end_mark.line,
                 self.end_mark.column,
             ))
         arguments = ', '.join(arguments)  # type: ignore
     else:
         attributes = [
             key for key in
             ['anchor', 'tag', 'implicit', 'value', 'flow_style', 'style']
             if hasattr(self, key)
         ]
         arguments = ', '.join([
             _F('{k!s}={attr!r}', k=key, attr=getattr(self, key))
             for key in attributes
         ])
         if self.comment not in [None, CommentCheck]:
             arguments += ', comment={!r}'.format(self.comment)
     return _F(
         '{self_class_name!s}({arguments!s})',
         self_class_name=self.__class__.__name__,
         arguments=arguments,
     )
Example #15
 def __str__(self):
     # type: () -> str
     if isinstance(self.character, bytes):
         return _F(
             "'{self_encoding!s}' codec can't decode byte #x{ord_self_character:02x}: "
             '{self_reason!s}\n'
             '  in "{self_name!s}", position {self_position:d}',
             self_encoding=self.encoding,
             ord_self_character=ord(self.character),
             self_reason=self.reason,
             self_name=self.name,
             self_position=self.position,
         )
     else:
         return _F(
              'unacceptable character #x{self_character:04x}: {self_reason!s}\n'
             '  in "{self_name!s}", position {self_position:d}',
             self_character=self.character,
             self_reason=self.reason,
             self_name=self.name,
             self_position=self.position,
         )
Example #16
 def process_directives(self):
     # type: () -> Any
     yaml_version = None
     self.tag_handles = {}
     while self.scanner.check_token(DirectiveToken):
         token = self.scanner.get_token()
         if token.name == 'YAML':
             if yaml_version is not None:
                 raise ParserError(None, None,
                                   'found duplicate YAML directive',
                                   token.start_mark)
             major, minor = token.value
             if major != 1:
                 raise ParserError(
                     None,
                     None,
                     'found incompatible YAML document (version 1.* is required)',
                     token.start_mark,
                 )
             yaml_version = token.value
         elif token.name == 'TAG':
             handle, prefix = token.value
             if handle in self.tag_handles:
                 raise ParserError(
                     None,
                     None,
                     _F('duplicate tag handle {handle!r}', handle=handle),
                     token.start_mark,
                 )
             self.tag_handles[handle] = prefix
     if bool(self.tag_handles):
         value = yaml_version, self.tag_handles.copy()  # type: Any
     else:
         value = yaml_version, None
     if self.loader is not None and hasattr(self.loader, 'tags'):
         self.loader.version = yaml_version
         if self.loader.tags is None:
             self.loader.tags = {}
         for k in self.tag_handles:
             self.loader.tags[k] = self.tag_handles[k]
     for key in self.DEFAULT_TAGS:
         if key not in self.tag_handles:
             self.tag_handles[key] = self.DEFAULT_TAGS[key]
     return value
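An illustration of the directive handling above (the `!e!` handle and its URI are made up; nothing here calls ruamel.yaml):

doc = """\
%YAML 1.2
%TAG !e! tag:example.com,2022:
--- !e!point
x: 1
y: 2
"""
# For this stream, process_directives records yaml_version = (1, 2) and
# tag_handles = {'!e!': 'tag:example.com,2022:'}, so a node tagged !e!point
# resolves to 'tag:example.com,2022:point'. A second %YAML directive or a
# repeated %TAG handle raises ParserError.
print(doc)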
Example #17
 def parse_document_start(self):
     # type: () -> Any
     # Parse any extra document end indicators.
     while self.scanner.check_token(DocumentEndToken):
         self.scanner.get_token()
     # Parse an explicit document.
     if not self.scanner.check_token(StreamEndToken):
         token = self.scanner.peek_token()
         start_mark = token.start_mark
         version, tags = self.process_directives()
         if not self.scanner.check_token(DocumentStartToken):
             raise ParserError(
                 None,
                 None,
                 _F(
                     "expected '<document start>', but found {pt!r}",
                     pt=self.scanner.peek_token().id,
                 ),
                 self.scanner.peek_token().start_mark,
             )
         token = self.scanner.get_token()
         end_mark = token.end_mark
         # if self.loader is not None and \
         #    end_mark.line != self.scanner.peek_token().start_mark.line:
         #     self.loader.scalar_after_indicator = False
         event = DocumentStartEvent(start_mark,
                                    end_mark,
                                    explicit=True,
                                    version=version,
                                    tags=tags)  # type: Any
         self.states.append(self.parse_document_end)
         self.state = self.parse_document_content
     else:
         # Parse the end of the stream.
         token = self.scanner.get_token()
         event = StreamEndEvent(token.start_mark,
                                token.end_mark,
                                comment=token.comment)
         assert not self.states
         assert not self.marks
         self.state = None
     return event
Example #18
 def parse_flow_mapping_key(self, first=False):
     # type: (Any) -> Any
     if not self.scanner.check_token(FlowMappingEndToken):
         if not first:
             if self.scanner.check_token(FlowEntryToken):
                 self.scanner.get_token()
             else:
                 token = self.scanner.peek_token()
                 raise ParserError(
                     'while parsing a flow mapping',
                     self.marks[-1],
                     _F("expected ',' or '}}', but got {token_id!r}",
                        token_id=token.id),
                     token.start_mark,
                 )
         if self.scanner.check_token(KeyToken):
             token = self.scanner.get_token()
             if not self.scanner.check_token(ValueToken, FlowEntryToken,
                                             FlowMappingEndToken):
                 self.states.append(self.parse_flow_mapping_value)
                 return self.parse_flow_node()
             else:
                 self.state = self.parse_flow_mapping_value
                 return self.process_empty_scalar(token.end_mark)
         elif self.resolver.processing_version > (
                 1, 1) and self.scanner.check_token(ValueToken):
             self.state = self.parse_flow_mapping_value
             return self.process_empty_scalar(
                 self.scanner.peek_token().end_mark)
         elif not self.scanner.check_token(FlowMappingEndToken):
             self.states.append(self.parse_flow_mapping_empty_value)
             return self.parse_flow_node()
     token = self.scanner.get_token()
     event = MappingEndEvent(token.start_mark,
                             token.end_mark,
                             comment=token.comment)
     self.state = self.states.pop()
     self.marks.pop()
     return event
Example #19
 def __repr__(self):
     # type: () -> Any
     value = self.value
     # if isinstance(value, list):
     #     if len(value) == 0:
     #         value = '<empty>'
     #     elif len(value) == 1:
     #         value = '<1 item>'
     #     else:
     #         value = f'<{len(value)} items>'
     # else:
     #     if len(value) > 75:
     #         value = repr(value[:70]+' ... ')
     #     else:
     #         value = repr(value)
     value = repr(value)
     return _F(
         '{class_name!s}(tag={self_tag!r}, value={value!s})',
         class_name=self.__class__.__name__,
         self_tag=self.tag,
         value=value,
     )
Example #20
    def parse_flow_sequence_entry(self, first=False):
        # type: (bool) -> Any
        if not self.scanner.check_token(FlowSequenceEndToken):
            if not first:
                if self.scanner.check_token(FlowEntryToken):
                    self.scanner.get_token()
                else:
                    token = self.scanner.peek_token()
                    raise ParserError(
                        'while parsing a flow sequence',
                        self.marks[-1],
                        _F("expected ',' or ']', but got {token_id!r}",
                           token_id=token.id),
                        token.start_mark,
                    )

            if self.scanner.check_token(KeyToken):
                token = self.scanner.peek_token()
                event = MappingStartEvent(None,
                                          None,
                                          True,
                                          token.start_mark,
                                          token.end_mark,
                                          flow_style=True)  # type: Any
                self.state = self.parse_flow_sequence_entry_mapping_key
                return event
            elif not self.scanner.check_token(FlowSequenceEndToken):
                self.states.append(self.parse_flow_sequence_entry)
                return self.parse_flow_node()
        token = self.scanner.get_token()
        event = SequenceEndEvent(token.start_mark,
                                 token.end_mark,
                                 comment=token.comment)
        self.state = self.states.pop()
        self.marks.pop()
        return event
Example #21
 def __repr__(self):
     # type: () -> str
     return _F('LineCol({line}, {col})', line=self.line, col=self.col)  # type: ignore
Example #22
    def parse_node(self, block=False, indentless_sequence=False):
        # type: (bool, bool) -> Any
        if self.scanner.check_token(AliasToken):
            token = self.scanner.get_token()
            event = AliasEvent(token.value, token.start_mark,
                               token.end_mark)  # type: Any
            self.state = self.states.pop()
            return event

        anchor = None
        tag = None
        start_mark = end_mark = tag_mark = None
        if self.scanner.check_token(AnchorToken):
            token = self.scanner.get_token()
            self.move_token_comment(token)
            start_mark = token.start_mark
            end_mark = token.end_mark
            anchor = token.value
            if self.scanner.check_token(TagToken):
                token = self.scanner.get_token()
                tag_mark = token.start_mark
                end_mark = token.end_mark
                tag = token.value
        elif self.scanner.check_token(TagToken):
            token = self.scanner.get_token()
            start_mark = tag_mark = token.start_mark
            end_mark = token.end_mark
            tag = token.value
            if self.scanner.check_token(AnchorToken):
                token = self.scanner.get_token()
                start_mark = tag_mark = token.start_mark
                end_mark = token.end_mark
                anchor = token.value
        if tag is not None:
            handle, suffix = tag
            if handle is not None:
                if handle not in self.tag_handles:
                    raise ParserError(
                        'while parsing a node',
                        start_mark,
                        _F('found undefined tag handle {handle!r}',
                           handle=handle),
                        tag_mark,
                    )
                tag = self.transform_tag(handle, suffix)
            else:
                tag = suffix
        # if tag == '!':
        #     raise ParserError("while parsing a node", start_mark,
        #             "found non-specific tag '!'", tag_mark,
        #      "Please check 'http://pyyaml.org/wiki/YAMLNonSpecificTag'
        #     and share your opinion.")
        if start_mark is None:
            start_mark = end_mark = self.scanner.peek_token().start_mark
        event = None
        implicit = tag is None or tag == '!'
        if indentless_sequence and self.scanner.check_token(BlockEntryToken):
            comment = None
            pt = self.scanner.peek_token()
            if self.loader and self.loader.comment_handling is None:
                if pt.comment and pt.comment[0]:
                    comment = [pt.comment[0], []]
                    pt.comment[0] = None
            elif self.loader:
                if pt.comment:
                    comment = pt.comment
            end_mark = self.scanner.peek_token().end_mark
            event = SequenceStartEvent(anchor,
                                       tag,
                                       implicit,
                                       start_mark,
                                       end_mark,
                                       flow_style=False,
                                       comment=comment)
            self.state = self.parse_indentless_sequence_entry
            return event

        if self.scanner.check_token(ScalarToken):
            token = self.scanner.get_token()
            # self.scanner.peek_token_same_line_comment(token)
            end_mark = token.end_mark
            if (token.plain and tag is None) or tag == '!':
                implicit = (True, False)
            elif tag is None:
                implicit = (False, True)
            else:
                implicit = (False, False)
            # nprint('se', token.value, token.comment)
            event = ScalarEvent(
                anchor,
                tag,
                implicit,
                token.value,
                start_mark,
                end_mark,
                style=token.style,
                comment=token.comment,
            )
            self.state = self.states.pop()
        elif self.scanner.check_token(FlowSequenceStartToken):
            pt = self.scanner.peek_token()
            end_mark = pt.end_mark
            event = SequenceStartEvent(
                anchor,
                tag,
                implicit,
                start_mark,
                end_mark,
                flow_style=True,
                comment=pt.comment,
            )
            self.state = self.parse_flow_sequence_first_entry
        elif self.scanner.check_token(FlowMappingStartToken):
            pt = self.scanner.peek_token()
            end_mark = pt.end_mark
            event = MappingStartEvent(
                anchor,
                tag,
                implicit,
                start_mark,
                end_mark,
                flow_style=True,
                comment=pt.comment,
            )
            self.state = self.parse_flow_mapping_first_key
        elif block and self.scanner.check_token(BlockSequenceStartToken):
            end_mark = self.scanner.peek_token().start_mark
            # should inserting the comment be dependent on the
            # indentation?
            pt = self.scanner.peek_token()
            comment = pt.comment
            # nprint('pt0', type(pt))
            if comment is None or comment[1] is None:
                comment = pt.split_old_comment()
            # nprint('pt1', comment)
            event = SequenceStartEvent(anchor,
                                       tag,
                                       implicit,
                                       start_mark,
                                       end_mark,
                                       flow_style=False,
                                       comment=comment)
            self.state = self.parse_block_sequence_first_entry
        elif block and self.scanner.check_token(BlockMappingStartToken):
            end_mark = self.scanner.peek_token().start_mark
            comment = self.scanner.peek_token().comment
            event = MappingStartEvent(anchor,
                                      tag,
                                      implicit,
                                      start_mark,
                                      end_mark,
                                      flow_style=False,
                                      comment=comment)
            self.state = self.parse_block_mapping_first_key
        elif anchor is not None or tag is not None:
            # Empty scalars are allowed even if a tag or an anchor is
            # specified.
            event = ScalarEvent(anchor, tag, (implicit, False), "", start_mark,
                                end_mark)
            self.state = self.states.pop()
        else:
            if block:
                node = 'block'
            else:
                node = 'flow'
            token = self.scanner.peek_token()
            raise ParserError(
                _F('while parsing a {node!s} node', node=node),
                start_mark,
                _F('expected the node content, but found {token_id!r}',
                   token_id=token.id),
                token.start_mark,
            )
        return event
Example #23
    def represent_object(self, data):
        # type: (Any) -> Any
        # We use __reduce__ API to save the data. data.__reduce__ returns
        # a tuple of length 2-5:
        #   (function, args, state, listitems, dictitems)

        # For reconstructing, we call function(*args), then set its state,
        # listitems, and dictitems if they are not None.

        # A special case is when function.__name__ == '__newobj__'. In this
        # case we create the object with args[0].__new__(*args).

        # Another special case is when __reduce__ returns a string - we don't
        # support it.

        # We produce a !!python/object, !!python/object/new or
        # !!python/object/apply node.

        cls = type(data)
        if cls in copyreg.dispatch_table:  # type: ignore
            reduce = copyreg.dispatch_table[cls](data)  # type: ignore
        elif hasattr(data, '__reduce_ex__'):
            reduce = data.__reduce_ex__(2)
        elif hasattr(data, '__reduce__'):
            reduce = data.__reduce__()
        else:
            raise RepresenterError(
                _F('cannot represent object: {data!r}', data=data))
        reduce = (list(reduce) + [None] * 5)[:5]
        function, args, state, listitems, dictitems = reduce
        args = list(args)
        if state is None:
            state = {}
        if listitems is not None:
            listitems = list(listitems)
        if dictitems is not None:
            dictitems = dict(dictitems)
        if function.__name__ == '__newobj__':
            function = args[0]
            args = args[1:]
            tag = 'tag:yaml.org,2002:python/object/new:'
            newobj = True
        else:
            tag = 'tag:yaml.org,2002:python/object/apply:'
            newobj = False
        try:
            function_name = _F('{fun!s}.{qualname!s}',
                               fun=function.__module__,
                               qualname=function.__qualname__)
        except AttributeError:
            # ToDo: check if this can be reached in Py3
            function_name = _F('{fun!s}.{name!s}',
                               fun=function.__module__,
                               name=function.__name__)
        if not args and not listitems and not dictitems and isinstance(
                state, dict) and newobj:
            return self.represent_mapping(
                'tag:yaml.org,2002:python/object:' + function_name, state)
        if not listitems and not dictitems and isinstance(state,
                                                          dict) and not state:
            return self.represent_sequence(tag + function_name, args)
        value = {}
        if args:
            value['args'] = args
        if state or not isinstance(state, dict):
            value['state'] = state
        if listitems:
            value['listitems'] = listitems
        if dictitems:
            value['dictitems'] = dictitems
        return self.represent_mapping(tag + function_name, value)
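A short look at the __reduce_ex__ tuple that represent_object consumes (the Point class is made up; the padding mirrors the code above):

class Point:
    def __init__(self, x, y):
        self.x, self.y = x, y

p = Point(1, 2)
reduce = (list(p.__reduce_ex__(2)) + [None] * 5)[:5]
function, args, state, listitems, dictitems = reduce
print(function.__name__)  # -> __newobj__  (protocol 2, so the "new" branch is taken)
print(args)               # -> (<class '__main__.Point'>,)
print(state)              # -> {'x': 1, 'y': 2}
# With no remaining args, no listitems/dictitems and a plain dict state,
# represent_object emits a 'tag:yaml.org,2002:python/object:__main__.Point'
# mapping containing the state.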
Example #24
 def represent_undefined(self, data):
     # type: (Any) -> None
     raise RepresenterError(
         _F('cannot represent an object: {data!s}', data=data))