Example no. 1
 def get_snippet(self, indent=4, max_length=75):
     # type: (int, int) -> Any
     if self.buffer is None:  # always False
         return None
     head = ""
     start = self.pointer
     while start > 0 and self.buffer[start - 1] not in u'\0\r\n\x85\u2028\u2029':
         start -= 1
         if self.pointer - start > max_length / 2 - 1:
             head = ' ... '
             start += 5
             break
     tail = ""
     end = self.pointer
     while end < len(self.buffer) and self.buffer[end] not in u'\0\r\n\x85\u2028\u2029':
         end += 1
         if end - self.pointer > max_length / 2 - 1:
             tail = ' ... '
             end -= 5
             break
     snippet = utf8(self.buffer[start:end])
     caret = '^'
     caret = '^ (line: {})'.format(self.line + 1)
     return (
         ' ' * indent
         + head
         + snippet
         + tail
         + '\n'
         + ' ' * (indent + self.pointer - start + len(head))
         + caret
     )
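Both get_snippet examples in this listing cut a window of at most max_length characters out of the buffer around self.pointer, mark truncation with ' ... ', and print a caret under the error column (annotated with the line number). A minimal, self-contained sketch of the same excerpt-plus-caret idea using only a plain string; the function name and sample input below are illustrative, not part of ruamel.yaml's API:

    def caret_excerpt(buffer, pointer, indent=4):
        # Take the line containing `pointer` and underline that column.
        start = buffer.rfind('\n', 0, pointer) + 1
        end = buffer.find('\n', pointer)
        if end == -1:
            end = len(buffer)
        line = buffer[start:end]
        return ' ' * indent + line + '\n' + ' ' * (indent + pointer - start) + '^'

    print(caret_excerpt('key: [1, 2\nnext: 3\n', pointer=9))
    # prints the offending line, then a '^' aligned under column 9 (the '2')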
Example no. 2
 def process_directives(self):
     # type: () -> Any
     self.yaml_version = None
     self.tag_handles = {}
     while self.scanner.check_token(DirectiveToken):
         token = self.scanner.get_token()
         if token.name == u'YAML':
             if self.yaml_version is not None:
                 raise ParserError(
                     None, None, 'found duplicate YAML directive', token.start_mark
                 )
             major, minor = token.value
             if major != 1:
                 raise ParserError(
                     None,
                     None,
                     'found incompatible YAML document (version 1.* is required)',
                     token.start_mark,
                 )
             self.yaml_version = token.value
         elif token.name == u'TAG':
             handle, prefix = token.value
             if handle in self.tag_handles:
                 raise ParserError(
                     None, None, 'duplicate tag handle %r' % utf8(handle), token.start_mark
                 )
             self.tag_handles[handle] = prefix
     if bool(self.tag_handles):
         value = self.yaml_version, self.tag_handles.copy()  # type: Any
     else:
         value = self.yaml_version, None
     for key in self.DEFAULT_TAGS:
         if key not in self.tag_handles:
             self.tag_handles[key] = self.DEFAULT_TAGS[key]
     return value
Example no. 3
 def compose_node(self, parent, index):
     # type: (Any, Any) -> Any
     if self.parser.check_event(AliasEvent):
         event = self.parser.get_event()
         alias = event.anchor
         if alias not in self.anchors:
             raise ComposerError(
                 None, None, "found undefined alias %r" % utf8(alias), event.start_mark
             )
         return self.anchors[alias]
     event = self.parser.peek_event()
     anchor = event.anchor
     if anchor is not None:  # have an anchor
         if anchor in self.anchors:
             # raise ComposerError(
             #     "found duplicate anchor %r; first occurrence"
             #     % utf8(anchor), self.anchors[anchor].start_mark,
             #     "second occurrence", event.start_mark)
             ws = (
                 "\nfound duplicate anchor {!r}\nfirst occurrence {}\nsecond occurrence "
                 "{}".format(anchor, self.anchors[anchor].start_mark, event.start_mark)
             )
             warnings.warn(ws, ReusedAnchorWarning)
     self.resolver.descend_resolver(parent, index)
     if self.parser.check_event(ScalarEvent):
         node = self.compose_scalar_node(anchor)
     elif self.parser.check_event(SequenceStartEvent):
         node = self.compose_sequence_node(anchor)
     elif self.parser.check_event(MappingStartEvent):
         node = self.compose_mapping_node(anchor)
     self.resolver.ascend_resolver()
     return node
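compose_node resolves *alias events against the dict of &anchor nodes seen so far and raises ComposerError for an undefined alias; in this code a duplicated anchor only emits a ReusedAnchorWarning (the stricter ComposerError is left commented out). A standalone sketch of that bookkeeping reduced to a plain dict, with no ruamel.yaml event objects; the function names here are illustrative:

    import warnings

    anchors = {}  # anchor name -> previously composed node

    def define_anchor(name, node):
        if name in anchors:
            warnings.warn('found duplicate anchor %r' % name, UserWarning)
        anchors[name] = node

    def resolve_alias(name):
        if name not in anchors:
            raise KeyError('found undefined alias %r' % name)
        return anchors[name]

    define_anchor('base', {'retries': 3})
    define_anchor('base', {'retries': 5})  # warns instead of raising
    print(resolve_alias('base'))           # {'retries': 5}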
Example no. 4
 def get_snippet(self, indent=4, max_length=75):
     # type: (int, int) -> Any
     if self.buffer is None:  # always False
         return None
     head = ''
     start = self.pointer
     while (start > 0 and
            self.buffer[start - 1] not in u'\0\r\n\x85\u2028\u2029'):
         start -= 1
         if self.pointer - start > max_length / 2 - 1:
             head = ' ... '
             start += 5
             break
     tail = ''
     end = self.pointer
     while (end < len(self.buffer) and
            self.buffer[end] not in u'\0\r\n\x85\u2028\u2029'):
         end += 1
         if end - self.pointer > max_length / 2 - 1:
             tail = ' ... '
             end -= 5
             break
     snippet = utf8(self.buffer[start:end])
     caret = '^'
     caret = '^ (line: {})'.format(self.line + 1)
     return ' ' * indent + head + snippet + tail + '\n' + ' ' * (indent + self.pointer - start + len(head)) + caret
Example no. 5
 def compose_node(self, parent, index):
     # type: (Any, Any) -> Any
     if self.parser.check_event(AliasEvent):
         event = self.parser.get_event()
         alias = event.anchor
         if alias not in self.anchors:
             raise ComposerError(
                 None, None, 'found undefined alias %r' % utf8(alias), event.start_mark
             )
         return self.anchors[alias]
     event = self.parser.peek_event()
     anchor = event.anchor
     if anchor is not None:  # have an anchor
         if anchor in self.anchors:
             # raise ComposerError(
             #     "found duplicate anchor %r; first occurrence"
             #     % utf8(anchor), self.anchors[anchor].start_mark,
             #     "second occurrence", event.start_mark)
             ws = (
                 '\nfound duplicate anchor {!r}\nfirst occurrence {}\nsecond occurrence '
                 '{}'.format((anchor), self.anchors[anchor].start_mark, event.start_mark)
             )
             warnings.warn(ws, ReusedAnchorWarning)
     self.resolver.descend_resolver(parent, index)
     if self.parser.check_event(ScalarEvent):
         node = self.compose_scalar_node(anchor)
     elif self.parser.check_event(SequenceStartEvent):
         node = self.compose_sequence_node(anchor)
     elif self.parser.check_event(MappingStartEvent):
         node = self.compose_mapping_node(anchor)
     self.resolver.ascend_resolver()
     return node
Example no. 6
    def parse_node(self, block=False, indentless_sequence=False):
        # type: (bool, bool) -> Any
        if self.scanner.check_token(AliasToken):
            token = self.scanner.get_token()
            event = AliasEvent(token.value, token.start_mark, token.end_mark)  # type: Any
            self.state = self.states.pop()
            return event

        anchor = None
        tag = None
        start_mark = end_mark = tag_mark = None
        if self.scanner.check_token(AnchorToken):
            token = self.scanner.get_token()
            start_mark = token.start_mark
            end_mark = token.end_mark
            anchor = token.value
            if self.scanner.check_token(TagToken):
                token = self.scanner.get_token()
                tag_mark = token.start_mark
                end_mark = token.end_mark
                tag = token.value
        elif self.scanner.check_token(TagToken):
            token = self.scanner.get_token()
            start_mark = tag_mark = token.start_mark
            end_mark = token.end_mark
            tag = token.value
            if self.scanner.check_token(AnchorToken):
                token = self.scanner.get_token()
                start_mark = tag_mark = token.start_mark
                end_mark = token.end_mark
                anchor = token.value
        if tag is not None:
            handle, suffix = tag
            if handle is not None:
                if handle not in self.tag_handles:
                    raise ParserError(
                        'while parsing a node',
                        start_mark,
                        'found undefined tag handle %r' % utf8(handle),
                        tag_mark,
                    )
                tag = self.transform_tag(handle, suffix)
            else:
                tag = suffix
        # if tag == u'!':
        #     raise ParserError("while parsing a node", start_mark,
        #             "found non-specific tag '!'", tag_mark,
        #      "Please check 'http://pyyaml.org/wiki/YAMLNonSpecificTag'
        #     and share your opinion.")
        if start_mark is None:
            start_mark = end_mark = self.scanner.peek_token().start_mark
        event = None
        implicit = tag is None or tag == u'!'
        if indentless_sequence and self.scanner.check_token(BlockEntryToken):
            comment = None
            pt = self.scanner.peek_token()
            if pt.comment and pt.comment[0]:
                comment = [pt.comment[0], []]
                pt.comment[0] = None
            end_mark = self.scanner.peek_token().end_mark
            event = SequenceStartEvent(
                anchor, tag, implicit, start_mark, end_mark, flow_style=False, comment=comment
            )
            self.state = self.parse_indentless_sequence_entry
            return event

        if self.scanner.check_token(ScalarToken):
            token = self.scanner.get_token()
            # self.scanner.peek_token_same_line_comment(token)
            end_mark = token.end_mark
            if (token.plain and tag is None) or tag == u'!':
                implicit = (True, False)
            elif tag is None:
                implicit = (False, True)
            else:
                implicit = (False, False)
            # nprint('se', token.value, token.comment)
            event = ScalarEvent(
                anchor,
                tag,
                implicit,
                token.value,
                start_mark,
                end_mark,
                style=token.style,
                comment=token.comment,
            )
            self.state = self.states.pop()
        elif self.scanner.check_token(FlowSequenceStartToken):
            pt = self.scanner.peek_token()
            end_mark = pt.end_mark
            event = SequenceStartEvent(
                anchor,
                tag,
                implicit,
                start_mark,
                end_mark,
                flow_style=True,
                comment=pt.comment,
            )
            self.state = self.parse_flow_sequence_first_entry
        elif self.scanner.check_token(FlowMappingStartToken):
            pt = self.scanner.peek_token()
            end_mark = pt.end_mark
            event = MappingStartEvent(
                anchor,
                tag,
                implicit,
                start_mark,
                end_mark,
                flow_style=True,
                comment=pt.comment,
            )
            self.state = self.parse_flow_mapping_first_key
        elif block and self.scanner.check_token(BlockSequenceStartToken):
            end_mark = self.scanner.peek_token().start_mark
            # should inserting the comment be dependent on the
            # indentation?
            pt = self.scanner.peek_token()
            comment = pt.comment
            # nprint('pt0', type(pt))
            if comment is None or comment[1] is None:
                comment = pt.split_comment()
            # nprint('pt1', comment)
            event = SequenceStartEvent(
                anchor, tag, implicit, start_mark, end_mark, flow_style=False, comment=comment
            )
            self.state = self.parse_block_sequence_first_entry
        elif block and self.scanner.check_token(BlockMappingStartToken):
            end_mark = self.scanner.peek_token().start_mark
            comment = self.scanner.peek_token().comment
            event = MappingStartEvent(
                anchor, tag, implicit, start_mark, end_mark, flow_style=False, comment=comment
            )
            self.state = self.parse_block_mapping_first_key
        elif anchor is not None or tag is not None:
            # Empty scalars are allowed even if a tag or an anchor is
            # specified.
            event = ScalarEvent(anchor, tag, (implicit, False), "", start_mark, end_mark)
            self.state = self.states.pop()
        else:
            if block:
                node = 'block'
            else:
                node = 'flow'
            token = self.scanner.peek_token()
            raise ParserError(
                'while parsing a %s node' % node,
                start_mark,
                'expected the node content, but found %r' % token.id,
                token.start_mark,
            )
        return event
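The tag handling at the top of parse_node ties back to process_directives above: a TagToken carries a (handle, suffix) pair, the handle must appear in the tag_handles mapping built from %TAG directives plus DEFAULT_TAGS, and it is then expanded into a full tag (transform_tag). A self-contained sketch of that lookup; the default prefixes are the standard YAML handles, while the function name and the example.com prefix are made up for illustration:

    DEFAULT_TAGS = {u'!': u'!', u'!!': u'tag:yaml.org,2002:'}

    def resolve_tag(tag_handles, handle, suffix):
        # Roughly the handle/suffix branch above, without the mark bookkeeping.
        if handle is None:
            return suffix                      # tag was already given verbatim
        if handle not in tag_handles:
            raise ValueError('found undefined tag handle %r' % handle)
        return tag_handles[handle] + suffix

    handles = dict(DEFAULT_TAGS)
    handles[u'!e!'] = u'tag:example.com,2019:'      # as if '%TAG !e! tag:example.com,2019:' was read
    print(resolve_tag(handles, u'!!', u'str'))      # tag:yaml.org,2002:str
    print(resolve_tag(handles, u'!e!', u'widget'))  # tag:example.com,2019:widget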
Example no. 7
    def parse_node(self, block=False, indentless_sequence=False):
        # type: (bool, bool) -> Any
        if self.scanner.check_token(AliasToken):
            token = self.scanner.get_token()
            event = AliasEvent(token.value, token.start_mark, token.end_mark)  # type: Any
            self.state = self.states.pop()
            return event

        anchor = None
        tag = None
        start_mark = end_mark = tag_mark = None
        if self.scanner.check_token(AnchorToken):
            token = self.scanner.get_token()
            start_mark = token.start_mark
            end_mark = token.end_mark
            anchor = token.value
            if self.scanner.check_token(TagToken):
                token = self.scanner.get_token()
                tag_mark = token.start_mark
                end_mark = token.end_mark
                tag = token.value
        elif self.scanner.check_token(TagToken):
            token = self.scanner.get_token()
            start_mark = tag_mark = token.start_mark
            end_mark = token.end_mark
            tag = token.value
            if self.scanner.check_token(AnchorToken):
                token = self.scanner.get_token()
                start_mark = tag_mark = token.start_mark
                end_mark = token.end_mark
                anchor = token.value
        if tag is not None:
            handle, suffix = tag
            if handle is not None:
                if handle not in self.tag_handles:
                    raise ParserError(
                        'while parsing a node',
                        start_mark,
                        'found undefined tag handle %r' % utf8(handle),
                        tag_mark,
                    )
                tag = self.transform_tag(handle, suffix)
            else:
                tag = suffix
        # if tag == u'!':
        #     raise ParserError("while parsing a node", start_mark,
        #             "found non-specific tag '!'", tag_mark,
        #      "Please check 'http://pyyaml.org/wiki/YAMLNonSpecificTag'
        #     and share your opinion.")
        if start_mark is None:
            start_mark = end_mark = self.scanner.peek_token().start_mark
        event = None
        implicit = tag is None or tag == u'!'
        if indentless_sequence and self.scanner.check_token(BlockEntryToken):
            comment = None
            pt = self.scanner.peek_token()
            if pt.comment and pt.comment[0]:
                comment = [pt.comment[0], []]
                pt.comment[0] = None
            end_mark = self.scanner.peek_token().end_mark
            event = SequenceStartEvent(
                anchor, tag, implicit, start_mark, end_mark, flow_style=False, comment=comment
            )
            self.state = self.parse_indentless_sequence_entry
            return event

        if self.scanner.check_token(ScalarToken):
            token = self.scanner.get_token()
            # self.scanner.peek_token_same_line_comment(token)
            end_mark = token.end_mark
            if (token.plain and tag is None) or tag == u'!':
                implicit = (True, False)
            elif tag is None:
                implicit = (False, True)
            else:
                implicit = (False, False)
            # nprint('se', token.value, token.comment)
            event = ScalarEvent(
                anchor,
                tag,
                implicit,
                token.value,
                start_mark,
                end_mark,
                style=token.style,
                comment=token.comment,
            )
            self.state = self.states.pop()
        elif self.scanner.check_token(FlowSequenceStartToken):
            pt = self.scanner.peek_token()
            end_mark = pt.end_mark
            event = SequenceStartEvent(
                anchor,
                tag,
                implicit,
                start_mark,
                end_mark,
                flow_style=True,
                comment=pt.comment,
            )
            self.state = self.parse_flow_sequence_first_entry
        elif self.scanner.check_token(FlowMappingStartToken):
            pt = self.scanner.peek_token()
            end_mark = pt.end_mark
            event = MappingStartEvent(
                anchor,
                tag,
                implicit,
                start_mark,
                end_mark,
                flow_style=True,
                comment=pt.comment,
            )
            self.state = self.parse_flow_mapping_first_key
        elif block and self.scanner.check_token(BlockSequenceStartToken):
            end_mark = self.scanner.peek_token().start_mark
            # should inserting the comment be dependent on the
            # indentation?
            pt = self.scanner.peek_token()
            comment = pt.comment
            # nprint('pt0', type(pt))
            if comment is None or comment[1] is None:
                comment = pt.split_comment()
            # nprint('pt1', comment)
            event = SequenceStartEvent(
                anchor, tag, implicit, start_mark, end_mark, flow_style=False, comment=comment
            )
            self.state = self.parse_block_sequence_first_entry
        elif block and self.scanner.check_token(BlockMappingStartToken):
            end_mark = self.scanner.peek_token().start_mark
            comment = self.scanner.peek_token().comment
            event = MappingStartEvent(
                anchor, tag, implicit, start_mark, end_mark, flow_style=False, comment=comment
            )
            self.state = self.parse_block_mapping_first_key
        elif anchor is not None or tag is not None:
            # Empty scalars are allowed even if a tag or an anchor is
            # specified.
            event = ScalarEvent(anchor, tag, (implicit, False), "", start_mark, end_mark)
            self.state = self.states.pop()
        else:
            if block:
                node = 'block'
            else:
                node = 'flow'
            token = self.scanner.peek_token()
            raise ParserError(
                'while parsing a %s node' % node,
                start_mark,
                'expected the node content, but found %r' % token.id,
                token.start_mark,
            )
        return event