Example #1
    def __init__(self, context: TokenizationContext, opening_delimiter: str, opening_delimiter_position: StreamPosition, opening_delimiter_position_after: StreamPosition):
        Tokenizer.__init__(self, context)

        if opening_delimiter != self.__class__.OPENING_DELIMITER:
            raise TokenizingError(opening_delimiter_position, "Multiple-escape tokenizer called with unknown opening sequence “%s”" % opening_delimiter)

        self.opening_delimiter_position = opening_delimiter_position
        self.opening_delimiter_position_after = opening_delimiter_position_after
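For orientation, here is a minimal, self-contained sketch of the pattern this constructor assumes: the subclass pins down the single opening sequence it accepts via a class attribute and rejects anything else. The stub Tokenizer and TokenizingError below are illustrative stand-ins for the project's real classes, and the "|" delimiter is an assumption, not the actual sequence.

# Hedged sketch: stand-ins for the real Tokenizer machinery.
class Tokenizer:
    def __init__(self, context):
        self.context = context

class TokenizingError(Exception):
    def __init__(self, position, message):
        super().__init__("%s: %s" % (position, message))

class MultipleEscapeTokenizer(Tokenizer):
    OPENING_DELIMITER = "|"  # assumed delimiter, for illustration only

    def __init__(self, context, opening_delimiter, position, position_after):
        Tokenizer.__init__(self, context)
        # Reject openers this tokenizer was not registered for, as above.
        if opening_delimiter != self.__class__.OPENING_DELIMITER:
            raise TokenizingError(position,
                                  "unknown opening sequence %r" % opening_delimiter)
        self.opening_delimiter_position = position
        self.opening_delimiter_position_after = position_after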
Example #2
    def __init__(self, context: TokenizationContext, opening_delimiter: str,
                 opening_delimiter_position: StreamPosition,
                 opening_delimiter_position_after: StreamPosition):
        Tokenizer.__init__(self, context)

        self.opening_delimiter = opening_delimiter
        self.opening_delimiter_position = opening_delimiter_position
        self.opening_delimiter_position_after = opening_delimiter_position_after
Example #3
    def __init__(self, context: TokenizationContext, opening_delimiter: str, opening_delimiter_position: StreamPosition, opening_delimiter_position_after: StreamPosition):
        Tokenizer.__init__(self, context)

        self.opening_delimiter = opening_delimiter
        self.opening_delimiter_position = opening_delimiter_position
        self.opening_delimiter_position_after = opening_delimiter_position_after

        assert len(self.__class__.CLOSING_DELIMITER) == 1 # TODO: handle larger closing delimiters?
Example #4
    def __init__(self, context: TokenizationContext, opening_delimiter: str, opening_delimiter_position: StreamPosition, opening_delimiter_position_after: StreamPosition):
        Tokenizer.__init__(self, context)

        self.opening_delimiter = opening_delimiter
        self.opening_delimiter_position = opening_delimiter_position
        self.opening_delimiter_position_after = opening_delimiter_position_after

        if opening_delimiter in self.__class__.DELIMITER_PAIRS:
            self.closing_delimiter = self.__class__.DELIMITER_PAIRS[opening_delimiter]
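The DELIMITER_PAIRS variants follow the same shape, with a class-level mapping from each opening sequence to its closer. A hedged sketch, reusing the stub Tokenizer from the first sketch (the pairs shown are assumptions, not the project's actual table):

class DelimiterTokenizer(Tokenizer):
    DELIMITER_PAIRS = {"(": ")", "[": "]", "{": "}"}  # assumed pairs

    def __init__(self, context, opening_delimiter, position, position_after):
        Tokenizer.__init__(self, context)
        self.opening_delimiter = opening_delimiter
        self.opening_delimiter_position = position
        self.opening_delimiter_position_after = position_after
        # Like the example above, an unregistered opener is silently ignored
        # here; Examples 5 and 10 raise TokenizingError instead.
        if opening_delimiter in self.__class__.DELIMITER_PAIRS:
            self.closing_delimiter = self.__class__.DELIMITER_PAIRS[opening_delimiter]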
Example #5
    def __init__(self, context: TokenizationContext, opening_delimiter: str, opening_delimiter_position: StreamPosition, opening_delimiter_position_after: StreamPosition):
        Tokenizer.__init__(self, context)

        if opening_delimiter not in self.__class__.DELIMITER_PAIRS:
            raise TokenizingError(opening_delimiter_position, "Unregistered delimiter pair, for opening sequence “%s”" % opening_delimiter)

        self.opening_delimiter = opening_delimiter[1:]
        self.opening_delimiter_position = opening_delimiter_position
        self.opening_delimiter_position_after = opening_delimiter_position_after
        self.closing_delimiter = self.__class__.DELIMITER_PAIRS[opening_delimiter]
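Note the [1:] slice: the stored opener drops its first character, presumably a dispatch prefix such as '#' (compare Examples 12 and 13), while the DELIMITER_PAIRS lookup still keys on the full sequence.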
Example #6
    def __init__(self, context: TokenizationContext, opening_delimiter: str,
                 opening_delimiter_position: StreamPosition,
                 opening_delimiter_position_after: StreamPosition):
        Tokenizer.__init__(self, context)

        self.opening_delimiter = opening_delimiter
        self.opening_delimiter_position = opening_delimiter_position
        self.opening_delimiter_position_after = opening_delimiter_position_after

        assert len(self.__class__.CLOSING_DELIMITER) == 1  # TODO: handle larger closing delimiters?
Example #7
    def __init__(self, context: TokenizationContext, opening_delimiter: str,
                 opening_delimiter_position: StreamPosition,
                 opening_delimiter_position_after: StreamPosition):
        Tokenizer.__init__(self, context)

        self.opening_delimiter = opening_delimiter
        self.opening_delimiter_position = opening_delimiter_position
        self.opening_delimiter_position_after = opening_delimiter_position_after

        if opening_delimiter in self.__class__.DELIMITER_PAIRS:
            self.closing_delimiter = self.__class__.DELIMITER_PAIRS[
                opening_delimiter]
Example #8
    def __init__(self, context: TokenizationContext, opening_delimiter: str,
                 opening_delimiter_position: StreamPosition,
                 opening_delimiter_position_after: StreamPosition):
        Tokenizer.__init__(self, context)

        if opening_delimiter != self.__class__.OPENING_DELIMITER:
            raise TokenizingError(
                opening_delimiter_position,
                "Multiple-escape tokenizer called with unknown opening sequence “%s”"
                % opening_delimiter)

        self.opening_delimiter_position = opening_delimiter_position
        self.opening_delimiter_position_after = opening_delimiter_position_after
Example #9
    def __init__(self, context: TokenizationContext, opening_delimiter: str,
                 opening_delimiter_position: StreamPosition,
                 opening_delimiter_position_after: StreamPosition):
        Tokenizer.__init__(self, context)

        self.opening_delimiter = opening_delimiter
        self.opening_delimiter_position = opening_delimiter_position
        self.opening_delimiter_position_after = opening_delimiter_position_after
        self.closing_delimiter = opening_delimiter

        # toggle '```' tokenizer between macro character and closing sequence
        readtable = context.readtable
        opening_delimiter_properties = readtable.query(opening_delimiter)[0]
        self.my_tokenizer_name = opening_delimiter_properties["tokenizer"]
        del opening_delimiter_properties["tokenizer"]
        opening_delimiter_properties["type"] = RT.CLOSING
Example #10
    def __init__(self, context: TokenizationContext, opening_delimiter: str,
                 opening_delimiter_position: StreamPosition,
                 opening_delimiter_position_after: StreamPosition):
        Tokenizer.__init__(self, context)

        if opening_delimiter not in self.__class__.DELIMITER_PAIRS:
            raise TokenizingError(
                opening_delimiter_position,
                "Unregistered delimiter pair, for opening sequence “%s”" %
                opening_delimiter)

        self.opening_delimiter = opening_delimiter[1:]
        self.opening_delimiter_position = opening_delimiter_position
        self.opening_delimiter_position_after = opening_delimiter_position_after
        self.closing_delimiter = self.__class__.DELIMITER_PAIRS[
            opening_delimiter]
Example #11
    def __init__(self, context: TokenizationContext, opening_delimiter: str, opening_delimiter_position: StreamPosition, opening_delimiter_position_after: StreamPosition):
        Tokenizer.__init__(self, context)

        self.opening_delimiter = opening_delimiter
        self.opening_delimiter_position = opening_delimiter_position
        self.opening_delimiter_position_after = opening_delimiter_position_after
        self.closing_delimiter = opening_delimiter

        # toggle '```' tokenizer between macro character and closing sequence
        readtable = context.readtable
        opening_delimiter_properties = readtable.query(opening_delimiter)[0]
        self.my_tokenizer_name = opening_delimiter_properties["tokenizer"]
        del opening_delimiter_properties["tokenizer"]
        opening_delimiter_properties["type"] = RT.CLOSING
Example #12
    def __init__(self,
                 context: TokenizationContext,
                 opening_delimiter: str,
                 opening_delimiter_position: StreamPosition,
                 opening_delimiter_position_after: StreamPosition):
        Tokenizer.__init__(self, context)

        if opening_delimiter not in self.__class__.DELIMITER_PAIRS:
            raise TokenizingError(opening_delimiter_position,
                                  "Lisp mode tokenizer called with unknown opening delimiter sequence `%s`" % opening_delimiter)

        self.opening_delimiter_position = opening_delimiter_position
        self.opening_delimiter_position_after = opening_delimiter_position_after
        self.closing_delimiter = self.__class__.DELIMITER_PAIRS[opening_delimiter]

        if opening_delimiter[0] == "#":
            self.opening_delimiter = opening_delimiter
            # toggle '(' tokenizer between LispMode and Delimiter tokenizers
            readtable = context.readtable
            self.set_delimiter_tokenizers(readtable, "LispModeTokenizer", "SharpDelimiterTokenizer")
        else:
            self.opening_delimiter = '#' + opening_delimiter
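Note that both branches normalize self.opening_delimiter to the '#'-prefixed spelling: a '#(' opener is stored as-is, while a bare '(' gains the prefix, so downstream matching only has to handle one form. Only the '#'-entered path re-points the '(' readtable entry at LispModeTokenizer, which presumably makes parentheses nested inside lisp mode reuse this tokenizer instead of the ordinary SharpDelimiterTokenizer.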
Example #13
    def __init__(self, context: TokenizationContext, opening_delimiter: str,
                 opening_delimiter_position: StreamPosition,
                 opening_delimiter_position_after: StreamPosition):
        Tokenizer.__init__(self, context)

        if opening_delimiter not in self.__class__.DELIMITER_PAIRS:
            raise TokenizingError(
                opening_delimiter_position,
                "Lisp mode tokenizer called with unknown opening delimiter sequence `%s`"
                % opening_delimiter)

        self.opening_delimiter_position = opening_delimiter_position
        self.opening_delimiter_position_after = opening_delimiter_position_after
        self.closing_delimiter = self.__class__.DELIMITER_PAIRS[
            opening_delimiter]

        if opening_delimiter[0] == "#":
            self.opening_delimiter = opening_delimiter
            # toggle '(' tokenizer between LispMode and Delimiter tokenizers
            readtable = context.readtable
            self.set_delimiter_tokenizers(readtable, "LispModeTokenizer",
                                          "SharpDelimiterTokenizer")
        else:
            self.opening_delimiter = '#' + opening_delimiter
Example #14
    def __init__(self, context: TokenizationContext, opening_delimiter: str, opening_delimiter_position: StreamPosition, opening_delimiter_position_after: StreamPosition):
        Tokenizer.__init__(self, context)

        self.opening_delimiter = opening_delimiter
        self.opening_delimiter_position = opening_delimiter_position
        self.opening_delimiter_position_after = opening_delimiter_position_after
Example #15
    def __init__(self, context: TokenizationContext):
        assert isinstance(context.stream, IndentedCharacterStream)
        Tokenizer.__init__(self, context)

        self.last_begin_token = None