Example 1
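A multiple-escape tokenizer variant: it accepts only the class's fixed OPENING_DELIMITER, raising a TokenizingError for any other opening sequence, and then records the positions just before and just after the delimiter.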
    def __init__(self, context: TokenizationContext, opening_delimiter: str,
                 opening_delimiter_position: StreamPosition,
                 opening_delimiter_position_after: StreamPosition):
        Tokenizer.__init__(self, context)

        if opening_delimiter != self.__class__.OPENING_DELIMITER:
            raise TokenizingError(opening_delimiter_position, "Multiple-escape tokenizer called with unknown opening sequence “%s”" % opening_delimiter)

        self.opening_delimiter_position = opening_delimiter_position
        self.opening_delimiter_position_after = opening_delimiter_position_after
Example 2
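The simplest variant: it stores the opening delimiter and its before/after stream positions without any validation.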
    def __init__(self, context: TokenizationContext, opening_delimiter: str,
                 opening_delimiter_position: StreamPosition,
                 opening_delimiter_position_after: StreamPosition):
        Tokenizer.__init__(self, context)

        self.opening_delimiter = opening_delimiter
        self.opening_delimiter_position = opening_delimiter_position
        self.opening_delimiter_position_after = opening_delimiter_position_after
Example 3
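Like Example 2, but it also asserts that the class's CLOSING_DELIMITER is a single character; longer closing delimiters are left as a TODO.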
    def __init__(self, context: TokenizationContext, opening_delimiter: str,
                 opening_delimiter_position: StreamPosition,
                 opening_delimiter_position_after: StreamPosition):
        Tokenizer.__init__(self, context)

        self.opening_delimiter = opening_delimiter
        self.opening_delimiter_position = opening_delimiter_position
        self.opening_delimiter_position_after = opening_delimiter_position_after

        assert len(self.__class__.CLOSING_DELIMITER) == 1 # TODO: handle larger closing delimiters?
Example 4
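This variant additionally resolves the matching closing delimiter from the class's DELIMITER_PAIRS mapping, but only when the opening delimiter is registered there.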
    def __init__(self, context: TokenizationContext, opening_delimiter: str,
                 opening_delimiter_position: StreamPosition,
                 opening_delimiter_position_after: StreamPosition):
        Tokenizer.__init__(self, context)

        self.opening_delimiter = opening_delimiter
        self.opening_delimiter_position = opening_delimiter_position
        self.opening_delimiter_position_after = opening_delimiter_position_after

        if opening_delimiter in self.__class__.DELIMITER_PAIRS:
            self.closing_delimiter = self.__class__.DELIMITER_PAIRS[opening_delimiter]
Example 5
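A stricter take on Example 4: an unregistered opening sequence raises a TokenizingError. Note that it stores opening_delimiter[1:], dropping the first character of the sequence (presumably a prefix such as '#', as in Example 7), while the DELIMITER_PAIRS lookup still uses the full sequence.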
    def __init__(self, context: TokenizationContext, opening_delimiter: str,
                 opening_delimiter_position: StreamPosition,
                 opening_delimiter_position_after: StreamPosition):
        Tokenizer.__init__(self, context)

        if opening_delimiter not in self.__class__.DELIMITER_PAIRS:
            raise TokenizingError(opening_delimiter_position, "Unregistered delimiter pair, for opening sequence “%s”" % opening_delimiter)

        self.opening_delimiter = opening_delimiter[1:]
        self.opening_delimiter_position = opening_delimiter_position
        self.opening_delimiter_position_after = opening_delimiter_position_after
        self.closing_delimiter = self.__class__.DELIMITER_PAIRS[opening_delimiter]
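
To make the DELIMITER_PAIRS lookup in Examples 4 and 5 concrete, here is a minimal, self-contained sketch; the stand-in class name and the specific delimiter pairs are illustrative assumptions, not taken from any of the tokenizers above.

    # Illustrative sketch only: a stand-in class supplying the DELIMITER_PAIRS
    # mapping that the constructors above consult via self.__class__.
    class _DemoTokenizer:
        DELIMITER_PAIRS = {"(": ")", "[": "]", "{": "}"}

    opening_delimiter = "["
    if opening_delimiter in _DemoTokenizer.DELIMITER_PAIRS:
        # Resolve the matching closing delimiter, as in Example 4.
        closing_delimiter = _DemoTokenizer.DELIMITER_PAIRS[opening_delimiter]
        print(closing_delimiter)  # prints: ]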
Example 6
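Here the closing delimiter is the opening delimiter itself. The constructor also queries the readtable entry for the opening sequence, remembers which tokenizer it maps to, and rewrites the entry as type RT.CLOSING, so that the same sequence (e.g. '```') is read as a closing sequence while this tokenizer is active.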
    def __init__(self, context: TokenizationContext, opening_delimiter: str,
                 opening_delimiter_position: StreamPosition,
                 opening_delimiter_position_after: StreamPosition):
        Tokenizer.__init__(self, context)

        self.opening_delimiter = opening_delimiter
        self.opening_delimiter_position = opening_delimiter_position
        self.opening_delimiter_position_after = opening_delimiter_position_after
        self.closing_delimiter = opening_delimiter

        # toggle '```' tokenizer between macro character and closing sequence
        readtable = context.readtable
        opening_delimiter_properties = readtable.query(opening_delimiter)[0]
        self.my_tokenizer_name = opening_delimiter_properties["tokenizer"]
        del opening_delimiter_properties["tokenizer"]
        opening_delimiter_properties["type"] = RT.CLOSING
Example 7
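A Lisp-mode tokenizer: unregistered opening sequences raise a TokenizingError, the closing delimiter comes from DELIMITER_PAIRS, and the stored opening delimiter is normalized to its '#'-prefixed form. If the sequence already starts with '#', the readtable's delimiter tokenizers are retargeted as well.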
    def __init__(self,
                 context: TokenizationContext,
                 opening_delimiter: str,
                 opening_delimiter_position: StreamPosition,
                 opening_delimiter_position_after: StreamPosition):
        Tokenizer.__init__(self, context)

        if opening_delimiter not in self.__class__.DELIMITER_PAIRS:
            raise TokenizingError(opening_delimiter_position,
                                  "Lisp mode tokenizer called with unknown opening delimiter sequence `%s`" % opening_delimiter)

        self.opening_delimiter_position = opening_delimiter_position
        self.opening_delimiter_position_after = opening_delimiter_position_after
        self.closing_delimiter = self.__class__.DELIMITER_PAIRS[opening_delimiter]

        if opening_delimiter[0] == "#":
            self.opening_delimiter = opening_delimiter
            # toggle '(' tokenizer between LispMode and Delimiter tokenizers
            readtable = context.readtable
            self.set_delimiter_tokenizers(readtable, "LispModeTokenizer", "SharpDelimiterTokenizer")
        else:
            self.opening_delimiter = '#' + opening_delimiter
Example 8
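This constructor takes no delimiter arguments at all; it only asserts that the context's stream is an IndentedCharacterStream and initializes last_begin_token to None.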
    def __init__(self, context: TokenizationContext):
        assert isinstance(context.stream, IndentedCharacterStream)
        Tokenizer.__init__(self, context)

        self.last_begin_token = None