def __init__(self, context: TokenizationContext, opening_delimiter: str, opening_delimiter_position: StreamPosition, opening_delimiter_position_after: StreamPosition):
    Tokenizer.__init__(self, context)
    # The opening sequence is fixed per class; anything else is a dispatch error.
    if opening_delimiter != self.__class__.OPENING_DELIMITER:
        raise TokenizingError(opening_delimiter_position,
                              "Multiple-escape tokenizer called with unknown opening sequence “%s”" % opening_delimiter)
    self.opening_delimiter_position = opening_delimiter_position
    self.opening_delimiter_position_after = opening_delimiter_position_after
def __init__(self, context: TokenizationContext, opening_delimiter: str, opening_delimiter_position: StreamPosition, opening_delimiter_position_after: StreamPosition):
    Tokenizer.__init__(self, context)
    self.opening_delimiter = opening_delimiter
    self.opening_delimiter_position = opening_delimiter_position
    self.opening_delimiter_position_after = opening_delimiter_position_after
def __init__(self, context: TokenizationContext, opening_delimiter: str, opening_delimiter_position: StreamPosition, opening_delimiter_position_after: StreamPosition):
    Tokenizer.__init__(self, context)
    self.opening_delimiter = opening_delimiter
    self.opening_delimiter_position = opening_delimiter_position
    self.opening_delimiter_position_after = opening_delimiter_position_after
    assert len(self.__class__.CLOSING_DELIMITER) == 1  # TODO: handle larger closing delimiters?
def __init__(self, context: TokenizationContext, opening_delimiter: str, opening_delimiter_position: StreamPosition, opening_delimiter_position_after: StreamPosition):
    Tokenizer.__init__(self, context)
    self.opening_delimiter = opening_delimiter
    self.opening_delimiter_position = opening_delimiter_position
    self.opening_delimiter_position_after = opening_delimiter_position_after
    # Note: closing_delimiter is only set when the pair is registered.
    if opening_delimiter in self.__class__.DELIMITER_PAIRS:
        self.closing_delimiter = self.__class__.DELIMITER_PAIRS[opening_delimiter]
def __init__(self, context: TokenizationContext, opening_delimiter: str, opening_delimiter_position: StreamPosition, opening_delimiter_position_after: StreamPosition):
    Tokenizer.__init__(self, context)
    if opening_delimiter not in self.__class__.DELIMITER_PAIRS:
        raise TokenizingError(opening_delimiter_position,
                              "Unregistered delimiter pair, for opening sequence “%s”" % opening_delimiter)
    # Drop the first character of the opening sequence (its macro-character prefix).
    self.opening_delimiter = opening_delimiter[1:]
    self.opening_delimiter_position = opening_delimiter_position
    self.opening_delimiter_position_after = opening_delimiter_position_after
    self.closing_delimiter = self.__class__.DELIMITER_PAIRS[opening_delimiter]
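# A minimal, self-contained sketch of the DELIMITER_PAIRS protocol the two
# constructors above rely on: a class-level mapping from opening to closing
# sequence, consulted to decide whether a pair is registered. The concrete
# pairs below are hypothetical examples, not taken from this codebase.
DELIMITER_PAIRS = {"(": ")", "[": "]", "{": "}"}

def closing_for(opening_delimiter: str) -> str:
    # Mirrors the membership check performed in the constructor above.
    if opening_delimiter not in DELIMITER_PAIRS:
        raise ValueError("Unregistered delimiter pair, for opening sequence “%s”" % opening_delimiter)
    return DELIMITER_PAIRS[opening_delimiter]

assert closing_for("[") == "]"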
def __init__(self, context: TokenizationContext, opening_delimiter: str, opening_delimiter_position: StreamPosition, opening_delimiter_position_after: StreamPosition):
    Tokenizer.__init__(self, context)
    self.opening_delimiter = opening_delimiter
    self.opening_delimiter_position = opening_delimiter_position
    self.opening_delimiter_position_after = opening_delimiter_position_after
    self.closing_delimiter = opening_delimiter
    # Toggle the '```' tokenizer between macro character and closing sequence:
    # while this tokenizer is active, the same sequence closes instead of opening.
    readtable = context.readtable
    opening_delimiter_properties = readtable.query(opening_delimiter)[0]
    self.my_tokenizer_name = opening_delimiter_properties["tokenizer"]
    del opening_delimiter_properties["tokenizer"]
    opening_delimiter_properties["type"] = RT.CLOSING
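# A sketch of the readtable toggling done above, with a plain dict standing in
# for the real readtable (query() and RT.CLOSING are assumed from the call sites,
# not verified against the actual readtable API): the opening sequence's tokenizer
# binding is removed and remembered, and the sequence is re-typed as CLOSING so
# that its next occurrence terminates the construct instead of re-opening it.
readtable = {"```": {"tokenizer": "RawCommentTokenizer", "type": "MACRO"}}  # hypothetical entry
properties = readtable["```"]
my_tokenizer_name = properties.pop("tokenizer")  # remembered so it can be restored later
properties["type"] = "CLOSING"
assert readtable["```"] == {"type": "CLOSING"} and my_tokenizer_name == "RawCommentTokenizer"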
def __init__(self, context: TokenizationContext, opening_delimiter: str, opening_delimiter_position: StreamPosition, opening_delimiter_position_after: StreamPosition):
    Tokenizer.__init__(self, context)
    if opening_delimiter not in self.__class__.DELIMITER_PAIRS:
        raise TokenizingError(opening_delimiter_position,
                              "Lisp mode tokenizer called with unknown opening delimiter sequence `%s`" % opening_delimiter)
    self.opening_delimiter_position = opening_delimiter_position
    self.opening_delimiter_position_after = opening_delimiter_position_after
    self.closing_delimiter = self.__class__.DELIMITER_PAIRS[opening_delimiter]
    if opening_delimiter[0] == "#":
        self.opening_delimiter = opening_delimiter
        # toggle '(' tokenizer between LispMode and Delimiter tokenizers
        readtable = context.readtable
        self.set_delimiter_tokenizers(readtable, "LispModeTokenizer", "SharpDelimiterTokenizer")
    else:
        self.opening_delimiter = '#' + opening_delimiter
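# A standalone sketch of the sharp-prefix normalization in the branch above:
# whether the tokenizer is entered via '#(' or via a bare '(' (once already in
# Lisp mode), self.opening_delimiter ends up carrying the '#'-prefixed spelling.
def normalized_opening(opening_delimiter: str) -> str:
    return opening_delimiter if opening_delimiter[0] == "#" else "#" + opening_delimiter

assert normalized_opening("#(") == "#("
assert normalized_opening("(") == "#("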
def __init__(self, context: TokenizationContext):
    assert isinstance(context.stream, IndentedCharacterStream)
    Tokenizer.__init__(self, context)
    self.last_begin_token = None
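# The assert above is a precondition: this tokenizer only accepts a context whose
# stream is an IndentedCharacterStream, presumably because it depends on the
# indentation tracking that a plain character stream lacks. A minimal illustration
# of the same guard pattern, using hypothetical stand-in classes so as not to
# shadow the real ones:
class _Stream: ...
class _IndentedStream(_Stream): ...

def _require_indented(stream: _Stream) -> _IndentedStream:
    assert isinstance(stream, _IndentedStream), "indentation-aware stream required"
    return stream

_require_indented(_IndentedStream())  # passes
# _require_indented(_Stream())        # would fail the assertion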