def optimizeConfigs(self, interpreter):
    """Canonicalize every configuration's prediction context.

    Each config's context is swapped for the interpreter's cached,
    shared equivalent so structurally identical contexts are stored
    only once.

    Raises IllegalStateException if this set is readonly.
    """
    if self.readonly:
        raise IllegalStateException("This set is readonly")
    if not self.configs:
        return
    # Hoist the bound method out of the loop.
    cached = interpreter.getCachedContext
    for cfg in self.configs:
        cfg.context = cached(cfg.context)
def getPrecedenceStartState(self, precedence:int):
    """Return the start state for the given precedence, or None.

    Only valid on a precedence DFA; otherwise IllegalStateException
    is raised. Out-of-range precedence values yield None.
    """
    if not self.precedenceDfa:
        raise IllegalStateException("Only precedence DFAs may contain a precedence start state.")
    # s0.edges is never null for a precedence DFA
    edges = self.s0.edges
    if 0 <= precedence < len(edges):
        return edges[precedence]
    return None
def setPrecedenceStartState(self, precedence:int, startState:DFAState):
    """Record the start state to use for a given precedence value.

    Only valid on a precedence DFA; otherwise IllegalStateException
    is raised. Negative precedence values are silently ignored.
    """
    if not self.precedenceDfa:
        raise IllegalStateException("Only precedence DFAs may contain a precedence start state.")
    if precedence < 0:
        return
    # synchronization on s0 here is ok. when the DFA is turned into a
    # precedence DFA, s0 will be initialized once and not updated again
    # s0.edges is never null for a precedence DFA
    edges = self.s0.edges
    shortfall = precedence + 1 - len(edges)
    if shortfall > 0:
        edges.extend([None] * shortfall)
    edges[precedence] = startState
def nextToken(self):
    """Return the next token from the character stream.

    Drives the lexer ATN interpreter in a loop: the inner loop keeps
    matching while rules emit SKIP/MORE, the outer loop restarts token
    recognition after a SKIP. Returns the emitted token (an EOF token
    once the end of input has been hit).

    Raises IllegalStateException if no input stream is attached.
    """
    if self._input is None:
        raise IllegalStateException(
            "nextToken requires a non-null input stream.")
    # Mark start location in char stream so unbuffered streams are
    # guaranteed at least have text of current token
    tokenStartMarker = self._input.mark()
    try:
        while True:
            if self._hitEOF:
                self.emitEOF()
                return self._token
            # Reset per-token state before attempting a new match.
            self._token = None
            self._channel = Token.DEFAULT_CHANNEL
            self._tokenStartCharIndex = self._input.index
            self._tokenStartColumn = self._interp.column
            self._tokenStartLine = self._interp.line
            self._text = None
            continueOuter = False
            while True:
                self._type = Token.INVALID_TYPE
                # Default to SKIP so a recovered error consumes input
                # without emitting a token.
                ttype = self.SKIP
                try:
                    ttype = self._interp.match(self._input, self._mode)
                except LexerNoViableAltException as e:
                    self.notifyListeners(e)  # report error
                    self.recover(e)
                if self._input.LA(1) == Token.EOF:
                    self._hitEOF = True
                if self._type == Token.INVALID_TYPE:
                    self._type = ttype
                if self._type == self.SKIP:
                    # Discard this match and restart token recognition.
                    continueOuter = True
                    break
                # MORE keeps matching into the same token; anything
                # else completes the token.
                if self._type != self.MORE:
                    break
            if continueOuter:
                continue
            if self._token is None:
                self.emit()
            return self._token
    finally:
        # make sure we release marker after match or
        # unbuffered char stream will keep buffering
        self._input.release(tokenStartMarker)
def consume(self):
    """Advance the stream past the current token.

    Refuses to consume past EOF: if the current position is not known
    to index an already-fetched, non-final token, LA(1) is checked and
    IllegalStateException is raised when it reports EOF.
    """
    if self.index < 0:
        # not yet initialized; always perform the EOF check
        skipEofCheck = False
    elif self.fetchedEOF:
        # the last token in tokens is EOF. skip check if p indexes any
        # fetched token except the last.
        skipEofCheck = self.index < len(self.tokens) - 1
    else:
        # no EOF token in tokens. skip check if p indexes a fetched token.
        skipEofCheck = self.index < len(self.tokens)
    if not skipEofCheck and self.LA(1) == Token.EOF:
        raise IllegalStateException("cannot consume EOF")
    if self.sync(self.index + 1):
        self.index = self.adjustSeekIndex(self.index + 1)
def clear(self):
    """Empty this configuration set.

    Drops all stored configurations, empties the lookup structure and
    invalidates the cached hash code.

    Raises IllegalStateException if this set is readonly.
    """
    if self.readonly:
        raise IllegalStateException("This set is readonly")
    # The three resets are independent of one another.
    self.configLookup.clear()
    self.configs.clear()
    self.cachedHashCode = -1
def getReturnState(self, index: int):
    """Abstract accessor for the return state at *index*.

    This base implementation is never valid to call; concrete
    subclasses must override it. Always raises IllegalStateException.
    """
    raise IllegalStateException("illegal!")