def create(self, source, type, text, channel, start, stop, line, column):
    """Build a CommonToken and fill in its position and text fields.

    An explicit *text* argument overrides the token text; otherwise, when
    copyText is enabled and the source pair carries an input stream, the
    text is copied out of that stream over [start, stop].
    """
    token = CommonToken(source, type, channel, start, stop)
    token.line = line
    token.column = column
    if text is not None:
        token.text = text
    elif self.copyText and source[1] is not None:
        token.text = source[1].getText(start, stop)
    return token
def create(self, source, type, text, channel, start, stop, line, column):
    """Create a CommonToken for *source*, setting line/column and its text."""
    tok = CommonToken(source, type, channel, start, stop)
    tok.line, tok.column = line, column
    if text is not None:
        tok.text = text
    else:
        stream = source[1]
        # Only copy text from the input stream when the factory opts in.
        if self.copyText and stream is not None:
            tok.text = stream.getText(start, stop)
    return tok
def nextToken(self):
    """Post-process tokens produced by the generated PHP lexer.

    Handles three pieces of mode bookkeeping:
    - close tags (``?>`` / ``</script>``): pops lexer modes and inserts the
      implicit semicolon PHP allows before a close tag,
    - detection of ``language="php"`` script blocks,
    - heredoc/nowdoc termination, rewriting the final heredoc token.
    """
    token = super(PhpBaseLexer, self).nextToken()
    if token.type == self.PHPEnd or token.type == self.PHPEndSingleLineComment:
        if self._mode == self.SingleLineCommentMode:
            # SingleLineCommentMode for such allowed syntax:
            # // <?php echo "Hello world"; // comment ?>
            self.popMode()  # leave the single-line-comment mode first
        self.popMode()  # leave PHP mode
        if token.text == "</script>":
            self._phpScript = False
            token.type = self.ScriptClose
        else:
            # Add semicolon to the end of statement if it is absent.
            # For example: <?php echo "Hello world" ?>
            if self._prevTokenType == self.SemiColon or \
                    self._prevTokenType == self.Colon or \
                    self._prevTokenType == self.OpenCurlyBracket or \
                    self._prevTokenType == self.CloseCurlyBracket:
                # Statement already terminated; just take the next token.
                token = super(PhpBaseLexer, self).nextToken()
            else:
                # Synthesize the missing ';' token.
                token = CommonToken(type=self.SemiColon)
                token.text = ';'
    elif token.type == self.HtmlName:
        # Remember the last HTML attribute name seen (e.g. "language").
        self._htmlNameText = token.text
    elif token.type == self.HtmlDoubleQuoteString:
        if token.text == "php" and self._htmlNameText == "language":
            self._phpScript = True
    elif self._mode == self.HereDoc:
        # Heredoc and Nowdoc syntax support: http://php.net/manual/en/language.types.string.php#language.types.string.syntax.heredoc
        if token.type == self.StartHereDoc or token.type == self.StartNowDoc:
            # token.text is e.g. "<<<ID" — strip the "<<<" prefix and quotes.
            self._heredocIdentifier = token.text[3:].strip().replace("'", "")
        if token.type == self.HereDocText:
            if self.CheckHeredocEnd(token.text):
                self.popMode()
                heredoc_identifier = self.GetHeredocEnd(token.text)
                if token.text.strip().endswith(';'):
                    # Terminator already carries a ';' — emit it as SemiColon.
                    text = heredoc_identifier + ";\n"
                    token = CommonToken(type=self.SemiColon)
                    token.text = text
                else:
                    token = super(PhpBaseLexer, self).nextToken()
                    token.text = heredoc_identifier + "\n;"
    elif self._mode == self.PHP:
        # NOTE(review): this records the previous token type only when the
        # lexer channel equals HIDDEN — confirm this matches the upstream
        # base lexer's intent before changing it.
        if self._channel == self.HIDDEN:
            self._prevTokenType = token.type
    return token
def addToken(self, prefix: str, s: str, token_type: TT):
    """Append a synthetic token whose text is ``prefix + s`` at the cursor.

    The token is stamped with the current position/line, after which the
    column cursor advances by ``len(s)`` (the prefix does not count).
    """
    tok = CommonToken(type=token_type.value)
    tok.text = prefix + s
    tok.column = self.pos
    tok.line = self.lineNo
    self.pos += len(s)
    self.builder.append(tok)
def raw(self, expected: list):
    """Assert the raw token types equal *expected*; returns self (fluent).

    An explicit EOF token (text ``"<eof-token>"``) is appended to the
    collected tokens first, so *expected* must account for the trailing
    EOF entry.

    Fix: the annotation was ``expected: []`` — an empty-list literal,
    not a type; it is now the ``list`` type.
    """
    token = CommonToken(type=Token.EOF)
    token.text = "<eof-token>"
    self.tokens.append(token)
    raw_types = self.tokens_to_types(self.tokens)
    self.assertEqual(expected, raw_types, "raw tokens")
    return self
def createToken(self, type_, text="", length=0):
    """Fabricate a token of *type_* spanning *length* chars from the token start."""
    start = self._tokenStartCharIndex
    token = CommonToken(
        self._tokenFactorySourcePair,
        type_,
        self.DEFAULT_TOKEN_CHANNEL,
        start,
        start + length,
    )
    token.text = text
    return token
def insertLeft(self, type, text):
    """Insert a token of *type*/*text* immediately before the current token.

    Line and token-index metadata are copied from the current token; a
    TokenEditor positioned on the newly inserted node is returned.
    """
    current = self._dllTokens.value
    tok = CommonToken()
    tok.type = type.value
    tok.text = text
    tok.line = current.line
    tok.tokenIndex = current.tokenIndex
    node = self._dllAll.insert(tok, self._dllTokens)
    return TokenEditor(node, self._dllAll)
def emit_type(self, token_type, channel=Token.DEFAULT_CHANNEL, text=""):
    """Emit and return a token of *token_type* ending at the current char index.

    The token spans ``len(text)`` characters backwards from the current
    position and is stamped with the current line/column.
    """
    end = self.getCharIndex()
    tok = CommonToken(
        self._tokenFactorySourcePair, token_type, channel, end - len(text), end
    )
    tok.line = self.line
    tok.column = self.column
    tok.text = text
    self.emitToken(tok)
    return tok
def insertRight(self, type, text):
    """Insert a token of *type*/*text* immediately after the current token.

    Line and token-index metadata are copied from the current token; a
    TokenEditor positioned on the newly inserted node is returned.

    Fix: the local previously named ``next`` shadowed the builtin; renamed.
    """
    token = CommonToken()
    token.type = type.value
    token.text = text
    token.line = self._dllTokens.value.line
    token.tokenIndex = self._dllTokens.value.tokenIndex
    successor = self._dllTokens.next
    if successor:
        inserted = self._dllAll.insert(token, successor)
    else:
        # No right neighbour: the new token becomes the last element.
        inserted = self._dllAll.append(token)
    return TokenEditor(inserted, self._dllAll)
def exitPlusOp(self, ctx: FoxySheepParser.PlusOpContext): """PlusOp[expr1,expr2] We have to treat PlusOp special, because we have to keep the operators intact, and only plus and minus (not PlusMinus or MinusPlus) are flat. The situation is complicated by the fact that Mathematica parses "a-b" as "Plus[a, Times[-1, b]]". We Rewrite the parse tree, inserting the Times context and changing BINARYMINUS to BINARYPLUS.""" # If the op isn't Plus or Minus, nothing to do. if ctx.BINARYMINUS() is None and ctx.BINARYPLUS() is None: return # Since ANTLR4 parses this operator as left associative, we only # need to check the left hand side expr. rhs = ctx.getChild(2) # If the operator of the PlusOp is BINARYMINUS, we rewrite the tree as # "Plus[lhs, Times[-1, rhs]]". Note that if rhs is TIMES, we have to # keep that TIMES flat. if ctx.BINARYMINUS() is not None: # Construct Times, or obtain it from the rhs. times = None if isinstance(rhs, FoxySheepParser.TimesContext): times = rhs else: # If rhs is already a times, keep it flat. times = FoxySheepParser.TimesContext( None, FoxySheepParser.ExprContext(None)) ctx.children.remove(rhs) adopt(ctx, times) adopt(times, rhs) # Add "-1" as the first child of Times. addChild(times, makeNumber(times, -1), 0) # Finally, we have to change operator to BINARYPLUS. plustoken = CommonToken(type=FoxySheepParser.BINARYPLUS) plustoken.text = '+' plus = TerminalNodeImpl(plustoken) # Replace minus token with plus. ctx.children[1] = plus plus.parentCtx = ctx # Flatten flatten(ctx)
def main(argv):
    """Parse the C++ file named by ``argv[1]`` and experiment with the token stream.

    NOTE(review): this reads as scratch/debug code — several alternative
    token-splicing attempts are left commented out, and the function ends
    with an opening triple-quote whose body continues outside this view.
    """
    sai = StaticAnalysisInfo()
    # NOTE(review): `input` shadows the builtin of the same name.
    input = FileStream(argv[1])
    lexer = CPP14Lexer(input)
    stream = CommonTokenStream(lexer)
    parser = CPP14Parser(stream)
    tree = parser.translationunit()
    tracker = Tracker1(sai, stream)
    walker = ParseTreeWalker()
    walker.walk(tracker, tree)
    print(stream.getText())
    # Mutate the first token's text in place.
    stream.tokens[0].text = "test"
    tmp_tok = stream.getTokens(0,3)
    space = CommonToken()
    space.text = ' '
    tmp_tok.append(space)
    # Splice the copied tokens (plus the space token) back in at index 2.
    stream.tokens[2:2] = tmp_tok
    #help(stream.tokens[0])
    #tmp_tok = stream.tokens[2].clone()
    #stream.tokens.insert(2, tmp_tok)
    #ttt = CommonToken()
    #ttt.text = '???'
    #stream.tokens.insert(2, ttt)
    print(stream.getText())
    print("---------")
    #lll = stream.getTokens(0,5)
    #print(lll)
    #print(stream.tokens[0].getTokenSource().text)
    #print(tracker.rewrite.getTokenStream().getText())
    '''
def createThin(self, type, text):
    """Return a minimal token carrying only a type and an explicit text."""
    token = CommonToken(type=type)
    token.text = text
    return token
def createThin(self, type: int, text: str):
    """Build a bare CommonToken of the given *type* with *text* attached."""
    thin = CommonToken(type=type)
    thin.text = text
    return thin
def createThin(self, type: int, text: str):
    """Create a lightweight token: just a type plus its display text."""
    tok = CommonToken(type=type)
    tok.text = text
    return tok