def token_from_text(self, rule_token, txt):
    """Build a Token for *txt* matched by *rule_token*.

    Side effect: advances the lexer position (``lineno``, ``column``,
    ``start``) past the consumed text.
    """
    begin = pctx.SourcePoint(self.lineno, self.column, self.start)
    txt_len = len(txt)

    if rule_token is self.NL:
        # A bare newline: move to the next line, first column.
        self.lineno += 1
        self.column = 1
    elif rule_token in self.MULTILINE_TOKENS and self._NL in txt:
        # The token may span lines (comments/strings/etc.): count the
        # embedded newlines and measure the tail after the last one.
        self.lineno += txt.count(self._NL)
        tail = txt.rsplit(self._NL, 1)[1]
        self.column = len(tail) + 1
    else:
        self.column += txt_len

    self.start += txt_len
    end = pctx.SourcePoint(self.lineno, self.column, self.start)

    return Token(txt, type=rule_token, text=txt,
                 start=begin, end=end, filename=self.filename)
async def _execute_ddl(conn, sql_text):
    """Execute *sql_text* on *conn*, attaching a parser context on failure.

    If execution raises, the handler tries to derive a source position
    from the exception's ``position`` / ``internal_position`` attributes
    (or a PL/pgSQL line number found in its ``context``) and attaches a
    ``ParserContext`` to the exception before re-raising it.
    """
    try:
        if debug.flags.bootstrap:
            debug.header('Delta Script')
            debug.dump_code(sql_text, lexer='sql')

        await conn.execute(sql_text)
    except Exception as e:
        # Positional attributes are driver-specific; default to None/''
        # when absent.  NOTE(review): attribute names match asyncpg's
        # error interface — confirm against the actual driver in use.
        position = getattr(e, 'position', None)
        internal_position = getattr(e, 'internal_position', None)
        context = getattr(e, 'context', '')

        if context:
            # Look for a PL/pgSQL inline-code-block line number in the
            # error context.
            pl_func_line = re.search(
                r'^PL/pgSQL function inline_code_block line (\d+).*',
                context, re.M)

            if pl_func_line:
                pl_func_line = int(pl_func_line.group(1))
        else:
            pl_func_line = None

        point = None

        if position is not None:
            # Error position refers to the submitted query text.
            position = int(position)
            point = parser_context.SourcePoint(
                None, None, position)
            text = e.query
            if text is None:
                # Parse errors
                text = sql_text
        elif internal_position is not None:
            # Error position refers to an internally-generated query.
            internal_position = int(internal_position)
            point = parser_context.SourcePoint(
                None, None, internal_position)
            text = e.internal_query
        elif pl_func_line:
            # Only a PL/pgSQL line number is available.
            point = parser_context.SourcePoint(
                pl_func_line, None, None
            )
            text = sql_text

        if point is not None:
            context = parser_context.ParserContext(
                'query', text, start=point, end=point)
            exceptions.replace_context(e, context)

        # Re-raise with the (possibly augmented) context attached.
        raise
def context(self, tok=None, pos: "tuple[int, int, int] | None" = None):
    """Build a ParserContext for *tok*, or for *pos* when no token is given.

    Args:
        tok: a lexer token; when provided, its ``start()``/``end()``
            points bound the context.
        pos: a ``(line, column, pointer)`` triple used when *tok* is
            None; defaults to the lexer's end-of-input position.

    Returns:
        A ``pctx.ParserContext`` over the lexer's input buffer.
    """
    lex = self.lexer
    name = lex.filename if lex.filename else '<string>'

    if tok is None:
        if pos is None:
            pos = lex.end_of_input
        # Note: the original annotation was the runtime tuple
        # ``(int, int, int)`` which is not a valid PEP 484 hint; it is
        # now a string annotation with identical runtime behavior.
        position = pctx.SourcePoint(*pos)
        context = pctx.ParserContext(name=name, buffer=lex.inputstr,
                                     start=position, end=position)
    else:
        context = pctx.ParserContext(name=name, buffer=lex.inputstr,
                                     start=pctx.SourcePoint(*tok.start()),
                                     end=pctx.SourcePoint(*tok.end()))

    return context
def context(self, tok=None):
    """Return a ParserContext for *tok*, or for the lexer's current position.

    When *tok* is None, the context points at the lexer's current
    (lineno, column, start) position; otherwise it spans the token's
    own start/end points.
    """
    lex = self.lexer
    buffer_name = lex.filename if lex.filename else '<string>'

    if tok is not None:
        return pctx.ParserContext(
            name=buffer_name, buffer=lex.inputstr,
            start=tok.start, end=tok.end)

    here = pctx.SourcePoint(
        line=lex.lineno, column=lex.column, pointer=lex.start)
    return pctx.ParserContext(
        name=buffer_name, buffer=lex.inputstr,
        start=here, end=here)