def tokens_by_lineno(self) -> Mapping[int, List[Token]]:
    """
    The tokens of this source file, grouped by the (1-based) line number
    on which each token starts.

    Raises AttributeError when the file doesn't contain valid Python
    (no tree was built), so that ``hasattr`` can be used to probe validity.
    """
    if not self.tree:
        raise AttributeError(
            "This file doesn't contain valid Python, so .tokens_by_lineno doesn't exist"
        )

    def starting_line(token):
        # token.start is a (line, column) pair; group on the line part.
        return token.start[0]

    return group_by_key_func(self.asttokens().tokens, starting_line)
def variables(self) -> List[Variable]:
    """
    All Variable objects whose nodes are contained within .scope
    and whose values could be safely evaluated by pure_eval.

    Returns an empty list when this frame has no scope. Equivalent
    expressions (same normalised AST dump) are merged into a single
    Variable carrying all of their nodes and the first seen value.
    """
    if not self.scope:
        return []

    evaluator = Evaluator.from_frame(self.frame)
    scope = self.scope
    # Expressions in scope that pure_eval could evaluate safely, filtered
    # down to the ones worth showing to a user.
    node_values = [
        pair
        for pair in evaluator.find_expressions(scope)
        if is_expression_interesting(*pair)
    ]  # type: List[Tuple[ast.AST, Any]]

    # Function arguments aren't expressions, so find_expressions misses
    # them; add each ast.arg whose name is currently bound in the frame.
    if isinstance(scope, (ast.FunctionDef, ast.AsyncFunctionDef)):
        for node in ast.walk(scope.args):
            if not isinstance(node, ast.arg):
                continue
            name = node.arg
            try:
                value = evaluator.names[name]
            except KeyError:
                # Argument name not bound (e.g. deleted) — skip it.
                pass
            else:
                node_values.append((node, value))

    # Group equivalent nodes together
    def get_text(n):
        # ast.arg nodes have no source span in asttokens; use the bare name.
        if isinstance(n, ast.arg):
            return n.arg
        else:
            return self.source.asttokens().get_text(n)

    def normalise_node(n):
        try:
            # Add parens to avoid syntax errors for multiline expressions
            return ast.parse('(' + get_text(n) + ')')
        except Exception:
            # Fall back to the node itself; its dump still works as a key.
            return n

    grouped = group_by_key_func(
        node_values,
        lambda nv: ast.dump(normalise_node(nv[0])),
    )

    result = []
    for group in grouped.values():
        nodes, values = zip(*group)
        # All nodes in a group are textually equivalent; presumably their
        # values match too, so the first value represents the group.
        value = values[0]
        text = get_text(nodes[0])
        if not text:
            # No retrievable source text — nothing useful to display.
            continue
        result.append(Variable(text, nodes, value))

    return result
def _raw_split_into_pieces(
        self,
        stmt: ast.AST,
        start: int,
        end: int,
) -> Iterator[Tuple[int, int]]:
    """
    Recursively yield half-open (start, end) line-number ranges that
    partition the lines of ``stmt`` between ``start`` and ``end``.

    Statement bodies are split at the boundaries of their child
    statements; consecutive yielded ranges are contiguous, and some may
    be empty (start == end).
    """
    # Ensure tokens/positions are computed before reading line ranges.
    self.asttokens()

    for name, body in ast.iter_fields(stmt):
        # Only descend into fields that are statement lists (e.g. body,
        # orelse, handlers) — other lists (like targets) aren't pieces.
        if (
            isinstance(body, list) and body and
            isinstance(body[0], (ast.stmt, ast.ExceptHandler))
        ):
            # Statements sharing a line range (e.g. `a = 1; b = 2`) are
            # grouped together and split as one unit, in source order.
            for rang, group in sorted(group_by_key_func(body, line_range).items()):
                sub_stmt = group[0]
                for inner_start, inner_end in self._raw_split_into_pieces(sub_stmt, *rang):
                    # Gap before the child's piece, then the piece itself;
                    # advance start so ranges stay contiguous.
                    yield start, inner_start
                    yield inner_start, inner_end
                    start = inner_end
    # Whatever remains after the last child (or the whole range if the
    # statement had no statement-list children).
    yield start, end