def test_bind_2arg(self):
    """Bind with an explicit submatcher wraps the submatch in a binding."""
    actual = base_matchers.Bind('foo', base_matchers.Anything()).match(
        _FAKE_CONTEXT, 1)
    expected = matcher.MatchInfo(
        match.ObjectMatch(1),
        {'foo': matcher.BoundValue(match.ObjectMatch(1))})
    self.assertEqual(actual, expected)
def test_from_diff_multiple(self):
    """from_diff with two changed lines yields two numbered bindings/replacements."""
    actual = matcher.MatchInfo.from_diff('metavar', 'a\nb\nc\n', '1\nb\n3\n')
    expected = matcher.MatchInfo(
        match.Match(),
        bindings={
            'metavar.0': matcher.BoundValue(
                match.SpanMatch(string='a\n', span=(0, 2))),
            'metavar.1': matcher.BoundValue(
                match.SpanMatch(string='c\n', span=(4, 6))),
        },
        replacements={
            'metavar.0': formatting.LiteralTemplate('1\n'),
            'metavar.1': formatting.LiteralTemplate('3\n'),
        },
    )
    self.assertEqual(actual, expected)
def test_string(self):
    """HasItem indexes into a string and binds the selected character."""
    actual = base_matchers.HasItem(1, base_matchers.Bind('a')).match(
        _FAKE_CONTEXT, 'xy')
    expected = matcher.MatchInfo(
        match.StringMatch('xy'),
        {'a': matcher.BoundValue(match.StringMatch('y'))})
    self.assertEqual(actual, expected)
def test_type_only(self):
    """UnaryOp with no field matchers matches any unary expression."""
    parsed, e = expression('~a')
    actual = ast_matchers.UnaryOp().match(matcher.MatchContext(parsed), e)
    expected = matcher.MatchInfo(
        matcher.LexicalASTMatch(e, parsed.text, e.first_token, e.last_token))
    self.assertEqual(actual, expected)
def test_contains_binds(self):
    """Contains propagates bindings from the element that matched."""
    container = [1, 2, 3]
    contains_matcher = base_matchers.Contains(base_matchers.Bind('foo', 1))
    self.assertEqual(
        contains_matcher.match(_FAKE_CONTEXT, container),
        matcher.MatchInfo(
            match.ObjectMatch(container),
            {'foo': matcher.BoundValue(match.ObjectMatch(1))}))
def test_non_lexical_node(self):
    """The matcher doesn't return lexical data for non-lexical AST nodes."""
    parsed, binop = expression('a + b')
    op_node = binop.op
    actual = ast_matchers.Add().match(matcher.MatchContext(parsed), op_node)
    # An operator node has no tokens of its own, so only an ObjectMatch
    # is expected.
    self.assertEqual(actual, matcher.MatchInfo(match.ObjectMatch(op_node)))
def test_match(self):
    """ItemsAre matches a one-element container and binds its item."""
    container = [1]
    items_matcher = base_matchers.ItemsAre([base_matchers.Bind('a')])
    self.assertEqual(
        items_matcher.match(_FAKE_CONTEXT, container),
        matcher.MatchInfo(
            match.ObjectMatch(container),
            {'a': matcher.BoundValue(match.ObjectMatch(1))}))
def test_multi_bind_first(self):
    """AnyOf keeps only the bindings of the first matching alternative."""
    any_of = base_matchers.AnyOf(
        base_matchers.Bind('foo'), base_matchers.Bind('bar'), _NOTHING)
    self.assertEqual(
        any_of.match(_FAKE_CONTEXT, 1),
        matcher.MatchInfo(
            match.ObjectMatch(1),
            {'foo': matcher.BoundValue(match.ObjectMatch(1))}))
def test_bindings(self):
    """Once passes through both the submatcher's bindings and bind_variables."""
    once = base_matchers.Once(base_matchers.Bind('foo'))
    self.assertEqual(
        once.match(_FAKE_CONTEXT.new(), 1),
        matcher.MatchInfo(
            match.ObjectMatch(1),
            {'foo': matcher.BoundValue(match.ObjectMatch(1))}))
    self.assertEqual(once.bind_variables, {'foo'})
def test_negative_index(self):
    """HasItem supports Python's negative indexing."""
    container = ['xyz']
    actual = base_matchers.HasItem(-1, base_matchers.Bind('a')).match(
        _FAKE_CONTEXT, container)
    expected = matcher.MatchInfo(
        match.ObjectMatch(container),
        {'a': matcher.BoundValue(match.StringMatch('xyz'))})
    self.assertEqual(actual, expected)
def _match(self, context, candidate):
    """Matches iff the candidate's line number is one of self.lines."""
    # Not all ast-nodes have the lineno attr, only expressions and statements
    # (so modules and some other weird ones don't); default to None for those.
    if getattr(candidate, 'lineno', None) not in self.lines:
        return None
    return matcher.MatchInfo(
        matcher.create_match(context.parsed_file, candidate))
def _match(self, context, candidate):
    """Matches iff the candidate's parent node satisfies the submatcher.

    The returned MatchInfo covers the candidate itself, carrying over any
    bindings produced while matching the parent.
    """
    parent = context.parsed_file.nav.get_parent(candidate)
    if parent is None:
        # e.g. the root node: nothing above it to match against.
        return None
    parent_result = self._submatcher.match(context, parent)
    if parent_result is None:
        return None
    return matcher.MatchInfo(
        matcher.create_match(context.parsed_file, candidate),
        parent_result.bindings)
def test_multi_overlap(self):
    """AllOf with two identical Bind names currently succeeds silently."""
    # TODO: it'd be nice to give a good error at some point, instead.
    all_of = base_matchers.AllOf(
        base_matchers.Bind('foo'), base_matchers.Bind('foo'))
    self.assertEqual(
        all_of.match(_FAKE_CONTEXT, 1),
        matcher.MatchInfo(
            match.ObjectMatch(1),
            {'foo': matcher.BoundValue(match.ObjectMatch(1))}))
def _match(self, context, candidate):
    """Matches iff any direct AST child satisfies the submatcher.

    The first matching child wins; its bindings are attached to a match
    covering the candidate itself.
    """
    for child in _ast_children(candidate):
        child_result = self._submatcher.match(context, child)
        if child_result is not None:
            return matcher.MatchInfo(
                matcher.create_match(context.parsed_file, candidate),
                child_result.bindings)
    return None
def test_simple(self):
    """HasItem works across tuples, lists, and dicts alike."""
    containers = (('x', 'y'), ['x', 'y'], {1: 'y'})
    for nonempty_container in containers:
        with self.subTest(nonempty_container=nonempty_container):
            actual = base_matchers.HasItem(1, base_matchers.Bind('a')).match(
                _FAKE_CONTEXT, nonempty_container)
            expected = matcher.MatchInfo(
                match.ObjectMatch(nonempty_container),
                {'a': matcher.BoundValue(match.StringMatch('y'))})
            self.assertEqual(actual, expected)
def test_explicit_anything(self):
    """Explicit Anything() submatchers behave like omitting the fields."""
    parsed, e = expression('~a')
    unary_matcher = ast_matchers.UnaryOp(
        op=base_matchers.Anything(), operand=base_matchers.Anything())
    self.assertEqual(
        unary_matcher.match(matcher.MatchContext(parsed), e),
        matcher.MatchInfo(
            matcher.LexicalASTMatch(e, parsed.text, e.first_token,
                                    e.last_token)))
def test_multi_bind(self):
    """AllOf merges bindings from all of its submatchers."""
    all_of = base_matchers.AllOf(
        base_matchers.Bind('foo'), base_matchers.Bind('bar'))
    self.assertEqual(
        all_of.match(_FAKE_CONTEXT, 1),
        matcher.MatchInfo(
            match.ObjectMatch(1), {
                'foo': matcher.BoundValue(match.ObjectMatch(1)),
                'bar': matcher.BoundValue(match.ObjectMatch(1)),
            }))
def test_variable_name(self):
    """A bare metavariable pattern binds the whole matched expression."""
    parsed = matcher.parse_ast('3', '<string>')
    expr = parsed.tree.body[0].value
    expr_match = matcher.LexicalASTMatch(expr, parsed.text, expr.first_token,
                                         expr.last_token)
    actual = syntax_matchers.ExprPattern('$name').match(
        matcher.MatchContext(parsed), expr)
    self.assertEqual(
        actual,
        matcher.MatchInfo(expr_match,
                          {'name': matcher.BoundValue(expr_match)}))
def test_lvalue_variable(self):
    """StmtPattern metavariables may appear in assignment targets."""
    parsed = matcher.parse_ast('a = b', '<string>')
    stmt = parsed.tree.body[0]
    actual = syntax_matchers.StmtPattern('$x = $y').match(
        matcher.MatchContext(parsed), stmt)
    # The exact bindings aren't under test here, only that the statement
    # as a whole matched.
    self.assertEqual(
        actual,
        matcher.MatchInfo(
            matcher.LexicalASTMatch(stmt, parsed.text, stmt.first_token,
                                    stmt.last_token),
            bindings=mock.ANY))
def test_fully_specified_matcher(self):
    """A UnaryOp matcher with every field specified still matches."""
    parsed, e = expression('~a')
    unary_matcher = ast_matchers.UnaryOp(
        op=ast_matchers.Invert(),
        operand=ast_matchers.Name(ctx=ast_matchers.Load()))
    self.assertEqual(
        unary_matcher.match(matcher.MatchContext(parsed), e),
        matcher.MatchInfo(
            matcher.LexicalASTMatch(e, parsed.text, e.first_token,
                                    e.last_token)))
def test_complex_variable(self):
    """A metavariable embedded in a larger pattern still matches."""
    parsed = matcher.parse_ast('foo + bar', '<string>')
    expr = parsed.tree.body[0].value
    actual = syntax_matchers.ExprPattern('foo + $name').match(
        matcher.MatchContext(parsed), expr)
    # Bindings are not the subject of this test; only the overall match is.
    self.assertEqual(
        actual,
        matcher.MatchInfo(
            matcher.LexicalASTMatch(expr, parsed.text, expr.first_token,
                                    expr.last_token),
            bindings=mock.ANY))
def _match(self, context, candidate):
    """Matches iff candidate[self._index] exists and satisfies the submatcher."""
    # LookupError covers missing keys/indices; TypeError covers candidates
    # that don't support subscripting at all.
    try:
        item = candidate[self._index]
    except (LookupError, TypeError):
        return None
    item_result = self._submatcher.match(context, item)
    if item_result is None:
        return None
    return matcher.MatchInfo(
        matcher.create_match(context.parsed_file, candidate),
        item_result.bindings)
def _match(self, context, candidate):
    """Matches iff some element of the (iterable) candidate satisfies the submatcher."""
    # Non-iterable candidates simply don't match.
    try:
        elements = iter(candidate)
    except TypeError:
        return None
    for element in elements:
        element_result = self._submatcher.match(context, element)
        if element_result is not None:
            # First matching element wins; its bindings are carried over.
            return matcher.MatchInfo(
                matcher.create_match(context.parsed_file, candidate),
                element_result.bindings)
    return None
def _match(self, context, candidate):
    """Matches iff the file's text contains the regex; result cached per file.

    The regex search runs at most once per (file, pattern) pair; even a
    failed search (None) is cached so it is not retried.
    """
    del candidate  # unused
    per_file_cache = _file_matches_regex.setdefault(context.parsed_file, {})
    if self._compiled not in per_file_cache:
        per_file_cache[self._compiled] = self._compiled.search(
            context.parsed_file.text)
    search_result = per_file_cache[self._compiled]
    if search_result is None:
        return None
    return matcher.MatchInfo(
        match.Match(),
        _re_match_to_bindings(self._compiled, context.parsed_file.text,
                              search_result))
def _constant_match(context, candidate, value_matcher, value_types):
    """Matches an ast.Constant whose value's exact type is in value_types.

    Returns a MatchInfo for the Constant node (with the value matcher's
    bindings), or None if the node/value/type doesn't line up.
    """
    if type(candidate) != ast.Constant:  # pylint: disable=unidiomatic-typecheck
        return None
    # note: not isinstance. The only concrete subclass that can occur in a
    # Constant AST is bool (which subclasses int). And in that case, we actually
    # don't want to include it -- Num() should not match `True`!.
    # Instead, all types must be listed out explicitly.
    if type(candidate.value) not in value_types:
        return None
    value_result = value_matcher.match(context, candidate.value)
    if value_result is None:
        return None
    return matcher.MatchInfo(
        matcher.create_match(context.parsed_file, candidate),
        value_result.bindings)
def test_from_diff_change(self):
    """from_diff with one changed line yields one binding and one replacement."""
    actual = matcher.MatchInfo.from_diff('metavar', 'a\nb\n', 'a\nc\n')
    expected = matcher.MatchInfo(
        match.Match(),
        bindings={
            'metavar.0': matcher.BoundValue(
                match.SpanMatch(string='b\n', span=(2, 4))),
        },
        replacements={'metavar.0': formatting.LiteralTemplate('c\n')},
    )
    self.assertEqual(actual, expected)
def test_matches(self):
    """MatchesRegex binds named groups and forwards to the inner matcher."""
    parsed = matcher.parse_ast('xy = 2', '<string>')
    regex_matcher = base_matchers.MatchesRegex(
        r'^(?P<name>.)(.)$', base_matchers.Bind('inner'))
    matches = list(matcher.find_iter(regex_matcher, parsed))
    # There is only one AST node of length >= 2 (which the regex requires): xy.
    self.assertEqual(matches, [
        matcher.MatchInfo(
            mock.ANY, {
                'inner': mock.ANY,
                'name': matcher.BoundValue(match.SpanMatch('x', (0, 1))),
            })
    ])
    [matchinfo] = matches
    self.assertEqual(matchinfo.match.span, (0, 2))
    self.assertEqual(matchinfo.match, matchinfo.bindings['inner'].value)
def _match(self, context, candidate):
    """Matches iff the first ancestor matching _first_ancestor also satisfies _also_matches.

    Walks upward from the candidate; only the *nearest* ancestor accepted by
    _first_ancestor is tested against _also_matches -- ancestors beyond it
    are never considered.
    """
    nav = context.parsed_file.nav
    node = nav.get_parent(candidate)
    while node is not None:
        ancestor_result = self._first_ancestor.match(context, node)
        if ancestor_result is not None:
            also_result = self._also_matches.match(
                context, ancestor_result.match.matched)
            if also_result is None:
                return None
            return matcher.MatchInfo(
                matcher.create_match(context.parsed_file, candidate),
                matcher.merge_bindings(ancestor_result.bindings,
                                       also_result.bindings))
        node = nav.get_parent(node)
    # Ran out of ancestors without _first_ancestor ever matching.
    return None
def test_identical_patterns(self):
    """Tests that patterns match themselves when not parameterized.

    Many cases (e.g. None) are interesting for 2/3 compatibility, because the
    AST changes in Python 3. syntax_matchers gives an easy way to get
    cross-version compatibility.
    """
    codes = ('None', '{}', '[]', '{1:2, 3:4}', 'lambda a: a', '""')
    for code in codes:
        parsed = matcher.parse_ast(code, '<string>')
        expr = parsed.tree.body[0].value
        for extra_comment in ('', "# comment doesn't matter"):
            with self.subTest(code=code, extra_comment=extra_comment):
                pattern = syntax_matchers.ExprPattern(code + extra_comment)
                self.assertEqual(
                    pattern.match(matcher.MatchContext(parsed), expr),
                    matcher.MatchInfo(
                        matcher.LexicalASTMatch(expr, parsed.text,
                                                expr.first_token,
                                                expr.last_token)))
def test_from_diff_empty(self):
    """from_diff on identical texts produces no bindings or replacements."""
    actual = matcher.MatchInfo.from_diff('metavar', 'a\nb\nc\n', 'a\nb\nc\n')
    self.assertEqual(actual, matcher.MatchInfo(match.Match()))