def test_no_matching_token():
    """Ensure that we handle cases when there is no match."""
    code = """ foo """
    matches = match_tokens(
        tokenize(code),
        start=match_regex("foo"),
        end=match_regex("bar"),
    )
    # The input never contains an end token, so no match should be produced.
    assert not list(matches)
def test_no_matching_token():
    """Ensure that we handle cases when there is no match."""
    # NOTE(review): a test with this exact name also appears earlier in this
    # file; pytest collects only the last definition — confirm which variant
    # is intended to survive.
    code = """ foo """
    result = list(
        match_tokens(
            tokenize(code),
            start=match_regex("foo"),
            end=match_regex("bar"),
        )
    )
    assert not result
def test_not_enough_lookahead():
    """Ensure that we don't return a match if there's not enough lookahead."""
    code = """ foo bar """
    # With lookahead=1 the matcher cannot see far enough ahead to pair
    # "foo" with "bar", so the match stream must be empty.
    found = match_tokens(
        tokenize(code),
        start=match_regex("foo"),
        end=match_regex("bar"),
        lookahead=1,
    )
    assert not list(found)
def test_not_enough_lookahead():
    """Ensure that we don't return a match if there's not enough lookahead."""
    # NOTE(review): duplicate of a same-named test earlier in this file;
    # only the last definition is collected by pytest — confirm intent.
    code = """ foo bar """
    stream = tokenize(code)
    no_matches = list(
        match_tokens(
            stream,
            start=match_regex("foo"),
            end=match_regex("bar"),
            lookahead=1,
        )
    )
    assert not no_matches
def test_match_length():
    """Ensure that we correctly find sequences of a provided length."""
    code = """ foo foo bar """
    matches = list(
        match_tokens(
            tokenize(code),
            start=match_regex("foo"),
            end=match_regex("bar"),
            length=3,
        )
    )
    # Exactly one three-token run ("foo foo bar") satisfies length=3.
    assert len(matches) == 1
    assert [tok.value for tok in matches[0]] == ["foo", "foo", "bar"]
def test_match_tokens():
    """Ensure that we match tokens correctly."""
    code = """ foo qux bar foo grault bar foo bar """
    expected = [["foo", "qux", "bar"], ["foo", "grault", "bar"], ["foo", "bar"]]
    actual = [
        [tok.value for tok in match]
        for match in match_tokens(
            tokenize(code),
            start=match_regex("foo"),
            end=match_regex("bar"),
        )
    ]
    assert actual == expected
def test_match_tokens():
    """Ensure that we match tokens correctly."""
    # NOTE(review): a same-named test appears earlier in this file; pytest
    # keeps only the last definition — confirm which copy should remain.
    code = """ foo qux bar foo grault bar foo bar """
    matches = match_tokens(
        tokenize(code),
        start=match_regex("foo"),
        end=match_regex("bar"),
    )
    values = []
    for match in matches:
        values.append([tok.value for tok in match])
    assert values == [
        ["foo", "qux", "bar"],
        ["foo", "grault", "bar"],
        ["foo", "bar"],
    ]