# forked from ar4s/flake8_tuple
# -*- coding: utf-8 -*-
import ast
import collections
import six
import token
import tokenize
try:
import pycodestyle as pep8
except ImportError:
import pep8
# Plugin version and the error emitted for one-element tuples.
__version__ = '0.2.13'
ERROR_CODE = 'T801'
ERROR_MESSAGE = 'one element tuple'
# Compatibility shim: tokenize.TokenInfo exists only on Python 3, so on
# Python 2 we recreate an equivalent named tuple with the same fields.
if six.PY2:
    """
    Backported from Python 3.x
    """
    TokenInfo = collections.namedtuple(
        'TokenInfo', ['type', 'string', 'start', 'end', 'line']
    )
else:
    TokenInfo = tokenize.TokenInfo
def get_lines(filename):
    """Return the physical lines of *filename*.

    A filename of ``'stdin'``, ``'-'`` or ``None`` means the source is read
    from standard input (the way flake8 passes piped input).
    """
    reading_stdin = filename in ('stdin', '-', None)
    if reading_stdin:
        return pep8.stdin_get_value().splitlines(True)
    return pep8.readlines(filename)
class TupleChecker(object):
    """Flake8 plugin that reports one-element tuples (error T801)."""

    name = 'flake8-tuple'
    version = __version__

    def __init__(self, tree, filename):
        # flake8 hands us the parsed AST and the file being checked.
        self.tree = tree
        self.filename = filename

    def run(self):
        """Yield flake8-style ``(line, col, message, type)`` errors."""
        try:
            lines = get_lines(self.filename)
        except IOError:
            # Source is unreadable: nothing to check.  (The previous code
            # yielded a bare None and then fell through, crashing with a
            # NameError on the unbound `lines`.)
            return
        noqa = get_noqa_lines(lines)
        for error in check_for_wrong_tuple(self.tree, lines, noqa):
            yield (
                error[0],
                error[1],
                '{} {}'.format(ERROR_CODE, ERROR_MESSAGE),
                type(self)
            )
def get_noqa_lines(code):
    """Return the 1-based line numbers carrying a ``# noqa`` comment.

    :param code: source as a list of physical lines (each keeping its
        trailing newline), as produced by ``get_lines``.
    :return: list of line numbers whose COMMENT token ends with ``noqa``.
    """
    # tokenize wants a readline() callable; exhaust the list via an iterator
    # bound as a default argument (tokenize treats StopIteration as EOF).
    tokens = tokenize.generate_tokens(lambda L=iter(code): next(L))
    return [
        tok[2][0]  # tok[2] is the (row, col) start of the comment token
        for tok in tokens
        if tok[0] == tokenize.COMMENT and tok[1].endswith('noqa')
        # NOTE: the original also tested isinstance(tok[0], str) on the
        # token *type*, which is always an int — that branch was dead code
        # and has been removed without changing behavior.
    ]
def check_code_for_wrong_tuple(code):
    """Check a source *string* for one-element tuples.

    Convenience wrapper around ``check_for_wrong_tuple`` used by tests:
    parses the code, splits it into newline-terminated lines, and collects
    the noqa-marked lines before delegating.
    """
    tree = ast.parse(code)
    lines = ['{}\n'.format(line) for line in code.split('\n')]
    noqa = get_noqa_lines(lines)
    return check_for_wrong_tuple(tree, lines, noqa)
def ending_of_bad_tuple(x):
    """Return True if token *x* is the trailing comma of a suspect tuple.

    *x* may be ``None`` (callers pass a ``previous_token`` that starts out
    as ``None``); that case now safely returns False instead of raising
    AttributeError.
    """
    return x is not None and x.type == token.OP and x.string == ','
def check_for_wrong_tuple(tree, code, noqa):
    """Locate accidental one-element tuples in assignments.

    :param tree: the module's AST (``ast.parse`` output).
    :param code: source as a list of physical lines with trailing newlines.
    :param noqa: line numbers to skip (from ``get_noqa_lines``).
    :return: list of ``(row, col)`` token-start positions to report.
    """
    errors = []
    candidates = []
    # Pass 1 (AST): collect assignments whose tree contains a 1-element
    # tuple.  Lines marked noqa are skipped, and so are call values —
    # ``x = foo(1,)`` is a call argument, not an accidental tuple.
    for assign in ast.walk(tree):
        if not isinstance(assign, ast.Assign) or assign.lineno in noqa:
            continue
        elif isinstance(assign.value, ast.Call):
            continue
        for tuple_el in ast.walk(assign):
            if isinstance(tuple_el, ast.Tuple) and len(tuple_el.elts) == 1:
                candidates.append((assign.lineno, assign.col_offset))
                break
    if not candidates:
        return []
    # Pass 2 (tokens): re-tokenize the whole source for each candidate and
    # inspect only the tokens on the candidate's line.
    for candidate in candidates:
        tokens = tokenize.generate_tokens(
            lambda L=iter(code): next(L)
        )
        previous_token = None
        for t in tokens:
            x = TokenInfo(*t)
            # Ignore tokens outside the candidate line.
            if x.start[0] != candidate[0]:
                continue
            # Case ``x = 1,``: a comma immediately before the logical
            # newline ends the line.
            if x.type == token.NEWLINE and ending_of_bad_tuple(previous_token):
                errors.append(x.start)
            # Case ``x = foo,``: peek past ``=``; if the value's first
            # token is directly followed by a comma, flag it.  Consuming
            # tokens with next() here advances the shared generator on
            # purpose — the outer loop continues after the peeked tokens.
            if x.type == token.OP and x.string == '=':
                x = TokenInfo(*next(tokens))
                # NOTE(review): this guard skips ANY operator after '='
                # (e.g. '-' in ``x = -1,``), not just '('; the intent was
                # probably "not an opening paren", i.e. ``or`` instead of
                # ``and`` — confirm against the test suite before changing.
                if x.type != token.OP and x.string != '(':
                    x_next = TokenInfo(*next(tokens))
                    if ending_of_bad_tuple(x_next):
                        errors.append(x.start)
            previous_token = x
    return errors