Example #1
    def __init__(self, target, replacement, inspectors=None):
        inspectors = inspectors or []

        # Tokenized form of the phrase to be replaced
        self.tokens = tokenize(target)
        self.replacement = replacement
        self.inspectors = inspectors

        self._search_strategy = TokenSearchStrategy(target)
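
The tokenize() helper and TokenSearchStrategy are not shown in these excerpts. A minimal sketch of what they might look like, assuming tokens are plain strings and that ''.join(tokenize(s)) == s, a property that _replace() in Example #2 below relies on:

import re

def tokenize(text):
    # Hypothetical tokenizer: identifier runs, whitespace runs, and
    # single punctuation characters, so that ''.join(tokenize(s)) == s
    return re.findall(r'\w+|\s+|[^\w\s]', text)

class TokenSearchStrategy:

    def __init__(self, target):
        self._target_tokens = tokenize(target)

    def find(self, tokens):
        # Naive subsequence search: return the index where the target
        # token sequence starts, or -1 if it never appears
        width = len(self._target_tokens)
        for i in range(len(tokens) - width + 1):
            if tokens[i:i + width] == self._target_tokens:
                return i
        return -1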
Example #2
    def _replace(self, line):
        tokens = tokenize(line)
        index = self._search_strategy.find(tokens)

        # The target token sequence does not appear in this line
        if index == -1:
            return line

        # A registered inspector judged the match to be a false positive
        if self._is_misdetection(line):
            return line

        # Splice the replacement in over the matched token span
        replaced_tokens = (tokens[:index] +
                           [self.replacement] +
                           tokens[index + len(self.tokens):])
        return ''.join(replaced_tokens)
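
The _is_misdetection() helper is not shown either. Given that inspectors is a list, one plausible reading is that each inspector is a callable that can veto a match as a false positive; a sketch of the method under that assumption:

    def _is_misdetection(self, line):
        # Hypothetical: any registered inspector flagging the line
        # makes us treat the match as a false positive
        return any(inspector(line) for inspector in self.inspectors)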
Example #3
    def __call__(self, text):
        lines = text.split(os.linesep)

        for lineno, line in enumerate(lines, start=1):

            # First do a rough, string-level check for a match
            position = line.find(self._word)
            if position == -1:
                continue

            # If a likely match is found, examine it closely with the parser
            tokens = tokenize(line)
            index = self.find(tokens)
            if index == -1:
                continue

            # Also check that it is not a false positive (optional; only when inspectors are registered)
            if self._is_misdetection(line):
                continue

            yield Violation(lineno, position, line)
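
Violation is referenced but never defined in these excerpts. A namedtuple with the three fields passed at the yield site would fit:

from collections import namedtuple

# Assumed shape, matching the Violation(lineno, position, line) call above
Violation = namedtuple('Violation', ['lineno', 'position', 'line'])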
Example #4
    def __init__(self, word, inspectors=None):
        inspectors = inspectors or []

        # Keep both the tokenized form of the word and the raw string;
        # __call__() uses the raw string for its cheap first-pass check
        self.tokens = tokenize(word)
        self.inspectors = inspectors
        self._word = word
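
Examples #3 and #4 read like two methods of the same class. As a usage sketch, here is one way the pieces might assemble into a runnable whole; the class name WordChecker is chosen purely for illustration, and tokenize() and Violation are the sketches defined above:

import os

class WordChecker:
    # Hypothetical assembly: __init__ as in Example #4, __call__ as in
    # Example #3, with find() and _is_misdetection() filled in from the
    # sketches above

    def __init__(self, word, inspectors=None):
        self.tokens = tokenize(word)
        self.inspectors = inspectors or []
        self._word = word

    def find(self, tokens):
        # Same naive subsequence search as the TokenSearchStrategy sketch
        width = len(self.tokens)
        for i in range(len(tokens) - width + 1):
            if tokens[i:i + width] == self.tokens:
                return i
        return -1

    def _is_misdetection(self, line):
        return any(inspector(line) for inspector in self.inspectors)

    def __call__(self, text):
        for lineno, line in enumerate(text.split(os.linesep), start=1):
            position = line.find(self._word)
            if position == -1:
                continue
            if self.find(tokenize(line)) == -1 or self._is_misdetection(line):
                continue
            yield Violation(lineno, position, line)

source = os.linesep.join(['x = 1', 'y = eval(expr)'])
for violation in WordChecker('eval')(source):
    print(violation)
    # -> Violation(lineno=2, position=4, line='y = eval(expr)')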