def test_ignore_glob(self):
    """Bear-name globs in an ignore range decide which results match."""
    for bear_name, is_ignored in (('LineLengthBear', True),
                                  ('SpaceConsistencyBear', True),
                                  ('XMLBear', False)):
        result = Result.from_values(bear_name, 'message', file='d',
                                    line=1, column=1, end_line=2,
                                    end_column=2)
        ranges = [(['(line*|space*)', 'py*'],
                   SourceRange.from_values('d', 1, 1, 2, 2))]
        if is_ignored:
            self.assertTrue(check_result_ignore(result, ranges))
        else:
            self.assertFalse(check_result_ignore(result, ranges))
def test_print_results_missing_line(self):
    # The result on line 6 points past the end of the 5-line file, so
    # its content row is rendered as STR_LINE_DOESNT_EXIST.
    with retrieve_stdout() as stdout:
        print_results(
            self.log_printer,
            Section(''),
            [Result.from_values('t', 'msg', file='file', line=5),
             Result.from_values('t', 'msg', file='file', line=6)],
            {abspath('file'): ['line ' + str(i + 1) for i in range(5)]},
            {},
            self.console_printer)
        self.assertEqual('\n'
                         'file\n'
                         '| 5| {0}\n'
                         '| | [NORMAL] t:\n'
                         '| | {1}\n'
                         '\n'
                         'file\n'
                         '| 6| {2}\n'
                         '| | [NORMAL] t:\n'
                         '| | {1}\n'.format(
                             highlight_text(self.no_color, 'line 5',
                                            self.lexer),
                             highlight_text(self.no_color, 'msg',
                                            style=BackgroundMessageStyle),
                             STR_LINE_DOESNT_EXIST),
                         stdout.getvalue())
def test_ignore_results(self):
    # check_result_ignore combines range overlap with origin glob lists.
    ranges = [([], SourceRange.from_values('f', 1, 1, 2, 2))]
    result = Result.from_values('origin (Something Specific)',
                                'message',
                                file='e', line=1, column=1,
                                end_line=2, end_column=2)
    # Range is in a different file -> not ignored.
    self.assertFalse(check_result_ignore(result, ranges))
    # Same file but a non-overlapping range -> still not ignored.
    ranges.append(([], SourceRange.from_values('e', 2, 3, 3, 3)))
    self.assertFalse(check_result_ignore(result, ranges))
    # Overlapping range with an empty glob list -> ignored.
    ranges.append(([], SourceRange.from_values('e', 1, 1, 2, 2)))
    self.assertTrue(check_result_ignore(result, ranges))
    # A result with no line information in the ignored file -> ignored.
    result1 = Result.from_values('origin', 'message', file='e')
    self.assertTrue(check_result_ignore(result1, ranges))
    # Globs not covering the origin -> not ignored.
    ranges = [(['something', 'else', 'not origin'],
               SourceRange.from_values('e', 1, 1, 2, 2))]
    self.assertFalse(check_result_ignore(result, ranges))
    # A glob matching the origin -> ignored.
    ranges = [(['something', 'else', 'origin'],
               SourceRange.from_values('e', 1, 1, 2, 2))]
    self.assertTrue(check_result_ignore(result, ranges))
def test_ignore_glob(self):
    """Check bear-name glob matching against a fixed ignore range."""
    expectations = {'LineLengthBear': True,
                    'SpaceConsistencyBear': True,
                    'XMLBear': False}
    for bear_name, should_ignore in expectations.items():
        with self.subTest(bear=bear_name):
            result = Result.from_values(bear_name, 'message', file='d',
                                        line=1, column=1, end_line=2,
                                        end_column=2)
            ranges = [(['(line*|space*)', 'py*'],
                       SourceRange.from_values('d', 1, 1, 2, 2))]
            self.assertEqual(bool(check_result_ignore(result, ranges)),
                             should_ignore)
def test_print_results_sorting(self):
    # Results are printed ordered by line number (2 before 5) even
    # though they were passed in the opposite order.
    with retrieve_stdout() as stdout:
        print_results(self.log_printer,
                      Section(""),
                      [Result.from_values("SpaceConsistencyBear",
                                          "Trailing whitespace found",
                                          file="file",
                                          line=5),
                       Result.from_values("SpaceConsistencyBear",
                                          "Trailing whitespace found",
                                          file="file",
                                          line=2)],
                      {abspath("file"): ["test line\n",
                                         "line 2\n",
                                         "line 3\n",
                                         "line 4\n",
                                         "line 5\n"]},
                      {},
                      color=False)
        self.assertEqual("""
file
| 2| line 2
| | [NORMAL] SpaceConsistencyBear:
| | Trailing whitespace found

file
| 5| line 5
| | [NORMAL] SpaceConsistencyBear:
| | Trailing whitespace found
""", stdout.getvalue())
def test_is_applicable(self):
    """is_applicable rejects non-Results and results on missing files."""
    # A plain string is not a Result instance at all.
    with self.assertRaises(TypeError):
        IgnoreResultAction.is_applicable('str', {}, {})

    missing = Result.from_values('origin', 'msg', "file doesn't exist", 2)
    self.assertEqual(
        IgnoreResultAction.is_applicable(missing, {}, {}),
        "The result is associated with source code that doesn't "
        'seem to exist.')

    self.assertEqual(
        IgnoreResultAction.is_applicable(Result('', ''), {}, {}),
        'The result is not associated with any source code.')

    with make_temp() as existing_file:
        self.assertTrue(IgnoreResultAction.is_applicable(
            Result.from_values('origin', 'msg', existing_file, 2),
            {}, {}))
def test_ignore_results(self):
    # Overlap and origin-glob behaviour of check_result_ignore.
    ranges = [([], SourceRange.from_values("f", 1, 1, 2, 2))]
    result = Result.from_values("origin",
                                "message",
                                file="e", line=1, column=1,
                                end_line=2, end_column=2)
    # Different file -> not ignored.
    self.assertFalse(check_result_ignore(result, ranges))
    # Same file, non-overlapping range -> not ignored.
    ranges.append(([], SourceRange.from_values("e", 2, 3, 3, 3)))
    self.assertFalse(check_result_ignore(result, ranges))
    # Overlapping range, empty glob list -> ignored.
    ranges.append(([], SourceRange.from_values("e", 1, 1, 2, 2)))
    self.assertTrue(check_result_ignore(result, ranges))
    # A result without line information is NOT ignored in this version.
    result1 = Result.from_values("origin", "message", file="e")
    self.assertFalse(check_result_ignore(result1, ranges))
    # Globs not covering the origin -> not ignored.
    ranges = [(['something', 'else', 'not origin'],
               SourceRange.from_values("e", 1, 1, 2, 2))]
    self.assertFalse(check_result_ignore(result, ranges))
    # A glob matching the origin -> ignored.
    ranges = [(['something', 'else', 'origin'],
               SourceRange.from_values("e", 1, 1, 2, 2))]
    self.assertTrue(check_result_ignore(result, ranges))
def test_bad_placeholder_space_color(self):
    # Four distinct stylint rules fire on the test file; all NORMAL.
    filename = 'test_bad_placeholder_space_color.styl'
    file_contents = load_testfile(filename)
    self.check_results(
        self.uut,
        file_contents,
        [Result.from_values('StylintBear',
                            message='always use a placeholder variable '
                                    'when extending',
                            file=get_testfile_path(filename),
                            line=4,
                            severity=RESULT_SEVERITY.NORMAL),
         Result.from_values('StylintBear',
                            message='hexidecimal color should '
                                    'be a variable',
                            file=get_testfile_path(filename),
                            line=6,
                            column=8,
                            severity=RESULT_SEVERITY.NORMAL),
         Result.from_values('StylintBear',
                            message='line comments require a space '
                                    'after //',
                            file=get_testfile_path(filename),
                            line=8,
                            column=2,
                            severity=RESULT_SEVERITY.NORMAL),
         Result.from_values('StylintBear',
                            message='commas must be followed '
                                    'by a space for readability',
                            file=get_testfile_path(filename),
                            line=9,
                            column=6,
                            severity=RESULT_SEVERITY.NORMAL),
         ],
        filename=get_testfile_path(filename))
def test_string_dict(self):
    # An empty Result serialises to all-empty strings with the default
    # severity and confidence.
    uut = Result(None, '')
    output = uut.to_string_dict()
    self.assertEqual(output,
                     {'id': str(uut.id),
                      'origin': '',
                      'message': '',
                      'file': '',
                      'line_nr': '',
                      'severity': 'NORMAL',
                      'debug_msg': '',
                      'additional_info': '',
                      'confidence': '100'})

    # A fully populated Result serialises every field as a string;
    # the file is converted to an absolute path.
    uut = Result.from_values(origin='origin',
                             message='msg',
                             file='file',
                             line=2,
                             severity=RESULT_SEVERITY.INFO,
                             additional_info='hi!',
                             debug_msg='dbg',
                             confidence=50)
    output = uut.to_string_dict()
    self.assertEqual(output,
                     {'id': str(uut.id),
                      'origin': 'origin',
                      'message': 'msg',
                      'file': abspath('file'),
                      'line_nr': '2',
                      'severity': 'INFO',
                      'debug_msg': 'dbg',
                      'additional_info': 'hi!',
                      'confidence': '50'})

    # Line numbers are stringified.
    uut = Result.from_values(origin='o', message='m', file='f', line=5)
    output = uut.to_string_dict()
    self.assertEqual(output['line_nr'], '5')
def test_print_results_sorting(self):
    # Results are sorted by line number before printing; highlighted
    # line content and messages are substituted into the template.
    with retrieve_stdout() as stdout:
        print_results(self.log_printer,
                      Section(''),
                      [Result.from_values('SpaceConsistencyBear',
                                          'Trailing whitespace found',
                                          file='file',
                                          line=5),
                       Result.from_values('SpaceConsistencyBear',
                                          'Trailing whitespace found',
                                          file='file',
                                          line=2)],
                      {abspath('file'): ['test line\n',
                                         '\t\n',
                                         'line 3\n',
                                         'line 4\n',
                                         'line 5\t\n']},
                      {},
                      self.console_printer)
        self.assertEqual("""
file
| 2| {0}
| | [NORMAL] SpaceConsistencyBear:
| | {1}

file
| 5| {2}
| | [NORMAL] SpaceConsistencyBear:
| | {1}\n""".format(highlight_text(self.no_color, '\t', self.lexer),
                    highlight_text(self.no_color,
                                   'Trailing whitespace found',
                                   style=BackgroundMessageStyle),
                    highlight_text(self.no_color, 'line 5\t',
                                   self.lexer)),
                         stdout.getvalue())
def process_output(self, output, filename, file, cc_threshold: int=10):
    """
    Convert the tool's JSON report into complexity results.

    :param cc_threshold: Threshold value for cyclomatic complexity
    """
    message = '{} has a cyclomatic complexity of {}.'
    if output:
        try:
            output = json.loads(output)
        except JSONDecodeError:
            # The tool crashed instead of emitting JSON; surface each
            # fatal parse error it reported as a MAJOR result and stop.
            output_regex = (r'Fatal error \[getReports\]: .+: '
                            r'Line (?P<line>\d+): (?P<message>.*)')
            for match in re.finditer(output_regex, output):
                groups = match.groupdict()
                yield Result.from_values(
                    origin=self,
                    message=groups['message'].strip(),
                    file=filename,
                    severity=RESULT_SEVERITY.MAJOR,
                    line=int(groups['line']))
            return
        # One result per function whose complexity reaches the threshold.
        for function in output['reports'][0]['functions']:
            if function['cyclomatic'] >= cc_threshold:
                yield Result.from_values(
                    origin=self,
                    message=message.format(function['name'],
                                           function['cyclomatic']),
                    file=filename,
                    line=function['line'])
def test_overlaps(self):
    """overlaps() accepts a single SourceRange or an iterable of them."""
    in_range = SourceRange.from_values('file1', 1, 1, 2, 2)
    out_of_range = SourceRange.from_values('file2', 1, 1, 2, 2)
    uut = Result.from_values('origin', 'message', file='file1', line=1,
                             column=1, end_line=2, end_column=2)
    self.assertTrue(uut.overlaps(in_range))
    self.assertTrue(uut.overlaps([in_range]))
    self.assertFalse(uut.overlaps(out_of_range))

    # Ranges without column information are compared by line only.
    in_range = SourceRange.from_values('file1', 1, None, 1, None)
    out_of_range = SourceRange.from_values('file2', 1, None, 1, None)
    uut = Result.from_values('origin', 'message', file='file1', line=1,
                             column=1, end_line=1, end_column=20)
    self.assertTrue(uut.overlaps(in_range))
    self.assertTrue(uut.overlaps([in_range]))
    self.assertFalse(uut.overlaps(out_of_range))
def test_print_results_missing_line(self):
    # Line 6 does not exist in the 5-line file, so its content row is
    # replaced by STR_LINE_DOESNT_EXIST.
    with retrieve_stdout() as stdout:
        print_results(
            self.log_printer,
            Section(""),
            [Result.from_values("t", "msg", file="file", line=5),
             Result.from_values("t", "msg", file="file", line=6)],
            {abspath("file"): ["line " + str(i + 1) for i in range(5)]},
            {},
            color=False)
        self.assertEqual("\n"
                         "file\n"
                         "| 5| {0}\n"
                         "| | [NORMAL] t:\n"
                         "| | {1}\n"
                         "\n"
                         "file\n"
                         "| 6| {2}\n"
                         "| | [NORMAL] t:\n"
                         "| | {1}\n".format(
                             highlight_text('line 5', self.lexer),
                             highlight_text("msg",
                                            style=BackgroundMessageStyle),
                             STR_LINE_DOESNT_EXIST),
                         stdout.getvalue())
def test_print_results_sorting(self):
    # Results are printed ordered by line number; the highlighted line
    # content and message are substituted into the expected template.
    with retrieve_stdout() as stdout:
        print_results(self.log_printer,
                      Section(""),
                      [Result.from_values("SpaceConsistencyBear",
                                          "Trailing whitespace found",
                                          file="file",
                                          line=5),
                       Result.from_values("SpaceConsistencyBear",
                                          "Trailing whitespace found",
                                          file="file",
                                          line=2)],
                      {abspath("file"): ["test line\n",
                                         "\t\n",
                                         "line 3\n",
                                         "line 4\n",
                                         "line 5\t\n"]},
                      {},
                      color=False)
        self.assertEqual("""
file
| 2| {0}
| | [NORMAL] SpaceConsistencyBear:
| | {1}

file
| 5| {2}
| | [NORMAL] SpaceConsistencyBear:
| | {1}\n""".format(highlight_text('\t', self.lexer),
                    highlight_text("Trailing whitespace found",
                                   style=BackgroundMessageStyle),
                    highlight_text('line 5\t', self.lexer)),
                         stdout.getvalue())
def test_apply(self):
    # Initial file contents, *before* a patch was applied
    file_dict = {self.fa: ["1\n", "2\n", "3\n"],
                 self.fb: ["1\n", "2\n", "3\n"],
                 "f_c": ["1\n", "2\n", "3\n"]}

    # A patch that was applied for some reason to make things complicated
    diff_dict = {self.fb: Diff(file_dict[self.fb])}
    diff_dict[self.fb].change_line(3, "3\n", "3_changed\n")

    # File contents after the patch was applied, that's what's in the files
    current_file_dict = {
        filename: diff_dict[filename].modified
        if filename in diff_dict
        else file_dict[filename]
        for filename in (self.fa, self.fb)}
    for filename in current_file_dict:
        with open(filename, "w") as handle:
            handle.writelines(current_file_dict[filename])

    # End file contents after the patch and the OpenEditorAction was
    # applied
    expected_file_dict = {self.fa: ["1\n", "3\n"],
                          self.fb: ["1\n", "3_changed\n"],
                          "f_c": ["1\n", "2\n", "3\n"]}

    section = Section("")
    section.append(Setting("editor", ""))
    uut = OpenEditorAction()
    # Replace the real editor invocation with the test's fake editor.
    subprocess.call = self.fake_edit
    diff_dict = uut.apply_from_section(
        Result.from_values("origin", "msg", self.fa),
        file_dict, diff_dict, section)
    diff_dict = uut.apply_from_section(
        Result.from_values("origin", "msg", self.fb),
        file_dict, diff_dict, section)

    for filename in diff_dict:
        file_dict[filename] = diff_dict[filename].modified

    self.assertEqual(file_dict, expected_file_dict)
def test_string_dict(self):
    # An empty Result serialises to all-empty strings with defaults.
    uut = Result(None, "")
    output = uut.to_string_dict()
    self.assertEqual(output,
                     {"id": str(uut.id),
                      "origin": "",
                      "message": "",
                      "file": "",
                      "line_nr": "",
                      "severity": "NORMAL",
                      "debug_msg": "",
                      "additional_info": "",
                      "confidence": "100"})

    # A populated Result serialises every field as a string; the file
    # becomes an absolute path.
    uut = Result.from_values(origin="origin",
                             message="msg",
                             file="file",
                             line=2,
                             severity=RESULT_SEVERITY.INFO,
                             additional_info="hi!",
                             debug_msg="dbg",
                             confidence=50)
    output = uut.to_string_dict()
    self.assertEqual(output,
                     {"id": str(uut.id),
                      "origin": "origin",
                      "message": "msg",
                      "file": abspath("file"),
                      "line_nr": "2",
                      "severity": "INFO",
                      "debug_msg": "dbg",
                      "additional_info": "hi!",
                      "confidence": "50"})

    # Line numbers are stringified.
    uut = Result.from_values(origin="o", message="m", file="f", line=5)
    output = uut.to_string_dict()
    self.assertEqual(output["line_nr"], "5")
def test_ignore(self):
    uut = IgnoreResultAction()
    with make_temp() as f_a:
        file_dict = {
            f_a: ['1\n', '2\n', '3\n']
        }
        file_diff_dict = {}

        # Apply an initial patch
        uut.apply(Result.from_values('origin', 'msg', f_a, 2),
                  file_dict,
                  file_diff_dict,
                  'c')
        # The ignore comment is appended to the result's line.
        self.assertEqual(
            file_diff_dict[f_a].modified,
            ['1\n', '2 // Ignore origin\n', '3\n'])
        with open(f_a, 'r') as f:
            self.assertEqual(file_diff_dict[f_a].modified, f.readlines())
        # A backup of the original file is written next to it.
        self.assertTrue(exists(f_a + '.orig'))

        # Apply a second patch, old patch has to stay!
        uut.apply(Result.from_values('else', 'msg', f_a, 1),
                  file_dict,
                  file_diff_dict,
                  'c')
        self.assertEqual(
            file_diff_dict[f_a].modified,
            ['1 // Ignore else\n', '2 // Ignore origin\n', '3\n'])
        with open(f_a, 'r') as f:
            self.assertEqual(file_diff_dict[f_a].modified, f.readlines())
def test_cyclomatic_complexity(self):
    """Severity depends on whether the complexity exceeds the setting."""
    # First an INFO case (threshold above 51), then a MAJOR case.
    for threshold, severity in ((52, RESULT_SEVERITY.INFO),
                                (10, RESULT_SEVERITY.MAJOR)):
        self.check_results(
            self.uut,
            test_file3.splitlines(True),
            [Result.from_values('RadonBear',
                                'f has a cyclomatic complexity of 51',
                                severity=severity,
                                file='test_file3',
                                line=1,
                                end_line=1)],
            filename='test_file3',
            settings={'cyclomatic_complexity': threshold})
def test_naming_violation(self):
    """Each too-short variable name yields one result on its line."""
    file_contents = load_testfile('naming_violation.php')
    self.section.append(Setting('phpmd_rulesets', 'naming'))
    path = get_testfile_path('naming_violation.php')
    expected = [
        Result.from_values('PHPMessDetectorBear',
                           'Avoid variables with short names like '
                           '{}. Configured minimum length is 3.'
                           .format(variable),
                           file=path,
                           line=line)
        for variable, line in (('$q', 3), ('$as', 4), ('$r', 5))]
    self.check_results(self.uut, file_contents, expected, filename=path)
def test_string_dict(self):
    # Older to_string_dict() variant: no additional_info/confidence keys
    # and the file path is NOT converted to an absolute path.
    uut = Result(None, "")
    output = uut.to_string_dict()
    self.assertEqual(output,
                     {"id": str(uut.id),
                      "origin": "",
                      "message": "",
                      "file": "",
                      "line_nr": "",
                      "severity": "NORMAL",
                      "debug_msg": ""})

    uut = Result.from_values(origin="origin",
                             message="msg",
                             file="file",
                             line=2,
                             severity=RESULT_SEVERITY.INFO,
                             debug_msg="dbg")
    output = uut.to_string_dict()
    self.assertEqual(output,
                     {"id": str(uut.id),
                      "origin": "origin",
                      "message": "msg",
                      "file": "file",
                      "line_nr": "2",
                      "severity": "INFO",
                      "debug_msg": "dbg"})

    # Line numbers are stringified.
    uut = Result.from_values(origin="o", message="m", file="f", line=5)
    output = uut.to_string_dict()
    self.assertEqual(output["line_nr"], "5")
def test_bad_lowercase_tagname(self):
    """Uppercase element names are flagged on every occurrence."""
    filename = 'test_bad_lowercase_tagname.html'
    file_contents = load_testfile(filename)
    path = get_testfile_path(filename)
    expected = [
        Result.from_values('HTMLHintBear',
                           message='The html element name of [ SPAN ] '
                                   'must be in lowercase.',
                           file=path,
                           line=line,
                           column=1,
                           end_line=line,
                           end_column=1,
                           severity=RESULT_SEVERITY.MAJOR)
        for line in (2, 3)]
    self.check_results(self.uut, file_contents, expected, filename=path)
def test_print_results_missing_line(self):
    # The result on line 6 points past the 5-line file; its row shows
    # no line number and STR_LINE_DOESNT_EXIST as content.
    with retrieve_stdout() as stdout:
        print_results(
            self.log_printer,
            Section(""),
            [
                Result.from_values("t", "msg", file="file", line=5),
                Result.from_values("t", "msg", file="file", line=6),
            ],
            {abspath("file"): ["line " + str(i + 1) for i in range(5)]},
            {},
            color=False,
        )
        self.assertEqual(
            "\n"
            "file\n"
            "| 5| line 5\n"
            "| | [NORMAL] t:\n"
            "| | msg\n"
            "\n"
            "file\n"
            "| | {}\n"
            "| | [NORMAL] t:\n"
            "| | msg\n".format(STR_LINE_DOESNT_EXIST),
            stdout.getvalue(),
        )
def test_stdin_stderr_config_correction(self):
    # End-to-end check of a linter that reads stdin, writes corrections
    # to stderr and generates its own config file.
    create_arguments_mock = Mock()
    generate_config_mock = Mock()

    # `some_value_A` and `some_value_B` are used to test the different
    # delegation to `generate_config()` and `create_arguments()`
    # accordingly.
    class Handler:

        @staticmethod
        def generate_config(filename, file, some_value_A):
            generate_config_mock(filename, file, some_value_A)
            return "\n".join(["use_stdin", "use_stderr", "correct"])

        @staticmethod
        def create_arguments(filename, file, config_file, some_value_B):
            create_arguments_mock(filename, file, config_file,
                                  some_value_B)
            return self.test_program_path, "--config", config_file

    uut = (linter(sys.executable,
                  use_stdin=True,
                  use_stdout=False,
                  use_stderr=True,
                  output_format="corrected",
                  config_suffix=".conf")
           (Handler)
           (self.section, None))

    results = list(uut.run(self.testfile2_path,
                           self.testfile2_content,
                           some_value_A=124,
                           some_value_B=-78))

    expected_correction = [s + "\n" for s in ["+", "/", "/", "-"]]

    diffs = list(Diff.from_string_arrays(
        self.testfile2_content,
        expected_correction).split_diff())

    # One result per split diff hunk, each carrying its own diff.
    expected = [Result.from_values(uut,
                                   "Inconsistency found.",
                                   self.testfile2_path,
                                   1, None, 1, None,
                                   RESULT_SEVERITY.NORMAL,
                                   diffs={self.testfile2_path: diffs[0]}),
                Result.from_values(uut,
                                   "Inconsistency found.",
                                   self.testfile2_path,
                                   5, None, 5, None,
                                   RESULT_SEVERITY.NORMAL,
                                   diffs={self.testfile2_path: diffs[1]})]

    self.assertEqual(results, expected)

    create_arguments_mock.assert_called_once_with(
        self.testfile2_path, self.testfile2_content, ANY, -78)
    # The generated config file carries the configured suffix.
    self.assertEqual(create_arguments_mock.call_args[0][2][-5:], ".conf")
    generate_config_mock.assert_called_once_with(
        self.testfile2_path, self.testfile2_content, 124)
def test_is_applicable(self):
    """Only results whose file exists on disk are applicable."""
    # Not a Result instance at all.
    self.assertFalse(IgnoreResultAction.is_applicable('str', {}, {}))
    # A result pointing at a non-existent file.
    missing = Result.from_values('origin', 'msg', "file doesn't exist", 2)
    self.assertFalse(IgnoreResultAction.is_applicable(missing, {}, {}))
    # A result pointing at a real temporary file.
    with make_temp() as temp_file:
        applicable = Result.from_values('origin', 'msg', temp_file, 2)
        self.assertTrue(
            IgnoreResultAction.is_applicable(applicable, {}, {}))
def run(self, filename, file,
        timeout: int = DEFAULT_TIMEOUT,
        ignore_regex: str = r'[.\/]example\.com'):
    """
    Find links in any text file and check if they are valid.

    A link is considered valid if the server responds with a 2xx code.

    This bear can automatically fix redirects, but ignores redirect
    URLs that have a huge difference with the original URL.

    :param timeout:      Request timeout period.
    :param ignore_regex: A regex for urls to ignore.
    """
    # NOTE: the default is now a raw string; the previous plain string
    # contained the invalid escape sequence '\/' (same runtime value,
    # but a DeprecationWarning/SyntaxWarning on newer Pythons).
    for line_number, link, code in InvalidLinkBear.find_links_in_file(
            file, timeout, ignore_regex):
        if code is None:
            # No response at all -> broken link, MAJOR.
            yield Result.from_values(
                origin=self,
                message=('Broken link - unable to connect to '
                         '{url}').format(url=link),
                file=filename,
                line=line_number,
                severity=RESULT_SEVERITY.MAJOR)
        elif not 200 <= code < 300:
            # HTTP status 404, 410 or 50x
            if code in (404, 410) or 500 <= code < 600:
                yield Result.from_values(
                    origin=self,
                    message=('Broken link - unable to connect to {url} '
                             '(HTTP Error: {code})'
                             ).format(url=link, code=code),
                    file=filename,
                    line=line_number,
                    severity=RESULT_SEVERITY.NORMAL)
            if 300 <= code < 400:  # HTTP status 30x
                # Propose replacing the link with its redirect target,
                # but only when the two URLs are similar enough.
                redirect_url = requests.head(link,
                                             allow_redirects=True).url
                matcher = SequenceMatcher(None, redirect_url, link)
                # FIX: the original wrote
                # `(matcher.real_quick_ratio() > 0.7 and
                #   matcher.ratio()) > 0.7` — the closing parenthesis
                # was misplaced so the `> 0.7` applied to the result of
                # the `and`. Both ratio checks are intended.
                if (matcher.real_quick_ratio() > 0.7 and
                        matcher.ratio() > 0.7):
                    diff = Diff(file)
                    current_line = file[line_number - 1]
                    start = current_line.find(link)
                    end = start + len(link)
                    replacement = (current_line[:start] + redirect_url +
                                   current_line[end:])
                    diff.change_line(line_number, current_line,
                                     replacement)
                    yield Result.from_values(
                        self,
                        'This link redirects to ' + redirect_url,
                        diffs={filename: diff},
                        file=filename,
                        line=line_number,
                        severity=RESULT_SEVERITY.NORMAL)
def test_process_output_regex(self):
    # Also test the case when an unknown severity is matched.
    test_output = ("12:4-14:0-Serious issue (error) -> ORIGIN=X\n"
                   "0:0-0:1-This is a warning (warning) -> ORIGIN=Y\n"
                   "813:77-1024:32-Just a note (info) -> ORIGIN=Z\n"
                   "0:0-0:0-Some unknown sev (???) -> ORIGIN=W\n")
    regex = (r"(?P<line>\d+):(?P<column>\d+)-"
             r"(?P<end_line>\d+):(?P<end_column>\d+)-"
             r"(?P<message>.*) \((?P<severity>.*)\) -> "
             r"ORIGIN=(?P<origin>.*)")
    uut = (linter(sys.executable,
                  output_format="regex",
                  output_regex=regex)
           (self.EmptyTestLinter)
           (self.section, None))
    uut.warn = Mock()

    sample_file = "some-file.xtx"
    results = list(uut.process_output(test_output, sample_file, [""]))
    # The matched origin is appended to the linter name; the unknown
    # severity '???' falls back to NORMAL with a warning.
    expected = [Result.from_values("EmptyTestLinter (X)",
                                   "Serious issue",
                                   sample_file,
                                   12, 4, 14, 0,
                                   RESULT_SEVERITY.MAJOR),
                Result.from_values("EmptyTestLinter (Y)",
                                   "This is a warning",
                                   sample_file,
                                   0, 0, 0, 1,
                                   RESULT_SEVERITY.NORMAL),
                Result.from_values("EmptyTestLinter (Z)",
                                   "Just a note",
                                   sample_file,
                                   813, 77, 1024, 32,
                                   RESULT_SEVERITY.INFO),
                Result.from_values("EmptyTestLinter (W)",
                                   "Some unknown sev",
                                   sample_file,
                                   0, 0, 0, 0,
                                   RESULT_SEVERITY.NORMAL)]
    self.assertEqual(results, expected)
    uut.warn.assert_called_once_with(
        "'???' not found in severity-map. Assuming "
        "`RESULT_SEVERITY.NORMAL`.")

    # Test when providing a sequence as output.
    test_output = ["", "12:4-14:0-Serious issue (error) -> ORIGIN=X\n"]
    results = list(uut.process_output(test_output, sample_file, [""]))
    expected = [Result.from_values("EmptyTestLinter (X)",
                                   "Serious issue",
                                   sample_file,
                                   12, 4, 14, 0,
                                   RESULT_SEVERITY.MAJOR)]
    self.assertEqual(results, expected)
def run(self, filename, file,
        dependency_results=dict(),
        follow_redirects: bool = True,
        ):
    """
    Find links in any text file and check if they are archived.

    Link is considered valid if the link has been archived by any
    services in memento_client.

    This bear can automatically fix redirects.

    Warning: This bear will make HEAD requests to all URLs mentioned in
    your codebase, which can potentially be destructive. As an example,
    this bear would naively just visit the URL from a line that goes like
    `do_not_ever_open = 'https://api.acme.inc/delete-all-data'` wiping
    out all your data.

    :param dependency_results: Results given by URLHeadBear.
    :param follow_redirects: Set to true to check all redirect urls.
    """
    # NOTE(review): `dependency_results=dict()` is a mutable default.
    # It is only read here (never mutated), but presumably the bear
    # framework inspects this exact default — confirm before changing.
    self._mc = MementoClient()

    for result in dependency_results.get(URLHeadBear.name, []):
        line_number, link, code, context = result.contents

        # Only consider links that answered with 2xx/3xx.
        if not (code and 200 <= code < 400):
            continue

        status = MementoBear.check_archive(self._mc, link)
        if not status:
            yield Result.from_values(
                self,
                ('This link is not archived yet, visit '
                 'https://web.archive.org/save/%s to get it archived.'
                 % link),
                file=filename,
                line=line_number,
                severity=RESULT_SEVERITY.INFO
            )

        if follow_redirects and 300 <= code < 400:  # HTTP status 30x
            redirect_urls = MementoBear.get_redirect_urls(link)

            # Also verify every URL in the redirect chain.
            for url in redirect_urls:
                status = MementoBear.check_archive(self._mc, url)
                if not status:
                    yield Result.from_values(
                        self,
                        ('This link redirects to %s and not archived '
                         'yet, '
                         'visit https://web.archive.org/save/%s to get '
                         'it '
                         'archived.'
                         % (url, url)),
                        file=filename,
                        line=line_number,
                        severity=RESULT_SEVERITY.INFO
                    )
def test_process_output_unified_diff_incomplete_hunk(self):
    # A unified diff whose hunks omit context lines must still be split
    # into one result per hunk.
    uut = (linter(sys.executable, output_format='unified-diff')
           (self.EmptyTestLinter)
           (self.section, None))
    original = ['void main() {',
                '// This comment is missing',
                '// in the unified diff',
                'return 09;',
                '}']
    diff = ['--- a/some-file.c',
            '+++ b/some-file.c',
            '@@ -1,1 +1,2 @@',
            '-void main() {',
            '+void main()',
            '+{',
            '@@ -4,2 +5,2 @@',
            '-return 09;',
            '+ return 9;',
            ' }']
    diff_string = '\n'.join(diff)
    results = list(uut.process_output(diff_string,
                                      'some-file.c',
                                      original))
    diffs = list(Diff.from_unified_diff(diff_string,
                                        original).split_diff())
    expected = [Result.from_values(uut,
                                   'Inconsistency found.',
                                   'some-file.c',
                                   1, None, 1, None,
                                   RESULT_SEVERITY.NORMAL,
                                   diffs={'some-file.c': diffs[0]}),
                Result.from_values(uut,
                                   'Inconsistency found.',
                                   'some-file.c',
                                   4, None, 4, None,
                                   RESULT_SEVERITY.NORMAL,
                                   diffs={'some-file.c': diffs[1]})]
    self.assertEqual(results, expected)

    # With diff_distance=-1 the hunks are presumably never merged, so
    # two results remain — confirm against linter's diff_distance docs.
    uut = (linter(sys.executable,
                  output_format='unified-diff',
                  diff_distance=-1)
           (self.EmptyTestLinter)
           (self.section, None))
    results = list(uut.process_output(diff_string,
                                      'some-file.c',
                                      original))
    self.assertEqual(len(results), 2)
def test_comparable_assert_result_equal(self):
    """A differing origin produces a descriptive assertion message."""
    result_a = [Result.from_values(origin='AnyBea',
                                   message='This file has 2 lines.',
                                   file='anyfile')]
    result_b = [Result.from_values(origin='AnyBear',
                                   message='This file has 2 lines.',
                                   file='anyfile')]
    with pytest.raises(AssertionError) as ex:
        self.assert_result_equal(result_a, result_b)
    expected_message = ('origin mismatch: AnyBea, This file has 2 lines.'
                        ' != AnyBear, This file has 2 lines.\n\n')
    assert expected_message == str(ex.value)
def test_is_applicable(self):
    """Applicability of OpenEditorAction for various result shapes."""
    result_no_source = Result("", "")
    result_empty_path = Result.from_values("", "", "")
    result_missing_file = Result.from_values("", "", "file")

    self.assertFalse(
        OpenEditorAction.is_applicable(result_no_source, None, {}))
    self.assertTrue(
        OpenEditorAction.is_applicable(result_empty_path, None, {}))
    # Check non-existent file
    self.assertFalse(
        OpenEditorAction.is_applicable(result_missing_file, None, {}))
    # A non-Result value is never applicable.
    self.assertFalse(OpenEditorAction.is_applicable("", None, {}))
def test_bad_ginger(self):
    """The ginger rule reports a misspelling with its correction."""
    testfile = 'bad_ginger.rst'
    contents = load_testfile(testfile)
    path = get_testfile_path(testfile)
    expected = Result.from_values('TextLintBear',
                                  message='mistaek -> mistake',
                                  line=1,
                                  column=20,
                                  severity=RESULT_SEVERITY.MAJOR,
                                  file=path)
    self.check_results(self.uut, contents, [expected], filename=path)
def test_module_eval_vs_define_method(self):
    """module_eval usage is flagged as slower than define_method."""
    testfile = 'module_eval.rb'
    contents = load_testfile(testfile)
    path = get_testfile_path(testfile)
    expected = Result.from_values('RubyFastererBear',
                                  message='Using module_eval is slower '
                                          'than define_method.',
                                  file=path,
                                  line=3)
    self.check_results(self.uut, contents, [expected], filename=path)
def test_bad_no_start_duplicated_conjunction(self):
    """A conjunction repeated across consecutive phrases is reported."""
    testfile = 'bad_no_start_duplicated_conjunction.txt'
    contents = load_testfile(testfile)
    path = get_testfile_path(testfile)
    expected = Result.from_values('TextLintBear',
                                  message="Don't repeat \"But\" "
                                          'in 2 phrases',
                                  line=2,
                                  column=1,
                                  severity=RESULT_SEVERITY.MAJOR,
                                  file=path)
    self.check_results(self.uut, contents, [expected], filename=path)
def test_bad_duplicates(self):
    """Duplicate properties/selectors are reported with NORMAL severity."""
    testfile = 'test_bad_duplicates.styl'
    contents = load_testfile(testfile)
    path = get_testfile_path(testfile)
    expected = Result.from_values('StylintBear',
                                  message='duplicate property or '
                                          'selector, consider merging',
                                  file=path,
                                  line=4,
                                  severity=RESULT_SEVERITY.NORMAL)
    self.check_results(self.uut, contents, [expected], filename=path)
def test_print_results_without_line(self):
    """A result with no line number prints only the message block."""
    with retrieve_stdout() as stdout:
        print_results(self.log_printer,
                      Section(""),
                      [Result.from_values("t", "msg", file="file")],
                      {abspath("file"): []},
                      {},
                      self.console_printer)
        highlighted_msg = highlight_text(self.no_color, "msg",
                                         style=BackgroundMessageStyle)
        expected = ("\nfile\n"
                    "| | [NORMAL] t:\n"
                    "| | {}\n".format(highlighted_msg))
        self.assertEqual(expected, stdout.getvalue())
def test_bad_ng_words_acronym_list_item(self):
    # Two rules fire at the same position: the NG-word check and the
    # unexpanded-acronym check; a custom textlint config is supplied.
    file_name = 'bad_ng_words_acronym_list_item.md'
    file_contents = load_testfile(file_name)
    self.check_results(
        self.uut,
        file_contents,
        [
            Result.from_values('TextLintBear',
                               message='Document contains NG word '
                                       '"shit"',
                               line=1,
                               column=1,
                               severity=RESULT_SEVERITY.MAJOR,
                               file=get_testfile_path(file_name)),
            Result.from_values('TextLintBear',
                               message='"PSF" is unexpanded acronym. '
                                       'What does "PSF" stands for?',
                               line=1,
                               column=1,
                               severity=RESULT_SEVERITY.MAJOR,
                               file=get_testfile_path(file_name))
        ],
        filename=get_testfile_path(file_name),
        settings={'textlint_config': get_testfile_path('.textlintrc')})
def test_overlaps(self):
    """overlaps() matches only ranges in the result's own file."""
    uut = Result.from_values('origin', 'message', file='file1', line=1,
                             column=1, end_line=2, end_column=2)
    same_file_range = SourceRange.from_values('file1', 1, 1, 2, 2)
    other_file_range = SourceRange.from_values('file2', 1, 1, 2, 2)
    self.assertTrue(uut.overlaps(same_file_range))
    self.assertTrue(uut.overlaps([same_file_range]))
    self.assertFalse(uut.overlaps(other_file_range))
def test_empty_file(self):
    """An empty .travis.yml warns about the missing language key."""
    testfile = '.empty_travis.yml'
    contents = load_testfile(testfile)
    path = get_testfile_path(testfile)
    expected = Result.from_values('TravisLintBear',
                                  message='missing key language, '
                                          'defaulting to ruby',
                                  file=path,
                                  severity=RESULT_SEVERITY.NORMAL)
    self.check_results(self.uut, contents, [expected], filename=path)
def process_output(self, output, filename, file):
    """Convert the tool's JSON report into Results, one per issue."""
    report = json.loads(output)
    for severity, section in report.items():
        # The 'summary' entry holds aggregate data, not issues.
        if severity == 'summary':
            continue
        for issue in section['data']:
            yield Result.from_values(
                origin=self,
                message=issue['message'],
                file=filename,
                severity=self.severity_map[issue['level']],
                line=issue.get('line'))
def test_bad_header(self):
    """A TE header used with HTTP/2 is reported as MAJOR (notice 1244)."""
    testfile = 'test_bad_header.har'
    contents = load_testfile(testfile)
    path = get_testfile_path(testfile)
    expected = Result.from_values(
        'HTTPoliceLintBear',
        message="1244 TE header can't be used in HTTP/2",
        file=path,
        severity=RESULT_SEVERITY.MAJOR)
    self.check_results(self.uut, contents, [expected], filename=path)
def test_syntax_error_in_request_target(self):
    """A malformed request target is reported as MAJOR (notice 1045)."""
    testfile = 'test_syntax_error_in_request_target.har'
    contents = load_testfile(testfile)
    path = get_testfile_path(testfile)
    expected = Result.from_values(
        'HTTPoliceLintBear',
        message='1045 Syntax error in request target',
        file=path,
        severity=RESULT_SEVERITY.MAJOR)
    self.check_results(self.uut, contents, [expected], filename=path)
def process_output(self, output, filename, file):
    """Yield one Result per issue found in the tool's JSON output."""
    parsed = json.loads(output)
    # Every top-level key except 'summary' is a severity bucket.
    severities = (key for key in parsed if key != 'summary')
    for severity in severities:
        for issue in parsed[severity]['data']:
            yield Result.from_values(
                origin=self,
                message=issue['message'],
                file=filename,
                severity=self.severity_map[issue['level']],
                line=issue.get('line'))
def test_print_results_missing_line(self):
    # NOTE(review): "line•5" below contains U+2022 (bullet), not a
    # space — presumably a deliberate whitespace-visualisation marker in
    # the printer's output; confirm against the printer implementation.
    with retrieve_stdout() as stdout:
        print_results(
            self.log_printer,
            Section(""),
            [
                Result.from_values("t", "msg", file="file", line=5),
                Result.from_values("t", "msg", file="file", line=6)
            ],
            {abspath("file"): ["line " + str(i + 1) for i in range(5)]},
            {},
            color=False)
        self.assertEqual(
            "\n"
            "file\n"
            "| 5| line•5\n"
            "| | [NORMAL] t:\n"
            "| | msg\n"
            "\n"
            "file\n"
            "| | {}\n"
            "| | [NORMAL] t:\n"
            "| | msg\n".format(STR_LINE_DOESNT_EXIST),
            stdout.getvalue())
def test_overlaps(self):
    """A result overlaps a range (or list of ranges) in its own file."""
    matching = SourceRange.from_values("file1", 1, 1, 2, 2)
    non_matching = SourceRange.from_values("file2", 1, 1, 2, 2)
    result = Result.from_values("origin", "message", file="file1",
                                line=1, column=1, end_line=2,
                                end_column=2)
    for range_arg in (matching, [matching]):
        self.assertTrue(result.overlaps(range_arg))
    self.assertFalse(result.overlaps(non_matching))
def test_bad_alex_no_dead_link(self):
    # Two different rules fire: insensitive wording (alex) and a dead
    # link check.
    file_name = 'bad_alex_no_dead_link.md'
    file_contents = load_testfile(file_name)
    self.check_results(
        self.uut,
        file_contents,
        [Result.from_values('TextLintBear',
                            message='[her-him] `his` may be '
                                    'insensitive, '
                                    'use `their`, `theirs`, '
                                    '`them` instead',
                            line=1,
                            column=17,
                            severity=RESULT_SEVERITY.MAJOR,
                            file=get_testfile_path(file_name)),
         Result.from_values('TextLintBear',
                            message='http://httpstat.us/404 is dead. '
                                    '(404 Not Found)',
                            line=2,
                            column=5,
                            severity=RESULT_SEVERITY.MAJOR,
                            file=get_testfile_path(file_name))],
        filename=get_testfile_path(file_name))
def test_print_results_without_line(self):
    """Results lacking a line number omit the source-line row."""
    with retrieve_stdout() as stdout:
        results = [Result.from_values('t', 'msg', file='file')]
        print_results(self.log_printer, Section(''), results,
                      {abspath('file'): []}, {}, self.console_printer)
        self.assertEqual(
            '\nfile\n'
            '| | [NORMAL] t:\n'
            '| | {}\n'.format(
                highlight_text(self.no_color, 'msg',
                               style=BackgroundMessageStyle)),
            stdout.getvalue())
def test_print_results_missing_line(self):
    """Results beyond the file's last line are rendered without code."""
    results = [Result.from_values('t', 'msg', file='file', line=5),
               Result.from_values('t', 'msg', file='file', line=6)]
    file_dict = {abspath('file'): ['line ' + str(i + 1) for i in range(5)]}
    # NOTE: format argument {0} (the highlighted 'line 5') is unused in
    # the template; kept as-is to mirror the expected renderer output.
    expected = ('\n'
                '**** t [Section: ] ****\n\n'
                '! ! [Severity: NORMAL]\n'
                '! ! {1}\n'
                '! !6 {2}'
                '\n\n'
                '**** t [Section: ] ****\n\n'
                '! ! [Severity: NORMAL]\n'
                '! ! {1}\n'.format(
                    highlight_text(self.no_color, 'line 5', self.lexer),
                    highlight_text(self.no_color, 'msg',
                                   style=BackgroundMessageStyle),
                    STR_LINE_DOESNT_EXIST))
    with retrieve_stdout() as stdout:
        print_results(self.log_printer,
                      Section(''),
                      results,
                      file_dict,
                      {},
                      self.console_printer)
        self.assertEqual(expected, stdout.getvalue())
def test_bad_no_empty_section(self):
    """An empty markdown section is flagged as a MAJOR issue."""
    file_name = 'bad_no_empty_section.md'
    testfile_path = get_testfile_path(file_name)
    expected = [Result.from_values('TextLintBear',
                                   message='Found empty section: '
                                           '`# Header B`',
                                   line=5,
                                   column=1,
                                   severity=RESULT_SEVERITY.MAJOR,
                                   file=testfile_path)]
    self.check_results(self.uut,
                       load_testfile(file_name),
                       expected,
                       filename=testfile_path)
def test_bad_write_good_common_misspellings(self):
    """Both the misspelling and the filler-word issue are reported."""
    file_name = 'bad_write_good_common_misspellings.html'
    testfile_path = get_testfile_path(file_name)
    expected = [
        Result.from_values('TextLintBear',
                           message='This is a commonly misspelled word. '
                                   'Correct it to abbreviate',
                           line=8,
                           column=1,
                           severity=RESULT_SEVERITY.MAJOR,
                           file=testfile_path),
        Result.from_values('TextLintBear',
                           message='"So" adds no meaning',
                           line=9,
                           column=1,
                           severity=RESULT_SEVERITY.MAJOR,
                           file=testfile_path),
    ]
    self.check_results(self.uut,
                       load_testfile(file_name),
                       expected,
                       filename=testfile_path)
def test_exception_result(self):
    """Unparsable JSON yields a single parse-error result."""
    expected = [Result.from_values(
        'JSONFormatBear',
        'This file does not contain parsable JSON. '
        'Expecting property name enclosed in '
        'double quotes.',
        file='default',
        line=2,
        column=5)]
    self.check_results(self.uut,
                       test_file4.split('\n'),
                       expected,
                       filename='default')
def test_range_allowed_for_lines_per_file(self):
    """With blank lines excluded, 4 effective lines violate min=5."""
    for key, value in (('min_lines_per_file', 5),
                       ('max_lines_per_file', 10),
                       ('exclude_blank_lines', True)):
        self.section.append(Setting(key, value))
    expected = [Result.from_values('LineCountBear',
                                   'This file has 4 lines, while 5 lines '
                                   'are required.',
                                   severity=RESULT_SEVERITY.NORMAL,
                                   file='default')]
    self.check_results(self.uut,
                       ['line 1', '', 'line 2', '', 'line 3', 'line 4'],
                       expected,
                       filename='default')
def test_minimal_regex(self):
    """A bare output_regex matches literally and yields empty results."""
    uut = (linter(sys.executable,
                  output_format="regex",
                  output_regex="an_issue")
           (self.EmptyTestLinter)
           (self.section, None))
    # Non-matching output produces no results at all.
    no_match = list(uut.process_output(['not an issue'], 'file', [""]))
    self.assertEqual(no_match, [])
    # A match produces one bare result attached to the file.
    match = list(uut.process_output(['an_issue'], 'file', [""]))
    self.assertEqual(match,
                     [Result.from_values("EmptyTestLinter", "",
                                         file="file")])
def test_bad_semicolon(self):
    """An unnecessary semicolon is flagged at its exact position."""
    filename = 'test_bad_semicolon.styl'
    testfile_path = get_testfile_path(filename)
    expected = [Result.from_values('StylintBear',
                                   message='unnecessary semicolon found',
                                   file=testfile_path,
                                   line=2,
                                   column=19,
                                   severity=RESULT_SEVERITY.NORMAL)]
    self.check_results(self.uut,
                       load_testfile(filename),
                       expected,
                       filename=testfile_path)
def test_run(self):
    """A file over the line limit fails; shorter files validate."""
    self.section.append(Setting('min_lines_per_file', 0))
    self.section.append(Setting('max_lines_per_file', 1))
    expected = [Result.from_values(
        'LineCountBear',
        'This file had 3 lines, which is 2 lines more '
        'than the maximum limit specified.',
        severity=RESULT_SEVERITY.NORMAL,
        file='default')]
    self.check_results(self.uut,
                       ['line 1', 'line 2', 'line 3'],
                       expected,
                       filename='default')
    self.check_validity(self.uut, ['1 line'])
    # An empty file is within the [0, 1] line range as well.
    self.check_validity(self.uut, [])
def test_bad_alphabetical_order(self):
    """Out-of-order properties trigger the alphabetical-sort warning."""
    filename = 'test_bad_alphabetical_order.styl'
    testfile_path = get_testfile_path(filename)
    expected = [Result.from_values('StylintBear',
                                   message='prefer alphabetical when '
                                           'sorting properties',
                                   file=testfile_path,
                                   line=3,
                                   severity=RESULT_SEVERITY.NORMAL)]
    self.check_results(self.uut,
                       load_testfile(filename),
                       expected,
                       filename=testfile_path)
def test_bad_no_important(self):
    """A !important declaration is reported as disallowed."""
    filename = 'test_bad_no_important.styl'
    testfile_path = get_testfile_path(filename)
    expected = [Result.from_values('StylintBear',
                                   message='!important is disallowed',
                                   file=testfile_path,
                                   line=2,
                                   column=9,
                                   severity=RESULT_SEVERITY.NORMAL)]
    self.check_results(self.uut,
                       load_testfile(filename),
                       expected,
                       filename=testfile_path)
def run(self, filename, file, config: str = ''):
    """
    Bear that have aspect.

    :param config: An optional dummy config file.
    """
    # Always emit exactly one INFO-level dummy result tagged with the
    # UnusedLocalVariable aspect for Python.
    aspect = Root.Redundancy.UnusedVariable.UnusedLocalVariable('py')
    dummy = Result.from_values(
        origin=self,
        message='This is just a dummy result',
        severity=RESULT_SEVERITY.INFO,
        file=filename,
        aspect=aspect,
    )
    yield dummy
def test_bad_trailing_whitespace(self):
    """Trailing whitespace is reported at its exact position."""
    filename = 'test_bad_trailing_whitespace.styl'
    testfile_path = get_testfile_path(filename)
    expected = [Result.from_values('StylintBear',
                                   message='trailing whitespace',
                                   file=testfile_path,
                                   line=3,
                                   column=22,
                                   severity=RESULT_SEVERITY.NORMAL)]
    self.check_results(self.uut,
                       load_testfile(filename),
                       expected,
                       filename=testfile_path)
def test_print_results_for_file(self):
    """Results are rendered with the offending source line shown."""
    # Case 1: the result sits inside a three-line file.
    with retrieve_stdout() as stdout:
        print_results(
            self.log_printer,
            Section(""),
            [Result.from_values("SpaceConsistencyBear",
                                "Trailing whitespace found",
                                file="proj/white",
                                line=2)],
            {"proj/white": ["test line\n", "line 2\n", "line 3\n"]},
            {},
            color=False)
        self.assertEqual(
            """\nproj/white
| 2| line 2
| | [NORMAL] SpaceConsistencyBear:
| | Trailing whitespace found
""",
            stdout.getvalue())
    # Case 2: the result sits on the last line of a five-line file.
    with retrieve_stdout() as stdout:
        print_results(
            self.log_printer,
            Section(""),
            [Result.from_values("SpaceConsistencyBear",
                                "Trailing whitespace found",
                                file="proj/white",
                                line=5)],
            {"proj/white": ["test line\n", "line 2\n", "line 3\n",
                            "line 4\n", "line 5\n"]},
            {},
            color=False)
        self.assertEqual(
            """\nproj/white
| 5| line 5
| | [NORMAL] SpaceConsistencyBear:
| | Trailing whitespace found
""",
            stdout.getvalue())
def test_stdin_stderr_noconfig_nocorrection(self):
    """stdin-fed linter reading stderr only, no config, no corrections."""
    create_arguments_mock = Mock()

    class Handler:

        @staticmethod
        def create_arguments(filename, file, config_file):
            # Record the call so we can assert on the arguments later.
            create_arguments_mock(filename, file, config_file)
            return (self.test_program_path, "--use_stderr",
                    "--use_stdin", filename)

    uut = (linter(sys.executable,
                  use_stdin=True,
                  use_stdout=False,
                  use_stderr=True,
                  output_format="regex",
                  output_regex=self.test_program_regex,
                  severity_map=self.test_program_severity_map)
           (Handler)
           (self.section, None))
    results = list(uut.run(self.testfile2_path, self.testfile2_content))
    expected = [Result.from_values(uut, "Invalid char ('X')",
                                   self.testfile2_path, 0, 0, 0, 1,
                                   RESULT_SEVERITY.MAJOR),
                Result.from_values(uut, "Invalid char ('i')",
                                   self.testfile2_path, 4, 0, 4, 1,
                                   RESULT_SEVERITY.MAJOR)]
    self.assertEqual(results, expected)
    # create_arguments must be invoked exactly once, with no config file.
    create_arguments_mock.assert_called_once_with(
        self.testfile2_path, self.testfile2_content, None)