def test_get_filtered_bears(self):
    sys.argv = ['coala', '-I']
    with bear_test_module():
        local_bears, global_bears = get_filtered_bears(
            None, self.log_printer)
        self.assertEqual(len(local_bears['cli']), TEST_BEARS_COUNT)
        self.assertEqual(
            [str(bear) for bear in local_bears['cli']],
            TEST_BEAR_NAME_REPRS)
    with bear_test_module():
        local_bears, global_bears = get_filtered_bears(
            ['Java'], self.log_printer)
        local_bears['cli'] = _sort_bears(local_bears['cli'])
        global_bears['cli'] = _sort_bears(global_bears['cli'])
        self.assertEqual(len(local_bears['cli']), 3)
        self.assertEqual(
            str(local_bears['cli'][0]),
            "<class 'AspectsGeneralTestBear.AspectsGeneralTestBear'>")
        self.assertEqual(str(local_bears['cli'][1]),
                         "<class 'JavaTestBear.JavaTestBear'>")
        self.assertEqual(str(local_bears['cli'][2]),
                         "<class 'LineCountTestBear.LineCountTestBear'>")
        self.assertEqual(len(global_bears['cli']), 0)

def test_dependency_resolving(self): sections = {"test": self.section} self.section['bears'] = "DependentBear" with simulate_console_inputs("True"), bear_test_module(): fill_settings(sections, acquire_settings, self.log_printer) self.assertEqual(bool(self.section["use_spaces"]), True)
def test_show_bears_specified_in_args(self):
    with bear_test_module():
        retval, stdout, stderr = execute_coala(
            coala.main, 'coala', '-B',
            '--bears', 'JavaTestBear', '--no-color')
        self.assertEqual(retval, 0)
        self.assertEqual(stdout.strip(), 'JavaTestBear')

def test_section_ordering(self, debug=False):
    with bear_test_module():
        with prepare_file(['#include <a>'], None) as (lines, filename):
            retval, stdout, stderr = execute_coala(
                coala.main, 'coala', 'b', 'a', '--non-interactive',
                '-S', 'a.bears=SpaceConsistencyTestBear',
                'a.files={}'.format(filename),
                'a.use_spaces=True',
                'b.bears=SpaceConsistencyTestBear',
                'b.files={}'.format(filename),
                'b.use_spaces=True',
                '-c', os.devnull,
                debug=debug)
            stdout_list = stdout.splitlines(True)
            self.assertEqual('Executing section b...\n', stdout_list[0])
            self.assertEqual('Executing section a...\n', stdout_list[1])

            retval, stdout, stderr = execute_coala(
                coala.main, 'coala', 'a', 'b', '--non-interactive',
                '-S', 'a.bears=SpaceConsistencyTestBear',
                'a.files={}'.format(filename),
                'a.use_spaces=True',
                'b.bears=SpaceConsistencyTestBear',
                'b.files={}'.format(filename),
                'b.use_spaces=True',
                '-c', os.devnull,
                debug=debug)
            stdout_list = stdout.splitlines(True)
            self.assertEqual('Executing section a...\n', stdout_list[0])
            self.assertEqual('Executing section b...\n', stdout_list[1])

def test_run_coala_no_autoapply(self, debug=False):
    with bear_test_module():
        with prepare_file(['#fixme '], None) as (lines, filename):
            self.assertEqual(
                1,
                len(run_coala(
                    console_printer=ConsolePrinter(),
                    log_printer=LogPrinter(),
                    arg_list=(
                        '-c', os.devnull,
                        '-f', filename,
                        '-b', 'SpaceConsistencyTestBear',
                        '--apply-patches',
                        '-S', 'use_spaces=yeah'
                    ),
                    autoapply=False,
                    debug=debug
                )[0]['cli'])
            )

            self.assertEqual(
                0,
                len(run_coala(
                    console_printer=ConsolePrinter(),
                    log_printer=LogPrinter(),
                    arg_list=(
                        '-c', os.devnull,
                        '-f', filename,
                        '-b', 'SpaceConsistencyTestBear',
                        '--apply-patches',
                        '-S', 'use_spaces=yeah'
                    ),
                    debug=debug
                )[0]['cli'])
            )

def test_fail_acquire_settings(self):
    with bear_test_module():
        retval, output = execute_coala(coala.main, 'coala',
                                       '--non-interactive',
                                       '-b', 'SpaceConsistencyTestBear',
                                       '-c', os.devnull)
        self.assertIn('During execution, we found that some', output)

def test_filter_relevant_bears_gruntfile_present(self):
    # Reset the IMPORTANT_BEARS_LIST
    import coala_quickstart.generation.Bears as Bears
    Bears.IMPORTANT_BEARS_LIST = {
        "JavaScript": "DoesNotExistBear",
        "Python": "DoesNotExistAsWellBear"
    }
    sys.argv.append('--no-filter-by-capabilities')
    with bear_test_module():
        languages = [('JavaScript', 70), ('Python', 20)]
        res = {}
        with generate_files(["Gruntfile.js"],
                            [gruntfile],
                            self.project_dir) as gen_files:
            extracted_info = collect_info(self.project_dir)
            res = filter_relevant_bears(languages, self.printer,
                                        self.arg_parser, extracted_info)

        expected_results = {
            "JavaScript": set(["SomeLinterBear"]),
        }
        for lang, lang_bears in expected_results.items():
            for bear in lang_bears:
                res_bears = [b.name for b in res[lang]]
                self.assertIn(bear, res_bears)

def test_show_language_bears(self):
    with bear_test_module():
        retval, output = execute_coala(
            coala_json.main, 'coala-json', '-B', '-l', 'java')
        self.assertEqual(retval, 0)
        output = json.loads(output)
        self.assertEqual(len(output["bears"]), 2)

def test_caching_multi_results(self):
    """
    Integration test to assert that results are not dropped when coala
    is run multiple times with caching enabled and one section yields a
    result while the second one doesn't.
    """
    filename = 'tests/misc/test_caching_multi_results/'
    with bear_test_module():
        with simulate_console_inputs('0'):
            retval, stdout, stderr = execute_coala(
                coala.main,
                'coala',
                '-c', filename + '.coafile',
                '-f', filename + 'test.py')
            self.assertIn('This file has', stdout)
            self.assertIn(
                'Implicit \'Default\' section inheritance is deprecated',
                stderr)

        retval, stdout, stderr = execute_coala(
            coala.main,
            'coala',
            '-c', filename + '.coafile',
            '-f', filename + 'test.py')
        self.assertIn('This file has', stdout)
        self.assertIn('During execution of coala', stderr)
        self.assertIn(
            'Implicit \'Default\' section inheritance is deprecated',
            stderr)

def test_caching_multi_results(self):
    """
    Integration test to assert that results are not dropped when coala
    is run multiple times with caching enabled and one section yields a
    result while the second one doesn't.
    """
    filename = 'tests/misc/test_caching_multi_results/'
    with bear_test_module():
        with simulate_console_inputs('n'):
            retval, stdout, stderr = execute_coala(
                coala.main,
                'coala',
                '-c', filename + '.coafile',
                '-f', filename + 'test.py')
            self.assertIn('This file has', stdout)
            self.assertIn(
                'Implicit \'Default\' section inheritance is deprecated',
                stderr)

        retval, stdout, stderr = execute_coala(
            coala.main,
            'coala', '--non-interactive', '--no-color',
            '-c', filename + '.coafile',
            '-f', filename + 'test.py')
        self.assertIn('This file has', stdout)
        self.assertEqual(2, len(stderr.splitlines()))
        self.assertIn(
            'LineCountTestBear: This result has no patch attached.',
            stderr)
        self.assertIn(
            'Implicit \'Default\' section inheritance is deprecated',
            stderr)

def test_show_language_bears(self):
    with bear_test_module():
        retval, output = execute_coala(
            coala.main, 'coala', '-B', '-l', 'java')
        self.assertEqual(retval, 0)
        # 2 bears plus 1 line holding the closing colour escape sequence
        self.assertEqual(len(output.splitlines()), 3)

def test_filter_by_language_c(self):
    with bear_test_module():
        retval, stdout, stderr = execute_coala(
            coala.main, 'coala', '-B', '--filter-by', 'language', 'c')
        self.assertEqual(retval, 0)
        self.assertEqual(len(stdout.strip().splitlines()),
                         C_BEARS_COUNT_OUTPUT)

def test_file_cache_proxy_integration(self, debug=False):
    with bear_test_module():
        with prepare_file(['disk-copy\n'], None) as (_, filename):
            memory_data = 'in-memory\n'
            proxy = FileProxy(filename, None, memory_data)
            proxymap = FileProxyMap([proxy])
            self.cache.set_proxymap(proxymap)

            results, exitcode, file_dicts = run_coala(
                console_printer=ConsolePrinter(),
                log_printer=LogPrinter(),
                arg_list=(
                    '-c', os.devnull,
                    '-f', filename,
                    '-b', 'TestBear',
                ),
                autoapply=False,
                debug=debug,
                cache=self.cache
            )

            self.assertEqual(exitcode, 0)
            self.assertEqual(len(results), 1)

            # run_coala() output's name is always lower case
            self.assertEqual(file_dicts['cli'][filename.lower()],
                             (memory_data,))

def test_dependency_resolving(self):
    sections = {'test': self.section}
    self.section['bears'] = 'DependentBear'
    with simulate_console_inputs('True'), bear_test_module():
        fill_settings(sections, acquire_settings, self.log_printer)

    self.assertEqual(bool(self.section['use_spaces']), True)

def test_version_conflict_in_collecting_bears(self, import_fn, _):
    with bear_test_module():
        import_fn.side_effect = VersionConflict("msg1", "msg2")
        retval, output = execute_coala(coala.main, "coala", "-B")
        self.assertEqual(retval, 13)
        self.assertIn(("There is a conflict in the version of a "
                       "dependency you have installed"),
                      output)
        self.assertIn("pip install msg2", output)  # Check recommendation

def test_fill_settings_section_match_with_conflicts(self):
    self.section = Section('test1')
    self.section["files"] = "hello.py"
    sections = {'test1': self.section}
    self.section.append(Setting('bears', 'BearC'))
    with simulate_console_inputs("False") as generator, \
            bear_test_module(), retrieve_stdout() as sio:
        with generate_files([".editorconfig", "hello.py"],
                            [editorconfig_4, "pass"],
                            self.project_dir):
            extracted_info = collect_info(self.project_dir)
            local_bears, global_bears = fill_settings(
                sections,
                acquire_settings,
                self.log_printer,
                fill_section_method=fill_section,
                extracted_info=extracted_info)
            self.assertEqual(len(local_bears['test1']), 1)
            self.assertEqual(len(global_bears['test1']), 0)
            prompt_msg = (
                'coala-quickstart has detected multiple potential values '
                'for the setting "use_spaces"')
            self.assertIn(prompt_msg, sio.getvalue())
            self.assertEqual(generator.last_input, 0)
            self.assertEqual(bool(self.section['use_spaces']), False)

def test_filter_by_language_c(self):
    with bear_test_module():
        retval, stdout, stderr = execute_coala(
            coala.main, 'coala', '-B', '--filter-by', 'language', 'c')
        self.assertEqual(retval, 0)
        # 1 bear plus 1 line holding the closing colour escape sequence.
        self.assertEqual(len(stdout.strip().splitlines()), 2)

def test_show_capabilities_with_supported_language(self, debug=False):
    with bear_test_module():
        retval, stdout, stderr = execute_coala(
            coala.main, 'coala', '-p', 'R', '-I', debug=debug)
        self.assertEqual(retval, 0)
        self.assertEqual(len(stdout.splitlines()), 2)
        self.assertFalse(stderr)

def test_filter_relevant_bears_with_capabilities(self):
    # Clear the IMPORTANT_BEARS_LIST
    import coala_quickstart.generation.Bears as Bears
    Bears.IMPORTANT_BEARS_LIST = {}
    with bear_test_module():
        languages = []
        capability_to_select = 'Smell'
        cap_number = (
            sorted(ALL_CAPABILITIES).index(capability_to_select) + 1)
        res = []
        with simulate_console_inputs('1000', str(cap_number)) as generator:
            res = filter_relevant_bears(languages, self.printer,
                                        self.arg_parser, {})
            # 1000 was not a valid option, so there will be two prompts
            self.assertEqual(generator.last_input, 1)

        expected_results = {
            "All": set(["SmellCapabilityBear"])
        }
        for lang, lang_bears in expected_results.items():
            for bear in lang_bears:
                res_bears = [b.name for b in res[lang]]
                self.assertIn(bear, res_bears)

def test_get_all_bears_names(self):
    with bear_test_module():
        names = get_all_bears_names()
        assert isinstance(names, list)
        self.assertSetEqual(
            set(names),
            set(TEST_BEAR_NAMES))

def test_coala_no_unexpected_warnings(self):
    with bear_test_module():
        with prepare_file(['#fixme'], None) as (lines, filename):
            retval, stdout, stderr = execute_coala(
                coala.main, 'coala')

    errors = filter(bool, stderr.split('\n'))
    errors = list(errors)
    unexpected = errors.copy()

    expected = [
        err for err in unexpected
        if "Implicit 'Default' section inheritance" in err]
    self.assertNotEqual([], expected)
    # Filter them out
    unexpected = [err for err in unexpected if err not in expected]

    # These errors depend on the state of the host, so ignore them
    ignored = [
        err for err in unexpected
        if re.search("No bears matching '.*' were found", err)]
    # Filter them out
    unexpected = [err for err in unexpected if err not in ignored]

    self.assertEqual([], unexpected)

    self.assertEqual(
        retval, 0,
        'coala must return zero when there are no errors;'
        ' errors={errors}'.format(errors=list(errors)))

def test_filter_bears_ci_mode(self):
    sys.argv.append('--ci')
    with bear_test_module():
        languages = []
        res_1 = filter_relevant_bears(
            languages, self.printer, self.arg_parser, {})

        res_2 = []
        with generate_files(context_filenames,
                            context_file_contents,
                            self.project_dir):
            with simulate_console_inputs("Yes") as generator:
                extracted_info = collect_info(self.project_dir)
                res_2 = filter_relevant_bears(languages,
                                              self.printer,
                                              self.arg_parser,
                                              extracted_info)
                # Make sure there was no prompt
                self.assertEqual(generator.last_input, -1)

        # The NonOptionalSettingBear is not selected due to non-optional
        # setting value in non-interactive mode.
        additional_bears_by_lang = {
            "All": []
        }
        for lang in res_1:
            additional_bears = [bear.name for bear in res_2[lang]
                                if bear not in res_1[lang]]
            for bear in additional_bears_by_lang[lang]:
                self.assertIn(bear, additional_bears)

def test_fail_acquire_settings(self):
    with bear_test_module():
        retval, output = execute_coala(coala.main, 'coala', '--json',
                                       '-c', os.devnull,
                                       '-b', 'SpaceConsistencyTestBear')
        self.assertIn('During execution, we found that some', output,
                      'Missing settings not logged')

def test_show_all_bears(self):
    with bear_test_module():
        retval, stdout, stderr = execute_coala(
            coala.main, 'coala', '-B', '-I')
        self.assertEqual(retval, 0)
        # 6 bears plus 1 line holding the closing colour escape sequence.
        self.assertEqual(len(stdout.strip().splitlines()), 7)
        self.assertFalse(stderr)

def test_get_all_languages_without_unknown(self):
    with bear_test_module():
        languages = get_all_languages()
        assert isinstance(languages, tuple)
        self.assertEqual(len(languages), LANGUAGE_COUNT)
        self.assertSetEqual(
            {str(language) for language in languages},
            set(LANGUAGE_NAMES))

def test_show_bears_specified_in_args_regex(self):
    with bear_test_module():
        retval, stdout, stderr = execute_coala(
            coala.main, 'coala', '-B',
            '--bears', '*Java*', '*Space*', '--no-color')
        self.assertEqual(retval, 0)
        self.assertEqual(['JavaTestBear', 'SpaceConsistencyTestBear'],
                         [bear.strip() for bear in stdout.splitlines()])

def test_filter_by_can_detect_syntax(self):
    with bear_test_module():
        retval, stdout, stderr = execute_coala(
            coala.main, 'coala', '-B',
            '--filter-by', 'can_detect', 'syntax')
        self.assertEqual(retval, 0)
        # 2 bears plus 1 line holding the closing colour escape sequence.
        self.assertEqual(len(stdout.strip().splitlines()), 3)

def test_show_language_bears(self):
    with bear_test_module():
        retval, output = execute_coala(
            coala.main, 'coala', '--json', '-B', '-l', 'java',
            stdout_only=True)
        self.assertEqual(retval, 0)
        output = json.loads(output)
        self.assertEqual(len(output['bears']), 2)

def test_version_conflict_in_collecting_bears(self, import_fn, _):
    with bear_test_module():
        import_fn.side_effect = VersionConflict('msg1', 'msg2')
        retval, output = execute_coala(coala.main, 'coala', '-B')
        self.assertEqual(retval, 13)
        self.assertIn(('There is a conflict in the version of a '
                       'dependency you have installed'),
                      output)
        self.assertIn('pip install "msg2"', output)

def test_get_all_bears_names(self):
    with bear_test_module():
        self.assertSetEqual(
            set(get_all_bears_names()),
            {'EchoBear',
             'LineCountTestBear',
             'JavaTestBear',
             'SpaceConsistencyTestBear'})

def test_show_language_bears(self):
    with bear_test_module():
        retval, stdout, stderr = execute_coala(coala.main, 'coala',
                                               '--json', '-B',
                                               '-l', 'java', '-I')
        self.assertEqual(retval, 0)
        output = json.loads(stdout)
        self.assertEqual(len(output['bears']), 2)
        self.assertFalse(stderr)

def test_filter_by_unknown(self):
    with bear_test_module():
        retval, stdout, stderr = execute_coala(coala.main, 'coala', '-B',
                                               '--filter-by', 'unknown',
                                               'arg1')
        self.assertEqual(retval, 2)
        self.assertRaisesRegex(
            InvalidFilterException,
            '{!r} is an invalid filter. Available '
            'filters: {}'.format(filter, get_all_filters_str()))

def test_filter_by_unknown(self):
    with bear_test_module():
        retval, stdout, stderr = execute_coala(coala.main, 'coala', '-B',
                                               '--filter-by', 'unknown',
                                               'arg1')
        self.assertEqual(retval, 2)
        self.assertIn(
            "'unknown' is an invalid filter. Available "
            'filters: ' + ', '.join(sorted(available_filters)),
            stdout)

def test_show_all_bears(self, debug=False):
    with bear_test_module():
        retval, stdout, stderr = execute_coala(
            coala.main, 'coala', '-B', '-I', debug=debug)
        self.assertEqual(retval, 0)
        # All bears plus 1 line holding the closing colour escape sequence.
        self.assertEqual(len(stdout.strip().splitlines()),
                         TEST_BEARS_COUNT + 1)
        self.assertFalse(stderr)

def test_show_language_bears(self, debug=False):
    with bear_test_module():
        retval, stdout, stderr = execute_coala(
            coala.main, 'coala', '-B', '-l', 'java', '-I', debug=debug)
        self.assertEqual(retval, 0)
        self.assertEqual(len(stdout.splitlines()), JAVA_BEARS_COUNT_OUTPUT)
        self.assertIn(
            "'--filter-by-language ...' is deprecated", stderr)

def test_filter_by_can_fix_syntax(self):
    with bear_test_module():
        retval, stdout, stderr = execute_coala(
            coala.main, 'coala', '-B', '--filter-by', 'can_fix', 'syntax')
        self.assertEqual(retval, 0)
        # 1 line holding the closing colour escape sequence.
        self.assertEqual(len(stdout.strip().splitlines()), 1)
        self.assertIn('No bears to show.', stdout)

def test_version_conflict_in_collecting_bears(self, import_fn, _):
    with bear_test_module():
        import_fn.side_effect = VersionConflict('msg1', 'msg2')
        retval, stdout, stderr = execute_coala(coala.main, 'coala', '-B')
        self.assertEqual(retval, 13)
        self.assertIn(('There is a conflict in the version of a '
                       'dependency you have installed'),
                      stderr)
        self.assertIn('pip install "msg2"', stderr)
        self.assertFalse(stdout)

def test_coala(self):
    with bear_test_module(), \
            prepare_file(["#fixme"], None) as (lines, filename):
        retval, output = execute_coala(coala.main, "coala", "-c",
                                       os.devnull, "-f",
                                       re.escape(filename), "-b",
                                       "LineCountTestBear")
        self.assertIn("This file has 1 lines.", output,
                      "The output should report count as 1 lines")

def test_format_show_bears(self):
    with bear_test_module():
        retval, stdout, stderr = execute_coala(coala.main, 'coala', '-B',
                                               '--filter-by', 'language',
                                               'java', '-I', '--format')
        self.assertEqual(retval, 0)
        self.assertFalse(stderr)
        self.assertRegex(stdout,
                         'name:.*:can_detect:.*:can_fix:.*:'
                         'description:.*')

def test_filter_bylanguage_java_can_detect_syntax(self):
    with bear_test_module():
        retval, stdout, stderr = execute_coala(coala.main, 'coala', '-B',
                                               '--filter-by-language',
                                               'java', '--filter-by',
                                               'can_detect', 'formatting')
        self.assertEqual(retval, 0)
        # 1 bear plus 1 line holding the closing colour escape sequence.
        self.assertEqual(len(stdout.strip().splitlines()), 2)

def test_coala(self):
    with bear_test_module(), \
            prepare_file(['#fixme'], None) as (lines, filename):
        retval, output = execute_coala(coala.main, 'coala', '-c',
                                       os.devnull, '-f',
                                       re.escape(filename), '-b',
                                       'LineCountTestBear')
        self.assertIn('This file has 1 lines.', output,
                      'The output should report count as 1 lines')

def test_version_conflict_in_collecting_bears(self, import_fn):
    with bear_test_module():
        import_fn.side_effect = (
            lambda *args, **kwargs: raise_error(VersionConflict,
                                                "msg1", "msg2"))
        retval, output = execute_coala(coala.main, "coala", "-A")
        self.assertEqual(retval, 13)
        self.assertIn(("There is a conflict in the version of a "
                       "dependency you have installed"),
                      output)
        self.assertIn("pip install msg2", output)  # Check recommendation

def test_fail_acquire_settings(self):
    with bear_test_module():
        retval, stdout, stderr = execute_coala(coala.main, 'coala',
                                               '--non-interactive',
                                               '-b',
                                               'SpaceConsistencyTestBear',
                                               '-c', os.devnull)
        self.assertFalse(stdout)
        self.assertIn('During execution, we found that some', stderr)
        self.assertNotEqual(retval, 0,
                            'coala was expected to return non-zero')

def test_get_all_languages_with_unknown(self):
    with bear_test_module():
        languages = get_all_languages(include_unknown=True)
        language_names = LANGUAGE_NAMES.copy()
        language_names.append('Unknown')
        assert isinstance(languages, tuple)
        self.assertEqual(len(languages), LANGUAGE_COUNT + 1)
        self.assertSetEqual({str(language) for language in languages},
                            set(language_names))

def test_get_filtered_bears(self):
    sys.argv = ['coala', '-I']
    with bear_test_module():
        local_bears, global_bears = get_filtered_bears(
            None, self.log_printer)
        self.assertEqual(len(local_bears['cli']), 13)
    with bear_test_module():
        local_bears, global_bears = get_filtered_bears(['Java'],
                                                       self.log_printer)
        self.assertEqual(len(local_bears['cli']), 2)
        self.assertEqual(str(local_bears['cli'][0]),
                         "<class 'JavaTestBear.JavaTestBear'>")
        self.assertEqual(str(local_bears['cli'][1]),
                         "<class 'LineCountTestBear.LineCountTestBear'>")
        self.assertEqual(len(global_bears['cli']), 0)

def test_show_all_bears(self):
    with bear_test_module():
        retval, stdout, stderr = execute_coala(
            coala.main, 'coala', '--json', '-B', '-I')
        self.assertEqual(retval, 0)
        output = json.loads(stdout)
        self.assertEqual(len(output['bears']), TEST_BEARS_COUNT)
        self.assertFalse(stderr)
        self.assertEqual(output, {'bears': list(TEST_BEAR_NAMES)})

def test_get_all_bears_names(self):
    with bear_test_module():
        names = get_all_bears_names()
        assert isinstance(names, list)
        self.assertSetEqual(
            set(names),
            {
                'DependentBear',
                'EchoBear',
                'LineCountTestBear',
                'JavaTestBear',
                'SpaceConsistencyTestBear',
                'TestBear',
                'ErrorTestBear',
                'RaiseTestBear'
            })

def test_fail_acquire_settings(self):
    with bear_test_module():
        retval, stdout, stderr = execute_coala(coala.main, 'coala',
                                               '--json',
                                               '-c', os.devnull,
                                               '-b',
                                               'SpaceConsistencyTestBear')
        test_text = '{\n "results": {}\n}\n'
        self.assertEqual(stdout, test_text)
        self.assertIn('During execution, we found that some', stderr,
                      'Missing settings not logged')

def test_coala_ignore_file(self):
    with bear_test_module():
        with prepare_file(['#fixme'], None) as (lines, filename):
            retval, stdout, stderr = execute_coala(
                coala.main,
                'coala', '-c', os.devnull,
                '--non-interactive',
                '-f', filename,
                '--ignore', filename,
                '-b', 'LineCountTestBear')
            self.assertEqual(stdout, 'Executing section cli...\n')
            self.assertEqual(retval, 0,
                             'coala must return zero when '
                             'there are no errors')

def test_run_coala_bear_run_raises(self):
    configure_logging()
    with bear_test_module(), \
            prepare_file(['#fixme '], None) as (lines, filename), \
            self.assertRaisesRegex(
                RuntimeError,
                r"^That's all the RaiseTestBear can do\.$"):
        run_coala(console_printer=ConsolePrinter(),
                  log_printer=LogPrinter(),
                  arg_list=('-c', os.devnull,
                            '-f', re.escape(filename),
                            '-b', 'RaiseTestBear'),
                  debug=True)

def test_coala_with_color(self):
    with bear_test_module(), \
            prepare_file(['#fixme'], None) as (lines, filename):
        retval, stdout, stderr = execute_coala(coala.main, 'coala')

    errors = filter(bool, stderr.split('\n'))
    # Every error message must start with characters
    # used for coloring.
    for err in errors:
        self.assertNotRegex(err, r'^\[WARNING\]')
    self.assertEqual(
        retval, 0, 'coala must return zero when there are no errors')

def test_show_capabilities_with_supported_language(self, debug=False):
    with bear_test_module():
        retval, stdout, stderr = execute_coala(coala.main, 'coala',
                                               '-p', 'R', '-I',
                                               debug=debug)
        self.assertEqual(retval, 0)
        self.assertEqual(len(stdout.splitlines()), 2)
        self.assertFalse(stderr)

def test_filter_relevant_bears_with_non_optional_settings(self):
    sys.argv.append('--no-filter-by-capabilities')
    with bear_test_module():
        languages = []
        res_1 = filter_relevant_bears(
            languages, self.printer, self.arg_parser, {})

        # results with extracted information
        res_2 = []
        with generate_files(context_filenames,
                            context_file_contents,
                            self.project_dir):
            with simulate_console_inputs("Yes") as generator:
                extracted_info = collect_info(self.project_dir)
                res_2 = filter_relevant_bears(languages,
                                              self.printer,
                                              self.arg_parser,
                                              extracted_info)
                self.assertEqual(generator.last_input, 0)

        # Comparing both the scenarios
        additional_bears_by_lang = {
            "All": ["NonOptionalSettingBear"]
        }
        for lang in res_1:
            additional_bears = [bear.name for bear in res_2[lang]
                                if bear not in res_1[lang]]
            for bear in additional_bears_by_lang[lang]:
                self.assertIn(bear, additional_bears)

        # Simulating the situation when user rejects the bear
        res_2 = []
        with generate_files(context_filenames,
                            context_file_contents,
                            self.project_dir):
            with simulate_console_inputs(
                    "Some random text which will not be accepted",
                    "No") as generator:
                extracted_info = collect_info(self.project_dir)
                res_2 = filter_relevant_bears(languages,
                                              self.printer,
                                              self.arg_parser,
                                              extracted_info)
                self.assertEqual(generator.last_input, 1)

        # This time there will be no additional bears
        additional_bears_by_lang = {
            "All": []
        }
        for lang in res_1:
            additional_bears = [bear.name for bear in res_2[lang]
                                if bear not in res_1[lang]]
            for bear in additional_bears_by_lang[lang]:
                self.assertIn(bear, additional_bears)

def test_bears_no_filter_by_capability_mode(self):
    languages = []
    with bear_test_module():
        # Results without filtering
        sys.argv.append('--no-filter-by-capabilities')
        res = []
        with simulate_console_inputs() as generator:
            res = filter_relevant_bears(languages,
                                        self.printer,
                                        self.arg_parser,
                                        {})
            self.assertEqual(generator.last_input, -1)
        self.assertEqual(res, {"All": set()})

def test_find_issues(self):
    with bear_test_module(), \
            prepare_file(["#fixme"], None) as (lines, filename):
        retval, output = execute_coala(coala_ci.main, "coala-ci", "-c",
                                       os.devnull, "-b",
                                       "LineCountTestBear", "-f",
                                       re.escape(filename))
        self.assertIn("This file has 1 lines.", output,
                      "The output should report count as 1 lines")
        self.assertNotEqual(retval, 0,
                            "coala-ci was expected to return non-zero")

def test_find_no_issues(self):
    with bear_test_module(), \
            prepare_file(["#include <a>"], None) as (lines, filename):
        retval, output = execute_coala(coala_ci.main, "coala-ci",
                                       '-c', os.devnull,
                                       '-f', re.escape(filename),
                                       '-b', 'SpaceConsistencyTestBear',
                                       "--settings", "use_spaces=True")
        self.assertIn("Executing section Default", output)
        self.assertEqual(retval, 0,
                         "coala-ci must return zero when successful")

def test_show_language_bears(self):
    with bear_test_module():
        retval, output = execute_coala(coala_json.main, 'coala-json',
                                       '-B', '-l', 'java',
                                       stdout_only=True)
        self.assertEqual(retval, 0)
        output = json.loads(output)
        self.assertEqual(len(output["bears"]), 2)

def test_fail_acquire_settings(self):
    with bear_test_module():
        retval, output = execute_coala(coala_json.main, 'coala-json',
                                       '-c', os.devnull,
                                       '-b', 'SpaceConsistencyTestBear')
        output = json.loads(output)
        found = False
        for msg in output["logs"]:
            if "During execution, we found that some" in msg["message"]:
                found = True
        self.assertTrue(found, "Missing settings not logged")

def test_dependency_resolving(self):
    sections = {'test': self.section}
    self.section['bears'] = 'DependentBear'
    with simulate_console_inputs('True'), bear_test_module():
        fill_settings(sections,
                      acquire_settings,
                      self.log_printer,
                      fill_section_method=fill_section,
                      extracted_info={})

    self.assertEqual(bool(self.section['use_spaces']), True)

def test_show_all_bears(self):
    with bear_test_module():
        retval, output = execute_coala(coala.main, "coala", "-A")
        self.assertEqual(retval, 0)
        lines = output.splitlines()
        bear_lines = sum(1 for line in lines if line.startswith("==="))
        self.assertEqual(bear_lines, 4)
        for line in lines:
            self.assertNotIn("WARNING", line)