def test_caching_multi_results(self):
    """
    Integration test to assert that results are not dropped when coala
    is ran multiple times with caching enabled and one section yields a
    result and second one doesn't.
    """
    filename = 'tests/misc/test_caching_multi_results/'
    with bear_test_module():
        with simulate_console_inputs('0'):
            # First run: populates the cache.
            retval, stdout, stderr = execute_coala(
                coala.main, 'coala',
                '-c', filename + '.coafile',
                '-f', filename + 'test.py')
            self.assertIn('This file has', stdout)
            self.assertIn(
                'Implicit \'Default\' section inheritance is deprecated',
                stderr)
            # Second (cached) run must still report the same result.
            retval, stdout, stderr = execute_coala(
                coala.main, 'coala',
                '-c', filename + '.coafile',
                '-f', filename + 'test.py')
            self.assertIn('This file has', stdout)
            self.assertIn('During execution of coala', stderr)
            self.assertIn(
                'Implicit \'Default\' section inheritance is deprecated',
                stderr)
def test_caching_multi_results(self):
    """
    Integration test to assert that results are not dropped when coala
    is ran multiple times with caching enabled and one section yields a
    result and second one doesn't.
    """
    filename = 'tests/misc/test_caching_multi_results/'
    with bear_test_module():
        with simulate_console_inputs('n'):
            # First run: interactive, seeds the cache.
            retval, stdout, stderr = execute_coala(
                coala.main, 'coala',
                '-c', filename + '.coafile',
                '-f', filename + 'test.py')
            self.assertIn('This file has', stdout)
            self.assertIn(
                'Implicit \'Default\' section inheritance is deprecated',
                stderr)
            # Second (cached) run, non-interactive: the result must still
            # be emitted and stderr restricted to the two expected lines.
            retval, stdout, stderr = execute_coala(
                coala.main, 'coala',
                '--non-interactive', '--no-color',
                '-c', filename + '.coafile',
                '-f', filename + 'test.py')
            self.assertIn('This file has', stdout)
            self.assertEqual(2, len(stderr.splitlines()))
            self.assertIn(
                'LineCountTestBear: This result has no patch attached.',
                stderr)
            self.assertIn(
                'Implicit \'Default\' section inheritance is deprecated',
                stderr)
def test_section_ordering(self, debug=False):
    """
    Sections must execute in the order they are given on the command
    line: ``coala b a`` runs section ``b`` first, ``coala a b`` runs
    section ``a`` first.
    """
    with bear_test_module():
        with prepare_file(['#include <a>'], None) as (lines, filename):
            retval, stdout, stderr = execute_coala(
                coala.main, 'coala', 'b', 'a', '--non-interactive',
                '-S', 'a.bears=SpaceConsistencyTestBear',
                'a.files={}'.format(filename),
                'a.use_spaces=True',
                'b.bears=SpaceConsistencyTestBear',
                'b.files={}'.format(filename),
                'b.use_spaces=True',
                '-c', os.devnull,
                debug=debug)
            stdout_list = stdout.splitlines(True)
            self.assertEqual('Executing section b...\n', stdout_list[0])
            self.assertEqual('Executing section a...\n', stdout_list[1])

            # Reversed order on the command line reverses execution order.
            retval, stdout, stderr = execute_coala(
                coala.main, 'coala', 'a', 'b', '--non-interactive',
                '-S', 'a.bears=SpaceConsistencyTestBear',
                'a.files={}'.format(filename),
                'a.use_spaces=True',
                'b.bears=SpaceConsistencyTestBear',
                'b.files={}'.format(filename),
                'b.use_spaces=True',
                '-c', os.devnull,
                debug=debug)
            stdout_list = stdout.splitlines(True)
            self.assertEqual('Executing section a...\n', stdout_list[0])
            self.assertEqual('Executing section b...\n', stdout_list[1])
def test_did_nothing(self):
    """coala exits with 2 and a helpful hint when there is no work to do."""
    # No files and no bears given at all.
    exit_code, text = execute_coala(coala.main, 'coala', '-I', '-S',
                                    'default.enabled=false')
    self.assertEqual(exit_code, 2)
    self.assertIn('Did you forget to give the `--files`', text)

    # Bears and a file glob given, but the section is disabled.
    exit_code, text = execute_coala(coala.main, 'coala', '-I',
                                    '-b', 'JavaTestBear',
                                    '-f', '*.java',
                                    '-S', 'default.enabled=false')
    self.assertEqual(exit_code, 2)
    self.assertIn('Nothing to do.', text)
def test_coala_main_bear_run_raises(self):
    """In debug mode an exception raised while a bear runs propagates."""
    with bear_test_module(), \
            prepare_file(['#fixme '], None) as (lines, filename), \
            self.assertRaisesRegex(
                RuntimeError,
                r"^That's all the RaiseTestBear can do\.$"):
        execute_coala(
            coala.main, 'coala', '-c', os.devnull,
            '-f', filename, '-b', 'RaiseTestBear', debug=True)
def test_coala_delete_orig(self):
    """``.orig`` backup files are removed after a run; other files stay."""
    with TemporaryDirectory() as tempdir, \
            NamedTemporaryFile(suffix='.orig',
                               dir=tempdir,
                               delete=False) as orig_file, \
            make_temp(suffix='.coafile', prefix='',
                      dir=tempdir) as coafile, \
            make_temp(dir=tempdir) as unrelated_file:
        # Close so coala can delete it on platforms with exclusive opens.
        orig_file.close()
        execute_coala(coala_ci.main, "coala-ci", "-c", re.escape(coafile))
        self.assertFalse(os.path.isfile(orig_file.name))
        self.assertTrue(os.path.isfile(unrelated_file))
def test_coala_main_bear__init__raises(self):
    """In debug mode an exception from a bear's ``__init__`` propagates."""
    with bear_test_module(), \
            prepare_file(['#fixme '], None) as (lines, filename), \
            self.assertRaisesRegex(
                RuntimeError,
                r'^The bear ErrorTestBear does not fulfill all '
                r"requirements\. 'I_do_not_exist' is not installed\.$"):
        execute_coala(
            coala.main, 'coala', '-c', os.devnull,
            '-f', filename, '-b', 'ErrorTestBear', debug=True)
def test_coala_main_mode_json_raises(self, mocked_mode_json):
    """In debug mode a failure inside ``mode_json`` propagates."""
    mocked_mode_json.side_effect = RuntimeError('Mocked mode_json fails.')
    with bear_test_module(), \
            prepare_file(['#fixme '], None) as (lines, filename), \
            self.assertRaisesRegex(RuntimeError,
                                   r'^Mocked mode_json fails\.$'):
        # additionally use RaiseTestBear to verify independency from
        # failing bears
        execute_coala(
            coala.main, 'coala', '--json', '-c', os.devnull,
            '-f', filename, '-b', 'RaiseTestBear', debug=True)
def test_caching_results(self):
    """
    A simple integration test to assert that results are not dropped
    when coala is ran multiple times with caching enabled.
    """
    with bear_test_module():
        with prepare_file(['a=(5,6)'], None) as (lines, filename):
            with simulate_console_inputs('n'):
                retval, stdout, stderr = execute_coala(
                    coala.main, 'coala', '-c', os.devnull,
                    '--disable-caching', '--flush-cache',
                    '-f', filename, '-b', 'LineCountTestBear',
                    '-L', 'DEBUG')
                self.assertIn('This file has', stdout)
                self.assertIn('Running bear LineCountTestBear', stderr)

            # Due to the change in configuration from the removal of
            # ``--flush-cache`` this run will not be sufficient to
            # assert this behavior.
            retval, stdout, stderr = execute_coala(
                coala.main, 'coala', '--non-interactive', '--no-color',
                '-c', os.devnull, '-f', filename,
                '-b', 'LineCountTestBear')
            self.assertIn('This file has', stdout)
            self.assertEqual(1, len(stderr.splitlines()))
            self.assertIn(
                'LineCountTestBear: This result has no patch attached.',
                stderr)

            # Third run: results must survive a cached, unchanged config.
            retval, stdout, stderr = execute_coala(
                coala.main, 'coala', '--non-interactive', '--no-color',
                '-c', os.devnull, '-f', filename,
                '-b', 'LineCountTestBear')
            self.assertIn('This file has', stdout)
            self.assertEqual(1, len(stderr.splitlines()))
            self.assertIn(
                'LineCountTestBear: This result has no patch attached.',
                stderr)
def test_filter_by_language_c(self):
    """``--filter-by language c`` lists exactly one bear."""
    with bear_test_module():
        exit_code, out, _err = execute_coala(
            coala.main, 'coala', '-B', '--filter-by', 'language', 'c')
        self.assertEqual(exit_code, 0)
        # 1 bear plus 1 line holding the closing colour escape sequence.
        self.assertEqual(len(out.strip().splitlines()), 2)
def test_nonexistent(self):
    """A missing coafile is reported in the JSON log output."""
    exit_code, raw = execute_coala(
        coala_json.main, "coala-json", "-c", 'nonex', "test")
    parsed = json.loads(raw)
    self.assertRegex(
        parsed["logs"][0]["message"],
        "The requested coafile '.*' does not exist. .+")
def test_version_conflict_in_collecting_bears(self, import_fn, _):
    """A dependency ``VersionConflict`` while collecting bears exits 13."""
    with bear_test_module():
        import_fn.side_effect = VersionConflict("msg1", "msg2")
        exit_code, text = execute_coala(coala.main, "coala", "-B")
        self.assertEqual(exit_code, 13)
        self.assertIn("There is a conflict in the version of a "
                      "dependency you have installed", text)
        # Check recommendation
        self.assertIn("pip install msg2", text)
def test_fail_acquire_settings(self):
    """Missing required settings are reported in non-interactive mode."""
    with bear_test_module():
        exit_code, text = execute_coala(
            coala.main, 'coala', '--non-interactive',
            '-b', 'SpaceConsistencyTestBear', '-c', os.devnull)
        self.assertIn('During execution, we found that some', text)
def test_fail_acquire_settings(self):
    """Missing required settings are logged in ``--json`` mode."""
    with bear_test_module():
        exit_code, text = execute_coala(
            coala.main, 'coala', '--json', '-c', os.devnull,
            '-b', 'SpaceConsistencyTestBear')
        self.assertIn('During execution, we found that some', text,
                      'Missing settings not logged')
def test_show_language_bears(self):
    """``-B -l java`` lists the two java test bears."""
    with bear_test_module():
        exit_code, text = execute_coala(
            coala.main, 'coala', '-B', '-l', 'java')
        self.assertEqual(exit_code, 0)
        # 2 bears plus 1 line holding the closing colour escape sequence
        self.assertEqual(len(text.splitlines()), 3)
def test_filter_by_language_c(self):
    """``--filter-by language c`` output matches the expected bear count."""
    with bear_test_module():
        exit_code, out, _err = execute_coala(
            coala.main, 'coala', '-B', '--filter-by', 'language', 'c')
        self.assertEqual(exit_code, 0)
        self.assertEqual(len(out.strip().splitlines()),
                         C_BEARS_COUNT_OUTPUT)
def test_show_bears_specified_in_args(self):
    """Only the explicitly requested bear is printed."""
    with bear_test_module():
        exit_code, out, _err = execute_coala(
            coala.main, 'coala', '-B',
            '--bears', 'JavaTestBear', '--no-color')
        self.assertEqual(exit_code, 0)
        self.assertEqual(out.strip(), 'JavaTestBear')
def test_did_nothing(self, debug=False): retval, stdout, stderr = execute_coala(coala.main, 'coala', '-I', '-S', 'cli.enabled=false', debug=debug) self.assertEqual(retval, 2) self.assertIn('Did you forget to give the `--files`', stderr) self.assertFalse(stdout) retval, stdout, stderr = execute_coala(coala.main, 'coala', '-I', '-b', 'JavaTestBear', '-f', '*.java', '-S', 'cli.enabled=false', debug=debug) self.assertEqual(retval, 2) self.assertIn('Nothing to do.', stderr) self.assertFalse(stdout)
def test_log(self, debug=False):
    """``coala-ci --help`` succeeds but warns that it is deprecated."""
    exit_code, out, err = execute_coala(
        coala_ci.main, 'coala-ci', '--help', debug=debug)
    self.assertIn('usage: coala', out)
    self.assertIn('Use of `coala-ci` executable is deprecated', err)
    self.assertEqual(exit_code, 0,
                     'coala must return zero when successful')
def test_nonexistent(self):
    """A missing coafile produces an [ERROR] line and empty stdout."""
    exit_code, out, err = execute_coala(
        coala.main, 'coala', '--non-interactive', '-c', 'nonex', 'test')
    self.assertFalse(out)
    self.assertRegex(
        err,
        ".*\\[ERROR\\].*The requested coafile '.*' does not exist. .+\n")
def test_bear_run_raises(self, mocked_mode_json):
    """``--debug`` launches ipdb when a bear raises during its run."""
    mocked_mode_json.side_effect = None
    mocked_ipdb = self.ipdbMock()
    with bear_test_module(), \
            prepare_file(['#fixme '], None) as (lines, filename), \
            self.pipReqIsInstalledMock(), \
            patch.dict('sys.modules', ipdb=mocked_ipdb), \
            self.assertRaisesRegex(
                RuntimeError,
                r"^That's all the RaiseTestBear can do\.$"):
        execute_coala(
            coala.main, 'coala', '--debug', '-c', os.devnull,
            '-f', re.escape(filename), '-b', 'RaiseTestBear')
    # The debugger hook must have been entered exactly once.
    mocked_ipdb.launch_ipdb_on_exception.assert_called_once_with()
def test_show_language_bears(self):
    """``coala-json -B -l java`` reports both java bears."""
    with bear_test_module():
        exit_code, raw = execute_coala(
            coala_json.main, 'coala-json', '-B', '-l', 'java')
        self.assertEqual(exit_code, 0)
        parsed = json.loads(raw)
        self.assertEqual(len(parsed["bears"]), 2)
def test_coala_no_unexpected_warnings(self):
    """A plain run emits only the known, host-independent warnings."""
    with bear_test_module():
        with prepare_file(['#fixme'], None) as (lines, filename):
            retval, stdout, stderr = execute_coala(
                coala.main, 'coala')
            errors = filter(bool, stderr.split('\n'))
            errors = list(errors)
            unexpected = errors.copy()

            # The deprecation notice is expected and must be present.
            expected = [
                err for err in unexpected
                if "Implicit 'Default' section inheritance" in err]
            self.assertNotEqual([], expected)
            # Filter them out
            unexpected = [err for err in unexpected
                          if err not in expected]

            # These errors depend on the state of the host, so ignore them
            ignored = [
                err for err in unexpected
                if re.search("No bears matching '.*' were found", err)]
            # Filter them out
            unexpected = [err for err in unexpected
                          if err not in ignored]

            self.assertEqual([], unexpected)
            self.assertEqual(
                retval, 0,
                'coala must return zero when there are no errors;'
                ' errors={errors}'.format(errors=list(errors)))
def test_show_capabilities_with_supported_language(self, debug=False):
    """``-p R`` prints the two capability lines and no errors."""
    with bear_test_module():
        exit_code, out, err = execute_coala(
            coala.main, 'coala', '-p', 'R', '-I', debug=debug)
        self.assertEqual(exit_code, 0)
        self.assertEqual(len(out.splitlines()), 2)
        self.assertFalse(err)
def test_unimportable_bear(self, import_fn):
    """Unimportable bears only warn; they never abort a ``-A`` listing."""
    with bear_test_module():
        import_fn.side_effect = SyntaxError
        retval, output = execute_coala(coala.main, "coala", "-A")
        self.assertEqual(retval, 0)
        self.assertIn("Unable to collect bears from", output)

        import_fn.side_effect = VersionConflict("msg1", "msg2")
        retval, output = execute_coala(coala.main, "coala", "-A")
        # Note that bear version conflicts don't give exitcode=13,
        # they just give a warning with traceback in log_level debug.
        self.assertEqual(retval, 0)
        self.assertRegex(
            output,
            "Unable to collect bears from .* because there "
            "is a conflict with the version of a dependency "
            "you have installed")
        self.assertIn("pip install msg2", output)  # Check recommendation
def test_show_language_bears(self):
    """``--json -B -l java`` reports both java bears via stdout only."""
    with bear_test_module():
        exit_code, raw = execute_coala(
            coala.main, 'coala', '--json', '-B', '-l', 'java',
            stdout_only=True)
        self.assertEqual(exit_code, 0)
        parsed = json.loads(raw)
        self.assertEqual(len(parsed['bears']), 2)
def test_unimportable_bear(self, import_fn):
    """Unimportable bears only warn; they never abort a ``-B`` listing."""
    with bear_test_module():
        import_fn.side_effect = SyntaxError
        retval, output = execute_coala(coala.main, 'coala', '-B')
        self.assertEqual(retval, 0)
        self.assertIn('Unable to collect bears from', output)

        import_fn.side_effect = VersionConflict('msg1', 'msg2')
        retval, output = execute_coala(coala.main, 'coala', '-B')
        # Note that bear version conflicts don't give exitcode=13,
        # they just give a warning with traceback in log_level debug.
        self.assertEqual(retval, 0)
        self.assertRegex(
            output,
            'Unable to collect bears from .* because there '
            'is a conflict with the version of a dependency '
            'you have installed')
        # Check recommendation.
        self.assertIn('pip install "msg2"', output)
def test_nonexistent(self):
    """A missing coafile in ``--json`` mode yields an empty result set."""
    exit_code, out, err = execute_coala(
        coala.main, 'coala', '--json', '-c', 'nonex', 'test')
    self.assertEqual(out, '{\n "results": {}\n}\n')
    self.assertRegex(err, ".*Requested coafile '.*' does not exist")
    self.assertNotEqual(exit_code, 0,
                        'coala must return nonzero when errors occured')
def test_version_conflict_in_collecting_bears(self, import_fn, _):
    """A dependency ``VersionConflict`` while collecting bears exits 13."""
    with bear_test_module():
        import_fn.side_effect = VersionConflict('msg1', 'msg2')
        exit_code, text = execute_coala(coala.main, 'coala', '-B')
        self.assertEqual(exit_code, 13)
        self.assertIn('There is a conflict in the version of a '
                      'dependency you have installed', text)
        # Check recommendation.
        self.assertIn('pip install "msg2"', text)
def test_show_bears_specified_in_args_regex(self):
    """``--bears`` globs select all bears matching any pattern."""
    with bear_test_module():
        exit_code, out, _err = execute_coala(
            coala.main, 'coala', '-B',
            '--bears', '*Java*', '*Space*', '--no-color')
        self.assertEqual(exit_code, 0)
        listed = [line.strip() for line in out.splitlines()]
        self.assertEqual(['JavaTestBear', 'SpaceConsistencyTestBear'],
                         listed)
def test_filter_by_unknown(self):
    """An unknown ``--filter-by`` name exits 2 and lists valid filters."""
    with bear_test_module():
        exit_code, out, _err = execute_coala(
            coala.main, 'coala', '-B',
            '--filter-by', 'unknown', 'arg1')
        self.assertEqual(exit_code, 2)
        self.assertIn(
            "'unknown' is an invalid filter. Available "
            'filters: ' + ', '.join(sorted(available_filters)),
            out)
def test_show_all_bears(self, debug=False):
    """``-B -I`` lists every test bear."""
    with bear_test_module():
        exit_code, out, err = execute_coala(
            coala.main, 'coala', '-B', '-I', debug=debug)
        self.assertEqual(exit_code, 0)
        # All bears plus 1 line holding the closing colour escape sequence.
        self.assertEqual(TEST_BEARS_COUNT + 1,
                         len(out.strip().splitlines()))
        self.assertFalse(err)
def test_show_language_bears(self):
    """``--json -B -l java -I`` reports both java bears and no errors."""
    with bear_test_module():
        exit_code, out, err = execute_coala(
            coala.main, 'coala', '--json', '-B', '-l', 'java', '-I')
        self.assertEqual(exit_code, 0)
        parsed = json.loads(out)
        self.assertEqual(len(parsed['bears']), 2)
        self.assertFalse(err)
def test_bear__init__raises(self, mocked_mode_json):
    """``--debug`` launches ipdb when a bear fails its requirements."""
    mocked_mode_json.side_effect = None
    mocked_ipdb = self.ipdbMock()
    with bear_test_module(), \
            prepare_file(['#fixme '], None) as (lines, filename), \
            self.pipReqIsInstalledMock(), \
            patch.dict('sys.modules', ipdb=mocked_ipdb), \
            self.assertRaisesRegex(
                RuntimeError,
                r'^The bear ErrorTestBear does not fulfill all '
                r"requirements\. 'I_do_not_exist' is not installed\.$"):
        execute_coala(
            coala.main, 'coala', '--debug', '-c', os.devnull,
            '-f', filename, '-b', 'ErrorTestBear')
    # The debugger hook must have been entered exactly once.
    mocked_ipdb.launch_ipdb_on_exception.assert_called_once_with()
def test_show_all_bears(self):
    """``--json -B -I`` lists exactly the known test bears."""
    with bear_test_module():
        exit_code, out, err = execute_coala(
            coala.main, 'coala', '--json', '-B', '-I')
        self.assertEqual(exit_code, 0)
        parsed = json.loads(out)
        self.assertEqual(len(parsed['bears']), TEST_BEARS_COUNT)
        self.assertFalse(err)
        self.assertEqual(parsed, {'bears': list(TEST_BEAR_NAMES)})
def test_text_logs(self):
    """A missing coafile in ``--json`` mode logs an [ERROR] on stderr."""
    exit_code, out, err = execute_coala(coala.main, 'coala', '--json',
                                        '-c', 'nonex')
    self.assertRegex(
        err, ".*\\[ERROR\\].*Requested coafile '.*' does not exist")
    self.assertEqual(out, '{\n "results": {}\n}\n')
    self.assertNotEqual(exit_code, 0,
                        'coala must return nonzero when errors occured')
def test_filter_by_can_fix_syntax(self):
    """No test bear can fix syntax, so the listing is empty."""
    with bear_test_module():
        exit_code, out, _err = execute_coala(
            coala.main, 'coala', '-B',
            '--filter-by', 'can_fix', 'syntax')
        self.assertEqual(exit_code, 0)
        # 1 line holding the closing colour escape sequence.
        self.assertEqual(len(out.strip().splitlines()), 1)
        self.assertIn('No bears to show.', out)
def test_coala_main_mode_json_launches_ipdb(self, mocked_mode_json):
    """``--debug --json`` launches ipdb when ``mode_json`` fails."""
    mocked_mode_json.side_effect = RuntimeError('Mocked mode_json fails.')
    mocked_ipdb = self.ipdbMock()
    with bear_test_module(), \
            prepare_file(['#fixme '], None) as (lines, filename), \
            self.pipReqIsInstalledMock(), \
            patch.dict('sys.modules', ipdb=mocked_ipdb), \
            self.assertRaisesRegex(RuntimeError,
                                   r'^Mocked mode_json fails\.$'):
        # additionally use RaiseTestBear to verify independency from
        # failing bears
        execute_coala(
            coala.main, 'coala', '--debug', '--json', '-c', os.devnull,
            '-f', filename, '-b', 'RaiseTestBear')
    # The debugger hook must have been entered exactly once.
    mocked_ipdb.launch_ipdb_on_exception.assert_called_once_with()
def test_format_show_bears(self):
    """``--format`` prints bears in the machine-readable field format."""
    with bear_test_module():
        exit_code, out, err = execute_coala(
            coala.main, 'coala', '-B',
            '--filter-by', 'language', 'java', '-I', '--format')
        self.assertEqual(exit_code, 0)
        self.assertFalse(err)
        self.assertRegex(
            out, 'name:.*:can_detect:.*:can_fix:.*:description:.*')
def test_coala(self):
    """LineCountTestBear reports the line count of a one-line file."""
    with bear_test_module(), \
            prepare_file(["#fixme"], None) as (lines, filename):
        exit_code, text = execute_coala(coala.main, "coala",
                                        "-c", os.devnull,
                                        "-f", re.escape(filename),
                                        "-b", "LineCountTestBear")
        self.assertIn("This file has 1 lines.", text,
                      "The output should report count as 1 lines")
def test_log(self, debug=False):
    """``coala-ci --help`` succeeds but warns that it is deprecated."""
    fake_argv = ['coala-ci', '--help', '--non-interactive']
    with mock.patch.object(sys, 'argv', fake_argv):
        exit_code, out, err = execute_coala(
            coala_ci.main, 'coala-ci', '--help', debug=debug)
        self.assertIn('usage: coala', out)
        self.assertIn('Use of `coala-ci` executable is deprecated', err)
        self.assertEqual(exit_code, 0,
                         'coala must return zero when successful')
def test_filter_by_unknown(self):
    """
    An unknown ``--filter-by`` name must abort with exit code 2 and
    print the list of available filters.
    """
    with bear_test_module():
        retval, stdout, stderr = execute_coala(coala.main, 'coala', '-B',
                                               '--filter-by', 'unknown',
                                               'arg1')
        self.assertEqual(retval, 2)
        # The previous version called assertRaisesRegex without a context
        # manager or callable, which asserted nothing at all, and passed
        # the *builtin* ``filter`` to ``format`` instead of the rejected
        # name. The exception is handled inside coala (hence retval == 2),
        # so assert the emitted message directly.
        self.assertIn(
            '{!r} is an invalid filter. Available '
            'filters: {}'.format('unknown', get_all_filters_str()),
            stdout)
def test_version_conflict_in_collecting_bears(self, import_fn, _):
    """A ``VersionConflict`` exits 13 with the hint on stderr only."""
    with bear_test_module():
        import_fn.side_effect = VersionConflict('msg1', 'msg2')
        exit_code, out, err = execute_coala(coala.main, 'coala', '-B')
        self.assertEqual(exit_code, 13)
        self.assertIn('There is a conflict in the version of a '
                      'dependency you have installed', err)
        # Check recommendation.
        self.assertIn('pip install "msg2"', err)
        self.assertFalse(out)
def test_nonexistent(self, debug=False):
    """A missing coafile produces an [ERROR] line and empty stdout."""
    exit_code, out, err = execute_coala(
        coala.main, 'coala', '--non-interactive', '-c', 'nonex', 'test')
    self.assertFalse(out)
    self.assertRegex(
        err, ".*\\[ERROR\\].*Requested coafile '.*' does not exist")
    self.assertNotEqual(exit_code, 0,
                        'coala must return nonzero when errors occured')
def test_show_language_bears(self, debug=False):
    """``-l java`` works but warns that the flag form is deprecated."""
    with bear_test_module():
        exit_code, out, err = execute_coala(
            coala.main, 'coala', '-B', '-l', 'java', '-I', debug=debug)
        self.assertEqual(exit_code, 0)
        self.assertEqual(len(out.splitlines()), JAVA_BEARS_COUNT_OUTPUT)
        self.assertIn("'--filter-by-language ...' is deprecated", err)
def test_caching_multi_results(self):
    """
    Integration test to assert that results are not dropped when coala
    is ran multiple times with caching enabled and one section yields a
    result and second one doesn't.
    """
    filename = 'tests/misc/test_caching_multi_results/'
    with bear_test_module():
        with simulate_console_inputs('0'):
            # First run: populates the cache.
            retval, output = execute_coala(coala.main, 'coala',
                                           '-c', filename + '.coafile',
                                           '-f', filename + 'test.py')
            self.assertIn('This file has', output)
            # Second (cached) run must still report the result.
            retval, output = execute_coala(coala.main, 'coala',
                                           '-c', filename + '.coafile',
                                           '-f', filename + 'test.py')
            self.assertIn('This file has', output)
def test_coala(self):
    """LineCountTestBear reports the line count of a one-line file."""
    with bear_test_module(), \
            prepare_file(['#fixme'], None) as (lines, filename):
        exit_code, text = execute_coala(coala.main, 'coala',
                                        '-c', os.devnull,
                                        '-f', re.escape(filename),
                                        '-b', 'LineCountTestBear')
        self.assertIn('This file has 1 lines.', text,
                      'The output should report count as 1 lines')
def test_filter_bylanguage_java_can_detect_syntax(self):
    """Combining language and can_detect filters leaves one bear."""
    with bear_test_module():
        exit_code, out, _err = execute_coala(
            coala.main, 'coala', '-B',
            '--filter-by-language', 'java',
            '--filter-by', 'can_detect', 'formatting')
        self.assertEqual(exit_code, 0)
        # 1 bear plus 1 line holding the closing colour escape sequence.
        self.assertEqual(len(out.strip().splitlines()), 2)
def test_fail_acquire_settings(self):
    """In ``--json`` mode missing settings yield an empty result set."""
    with bear_test_module():
        exit_code, out, err = execute_coala(
            coala.main, 'coala', '--json', '-c', os.devnull,
            '-b', 'SpaceConsistencyTestBear')
        self.assertEqual(out, '{\n "results": {}\n}\n')
        self.assertIn('During execution, we found that some', err,
                      'Missing settings not logged')
def test_version_conflict_in_collecting_bears(self, import_fn):
    """A dependency ``VersionConflict`` while collecting bears exits 13."""
    with bear_test_module():
        # Raise lazily from inside the import hook, whatever the args.
        import_fn.side_effect = (
            lambda *args, **kwargs: raise_error(VersionConflict,
                                                "msg1", "msg2"))
        retval, output = execute_coala(coala.main, "coala", "-A")
        self.assertEqual(retval, 13)
        self.assertIn(("There is a conflict in the version of a "
                       "dependency you have installed"), output)
        self.assertIn("pip install msg2", output)  # Check recommendation
def test_fail_acquire_settings(self):
    """Missing settings fail the non-interactive run with empty stdout."""
    with bear_test_module():
        exit_code, out, err = execute_coala(
            coala.main, 'coala', '--non-interactive',
            '-b', 'SpaceConsistencyTestBear', '-c', os.devnull)
        self.assertFalse(out)
        self.assertIn('During execution, we found that some', err)
        self.assertNotEqual(exit_code, 0,
                            'coala was expected to return non-zero')
def test_unimportable_bear(self, import_fn):
    """Unimportable bears only warn on stderr; the listing still runs."""
    with bear_test_module():
        import_fn.side_effect = SyntaxError
        retval, stdout, stderr = execute_coala(coala.main, 'coala', '-B')
        self.assertEqual(retval, 0)
        self.assertIn('Unable to collect bears from', stderr)
        self.assertIn('No bears to show.', stdout)

        import_fn.side_effect = VersionConflict('msg1', 'msg2')
        retval, stdout, stderr = execute_coala(coala.main, 'coala', '-B')
        # Note that bear version conflicts don't give exitcode=13,
        # they just give a warning with traceback in log_level debug.
        self.assertEqual(retval, 0)
        self.assertRegex(
            stderr,
            'Unable to collect bears from .* because there '
            'is a conflict with the version of a dependency '
            'you have installed')
        # Check recommendation.
        self.assertIn('pip3 install "msg2"', stderr)
        self.assertIn('No bears to show.', stdout)
def test_tagging(self):
    """Results can be saved under a tag and deleted again via ``dtag``."""
    with bear_test_module(), \
            prepare_file(["\t#include <a>"], None) as (lines, filename):
        log_printer = LogPrinter(NullPrinter())
        # Run with tag=...: stores results under the tag path.
        execute_coala(coala_ci.main, "coala-ci", "default",
                      "-c", self.coafile,
                      "-f", re.escape(filename),
                      "-b", "SpaceConsistencyTestBear",
                      "-S", "tag=test_tag")
        tag_path = get_tag_path("test_tag",
                                self.unescaped_coafile,
                                log_printer)
        self.assertTrue(os.path.exists(tag_path))
        # Run with dtag=...: deletes the stored tag again.
        execute_coala(coala_ci.main, "coala-ci", "default",
                      "-c", self.coafile,
                      "-f", re.escape(filename),
                      "-b", "SpaceConsistencyTestBear",
                      "-S", "dtag=test_tag")
        self.assertFalse(os.path.exists(tag_path))
def test_coala_main_mode_json_raises(self, mocked_mode_json):
    """In debug mode a failure inside ``mode_json`` propagates."""
    mocked_mode_json.side_effect = RuntimeError('Mocked mode_json fails.')
    with bear_test_module():
        with prepare_file(['#fixme '], None) as (lines, filename):
            with self.assertRaisesRegex(RuntimeError,
                                        r'^Mocked mode_json fails\.$'):
                # additionally use RaiseTestBear to verify independency
                # from failing bears
                execute_coala(coala.main, 'coala', '--json',
                              '-c', os.devnull,
                              '-f', filename,
                              '-b', 'RaiseTestBear',
                              debug=True)
def test_section_ordering(self, debug=False):
    """
    Sections must execute in the order they are given on the command
    line: ``coala b a`` runs section ``b`` first, ``coala a b`` runs
    section ``a`` first.
    """
    with bear_test_module(), \
            prepare_file(['#include <a>'], None) as (lines, filename):
        retval, stdout, stderr = execute_coala(
            coala.main, 'coala', 'b', 'a', '--non-interactive',
            '-S', 'a.bears=SpaceConsistencyTestBear',
            'a.files={}'.format(filename),
            'a.use_spaces=True',
            'b.bears=SpaceConsistencyTestBear',
            'b.files={}'.format(filename),
            'b.use_spaces=True',
            '-c', os.devnull,
            debug=debug)
        stdout_list = stdout.splitlines(True)
        self.assertEqual('Executing section b...\n', stdout_list[0])
        self.assertEqual('Executing section a...\n', stdout_list[1])

        # Reversed order on the command line reverses execution order.
        retval, stdout, stderr = execute_coala(
            coala.main, 'coala', 'a', 'b', '--non-interactive',
            '-S', 'a.bears=SpaceConsistencyTestBear',
            'a.files={}'.format(filename),
            'a.use_spaces=True',
            'b.bears=SpaceConsistencyTestBear',
            'b.files={}'.format(filename),
            'b.use_spaces=True',
            '-c', os.devnull,
            debug=debug)
        stdout_list = stdout.splitlines(True)
        self.assertEqual('Executing section a...\n', stdout_list[0])
        self.assertEqual('Executing section b...\n', stdout_list[1])
def test_caching_results(self):
    """
    A simple integration test to assert that results are not dropped
    when coala is ran multiple times with caching enabled.
    """
    with bear_test_module(), \
            prepare_file(['a=(5,6)'], None) as (lines, filename):
        with simulate_console_inputs('n'):
            retval, stdout, stderr = execute_coala(
                coala.main, 'coala', '-c', os.devnull,
                '--disable-caching', '--flush-cache',
                '-f', re.escape(filename),
                '-b', 'LineCountTestBear', '-L', 'DEBUG')
            self.assertIn('This file has', stdout)
            self.assertIn('Running bear LineCountTestBear', stderr)

        # Due to the change in configuration from the removal of
        # ``--flush-cache`` this run will not be sufficient to
        # assert this behavior.
        retval, stdout, stderr = execute_coala(coala.main, 'coala',
                                               '--non-interactive',
                                               '--no-color',
                                               '-c', os.devnull,
                                               '-f', re.escape(filename),
                                               '-b', 'LineCountTestBear')
        self.assertIn('This file has', stdout)
        self.assertEqual(1, len(stderr.splitlines()))
        self.assertIn(
            'LineCountTestBear: This result has no patch attached.',
            stderr)

        # Third run: results must survive a cached, unchanged config.
        retval, stdout, stderr = execute_coala(coala.main, 'coala',
                                               '--non-interactive',
                                               '--no-color',
                                               '-c', os.devnull,
                                               '-f', re.escape(filename),
                                               '-b', 'LineCountTestBear')
        self.assertIn('This file has', stdout)
        self.assertEqual(1, len(stderr.splitlines()))
        self.assertIn(
            'LineCountTestBear: This result has no patch attached.',
            stderr)
def test_coala_with_color(self):
    """Colored output never starts an error line with plain [WARNING]."""
    with bear_test_module(), \
            prepare_file(['#fixme'], None) as (lines, filename):
        exit_code, out, err = execute_coala(coala.main, 'coala')
        # Every error message must start with characters
        # used for coloring.
        for message in filter(bool, err.split('\n')):
            self.assertNotRegex(message, r'^\[WARNING\]')
        self.assertEqual(
            exit_code, 0,
            'coala must return zero when there are no errors')
def test_show_capabilities_with_supported_language(self, debug=False):
    """``-p R`` prints the two capability lines and no errors."""
    with bear_test_module():
        exit_code, out, err = execute_coala(
            coala.main, 'coala', '-p', 'R', '-I', debug=debug)
        self.assertEqual(exit_code, 0)
        self.assertEqual(len(out.splitlines()), 2)
        self.assertFalse(err)
def test_find_issues(self):
    """coala-ci reports the issue and exits non-zero."""
    with bear_test_module(), \
            prepare_file(["#fixme"], None) as (lines, filename):
        exit_code, text = execute_coala(coala_ci.main, "coala-ci",
                                        "-c", os.devnull,
                                        "-b", "LineCountTestBear",
                                        "-f", re.escape(filename))
        self.assertIn("This file has 1 lines.", text,
                      "The output should report count as 1 lines")
        self.assertNotEqual(exit_code, 0,
                            "coala-ci was expected to return non-zero")
def test_find_no_issues(self):
    """coala-ci exits zero when the file is already consistent."""
    with bear_test_module(), \
            prepare_file(["#include <a>"], None) as (lines, filename):
        exit_code, text = execute_coala(coala_ci.main, "coala-ci",
                                        '-c', os.devnull,
                                        '-f', re.escape(filename),
                                        '-b', 'SpaceConsistencyTestBear',
                                        "--settings", "use_spaces=True")
        self.assertIn("Executing section Default", text)
        self.assertEqual(exit_code, 0,
                         "coala-ci must return zero when successful")