def test_filter_relevant_bears_with_non_optional_settings(self):
    """
    A bear with a non-optional setting is offered via a prompt:
    accepting it adds the bear to the selection, rejecting it
    (after one invalid input) leaves the selection unchanged.
    """
    sys.argv.append('--no-filter-by-capabilities')
    with bear_test_module():
        languages = []
        res_1 = filter_relevant_bears(
            languages, self.printer, self.arg_parser, {})
        # results with extracted information
        res_2 = []
        with generate_files(context_filenames,
                            context_file_contents,
                            self.project_dir):
            with simulate_console_inputs("Yes") as generator:
                extracted_info = collect_info(self.project_dir)
                res_2 = filter_relevant_bears(languages,
                                              self.printer,
                                              self.arg_parser,
                                              extracted_info)
                # exactly one prompt was answered ("Yes")
                self.assertEqual(generator.last_input, 0)
        # Comparing both the scenarios
        additional_bears_by_lang = {
            "All": ["NonOptionalSettingBear"]
        }
        for lang in res_1:
            additional_bears = [bear.name for bear in res_2[lang]
                                if bear not in res_1[lang]]
            for bear in additional_bears_by_lang[lang]:
                self.assertIn(bear, additional_bears)
        # Simulating the situation when user rejects the bear
        res_2 = []
        with generate_files(context_filenames,
                            context_file_contents,
                            self.project_dir):
            with simulate_console_inputs(
                    "Some random text which will not be accepted",
                    "No") as generator:
                extracted_info = collect_info(self.project_dir)
                res_2 = filter_relevant_bears(languages,
                                              self.printer,
                                              self.arg_parser,
                                              extracted_info)
                # the invalid answer was re-prompted, so two inputs used
                self.assertEqual(generator.last_input, 1)
        # This time there will be no additional bears
        additional_bears_by_lang = {
            "All": []
        }
        for lang in res_1:
            additional_bears = [bear.name for bear in res_2[lang]
                                if bear not in res_1[lang]]
            for bear in additional_bears_by_lang[lang]:
                self.assertIn(bear, additional_bears)
def test_multiple_information(self):
    """
    An extractor that yields several Info kinds per file stores them
    grouped by filename and then by Info-class name.
    """
    target_filenames = ['target_file_1', ]
    target_file_contents = ['Some content.']
    with generate_files(
            target_filenames,
            target_file_contents,
            self.current_dir) as gen_files:
        uut = self.DummyMultiInfoExtractor(
            ['target_file_**', 'another_target_file'],
            self.current_dir)
        extracted_info = uut.extract_information()
        self.assertEqual(len(extracted_info.keys()), len(target_filenames))
        # the return value and the ``information`` attribute must agree
        self.assertEqual(extracted_info, uut.information)
        for tf in target_filenames:
            # two Info kinds per file: DummyInfo (x2) and AnotherDummyInfo
            self.assertEqual(len(extracted_info[tf]), 2)
            self.assertEqual(len(extracted_info[tf]['DummyInfo']), 2)
            self.assertIsInstance(
                extracted_info[tf]['DummyInfo'][1], self.DummyInfo)
            self.assertEqual(len(extracted_info[tf]['AnotherDummyInfo']), 1)
            self.assertIsInstance(
                extracted_info[tf]['DummyInfo'][0], self.DummyInfo)
def test_collected_info(self):
    """
    ``collect_info`` aggregates results of all extractors over the
    project directory, keyed by Info-class name.
    """
    files_to_create = ["package.json", ".editorconfig", "Gemfile"]
    target_file_contents = [package_json, editorconfig, gemfile]
    with generate_files(
            files_to_create,
            target_file_contents,
            self.test_dir) as gen_files:
        collected_info = self.uut(self.test_dir)
        # (info_name, possible source files, expected instance count)
        expected_results = [
            ('TrailingWhitespaceInfo', ['.editorconfig'], 1),
            ('FinalNewlineInfo', ['.editorconfig'], 1),
            ('IndentStyleInfo', ['.editorconfig'], 2),
            ('IndentSizeInfo', ['.editorconfig'], 3),
            ('LineBreaksInfo', ['.editorconfig'], 1),
            ('CharsetInfo', ['.editorconfig'], 1),
            ('ProjectDependencyInfo', ['Gemfile', 'package.json'], 9),
            ('ManFilesInfo', ['package.json'], 1),
            ('LicenseUsedInfo', ['package.json'], 1),
            ('IncludePathsInfo', ['package.json'], 1)]
        self.assertEqual(len(collected_info.keys()), len(expected_results))
        for iname, isources, icount in expected_results:
            self.assertEqual(len(collected_info[iname]), icount)
            # normalize case so the comparison works on Windows too
            isources = [os.path.normcase(i) for i in isources]
            for info in collected_info[iname]:
                self.assertIn(info.source, isources)
def test_unsupported_files(self):
    """
    Target files not matching the extractor's supported globs raise
    ``ValueError``; matching and missing targets are handled cleanly.
    """
    target_filenames = ['tempfile1', '1tempfile', 'tmpfile_not_allowed']
    target_file_contents = ['Some content.', 'More content', 'Content']
    with generate_files(
            target_filenames,
            target_file_contents,
            self.current_dir) as gen_files:
        # assertRaisesRegexp is a deprecated alias (removed in Python
        # 3.12); use assertRaisesRegex. Raw strings avoid invalid
        # escape-sequence warnings for the regex metacharacters.
        # NOTE(review): "taraget" reproduces a typo in the production
        # error message — if that is ever corrected, fix both together.
        with self.assertRaisesRegex(
                ValueError,
                (r"The taraget file tmpfile_not_allowed does not match the "
                 r"supported file globs \('tempfile\*\*', '\*\*tempfile'\) "
                 r"of TempfileExtractor")):
            uut = self.TempfileExtractor(
                ['tempfile1', 'tmpfile_not_allowed'], self.current_dir)
            uut.extract_information()
        # a glob target matches both 'tempfile1' and '1tempfile'
        uut = self.TempfileExtractor(
            ['**tempfile**'], self.current_dir)
        extracted_info = uut.extract_information()
        self.assertEqual(len(extracted_info.keys()), 2)
        # a supported-but-absent target is simply skipped
        uut = self.TempfileExtractor(
            ['tempfile1', 'tempfile_not_present'], self.current_dir)
        extracted_info = uut.extract_information()
        self.assertEqual(len(extracted_info.keys()), 1)
def test_fill_settings_section_match_with_conflicts(self):
    """
    When extracted info offers conflicting values for a setting,
    the user is prompted and the chosen value ("False") is stored.
    """
    self.section = Section('test1')
    self.section["files"] = "hello.py"
    sections = {'test1': self.section}
    self.section.append(Setting('bears', 'BearC'))
    with simulate_console_inputs("False") as generator, \
            bear_test_module(), retrieve_stdout() as sio:
        with generate_files([".editorconfig", "hello.py"],
                            [editorconfig_4, "pass"],
                            self.project_dir):
            extracted_info = collect_info(self.project_dir)
            local_bears, global_bears = fill_settings(
                sections,
                acquire_settings,
                self.log_printer,
                fill_section_method=fill_section,
                extracted_info=extracted_info)
            self.assertEqual(len(local_bears['test1']), 1)
            self.assertEqual(len(global_bears['test1']), 0)
            # the conflict prompt must have been shown on stdout
            prompt_msg = (
                'coala-quickstart has detected multiple potential values '
                'for the setting "use_spaces"')
            self.assertIn(prompt_msg, sio.getvalue())
            # one console input ("False") was consumed
            self.assertEqual(generator.last_input, 0)
            self.assertEqual(bool(self.section['use_spaces']), False)
def test_multiple_target_globs(self):
    """
    Multiple target globs collect info from every matching file, and
    each Info instance records its source file and extractor.
    """
    target_filenames = [
        'target_file_1', 'target_file_2', 'another_target_file']
    target_file_contents = ['Some content.', 'Any content', 'More content']
    with generate_files(
            target_filenames,
            target_file_contents,
            self.current_dir) as gen_files:
        uut = self.DummyInfoExtractor(
            ['target_file_**', 'another_target_file'],
            self.current_dir)
        extracted_info = uut.extract_information()
        self.assertEqual(len(extracted_info.keys()), len(target_filenames))
        self.assertEqual(extracted_info, uut.information)
        for tf in target_filenames:
            self.assertEqual(len(extracted_info[tf]['DummyInfo']), 1)
            self.assertIsInstance(
                extracted_info[tf]['DummyInfo'][0], self.DummyInfo)
            self.assertEqual(
                extracted_info[tf]['DummyInfo'][0].source, tf)
            # test if the extractor field is added automatically
            self.assertIsInstance(
                extracted_info[tf]['DummyInfo'][0].extractor, InfoExtractor)
def test_filter_bears_ci_mode(self):
    """
    In ``--ci`` (non-interactive) mode no prompt is shown, so bears
    needing a non-optional setting value are never added.
    """
    sys.argv.append('--ci')
    with bear_test_module():
        languages = []
        res_1 = filter_relevant_bears(languages, self.printer,
                                      self.arg_parser, {})
        res_2 = []
        with generate_files(context_filenames,
                            context_file_contents,
                            self.project_dir):
            with simulate_console_inputs("Yes") as generator:
                extracted_info = collect_info(self.project_dir)
                res_2 = filter_relevant_bears(languages, self.printer,
                                              self.arg_parser,
                                              extracted_info)
                # Make sure there was no prompt
                self.assertEqual(generator.last_input, -1)
        # The NonOptionalSettingBear is not selected due to non-optional
        # setting value in non-interactive mode.
        additional_bears_by_lang = {"All": []}
        for lang in res_1:
            additional_bears = [
                bear.name for bear in res_2[lang]
                if bear not in res_1[lang]
            ]
            for bear in additional_bears_by_lang[lang]:
                self.assertIn(bear, additional_bears)
def test_extracted_information(self):
    """
    ``GemfileInfoExtractor`` captures gem name, version constraint and
    source URL for every ``gem`` declaration in the Gemfile fixture.
    """
    with generate_files(
            ["Gemfile"],
            [test_file],
            self.current_dir) as gen_file:
        self.uut = GemfileInfoExtractor(
            ["Gemfile"], self.current_dir)
        extracted_info = self.uut.extract_information()
        # keys are case-normalized paths (Windows compatibility)
        extracted_info = extracted_info[
            os.path.normcase("Gemfile")]
        information_types = extracted_info.keys()
        self.assertIn("ProjectDependencyInfo", information_types)
        dep_info = extracted_info["ProjectDependencyInfo"]
        self.assertEqual(len(dep_info), 9)
        # (gem name, version constraint) pairs declared in the fixture
        gems = [('some-gem', ''), ('puppet-lint', '2.1.1'),
                ('rubocop', '0.47.1'), ('scss_lint', ''),
                ('RedCloth', ''), ('rspec-rails', '>= 2.6.1'),
                ('rspec-rails', '~> 2.0.0'), ('ruby-debug19', ''),
                ('omniauth', '>= 0.2.6')]
        deps = [(d.value, d.version.value) for d in dep_info]
        # commented-out / unsupported declarations must be skipped
        self.assertNotIn(("not_to_consider", ""), deps)
        for gem in gems:
            self.assertIn(gem, deps)
        source_urls = [d.url for d in dep_info]
        self.assertIn("https://gems.example.com", source_urls)
def test_extracted_information(self):
    """
    ``PackageJSONInfoExtractor`` extracts license, dependencies (with
    versions), man pages and include paths from package.json.
    """
    with generate_files(["package.json"],
                        [test_file],
                        self.current_dir) as gen_file:
        self.uut = PackageJSONInfoExtractor(["package.json"],
                                            self.current_dir)
        extracted_information = self.uut.extract_information()
        extracted_information = extracted_information["package.json"]
        information_types = extracted_information.keys()
        self.assertIn("LicenseUsedInfo", information_types)
        license_info = extracted_information["LicenseUsedInfo"]
        self.assertEqual(len(license_info), 1)
        self.assertEqual(license_info[0].value, "MIT")
        self.assertIn("ProjectDependencyInfo", information_types)
        dep_info = extracted_information["ProjectDependencyInfo"]
        self.assertEqual(len(dep_info), 2)
        # order of the two dependencies is not guaranteed
        self.assertIn(dep_info[0].value, ["coffeelint", "ramllint"])
        self.assertIsInstance(dep_info[0].version, VersionInfo)
        self.assertIn(dep_info[0].version.value, ["~1", ">=1.2.2 <1.2.4"])
        self.assertIn("ManFilesInfo", information_types)
        man_paths_info = extracted_information["ManFilesInfo"]
        self.assertEqual(len(man_paths_info), 1)
        self.assertEqual(man_paths_info[0].value, ["./man/foo.1",
                                                   "./man/bar.1"])
        self.assertIn("IncludePathsInfo", information_types)
        include_paths_info = extracted_information["IncludePathsInfo"]
        self.assertEqual(len(include_paths_info), 1)
        self.assertEqual(include_paths_info[0].value, ["dist"])
def test_extracted_information(self):
    """
    ``GemfileInfoExtractor`` captures gem name, version constraint and
    source URL for every ``gem`` declaration in the Gemfile fixture.
    """
    with generate_files(["Gemfile"],
                        [test_file],
                        self.current_dir) as gen_file:
        self.uut = GemfileInfoExtractor(["Gemfile"],
                                        self.current_dir)
        extracted_info = self.uut.extract_information()
        # keys are case-normalized paths (Windows compatibility)
        extracted_info = extracted_info[os.path.normcase("Gemfile")]
        information_types = extracted_info.keys()
        self.assertIn("ProjectDependencyInfo", information_types)
        dep_info = extracted_info["ProjectDependencyInfo"]
        self.assertEqual(len(dep_info), 9)
        # (gem name, version constraint) pairs declared in the fixture
        gems = [('some-gem', ''), ('puppet-lint', '2.1.1'),
                ('rubocop', '0.47.1'), ('scss_lint', ''),
                ('RedCloth', ''), ('rspec-rails', '>= 2.6.1'),
                ('rspec-rails', '~> 2.0.0'), ('ruby-debug19', ''),
                ('omniauth', '>= 0.2.6')]
        deps = [(d.value, d.version.value) for d in dep_info]
        # commented-out / unsupported declarations must be skipped
        self.assertNotIn(("not_to_consider", ""), deps)
        for gem in gems:
            self.assertIn(gem, deps)
        source_urls = [d.url for d in dep_info]
        self.assertIn("https://gems.example.com", source_urls)
def test_fill_settings_section_match_no_conflicts(self):
    """
    A single unambiguous value from .editorconfig fills the setting
    automatically — no console prompt occurs.
    """
    self.section = Section('test')
    self.section["files"] = "*.py"
    sections = {'test': self.section}
    self.section.append(Setting('bears', 'BearC'))
    with simulate_console_inputs() as generator, bear_test_module():
        with generate_files([".editorconfig", "hello.py"],
                            [editorconfig_3, "pass"],
                            self.project_dir) as gen_files:
            extracted_info = collect_info(self.project_dir)
            local_bears, global_bears = fill_settings(
                sections,
                acquire_settings,
                self.log_printer,
                fill_section_method=fill_section,
                extracted_info=extracted_info)
            self.assertEqual(len(local_bears['test']), 1)
            self.assertEqual(len(global_bears['test']), 0)
            # The value for the setting is automatically taken
            # from .editorconfig file.
            self.assertEqual(generator.last_input, -1)
            self.assertEqual(bool(self.section['use_spaces']), True)
def test_filter_relevant_bears_gruntfile_present(self):
    """
    A Gruntfile's lint-task info makes the matching linter bear
    relevant even when the important-bears list yields nothing.
    """
    # Reset the IMPORTANT_BEARS_LIST
    # NOTE(review): values here are plain strings; elsewhere the list
    # maps languages to *lists* of bear names — confirm both shapes are
    # accepted by filter_relevant_bears.
    import coala_quickstart.generation.Bears as Bears
    Bears.IMPORTANT_BEARS_LIST = {
        "JavaScript": "DoesNotExistBear",
        "Python": "DoesNotExistAsWellBear"
    }
    sys.argv.append('--no-filter-by-capabilities')
    with bear_test_module():
        languages = [('JavaScript', 70), ('Python', 20)]
        res = {}
        with generate_files(["Gruntfile.js"],
                            [gruntfile],
                            self.project_dir) as gen_files:
            extracted_info = collect_info(self.project_dir)
            res = filter_relevant_bears(languages, self.printer,
                                        self.arg_parser, extracted_info)
        expected_results = {
            "JavaScript": set(["SomeLinterBear"]),
        }
        for lang, lang_bears in expected_results.items():
            for bear in lang_bears:
                res_bears = [b.name for b in res[lang]]
                self.assertIn(bear, res_bears)
def test_filter_bears_ci_mode(self):
    """
    In ``--ci`` (non-interactive) mode no prompt is shown, so bears
    needing a non-optional setting value are never added.
    """
    sys.argv.append('--ci')
    with bear_test_module():
        languages = []
        res_1 = filter_relevant_bears(
            languages, self.printer, self.arg_parser, {})
        res_2 = []
        with generate_files(context_filenames,
                            context_file_contents,
                            self.project_dir):
            with simulate_console_inputs("Yes") as generator:
                extracted_info = collect_info(self.project_dir)
                res_2 = filter_relevant_bears(languages,
                                              self.printer,
                                              self.arg_parser,
                                              extracted_info)
                # Make sure there was no prompt
                self.assertEqual(generator.last_input, -1)
        # The NonOptionalSettingBear is not selected due to non-optional
        # setting value in non-interactive mode.
        additional_bears_by_lang = {
            "All": []
        }
        for lang in res_1:
            additional_bears = [bear.name for bear in res_2[lang]
                                if bear not in res_1[lang]]
            for bear in additional_bears_by_lang[lang]:
                self.assertIn(bear, additional_bears)
def test_invalid_files(self):
    """An unparseable package.json yields no extracted information."""
    with generate_files(["package.json"],
                        [invalid_test_file],
                        self.current_dir) as gen_file:
        self.uut = PackageJSONInfoExtractor(["package.json"],
                                            self.current_dir)
        extracted_information = self.uut.extract_information()
        self.assertEqual(extracted_information, {})
def test_invalid_files(self):
    """An unparseable package.json yields no extracted information."""
    with generate_files(
            ["package.json"],
            [invalid_test_file],
            self.current_dir) as gen_file:
        self.uut = PackageJSONInfoExtractor(
            ["package.json"], self.current_dir)
        extracted_information = self.uut.extract_information()
        self.assertEqual(extracted_information, {})
def test_extracted_information(self):
    """
    Every (scope, value) configuration pair defined in the test
    '.editorconfig' fixture must be extracted, with no extras.
    """
    with generate_files([".editorconfig"],
                        [test_file],
                        self.current_dir) as gen_file:
        self.uut = EditorconfigInfoExtractor([".editorconfig"],
                                             self.current_dir)
        extracted_info = self.uut.extract_information()
        extracted_info = extracted_info[".editorconfig"]
        information_types = extracted_info.keys()
        # defined configurations in test '.editorconfig' file
        defined_indent_styles = [('*.py', 'space'),
                                 ('lib/**.js', 'space'),
                                 ('Makefile', 'tab'),
                                 ('{package.json,.travis.yml}', 'space')]
        defined_indent_sizes = [('*.{js,py}', 4),
                                ('lib/**.js', 2),
                                ('*.py', 4),
                                ('{package.json,.travis.yml}', 2)]
        defined_linebreak_types = [('*', 'lf')]
        defined_charsets = [('*.{js,py}', 'utf-8')]
        defined_final_newlines = [('*', True)]
        defined_trim_trailing_whitespaces = [('*.{js,py}', True)]

        def compare_extracted_with_defined_info(defined_info, info_name):
            # exact match: same pairs, same count
            self.assertIn(info_name, information_types)
            info_to_match = extracted_info[info_name]
            list_to_match = [(i.scope, i.value) for i in info_to_match]
            self.assertEqual(len(defined_info), len(list_to_match))
            for info in defined_info:
                self.assertIn(info, list_to_match)

        compare_extracted_with_defined_info(defined_indent_styles,
                                            "IndentStyleInfo")
        compare_extracted_with_defined_info(defined_indent_sizes,
                                            "IndentSizeInfo")
        compare_extracted_with_defined_info(defined_linebreak_types,
                                            "LineBreaksInfo")
        compare_extracted_with_defined_info(defined_charsets,
                                            "CharsetInfo")
        compare_extracted_with_defined_info(defined_final_newlines,
                                            "FinalNewlineInfo")
        compare_extracted_with_defined_info(
            defined_trim_trailing_whitespaces, "TrailingWhitespaceInfo")
def test_filemname_field(self):
    """
    ``parse_file`` receives a path that exists on disk at parse time.

    NOTE(review): "filemname" is a typo for "filename"; kept because
    renaming a test method changes test discovery / selection.
    """
    class TestInfoExtractor(InfoExtractor):
        def parse_file(self, fname, file_content):
            # the extractor must hand parse_file a real, existing path
            assert os.path.exists(fname) == 1

        def find_information(self, fname, parsed_file):
            return []

    target_filenames = ['target_file_1']
    target_file_contents = ['Some content.']
    uut = TestInfoExtractor(['target_file_**'], self.current_dir)
    with generate_files(target_filenames,
                        target_file_contents,
                        self.current_dir) as gen_file:
        uut.extract_information()
def test_supported_info_kinds(self):
    """
    An extractor returning an Info class outside its declared
    supported kinds must raise ``ValueError``.
    """
    target_filenames = [
        'target_file_1', ]
    target_file_contents = ['Some content.']
    with generate_files(target_filenames,
                        target_file_contents,
                        self.current_dir) as gen_files:
        uut = self.WrongSupportedInfoExtractor(['target_file_**'],
                                               self.current_dir)
        # assertRaisesRegexp is a deprecated alias (removed in
        # Python 3.12); use assertRaisesRegex.
        with self.assertRaisesRegex(
                ValueError,
                ("The class AnotherDummyInfo is not present in supported "
                 "information kinds of WrongSupportedInfoExtractor")):
            uut.extract_information()
def test_filter_relevant_bears_with_extracted_info(self):
    """
    Extracted project info (dependency files) adds relevant bears on
    top of the baseline per-language selection.
    """
    # results without extracted information
    languages = [('JavaScript', 70), ('Ruby', 20)]
    res_1 = filter_relevant_bears(
        languages, self.printer, self.arg_parser, {})
    for lang, _ in languages:
        self.assertIn(lang, res_1)
        self.assertTrue(len(res_1[lang]) > 0)
    # results with extracted information
    res_2 = []
    # FIXME: Create and use Test bears with
    # ``TestUtilites.bear_test_module`` instead of running
    # tests on actual bears in coala-bears package.
    with generate_files(context_filenames,
                        context_file_contents,
                        self.project_dir) as gen_files:
        extracted_info = collect_info(self.project_dir)
        res_2 = filter_relevant_bears(languages, self.printer,
                                      self.arg_parser, extracted_info)
        for lang, _ in languages:
            self.assertIn(lang, res_2)
            self.assertTrue(len(res_2[lang]) > 0)
    # Comparing both the scenarios
    # The following dict has list of bears that have their requirements
    # captured by `ProjectDependencyInfo` from the dependency files
    # but are not part of the `IMPORTANT_BEARS_LIST` in Constants.py
    additional_bears_by_lang = {
        "JavaScript": ["ESLintBear", "HappinessLintBear"],
        "Ruby": [],
        "All": []
    }
    for lang in res_1:
        additional_bears = [bear.name for bear in res_2[lang]
                            if bear not in res_1[lang]]
        for bear in additional_bears_by_lang[lang]:
            self.assertIn(bear, additional_bears)
def test_no_information_found(self):
    """
    An extractor that finds nothing produces an empty mapping — files
    with no info get no entry at all.
    """
    target_filenames = [
        'target_file_1', ]
    target_file_contents = ['Some content.']
    uut = self.NoInfoExtractor(['target_file_**', 'another_target_file'],
                               self.current_dir)
    with generate_files(target_filenames,
                        target_file_contents,
                        self.current_dir) as gen_files:
        extracted_info = uut.extract_information()
        self.assertEqual(len(extracted_info.keys()), 0)
        self.assertEqual(extracted_info, uut.information)
        for tf in target_filenames:
            self.assertIsNone(extracted_info.get(tf))
def test_supported_info_kinds(self):
    """
    An extractor returning an Info class outside its declared
    supported kinds must raise ``ValueError``.
    """
    target_filenames = ['target_file_1', ]
    target_file_contents = ['Some content.']
    with generate_files(
            target_filenames,
            target_file_contents,
            self.current_dir) as gen_files:
        uut = self.WrongSupportedInfoExtractor(
            ['target_file_**'], self.current_dir)
        # assertRaisesRegexp is a deprecated alias (removed in
        # Python 3.12); use assertRaisesRegex.
        with self.assertRaisesRegex(
                ValueError,
                ("The class AnotherDummyInfo is not present in supported "
                 "information kinds of WrongSupportedInfoExtractor")):
            uut.extract_information()
def test_no_information_found(self):
    """
    An extractor that finds nothing produces an empty mapping — files
    with no info get no entry at all.
    """
    target_filenames = ['target_file_1', ]
    target_file_contents = ['Some content.']
    uut = self.NoInfoExtractor(
        ['target_file_**', 'another_target_file'], self.current_dir)
    with generate_files(
            target_filenames,
            target_file_contents,
            self.current_dir) as gen_files:
        extracted_info = uut.extract_information()
        self.assertEqual(len(extracted_info.keys()), 0)
        self.assertEqual(extracted_info, uut.information)
        for tf in target_filenames:
            self.assertIsNone(extracted_info.get(tf))
def test_extracted_information(self):
    """
    ``PackageJSONInfoExtractor`` extracts license, dependencies (with
    versions), man pages and include paths from package.json.
    """
    with generate_files(
            ["package.json"],
            [test_file],
            self.current_dir) as gen_file:
        self.uut = PackageJSONInfoExtractor(
            ["package.json"], self.current_dir)
        extracted_information = self.uut.extract_information()
        extracted_information = extracted_information["package.json"]
        information_types = extracted_information.keys()
        self.assertIn("LicenseUsedInfo", information_types)
        license_info = extracted_information["LicenseUsedInfo"]
        self.assertEqual(len(license_info), 1)
        self.assertEqual(license_info[0].value, "MIT")
        self.assertIn("ProjectDependencyInfo", information_types)
        dep_info = extracted_information["ProjectDependencyInfo"]
        self.assertEqual(len(dep_info), 2)
        # order of the two dependencies is not guaranteed
        self.assertIn(dep_info[0].value, ["coffeelint", "ramllint"])
        self.assertIsInstance(dep_info[0].version, VersionInfo)
        self.assertIn(
            dep_info[0].version.value, ["~1", ">=1.2.2 <1.2.4"])
        self.assertIn("ManFilesInfo", information_types)
        man_paths_info = extracted_information["ManFilesInfo"]
        self.assertEqual(len(man_paths_info), 1)
        self.assertEqual(man_paths_info[0].value,
                         ["./man/foo.1", "./man/bar.1"])
        self.assertIn("IncludePathsInfo", information_types)
        include_paths_info = extracted_information["IncludePathsInfo"]
        self.assertEqual(len(include_paths_info), 1)
        self.assertEqual(include_paths_info[0].value, ["dist"])
def test_filemname_field(self):
    """
    ``parse_file`` receives a path that exists on disk at parse time.

    NOTE(review): "filemname" is a typo for "filename"; kept because
    renaming a test method changes test discovery / selection.
    """
    class TestInfoExtractor(InfoExtractor):
        def parse_file(self, fname, file_content):
            # the extractor must hand parse_file a real, existing path
            assert os.path.exists(fname) == 1

        def find_information(self, fname, parsed_file):
            return []

    target_filenames = ['target_file_1']
    target_file_contents = ['Some content.']
    uut = TestInfoExtractor(
        ['target_file_**'], self.current_dir)
    with generate_files(
            target_filenames,
            target_file_contents,
            self.current_dir) as gen_file:
        uut.extract_information()
def test_fill_settings_autofill(self):
    """
    A setting value present in .editorconfig is filled in without any
    console interaction.
    """
    self.section = Section('test')
    sections = {'test': self.section}
    self.section.append(Setting('bears', 'BearC'))
    with simulate_console_inputs() as generator, bear_test_module():
        with generate_files([".editorconfig"],
                            [editorconfig_1],
                            self.project_dir) as gen_files:
            extracted_info = collect_info(self.project_dir)
            local_bears, global_bears = fill_settings(
                sections,
                acquire_settings,
                self.log_printer,
                fill_section_method=fill_section,
                extracted_info=extracted_info)
            self.assertEqual(len(local_bears['test']), 1)
            self.assertEqual(len(global_bears['test']), 0)
            # The value for the setting is automatically taken
            # from .editorconfig file.
            self.assertEqual(generator.last_input, -1)
            self.assertEqual(bool(self.section['use_spaces']), False)
def test_extracted_information(self):
    """
    ``GruntfileInfoExtractor`` captures lint-task configuration
    (name, include/ignore paths, config) and all mentioned grunt
    plugin tasks from the Gruntfile fixture.
    """
    with generate_files(
            ["Gruntfile.js"],
            [test_file],
            self.current_dir) as gen_file:
        self.uut = GruntfileInfoExtractor(
            ["Gruntfile.js"], self.current_dir)
        extracted_info = self.uut.extract_information()
        extracted_info = extracted_info[os.path.normcase("Gruntfile.js")]
        information_types = extracted_info.keys()
        csslint_include_paths = ['css/**/*.css']
        jshint_include_paths = [
            '*.js', 'src/*.js', 'rules/**/*.js', 'test/**/*.js']
        jshint_config = {
            'options': {
                'jshintrc': 'true'
            },
            'all': ['*.js', 'src/*.js', 'rules/**/*.js', 'test/**/*.js']
        }
        jscs_config = {
            'fix': {
                'options': {
                    'fix': 'true'
                },
                'src': '<%= jshint.all %>'
            },
            'main': {
                'src': '<%= jshint.all %>'
            }
        }
        csslint_config = {
            'all': ['css/**/*.css']
        }
        some_lint_task_config = {
            'ignore': ['foo/**.bar']
        }
        # Linter information contained in test 'Gruntfile.js' file
        # in the form of tuple
        # (linter_name, include_paths, ignore_paths, config)
        # NOTE(review): the tuples actually have 5 elements — index 3 is
        # always None and index 4 holds the config, so the
        # ``if info[3] is not None`` branch below never fires and the
        # configs are never compared. Confirm whether info[4] was meant.
        defined_info = [
            ("csslint", csslint_include_paths, None, None, csslint_config),
            ("jshint", jshint_include_paths, None, None, jshint_config),
            ("jscs", None, None, None, jscs_config),
            ("some_lint_task", None, ['foo/**.bar'], None,
             some_lint_task_config)]
        info_name = "LintTaskInfo"
        self.assertIn(info_name, information_types)
        info_to_match = extracted_info[info_name]
        names_to_match = [i.value for i in info_to_match]
        include_paths_to_match = [i.include_paths.value
                                  if i.include_paths else None
                                  for i in info_to_match]
        ignore_paths_to_match = [i.ignore_paths.value
                                 if i.ignore_paths else None
                                 for i in info_to_match]
        config_to_match = [i.config for i in info_to_match]
        self.assertEqual(len(defined_info), len(names_to_match))
        for info in defined_info:
            self.assertIn(info[0], names_to_match)
            self.assertIn(info[1], include_paths_to_match)
            self.assertIn(info[2], ignore_paths_to_match)
            if info[3] is not None:
                self.assertIn(info[3], config_to_match)
        tasks_used = [
            'grunt-contrib-concat', 'grunt-contrib-copy',
            'grunt-contrib-csslint', 'grunt-contrib-cssmin',
            'grunt-contrib-jshint', 'grunt-contrib-qunit',
            'grunt-contrib-uglify', 'grunt-contrib-watch', 'grunt-jscs']
        info_name = "MentionedTasksInfo"
        self.assertIn(info_name, information_types)
        info_to_match = extracted_info[info_name]
        tasks_lists = [t.value for t in info_to_match]
        tasks_to_match = []
        for tasks in tasks_lists:
            tasks_to_match += tasks
        for task in tasks_used:
            self.assertIn(task, tasks_to_match)
def test_extracted_information(self):
    """
    Section-glob regexes derived from '.editorconfig' match the right
    filenames, and every defined configuration pair is extracted.
    """
    with generate_files(
            [".editorconfig"],
            [test_file],
            self.current_dir) as gen_file:
        self.uut = EditorconfigInfoExtractor(
            [".editorconfig"], self.current_dir)
        extracted_info = self.uut.extract_information()
        extracted_info = extracted_info[".editorconfig"]
        information_types = extracted_info.keys()
        # For each section glob: filenames that should / should not
        # match the regex the extractor derives from it.
        test_filenames = {
            '*': {
                'valid': ['hello.js', 'hello.py'],
                'invalid': []
            },
            '*.{js,py}': {
                'valid': ['hello.py', 'hello.js'],
                'invalid': ['hello.c']
            },
            '*.py': {
                'valid': ['some_file.py', 'hello.py'],
                'invalid': ['some.js', 'py']
            },
            'Makefile': {
                'valid': ['Makefile'],
                'invalid': ['NotAMakeFile']
            },
            'lib/**.js': {
                'valid': ['lib/foo.js'],
                'invalid': ['lib/foo', 'foo.js']
            },
            '{package.json,.travis.yml}': {
                'valid': ['package.json', '.travis.yml'],
                'invalid': ['someting_else']
            }
        }
        # defined configurations in test '.editorconfig' file
        defined_indent_styles = [
            ('*.py', 'space'),
            ('lib/**.js', 'space'),
            ('Makefile', 'tab'),
            ('{package.json,.travis.yml}', 'space')]
        defined_indent_sizes = [
            ('*.{js,py}', 4),
            ('lib/**.js', 2),
            ('*.py', 4),
            ('{package.json,.travis.yml}', 2)]
        defined_linebreak_types = [('*', 'lf')]
        defined_charsets = [('*.{js,py}', 'utf-8')]
        defined_final_newlines = [('*', True)]
        defined_trim_trailing_whitespaces = [('*.{js,py}', True)]
        for info_name, info in extracted_info.items():
            for i in info:
                # assertRegexpMatches is a deprecated alias (removed in
                # Python 3.12); use assertRegex / assertIsNone.
                for fname in test_filenames[i.container_section]["valid"]:
                    self.assertRegex(fname, i.scope[0])
                for fname in test_filenames[i.container_section]["invalid"]:
                    self.assertIsNone(re.match(i.scope[0], fname))

        def compare_extracted_with_defined_info(defined_info, info_name):
            # exact match: same (section, value) pairs, same count
            self.assertIn(info_name, information_types)
            info_to_match = extracted_info[info_name]
            list_to_match = [(i.container_section, i.value)
                             for i in info_to_match]
            self.assertEqual(len(defined_info), len(list_to_match))
            for info in defined_info:
                self.assertIn(info, list_to_match)

        compare_extracted_with_defined_info(
            defined_indent_styles, "IndentStyleInfo")
        compare_extracted_with_defined_info(
            defined_indent_sizes, "IndentSizeInfo")
        compare_extracted_with_defined_info(
            defined_linebreak_types, "LineBreaksInfo")
        compare_extracted_with_defined_info(
            defined_charsets, "CharsetInfo")
        compare_extracted_with_defined_info(
            defined_final_newlines, "FinalNewlineInfo")
        compare_extracted_with_defined_info(
            defined_trim_trailing_whitespaces, "TrailingWhitespaceInfo")
def test_extracted_information(self):
    """
    Section-glob regexes derived from '.editorconfig' match the right
    filenames, and every defined configuration pair is extracted.
    """
    with generate_files([".editorconfig"],
                        [test_file],
                        self.current_dir) as gen_file:
        self.uut = EditorconfigInfoExtractor([".editorconfig"],
                                             self.current_dir)
        extracted_info = self.uut.extract_information()
        extracted_info = extracted_info[".editorconfig"]
        information_types = extracted_info.keys()
        # For each section glob: filenames that should / should not
        # match the regex the extractor derives from it.
        test_filenames = {
            '*': {
                'valid': ['hello.js', 'hello.py'],
                'invalid': []
            },
            '*.{js,py}': {
                'valid': ['hello.py', 'hello.js'],
                'invalid': ['hello.c']
            },
            '*.py': {
                'valid': ['some_file.py', 'hello.py'],
                'invalid': ['some.js', 'py']
            },
            'Makefile': {
                'valid': ['Makefile'],
                'invalid': ['NotAMakeFile']
            },
            'lib/**.js': {
                'valid': ['lib/foo.js'],
                'invalid': ['lib/foo', 'foo.js']
            },
            '{package.json,.travis.yml}': {
                'valid': ['package.json', '.travis.yml'],
                'invalid': ['someting_else']
            }
        }
        # defined configurations in test '.editorconfig' file
        defined_indent_styles = [('*.py', 'space'),
                                 ('lib/**.js', 'space'),
                                 ('Makefile', 'tab'),
                                 ('{package.json,.travis.yml}', 'space')]
        defined_indent_sizes = [('*.{js,py}', 4),
                                ('lib/**.js', 2),
                                ('*.py', 4),
                                ('{package.json,.travis.yml}', 2)]
        defined_linebreak_types = [('*', 'lf')]
        defined_charsets = [('*.{js,py}', 'utf-8')]
        defined_final_newlines = [('*', True)]
        defined_trim_trailing_whitespaces = [('*.{js,py}', True)]
        for info_name, info in extracted_info.items():
            for i in info:
                # assertRegexpMatches is a deprecated alias (removed in
                # Python 3.12); use assertRegex / assertIsNone.
                for fname in test_filenames[i.container_section]["valid"]:
                    self.assertRegex(fname, i.scope[0])
                for fname in test_filenames[
                        i.container_section]["invalid"]:
                    self.assertIsNone(re.match(i.scope[0], fname))

        def compare_extracted_with_defined_info(defined_info, info_name):
            # exact match: same (section, value) pairs, same count
            self.assertIn(info_name, information_types)
            info_to_match = extracted_info[info_name]
            list_to_match = [(i.container_section, i.value)
                             for i in info_to_match]
            self.assertEqual(len(defined_info), len(list_to_match))
            for info in defined_info:
                self.assertIn(info, list_to_match)

        compare_extracted_with_defined_info(defined_indent_styles,
                                            "IndentStyleInfo")
        compare_extracted_with_defined_info(defined_indent_sizes,
                                            "IndentSizeInfo")
        compare_extracted_with_defined_info(defined_linebreak_types,
                                            "LineBreaksInfo")
        compare_extracted_with_defined_info(defined_charsets,
                                            "CharsetInfo")
        compare_extracted_with_defined_info(defined_final_newlines,
                                            "FinalNewlineInfo")
        compare_extracted_with_defined_info(
            defined_trim_trailing_whitespaces, "TrailingWhitespaceInfo")