def python_resolver(name, reference, find_tests):
    module_path, tests_filter = reference_split(reference)
    if tests_filter is not None:
        tests_filter = re.compile(tests_filter)

    criteria_check = check_file(module_path, reference)
    if criteria_check is not True:
        return criteria_check

    # disabled tests not needed here
    class_methods_info, _ = find_tests(module_path)
    runnables = []
    for klass, methods_tags_depens in class_methods_info.items():
        for (method, tags, depens) in methods_tags_depens:
            klass_method = f"{klass}.{method}"
            if tests_filter is not None and not tests_filter.search(klass_method):
                continue
            uri = f"{module_path}:{klass_method}"
            runnables.append(Runnable(name, uri=uri, tags=tags,
                                      dependencies=depens))
    if runnables:
        return ReferenceResolution(reference,
                                   ReferenceResolutionResult.SUCCESS,
                                   runnables)

    return ReferenceResolution(reference,
                               ReferenceResolutionResult.NOTFOUND)
def python_resolver(name, reference, find_tests):
    module_path, tests_filter = reference_split(reference)
    if tests_filter is not None:
        tests_filter = re.compile(tests_filter)

    criteria_check = check_file(module_path, reference)
    if criteria_check is not True:
        return criteria_check

    # disabled tests not needed here
    class_methods_info, _ = find_tests(module_path)
    runnables = []
    for klass, methods_tags_reqs in class_methods_info.items():
        for (method, tags, reqs) in methods_tags_reqs:
            klass_method = "%s.%s" % (klass, method)
            if tests_filter is not None and not tests_filter.search(klass_method):
                continue
            uri = "%s:%s" % (module_path, klass_method)
            runnables.append(Runnable(name,
                                      uri=uri,
                                      tags=tags,
                                      requirements=reqs,
                                      config=settings.as_dict(r'^runner\.')))
    if runnables:
        return ReferenceResolution(reference,
                                   ReferenceResolutionResult.SUCCESS,
                                   runnables)

    return ReferenceResolution(reference,
                               ReferenceResolutionResult.NOTFOUND)
def resolve(reference):
    module_path, tests_filter = reference_split(reference)
    if tests_filter is not None:
        tests_filter = re.compile(tests_filter)

    criteria_check = check_file(module_path, reference)
    if criteria_check is not True:
        return criteria_check

    # disabled tests not needed here
    class_methods_info, _ = find_avocado_tests(module_path)
    runnables = []
    for klass, methods_tags_reqs in class_methods_info.items():
        for (method, tags, reqs) in methods_tags_reqs:
            klass_method = "%s.%s" % (klass, method)
            if tests_filter is not None:
                if not tests_filter.search(klass_method):
                    continue
            uri = "%s:%s" % (module_path, klass_method)
            runnables.append(Runnable('avocado-instrumented',
                                      uri=uri,
                                      tags=tags,
                                      requirements=reqs))
    if runnables:
        return ReferenceResolution(reference,
                                   ReferenceResolutionResult.SUCCESS,
                                   runnables)

    return ReferenceResolution(reference,
                               ReferenceResolutionResult.NOTFOUND)
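# Usage sketch (illustrative only, not part of the resolver): how a caller
# might consume resolve() above.  The module path "tests/test_math.py" and the
# test name "MathTest.test_add" are hypothetical, and this assumes the
# ReferenceResolution object exposes the "result" and "resolutions" values
# passed to its constructor, and that Runnable keeps the uri it was given.
resolution = resolve("tests/test_math.py:MathTest.test_add")
if resolution.result == ReferenceResolutionResult.SUCCESS:
    for runnable in resolution.resolutions:
        print(runnable.uri)
else:
    print("no matching tests found")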
def test_split_file_exists(self):
    file_name = 'file_contains_a_colon_:_indeed'
    with unittest.mock.patch('avocado.core.references.os.path.exists',
                             return_value=True):
        path, additional_info = references.reference_split(file_name)
    self.assertEqual(path, file_name)
    self.assertEqual(additional_info, None)
def _discover(self, reference, which_tests=DiscoverMode.DEFAULT):
    """
    Recursively walk a directory and find test params.

    The tests are returned in alphabetic order.

    :param reference: the directory path to inspect.
    :param which_tests: Limit tests to be displayed
    :type which_tests: :class:`DiscoverMode`
    :return: list of matching tests
    """
    if reference is None:
        if which_tests == DiscoverMode.DEFAULT:
            return []  # Return an empty list when not listing details
        else:
            reference = data_dir.get_test_dir()
    ignore_suffix = (".data", ".pyc", ".pyo", "__init__.py", "__main__.py")

    # Look for filename:test_method pattern
    reference, subtests_filter = reference_split(reference)
    if subtests_filter is not None:
        subtests_filter = re.compile(subtests_filter)

    if not os.path.isdir(reference):  # Single file
        return self._make_tests(
            reference, which_tests == DiscoverMode.ALL, subtests_filter
        )

    tests = []

    def add_test_from_exception(exception):
        """If exc.filename is a valid test, it's added to tests"""
        tests.extend(
            self._make_tests(exception.filename, which_tests == DiscoverMode.ALL)
        )

    def skip_non_test(exception):  # pylint: disable=W0613
        """Always return None"""
        return None

    if which_tests == DiscoverMode.ALL:
        onerror = add_test_from_exception
    else:  # DEFAULT, AVAILABLE => skip missing tests
        onerror = skip_non_test

    for dirpath, dirs, filenames in os.walk(reference, onerror=onerror):
        dirs.sort()
        for file_name in sorted(filenames):
            if file_name.startswith(".") or file_name.endswith(ignore_suffix):
                continue
            pth = os.path.join(dirpath, file_name)
            tests.extend(
                self._make_tests(
                    pth, which_tests == DiscoverMode.ALL, subtests_filter
                )
            )
    return tests
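# Standalone sketch of the os.walk(onerror=...) pattern used by _discover()
# above (illustrative only; "some_dir" is a hypothetical path).  os.walk
# silently skips entries it cannot descend into unless an onerror callback is
# given; the callback receives the OSError, whose .filename attribute names
# the entry that failed, which is what add_test_from_exception relies on.
import os


def report_error(exception):
    print("could not descend into:", exception.filename)


for dirpath, dirs, filenames in os.walk("some_dir", onerror=report_error):
    dirs.sort()  # walk subdirectories in a stable, alphabetic order
    for file_name in sorted(filenames):
        print(os.path.join(dirpath, file_name))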
def test_split_file_does_not_exist(self):
    not_a_file = "/should/be/safe/to/assume/it/is/not/a/file:foo"
    path, additional_info = references.reference_split(not_a_file)
    self.assertEqual(path, "/should/be/safe/to/assume/it/is/not/a/file")
    self.assertEqual(additional_info, "foo")
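# A minimal sketch of the behaviour the two reference_split() tests above
# exercise.  This is an assumption about the split logic, not the real
# avocado.core.references code: an existing path is never split, even if its
# name contains a colon; otherwise the reference is split at the last colon
# into (path, additional_info).
import os


def reference_split_sketch(reference):
    if os.path.exists(reference) or ':' not in reference:
        return reference, None
    path, additional_info = reference.rsplit(':', 1)
    return path, additional_info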