def complete_network_testcase(prefix, parsed_args, **_):
    """
    :type prefix: unicode
    :type parsed_args: any
    :rtype: list[str]
    """
    # Test cases are specific to a single module; refuse to autocomplete
    # unless exactly one module has been specified.
    if len(parsed_args.include) != 1:
        return []

    test_dir = 'test/integration/targets/%s/tests' % parsed_args.include[0]
    connection_dirs = data_context().content.get_dirs(test_dir)

    matches = []

    for connection_dir in connection_dirs:
        for file_path in data_context().content.get_files(connection_dir):
            candidate = os.path.basename(file_path)

            if candidate.startswith(prefix):
                # strip the extension(s) so only the test case name is offered
                matches.append(candidate.split('.')[0])

    return matches
def generate_collection_coverage_config(args):  # type: (TestConfig) -> str
    """Generate code coverage configuration for tests."""
    # settings common to every test type
    config = '''
[run]
branch = True
concurrency = multiprocessing
parallel = True
disable_warnings =
    no-data-collected
'''

    if isinstance(args, IntegrationConfig):
        # integration tests may run code both from the content root and from
        # a copy of the collection placed elsewhere on the remote
        config += '''
include =
    %s/*
    */%s/*
''' % (data_context().content.root, data_context().content.collection.directory)
    elif isinstance(args, SanityConfig):
        # temporary work-around for import sanity test
        config += '''
include =
    %s/*

omit =
    */test/runner/.tox/*
''' % data_context().content.root
    else:
        config += '''
include =
    %s/*
''' % data_context().content.root

    return config
def filter_targets(self, targets):  # type: (t.List[TestTarget]) -> t.List[TestTarget]
    """Return the given list of test targets, filtered to include only those relevant for the test."""
    if self.no_targets:
        return []

    if self.text is not None:
        # self.text selects between text-only and binary-only targets
        if self.text:
            targets = [tgt for tgt in targets if not is_binary_file(tgt.path)]
        else:
            targets = [tgt for tgt in targets if is_binary_file(tgt.path)]

    if self.extensions:
        # files under bin/ are treated as python when '.py' is requested
        targets = [tgt for tgt in targets
                   if os.path.splitext(tgt.path)[1] in self.extensions
                   or (is_subdir(tgt.path, 'bin') and '.py' in self.extensions)]

    if self.prefixes:
        targets = [tgt for tgt in targets
                   if any(tgt.path.startswith(prefix) for prefix in self.prefixes)]

    if self.files:
        targets = [tgt for tgt in targets if os.path.basename(tgt.path) in self.files]

    if self.ignore_self and data_context().content.is_ansible:
        # exclude the test's own source file when testing ansible itself
        relative_self_path = os.path.relpath(self.path, data_context().content.root)
        targets = [tgt for tgt in targets if tgt.path != relative_self_path]

    return targets
def pylint(args, context, paths, plugin_dir, plugin_names):  # type: (SanityConfig, str, t.List[str], str, t.List[str]) -> t.List[t.Dict[str, str]]
    """Run pylint using the config specified by the context on the specified paths."""
    # look for a context-specific rcfile first, falling back to the default config
    rcfile = os.path.join(ANSIBLE_ROOT, 'test/sanity/pylint/config/%s' % context.split('/')[0])

    if not os.path.exists(rcfile):
        rcfile = os.path.join(ANSIBLE_ROOT, 'test/sanity/pylint/config/default')

    # the rcfile may carry an [ansible-test] section with extra settings
    parser = ConfigParser()
    parser.read(rcfile)

    if parser.has_section('ansible-test'):
        config = dict(parser.items('ansible-test'))
    else:
        config = dict()

    # plugins listed in 'disable-plugins' (comma separated) are not loaded
    disable_plugins = set(i.strip() for i in config.get('disable-plugins', '').split(',') if i)
    load_plugins = set(plugin_names) - disable_plugins

    cmd = [
        args.python_executable,
        '-m', 'pylint',
        '--jobs', '0',  # auto-detect worker count
        '--reports', 'n',
        '--max-line-length', '160',
        '--rcfile', rcfile,
        '--output-format', 'json',  # machine-readable output parsed below
        '--load-plugins', ','.join(load_plugins),
    ] + paths

    # make the custom pylint plugins importable; collections also need their root on the path
    append_python_path = [plugin_dir]

    if data_context().content.collection:
        append_python_path.append(data_context().content.collection.root)

    env = ansible_environment(args)
    env['PYTHONPATH'] += os.path.pathsep + os.path.pathsep.join(append_python_path)

    if paths:
        display.info('Checking %d file(s) in context "%s" with config: %s' % (len(paths), context, rcfile), verbosity=1)

        try:
            stdout, stderr = run_command(args, cmd, env=env, capture=True)
            status = 0
        except SubprocessError as ex:
            # pylint exits non-zero when it reports messages; capture output for parsing
            stdout = ex.stdout
            stderr = ex.stderr
            status = ex.status

        # status >= 32 presumably indicates a usage/fatal error rather than
        # lint findings (per pylint's exit-code convention) — treat as failure
        if stderr or status >= 32:
            raise SubprocessError(cmd=cmd, status=status, stderr=stderr, stdout=stdout)
    else:
        stdout = None

    # in explain mode the command was not actually run, so there is nothing to parse
    if not args.explain and stdout:
        messages = json.loads(stdout)
    else:
        messages = []

    return messages
def enumerate_module_utils():
    """Return a list of available module_utils imports.
    :rtype: set[str]
    """
    names = set()

    for file_path in data_context().content.walk_files(data_context().content.module_utils_csharp_path):
        # only C# sources count as importable C# module_utils
        if os.path.splitext(file_path)[1] == '.cs':
            names.add(get_csharp_module_utils_name(file_path))

    return names
def ansible_environment(args, color=True, ansible_config=None):
    """Return the environment variables required to run ansible commands.

    :type args: CommonConfig
    :type color: bool
    :type ansible_config: str | None
    :rtype: dict[str, str]
    """
    env = common_environment()
    path = env['PATH']

    # make sure the in-tree ansible binaries take precedence
    ansible_path = os.path.join(ANSIBLE_ROOT, 'bin')

    if not path.startswith(ansible_path + os.path.pathsep):
        path = ansible_path + os.path.pathsep + path

    if ansible_config:
        pass  # explicit config provided by the caller
    elif isinstance(args, IntegrationConfig):
        ansible_config = os.path.join(ANSIBLE_ROOT, 'test/integration/%s.cfg' % args.command)
    else:
        ansible_config = os.path.join(ANSIBLE_ROOT, 'test/%s/ansible.cfg' % args.command)

    if not args.explain and not os.path.exists(ansible_config):
        raise ApplicationError('Configuration not found: %s' % ansible_config)

    ansible = dict(
        ANSIBLE_PYTHON_MODULE_RLIMIT_NOFILE=str(SOFT_RLIMIT_NOFILE),
        # fixed: the original used "'%s' % 'true' if ... else 'false'", which only
        # worked because '%' binds tighter than the conditional expression; the
        # plain conditional below is equivalent and unambiguous
        ANSIBLE_FORCE_COLOR='true' if args.color and color else 'false',
        ANSIBLE_DEPRECATION_WARNINGS='false',
        ANSIBLE_HOST_KEY_CHECKING='false',
        ANSIBLE_RETRY_FILES_ENABLED='false',
        ANSIBLE_CONFIG=os.path.abspath(ansible_config),
        ANSIBLE_LIBRARY='/dev/null',
        PYTHONPATH=os.path.join(ANSIBLE_ROOT, 'lib'),
        PAGER='/bin/cat',
        PATH=path,
    )

    env.update(ansible)

    if args.debug:
        env.update(dict(
            ANSIBLE_DEBUG='true',
            ANSIBLE_LOG_PATH=os.path.abspath('test/results/logs/debug.log'),
        ))

    if data_context().content.collection:
        env.update(dict(
            ANSIBLE_COLLECTIONS_PATHS=data_context().content.collection.root,
        ))

    return env
def filter_targets(self, targets):  # type: (t.List[TestTarget]) -> t.List[TestTarget]
    """Return the given list of test targets, filtered to include only those relevant for the test."""
    matches = []

    for target in targets:
        # only python sources under the modules or module_utils trees are relevant
        if os.path.splitext(target.path)[1] != '.py':
            continue

        if is_subdir(target.path, data_context().content.module_path) \
                or is_subdir(target.path, data_context().content.module_utils_path):
            matches.append(target)

    return matches
def git_callback(files):  # type: (t.List[t.Tuple[str, str]]) -> None
    """Add files from the content root .git directory to the payload file list."""
    content_root = data_context().content.root
    git_root = os.path.join(content_root, '.git')

    for dirpath, _dirnames, filenames in os.walk(git_root):
        for filename in filenames:
            full_path = os.path.join(dirpath, filename)
            # payload entries are (absolute source, path relative to content root)
            files.append((full_path, os.path.relpath(full_path, content_root)))
def __init__(self, args, command):
    """Initialize test configuration from parsed command line arguments.

    :type args: any
    :type command: str
    """
    super(TestConfig, self).__init__(args, command)

    # coverage collection settings
    self.coverage = args.coverage  # type: bool
    self.coverage_label = args.coverage_label  # type: str
    self.coverage_check = args.coverage_check  # type: bool
    self.coverage_config_base_path = None  # type: t.Optional[str]

    # target selection
    self.include = args.include or []  # type: t.List[str]
    self.exclude = args.exclude or []  # type: t.List[str]
    self.require = args.require or []  # type: t.List[str]

    # change-detection settings (git state filters)
    self.changed = args.changed  # type: bool
    self.tracked = args.tracked  # type: bool
    self.untracked = args.untracked  # type: bool
    self.committed = args.committed  # type: bool
    self.staged = args.staged  # type: bool
    self.unstaged = args.unstaged  # type: bool
    self.changed_from = args.changed_from  # type: str
    self.changed_path = args.changed_path  # type: t.List[str]

    # these options only exist for some commands, so probe args before reading
    self.lint = args.lint if 'lint' in args else False  # type: bool
    self.junit = args.junit if 'junit' in args else False  # type: bool
    self.failure_ok = args.failure_ok if 'failure_ok' in args else False  # type: bool

    self.metadata = Metadata.from_file(args.metadata) if args.metadata else Metadata()
    self.metadata_path = None  # type: t.Optional[str]

    # coverage-check implies coverage collection
    if self.coverage_check:
        self.coverage = True

    def metadata_callback(files):  # type: (t.List[t.Tuple[str, str]]) -> None
        """Add the metadata file to the payload file list."""
        config = self

        # collections are rooted at the collection directory inside the payload
        if data_context().content.collection:
            working_path = data_context().content.collection.directory
        else:
            working_path = ''

        if self.metadata_path:
            files.append((os.path.abspath(config.metadata_path), os.path.join(working_path, config.metadata_path)))

    data_context().register_payload_callback(metadata_callback)
def get_csharp_module_utils_name(path):  # type: (str) -> str
    """Return a namespace and name from the given module_utils path."""
    base_path = data_context().content.module_utils_csharp_path

    # collection-hosted module_utils carry an AnsibleCollections namespace prefix
    if data_context().content.collection:
        prefix = 'AnsibleCollections.' + data_context().content.collection.prefix
    else:
        prefix = ''

    relative = os.path.relpath(path, base_path)
    namespace = os.path.splitext(relative)[0].replace(os.sep, '.')

    return prefix + namespace
def metadata_callback(files):  # type: (t.List[t.Tuple[str, str]]) -> None
    """Add the metadata file to the payload file list."""
    config = self  # closure over the enclosing config instance

    # nothing to include until a metadata path has been assigned
    if not self.metadata_path:
        return

    collection = data_context().content.collection
    working_path = collection.directory if collection else ''

    files.append((os.path.abspath(config.metadata_path),
                  os.path.join(working_path, config.metadata_path)))
def test(self, args, targets):  # pylint: disable=locally-disabled, unused-argument
    """
    :type args: SanityConfig
    :type targets: SanityTargets
    :rtype: TestResult
    """
    sanity_dir = 'docs/docsite/rst/dev_guide/testing/sanity'

    # names of all sanity tests that have an .rst documentation page
    documented = set()

    for doc_path in data_context().content.get_files(sanity_dir):
        stem, ext = os.path.splitext(os.path.basename(doc_path))

        if ext == '.rst':
            documented.add(stem)

    known_tests = set(sanity_test.name for sanity_test in sanity_get_tests())
    undocumented = sorted(known_tests - documented)

    results = [
        SanityMessage(
            message='missing docs for ansible-test sanity --test %s' % name,
            path=os.path.join(sanity_dir, '%s.rst' % name),
        ) for name in undocumented
    ]

    if results:
        return SanityFailure(self.name, messages=results)

    return SanitySuccess(self.name)
def setup_common_temp_dir(args, path):
    """
    :type args: IntegrationConfig
    :type path: str
    """
    # explain mode makes no filesystem changes
    if args.explain:
        return

    os.mkdir(path)
    os.chmod(path, MODE_DIRECTORY)

    coverage_config_path = os.path.join(path, COVERAGE_CONFIG_PATH)

    if data_context().content.collection:
        # collections get a generated coverage config
        with open(coverage_config_path, 'w') as coverage_config_fd:
            coverage_config_fd.write(generate_collection_coverage_config(args))
    else:
        # ansible itself ships a static coverage config
        shutil.copy(os.path.join(ANSIBLE_ROOT, COVERAGE_CONFIG_PATH), coverage_config_path)

    os.chmod(coverage_config_path, MODE_FILE)

    coverage_output_path = os.path.join(path, COVERAGE_OUTPUT_PATH)

    os.mkdir(coverage_output_path)
    os.chmod(coverage_output_path, MODE_DIRECTORY_WRITE)
def sanity_init():
    """Initialize full sanity test list (includes code-smell scripts determined at runtime)."""
    global SANITY_TESTS  # pylint: disable=locally-disabled, global-statement

    import_plugins('sanity')

    sanity_plugins = {}  # type: t.Dict[str, t.Type[SanityFunc]]
    load_plugins(SanityFunc, sanity_plugins)

    instances = []

    for plugin in sanity_plugins.values():
        # ansible-only tests are skipped when testing collections
        if data_context().content.is_ansible or not plugin.ansible_only:
            instances.append(plugin())

    combined = tuple(instances) + collect_code_smell_tests()
    SANITY_TESTS = tuple(sorted(combined, key=lambda test: test.name))
def get_python_module_utils_name(path):  # type: (str) -> str
    """Return a namespace and name from the given module_utils path."""
    base_path = data_context().content.module_utils_path

    # collection-hosted module_utils live under the ansible_collections namespace
    if data_context().content.collection:
        prefix = 'ansible_collections.' + data_context().content.collection.prefix
    else:
        prefix = 'ansible.module_utils.'

    # a package's __init__.py maps to the package itself
    if path.endswith('/__init__.py'):
        path = os.path.dirname(path)

    dotted = os.path.splitext(os.path.relpath(path, base_path))[0].replace(os.sep, '.')

    return prefix + dotted
def walk_module_targets():
    """
    :rtype: collections.Iterable[TestTarget]
    """
    for target in walk_test_targets(path=data_context().content.module_path,
                                    module_path=data_context().content.module_path,
                                    extensions=MODULE_EXTENSIONS):
        # only yield targets recognized as modules
        if target.module:
            yield target
def __init__(self, args):
    """Load cached SSH keys, generating them under ~/.ansible/test/ if missing.

    :type args: EnvironmentConfig
    """
    cache_dir = os.path.join(data_context().content.root, 'test/cache')

    self.key = os.path.join(cache_dir, self.KEY_NAME)  # private key path
    self.pub = os.path.join(cache_dir, self.PUB_NAME)  # public key path

    # destination paths (relative to the content root) used when keys are added to a payload
    key_dst = os.path.relpath(self.key, data_context().content.root)
    pub_dst = os.path.relpath(self.pub, data_context().content.root)

    if not os.path.isfile(self.key) or not os.path.isfile(self.pub):
        # no cached keys in the content tree; fall back to (and create if needed)
        # a key pair shared across runs in the user's home directory
        base_dir = os.path.expanduser('~/.ansible/test/')

        key = os.path.join(base_dir, self.KEY_NAME)
        pub = os.path.join(base_dir, self.PUB_NAME)

        if not args.explain:
            make_dirs(base_dir)

        if not os.path.isfile(key) or not os.path.isfile(pub):
            # generate a new RSA key pair with no passphrase (PEM format)
            run_command(args, ['ssh-keygen', '-m', 'PEM', '-q', '-t', 'rsa', '-N', '', '-f', key])

        self.key = key
        self.pub = pub

        def ssh_key_callback(files):  # type: (t.List[t.Tuple[str, str]]) -> None
            """Add the SSH keys to the payload file list."""
            files.append((key, key_dst))
            files.append((pub, pub_dst))

        data_context().register_payload_callback(ssh_key_callback)

    if args.explain:
        self.pub_contents = None
    else:
        with open(self.pub, 'r') as pub_fd:
            self.pub_contents = pub_fd.read().strip()
def walk_test_targets(path=None, module_path=None, extensions=None, prefix=None, extra_dirs=None):
    """
    :type path: str | None
    :type module_path: str | None
    :type extensions: tuple[str] | None
    :type prefix: str | None
    :type extra_dirs: tuple[str] | None
    :rtype: collections.Iterable[TestTarget]
    """
    if path:
        candidates = data_context().content.walk_files(path)
    else:
        candidates = data_context().content.all_files()

    for file_path in candidates:
        name, ext = os.path.splitext(os.path.basename(file_path))

        if extensions and ext not in extensions:
            continue

        if prefix and not name.startswith(prefix):
            continue

        if os.path.islink(to_bytes(file_path)):
            # special case to allow a symlink of ansible_release.py -> ../release.py
            if file_path != 'lib/ansible/module_utils/ansible_release.py':
                continue

        yield TestTarget(file_path, module_path, prefix, path)

    if extra_dirs:
        # files from extra dirs bypass the extension/prefix filters, but symlinks are skipped
        for extra_dir in extra_dirs:
            for extra_path in data_context().content.get_files(extra_dir):
                if os.path.islink(to_bytes(extra_path)):
                    continue

                yield TestTarget(extra_path, module_path, prefix, path)
def walk_integration_targets():
    """
    :rtype: collections.Iterable[IntegrationTarget]
    """
    targets_root = 'test/integration/targets'

    # all known modules, used to classify each target's scripts
    modules = frozenset(target.module for target in walk_module_targets())
    prefixes = load_integration_prefixes()

    for target_path in data_context().content.get_dirs(targets_root):
        yield IntegrationTarget(target_path, modules, prefixes)
def enumerate_module_utils():
    """Return a list of available module_utils imports.
    :rtype: set[str]
    """
    base_path = data_context().content.module_utils_path
    init_path = os.path.join(base_path, '__init__.py')  # the package marker is not importable content

    return set(
        get_python_module_utils_name(file_path)
        for file_path in data_context().content.walk_files(base_path)
        if file_path != init_path and os.path.splitext(file_path)[1] == '.py'
    )
def config_callback(files):  # type: (t.List[t.Tuple[str, str]]) -> None
    """Add the config file to the payload file list."""
    # skip entirely when no cloud config has been loaded
    if not self._get_cloud_config(self._CONFIG_PATH, ''):
        return

    pair = (self.config_path, os.path.relpath(self.config_path, data_context().content.root))

    if pair in files:
        return  # already queued for the payload

    display.info('Including %s config: %s -> %s' % (self.platform, pair[0], pair[1]), verbosity=3)
    files.append(pair)
def __init__(self, args):
    """
    :type args: IntegrationConfig
    """
    self.args = args
    # derive the provider platform name from the module path (e.g. plugins.cloud.<platform>)
    self.platform = self.__module__.split('.')[2]

    def config_callback(files):  # type: (t.List[t.Tuple[str, str]]) -> None
        """Add the config file to the payload file list."""
        if not self._get_cloud_config(self._CONFIG_PATH, ''):
            return  # no cloud config loaded; nothing to include

        pair = (self.config_path, os.path.relpath(self.config_path, data_context().content.root))

        if pair in files:
            return  # already queued for the payload

        display.info('Including %s config: %s -> %s' % (self.platform, pair[0], pair[1]), verbosity=3)
        files.append(pair)

    data_context().register_payload_callback(config_callback)
def load_integration_prefixes():
    """
    :rtype: dict[str, str]
    """
    path = 'test/integration'
    prefixes = {}

    # prefix definitions live in files named 'target-prefixes.<prefix>'
    prefix_files = sorted(file_path for file_path in data_context().content.get_files(path)
                          if os.path.splitext(os.path.basename(file_path))[0] == 'target-prefixes')

    for file_path in prefix_files:
        # the extension (minus the dot) is the prefix name itself
        prefix = os.path.splitext(file_path)[1][1:]

        with open(file_path, 'r') as prefix_fd:
            for target_name in prefix_fd.read().splitlines():
                prefixes[target_name] = prefix

    return prefixes
def coverage_setup(args):  # type: (TestConfig) -> None
    """Set up code coverage configuration before running tests."""
    # generated config is only needed when collecting coverage for a collection
    if not (args.coverage and data_context().content.collection):
        return

    coverage_config = generate_collection_coverage_config(args)

    if args.explain:
        # explain mode reports a placeholder path without touching the filesystem
        args.coverage_config_base_path = '/tmp/coverage-temp-dir'
        return

    args.coverage_config_base_path = tempfile.mkdtemp()

    config_path = os.path.join(args.coverage_config_base_path, COVERAGE_CONFIG_PATH)

    with open(config_path, 'w') as coverage_config_path_fd:
        coverage_config_path_fd.write(coverage_config)
def main():
    """Main program function."""
    try:
        # all relative paths below assume the content root as the working directory
        os.chdir(data_context().content.root)
        initialize_cloud_plugins()
        sanity_init()
        args = parse_args()
        config = args.config(args)  # type: CommonConfig
        display.verbosity = config.verbosity
        display.truncate = config.truncate
        display.redact = config.redact
        display.color = config.color
        # listing/lint output goes to stdout, so informational messages move to stderr
        display.info_stderr = (isinstance(config, SanityConfig) and config.lint) or (isinstance(config, IntegrationConfig) and config.list_targets)
        check_startup()
        check_delegation_args(config)
        configure_timeout(config)

        display.info('RLIMIT_NOFILE: %s' % (CURRENT_RLIMIT_NOFILE, ), verbosity=2)
        display.info('MAXFD: %d' % MAXFD, verbosity=2)

        try:
            args.func(config)
            delegate_args = None
        except Delegate as ex:
            # save delegation args for use once we exit the exception handler
            delegate_args = (ex.exclude, ex.require, ex.integration_targets)

        if delegate_args:
            delegate(config, *delegate_args)

        display.review_warnings()
    except ApplicationWarning as ex:
        display.warning(u'%s' % ex)
        exit(0)
    except ApplicationError as ex:
        display.error(u'%s' % ex)
        exit(1)
    except KeyboardInterrupt:
        exit(2)
    except IOError as ex:
        # a broken pipe (e.g. output piped to `head`) is not an error worth reporting
        if ex.errno == errno.EPIPE:
            exit(3)
        raise
def collect_code_smell_tests():
    """
    :rtype: tuple[SanityFunc]
    """
    skip_file = os.path.join(ANSIBLE_ROOT, 'test/sanity/code-smell/skip.txt')
    ansible_only_file = os.path.join(ANSIBLE_ROOT, 'test/sanity/code-smell/ansible-only.txt')

    skip_tests = read_lines_without_comments(skip_file, remove_blank_lines=True, optional=True)

    # when testing collections, also skip the tests that only apply to ansible itself
    if not data_context().content.is_ansible:
        skip_tests += read_lines_without_comments(ansible_only_file, remove_blank_lines=True)

    candidates = glob.glob(os.path.join(ANSIBLE_ROOT, 'test/sanity/code-smell/*.py'))
    selected = []

    for candidate in candidates:
        # only executable, non-skipped regular files are runnable tests
        if not os.path.isfile(candidate):
            continue

        if not os.access(candidate, os.X_OK):
            continue

        if os.path.basename(candidate) in skip_tests:
            continue

        selected.append(candidate)

    return tuple(SanityCodeSmellTest(script) for script in sorted(selected))
def delegate(args, exclude, require, integration_targets):
    """
    :type args: EnvironmentConfig
    :type exclude: list[str]
    :type require: list[str]
    :type integration_targets: tuple[IntegrationTarget]
    :rtype: bool
    """
    # non-test configs delegate directly, with no metadata file to manage
    if not isinstance(args, TestConfig):
        return delegate_command(args, exclude, require, integration_targets)

    # write metadata to a temp file in the content root so it travels with the payload
    with tempfile.NamedTemporaryFile(prefix='metadata-', suffix='.json', dir=data_context().content.root) as metadata_fd:
        args.metadata_path = os.path.basename(metadata_fd.name)
        args.metadata.to_file(args.metadata_path)

        try:
            return delegate_command(args, exclude, require, integration_targets)
        finally:
            # clear the path so later code does not reference the deleted temp file
            args.metadata_path = None
def filter_targets(self, targets):  # type: (t.List[TestTarget]) -> t.List[TestTarget]
    """Return the given list of test targets, filtered to include only those relevant for the test."""
    # all YAML files are always relevant
    matches = [target for target in targets
               if os.path.splitext(target.path)[1] in ('.yml', '.yaml')]

    # python plugin sources (except module_utils and package markers) are also relevant
    for plugin_type, plugin_path in sorted(data_context().content.plugin_paths.items()):
        if plugin_type == 'module_utils':
            continue

        for target in targets:
            if os.path.splitext(target.path)[1] == '.py' \
                    and os.path.basename(target.path) != '__init__.py' \
                    and is_subdir(target.path, plugin_path):
                matches.append(target)

    return matches
def get_dependent_paths_internal(self, path):
    """
    :type path: str
    :rtype: list[str]
    """
    ext = os.path.splitext(os.path.split(path)[1])[1]

    if is_subdir(path, data_context().content.module_utils_path):
        # dispatch on language by file extension
        handler = {
            '.py': self.get_python_module_utils_usage,
            '.psm1': self.get_powershell_module_utils_usage,
            '.cs': self.get_csharp_module_utils_usage,
        }.get(ext)

        if handler:
            return handler(path)

    if path.startswith('test/integration/targets/'):
        return self.get_integration_target_usage(path)

    return []
def filter_targets(self, targets):  # type: (t.List[TestTarget]) -> t.List[TestTarget]
    """Return the given list of test targets, filtered to include only those relevant for the test."""
    # This should use documentable plugins from constants instead
    plugin_type_blacklist = set([
        # not supported by ansible-doc
        'action',
        'doc_fragments',
        'filter',
        'module_utils',
        'netconf',
        'terminal',
        'test',
    ])

    plugin_paths = [plugin_path for plugin_type, plugin_path in data_context().content.plugin_paths.items()
                    if plugin_type not in plugin_type_blacklist]

    matches = []

    for target in targets:
        if os.path.splitext(target.path)[1] != '.py':
            continue

        if os.path.basename(target.path) == '__init__.py':
            continue  # package markers are not documentable plugins

        if any(is_subdir(target.path, plugin_path) for plugin_path in plugin_paths):
            matches.append(target)

    return matches