def can_handle_archive(self):
    """Report whether a usable GNU tar can read this archive.

    Returns a ``(bool, message)`` tuple: ``(True, None)`` on success,
    otherwise ``(False, reason)``.
    """
    # Prefer gtar (GNU tar) as it supports the compression options -z, -j and -J
    for candidate in ('gtar', 'tar'):
        try:
            self.cmd_path = get_bin_path(candidate)
        except ValueError:
            continue
        break
    else:
        # Neither binary was located anywhere on the PATH.
        return False, "Unable to find required 'gtar' or 'tar' binary in the path"
    self.tar_type = self._get_tar_type()
    if self.tar_type != 'gnu':
        return False, 'Command "%s" detected as tar type %s. GNU tar required.' % (self.cmd_path, self.tar_type)
    try:
        if self.files_in_archive:
            return True, None
    except UnarchiveError as e:
        return False, 'Command "%s" could not handle archive: %s' % (self.cmd_path, to_native(e))
    # Errors and no files in archive assume that we weren't able to
    # properly unarchive it
    return False, 'Command "%s" found no files in archive. Empty archive files are not supported.' % self.cmd_path
def __init__(self, src, b_dest, file_args, module):
    """Prepare the tar-based archive handler from module parameters."""
    self.src = src
    self.b_dest = b_dest
    self.file_args = file_args
    self.opts = module.params['extra_opts']
    self.module = module
    if self.module.check_mode:
        # Extraction via gtar cannot be simulated, so bail out immediately.
        self.module.exit_json(skipped=True, msg="remote module (%s) does not support check mode when using gtar" % self.module._name)
    # Trailing slashes would defeat exact-path matching against archive entries.
    self.excludes = [excl.rstrip('/') for excl in self.module.params['exclude']]
    self.include_files = self.module.params['include']
    # Prefer gtar (GNU tar) as it supports the compression options -z, -j and -J
    self.cmd_path = None
    for candidate in ('gtar', 'tar'):
        try:
            self.cmd_path = get_bin_path(candidate)
        except ValueError:
            continue
        break
    if self.cmd_path is None:
        self.module.fail_json(msg="Unable to find required 'gtar' or 'tar' binary in the path")
    self.zipflag = '-z'
    self._files_in_archive = []
    self.tar_type = self._get_tar_type() if self.cmd_path else None
def is_available(self):
    ''' we expect the python bindings installed, but this gives warning if they are missing and we have rpm cli'''
    # Whether the Python rpm bindings (self.LIB) are importable.
    we_have_lib = super(RPM, self).is_available()
    try:
        # Raises ValueError when no 'rpm' binary exists; in that case we
        # neither respawn nor warn -- the system simply has no rpm at all.
        get_bin_path('rpm')
        if not we_have_lib and not has_respawned():
            # try to locate an interpreter with the necessary lib
            interpreters = ['/usr/libexec/platform-python', '/usr/bin/python3', '/usr/bin/python2']
            interpreter_path = probe_interpreters_for_module(interpreters, self.LIB)
            if interpreter_path:
                respawn_module(interpreter_path)
                # end of the line for this process; this module will exit when the respawned copy completes
        if not we_have_lib:
            # rpm CLI exists but the bindings are still missing even after
            # the respawn attempt above: surface a warning, don't fail.
            module.warn('Found "rpm" but %s' % (missing_required_lib(self.LIB)))
    except ValueError:
        pass
    return we_have_lib
def test_get_path_path_raise_valueerror(mocker):
    """get_bin_path must raise ValueError when the executable is nowhere on PATH."""
    expected = 'Failed to find required executable "notacommand"'
    # An empty PATH plus stubs that report "nothing exists" guarantees failure.
    mocker.patch.dict('os.environ', {'PATH': ''})
    mocker.patch('os.path.exists', return_value=False)
    mocker.patch('os.path.isdir', return_value=False)
    mocker.patch('ansible.module_utils.common.process.is_executable', return_value=True)
    with pytest.raises(ValueError, match=expected):
        get_bin_path('notacommand')
def is_available(self):
    '''Check for the rpm python bindings; warn when only the rpm CLI exists.'''
    bindings_present = super(RPM, self).is_available()
    try:
        get_bin_path('rpm')
    except ValueError:
        # No rpm binary at all: nothing worth warning about.
        pass
    else:
        if not bindings_present:
            module.warn('Found "rpm" but %s' % (missing_required_lib('rpm')))
    return bindings_present
def is_available(self):
    '''Check for the apt python bindings; warn if an apt CLI exists without them.'''
    bindings_present = super(APT, self).is_available()
    if not bindings_present:
        # Only the first tool found produces a warning.
        for tool in ('apt', 'apt-get', 'aptitude'):
            try:
                get_bin_path(tool)
            except ValueError:
                continue
            module.warn('Found "%s" but %s' % (tool, missing_required_lib('apt')))
            break
    return bindings_present
def __init__(self, play_context, new_stdin, *args, **kwargs):
    """Initialise the docker connection plugin."""
    super(Connection, self).__init__(play_context, new_stdin, *args, **kwargs)
    # Note: docker supports running as non-root in some configurations.
    # (For instance, setting the UNIX socket file to be readable and
    # writable by a specific UNIX group and then putting users into that
    # group). Therefore we don't check that the user is root when using
    # this connection. But if the user is getting a permission denied
    # error it probably means that docker on their system is only
    # configured to be connected to by root and they are not running as
    # root.
    self._docker_args = []
    self._container_user_cache = {}
    self._version = None
    # Windows uses Powershell modules
    if getattr(self._shell, "_IS_WINDOWS", False):
        self.module_implementation_preferences = ('.ps1', '.exe', '')
    try:
        # An explicitly supplied command takes priority (even if None).
        self.docker_cmd = kwargs['docker_command']
    except KeyError:
        try:
            self.docker_cmd = get_bin_path('docker')
        except ValueError:
            raise AnsibleError("docker command not found in PATH")
def is_available(self):
    ''' we expect the python bindings installed, but this gives warning if they are missing and we have rpm cli'''
    we_have_lib = super(RPM, self).is_available()
    if not we_have_lib:
        try:
            # BUG FIX: get_bin_path() raises ValueError when the binary is
            # absent (it does not return a falsey value), so the previous
            # bare `if ... and get_bin_path('rpm'):` crashed on systems
            # without rpm instead of quietly skipping the warning.
            get_bin_path('rpm')
        except ValueError:
            pass
        else:
            self.warnings.append('Found "rpm" but %s' % (missing_required_lib('rpm')))
    return we_have_lib
def _buffered_exec_command(self, cmd, stdin=subprocess.PIPE):
    '''Run a command inside the chroot and return the Popen handle.

    Only needed for implementing put_file() and get_file() so that the
    whole file is not read into memory. Compared to exec_command() it
    loses some niceties, such as returning the exit code immediately.
    '''
    shell = self.get_option('executable')
    argv = [get_bin_path('sudo'), self.chroot_cmd, self.chroot, shell, '-c', cmd]
    display.vvv("EXEC %s" % (argv), host=self.chroot)
    # Encode every element so Popen receives bytes regardless of input type.
    argv = [to_bytes(item, errors='surrogate_or_strict') for item in argv]
    return subprocess.Popen(argv, shell=False, stdin=stdin,
                            stdout=subprocess.PIPE, stderr=subprocess.PIPE)
def __init__(self, play_context, new_stdin, *args, **kwargs):
    """Initialise the chroot connection; validates the chroot dir and binary."""
    super(Connection, self).__init__(play_context, new_stdin, *args, **kwargs)
    self.chroot = self._play_context.remote_addr
    if os.geteuid() != 0:
        raise AnsibleError("chroot connection requires running as root")
    # we're running as root on the local system so do some
    # trivial checks for ensuring 'host' is actually a chroot'able dir
    if not os.path.isdir(self.chroot):
        raise AnsibleError("%s is not a directory" % self.chroot)
    chrootsh = os.path.join(self.chroot, 'bin/sh')
    # Want to check for a usable bourne shell inside the chroot.
    # is_executable() == True is sufficient. For symlinks it
    # gets really complicated really fast. So we punt on finding that
    # out. As long as it's a symlink we assume that it will work
    if not (is_executable(chrootsh) or (os.path.lexists(chrootsh) and os.path.islink(chrootsh))):
        raise AnsibleError("%s does not look like a chrootable dir (/bin/sh missing)" % self.chroot)
    if os.path.isabs(self.get_option('chroot_exe')):
        self.chroot_cmd = self.get_option('chroot_exe')
    else:
        try:
            self.chroot_cmd = get_bin_path(self.get_option('chroot_exe'))
        except ValueError as e:
            # BUG FIX: get_bin_path() raises ValueError instead of
            # returning None, so the old `if not self.chroot_cmd` check was
            # unreachable and the raw ValueError escaped to the caller.
            raise AnsibleError(to_native(e))
def version(self):
    """Return the installed 'uci' package version, or None when unavailable."""
    try:
        opkg = get_bin_path('opkg')
    except ValueError:
        # BUG FIX: get_bin_path() raises when 'opkg' is missing rather than
        # returning a falsey value, so the old `if opkg:` guard never fired
        # and the ValueError escaped. Treat a missing opkg as "no version".
        return None
    rc, out, err = self.module.run_command([opkg, 'info', 'uci'])
    if rc != 0:
        return None
    for line in out.splitlines():
        if match('^Version', line):
            return line.split()[1]
    return None
def clear_facls(path):
    """Remove all file ACL entries from *path* with ``setfacl -b``.

    Raises RuntimeError when setfacl exits non-zero, and ValueError (from
    get_bin_path) when setfacl is not installed.
    """
    setfacl = get_bin_path('setfacl')
    # FIXME "setfacl -b" is available on Linux and FreeBSD. There is "setfacl -D e" on z/OS. Others?
    acl_command = [setfacl, '-b', path]
    b_acl_command = [to_bytes(x) for x in acl_command]
    # Force the C locale so error output is predictable.
    rc, out, err = module.run_command(b_acl_command, environ_update=dict(LANG='C', LC_ALL='C', LC_MESSAGES='C'))
    if rc != 0:
        # BUG FIX: join the text command, not the bytes list --
        # ' '.join() over bytes items raises TypeError on Python 3,
        # which would mask the real setfacl failure.
        raise RuntimeError('Error running "{0}": stdout: "{1}"; stderr: "{2}"'.format(' '.join(acl_command), out, err))
def parse(self, inventory, loader, path, cache=True):
    # Populate the inventory from `VBoxManage list -l [running]vms` output,
    # optionally serving/refreshing results through the inventory cache.
    try:
        self._vbox_path = get_bin_path(self.VBOX)
    except ValueError as e:
        raise AnsibleParserError(e)
    super(InventoryModule, self).parse(inventory, loader, path)
    cache_key = self.get_cache_key(path)
    config_data = self._read_config_data(path)
    # set _options from config data
    self._consume_options(config_data)
    source_data = None
    if cache:
        # The caller allows caching; honour the plugin's own 'cache' option.
        cache = self.get_option('cache')
    update_cache = False
    if cache:
        try:
            source_data = self._cache[cache_key]
        except KeyError:
            # Cache miss: fall through to querying VBoxManage and refresh it.
            update_cache = True
    if not source_data:
        b_pwfile = to_bytes(self.get_option('settings_password_file'), errors='surrogate_or_strict', nonstring='passthru')
        running = self.get_option('running_only')
        # start getting data; command elements are bytes to match b_pwfile
        cmd = [self._vbox_path, b'list', b'-l']
        if running:
            cmd.append(b'runningvms')
        else:
            cmd.append(b'vms')
        if b_pwfile and os.path.exists(b_pwfile):
            cmd.append(b'--settingspwfile')
            cmd.append(b_pwfile)
        try:
            p = Popen(cmd, stdout=PIPE)
        except Exception as e:
            raise AnsibleParserError(to_native(e))
        source_data = p.stdout.read().splitlines()
    using_current_cache = cache and not update_cache
    cacheable_results = self._populate_from_source(source_data, using_current_cache)
    if update_cache:
        self._cache[cache_key] = cacheable_results
def is_available(self):
    ''' we expect the python bindings installed, but if there is apt/apt-get give warning about missing bindings'''
    we_have_lib = super(APT, self).is_available()
    if not we_have_lib:
        for exe in ('apt', 'apt-get'):
            try:
                # BUG FIX: get_bin_path() raises ValueError for a missing
                # binary; the previous bare `if get_bin_path(exe):` crashed
                # instead of moving on to the next candidate tool.
                get_bin_path(exe)
            except ValueError:
                continue
            self.warnings.append('Found "%s" but python bindings are missing, so we cannot get package information.' % exe)
            break
    return we_have_lib
def is_available(self):
    ''' we expect the python bindings installed, but if there is apt/apt-get give warning about missing bindings'''
    we_have_lib = super(APT, self).is_available()
    if not we_have_lib:
        for exe in ('apt', 'apt-get', 'aptitude'):
            try:
                # BUG FIX: get_bin_path() raises ValueError for a missing
                # binary; the previous bare `if get_bin_path(exe):` crashed
                # instead of trying the next candidate tool.
                get_bin_path(exe)
            except ValueError:
                continue
            self.warnings.append('Found "%s" but %s' % (exe, missing_required_lib('apt')))
            break
    return we_have_lib
def scm_archive_role(src, scm='git', name=None, version='HEAD', keep_scm_meta=False):
    # Clone a role from an SCM URL and pack it into a tar file; returns the
    # path of the resulting archive. Supports 'git' and 'hg' only.
    def run_scm_cmd(cmd, tempdir):
        # Run one SCM command in tempdir, raising AnsibleError on any failure.
        try:
            stdout = ''
            stderr = ''
            popen = Popen(cmd, cwd=tempdir, stdout=PIPE, stderr=PIPE)
            stdout, stderr = popen.communicate()
        except Exception as e:
            ran = " ".join(cmd)
            display.debug("ran %s:" % ran)
            # NOTE(review): communicate() returns bytes; this concatenation
            # assumes stdout/stderr are still the '' defaults when the
            # exception fires -- confirm, otherwise it would TypeError.
            display.debug("\tstdout: " + stdout)
            display.debug("\tstderr: " + stderr)
            raise AnsibleError("when executing %s: %s" % (ran, to_native(e)))
        if popen.returncode != 0:
            raise AnsibleError("- command %s failed in directory %s (rc=%s)" % (' '.join(cmd), tempdir, popen.returncode))
    if scm not in ['hg', 'git']:
        raise AnsibleError("- scm %s is not currently supported" % scm)
    try:
        scm_path = get_bin_path(scm)
    except (ValueError, OSError, IOError):
        raise AnsibleError("could not find/use %s, it is required to continue with installing %s" % (scm, src))
    tempdir = tempfile.mkdtemp(dir=C.DEFAULT_LOCAL_TMP)
    clone_cmd = [scm_path, 'clone', src, name]
    run_scm_cmd(clone_cmd, tempdir)
    # Only git supports checking out an arbitrary revision after clone here.
    if scm == 'git' and version:
        checkout_cmd = [scm_path, 'checkout', version]
        run_scm_cmd(checkout_cmd, os.path.join(tempdir, name))
    # delete=False: the caller consumes (and is responsible for removing) the file.
    temp_file = tempfile.NamedTemporaryFile(delete=False, suffix='.tar', dir=C.DEFAULT_LOCAL_TMP)
    archive_cmd = None
    if keep_scm_meta:
        # Preserve .git/.hg metadata by tarring the working copy directly.
        display.vvv('tarring %s from %s to %s' % (name, tempdir, temp_file.name))
        with tarfile.open(temp_file.name, "w") as tar:
            tar.add(os.path.join(tempdir, name), arcname=name)
    elif scm == 'hg':
        archive_cmd = [scm_path, 'archive', '--prefix', "%s/" % name]
        if version:
            archive_cmd.extend(['-r', version])
        archive_cmd.append(temp_file.name)
    elif scm == 'git':
        archive_cmd = [scm_path, 'archive', '--prefix=%s/' % name, '--output=%s' % temp_file.name]
        if version:
            archive_cmd.append(version)
        else:
            archive_cmd.append('HEAD')
    if archive_cmd is not None:
        display.vvv('archiving %s' % archive_cmd)
        run_scm_cmd(archive_cmd, os.path.join(tempdir, name))
    return temp_file.name
def __init__(self, play_context, new_stdin, *args, **kwargs):
    """Initialise the lxd connection plugin; requires the 'lxc' CLI."""
    super(Connection, self).__init__(play_context, new_stdin, *args, **kwargs)
    self._host = self._play_context.remote_addr
    try:
        self._lxc_cmd = get_bin_path("lxc")
    except ValueError:
        raise AnsibleError("lxc command not found in PATH")
    remote_user = self._play_context.remote_user
    if remote_user is not None and remote_user != 'root':
        # lxd always executes as the container default user.
        self._display.warning('lxd does not support remote_user, using container default: root')
def get_binary_from_path(name, opt_dirs=None):
    """Locate *name* on the PATH (plus optional extra dirs).

    Returns the resolved path, or None when the binary cannot be found.
    """
    extra = {}
    if opt_dirs is not None:
        # get_bin_path expects a list; wrap a bare string/path.
        extra["opt_dirs"] = opt_dirs if isinstance(opt_dirs, list) else [opt_dirs]
    try:
        return get_bin_path(name, **extra)
    except ValueError:
        return None
def test_get_bin_path(mocker):
    """The second PATH entry (/usr/local/bin) is reported as the match."""
    search_path = ':'.join(['/usr/local/sbin', '/usr/local/bin', '/usr/sbin', '/usr/bin', '/sbin', '/bin'])
    mocker.patch.dict('os.environ', {'PATH': search_path})
    mocker.patch('os.pathsep', ':')
    # First candidate dir misses, the second hits.
    mocker.patch('os.path.exists', side_effect=[False, True])
    mocker.patch('os.path.isdir', return_value=False)
    mocker.patch('ansible.module_utils.common.process.is_executable', return_value=True)
    assert get_bin_path('notacommand') == '/usr/local/bin/notacommand'
def __init__(self, src, b_dest, file_args, module):
    """Prepare the zip-based archive handler from module parameters."""
    self.src = src
    self.b_dest = b_dest
    self.file_args = file_args
    self.opts = module.params['extra_opts']
    self.module = module
    self.io_buffer_size = module.params.get("io_buffer_size", 64 * 1024)
    self.excludes = module.params['exclude']
    self.includes = []
    self.include_files = self.module.params['include']
    # Both tools are required: unzip for extraction, zipinfo for listing.
    for attr, binary, msg in (
            ('cmd_path', 'unzip', "Unable to find required 'unzip' binary in the path"),
            ('zipinfocmd_path', 'zipinfo', "Unable to find required 'zipinfo' binary in the path")):
        try:
            setattr(self, attr, get_bin_path(binary))
        except ValueError:
            self.module.fail_json(msg=msg)
    self._files_in_archive = []
    self._infodict = dict()
def _set_base_params(self, params):
    """Build the base kubectl command line from module params."""
    kubectl = params.get('kubectl')
    if kubectl is None:
        # No explicit path given: resolve 'kubectl' from the PATH.
        kubectl = get_bin_path('kubectl')
    self.kubectl = kubectl
    self.base_cmd = [kubectl]
    host = params.get('host')
    if host:
        self.base_cmd.append('--server=' + host)
    log_level = params.get('log_level')
    if log_level:
        self.base_cmd.append('--v=' + str(log_level))
    self.force = params.get('force')
    self.wait = params.get('wait')
def is_available(self):
    ''' we expect the python bindings installed, but if there is apt/apt-get give warning about missing bindings'''
    # Whether the python-apt bindings (self.LIB) are importable.
    we_have_lib = super(APT, self).is_available()
    if not we_have_lib:
        for exe in ('apt', 'apt-get', 'aptitude'):
            try:
                # Raises ValueError when the tool is absent; try the next one.
                get_bin_path(exe)
            except ValueError:
                continue
            else:
                if not has_respawned():
                    # try to locate an interpreter with the necessary lib
                    interpreters = ['/usr/bin/python3', '/usr/bin/python2']
                    interpreter_path = probe_interpreters_for_module(interpreters, self.LIB)
                    if interpreter_path:
                        respawn_module(interpreter_path)
                        # end of the line for this process; this module will exit here when respawned copy completes
                # Reached only when no respawn happened: warn about the
                # first apt tool found, then stop probing.
                module.warn('Found "%s" but %s' % (exe, missing_required_lib('apt')))
                break
    return we_have_lib
def _connect(self):
    ''' connect to the chroot '''
    if os.path.isabs(self.get_option('chroot_exe')):
        self.chroot_cmd = self.get_option('chroot_exe')
    else:
        try:
            self.chroot_cmd = get_bin_path(self.get_option('chroot_exe'))
        except ValueError as e:
            # BUG FIX: get_bin_path() raises ValueError rather than
            # returning None, so the old `if not self.chroot_cmd` check was
            # dead code and the raw ValueError escaped to the caller.
            # Surface it as an AnsibleError instead.
            raise AnsibleError(to_native(e))
    super(Connection, self)._connect()
    if not self._connected:
        display.vvv("THIS IS A LOCAL CHROOT DIR", host=self.chroot)
        self._connected = True
def _connect(self):
    """Establish (mark up) the connection to the chroot directory."""
    exe = self.get_option('chroot_exe')
    if os.path.isabs(exe):
        # Absolute paths are trusted as-is.
        self.chroot_cmd = exe
    else:
        try:
            self.chroot_cmd = get_bin_path(exe)
        except ValueError as e:
            raise AnsibleError(to_native(e))
    super(Connection, self)._connect()
    if not self._connected:
        display.vvv("THIS IS A LOCAL CHROOT DIR", host=self.chroot)
        self._connected = True
def __init__(self):
    """Initialise the speech-synthesis callback; disabled when no tool exists."""
    super(CallbackModule, self).__init__()
    self.FAILED_VOICE = None
    self.REGULAR_VOICE = None
    self.HAPPY_VOICE = None
    self.LASER_VOICE = None
    self.synthesizer = None
    try:
        self.synthesizer = get_bin_path('say')
        if platform.system() == 'Darwin':
            self.FAILED_VOICE = 'Zarvox'
            self.REGULAR_VOICE = 'Trinoids'
            self.HAPPY_VOICE = 'Cellos'
            self.LASER_VOICE = 'Princess'
        else:
            # 'say' binary available, it might be GNUstep tool which doesn't support 'voice' parameter
            self._display.warning("'say' executable found but system is '%s': ignoring voice parameter" % platform.system())
    except ValueError:
        try:
            self.synthesizer = get_bin_path('espeak')
            self.FAILED_VOICE = 'klatt'
            self.HAPPY_VOICE = 'f5'
            self.LASER_VOICE = 'whisper'
        except ValueError:
            pass
    # With neither 'say' nor 'espeak' found the plugin disables itself;
    # ansible will not call any callback if disabled is set to True.
    if not self.synthesizer:
        self.disabled = True
        self._display.warning("Unable to find either 'say' or 'espeak' executable, plugin %s disabled" % os.path.basename(__file__))
def _extract_collection_from_git(repo_url, coll_ver, b_path):
    # Clone a collection's Git repo into a temp dir under b_path, check out
    # the requested revision, and return the (bytes) path to the collection
    # root (the fragment subdirectory, when one is given in the URL).
    name, version, git_url, fragment = parse_scm(repo_url, coll_ver)
    b_checkout_path = mkdtemp(
        dir=b_path,
        prefix=to_bytes(name, errors='surrogate_or_strict'),
    )  # type: bytes
    try:
        git_executable = get_bin_path('git')
    except ValueError as err:
        raise AnsibleError(
            "Could not find git executable to extract the collection from the Git repository `{repo_url!s}`."
            .format(repo_url=to_native(git_url))
        ) from err
    # Perform a shallow clone if simply cloning HEAD
    if version == 'HEAD':
        git_clone_cmd = git_executable, 'clone', '--depth=1', git_url, to_text(b_checkout_path)
    else:
        # A full clone is needed so the later checkout can reach any revision.
        git_clone_cmd = git_executable, 'clone', git_url, to_text(b_checkout_path)
        # FIXME: '--branch', version
    try:
        subprocess.check_call(git_clone_cmd)
    except subprocess.CalledProcessError as proc_err:
        raise_from(  # should probably be LookupError
            AnsibleError(
                'Failed to clone a Git repository from `{repo_url!s}`.'.format(repo_url=to_native(git_url)),
            ),
            proc_err,
        )
    git_switch_cmd = git_executable, 'checkout', to_text(version)
    try:
        subprocess.check_call(git_switch_cmd, cwd=b_checkout_path)
    except subprocess.CalledProcessError as proc_err:
        raise_from(  # should probably be LookupError
            AnsibleError(
                'Failed to switch a cloned Git repo `{repo_url!s}` '
                'to the requested revision `{commitish!s}`.'.format(
                    commitish=to_native(version),
                    repo_url=to_native(git_url),
                ),
            ),
            proc_err,
        )
    return (
        os.path.join(b_checkout_path, to_bytes(fragment))
        if fragment else b_checkout_path
    )
def test_get_bin_path(mocker):
    """The second PATH entry (/usr/local/bin) is reported as the match."""
    search_path = ':'.join(['/usr/local/sbin', '/usr/local/bin', '/usr/sbin', '/usr/bin', '/sbin', '/bin'])
    mocker.patch.dict('os.environ', {'PATH': search_path})
    mocker.patch('os.pathsep', ':')
    mocker.patch('os.path.isdir', return_value=False)
    mocker.patch('ansible.module_utils.common.process.is_executable', return_value=True)
    # pytest-mock 2.0.0 will throw when os.path.exists is messed with
    # and then another method is patched afterwards. Likely
    # something in the pytest-mock chain uses os.path.exists internally, and
    # since pytest-mock prohibits context-specific patching, there's not a
    # good solution. For now, just patch os.path.exists last.
    mocker.patch('os.path.exists', side_effect=[False, True])
    assert get_bin_path('notacommand') == '/usr/local/bin/notacommand'
def can_handle_archive(self):
    """Verify the required binaries exist and the tool can list the archive.

    Returns a ``(bool, message)`` tuple: ``(True, None)`` on success,
    otherwise ``(False, reason)``.
    """
    not_found = []
    # self.binaries holds (binary_name, attribute_name) pairs.
    for binary, attr in self.binaries:
        try:
            setattr(self, attr, get_bin_path(binary))
        except ValueError:
            not_found.append(binary)
    if not_found:
        return False, "Unable to find required '{missing}' binary in the path.".format(missing="' or '".join(not_found))
    rc, out, err = self.module.run_command([self.cmd_path, '-l', self.src])
    if rc == 0:
        return True, None
    return False, 'Command "%s" could not handle archive: %s' % (self.cmd_path, err)
def _run_command(self, args):
    """Run docker-machine with *args* and return its stripped text output."""
    if not self.DOCKER_MACHINE_PATH:
        # Resolve and memoise the docker-machine binary on first use.
        try:
            self.DOCKER_MACHINE_PATH = get_bin_path('docker-machine')
        except ValueError as e:
            raise AnsibleError(to_native(e))
    command = [self.DOCKER_MACHINE_PATH] + list(args)
    display.debug('Executing command {0}'.format(command))
    try:
        output = subprocess.check_output(command)
    except subprocess.CalledProcessError as e:
        display.warning('Exception {0} caught while executing command {1}, this was the original exception: {2}'.format(type(e).__name__, command, e))
        raise e
    return to_text(output).strip()
def get_memory_facts(self):
    # Gather macOS memory facts (totals in MB) from sysctl and `vm_stat`.
    memory_facts = {
        'memtotal_mb': int(self.sysctl['hw.memsize']) // 1024 // 1024,
        'memfree_mb': 0,
    }
    total_used = 0
    # assumes the vm page size is 4 KiB -- TODO confirm against `vm_stat` header
    page_size = 4096
    try:
        vm_stat_command = get_bin_path('vm_stat')
    except ValueError:
        # Without vm_stat we can still report the total; free stays 0.
        return memory_facts
    rc, out, err = self.module.run_command(vm_stat_command)
    if rc == 0:
        # Free = Total - (Wired + active + inactive)
        # Get a generator of tuples from the command output so we can later
        # turn it into a dictionary
        memory_stats = (line.rstrip('.').split(':', 1) for line in out.splitlines())
        # Strip extra left spaces from the value
        memory_stats = dict((k, v.lstrip()) for k, v in memory_stats)
        for k, v in memory_stats.items():
            try:
                memory_stats[k] = int(v)
            except ValueError:
                # Most values convert cleanly to integer values but if the field does
                # not convert to an integer, just leave it alone.
                pass
        if memory_stats.get('Pages wired down'):
            total_used += memory_stats['Pages wired down'] * page_size
        if memory_stats.get('Pages active'):
            total_used += memory_stats['Pages active'] * page_size
        if memory_stats.get('Pages inactive'):
            total_used += memory_stats['Pages inactive'] * page_size
        memory_facts['memfree_mb'] = memory_facts['memtotal_mb'] - (total_used // 1024 // 1024)
    return memory_facts