def download_py_package():
    """Download package which needs to be analyzed by future steps.

    Downloads the source distribution of the configured package into WORKDIR
    via pip and extracts it into WORKDIR/package.

    Raises:
        FileNotFoundError: when no sdist (.tar.gz or .zip) was downloaded.
    """
    command = (
        f"pip download --no-binary=:all: --no-deps -d {WORKDIR} -i {Configuration.PACKAGE_INDEX} "
        f"{Configuration.PACKAGE_NAME}==={Configuration.PACKAGE_VERSION}"
    )
    run_command(command)
    for f in os.listdir(WORKDIR):
        if f.endswith(".tar.gz"):
            full_path = os.path.join(WORKDIR, f)
            # Use a context manager so the archive is always closed and
            # extract into WORKDIR - the original referenced an undefined
            # name `d` here, which raised NameError for tarballs.
            with tarfile.open(full_path, "r:gz") as tar:
                tar.extractall(os.path.join(WORKDIR, "package"))
            break
        elif f.endswith(".zip"):
            full_path = os.path.join(WORKDIR, f)
            with zipfile.ZipFile(full_path, 'r') as zip_ref:
                zip_ref.extractall(os.path.join(WORKDIR, "package"))
            break
    else:
        raise FileNotFoundError(
            f"No source distribution found for {Configuration.PACKAGE_NAME}==={Configuration.PACKAGE_VERSION} on "
            f"index {Configuration.PACKAGE_INDEX}."
        )
def _install_requirement(python_bin: str, package: str, version: str = None, index_url: str = None, clean: bool = True) -> None:
    """Install requirements specified using suggested pip binary.

    Installs ``package`` (optionally pinned to ``version`` and fetched from
    ``index_url``) with the given interpreter's pip, yields control to the
    caller, and on exit optionally uninstalls the package and reinstalls the
    version that was present before.

    NOTE(review): this function contains ``yield`` and is therefore a
    generator, presumably wrapped by ``contextlib.contextmanager`` at the
    decorator site - confirm; the ``-> None`` annotation does not reflect
    that.
    """
    # Snapshot what is currently installed so the environment can be
    # restored in the finally block.
    previous_version = _pipdeptree(python_bin, package)
    try:
        cmd = "{} -m pip install --force-reinstall --no-cache-dir --no-deps {}".format(
            python_bin, quote(package))
        if version:
            cmd += "=={}".format(quote(version))
        if index_url:
            cmd += ' --index-url "{}" '.format(quote(index_url))
            # Supply trusted host by default so we do not get errors - it is
            # safe to do it here as package indexes are managed by Thoth.
            trusted_host = urlparse(index_url).netloc
            cmd += " --trusted-host {}".format(trusted_host)
        _LOGGER.debug("Installing requirement %r in version %r", package, version)
        run_command(cmd)
        yield
    finally:
        if clean:
            # Best-effort cleanup: failures are logged as warnings, never
            # raised, so the caller's own error handling is not masked.
            _LOGGER.debug("Removing installed package %r", package)
            cmd = "{} -m pip uninstall --yes {}".format(
                python_bin, quote(package))
            result = run_command(cmd, raise_on_error=False)
            if result.return_code != 0:
                _LOGGER.warning(
                    "Failed to restore previous environment by removing package %r (installed version %r), "
                    "the error is not fatal but can affect future actions: %s",
                    package,
                    version,
                    result.stderr,
                )
            _LOGGER.debug(
                "Restoring previous environment setup after installation of %r (%s)",
                package, previous_version)
            if previous_version:
                # Reinstall the version recorded by the earlier _pipdeptree call.
                cmd = "{} -m pip install --force-reinstall --no-cache-dir --no-deps {}=={}".format(
                    python_bin, quote(package),
                    quote(previous_version["package"]["installed_version"]))
                result = run_command(cmd, raise_on_error=False)
                if result.return_code != 0:
                    _LOGGER.warning(
                        "Failed to restore previous environment for package %r (installed version %r), "
                        ", the error is not fatal but can affect future actions (previous version: %r): %s",
                        package,
                        version,
                        previous_version,
                        result.stderr,
                    )
def si_bandit(
    click_ctx,
    output: Optional[str],
    from_directory: Optional[str],
    package_name: str,
    package_version: Optional[str],
    package_index: Optional[str],
    no_pretty: bool,
):
    """Run the cli for si-bandit.

    When no directory is supplied, downloads and extracts the package sdist
    into a temporary directory first, then runs bandit on it and enriches
    the result with the package release date.
    """
    if from_directory is None:
        with tempfile.TemporaryDirectory() as d:
            command = (
                f"pip download --no-binary=:all: --no-deps -d {d} -i {package_index} "
                f"{package_name}==={package_version}")
            run_command(command)
            for f in os.listdir(d):
                if f.endswith(".tar.gz"):
                    full_path = os.path.join(d, f)
                    # Context manager so the archive handle is always closed
                    # (the original leaked the open tarfile).
                    with tarfile.open(full_path, "r:gz") as tar:
                        tar.extractall(os.path.join(d, "package"))
                    from_directory = os.path.join(d, "package")
                    break
            else:
                raise FileNotFoundError(
                    f"No source distribution found for {package_name}==={package_version} "
                    f"on {package_index}")
            # Run bandit while the temporary directory still exists.
            out = _run_bandit(from_directory)
    else:
        out = _run_bandit(from_directory)
    if out is None:
        raise RuntimeError("output of bandit is empty")
    source_dict = {
        "url": package_index,
        "name": "foo",
        "verify_ssl": False,
        "warehouse": True
    }
    source = Source.from_dict(source_dict)
    # Release date lookup is best-effort; any failure degrades to None.
    try:
        out["time_of_release"] = source.get_package_release_date(
            package_name=package_name, package_version=package_version)
    except Exception:
        out["time_of_release"] = None
    print_command_result(
        click_ctx=click_ctx,
        result=out,
        analyzer=si_bandit_title,
        analyzer_version=si_bandit_version,
        output=output,
        duration=None,
        pretty=not no_pretty,
    )
def resolve(
    requirements: typing.List[str],
    index_urls: list = None,
    python_version: int = 3,
    exclude_packages: set = None,
    transitive: bool = True,
    subgraph_check_api: str = None,
) -> dict:
    """Resolve given requirements for the given Python version.

    Creates a throw-away virtualenv for the requested interpreter, installs
    pipdeptree into it and runs one resolution pass per package index.

    Returns a dict with "tree", "errors", "unparsed", "unresolved" and
    "environment" keys aggregated across all indexes.
    """
    assert python_version in (2, 3), "Unknown Python version"

    if subgraph_check_api and not transitive:
        _LOGGER.warning(
            "The check against subgraph API cannot be done if no transitive dependencies are "
            "resolved, sub-graph checks are turned off implicitly")
        subgraph_check_api = None

    python_bin = "python3" if python_version == 3 else "python2"
    # Create the virtualenv with the interpreter actually requested - the
    # original always passed `-p python3` regardless of python_version.
    run_command("virtualenv -p {} venv".format(python_bin))
    python_bin = "venv/bin/" + python_bin
    run_command("{} -m pip install pipdeptree".format(python_bin))

    environment_details = _get_environment_details(python_bin)

    result = {
        "tree": [],
        "errors": [],
        "unparsed": [],
        "unresolved": [],
        "environment": environment_details
    }

    all_solvers = []
    # Guard against the default None so iteration does not raise TypeError.
    for index_url in index_urls or []:
        source = Source(index_url)
        all_solvers.append(PythonSolver(fetcher_kwargs={"source": source}))

    for solver in all_solvers:
        solver_result = _do_resolve_index(
            python_bin=python_bin,
            solver=solver,
            all_solvers=all_solvers,
            requirements=requirements,
            exclude_packages=exclude_packages,
            transitive=transitive,
            subgraph_check_api=subgraph_check_api,
        )
        result["tree"].extend(solver_result["tree"])
        result["errors"].extend(solver_result["errors"])
        result["unparsed"].extend(solver_result["unparsed"])
        result["unresolved"].extend(solver_result["unresolved"])

    return result
def resolve(requirements, index_urls, python_version, exclude_packages, transitive, virtualenv):
    # type: (List[str], List[str], int, Optional[Set[str]], bool, Optional[str]) -> Dict[str, Any]
    """Resolve given requirements for the given Python version.

    Runs one resolution pass per package index and aggregates the results
    into a single report dict.
    """
    assert python_version in (2, 3), "Unknown Python version"

    interpreter = "python3" if python_version == 3 else "python2"
    if virtualenv:
        # Reuse the caller-provided virtual environment.
        interpreter = os.path.join(virtualenv, "bin", interpreter)
    else:
        # No environment supplied - create a fresh one and make sure
        # pipdeptree is available inside it.
        run_command("virtualenv -p " + interpreter + " venv")
        interpreter = os.path.join("venv", "bin", interpreter)
        run_command("{} -m pip install pipdeptree".format(interpreter))

    environment_packages = get_environment_packages(interpreter)

    report = {
        "tree": [],
        "errors": [],
        "unparsed": [],
        "unresolved": [],
        "environment": default_environment(),
        "environment_packages": environment_packages,
        "platform": sysconfig.get_platform(),
    }

    solvers = []
    for url in index_urls:
        source = Source(url)
        from .python_solver import PythonReleasesFetcher
        solvers.append(
            PythonSolver(
                dependency_parser=PythonDependencyParser(),
                releases_fetcher=PythonReleasesFetcher(source=source),
            ),
        )

    for solver in solvers:
        partial = _do_resolve_index(
            python_bin=interpreter,
            solver=solver,
            all_solvers=solvers,
            requirements=requirements,
            exclude_packages=exclude_packages,
            transitive=transitive,
        )
        # Merge this index's findings into the aggregate report.
        for key in ("tree", "errors", "unparsed", "unresolved"):
            report[key].extend(partial[key])  # type: ignore

    return report
def si_cloc(
    click_ctx,
    output: Optional[str],
    from_directory: Optional[str],
    package_name: str,
    package_version: Optional[str],
    package_index: Optional[str],
    no_pretty: bool,
):
    """Run the cli for si-cloc.

    Counts lines of code either in a supplied directory or in the package
    sdist downloaded into a temporary directory.
    """
    if from_directory is not None:
        out = run_command(f"cloc {from_directory} --json", is_json=True)
    else:
        with tempfile.TemporaryDirectory() as tmp_dir:
            run_command(
                f"pip download --no-binary=:all: --no-deps -d {tmp_dir} -i {package_index} "
                f"{package_name}==={package_version}")
            for file_name in os.listdir(tmp_dir):
                if file_name.endswith(".tar.gz"):
                    sdist_path = os.path.join(tmp_dir, file_name)
                    break
            else:
                raise FileNotFoundError(
                    f"No source distribution found for {package_name}==={package_version} "
                    f"on {package_index}")
            # cloc extracts the tarball itself via the --extract-with hook.
            out = run_command(
                f"cloc --extract-with='gzip -dc >FILE< | tar xf -' {sdist_path} --json",
                is_json=True)

    results = out.stdout
    if results is None:
        # cloc produced nothing - report the failure instead of crashing.
        results = {"error": True, "error_messages": [out.stderr]}
        _LOGGER.warning(
            "cloc output is empty with the following in stderr:\n%s", out.stderr)
    else:
        results["error"] = False

    print_command_result(
        click_ctx=click_ctx,
        result=results,
        analyzer=__title__,
        analyzer_version=__version__,
        output=output,
        duration=None,
        pretty=not no_pretty,
    )
def get_image_metadata(image_name: str, *, registry_user: str = None, registry_password: str = None, verify_tls: bool = True) -> dict:
    """Get metadata for the given image and image repository.

    Runs `skopeo inspect` and translates the resulting keys via
    _TRANSLATION_TABLE.

    Raises:
        NotImplementedError: if only one of registry_user/registry_password is given.
        ImageManifestUnknownError: if the image manifest is unknown.
        ImageAuthenticationRequired: if the registry requires authentication.
        ImageError: for any other skopeo failure.
    """
    cmd = 'skopeo inspect '

    if registry_user and registry_password:
        # TODO: make sure registry_user and registry_password get escaped.
        cmd += f'--creds={registry_user}:{registry_password} '
    elif registry_user or registry_password:
        raise NotImplementedError(
            "Both parameters registry_user and registry_password have to be supplied for registry authentication"
        )

    if not verify_tls:
        # `skopeo inspect` takes --tls-verify; --src-tls-verify belongs to
        # `skopeo copy`.  Options must also precede the image reference, not
        # follow it as in the original.
        cmd += '--tls-verify=false '

    cmd += f'docker://{image_name!r}'

    result = run_command(cmd, is_json=True, raise_on_error=False)
    if result.return_code == 0:
        result_dict = {}
        for key, value in result.stdout.items():
            result_dict[_TRANSLATION_TABLE[key]] = value
        return result_dict

    if 'manifest unknown' in result.stderr:
        raise ImageManifestUnknownError("Unknown manifest for the given image")
    elif 'unauthorized: authentication required' in result.stderr:
        raise ImageAuthenticationRequired("There is required authentication in order to pull image and image details")

    _LOGGER.warning("An unhandled error occurred during extraction of image %r: %s", image_name, result.stderr)
    raise ImageError(
        "There was an error when extracting image information, please contact administrator for more details"
    )
def _get_lib_dir_symbols(result, root_dir):
    """Get library symbols from a directory.

    Scans shared objects directly under root_dir with `nm -D` and records
    their exported symbol versions into the `result` mapping keyed by the
    library path relative to root_dir.
    """
    for lib_path in glob.glob(os.path.join(root_dir, "*.so*")):
        # Exported symbols appear in `nm -D` output as e.g.
        #   00000000 A GLIBC_1.x      or
        #   0000000000000000 A GLIBC_1.x
        # so grepping for '0 A' keeps only those lines.
        command = f"nm -D {lib_path!r} | grep '0 A'"

        # Report paths relative to the extracted container root.
        rel_path = lib_path[len(root_dir) + 1:]
        _LOGGER.debug("Gathering symbols from %r", rel_path)

        nm_result = run_command(command, timeout=120, raise_on_error=False)
        if nm_result.return_code != 0:
            _LOGGER.warning(
                "Failed to obtain library symbols from %r; stderr: %s, stdout: %s",
                rel_path, nm_result.stderr, nm_result.stdout)
            continue

        symbols = result.setdefault(rel_path, set())
        for line in nm_result.stdout.splitlines():
            columns = line.split(' ')
            if len(columns) > 2:
                symbols.add(columns[2])
def discover_cuda_version(interactive: bool = False) -> typing.Optional[str]:
    """Check for CUDA version, if no CUDA is installed, return None."""
    if bool(int(os.getenv("THAMOS_DISABLE_CUDA", 0))):
        _LOGGER.debug(
            "Disabling CUDA based on THAMOS_DISABLE_CUDA environment variable that is set to %r",
            os.environ["THAMOS_DISABLE_CUDA"],
        )
        return None

    nvcc = run_command("nvcc --version", raise_on_error=False)
    if nvcc.return_code != 0:
        _LOGGER.info("No CUDA version detected")
        _LOGGER.debug(
            "Unable to detect CUDA version - nvcc returned non-zero version: %s",
            nvcc.to_dict(),
        )
        return None

    # The release is announced on the last line, e.g.:
    #   Cuda compilation tools, release 10.1, V10.1.168
    last_line_fields = nvcc.stdout.splitlines()[-1].split(",")
    if len(last_line_fields) != 3:
        _LOGGER.debug(
            "Unable to detect CUDA version from nvcc output: %r", nvcc.stdout
        )
        return None

    detected = last_line_fields[1].strip()[len("release "):]
    if interactive:
        # Let the user confirm or override the detected value.
        detected = click.prompt("Please select CUDA version", default=detected)

    _LOGGER.info("Detected CUDA version: %r", detected)
    return detected
def _get_python_interpreters(path: str) -> List[dict]:
    """Find all python interpreters and symlinks.

    Returns a list of dicts with keys "path", "link" and "version"; paths are
    reported relative to the extracted image root ``path``.
    """
    result = []
    for py_path in glob.glob("{}/usr/bin/python*".format(path)):
        version_ = None
        try:
            # Preserve the existing mode bits and only add the executable
            # flag - the original `os.chmod(py_path, stat.S_IEXEC)` replaced
            # the whole mode, stripping read/write permissions (siblings in
            # this codebase already use st.st_mode | stat.S_IEXEC).
            st = os.stat(py_path)
            os.chmod(py_path, st.st_mode | stat.S_IEXEC)
            line = run_command("{} --version".format(py_path), timeout=2).stdout
            parts = line.split(maxsplit=2)
            # Only accept output that looks like "Python X.Y.Z".
            if len(parts) == 2 and parts[0] == "Python":
                version_ = line.rstrip()
        except Exception as exc:
            _LOGGER.warning(
                "Failed to run %s --version to gather python interpreter version: %s",
                py_path,
                str(exc),
            )

        absolute_link = _get_absolute_link(path, py_path, 0)
        if absolute_link is not None:
            # Strip the extraction root so the link is image-relative.
            absolute_link = absolute_link[len(path):]

        result.append({
            "path": py_path[len(path):],
            "link": absolute_link,
            "version": version_,
        })

    return result
def _run_rpm(path: str, timeout: int = None) -> typing.List[str]:
    """Query for installed rpm packages in the given root described by path."""
    query_output = run_command(
        "rpm -qa --root {!r}".format(path), timeout=timeout
    ).stdout
    packages = query_output.split("\n")
    # A trailing newline in the output yields one empty final entry - drop it.
    if not packages[-1]:
        packages.pop()
    return packages
def _run_bandit(from_directory: str) -> Optional[Dict[str, Any]]:
    """Run bandit recursively on the given directory and return its JSON report."""
    bandit_result = run_command(
        f"bandit -r -f json {from_directory}",
        is_json=True,
        raise_on_error=False,
    )
    report = bandit_result.stdout
    if report is None:
        # No parsable JSON produced - surface bandit's stderr to the caller.
        raise Exception(bandit_result.stderr)
    return report
def si_bandit(
    click_ctx,
    output: Optional[str],
    from_directory: Optional[str],
    package_name: str,
    package_version: Optional[str],
    package_index: Optional[str],
    no_pretty: bool,
):
    """Run the cli for si-bandit.

    When no directory is supplied, downloads and extracts the package sdist
    into a temporary directory first, then runs bandit on it.
    """
    if from_directory is None:
        with tempfile.TemporaryDirectory() as d:
            command = (
                f"pip download --no-binary=:all: --no-deps -d {d} -i {package_index} "
                f"{package_name}==={package_version}")
            run_command(command)
            for f in os.listdir(d):
                if f.endswith(".tar.gz"):
                    full_path = os.path.join(d, f)
                    # Context manager so the archive handle is always closed
                    # (the original leaked the open tarfile).
                    with tarfile.open(full_path, "r:gz") as tar:
                        tar.extractall(os.path.join(d, "package"))
                    from_directory = os.path.join(d, "package")
                    break
            else:
                raise FileNotFoundError(
                    f"No source distribution found for {package_name}==={package_version} "
                    f"on {package_index}")
            # Run bandit while the temporary directory still exists.
            out = _run_bandit(from_directory)
    else:
        out = _run_bandit(from_directory)

    # Enrich the report with provenance metadata.
    out["package_name"] = package_name
    out["package_version"] = package_version
    out["bandit_version"] = bandit_version
    out["package_index"] = package_index

    print_command_result(
        click_ctx=click_ctx,
        result=out,
        analyzer=si_bandit_title,
        analyzer_version=si_bandit_version,
        output=output,
        duration=None,
        pretty=not no_pretty,
    )
def main():
    """Perform graph backup job."""
    _LOGGER.debug("Debug mode is on.")

    store = GraphBackupStore()
    store.connect()

    _LOGGER.info("Starting creation of the database dump")
    dump_cmd = (
        f"pg_dump -h {KNOWLEDGE_GRAPH_HOST} -p {KNOWLEDGE_GRAPH_PORT} "
        f"-U {KNOWLEDGE_GRAPH_USER} -d {KNOWLEDGE_GRAPH_DATABASE} -f pg_dump.sql"
    )
    # The password is handed to pg_dump via the PGPASSWORD environment
    # variable; no timeout as dumps of large databases can take a while.
    run_command(
        dump_cmd,
        env={"PGPASSWORD": os.getenv("KNOWLEDGE_GRAPH_PASSWORD", "postgres")},
        timeout=None,
    )

    _LOGGER.info("Uploading the database dump")
    object_id = store.store_dump("pg_dump.sql")
    _LOGGER.info("The database dump is available at %s/%s", store.prefix, object_id)
    _LOGGER.info("Graph backup task is done.")
def _get_cuda_version(path: str) -> dict:
    """Get the cuda version.

    Tries two independent sources inside the extracted image rooted at
    ``path``: the /usr/local/cuda/version.txt file and the nvcc binary.
    Returns a dict that may carry "/usr/local/cuda/version.txt" and/or
    "nvcc_version" keys.
    """
    res = {}

    # Gathering version from version.txt file.
    version_path = os.path.join(path, "usr/local/cuda/version.txt")
    if os.path.isfile(version_path):
        with open(version_path, "r") as f:
            for line in f.readlines():
                if line.startswith("CUDA Version"):
                    res["/usr/local/cuda/version.txt"] = line[
                        len("CUDA Version"):].strip()
                    break
        if res.get("/usr/local/cuda/version.txt") is not None:
            _LOGGER.info(
                "CUDA version %s was identified in file version.txt",
                res["/usr/local/cuda/version.txt"],
            )
        else:
            _LOGGER.warning(
                "No CUDA version identifier was found in file version.txt")
    else:
        _LOGGER.info("No version.txt file was found to detect CUDA version")

    # Gathering version from nvcc command.  Pass root and relative path as
    # separate arguments - the original passed one pre-concatenated string to
    # os.path.join, making the call a no-op.
    nvcc_path = os.path.join(path, "usr/local/cuda/bin/nvcc")
    if os.path.exists(nvcc_path):
        # Make sure nvcc is executable after extraction.
        st = os.stat(nvcc_path)
        os.chmod(nvcc_path, st.st_mode | stat.S_IEXEC)
        result = run_command("{} --version".format(nvcc_path), raise_on_error=False)
        if result.return_code != 0:
            _LOGGER.warning(
                "Unable to detect CUDA version - nvcc returned non-zero exit code: %s",
                result.to_dict(),
            )
        else:
            # Expected line: "Cuda compilation tools, release 10.1, V10.1.168"
            for line in result.stdout.splitlines():
                version = line.rsplit(", ", maxsplit=1)[-1]
                if line.startswith(
                        "Cuda compilation tools") and version.startswith("V"):
                    res["nvcc_version"] = version[1:]
                    break
            if res.get("nvcc_version") is not None:
                _LOGGER.info("Detected CUDA version %s from nvcc output",
                             res["nvcc_version"])
            else:
                _LOGGER.debug(
                    "Unable to detect CUDA version from nvcc output: %r",
                    result.stdout)
    else:
        _LOGGER.info("No nvcc executable was found to detect CUDA version")

    return res
def _run_bandit(from_directory: str) -> Optional[Dict[str, Any]]:
    """Run bandit on the given directory and normalize its error reporting.

    Returns bandit's JSON report extended with an "error" flag and, on
    failure, "error_messages".
    """
    results = run_command(f"bandit -r -f json {from_directory}",
                          is_json=True,
                          raise_on_error=False)
    out = results.stdout  # type: dict
    if out is None:
        # bandit produced no JSON at all - report its stderr.
        out = {"error_messages": [results.stderr], "error": True}
    elif out["errors"] != []:
        out["error_messages"] = out.pop("errors")
        out["error"] = True
    else:
        # The original set out["error"] = False unconditionally here, which
        # clobbered the True flag assigned in both failure branches above.
        out["error"] = False
    return out
def _run_mercator(path: str, timeout: int = None) -> dict:
    """Run mercator-go to find all packages that were installed inside an image."""
    cmd = '{mercator_bin} -config {mercator_handlers_yaml} {path}'.format(
        mercator_bin=_MERCATOR_BIN,
        mercator_handlers_yaml=_MERCATOR_HANDLERS_YAML,
        path=path,
    )
    # MERCATOR_INTERPRET_SETUP_PY presumably makes mercator evaluate
    # setup.py files - confirm against mercator-go docs.
    command_result = run_command(
        cmd,
        env={'MERCATOR_INTERPRET_SETUP_PY': 'true'},
        timeout=timeout,
        is_json=True,
    )
    return _normalize_mercator_output(path, command_result.stdout)
def get_image_metadata(image_name: str, *, registry_user: str = None, registry_password: str = None, verify_tls: bool = True) -> dict:
    """Get metadata for the given image and image repository.

    Runs `skopeo inspect` and translates the resulting keys via
    _TRANSLATION_TABLE.

    Raises:
        ImageBadRequestError: if only one of registry_user/registry_password is given.
        ImageManifestUnknownError: if the image manifest is unknown.
        ImageAuthenticationRequired: on authentication/certificate problems.
        ImageInvalidCredentials: on invalid username/password.
        ImageError: for any other skopeo failure.
    """
    cmd = "skopeo inspect "
    if registry_user and registry_password:
        # Fixed two command-construction bugs from the original: a stray ')'
        # was embedded inside the credentials string, and the missing
        # trailing space fused --creds with the following argument.
        credentials = shlex.quote(f"{registry_user}:{registry_password}")
        cmd += f"--creds {credentials} "
    elif (registry_user and not registry_password) or (not registry_user and registry_password):
        raise ImageBadRequestError(
            "Both parameters registry_user and registry_password have to be supplied for registry authentication"
        )

    if not verify_tls:
        cmd += "--tls-verify=false "

    cmd += f"docker://{image_name!r}"

    result = run_command(cmd, is_json=True, raise_on_error=False)
    if result.return_code == 0:
        result_dict = {}
        for key, value in result.stdout.items():
            result_dict[_TRANSLATION_TABLE[key]] = value
        return result_dict

    if "manifest unknown" in result.stderr:
        raise ImageManifestUnknownError("Unknown manifest for the given image")
    elif "unauthorized: authentication required" in result.stderr:
        raise ImageAuthenticationRequired(
            "There is required authentication in order to pull image and image details"
        )
    elif "x509: certificate signed by unknown authority" in result.stderr:
        raise ImageAuthenticationRequired(
            "There was an error with x509 certification check: certificate signed by unknown authority"
        )
    elif "unable to retrieve auth token: invalid username/password" in result.stderr:
        raise ImageInvalidCredentials(
            "There was an error accessing the image as the username/password provided was invalid"
        )

    _LOGGER.error(
        "An unhandled error occurred during extraction of image %r: %s",
        image_name, result.stderr)
    raise ImageError(
        "There was an error when extracting image information, please contact administrator for more details"
    )
def push(
    src: str,
    dst: str,
    user_src: Optional[str] = None,
    pass_src: Optional[str] = None,
    user_dst: Optional[str] = None,
    pass_dst: Optional[str] = None,
):
    """Push a container image from a source to an external registry.

    Returns the destination reference the image was pushed to.
    """
    # Build the command as `skopeo copy [flags] docker://SRC docker://DST`.
    # The original seeded cmd with "copy {src} {dst}" and then appended the
    # docker:// references again at the end, duplicating the positional
    # arguments; it also omitted the space separating `copy` from the first
    # credentials flag.
    cmd = f"{_SKOPEO_EXEC_PATH} copy "
    if user_src:
        cmd += f"--src-creds={user_src}"
        if pass_src:
            cmd += f":{pass_src}"
        cmd += " "
    if user_dst:
        cmd += f"--dest-creds={user_dst}"
        if pass_dst:
            cmd += f":{pass_dst}"
        cmd += " "

    image_name = src.rsplit("/", maxsplit=1)[1]
    if "quay.io" in dst:
        # quay.io destinations are addressed by tag; turn a digest reference
        # into a tag-safe name.
        image_name = image_name.replace("@sha256", "")
        output = f"{dst}:{image_name.replace(':','-')}"
    else:
        output = f"{dst}/{image_name}"

    _LOGGER.debug("Pushing image %r from %r to registry %r, output is %r", image_name, src, dst, output)
    cmd += f"docker://{src} docker://{output}"
    _LOGGER.debug("Running: %s", cmd)
    try:
        command = run_command(cmd)
        _LOGGER.debug("%s stdout:\n%s\n%s", _SKOPEO_EXEC_PATH, command.stdout, command.stderr)
    except CommandError as exc:
        _LOGGER.exception("Failed to push image %r to external registry: %s", image_name, str(exc))
        raise
    return output
def _push_image(
    image: str,
    push_registry: str,
    src_registry_user: str = None,
    src_registry_password: str = None,
    dst_registry_user: str = None,
    dst_registry_password: str = None,
    src_verify_tls: bool = True,
    dst_verify_tls: bool = True,
) -> str:
    """Push the given image (fully specified with registry info) into another registry.

    Returns the destination reference the image was pushed to.
    """
    cmd = f"{_SKOPEO_EXEC_PATH} --insecure-policy copy "

    if not src_verify_tls:
        cmd += "--src-tls-verify=false "
    if not dst_verify_tls:
        cmd += "--dest-tls-verify=false "

    if dst_registry_user or dst_registry_password:
        dst_registry_user = dst_registry_user or "build-watcher"
        cmd += f"--dest-creds={dst_registry_user}"
        if dst_registry_password:
            cmd += f":{dst_registry_password}"
        cmd += " "

    if src_registry_user or src_registry_password:
        src_registry_user = src_registry_user or "build-watcher"
        cmd += f"--src-creds={src_registry_user}"
        # Fixed copy-paste bug: the original guarded on dst_registry_password
        # while appending src_registry_password, dropping (or corrupting)
        # the source password depending on the destination's.
        if src_registry_password:
            cmd += f":{src_registry_password}"
        cmd += " "

    image_name = image.rsplit("/", maxsplit=1)[1]
    output = f"{push_registry}/{image_name}"
    _LOGGER.debug("Pushing image %r from %r to registry %r, output is %r",
                  image_name, image, push_registry, output)
    cmd += f"docker://{image} docker://{output}"
    _LOGGER.debug("Running: %s", cmd)
    command = run_command(cmd)
    _LOGGER.debug("%s stdout:\n%s\n%s", _SKOPEO_EXEC_PATH, command.stdout, command.stderr)
    return output
def get_environment_packages(
        python_bin):  # type: (str) -> List[Dict[str, str]]
    """Get information about packages in environment where packages get installed.

    Returns a list of {"package_name": ..., "package_version": ...} dicts
    parsed from `pip freeze` output.
    """
    cmd = "{} -m pip freeze".format(python_bin)
    output = run_command(cmd, is_json=False).stdout.splitlines()
    result = []
    for line in output:
        # pip freeze can emit entries without an "==" pin - editable
        # installs ("-e ...") or direct references ("pkg @ url"); skip them
        # instead of crashing on tuple unpacking.
        if "==" not in line:
            continue
        package_name, package_version = line.split("==", maxsplit=1)
        result.append({
            "package_name": package_name,
            "package_version": package_version
        })
    return result
def _run_rpm_repoquery(path: str, timeout: int = None) -> list:
    """Run repoquery and return its output (parsed).

    Each entry is the parsed NVRA of an installed package extended with its
    dependency list and the original package identifier.
    """
    cmd = "repoquery --deplist --installed --installroot {!r}".format(path)
    parsed = _parse_repoquery(run_command(cmd, timeout=timeout).stdout)

    result = []
    for identifier, dependencies in parsed.items():
        entry = parse_nvra(identifier)
        entry["dependencies"] = dependencies
        # Normalize a falsy epoch to None to keep the schema uniform.
        entry["epoch"] = entry["epoch"] or None
        entry["package_identifier"] = identifier
        result.append(entry)
    return result
def _get_origin() -> typing.Optional[str]:
    """Check git origin configured."""
    result = run_command("git config --get remote.origin.url", raise_on_error=False)
    if result.return_code != 0:
        # Not a git repository or no origin configured.
        _LOGGER.debug("Failed to obtain information about git origin: %s", result.stderr)
        return None

    origin = result.stdout.strip()
    if not origin:
        return None

    _LOGGER.debug("Found git origin %r", origin)
    return origin
def download_image(image_name: str, dir_path: str, timeout: int = None, registry_credentials: str = None, tls_verify: bool = True) -> None:
    """Download an image to dir_path."""
    _LOGGER.debug("Downloading image %r", image_name)

    # Assemble the skopeo invocation piecewise, flags before the references.
    parts = ['skopeo copy ']
    if not tls_verify:
        parts.append('--src-tls-verify=false ')
    if registry_credentials:
        parts.append('--src-creds={} '.format(quote(registry_credentials)))
    parts.append('docker://{} dir:/{}'.format(quote(image_name), quote(dir_path)))

    stdout = run_command(''.join(parts), timeout=timeout).stdout
    _LOGGER.debug("skopeo stdout: %s", stdout)
def pipenv_lock(self):
    """Perform pipenv lock on the current state of project.

    Serializes the in-memory Pipfile into the working directory, runs
    `pipenv lock` there and loads the resulting lock file back into
    self.pipfile_lock.

    Raises:
        UnableLock: when pipenv fails to lock the stack.
    """
    with cwd(self.workdir):
        self.pipfile.to_file()
        _LOGGER.debug("Running pipenv lock")
        try:
            result = run_command("pipenv lock", env={"PIPENV_IGNORE_VIRTUALENVS": "1"})
        except CommandError as exc:
            # The original format string had two placeholders but three
            # arguments, so exc.stderr was never logged and the logging call
            # itself errored; a third %s fixes that.
            _LOGGER.exception(
                "Unable to lock application stack (return code: %d):\n%s\n%s",
                exc.return_code, exc.stdout, exc.stderr
            )
            raise UnableLock("Failed to perform lock") from exc
        _LOGGER.debug("pipenv stdout:\n%s", result.stdout)
        _LOGGER.debug("pipenv stderr:\n%s", result.stderr)
        self.pipfile_lock = PipfileLock.from_file(pipfile=self.pipfile)
def download_image(
    image_name: str,
    dir_path: str,
    timeout: int = None,
    registry_credentials: str = None,
    tls_verify: bool = True,
) -> None:
    """Download an image to dir_path."""
    _LOGGER.debug("Downloading image %r", image_name)

    # Collect optional flags first, then append source and destination.
    options = ""
    if not tls_verify:
        options += "--src-tls-verify=false "
    if registry_credentials:
        options += "--src-creds={} ".format(quote(registry_credentials))

    cmd = f"{_SKOPEO_EXEC_PATH} copy " + options + "docker://{} dir:/{}".format(
        quote(image_name), quote(dir_path))

    stdout = run_command(cmd, timeout=timeout).stdout
    _LOGGER.debug("%s stdout: %s", _SKOPEO_EXEC_PATH, stdout)
def execute_env_function(
    python_bin,  # type: str
    function,  # type: Union[Callable[[Any], None], Callable[[], None]]
    *,
    env=None,  # type: Optional[Dict[str, str]]
    raise_on_error=True,  # type: bool
    is_json=False,  # type: bool
    **function_arguments,  # type: Any
):
    # type: (...) -> Optional[Union[str, Dict[str, Any], List[str]]]
    """Execute the given function in Python interpreter.

    Serializes the function's source with inspect.getsource and runs it in a
    subprocess using ``python_bin -c``, passing ``function_arguments`` as
    keyword arguments.  Returns the subprocess stdout (parsed JSON when
    is_json is True), or None on failure when raise_on_error is False.
    """
    # Render keyword arguments as `name="value"` pairs for the generated call.
    # NOTE(review): values are interpolated verbatim between double quotes -
    # this assumes every value is a string without embedded quotes or
    # backslashes; confirm callers never pass anything else.
    kwargs = ""
    for argument, value in function_arguments.items():
        if kwargs:
            kwargs += ","
        kwargs += argument + '="' + value + '"'

    # The executed snippet is the function's own source followed by a call to
    # it; shlex.quote protects the whole snippet as a single -c argument.
    function_source = inspect.getsource(function)
    cmd = python_bin + " -c " + (shlex.quote(
        function_source + "\n\n" + function.__name__ + "(" + kwargs + ")"))
    _LOGGER.debug(
        "Executing the following command in Python interpreter (env: %r): %r",
        env, cmd)

    # Error handling is done manually below, hence raise_on_error=False here.
    res = run_command(cmd, env=env, is_json=is_json, raise_on_error=False)
    _LOGGER.debug("stderr during command execution: %s", res.stderr)
    if raise_on_error and res.return_code != 0:
        raise ValueError(
            "Failed to successfully execute function in Python interpreter: {}"
            .format(res.stderr))

    _LOGGER.debug("stdout during command execution: %s", res.stdout)
    if res.return_code == 0:
        stdout = res.stdout  # type: Union[str, Dict[str, Any]]
        return stdout

    # Reached only when raise_on_error is False and the subprocess failed.
    _LOGGER.error(
        "Failed to successfully execute function in Python interpreter: %r",
        res.stderr)
    return None
def _run_dpkg_query(path: str, timeout: int = None) -> typing.List[dict]:
    """Query for installed deb packages in the given root.

    Returns a list of {"name", "version", "arch"} dicts, or an empty list
    when the image carries no dpkg-query binary.
    """
    dpkg_query_path = os.path.join(path, "usr", "bin", "dpkg-query")
    if not os.path.isfile(dpkg_query_path):
        # No dpkg-query in the image - nothing to discover.
        _LOGGER.info(
            "Binary dpkg-query not found, deb packages discovery will not be performed"
        )
        return []

    # The binary may have lost its executable bit during extraction.
    st = os.stat(dpkg_query_path)
    os.chmod(dpkg_query_path, st.st_mode | stat.S_IEXEC)

    cmd = "fakeroot fakechroot /usr/sbin/chroot {!r} /usr/bin/dpkg-query -l".format(
        path)
    output = run_command(cmd, timeout=timeout).stdout

    packages = []
    for line in output.split("\n"):
        # "ii " marks an installed package row in dpkg-query -l output.
        if not line.startswith("ii "):
            _LOGGER.debug("Skipping line (not an installed package): %r", line)
            continue
        parts = line.split(maxsplit=4)
        if len(parts) < 4:
            _LOGGER.warning(
                "Line in dpkg-query output does not provide enough information to parse package name, "
                "version and architecture: %s",
                line,
            )
            continue
        packages.append({"name": parts[1], "version": parts[2], "arch": parts[3]})
    return packages
def _pipdeptree(python_bin, package_name=None, warn=False):
    # type: (str, Optional[str], bool) -> Any
    """Get pip dependency tree by executing pipdeptree tool.

    With no package_name, returns the whole tree; otherwise returns the
    entry for that package, or None if it is not installed.
    """
    cmd = "{} -m pipdeptree --json".format(python_bin)

    _LOGGER.debug("Obtaining pip dependency tree using: %r", cmd)
    tree = run_command(cmd, is_json=True).stdout  # type: List[Dict[str, Any]]

    if not package_name:
        return tree

    # Some pipdeptree versions do not support the --packages flag, so filter
    # the full output ourselves (case-insensitively).
    wanted = package_name.lower()
    for entry in tree:  # type: Dict[str, Any]
        if entry["package"]["key"].lower() == wanted:
            return entry

    # The given package was not found.
    if warn:
        _LOGGER.warning("Package %r was not found in pipdeptree output %r",
                        package_name, tree)
    return None
def _run_apt_cache_show(path: str, deb_packages: typing.List[dict], timeout: int = None) -> list: """Gather information about packages and their dependencies.""" # Make sure dpkg-query exist, give up if not. if not deb_packages: return [] apt_cache_path = os.path.join(path, "usr", "bin", "apt-cache") if not os.path.isfile(apt_cache_path): _LOGGER.warning( "Binary apt-cache not found but debian packages were discovered previously - the output will not " "provide dependency information for debian packages") return [] # Make sure dpkg-query is executable after extraction. st = os.stat(apt_cache_path) os.chmod(apt_cache_path, st.st_mode | stat.S_IEXEC) result = [] for record in deb_packages: cmd = "fakeroot fakechroot /usr/sbin/chroot {!r} /usr/bin/apt-cache show {}={}".format( path, record["name"], record["version"]) # Do not touch original deb query, extend it rather with more info to follow rpm schema. entry = dict(record) parts = entry["version"].split(":", maxsplit=1) if len(parts) == 2: try: # If it parses int, its an epoch probably. int(parts[0]) entry["epoch"] = parts[0] entry["version"] = parts[1] except ValueError: entry["epoch"] = None output = run_command(cmd, timeout=timeout).stdout entry["pre-depends"], entry["depends"], entry["replaces"] = [], [], [] for line in output.splitlines(): if line.startswith("Pre-Depends: "): deps = _parse_deb_dependency_line(line[len("Pre-Depends: "):]) entry["pre-depends"] = [{ "name": d[0], "version": d[1] } for d in deps] elif line.startswith("Depends: "): deps = _parse_deb_dependency_line(line[len("Depends: "):]) entry["depends"] = [{ "name": d[0], "version": d[1] } for d in deps] elif line.startswith("Replaces: "): deps = _parse_deb_dependency_line(line[len("Replaces: "):]) entry["replaces"] = [{ "name": d[0], "version": d[1] } for d in deps] result.append(entry) return result