Пример #1
0
def _ld_config_symbols(result: dict, path: str) -> None:
    """Gather library symbols based on an ld.so.conf-style file.

    Parses the file at ``path``, follows ``include`` directives recursively
    and passes every configured library directory entry to
    ``_get_lib_dir_symbols`` which accumulates symbols into ``result``.
    Errors are logged and swallowed (best-effort behavior preserved).
    """
    head, tail = os.path.split(path)
    try:
        with cwd(head), open(tail, "r") as f:
            for line in f.readlines():
                if line.startswith("include "):
                    # BUG fix: the original recursive call omitted ``result``
                    # and kept the trailing newline, so it always raised
                    # TypeError which the broad except below swallowed.
                    _ld_config_symbols(result, line[len("include "):].strip())
                    continue

                if not line.strip():
                    # BUG fix: readlines() keeps the trailing "\n", so the
                    # original ``if not line`` never skipped blank lines.
                    continue

                # Both relative and absolute will work:
                #   os.path.join("/foo", "bar") == "/foo/bar"
                #   os.path.join("/foo", "/bar") == "/bar"
                for entry_path in glob.glob(os.path.join(head, line.strip())):
                    with open(os.path.join(path, entry_path),
                              "r") as conf_file:
                        for conf_path in conf_file.readlines():
                            _get_lib_dir_symbols(result, conf_path)
    except Exception as exc:
        _LOGGER.warning("Cannot load symbols based on ld.so.conf: %s",
                        str(exc))
Пример #2
0
def cloned_repo(project: GithubProject, **clone_kwargs):
    """Clone the given Git repository and cd into it."""
    service_url = project.service.instance_url
    uses_github_app = isinstance(project.service.authentication, GithubApp)

    namespace, repository = project.namespace, project.repo
    slug = f"{namespace}/{repository}"

    # GitHub App installations authenticate over HTTPS with a token,
    # everything else falls back to SSH.
    if uses_github_app:
        token = project.service.authentication.get_token(
            namespace, repository)  # type: ignore
        repo_url = f"https://{APP_NAME}:{token}@{service_url}/{slug}"
    else:
        repo_url = f"git@{_remove_protocol(service_url)}:{slug}.git"

    with TemporaryDirectory() as repo_path, cwd(repo_path):
        repo = git.Repo.clone_from(
            repo_url,
            repo_path,
            branch=project.default_branch,
            **clone_kwargs,
        )
        # Configure commit author identity for work done in this clone.
        git_name = os.getenv("KEBECHET_GIT_NAME", "Kebechet")
        git_email = os.getenv("KEBECHET_GIT_EMAIL", "*****@*****.**")
        repo.config_writer().set_value("user", "name", git_name).release()
        repo.config_writer().set_value("user", "email", git_email).release()
        yield repo
Пример #3
0
def construct_rootfs(dir_path: str, rootfs_path: str) -> list:
    """Construct rootfs in a directory by extracting layers.

    Reads ``manifest.json`` from ``dir_path`` (schema version 1 or 2),
    extracts each layer tarball into ``rootfs_path`` and returns the list
    of layer digests in extraction order.

    Raises:
        InvalidImageError: when no manifest.json is present.
        NotSupported: for manifest schema versions other than 1 and 2.
    """
    os.makedirs(rootfs_path, exist_ok=True)

    try:
        with open(os.path.join(dir_path, "manifest.json")) as manifest_file:
            manifest = json.load(manifest_file)
    except FileNotFoundError as exc:
        raise InvalidImageError(
            "No manifest.json file found in the downloaded "
            "image in {}".format(os.path.join(dir_path,
                                              "manifest.json"))) from exc

    if manifest.get("schemaVersion") == 1:
        manifest_layers = manifest["fsLayers"]
        get_layer_digest = _get_layer_digest_v1
    elif manifest.get("schemaVersion") == 2:
        manifest_layers = manifest["layers"]
        get_layer_digest = _get_layer_digest_v2
    else:
        raise NotSupported(
            "Invalid schema version in manifest.json file: {} "
            "(currently supported are schema versions 1 and 2)".format(
                manifest.get("schemaVersion")))

    layers = []
    _LOGGER.debug("Layers found: %r", manifest_layers)
    for layer_def in manifest_layers:
        layer_digest = get_layer_digest(layer_def)

        _LOGGER.debug("Extracting layer %r", layer_digest)
        layers.append(layer_digest)

        layer_gzip_tar = os.path.join(dir_path, layer_digest)
        with cwd(rootfs_path):
            # BUG fix: use a context manager so the tar file is always
            # closed - the original leaked one file handle per layer.
            with tarfile.open(layer_gzip_tar, "r:gz") as tar_file:
                # We cannot use extractall() since it does not handle overwriting files for us.
                for member in tar_file:
                    # Do not set attributes so we are fine with permissions.
                    try:
                        tar_file.extract(member,
                                         set_attrs=False,
                                         numeric_owner=False)
                    except IOError:
                        # If the given file is present, there is raised an exception - remove file to prevent from errors.
                        try:
                            os.remove(member.name)
                            tar_file.extract(member,
                                             set_attrs=False,
                                             numeric_owner=False)
                        except Exception as exc:
                            _LOGGER.exception(
                                "Failed to extract %r, exception is not fatal: %s",
                                member.name,
                                exc,
                            )

    return layers
Пример #4
0
    def test_from_pip_compile_files_example_dir3(self) -> None:
        """Test loading only if only requirements.in is present."""
        example_dir = os.path.join(self.data_dir, "requirements", "example_dir3")
        with cwd(example_dir):
            project = Project.from_pip_compile_files(allow_without_lock=True)

            # Without a lock file the project carries no locked packages.
            assert project.pipfile_lock is None
            expected = [
                PackageVersion(name="flask", version="*", develop=False, hashes=[], index=None)
            ]
            assert list(project.iter_dependencies()) == expected
Пример #5
0
def provenance_check(
    debug: bool = False,
    no_wait: bool = False,
    json_output: bool = False,
    force: bool = False,
    runtime_environment: typing.Optional[str] = None,
):
    """Check provenance of installed packages.

    Collect information about direct dependencies and dependencies stated in the lock file
    and send them to the remote service to verify their provenance.

    Exit codes: 2 - no results, 3 - no Pipfile.lock, 4 - ERROR items in
    report, 5 - service-side error.

    Examples:
      thamos provenance-check --runtime-environment "production"
    """
    with cwd(configuration.get_overlays_directory(runtime_environment)):
        if configuration.requirements_format != "pipenv":
            raise ValueError(
                "Provenance checks are available only for requirements managed by Pipenv"
            )

        pipfile, pipfile_lock = load_files("pipenv")
        if not pipfile_lock:
            _LOGGER.error(
                "No Pipfile.lock found - provenance cannot be checked")
            sys.exit(3)

        results = thoth_provenance_check(pipfile,
                                         pipfile_lock,
                                         debug=debug,
                                         nowait=no_wait,
                                         force=force)
        if not results:
            sys.exit(2)

        if no_wait:
            # Echo the analysis id to user when nowait.
            click.echo(results)
            sys.exit(0)

        report, error = results
        if report:
            _print_report(report,
                          json_output=json_output,
                          title="Provenance check report")
        else:
            _LOGGER.info("Provenance check passed!")

        if error:
            sys.exit(5)

        # BUG fix: ``report`` may be None/empty here (the "passed" branch
        # above) - iterating it unguarded raised TypeError.
        if report and any(item.get("type") == "ERROR" for item in report):
            sys.exit(4)

        return 0
Пример #6
0
    def test_from_pip_compile_files_example_dir2(self) -> None:
        """Test loading project from pip-compile files."""
        with cwd(os.path.join(self.data_dir, "requirements", "example_dir2")):
            # When a lock file is present, loading must not depend on the
            # allow_without_lock flag - both invocations yield equal projects.
            assert Project.from_pip_compile_files(allow_without_lock=False) == Project.from_pip_compile_files(
                allow_without_lock=True
            )
            project = Project.from_pip_compile_files(allow_without_lock=False)

        # Direct dependencies come from requirements.in - only flask, unpinned.
        assert list(project.iter_dependencies()) == [
            PackageVersion(name="flask", version="*", develop=False, index=Source(url="https://pypi.org/simple"))
        ]
        # Locked dependencies come from the pip-compile output and carry
        # exact pins plus artifact hashes, including transitive packages.
        assert list(project.iter_dependencies_locked()) == [
            PackageVersion(
                name="click",
                version="==7.0",
                develop=False,
                index=Source(url="https://pypi.org/simple"),
                hashes=["sha256:2335065e6395b9e67ca716de5f7526736bfa6ceead690adf616d925bdc622b13"],
            ),
            PackageVersion(
                name="flask",
                version="==1.1.1",
                develop=False,
                index=Source(url="https://pypi.org/simple"),
                hashes=["sha256:45eb5a6fd193d6cf7e0cf5d8a5b31f83d5faae0293695626f539a823e93b13f6"],
            ),
            PackageVersion(
                name="itsdangerous",
                version="==1.1.0",
                develop=False,
                index=Source(url="https://pypi.org/simple"),
                hashes=["sha256:b12271b2047cb23eeb98c8b5622e2e5c5e9abd9784a153e9d8ef9cb4dd09d749"],
            ),
            PackageVersion(
                name="jinja2",
                version="==2.10.3",
                develop=False,
                index=Source(url="https://pypi.org/simple"),
                hashes=["sha256:74320bb91f31270f9551d46522e33af46a80c3d619f4a4bf42b3164d30b5911f"],
            ),
            PackageVersion(
                name="markupsafe",
                version="==1.1.1",
                develop=False,
                index=Source(url="https://pypi.org/simple"),
                hashes=["sha256:09027a7803a62ca78792ad89403b1b7a73a01c8cb65909cd876f7fcebd79b161"],
            ),
            PackageVersion(
                name="werkzeug",
                version="==0.16.0",
                develop=False,
                index=Source(url="https://pypi.org/simple"),
                hashes=["sha256:e5f4a1f98b52b18a93da705a7458e55afb26f32bff83ff5d19189f92462d65c4"],
            ),
        ]
Пример #7
0
    def test_from_pip_compile_files_example_dir1(self) -> None:
        """Test loading only if requirements.txt is present."""
        example_dir = os.path.join(self.data_dir, "requirements", "example_dir1")
        with cwd(example_dir):
            # Without allow_without_lock the missing lock file must fail hard.
            with pytest.raises(FileLoadError):
                Project.from_pip_compile_files()

            project = Project.from_pip_compile_files(allow_without_lock=True)

            assert project.pipfile_lock is None
            expected = [
                PackageVersion(
                    name="click", version="*", develop=False, hashes=[], index=Source("https://pypi.org/simple")
                )
            ]
            assert list(project.iter_dependencies()) == expected
Пример #8
0
    def pipenv_lock(self):
        """Perform pipenv lock on the current state of project.

        Writes the in-memory Pipfile to disk, runs ``pipenv lock`` in the
        project workdir and reloads the produced lock file into
        ``self.pipfile_lock``.

        Raises:
            UnableLock: when the pipenv subprocess fails.
        """
        with cwd(self.workdir):
            self.pipfile.to_file()
            _LOGGER.debug("Running pipenv lock")

            try:
                result = run_command("pipenv lock", env={"PIPENV_IGNORE_VIRTUALENVS": "1"})
            except CommandError as exc:
                # BUG fix: the format string had two placeholders but three
                # arguments - logging raised a formatting error and stderr
                # was silently dropped.
                _LOGGER.exception(
                    "Unable to lock application stack (return code: %d):\n%s\n%s",
                    exc.return_code,
                    exc.stdout,
                    exc.stderr,
                )
                raise UnableLock("Failed to perform lock") from exc

            _LOGGER.debug("pipenv stdout:\n%s", result.stdout)
            _LOGGER.debug("pipenv stderr:\n%s", result.stderr)
            self.pipfile_lock = PipfileLock.from_file(pipfile=self.pipfile)
Пример #9
0
def install(
    runtime_environment_name: typing.Optional[str] = None,
    dev: bool = False,
    pip_args: typing.Optional[typing.Tuple[str]] = None,
) -> None:
    """Perform installation of packages for the given runtime environment.

    If the runtime environment is not specified, the first environment stated in the configuration is used.

    Raises:
        NoRequirementsFile: when the files required by the chosen
            installation method are missing in the overlays directory.
    """
    method = ("pipenv" if thoth_config.requirements_format == "pipenv" else
              "requirements")

    if not dev and method == "pipenv":
        _LOGGER.warning(
            "Development dependencies will not be installed - see %s",
            jl("no_dev"))

    with cwd(thoth_config.get_overlays_directory(runtime_environment_name)):
        if method == "pipenv":
            if not os.path.isfile("Pipfile.lock"):
                # BUG fix: corrected the typo "dpeendencies" and grammar in
                # this user-facing error message.
                raise NoRequirementsFile(
                    f"No Pipfile.lock found in {os.getcwd()!r} needed to install dependencies, "
                    "issue `thamos advise` to resolve dependencies")
            if not os.path.isfile(
                    "Pipfile"):  # Required for computing digests.
                raise NoRequirementsFile(
                    f"No Pipfile found in {os.getcwd()!r} needed for the installation process"
                )
        else:
            if not os.path.isfile("requirements.txt"):
                raise NoRequirementsFile(
                    f"No requirements.txt file found in {os.getcwd()!r} needed to install dependencies"
                )

        _LOGGER.info(
            "Using %r installation method to install dependencies stated in %r",
            method,
            os.getcwd(),
        )
        micropipenv.install(method=method,
                            deploy=True,
                            dev=dev,
                            pip_args=pip_args)
Пример #10
0
 def _write_advise(self, adv_results: list):
     """Write advised requirements (and lock) for the runtime environment."""
     with open(".thoth.yaml", "r") as f:
         thoth_config = yaml.safe_load(f)
     overlays_dir = thoth_config.get("overlays_dir")

     first_result = adv_results[0]
     requirements_lock = first_result["report"][0][1]["requirements_locked"]
     requirements = first_result["parameters"]["project"]["requirements"]
     requirements_format = first_result["parameters"]["requirements_format"]

     def _dump() -> None:
         # Single place that materializes the requirement files.
         lib.write_files(
             requirements=requirements,
             requirements_lock=requirements_lock,
             requirements_format=requirements_format,
         )

     if overlays_dir:
         # Overlay layout: files live under <overlays_dir>/<runtime env>.
         with cwd(f"{overlays_dir}/{self.runtime_environment}"):
             _dump()
     else:
         _dump()
Пример #11
0
def construct_rootfs(dir_path: str, rootfs_path: str) -> list:
    """Construct rootfs in a directory by extracting layers.

    Reads ``manifest.json`` (schema version 2 only) from ``dir_path``,
    extracts each layer tarball into ``rootfs_path`` and returns the list
    of layer digests in extraction order.

    Raises:
        InvalidImageError: when no manifest.json is present.
        NotSupported: for manifest schema versions other than 2.
    """
    os.makedirs(rootfs_path, exist_ok=True)

    try:
        with open(os.path.join(dir_path, 'manifest.json')) as manifest_file:
            manifest = json.load(manifest_file)
    except FileNotFoundError as exc:
        raise InvalidImageError(
            "No manifest.json file found in the downloaded "
            "image in {}".format(os.path.join(dir_path,
                                              'manifest.json'))) from exc

    if manifest.get('schemaVersion') != 2:
        raise NotSupported("Invalid schema version in manifest.json file: {} "
                           "(currently supported is 2)".format(
                               manifest.get('schemaVersion')))

    layers = []
    _LOGGER.debug("Layers found: %r", manifest['layers'])
    for layer_def in manifest['layers']:
        # Digest has the form "<algorithm>:<hex>"; keep only the hex part.
        layer_digest = layer_def['digest'].split(':', maxsplit=1)[-1]

        _LOGGER.debug("Extracting layer %r", layer_digest)
        layers.append(layer_digest)

        layer_gzip_tar = os.path.join(dir_path, layer_digest)
        with cwd(rootfs_path):
            # BUG fix: use a context manager so the tar file is always
            # closed - the original leaked one file handle per layer.
            with tarfile.open(layer_gzip_tar, 'r:gz') as tar_file:
                # We cannot use extractall() since it does not handle overwriting files for us.
                for member in tar_file:
                    # Do not set attributes so we are fine with permissions.
                    try:
                        tar_file.extract(member, set_attrs=False)
                    except IOError:
                        # If the given file is present, there is raised an exception - remove file to prevent from errors.
                        os.remove(member.name)
                        tar_file.extract(member, set_attrs=False)

    return layers
Пример #12
0
def workdir(file_lookup: Optional[str] = None, warn_on_dir_change: bool = True) -> None:
    """Find project directory and cd into it.

    Walk upwards from the current working directory looking for
    ``file_lookup`` (default ``.thoth.yaml``); when found, cd into the
    directory holding it and yield that directory path.

    NOTE(review): this is a generator (it yields inside a ``cwd`` context),
    so the ``-> None`` annotation is inaccurate - Iterator[str] would be
    correct; left unchanged here to avoid touching code.

    :raises NoProjectDirError: when no ``file_lookup`` is found within
        ``_WORKDIR_DEPTH_LEN`` parent levels.
    """
    file_lookup = file_lookup or ".thoth.yaml"

    project_dir = os.getcwd()
    original_project_dir = project_dir
    for _ in range(_WORKDIR_DEPTH_LEN):
        file = os.path.join(project_dir, file_lookup)
        if os.path.isfile(file):
            with cwd(project_dir):
                # Warn only when we actually left the starting directory.
                if project_dir != original_project_dir and warn_on_dir_change:
                    _LOGGER.warning("Using %r as project root directory", project_dir)
                yield project_dir
            break

        # Try one level up on the next iteration.
        project_dir = os.path.dirname(project_dir)
    else:
        # Loop exhausted without a break: no marker file found at any level.
        raise NoProjectDirError(
            f"No {file_lookup} found in the current directory {os.getcwd()!r} or in any of its parent "
            f"directories, you can generate it using '{sys.argv[0]} config'"
        )
Пример #13
0
    def test_serialization(self, tmp_path: Path):
        """Test serialization of a response from backend."""
        response_file = Path(self.data_dir) / "response_1.json"
        response = json.loads(response_file.read_text())
        product_project = response["result"]["report"]["products"][0]["project"]
        pipfile = product_project["requirements"]
        pipfile_lock = product_project["requirements_locked"]

        with cwd(str(tmp_path)):
            write_files(pipfile, pipfile_lock, requirements_format="pipenv")

            # The Pipfile must round-trip to the expected TOML structure.
            written_pipfile = toml.loads(Path("Pipfile").read_text())
            expected_pipfile = {
                "dev-packages": {},
                "packages": {
                    "flask": {"index": "pypi-org", "version": "*"},
                    "tensorflow": {"index": "pypi-org", "version": "*"},
                },
                "source": [
                    {
                        "name": "pypi-org",
                        "url": "https://pypi.org/simple",
                        "verify_ssl": True,
                    }
                ],
                "thoth": {
                    "allow_prereleases": {},
                    "disable_index_adjustment": False,
                },
            }
            assert written_pipfile == expected_pipfile

            # The lock file is written verbatim.
            written_pipfile_lock = json.loads(Path("Pipfile.lock").read_text())
            assert written_pipfile_lock == pipfile_lock
Пример #14
0
def pip_compile(*packages: str):
    """Run pip-compile to pin down packages, also resolve their transitive dependencies."""
    result = None
    # One requirement specifier per line, as pip-compile expects.
    requirements_in = "\n".join(packages)

    with tempfile.TemporaryDirectory() as tmp_dirname, cwd(tmp_dirname):
        with open("requirements.in", "w") as requirements_file:
            requirements_file.write(requirements_in)

        try:
            result = CliRunner().invoke(
                cli, ["requirements.in"], catch_exceptions=False
            )
        except Exception as exc:
            raise ThothPipCompileError(str(exc)) from exc

        if result.exit_code != 0:
            raise ThothPipCompileError(
                f"pip-compile returned non-zero ({result.exit_code:d}) "
                f"output: {result.output_bytes.decode():s}"
            )

    return result.output_bytes.decode()
def update_keb_installation():
    """Load files and pass them to storages update function.

    Downloads the requirement files of every configured runtime environment
    of the installation identified by ``_SLUG`` and syncs them into the
    database; installations for runtime environments no longer configured
    are deleted.
    """
    if _SLUG is None:
        _LOGGER.info("No slug present, continuing to next step in task.")
        return

    service = GithubService(
        github_app_id=os.getenv("GITHUB_APP_ID"),
        github_app_private_key_path=os.getenv("GITHUB_PRIVATE_KEY_PATH"),
    )  # TODO: extend to use other services

    # BUG fix: the GitHub project was stored in ``project`` and got clobbered
    # by the thoth Project inside the loop below on the first iteration,
    # breaking get_file_content() for every subsequent runtime environment.
    gh_project = service.get_project(namespace=_SLUG.split("/")[0], repo=_SLUG.split("/")[1])

    raw_thoth_config = gh_project.get_file_content(".thoth.yaml")

    with TemporaryDirectory() as repo_path, cwd(repo_path):
        thoth_config.load_config_from_string(raw_thoth_config)
        requirements_format = thoth_config.content["requirements_format"]
        overlays_dir = thoth_config.content.get("overlays_dir")
        to_update: List[RuntimeEnvironment]
        if overlays_dir is not None:
            to_update = [RuntimeEnvironment.from_dict(r) for r in thoth_config.list_runtime_environments()]
        else:
            to_update = [RuntimeEnvironment.from_dict(thoth_config.get_runtime_environment())]

        for runtime_environment in to_update:
            if overlays_dir:
                prefix = f"{overlays_dir}/{runtime_environment.name}/"
            else:
                prefix = ""

            if requirements_format == "pipenv":
                pipfile_r = gh_project.get_file_content(f"{prefix}Pipfile")
                with open("Pipfile", "wb") as f:
                    f.write(pipfile_r)

                try:
                    piplock_r = gh_project.get_file_content(f"{prefix}Pipfile.lock")
                    with open("Pipfile.lock", "wb") as f:
                        f.write(piplock_r)
                    project = Project.from_files(pipfile_path="Pipfile", pipfile_lock_path="Pipfile.lock")
                except Exception:
                    _LOGGER.debug("No Pipfile.lock found")
                    project = Project.from_files(
                        pipfile_path="Pipfile",
                        without_pipfile_lock=True,
                        runtime_environment=runtime_environment,
                    )

            elif requirements_format in ["pip", "pip-tools", "pip-compile"]:
                try:
                    requirements_r = gh_project.get_file_content(f"{prefix}requirements.txt")
                    with open("requirements.txt", "wb") as f:
                        f.write(requirements_r)
                    project = Project.from_pip_compile_files(
                        requirements_path="requirements.txt",
                        allow_without_lock=True,
                        runtime_environment=runtime_environment,
                    )
                except Exception:
                    _LOGGER.debug("No requirements.txt found, trying to download requirements.in")
                    requirements_r = gh_project.get_file_content(f"{prefix}requirements.in")
                    # BUG fix: get_file_content() returns the content itself
                    # (see every other call here); the original wrote
                    # ``requirements_r.content``.
                    with open("requirements.in", "wb") as f:
                        f.write(requirements_r)
                    project = Project.from_pip_compile_files(
                        requirements_path="requirements.in",
                        allow_without_lock=True,
                        runtime_environment=runtime_environment,
                    )
                # BUG fix: removed a stray re-load without runtime_environment
                # that discarded the Project constructed just above.
            else:
                raise NotImplementedError(f"{requirements_format} requirements format not supported.")

            db.update_kebechet_installation_using_files(
                slug=_SLUG,
                runtime_environment_name=runtime_environment.name,
                # Use the GitHub project; the original read the shadowed
                # ``project`` variable here - TODO confirm intent.
                installation_id=str(gh_project.github_repo.id),
                requirements=project.pipfile.to_dict(),
                # Guard: pipfile_lock is None when no lock file was found.
                requirements_lock=project.pipfile_lock.to_dict() if project.pipfile_lock else None,
                thoth_config=thoth_config,
            )

        # Remove installations for runtime environments that disappeared.
        present_installations = db.get_kebechet_github_app_installations_all(slug=_SLUG)
        cur_env_names = {r.name for r in to_update}
        all_env_names = {installation["runtime_environment_name"] for installation in present_installations}
        to_delete = all_env_names - cur_env_names
        for name in to_delete:
            db.delete_kebechet_github_app_installations(slug=_SLUG, runtime_environment=name)
Пример #16
0
def advise(
    debug: bool = False,
    no_write: bool = False,
    recommendation_type: typing.Optional[str] = None,
    runtime_environment: typing.Optional[str] = None,
    no_wait: bool = False,
    no_static_analysis: bool = False,
    json_output: bool = False,
    force: bool = False,
    dev: bool = False,
    no_user_stack: bool = False,
    install: bool = False,
    labels: Optional[str] = None,
    write_advised_manifest_changes: Optional[str] = None,
):
    """Ask Thoth for recommendations on the application stack.

    Ask the remote Thoth service for advise on the application stack used. The command
    collects information stated in the .thoth.yaml file for the given runtime environment,
    static source code analysis and requirements for the application and sends them to the
    remote service. Optionally, install packages resolved by Thoth.

    Exit codes: 0 - success, 1 - invalid flag combination, 2 - no results,
    4 - service reported an error.

    Examples:
      thamos advise --runtime-environment "testing" --labels foo=bar,qux=baz

      thamos advise --dev

      thamos advise --install

      thamos advise --no-static-analysis --no-user-stack
    """
    # Flag combinations that cannot work together are rejected up front.
    if install and no_wait:
        _LOGGER.error("Cannot install dependencies as --no-wait was provided")
        sys.exit(1)
    if install and no_write:
        _LOGGER.error(
            "Cannot install dependencies if lock files are not written")
        sys.exit(1)

    labels_dict = _parse_labels(labels)

    if not dev and configuration.requirements_format == "pipenv":
        _LOGGER.warning(
            "Development dependencies will not be considered during the resolution process - see %s",
            jl("no_dev"),
        )

    # In CLI we always call to obtain only the best software stack (count is implicitly set to 1).
    results = thoth_advise_here(
        recommendation_type=recommendation_type,
        src_path=configuration.config_path,
        runtime_environment_name=runtime_environment,
        debug=debug,
        nowait=no_wait,
        force=force,
        source_type=ThothAdviserIntegrationEnum.CLI,
        no_static_analysis=no_static_analysis,
        dev=dev,
        no_user_stack=no_user_stack,
        verify_tls=configuration.tls_verify,
        labels=labels_dict,
    )

    if not results:
        return sys.exit(2)

    if no_wait:
        # Echo the analysis id to user when not waiting.
        click.echo(results)
        sys.exit(0)

    result, error = results
    if error:
        # Error path: surface stack info (if any) and the service's error
        # message, then exit non-zero.
        if json_output:
            json.dump(result, sys.stdout, indent=2)
        else:
            stack_info = (result.get("report") or {}).get("stack_info")
            if stack_info:
                _print_report(
                    stack_info,
                    json_output=json_output,
                    title="Application stack guidance",
                )

            Console().print(
                result.get("error_msg")
                or "No error message was provided by the service.",
                style="bold red",
                justify="center",
            )

        sys.exit(4)
    if not no_write:
        # Success path with file writing: work inside the overlays directory
        # for the selected runtime environment.
        with cwd(configuration.get_overlays_directory(runtime_environment)):
            if result["report"] and result["report"]["stack_info"]:
                _print_report(
                    result["report"]["stack_info"],
                    json_output=json_output,
                    title="Application stack guidance",
                )

            # Print report of the best one - thus index zero.
            if result["report"] and result["report"]["products"]:
                if result["report"]["products"][0]["justification"]:
                    _print_report(
                        result["report"]["products"][0]["justification"],
                        json_output=json_output,
                        title="Recommended stack report",
                    )
                else:
                    click.echo(
                        "No justification was made for the recommended stack")

            # NOTE(review): products[0] is accessed unguarded below -
            # presumably the service always returns at least one product when
            # there is no error; confirm against the service contract.
            pipfile = result["report"]["products"][0]["project"][
                "requirements"]
            pipfile_lock = result["report"]["products"][0]["project"][
                "requirements_locked"]
            write_configuration(
                result["report"]["products"][0]["advised_runtime_environment"],
                recommendation_type,
                dev,
            )
            write_files(pipfile, pipfile_lock,
                        configuration.requirements_format)

            # Optionally dump advised manifest changes to the given path.
            if write_advised_manifest_changes:
                advised_manifest_changes = result["report"]["products"][0][
                    "project"].get("advised_manifest_changes")
                with open(write_advised_manifest_changes,
                          "w") as advised_manifest_changes_file:
                    json.dump(advised_manifest_changes or {},
                              advised_manifest_changes_file)
                    advised_manifest_changes_file.write("\n")

            # Optionally install using the just-written lock files.
            if install:
                thamos_install(runtime_environment_name=runtime_environment,
                               dev=dev)
    else:
        # --no-write: only echo the raw result.
        click.echo(json.dumps(result, indent=2))

    sys.exit(0)