def test_run_dev(self, context: Context) -> None:
        """Test discarding user's lock file if development dependencies are not present in lock but provided."""
        dev_dir = self.data_dir / "projects" / "dev"
        project = Project.from_files(pipfile_path=str(dev_dir / "Pipfile"),
                                     pipfile_lock_path=str(dev_dir / "Pipfile.lock"))

        # Both Pipfile and Pipfile.lock start with development dependencies present.
        assert project.pipfile.dev_packages.packages
        assert project.pipfile_lock.dev_packages.packages

        # Simulate a lock that lacks dev packages while --dev was requested.
        project.pipfile_lock.dev_packages.packages.clear()
        context.cli_parameters["dev"] = True
        context.project = project

        unit = self.UNIT_TESTED()
        with unit.assigned_context(context):
            unit.run()

        # The unit is expected to drop the stale lock and report a justification.
        assert project.pipfile.dev_packages.packages
        assert not project.pipfile_lock, "Lock file was not removed from the input"
        assert len(context.stack_info) == 1
        assert self.verify_justification_schema(context.stack_info)
Exemple #2
0
    def get_project(
        self,
        runtime_environment_name: Optional[str] = None,
        *,
        missing_dir_ok: bool = False,
    ) -> Project:
        """Get the given overlay.

        :param runtime_environment_name: name of the runtime environment (overlay) to load
        :param missing_dir_ok: do not fail if the overlay directory does not exist
        :returns: a project constructed from Pipenv or pip/pip-tools files found in the overlay
        :raises NotImplementedError: if neither requirements.txt nor requirements.in is found
            for the pip requirements format
        """
        path = self.get_overlays_directory(
            runtime_environment_name=runtime_environment_name,
            missing_dir_ok=missing_dir_ok,
        )
        runtime_environment = RuntimeEnvironment.from_dict(
            self.get_runtime_environment(runtime_environment_name))
        if self.requirements_format == "pipenv":
            pipfile_lock_path: Optional[str] = os.path.join(
                path, "Pipfile.lock")
            # os.path.join always yields a non-empty string, so only the
            # existence check is needed here.
            if not os.path.exists(pipfile_lock_path):
                pipfile_lock_path = None

            pipfile_path = os.path.join(path, "Pipfile")
            if not os.path.isfile(pipfile_path):
                if not os.path.isdir(path):
                    _LOGGER.info("Creating directory structure in %r", path)
                    os.makedirs(path, exist_ok=True)
                # Materialize an empty Pipfile so the overlay becomes usable.
                pipfile = Pipfile.from_dict({})
                pipfile.to_file(path=pipfile_path)

            project = Project.from_files(
                pipfile_path=pipfile_path,
                pipfile_lock_path=pipfile_lock_path,
                runtime_environment=runtime_environment,
                without_pipfile_lock=pipfile_lock_path is None,
            )
        else:
            requirements_in_file_path = os.path.join(path, "requirements.in")
            if not os.path.isfile(requirements_in_file_path):
                requirements_txt_file_path = os.path.join(
                    path, "requirements.txt")
                if os.path.isfile(requirements_txt_file_path):
                    # Fix: log the file actually used (requirements.txt); the
                    # original logged requirements.in, which does not exist here.
                    _LOGGER.warning("Using %r for direct dependencies",
                                    requirements_txt_file_path)
                    project = Project.from_pip_compile_files(
                        requirements_path=requirements_txt_file_path,
                        requirements_lock_path=None,
                        allow_without_lock=True,
                        runtime_environment=runtime_environment,
                    )
                else:
                    raise NotImplementedError(
                        "No requirements.txt/requirements.in files found, it is recommended to "
                        "use Pipenv files for managing dependencies")
            else:
                project = Project.from_pip_compile_files(
                    requirements_path=requirements_in_file_path,
                    requirements_lock_path=None,
                    allow_without_lock=True,
                    runtime_environment=runtime_environment,
                )

        return project
Exemple #3
0
def _load_files(requirements_format: str) -> Tuple[str, Optional[str]]:
    """Load Pipfile/Pipfile.lock or requirements.in/txt from the current directory.

    :param requirements_format: either "pipenv" or one of "pip"/"pip-tools"/"pip-compile"
    :returns: tuple of requirements content and lock content (None when no lock is present)
    :raises ValueError: if the requirements format is not recognized
    """
    if requirements_format == "pipenv":
        _LOGGER.info("Using Pipenv files located in %r directory", os.getcwd())
        # Check existence once and reuse the result so the project load below
        # and the hash comparison agree even if the file changes concurrently.
        pipfile_lock_exists = os.path.exists("Pipfile.lock")

        if pipfile_lock_exists:
            _LOGGER.info(
                "Submitting Pipfile.lock as a base for user's stack scoring - see %s",
                jl("user_stack"),
            )

        project = Project.from_files(
            without_pipfile_lock=not pipfile_lock_exists)

        if (pipfile_lock_exists and project.pipfile_lock.meta.hash["sha256"] !=
                project.pipfile.hash()["sha256"]):
            _LOGGER.error(
                "Pipfile hash stated in Pipfile.lock %r does not correspond to Pipfile hash %r - was Pipfile "
                "adjusted? This error is not critical.",
                project.pipfile_lock.meta.hash["sha256"][:6],
                project.pipfile.hash()["sha256"][:6],
            )
    elif requirements_format in ("pip", "pip-tools", "pip-compile"):
        _LOGGER.info("Using requirements.txt file located in %r directory",
                     os.getcwd())
        project = Project.from_pip_compile_files(allow_without_lock=True)
    else:
        raise ValueError(
            f"Unknown configuration option for requirements format: {requirements_format!r}"
        )
    return (
        project.pipfile.to_string(),
        project.pipfile_lock.to_string() if project.pipfile_lock else None,
    )
Exemple #4
0
    def post(self):
        """Get requirements file from disk."""
        input_data = self.get_json_body()
        kernel_name: str = input_data["kernel_name"]

        # Kernels live under the per-user Thoth data directory.
        store_path: Path = Path.home().joinpath(".local/share/thoth/kernels")
        env_path = Path(store_path).joinpath(kernel_name)

        _LOGGER.info("Path used to get dependencies is: %r",
                     env_path.as_posix())

        # Only the Pipenv requirements format is handled by this endpoint.
        requirements_format = "pipenv"

        pipfile_path = env_path.joinpath("Pipfile")
        pipfile_lock_path = env_path.joinpath("Pipfile.lock")

        if requirements_format == "pipenv":
            _LOGGER.debug("Get Pipfile/Pipfile.lock in %r", env_path)
            project = Project.from_files(pipfile_path=pipfile_path,
                                         pipfile_lock_path=pipfile_lock_path)

        self.finish(
            json.dumps({
                "requirements": project.pipfile.to_dict(),
                "requirements_lock": project.pipfile_lock.to_dict()
            }))
def load_files(base_path: str) -> typing.Tuple[str, typing.Optional[str]]:
    """Load Pipfile/Pipfile.lock from path.

    :param base_path: directory in which Pipfile and Pipfile.lock are looked up
    :returns: tuple of Pipfile content and Pipfile.lock content (None when no lock exists)
    """
    _LOGGER.info("Looking for Pipenv files located in %r directory", base_path)
    pipfile_path = Path(base_path).joinpath("Pipfile")
    pipfile_lock_path = Path(base_path).joinpath("Pipfile.lock")

    # Check existence once and reuse the result so the project load and the
    # hash comparison below stay consistent even under concurrent changes.
    pipfile_lock_exists = pipfile_lock_path.exists()

    project = Project.from_files(
        pipfile_path=pipfile_path,
        pipfile_lock_path=pipfile_lock_path,
        without_pipfile_lock=not pipfile_lock_exists,
    )

    if pipfile_lock_exists and project.pipfile_lock.meta.hash[
            "sha256"] != project.pipfile.hash()["sha256"]:
        _LOGGER.error(
            "Pipfile hash stated in Pipfile.lock %r does not correspond to Pipfile hash %r - was Pipfile "
            "adjusted? This error is not critical.",
            project.pipfile_lock.meta.hash["sha256"][:6],
            project.pipfile.hash()["sha256"][:6],
        )

    return (
        project.pipfile.to_string(),
        project.pipfile_lock.to_string() if project.pipfile_lock else None,
    )
Exemple #6
0
    def load_test_project(self, pinned_index: bool = False) -> Project:
        """Instantiate testing project from prepared testing Pipfile and Pipfile.lock files.

        @param pinned_index: true for retrieving Pipfile with pinned index
        """
        # Pick the fixture variant once; the lock file shares the same stem.
        variant = "Pipfile_provenance2" if pinned_index else "Pipfile_provenance1"
        pipfile_path = os.path.join(self.data_dir, "pipfiles", variant)
        pipfile_lock_path = os.path.join(self.data_dir, "pipfiles", variant + ".lock")
        return Project.from_files(pipfile_path, pipfile_lock_path)
Exemple #7
0
def project() -> Project:
    """Create a fixture for a project representation."""
    flexmock(Project)
    flexmock(RuntimeEnvironment)

    projects_dir = AdviserTestCase.data_dir / "projects"

    return Project.from_files(
        pipfile_path=str(projects_dir / "Pipfile"),
        pipfile_lock_path=str(projects_dir / "Pipfile.lock"),
        runtime_environment=RuntimeEnvironment.from_dict({}),
    )
Exemple #8
0
    def test_get_outdated_package_versions_indirect(self):
        """Test get outdated package versions indirect."""
        # The difference between direct and indirect - Pipenv states "index" in
        # the Pipfile.lock file if the given package is a direct dependency.
        # The "index" key for indirect dependencies is omitted though. This way
        # we check both - logic for indirect/direct is slightly different.
        # We cannot use flexmock as Source has slots.
        @attr.s
        class MySource:
            # Minimal stand-in mirroring the attribute layout the code under
            # test reads from a Source instance (see Source.to_dict() usage below).
            url = attr.ib(type=str)
            verify_ssl = attr.ib(type=bool)
            name = attr.ib(type=str)
            warehouse = attr.ib(type=bool, default=False)
            warehouse_api_url = attr.ib(default=None, type=str)

            def get_latest_package_version(_, package_name):  # noqa: N805
                # Canned "latest" versions; only "idna" (2.10) is newer than
                # the locked version, so it is the single outdated package.
                return {
                    "certifi":
                    PackageVersion.parse_semantic_version("2018.10.15"),
                    "chardet": PackageVersion.parse_semantic_version("3.0.4"),
                    "idna": PackageVersion.parse_semantic_version(
                        "2.10"),  # Bumped from 2.7
                    "requests":
                    PackageVersion.parse_semantic_version("2.19.1"),
                    "termcolor":
                    PackageVersion.parse_semantic_version("1.1.0"),
                    "urllib3": PackageVersion.parse_semantic_version("1.23"),
                }[package_name]

        project = Project.from_files(
            os.path.join(self.data_dir, "pipfiles", "Pipfile_test2"),
            os.path.join(self.data_dir, "pipfiles", "Pipfile_test2.lock"),
        )

        # Replace the lock file's sources with the stub so latest-version
        # queries do not hit a real package index.
        new_sources = {}
        for source in project.pipfile_lock.meta.sources.values():
            new_sources[source.name] = MySource(**source.to_dict())
        project.pipfile_lock.meta.sources = new_sources

        # Also rewire per-package index references to the stubbed sources.
        for package_version in project.iter_dependencies_locked(
                with_devel=True):
            if package_version.index:
                package_version.index = new_sources[package_version.index.name]

        result = project.get_outdated_package_versions()
        # Exactly one outdated entry: (locked PackageVersion, latest Version).
        assert len(result) == 1
        assert "idna" in result
        assert len(result["idna"]) == 2
        assert result["idna"][0] is project.pipfile_lock.packages["idna"]
        assert isinstance(result["idna"][1], Version)
        assert str(result["idna"][1]) == "2.10"
Exemple #9
0
    def test_get_outdated_package_versions_direct(self):
        """Test get outdated package versions direct."""
        # See previous test comments for more info.
        # We cannot use flexmock as Source has slots.
        @attr.s
        class MySource:
            # Minimal stand-in mirroring the attribute layout the code under
            # test reads from a Source instance (see Source.to_dict() usage below).
            url = attr.ib(type=str)
            verify_ssl = attr.ib(type=bool)
            name = attr.ib(type=str)
            warehouse = attr.ib(type=bool, default=False)
            warehouse_api_url = attr.ib(default=None, type=str)

            def get_latest_package_version(_, package_name):  # noqa: N805
                # Canned "latest" versions; only "requests" (3.0.0) is newer
                # than the locked version, so it is the single outdated package.
                return {
                    "certifi":
                    PackageVersion.parse_semantic_version("2018.10.15"),
                    "chardet": PackageVersion.parse_semantic_version("3.0.4"),
                    "idna": PackageVersion.parse_semantic_version("2.7"),
                    "requests": PackageVersion.parse_semantic_version("3.0.0"),
                    "termcolor":
                    PackageVersion.parse_semantic_version("1.1.0"),
                    "urllib3": PackageVersion.parse_semantic_version("1.23"),
                }[package_name]

        project = Project.from_files(
            os.path.join(self.data_dir, "pipfiles", "Pipfile_test2"),
            os.path.join(self.data_dir, "pipfiles", "Pipfile_test2.lock"),
        )

        # Replace the lock file's sources with the stub so latest-version
        # queries do not hit a real package index.
        new_sources = {}
        for source in project.pipfile_lock.meta.sources.values():
            new_sources[source.name] = MySource(**source.to_dict())
        project.pipfile_lock.meta.sources = new_sources

        # Also rewire per-package index references to the stubbed sources.
        for package_version in project.iter_dependencies_locked(
                with_devel=True):
            if package_version.index:
                package_version.index = new_sources[package_version.index.name]

        result = project.get_outdated_package_versions()
        # Exactly one outdated entry: (locked PackageVersion, latest Version).
        assert len(result) == 1
        assert "requests" in result
        assert len(result["requests"]) == 2
        assert result["requests"][0] is project.pipfile_lock.packages[
            "requests"]
        assert isinstance(result["requests"][1], Version)
        assert str(result["requests"][1]) == "3.0.0"
Exemple #10
0
    def test_run_dev_noop_no_dev(self, context: Context) -> None:
        """Test not discarding user's lock file if development dependencies are present in lock."""
        dev_dir = self.data_dir / "projects" / "dev"
        project = Project.from_files(pipfile_path=str(dev_dir / "Pipfile"),
                                     pipfile_lock_path=str(dev_dir / "Pipfile.lock"))

        # Both Pipfile and Pipfile.lock start with development dependencies present.
        assert project.pipfile.dev_packages.packages
        assert project.pipfile_lock.dev_packages.packages

        # Lock lacks dev packages, but --dev was NOT requested.
        context.project = project
        project.pipfile_lock.dev_packages.packages.clear()
        context.cli_parameters["dev"] = False

        unit = self.UNIT_TESTED()
        with unit.assigned_context(context):
            unit.run()

        # Without --dev the unit must keep the lock file and report nothing.
        assert project.pipfile.dev_packages.packages
        assert project.pipfile_lock
        assert len(context.stack_info) == 0
def qeb_hwt_thamos_advise() -> None:
    """Qeb-Hwt Thamos Advise Task.

    Loads the repository's requirements files and produces an adviser-trigger
    message; on failure a finished webhook is triggered with the error reported.

    :raises Exception: if REPO_PATH is not provided
    :raises FileNotFoundError: if REPO_PATH does not exist
    """
    if not Configuration._REPO_PATH:
        raise Exception("No path has been provided to REPO_PATH env variable.")

    if not Path(Configuration._REPO_PATH).exists():
        raise FileNotFoundError(
            f"Cannot find the file on this path: {Configuration._REPO_PATH}")

    output_messages: list = []

    OpenShift.verify_github_app_inputs(
        github_event_type=Configuration._GITHUB_EVENT_TYPE,
        github_check_run_id=Configuration._GITHUB_CHECK_RUN_ID,
        github_installation_id=Configuration._GITHUB_INSTALLATION_ID,
        github_base_repo_url=Configuration._GITHUB_BASE_REPO_URL,
        origin=Configuration._ORIGIN,
    )

    os.chdir(Configuration._REPO_PATH)
    thoth_yaml_config = _Configuration()

    if not thoth_yaml_config.config_file_exists():
        exception_message = _create_message_config_file_error(no_file=True)
        store_messages(output_messages)
        trigger_finished_webhook(exception_message=exception_message,
                                 has_error=True,
                                 error_type="MissingThothYamlFile")
        return

    try:
        # Consider first runtime environment
        runtime_environment = thoth_yaml_config.content.get(
            "runtime_environments")[0]

        # Fetch recommendation type
        # NOTE(review): thoth_yaml_config is always truthy here, so this can
        # still yield None when the key is missing - confirm whether a
        # "latest" default was intended instead.
        recommendation_type = runtime_environment.get(
            "recommendation_type") if thoth_yaml_config else "latest"

        requirements_format = thoth_yaml_config.requirements_format
        if requirements_format == "pipenv":
            project = Project.from_files(
                without_pipfile_lock=not os.path.exists("Pipfile.lock"))
        elif requirements_format in ("pip", "pip-tools", "pip-compile"):
            project = Project.from_pip_compile_files(allow_without_lock=True)
        else:
            raise ValueError(
                f"Unknown configuration option for requirements format: {requirements_format!r}"
            )

        pipfile = project.pipfile.to_string()
        pipfile_lock_str = project.pipfile_lock.to_string(
        ) if project.pipfile_lock else ""
        application_stack = PythonStack(
            requirements=pipfile,
            requirements_lock=pipfile_lock_str,
            requirements_format=requirements_format).to_dict()

    except Exception as exception:
        # API errors carry a JSON ``body`` with an "error" key; other
        # exceptions (e.g. the ValueError above or an IndexError on an empty
        # runtime_environments list) have no ``body`` attribute - fall back to
        # str() instead of crashing with AttributeError. Parse the body once.
        body = getattr(exception, "body", None)
        if body is not None:
            exception_message = json.loads(body)["error"]
        else:
            exception_message = str(exception)
        _LOGGER.debug(exception_message)
        store_messages(output_messages)
        trigger_finished_webhook(exception_message=exception_message,
                                 has_error=True)
        return

    # The input for AdviserTriggerMessage if no exceptions were found
    message_input = {
        "component_name": {
            "type": "str",
            "value": __COMPONENT_NAME__
        },
        "service_version": {
            "type": "str",
            "value": __service_version__
        },
        "application_stack": {
            "type": "Dict",
            "value": application_stack
        },
        "runtime_environment": {
            "type": "Dict",
            "value": runtime_environment
        },
        "recommendation_type": {
            "type": "str",
            "value": recommendation_type
        },
        "github_event_type": {
            "type": "str",
            "value": Configuration._GITHUB_EVENT_TYPE
        },
        "github_check_run_id": {
            "type": "int",
            "value": int(Configuration._GITHUB_CHECK_RUN_ID)
        },
        "github_installation_id": {
            "type": "int",
            "value": int(Configuration._GITHUB_INSTALLATION_ID)
        },
        "github_base_repo_url": {
            "type": "str",
            "value": Configuration._GITHUB_BASE_REPO_URL
        },
        "origin": {
            "type": "str",
            "value": Configuration._ORIGIN
        },
        "source_type": {
            "type": "str",
            "value": ThothAdviserIntegrationEnum.GITHUB_APP.name
        },
    }

    # We store the message to put in the output file here.
    output_messages = [{
        "topic_name": "thoth.adviser-trigger",
        "message_contents": message_input
    }]

    store_messages(output_messages)
def update_keb_installation():
    """Load files and pass them to storages update function.

    Fetches requirements files for each runtime environment of the installation
    from GitHub, builds a project for each and records it in the database;
    installations for environments no longer configured are deleted.
    """
    if _SLUG is None:
        _LOGGER.info("No slug present, continuing to next step in task.")
        return

    service = GithubService(
        github_app_id=os.getenv("GITHUB_APP_ID"),
        github_app_private_key_path=os.getenv("GITHUB_PRIVATE_KEY_PATH"),
    )  # TODO: extend to use other services

    # Keep the GitHub project separate from the Thoth ``Project`` built below.
    # The original code rebound a single ``project`` name, which broke
    # ``get_file_content`` calls on the second loop iteration and made
    # ``project.github_repo.id`` read from the wrong object.
    gh_project = service.get_project(namespace=_SLUG.split("/")[0], repo=_SLUG.split("/")[1])

    raw_thoth_config = gh_project.get_file_content(".thoth.yaml")

    with TemporaryDirectory() as repo_path, cwd(repo_path):
        thoth_config.load_config_from_string(raw_thoth_config)
        requirements_format = thoth_config.content["requirements_format"]
        overlays_dir = thoth_config.content.get("overlays_dir")
        to_update: List[RuntimeEnvironment]
        if overlays_dir is not None:
            to_update = [RuntimeEnvironment.from_dict(r) for r in thoth_config.list_runtime_environments()]
        else:
            to_update = [RuntimeEnvironment.from_dict(thoth_config.get_runtime_environment())]

        for runtime_environment in to_update:
            if overlays_dir:
                prefix = f"{overlays_dir}/{runtime_environment.name}/"
            else:
                prefix = ""

            if requirements_format == "pipenv":
                pipfile_r = gh_project.get_file_content(f"{prefix}Pipfile")
                with open("Pipfile", "wb") as f:
                    f.write(pipfile_r)

                try:
                    piplock_r = gh_project.get_file_content(f"{prefix}Pipfile.lock")
                    with open("Pipfile.lock", "wb") as f:
                        f.write(piplock_r)
                    # Pass runtime_environment here too, consistent with the
                    # no-lock fallback below.
                    project = Project.from_files(
                        pipfile_path="Pipfile",
                        pipfile_lock_path="Pipfile.lock",
                        runtime_environment=runtime_environment,
                    )
                except Exception:
                    _LOGGER.debug("No Pipfile.lock found")
                    project = Project.from_files(
                        pipfile_path="Pipfile",
                        without_pipfile_lock=True,
                        runtime_environment=runtime_environment,
                    )

            elif requirements_format in ["pip", "pip-tools", "pip-compile"]:
                try:
                    requirements_r = gh_project.get_file_content(f"{prefix}requirements.txt")
                    with open("requirements.txt", "wb") as f:
                        f.write(requirements_r)
                    project = Project.from_pip_compile_files(
                        requirements_path="requirements.txt",
                        allow_without_lock=True,
                        runtime_environment=runtime_environment,
                    )
                except Exception:
                    _LOGGER.debug("No requirements.txt found, trying to download requirements.in")
                    requirements_r = gh_project.get_file_content(f"{prefix}requirements.in")
                    with open("requirements.in", "wb") as f:
                        # NOTE(review): the original wrote ``requirements_r.content``
                        # here while every sibling branch writes the value
                        # directly - aligned to the latter; confirm against
                        # GithubService.get_file_content's return type.
                        f.write(requirements_r)
                    project = Project.from_pip_compile_files(
                        requirements_path="requirements.in",
                        allow_without_lock=True,
                        runtime_environment=runtime_environment,
                    )
                # Removed a stray ``Project.from_pip_compile_files(allow_without_lock=True)``
                # call that discarded the project constructed just above.
            else:
                raise NotImplementedError(f"{requirements_format} requirements format not supported.")

            db.update_kebechet_installation_using_files(
                slug=_SLUG,
                runtime_environment_name=runtime_environment.name,
                installation_id=str(gh_project.github_repo.id),
                requirements=project.pipfile.to_dict(),
                # The lock may legitimately be absent (without_pipfile_lock
                # branches above) - guard against AttributeError on None.
                requirements_lock=project.pipfile_lock.to_dict() if project.pipfile_lock else None,
                thoth_config=thoth_config,
            )

        present_installations = db.get_kebechet_github_app_installations_all(slug=_SLUG)
        cur_env_names = {r.name for r in to_update}
        all_env_names = {installation["runtime_environment_name"] for installation in present_installations}
        to_delete = all_env_names - cur_env_names
        for name in to_delete:
            db.delete_kebechet_github_app_installations(slug=_SLUG, runtime_environment=name)
Exemple #13
0
def advise_here(
    recommendation_type: typing.Optional[str] = None,
    *,
    runtime_environment: typing.Optional[dict] = None,
    runtime_environment_name: typing.Optional[str] = None,
    dev: bool = False,
    no_static_analysis: bool = False,
    no_user_stack: bool = False,
    nowait: bool = False,
    force: bool = False,
    limit: typing.Optional[int] = None,
    count: int = 1,
    debug: bool = False,
    origin: typing.Optional[str] = None,
    github_event_type: typing.Optional[str] = None,
    github_check_run_id: typing.Optional[int] = None,
    github_installation_id: typing.Optional[int] = None,
    github_base_repo_url: typing.Optional[str] = None,
    source_type: typing.Optional[ThothAdviserIntegrationEnum] = None,
) -> typing.Optional[tuple]:
    """Run advise in current directory, requires no arguments.

    Loads Pipenv or pip-tools files from the current working directory based on
    the configured requirements format and forwards them to :func:`advise`.

    :raises ValueError: if the configured requirements format is not recognized
    """
    requirements_format = thoth_config.requirements_format
    if requirements_format == "pipenv":
        _LOGGER.info(
            "Using Pipenv files located in the project root directory")
        # Check existence once and reuse the result so the project load and
        # the hash comparison below stay consistent.
        pipfile_lock_exists = os.path.exists("Pipfile.lock")

        if pipfile_lock_exists:
            _LOGGER.info(
                "Submitting Pipfile.lock as a base for user's stack scoring - see %s",
                jl("user_stack"),
            )

        project = Project.from_files(
            without_pipfile_lock=not pipfile_lock_exists)

        if (pipfile_lock_exists and project.pipfile_lock.meta.hash["sha256"] !=
                project.pipfile.hash()["sha256"]):
            _LOGGER.error(
                "Pipfile hash stated in Pipfile.lock %r does not correspond to Pipfile hash %r - was Pipfile "
                "adjusted? This error is not critical.",
                project.pipfile_lock.meta.hash["sha256"][:6],
                project.pipfile.hash()["sha256"][:6],
            )
    elif requirements_format in ("pip", "pip-tools", "pip-compile"):
        _LOGGER.info(
            "Using requirements.txt file located in the project root directory"
        )
        project = Project.from_pip_compile_files(allow_without_lock=True)
    else:
        raise ValueError(
            f"Unknown configuration option for requirements format: {requirements_format!r}"
        )

    pipfile = project.pipfile.to_string()
    pipfile_lock_str = project.pipfile_lock.to_string(
    ) if project.pipfile_lock else ""

    return advise(
        pipfile=pipfile,
        pipfile_lock=pipfile_lock_str,
        recommendation_type=recommendation_type,
        runtime_environment=runtime_environment,
        runtime_environment_name=runtime_environment_name,
        dev=dev,
        no_static_analysis=no_static_analysis,
        no_user_stack=no_user_stack,
        nowait=nowait,
        force=force,
        limit=limit,
        count=count,
        debug=debug,
        origin=origin,
        source_type=source_type,
        github_event_type=github_event_type,
        github_check_run_id=github_check_run_id,
        github_installation_id=github_installation_id,
        github_base_repo_url=github_base_repo_url,
    )