Beispiel #1
0
 def _create_unsupported_package_issue(self, package_name, pkg_location):
     """Create an issue as Kebechet doesn't support packages with git as source."""
     _LOGGER.info("Key Error encountered, due package source being git.")
     cwd_rel = self._get_cwd_relative2gitroot()
     # Raw-file links to both dependency files, used inside the issue body.
     pipfile_link, lock_link = (
         construct_raw_file_url(
             self.service_url,
             self.slug,
             os.path.join(cwd_rel, file_name),
             self.service_type,
         )
         for file_name in ("Pipfile", "Pipfile.lock")
     )
     issue_title = _ISSUE_UNSUPPORTED_PACKAGE.format(
         env_name=self.runtime_environment)
     # Only open a new issue when an identically-titled one does not exist yet.
     if self.get_issue_by_title(issue_title) is not None:
         return
     self.project.create_issue(
         title=issue_title,
         body=ISSUE_UNSUPPORTED_PACKAGE.format(
             sha=self.sha,
             package=package_name,
             pkg_location=pkg_location,
             pip_url=pipfile_link,
             piplock_url=lock_link,
             environment_details=self.get_environment_details(),
         ),
     )
Beispiel #2
0
    def _relock_all(self, exc: PipenvError, labels: list) -> None:
        """Re-lock all dependencies given the Pipfile."""
        pipfile_link = construct_raw_file_url(
            self.service_url, self.slug, "Pipfile", self.service_type
        )
        lock_link = construct_raw_file_url(
            self.service_url, self.slug, "Pipfile.lock", self.service_type
        )
        # Report (or refresh) an issue describing the failed environment replication.
        issue = self.sm.open_issue_if_not_exist(
            _ISSUE_REPLICATE_ENV_NAME,
            lambda: ISSUE_REPLICATE_ENV.format(
                **exc.__dict__,
                sha=self.sha,
                pip_url=pipfile_link,
                piplock_url=lock_link,
                environment_details=self.get_environment_details(),
            ),
            refresh_comment=partial(self._add_refresh_comment, exc),
            labels=labels,
        )

        self._pipenv_update_all()
        commit_msg = "Automatic dependency re-locking"
        branch_name = "kebechet-dependency-relock"

        existing_prs = self._get_prs(branch_name)
        pr_count = len(existing_prs)
        if pr_count == 0:
            # Default case
            self._git_push(":pushpin: " + commit_msg, branch_name, ["Pipfile.lock"])
            pr_id = self.sm.open_merge_request(
                commit_msg, branch_name, f"Fixes: #{issue.id}", labels
            )
            _LOGGER.info(
                f"Issued automatic dependency re-locking in PR #{pr_id} to fix issue #{issue.id}"
            )
        elif pr_count == 1:
            pr = list(existing_prs)[0]
            commits = pr.get_all_commits()
            if len(commits) != 1:
                # A human touched the PR; do not overwrite their work.
                pr.comment(
                    "There have been done changes in the original pull request (multiple commits found), "
                    "aborting doing changes to the modified opened pull request"
                )
                return None
            if self.sha != commits[0]:
                # Master moved on - force-push the re-lock rebased on the new master.
                self._git_push(
                    ":pushpin: " + commit_msg,
                    branch_name,
                    ["Pipfile.lock"],
                    force_push=True,
                )
                pr.comment(
                    f"Pull request has been rebased on top of the current master with SHA {self.sha}"
                )
        else:
            raise DependencyManagementError(
                f"Found two or more pull requests for automatic relock for branch {branch_name}"
            )
    def run(self, lockfile: bool = False) -> None:  # type: ignore
        """Keep your requirements.txt in sync with Pipfile/Pipfile.lock."""
        payload = self.parsed_payload
        if payload and payload.get("event") not in _EVENTS_SUPPORTED:
            _LOGGER.info(
                "PipfileRequirementsManager doesn't act on %r events.",
                payload.get("event"),
            )
            return

        source_file = "Pipfile.lock" if lockfile else "Pipfile"
        source_url = construct_raw_file_url(
            self.service_url, self.slug, source_file, self.service_type
        )

        _LOGGER.debug("Downloading %r from %r", source_file, source_url)
        # TODO: propagate tls_verify for internal GitLab instances here and below as well
        response = requests.get(source_url)
        response.raise_for_status()
        parse = (
            self.get_pipfile_lock_requirements
            if lockfile
            else self.get_pipfile_requirements
        )
        pipfile_content = sorted(parse(response.text))

        requirements_url = construct_raw_file_url(
            self.service_url, self.slug, "requirements.txt", self.service_type
        )
        _LOGGER.debug("Downloading requirements.txt from %r", requirements_url)
        response = requests.get(requirements_url)
        if response.status_code == 404:
            # If the requirements.txt file does not exist, create it.
            requirements_txt_content = []
        else:
            response.raise_for_status()
            requirements_txt_content = sorted(response.text.splitlines())

        if pipfile_content == requirements_txt_content:
            _LOGGER.info("Requirements in requirements.txt are up to date")
            # TODO: delete branch if already exists
            return

        with cloned_repo(self, depth=1) as repo:
            with open("requirements.txt", "w") as requirements_file:
                # Newline-terminated, one requirement per line.
                requirements_file.write("\n".join(pipfile_content) + "\n")

            branch_name = "pipfile-requirements-sync"
            repo.git.checkout(b=branch_name)
            repo.index.add(["requirements.txt"])
            commit_message = "Update requirements.txt respecting requirements in {}".format(
                "Pipfile.lock" if lockfile else "Pipfile"
            )
            repo.index.commit(commit_message)
            repo.remote().push(branch_name)
Beispiel #4
0
 def _create_unsupported_package_issue(self, package_name):
     """Create an issue as Kebechet doesn't support packages with git as source.

     :param package_name: name of the package whose source is a git repository
     """
     # Fixed the log message: typo "Errror" -> "Error" and grammar "due" -> "due to".
     _LOGGER.info("Key Error encountered, due to package source being git.")
     pip_url = construct_raw_file_url(self.service_url, self.slug,
                                      "Pipfile", self.service_type)
     piplock_url = construct_raw_file_url(self.service_url, self.slug,
                                          "Pipfile.lock", self.service_type)
     # Idempotent: the service manager opens the issue only if it does not exist yet.
     self.sm.open_issue_if_not_exist(
         _ISSUE_UNSUPPORTED_PACKAGE,
         lambda: ISSUE_UNSUPPORTED_PACKAGE.format(
             sha=self.sha,
             package=package_name,
             pip_url=pip_url,
             piplock_url=piplock_url,
             environment_details=self.get_environment_details(),
         ),
     )
    def run(self, lockfile: bool = False) -> None:
        """Keep your requirements.txt in sync with Pipfile/Pipfile.lock."""
        source_file = 'Pipfile.lock' if lockfile else 'Pipfile'
        source_url = construct_raw_file_url(self.service_url, self.slug,
                                            source_file, self.service_type)

        _LOGGER.debug("Downloading %r from %r", source_file, source_url)
        # TODO: propagate tls_verify for internal GitLab instances here and below as well
        response = requests.get(source_url)
        response.raise_for_status()
        parse = (self.get_pipfile_lock_requirements if lockfile
                 else self.get_pipfile_requirements)
        pipfile_content = sorted(parse(response.text))

        requirements_url = construct_raw_file_url(self.service_url, self.slug,
                                                  'requirements.txt',
                                                  self.service_type)
        _LOGGER.debug("Downloading requirements.txt from %r", requirements_url)
        response = requests.get(requirements_url)
        if response.status_code == 404:
            # If the requirements.txt file does not exist, create it.
            requirements_txt_content = []
        else:
            response.raise_for_status()
            requirements_txt_content = sorted(response.text.splitlines())

        if pipfile_content == requirements_txt_content:
            _LOGGER.info("Requirements in requirements.txt are up to date")
            # TODO: delete branch if already exists
            return

        with cloned_repo(self.service_url, self.slug, depth=1) as repo:
            with open('requirements.txt', 'w') as requirements_file:
                # Newline-terminated, one requirement per line.
                requirements_file.write('\n'.join(pipfile_content) + '\n')

            branch_name = 'pipfile-requirements-sync'
            repo.git.checkout(b=branch_name)
            repo.index.add(['requirements.txt'])
            commit_message = 'Update requirements.txt respecting requirements in {}'.format(
                'Pipfile.lock' if lockfile else 'Pipfile')
            repo.index.commit(commit_message)
            repo.remote().push(branch_name)
Beispiel #6
0
 def _create_unsupported_package_issue(self, package_name):
     """Create an issue as Kebechet doesn't support packages with git as source.

     :param package_name: name of the package whose source is a git repository
     """
     # Fixed the log message: typo "Errror" -> "Error" and grammar "due" -> "due to".
     _LOGGER.info("Key Error encountered, due to package source being git.")
     pip_url = construct_raw_file_url(
         self.service_url, self.slug, "Pipfile", self.service_type
     )
     piplock_url = construct_raw_file_url(
         self.service_url, self.slug, "Pipfile.lock", self.service_type
     )
     # Only open a new issue when an identically-titled one does not exist yet.
     issue = self.get_issue_by_title(_ISSUE_UNSUPPORTED_PACKAGE)
     if issue is None:
         self.project.create_issue(
             title=_ISSUE_UNSUPPORTED_PACKAGE,
             body=ISSUE_UNSUPPORTED_PACKAGE.format(
                 sha=self.sha,
                 package=package_name,
                 pip_url=pip_url,
                 piplock_url=piplock_url,
                 environment_details=self.get_environment_details(),
             ),
         )
Beispiel #7
0
 def _create_issue_for_pipenv_failure(self, exc: PipenvError, labels: list):
     """Open or refresh an issue reporting a failed pipenv update run."""
     _LOGGER.warning(
         "Failed to update dependencies to their latest version, reporting issue"
     )
     cwd_rel = self._get_cwd_relative2gitroot()
     # Raw-file links to both dependency files, used inside the issue body.
     pipfile_link, lock_link = (
         construct_raw_file_url(
             self.service_url,
             self.slug,
             os.path.join(cwd_rel, file_name),
             self.service_type,
         )
         for file_name in ("Pipfile", "Pipfile.lock")
     )
     issue_title = _ISSUE_FAILED_TO_UPDATE_DEPENDENCIES.format(
         env_name=self.runtime_environment)
     issue = self.get_issue_by_title(issue_title)
     if issue is not None:
         # An issue already exists - just append a refresh comment for this run.
         self._add_refresh_comment(exc=exc, issue=issue)
         return
     self.project.create_issue(
         title=issue_title,
         body=ISSUE_PIPENV_UPDATE_ALL.format(
             sha=self.sha,
             pip_url=pipfile_link,
             piplock_url=lock_link,
             environment_details=self.get_environment_details(),
             dependency_graph=self.get_dependency_graph(graceful=True),
             **exc.char_limit_dict(MAX_PIPENV_CMD_LEN),
         ),
         labels=labels,
     )
Beispiel #8
0
    def _create_or_update_initial_lock(self, labels, pipenv_used, req_dev):
        """Perform the initial dependency lock, reporting an issue on failure.

        :param labels: labels to attach to the issue created on failure
        :param pipenv_used: True when dependencies are managed via Pipfile/pipenv
        :param req_dev: True when requirements-dev.txt is being processed
        :raises PipenvError: re-raised after the failure has been reported
        """
        close_initial_lock_issue = partial(
            self.close_issue_and_comment,
            _ISSUE_INITIAL_LOCK_NAME.format(env_name=self.runtime_environment),
            comment=ISSUE_CLOSE_COMMENT.format(sha=self.sha),
        )

        # Check for first time (initial) locks first.
        try:
            if self._create_initial_lock(labels, pipenv_used, req_dev):
                close_initial_lock_issue()
                return {}
        except PipenvError as exc:
            _LOGGER.exception("Failed to perform initial dependency lock")
            # BUG FIX: the original conditional expression tested req_dev first,
            # so a pipenv-managed repo (pipenv_used=True, req_dev=False) was
            # reported against "requirements.txt" instead of "Pipfile".
            if pipenv_used:
                file_name = "Pipfile"
            elif req_dev:
                file_name = "requirements-dev.txt"
            else:
                file_name = "requirements.txt"
            file_url = construct_raw_file_url(self.service_url, self.slug,
                                              file_name, self.service_type)
            issue = self.get_issue_by_title(
                _ISSUE_INITIAL_LOCK_NAME.format(
                    env_name=self.runtime_environment))
            if issue is None:
                self.project.create_issue(
                    title=_ISSUE_INITIAL_LOCK_NAME.format(
                        env_name=self.runtime_environment),
                    body=ISSUE_INITIAL_LOCK.format(
                        sha=self.sha,
                        url=file_url,
                        file=file_name,
                        environment_details=self.get_environment_details(),
                        **exc.char_limit_dict(MAX_PIPENV_CMD_LEN),
                    ),
                    labels=labels,
                )
            else:
                # Issue already open - append a refresh comment for this run.
                self._add_refresh_comment(exc=exc, issue=issue)
            raise

        close_initial_lock_issue()
Beispiel #9
0
    def _do_update(
        self, labels: list, pipenv_used: bool = False, req_dev: bool = False
    ) -> dict:
        """Update dependencies based on management used.

        :param labels: labels to attach to created issues/merge requests
        :param pipenv_used: True when the repo is managed via Pipfile/pipenv
        :param req_dev: True when requirements-dev.txt is being processed
        :return: mapping with merge-request information, empty on no-op/failure
        """
        close_initial_lock_issue = partial(
            self.sm.close_issue_if_exists,
            _ISSUE_INITIAL_LOCK_NAME,
            comment=ISSUE_CLOSE_COMMENT.format(sha=self.sha),
        )

        # Check for first time (initial) locks first.
        try:
            if self._create_initial_lock(labels, pipenv_used, req_dev):
                close_initial_lock_issue()
                return {}
        except PipenvError as exc:
            _LOGGER.exception("Failed to perform initial dependency lock")
            # BUG FIX: the original conditional expression tested req_dev first,
            # so a pipenv-managed repo (pipenv_used=True, req_dev=False) was
            # reported against "requirements.txt" instead of "Pipfile".
            if pipenv_used:
                file_name = "Pipfile"
            elif req_dev:
                file_name = "requirements-dev.txt"
            else:
                file_name = "requirements.txt"
            file_url = construct_raw_file_url(
                self.service_url, self.slug, file_name, self.service_type
            )
            self.sm.open_issue_if_not_exist(
                _ISSUE_INITIAL_LOCK_NAME,
                body=lambda: ISSUE_INITIAL_LOCK.format(
                    sha=self.sha,
                    url=file_url,
                    file=file_name,
                    environment_details=self.get_environment_details(),
                    **exc.__dict__,  # noqa F821
                ),
                refresh_comment=partial(self._add_refresh_comment, exc),
                labels=labels,
            )
            raise

        close_initial_lock_issue()

        if pipenv_used:
            old_environment = self._get_all_packages_versions()
            old_direct_dependencies_version = self._get_direct_dependencies_version()
            try:
                self._pipenv_update_all()
            except PipenvError as exc:
                _LOGGER.warning(
                    "Failed to update dependencies to their latest version, reporting issue"
                )
                pip_url = construct_raw_file_url(
                    self.service_url, self.slug, "Pipfile", self.service_type
                )
                piplock_url = construct_raw_file_url(
                    self.service_url, self.slug, "Pipfile.lock", self.service_type
                )
                self.sm.open_issue_if_not_exist(
                    _ISSUE_UPDATE_ALL_NAME,
                    body=lambda: ISSUE_PIPENV_UPDATE_ALL.format(
                        sha=self.sha,
                        pip_url=pip_url,
                        piplock_url=piplock_url,
                        environment_details=self.get_environment_details(),
                        dependency_graph=self.get_dependency_graph(graceful=True),
                        **exc.__dict__,  # noqa F821
                    ),
                    refresh_comment=partial(self._add_refresh_comment, exc),
                    labels=labels,
                )
                return {}
            else:
                # We were able to update all, close reported issue if any.
                self.sm.close_issue_if_exists(
                    _ISSUE_UPDATE_ALL_NAME,
                    comment=ISSUE_CLOSE_COMMENT.format(sha=self.sha),
                )
        else:  # either requirements.txt or requirements-dev.txt
            old_environment = self._get_requirements_txt_dependencies(req_dev)
            direct_dependencies = self._get_direct_dependencies_requirements(req_dev)
            old_direct_dependencies_version = {
                k: v for k, v in old_environment.items() if k in direct_dependencies
            }

        outdated = self._get_all_outdated(old_direct_dependencies_version)
        _LOGGER.info(f"Outdated: {outdated}")

        # Undo changes made to Pipfile.lock by _pipenv_update_all. # Disabled for now.
        # self.repo.head.reset(index=True, working_tree=True)

        result = {}
        if outdated:
            # Do API calls only once, cache results.
            self._cached_merge_requests = self.sm.get_prs()
            body = self._generate_update_body(outdated)
            merge_request, should_update = self._should_update()
            if not should_update:
                _LOGGER.info(
                    f"Skipping update creation as the given update already exists in PR #{merge_request.id}"
                )
            try:
                versions = self._create_update(
                    body=body,
                    labels=labels,
                    old_environment=old_environment if not pipenv_used else None,
                    merge_request=merge_request,
                    pipenv_used=pipenv_used,
                    req_dev=req_dev,
                )
                if versions:
                    result["merge request id"] = versions  # return the merge request id
            except Exception as exc:
                _LOGGER.exception(
                    f"Failed to create update for current master {self.sha}: {str(exc)}"
                )
        return result
Beispiel #10
0
    def _do_update(self,
                   labels: list,
                   pipenv_used: bool = False,
                   req_dev: bool = False) -> dict:
        """Update dependencies based on management used.

        :param labels: labels to attach to created issues/merge requests
        :param pipenv_used: True when the repo is managed via Pipfile/pipenv
        :param req_dev: True when requirements-dev.txt is being processed
        :return: mapping of updated package name to version info, empty on no-op/failure
        """
        close_initial_lock_issue = partial(
            self.sm.close_issue_if_exists,
            _ISSUE_INITIAL_LOCK_NAME,
            comment=ISSUE_CLOSE_COMMENT.format(sha=self.sha),
        )

        # Check for first time (initial) locks first.
        try:
            if self._create_initial_lock(labels, pipenv_used, req_dev):
                close_initial_lock_issue()
                return {}
        except PipenvError as exc:
            _LOGGER.exception("Failed to perform initial dependency lock")
            # BUG FIX: the original conditional expression tested req_dev first,
            # so a pipenv-managed repo (pipenv_used=True, req_dev=False) was
            # reported against "requirements.txt" instead of "Pipfile".
            if pipenv_used:
                file_name = "Pipfile"
            elif req_dev:
                file_name = "requirements-dev.txt"
            else:
                file_name = "requirements.txt"
            file_url = construct_raw_file_url(self.service_url, self.slug,
                                              file_name, self.service_type)
            self.sm.open_issue_if_not_exist(
                _ISSUE_INITIAL_LOCK_NAME,
                body=lambda: ISSUE_INITIAL_LOCK.format(
                    sha=self.sha,
                    url=file_url,
                    file=file_name,
                    environment_details=self.get_environment_details(),
                    **exc.__dict__,  # noqa F821
                ),
                refresh_comment=partial(self._add_refresh_comment, exc),
                labels=labels,
            )
            raise

        close_initial_lock_issue()

        if pipenv_used:
            old_environment = self._get_all_packages_versions()
            old_direct_dependencies_version = self._get_direct_dependencies_version()
            try:
                self._pipenv_update_all()
            except PipenvError as exc:
                _LOGGER.warning(
                    "Failed to update dependencies to their latest version, reporting issue"
                )
                pip_url = construct_raw_file_url(self.service_url, self.slug,
                                                 "Pipfile", self.service_type)
                piplock_url = construct_raw_file_url(self.service_url,
                                                     self.slug, "Pipfile.lock",
                                                     self.service_type)
                self.sm.open_issue_if_not_exist(
                    _ISSUE_UPDATE_ALL_NAME,
                    body=lambda: ISSUE_PIPENV_UPDATE_ALL.format(
                        sha=self.sha,
                        pip_url=pip_url,
                        piplock_url=piplock_url,
                        environment_details=self.get_environment_details(),
                        dependency_graph=self.get_dependency_graph(graceful=True),
                        **exc.__dict__,  # noqa F821
                    ),
                    refresh_comment=partial(self._add_refresh_comment, exc),
                    labels=labels,
                )
                return {}
            else:
                # We were able to update all, close reported issue if any.
                self.sm.close_issue_if_exists(
                    _ISSUE_UPDATE_ALL_NAME,
                    comment=ISSUE_CLOSE_COMMENT.format(sha=self.sha),
                )
        else:  # either requirements.txt or requirements-dev.txt
            old_environment = self._get_requirements_txt_dependencies(req_dev)
            direct_dependencies = self._get_direct_dependencies_requirements(
                req_dev)
            old_direct_dependencies_version = {
                k: v
                for k, v in old_environment.items() if k in direct_dependencies
            }

        outdated = self._get_all_outdated(old_direct_dependencies_version)
        _LOGGER.info(f"Outdated: {outdated}")

        # Undo changes made to Pipfile.lock by _pipenv_update_all.
        self.repo.head.reset(index=True, working_tree=True)

        result = {}
        if outdated:
            # Do API calls only once, cache results.
            self._cached_merge_requests = self.sm.get_prs()

        # Iterating a dict yields its keys; no need for .keys().
        for package_name in outdated:
            # As an optimization, first check if the given PR is already present.
            new_version = outdated[package_name]["new_version"]
            old_version = outdated[package_name]["old_version"]

            merge_request, should_update = self._should_update(
                package_name, new_version)
            if not should_update:
                _LOGGER.info(
                    f"Skipping update creation for {package_name} from version {old_version} to "
                    f"{new_version} as the given update already exists in PR #{merge_request.id}"
                )
                continue

            try:
                self._replicate_old_environment()
            except PipenvError as exc:
                # There has been an error in locking dependencies. This can be due to a missing dependency or simply
                # currently locked dependencies are not correct. Try to issue a pull request that would fix
                # that. We know that update all works, use update.
                _LOGGER.warning(
                    "Failed to replicate old environment, re-locking all dependencies"
                )
                self._relock_all(exc, labels)
                return {}

            is_dev = outdated[package_name]["dev"]
            try:
                _LOGGER.info(
                    f"Creating update of dependency {package_name} in repo {self.slug} (devel: {is_dev})"
                )
                versions = self._create_update(
                    package_name,
                    new_version,
                    old_version,
                    is_dev=is_dev,
                    labels=labels,
                    old_environment=old_environment
                    if not pipenv_used else None,
                    merge_request=merge_request,
                    pipenv_used=pipenv_used,
                    req_dev=req_dev,
                )
                if versions:
                    result[package_name] = versions
            except Exception as exc:
                _LOGGER.exception(
                    f"Failed to create update for dependency {package_name}: {str(exc)}"
                )
            finally:
                # Always restore a clean working tree on master before the next package.
                self.repo.head.reset(index=True, working_tree=True)
                self.repo.git.checkout("master")

        # We know that locking was done correctly - if the issue is still open, close it. The issue
        # should be automatically closed by merging the generated PR.
        self.sm.close_issue_if_exists(
            _ISSUE_REPLICATE_ENV_NAME,
            comment=ISSUE_CLOSE_COMMENT.format(sha=self.sha))

        self._delete_old_branches(outdated)
        return result