def run(self, labels: list, analysis_id=None):
    """Run Thoth Advising Bot.

    :param labels: labels to attach to opened issues / merge requests.
    :param analysis_id: when None, submit a new advise request (fire and
        forget); otherwise process the results of that finished analysis.
    :returns: True on success, False on failure, None when the triggering
        event is not supported.
    """
    if self.parsed_payload:
        # Only act on webhook events this manager supports.
        if self.parsed_payload.get("event") not in _EVENTS_SUPPORTED:
            _LOGGER.info(
                "ThothAdviseManager doesn't act on %r events.",
                self.parsed_payload.get("event"),
            )
            return

    if analysis_id is None:
        # Submission path: request a new advise for this repository.
        with cloned_repo(self, depth=1) as repo:
            self.repo = repo
            if not os.path.isfile("Pipfile"):
                _LOGGER.warning("Pipfile not found in repo... Creating issue")
                self.sm.open_issue_if_not_exist(
                    "Missing Pipfile",
                    lambda: "Check your repository to make sure Pipfile exists",
                    labels=labels,
                )
                return False

            # nowait=True: results are processed by a later invocation that
            # passes analysis_id.
            lib.advise_here(
                nowait=True,
                origin=(f"{self.service_url}/{self.slug}"),
                source_type=ThothAdviserIntegrationEnum.KEBECHET,
                kebechet_metadata=self.metadata,
            )
        return True
    else:
        # Result-processing path: open a merge request or an issue based on
        # the analysis outcome.
        with cloned_repo(self, depth=1) as repo:
            self.repo = repo
            _LOGGER.info("Using analysis results from %s", analysis_id)
            res = lib.get_analysis_results(analysis_id)
            branch_name = self._construct_branch_name()
            # -B: create or reset the branch to the current HEAD.
            branch = self.repo.git.checkout("-B", branch_name)  # noqa F841
            self._cached_merge_requests = self.sm.repository.get_pr_list()

            if res is None:
                _LOGGER.error(
                    "Advise failed on server side, contact the maintainer"
                )
                return False
            _LOGGER.debug(json.dumps(res))

            # res is a (report, error_flag) pair; error_flag False == success.
            if res[1] is False:
                _LOGGER.info("Advise succeeded")
                self._write_advise(res)
                self._open_merge_request(
                    branch_name, labels, ["Pipfile.lock"], res[0].get("metadata")
                )
                return True
            else:
                _LOGGER.warning(
                    "Found error while running adviser... Creating issue"
                )
                self._issue_advise_error(res, labels)
                return False
def run(self, labels: list, analysis_id: Optional[str] = None):
    """Run the provenance check bot.

    :param labels: labels to attach to any opened issue.
    :param analysis_id: when falsy, submit a new provenance check;
        otherwise process the results of that finished analysis.
    :returns: True on success, False on failure, None when the triggering
        event is not supported.
    """
    if self.parsed_payload:
        # Only act on webhook events this manager supports.
        if self.parsed_payload.get("event") not in _EVENTS_SUPPORTED:
            _LOGGER.info(
                "ThothProvenanceManager doesn't act on %r events.",
                self.parsed_payload.get("event"),
            )
            return

    if not analysis_id:
        # Submission path: both pipenv files must be present.
        with cloned_repo(self, depth=1) as repo:
            self.repo = repo
            if not (os.path.isfile("Pipfile") and os.path.isfile("Pipfile.lock")):
                _LOGGER.warning(
                    "Pipfile or Pipfile.lock is missing from repo, opening issue"
                )
                issue = self.get_issue_by_title("Missing pipenv files")
                if issue is None:
                    self.project.create_issue(
                        title="Missing pipenv files",
                        body="Check your repository to make sure Pipfile and Pipfile.lock exist.",
                        labels=labels,
                    )
                return False

            # NOTE(review): this logs url+slug concatenated without a "/"
            # separator, unlike the origin= below — confirm intended.
            _LOGGER.info((self.service_url + self.slug))
            lib.provenance_check_here(
                nowait=True, origin=f"{self.service_url}/{self.slug}"
            )
        return True
    else:
        # Result-processing path: only provenance analyses are relevant here.
        if not analysis_id.startswith("provenance"):
            _LOGGER.debug("Analysis id isn't provenance, manager terminating...")
            return False
        with cloned_repo(self, depth=1) as repo:
            res = lib.get_analysis_results(analysis_id)
            if res is None:
                _LOGGER.error(
                    "Provenance check failed on server side, contact the maintainer"
                )
                return False

            # res is a (report, error_flag) pair; error_flag False == success.
            if res[1] is False:
                _LOGGER.info("Provenance check found problems, creating issue...")
                self._issue_provenance_error(res, labels)
                return False
            else:
                _LOGGER.info(
                    "Provenance check found no problems, carry on coding :)"
                )
                return True
def run(self) -> typing.Optional[dict]:  # type: ignore
    """Check for info issue and close it with a report."""
    # Skip unsupported webhook events early.
    if self.parsed_payload:
        event = self.parsed_payload.get("event")
        if event not in _EVENTS_SUPPORTED:
            _LOGGER.info("Info manager doesn't act on %r events.", event)
            return None

    # Nothing to do unless somebody opened the info issue.
    if not self.sm.get_issue(_INFO_ISSUE_NAME):
        _LOGGER.info("No issue to report to, exiting")
        return None

    _LOGGER.info(f"Found issue {_INFO_ISSUE_NAME}, generating report")
    with cloned_repo(self.service_url, self.slug, depth=1) as repo:
        # We could optimize this as the get_issue() does API calls as well. Keep it this simple now.
        report = INFO_REPORT.format(
            sha=repo.head.commit.hexsha,
            slug=self.slug,
            environment_details=self.get_environment_details(),
            dependency_graph=self.get_dependency_graph(graceful=True),
        )
        self.sm.close_issue_if_exists(_INFO_ISSUE_NAME, report)
    return None
def run(self, labels: list, analysis_id=None):
    """Run the provenance check bot.

    :param labels: labels to attach to any opened issue.
    :param analysis_id: when falsy, submit a new provenance check;
        otherwise process the results of that finished analysis.
    :returns: True on success, False on failure.
    """
    if not analysis_id:
        # Submission path: both pipenv files must be present.
        with cloned_repo(self.service_url, self.slug, depth=1) as repo:
            self.repo = repo
            if not (os.path.isfile("Pipfile") and os.path.isfile("Pipfile.lock")):
                _LOGGER.warning(
                    "Pipfile or Pipfile.lock is missing from repo, opening issue"
                )
                self.sm.open_issue_if_not_exist(
                    "Missing pipenv files",
                    lambda: "Check your repository to make sure Pipfile and Pipfile.lock exist.",
                    labels=labels)
                return False

            # NOTE(review): logs url+slug without a "/" separator, unlike
            # the origin= below — confirm intended.
            _LOGGER.info((self.service_url + self.slug))
            lib.provenance_check_here(
                nowait=True, origin=f"{self.service_url}/{self.slug}")
        return True
    else:
        # Result-processing path for a previously submitted analysis.
        with cloned_repo(self.service_url, self.slug, depth=1) as repo:
            res = lib.get_analysis_results(analysis_id)
            if res is None:
                _LOGGER.error(
                    "Provenance check failed on server side, contact the maintainer"
                )
                return False

            # res is a (report, error_flag) pair; error_flag False == success.
            if res[1] is False:
                _LOGGER.info(
                    "Provenance check found problems, creating issue...")
                self._issue_provenance_error(res, labels)
                return False
            else:
                _LOGGER.info(
                    "Provenance check found no problems, carry on coding :)"
                )
                return True
def run(self, labels: list, analysis_id=None):
    """Run Thoth Advising Bot.

    :param labels: labels to attach to opened issues / merge requests.
    :param analysis_id: when None, submit a new advise request;
        otherwise process the results of that finished analysis.
    :returns: True on success, False on failure.
    """
    if analysis_id is None:
        # Submission path: a Pipfile is required to run the adviser.
        with cloned_repo(self.service_url, self.slug, depth=1) as repo:
            self.repo = repo
            if not os.path.isfile("Pipfile"):
                _LOGGER.warning(
                    "Pipfile not found in repo... Creating issue")
                self.sm.open_issue_if_not_exist(
                    "Missing Pipfile",
                    lambda: "Check your repository to make sure Pipfile exists",
                    labels=labels)
                return False

            # nowait=True: results are processed by a later invocation that
            # passes analysis_id.
            lib.advise_here(nowait=True,
                            origin=(f"{self.service_url}/{self.slug}"))
        return True
    else:
        # Result-processing path: open a merge request or an issue.
        with cloned_repo(self.service_url, self.slug, depth=1) as repo:
            self.repo = repo
            _LOGGER.info("Using analysis results from %s", analysis_id)
            res = lib.get_analysis_results(analysis_id)
            branch_name = self._construct_branch_name()
            # -B: create or reset the branch; the return value is unused.
            branch = self.repo.git.checkout("-B", branch_name)
            self._cached_merge_requests = self.sm.repository.merge_requests

            if res is None:
                _LOGGER.error(
                    "Advise failed on server side, contact the maintainer")
                return False
            _LOGGER.debug(json.dumps(res))

            # res is a (report, error_flag) pair; error_flag False == success.
            if res[1] is False:
                _LOGGER.info('Advise succeeded')
                self._write_advise(res)
                self._open_merge_request(branch_name, labels, ["Pipfile.lock"])
                return True
            else:
                _LOGGER.warning(
                    'Found error while running adviser... Creating issue')
                self._issue_advise_error(res, labels)
                return False
def run(self, lockfile: bool = False) -> None:  # type: ignore
    """Keep your requirements.txt in sync with Pipfile/Pipfile.lock.

    :param lockfile: when True, sync from Pipfile.lock instead of Pipfile.
    """
    if self.parsed_payload:
        # Only act on webhook events this manager supports.
        if self.parsed_payload.get("event") not in _EVENTS_SUPPORTED:
            _LOGGER.info(
                "PipfileRequirementsManager doesn't act on %r events.",
                self.parsed_payload.get("event"),
            )
            return

    # Fetch the pipenv source file via the forge's raw-file endpoint.
    file_name = "Pipfile.lock" if lockfile else "Pipfile"
    file_url = construct_raw_file_url(
        self.service_url, self.slug, file_name, self.service_type
    )
    _LOGGER.debug("Downloading %r from %r", file_name, file_url)
    # TODO: propagate tls_verify for internal GitLab instances here and bellow as well
    response = requests.get(file_url)
    response.raise_for_status()

    # Sorted so the comparison below is order-insensitive.
    pipfile_content = (
        sorted(self.get_pipfile_lock_requirements(response.text))
        if lockfile
        else sorted(self.get_pipfile_requirements(response.text))
    )

    file_url = construct_raw_file_url(
        self.service_url, self.slug, "requirements.txt", self.service_type
    )
    _LOGGER.debug("Downloading requirements.txt from %r", file_url)
    response = requests.get(file_url)
    if response.status_code == 404:
        # If the requirements.txt file does not exist, create it.
        requirements_txt_content = []
    else:
        response.raise_for_status()
        requirements_txt_content = sorted(response.text.splitlines())

    if pipfile_content == requirements_txt_content:
        _LOGGER.info("Requirements in requirements.txt are up to date")
        # TODO: delete branch if already exists
        return

    # Out of sync: rewrite requirements.txt on a dedicated branch and push.
    with cloned_repo(self, depth=1) as repo:
        with open("requirements.txt", "w") as requirements_file:
            requirements_file.write("\n".join(pipfile_content))
            requirements_file.write("\n")

        branch_name = "pipfile-requirements-sync"
        repo.git.checkout(b=branch_name)
        repo.index.add(["requirements.txt"])
        repo.index.commit(
            "Update requirements.txt respecting requirements in {}".format(
                "Pipfile" if not lockfile else "Pipfile.lock"
            )
        )
        repo.remote().push(branch_name)
def run(self, labels: list) -> typing.Optional[dict]:
    """Create a pull request for each and every direct dependency in the given org/repo (slug).

    :param labels: labels to attach to opened issues / merge requests.
    :returns: the update result dict, {} when no dependency management was
        found, or None when the triggering event is not supported.
    """
    if self.parsed_payload:
        # Only act on webhook events this manager supports.
        if self.parsed_payload.get("event") not in _EVENTS_SUPPORTED:
            _LOGGER.info(
                "Update Manager doesn't act on %r events.",
                self.parsed_payload.get("event"),
            )
            return None

    # We will keep venv in the project itself - we have permissions in the cloned repo.
    os.environ["PIPENV_VENV_IN_PROJECT"] = "1"
    with cloned_repo(self, depth=1) as repo:
        # Make repo available in the instance.
        self.repo = repo

        # Pre-bound closers so each branch below can close stale issues.
        close_no_management_issue = partial(
            self.sm.close_issue_if_exists,
            _ISSUE_NO_DEPENDENCY_NAME,
            comment=ISSUE_CLOSE_COMMENT.format(sha=self.sha),
        )
        close_manual_update_issue = partial(
            self.sm.close_issue_if_exists,
            _ISSUE_MANUAL_UPDATE,
            comment=ISSUE_CLOSE_COMMENT.format(sha=self.sha),
        )

        if os.path.isfile("Pipfile"):
            _LOGGER.info("Using Pipfile for dependency management")
            close_no_management_issue()
            result = self._do_update(labels, pipenv_used=True, req_dev=False)
            close_manual_update_issue()
        elif os.path.isfile("requirements.in"):
            # pip-tools style input files; a Pipfile environment is synthesized.
            self._create_pipenv_environment(input_file="requirements.in")
            _LOGGER.info("Using requirements.in for dependency management")
            close_no_management_issue()
            result = self._do_update(labels, pipenv_used=False, req_dev=False)
            # NOTE(review): requirements-dev.in is only honored when
            # requirements.in also exists — confirm intended.
            if os.path.isfile("requirements-dev.in"):
                self._create_pipenv_environment(input_file="requirements-dev.in")
                _LOGGER.info("Using requirements-dev.in for dependency management")
                close_no_management_issue()
                result = self._do_update(labels, pipenv_used=False, req_dev=True)
            close_manual_update_issue()
        else:
            _LOGGER.warning("No dependency management found")
            self.sm.open_issue_if_not_exist(
                _ISSUE_NO_DEPENDENCY_NAME,
                lambda: ISSUE_NO_DEPENDENCY_MANAGEMENT,
                labels=labels,
            )
            return {}

        return result
def run(self, lockfile: bool = False) -> None:  # type: ignore
    """Keep your requirements.txt in sync with Pipfile/Pipfile.lock.

    :param lockfile: when True, sync from Pipfile.lock instead of Pipfile.
    """
    if self.parsed_payload:
        # Only act on webhook events this manager supports.
        if self.parsed_payload.get("event") not in _EVENTS_SUPPORTED:
            _LOGGER.info(
                "PipfileRequirementsManager doesn't act on %r events.",
                self.parsed_payload.get("event"),
            )
            return

    file_name = "Pipfile.lock" if lockfile else "Pipfile"
    try:
        file_contents = self.project.get_file_content(file_name)
    except FileNotFoundError:
        # Without the pipenv source file there is nothing to sync from.
        self._create_missing_pipenv_files_issue(file_name)
        return

    # Sorted so the comparison below is order-insensitive.
    pipfile_content = (
        sorted(self.get_pipfile_lock_requirements(file_contents))
        if lockfile
        else sorted(self.get_pipfile_requirements(file_contents))
    )

    try:
        file_contents = self.project.get_file_content("requirements.txt")
        requirements_txt_content = sorted(file_contents.splitlines())
    except (FileNotFoundError, UnknownObjectException):
        requirements_txt_content = (
            []
        )  # requirements.txt file has not been created so the manager will create it

    if pipfile_content == requirements_txt_content:
        _LOGGER.info("Requirements in requirements.txt are up to date")
        # TODO: delete branch if already exists
        return

    # Out of sync: rewrite requirements.txt on a dedicated branch and push.
    with cloned_repo(self, depth=1) as repo:
        with open("requirements.txt", "w") as requirements_file:
            requirements_file.write("\n".join(pipfile_content))
            requirements_file.write("\n")

        branch_name = "kebechet-pipfile-requirements-sync"
        repo.git.checkout(b=branch_name)
        repo.index.add(["requirements.txt"])
        repo.index.commit(
            "Update requirements.txt respecting requirements in {}".format(
                "Pipfile" if not lockfile else "Pipfile.lock"
            )
        )
        repo.remote().push(branch_name)
def run(self, labels: list) -> typing.Optional[dict]: """Create a pull request for each and every direct dependency in the given org/repo (slug).""" # We will keep venv in the project itself - we have permissions in the cloned repo. os.environ['PIPENV_VENV_IN_PROJECT'] = '1' with cloned_repo(self.service_url, self.slug, depth=1) as repo: # Make repo available in the instance. self.repo = repo close_no_management_issue = partial( self.sm.close_issue_if_exists, _ISSUE_NO_DEPENDENCY_NAME, comment=ISSUE_CLOSE_COMMENT.format(sha=self.sha)) if os.path.isfile('Pipfile'): _LOGGER.info("Using Pipfile for dependency management") close_no_management_issue() result = self._do_update(labels, pipenv_used=True, req_dev=False) elif os.path.isfile('requirements.in'): self._create_pipenv_environment(input_file='requirements.in') _LOGGER.info("Using requirements.in for dependency management") close_no_management_issue() result = self._do_update(labels, pipenv_used=False, req_dev=False) if os.path.isfile('requirements-dev.in'): self._create_pipenv_environment( input_file='requirements-dev.in') _LOGGER.info( "Using requirements-dev.in for dependency management") close_no_management_issue() result = self._do_update(labels, pipenv_used=False, req_dev=True) else: _LOGGER.warning("No dependency management found") self.sm.open_issue_if_not_exist( _ISSUE_NO_DEPENDENCY_NAME, lambda: ISSUE_NO_DEPENDENCY_MANAGEMENT, labels=labels) return {} return result
def run(self) -> typing.Optional[dict]:
    """Check for info issue and close it with a report."""
    # Nothing to do unless somebody opened the info issue.
    if not self.sm.get_issue(_INFO_ISSUE_NAME):
        _LOGGER.info("No issue to report to, exiting")
        return

    _LOGGER.info(f"Found issue {_INFO_ISSUE_NAME}, generating report")
    with cloned_repo(self.service_url, self.slug, depth=1) as repo:
        # We could optimize this as the get_issue() does API calls as well. Keep it this simple now.
        report = INFO_REPORT.format(
            sha=repo.head.commit.hexsha,
            slug=self.slug,
            environment_details=self.get_environment_details(),
            dependency_graph=self.get_dependency_graph(graceful=True),
        )
        self.sm.close_issue_if_exists(_INFO_ISSUE_NAME, report)
def run(self, lockfile: bool = False) -> None:  # type: ignore
    """Keep your requirements.txt in sync with Pipfile/Pipfile.lock.

    :param lockfile: when True, sync from Pipfile.lock instead of Pipfile.
    """
    if self.parsed_payload:
        # Only act on webhook events this manager supports.
        if self.parsed_payload.get("event") not in _EVENTS_SUPPORTED:
            _LOGGER.info(
                "PipfileRequirementsManager doesn't act on %r events.",
                self.parsed_payload.get("event"),
            )
            return

    file_name = "Pipfile.lock" if lockfile else "Pipfile"
    file_contents = self.project.get_file_content(file_name)
    # Sorted so the comparison below is order-insensitive.
    pipfile_content = (
        sorted(self.get_pipfile_lock_requirements(file_contents))
        if lockfile
        else sorted(self.get_pipfile_requirements(file_contents))
    )

    try:
        file_contents = self.project.get_file_content("requirements.txt")
        requirements_txt_content = sorted(file_contents.splitlines())
    except (FileNotFoundError, UnknownObjectException):
        # Fix: a missing requirements.txt previously propagated an exception
        # here; treat it as empty so the manager creates the file below.
        requirements_txt_content = []

    if pipfile_content == requirements_txt_content:
        _LOGGER.info("Requirements in requirements.txt are up to date")
        # TODO: delete branch if already exists
        return

    # Out of sync: rewrite requirements.txt on a dedicated branch and push.
    with cloned_repo(self, depth=1) as repo:
        with open("requirements.txt", "w") as requirements_file:
            requirements_file.write("\n".join(pipfile_content))
            requirements_file.write("\n")

        branch_name = "pipfile-requirements-sync"
        repo.git.checkout(b=branch_name)
        repo.index.add(["requirements.txt"])
        repo.index.commit(
            "Update requirements.txt respecting requirements in {}".format(
                "Pipfile" if not lockfile else "Pipfile.lock"
            )
        )
        repo.remote().push(branch_name)
def run(self, lockfile: bool = False) -> None:
    """Keep your requirements.txt in sync with Pipfile/Pipfile.lock."""
    source_name = 'Pipfile.lock' if lockfile else 'Pipfile'
    source_url = construct_raw_file_url(self.service_url, self.slug,
                                        source_name, self.service_type)
    _LOGGER.debug("Downloading %r from %r", source_name, source_url)
    # TODO: propagate tls_verify for internal GitLab instances here and bellow as well
    resp = requests.get(source_url)
    resp.raise_for_status()

    # Parse requirements out of the pipenv file; sorted for a stable compare.
    if lockfile:
        wanted = sorted(self.get_pipfile_lock_requirements(resp.text))
    else:
        wanted = sorted(self.get_pipfile_requirements(resp.text))

    req_url = construct_raw_file_url(self.service_url, self.slug,
                                     'requirements.txt', self.service_type)
    _LOGGER.debug("Downloading requirements.txt from %r", req_url)
    resp = requests.get(req_url)
    if resp.status_code == 404:
        # If the requirements.txt file does not exist, create it.
        current = []
    else:
        resp.raise_for_status()
        current = sorted(resp.text.splitlines())

    if wanted == current:
        _LOGGER.info("Requirements in requirements.txt are up to date")
        # TODO: delete branch if already exists
        return

    # Out of sync: rewrite requirements.txt on a dedicated branch and push.
    with cloned_repo(self.service_url, self.slug, depth=1) as repo:
        with open('requirements.txt', 'w') as out:
            out.write('\n'.join(wanted))
            out.write('\n')

        sync_branch = 'pipfile-requirements-sync'
        repo.git.checkout(b=sync_branch)
        repo.index.add(['requirements.txt'])
        commit_message = 'Update requirements.txt respecting requirements in {}'.format(
            'Pipfile.lock' if lockfile else 'Pipfile')
        repo.index.commit(commit_message)
        repo.remote().push(sync_branch)
def run(self) -> typing.Optional[dict]:  # type: ignore
    """Open a pull request initializing a basic .thoth.yaml in the default branch.

    Skips if a PR from the init branch already exists. Always returns None.
    """
    # Template configuration shipped with the package resources.
    thoth_config = pkg_resources.read_text(resources, "simple.thoth.yaml")
    with cloned_repo(self, depth=1, branch=self.project.default_branch) as repo:
        prs = self.get_prs_by_branch(_BRANCH_NAME)
        if len(prs) > 0:
            _LOGGER.debug("PR initializing .thoth.yaml already exists skipping...")
            return None
        repo.git.checkout("HEAD", b=_BRANCH_NAME)
        # w+: create (or truncate) the config file in the fresh branch.
        with open(".thoth.yaml", "w+") as f:
            f.write(thoth_config)
        repo.index.add([".thoth.yaml"])
        repo.index.commit("Initialize .thoth.yaml with basic configuration")
        repo.remote().push(_BRANCH_NAME)
        self.create_pr(
            title="Thoth Configuration Initialization",
            body=_PR_BODY,
            target_branch=self.project.default_branch,
            source_branch=_BRANCH_NAME,
        )
def _branch_and_update_vers_and_changelog(
    self,
    trigger: BaseTrigger,
    changelog_smart: bool,
    changelog_classifier: str,
    changelog_format: str,
    changelog_file: bool,
) -> Tuple[str, str, List[str], bool]:
    """Bump the version, compute the changelog and push everything on a release branch.

    :param trigger: what kind of release was requested.
    :param changelog_smart: use the smart (ML-assisted) changelog generator.
    :param changelog_classifier: classifier model name for smart changelogs.
    :param changelog_format: output format for the changelog.
    :param changelog_file: also persist the changelog to CHANGELOG.md.
    :returns: (branch name, new version, changelog lines, whether a previous
        release tag exists).
    :raises NoChangesException: when the computed changelog is empty.
    """
    with cloned_repo(self) as repo:
        # Update version strings in sources per the trigger.
        res = self._trigger_update_files(trigger)
        version_file, new_version, old_version = res
        repo.git.add(version_file)

        prev_release = utils._prev_release_tag(repo, old_version)
        changelog = utils._compute_changelog(
            repo=repo,
            old_version=old_version,
            new_version=new_version,
            changelog_smart=changelog_smart,
            changelog_classifier=changelog_classifier,
            changelog_format=changelog_format,
            prev_release_tag=prev_release,
        )

        # An empty changelog means there is nothing to release.
        if not changelog:
            raise NoChangesException("No changes found.")
        if changelog_file:
            utils._write_to_changelog(changelog, new_version)
            repo.git.add("CHANGELOG.md")

        # Commit staged changes on a fresh release branch and push it.
        branch_name = "v" + new_version
        repo.git.checkout("HEAD", b=branch_name)
        message = constants._VERSION_PULL_REQUEST_NAME.format(new_version)
        repo.index.commit(message)
        repo.remote().push(branch_name)
    return branch_name, new_version, changelog, bool(prev_release)
def run(self, maintainers: list = None, assignees: list = None,
        labels: list = None, changelog_file: bool = False) -> None:
    """Check issues for new issue request, if a request exists, issue a new PR with adjusted version in sources.

    :param maintainers: users allowed to request releases (falls back to
        the configured maintainers file when not given).
    :param assignees: users to assign to the release issue.
    :param labels: labels to attach to the opened merge request.
    :param changelog_file: forwarded to changelog computation.
    """
    reported_issues = []
    for issue in self.sm.repository.issues:
        issue_title = issue.title.strip()

        if issue_title.startswith((_NO_VERSION_FOUND_ISSUE_NAME,
                                   _MULTIPLE_VERSIONS_FOUND_ISSUE_NAME)):
            # Reported issues that should be closed on success version change.
            reported_issues.append(issue)

        # This is an optimization not to clone repo each time.
        if not self._is_release_request(issue_title):
            continue

        _LOGGER.info(
            "Found an issue #%s which is a candidate for request of new version release: %s",
            issue.number, issue.title)

        with cloned_repo(self.service_url, self.slug) as repo:
            if assignees:
                try:
                    self.sm.assign(issue, assignees)
                except Exception:
                    # Best effort: a failed assignment should not block the release.
                    _LOGGER.exception(
                        f"Failed to assign {assignees} to issue #{issue.number}"
                    )
                    issue.add_comment(
                        "Unable to assign provided assignees, please check bot configuration."
                    )

            maintainers = maintainers or self._get_maintainers(labels)
            if issue.author.username not in maintainers:
                # NOTE(review): the conditional expression binds over the whole
                # concatenation — with empty maintainers only the fallback text
                # is posted. Confirm this precedence is intended.
                issue.add_comment(
                    f"Sorry, @{issue.author.username} but you are not stated in maintainers section for "
                    f"this project. Maintainers are @" + ', @'.join(maintainers)
                    if maintainers else "Sorry, no maintainers configured.")
                issue.close()
                # Next issue.
                continue

            try:
                version_identifier, old_version = self._adjust_version_in_sources(
                    repo, labels, issue)
            except VersionError as exc:
                _LOGGER.exception(
                    "Failed to adjust version information in sources")
                issue.add_comment(str(exc))
                issue.close()
                raise

            if not version_identifier:
                _LOGGER.error("Giving up with automated release")
                return

            changelog = self._compute_changelog(
                repo, old_version, version_identifier,
                version_file=changelog_file)

            # Commit the version bump on a fresh release branch and push it.
            branch_name = 'v' + version_identifier
            repo.git.checkout('HEAD', b=branch_name)
            message = _VERSION_PULL_REQUEST_NAME.format(version_identifier)
            repo.index.commit(message)
            # If this PR already exists, this will fail.
            repo.remote().push(branch_name)

            request = self.sm.open_merge_request(
                message, branch_name,
                body=self._construct_pr_body(issue, changelog),
                labels=labels)

            _LOGGER.info(
                f"Opened merge request with {request.number} for new release of {self.slug} "
                f"in version {version_identifier}")

            # The release succeeded; stale version-detection issues can go.
            for reported_issue in reported_issues:
                reported_issue.add_comment(
                    "Closing as this issue is no longer relevant.")
                reported_issue.close()
def run(self, labels: list) -> Optional[dict]:
    """Create a pull request for each and every direct dependency in the given org/repo (slug).

    Iterates over configured runtime environments (overlays) and runs the
    update flow once per environment.

    :param labels: labels to attach to opened issues / merge requests.
    :returns: mapping of runtime environment name to its update result, or
        None when the event is unsupported or the requested runtime is gone.
    """
    if self.parsed_payload:
        # Only act on webhook events this manager supports.
        if self.parsed_payload.get("event") not in _EVENTS_SUPPORTED:
            _LOGGER.info(
                "Update Manager doesn't act on %r events.",
                self.parsed_payload.get("event"),
            )
            return None

    # We will keep venv in the project itself - we have permissions in the cloned repo.
    os.environ["PIPENV_VENV_IN_PROJECT"] = "1"
    with cloned_repo(self) as repo:
        # Make repo available in the instance.
        thoth_config.load_config()
        self.repo = repo
        runtime_environment_names = [
            e["name"] for e in thoth_config.list_runtime_environments()
        ]
        overlays_dir = thoth_config.content.get("overlays_dir")

        # Decide which environments to process this run.
        runtime_environments: List[Optional[str]]
        if self.runtime_environment:
            if self.runtime_environment not in runtime_environment_names:
                # This is not a warning as it is expected when users remove and change runtime_environments
                _LOGGER.info(
                    "Requested runtime does not exist in target repo.")
                return None
            runtime_environments = [self.runtime_environment]
        else:
            if overlays_dir:
                runtime_environments = runtime_environment_names
            elif runtime_environment_names:
                runtime_environments = [runtime_environment_names[0]]
            else:
                runtime_environments = [None]

        results: dict = {}
        close_manual_update_issue = partial(
            self.close_issue_and_comment,
            _ISSUE_MANUAL_UPDATE,
            comment=CLOSE_MANUAL_ISSUE_COMMENT.format(
                sha=self.sha,
                time=datetime.now().strftime("%m/%d/%Y, %H:%M:%S")),
        )

        for e in runtime_environments:
            self.runtime_environment = e or "default"
            close_no_management_issue = partial(
                self.close_issue_and_comment,
                _ISSUE_NO_DEPENDENCY_NAME.format(
                    env_name=self.runtime_environment),
                comment=ISSUE_CLOSE_COMMENT.format(sha=self.sha),
            )
            # Work inside the overlay directory for this environment, "."
            # for the default (non-overlay) environment.
            env_dir = thoth_config.get_overlays_directory(e) if e else "."
            with cwd(env_dir):
                # Look at existing update PRs for this environment's branch.
                update_prs = (self.get_prs_by_branch(
                    _string2branch_name(
                        _UPDATE_BRANCH_NAME.format(
                            env_name=self.runtime_environment)),
                    status=PRStatus.all,
                ) or [])
                to_rebase = []
                for pr in update_prs:
                    if pr.status == PRStatus.open:
                        to_rebase.append(pr)
                if update_prs and not to_rebase:
                    # All update PRs are closed/merged — drop the stale branch.
                    try:
                        self.delete_remote_branch(
                            _string2branch_name(
                                _UPDATE_BRANCH_NAME.format(
                                    env_name=self.runtime_environment)))
                    except Exception:
                        # Best effort: a leftover branch is not fatal.
                        _LOGGER.exception(
                            f"Failed to delete branch "
                            f"{_UPDATE_BRANCH_NAME.format(env_name=self.runtime_environment)}, trying to continue"
                        )
                elif to_rebase:
                    # An open update PR exists — rebase it instead of
                    # computing a new update for this environment.
                    for pr in to_rebase:
                        rebase_pr_branch_and_comment(repo=self.repo, pr=pr)
                    continue

                if os.path.isfile("Pipfile"):
                    _LOGGER.info("Using Pipfile for dependency management")
                    close_no_management_issue()
                    result = self._do_update(labels, pipenv_used=True,
                                             req_dev=False)
                elif os.path.isfile("requirements.in"):
                    # pip-tools style input files; a Pipfile environment is
                    # synthesized.
                    self._create_pipenv_environment(
                        input_file="requirements.in")
                    _LOGGER.info(
                        "Using requirements.in for dependency management")
                    close_no_management_issue()
                    result = self._do_update(labels, pipenv_used=False,
                                             req_dev=False)
                    if os.path.isfile("requirements-dev.in"):
                        self._create_pipenv_environment(
                            input_file="requirements-dev.in")
                        _LOGGER.info(
                            "Using requirements-dev.in for dependency management"
                        )
                        close_no_management_issue()
                        result = self._do_update(labels, pipenv_used=False,
                                                 req_dev=True)
                else:
                    _LOGGER.warning("No dependency management found")
                    issue = self.get_issue_by_title(
                        _ISSUE_NO_DEPENDENCY_NAME.format(
                            env_name=self.runtime_environment))
                    if issue is None:
                        self.project.create_issue(
                            title=_ISSUE_NO_DEPENDENCY_NAME.format(
                                env_name=self.runtime_environment),
                            body=ISSUE_NO_DEPENDENCY_MANAGEMENT.format(
                                env_name=self.runtime_environment),
                            labels=labels,
                        )
                    result = {}
                results[self.runtime_environment] = result

        close_manual_update_issue()
        return results
def run(self, labels: list, analysis_id=None):
    """Run Thoth Advising Bot.

    :param labels: labels to attach to opened issues / merge requests.
    :param analysis_id: when None, submit advises for all configured runtime
        environments; otherwise process the results of that finished analysis.
    :returns: True on success, False on failure, None when there is nothing
        to do or the triggering event is not supported.
    """
    if self.parsed_payload:
        # Only act on webhook events this manager supports.
        if self.parsed_payload.get("event") not in _EVENTS_SUPPORTED:
            _LOGGER.info(
                "ThothAdviseManager doesn't act on %r events.",
                self.parsed_payload.get("event"),
            )
            return

    self._issue_list = self.project.get_issue_list()
    self._close_advise_issues4users_lacking_perms()
    # Advise progress is tracked in a single issue; keep only the oldest.
    self._tracking_issue = self._close_all_but_oldest_issue()

    # Fix: annotation corrected — this holds environment names, not tuples.
    runtime_environments: typing.List[str]
    if analysis_id is None:
        if self._tracking_issue is None:
            _LOGGER.debug("No issue found to start advises for.")
            return
        elif not self._advise_issue_is_fresh(self._tracking_issue):
            _LOGGER.debug(
                "Issue has already been acted on by Kebechet and is still 'in progress'"
            )
            return

        with cloned_repo(self, self.project.default_branch, depth=1) as repo:
            self.repo = repo
            with open(".thoth.yaml", "r") as f:
                thoth_config = yaml.safe_load(f)
            if thoth_config.get("overlays_dir"):
                # Fix: names were previously collected twice (comprehension
                # followed by an appending loop), causing duplicate advise
                # submissions per environment.
                runtime_environments = [
                    e["name"] for e in thoth_config["runtime_environments"]
                ]
            else:
                # No overlays: only the first environment is advised.
                runtime_environments = [
                    thoth_config["runtime_environments"][0]["name"]
                ]

            for e in runtime_environments:
                try:
                    analysis_id = lib.advise_here(
                        nowait=True,
                        origin=(f"{self.service_url}/{self.slug}"),
                        source_type=ThothAdviserIntegrationEnum.KEBECHET,
                        kebechet_metadata=self.metadata,
                        runtime_environment_name=e,
                    )
                    self._tracking_issue.comment(
                        STARTED_ADVISE_COMMENT.format(
                            analysis_id=analysis_id,
                            env_name=e,
                            host=thoth_config["host"],
                        )
                    )
                except (FileLoadError, ThothConfigurationError) as exc:
                    if isinstance(exc, FileLoadError):
                        self._tracking_issue.comment(
                            f"""Result for {e}: Error advising, no requirements found.
If this project does not use requirements.txt or Pipfile then remove thoth-advise manager from your .thoth.yaml configuration."""
                        )
                    elif isinstance(exc, ThothConfigurationError):
                        self._tracking_issue.comment(
                            f"""Result for {e}: Error advising, configuration error found in .thoth.yaml.
The following exception was caught when submitting:
```
{exc}
```"""
                        )
                    # open issue
                    # comment on advise issue with issue id
                    return False
        return True
    else:
        # Result-processing path for a previously submitted analysis.
        with cloned_repo(self, self.project.default_branch, depth=1) as repo:
            self.repo = repo
            _LOGGER.info("Using analysis results from %s", analysis_id)
            res = lib.get_analysis_results(analysis_id)

            # Fix: the None check must come before any use of res[0].
            if res is None:
                _LOGGER.error(
                    "Advise failed on server side, contact the maintainer"
                )
                return False

            if self._metadata_indicates_internal_trigger(res[0].get("metadata")):
                self._tracking_issue = (
                    None  # internal trigger advise results should not be tracked by issue
                )

            branch_name = self._construct_branch_name(analysis_id)
            branch = self.repo.git.checkout("-B", branch_name)  # noqa F841
            self._cached_merge_requests = self.project.get_pr_list()
            _LOGGER.debug(json.dumps(res))

            self.runtime_environment = _runtime_env_name_from_advise_response(
                res[0]
            )
            to_ret = False
            # res is a (report, error_flag) pair; error_flag False == success.
            if res[1] is False:
                _LOGGER.info("Advise succeeded")
                self._write_advise(res)
                opened_merge = self._open_merge_request(
                    branch_name, labels, ["Pipfile.lock"], res[0].get("metadata")
                )
                if opened_merge and self._tracking_issue:
                    comment = (
                        SUCCESSFUL_ADVISE_COMMENT.format(
                            env=self.runtime_environment
                        )
                        + f"Opened merge request, see: #{opened_merge.id}"
                    )
                    self._tracking_issue.comment(comment)
                elif self._tracking_issue:
                    comment = (
                        SUCCESSFUL_ADVISE_COMMENT.format(
                            env=self.runtime_environment
                        )
                        + "Dependencies for this runtime environment are already up to date :)."
                    )
                    self._tracking_issue.comment(comment)
                to_ret = True
            else:
                _LOGGER.warning(
                    "Found error while running adviser... Creating issue"
                )
                self._act_on_advise_error(res)

            if self._tracking_issue:
                # Compare bot comments to decide whether all environments
                # have finished advising.
                to_open = len(
                    self._tracking_issue.get_comments(
                        filter_regex=STARTED_ADVISE_REGEX,
                        author=APP_NAME,
                    )
                )
                finished = len(
                    self._tracking_issue.get_comments(
                        filter_regex=SUCCESSFUL_ADVISE_REGEX,
                        author=APP_NAME,
                    )
                )
                errors = len(
                    self._tracking_issue.get_comments(
                        filter_regex=ERROR_ADVISE_REGEX,
                        author=APP_NAME,
                    )
                )
                if to_open - finished == 0:
                    if errors > 0:
                        # Fix: this comment was previously built but never
                        # posted to the tracking issue.
                        self._tracking_issue.comment(
                            f"""All advises complete, but leaving issue open because {errors} could not be successfully submitted and may require user action."""
                        )
                    else:
                        self._tracking_issue.comment(
                            "Finished advising for all environments."
                        )
                        self._tracking_issue.close()
            return to_ret
def run(  # type: ignore
    self,
    maintainers: list = None,
    assignees: list = None,
    labels: list = None,
    changelog_file: bool = False,
    changelog_smart: bool = True,
    changelog_classifier: str = MLModel.DEFAULT.name,
    changelog_format: str = Format.DEFAULT.name,
) -> None:
    """Check issues for new issue request, if a request exists, issue a new PR with adjusted version in sources.

    :param maintainers: users allowed to request releases (falls back to
        the configured maintainers when not given).
    :param assignees: users to assign to the release issue.
    :param labels: labels to attach to the opened merge request.
    :param changelog_file: persist the changelog to file as well.
    :param changelog_smart: use the smart (ML-assisted) changelog generator.
    :param changelog_classifier: classifier model name for smart changelogs.
    :param changelog_format: output format for the changelog.
    """
    if self.parsed_payload:
        # Only act on webhook events this manager supports.
        if self.parsed_payload.get("event") not in _EVENTS_SUPPORTED:
            _LOGGER.info(
                "Version Manager doesn't act on %r events.",
                self.parsed_payload.get("event"),
            )
            return

    reported_issues = []
    for issue in self.sm.repository.get_issue_list():
        issue_title = issue.title.strip()

        if issue_title.startswith(
            (_NO_VERSION_FOUND_ISSUE_NAME, _MULTIPLE_VERSIONS_FOUND_ISSUE_NAME)
        ):
            # Reported issues that should be closed on success version change.
            reported_issues.append(issue)

        # This is an optimization not to clone repo each time.
        if not self._is_release_request(issue_title):
            continue

        _LOGGER.info(
            "Found an issue #%s which is a candidate for request of new version release: %s",
            issue.id,
            issue.title,
        )

        with cloned_repo(self) as repo:
            if assignees:
                try:
                    self.sm.assign(issue, assignees)
                except Exception:
                    # Best effort: a failed assignment should not block the release.
                    _LOGGER.exception(
                        f"Failed to assign {assignees} to issue #{issue.id}"
                    )
                    issue.comment(
                        "Unable to assign provided assignees, please check bot configuration."
                    )

            # Case-insensitive maintainer check.
            maintainers = maintainers or self._get_maintainers(labels)
            if issue.author.lower() not in (m.lower() for m in maintainers):
                # NOTE(review): the conditional expression binds over the whole
                # concatenation — with empty maintainers only the fallback text
                # is posted. Confirm this precedence is intended.
                issue.comment(
                    f"Sorry, @{issue.author} but you are not stated in maintainers section for "
                    f"this project. Maintainers are @" + ", @".join(maintainers)
                    if maintainers
                    else "Sorry, no maintainers configured."
                )
                issue.close()
                # Next issue.
                continue

            try:
                version_identifier, old_version = self._adjust_version_in_sources(  # type: ignore
                    repo, labels, issue
                )
            except VersionError as exc:
                _LOGGER.exception("Failed to adjust version information in sources")
                issue.comment(str(exc))
                issue.close()
                raise

            if not version_identifier:
                _LOGGER.error("Giving up with automated release")
                return

            try:
                changelog = self._compute_changelog(
                    repo,
                    old_version,
                    version_identifier,
                    changelog_smart,
                    changelog_classifier,
                    changelog_format,
                    version_file=changelog_file,
                )
            except ThothGlyphException as exc:
                _LOGGER.exception("Failed to generate smart release log")
                issue.comment(str(exc))
                issue.close()
                return

            # If an issue exists, we close it as there is no change to source code.
            if not changelog:
                message = f"Closing the issue as there is no changelog between the new release of {self.slug}."
                _LOGGER.info(message)
                issue.comment(message)
                issue.close()
                return

            # Commit the version bump on a fresh release branch and push it.
            branch_name = "v" + version_identifier
            repo.git.checkout("HEAD", b=branch_name)
            message = _VERSION_PULL_REQUEST_NAME.format(version_identifier)
            repo.index.commit(message)
            # If this PR already exists, this will fail.
            repo.remote().push(branch_name)

            request = self.sm.open_merge_request(
                message,
                branch_name,
                body=self._construct_pr_body(issue, changelog),
                labels=labels,
            )

            _LOGGER.info(
                f"Opened merge request with {request.id} for new release of {self.slug} "
                f"in version {version_identifier}"
            )

            # The release succeeded; stale version-detection issues can go.
            for reported_issue in reported_issues:
                reported_issue.comment("Closing as this issue is no longer relevant.")
                reported_issue.close()
def run(self, labels: list) -> typing.Optional[dict]:
    """Create a pull request for each and every direct dependency in the given org/repo (slug).

    :param labels: labels to attach to opened issues / merge requests.
    :returns: the update result dict, {} when no dependency management was
        found, or None when the triggering event is not supported.
    """
    if self.parsed_payload:
        # Only act on webhook events this manager supports.
        if self.parsed_payload.get("event") not in _EVENTS_SUPPORTED:
            _LOGGER.info(
                "Update Manager doesn't act on %r events.",
                self.parsed_payload.get("event"),
            )
            return None

    # We will keep venv in the project itself - we have permissions in the cloned repo.
    os.environ["PIPENV_VENV_IN_PROJECT"] = "1"
    with cloned_repo(self, depth=1) as repo:
        # Make repo available in the instance.
        self.repo = repo
        close_no_management_issue = partial(
            self.close_issue_and_comment,
            _ISSUE_NO_DEPENDENCY_NAME,
            comment=ISSUE_CLOSE_COMMENT.format(sha=self.sha),
        )

        update_issue = self.get_issue_by_title(_UPDATE_MERGE_REQUEST_TITLE)
        if (
            update_issue is not None and update_issue.status == 2
        ):  # Means "open" in OGR.
            # A pending update issue exists — drop any stale update branch
            # so a fresh one can be pushed.
            try:
                self.delete_remote_branch(_UPDATE_BRANCH_NAME)
            except Exception:
                # Best effort: a leftover branch is not fatal.
                _LOGGER.exception(
                    f"Failed to delete branch {_UPDATE_BRANCH_NAME}, trying to continue"
                )

        close_manual_update_issue = partial(
            self.close_issue_and_comment,
            _ISSUE_MANUAL_UPDATE,
            comment=ISSUE_CLOSE_COMMENT.format(sha=self.sha),
        )

        if os.path.isfile("Pipfile"):
            _LOGGER.info("Using Pipfile for dependency management")
            close_no_management_issue()
            result = self._do_update(labels, pipenv_used=True, req_dev=False)
            close_manual_update_issue()
        elif os.path.isfile("requirements.in"):
            # pip-tools style input files; a Pipfile environment is synthesized.
            self._create_pipenv_environment(input_file="requirements.in")
            _LOGGER.info("Using requirements.in for dependency management")
            close_no_management_issue()
            result = self._do_update(labels, pipenv_used=False, req_dev=False)
            # NOTE(review): requirements-dev.in is only honored when
            # requirements.in also exists — confirm intended.
            if os.path.isfile("requirements-dev.in"):
                self._create_pipenv_environment(input_file="requirements-dev.in")
                _LOGGER.info("Using requirements-dev.in for dependency management")
                close_no_management_issue()
                result = self._do_update(labels, pipenv_used=False, req_dev=True)
            close_manual_update_issue()
        else:
            _LOGGER.warning("No dependency management found")
            issue = self.get_issue_by_title(_ISSUE_NO_DEPENDENCY_NAME)
            if issue is None:
                self.project.create_issue(
                    title=_ISSUE_NO_DEPENDENCY_NAME,
                    body=ISSUE_NO_DEPENDENCY_MANAGEMENT,
                    labels=labels,
                )
            return {}

        return result