def get_config(package_name, test_env_path, full=False):
    """Retrieve the test configuration

    Args:
        package_name: Name of the package for which to apply ``full=False``
        test_env_path: the location of the cloned test environment.
        full (boolean): See ``Returns`` section

    Returns:
        if ``full`` True, return the complete config.json file.
        Otherwise, return a dict with keys:
        ``['artifact_name', 'artifact_type', 'test_type']`` (empty dict when
        no package matches ``package_name``).
    """
    logger = Log()

    # Read config.json from the root of the cloned test environment.
    with lcd('{0}'.format(test_env_path)):
        config = json.loads(local('cat config.json', capture=True))

    logger.info('Parsing configuration')

    if full:
        pkg_config = config
    else:
        # First package entry whose artifact_name matches, or {} when absent.
        pkg_config = next(
            (package for package in config['packages']
             if package['artifact_name'] == package_name),
            {}
        )

    logger.info('Conf is ok and has keys {0}'.format(pkg_config.keys()))
    return pkg_config
def _retry(*args, **kwargs):
    """Fake doc"""
    logger = Log()
    last_exception = None

    # Backoff parameters, all sourced from the RETRY_HTTP configuration.
    multiplier = RETRY_HTTP["multiplier"]
    retry_interval = RETRY_HTTP["interval"]
    randomization_factor = RETRY_HTTP["randomization_factor"]
    max_sleep_time = RETRY_HTTP["max_sleep_time"]

    total_sleep_time = 0
    request_nb = 0

    # Capped to 20 minutes
    while total_sleep_time < max_sleep_time:
        try:
            return call(*args, **kwargs)
        except Exception as exc:
            # Inspired from https://developers.google.com/api-client-library/java/google-http-java-client/reference/
            # 1.20.0/com/google/api/client/util/ExponentialBackOff
            # Random jitter in [0, 2 * randomization_factor], millisecond resolution.
            jitter = random.randint(0, int(2 * randomization_factor * 1000)) / 1000
            # Exponential backoff: interval grows by `multiplier` per attempt,
            # scaled by a randomized factor around 1.
            next_retry_sleep = (multiplier ** request_nb
                                * (retry_interval * (jitter + 1 - randomization_factor)))

            total_sleep_time += next_retry_sleep
            request_nb += 1
            time.sleep(next_retry_sleep)

            last_exception = exc
            logger.info('Got an exception: {0}. Slept ({1} seconds / {2} seconds)'
                        .format(exc, total_sleep_time, max_sleep_time))

    logger.info('Max sleep time exceeded, raising exception.')
    raise last_exception
def __init__(self, login, password, **kwargs):
    # Authenticated API client built from the provided credentials.
    self._connexion = GitHub(login, password)
    # Keyword arguments take precedence over the GITHUB_INFO defaults.
    self.organization_name = kwargs.get("github_organization", GITHUB_INFO['organization'])
    self.repository_name = kwargs.get("github_repository", GITHUB_INFO['repository'])
    # Repository handle resolved once up-front; all later calls go through it.
    self._repository = self._connexion.get_organization(self.organization_name).get_repo(self.repository_name)

    # Used for cache
    self.pull_requests_cache = {}

    self.logger = Log()
def fetch_github_file(url, token):
    """Fetch a file from GitHub

    Args:
        url (str): URL the file is at.
        token (str): Authorization token for GitHub.

    Returns:
        str: the raw content of the file.
    """
    logger = Log()
    headers = {
        'Authorization': 'token {0}'.format(str(token)),
        # Ask the GitHub API for the raw file body instead of the JSON wrapper.
        'Accept': 'application/vnd.github.v3.raw',
    }
    logger.info('Fetching file at {0}'.format(url))
    req = requests.get(url, headers=headers, allow_redirects=True)
    # NOTE(review): no status check -- a 401/404 error body would be returned
    # as if it were file content. Consider req.raise_for_status().
    # The previous StringIO write/getvalue round-trip was a no-op copy of
    # req.content; return the payload directly.
    return req.content
def strategy_runner(package, run_type, remote=False, **kwargs):
    """Run the packaging functions

    Args:
        package (dict): package_config
        run_type (str): Represent the strategy to run on a package (test or artifact)
        remote (bool): Represent if the plugin is executed in remote or not, default value: False

    Raises:
        CITestFail: some error occurred during the test
        CITestUnknown: wrong value for config['test_type']
        CIBuildPackageFail: some error occurred during a packaging
        ImportPluginError: Fail to find / import a plugin
    """
    logger = Log()
    if run_type in ["test", "artifact"]:
        # Packages flagged "no-test" short-circuit the test strategy entirely.
        if run_type == "test" and package["test_type"] == "no-test":
            logger.info("Tag no-test detected, skip test")
            return {}

        # Select which package key drives the plugin lookup and which
        # exception type wraps a plugin failure for this strategy.
        params = {"type": "test_type", "exception": CITestFail}\
            if run_type == "test" else {"type": "artifact_type", "exception": CIBuildPackageFail}

        try:
            plugin = find_plugin(package[params["type"]], run_type, PLUGINS_INFO["locations"],
                                 PLUGINS_INFO["workspace"])
            logger.info("The plugin {} is loaded".format(package[params["type"]]))
        except ImportPluginError:
            raise

        logger.info("Starting {} plugin ...".format(package[params["type"]]))
        try:
            return plugin.run(package, remote)
        except Exception as e:
            logger.error(str(e))
            # Re-raise under the strategy-specific CI exception type.
            raise params["exception"](str(e))
    elif run_type == "dependency":
        # Seed with statically declared dependencies, then let each
        # dependency plugin extend the set.
        dependencies = set(package.get("depends_on", []))
        for plugin_type in package["dependencies_type"]:
            try:
                plugin = find_plugin(plugin_type, run_type, PLUGINS_INFO["locations"], PLUGINS_INFO["workspace"])
                logger.info("The plugin {} is loaded".format(plugin_type))
            except ImportPluginError:
                raise
            # NOTE(review): plugin.Plugin(...) is expected to return an
            # iterable of dependency names for the set-union to work --
            # confirm against the dependency plugin contract.
            dependencies |= plugin.Plugin(kwargs.get("basepath"))
        return dependencies
    else:
        raise ValueError("run_type must be equal to {}, actual value: {}".format(", ".join(ACCEPT_RUN_TYPE),
                                                                                 run_type))
def prepare_test_env(branch, **kwargs):
    """Prepare the test environment

    Args:
        branch (str): Name of the branch the repository should be checkout to.

    Keyword Args:
        github_organization (str): this is the github organization for get back the repository, default value None.
                                   Also can be set by environment variable LOKTAR_GITHUB_INFO_ORGANIZATION
        github_repository (str): this is the target repository to download, default value None
                                 Also can be set by environment variable LOKTAR_GITHUB_INFO_REPOSITORY
        skip_git_clone (bool): Skip the git clone if is another process who cloned the repository, default to false
        unique_name_dir (str): If the unique name dir for the location where the repository is cloned
                               is generated by another process

    Raises:
        PrepareEnvFail: Failed to prepare the environment.

    Returns:
        str: path to the .tar.gz archive of the prepared working copy.
    """
    logger = Log()
    temporary_root = "/tmp/ci"
    # A caller-supplied name lets several processes share one checkout directory.
    unique_name_dir = kwargs.get("unique_name_dir", str(uuid4()))
    unique_path_dir = "{0}/{1}".format(temporary_root, unique_name_dir)
    archive = "{0}.tar.gz".format(unique_name_dir)

    logger.info("Preparing the test environment")
    github_organization = kwargs.get("github_organization", GITHUB_INFO["organization"])
    github_repository = kwargs.get("github_repository", GITHUB_INFO["repository"])

    if not os.path.exists(unique_path_dir):
        os.mkdir(unique_path_dir)

    try:
        if not kwargs.get("skip_git_clone", False):
            if not exec_command_with_retry("git clone -b {0} --single-branch [email protected]:{1}/{2}.git {3}"
                                           .format(branch, github_organization, github_repository, unique_path_dir),
                                           0,
                                           MAX_RETRY_GITHUB):
                raise PrepareEnvFail(
                    "The git clone can't the repository: {}/{}, check if you have the correct crendentials"
                    .format(github_organization, github_repository))

        with lcd(unique_path_dir):
            # Bring in origin/master so feature branches can be merge-tested below.
            if not exec_command_with_retry("git fetch origin master", 0, MAX_RETRY_GITHUB):
                raise PrepareEnvFail("Can't fetch the master branch from origin")

            if branch != "master":
                # git merge refuses to run without a configured identity.
                if not exec_command_with_retry("git config --global user.email '*****@*****.**'",
                                               0,
                                               MAX_RETRY_GITHUB):
                    raise PrepareEnvFail("Git config error on user.email")

                if not exec_command_with_retry("git config --global user.name 'Your Name'", 0, MAX_RETRY_GITHUB):
                    raise PrepareEnvFail("Git config error on user.name")

                # Merge master into the branch to test the post-merge state.
                if not exec_command_with_retry("git merge --no-ff --no-edit FETCH_HEAD", 0, MAX_RETRY_GITHUB):
                    raise PrepareEnvFail("Can't merge the FETCH_HEAD (master branch)")

        # Strip git metadata so it does not end up in the shipped archive.
        local("rm -rf {0}/.git".format(unique_path_dir))

        with lcd(temporary_root):
            if not exec_command_with_retry("tar -czf {0} {1}".format(archive, unique_name_dir),
                                           0,
                                           MAX_RETRY_GITHUB):
                raise PrepareEnvFail("Can't create the archive")

        logger.info("The test env is ready!")
    except NetworkError as exc:
        logger.error(exc)
        raise
    except PrepareEnvFail:
        # Best-effort cleanup of the partial checkout and archive, then re-raise.
        local("rm -rf {0}*".format(unique_path_dir))
        raise

    return "{0}/{1}".format(temporary_root, archive)
def find_plugin(plugin_name, plugin_type, plugin_locations, workspace):
    """Try to retrieve a plugin

    Args:
        plugin_name (str): the plugin to search
        plugin_type (str): the type of plugin to fetch
        plugin_locations (list): locations of plugins
        workspace (str): Plugins workspace for fetching plugins

    Raises:
        ImportPluginError: it raise if the plugin cannot import or the plugin is not found

    Returns
        module (module): return the plugin
    """
    # Make the extra plugin locations importable.
    sys.path += plugin_locations
    logger = Log()
    errors = []

    # Each attempt: (module path, exception types to swallow, log messages).
    # Standard plugins are searched first, then workspace (custom) ones.
    attempts = (
        ("loktar.plugins.{}.{}".format(plugin_type, plugin_name),
         (ImportError,),
         "Searching plugin: {} in {} standard plugins".format(plugin_name, plugin_type),
         "{} not found in standard plugin".format(plugin_name)),
        ("{}.{}.{}".format(workspace, plugin_type, plugin_name),
         (ImportError, TypeError),
         "Searching plugin: {} in {} custom plugins".format(plugin_name, plugin_type),
         "{} not found in custom plugin".format(plugin_name)),
    )

    for module_path, exc_types, search_msg, miss_msg in attempts:
        logger.info(search_msg)
        try:
            return importlib.import_module(module_path)
        except exc_types as e:
            logger.info(miss_msg)
            errors.append(str(e))

    # Neither location had the plugin: surface all accumulated import errors.
    logger.error("\n".join(errors))
    raise ImportPluginError("\n".join(errors))
def prepare_test_env(branch, **kwargs):
    """Prepare the test environment

    Args:
        branch (str): Name of the branch the repository should be checkout to.

    Keyword Args:
        github_organization (str): this is the github organization for get back the repository, default value None.
                                   Also can be set by environment variable LOKTAR_GITHUB_INFO_ORGANIZATION
        github_repository (str): this is the target repository to download, default value None
                                 Also can be set by environment variable LOKTAR_GITHUB_INFO_REPOSITORY

    Raises:
        PrepareEnvFail: Failed to prepare the environment.

    Returns:
        str: path to the .tar.gz archive of the prepared working copy.
    """
    logger = Log()
    unique_name_dir = str(uuid4())
    unique_path_dir = "/tmp/{0}".format(unique_name_dir)
    archive = "{0}.tar.gz".format(unique_name_dir)

    logger.info("Preparing the test environment")
    github_organization = kwargs.get("github_organization", GITHUB_INFO["organization"])
    github_repository = kwargs.get("github_repository", GITHUB_INFO["repository"])

    os.mkdir(unique_path_dir)
    try:
        if not exec_command_with_retry(
            "git clone -b {0} --single-branch [email protected]:{1}/{2}.git {3}".format(
                branch, github_organization, github_repository, unique_path_dir
            ),
            0,
            MAX_RETRY_GITHUB,
        ):
            raise PrepareEnvFail(
                "The git clone can't the repository: {}/{}, check if you have the correct crendentials".format(
                    github_organization, github_repository
                )
            )

        with lcd(unique_path_dir):
            # Bring in origin/master so feature branches can be merge-tested below.
            # Bare `raise PrepareEnvFail` gave no diagnostic; raise with a
            # message, consistent with the other prepare_test_env variant.
            if not exec_command_with_retry("git fetch origin master", 0, MAX_RETRY_GITHUB):
                raise PrepareEnvFail("Can't fetch the master branch from origin")

            if branch != "master":
                # git merge refuses to run without a configured identity.
                if not exec_command_with_retry("git config --global user.email '*****@*****.**'", 0, MAX_RETRY_GITHUB):
                    raise PrepareEnvFail("Git config error on user.email")

                if not exec_command_with_retry("git config --global user.name 'Your Name'", 0, MAX_RETRY_GITHUB):
                    raise PrepareEnvFail("Git config error on user.name")

                if not exec_command_with_retry("git merge --no-ff --no-edit FETCH_HEAD", 0, MAX_RETRY_GITHUB):
                    raise PrepareEnvFail("Can't merge the FETCH_HEAD (master branch)")

        # Strip git metadata so it does not end up in the shipped archive.
        local("rm -rf {0}/.git".format(unique_path_dir))

        with lcd("/tmp"):
            if not exec_command_with_retry("tar -czf {0} {1}".format(archive, unique_name_dir), 0, MAX_RETRY_GITHUB):
                raise PrepareEnvFail("Can't create the archive")

        logger.info("The test env is ready!")
    except NetworkError as exc:
        logger.error(exc)
        raise
    except PrepareEnvFail:
        # Best-effort cleanup of the partial checkout and archive, then re-raise.
        local("rm -rf {0}*".format(unique_path_dir))
        raise

    return "/tmp/{0}".format(archive)
class Github(object):
    """Wrapper for the github3 library

    Args:
        login (str): login for GitHub
        password (str): password for GitHub

    Keyword Args:
        github_organization (str): this is the github organization for get back the repository, default value None.
                                   Also can be set by environment variable LOKTAR_GITHUB_INFO_ORGANIZATION
        github_repository (str): this is the target repository to download, default value None
                                 Also can be set by environment variable LOKTAR_GITHUB_INFO_REPOSITORY
    """
    def __init__(self, login, password, **kwargs):
        # Authenticated API client built from the provided credentials.
        self._connexion = GitHub(login, password)
        # Keyword arguments take precedence over the GITHUB_INFO defaults.
        self.organization_name = kwargs.get("github_organization", GITHUB_INFO['organization'])
        self.repository_name = kwargs.get("github_repository", GITHUB_INFO['repository'])
        # Repository handle resolved once up-front; all later calls go through it.
        self._repository = self._connexion.get_organization(self.organization_name).get_repo(self.repository_name)

        # Used for cache
        self.pull_requests_cache = {}

        self.logger = Log()

    @retry
    def search_pull_request_id(self, branch):
        """Look for an open pull request id given a branch

        Args:
            branch (str): name of the branch

        Returns:
            int or None: the id of the pull request or None
        """
        pr_id = None
        # Linear scan of open pull requests; first head ref match wins.
        for pr in self.get_pull_requests(state='open'):
            self.logger.info('Looking at branch {0}'.format(pr.head.ref))
            if pr.head.ref == branch:
                self.logger.info('Found pull request {0} with id {1}'.format(pr, pr.number))
                pr_id = pr.number
                break
        return pr_id

    @retry
    def get_pull_requests(self, state='open', use_cache=False):
        """Get a pull request from the GitHub API

        Args:
            state (str): State of the pull request
            use_cache (Optional[bool]): If True, only return cached pull requests.
                Otherwise, make another request.

        Returns:
            a list of PullRequest instance
        """
        # An empty cache always forces a fresh API request, even with use_cache=True.
        if use_cache and self.pull_requests_cache:
            pull_requests = self.pull_requests_cache
        else:
            pull_requests = self._repository.get_pulls(state=state)
            self._cache_pull_requests(pull_requests)
        return pull_requests

    def _cache_pull_requests(self, pull_requests=None):
        """Cache a list of pull requests

        Args:
            pull_requests (list of github.pullrequest.PullRequest)
        """
        # Keyed by pull request number; later entries overwrite earlier ones.
        for pull_request in pull_requests:
            self.pull_requests_cache[pull_request.number] = pull_request

    @retry
    def get_pull_request(self, pull_request_id, use_cache=True):
        """Get a pull request from the GitHub API

        Args:
            pull_request_id (int): ID of the pull request
            use_cache (Optional[bool]): If True, only return cached pull requests.
                Otherwise, make another request. If we cannot find the pull request
                in the cache, we make another request.

        Returns:
            a PullRequest instance

        Raises:
            SCMError: when pull_request_id is not an int/long.
        """
        if use_cache and pull_request_id in self.pull_requests_cache:
            pull_request = self.pull_requests_cache[pull_request_id]
        else:
            try:
                pull_request = self._repository.get_pull(pull_request_id)
            except AssertionError:
                # PyGithub asserts on the argument type; surface it as an SCMError.
                self.logger.error("pull request id must be an int or a long not: {}".format(type(pull_request_id)))
                raise SCMError("pull request id must be an int or a long")
            self._cache_pull_requests([pull_request])
        return pull_request

    @retry
    def get_commit_message_modified_files_on_pull_request(self, pull_request_id):
        """Retrieve the commit messages from the pull request and associate the modified files for it.

        Args:
            pull_request_id (int): ID of the pull request

        Returns:
            dict of str: list: Comments of the commits linked to the pull request are keys,
                modified files are values.
        """
        pr = self.get_pull_request(pull_request_id)
        commits = pr.get_commits()

        # Return message: files
        # Commits sharing the same message have their file lists merged.
        dict_message_files = {}
        for commit in commits:
            dict_message_files.setdefault(commit.commit.message, [])
            dict_message_files[commit.commit.message].extend(map(lambda file_: file_.filename, commit.files))
        return dict_message_files

    @retry
    def get_git_branch_from_pull_request(self, pull_request_id):
        """Retrieve the branch name from the pull request id

        Args:
            pull_request_id (int): ID of the pull request

        Returns:
            A branch name
        """
        pr = self.get_pull_request(pull_request_id)
        return pr.head.ref

    @retry
    def get_last_statuses_from_pull_request(self, pull_request_id, exclude_head=True):
        """Get the statuses from a pull request ID.

        Args:
            pull_request_id (int): ID of the pull request
            exclude_head (bool): If True, skip the head commit

        Returns:
            a tuple of github.Commit, list of github.CommitStatus.CommitStatus: List of statuses.
                Empty if no status was found.
        """
        pr = self.get_pull_request(pull_request_id)

        # Walk commits newest-first, returning the first one carrying statuses.
        for commit in pr.get_commits().reversed:
            if exclude_head and commit.sha == pr.head.sha:
                continue
            statuses = list(commit.get_statuses())
            if statuses:
                return commit, statuses
        return None, []

    @retry
    def get_last_commits_from_pull_request(self, pull_request_id, until_commit=None):
        """Get the commit from a pull request ID.

        Args:
            pull_request_id (int): ID of the pull request
            until_commit (github.Commit.Commit): Stop at a specific commit (will be excluded)

        Returns:
            list of github.Commit.Commit: the last commits
        """
        commits = []
        pr = self.get_pull_request(pull_request_id)
        # Newest-first; stop (exclusive) when until_commit is reached.
        for commit in pr.get_commits().reversed:
            if until_commit is not None and commit.sha == until_commit.sha:
                break
            commits.append(commit)
        return commits

    @retry
    def create_pull_request_comment(self, pull_request_id, comment, check_unique=False):
        """Create an issue comment on the pull request

        Args:
            pull_request_id (int): ID of the pull request
            comment (basestring): Comment text to post
            check_unique (bool): If True do not comment if it has already been posted.

        Returns:
            github.IssueComment.IssueComment: The comment that was created,
                or None when check_unique suppressed a duplicate.
        """
        pr = self.get_pull_request(pull_request_id)
        if check_unique:
            # Skip posting if an identical comment body already exists.
            issue_comments = pr.get_issue_comments()
            for issue_comment in issue_comments:
                if issue_comment.body == comment:
                    return
        return pr.create_issue_comment(comment)

    @retry
    def set_tag_and_release(self, tag_name, tag_message, release_name, patch_note, commit_id,
                            type_object="commit"):
        """Create a tag on specific git object (commit, tree or blob) and create a release from this tag

        Args:
            tag_name (str): The name for the tag
            tag_message (str): The message link to the tag
            release_name (str): The release name
            patch_note (str): The patch note associated to the release
            commit_id (str): The commit to attach the tag
            type_object (str): the target git object for tagging

        Returns:
            github.GitRelease.GitRelease: The release that was created

        Raises:
            SCMError
        """
        try:
            response = self._repository.create_git_tag_and_release(tag_name,
                                                                   tag_message,
                                                                   release_name,
                                                                   patch_note,
                                                                   commit_id,
                                                                   type_object)
        except GithubException as e:
            self.logger.error(str(e))
            raise SCMError(str(e))

        # Anything other than a 201 means the tag/release was not created.
        if response.raw_headers["status"] == "201 Created":
            self.logger.info("The tag: {0} was created on the commit id {1} and the release is attached to the tag: {0}"
                             .format(tag_name, commit_id))
            return response
        else:
            self.logger.error("response: status : {}".format(response.raw_headers["status"]))
            raise SCMError("The tag or the release can't be created")

    @retry
    def get_modified_files_from_pull_request(self, pull_request_id):
        """Get the list of files modified by a pull request.

        Args:
            pull_request_id (int): the id of the pull request

        Returns:
            list of files modified

        Raises:
            SCMError
        """
        try:
            pr_info = self.get_pull_request(pull_request_id)
        except GithubException as e:
            self.logger.error(str(e))
            raise SCMError(str(e))

        return [f.filename for f in pr_info.get_files()]

    @retry
    def get_commit(self, commit_id):
        """Fetch a single commit from the repository.

        Args:
            commit_id (str): sha of the commit to fetch

        Returns:
            github.Commit.Commit: the commit

        Raises:
            SCMError: when the commit is unknown or commit_id has the wrong type.
        """
        try:
            return self._repository.get_commit(commit_id)
        except (UnknownObjectException, AssertionError) as e:
            raise SCMError(str(e))

    @retry
    def get_modified_files_from_commit(self, commit_id):
        """Get the list of files modified by a single commit.

        Args:
            commit_id (str): sha of the commit

        Returns:
            list of str: filenames touched by the commit

        Raises:
            SCMError: propagated from get_commit.
        """
        return [f.filename for f in self.get_commit(commit_id).files]