Code example #1
 def fetch_gh_repo_branch_protection_details(self):
     """Fetch Github repository branch protection metadata."""
     branches = self.config.get(
         'org.auditree.repo_integrity.branches',
         {self.config.get('locker.repo_url'): ['master']})
     current_url = None
     github = None
     for repo_url, repo_branches in branches.items():
         parsed = urlparse(repo_url)
         base_url = f'{parsed.scheme}://{parsed.hostname}'
         repo = parsed.path.strip('/')
         for branch in repo_branches:
             file_prefix_parts = [
                 repo.lower().replace('/', '_').replace('-', '_'),
                 branch.lower().replace('-', '_')
             ]
             file_prefix = '_'.join(file_prefix_parts)
             path = ['auditree', f'gh_{file_prefix}_branch_protection.json']
             if base_url != current_url:
                 github = Github(self.config.creds, base_url)
                 current_url = base_url
             self.config.add_evidences([
                 RepoBranchProtectionEvidence(
                     path[1], path[0], DAY,
                     (f'Github branch protection for {repo} repo '
                      f'{branch} branch'))
             ])
             joined_path = os.path.join(*path)
             with raw_evidence(self.locker, joined_path) as evidence:
                 if evidence:
                     evidence.set_content(
                         json.dumps(
                             github.get_branch_protection_details(
                                 repo, branch)))
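
The shape of the 'org.auditree.repo_integrity.branches' setting can be inferred from the default above: a mapping of repository URLs to lists of branch names, falling back to the locker repository's master branch. A minimal sketch, with hypothetical repository URLs:

# Hypothetical value for 'org.auditree.repo_integrity.branches':
# each key is a repository URL, each value lists the branches whose
# protection settings should be fetched.
branches_config = {
    'https://github.com/my-org/evidence-locker': ['master'],
    'https://github.com/my-org/my-service': ['master', 'develop']
}
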
Code example #2
 def fetch_workspaces(self):
     """Fetch Github repository Zenhub workspaces."""
     for config in self.configs:
         gh_host = config.get('github_host', GH_HOST_URL)
         zh_root = config.get('api_root', ZH_API_ROOT)
         repo = config['github_repo']
         repo_hash = get_sha256_hash([gh_host, repo], 10)
         fname = f'zh_repo_{repo_hash}_workspaces.json'
         self.config.add_evidences([
             RawEvidence(
                 fname, 'issues', DAY,
                 f'Zenhub workspaces for {gh_host}/{repo} repository')
         ])
         with raw_evidence(self.locker, f'issues/{fname}') as evidence:
             if evidence:
                 if gh_host not in self.gh_pool.keys():
                     self.gh_pool[gh_host] = Github(base_url=gh_host)
                 if zh_root not in self.zh_pool.keys():
                     self.zh_pool[zh_root] = BaseSession(zh_root)
                     service = 'zenhub'
                     if zh_root != ZH_API_ROOT:
                         service = 'zenhub_enterprise'
                     token = self.config.creds[service].token
                     self.zh_pool[zh_root].headers.update({
                         'Content-Type':
                         'application/json',
                         'X-Authentication-Token':
                         token
                     })
                 workspaces = self._get_workspaces(repo,
                                                   config.get('workspaces'),
                                                   gh_host, zh_root)
                 evidence.set_content(json.dumps(workspaces))
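
Each entry in self.configs needs at least a github_repo; github_host, api_root and workspaces are optional overrides of the GH_HOST_URL and ZH_API_ROOT defaults. A hedged sketch, using hypothetical hosts and repositories:

# Hypothetical content of self.configs for the Zenhub workspaces fetcher.
configs = [
    # Public GitHub and public Zenhub: only the repository is required.
    {'github_repo': 'my-org/my-service'},
    # Enterprise deployments override the host and API root defaults.
    {
        'github_repo': 'my-org/internal-service',
        'github_host': 'https://github.example.com',
        'api_root': 'https://zenhub.example.com/api/v1',
        'workspaces': ['My workspace']  # handed to _get_workspaces (not shown)
    }
]
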
Code example #3
 def fetch_gh_org_collaborators(self):
     """Fetch collaborators from GH organization repositories."""
     for config in self.config.get('org.permissions.org_integrity.orgs'):
         host, org = config['url'].rsplit('/', 1)
         for aff in config.get('collaborator_types', GH_ALL_COLLABORATORS):
             url_hash = get_sha256_hash([config['url']], 10)
             json_file = f'gh_{aff}_collaborators_{url_hash}.json'
             path = ['permissions', json_file]
             description = (
                 f'{aff.title()} collaborators of the {org} GH org')
             self.config.add_evidences(
                 [RawEvidence(path[1], path[0], DAY, description)])
             with raw_evidence(self.locker, '/'.join(path)) as evidence:
                 if evidence:
                     if host not in self.gh_pool:
                         self.gh_pool[host] = Github(base_url=host)
                     if not config.get('repos'):
                         repos = self.gh_pool[host].paginate_api(
                             f'orgs/{org}/repos')
                         config['repos'] = [repo['name'] for repo in repos]
                     collabs = {}
                     for repo in config['repos']:
                         collabs_url = f'repos/{org}/{repo}/collaborators'
                         collabs[repo] = self.gh_pool[host].paginate_api(
                             collabs_url, affiliation=aff)
                     evidence.set_content(json.dumps(collabs))
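
From the lookups above, each entry in 'org.permissions.org_integrity.orgs' carries a url plus optional collaborator_types and repos keys. A sketch with hypothetical values:

# Hypothetical value for 'org.permissions.org_integrity.orgs'.
orgs_config = [
    {
        'url': 'https://github.com/my-org',
        # Optional; GH_ALL_COLLABORATORS is used when omitted.  Values follow
        # the GitHub 'affiliation' filter (e.g. 'outside', 'direct', 'all').
        'collaborator_types': ['outside', 'direct'],
        # Optional; when omitted, every repository in the org is discovered
        # through the orgs/{org}/repos API call above.
        'repos': ['my-service', 'my-other-service']
    }
]
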
Code example #4
 def fetch_gh_repo_branch_recent_commits_details(self):
     """Fetch Github repository branch recent commits metadata."""
     branches = self.config.get(
         'org.auditree.repo_integrity.branches',
         {self.config.get('locker.repo_url'): ['master']})
     current_url = None
     github = None
     for repo_url, repo_branches in branches.items():
         parsed = urlparse(repo_url)
         base_url = f'{parsed.scheme}://{parsed.hostname}'
         repo = parsed.path.strip('/')
         for branch in repo_branches:
             file_prefix_parts = [
                 repo.lower().replace('/', '_').replace('-', '_'),
                 branch.lower().replace('-', '_')
             ]
             file_prefix = '_'.join(file_prefix_parts)
             path = ['auditree', f'gh_{file_prefix}_recent_commits.json']
             if base_url != current_url:
                 github = Github(self.config.creds, base_url)
                 current_url = base_url
             ttl = DAY
             # To ensure signed commits check picks up locker commits
             if (repo_url == self.locker.repo_url
                     and branch == self.locker.branch):
                 ttl = DAY * 2
             self.config.add_evidences([
                 RepoCommitEvidence(
                     path[1], path[0], ttl,
                     (f'Github recent commits for {repo} repo '
                      f'{branch} branch'))
             ])
             joined_path = os.path.join(*path)
             with raw_evidence(self.locker, joined_path) as evidence:
                 if evidence:
                     meta = self.locker.get_evidence_metadata(evidence.path)
                     if meta is None:
                         meta = {}
                     now = datetime.utcnow().strftime(LOCKER_DTTM_FORMAT)
                     since = datetime.strptime(meta.get('last_update', now),
                                               LOCKER_DTTM_FORMAT)
                     evidence.set_content(
                         json.dumps(
                             github.get_commit_details(repo, since,
                                                       branch)))
Code example #5
 def fetch_gh_repo_branch_file_path_recent_commits_details(self):
     """Fetch Github repository branch file path recent commits metadata."""
     filepaths = self.config.get('org.auditree.repo_integrity.filepaths')
     current_url = None
     github = None
     for repo_url, repo_branches in filepaths.items():
         parsed = urlparse(repo_url)
         base_url = f'{parsed.scheme}://{parsed.hostname}'
         repo = parsed.path.strip('/')
         for branch, repo_filepaths in repo_branches.items():
             for filepath in repo_filepaths:
                 ev_file_prefix = f'{repo}_{branch}_{filepath}'.lower()
                 for symbol in [' ', '/', '-', '.']:
                     ev_file_prefix = ev_file_prefix.replace(symbol, '_')
                 path = [
                     'auditree', f'gh_{ev_file_prefix}_recent_commits.json'
                 ]
                 if base_url != current_url:
                     github = Github(self.config.creds, base_url)
                     current_url = base_url
                 self.config.add_evidences([
                     RepoCommitEvidence(
                         path[1], path[0], DAY,
                         (f'Github recent commits for {repo} repo '
                          f'{branch} branch, {filepath} file path'))
                 ])
                 joined_path = os.path.join(*path)
                 with raw_evidence(self.locker, joined_path) as evidence:
                     if evidence:
                         meta = self.locker.get_evidence_metadata(
                             evidence.path)
                         if meta is None:
                             meta = {}
                         utcnow = datetime.utcnow()
                         now = utcnow.strftime(LOCKER_DTTM_FORMAT)
                         since = datetime.strptime(
                             meta.get('last_update', now),
                             LOCKER_DTTM_FORMAT)
                         evidence.set_content(
                             json.dumps(
                                 github.get_commit_details(
                                     repo, since, branch, filepath)))
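
The filepaths setting nests one level deeper than the branches setting: repository URL, then branch, then the file paths whose commit history should be tracked. A hypothetical sketch:

# Hypothetical value for 'org.auditree.repo_integrity.filepaths':
# repository URL -> branch name -> file paths to watch for recent commits.
filepaths_config = {
    'https://github.com/my-org/my-service': {
        'master': ['Makefile', 'src/settings.json']
    }
}
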
Code example #6
File: notify.py Project: tsehrer/auditree-framework
    def __init__(self, results, controls, push_error=False):
        """
        Construct and initialize the Github notifier object.

        :param results: dictionary generated by
          :py:class:`compliance.runners.CheckMode` at the end of the execution.
        :param controls: the control descriptor that manages accreditations.
        :param push_error: whether the push to the remote locker failed.
        """
        super(GHIssuesNotifier, self).__init__(results, controls, push_error)

        self._config = get_config().get('notify.gh_issues')
        if not self._config:
            # Ensure that legacy ghe_issues config still works
            self._config = get_config().get('notify.ghe_issues', {})
        # Using the locker repo url to define the base url.  The expectation
        # is that the Github issues repository will share the base url.
        parsed_locker_url = urlparse(get_config().get('locker.repo_url'))
        self._github = Github(
            get_config().creds,
            f'{parsed_locker_url.scheme}://{parsed_locker_url.hostname}')
Code example #7
 def fetch_gh_repo_details(self):
     """Fetch Github repository metadata."""
     repo_urls = self.config.get('org.auditree.repo_integrity.repos',
                                 [self.config.get('locker.repo_url')])
     current_url = None
     github = None
     for repo_url in repo_urls:
         parsed = urlparse(repo_url)
         base_url = f'{parsed.scheme}://{parsed.hostname}'
         repo = parsed.path.strip('/')
         file_prefix = repo.lower().replace('/', '_').replace('-', '_')
         path = ['auditree', f'gh_{file_prefix}_repo_metadata.json']
         if base_url != current_url:
             github = Github(self.config.creds, base_url)
             current_url = base_url
         self.config.add_evidences([
             RepoMetadataEvidence(path[1], path[0], DAY,
                                  f'Github {repo} repo metadata details')
         ])
         with raw_evidence(self.locker, os.path.join(*path)) as evidence:
             if evidence:
                 evidence.set_content(
                     json.dumps(github.get_repo_details(repo)))
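
Here the setting is simply a list of repository URLs, defaulting to the locker repository. A hypothetical sketch:

# Hypothetical value for 'org.auditree.repo_integrity.repos': a flat list of
# repository URLs; the locker repository URL is used when the setting is absent.
repos_config = [
    'https://github.com/my-org/evidence-locker',
    'https://github.com/my-org/my-service'
]
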
Code example #8
 def fetch_issues(self):
     """Fetch Github repository issues."""
     for config in self.configs:
         host = config.get('host', GH_HOST_URL)
         repo = config['repo']
         fname = f'gh_repo_{get_sha256_hash([host, repo], 10)}_issues.json'
         self.config.add_evidences([
             RawEvidence(fname, 'issues', DAY,
                         f'Github issues for {host}/{repo} repository')
         ])
         with raw_evidence(self.locker, f'issues/{fname}') as evidence:
             if evidence:
                 if host not in self.gh_pool.keys():
                     self.gh_pool[host] = Github(base_url=host)
                 issues = []
                 for search in self._compose_searches(config, host):
                     issue_ids = [i['id'] for i in issues]
                     for result in self.gh_pool[host].search_issues(search):
                         if result['id'] not in issue_ids:
                             issues.append(result)
                 evidence.set_content(json.dumps(issues))
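
Each entry in self.configs supplies at least a repo, with host as an optional override; the remaining search criteria are assembled by _compose_searches, which is not shown here. A hypothetical sketch:

# Hypothetical entry in self.configs for the issues fetcher.  Any search
# criteria beyond 'host' and 'repo' would be consumed by _compose_searches.
configs = [
    {
        'repo': 'my-org/my-service',
        'host': 'https://github.com'  # optional; GH_HOST_URL is the default
    }
]
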
Code example #9
File: notify.py Project: tsehrer/auditree-framework
class GHIssuesNotifier(_BaseMDNotifier):
    """
    Github notifier class.

    Notifications are sent to Github as repository issues.  This
    notifier is configurable via :class:`compliance.config.ComplianceConfig`.
    """
    def __init__(self, results, controls, push_error=False):
        """
        Construct and initialize the Github notifier object.

        :param results: dictionary generated by
          :py:class:`compliance.runners.CheckMode` at the end of the execution.
        :param controls: the control descriptor that manages accreditations.
        :param push_error: whether the push to the remote locker failed.
        """
        super(GHIssuesNotifier, self).__init__(results, controls, push_error)

        self._config = get_config().get('notify.gh_issues')
        if not self._config:
            # Ensure that legacy ghe_issues config still works
            self._config = get_config().get('notify.ghe_issues', {})
        # Using the locker repo url to define the base url.  The expectation
        # is that the Github issues repository will share the base url.
        parsed_locker_url = urlparse(get_config().get('locker.repo_url'))
        self._github = Github(
            get_config().creds,
            f'{parsed_locker_url.scheme}://{parsed_locker_url.hostname}')

    def notify(self):
        """Send notifications to Github as repository issues."""
        self.logger.info('Running the Github Issues notifier...')
        if not self._config:
            self.logger.warning('Using Github Issues notifier without config')

        messages = list(self._messages_by_accreditations().items())
        messages.sort(key=lambda x: x[0])
        for accreditation, results in messages:
            if accreditation not in self._config:
                continue
            passed, failed, warned, errored = self._split_by_status(results)
            results_by_status = {
                'pass': passed,
                'fail': failed,
                'warn': warned,
                'error': errored
            }
            if self._config[accreditation].get('summary_issue'):
                self._notify_by_summary_issue(accreditation, results_by_status)
            elif self._push_error:
                self.logger.error('Remote locker push failed.  '
                                  'Github Issues notifier not triggered.')
            else:
                self._notify_by_check_issues(accreditation, results_by_status)

    def _notify_by_summary_issue(self, accred, results):
        issue = [self._generate_summary_issue(accred, results)]
        repos = self._config[accred].get('repo', [])
        for repo in repos:
            owner, repository = repo.split('/')
            issue_urls = self._process_new_alerts(
                owner, repository, issue,
                self._config[accred]['summary_issue'].get('message'))
            self._assign_projects(issue_urls, repo, accred)

    def _generate_summary_issue(self, accred, results):
        summary_config = self._config[accred]['summary_issue']
        title = summary_config['title']
        labels = summary_config.get('labels', [])
        assignees = summary_config.get('assignees', [])
        frequency = summary_config.get('frequency')
        rotation = summary_config.get('rotation')
        rotation_index = None
        now = datetime.utcnow()
        if frequency == 'day':
            today = now.strftime('%Y-%m-%d')
            title = f'{today} - {title}'
            labels.extend([frequency, today])
            rotation_index = now.timetuple().tm_yday
        elif frequency == 'week':
            year, week, _ = now.isocalendar()
            title = f'{year}, {week}W - {title}'
            labels.extend([frequency, str(year), f'{week}W'])
            rotation_index = week
        elif frequency == 'month':
            year = now.strftime('%Y')
            month = now.strftime('%mM')
            title = f'{year}, {month} - {title}'
            labels.extend([frequency, year, month])
            rotation_index = int(month[:-1])
        elif frequency == 'year':
            year = now.strftime('%Y')
            title = f'{year} - {title}'
            labels.extend([frequency, year])
            rotation_index = int(year)
        if rotation and rotation_index:
            assignees = rotation[divmod(rotation_index, len(rotation))[1]]
        issue = {'title': title, 'labels': labels, 'assignees': assignees}
        issue['body'] = '\n'.join(
            self._generate_accred_content(accred, results))
        return issue

    def _notify_by_check_issues(self, accred, results):
        issues = []
        statuses = self._config[accred].get('status', ['fail'])
        repos = self._config[accred].get('repo', [])
        for status, result in results.items():
            if status in statuses:
                issues += self._generate_issues(accred, result)
        for repo in repos:
            owner, repository = repo.split('/')
            issue_urls = self._process_new_alerts(owner, repository, issues)
            self._assign_projects(issue_urls, repo, accred)
        if 'pass' not in statuses:
            for repo in repos:
                owner, repository = repo.split('/')
                issues = self._generate_issues(accred, results['pass'])
                issue_urls = self._process_old_alerts(owner, repository,
                                                      issues)
                self._assign_projects(issue_urls, repo, accred)

    def _generate_issues(self, accred, results):
        issues = []
        if not results:
            return issues
        for check_path, result, message in results:
            now = datetime.utcnow().strftime('%Y-%m-%d %H:%M:%S UTC')
            body = [f'## Compliance check alert - {now}']
            body.append(f'- Check: {check_path}')
            test_obj = result['test'].test
            check_name = check_path.rsplit('.', 1).pop()
            doc = getattr(test_obj.__class__, check_name).__doc__
            if doc:
                doc = doc.strip()
                newline = doc.find('\n')
                if newline > -1:
                    doc = doc[:newline]
                body.append(f'- Description: {doc}')
            body.append(f'- Accreditation: {accred}')
            status = ''.join(
                self._get_summary_and_body(
                    result,
                    message,
                    include_title=False,
                    summary_format='{status} ({issues})',
                    link_format='[{name}]({url})'))
            body.append(f'- Run Status: **{status}**')
            run_dttm = datetime.fromtimestamp(result['timestamp'])
            body.append(f'- Run Date/Time: {run_dttm}')
            report_links = self._get_report_links(
                result, link_format='[{name}]({url})')
            if report_links:
                body.append(f'- Reports: {", ".join(report_links)}')

            issue = {
                'title':
                message['title'],
                'body':
                '\n'.join(body),
                'labels': [
                    f'accreditation: {accred}',
                    f'run status: {result["status"]}'
                ]
            }
            issues.append(issue)

        return issues

    def _process_new_alerts(self, owner, repository, issues, message=None):
        issue_urls = {}
        for issue in issues:
            gh_issue = self._find_gh_issue('/'.join([owner, repository]),
                                           issue['title'])
            if gh_issue is None:
                body = issue['body']
                if message:
                    joined_msg = '\n'.join(message)
                    body = f'{joined_msg}\n\n{issue["body"]}'
                gh_issue = self._github.add_issue(owner,
                                                  repository,
                                                  issue['title'],
                                                  body,
                                                  labels=issue['labels'],
                                                  assignees=issue.get(
                                                      'assignees', []))
            else:
                self._update_issue_labels(owner, repository, gh_issue,
                                          issue['labels'])
                self._github.add_issue_comment(owner, repository,
                                               gh_issue['number'],
                                               issue['body'])
            issue_urls[gh_issue['id']] = gh_issue['url']
        return issue_urls

    def _process_old_alerts(self, owner, repository, issues):
        issue_urls = {}
        for issue in issues:
            gh_issue = self._find_gh_issue('/'.join([owner, repository]),
                                           issue['title'])
            if gh_issue:
                self._update_issue_labels(owner, repository, gh_issue,
                                          issue['labels'])
                self._github.add_issue_comment(owner, repository,
                                               gh_issue['number'],
                                               issue['body'])
                issue_urls[gh_issue['id']] = gh_issue['url']
        return issue_urls

    def _find_gh_issue(self, repo, title):
        gh_issues = self._github.search_issues(
            f'{title} type:issue in:title is:open repo:{repo}')
        found = None
        for issue in gh_issues:
            if issue['title'] == title:
                found = issue
                break
        return found

    def _update_issue_labels(self, owner, repository, issue, labels):
        current_labels = [label['name'] for label in issue['labels']]
        new_labels = list(set(labels) - set(current_labels))
        if new_labels:
            current_labels = [
                label for label in current_labels
                if not label.startswith('run status: ')
            ]
            self._github.patch_issue(owner,
                                     repository,
                                     issue['number'],
                                     labels=current_labels + new_labels)

    def _assign_projects(self, issues, repo, accred):
        config_projects = self._config[accred].get('project')
        if not config_projects:
            return
        all_projects = {
            p['name']: p['id']
            for p in self._github.get_all_projects(repo)
        }
        for project, column in config_projects.items():
            if project not in all_projects.keys():
                self.logger.warning(f'Project {project} not found in {repo}')
                continue
            columns = {
                c['name']: c['id']
                for c in self._github.get_columns(all_projects[project])
            }
            if column not in columns.keys():
                self.logger.warning(f'Column {column} not found '
                                    f'in {project} project, {repo} repo')
                continue
            card_lists = self._github.get_all_cards(columns.values()).values()
            issue_urls = [
                c.get('content_url') for cl in card_lists for c in cl
            ]
            for issue_id, issue_url in issues.items():
                if issue_url in issue_urls:
                    continue
                self._github.add_card(columns[column], issue=issue_id)
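
The 'notify.gh_issues' configuration that drives this class can be pieced together from the lookups in the methods above; everything below uses hypothetical accreditation, repository and project names:

# Hypothetical 'notify.gh_issues' configuration, keyed by accreditation.
# Every key shown corresponds to a lookup made by GHIssuesNotifier above.
notify_config = {
    'my-accreditation': {
        'repo': ['my-org/compliance-issues'],
        'status': ['fail', 'error'],  # default is ['fail']
        'summary_issue': {
            'title': 'Compliance summary',
            'frequency': 'week',  # one of day, week, month, year
            'labels': ['compliance'],
            'assignees': ['octocat']
            # 'rotation' and 'message' are also honored by the code above.
        },
        'project': {'Compliance board': 'To do'}  # project name -> column name
    }
}
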