def __init__(self, context, spec, working_dir=None):
    """Set up lint-processor state for one pull request.

    When *working_dir* is omitted, the checkout directory recorded on the
    context is used instead.
    """
    self.context = context
    self.spec = spec
    self.working_dir = working_dir if working_dir else context.clone_path
    self.problems = Problems()
    self.pr = PullRequest(bitbucket, context.repository)
    sha = context.source['commit']['hash']
    # Link the commit status back to the pull request page on Bitbucket.
    status_url = 'https://bitbucket.org/{}/pull-requests/{}'.format(
        context.repository, context.pr_id)
    self.build_status = BuildStatus(
        bitbucket,
        context.source['repository']['full_name'],
        sha,
        'badwolf/lint',
        status_url)
def handle_pull_request_approved(payload):
    """Auto-merge a pull request after an approval event.

    *payload* is the webhook body of a pull request approval event
    (assumes Bitbucket's webhook schema with 'repository' and
    'pullrequest' keys -- verify against the webhook sender).

    Bails out when auto-merge is disabled, a skip keyword appears in the
    PR title/description, the PR is not open, there are fewer approvals
    than configured, or the latest 'badwolf/test' build did not succeed.
    """
    if not current_app.config['AUTO_MERGE_ENABLED']:
        return

    repo = payload['repository']
    pr = payload['pullrequest']
    pr_id = pr['id']
    # Keyword matching is case-insensitive.
    title = pr['title'].lower()
    description = (pr['description'] or '').lower()
    for keyword in ('wip', 'merge skip', 'working in progress'):
        if keyword in title or keyword in description:
            logger.info('%s found, ignore auto merge.', keyword)
            return

    pull_request = PullRequest(
        bitbucket,
        repo['full_name']
    )
    try:
        pr_info = pull_request.get(pr_id)
    except BitbucketAPIError as exc:
        logger.exception('Error calling Bitbucket API')
        # 404 means the PR is gone; anything else is worth reporting.
        if exc.code != 404:
            sentry.captureException()
        return

    if pr_info['state'] != 'OPEN':
        return

    participants = pr_info['participants']
    approved_users = [u for u in participants if u['approved']]
    if len(approved_users) < current_app.config['AUTO_MERGE_APPROVAL_COUNT']:
        return

    commit_hash = pr_info['source']['commit']['hash']
    build_status = BuildStatus(
        bitbucket,
        pr_info['source']['repository']['full_name'],
        commit_hash,
        'badwolf/test',
        url_for('log.build_log', sha=commit_hash, _external=True)
    )
    # Merge commit message: PR title, then the original (non-lowercased)
    # description when one exists.
    message = 'Auto merge pull request #{}: {}'.format(pr_id, pr['title'])
    if description:
        message += '\n\n{}'.format(pr['description'])
    try:
        status = build_status.get()
        # Only merge when the test commit status is green.
        if status['state'] == 'SUCCESSFUL':
            pull_request.merge(pr_id, message)
    except BitbucketAPIError:
        logger.exception('Error calling Bitbucket API')
        sentry.captureException()
def __init__(self, context, spec, working_dir):
    """Prepare lint state: problem collector, PR client and lint build status."""
    self.context = context
    self.spec = spec
    self.working_dir = working_dir
    self.problems = Problems()
    self.pr = PullRequest(bitbucket, context.repository)
    sha = context.source['commit']['hash']
    lint_log_url = url_for('log.lint_log', sha=sha, _external=True)
    self.build_status = BuildStatus(
        bitbucket,
        context.source['repository']['full_name'],
        sha,
        'badwolf/lint',
        lint_log_url,
    )
def __init__(self, context):
    """Record the pipeline context and create the 'badwolf/test' commit status."""
    self.context = context
    sha = context.source['commit']['hash']
    self.commit_hash = sha
    log_url = url_for('log.build_log', sha=sha, task_id=context.task_id, _external=True)
    self.build_status = BuildStatus(
        bitbucket,
        context.repository,
        sha,
        'badwolf/test',
        log_url,
    )
    # No Vault client yet; presumably assigned elsewhere when Vault is
    # configured -- confirm against the rest of the class.
    self.vault = None
def __init__(self, context, lock):
    """Capture task identity and build the Bitbucket/Docker clients for the run."""
    self.context = context
    self.lock = lock
    repository = context.repository
    self.repo_full_name = repository
    self.repo_name = repository.split('/')[-1]
    # Fresh id per run; used to namespace the local clone directory.
    self.task_id = str(uuid.uuid4())
    sha = context.source['commit']['hash']
    self.commit_hash = sha
    self.build_status = BuildStatus(
        bitbucket,
        context.source['repository']['full_name'],
        sha,
        'badwolf/test',
        url_for('log.build_log', sha=sha, _external=True))
    self.docker = Client(
        base_url=current_app.config['DOCKER_HOST'],
        timeout=current_app.config['DOCKER_API_TIMEOUT'],
    )
def __init__(self, context, spec, build_status=None, docker_version='auto'):
    """Set up builder state: status reporter, Docker client and optional Vault client.

    A caller-supplied *build_status* takes precedence over the default
    'badwolf/test' status created here.
    """
    self.context = context
    self.spec = spec
    self.repo_name = context.repository.split('/')[-1]
    sha = context.source['commit']['hash']
    self.commit_hash = sha
    self.build_status = build_status if build_status else BuildStatus(
        bitbucket,
        context.source['repository']['full_name'],
        sha,
        'badwolf/test',
        url_for('log.build_log', sha=sha, _external=True))
    app_conf = current_app.config
    self.docker = DockerClient(
        base_url=app_conf['DOCKER_HOST'],
        timeout=app_conf['DOCKER_API_TIMEOUT'],
        version=docker_version,
    )
    # Spec-level Vault settings override the application-wide defaults;
    # without both a URL and a token no Vault client is created.
    vault_url = spec.vault.url or app_conf['VAULT_URL']
    vault_token = spec.vault.token or app_conf['VAULT_TOKEN']
    if vault_url and vault_token:
        self.vault = hvac.Client(url=vault_url, token=vault_token)
    else:
        self.vault = None
def deploy(self):
    """Run every active deploy provider and report a commit status per provider.

    Each provider gets its own 'badwolf/deploy/<name>' build status.
    After-deploy scripts run only when at least one provider succeeded.
    Slack webhooks fire per provider according to the spec's
    on_success/on_failure settings.
    """
    if not self.providers:
        logger.info('No deploy provider active')
        return

    commit_hash = self.context.source['commit']['hash']
    run_after_deploy = False
    notification = self.spec.notification
    slack_webhook = notification.slack_webhook
    for provider_config in self.providers:
        provider_name = provider_config.provider
        provider_class = self.PROVIDERS.get(provider_name)
        if not provider_class:
            # Unknown provider name in the spec: skip rather than fail the run.
            logger.warning('Provider %s not found', provider_name)
            continue

        provider = provider_class(self.working_dir, provider_config)
        if not provider.is_usable():
            logger.warning('Provider %s is not usable', provider_name)
            continue

        build_status = BuildStatus(
            bitbucket,
            self.context.source['repository']['full_name'],
            commit_hash,
            'badwolf/deploy/{}'.format(provider_name),
            url_for('log.build_log', sha=commit_hash, task_id=self.context.task_id, _external=True))
        self._update_build_status(
            build_status, 'INPROGRESS', '{} deploy in progress'.format(provider_name))
        succeed, output = provider.deploy()
        logger.info('Provider %s deploy %s, output: \n%s',
                    provider_name,
                    'succeed' if succeed else 'failed',
                    output)
        state = 'SUCCESSFUL' if succeed else 'FAILED'
        self._update_build_status(
            build_status, state, '{} deploy {}'.format(provider_name, state.lower()))
        if succeed:
            run_after_deploy = True
            if slack_webhook and slack_webhook.on_success == 'always':
                trigger_slack_webhook(slack_webhook.webhooks, self.context, provider, True)
        else:
            if slack_webhook and slack_webhook.on_failure == 'always':
                trigger_slack_webhook(slack_webhook.webhooks, self.context, provider, False)

    # after deploy
    if not run_after_deploy or not self.spec.after_deploy:
        return

    for script in self.spec.after_deploy:
        # Best-effort: exit codes are logged but do not fail the deploy.
        exit_code, output = run_command(script, shell=True)
        logger.info(
            'After deploy command `%s` exit code: %s, output: \n %s',
            script, exit_code, output)
def __init__(self, context, spec, docker_version='auto'):
    """Store run context and construct the build-status and Docker clients."""
    self.context = context
    self.spec = spec
    self.repo_name = context.repository.split('/')[-1]
    sha = context.source['commit']['hash']
    self.commit_hash = sha
    log_url = url_for('log.build_log', sha=sha, _external=True)
    self.build_status = BuildStatus(
        bitbucket,
        context.source['repository']['full_name'],
        sha,
        'badwolf/test',
        log_url,
    )
    app_conf = current_app.config
    self.docker = Client(
        base_url=app_conf['DOCKER_HOST'],
        timeout=app_conf['DOCKER_API_TIMEOUT'],
        version=docker_version,
    )
def check_mergeable(context, pr_api, pr_info):
    """Update the 'badwolf/pr/mergeable' status for a pull request.

    Checks the PR diff for conflict markers and sets the commit status
    accordingly. An "unmergeable" comment is posted only on a state
    transition: either no prior status existed (first check, API 404) or
    the previous status was SUCCESSFUL -- avoids repeating the comment on
    every push while the PR stays conflicted.
    """
    pr_id = pr_info['id']
    merge_status = BuildStatus(
        bitbucket,
        pr_info['source']['repository']['full_name'],
        pr_info['source']['commit']['hash'],
        'badwolf/pr/mergeable',
        'https://bitbucket.org/{}/pull-requests/{}'.format(
            context.repository, pr_id))
    notify = False
    status = {'state': None}
    try:
        status = merge_status.get()
    except BitbucketAPIError as e:
        # Only a missing status (404) is expected; re-raise anything else.
        if e.code != 404:
            raise
        notify = True
    else:
        if status['state'] == 'SUCCESSFUL':
            notify = True

    # Bitbucket inserts '+<<<<<<< destination:' lines into the diff when
    # the merge would conflict.
    diff = pr_api.diff(pr_id, raw=True)
    if '+<<<<<<< destination:' not in diff:
        # Mergeable
        logger.info('Pull request #%s is mergeable', pr_id)
        if status['state'] != 'SUCCESSFUL':
            merge_status.update('SUCCESSFUL', 'Pull request is mergeable')
        return

    # Unmergeable
    if not notify:
        return

    logger.info('Pull request #%s is not mergeable', pr_id)
    merge_status.update('FAILED', 'Pull request is not mergeable')
    comment = (
        ':umbrella: The latest upstream changes(presumably {}) made this pull request unmergeable. '
        'Please resolve the merge conflicts.')
    # Attribute the breakage to a merged PR when the triggering commit
    # message looks like a merge commit, otherwise to the raw commit.
    matches = _MERGE_COMMIT_RE.search(context.message)
    if matches:
        comment = comment.format('pull request #{}'.format(matches.group(1)))
    else:
        comment = comment.format('commit {}'.format(
            context.source['commit']['hash']))
    pr_api.comment(pr_id, comment)
class LintProcessor(object):
    """Run the configured linters against a pull request's changed files
    and report the findings as inline PR comments plus a commit status."""

    # Registry of supported linter names -> linter classes.
    LINTERS = {
        'eslint': ESLinter,
        'flake8': Flake8Linter,
        'jscs': JSCSLinter,
        'pep8': PEP8Linter,
        'csslint': CSSLinter,
        'shellcheck': ShellCheckLinter,
        'jsonlint': JSONLinter,
        'yamllint': YAMLLinter,
        'bandit': BanditLinter,
        'rstlint': RestructuredTextLinter,
        'pylint': PylintLinter,
        'sasslint': SassLinter,
        'stylelint': StyleLinter,
    }

    def __init__(self, context, spec, working_dir):
        self.context = context
        self.spec = spec
        self.working_dir = working_dir
        self.problems = Problems()
        self.pr = PullRequest(bitbucket, context.repository)
        commit_hash = context.source['commit']['hash']
        self.build_status = BuildStatus(
            bitbucket,
            context.source['repository']['full_name'],
            commit_hash,
            'badwolf/lint',
            url_for('log.lint_log', sha=commit_hash, _external=True))

    def load_changes(self):
        """Fetch the PR diff and register it with the problem collector.

        Returns the parsed patch, or None when the diff could not be
        fetched or parsed.
        """
        try:
            changes = self.pr.diff(self.context.pr_id)
        except (BitbucketAPIError, UnidiffParseError):
            logger.exception('Error getting pull request diff from API')
            return
        self.problems.set_changes(changes)
        return changes

    def process(self):
        """Entry point: lint changed files and publish results.

        Skips silently when no linters are configured, the diff cannot be
        loaded, or no files were added/modified. The final build status is
        FAILED only when at least one problem is an error.
        """
        if not self.spec.linters:
            logger.info('No linters configured, ignore lint.')
            return

        logger.info('Running code linting')
        patch = self.load_changes()
        if not patch:
            logger.info('Load changes failed, ignore lint.')
            return

        # Only added/modified files are linted; deletions are irrelevant.
        lint_files = patch.added_files + patch.modified_files
        if not lint_files:
            logger.info('No changed files found, ignore lint')
            return

        self.update_build_status('INPROGRESS', 'Lint in progress')
        files = [f.path for f in lint_files]
        self._execute_linters(files)
        logger.info('%d problems found before limit to changes',
                    len(self.problems))
        # Drop problems on lines the PR did not actually touch.
        self.problems.limit_to_changes()

        has_error = any(p for p in self.problems if p.is_error)
        if len(self.problems):
            description = 'Found {} code issues'.format(len(self.problems))
        else:
            description = 'No code issues found'
            logger.info('No problems found when linting codes')

        # Report error or cleanup lint
        self._report()
        if has_error:
            self.update_build_status('FAILED', description)
        else:
            self.update_build_status('SUCCESSFUL', description)

    def _execute_linters(self, files):
        """Run every configured, usable linter over *files*, accumulating
        findings into self.problems."""
        for linter_option in self.spec.linters:
            name = linter_option.name
            linter_cls = self.LINTERS.get(name)
            if not linter_cls:
                logger.info('Linter %s not found, ignore.', name)
                continue

            linter = linter_cls(self.working_dir, self.problems, linter_option)
            if not linter.is_usable():
                # e.g. the linter binary is not installed in this environment.
                logger.info('Linter %s is not usable, ignore.', name)
                continue

            logger.info('Running %s code linter', name)
            linter.execute(files)

    def _report(self):
        """Post one inline PR comment per new problem.

        Existing badwolf comments (recognized by the ':broken_heart:'
        prefix) are either deleted (when cleanup_lint is set) or hashed so
        duplicate comments are not re-posted. Returns the number of
        comments submitted, or None when there were no problems.
        """
        try:
            comments = self.pr.all_comments(self.context.pr_id)
        except BitbucketAPIError:
            logger.exception('Error fetching all comments for pull request')
            comments = []

        hash_set = set()
        for comment in comments:
            inline = comment.get('inline')
            if not inline:
                continue
            raw = comment['content']['raw']
            if self.context.cleanup_lint and raw.startswith(':broken_heart:'):
                # Delete comment
                try:
                    self.pr.delete_comment(self.context.pr_id, comment['id'])
                except BitbucketAPIError:
                    logger.exception('Error deleting pull request comment')
            else:
                # Fingerprint existing comments for de-duplication below.
                filename = inline['path']
                line_to = inline['to']
                hash_set.add(hash('{}{}{}'.format(filename, line_to, raw)))

        if len(self.problems) == 0:
            return

        revision_before = self.context.target['commit']['hash']
        revision_after = self.context.source['commit']['hash']
        problem_count = 0
        for problem in self.problems:
            content = ':broken_heart: **{}**: {}'.format(
                problem.linter, problem.message)
            comment_hash = hash('{}{}{}'.format(
                problem.filename,
                problem.line,
                content,
            ))
            if comment_hash in hash_set:
                # Identical comment already present on this line.
                continue
            try:
                self.pr.comment(
                    self.context.pr_id,
                    content,
                    line_to=problem.line,
                    filename=problem.filename,
                    anchor=revision_after,
                    dest_rev=revision_before,
                )
            except BitbucketAPIError:
                logger.exception(
                    'Error creating inline comment for pull request')
            else:
                problem_count += 1

        # NOTE(review): 'submited' is a typo in the log text; left as-is
        # here since it is runtime output, not a comment.
        logger.info('Code lint result: %d problems found, %d submited',
                    len(self.problems), problem_count)
        return problem_count

    def update_build_status(self, state, description=None):
        """Push the lint commit status to Bitbucket; API errors are logged,
        never raised."""
        try:
            self.build_status.update(state, description=description)
        except BitbucketAPIError:
            logger.exception('Error calling Bitbucket API')
class TestRunner(object):
    """Badwolf test runner.

    Clones the repository, builds (or reuses) a per-repo Docker image,
    runs the project's test scripts inside a container, reports a commit
    status, sends notifications and finally lints pull requests.
    """

    def __init__(self, context, lock):
        self.context = context
        # Lock serializing Docker image build/lookup across concurrent runs.
        self.lock = lock
        self.repo_full_name = context.repository
        self.repo_name = context.repository.split('/')[-1]
        self.task_id = str(uuid.uuid4())
        self.commit_hash = context.source['commit']['hash']
        self.build_status = BuildStatus(
            bitbucket,
            context.source['repository']['full_name'],
            self.commit_hash,
            'badwolf/test',
            url_for('log.build_log', sha=self.commit_hash, _external=True))
        self.docker = Client(
            base_url=current_app.config['DOCKER_HOST'],
            timeout=current_app.config['DOCKER_API_TIMEOUT'],
        )

    def run(self):
        """Execute the whole test run: clone, validate, build image, run
        tests, notify, lint, clean up."""
        start_time = time.time()
        self.branch = self.context.source['branch']['name']
        try:
            self.clone_repository()
        except git.GitCommandError as e:
            # Report the git failure on the PR (or commit) and stop.
            logger.exception('Git command error')
            self.update_build_status('FAILED', 'Git clone repository failed')
            content = ':broken_heart: **Git error**: {}'.format(to_text(e))
            if self.context.pr_id:
                pr = PullRequest(bitbucket, self.repo_full_name)
                pr.comment(self.context.pr_id, content)
            else:
                cs = Changesets(bitbucket, self.repo_full_name)
                cs.comment(self.commit_hash, content)
            self.cleanup()
            return

        if not self.validate_settings():
            self.cleanup()
            return

        # Template context shared by notification emails / webhooks.
        context = {
            'context': self.context,
            'task_id': self.task_id,
            'build_log_url': url_for('log.build_log', sha=self.commit_hash, _external=True),
            'branch': self.branch,
            'scripts': self.spec.scripts,
        }
        if self.spec.scripts:
            self.update_build_status('INPROGRESS', 'Test in progress')
            docker_image_name, build_output = self.get_docker_image()
            # NOTE(review): 'build_logs' is set twice (direct assignment and
            # again in the update()); the second assignment wins, redundant.
            context['build_logs'] = to_text(build_output)
            context.update({
                'build_logs': to_text(build_output),
                'elapsed_time': int(time.time() - start_time),
            })
            if not docker_image_name:
                self.update_build_status('FAILED', 'Build or get Docker image failed')
                context['exit_code'] = -1
                self.send_notifications(context)
                self.cleanup()
                return

            exit_code, output = self.run_tests_in_container(docker_image_name)
            if exit_code == 0:
                # Success
                logger.info('Test succeed for repo: %s', self.repo_full_name)
                self.update_build_status('SUCCESSFUL', '1 of 1 test succeed')
            else:
                # Failed
                logger.info(
                    'Test failed for repo: %s, exit code: %s',
                    self.repo_full_name,
                    exit_code
                )
                self.update_build_status('FAILED', '1 of 1 test failed')

            context.update({
                'logs': to_text(output),
                'exit_code': exit_code,
                'elapsed_time': int(time.time() - start_time),
            })
            self.send_notifications(context)

        # Code linting
        if self.context.pr_id and self.spec.linters:
            lint = LintProcessor(self.context, self.spec, self.clone_path)
            lint.process()

        self.cleanup()

    def clone_repository(self):
        """Shallow-clone the source repo into a task-scoped temp directory
        and check out the revision under test (merging PRs locally)."""
        self.clone_path = os.path.join(
            tempfile.gettempdir(), 'badwolf', self.task_id, self.repo_name)
        source_repo = self.context.source['repository']['full_name']
        # Use shallow clone to speed up
        bitbucket.clone(source_repo, self.clone_path, depth=50, branch=self.branch)
        gitcmd = git.Git(self.clone_path)
        if self.context.target:
            # Pull Request: check out the target branch and merge the
            # source branch into it, mirroring what Bitbucket would merge.
            target_repo = self.context.target['repository']['full_name']
            target_branch = self.context.target['branch']['name']
            if source_repo == target_repo:
                target_remote = 'origin'
            else:
                # Pull Request across forks: add the fork as a remote.
                target_remote = target_repo.split('/', 1)[0]
                gitcmd.remote('add', target_remote, bitbucket.get_git_url(target_repo))
            gitcmd.fetch(target_remote, target_branch)
            gitcmd.checkout('FETCH_HEAD')
            gitcmd.merge('origin/{}'.format(self.branch))
        else:
            # Push to branch or ci retry comment on some commit
            logger.info('Checkout commit %s', self.commit_hash)
            gitcmd.checkout(self.commit_hash)

        gitmodules = os.path.join(self.clone_path, '.gitmodules')
        if os.path.exists(gitmodules):
            gitcmd.submodule('update', '--init', '--recursive')

    def validate_settings(self):
        """Parse the project's badwolf spec; return False (skipping the run)
        when it is missing, the branch is not enabled, or there is nothing
        to run."""
        conf_file = os.path.join(
            self.clone_path, current_app.config['BADWOLF_PROJECT_CONF'])
        if not os.path.exists(conf_file):
            logger.warning('No project configuration file found for repo: %s',
                           self.repo_full_name)
            return False

        self.spec = spec = Specification.parse_file(conf_file)
        if self.context.type == 'commit' and spec.branch and self.branch not in spec.branch:
            logger.info(
                'Ignore tests since branch %s test is not enabled. Allowed branches: %s',
                self.branch,
                spec.branch
            )
            return False

        if not spec.scripts and not spec.linters:
            logger.warning('No script(s) or linter(s) to run')
            return False
        return True

    def get_docker_image(self):
        """Return (image_name, build_log) for the repo's test image,
        building it when absent or a rebuild was requested; image_name is
        None when the build failed."""
        docker_image_name = self.repo_full_name.replace('/', '-')
        output = []
        with self.lock:
            docker_image = self.docker.images(docker_image_name)
            if not docker_image or self.context.rebuild:
                dockerfile = os.path.join(self.clone_path, self.spec.dockerfile)
                build_options = {
                    'tag': docker_image_name,
                    'rm': True,
                }
                if not os.path.exists(dockerfile):
                    # Fall back to the stock runner image when the repo
                    # ships no Dockerfile.
                    logger.warning(
                        'No Dockerfile: %s found for repo: %s, using simple runner image',
                        dockerfile,
                        self.repo_full_name
                    )
                    dockerfile_content = 'FROM messense/badwolf-test-runner\n'
                    fileobj = io.BytesIO(dockerfile_content.encode('utf-8'))
                    build_options['fileobj'] = fileobj
                else:
                    build_options['dockerfile'] = self.spec.dockerfile

                build_success = False
                logger.info('Building Docker image %s', docker_image_name)
                self.update_build_status('INPROGRESS', 'Building Docker image')
                res = self.docker.build(self.clone_path, **build_options)
                for line in res:
                    # Success marker emitted by the Docker build stream.
                    if b'Successfully built' in line:
                        build_success = True
                    log = to_text(json.loads(to_text(line))['stream'])
                    output.append(log)
                    logger.info('`docker build` : %s', log.strip())
                if not build_success:
                    return None, ''.join(output)

        return docker_image_name, ''.join(output)

    def run_tests_in_container(self, docker_image_name):
        """Run `badwolf-run` in a fresh container with the clone bind-mounted
        at /mnt/src; return (exit_code, combined_output). exit_code is -1 on
        Docker errors/timeouts. The container is always removed."""
        command = '/bin/sh -c badwolf-run'
        environment = {}
        if self.spec.environments:
            # TODO: Support run in multiple environments
            environment = self.spec.environments[0]

        # TODO: Add more test context related env vars
        environment.update({
            'DEBIAN_FRONTEND': 'noninteractive',
            'CI': 'true',
            'CI_NAME': 'badwolf',
            'BADWOLF_BRANCH': self.branch,
            'BADWOLF_COMMIT': self.commit_hash,
            'BADWOLF_BUILD_DIR': '/mnt/src',
            'BADWOLF_REPO_SLUG': self.repo_full_name,
        })
        if self.context.pr_id:
            environment['BADWOLF_PULL_REQUEST'] = to_text(self.context.pr_id)

        container = self.docker.create_container(
            docker_image_name,
            command=command,
            environment=environment,
            working_dir='/mnt/src',
            volumes=['/mnt/src'],
            host_config=self.docker.create_host_config(
                privileged=self.spec.privileged,
                binds={
                    self.clone_path: {
                        'bind': '/mnt/src',
                        'mode': 'rw',
                    },
                }))
        container_id = container['Id']
        logger.info('Created container %s from image %s',
                    container_id, docker_image_name)

        output = []
        try:
            self.docker.start(container_id)
            self.update_build_status('INPROGRESS', 'Running tests in Docker container')
            # Stream logs so long runs still produce incremental output.
            for line in self.docker.logs(container_id, stream=True):
                output.append(to_text(line))

            exit_code = self.docker.wait(
                container_id, current_app.config['DOCKER_RUN_TIMEOUT'])
        except (APIError, DockerException, ReadTimeout) as e:
            exit_code = -1
            output.append(to_text(e))
            logger.exception('Docker error')
        finally:
            try:
                self.docker.remove_container(container_id, force=True)
            except (APIError, DockerException):
                logger.exception('Error removing docker container')

        return exit_code, ''.join(output)

    def update_build_status(self, state, description=None):
        """Push the commit status to Bitbucket; API errors are logged,
        never raised."""
        try:
            self.build_status.update(state, description=description)
        except BitbucketAPIError:
            logger.exception('Error calling Bitbucket API')

    def send_notifications(self, context):
        """Render the result template, archive it as the build log page, and
        send mail / Slack notifications per the spec."""
        exit_code = context['exit_code']
        template = 'test_success' if exit_code == 0 else 'test_failure'
        html = render_template('mail/' + template + '.html', **context)
        html = sanitize_sensitive_data(html)

        # Save log html
        log_dir = os.path.join(
            current_app.config['BADWOLF_LOG_DIR'], self.commit_hash)
        if not os.path.exists(log_dir):
            os.makedirs(log_dir)
        log_file = os.path.join(log_dir, 'build.html')
        with open(log_file, 'wb') as f:
            f.write(to_binary(html))

        if exit_code == 0:
            subject = 'Test succeed for repository {}'.format(
                self.repo_full_name)
        else:
            subject = 'Test failed for repository {}'.format(
                self.repo_full_name)
        notification = self.spec.notification
        emails = notification['emails']
        if emails:
            send_mail(emails, subject, html)

        slack_webhooks = notification['slack_webhooks']
        if slack_webhooks:
            message = render_template('slack_webhook/' + template + '.md', **context)
            trigger_slack_webhook(slack_webhooks, message)

    def cleanup(self):
        """Remove the task's temporary clone directory (best effort)."""
        shutil.rmtree(os.path.dirname(self.clone_path), ignore_errors=True)
class Pipeline(object):
    '''badwolf build/lint pipeline

    Orchestrates one run: clone, spec parsing, build, lint, cleanup.
    Expected pipeline errors are modelled as BadwolfException subclasses
    and silently end the run.
    '''

    def __init__(self, context):
        self.context = context
        self.commit_hash = context.source['commit']['hash']
        self.build_status = BuildStatus(
            bitbucket,
            context.source['repository']['full_name'],
            self.commit_hash,
            'badwolf/test',
            url_for('log.build_log', sha=self.commit_hash, _external=True))

    def start(self):
        '''Start Pipeline'''
        logger.info('Pipeline started for repository %s',
                    self.context.repository)
        try:
            self.clone()
            self.parse_spec()
            self.build()
            self.lint()
        except git.GitCommandError as git_err:
            logger.exception('Git command error')
            self._report_git_error(git_err)
        except BitbucketAPIError:
            logger.exception('Error calling BitBucket API')
        except BadwolfException:
            # Expected control-flow exceptions (spec missing, build
            # disabled, ...) just end the pipeline quietly.
            pass
        finally:
            self.clean()

    def _report_git_error(self, exc):
        """Mark the build FAILED and post the (sanitized) git error as a
        PR or commit comment."""
        self.build_status.update('FAILED', description='Git clone repository failed')
        content = ':broken_heart: **Git error**: {}'.format(to_text(exc))
        content = sanitize_sensitive_data(content)
        if self.context.pr_id:
            pr = PullRequest(bitbucket, self.context.repository)
            pr.comment(self.context.pr_id, content)
        else:
            cs = Changesets(bitbucket, self.context.repository)
            cs.comment(self.commit_hash, content)

    def clone(self):
        '''Clone Git repository to local'''
        logger.info('Cloning repository %s', self.context.repository)
        RepositoryCloner(self.context).clone()

    def parse_spec(self):
        '''Parse repository build/lint spec.

        Raises SpecificationNotFound, BuildDisabled or
        InvalidSpecification to abort the pipeline.
        '''
        logger.info('Parsing specification for repository %s',
                    self.context.repository)
        conf_file = os.path.join(self.context.clone_path,
                                 current_app.config['BADWOLF_PROJECT_CONF'])
        try:
            spec = Specification.parse_file(conf_file)
        except OSError:
            logger.warning('No project configuration file found for repo: %s',
                           self.context.repository)
            raise SpecificationNotFound()

        branch = self.context.source['branch']['name']
        if self.context.type == 'commit' and not spec.is_branch_enabled(branch):
            logger.info(
                'Ignore tests since branch %s test is not enabled. Allowed branches: %s',
                branch,
                spec.branch)
            raise BuildDisabled()
        if not spec.scripts and not spec.linters:
            logger.warning('No script(s) or linter(s) to run')
            raise InvalidSpecification()
        self.spec = spec

    def build(self):
        '''Build project'''
        if self.spec.scripts:
            logger.info('Running build for repository %s',
                        self.context.repository)
            Builder(self.context, self.spec).run()

    def lint(self):
        '''Lint codes'''
        # Lint only applies to pull requests with linters configured.
        if self.context.pr_id and self.spec.linters:
            logger.info('Running lint for repository %s',
                        self.context.repository)
            LintProcessor(self.context, self.spec).process()

    def clean(self):
        '''Clean local files'''
        logger.info('Cleaning local files for repository %s',
                    self.context.repository)
        try:
            shutil.rmtree(os.path.dirname(self.context.clone_path),
                          ignore_errors=True)
        except OSError:
            logger.exception('Error clean local files')
class Pipeline(object):
    '''badwolf build/lint pipeline

    Full pipeline: clone, spec parsing (incl. Vault secrets), build,
    artifact archiving, lint and deploy. Expected pipeline errors are
    BadwolfException subclasses and end the run quietly.
    '''

    def __init__(self, context):
        self.context = context
        self.commit_hash = context.source['commit']['hash']
        self.build_status = BuildStatus(
            bitbucket,
            context.repository,
            self.commit_hash,
            'badwolf/test',
            url_for('log.build_log', sha=self.commit_hash,
                    task_id=context.task_id, _external=True)
        )
        # Vault client is created in parse_spec() when configured.
        self.vault = None

    def start(self):
        '''Start Pipeline'''
        logger.info('Pipeline started for repository %s',
                    self.context.repository)
        try:
            self.clone()
            self.parse_spec()
            exit_code = self.build()
            build_success = exit_code == 0
            self.save_artifacts(build_success)
            if exit_code != 137:
                # 137 means build cancelled
                self.lint()
            if build_success:
                self.deploy()
        except git.GitCommandError as git_err:
            logger.exception('Git command error')
            self._report_git_error(git_err)
        except BitbucketAPIError:
            logger.exception('Error calling BitBucket API')
            sentry.captureException()
        except InvalidSpecification as err:
            self._report_error(':umbrella: Invalid badwolf configuration: ' + str(err))
        except BadwolfException:
            pass
        finally:
            self.clean()

    def _report_error(self, content):
        """Post *content* (sanitized) as a PR comment, or a commit comment
        when there is no pull request."""
        content = sanitize_sensitive_data(content)
        if self.context.pr_id:
            pr = PullRequest(bitbucket, self.context.repository)
            pr.comment(
                self.context.pr_id,
                content
            )
        else:
            cs = Changesets(bitbucket, self.context.repository)
            cs.comment(
                self.commit_hash,
                content
            )

    def _report_git_error(self, exc):
        """Mark the build FAILED and report the git error; merge conflicts
        get a file-by-file conflict listing instead of the raw message."""
        def _linkify_file(name):
            # Markdown link to the file's anchor in the Bitbucket diff view.
            return '[`{name}`](#chg-{name})'.format(name=name)

        self.build_status.update('FAILED', description='Git clone repository failed')
        git_error_msg = str(exc)
        content = ':broken_heart: **Git error**: {}'.format(git_error_msg)
        if 'Merge conflict' in git_error_msg:
            # git merge conflicted
            conflicted_files = RepositoryCloner.get_conflicted_files(
                self.context.clone_path
            )
            if conflicted_files:
                conflicted_files = '\n'.join(('* ' + _linkify_file(name)
                                              for name in conflicted_files.split('\n')))
                content = ':broken_heart: This branch has conflicts that must be resolved\n\n'
                content += '**Conflicting files**\n\n{}'.format(conflicted_files)
        self._report_error(content)

    def _report_docker_error(self, exc):
        """Mark the build FAILED and report a Docker API error."""
        self.build_status.update('FAILED', description='Docker error occurred')
        content = ':broken_heart: **Docker error**: {}'.format(exc.explanation)
        self._report_error(content)

    def clone(self):
        '''Clone Git repository to local'''
        logger.info('Cloning repository %s', self.context.repository)
        RepositoryCloner(self.context).clone()

    def parse_spec(self):
        '''Parse repository build/lint spec.

        Also parses an optional Secretfile, validates that the branch is
        enabled and something is runnable, and sets up the Vault client.
        Raises SpecificationNotFound, BuildDisabled or
        InvalidSpecification to abort the pipeline.
        '''
        logger.info('Parsing specification for repository %s',
                    self.context.repository)
        conf_file = os.path.join(self.context.clone_path,
                                 current_app.config['BADWOLF_PROJECT_CONF'])
        try:
            spec = Specification.parse_file(conf_file)
        except OSError:
            logger.warning(
                'No project configuration file found for repo: %s',
                self.context.repository
            )
            raise SpecificationNotFound()

        secretfile = os.path.join(self.context.clone_path, 'Secretfile')
        if os.path.exists(secretfile):
            spec.parse_secretfile(secretfile)

        branch = self.context.source['branch']['name']
        if self.context.type == 'branch' and not spec.is_branch_enabled(branch):
            logger.info(
                'Ignore tests since branch %s test is not enabled. Allowed branches: %s',
                branch,
                spec.branch
            )
            raise BuildDisabled()
        if not spec.scripts and not spec.linters:
            logger.warning('No script(s) or linter(s) to run')
            raise InvalidSpecification('No script or linter to run')
        self.spec = spec

        # setup Vault: spec settings override application defaults.
        vault_url = spec.vault.url or current_app.config['VAULT_URL']
        vault_token = spec.vault.token or current_app.config['VAULT_TOKEN']
        if vault_url and vault_token:
            self.vault = hvac.Client(url=vault_url, token=vault_token)
        self._populate_envvars_from_vault()

    def _populate_envvars_from_vault(self):
        """Resolve spec.vault.env ((path, key) pairs) against Vault and
        inject the values into the build environment without overriding
        already-set variables. Raises InvalidSpecification on read errors
        or missing paths."""
        if self.vault is None or not self.spec.vault.env:
            return

        paths = [v[0] for v in self.spec.vault.env.values()]
        secrets = {}
        for path in paths:
            try:
                res = self.vault.read(path)
            except VaultError as exc:
                raise InvalidSpecification(
                    'Error reading {} from Vault: {}'.format(path, str(exc)))
            if not res:
                raise InvalidSpecification(
                    'Error reading {} from Vault: not found'.format(path))
            secrets[path] = res['data']

        for name, (path, key) in self.spec.vault.env.items():
            val = secrets.get(path, {}).get(key)
            if val is not None:
                # setdefault: explicit environment wins over Vault values.
                self.context.environment.setdefault(name, val)

    def build(self):
        '''Build project.

        Returns the builder's exit code, False on Docker API errors, or
        None when there are no scripts to run.
        '''
        if self.spec.scripts:
            logger.info('Running build for repository %s',
                        self.context.repository)
            try:
                return Builder(self.context, self.spec,
                               build_status=self.build_status).run()
            except DockerAPIError as e:
                logger.exception('Docker API error')
                self._report_docker_error(e)
                return False

    def save_artifacts(self, build_success):
        '''Save artifacts produced during build.

        Best effort: any exception is logged and reported, never raised.
        '''
        if not self.spec.artifacts.paths:
            return
        try:
            self._save_artifacts(build_success)
        except Exception:
            logger.exception('Error saving artifacts for repository %s',
                             self.context.repository)
            sentry.captureException()

    def _save_artifacts(self, build_success):
        """Archive the configured artifact paths to a per-commit tarball,
        and on a successful branch/tag build also symlink the artifacts
        under the branch name and publish a 'badwolf/artifacts' status."""
        def _should_exclude(path):
            # Glob-style exclusion against spec.artifacts.excludes.
            excluded = self.spec.artifacts.excludes
            if not excluded:
                return False
            for pattern in excluded:
                if fnmatch.fnmatch(path, pattern):
                    return True
            return False

        logger.info('Saving artifacts for repository %s',
                    self.context.repository)
        paths = []
        for path in self.spec.artifacts.paths:
            if '$' not in path:
                paths.append(path)
            else:
                # Path contains shell variables: expand via `echo` in the
                # clone directory; output is ':'-separated.
                cmd = 'echo {}'.format(path)
                exit_code, output = run_command(cmd, cwd=self.context.clone_path, shell=True)
                if exit_code == 0:
                    paths.extend(x for x in output.strip().split(':')
                                 if x and not _should_exclude(x))

        if not paths:
            logger.info('No artifacts paths found for repository %s',
                        self.context.repository)
            return

        artifacts_repo_path = os.path.join(
            current_app.config['BADWOLF_ARTIFACTS_DIR'],
            self.context.repository,
        )
        artifacts_commit_path = os.path.join(
            artifacts_repo_path,
            self.commit_hash
        )
        os.makedirs(artifacts_commit_path, exist_ok=True)
        artifacts_file = os.path.join(artifacts_commit_path, 'artifacts.tar.gz')
        file_added = False
        with tarfile.open(artifacts_file, 'w:gz') as tar:
            for path in paths:
                file_path = os.path.join(self.context.clone_path, path)
                try:
                    tar.add(file_path, path)
                except FileNotFoundError as exc:
                    # Missing paths are logged and skipped, not fatal.
                    logger.error(str(exc))
                else:
                    file_added = True

        if not file_added:
            # Nothing archived: drop the empty directory and stop.
            try:
                shutil.rmtree(artifacts_commit_path, ignore_errors=True)
            except OSError:
                logger.exception('Error clean empty artifacts files')
            return

        run_command('shasum artifacts.tar.gz > SHASUM',
                    cwd=artifacts_commit_path, shell=True)
        logger.info('Saved artifacts to %s', artifacts_commit_path)
        if build_success and self.context.type in ('tag', 'branch'):
            # Expose a stable per-branch path via symlinks to the commit's
            # files.
            artifacts_branch_path = os.path.join(
                artifacts_repo_path,
                self.context.source['branch']['name']
            )
            os.makedirs(artifacts_branch_path, exist_ok=True)
            for name in ('artifacts.tar.gz', 'SHASUM'):
                commit_path = os.path.join(artifacts_commit_path, name)
                branch_path = os.path.join(artifacts_branch_path, name)
                try:
                    os.remove(branch_path)
                except OSError:
                    pass
                os.symlink(commit_path, branch_path)
            logger.info('Saved artifacts to %s', artifacts_branch_path)

        build_status = BuildStatus(
            bitbucket,
            self.context.repository,
            self.commit_hash,
            'badwolf/artifacts',
            url_for('artifacts.download_artifacts',
                    user=self.context.repo_owner,
                    repo=self.context.repo_name,
                    sha=self.commit_hash,
                    filename='artifacts.tar.gz',
                    _external=True)
        )
        build_status.update('SUCCESSFUL', description='Build artifacts saved')

    def lint(self):
        '''Lint codes'''
        # Only for pull requests with linters configured and lint not skipped.
        if not self.context.skip_lint and self.context.pr_id and self.spec.linters:
            logger.info('Running lint for repository %s',
                        self.context.repository)
            LintProcessor(self.context, self.spec).process()

    def deploy(self):
        '''Deploy.

        Runs only for branch/tag events; selects providers whose branch
        list includes the current branch (branch events) or that enable
        tags (tag events).
        '''
        if not self.spec.deploy or self.context.type not in {'branch', 'tag'}:
            return

        providers = []
        branch = self.context.source['branch']['name']
        for provider in self.spec.deploy:
            if (self.context.type == 'branch' and branch in provider.branch) or \
                    (self.context.type == 'tag' and provider.tag):
                providers.append(provider)

        if not providers:
            return

        logger.info('Running %d deploy(s) for repository %s',
                    len(providers), self.context.repository)
        Deployer(self.context, self.spec, providers).deploy()

    def clean(self):
        '''Clean local files'''
        logger.info('Cleaning local files (%s) for repository %s',
                    self.context.clone_path, self.context.repository)
        try:
            shutil.rmtree(self.context.clone_path, ignore_errors=True)
        except OSError:
            logger.exception('Error clean local files')
            sentry.captureException()
def _save_artifacts(self, build_success):
    """Package configured artifact paths into a gzipped tarball and publish it.

    Collects paths from ``self.spec.artifacts.paths``, writes
    ``artifacts.tar.gz`` plus a ``SHASUM`` file under
    ``BADWOLF_ARTIFACTS_DIR/<repository>/<commit_hash>``, mirrors the files
    into a per-branch directory via symlinks for successful tag/branch
    builds, and finally reports a SUCCESSFUL ``badwolf/artifacts`` build
    status linking to the download endpoint.

    :param build_success: truthy when the build passed; only then are the
        branch-level symlinks created.
    """
    def _should_exclude(path):
        # True when `path` matches any exclusion glob from the spec.
        excluded = self.spec.artifacts.excludes
        if not excluded:
            return False
        for pattern in excluded:
            if fnmatch.fnmatch(path, pattern):
                return True
        return False

    logger.info('Saving artifacts for repository %s', self.context.repository)
    paths = []
    for path in self.spec.artifacts.paths:
        if '$' not in path:
            # Literal path: use as-is.
            paths.append(path)
        else:
            # Path contains shell variables: expand it by echoing through a
            # shell in the clone directory. NOTE(review): `path` comes from
            # the project's own config, but this is still shell
            # interpolation of user-controlled text — confirm that is the
            # intended trust model.
            cmd = 'echo {}'.format(path)
            exit_code, output = run_command(cmd, cwd=self.context.clone_path, shell=True)
            if exit_code == 0:
                # The expanded output is split on ':' (multiple paths per
                # entry); empty and excluded entries are dropped.
                paths.extend(x for x in output.strip().split(':') if x and not _should_exclude(x))

    if not paths:
        logger.info('No artifacts paths found for repository %s', self.context.repository)
        return

    artifacts_repo_path = os.path.join(
        current_app.config['BADWOLF_ARTIFACTS_DIR'],
        self.context.repository,
    )
    # One directory per commit keeps artifacts for different revisions apart.
    artifacts_commit_path = os.path.join(
        artifacts_repo_path,
        self.commit_hash
    )
    os.makedirs(artifacts_commit_path, exist_ok=True)
    artifacts_file = os.path.join(artifacts_commit_path, 'artifacts.tar.gz')
    file_added = False
    with tarfile.open(artifacts_file, 'w:gz') as tar:
        for path in paths:
            file_path = os.path.join(self.context.clone_path, path)
            try:
                # Store under the relative name `path`, not the absolute
                # clone location.
                tar.add(file_path, path)
            except FileNotFoundError as exc:
                # Missing artifact paths are logged but do not abort the run.
                logger.error(str(exc))
            else:
                file_added = True

    if not file_added:
        # Nothing made it into the tarball: remove the (empty) commit
        # directory and skip the status update entirely.
        try:
            shutil.rmtree(artifacts_commit_path, ignore_errors=True)
        except OSError:
            logger.exception('Error clean empty artifacts files')
        return

    # Checksum file lives next to the tarball; `shasum` runs with the commit
    # directory as cwd so SHASUM contains a relative filename.
    run_command('shasum artifacts.tar.gz > SHASUM', cwd=artifacts_commit_path, shell=True)
    logger.info('Saved artifacts to %s', artifacts_commit_path)

    if build_success and self.context.type in ('tag', 'branch'):
        # Expose a stable per-branch/tag location that always points at the
        # latest successful commit's artifacts.
        artifacts_branch_path = os.path.join(
            artifacts_repo_path,
            self.context.source['branch']['name']
        )
        os.makedirs(artifacts_branch_path, exist_ok=True)
        for name in ('artifacts.tar.gz', 'SHASUM'):
            commit_path = os.path.join(artifacts_commit_path, name)
            branch_path = os.path.join(artifacts_branch_path, name)
            try:
                # Replace any symlink left by a previous build.
                os.remove(branch_path)
            except OSError:
                pass
            os.symlink(commit_path, branch_path)
        logger.info('Saved artifacts to %s', artifacts_branch_path)

    # Report a dedicated 'badwolf/artifacts' build status whose URL serves
    # the tarball download.
    build_status = BuildStatus(
        bitbucket,
        self.context.repository,
        self.commit_hash,
        'badwolf/artifacts',
        url_for('artifacts.download_artifacts', user=self.context.repo_owner,
                repo=self.context.repo_name, sha=self.commit_hash,
                filename='artifacts.tar.gz', _external=True)
    )
    build_status.update('SUCCESSFUL', description='Build artifacts saved')
class Builder(object):
    """Badwolf build runner.

    Builds (or reuses) a per-repository Docker image, runs the project's
    test scripts inside a container, reports 'badwolf/test' build statuses
    to Bitbucket and sends email/Slack notifications with the results.
    """

    def __init__(self, context, spec, docker_version='auto', build_status=None):
        """
        :param context: build context (repository, source commit, flags).
        :param spec: parsed project specification.
        :param docker_version: Docker API version passed to the client.
        :param build_status: optional pre-built BuildStatus to reuse.
            FIX: ``Pipeline.build`` constructs ``Builder(context, spec,
            build_status=...)``; the previous signature had no such
            parameter and that call raised TypeError. Appended as a
            trailing keyword so existing positional callers are unaffected.
        """
        self.context = context
        self.spec = spec
        self.repo_name = context.repository.split('/')[-1]
        self.commit_hash = context.source['commit']['hash']
        self.build_status = build_status or BuildStatus(
            bitbucket,
            context.source['repository']['full_name'],
            self.commit_hash,
            'badwolf/test',
            url_for('log.build_log', sha=self.commit_hash, _external=True)
        )
        self.docker = Client(
            base_url=current_app.config['DOCKER_HOST'],
            timeout=current_app.config['DOCKER_API_TIMEOUT'],
            version=docker_version,
        )

    def run(self):
        """Run the full build: image, container, status updates, notifications.

        :returns: ``True`` when the in-container test run exited with 0,
            ``False`` otherwise. FIX: previously every path returned
            ``None``, yet ``Pipeline.start`` uses this return value as
            ``build_success`` to decide whether to deploy — so deploys
            could never trigger. Returning a boolean is backward
            compatible for callers that ignored the (None) result.
        """
        start_time = time.time()
        self.branch = self.context.source['branch']['name']
        # Template context shared by status pages and notification emails.
        context = {
            'context': self.context,
            'build_log_url': url_for('log.build_log', sha=self.commit_hash, _external=True),
            'branch': self.branch,
            'scripts': self.spec.scripts,
            'ansi_termcolor_style': deansi.styleSheet(),
        }
        self.update_build_status('INPROGRESS', 'Test in progress')
        docker_image_name, build_output = self.get_docker_image()
        context.update({
            'build_logs': Markup(build_output),
            'elapsed_time': int(time.time() - start_time),
        })
        if not docker_image_name:
            # Image build/lookup failed; report and notify with exit_code -1.
            self.update_build_status('FAILED', 'Build or get Docker image failed')
            context['exit_code'] = -1
            self.send_notifications(context)
            return False

        exit_code, output = self.run_in_container(docker_image_name)
        if exit_code == 0:
            # Success
            logger.info('Test succeed for repo: %s', self.context.repository)
            self.update_build_status('SUCCESSFUL', '1 of 1 test succeed')
        else:
            # Failed
            logger.info(
                'Test failed for repo: %s, exit code: %s',
                self.context.repository,
                exit_code
            )
            self.update_build_status('FAILED', '1 of 1 test failed')

        context.update({
            'logs': Markup(deansi.deansi(output)),
            'exit_code': exit_code,
            'elapsed_time': int(time.time() - start_time),
        })
        self.send_notifications(context)
        return exit_code == 0

    def get_docker_image(self):
        """Return ``(image_name, build_log_text)``; image_name is None on failure.

        Reuses an existing image named after the repository unless a rebuild
        was requested; otherwise builds from the spec's Dockerfile, falling
        back to a stock runner image when no Dockerfile exists.
        """
        docker_image_name = self.context.repository.replace('/', '-')
        output = []
        docker_image = self.docker.images(docker_image_name)
        if not docker_image or self.context.rebuild:
            dockerfile = os.path.join(self.context.clone_path, self.spec.dockerfile)
            build_options = {
                'tag': docker_image_name,
                'rm': True,
                'stream': True,
                'decode': True,
                'nocache': self.context.nocache,
            }
            if not os.path.exists(dockerfile):
                logger.warning(
                    'No Dockerfile: %s found for repo: %s, using simple runner image',
                    dockerfile,
                    self.context.repository
                )
                dockerfile_content = 'FROM messense/badwolf-test-runner:python\n'
                fileobj = io.BytesIO(dockerfile_content.encode('utf-8'))
                build_options['fileobj'] = fileobj
            else:
                build_options['dockerfile'] = self.spec.dockerfile

            build_success = False
            logger.info('Building Docker image %s', docker_image_name)
            self.update_build_status('INPROGRESS', 'Building Docker image')
            res = self.docker.build(self.context.clone_path, **build_options)
            for log in res:
                if 'errorDetail' in log:
                    msg = log['errorDetail']['message']
                elif 'error' in log:
                    # Deprecated
                    # https://github.com/docker/docker/blob/master/pkg/jsonmessage/jsonmessage.go#L104
                    msg = log['error']
                else:
                    # FIX: not every decoded build message carries a
                    # 'stream' key (e.g. 'aux'/'status' entries), which
                    # previously raised KeyError mid-build.
                    msg = log.get('stream', '')
                if 'Successfully built' in msg:
                    build_success = True
                output.append(deansi.deansi(msg))
                logger.info('`docker build` : %s', msg.strip())
            if not build_success:
                return None, ''.join(output)
        return docker_image_name, ''.join(output)

    def run_in_container(self, docker_image_name):
        """Run ``badwolf-run`` inside a container of the given image.

        :returns: ``(exit_code, combined_output_text)``; exit_code is -1 on
            Docker errors/timeouts.
        """
        command = '/bin/sh -c badwolf-run'
        environment = {}
        if self.spec.environments:
            # TODO: Support run in multiple environments
            environment = self.spec.environments[0]

        # TODO: Add more test context related env vars
        environment.update({
            'DEBIAN_FRONTEND': 'noninteractive',
            'CI': 'true',
            'CI_NAME': 'badwolf',
            'BADWOLF_BRANCH': self.branch,
            'BADWOLF_COMMIT': self.commit_hash,
            'BADWOLF_BUILD_DIR': '/mnt/src',
            'BADWOLF_REPO_SLUG': self.context.repository,
        })
        if self.context.pr_id:
            environment['BADWOLF_PULL_REQUEST'] = to_text(self.context.pr_id)

        # Bind-mount the clone read-write at /mnt/src inside the container.
        container = self.docker.create_container(
            docker_image_name,
            command=command,
            environment=environment,
            working_dir='/mnt/src',
            volumes=['/mnt/src'],
            host_config=self.docker.create_host_config(
                privileged=self.spec.privileged,
                binds={
                    self.context.clone_path: {
                        'bind': '/mnt/src',
                        'mode': 'rw',
                    },
                }
            ),
            stdin_open=False,
            tty=True
        )
        container_id = container['Id']
        logger.info('Created container %s from image %s', container_id, docker_image_name)

        output = []
        try:
            self.docker.start(container_id)
            self.update_build_status('INPROGRESS', 'Running tests in Docker container')
            # Second positional argument is the wait timeout in seconds.
            exit_code = self.docker.wait(container_id, current_app.config['DOCKER_RUN_TIMEOUT'])
        except (APIError, DockerException, ReadTimeout) as e:
            exit_code = -1
            output.append(to_text(e))
            logger.exception('Docker error')
        finally:
            # Always collect logs and force-remove the container, even after
            # errors; removal failures are logged, not raised.
            try:
                output.append(to_text(self.docker.logs(container_id)))
                self.docker.remove_container(container_id, force=True)
            except (APIError, DockerException, ReadTimeout):
                logger.exception('Error removing docker container')
        return exit_code, ''.join(output)

    def update_build_status(self, state, description=None):
        """Update the Bitbucket build status, swallowing (but logging) API errors."""
        try:
            self.build_status.update(state, description=description)
        except BitbucketAPIError:
            logger.exception('Error calling Bitbucket API')

    def send_notifications(self, context):
        """Render the result page, persist it as the build log, and notify.

        Sends email and/or Slack notifications according to the spec's
        notification settings.
        """
        exit_code = context['exit_code']
        template = 'test_success' if exit_code == 0 else 'test_failure'
        html = render_template('mail/' + template + '.html', **context)
        html = sanitize_sensitive_data(html)

        # Save log html
        log_dir = os.path.join(current_app.config['BADWOLF_LOG_DIR'], self.commit_hash)
        os.makedirs(log_dir, exist_ok=True)
        log_file = os.path.join(log_dir, 'build.html')
        with open(log_file, 'wb') as f:
            f.write(to_binary(html))

        if exit_code == 0:
            subject = 'Test succeed for repository {}'.format(self.context.repository)
        else:
            subject = 'Test failed for repository {}'.format(self.context.repository)
        notification = self.spec.notification
        emails = notification['emails']
        if emails:
            send_mail(emails, subject, html)

        slack_webhooks = notification['slack_webhooks']
        if slack_webhooks:
            message = render_template('slack_webhook/' + template + '.md', **context)
            trigger_slack_webhook(slack_webhooks, message)
class LintProcessor(object):
    """Runs configured linters against a pull request's changed files and
    reports problems as inline Bitbucket comments plus a 'badwolf/lint'
    build status, cleaning up comments for issues that have been fixed."""

    # Registry mapping spec linter names to linter implementations.
    LINTERS = {
        'eslint': ESLinter,
        'flake8': Flake8Linter,
        'pep8': PyCodeStyleLinter,
        'pycodestyle': PyCodeStyleLinter,
        'csslint': CSSLinter,
        'shellcheck': ShellCheckLinter,
        'jsonlint': JSONLinter,
        'yamllint': YAMLLinter,
        'bandit': BanditLinter,
        'rstlint': RestructuredTextLinter,
        'pylint': PylintLinter,
        'sasslint': SassLinter,
        'stylelint': StyleLinter,
        'mypy': MypyLinter,
    }

    def __init__(self, context, spec, working_dir=None):
        """
        :param context: build context; must carry ``pr_id``, ``source`` and
            ``repository`` for the lint run.
        :param spec: parsed project specification (``spec.linters`` drives
            which linters run).
        :param working_dir: directory to lint; defaults to the clone path.
        """
        self.context = context
        self.spec = spec
        self.working_dir = working_dir or context.clone_path
        self.problems = Problems()
        self.pr = PullRequest(bitbucket, context.repository)
        commit_hash = context.source['commit']['hash']
        # Status URL points at the pull request page on Bitbucket.
        self.build_status = BuildStatus(
            bitbucket,
            context.source['repository']['full_name'],
            commit_hash,
            'badwolf/lint',
            'https://bitbucket.org/{}/pull-requests/{}'.format(
                context.repository,
                context.pr_id))

    def load_changes(self):
        """Fetch the pull request diff and record it on ``self.problems``.

        :returns: the parsed changes, or ``None`` (implicitly) when the API
            call or diff parsing fails — callers treat falsy as failure.
        """
        try:
            changes = self.pr.diff(self.context.pr_id)
        except (BitbucketAPIError, UnidiffParseError):
            logger.exception('Error getting pull request diff from API')
            sentry.captureException()
            return

        self.problems.set_changes(changes)
        return changes

    def process(self):
        """Run the full lint pipeline: diff, linters, report, build status."""
        if not self.spec.linters:
            logger.info('No linters configured, ignore lint.')
            return

        logger.info('Running code linting')
        patch = self.load_changes()
        if not patch:
            logger.info('Load changes failed, ignore lint.')
            return

        # Only added/modified files are linted; deletions are irrelevant.
        lint_files = patch.added_files + patch.modified_files
        if not lint_files:
            logger.info('No changed files found, ignore lint')
            return

        self.update_build_status('INPROGRESS', 'Lint in progress')
        files = [f.path for f in lint_files]
        self._execute_linters(files)
        total_problems = len(self.problems)
        # Narrow the problem list to lines actually touched by the diff.
        self.problems.limit_to_changes()
        in_diff_problems = len(self.problems)
        # Report error and cleanup outdated lint comments
        submitted_problems, fixed_problems = self._report()

        if total_problems > 0:
            if in_diff_problems == total_problems:
                description = 'Found {} new issues'.format(total_problems)
            else:
                description = 'Found {} issues'.format(total_problems)
                description += ', {} issues in diff'.format(in_diff_problems)
            if submitted_problems > 0:
                description += ', {} new issues'.format(submitted_problems)
            if fixed_problems > 0:
                description += ' {} issues fixed'.format(fixed_problems)
        else:
            description = 'No code issues found'

        # The lint status only fails when at least one problem is an error
        # (warnings alone still pass).
        has_error = any(p for p in self.problems if p.is_error)
        if has_error:
            logger.info('Lint failed: %s', description)
            self.update_build_status('FAILED', description)
        else:
            logger.info('Lint successful: %s', description)
            self.update_build_status('SUCCESSFUL', description)

    def _execute_linters(self, files):
        """Run every configured, usable linter over ``files``.

        Unknown or unusable linters are skipped with a log message; each
        linter appends its findings to ``self.problems``.
        """
        for linter_option in self.spec.linters:
            name = linter_option.name
            linter_cls = self.LINTERS.get(name)
            if not linter_cls:
                logger.info('Linter %s not found, ignore.', name)
                continue

            linter = linter_cls(self.working_dir, self.problems, linter_option)
            if not linter.is_usable():
                logger.info('Linter %s is not usable, ignore.', name)
                continue

            logger.info('Running %s code linter', name)
            linter.execute(files)

    def _report(self):
        """Post inline comments for new problems; delete outdated ones.

        Existing badwolf lint comments are recognized by the
        ``':broken_heart: **'`` prefix and keyed by (filename, line, text)
        so identical problems are not re-posted.

        :returns: ``(submitted_count, outdated_cleaned_count)``.
        """
        try:
            comments = self.pr.all_comments(self.context.pr_id)
        except BitbucketAPIError:
            logger.exception('Error fetching all comments for pull request')
            sentry.captureException()
            # Best effort: proceed as if there were no existing comments.
            comments = []

        existing_comments_ids = {}
        for comment in comments:
            inline = comment.get('inline')
            if not inline:
                continue

            raw = comment['content']['raw']
            # Only consider comments this tool created.
            if not raw.startswith(':broken_heart: **'):
                continue

            filename = inline['path']
            line = inline['to'] or inline['from']
            if line is None:
                continue
            existing_comments_ids[(filename, line, raw)] = comment['id']

        if len(self.problems) == 0:
            return 0, 0

        revision_before = self.context.target['commit']['hash']
        revision_after = self.context.source['commit']['hash']
        lint_comments = set()
        problem_count = 0
        for problem in self.problems:
            content = ':broken_heart: **{}**: {}'.format(
                problem.linter, problem.message)
            comment_tuple = (problem.filename, problem.line, content)
            lint_comments.add(comment_tuple)
            # Skip problems that already have an identical comment.
            if comment_tuple in existing_comments_ids:
                continue

            # Anchor the inline comment at the source revision against the
            # target revision of the pull request.
            comment_kwargs = {
                'filename': problem.filename,
                'anchor': revision_after,
                'dest_rev': revision_before,
            }
            if problem.has_line_change:
                comment_kwargs['line_to'] = problem.line
            else:
                comment_kwargs['line_from'] = problem.line
            try:
                self.pr.comment(self.context.pr_id, content, **comment_kwargs)
            except BitbucketAPIError:
                logger.exception(
                    'Error creating inline comment for pull request')
                sentry.captureException()
            else:
                problem_count += 1

        logger.info('Code lint result: %d problems found, %d submitted',
                    len(self.problems), problem_count)

        outdated_cleaned = 0
        # Comments that no longer correspond to a current problem.
        outdated_comments = set(existing_comments_ids.keys()) - lint_comments
        logger.info('%d outdated lint comments found', len(outdated_comments))
        for comment in outdated_comments:
            # Delete comment
            try:
                self.pr.delete_comment(self.context.pr_id, existing_comments_ids[comment])
                outdated_cleaned += 1
            except BitbucketAPIError:
                logger.exception('Error deleting pull request comment')
                sentry.captureException()

        return problem_count, outdated_cleaned

    def update_build_status(self, state, description=None):
        """Update the 'badwolf/lint' Bitbucket status; log+capture API errors."""
        try:
            self.build_status.update(state, description=description)
        except BitbucketAPIError:
            logger.exception('Error calling Bitbucket API')
            sentry.captureException()
class Pipeline(object):
    '''badwolf build/lint pipeline.

    Orchestrates a single CI run: clone, spec parsing, build, lint and
    deploy, reporting failures back to Bitbucket and always cleaning up the
    local clone.
    '''

    def __init__(self, context):
        self.context = context
        self.commit_hash = context.source['commit']['hash']
        self.build_status = BuildStatus(
            bitbucket,
            context.source['repository']['full_name'],
            self.commit_hash,
            'badwolf/test',
            url_for('log.build_log', sha=self.commit_hash,
                    task_id=context.task_id, _external=True))

    def start(self):
        '''Start Pipeline: run every stage, report errors, always clean up.'''
        logger.info('Pipeline started for repository %s', self.context.repository)
        try:
            self.clone()
            self.parse_spec()
            build_success = self.build()
            self.lint()
            # Deploy only after a successful build.
            if build_success:
                self.deploy()
        except git.GitCommandError as exc:
            logger.exception('Git command error')
            self._report_git_error(exc)
        except BitbucketAPIError:
            logger.exception('Error calling BitBucket API')
            sentry.captureException()
        except InvalidSpecification as err:
            self._report_error(':umbrella: Invalid badwolf configuration: ' + str(err))
        except BadwolfException:
            # Expected control-flow exceptions (build disabled, spec missing).
            pass
        finally:
            self.clean()

    def _report_error(self, content):
        '''Post `content` on the pull request, or on the commit otherwise.'''
        content = sanitize_sensitive_data(content)
        if not self.context.pr_id:
            Changesets(bitbucket, self.context.repository).comment(
                self.commit_hash, content)
            return
        PullRequest(bitbucket, self.context.repository).comment(
            self.context.pr_id, content)

    def _report_git_error(self, exc):
        '''Mark the build failed and describe the git error (incl. conflicts).'''
        def _linkify_file(name):
            # Markdown link targeting Bitbucket's per-file diff anchor.
            return '[`{name}`](#chg-{name})'.format(name=name)

        self.build_status.update('FAILED', description='Git clone repository failed')
        git_error_msg = str(exc)
        content = ':broken_heart: **Git error**: {}'.format(git_error_msg)
        if 'Merge conflict' in git_error_msg:
            # git merge conflicted: list the conflicting files instead.
            conflicted_files = RepositoryCloner.get_conflicted_files(
                self.context.clone_path)
            if conflicted_files:
                bullet_list = '\n'.join(
                    ('* ' + _linkify_file(name)
                     for name in conflicted_files.split('\n')))
                content = ':broken_heart: This branch has conflicts that must be resolved\n\n'
                content += '**Conflicting files**\n\n{}'.format(bullet_list)
        self._report_error(content)

    def _report_docker_error(self, exc):
        '''Mark the build failed and report the Docker error explanation.'''
        self.build_status.update('FAILED', description='Docker error occurred')
        content = ':broken_heart: **Docker error**: {}'.format(exc.explanation)
        self._report_error(content)

    def clone(self):
        '''Clone Git repository to local'''
        logger.info('Cloning repository %s', self.context.repository)
        RepositoryCloner(self.context).clone()

    def parse_spec(self):
        '''Parse repository build/lint spec'''
        logger.info('Parsing specification for repository %s', self.context.repository)
        conf_file = os.path.join(self.context.clone_path,
                                 current_app.config['BADWOLF_PROJECT_CONF'])
        try:
            spec = Specification.parse_file(conf_file)
        except OSError:
            logger.warning('No project configuration file found for repo: %s',
                           self.context.repository)
            raise SpecificationNotFound()

        branch = self.context.source['branch']['name']
        if self.context.type == 'branch' and not spec.is_branch_enabled(branch):
            logger.info(
                'Ignore tests since branch %s test is not enabled. Allowed branches: %s',
                branch, spec.branch)
            raise BuildDisabled()
        if not spec.scripts and not spec.linters:
            logger.warning('No script(s) or linter(s) to run')
            raise InvalidSpecification('No script or linter to run')
        self.spec = spec

    def build(self):
        '''Build project; returns the builder's result, False on Docker error.'''
        if not self.spec.scripts:
            return
        logger.info('Running build for repository %s', self.context.repository)
        try:
            return Builder(self.context, self.spec,
                           build_status=self.build_status).run()
        except DockerAPIError as e:
            logger.exception('Docker API error')
            self._report_docker_error(e)
            return False

    def lint(self):
        '''Lint codes'''
        if not (self.context.pr_id and self.spec.linters):
            return
        logger.info('Running lint for repository %s', self.context.repository)
        LintProcessor(self.context, self.spec).process()

    def deploy(self):
        '''Deploy'''
        if not self.spec.deploy or self.context.type not in {'branch', 'tag'}:
            return

        branch = self.context.source['branch']['name']
        # A provider is selected when its branch list matches (branch builds)
        # or it opts into tag deploys (tag builds).
        providers = [
            provider for provider in self.spec.deploy
            if (self.context.type == 'branch' and branch in provider.branch)
            or (self.context.type == 'tag' and provider.tag)
        ]
        if not providers:
            return

        logger.info('Running %d deploy(s) for repository %s',
                    len(providers), self.context.repository)
        Deployer(self.context, self.spec, providers).deploy()

    def clean(self):
        '''Clean local files'''
        logger.info('Cleaning local files (%s) for repository %s',
                    self.context.clone_path, self.context.repository)
        try:
            shutil.rmtree(self.context.clone_path, ignore_errors=True)
        except OSError:
            logger.exception('Error clean local files')
            sentry.captureException()