Example #1
def build_new_view(job_slug):
    """
    View to create a new build
    """
    job = Job(slug=job_slug)
    if not job.exists():
        abort(404)

    if request.method == 'POST':
        build = Build(job=job)
        build.repo = job.repo

        build_url = url_for('build_view',
                            job_slug=job_slug,
                            build_slug=build.slug)

        if 'X-Github-Event' in request.headers:
            if not job.github_secret:
                logging.warn("GitHub webhook secret not setup")
                abort(403)

            if not is_valid_github(job.github_secret):
                logging.warn("Invalid GitHub payload")
                abort(403)

            if request.headers['X-Github-Event'] == 'push':
                push_data = request.json
                build.commit = push_data['head_commit']['id']

            else:
                logging.debug("Unknown GitHub hook '%s'",
                              request.headers['X-Github-Event'])
                abort(501)

            try:
                build.save()
                build.queue()

                return build_url, 201

            except ValidationError as ex:
                logging.exception("GitHub hook error")
                return json.dumps({
                    'errors': ex.messages,
                }), 400

        else:
            build.commit = request.form['commit']

            try:
                build.save()
                build.queue()

                flash(u"Build queued", 'success')
                return redirect(build_url, 303)

            except ValidationError as ex:
                flash(ex.messages, 'danger')

    return render_template('build_new.html', build=Build(job=job))
Example #2
    def post(self, project_slug):
        """ Create a new job for a project """
        project = Project.query.filter_by(slug=project_slug).first_or_404()
        job = Job(project=project, repo_fs=project.repo_fs)
        self.handle_write(job, JOB_NEW_PARSER)
        job.queue()

        return job
Example #3
    def test_service_display(self, mocker, kwargs, repo_name_, exp_display):
        job = Job(**kwargs)
        class MockJobConfig(object):
            repo_name = repo_name_
        job._job_config = MockJobConfig()
        job.id = 20

        assert job.service.display_full == exp_display
Example #4
        def runnable(handle):
            """
            Resolve jobs and start services
            """
            all_okay = True
            # pylint:disable=no-member
            for job_slug, service_config in self.build_config.services.items():
                service_job = Job(job_slug)
                if not service_job.exists():
                    handle.write(
                        ("No job found matching %s\n" % job_slug).encode())
                    all_okay = False
                    continue

                service_build = service_job.latest_build(passed=True,
                                                         versioned=True)
                if not service_build:
                    handle.write(
                        ("No successful, versioned build for %s - %s\n" %
                         (job_slug, service_job.name)).encode())
                    all_okay = False
                    continue

                handle.write(
                    ("%sStarting service %s - %s %s" %
                     ("" if all_okay else "NOT ", job_slug, service_job.name,
                      service_build.version)).encode())

                try:
                    service_kwargs = {
                        key: value
                        for key, value in service_config.items()
                        if key in ('command', 'environment')
                    }
                    service_container = self.docker_client.create_container(
                        image=service_build.image_id, **service_kwargs)
                    self.docker_client.start(service_container['Id'])

                    # Store the provisioning info
                    self._provisioned_containers.append({
                        'job_slug':
                        job_slug,
                        'config':
                        service_config,
                        'id':
                        service_container['Id']
                    })
                    handle.write("... STARTED!\n".encode())

                except docker.errors.APIError as ex:
                    handle.write(("... FAILED!\n    %s" %
                                  ex.explanation.decode()).encode())
                    all_okay = False

            return all_okay
Example #5
def job_edit_view(slug):
    """
    View to edit a job
    """
    job = Job(slug)
    if not job.exists():
        abort(404)

    return render_template('job_edit.html',
                           job=job,
                           edit_operation='edit')
Example #6
    def test_no_ancestors(self,
                          mocker,
                          new_result,
                          changed):
        """ Test when ancestor job has a result """
        job_current = Job()

        mocker.patch.object(job_current, 'ancestor_job', new=None)
        mocker.patch.object(job_current, 'result', new=new_result)

        assert job_current.changed_result() == changed
Example #7
    def test_ancestor_incomplete(self, mocker):
        job_current = Job()
        job_ancestor_incomplete = Job()

        mocker.patch.object(job_ancestor_incomplete, 'result', new=None)
        mocker.patch.object(
            job_current, 'ancestor_job', new=job_ancestor_incomplete,
        )

        mocker.patch.object(job_current, 'result', new=JobResult.success)

        assert job_current.changed_result() is True
Example #8
    def test_ancestor_complete(self,
                               mocker,
                               prev_result,
                               new_result,
                               changed):
        """ Test when ancestor job has a result """
        job_current = Job()
        job_ancestor = Job()

        mocker.patch.object(job_current, 'ancestor_job', new=job_ancestor)
        mocker.patch.object(job_current, 'result', new=new_result)
        mocker.patch.object(job_ancestor, 'result', new=prev_result)

        assert job_current.changed_result() == changed
Example #9
    def _process_message(self, channel, body, envelope, _):
        """ Parse message data and start in a worker """
        self._logger.info('Received message')
        self._logger.debug('Message body: %s', body)

        try:
            job_data = json.loads(body.decode())
            project_slug = job_data.pop('project_slug')
            job_slug = job_data.pop('job_slug')
            job = Job.load(project_slug, job_slug, **job_data)

        except (ValueError, KeyError):
            self._logger.exception('Failed to load job message: %s', body)
            self._logger.info('Rejecting message')
            yield from channel.basic_client_nack(
                delivery_tag=envelope.delivery_tag,
                requeue=False,
            )

        else:
            self._job_event.set()
            self._logger.info('Acknowledging message')
            yield from channel.basic_client_ack(
                delivery_tag=envelope.delivery_tag,
            )

            self._logger.info('Running job %s/%s', project_slug, job_slug)
            yield from asyncio.get_event_loop().run_in_executor(
                None, job.run,
            )

            self._logger.info('Job completed')
            self._job_event.clear()
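
The handler above only shows the consuming side. As a rough illustrative sketch (not part of the DockCI source), a publisher would need to send a JSON body shaped like the one _process_message decodes: 'project_slug' and 'job_slug' keys, with any remaining keys forwarded to Job.load as keyword arguments.

import json

def build_job_message(project_slug, job_slug, **extra):
    """Serialise a job message body in the shape the consumer expects."""
    payload = {'project_slug': project_slug, 'job_slug': job_slug}
    payload.update(extra)  # hypothetical extra keyword arguments for Job.load
    return json.dumps(payload).encode()  # the consumer calls body.decode()

# build_job_message('my-project', 'a1b2c3') ->
# b'{"project_slug": "my-project", "job_slug": "a1b2c3"}'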
Example #10
    def test_basic_sets(self,
                        mocker,
                        model_state,
                        in_service,
                        in_state,
                        in_msg,
                        exp_state,
                        exp_msg,
                        ):
        """ Test some basic input/output combinations """
        job = Job()
        mocker.patch('dockci.models.job.Job.state',
                     new_callable=PropertyMock(return_value=model_state))
        out_state, out_msg = job.state_data_for(in_service, in_state, in_msg)

        assert out_state == exp_state
        assert out_msg == exp_msg
Example #11
def job_edit_view(slug):
    """
    View to edit a job
    """
    return render_template('job_edit.html',
                           job=Job(slug),
                           edit_operation='edit')
Example #12
def build_output_view(job_slug, build_slug, filename):
    """
    View to download some build output
    """
    job = Job(slug=job_slug)
    build = Build(job=job, slug=build_slug)

    # TODO possible security issue opening files from user input like this
    data_file_path = os.path.join(*build.build_output_path() + [filename])
    if not os.path.isfile(data_file_path):
        abort(404)

    def loader():
        """
        Generator to stream the log file
        """
        with open(data_file_path, 'rb') as handle:
            while True:
                data = handle.read(1024)
                yield data

                is_live_log = (build.state == 'running' and filename
                               == "%s.log" % build.build_stage_slugs[-1])
                if is_live_log:
                    select.select((handle, ), (), (), 2)
                    build.load()

                elif len(data) == 0:
                    return

    mimetype, _ = mimetypes.guess_type(filename)
    if mimetype is None:
        mimetype = 'application/octet-stream'

    return Response(loader(), mimetype=mimetype)
Example #13
def build_view(job_slug, build_slug):
    """
    View to display a build
    """
    job = Job(slug=job_slug)
    build = Build(job=job, slug=build_slug)

    return render_template('build.html', build=build)
Example #14
def build_output_json(job_slug, build_slug):
    """
    View to download some build info in JSON
    """
    job = Job(slug=job_slug)
    build = Build(job=job, slug=build_slug)

    return Response(json.dumps(build.as_dict(), cls=DateTimeEncoder),
                    mimetype='application/json')
Example #15
def job_view(slug):
    """
    View to display a job
    """
    job = Job(slug)
    request_fill(
        job,
        ('name', 'repo', 'github_secret', 'hipchat_api_token', 'hipchat_room'))

    return render_template('job.html', job=job)
Example #16
def job_view(project_slug, job_slug):
    """
    View to display a job
    """
    project = Project.query.filter_by(slug=project_slug).first_or_404()
    if not (project.public or current_user.is_authenticated()):
        abort(404)

    job = Job.query.get_or_404(Job.id_from_slug(job_slug))

    return render_template('job.html', job=job)
Example #17
def get_validate_job(project_slug, job_slug):
    """ Get the job object, validate that project slug matches expected """
    job_id = Job.id_from_slug(job_slug)
    job = Job.query.get_or_404(job_id)
    if job.project.slug != project_slug:
        flask_restful.abort(404)

    if not (job.project.public or current_user.is_authenticated()):
        flask_restful.abort(404)

    return job
Example #18
def job_new_view():
    """
    View to make a new job
    """
    job = Job()
    if request.method == 'POST':
        saved = request_fill(
            job, ('slug', 'name', 'repo', 'hipchat_api_token', 'hipchat_room'))
        if saved:
            return redirect('/jobs/{job_slug}'.format(job_slug=job.slug))

    return render_template('job_edit.html', job=job, edit_operation='new')
Example #19
def job_new_view(project_slug):
    """
    View to create a new job
    """

    has_event_header = any((
        header in request.headers
        for header in (
            'X-Github-Event',
            'X-Gitlab-Event',
        )
    ))
    if not has_event_header:
        abort(400)

    project = Project.query.filter_by(slug=project_slug).first_or_404()
    job = Job(project=project, repo_fs=project.repo_fs)

    if 'X-Github-Event' in request.headers:
        job_new_github(project, job)
    elif 'X-Gitlab-Event' in request.headers:
        job_new_gitlab(project, job)

    try:
        DB.session.add(job)
        DB.session.commit()
        job.queue()

        job_url = url_for('job_view',
                          project_slug=project_slug,
                          job_slug=job.slug)
        return job_url, 201

    except ValidationError as ex:
        rollbar.report_exc_info()
        logging.exception("Event hook error")
        return json.dumps({
            'errors': ex.messages,
        }), 400
Example #20
def run_build_async(job_slug, build_slug):
    """
    Load a build and run its private run method, forking to handle the build
    in the background
    """
    if os.fork():
        return  # parent process

    logger = logging.getLogger('dockci.build')
    try:
        with APP.app_context():
            job = Job(job_slug)
            build = Build(job=job, slug=build_slug)
            build_okay = build._run_now()  # pylint:disable=protected-access

            # Send the failure message
            if not build_okay:
                recipients = []
                if build.git_author_email:
                    recipients.append(
                        '%s <%s>' %
                        (build.git_author_name, build.git_author_email))
                if build.git_committer_email:
                    recipients.append(
                        '%s <%s>' %
                        (build.git_committer_name, build.git_committer_email))

                if recipients:
                    email = Message(
                        recipients=recipients,
                        subject="DockCI - {job_name} {build_result}ed".format(
                            job_name=job.name,
                            build_result=build.result,
                        ),
                    )
                    send_mail(email)

            # Send a HipChat notification
            if job.hipchat_api_token != '' and job.hipchat_room != '':
                hipchat = HipChat(apitoken=job.hipchat_api_token,
                                  room=job.hipchat_room)
                hipchat.message("DockCI - {name} Build {id}: {result}".format(
                    name=job.name,
                    id=build.create_ts,
                    result=build.result,
                ))

    except Exception:  # pylint:disable=broad-except
        logger.exception("Something went wrong in the build worker")
Example #21
def job_view(slug):
    """
    View to display a job
    """
    job = Job(slug)
    if not job.exists():
        abort(404)

    request_fill(job, ('name', 'repo', 'github_secret',
                       'hipchat_api_token', 'hipchat_room'))

    page_size = int(request.args.get('page_size', 20))
    page_offset = int(request.args.get('page_offset', 0))
    versioned = 'versioned' in request.args

    if versioned:
        builds = list(job.filtered_builds(passed=True, versioned=True))
    else:
        builds = job.builds

    prev_page_offset = max(page_offset - page_size, 0)
    if page_offset < 1:
        prev_page_offset = None

    next_page_offset = page_offset + page_size
    if next_page_offset > len(builds):
        next_page_offset = None

    builds = builds[page_offset:page_offset + page_size]
    return render_template('job.html',
                           job=job,
                           builds=builds,
                           versioned=versioned,
                           prev_page_offset=prev_page_offset,
                           next_page_offset=next_page_offset,
                           page_size=page_size)
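
The offset bookkeeping in this view is easy to get wrong, so here is a small standalone sketch (an illustration, not DockCI code) of the same prev/next arithmetic, with the boundary behaviour spelled out:

def page_offsets(page_offset, page_size, total):
    """Mirror job_view's prev/next computation over a list of `total` builds."""
    prev_offset = None if page_offset < 1 else max(page_offset - page_size, 0)
    next_offset = page_offset + page_size
    if next_offset > total:  # past the end: no further page
        next_offset = None
    return prev_offset, next_offset

# page_offsets(0, 20, 45)  -> (None, 20)
# page_offsets(20, 20, 45) -> (0, 40)
# page_offsets(40, 20, 45) -> (20, None)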
Example #22
def check_output(project_slug, job_slug, filename):
    """ Ensure the job exists, and that the path is not dangerous """
    project = Project.query.filter_by(slug=project_slug).first_or_404()
    if not (project.public or current_user.is_authenticated()):
        abort(404)

    job = Job.query.get_or_404(Job.id_from_slug(job_slug))

    job_output_path = job.job_output_path()
    data_file_path = job_output_path.join(filename)

    # Ensure no security issues opening path above our output dir
    if not path_contained(job_output_path, data_file_path):
        abort(404)

    if not data_file_path.check(file=True):
        abort(404)

    return data_file_path
Example #23
def filter_jobs_by_request(project):
    """ Get all jobs for a project, filtered by some request parameters """
    filter_args = {}
    for filter_name in ('passed', 'versioned', 'completed'):
        try:
            value = request.values[filter_name]
            if value == '':  # Acting as a switch
                filter_args[filter_name] = True
            else:
                filter_args[filter_name] = str2bool(value)

        except KeyError:
            pass

    for filter_name in ('branch', 'tag', 'commit'):
        try:
            filter_args[filter_name] = request.values[filter_name]
        except KeyError:
            pass

    return Job.filtered_query(
        query=project.jobs.order_by(sqlalchemy.desc(Job.create_ts)),
        **filter_args
    )
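
The empty-string-as-switch behaviour above is a little subtle, so here is a standalone sketch of the same parsing logic outside Flask. The str2bool below is a hypothetical stand-in for the DockCI helper, which is not shown in these examples:

def str2bool(value):
    """Hypothetical stand-in for DockCI's helper: parse common truthy strings."""
    return value.lower() in ('1', 'true', 'yes', 'on')

def parse_filters(values):
    """Build filter kwargs the way filter_jobs_by_request does."""
    filter_args = {}
    for name in ('passed', 'versioned', 'completed'):
        if name in values:
            # An empty value acts as a switch, i.e. ?versioned= means True
            filter_args[name] = (True if values[name] == ''
                                 else str2bool(values[name]))
    for name in ('branch', 'tag', 'commit'):
        if name in values:
            filter_args[name] = values[name]
    return filter_args

# parse_filters({'versioned': '', 'passed': 'false', 'branch': 'master'})
# -> {'versioned': True, 'passed': False, 'branch': 'master'}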
Example #24
class Build(Model):  # pylint:disable=too-many-instance-attributes
    """
    An individual job build, and result
    """
    def __init__(self, job=None, slug=None):
        super(Build, self).__init__()

        assert job is not None, "Job is given"

        self.job = job
        self.job_slug = job.slug

        if slug:
            self.slug = slug

    slug = OnAccess(lambda _: hex(int(datetime.now().timestamp() * 10000))[2:])
    previous_build_slug = OnAccess(lambda _: None)
    job = OnAccess(lambda self: Job(self.job_slug))
    job_slug = OnAccess(lambda self: self.job.slug)  # TODO infinite loop
    create_ts = LoadOnAccess(generate=lambda _: datetime.now())
    start_ts = LoadOnAccess(default=lambda _: None)
    complete_ts = LoadOnAccess(default=lambda _: None)
    result = LoadOnAccess(default=lambda _: None)
    repo = LoadOnAccess(generate=lambda self: self.job.repo)
    commit = LoadOnAccess(default=lambda _: None)
    version = LoadOnAccess(default=lambda _: None)
    image_id = LoadOnAccess(default=lambda _: None)
    container_id = LoadOnAccess(default=lambda _: None)
    exit_code = LoadOnAccess(default=lambda _: None)
    build_stage_slugs = LoadOnAccess(default=lambda _: [])
    build_stages = OnAccess(
        lambda self:
        [BuildStage(slug=slug, build=self) for slug in self.build_stage_slugs])
    git_author_name = LoadOnAccess(default=lambda _: None)
    git_author_email = LoadOnAccess(default=lambda _: None)
    git_committer_name = LoadOnAccess(
        default=lambda self: self.git_author_name)
    git_committer_email = LoadOnAccess(
        default=lambda self: self.git_author_email)
    # pylint:disable=unnecessary-lambda
    build_config = OnAccess(lambda self: BuildConfig(self))

    _provisioned_containers = []

    def validate(self):
        with self.parent_validation(Build):
            errors = []

            if not self.job:
                errors.append("Parent job not given")
            if self.commit and not is_git_hash(self.commit):
                errors.append("Invalid git commit hash")
            if self.image_id and not is_docker_id(self.image_id):
                errors.append("Invalid Docker image ID")
            if self.container_id and not is_docker_id(self.container_id):
                errors.append("Invalid Docker container ID")

            if errors:
                raise ValidationError(errors)

        return True

    @property
    def state(self):
        """
        Current state that the build is in
        """
        if self.result is not None:
            return self.result
        elif self.build_stages:
            return 'running'  # TODO check if running or dead
        else:
            return 'queued'  # TODO check if queued or queue fail

    _docker_client = None

    @property
    def docker_client(self):
        """
        Get the cached (or new) Docker Client object being used for this build
        """
        if not self._docker_client:
            if CONFIG.docker_use_env_vars:
                docker_client_args = kwargs_from_env()
            else:
                docker_client_args = {'base_url': CONFIG.docker_host}

            self._docker_client = docker.Client(**docker_client_args)

        return self._docker_client

    @property
    def build_output_details(self):
        """
        Details for build output artifacts
        """
        # pylint:disable=no-member
        output_files = ((name,
                         os.path.join(*self.build_output_path() +
                                      ['%s.tar' % name]))
                        for name in self.build_config.build_output.keys())
        return {
            name: {
                'size':
                bytes_human_readable(os.path.getsize(path)),
                'link':
                url_for(
                    'build_output_view',
                    job_slug=self.job_slug,
                    build_slug=self.slug,
                    filename='%s.tar' % name,
                ),
            }
            for name, path in output_files if os.path.isfile(path)
        }

    @property
    def docker_image_name(self):
        """
        Get the docker image name, including repository where necessary
        """
        if CONFIG.docker_use_registry:
            return '{host}/{name}'.format(host=CONFIG.docker_registry_host,
                                          name=self.job_slug)

        return self.job_slug

    @property
    def docker_full_name(self):
        """
        Get the full name of the docker image, including tag, and repository
        where necessary
        """
        if self.version:
            return '{name}:{tag}'.format(name=self.docker_image_name,
                                         tag=self.version)

        return self.docker_image_name

    @property
    def is_stable_release(self):
        """
        Check if this is a successfully run, versioned build
        """
        return self.result == 'success' and self.version is not None

    def data_file_path(self):
        # Add the job name before the build slug in the path
        data_file_path = super(Build, self).data_file_path()
        data_file_path.insert(-1, self.job_slug)
        return data_file_path

    def build_output_path(self):
        """
        Directory for any build output data
        """
        return self.data_file_path()[:-1] + ['%s_output' % self.slug]

    def queue(self):
        """
        Add the build to the queue
        """
        if self.start_ts:
            raise AlreadyRunError(self)

        # TODO fix and reenable pylint check for cyclic-import
        from dockci.workers import run_build_async
        run_build_async(self.job_slug, self.slug)

    def _run_now(self):
        """
        Worker func that performs the build
        """
        self.start_ts = datetime.now()
        self.save()

        try:
            with tempfile.TemporaryDirectory() as workdir:
                pre_build = (stage() for stage in (
                    lambda: self._run_prep_workdir(workdir),
                    lambda: self._run_git_info(workdir),
                    lambda: self._run_tag_version(workdir),
                    lambda: self._run_provision(workdir),
                    lambda: self._run_build(workdir),
                ))
                if not all(pre_build):
                    self.result = 'error'
                    return False

                if not self._run_test():
                    self.result = 'fail'
                    return False

                # We should fail the build here because if this is a versioned
                # build, we can't rebuild it
                if not self._run_push():
                    self.result = 'error'
                    return False

                self.result = 'success'
                self.save()

                # Failing this doesn't indicate build failure
                # TODO what kind of a failure would this not working be?
                self._run_fetch_output()

            return True
        except Exception:  # pylint:disable=broad-except
            self.result = 'error'
            self._error_stage('error')

            return False

        finally:
            try:
                self._run_cleanup()

            except Exception:  # pylint:disable=broad-except
                self._error_stage('cleanup_error')

            self.complete_ts = datetime.now()
            self.save()

    def _run_prep_workdir(self, workdir):
        """
        Clone and checkout the build
        """
        stage = self._stage('git_prepare',
                            workdir=workdir,
                            cmd_args=(
                                ['git', 'clone', self.repo, workdir],
                                ['git', 'checkout', self.commit],
                            ))
        result = stage.returncode == 0

        # check for, and load build config
        build_config_file = os.path.join(workdir, BuildConfig.slug)
        if os.path.isfile(build_config_file):
            # pylint:disable=no-member
            self.build_config.load(data_file=build_config_file)
            self.build_config.save()

        return result

    def _run_git_info(self, workdir):
        """
        Get info about the current commit from git
        """
        def runnable(handle):
            """
            Execute git to retrieve info
            """
            def run_proc(*args):
                """
                Run, and wait for a process with default args
                """
                proc = subprocess.Popen(
                    args,
                    stdout=subprocess.PIPE,
                    stderr=handle,
                    cwd=workdir,
                )
                proc.wait()
                return proc

            largest_returncode = 0
            properties_empty = True

            properties = {
                'Author name': ('git_author_name', '%an'),
                'Author email': ('git_author_email', '%ae'),
                'Committer name': ('git_committer_name', '%cn'),
                'Committer email': ('git_committer_email', '%ce'),
            }
            for display_name, (attr_name, format_string) in properties.items():
                proc = run_proc('git', 'show', '-s',
                                '--format=format:%s' % format_string, 'HEAD')

                largest_returncode = max(largest_returncode, proc.returncode)
                value = proc.stdout.read().decode().strip()

                if value != '' and proc.returncode == 0:
                    setattr(self, attr_name, value)
                    properties_empty = False
                    handle.write(
                        ("%s is %s\n" % (display_name, value)).encode())

            ancestor_build = self.job.latest_build_ancestor(
                workdir, self.commit)
            if ancestor_build:
                properties_empty = False
                handle.write(
                    ("Ancestor build is %s\n" % ancestor_build.slug).encode())

            if properties_empty:
                handle.write("No information about the git commit could be "
                             "derived\n".encode())

            else:
                self.save()

            return largest_returncode

        stage = self._stage('git_info', workdir=workdir, runnable=runnable)
        return stage.returncode == 0

    def _run_tag_version(self, workdir):
        """
        Try and add a version to the build, based on git tag
        """
        stage = self._stage(
            'git_tag',
            workdir=workdir,
            cmd_args=['git', 'describe', '--tags', '--exact-match'])
        if not stage.returncode == 0:
            # TODO remove spoofed return
            # (except that --exact-match legitimately returns 128 if no tag)
            return True  # stage result is irrelevant

        try:
            # TODO opening file to get this is kinda awful
            data_file_path = os.path.join(*stage.data_file_path())
            with open(data_file_path, 'r') as handle:
                line = handle.readline().strip()
                if line:
                    self.version = line
                    self.save()

        except KeyError:
            pass

        # TODO don't spoof the return; just ignore output elsewhere
        return True  # stage result is irrelevant

    def _run_provision(self, workdir):
        """
        Provision the services that are required for this build
        """
        def runnable(handle):
            """
            Resolve jobs and start services
            """
            all_okay = True
            # pylint:disable=no-member
            for job_slug, service_config in self.build_config.services.items():
                service_job = Job(job_slug)
                if not service_job.exists():
                    handle.write(
                        ("No job found matching %s\n" % job_slug).encode())
                    all_okay = False
                    continue

                service_build = service_job.latest_build(passed=True,
                                                         versioned=True)
                if not service_build:
                    handle.write(
                        ("No successful, versioned build for %s - %s\n" %
                         (job_slug, service_job.name)).encode())
                    all_okay = False
                    continue

                handle.write(
                    ("%sStarting service %s - %s %s" %
                     ("" if all_okay else "NOT ", job_slug, service_job.name,
                      service_build.version)).encode())

                try:
                    service_kwargs = {
                        key: value
                        for key, value in service_config.items()
                        if key in ('command', 'environment')
                    }
                    service_container = self.docker_client.create_container(
                        image=service_build.image_id, **service_kwargs)
                    self.docker_client.start(service_container['Id'])

                    # Store the provisioning info
                    self._provisioned_containers.append({
                        'job_slug':
                        job_slug,
                        'config':
                        service_config,
                        'id':
                        service_container['Id']
                    })
                    handle.write("... STARTED!\n".encode())

                except docker.errors.APIError as ex:
                    handle.write(("... FAILED!\n    %s" %
                                  ex.explanation.decode()).encode())
                    all_okay = False

            return all_okay

        return self._stage('docker_provision',
                           workdir=workdir,
                           runnable=runnable).returncode

    def _run_build(self, workdir):
        """
        Tell the Docker host to build
        """
        def on_done(line):
            """
            Check the final line for success, and image id
            """
            if line:
                if isinstance(line, bytes):
                    line = line.decode()

                line_data = json.loads(line)
                re_match = re.search(r'Successfully built ([0-9a-f]+)',
                                     line_data.get('stream', ''))
                if re_match:
                    self.image_id = re_match.group(1)
                    return True

            return False

        tag = self.docker_full_name
        if self.version is not None:
            existing_image = None
            for image in self.docker_client.images(name=self.job_slug, ):
                if tag in image['RepoTags']:
                    existing_image = image
                    break

            if existing_image is not None:
                # Do not override existing builds of _versioned_ tagged code
                if is_semantic(self.version):
                    raise AlreadyBuiltError('Version %s of %s already built' %
                                            (
                                                self.version,
                                                self.job_slug,
                                            ))
                # Delete existing builds of _non-versioned_ tagged code
                # (allows replacement of images)
                else:
                    try:
                        self.docker_client.remove_image(
                            image=existing_image['Id'], )
                    except docker.errors.APIError:
                        # TODO handle deletion of containers here
                        pass

        # Don't use the docker caches if a version tag is defined
        no_cache = (self.version is not None)

        return self._run_docker(
            'build',
            # saved stream for debugging
            # lambda: open('docker_build_stream', 'r'),
            lambda: self.docker_client.build(
                path=workdir, tag=tag, nocache=no_cache, rm=True, stream=True),
            on_done=on_done,
        )

    def _run_test(self):
        """
        Tell the Docker host to run the CI command
        """
        def start_container():
            """
            Create a container instance, attach to its outputs and then start
            it, returning the output stream
            """
            container_details = self.docker_client.create_container(
                self.image_id, 'ci')
            self.container_id = container_details['Id']
            self.save()

            def link_tuple(service_info):
                """
                Turn our provisioned service info dict into an alias string for
                Docker
                """
                if 'name' not in service_info:
                    service_info['name'] = \
                        self.docker_client.inspect_container(
                            service_info['id']
                        )['Name'][1:]  # slice to remove the / from start

                if 'alias' not in service_info:
                    if isinstance(service_info['config'], dict):
                        service_info['alias'] = service_info['config'].get(
                            'alias', service_info['job_slug'])

                    else:
                        service_info['alias'] = service_info['job_slug']

                return (service_info['name'], service_info['alias'])

            stream = self.docker_client.attach(self.container_id, stream=True)
            self.docker_client.start(
                self.container_id,
                links=[
                    link_tuple(service_info)
                    for service_info in self._provisioned_containers
                ])

            return stream

        def on_done(_):
            """
            Check container exit code and return True on 0, or False otherwise
            """
            details = self.docker_client.inspect_container(self.container_id)
            self.exit_code = details['State']['ExitCode']
            self.save()
            return self.exit_code == 0

        return self._run_docker(
            'test',
            start_container,
            on_done=on_done,
        )

    def _run_push(self):
        """
        Push the built container to the Docker registry, if versioned and
        configured
        """
        def push_container():
            """
            Perform the actual Docker push operation
            """
            return self.docker_client.push(
                self.docker_image_name,
                tag=self.version,
                stream=True,
                insecure_registry=CONFIG.docker_registry_insecure,
            )

        if self.version and CONFIG.docker_use_registry:
            return self._run_docker('push', push_container)

        else:
            return True

    def _run_docker(self,
                    docker_stage_slug,
                    docker_command,
                    on_line=None,
                    on_done=None):
        """
        Wrapper around a common Docker command process. Sends output lines to
        a file, and optionally uses callbacks to notify on each line and on
        completion
        """
        def runnable(handle):
            """
            Perform the Docker command given
            """
            output = docker_command()

            line = ''
            for line in output:
                if isinstance(line, bytes):
                    handle.write(line)
                else:
                    handle.write(line.encode())

                handle.flush()

                if on_line:
                    on_line(line)

            if on_done:
                return on_done(line)

            elif line:
                return True

            return False

        return self._stage('docker_%s' % docker_stage_slug,
                           runnable=runnable).returncode

    def _run_fetch_output(self):
        """
        Fetches any output specified in build config
        """
        def runnable(handle):
            """
            Fetch/save the files
            """
            # pylint:disable=no-member
            mappings = self.build_config.build_output.items()
            for key, docker_fn in mappings:
                handle.write(
                    ("Fetching %s from '%s'..." % (key, docker_fn)).encode())
                resp = self.docker_client.copy(self.container_id, docker_fn)

                if 200 <= resp.status < 300:
                    output_path = os.path.join(*self.build_output_path() +
                                               ['%s.tar' % key])
                    with open(output_path, 'wb') as output_fh:
                        bytes_written = output_fh.write(resp.data)

                    handle.write(
                        (" DONE! %s total\n" %
                         (bytes_human_readable(bytes_written))).encode(), )

                else:
                    handle.write((" FAIL! HTTP status %d: %s\n" %
                                  (resp.status, resp.reason)).encode(), )

            # Output something on no output
            if not mappings:
                handle.write("No output files to fetch".encode())

        return self._stage('docker_fetch', runnable=runnable).returncode

    def _run_cleanup(self):
        """
        Clean up after the build/test
        """
        def cleanup_context(handle, object_type, object_id):
            """
            Get a stream_write_status context manager with messages set
            correctly
            """
            return stream_write_status(
                handle,
                "Cleaning up %s '%s'..." % (object_type, object_id),
                "DONE!",
                "FAILED!",
            )

        def runnable(handle):
            """
            Do the image/container cleanup
            """
            if self.container_id:
                with cleanup_context(handle, 'container', self.container_id):
                    self.docker_client.remove_container(self.container_id)

            if self._provisioned_containers:
                for service_info in self._provisioned_containers:
                    ctx = cleanup_context(handle, 'provisioned container',
                                          service_info['id'])
                    with ctx:
                        self.docker_client.remove_container(
                            service_info['id'],
                            force=True,
                        )

            # Only clean up the image if this is a non-versioned build
            if self.version is None or self.result in ('error', 'fail'):
                if self.image_id:
                    with cleanup_context(handle, 'image', self.image_id):
                        self.docker_client.remove_image(self.image_id)

        return self._stage('cleanup', runnable)

    def _error_stage(self, stage_slug):
        """
        Create an error stage and add stack trace for it
        """
        self.build_stage_slugs.append(stage_slug)  # pylint:disable=no-member
        self.save()

        import traceback
        try:
            BuildStage(
                stage_slug, self, lambda handle: handle.write(
                    bytes(traceback.format_exc(), 'utf8'))).run()
        except Exception:  # pylint:disable=broad-except
            print(traceback.format_exc())

    def _stage(self, stage_slug, runnable=None, workdir=None, cmd_args=None):
        """
        Create and save a new build stage, running the given args and saving
        its output
        """
        if cmd_args:
            stage = BuildStage.from_command(slug=stage_slug,
                                            build=self,
                                            cwd=workdir,
                                            cmd_args=cmd_args)
        else:
            stage = BuildStage(slug=stage_slug, build=self, runnable=runnable)

        self.build_stage_slugs.append(stage_slug)  # pylint:disable=no-member
        self.save()
        stage.run()
        return stage
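
To make the naming rules in docker_image_name and docker_full_name easier to follow, here is a minimal standalone sketch of the same logic (the registry host below is just an example value, not DockCI configuration):

def docker_full_name(job_slug, version=None, use_registry=False,
                     registry_host='registry.example.com'):
    """Compose the image name the way Build's docker_* properties do."""
    name = ('{host}/{name}'.format(host=registry_host, name=job_slug)
            if use_registry else job_slug)
    if version:
        return '{name}:{tag}'.format(name=name, tag=version)
    return name

# docker_full_name('my-app')                             -> 'my-app'
# docker_full_name('my-app', '1.2.0', use_registry=True)
# -> 'registry.example.com/my-app:1.2.0'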
Example #25
def build_new_view(job_slug):
    """
    View to create a new build
    """
    job = Job(slug=job_slug)

    if request.method == 'POST':
        build = Build(job=job)
        build.repo = job.repo

        build_url = url_for('build_view',
                            job_slug=job_slug,
                            build_slug=build.slug)

        if 'X-Github-Event' in request.headers:
            if not job.github_secret:
                logging.warn("GitHub webhook secret not setup")
                abort(403)

            if not is_valid_github(job.github_secret):
                logging.warn("Invalid GitHub payload")
                abort(403)

            if request.headers['X-Github-Event'] == 'push':
                push_data = request.json
                build.commit = push_data['head_commit']['id']

            else:
                logging.debug("Unknown GitHub hook '%s'",
                              request.headers['X-Github-Event'])
                abort(501)

            try:
                build.save()
                build.queue()

                return build_url, 201

            except ValidationError as ex:
                logging.exception("GitHub hook error")
                return json.dumps({
                    'errors': ex.messages,
                }), 400

        else:
            build.commit = request.form['commit']

            if not re.match(r'[a-fA-F0-9]{1,40}$', request.form['commit']):
                flash(u"Invalid git commit hash", 'danger')
                return render_template('build_new.html', build=build)

            try:
                build.save()
                build.queue()

                flash(u"Build queued", 'success')
                return redirect(build_url, 303)

            except ValidationError as ex:
                flash(ex.messages, 'danger')

    return render_template('build_new.html', build=Build(job=job))