Example #1
    def _handle_generic_artifact(self, jobstep, artifact, skip_checks=False):
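        # Unless skip_checks is set, skip log, xunit and coverage artifacts
        # that this builder is not configured to sync.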
        if not skip_checks:
            if artifact['fileName'].endswith('.log') and not self.sync_log_artifacts:
                return

            if artifact['fileName'].endswith(XUNIT_FILENAMES) and not self.sync_xunit_artifacts:
                return

            if artifact['fileName'].endswith(COVERAGE_FILENAMES) and not self.sync_coverage_artifacts:
                return

        artifact, created = get_or_create(Artifact, where={
            'step': jobstep,
            'name': artifact['fileName'],
        }, defaults={
            'project': jobstep.project,
            'job': jobstep.job,
            'data': artifact,
        })
        if not created:
            db.session.commit()

        sync_artifact.delay_if_needed(
            artifact_id=artifact.id.hex,
            task_id=artifact.id.hex,
            parent_task_id=jobstep.id.hex,
        )
Example #2
def _sync_artifacts_for_jobstep(step):
    # only generate the sync_artifact tasks for this step once
    if Task.query.filter(
            Task.parent_id == step.id,
            Task.task_name == 'sync_artifact',
    ).first():
        return

    artifacts = Artifact.query.filter(Artifact.step_id == step.id).all()

    _, buildstep = JobPlan.get_build_step_for_job(job_id=step.job_id)
    prefer_artifactstore = buildstep.prefer_artifactstore()
    artifact_manager = buildstep.get_artifact_manager(step)
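    # Let the buildstep decide which of the step's artifacts to sync;
    # prefer_artifactstore presumably breaks ties in favor of artifact
    # store copies.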
    to_sync = _get_artifacts_to_sync(artifacts, artifact_manager,
                                     prefer_artifactstore)

    # buildstep may want to check for e.g. required artifacts
    buildstep.verify_final_artifacts(step, to_sync)

    for artifact in to_sync:
        sync_artifact.delay_if_needed(
            artifact_id=artifact.id.hex,
            task_id=artifact.id.hex,
            parent_task_id=step.id.hex,
        )
Example #3
    def _handle_generic_artifact(self, jobstep, artifact, skip_checks=False):
        artifact, created = get_or_create(
            Artifact,
            where={"step": jobstep, "name": artifact["fileName"]},
            defaults={"project": jobstep.project, "job": jobstep.job, "data": artifact},
        )
        if not created:
            db.session.commit()

        sync_artifact.delay_if_needed(
            artifact_id=artifact.id.hex, task_id=artifact.id.hex, parent_task_id=jobstep.id.hex, skip_checks=skip_checks
        )
Example #4
    def post(self, step_id):
        """
        Create a new artifact with the given name.
        """
        step = JobStep.query.get(step_id)
        if step is None:
            return '', 404

        if step.result == Result.aborted:
            return '', 410

        args = self.parser.parse_args()

        artifact = Artifact(
            name=args.name,
            step_id=step.id,
            job_id=step.job_id,
            project_id=step.project_id,
        )
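        # Flush so that an artifact with this name already recorded for the
        # step surfaces as an IntegrityError instead of a duplicate row.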
        try:
            db.session.add(artifact)
            db.session.flush()
        except IntegrityError:
            db.session.rollback()
            exists = True
        else:
            exists = False

        if exists:
            # XXX(dcramer): this is really an error, but we assume it happens
            # because the artifact was already sent
            existing_msg = {"error": "An artifact with this name already exists"}
            return self.respond(existing_msg, status_code=204)

        step_id = artifact.step_id.hex
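        # Store the uploaded file under a path sharded by the first four hex
        # characters of the step id, with the artifact id and name appended.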
        artifact.file.save(
            args.artifact_file,
            '{0}/{1}/{2}_{3}'.format(
                step_id[:4], step_id[4:],
                artifact.id.hex, artifact.name
            ),
        )
        db.session.add(artifact)
        db.session.commit()

        sync_artifact.delay_if_needed(
            artifact_id=artifact.id.hex,
            task_id=artifact.id.hex,
            parent_task_id=step.id.hex,
        )

        return {'id': artifact.id.hex}, 201
Example #5
    def post(self, step_id):
        """
        Create a new artifact with the given name.
        """
        step = JobStep.query.get(step_id)
        if step is None:
            return '', 404

        if step.result == Result.aborted:
            return '', 410

        args = self.parser.parse_args()

        artifact = Artifact(
            name=args.name,
            step_id=step.id,
            job_id=step.job_id,
            project_id=step.project_id,
        )
        try:
            db.session.add(artifact)
            db.session.flush()
        except IntegrityError:
            db.session.rollback()
            exists = True
        else:
            exists = False

        if exists:
            # XXX(dcramer): this is really an error, but we assume it happens
            # because the artifact was already sent
            existing_msg = {
                "error": "An artifact with this name already exists"
            }
            return self.respond(existing_msg, status_code=204)

        step_id = artifact.step_id.hex
        artifact.file.save(
            args.artifact_file,
            '{0}/{1}/{2}_{3}'.format(step_id[:4], step_id[4:], artifact.id.hex,
                                     artifact.name),
        )
        db.session.add(artifact)
        db.session.commit()

        sync_artifact.delay_if_needed(
            artifact_id=artifact.id.hex,
            task_id=artifact.id.hex,
            parent_task_id=step.id.hex,
        )

        return {'id': artifact.id.hex}, 201
Example #6
    def _handle_generic_artifact(self, jobstep, artifact):
        artifact, created = get_or_create(Artifact, where={
            'step': jobstep,
            'name': self._get_artifact_path(artifact),
        }, defaults={
            'project': jobstep.project,
            'job': jobstep.job,
            'data': artifact,
        })
        if not created:
            db.session.commit()

        sync_artifact.delay_if_needed(
            artifact_id=artifact.id.hex,
            task_id=artifact.id.hex,
            parent_task_id=jobstep.id.hex,
        )
Example #7
def _sync_artifacts_for_jobstep(step):
    artifacts = Artifact.query.filter(Artifact.step_id == step.id).all()

    _, buildstep = JobPlan.get_build_step_for_job(job_id=step.job_id)
    prefer_artifactstore = buildstep.prefer_artifactstore()
    artifact_manager = buildstep.get_artifact_manager(step)
    to_sync = _get_artifacts_to_sync(artifacts, artifact_manager, prefer_artifactstore)

    # buildstep may want to check for e.g. required artifacts
    buildstep.verify_final_artifacts(step, to_sync)

    for artifact in to_sync:
        sync_artifact.delay_if_needed(
            artifact_id=artifact.id.hex,
            task_id=artifact.id.hex,
            parent_task_id=step.id.hex,
        )
Example #8
    def _handle_generic_artifact(self, jobstep, artifact, skip_checks=False):
        artifact, created = get_or_create(Artifact, where={
            'step': jobstep,
            'name': artifact['fileName'],
        }, defaults={
            'project': jobstep.project,
            'job': jobstep.job,
            'data': artifact,
        })
        if not created:
            db.session.commit()

        sync_artifact.delay_if_needed(
            artifact_id=artifact.id.hex,
            task_id=artifact.id.hex,
            parent_task_id=jobstep.id.hex,
            skip_checks=skip_checks,
        )
Example #10
def _sync_artifacts_for_jobstep(step):
    artifacts = Artifact.query.filter(Artifact.step_id == step.id).all()

    _, buildstep = JobPlan.get_build_step_for_job(job_id=step.job_id)
    prefer_artifactstore = buildstep.prefer_artifactstore()
    artifact_manager = buildstep.get_artifact_manager(step)
    to_sync = _get_artifacts_to_sync(artifacts, artifact_manager,
                                     prefer_artifactstore)

    # buildstep may want to check for e.g. required artifacts
    buildstep.verify_final_artifacts(step, to_sync)

    for artifact in to_sync:
        sync_artifact.delay_if_needed(
            artifact_id=artifact.id.hex,
            task_id=artifact.id.hex,
            parent_task_id=step.id.hex,
        )
Example #11
def _sync_artifacts_for_jobstep(step):
    # only generate the sync_artifact tasks for this step once
    if Task.query.filter(
        Task.parent_id == step.id,
        Task.task_name == 'sync_artifact',
    ).first():
        return

    _, buildstep = JobPlan.get_build_step_for_job(job_id=step.job_id)
    prefer_artifactstore = buildstep.prefer_artifactstore()
    artifacts = Artifact.query.filter(Artifact.step_id == step.id).all()

    to_sync = _get_artifacts_to_sync(artifacts, prefer_artifactstore)

    # buildstep may want to check for e.g. required artifacts
    buildstep.verify_final_artifacts(step, to_sync)

    for artifact in to_sync:
        sync_artifact.delay_if_needed(
            artifact_id=artifact.id.hex,
            task_id=artifact.id.hex,
            parent_task_id=step.id.hex,
        )
Example #12
    def _sync_step_from_active(self, step):
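        # Mirror the current state of the Jenkins build onto the local step:
        # node, label, status/result and phase rollups; once the build has
        # finished, also sync artifacts, test results and the console log.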
        try:
            job_name = step.data['job_name']
            build_no = step.data['build_no']
        except KeyError:
            raise UnrecoverableException('Missing Jenkins job information')

        try:
            item = self._get_response('/job/{}/{}'.format(
                job_name, build_no))
        except NotFound:
            raise UnrecoverableException('Unable to find job in Jenkins')

        # TODO(dcramer): we're doing a lot of work here that we might not
        # need to if this step was already synced previously
        node, _ = get_or_create(Node, where={
            'label': item['builtOn'],
        })

        step.node = node
        step.label = item['fullDisplayName']
        step.date_started = datetime.utcfromtimestamp(
            item['timestamp'] / 1000)

        if item['building']:
            step.status = Status.in_progress
        else:
            step.status = Status.finished
            step.result = RESULT_MAP[item['result']]
            # values['duration'] = item['duration'] or None
            step.date_finished = datetime.utcfromtimestamp(
                (item['timestamp'] + item['duration']) / 1000)

        # step.data.update({
        #     'backend': {
        #         'uri': item['url'],
        #         'label': item['fullDisplayName'],
        #     }
        # })
        db.session.add(step)

        # TODO(dcramer): we should abstract this into a sync_phase
        phase = step.phase

        if not phase.date_started:
            phase.date_started = safe_agg(
                min, (s.date_started for s in phase.steps), step.date_started)
            db.session.add(phase)

        if phase.status != step.status:
            phase.status = step.status
            db.session.add(phase)

        if step.status == Status.finished:
            phase.status = Status.finished
            phase.date_finished = safe_agg(
                max, (s.date_finished for s in phase.steps), step.date_finished)

            if any(s.result is Result.failed for s in phase.steps):
                phase.result = Result.failed
            else:
                phase.result = safe_agg(
                    max, (s.result for s in phase.steps), Result.unknown)

            db.session.add(phase)

        db.session.commit()

        if step.status != Status.finished:
            return

        # sync artifacts
        for artifact in item.get('artifacts', ()):
            artifact, created = get_or_create(Artifact, where={
                'step': step,
                'name': artifact['fileName'],
            }, defaults={
                'project': step.project,
                'job': step.job,
                'data': artifact,
            })
            sync_artifact.delay_if_needed(
                artifact_id=artifact.id.hex,
                task_id=artifact.id.hex,
                parent_task_id=step.id.hex,
            )

        # sync test results
        try:
            with db.session.begin_nested():
                self._sync_test_results(
                    step=step,
                    job_name=job_name,
                    build_no=build_no,
                )
        except Exception:
            self.logger.exception(
                'Failed to sync test results for %s #%s', job_name, build_no)
        else:
            db.session.commit()

        # sync console log
        try:
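            # _sync_log is assumed to return a falsy value once there is no
            # more console output left to fetch.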
            result = True
            while result:
                result = self._sync_log(
                    jobstep=step,
                    name=step.label,
                    job_name=job_name,
                    build_no=build_no,
                )

        except Exception:
            db.session.rollback()
            current_app.logger.exception(
                'Unable to sync console log for job step %r',
                step.id.hex)
Example #13
    def _sync_step_from_active(self, step):
        try:
            job_name = step.data['job_name']
            build_no = step.data['build_no']
        except KeyError:
            raise UnrecoverableException('Missing Jenkins job information')

        try:
            item = self._get_response('/job/{}/{}'.format(
                job_name, build_no))
        except NotFound:
            raise UnrecoverableException('Unable to find job in Jenkins')

        # TODO(dcramer): we're doing a lot of work here that we might not
        # need to if this step was already synced previously
        node, _ = get_or_create(Node, where={
            'label': item['builtOn'],
        })

        step.node = node
        step.label = item['fullDisplayName']
        step.date_started = datetime.utcfromtimestamp(
            item['timestamp'] / 1000)

        if item['building']:
            step.status = Status.in_progress
        else:
            step.status = Status.finished
            step.result = RESULT_MAP[item['result']]
            # values['duration'] = item['duration'] or None
            step.date_finished = datetime.utcfromtimestamp(
                (item['timestamp'] + item['duration']) / 1000)

        # step.data.update({
        #     'backend': {
        #         'uri': item['url'],
        #         'label': item['fullDisplayName'],
        #     }
        # })
        db.session.add(step)
        db.session.commit()

        # TODO(dcramer): we should abstract this into a sync_phase
        phase = step.phase

        if not phase.date_started:
            phase.date_started = safe_agg(
                min, (s.date_started for s in phase.steps), step.date_started)
            db.session.add(phase)

        if phase.status != step.status:
            phase.status = step.status
            db.session.add(phase)

        if step.status == Status.finished:
            phase.status = Status.finished
            phase.date_finished = safe_agg(
                max, (s.date_finished for s in phase.steps), step.date_finished)

            if any(s.result is Result.failed for s in phase.steps):
                phase.result = Result.failed
            else:
                phase.result = safe_agg(
                    max, (s.result for s in phase.steps), Result.unknown)

            db.session.add(phase)

        db.session.commit()

        if step.status != Status.finished:
            return

        # sync artifacts
        for artifact in item.get('artifacts', ()):
            artifact, created = get_or_create(Artifact, where={
                'step': step,
                'name': artifact['fileName'],
            }, defaults={
                'project': step.project,
                'job': step.job,
                'data': artifact,
            })
            db.session.commit()
            sync_artifact.delay_if_needed(
                artifact_id=artifact.id.hex,
                task_id=artifact.id.hex,
                parent_task_id=step.id.hex,
            )

        # sync test results
        try:
            with db.session.begin_nested():
                self._sync_test_results(
                    step=step,
                    job_name=job_name,
                    build_no=build_no,
                )
        except Exception:
            self.logger.exception(
                'Failed to sync test results for %s #%s', job_name, build_no)
        else:
            db.session.commit()

        # sync console log
        try:
            result = True
            while result:
                result = self._sync_log(
                    jobstep=step,
                    name=step.label,
                    job_name=job_name,
                    build_no=build_no,
                )

        except Exception:
            db.session.rollback()
            current_app.logger.exception(
                'Unable to sync console log for job step %r',
                step.id.hex)