Example #1
    def test_empty_cobertura_file(self):
        jobstep = JobStep(
            id=uuid.uuid4(),
            job=Job(id=uuid.uuid4(), project_id=uuid.uuid4())
        )

        fp = StringIO('')

        handler = CoverageHandler(jobstep)
        results = handler.get_coverage(fp)

        # most importantly, it shouldn't crash
        assert len(results) == 0
Example #2
    def _sync_artifact_as_coverage(self, artifact):
        jobstep = artifact.step
        resp = self.fetch_artifact(jobstep, artifact.data)

        # TODO(dcramer): requests doesn't seem to provide a non-binary file-like
        # API, so we're stuffing it into StringIO
        try:
            handler = CoverageHandler(jobstep)
            handler.process(StringIO(resp.content))
        except Exception:
            db.session.rollback()
            self.logger.exception(
                'Failed to sync test results for job step %s', jobstep.id)
        else:
            db.session.commit()
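
The TODO above refers to requests exposing resp.content only as a byte string. Purely to illustrate that note (this is not a change the project made), the binary-safe counterpart of the StringIO wrapping would be:

    import io

    # resp.content is bytes, so BytesIO yields a binary file-like object
    fp = io.BytesIO(resp.content)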
Example #3
    def test_process_diff(self, generate_diff):
        project = self.create_project()
        build = self.create_build(project)
        job = self.create_job(build)
        jobphase = self.create_jobphase(job)
        jobstep = self.create_jobstep(jobphase)

        generate_diff.return_value = SAMPLE_DIFF

        handler = CoverageHandler(jobstep)

        lines_by_file = handler.get_processed_diff()
        # Just check the keys and one detail (there are other unittests that validate this already)
        assert set(lines_by_file) == {'ci/server-collect', 'ci/run_with_retries.py', 'ci/not-real'}
        assert lines_by_file['ci/not-real'] == {1}

        # This should be repeatable
        assert handler.get_processed_diff() == lines_by_file
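
For context, get_processed_diff() maps each file touched by the diff to the set of line numbers that are new or changed in it, which is what the assertions above check. A minimal sketch of that transformation, assuming a standard unified diff as input (this is not the project's implementation, and the helper name is made up):

    import re

    def lines_by_file_from_diff(diff_text):
        """Map each file in a unified diff to the set of line numbers added
        or modified in the new version of that file."""
        result = {}
        current_file = None
        new_lineno = None
        for line in diff_text.splitlines():
            if line.startswith('+++ '):
                path = line[4:].strip()
                # strip the conventional "b/" prefix from the new-file path
                current_file = path[2:] if path.startswith('b/') else path
                result.setdefault(current_file, set())
                new_lineno = None
            elif line.startswith('@@'):
                # hunk header: @@ -old_start,old_len +new_start,new_len @@
                new_lineno = int(re.search(r'\+(\d+)', line).group(1))
            elif current_file is not None and new_lineno is not None:
                if line.startswith('+'):
                    result[current_file].add(new_lineno)
                    new_lineno += 1
                elif not line.startswith('-'):
                    # context line: advances the new-file line counter
                    new_lineno += 1
        return result

With that shape in mind, the assertions read naturally: 'ci/not-real' contributes exactly one new line, line 1, and repeated calls return the same mapping.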
Example #4
    def test_jacoco_result_generation(self):
        jobstep = JobStep(
            id=uuid.uuid4(),
            job=Job(id=uuid.uuid4(), project_id=uuid.uuid4())
        )

        handler = CoverageHandler(jobstep)
        with open(os.path.join(os.path.dirname(__file__), 'fixtures', 'jacoco-coverage.xml')) as fp:
            results = handler.get_coverage(fp)

        assert len(results) == 1

        r1 = results[0]
        assert type(r1) == FileCoverage
        assert r1.job_id == jobstep.job.id
        assert r1.project_id == jobstep.job.project_id
        assert r1.filename == 'src/main/java/com/dropbox/apx/onyx/api/resource/stats/StatsResource.java'
        assert r1.data == 'NNNNCCCCNNCCUU'
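
The data string asserted here is a per-line coverage encoding: position N describes line N of the source file, with 'C' for covered, 'U' for uncovered, and 'N' for lines the report does not mention, so 'NNNNCCCCNNCCUU' says lines 5-8 and 11-12 ran while lines 13-14 did not. A minimal sketch of folding per-line hit counts into such a string (an illustration, not the handler's code):

    def build_coverage_string(line_hits):
        """line_hits: dict of 1-based line number -> hit count, for the
        lines that appear in the coverage report."""
        if not line_hits:
            return ''
        data = []
        for lineno in range(1, max(line_hits) + 1):
            if lineno not in line_hits:
                data.append('N')   # line not mentioned in the report
            elif line_hits[lineno] > 0:
                data.append('C')   # executed at least once
            else:
                data.append('U')   # measurable but never executed
        return ''.join(data)

    # e.g. build_coverage_string({1: 3, 2: 0, 4: 1}) == 'CUNC'

The Cobertura example below adds one wrinkle noted in its comment: a line whose branch coverage is only partial is also recorded as 'U'.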
Example #5
    def test_cobertura_result_generation(self):
        jobstep = JobStep(
            id=uuid.uuid4(),
            job=Job(id=uuid.uuid4(), project_id=uuid.uuid4())
        )

        fp = StringIO(SAMPLE_COVERAGE)

        handler = CoverageHandler(jobstep)
        results = handler.get_coverage(fp)

        assert len(results) == 2

        r1 = results[0]
        assert type(r1) == FileCoverage
        assert r1.job_id == jobstep.job.id
        assert r1.project_id == jobstep.job.project_id
        assert r1.filename == 'setup.py'
        assert r1.data == 'NUNNNNNNNNNUCCNU'
        r2 = results[1]
        assert type(r2) == FileCoverage
        assert r2.job_id == jobstep.job.id
        assert r2.project_id == jobstep.job.project_id
        assert r2.data == 'CUCNNNU'  # partial branch coverage is considered uncovered
Example #6
    def test_process(self, process_diff, get_coverage):
        project = self.create_project()
        build = self.create_build(project)
        job = self.create_job(build)
        jobphase = self.create_jobphase(job)
        jobstep = self.create_jobstep(jobphase)

        handler = CoverageHandler(jobstep)

        process_diff.return_value = {
            'setup.py': set([1, 2, 3, 4, 5]),
        }

        # now try with some duplicate coverage
        get_coverage.return_value = [FileCoverage(
            job_id=job.id,
            step_id=jobstep.id,
            project_id=project.id,
            filename='setup.py',
            data='CUNNNNCCNNNUNNNUUUUUU'
        )]

        fp = StringIO()
        handler.process(fp)
        get_coverage.assert_called_once_with(fp)

        get_coverage.reset_mock()

        get_coverage.return_value = [FileCoverage(
            job_id=job.id,
            step_id=jobstep.id,
            project_id=project.id,
            filename='setup.py',
            data='NUUNNNNNNNNUCCNU'
        )]

        fp = StringIO()
        handler.process(fp)
        get_coverage.assert_called_once_with(fp)

        file_cov = list(FileCoverage.query.filter(
            FileCoverage.job_id == job.id,
        ))
        assert len(file_cov) == 1
        assert file_cov[0].filename == 'setup.py'
        assert file_cov[0].data == 'CUUNNNCCNNNUCCNUUUUUU'
        assert file_cov[0].lines_covered == 5
        assert file_cov[0].lines_uncovered == 9
        assert file_cov[0].diff_lines_covered == 1
        assert file_cov[0].diff_lines_uncovered == 2
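
The second process() call above reports overlapping coverage for the same setup.py, and the single FileCoverage row that survives reflects a merge of the two strings. A sketch of merge logic consistent with the asserted 'CUUNNNCCNNNUCCNUUUUUU' (an assumption about the behavior, not the handler's actual code):

    def merge_coverage(old, new):
        """Combine two C/U/N strings for the same file: a line is covered if
        either run covered it, uncovered if either run measured it without
        covering it, and unmeasured otherwise."""
        length = max(len(old), len(new))
        old = old.ljust(length, 'N')
        new = new.ljust(length, 'N')
        merged = []
        for a, b in zip(old, new):
            if 'C' in (a, b):
                merged.append('C')
            elif 'U' in (a, b):
                merged.append('U')
            else:
                merged.append('N')
        return ''.join(merged)

    # merge_coverage('CUNNNNCCNNNUNNNUUUUUU', 'NUUNNNNNNNNUCCNU')
    # == 'CUUNNNCCNNNUCCNUUUUUU'

The remaining assertions then fall out of the merged string: five 'C's and nine 'U's overall, and within the diff lines for setup.py (1-5, per process_diff) one 'C' and two 'U's.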
Example #7
    def sync_artifact(self, artifact, skip_checks=False):
        if not skip_checks:
            if artifact.name.endswith(".log") and not self.sync_log_artifacts:
                return

            elif XunitHandler.can_process(artifact.name) and not self.sync_xunit_artifacts:
                return

            elif CoverageHandler.can_process(artifact.name) and not self.sync_coverage_artifacts:
                return

            elif not self.sync_file_artifacts:
                return

        for cls in [XunitHandler, CoverageHandler, ManifestJsonHandler]:
            if cls.can_process(artifact.name):
                self._sync_artifact_as_file(artifact, handler_cls=cls)
                break
        else:
            if artifact.name.endswith(".log"):
                self._sync_artifact_as_log(artifact)

            else:
                self._sync_artifact_as_file(artifact)
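
sync_artifact dispatches purely on the artifact's file name: each handler advertises what it can process, the first match wins, and .log artifacts fall through to the log path. A sketch of the kind of class-level check can_process() implies (the pattern list here is invented for illustration; each real handler declares its own):

    import fnmatch

    class ExampleHandler(object):
        # hypothetical file-name patterns this handler would accept
        FILENAMES = ('coverage.xml', '*.coverage.xml')

        @classmethod
        def can_process(cls, filename):
            # match against the artifact's base name, e.g. "build/coverage.xml"
            basename = filename.rsplit('/', 1)[-1]
            return any(fnmatch.fnmatch(basename, pattern)
                       for pattern in cls.FILENAMES)

    # ExampleHandler.can_process('artifacts/foo.coverage.xml') -> True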