Example #1
    def test_multiple_builds(self, get_options, post):
        get_options.return_value = {
            'phabricator.notify': '1'
        }
        project1 = self.create_project(name='Server', slug='project-slug')
        project2 = self.create_project(name='Server2', slug='project-slug2')
        self.assertEquals(post.call_count, 0)
        collection_id = uuid.uuid4()

        def create_build(result, project):
            base_source = self.create_source(project, revision_sha='1235')
            base_build = self.create_build(project, result=Result.passed,
                                           source=base_source,
                                           status=Status.finished)
            self.create_job(build=base_build)

            patch = self.create_patch()
            source = self.create_source(project, revision_sha='1235', patch=patch)
            build = self.create_build(project, result=result, target='D1', source=source, status=Status.finished, collection_id=collection_id)
            job = self.create_job(build=build)
            testcase = self.create_test(
                package='test.group.ClassName',
                name='test.group.ClassName.test_foo',
                job=job,
                duration=134,
                result=result,
                )
            return build, testcase

        build1, testcase1 = create_build(Result.failed, project1)
        build2, testcase2 = create_build(Result.passed, project2)

        build_finished_handler(build_id=build1.id.hex)

        build_link = build_uri('/projects/{0}/builds/{1}/'.format(
            build1.project.slug, build1.id.hex))
        build2_link = build_uri('/projects/{0}/builds/{1}/'.format(
            build2.project.slug, build2.id.hex))
        failure_link = build_uri('/projects/{0}/builds/{1}/tests/?result=failed'.format(
            build1.project.slug, build1.id.hex))

        test_link = build_uri('/projects/{0}/builds/{1}/jobs/{2}/tests/{3}/'.format(
            build1.project.slug,
            build1.id.hex,
            testcase1.job_id.hex,
            testcase1.id.hex
        ))
        test_desc = "[test_foo](%s)" % test_link
        expected_msg = """Server build Failed {{icon times, color=red}} ([results]({0})). There were 1 new [test failures]({1})

**New failures (1):**
|Test Name | Package|
|--|--|
|{2}|test.group.ClassName|

Server2 build Passed {{icon check, color=green}} ([results]({3}))."""

        post.assert_called_once_with('1', expected_msg.format(build_link, failure_link, test_desc, build2_link), mock.ANY)
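
Every example on this page passes `build_uri` a relative path and embeds the result as an absolute link. A minimal sketch of what such a helper might look like, assuming a Flask application and a hypothetical `BASE_URI` config key (neither is confirmed by the snippets themselves):

from flask import current_app

def build_uri(path, app=None):
    # Join the configured base URI with a relative path.
    # 'BASE_URI' is an assumed config key, used here only for illustration.
    if app is None:
        app = current_app
    return '{0}/{1}'.format(app.config['BASE_URI'].rstrip('/'), path.lstrip('/'))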
Example #2
    def test_parent_and_new_failures(self, get_options, phab, get_base_failures):
        def get_test_desc(build, testcase, test_name):
            test_link = build_uri('/projects/{0}/builds/{1}/jobs/{2}/tests/{3}/'.format(
                build.project.slug,
                build.id.hex,
                testcase.job_id.hex,
                testcase.id.hex
            ))
            return "[%s](%s)" % (test_name, test_link)
        get_options.return_value = {
            'phabricator.notify': '1'
        }
        project = self.create_project(name='Server', slug='project-slug')
        self.assertEquals(phab.call_count, 0)
        patch = self.create_patch()
        source = self.create_source(project, revision_sha='1235', patch=patch)
        build = self.create_build(project, result=Result.failed, target='D1', source=source, status=Status.finished)
        job = self.create_job(build=build)
        testcase = self.create_test(
            package='test.group.ClassName',
            name='test.group.ClassName.test_foo',
            job=job,
            duration=134,
            result=Result.failed,
            )
        testcase2 = self.create_test(
            package='test.group.ClassName',
            name='test.group.ClassName.test_foo2',
            job=job,
            duration=134,
            result=Result.failed,
            )
        get_base_failures.return_value = {testcase.name}

        build_finished_handler(build_id=build.id.hex)

        get_options.assert_called_once_with(project.id)
        build_link = build_uri('/projects/{0}/builds/{1}/'.format(
            build.project.slug, build.id.hex))
        failure_link = build_uri('/projects/{0}/builds/{1}/tests/?result=failed'.format(
            build.project.slug, build.id.hex))

        test_desc = get_test_desc(build, testcase, 'test_foo')
        test_desc2 = get_test_desc(build, testcase2, 'test_foo2')
        expected_msg = """Server build Failed {{icon times, color=red}} ([results]({0})). There were 1 new [test failures]({1})

**New failures (1):**
|Test Name | Package|
|--|--|
|{2}|test.group.ClassName|

**Failures in parent revision (1):**
|Test Name | Package|
|--|--|
|{3}|test.group.ClassName|"""

        phab.assert_called_once_with('1', expected_msg.format(build_link, failure_link, test_desc2, test_desc))
Example #3
def send_notification(job, recipients):
    # TODO(dcramer): we should send a clipping of a relevant job log
    test_failures = TestGroup.query.filter(
        TestGroup.job_id == job.id,
        TestGroup.result == Result.failed,
        TestGroup.num_leaves == 0,
    ).order_by(TestGroup.name.asc())
    num_test_failures = test_failures.count()
    test_failures = test_failures[:25]

    build = job.build

    # TODO(dcramer): we should probably find a better way to do logs
    primary_log = LogSource.query.filter(
        LogSource.job_id == job.id,
    ).order_by(LogSource.date_created.asc()).first()
    if primary_log:
        log_clipping = get_log_clipping(
            primary_log, max_size=5000, max_lines=25)

    subject = u"Build {result} - {project} #{number} ({target})".format(
        number='{0}.{1}'.format(job.build.number, job.number),
        result=unicode(job.result),
        target=build.target or build.source.revision_sha or 'Unknown',
        project=job.project.name,
    )

    for testgroup in test_failures:
        testgroup.uri = build_uri('/testgroups/{0}/'.format(testgroup.id.hex))

    job.uri = build_uri('/jobs/{0}/'.format(job.id.hex))
    build.uri = build_uri('/builds/{0}/'.format(build.id.hex))

    context = {
        'job': job,
        'build': job.build,
        'total_test_failures': num_test_failures,
        'test_failures': test_failures,
    }

    if primary_log:
        context['build_log'] = {
            'text': log_clipping,
            'name': primary_log.name,
            'link': '{0}logs/{1}/'.format(job.uri, primary_log.id.hex),
        }

    msg = Message(subject, recipients=recipients, extra_headers={
        'Reply-To': ', '.join(sanitize_address(r) for r in recipients),
    })
    msg.body = render_template('listeners/mail/notification.txt', **context)
    msg.html = render_template('listeners/mail/notification.html', **context)

    mail.send(msg)
Example #4
 def process(self, fp):
     try:
         phase_config = json.load(fp)
     except ValueError:
         uri = build_uri('/find_build/{0}/'.format(self.step.job.build_id.hex))
         self.logger.warning('Failed to parse json; (step=%s, build=%s)', self.step.id.hex, uri, exc_info=True)
         self._add_failure_reason()
     else:
         _, implementation = JobPlan.get_build_step_for_job(job_id=self.step.job_id)
         try:
             implementation.expand_jobs(self.step, phase_config)
         except Exception:
             uri = build_uri('/find_build/{0}/'.format(self.step.job.build_id.hex))
             self.logger.warning('expand_jobs failed (step=%s, build=%s)', self.step.id.hex, uri, exc_info=True)
             self._add_failure_reason()
Example #5
    def test_slug_escape(self, get_options, phab):
        get_options.return_value = {
            'phabricator.notify': '1'
        }
        project = self.create_project(name='Server', slug='project-(slug)')
        self.assertEquals(phab.call_count, 0)
        patch = self.create_patch()
        source = self.create_source(project, revision_sha='1235', patch=patch)
        build = self.create_build(project, result=Result.passed, target='D1', source=source, status=Status.finished)
        job = self.create_job(build=build)
        testcase = self.create_test(
            package='test.group.ClassName',
            name='test.group.ClassName.test_foo',
            job=job,
            duration=134,
            result=Result.passed,
        )

        build_finished_handler(build_id=build.id.hex)

        get_options.assert_called_once_with(project.id)
        safe_slug = 'project-%28slug%29'
        build_link = build_uri('/projects/{0}/builds/{1}/'.format(
            safe_slug, build.id.hex))

        expected_msg = 'Server build Passed {{icon check, color=green}} ([results]({0})).'
        phab.assert_called_once_with('1', expected_msg.format(build_link))
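
The `safe_slug` expected above is just the URL-quoted project slug; Example #14 builds its per-test links the same way via `urllib.quote_plus`. A quick standalone check of that escaping (Python 2 `urllib`, as used throughout these snippets):

import urllib

# Parentheses are not URL-safe characters, so they are percent-encoded.
assert urllib.quote_plus('project-(slug)') == 'project-%28slug%29'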
Example #6
    def get_allocation_params(self, jobstep):
        params = {
            'artifact-search-path': self.path,
            'artifacts-server': current_app.config['ARTIFACTS_SERVER'],
            'adapter': self.get_client_adapter(),
            'server': build_uri('/api/0/'),
            'jobstep_id': jobstep.id.hex,
            's3-bucket': current_app.config['SNAPSHOT_S3_BUCKET'],
            'pre-launch': self.debug_config.get('prelaunch_script') or current_app.config['LXC_PRE_LAUNCH'],
            'post-launch': current_app.config['LXC_POST_LAUNCH'],
            'release': self.release,
            'use-external-env': 'false',
        }

        if current_app.config['CLIENT_SENTRY_DSN']:
            params['sentry-dsn'] = current_app.config['CLIENT_SENTRY_DSN']

        if 'bind_mounts' in self.debug_config:
            params['bind-mounts'] = self.debug_config['bind_mounts']

        expected_image = self._image_for_job_id(jobstep.job_id)
        if expected_image:
            params['save-snapshot'] = expected_image.hex

        # Filter out any None-valued parameter
        return dict((k, v) for k, v in params.iteritems() if v is not None)
Example #7
    def serialize(self, instance):
        if instance.project_id:
            avg_build_time = instance.project.avg_build_time
        else:
            avg_build_time = None

        data = instance.data or {}
        backend_details = data.get('backend')
        if backend_details:
            external = {
                'link': backend_details['uri'],
                'label': backend_details['label'],
            }
        else:
            external = None

        return {
            'id': instance.id.hex,
            'number': instance.number,
            'name': instance.label,
            'result': instance.result,
            'status': instance.status,
            'project': instance.project,
            'duration': instance.duration,
            'estimatedDuration': avg_build_time,
            'link': build_uri('/jobs/%s/' % (instance.id.hex,)),
            'external': external,
            'dateCreated': instance.date_created.isoformat(),
            'dateModified': instance.date_modified.isoformat() if instance.date_modified else None,
            'dateStarted': instance.date_started.isoformat() if instance.date_started else None,
            'dateFinished': instance.date_finished.isoformat() if instance.date_finished else None,
        }
Example #8
    def get_job_parameters(self, job, changes_bid):
        # TODO(kylec): Take a Source rather than a Job; we don't need a Job.
        """
        Args:
            job (Job): Job to use.
            changes_bid (str): Changes BID; typically JobStep ID.

        Returns:
            dict: Parameters to be supplied to Jenkins for the job.
        """
        params = {'CHANGES_BID': changes_bid}

        source = job.build.source

        if source.revision_sha:
            params['REVISION'] = source.revision_sha

        if source.patch:
            params['PATCH_URL'] = build_uri('/api/0/patches/{0}/?raw=1'.format(
                        source.patch.id.hex))

        phab_diff_id = source.data.get('phabricator.diffID')
        if phab_diff_id:
            params['PHAB_DIFF_ID'] = phab_diff_id

        phab_revision_id = source.data.get('phabricator.revisionID')
        if phab_revision_id:
            params['PHAB_REVISION_ID'] = phab_revision_id

        return params
Example #9
    def serialize(self, instance):
        if instance.project_id:
            avg_build_time = instance.project.avg_build_time
        else:
            avg_build_time = None

        target = instance.target
        if target is None and instance.source.revision_sha:
            target = instance.source.revision_sha[:12]

        return {
            'id': instance.id.hex,
            'number': instance.number,
            'name': instance.label,
            'target': target,
            'result': instance.result,
            'status': instance.status,
            'project': instance.project,
            'cause': instance.cause,
            'author': instance.author,
            'source': instance.source,
            'message': instance.message,
            'duration': instance.duration,
            'estimatedDuration': avg_build_time,
            'link': build_uri('/builds/%s/' % (instance.id.hex,)),
            'dateCreated': instance.date_created.isoformat(),
            'dateModified': instance.date_modified.isoformat() if instance.date_modified else None,
            'dateStarted': instance.date_started.isoformat() if instance.date_started else None,
            'dateFinished': instance.date_finished.isoformat() if instance.date_finished else None,
        }
Example #10
def build_finished_handler(build_id, **kwargs):
    build = Build.query.get(build_id)
    if build is None:
        return

    target = build.target
    is_diff_build = target and target.startswith(u'D')
    if not is_diff_build:
        # Not a diff build
        return

    if build.result != Result.failed:
        return

    options = get_options(build.project_id)
    if options.get('phabricator.notify', '0') != '1':
        return

    message = u'Build {result} - {project} #{number} ({target}). Build Results: [link]({link})'.format(
        number='{0}'.format(build.number),
        result=unicode(build.result),
        target=build.target or build.source.revision_sha or 'Unknown',
        project=build.project.name,
        link=build_uri('/projects/{0}/builds/{1}/'.format(build.project.slug, build.id.hex))
    )

    if build.author:
        message += ' - {author}'.format(author=build.author.email,)

    post_comment(target, message)
Example #11
    def crumble(self, item, attrs):
        if item.project_id:
            avg_build_time = item.project.avg_build_time
        else:
            avg_build_time = None

        target = item.target
        if target is None and item.source and item.source.revision_sha:
            target = item.source.revision_sha[:12]

        return {
            'id': item.id.hex,
            'collection_id': item.collection_id,
            'number': item.number,
            'name': item.label,
            'target': target,
            'result': item.result,
            'status': item.status,
            'project': item.project,
            'cause': item.cause,
            'author': item.author,
            'source': item.source,
            'message': item.message,
            'tags': item.tags or [],
            'duration': item.duration,
            'estimatedDuration': avg_build_time,
            'dateCreated': item.date_created.isoformat(),
            'dateModified': item.date_modified.isoformat() if item.date_modified else None,
            'dateStarted': item.date_started.isoformat() if item.date_started else None,
            'dateFinished': item.date_finished.isoformat() if item.date_finished else None,
            'stats': attrs['stats'],
            'failures': attrs['failures'],
            'link': build_uri('/projects/{0}/builds/{1}/'.format(
                item.project.slug, item.id.hex)),
        }
Example #12
    def get_tests(self, fp):
        try:
            # libxml has a limit on the size of a text field by default, but we encode stdout/stderr.
            #
            # It's not good to have such huge text fields in the first place, but we still want
            # to avoid hard-failing here if we do.
            parser = etree.XMLParser(huge_tree=True)
            root = etree.fromstring(fp.read(), parser=parser)
        except Exception:
            uri = build_uri("/find_build/{0}/".format(self.step.job.build_id.hex))
            self.logger.warning("Failed to parse XML; (step=%s, build=%s)", self.step.id.hex, uri, exc_info=True)
            try_create(
                FailureReason,
                {
                    "step_id": self.step.id,
                    "job_id": self.step.job_id,
                    "build_id": self.step.job.build_id,
                    "project_id": self.step.project_id,
                    "reason": "malformed_artifact",
                },
            )
            db.session.commit()
            return []

        if root.tag == "unittest-results":
            return self.get_bitten_tests(root)
        return self.get_xunit_tests(root)
Example #13
    def get_allocation_params(self, jobstep):
        params = {
            'artifact-search-path': self.path,
            'artifacts-server': current_app.config['ARTIFACTS_SERVER'],
            'adapter': self.get_client_adapter(),
            'server': build_uri('/api/0/'),
            'jobstep_id': jobstep.id.hex,
            's3-bucket': current_app.config['SNAPSHOT_S3_BUCKET'],
            'pre-launch': self.debug_config.get('prelaunch_script') or current_app.config['LXC_PRE_LAUNCH'],
            'post-launch': current_app.config['LXC_POST_LAUNCH'],
            'release': self.release,
        }

        if current_app.config['CLIENT_SENTRY_DSN']:
            params['sentry-dsn'] = current_app.config['CLIENT_SENTRY_DSN']

        if 'bind_mounts' in self.debug_config:
            params['bind-mounts'] = self.debug_config['bind_mounts']

        # TODO(dcramer): we need some kind of tie into the JobPlan in order
        # to dictate that this is a snapshot build
        # determine if there's an expected snapshot outcome
        expected_image = db.session.query(
            SnapshotImage.id,
        ).filter(
            SnapshotImage.job_id == jobstep.job_id,
        ).scalar()
        if expected_image:
            params['save-snapshot'] = expected_image.hex

        # Filter out any None-valued parameter
        return dict((k, v) for k, v in params.iteritems() if v is not None)
Example #14
def _generate_remarkup_table_for_tests(build, tests):
    num_failures = len(tests)
    did_truncate = False
    if num_failures > 10:
        tests = tests[:10]
        did_truncate = True

    table = ['|Test Name | Package|',
             '|--|--|']
    for test in tests:
        pkg = test.package
        name = test.name
        if name.startswith(pkg):
            name = name[len(pkg) + 1:]

        test_link = build_uri('/projects/{0}/builds/{1}/jobs/{2}/tests/{3}/'.format(
            urllib.quote_plus(build.project.slug),
            build.id.hex,
            test.job_id.hex,
            test.id.hex
        ))
        table = table + ['|[%s](%s)|%s|' % (name, test_link, pkg)]

    if did_truncate:
        table += ['|...more...|...|']

    return '\n'.join(table)
Example #15
    def get_allocation_params(self, jobstep):
        params = {
            'adapter': self.get_client_adapter(),
            'server': build_uri('/api/0/'),
            'jobstep_id': jobstep.id.hex,
            's3-bucket': current_app.config['SNAPSHOT_S3_BUCKET'],
            'pre-launch': current_app.config['LXC_PRE_LAUNCH'],
            'post-launch': current_app.config['LXC_POST_LAUNCH'],
            'release': self.release,
        }

        if current_app.config['CLIENT_SENTRY_DSN']:
            params['sentry-dsn'] = current_app.config['CLIENT_SENTRY_DSN']

        # TODO(dcramer): we need some kind of tie into the JobPlan in order
        # to dictate that this is a snapshot build
        # determine if there's an expected snapshot outcome
        expected_image = db.session.query(
            SnapshotImage.id,
        ).filter(
            SnapshotImage.job_id == jobstep.job_id,
        ).scalar()
        if expected_image:
            params['save-snapshot'] = expected_image.hex

        return params
Example #16
def _generate_remarkup_table_for_tests(build, tests):
    num_failures = len(tests)
    did_truncate = False
    max_shown = current_app.config.get('MAX_SHOWN_ITEMS_PER_BUILD_PHABRICATOR', 10)
    if num_failures > max_shown:
        tests = tests[:max_shown]
        did_truncate = True

    table = ['|Test Name | Package|',
             '|--|--|']
    for test in tests:
        pkg = test.package
        name = test.name
        if name.startswith(pkg):
            name = name[len(pkg) + 1:]

        test_link = build_uri('/projects/{0}/builds/{1}/jobs/{2}/tests/{3}/'.format(
            urllib.quote_plus(build.project.slug),
            build.id.hex,
            test.job_id.hex,
            test.id.hex
        ))
        table = table + ['|[%s](%s)|%s|' % (name, test_link, pkg)]

    if did_truncate:
        table += ['|...more...|...|']

    return '\n'.join(table)
Example #17
    def get_allocation_params(self, jobstep):
        params = {
            "artifact-search-path": self.path,
            "artifacts-server": current_app.config["ARTIFACTS_SERVER"],
            "adapter": self.get_client_adapter(),
            "server": build_uri("/api/0/"),
            "compression": self.compression,
            "jobstep_id": jobstep.id.hex,
            "s3-bucket": current_app.config["SNAPSHOT_S3_BUCKET"],
            "pre-launch": self.debug_config.get("prelaunch_script") or current_app.config["LXC_PRE_LAUNCH"],
            "post-launch": current_app.config["LXC_POST_LAUNCH"],
            "release": self.release,
        }

        if current_app.config["CLIENT_SENTRY_DSN"]:
            params["sentry-dsn"] = current_app.config["CLIENT_SENTRY_DSN"]

        if "bind_mounts" in self.debug_config:
            params["bind-mounts"] = self.debug_config["bind_mounts"]

        # TODO(dcramer): we need some kind of tie into the JobPlan in order
        # to dictate that this is a snapshot build
        # determine if there's an expected snapshot outcome
        expected_image = db.session.query(SnapshotImage.id).filter(SnapshotImage.job_id == jobstep.job_id).scalar()
        if expected_image:
            params["save-snapshot"] = expected_image.hex

        # Filter out any None-valued parameter
        return dict((k, v) for k, v in params.iteritems() if v is not None)
Example #18
    def test_build_failure_with_tests_and_no_base_job(self, get_options, post):
        get_options.return_value = {
            'phabricator.notify': '1'
        }
        project = self.create_project(name='Server', slug='project-slug')
        base_source = self.create_source(project, revision_sha='1235')
        base_build = self.create_build(project, result=Result.passed,
                                       source=base_source,
                                       status=Status.finished)
        self.assertEquals(post.call_count, 0)

        patch = self.create_patch()
        source = self.create_source(project, revision_sha='1235', patch=patch)
        build = self.create_build(project, result=Result.failed, target='D1',
                                  source=source, status=Status.finished)
        job = self.create_job(build=build)
        testcase = self.create_test(
            package='test.group.ClassName',
            name='test.group.ClassName.test_foo',
            job=job,
            duration=134,
            result=Result.failed,
            )

        build_finished_handler(build_id=build.id.hex)

        get_options.assert_called_once_with(project.id)
        build_link = build_uri('/projects/{0}/builds/{1}/'.format(
            build.project.slug, build.id.hex))
        failure_link = build_uri('/projects/{0}/builds/{1}/tests/?result=failed'.format(
            build.project.slug, build.id.hex))

        test_link = build_uri('/projects/{0}/builds/{1}/jobs/{2}/tests/{3}/'.format(
            build.project.slug,
            build.id.hex,
            testcase.job_id.hex,
            testcase.id.hex
        ))
        test_desc = "[test_foo](%s)" % test_link
        expected_msg = """Server build Failed {{icon times, color=red}} ([results]({0})). There were a total of 1 [test failures]({1}), but we could not determine if any of these tests were previously failing.

**All failures (1):**
|Test Name | Package|
|--|--|
|{2}|test.group.ClassName|"""

        post.assert_called_once_with('1', expected_msg.format(build_link, failure_link, test_desc), mock.ANY)
Example #19
    def test_max_shown_build_failures(self, get_options, phab):
        get_options.return_value = {
            'phabricator.notify': '1'
        }
        project = self.create_project(name='Server', slug='project-slug')
        self.assertEquals(phab.call_count, 0)
        patch = self.create_patch()
        source = self.create_source(project, revision_sha='1235', patch=patch)
        build = self.create_build(project, result=Result.failed, target='D1', source=source, status=Status.finished)
        job = self.create_job(build=build)
        max_shown = current_app.config.get('MAX_SHOWN_ITEMS_PER_BUILD_PHABRICATOR', 10)
        total_test_count = max_shown + 1
        testcases = []
        for i in range(total_test_count):
            testcases.append(self.create_test(
                package='test.group.ClassName',
                name='test.group.ClassName.test_foo{}'.format(i),
                job=job,
                duration=134,
                result=Result.failed,
                ))

        build_finished_handler(build_id=build.id.hex)

        get_options.assert_called_once_with(project.id)
        build_link = build_uri('/projects/{0}/builds/{1}/'.format(
            build.project.slug, build.id.hex))
        failure_link = build_uri('/projects/{0}/builds/{1}/tests/?result=failed'.format(
            build.project.slug, build.id.hex))

        assert phab.call_count == 1
        (diff_id, comment), _ = phab.call_args
        assert diff_id == '1'
        shown_test_count = 0
        for testcase in testcases:
            test_link = build_uri('/projects/{0}/builds/{1}/jobs/{2}/tests/{3}/'.format(
                build.project.slug,
                build.id.hex,
                testcase.job_id.hex,
                testcase.id.hex
            ))
            if test_link in comment:
                shown_test_count += 1
        assert shown_test_count == max_shown
        assert 'Server build Failed {{icon times, color=red}} ([results]({0})). There were {2} new [test failures]({1})'.format(
            build_link, failure_link, total_test_count) in comment
        assert '|...more...|...|' in comment
Example #20
 def serialize(self, instance, attrs):
     return {
         'id': instance.id.hex,
         'slug': instance.slug,
         'name': instance.name,
         'dateCreated': instance.date_created,
         'link': build_uri('/projects/{0}/'.format(instance.slug)),
     }
Example #21
 def get_test_desc(build, testcase, test_name):
     test_link = build_uri('/projects/{0}/builds/{1}/jobs/{2}/tests/{3}/'.format(
         build.project.slug,
         build.id.hex,
         testcase.job_id.hex,
         testcase.id.hex
     ))
     return "[%s](%s)" % (test_name, test_link)
Example #22
    def get_context(self, job, parent=None):
        test_failures = self.get_test_failures(job)
        num_test_failures = test_failures.count()
        test_failures = test_failures[:25]

        build = job.build

        result_label = self.get_result_label(job, parent)

        subject = u"{target} {result} - {project} #{number}".format(
            number='{0}.{1}'.format(job.build.number, job.number),
            result=result_label,
            target=build.target or build.source.revision_sha or 'Build',
            project=job.project.name,
        )

        build.uri = build_uri('/projects/{0}/builds/{1}/'.format(
            build.project.slug, build.id.hex))
        job.uri = build.uri + 'jobs/{0}/'.format(job.id.hex)

        for testgroup in test_failures:
            testgroup.uri = job.uri + 'tests/{0}/'.format(testgroup.id.hex)

        is_failure = job.result == Result.failed

        context = {
            'title': subject,
            'job': job,
            'build': job.build,
            'is_failure': is_failure,
            'is_passing': job.result == Result.passed,
            'result_label': result_label,
            'total_test_failures': num_test_failures,
            'test_failures': test_failures,
            'failure_reasons': get_failure_reasons(build),
        }

        if is_failure:
            # try to find the last failing log
            log_sources = self.get_failing_log_sources(job)
            if len(log_sources) == 1:
                log_clipping = self.get_log_clipping(
                    log_sources[0], max_size=5000, max_lines=25)

                context['build_log'] = {
                    'text': log_clipping,
                    'name': log_sources[0].name,
                    'uri': '{0}logs/{1}/'.format(job.uri, log_sources[0].id.hex),
                }
            elif log_sources:
                context['relevant_logs'] = [
                    {
                        'name': source.name,
                        'uri': '{0}logs/{1}/'.format(job.uri, source.id.hex),
                    } for source in log_sources
                ]

        return context
Example #23
 def serialize(self, instance):
     return {
         'id': instance.id.hex,
         'name': instance.name,
         'step': instance.step,
         'link': build_uri('/jobs/{0}/logs/{1}/'.format(
             instance.job_id.hex, instance.id.hex)),
         'dateCreated': instance.date_created,
     }
Example #24
def build_finished_handler(build, **kwargs):
    if build.result != Result.passed:
        return

    url = current_app.config.get('GREEN_BUILD_URL')
    if not url:
        logger.info('GREEN_BUILD_URL not set')
        return

    auth = current_app.config['GREEN_BUILD_AUTH']
    if not auth:
        logger.info('GREEN_BUILD_AUTH not set')
        return

    # we only want to identify stable revisions
    if build.patch_id or not build.revision_sha:
        logger.debug('Ignoring build due to non-commit: %s', build.id)
        return

    options = get_options(build.project_id)

    if options.get('green-build.notify', '1') != '1':
        logger.info('green-build.notify disabled for project: %s', build.project_id)
        return

    if build.repository.backend != RepositoryBackend.hg:
        logger.info('Repository backend is not supported: %s', build.repository.id)
        return

    vcs = build.repository.get_vcs()
    if vcs is None:
        logger.info('Repository has no VCS set: %s', build.repository.id)
        return

    # ensure we have the latest changes
    vcs.update()

    release_id = vcs.run(['log', '-r %s' % (build.revision_sha,), '--limit=1', '--template={rev}:{node|short}'])

    project = options.get('green-build.project') or build.project.slug

    requests.post(url, auth=auth, data={
        'project': project,
        'id': release_id,
        'build_url': build_uri('/projects/{0}/builds/{1}/'.format(
            build.project.slug, build.id.hex)),
        'build_server': 'changes',
    })

    try_create(Event, where={
        'type': EventType.green_build,
        'item_id': build.source_id,
        'data': {
            'build': build.id.hex,
        }
    })
Example #25
 def crumble(self, instance, attrs):
     return {
         'id': instance.id.hex,
         'diff': instance.diff,
         'link': build_uri('/patches/{0}/'.format(instance.id.hex)),
         'parentRevision': {
             'sha': instance.parent_revision_sha,
         },
         'dateCreated': instance.date_created,
     }
Example #26
    def get_job_context(self, job):

        def get_job_failing_tests(job):
            failing_tests = TestCase.query.filter(
                TestCase.job_id == job.id,
                TestCase.result == Result.failed,
            ).order_by(TestCase.name.asc())
            failing_tests_count = failing_tests.count()

            failing_tests = [
                {
                    'test_case': test_case,
                    'uri': build_uri(get_test_case_uri(test_case)),
                } for test_case in failing_tests[:3]
            ]

            return failing_tests, failing_tests_count

        def get_job_failing_log_sources(job):
            failing_log_sources = LogSource.query.filter(
                LogSource.job_id == job.id,
            ).join(
                JobStep, LogSource.step_id == JobStep.id,
            ).filter(
                JobStep.result == Result.failed,
            ).order_by(JobStep.date_created)
            failing_log_sources_count = failing_log_sources.count()

            failing_logs = []
            for log_source in failing_log_sources[:3]:
                log_clipping = self.get_log_clipping(
                    log_source, max_size=5000, max_lines=25)
                failing_logs.append({
                    'text': log_clipping,
                    'name': log_source.name,
                    'uri': build_uri(get_log_uri(log_source)),
                })

            return failing_logs, failing_log_sources_count

        failing_tests, failing_tests_count = get_job_failing_tests(job)
        failing_logs, failing_logs_count = get_job_failing_log_sources(job)

        context = {
            'job': job,
            'uri': build_uri(get_job_uri(job)),
            'failing_tests': failing_tests,
            'failing_tests_count': len(failing_tests),
            'total_failing_tests_count': failing_tests_count,
            'failing_logs': failing_logs,
            'failing_logs_count': len(failing_logs),
            'total_failing_logs_count': failing_logs_count,
        }

        return context
Example #27
    def get_context(self, job, parent=None):
        test_failures = self.get_test_failures(job)
        num_test_failures = test_failures.count()
        test_failures = test_failures[:25]

        build = job.build

        result_label = self.get_result_label(job, parent)

        subject = u"{target} {result} - {project} #{number}".format(
            number="{0}.{1}".format(job.build.number, job.number),
            result=result_label,
            target=build.target or build.source.revision_sha or "Build",
            project=job.project.name,
        )

        build.uri = build_uri("/projects/{0}/builds/{1}/".format(build.project.slug, build.id.hex))
        job.uri = build.uri + "jobs/{0}/".format(job.id.hex)

        for testgroup in test_failures:
            testgroup.uri = job.uri + "tests/{0}/".format(testgroup.id.hex)

        is_failure = job.result == Result.failed

        context = {
            "title": subject,
            "job": job,
            "build": job.build,
            "is_failure": is_failure,
            "is_passing": job.result == Result.passed,
            "result_label": result_label,
            "total_test_failures": num_test_failures,
            "test_failures": test_failures,
            "failure_reasons": get_failure_reasons(build),
        }

        if is_failure:
            # try to find the last failing log
            log_sources = self.get_failing_log_sources(job)
            if len(log_sources) == 1:
                log_clipping = self.get_log_clipping(log_sources[0], max_size=5000, max_lines=25)

                context["build_log"] = {
                    "text": log_clipping,
                    "name": log_sources[0].name,
                    "uri": "{0}logs/{1}/".format(job.uri, log_sources[0].id.hex),
                }
            elif log_sources:
                context["relevant_logs"] = [
                    {"name": source.name, "uri": "{0}logs/{1}/".format(job.uri, source.id.hex)}
                    for source in log_sources
                ]

        return context
Example #28
    def test_whitelisted_project(self, get_options, phab):
        get_options.return_value = {"phabricator.notify": "1"}
        project = self.create_project(name="test", slug="project-slug")
        self.assertEquals(phab.call_count, 0)
        build = self.create_build(project, result=Result.failed, target="D1", status=Status.finished)
        build_finished_handler(build_id=build.id.hex)

        get_options.assert_called_once_with(project.id)
        build_link = build_uri("/projects/{0}/builds/{1}/".format(build.project.slug, build.id.hex))
        expected_msg = "test build Failed {{icon times, color=red}} ([results]({0})).".format(build_link)

        phab.assert_called_once_with("1", expected_msg)
Example #29
 def crumble(self, instance, attrs):
     return {
         'id': instance.id.hex,
         'slug': instance.slug,
         'name': instance.name,
         'repository': {
             'id': instance.repository_id,
         },
         'status': instance.status,
         'dateCreated': instance.date_created,
         'link': build_uri('/projects/{0}/'.format(instance.slug)),
     }
Example #30
 def serialize(self, instance):
     return {
         'id': instance.id.hex,
         'name': instance.label,
         'message': instance.message,
         'diff': instance.diff,
         'link': build_uri('/patches/{0}/'.format(instance.id.hex)),
         'parentRevision': {
             'sha': instance.parent_revision_sha,
         },
         'dateCreated': instance.date_created,
     }
Example #31
 def crumble(self, instance, attrs):
     result = {
         'id': instance.id.hex,
         'name': instance.label,
         'project': instance.project,
         'author': instance.author,
         'message': instance.message,
         'link': build_uri('/changes/%s/' % (instance.id.hex,)),
         'dateCreated': instance.date_created.isoformat(),
         'dateModified': instance.date_modified.isoformat(),
     }
     if hasattr(instance, 'last_job'):
         result['lastBuild'] = instance.last_job
     return result
Example #32
def _get_job_context(job):
    def get_job_failing_tests(job):
        failing_tests = TestCase.query.filter(
            TestCase.job_id == job.id,
            TestCase.result == Result.failed,
        ).order_by(TestCase.name.asc())

        failing_tests = [{
            'test_case': test_case,
            'uri': build_uri(_get_test_case_uri(test_case)),
        } for test_case in failing_tests]
        failing_tests_count = len(failing_tests)

        return failing_tests, failing_tests_count

    def get_job_failing_log_sources(job):
        failing_log_sources = LogSource.query.join(
            JobStep,
            LogSource.step_id == JobStep.id,
        ).filter(
            JobStep.result == Result.failed,
            JobStep.job_id == job.id,
        ).order_by(JobStep.date_created)

        failing_logs = [{
            'text': _get_log_clipping(log_source, max_size=5000, max_lines=25),
            'name': log_source.name,
            'uri': build_uri(_get_log_uri(log_source)),
        } for log_source in failing_log_sources if not log_source.is_infrastructural()]
        failing_log_sources_count = len(failing_logs)

        return failing_logs, failing_log_sources_count

    failing_tests, failing_tests_count = get_job_failing_tests(job)
    failing_logs, failing_logs_count = get_job_failing_log_sources(job)

    context = {
        'job': job,
        'uri': build_uri(_get_job_uri(job)),
        'failing_tests': failing_tests,
        'failing_tests_count': len(failing_tests),
        'failing_logs': failing_logs,
        'failing_logs_count': len(failing_logs),
    }

    return context
Example #33
    def test_whitelisted_project(self, get_options, phab):
        get_options.return_value = {'phabricator.notify': '1'}
        project = self.create_project(name='test', slug='project-slug')
        self.assertEquals(phab.call_count, 0)
        build = self.create_build(project, result=Result.failed, target='D1')
        build_finished_handler(build_id=build.id.hex)

        get_options.assert_called_once_with(project.id)
        build_link = build_uri('/projects/{0}/builds/{1}/'.format(
            build.project.slug, build.id.hex))
        expected_msg = "Build Failed - test #1 (D1). Build Results: [link]({0})".format(
            build_link)

        phab.assert_called_once_with('1', expected_msg)
Example #34
    def get_project_stats(self, start_period, end_period):
        projects_by_id = dict((p.id, p) for p in self.projects)
        project_ids = projects_by_id.keys()

        # fetch overall build statistics per project
        query = db.session.query(
            Build.project_id, Build.result,
            func.count(Build.id).label('num'),
            func.avg(Build.duration).label('duration'),
        ).join(
            Source, Source.id == Build.source_id,
        ).filter(
            Source.patch_id == None,  # NOQA
            Build.project_id.in_(project_ids),
            Build.status == Status.finished,
            Build.result.in_([Result.failed, Result.passed]),
            Build.date_created >= start_period,
            Build.date_created < end_period,
        ).group_by(Build.project_id, Build.result)

        project_results = {}
        for project in self.projects:
            project_results[project] = {
                'total_builds': 0,
                'green_builds': 0,
                'green_percent': None,
                'avg_duration': 0,
                'link': build_uri('/projects/{0}/'.format(project.slug)),
            }

        for project_id, result, num_builds, duration in query:
            if duration is None:
                duration = 0

            project = projects_by_id[project_id]

            if result == Result.passed:
                project_results[project]['avg_duration'] = duration

            project_results[project]['total_builds'] += num_builds
            if result == Result.passed:
                project_results[project]['green_builds'] += num_builds

        for project, stats in project_results.iteritems():
            if stats['total_builds']:
                stats['green_percent'] = percent(stats['green_builds'], stats['total_builds'])
            else:
                stats['green_percent'] = None

        return project_results
Example #35
def get_test_failure_remarkup(build, tests):
    safe_slug = urllib.quote_plus(build.project.slug)

    base_commit_failures = get_test_failures_in_base_commit(build)
    if base_commit_failures is None:
        total_failures = [t for t in tests]
        failures_in_parent = []
        message = ' There were a total of ' \
                  '{num_failures} [test failures]({link}), but we could not ' \
                  'determine if any of these tests were previously failing.'.format(
                      num_failures=len(tests),
                      link=build_uri('/build_tests/{0}/'.format(build.id.hex))
                  )
        message += '\n\n**All failures ({failure_count}):**\n'.format(
            failure_count=len(total_failures))
        message += _generate_remarkup_table_for_tests(build, total_failures)

    else:
        new_failures = [t for t in tests if t.name not in base_commit_failures]
        failures_in_parent = [
            t for t in tests if t.name in base_commit_failures
        ]
        message = ' There were {new_failures} new [test failures]({link})'.format(
            new_failures=len(new_failures),
            link=build_uri('/build_tests/{0}/'.format(build.id.hex)))

        if new_failures:
            message += '\n\n**New failures ({new_failure_count}):**\n'.format(
                new_failure_count=len(new_failures))
            message += _generate_remarkup_table_for_tests(build, new_failures)

    if failures_in_parent:
        message += '\n\n**Failures in parent revision ({parent_failure_count}):**\n'.format(
            parent_failure_count=len(failures_in_parent))
        message += _generate_remarkup_table_for_tests(build,
                                                      failures_in_parent)
    return message
Example #36
 def process(self, fp):
     try:
         phase_config = json.load(fp)
     except ValueError:
         uri = build_uri('/find_build/{0}/'.format(
             self.step.job.build_id.hex))
         self.logger.warning('Failed to parse json; (step=%s, build=%s)',
                             self.step.id.hex,
                             uri,
                             exc_info=True)
         self._add_failure_reason()
     else:
         _, implementation = JobPlan.get_build_step_for_job(
             job_id=self.step.job_id)
         try:
             implementation.expand_jobs(self.step, phase_config)
         except Exception:
             uri = build_uri('/find_build/{0}/'.format(
                 self.step.job.build_id.hex))
             self.logger.warning('expand_jobs failed (step=%s, build=%s)',
                                 self.step.id.hex,
                                 uri,
                                 exc_info=True)
             self._add_failure_reason()
Example #37
 def process(self, fp):
     try:
         phase_config = json.load(fp)
     except ValueError:
         uri = build_uri('/find_build/{0}/'.format(
             self.step.job.build_id.hex))
         self.logger.warning('Failed to parse json; (step=%s, build=%s)',
                             self.step.id.hex,
                             uri,
                             exc_info=True)
         self._add_failure_reason()
     else:
         _, implementation = JobPlan.get_build_step_for_job(
             job_id=self.step.job_id)
         try:
             implementation.expand_jobs(self.step, phase_config)
         except ArtifactParseError:
             uri = build_uri('/find_build/{0}/'.format(
                 self.step.job.build_id.hex))
             self.logger.warning(
                 'malformed %s artifact (step=%s, build=%s)',
                 self.FILENAMES[0],
                 self.step.id.hex,
                 uri,
                 exc_info=True)
             self._add_failure_reason()
         except Exception:
             uri = build_uri('/find_build/{0}/'.format(
                 self.step.job.build_id.hex))
             self.logger.warning('expand_jobs failed (step=%s, build=%s)',
                                 self.step.id.hex,
                                 uri,
                                 exc_info=True)
             self.step.result = Result.infra_failed
             db.session.add(self.step)
             db.session.commit()
Example #38
    def test_whitelisted_project(self, get_options, post):
        get_options.return_value = {'phabricator.notify': '1'}
        project = self.create_project(name='test', slug='project-slug')
        self.assertEquals(post.call_count, 0)
        build = self.create_build(project,
                                  result=Result.failed,
                                  target='D1',
                                  status=Status.finished)
        build_finished_handler(build_id=build.id.hex)

        get_options.assert_called_once_with(project.id)
        build_link = build_uri('/find_build/{0}/'.format(build.id.hex))
        expected_msg = "test build Failed {{icon times, color=red}} ([results]({0})).".format(
            build_link)

        post.assert_called_once_with('1', expected_msg, mock.ANY)
Example #39
def build_finished_handler(build_id, **kwargs):
    build = Build.query.get(build_id)
    if build is None:
        return

    if build.cause == Cause.snapshot:
        return

    if build.source.patch_id:
        return

    if not should_notify(build):
        return

    options = get_options(build.project_id)

    if options.get('hipchat.notify', '0') != '1':
        return

    if not options.get('hipchat.room'):
        return

    token = options.get('hipchat.token') or current_app.config.get('HIPCHAT_TOKEN')
    if not token:
        return

    message = u'Build {result} - <a href="{link}">{project} #{number}</a> ({target}) - {subject}'.format(
        number='{0}'.format(build.number),
        result=unicode(build.result),
        target=build.target or build.source.revision_sha or 'Unknown',
        subject=build.source.revision.subject,
        project=build.project.name,
        link=build_uri('/projects/{0}/builds/{1}/'.format(
            build.project.slug, build.id.hex))
    )
    if build.author:
        message += ' - {author}'.format(
            author=build.author.email,
        )

    send_payload(
        token=token,
        room=options['hipchat.room'],
        message=message,
        notify=True,
        color='green' if build.result == Result.passed else 'red',
    )
Example #40
    def get_html_label(self, build):
        link = build_uri(
            '/projects/{0}/builds/{1}/tests/?result=failed'.format(
                build.project.slug, build.id.hex))

        try:
            test_failures = (s.value for s in build.stats
                             if s.name == 'test_failures').next()
        except StopIteration:
            return Markup(
                'There were an <a href="{link}">unknown number of test failures</a>.'
                .format(link=link, ))

        return Markup(
            'There were <a href="{link}">{count} failing tests</a>.'.format(
                link=link,
                count=test_failures,
            ))
Example #41
def _get_build_target(build):
    """
    Returns the build's target and target uri (normally a phabricator
    revision and diff url).
    """
    source_data = build.source.data or {}
    phabricator_rev_id = source_data.get('phabricator.revisionID')
    phabricator_uri = source_data.get('phabricator.revisionURL')

    if phabricator_rev_id and phabricator_uri:
        target = 'D{}'.format(phabricator_rev_id)
        target_uri = phabricator_uri
    else:
        # TODO: Make sure that the phabricator source data is present to
        # make this obsolete.
        target = None
        target_uri = build_uri(_get_source_uri(build, build.source))
    return target, target_uri
Example #42
    def get_job_parameters(self, job):
        params = [
            {'name': 'CHANGES_BID', 'value': job.id.hex},
        ]

        if job.build.source.revision_sha:
            params.append(
                {'name': 'REVISION', 'value': job.build.source.revision_sha},
            )

        if job.build.source.patch:
            params.append(
                {
                    'name': 'PATCH_URL',
                    'value': build_uri('/api/0/patches/{0}/?raw=1'.format(
                        job.build.source.patch.id.hex)),
                }
            )
        return params
Example #43
    def get_slow_tests_for_project(self, project, start_period, end_period):
        latest_build = Build.query.filter(
            Build.project == project,
            Build.status == Status.finished,
            Build.result == Result.passed,
            Build.date_created >= start_period,
            Build.date_created < end_period,
        ).order_by(Build.date_created.desc()).limit(1).first()

        if not latest_build:
            return []

        job_list = list(latest_build.jobs)
        if not job_list:
            return []

        queryset = TestCase.query.filter(
            TestCase.job_id.in_(j.id for j in job_list),
            TestCase.result == Result.passed,
            TestCase.date_created > start_period,
            TestCase.date_created <= end_period,
        ).order_by(TestCase.duration.desc()).limit(MAX_SLOW_TESTS)

        slow_list = []
        for test in queryset:
            slow_list.append({
                'project': project,
                'name': test.short_name,
                'package': test.package,
                'duration': '%.2f s' % (test.duration / 1000.0,),
                'duration_raw': test.duration,
                'link': build_uri('/projects/{0}/tests/{1}/'.format(
                    project.slug, test.name_sha)),
            })

        return slow_list
Example #44
    def get_slow_tests_for_project(self, project, start_period, end_period):
        latest_build = Build.query.filter(
            Build.project == project,
            Build.status == Status.finished,
            Build.result == Result.passed,
            Build.date_created >= start_period,
            Build.date_created < end_period,
        ).order_by(
            Build.date_created.desc(),
        ).limit(1).first()

        if not latest_build:
            return []

        job_list = list(latest_build.jobs)
        if not job_list:
            return []

        queryset = db.session.query(
            TestCase.name, TestCase.duration,
        ).filter(
            TestCase.job_id.in_(j.id for j in job_list),
            TestCase.result == Result.passed,
            TestCase.date_created > start_period,
            TestCase.date_created <= end_period,
        ).group_by(
            TestCase.name, TestCase.duration,
        ).order_by(TestCase.duration.desc())

        slow_list = []
        for name, duration in queryset[:10]:
            slow_list.append({
                'project': project,
                'name': name,
                'package': '',  # TODO
                'duration': '%.2f s' % (duration / 1000.0,),
                'duration_raw': duration,
                'link': build_uri('/projects/{0}/tests/{1}/'.format(
                    project.slug, sha1(name).hexdigest())),
            })

        return slow_list
Example #45
    def get_job_failing_log_sources(job):
        failing_log_sources = LogSource.query.join(
            JobStep,
            LogSource.step_id == JobStep.id,
        ).filter(
            JobStep.result == Result.failed,
            JobStep.job_id == job.id,
        ).order_by(JobStep.date_created)

        failing_logs = [{
            'text': _get_log_clipping(log_source, max_size=5000, max_lines=25),
            'name': log_source.name,
            'uri': build_uri(_get_log_uri(log_source)),
        } for log_source in failing_log_sources]
        failing_log_sources_count = len(failing_logs)

        return failing_logs, failing_log_sources_count
Example #46
def _get_message_for_build_context(build_context):
    build = build_context['build']
    result = build.result
    if result == Result.passed:
        result_image = '{icon check, color=green}'
    elif result == Result.failed:
        result_image = '{icon times, color=red}'
    else:
        result_image = '{icon question, color=orange}'
    safe_slug = urllib.quote_plus(build.project.slug)
    message = u'{project} build {result} {image} ([results]({link})).'.format(
        project=build.project.name,
        image=result_image,
        result=unicode(build.result),
        link=build_uri('/find_build/{0}/'.format(build.id.hex)))

    test_failures = [t['test_case'] for t in build_context['failing_tests']]

    if build_context['failing_tests_count'] > 0:
        message += get_test_failure_remarkup(build, test_failures)
    return message
Example #47
def get_test_failure_remarkup(build, tests):
    base_commit_failures = get_test_failures_in_base_commit(build)
    new_failures = [t for t in tests if t.name not in base_commit_failures]
    failures_in_parent = [t for t in tests if t.name in base_commit_failures]

    safe_slug = urllib.quote_plus(build.project.slug)
    message = ' There were {new_failures} new [test failures]({link})'.format(
        num_failures=len(tests),
        new_failures=len(new_failures),
        link=build_uri('/projects/{0}/builds/{1}/tests/?result=failed'.format(
            safe_slug, build.id.hex)))
    if new_failures:
        message += '\n\n**New failures ({new_failure_count}):**\n'.format(
            new_failure_count=len(new_failures))
        message += _generate_remarkup_table_for_tests(build, new_failures)

    if failures_in_parent:
        message += '\n\n**Failures in parent revision ({parent_failure_count}):**\n'.format(
            parent_failure_count=len(failures_in_parent))
        message += _generate_remarkup_table_for_tests(build,
                                                      failures_in_parent)
    return message
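`_generate_remarkup_table_for_tests` is not included in this excerpt. A minimal sketch, reconstructed from the expected messages in the tests here (header `|Test Name | Package|`, separator `|--|--|`, one linked short name per row); the `/build_test/` link form follows Exemplo 51/52, though other examples use the longer `/projects/.../tests/...` form:

# Sketch only: produces the Remarkup table shape asserted in the tests above.
def _generate_remarkup_table_for_tests(build, tests):
    lines = ['|Test Name | Package|', '|--|--|']
    for test in tests:
        test_link = build_uri('/build_test/{0}/{1}/'.format(
            build.id.hex, test.id.hex))
        lines.append('|[%s](%s)|%s|' % (test.short_name, test_link, test.package))
    return '\n'.join(lines)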
Exemplo n.º 48
0
    def get_allocation_params(self, jobstep):
        params = {
            'artifact-search-path': self.path,
            'artifacts-server': current_app.config['ARTIFACTS_SERVER'],
            'adapter': self.get_client_adapter(),
            'server': build_uri('/api/0/'),
            'jobstep_id': jobstep.id.hex,
            's3-bucket': current_app.config['SNAPSHOT_S3_BUCKET'],
            'pre-launch': (self.debug_config.get('prelaunch_script')
                           or current_app.config['LXC_PRE_LAUNCH']),
            'post-launch': current_app.config['LXC_POST_LAUNCH'],
            'release': self.release,
        }

        if current_app.config['CLIENT_SENTRY_DSN']:
            params['sentry-dsn'] = current_app.config['CLIENT_SENTRY_DSN']

        if 'bind_mounts' in self.debug_config:
            params['bind-mounts'] = self.debug_config['bind_mounts']

        # TODO(dcramer): we need some kind of tie into the JobPlan in order
        # to dictate that this is a snapshot build
        # determine if there's an expected snapshot outcome
        expected_image = db.session.query(
            SnapshotImage.id,
        ).filter(
            SnapshotImage.job_id == jobstep.job_id,
        ).scalar()
        if expected_image:
            params['save-snapshot'] = expected_image.hex

        # Filter out any None-valued parameter
        return dict((k, v) for k, v in params.iteritems() if v is not None)
Exemplo n.º 49
0
    def get_allocation_params(self, jobstep):
        params = {
            'adapter': self.get_client_adapter(),
            'server': build_uri('/api/0/'),
            'jobstep_id': jobstep.id.hex,
            's3-bucket': current_app.config['SNAPSHOT_S3_BUCKET'],
            'pre-launch': current_app.config['LXC_PRE_LAUNCH'],
            'post-launch': current_app.config['LXC_POST_LAUNCH'],
            'release': self.release,
        }

        if current_app.config['CLIENT_SENTRY_DSN']:
            params['sentry-dsn'] = current_app.config['CLIENT_SENTRY_DSN']

        # TODO(dcramer): we need some kind of tie into the JobPlan in order
        # to dictate that this is a snapshot build
        # determine if there's an expected snapshot outcome
        expected_image = db.session.query(
            SnapshotImage.id,
        ).filter(
            SnapshotImage.job_id == jobstep.job_id,
        ).scalar()
        if expected_image:
            params['save-snapshot'] = expected_image.hex

        return params
Exemplo n.º 50
0
def get_flaky_tests(start_period, end_period, projects, maxFlakyTests):
    test_queryset = TestCase.query.filter(
        TestCase.project_id.in_(p.id for p in projects),
        TestCase.result == Result.passed,
        TestCase.date_created >= start_period,
        TestCase.date_created < end_period,
    ).join(
        Job,
        Job.id == TestCase.job_id,
    ).join(
        Build,
        Build.id == Job.build_id,
    ).join(
        Source,
        Source.id == Build.source_id,
    ).filter(
        Source.patch_id == None,  # NOQA
    )

    flaky_test_queryset = test_queryset.with_entities(
        TestCase.name_sha, TestCase.project_id,
        func.sum(case([(TestCase.reruns > 0, 1)], else_=0)).label('reruns'),
        func.sum(case([(TestCase.reruns > 1, 1)],
                      else_=0)).label('double_reruns'),
        func.count('*').label('count')).group_by(
            TestCase.name_sha, TestCase.project_id).order_by(
                func.sum(TestCase.reruns).desc()).limit(maxFlakyTests)

    project_names = {p.id: p.name for p in projects}

    flaky_list = []
    for name_sha, project_id, reruns, double_reruns, count in flaky_test_queryset:
        if reruns == 0:
            continue

        rerun = test_queryset.filter(
            TestCase.name_sha == name_sha,
            TestCase.project_id == project_id,
            TestCase.reruns > 0,
        ).order_by(TestCase.date_created.desc()).first()

        flaky_list.append({
            'id': rerun.id,
            'name': rerun.name,
            'short_name': rerun.short_name,
            'package': rerun.package,
            'hash': name_sha,
            'project_id': rerun.project_id,
            'project_name': project_names[rerun.project_id],
            'flaky_runs': reruns,
            'double_reruns': double_reruns,
            'passing_runs': count,
            'link': build_uri('/projects/{0}/builds/{1}/jobs/{2}/tests/{3}/'.format(
                rerun.project.slug, rerun.job.build.id.hex, rerun.job.id.hex,
                rerun.id.hex)),
        })

    return flaky_list
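A hypothetical call, just to show the expected argument shapes (the project list only needs objects with `id` and `name`; fetching them via `Project.query.all()` is an assumption):

# Hypothetical usage: flakiest tests on committed (patch-less) builds over the
# past week, capped at 200 entries.
from datetime import datetime, timedelta

end_period = datetime.utcnow()
start_period = end_period - timedelta(days=7)
flaky = get_flaky_tests(start_period, end_period, Project.query.all(),
                        maxFlakyTests=200)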
Exemplo n.º 51
0
    def test_multiple_builds(self, get_options, post):
        get_options.return_value = {'phabricator.notify': '1'}
        project1 = self.create_project(name='Server', slug='project-slug')
        project2 = self.create_project(name='Server2', slug='project-slug2')
        self.assertEquals(post.call_count, 0)
        collection_id = uuid.uuid4()

        def create_build(result, project):
            base_source = self.create_source(project, revision_sha='1235')
            base_build = self.create_build(project,
                                           result=Result.passed,
                                           source=base_source,
                                           status=Status.finished)
            self.create_job(build=base_build)

            patch = self.create_patch()
            source = self.create_source(project,
                                        revision_sha='1235',
                                        patch=patch)
            build = self.create_build(project,
                                      result=result,
                                      target='D1',
                                      source=source,
                                      status=Status.finished,
                                      collection_id=collection_id)
            job = self.create_job(build=build)
            testcase = self.create_test(
                package='test.group.ClassName',
                name='test.group.ClassName.test_foo',
                job=job,
                duration=134,
                result=result,
            )
            return build, testcase

        build1, testcase1 = create_build(Result.failed, project1)
        build2, testcase2 = create_build(Result.passed, project2)

        build_finished_handler(build_id=build1.id.hex)

        build_link = build_uri('/find_build/{0}/'.format(build1.id.hex))
        build2_link = build_uri('/find_build/{0}/'.format(build2.id.hex))
        failure_link = build_uri('/build_tests/{0}/'.format(build1.id.hex))

        test_link = build_uri('/build_test/{0}/{1}/'.format(
            build1.id.hex,
            testcase1.id.hex,
        ))
        test_desc = "[test_foo](%s)" % test_link
        expected_msg = """Server build Failed {{icon times, color=red}} ([results]({0})). There were 1 new [test failures]({1})

**New failures (1):**
|Test Name | Package|
|--|--|
|{2}|test.group.ClassName|

Server2 build Passed {{icon check, color=green}} ([results]({3}))."""

        post.assert_called_once_with(
            '1',
            expected_msg.format(build_link, failure_link, test_desc,
                                build2_link), mock.ANY)
Exemplo n.º 52
0
def get_test_desc(build, testcase, test_name):
    test_link = build_uri('/build_test/{0}/{1}/'.format(
        build.id.hex,
        testcase.id.hex,
    ))
    return "[%s](%s)" % (test_name, test_link)
Exemplo n.º 53
0
def build_finished_handler(build_id, **kwargs):
    build = Build.query.get(build_id)
    if build is None:
        return

    if build.cause == Cause.snapshot:
        return

    if build.result != Result.passed:
        return

    url = current_app.config.get('GREEN_BUILD_URL')
    if not url:
        logger.info('GREEN_BUILD_URL not set')
        return

    auth = current_app.config['GREEN_BUILD_AUTH']
    if not auth:
        logger.info('GREEN_BUILD_AUTH not set')
        return

    source = build.source

    is_commit_build = source.is_commit()
    # Commit queue builds have a commit, but they aren't really commit builds.
    if build.tags and 'commit-queue' in build.tags:
        is_commit_build = False
    if not is_commit_build:
        logger.debug('Ignoring build due to non-commit: %s', build.id)
        return

    vcs = source.repository.get_vcs()
    if vcs is None:
        logger.info('Repository has no VCS set: %s', source.repository.id)
        return

    # ensure we have the latest changes
    if vcs.exists():
        try:
            vcs.update()
        except ConcurrentUpdateError:
            # Retry once if it was already updating.
            vcs.update()
    else:
        vcs.clone()

    # set latest_green_build if latest for each branch:
    _set_latest_green_build_for_each_branch(build, source, vcs)

    options = get_options(build.project_id)

    if options.get('green-build.notify', '1') != '1':
        logger.info('green-build.notify disabled for project: %s', build.project_id)
        return

    branch_names = filter(bool, options.get('build.branch-names', '*').split(' '))
    if not source.revision.should_build_branch(branch_names):
        return

    release_id = get_release_id(source, vcs)

    project = options.get('green-build.project') or build.project.slug
    committed_timestamp_sec = calendar.timegm(source.revision.date_committed.utctimetuple())

    logger.info('Making green_build request to %s', url)
    try:
        requests.post(url, auth=auth, timeout=10, data={
            'project': project,
            'id': release_id,
            'build_url': build_uri('/projects/{0}/builds/{1}/'.format(
                build.project.slug, build.id.hex)),
            'build_server': 'changes',
            'author_name': source.revision.author.name,
            'author_email': source.revision.author.email,
            'commit_timestamp': committed_timestamp_sec,
            'revision_message': source.revision.message,
        }).raise_for_status()
    except HTTPError as ex:
        # Conflicts aren't necessarily failures; some green build receivers
        # report conflict if they see out-of-order results (not uncommon in Changes).
        # We want to track those situations independently of other non-success responses.
        # NOTE: We compare `ex.response` to None explicitly because error
        # responses (4xx/5xx) evaluate to `False`, so a plain truthiness check
        # would be misleading.
        if ex.response is not None and ex.response.status_code == 409:
            logger.warning("Conflict when reporting green build", exc_info=True)
        else:
            logger.exception('Failed to report green build')
        status = 'fail'
    except Exception:
        logger.exception('Failed to report green build')
        status = 'fail'
    else:
        status = 'success'

    create_or_update(Event, where={
        'type': EventType.green_build,
        'item_id': build.id,
    }, values={
        'data': {
            'status': status,
        },
        'date_modified': datetime.utcnow(),
    })
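`get_release_id` is called above but defined outside this excerpt. A minimal sketch for a Mercurial repository, based on the inline invocation shown in Exemplo 55 (it returns '<rev>:<short node>' for the built revision):

# Sketch of get_release_id, mirroring the inline hg command from Exemplo 55.
def get_release_id(source, vcs):
    return vcs.run([
        'log',
        '-r %s' % (source.revision_sha,),
        '--limit=1',
        '--template={rev}:{node|short}',
    ])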
Exemplo n.º 54
0
    def get_context(self, job, parent=None):
        test_failures = self.get_test_failures(job)
        num_test_failures = test_failures.count()
        test_failures = test_failures[:25]

        build = job.build

        result_label = self.get_result_label(job, parent)
        if result_label:
            result_label = result_label.upper()

        branch_str = ''
        if build.source.revision:
            revision = build.source.revision
            branches = revision.branches
            if branches:
                branch_str = ' (%s)' % ','.join(branches)

        subject = u"{target} {result} - {project}{branches} {name} #{number}".format(
            name=build.label,
            number='{0}.{1}'.format(job.build.number, job.number),
            result=result_label,
            target=build.target or build.source.revision_sha or 'Build',
            project=job.project.name,
            branches=branch_str,
        )

        build.uri = build_uri('/projects/{0}/builds/{1}/'.format(
            build.project.slug, build.id.hex))
        job.uri = build.uri + 'jobs/{0}/'.format(job.id.hex)

        for testgroup in test_failures:
            testgroup.uri = job.uri + 'tests/{0}/'.format(testgroup.id.hex)

        is_failure = job.result == Result.failed

        project_options = ProjectOptionsHelper.get_options(
            [job.project], ['project.owners', 'project.notes'])[job.project_id]

        context = {
            'title': subject,
            'job': job,
            'build': job.build,
            'is_failure': is_failure,
            'is_passing': job.result == Result.passed,
            'result_label': result_label,
            'total_test_failures': num_test_failures,
            'test_failures': test_failures,
            'failure_reasons': get_failure_reasons(build),
            'owners': project_options.get('project.owners'),
            'notes': project_options.get('project.notes'),
        }

        if is_failure:
            # try to find the last failing log
            log_sources = self.get_failing_log_sources(job)
            if len(log_sources) == 1:
                log_clipping = self.get_log_clipping(log_sources[0],
                                                     max_size=5000,
                                                     max_lines=25)

                context['build_log'] = {
                    'text': log_clipping,
                    'name': log_sources[0].name,
                    'uri': '{0}logs/{1}/'.format(job.uri,
                                                 log_sources[0].id.hex),
                }
            elif log_sources:
                context['relevant_logs'] = [{
                    'name': source.name,
                    'uri': '{0}logs/{1}/'.format(job.uri, source.id.hex),
                } for source in log_sources]

        return context
Exemplo n.º 55
0
def build_finished_handler(build, **kwargs):
    if build.result != Result.passed:
        return

    url = current_app.config.get('GREEN_BUILD_URL')
    if not url:
        logger.info('GREEN_BUILD_URL not set')
        return

    auth = current_app.config['GREEN_BUILD_AUTH']
    if not auth:
        logger.info('GREEN_BUILD_AUTH not set')
        return

    # we only want to identify stable revisions
    if build.patch_id or not build.revision_sha:
        logger.debug('Ignoring build due to non-commit: %s', build.id)
        return

    options = get_options(build.project_id)

    if options.get('green-build.notify', '1') != '1':
        logger.info('green-build.notify disabled for project: %s',
                    build.project_id)
        return

    if build.repository.backend != RepositoryBackend.hg:
        logger.info('Repository backend is not supported: %s',
                    build.repository.id)
        return

    vcs = build.repository.get_vcs()
    if vcs is None:
        logger.info('Repository has no VCS set: %s', build.repository.id)
        return

    # ensure we have the latest changes
    vcs.update()

    release_id = vcs.run([
        'log',
        '-r %s' % (build.revision_sha, ), '--limit=1',
        '--template={rev}:{node|short}'
    ])

    project = options.get('green-build.project') or build.project.slug

    requests.post(url, auth=auth, data={
        'project': project,
        'id': release_id,
        'build_url': build_uri('/projects/{0}/builds/{1}/'.format(
            build.project.slug, build.id.hex)),
        'build_server': 'changes',
    })

    try_create(Event,
               where={
                   'type': EventType.green_build,
                   'item_id': build.source_id,
                   'data': {
                       'build': build.id.hex,
                   }
               })
Exemplo n.º 56
0
    def get_job_parameters(self,
                           job,
                           changes_bid,
                           setup_script=None,
                           script=None,
                           teardown_script=None,
                           path=None):
        """
        Gets a dictionary mapping parameter names to values; these correspond
        to the input variables of the Jenkins job.

        changes_bid is actually the jobstep id, and job is the current job.
        *_script and path override the corresponding fields of the current
        builder.
        """
        params = super(JenkinsGenericBuilder,
                       self).get_job_parameters(job, changes_bid=changes_bid)

        if path is None:
            path = self.path

        if setup_script is None:
            setup_script = self.setup_script

        if script is None:
            script = self.script

        if teardown_script is None:
            teardown_script = self.teardown_script

        project = job.project
        repository = project.repository

        vcs = repository.get_vcs()
        if vcs:
            repo_url = vcs.remote_url
        else:
            repo_url = repository.url

        cluster = self.cluster

        snapshot_bucket = current_app.config.get('SNAPSHOT_S3_BUCKET', '')

        default_pre = self.debug_config.get(
            'prelaunch_script') or current_app.config.get(
                'LXC_PRE_LAUNCH', '')
        default_post = current_app.config.get('LXC_POST_LAUNCH', '')
        default_release = current_app.config.get('LXC_RELEASE', 'trusty')

        build_desc = self.build_desc

        # This is the image we are expected to produce or None
        # if this is not a snapshot build.
        expected_image = self.get_expected_image(job.id)

        # Setting script to be empty essentially forces nothing
        # but setup/teardown to be run, making a clean snapshot
        snapshot_id = ''
        if expected_image:
            snapshot_id = expected_image.hex

            # this is a no-op command in sh, essentially equivalent
            # to '' except it tells changes-client that we are
            # deliberately doing absolutely nothing. However,
            # if snapshot script is not None, then we just use
            # that in place of script (so the normal script is
            # never used).
            script = self.snapshot_script or ':'

            # sharded builds will have different setup/teardown/build_desc
            # scripts between shards and collector so we need to
            # use the shard ones
            build_desc = self.get_snapshot_build_desc()
            setup_script = self.get_snapshot_setup_script()
            teardown_script = self.get_snapshot_teardown_script()

        # CHANGES_BID, the jobstep id, is provided by superclass
        params.update({
            'CHANGES_PID': project.slug,
            'PROJECT_CONFIG': project.get_config_path(),
            'REPO_URL': repo_url,
            'SETUP_SCRIPT': setup_script,
            'SCRIPT': script,
            'TEARDOWN_SCRIPT': teardown_script,
            'RESET_SCRIPT': self.reset_script,
            'REPO_VCS': repository.backend.name,
            'CLUSTER': cluster,
            'WORK_PATH': path,
            'C_WORKSPACE': self.workspace,
            'ARTIFACTS_SERVER_BASE_URL': self.artifact_server_base_url,
        })

        if 'bind_mounts' in self.debug_config:
            params['bind-mounts'] = self.debug_config['bind_mounts']

        if build_desc.get('uses_client', False):
            params.update({
                'JENKINS_COMMAND': build_desc['jenkins-command'],
                'CHANGES_CLIENT_ADAPTER': build_desc['adapter'],
                'CHANGES_CLIENT_SERVER': build_uri('/api/0'),
                'CHANGES_CLIENT_SNAPSHOT_BUCKET': snapshot_bucket,
                'CHANGES_CLIENT_SNAPSHOT_ID': snapshot_id,
                'CHANGES_CLIENT_LXC_PRE_LAUNCH': build_desc.get('pre-launch', default_pre),
                'CHANGES_CLIENT_LXC_POST_LAUNCH': build_desc.get('post-launch', default_post),
                'CHANGES_CLIENT_LXC_RELEASE': build_desc.get('release', default_release),
            })

        return params
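Jenkins' parameterized-build API ultimately consumes a list of {'name': ..., 'value': ...} pairs; the conversion is not part of this excerpt. A hypothetical helper, just to show how the dictionary returned above could be flattened:

# Hypothetical helper: flatten the parameter dict into the name/value list
# shape used for Jenkins job parameters.
def to_jenkins_parameters(params):
    return [{'name': k, 'value': v} for k, v in sorted(params.items())]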
Exemplo n.º 57
0
    def test_parent_and_new_failures(self, get_options, phab,
                                     get_base_failures):
        def get_test_desc(build, testcase, test_name):
            test_link = build_uri(
                '/projects/{0}/builds/{1}/jobs/{2}/tests/{3}/'.format(
                    build.project.slug, build.id.hex, testcase.job_id.hex,
                    testcase.id.hex))
            return "[%s](%s)" % (test_name, test_link)

        get_options.return_value = {'phabricator.notify': '1'}
        project = self.create_project(name='Server', slug='project-slug')
        self.assertEquals(phab.call_count, 0)
        patch = self.create_patch()
        source = self.create_source(project, revision_sha='1235', patch=patch)
        build = self.create_build(project,
                                  result=Result.failed,
                                  target='D1',
                                  source=source,
                                  status=Status.finished)
        job = self.create_job(build=build)
        testcase = self.create_test(
            package='test.group.ClassName',
            name='test.group.ClassName.test_foo',
            job=job,
            duration=134,
            result=Result.failed,
        )
        testcase2 = self.create_test(
            package='test.group.ClassName',
            name='test.group.ClassName.test_foo2',
            job=job,
            duration=134,
            result=Result.failed,
        )
        get_base_failures.return_value = {testcase.name}

        build_finished_handler(build_id=build.id.hex)

        get_options.assert_called_once_with(project.id)
        build_link = build_uri('/projects/{0}/builds/{1}/'.format(
            build.project.slug, build.id.hex))
        failure_link = build_uri(
            '/projects/{0}/builds/{1}/tests/?result=failed'.format(
                build.project.slug, build.id.hex))

        test_desc = get_test_desc(build, testcase, 'test_foo')
        test_desc2 = get_test_desc(build, testcase2, 'test_foo2')
        expected_msg = """Server build Failed {{icon times, color=red}} ([results]({0})). There were 1 new [test failures]({1})

**New failures (1):**
|Test Name | Package|
|--|--|
|{2}|test.group.ClassName|

**Failures in parent revision (1):**
|Test Name | Package|
|--|--|
|{3}|test.group.ClassName|"""

        phab.assert_called_once_with(
            '1',
            expected_msg.format(build_link, failure_link, test_desc2,
                                test_desc))
Exemplo n.º 58
0
def get_test_desc(build, testcase, test_name):
    test_link = build_uri(
        '/projects/{0}/builds/{1}/jobs/{2}/tests/{3}/'.format(
            build.project.slug, build.id.hex, testcase.job_id.hex,
            testcase.id.hex))
    return "[%s](%s)" % (test_name, test_link)
Exemplo n.º 59
0
    def get_buildstep_patch(self, source, workspace):
        return BASH_PATCH_STEP % dict(
            local_path=workspace,
            patch_url=build_uri('/api/0/patches/{0}/?raw=1'.format(
                source.patch_id.hex)),
        )
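The BASH_PATCH_STEP template itself is defined outside this excerpt. A hypothetical stand-in, assuming a git checkout, only to show which substitution keys it consumes:

# Hypothetical template; the real BASH_PATCH_STEP lives elsewhere and may
# differ. It must accept the local_path and patch_url keys supplied above.
BASH_PATCH_STEP = """
cd %(local_path)s
curl -fsSL "%(patch_url)s" | git apply
"""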
Exemplo n.º 60
0
def build_finished_handler(build_id, **kwargs):
    build = Build.query.get(build_id)
    if build is None:
        return

    if build.cause == Cause.snapshot:
        return

    if build.result != Result.passed:
        return

    url = current_app.config.get('GREEN_BUILD_URL')
    if not url:
        logger.info('GREEN_BUILD_URL not set')
        return

    auth = current_app.config['GREEN_BUILD_AUTH']
    if not auth:
        logger.info('GREEN_BUILD_AUTH not set')
        return

    source = build.source

    # we only want to identify stable revisions
    if not source.is_commit():
        logger.debug('Ignoring build due to non-commit: %s', build.id)
        return

    options = get_options(build.project_id)

    if options.get('green-build.notify', '1') != '1':
        logger.info('green-build.notify disabled for project: %s', build.project_id)
        return

    vcs = source.repository.get_vcs()
    if vcs is None:
        logger.info('Repository has no VCS set: %s', source.repository.id)
        return

    # ensure we have the latest changes
    if vcs.exists():
        vcs.update()
    else:
        vcs.clone()

    release_id = get_release_id(source, vcs)

    project = options.get('green-build.project') or build.project.slug

    logger.info('Making green_build request to %s', url)
    try:
        requests.post(url, auth=auth, data={
            'project': project,
            'id': release_id,
            'build_url': build_uri('/projects/{0}/builds/{1}/'.format(
                build.project.slug, build.id.hex)),
            'build_server': 'changes',
        })
    except Exception:
        logger.exception('Failed to report green build')
        status = 'fail'
    else:
        status = 'success'

    create_or_update(Event, where={
        'type': EventType.green_build,
        'item_id': build.id,
    }, values={
        'data': {
            'status': status,
        },
        'date_modified': datetime.utcnow(),
    })

    # set latest_green_build if latest for each branch:
    _set_latest_green_build_for_each_branch(build, source, vcs)