def test_autoclassified_after_manual_classification(test_user,
                                                    test_job_2,
                                                    text_log_errors_failure_lines,
                                                    failure_classifications, bugs):
    """Manually classifying a job should mark the matching autoclassification
    as the best classification and flag it as verified."""
    error_lines, failure_lines = create_lines(test_job_2, [(test_line, {})])
    bug = bugs.first()

    BugJobMap.create(job_id=test_job_2.id, bug_id=bug.id, user=test_user)
    JobNote.objects.create(job=test_job_2,
                           failure_classification_id=4,
                           user=test_user,
                           text="")

    # Pull fresh state from the DB for every row touched by the classification.
    for err, fail in zip(error_lines, failure_lines):
        err.refresh_from_db()
        err.metadata.refresh_from_db()
        fail.refresh_from_db()

    first_error = error_lines[0]
    first_failure = failure_lines[0]

    assert first_error.matches.count() == 1
    assert first_error.metadata.best_classification == first_error.classified_failures.first()
    assert first_error.metadata.best_is_verified

    assert first_failure.error.matches.count() == 1
    assert (first_failure.error.metadata.best_classification ==
            first_failure.error.classified_failures.first())
    assert first_failure.text_log_error_metadata.best_is_verified
# Example n. 2
def test_update_autoclassification_bug(test_job, test_job_2,
                                       classified_failures):
    """Mapping a bug onto a job should only propagate the bug number to the
    classified failure once the association is unambiguous."""
    target = classified_failures[0]
    user = User.objects.create()

    # Attach some TextLogErrors to test_job_2.
    errors, _ = create_lines(test_job_2, [(test_line, {})])

    # Job 1 has two failure lines, so nothing should be updated yet.
    assert target.bug_number is None

    # Map a bug onto the first job.
    BugJobMap.create(job_id=test_job.id, bug_id=1234, user=user)
    mark_best_classification(errors[0], target)
    assert target.bug_number is None

    metadata = TextLogErrorMetadata.objects.get(
        text_log_error__step__job=test_job_2)
    metadata.failure_line = FailureLine.objects.get(pk=3)
    metadata.save()

    # Mapping the same bug onto the second job should set bug_number.
    BugJobMap.create(job_id=test_job_2.id, bug_id=1234, user=user)
    target.refresh_from_db()
    assert target.bug_number == 1234
# Example n. 3
def test_bug_job_map_delete(
    client, eleven_jobs_stored, test_repository, test_user, test_no_auth, bugs
):
    """Deleting a bug/job mapping should succeed only for authenticated users."""
    job = Job.objects.first()
    bug = bugs[0]

    BugJobMap.create(job_id=job.id, bug_id=bug.id, user=test_user)

    if not test_no_auth:
        client.force_authenticate(user=test_user)

    resp = client.delete(
        reverse(
            "bug-job-map-detail",
            kwargs={
                "project": test_repository.name,
                "pk": "{0}-{1}".format(job.id, bug.id),
            },
        )
    )

    if test_no_auth:
        # Unauthenticated deletes are rejected and the mapping survives.
        assert resp.status_code == 403
        assert BugJobMap.objects.count() == 1
    else:
        assert json.loads(resp.content) == {"message": "Bug job map deleted"}
        assert BugJobMap.objects.count() == 0
def test_autoclassified_after_manual_classification(
        test_user, test_job_2, text_log_errors_failure_lines,
        failure_classifications, bugs):
    """Manual classification of a job should verify its autoclassification."""
    created_errors, created_failures = create_lines(test_job_2, [(test_line, {})])

    BugJobMap.create(job_id=test_job_2.id, bug_id=bugs.first().id,
                     user=test_user)
    JobNote.objects.create(
        job=test_job_2, failure_classification_id=4, user=test_user, text="")

    # Reload everything that the classification step may have mutated.
    for tle, fl in zip(created_errors, created_failures):
        tle.refresh_from_db()
        tle.metadata.refresh_from_db()
        fl.refresh_from_db()

    error = created_errors[0]
    failure = created_failures[0]

    assert error.matches.count() == 1
    assert error.metadata.best_classification == error.classified_failures.first()
    assert error.metadata.best_is_verified

    assert failure.error.matches.count() == 1
    assert failure.error.metadata.best_classification == \
        failure.error.classified_failures.first()
    assert failure.text_log_error_metadata.best_is_verified
def test_update_autoclassification_bug(test_job, test_job_2, classified_failures):
    """A bug number is only written to the classified failure after the
    bug has been mapped onto the second job as well."""
    cf = classified_failures[0]
    sheriff = User.objects.create()

    # Create some TextLogErrors attached to test_job_2.
    errors, _ = create_lines(test_job_2, [(test_line, {})])

    # Job 1 has two failure lines, so nothing should be updated.
    assert cf.bug_number is None

    BugJobMap.create(job_id=test_job.id, bug_id=1234, user=sheriff)
    mark_best_classification(errors[0], cf)
    assert cf.bug_number is None

    meta = TextLogErrorMetadata.objects.get(text_log_error__step__job=test_job_2)
    meta.failure_line = FailureLine.objects.get(pk=3)
    meta.save()

    BugJobMap.create(job_id=test_job_2.id, bug_id=1234, user=sheriff)
    cf.refresh_from_db()
    assert cf.bug_number == 1234
# Example n. 6
def test_bug_job_map_delete(client, eleven_jobs_stored, test_repository,
                            test_user, test_no_auth, bugs):
    """Exercise the DELETE endpoint for bug/job mappings, with and
    without authentication."""
    job, bug = Job.objects.first(), bugs[0]

    BugJobMap.create(job_id=job.id, bug_id=bug.id, user=test_user)

    if not test_no_auth:
        client.force_authenticate(user=test_user)

    url = reverse("bug-job-map-detail", kwargs={
        "project": test_repository.name,
        "pk": "{0}-{1}".format(job.id, bug.id),
    })
    resp = client.delete(url)

    if test_no_auth:
        # Anonymous callers get 403 and the mapping must remain.
        assert resp.status_code == 403
        assert BugJobMap.objects.count() == 1
    else:
        assert json.loads(resp.content) == {"message": "Bug job map deleted"}
        assert BugJobMap.objects.count() == 0
# Example n. 7
    def create(self, request, project):
        """Add a new relation between a job and a bug."""
        message = "Bug job map saved"
        try:
            BugJobMap.create(
                job_id=int(request.data['job_id']),
                bug_id=int(request.data['bug_id']),
                user=request.user,
            )
        except IntegrityError:
            # A mapping for this (job, bug) pair already exists; treat as no-op.
            message = "Bug job map skipped: mapping already exists"

        return Response({"message": message})
# Example n. 8
def test_bug_job_map_list(client, test_repository, eleven_jobs_stored, test_user, bugs):
    """The list endpoint should honour repeated ``job_id`` query parameters."""
    jobs = Job.objects.all()[:10]

    expected = []
    for index, job in enumerate(jobs):
        mapping = BugJobMap.create(job_id=job.id, bug_id=bugs[index].id,
                                   user=test_user)
        expected.append({
            "job_id": job.id,
            "bug_id": bugs[index].id,
            "created": mapping.created.isoformat(),
            "who": test_user.email,
        })

    # Verify that the API works with different combinations of job_id params.
    for start, end in [(0, 1), (0, 2), (0, 9)]:
        resp = client.get(
            reverse("bug-job-map-list", kwargs={"project": test_repository.name}),
            data={'job_id': [j.id for j in jobs[start:end]]},
        )
        assert resp.status_code == 200
        assert resp.json() == expected[start:end]
# Example n. 9
def test_bug_job_map_detail(client, eleven_jobs_stored, test_repository,
                            test_user, bugs):
    """
    test retrieving a single bug_job_map object via the detail endpoint
    """
    # Fix: the original initialised ``expected = list()`` and then shadowed
    # it with a dict before any use — dead code, removed.
    job = Job.objects.first()
    bug = bugs[0]

    bjm = BugJobMap.create(
        job_id=job.id,
        bug_id=bug.id,
        user=test_user,
    )

    # Detail endpoint PKs are composite "<job_id>-<bug_id>" strings.
    pk = "{0}-{1}".format(job.id, bug.id)

    resp = client.get(
        reverse("bug-job-map-detail",
                kwargs={
                    "project": test_repository.name,
                    "pk": pk
                }))
    assert resp.status_code == 200

    expected = {
        "job_id": job.id,
        "bug_id": bug.id,
        "created": bjm.created.isoformat(),
        "who": test_user.email,
    }
    assert resp.json() == expected
# Example n. 10
def test_bug_job_map_detail(client, eleven_jobs_stored, test_repository,
                            test_user, bugs):
    """
    test retrieving a single bug_job_map object via the detail endpoint
    """
    # Fix: dropped the dead ``expected = list()`` assignment, which was
    # unconditionally replaced by a dict before ever being read.
    job = Job.objects.first()
    bug = bugs[0]

    bjm = BugJobMap.create(
        job_id=job.id,
        bug_id=bug.id,
        user=test_user,
    )

    # Detail endpoint PKs are composite "<job_id>-<bug_id>" strings.
    pk = "{0}-{1}".format(job.id, bug.id)

    resp = client.get(
        reverse("bug-job-map-detail", kwargs={
            "project": test_repository.name,
            "pk": pk
        })
    )
    assert resp.status_code == 200

    expected = {
        "job_id": job.id,
        "bug_id": bug.id,
        "created": bjm.created.isoformat(),
        "who": test_user.email
    }
    assert resp.json() == expected
# Example n. 11
def test_bug_job_map_list(client, test_repository, eleven_jobs_stored, test_user, bugs):
    """Listing bug/job mappings should filter on the given job_id parameters."""
    jobs = Job.objects.all()[:10]

    expected = []
    for idx, job in enumerate(jobs):
        created_map = BugJobMap.create(job_id=job.id,
                                       bug_id=bugs[idx].id,
                                       user=test_user)
        expected.append(dict(job_id=job.id,
                             bug_id=bugs[idx].id,
                             created=created_map.created.isoformat(),
                             who=test_user.email))

    # The endpoint must accept any combination of job_id query parameters.
    list_url = reverse("bug-job-map-list",
                       kwargs={"project": test_repository.name})
    for lo, hi in ((0, 1), (0, 2), (0, 9)):
        resp = client.get(list_url,
                          data={'job_id': [j.id for j in jobs[lo:hi]]})
        assert resp.status_code == 200
        assert resp.json() == expected[lo:hi]
# Example n. 12
    def create(self, request, project):
        """Add a new relation between a job and a bug."""
        job_id = int(request.data['job_id'])
        bug_id = int(request.data['bug_id'])

        try:
            BugJobMap.create(job_id=job_id, bug_id=bug_id, user=request.user)
        except IntegrityError:
            # Duplicate (job, bug) pair — report a skip instead of failing.
            return Response(
                {"message": "Bug job map skipped: mapping already exists"})

        return Response({"message": "Bug job map saved"})
# Example n. 13
def bug_data(eleven_jobs_stored, test_repository, test_push, bugs):
    """Fixture: map the first bug onto the first job and return query context."""
    from treeherder.model.models import Job, BugJobMap, Option

    jobs = Job.objects.all()
    bug_id = bugs[0].id
    BugJobMap.create(job_id=jobs[0].id, bug_id=bug_id)

    return {
        'tree': test_repository.name,
        'option': Option.objects.first(),
        'bug_id': bug_id,
        'job': jobs[0],
        'jobs': jobs,
        'query_string': '?startday=2012-05-09&endday=2018-05-10&tree={}'.format(
            test_repository.name),
    }
# Example n. 14
def bug_data(eleven_jobs_stored, test_repository, test_push, bugs):
    """Fixture: link bug[0] to the first stored job and bundle query context."""
    from treeherder.model.models import (Job,
                                         BugJobMap,
                                         Option)
    all_jobs = Job.objects.all()
    first_bug_id = bugs[0].id
    BugJobMap.create(job_id=all_jobs[0].id, bug_id=first_bug_id)

    tree = test_repository.name
    return {
        'tree': tree,
        'option': Option.objects.first(),
        'bug_id': first_bug_id,
        'job': all_jobs[0],
        'jobs': all_jobs,
        'query_string': '?startday=2012-05-09&endday=2018-05-10&tree={}'.format(tree)
    }
# Example n. 15
    def autoclassify_failures(self, failures, classification):
        """For every failing task flagged with ``autoclassify``, attach an
        intermittent JobNote and link the matching single tracking bugs.

        Raises if a flagged task has no corresponding Job row.
        """
        for task_group in failures.values():
            for task in task_group:
                if not task.get("autoclassify"):
                    # Only tasks explicitly flagged should be autoclassified.
                    continue

                tracking_bugs = []
                for test_name in task.get("tests", []):
                    try:
                        found = Bugscache.objects.get(
                            summary__endswith=f"{test_name} | single tracking bug")
                    except Bugscache.DoesNotExist:
                        logger.info(
                            "No single tracking Bugzilla bug found for test name: %s",
                            test_name,
                        )
                    else:
                        tracking_bugs.append(found)

                if not tracking_bugs:
                    # No associated Bugzilla bug exists; skip this task.
                    continue

                # Retrieve the Job backing this Taskcluster task.
                try:
                    job = Job.objects.get(
                        taskcluster_metadata__task_id=task["task_id"])
                except Job.DoesNotExist:
                    logger.error(
                        "Job associated to the TC task %s does not exist and could not be autoclassified.",
                        task["task_id"],
                    )
                    raise

                # Add an "autoclassified intermittent" note on the job.
                JobNote.objects.create(
                    job=job,
                    failure_classification=classification,
                    text=
                    "Autoclassified by mozci bot as an intermittent failure",
                )

                # Link the job to every relevant single tracking bug.
                BugJobMap.objects.bulk_create(
                    [BugJobMap(job=job, bug_id=b.id) for b in tracking_bugs],
                    ignore_conflicts=True)
    def handle(self, *args, **options):
        """Migrate bug/job maps and job notes from each legacy per-project
        datasource MySQL DB into the unified Django models.

        Datasources with no matching Repository are skipped with a warning.
        """
        # Hoisted out of the per-note loop: the original rebuilt this set
        # for every row.
        printable = set(string.printable)

        for ds in Datasource.objects.all():
            self.stdout.write('{}\n'.format(ds.project))
            try:
                repository = Repository.objects.get(name=ds.project)
            # Fix: was a bare `except:`, which would also swallow unrelated
            # errors (including KeyboardInterrupt); narrowed to the lookup
            # failure actually being handled.
            except Repository.DoesNotExist:
                self.stderr.write(
                    'No repository for datasource project {}, skipping\n'.
                    format(ds.project))
                continue

            db_options = settings.DATABASES['default'].get('OPTIONS', {})
            db = MySQLdb.connect(
                host=settings.DATABASES['default']['HOST'],
                db=ds.name,
                user=settings.DATABASES['default']['USER'],
                passwd=settings.DATABASES['default'].get('PASSWORD') or '',
                **db_options)
            c = db.cursor()

            #
            # Migrate bug job map
            #
            c.execute(
                """SELECT job_id, bug_id, submit_timestamp, who from bug_job_map"""
            )
            ds_bug_job_maps = c.fetchall()

            (job_id_mapping, email_mapping) = self._get_mappings(
                repository,
                {bjm[0] for bjm in ds_bug_job_maps},
                {bjm[3] for bjm in ds_bug_job_maps})

            # migrate everything in one big bulk transaction (there aren't
            # that many)
            migrated_bug_job_maps = []
            for (ds_job_id, ds_bug_id, ds_timestamp,
                 ds_email) in ds_bug_job_maps:
                if not job_id_mapping.get(ds_job_id):
                    self.stderr.write(
                        "WARNING: job id {} not found when migrating bug job map, skipping\n"
                        .format(ds_job_id))
                    continue
                migrated_bug_job_maps.append(
                    BugJobMap(
                        job_id=job_id_mapping[ds_job_id],
                        bug_id=ds_bug_id,
                        user_id=email_mapping.get(ds_email),
                        created=datetime.datetime.fromtimestamp(ds_timestamp)))
            BugJobMap.objects.bulk_create(migrated_bug_job_maps)

            #
            # Migrate job notes
            #
            c.execute(
                """SELECT job_id, failure_classification_id, who, note, note_timestamp from job_note"""
            )
            ds_job_notes = c.fetchall()

            (job_id_mapping, email_mapping) = self._get_mappings(
                repository,
                {jn[0] for jn in ds_job_notes},
                {jn[2] for jn in ds_job_notes})
            migrated_job_notes = []
            for (ds_job_id, ds_failure_classification_id, ds_email,
                 ds_note_text, ds_timestamp) in ds_job_notes:
                if not job_id_mapping.get(ds_job_id):
                    self.stderr.write(
                        "WARNING: job id {} not found when migrating job notes, skipping\n"
                        .format(ds_job_id))
                    continue
                # Fix: on Python 3, `filter(...)` returns a lazy filter
                # object, not a str, so the old code would have stored a
                # filter object in JobNote.text. Join explicitly to keep
                # only printable characters as a string.
                ds_note_text = ''.join(
                    ch for ch in ds_note_text if ch in printable)
                migrated_job_notes.append(
                    JobNote(
                        job_id=job_id_mapping[ds_job_id],
                        failure_classification_id=ds_failure_classification_id,
                        user_id=email_mapping.get(ds_email),
                        text=ds_note_text,
                        created=datetime.datetime.fromtimestamp(ds_timestamp)))
            JobNote.objects.bulk_create(migrated_job_notes)