Example #1
def test_ingest_pulse_jobs(pulse_jobs, test_repository, push_stored,
                           failure_classifications, mock_log_parser):
    """
    Ingest a job through the JSON Schema validated JobLoader used by Pulse
    """

    jl = JobLoader()
    revision = push_stored[0]["revision"]
    for job in pulse_jobs:
        job["origin"]["revision"] = revision
        jl.process_job(job)

    jobs = Job.objects.all()
    assert len(jobs) == 5

    assert [job.taskcluster_metadata for job in jobs]
    assert set(TaskclusterMetadata.objects.values_list(
        'task_id', flat=True)) == set(['IYyscnNMTLuxzna7PNqUJQ',
                                       'XJCbbRQ6Sp-UL1lL-tw5ng',
                                       'ZsSzJQu3Q7q2MfehIBAzKQ',
                                       'bIzVZt9jQQKgvQYD3a2HQw'])

    job_logs = JobLog.objects.filter(job_id=1)
    assert job_logs.count() == 2
    logs_expected = [{"name": "builds-4h",
                      "url": "http://ftp.mozilla.org/pub/mozilla.org/spidermonkey/tinderbox-builds/mozilla-inbound-linux64/mozilla-inbound_linux64_spidermonkey-warnaserr-bm57-build1-build352.txt.gz",
                      "parse_status": 0},
                     {"name": "errorsummary_json",
                      "url": "http://mozilla-releng-blobs.s3.amazonaws.com/blobs/Mozilla-Inbound-Non-PGO/sha512/05c7f57df6583c6351c6b49e439e2678e0f43c2e5b66695ea7d096a7519e1805f441448b5ffd4cc3b80b8b2c74b244288fda644f55ed0e226ef4e25ba02ca466",
                      "parse_status": 0}]
    assert [{"name": item.name, "url": item.url, "parse_status": item.status}
            for item in job_logs.all()] == logs_expected

    assert JobDetail.objects.count() == 2
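
These snippets come from Treeherder's JobLoader tests and rely on shared fixtures (pulse_jobs, push_stored, first_job, and so on) plus module-level imports that are not reproduced here (JobLoader, MissingPushException, the Job/JobLog/JobDetail/TaskclusterMetadata models, pytest, slugid, uuid, responses). The tests only ever touch a few keys on each Pulse message, so the shape they imply is roughly the sketch below; the values are placeholders, not the fixture's actual data.

# Hypothetical sketch of a Pulse job message, reduced to the keys the tests
# mutate on the pulse_jobs fixture.  Values are illustrative placeholders.
sample_pulse_job = {
    "state": "completed",                    # other tests set "pending" or "unscheduled"
    "origin": {
        "project": "test_treeherder_jobs",   # repository name; an unknown project is skipped
        "revision": "1234567890123456789012345678901234567890",
    },
    "display": {
        "jobName": "spidermonkey-warnaserr", # one test swaps in a >100-character name
    },
}
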
Example #2
def test_ingest_pending_pulse_job(pulse_jobs, push_stored,
                                  failure_classifications, mock_log_parser):
    """
    Test that ingesting a pending job (1) works and (2) ingests the
    taskcluster metadata
    """
    jl = JobLoader()

    pulse_job = pulse_jobs[0]
    revision = push_stored[0]["revision"]
    pulse_job["origin"]["revision"] = revision
    pulse_job["state"] = "pending"
    jl.process_job(pulse_job, 'https://firefox-ci-tc.services.mozilla.com')

    jobs = Job.objects.all()
    assert len(jobs) == 1

    job = jobs[0]
    assert job.taskcluster_metadata
    assert job.taskcluster_metadata.task_id == 'IYyscnNMTLuxzna7PNqUJQ'

    # should not have processed any log or details for pending jobs
    assert JobLog.objects.count() == 2
    # we're no longer storing artifacts in this table
    assert JobDetail.objects.count() == 0
Example #3
def test_skip_unscheduled(first_job, failure_classifications,
                          mock_log_parser):
    jl = JobLoader()
    first_job["state"] = "unscheduled"
    jl.process_job(first_job, 'https://firefox-ci-tc.services.mozilla.com')

    assert not Job.objects.count()
Example #4
def test_skip_unscheduled(first_job, failure_classifications,
                          mock_log_parser):
    jl = JobLoader()
    first_job["state"] = "unscheduled"
    jl.process_job(first_job)

    assert not Job.objects.count()
Example #5
def test_ingest_pulse_jobs_with_missing_push(pulse_jobs):
    """
    Ingesting jobs with missing pushes should raise an exception
    """

    jl = JobLoader()
    job = pulse_jobs[0]
    job["origin"]["revision"] = "1234567890123456789012345678901234567890"

    with pytest.raises(MissingPushException):
        for pulse_job in pulse_jobs:
            jl.process_job(pulse_job)

    # If one job isn't ready, raise for the whole batch; the messages will be
    # retried as a task after the timeout.
    assert Job.objects.count() == 0
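
The comment above describes a batch-level failure mode: one missing push aborts the whole batch so it can be retried later. A consumer built around that contract might look roughly like the sketch below; retry_later is a hypothetical stand-in for whatever requeue mechanism is actually used, not a Treeherder API.

# Illustrative sketch only; `retry_later` is hypothetical.
def ingest_batch(pulse_jobs, retry_later):
    jl = JobLoader()
    try:
        for pulse_job in pulse_jobs:
            jl.process_job(pulse_job)
    except MissingPushException:
        # One job's push has not been ingested yet: give up on the whole
        # batch and let it be retried after the timeout.
        retry_later(pulse_jobs)
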
Example #6
def test_ingest_pulse_jobs_bad_project(pulse_jobs, test_repository, push_stored,
                                       failure_classifications, mock_log_parser):
    """
    Test that a pulse job with a bad repository is skipped while the others are ingested
    """

    jl = JobLoader()
    revision = push_stored[0]["revision"]
    job = pulse_jobs[0]
    job["origin"]["revision"] = revision
    job["origin"]["project"] = "ferd"

    for pulse_job in pulse_jobs:
        jl.process_job(pulse_job)

    # length of pulse jobs is 5, so one will be skipped due to bad project
    assert Job.objects.count() == 4
Example #7
def test_ingest_pulse_job_with_long_job_type_name(
    pulse_jobs, test_repository, push_stored, failure_classifications, mock_log_parser
):
    """
    Ingest a job through the JSON Schema validated JobLoader used by Pulse
    """
    job = pulse_jobs[0]
    jl = JobLoader()
    revision = push_stored[0]["revision"]
    job["display"][
        "jobName"
    ] = "this is a very long string that exceeds the 100 character size that was the previous limit by just a little bit"
    job["origin"]["revision"] = revision
    jl.process_job(job, 'https://firefox-ci-tc.services.mozilla.com')

    jobs = Job.objects.all()
    assert len(jobs) == 1
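
As a quick standalone check of the docstring's claim, the replacement jobName really does exceed 100 characters:

name = ("this is a very long string that exceeds the 100 character size "
        "that was the previous limit by just a little bit")
assert len(name) > 100  # 111 characters, just over the old limit
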
Example #8
def test_new_job_transformation(new_pulse_jobs, new_transformed_jobs, failure_classifications):
    jl = JobLoader()
    for message in new_pulse_jobs:
        # "task_id" which is not really the task_id
        job_guid = message["taskId"]
        (decoded_task_id, _) = job_guid.split("/")
        # As of slugid v2, slugid.encode() returns a string not bytestring under Python 3.
        taskId = slugid.encode(uuid.UUID(decoded_task_id))
        transformed_job = jl.process_job(message, 'https://firefox-ci-tc.services.mozilla.com')
        # Not all messages from Taskcluster will be processed
        if transformed_job:
            assert new_transformed_jobs[taskId] == transformed_job
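
The slugid handling above converts the uuid portion of the message's guid into the slug form used as a key into new_transformed_jobs. A self-contained round trip, assuming slugid v2's encode/decode pair:

import uuid
import slugid

# slugid v2: encode() takes a uuid.UUID and returns a 22-character str slug;
# decode() reverses it.
task_uuid = uuid.uuid4()
slug = slugid.encode(task_uuid)
assert isinstance(slug, str) and len(slug) == 22
assert slugid.decode(slug) == task_uuid
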
Example #9
def test_ingest_pulse_jobs_with_missing_push(pulse_jobs):
    """
    Ingesting jobs with missing pushes should raise an exception
    """

    jl = JobLoader()
    job = pulse_jobs[0]
    job["origin"]["revision"] = "1234567890123456789012345678901234567890"
    responses.add(
        responses.GET,
        "https://firefox-ci-tc.services.mozilla.com/api/queue/v1/task/IYyscnNMTLuxzna7PNqUJQ",
        json={},
        content_type='application/json',
        status=200,
    )

    with pytest.raises(ObjectDoesNotExist):
        for pulse_job in pulse_jobs:
            jl.process_job(pulse_job, 'https://firefox-ci-tc.services.mozilla.com')

    # If one job isn't ready, raise for the whole batch; the messages will be
    # retried as a task after the timeout.
    assert Job.objects.count() == 0
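
The responses.add() registration above only intercepts HTTP traffic while the responses mock is active; in the Treeherder suite that activation presumably comes from a fixture. A self-contained equivalent uses the library's activate decorator:

import requests
import responses

@responses.activate
def fetch_task_stub():
    # Register the stub inside the activated context, then hit the URL.
    url = "https://firefox-ci-tc.services.mozilla.com/api/queue/v1/task/IYyscnNMTLuxzna7PNqUJQ"
    responses.add(responses.GET, url, json={}, content_type='application/json', status=200)
    return requests.get(url).json()

assert fetch_task_stub() == {}
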
Example #10
def test_ingest_pending_pulse_job(pulse_jobs, push_stored,
                                  failure_classifications, mock_log_parser):
    """
    Test that ingesting a pending job (1) works and (2) ingests the
    taskcluster metadata
    """
    jl = JobLoader()

    pulse_job = pulse_jobs[0]
    revision = push_stored[0]["revision"]
    pulse_job["origin"]["revision"] = revision
    pulse_job["state"] = "pending"
    jl.process_job(pulse_job)

    jobs = Job.objects.all()
    assert len(jobs) == 1

    job = jobs[0]
    assert job.taskcluster_metadata
    assert job.taskcluster_metadata.task_id == 'IYyscnNMTLuxzna7PNqUJQ'

    # should not have processed any log or details for pending jobs
    assert JobLog.objects.count() == 2
    assert JobDetail.objects.count() == 2