Example #1
def test_skip_unscheduled(first_job, failure_classifications,
                          mock_log_parser):
    jl = JobLoader()
    first_job["state"] = "unscheduled"
    jl.process_job_list([first_job])

    assert not Job.objects.count()
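All of the examples in this listing come from Treeherder's JobLoader tests, captured at different revisions of the project, and are shown without their surrounding imports. As a rough sketch only (module paths moved between Treeherder versions, so treat the exact locations as assumptions), the ORM-based variants typically rely on imports along these lines:

import pytest

# Assumed module locations; they shifted between Treeherder revisions.
from treeherder.etl.job_loader import JobLoader, MissingPushException
from treeherder.model.models import (Job, JobDetail, JobLog, Push,
                                     Repository, TaskclusterMetadata)

The older variants additionally use the legacy datasource layer (the jm fixture, ArtifactsModel, DatasetNotFoundError); those imports are not reproduced here.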
Example #2
def test_skip_unscheduled(first_job, jm, mock_log_parser):
    jl = JobLoader()
    first_job["state"] = "unscheduled"
    jl.process_job_list([first_job])

    jobs = jm.get_job_list(0, 10)
    assert len(jobs) == 0
Example #3
def test_skip_unscheduled(first_job, jm):
    jl = JobLoader()
    first_job["state"] = "unscheduled"
    jl.process_job_list([first_job], raise_errors=True)

    jobs = jm.get_job_list(0, 10)
    assert len(jobs) == 0
Example #4
def test_skip_unscheduled(first_job, failure_classifications,
                          mock_log_parser):
    jl = JobLoader()
    first_job["state"] = "unscheduled"
    jl.process_job_list([first_job])

    assert not Job.objects.count()
Example #5
def test_ingest_pulse_jobs(pulse_jobs, test_repository, push_stored,
                           failure_classifications, mock_log_parser):
    """
    Ingest a job through the JSON Schema validated JobLoader used by Pulse
    """

    jl = JobLoader()
    revision = push_stored[0]["revision"]
    for job in pulse_jobs:
        job["origin"]["revision"] = revision

    jl.process_job_list(pulse_jobs)

    jobs = Job.objects.all()
    assert len(jobs) == 5

    assert [job.taskcluster_metadata for job in jobs]
    assert set(TaskclusterMetadata.objects.values_list(
        'task_id', flat=True)) == set(['IYyscnNMTLuxzna7PNqUJQ',
                                       'XJCbbRQ6Sp-UL1lL-tw5ng',
                                       'ZsSzJQu3Q7q2MfehIBAzKQ',
                                       'bIzVZt9jQQKgvQYD3a2HQw'])

    job_logs = JobLog.objects.filter(job_id=1)
    assert job_logs.count() == 2
    logs_expected = [{"name": "builds-4h",
                      "url": "http://ftp.mozilla.org/pub/mozilla.org/spidermonkey/tinderbox-builds/mozilla-inbound-linux64/mozilla-inbound_linux64_spidermonkey-warnaserr-bm57-build1-build352.txt.gz",
                      "parse_status": 0},
                     {"name": "errorsummary_json",
                      "url": "http://mozilla-releng-blobs.s3.amazonaws.com/blobs/Mozilla-Inbound-Non-PGO/sha512/05c7f57df6583c6351c6b49e439e2678e0f43c2e5b66695ea7d096a7519e1805f441448b5ffd4cc3b80b8b2c74b244288fda644f55ed0e226ef4e25ba02ca466",
                      "parse_status": 0}]
    assert [{"name": item.name, "url": item.url, "parse_status": item.status}
            for item in job_logs.all()] == logs_expected

    assert JobDetail.objects.count() == 2
Example #6
def test_ingest_pulse_jobs(pulse_jobs, test_project, jm, result_set_stored,
                           mock_log_parser):
    """
    Ingest a job through the JSON Schema validated JobLoader used by Pulse
    """

    jl = JobLoader()
    jl.process_job_list(pulse_jobs)

    jobs = jm.get_job_list(0, 10)
    assert len(jobs) == 4

    job_logs = JobLog.objects.filter(job__project_specific_id=jobs[0]["id"])
    assert job_logs.count() == 2
    logs_expected = [{"name": "builds-4h",
                      "url": "http://ftp.mozilla.org/pub/mozilla.org/spidermonkey/tinderbox-builds/mozilla-inbound-linux64/mozilla-inbound_linux64_spidermonkey-warnaserr-bm57-build1-build352.txt.gz",
                      "parse_status": 0},
                     {"name": "errorsummary_json",
                      "url": "http://mozilla-releng-blobs.s3.amazonaws.com/blobs/Mozilla-Inbound-Non-PGO/sha512/05c7f57df6583c6351c6b49e439e2678e0f43c2e5b66695ea7d096a7519e1805f441448b5ffd4cc3b80b8b2c74b244288fda644f55ed0e226ef4e25ba02ca466",
                      # Note that the test causes store_failure_lines to be
                      # run, which sets this to parsed.
                      "parse_status": 1}]
    assert [{"name": item.name, "url": item.url, "parse_status": item.status}
            for item in job_logs.all()] == logs_expected

    with ArtifactsModel(test_project) as am:
        artifacts = am.get_job_artifact_list(0, 10)
        assert len(artifacts) == 3

    assert JobDetail.objects.count() == 2
Example #7
def test_skip_unscheduled(first_job, jm, mock_log_parser):
    jl = JobLoader()
    first_job["state"] = "unscheduled"
    jl.process_job_list([first_job])

    jobs = jm.get_job_list(0, 10)
    assert len(jobs) == 0
Example #8
def test_skip_unscheduled(first_job, jm):
    jl = JobLoader()
    first_job["state"] = "unscheduled"
    jl.process_job_list([first_job], raise_errors=True)

    jobs = jm.get_job_list(0, 10)
    assert len(jobs) == 0
Example #9
def test_ingest_pulse_jobs_bad_project(pulse_jobs, test_project, jm,
                                       result_set_stored, mock_log_parser):
    """
    Ingest a job through the JSON Schema validated JobLoader used by Pulse
    """

    jl = JobLoader()
    revision = result_set_stored[0]["revision"]
    for job in pulse_jobs:
        job["origin"]["revision"] = revision
        job["origin"]["project"] = "ferd"

    with pytest.raises(Repository.DoesNotExist):
        jl.process_job_list(pulse_jobs)
Example #10
def test_ingest_pulse_jobs_bad_project(pulse_jobs, test_project, jm, result_set_stored,
                                       mock_log_parser):
    """
    Ingest a job through the JSON Schema validated JobLoader used by Pulse
    """

    jl = JobLoader()
    revision = result_set_stored[0]["revision"]
    for job in pulse_jobs:
        job["origin"]["revision"] = revision
        job["origin"]["project"] = "ferd"

    with pytest.raises(DatasetNotFoundError):
        jl.process_job_list(pulse_jobs)
Example #11
def test_ingest_pulse_jobs_bad_project(pulse_jobs, test_repository, result_set_stored,
                                       failure_classifications, mock_log_parser):
    """
    Ingest a job through the JSON Schema validated JobLoader used by Pulse
    """

    jl = JobLoader()
    revision = result_set_stored[0]["revision"]
    for job in pulse_jobs:
        job["origin"]["revision"] = revision
        job["origin"]["project"] = "ferd"

    with pytest.raises(Repository.DoesNotExist):
        jl.process_job_list(pulse_jobs)
Example #12
def test_ingest_pulse_jobs_bad_project(pulse_jobs, test_repository, result_set_stored,
                                       failure_classifications, mock_log_parser):
    """
    Test ingesting a pulse job with bad repo will skip, ingest others
    """

    jl = JobLoader()
    revision = result_set_stored[0]["revision"]
    job = pulse_jobs[0]
    job["origin"]["revision"] = revision
    job["origin"]["project"] = "ferd"

    jl.process_job_list(pulse_jobs)
    # length of pulse jobs is 5, so one will be skipped due to bad project
    assert Job.objects.count() == 4
Example #13
def test_ingest_pulse_jobs_with_missing_resultset(pulse_jobs):
    """
    Ingest jobs with missing resultsets, so they should throw an exception
    """

    jl = JobLoader()
    job = pulse_jobs[0]
    job["origin"]["revision"] = "1234567890123456789012345678901234567890"

    with pytest.raises(MissingPushException):
        jl.process_job_list(pulse_jobs)

    # if one job isn't ready, except on the whole batch.  They'll retry as a
    # task after the timeout.
    assert Job.objects.count() == 0
Example #14
def test_ingest_pulse_jobs_with_missing_resultset(pulse_jobs):
    """
    Ingest jobs with missing resultsets, so they should throw an exception
    """

    jl = JobLoader()
    job = pulse_jobs[0]
    job["origin"]["revision"] = "1234567890123456789012345678901234567890"

    with pytest.raises(MissingPushException):
        jl.process_job_list(pulse_jobs)

    # if one job isn't ready, except on the whole batch.  They'll retry as a
    # task after the timeout.
    assert Job.objects.count() == 0
Example #15
def test_ingest_pulse_jobs(pulse_jobs, test_repository, result_set_stored,
                           failure_classifications, mock_log_parser):
    """
    Ingest a job through the JSON Schema validated JobLoader used by Pulse
    """

    jl = JobLoader()
    revision = result_set_stored[0]["revision"]
    for job in pulse_jobs:
        job["origin"]["revision"] = revision

    jl.process_job_list(pulse_jobs)

    jobs = Job.objects.all()
    assert len(jobs) == 5

    assert [job.taskcluster_metadata for job in jobs]
    assert set(TaskclusterMetadata.objects.values_list(
        'task_id', flat=True)) == set([
            'IYyscnNMTLuxzna7PNqUJQ', 'XJCbbRQ6Sp-UL1lL-tw5ng',
            'ZsSzJQu3Q7q2MfehIBAzKQ', 'bIzVZt9jQQKgvQYD3a2HQw'
        ])

    job_logs = JobLog.objects.filter(job_id=1)
    assert job_logs.count() == 2
    logs_expected = [
        {
            "name": "builds-4h",
            "url":
            "http://ftp.mozilla.org/pub/mozilla.org/spidermonkey/tinderbox-builds/mozilla-inbound-linux64/mozilla-inbound_linux64_spidermonkey-warnaserr-bm57-build1-build352.txt.gz",
            "parse_status": 0
        },
        {
            "name": "errorsummary_json",
            "url":
            "http://mozilla-releng-blobs.s3.amazonaws.com/blobs/Mozilla-Inbound-Non-PGO/sha512/05c7f57df6583c6351c6b49e439e2678e0f43c2e5b66695ea7d096a7519e1805f441448b5ffd4cc3b80b8b2c74b244288fda644f55ed0e226ef4e25ba02ca466",
            # Note that the test causes store_failure_lines to be
            # run, which sets this to parsed.
            "parse_status": 1
        }
    ]
    assert [{
        "name": item.name,
        "url": item.url,
        "parse_status": item.status
    } for item in job_logs.all()] == logs_expected

    assert JobDetail.objects.count() == 2
Example #16
def test_ingest_pulse_jobs_bad_project(pulse_jobs, test_repository,
                                       push_stored, failure_classifications,
                                       mock_log_parser):
    """
    Test ingesting a pulse job with bad repo will skip, ingest others
    """

    jl = JobLoader()
    revision = push_stored[0]["revision"]
    job = pulse_jobs[0]
    job["origin"]["revision"] = revision
    job["origin"]["project"] = "ferd"

    jl.process_job_list(pulse_jobs)
    # length of pulse jobs is 5, so one will be skipped due to bad project
    assert Job.objects.count() == 4
Example #17
def test_ingest_pulse_jobs(pulse_jobs, test_project, jm, result_set_stored,
                           mock_log_parser):
    """
    Ingest a job through the JSON Schema validated JobLoader used by Pulse
    """

    jl = JobLoader()
    jl.process_job_list(pulse_jobs)

    jobs = jm.get_job_list(0, 10)
    assert len(jobs) == 4

    logs = jm.get_job_log_url_list([jobs[0]["id"]])
    assert len(logs) == 1
    with ArtifactsModel(test_project) as am:
        artifacts = am.get_job_artifact_list(0, 10)
        assert len(artifacts) == 4
Example #18
def test_ingest_pulse_jobs(pulse_jobs, test_project, jm, result_set_stored,
                           mock_log_parser):
    """
    Ingest a job through the JSON Schema validated JobLoader used by Pulse
    """

    jl = JobLoader()
    jl.process_job_list(pulse_jobs)

    jobs = jm.get_job_list(0, 10)
    assert len(jobs) == 4

    logs = jm.get_job_log_url_list([jobs[0]["id"]])
    assert len(logs) == 1
    with ArtifactsModel(test_project) as am:
        artifacts = am.get_job_artifact_list(0, 10)
        assert len(artifacts) == 4
Example #19
def test_ingest_pulse_jobs(pulse_jobs, test_project, jm, result_set_stored,
                           mock_log_parser):
    """
    Ingest a job through the JSON Schema validated JobLoader used by Pulse
    """

    jl = JobLoader()
    revision = result_set_stored[0]["revision"]
    for job in pulse_jobs:
        job["origin"]["revision"] = revision

    jl.process_job_list(pulse_jobs)

    jobs = jm.get_job_list(0, 10)
    assert len(jobs) == 4

    job_logs = JobLog.objects.filter(job__project_specific_id=jobs[0]["id"])
    assert job_logs.count() == 2
    logs_expected = [
        {
            "name": "builds-4h",
            "url":
            "http://ftp.mozilla.org/pub/mozilla.org/spidermonkey/tinderbox-builds/mozilla-inbound-linux64/mozilla-inbound_linux64_spidermonkey-warnaserr-bm57-build1-build352.txt.gz",
            "parse_status": 0
        },
        {
            "name": "errorsummary_json",
            "url":
            "http://mozilla-releng-blobs.s3.amazonaws.com/blobs/Mozilla-Inbound-Non-PGO/sha512/05c7f57df6583c6351c6b49e439e2678e0f43c2e5b66695ea7d096a7519e1805f441448b5ffd4cc3b80b8b2c74b244288fda644f55ed0e226ef4e25ba02ca466",
            # Note that the test causes store_failure_lines to be
            # run, which sets this to parsed.
            "parse_status": 1
        }
    ]
    assert [{
        "name": item.name,
        "url": item.url,
        "parse_status": item.status
    } for item in job_logs.all()] == logs_expected

    with ArtifactsModel(test_project) as am:
        artifacts = am.get_job_artifact_list(0, 10)
        assert len(artifacts) == 2

    assert JobDetail.objects.count() == 2
Example #20
def test_ingest_pulse_jobs_with_revision_hash(pulse_jobs, test_project, jm,
                                              result_set_stored,
                                              mock_log_parser):
    """
    Ingest a revision_hash job with the JobLoader used by Pulse
    """

    jl = JobLoader()
    revision_hash = Push.objects.values_list('revision_hash',
                                             flat=True).get(id=1)
    for job in pulse_jobs:
        origin = job["origin"]
        del(origin["revision"])
        origin["revision_hash"] = revision_hash

    jl.process_job_list(pulse_jobs)

    assert Job.objects.count() == 4
Example #21
def test_ingest_pulse_jobs_with_revision_hash(pulse_jobs, test_project, jm,
                                              result_set_stored,
                                              mock_log_parser):
    """
    Ingest a revision_hash job with the JobLoader used by Pulse
    """

    jl = JobLoader()
    revision_hash = Push.objects.values_list('revision_hash',
                                             flat=True).get(id=1)
    for job in pulse_jobs:
        origin = job["origin"]
        del (origin["revision"])
        origin["revision_hash"] = revision_hash

    jl.process_job_list(pulse_jobs)

    assert Job.objects.count() == 4
Example #22
def test_ingest_pulse_jobs_with_revision_hash(pulse_jobs, test_project, jm,
                                              result_set_stored,
                                              mock_log_parser):
    """
    Ingest a revision_hash job with the JobLoader used by Pulse
    """

    jl = JobLoader()
    rs = jm.get_result_set_list(0, 10)[0]
    revision_hash = rs["revision_hash"]
    for job in pulse_jobs:
        origin = job["origin"]
        del (origin["revision"])
        origin["revision_hash"] = revision_hash

    jl.process_job_list(pulse_jobs)

    assert Job.objects.count() == 4
Example #23
def test_ingest_pulse_jobs_with_revision_hash(pulse_jobs, test_project, jm,
                                              result_set_stored,
                                              mock_log_parser):
    """
    Ingest a revision_hash job with the JobLoader used by Pulse
    """

    jl = JobLoader()
    rs = jm.get_result_set_list(0, 10)[0]
    revision_hash = rs["revision_hash"]
    for job in pulse_jobs:
        origin = job["origin"]
        del(origin["revision"])
        origin["revision_hash"] = revision_hash

    jl.process_job_list(pulse_jobs)

    assert Job.objects.count() == 4
Example #24
def test_ingest_pulse_jobs(pulse_jobs, test_project, jm, result_set_stored,
                           mock_log_parser):
    """
    Ingest a job through the JSON Schema validated JobLoader used by Pulse
    """

    jl = JobLoader()
    jl.process_job_list(pulse_jobs)

    jobs = jm.get_job_list(0, 10)
    assert len(jobs) == 4

    assert JobLog.objects.filter(job__project_specific_id=jobs[0]["id"]).count() == 1
    with ArtifactsModel(test_project) as am:
        artifacts = am.get_job_artifact_list(0, 10)
        assert len(artifacts) == 3

    assert JobDetail.objects.count() == 2
Example #25
def test_ingest_pulse_jobs(pulse_jobs, test_project, jm, result_set_stored,
                           mock_log_parser):
    """
    Ingest a job through the JSON Schema validated JobLoader used by Pulse
    """

    jl = JobLoader()
    jl.process_job_list(pulse_jobs)

    jobs = jm.get_job_list(0, 10)
    assert len(jobs) == 4

    assert JobLog.objects.filter(
        job__project_specific_id=jobs[0]["id"]).count() == 1
    with ArtifactsModel(test_project) as am:
        artifacts = am.get_job_artifact_list(0, 10)
        assert len(artifacts) == 3

    assert JobDetail.objects.count() == 2
Example #26
def test_ingest_pulse_job(sample_data, test_project, jm, result_set_stored):
    """
    Ingest a job through the JSON Schema validated JobLoader used by Pulse
    """
    revision = result_set_stored[0]["revisions"][0]["revision"]
    sample_jobs = sample_data.pulse_jobs
    for job in sample_jobs:
        job["origin"]["project"] = test_project
        job["origin"]["revision"] = revision

    jl = JobLoader()
    jl.process_job_list(sample_jobs, raise_errors=True)

    jobs = jm.get_job_list(0, 10)
    assert len(jobs) == 3

    logs = jm.get_job_log_url_list([jobs[0]["id"]])
    assert len(logs) == 1
    with ArtifactsModel(test_project) as am:
        artifacts = am.get_job_artifact_list(0, 10)
        assert len(artifacts) == 2
Example #27
def test_ingest_pulse_job(sample_data, test_project, jm, result_set_stored):
    """
    Ingest a job through the JSON Schema validated JobLoader used by Pulse
    """
    revision = result_set_stored[0]["revisions"][0]["revision"]
    sample_jobs = sample_data.pulse_jobs
    for job in sample_jobs:
        job["origin"]["project"] = test_project
        job["origin"]["revision"] = revision

    jl = JobLoader()
    jl.process_job_list(sample_jobs, raise_errors=True)

    jobs = jm.get_job_list(0, 10)
    assert len(jobs) == 3

    logs = jm.get_job_log_url_list([jobs[0]["id"]])
    assert len(logs) == 1
    with ArtifactsModel(test_project) as am:
        artifacts = am.get_job_artifact_list(0, 10)
        assert len(artifacts) == 2
Example #28
def test_ingest_pending_pulse_job(pulse_jobs, result_set_stored,
                                  failure_classifications, mock_log_parser):
    """
    Test that ingesting a pending job (1) works and (2) ingests the
    taskcluster metadata
    """
    jl = JobLoader()

    pulse_job = pulse_jobs[0]
    revision = result_set_stored[0]["revision"]
    pulse_job["origin"]["revision"] = revision
    pulse_job["state"] = "pending"
    jl.process_job_list([pulse_job])

    jobs = Job.objects.all()
    assert len(jobs) == 1

    job = jobs[0]
    assert job.taskcluster_metadata
    assert job.taskcluster_metadata.task_id == 'IYyscnNMTLuxzna7PNqUJQ'

    # should not have processed any log or details for pending jobs
    assert JobLog.objects.count() == 0
    assert JobDetail.objects.count() == 0
Example #29
def test_ingest_pending_pulse_job(pulse_jobs, result_set_stored,
                                  failure_classifications, mock_log_parser):
    """
    Test that ingesting a pending job (1) works and (2) ingests the
    taskcluster metadata
    """
    jl = JobLoader()

    pulse_job = pulse_jobs[0]
    revision = result_set_stored[0]["revision"]
    pulse_job["origin"]["revision"] = revision
    pulse_job["state"] = "pending"
    jl.process_job_list([pulse_job])

    jobs = Job.objects.all()
    assert len(jobs) == 1

    job = jobs[0]
    assert job.taskcluster_metadata
    assert job.taskcluster_metadata.task_id == 'IYyscnNMTLuxzna7PNqUJQ'

    # log references and details are stored even for pending jobs here
    assert JobLog.objects.count() == 2
    assert JobDetail.objects.count() == 2