def test_ingest_running_to_complete_job(result_set_stored, failure_classifications,
                                        mock_buildapi_running_url,
                                        mock_buildapi_builds4h_url,
                                        mock_log_parser):
    """A buildapi running job transitions into a completed job."""
    RunningJobsProcess().run()
    assert Job.objects.count() == 1

    # The first job in the sample data overwrites the running job we just
    # ingested, leaving 32 jobs rather than 33.
    Builds4hJobsProcess().run()
    assert Job.objects.count() == 32

    # Every job should now be completed, including the one that
    # transitioned from running.
    for job in Job.objects.all():
        assert job.state == 'completed'
def test_ingest_running_to_complete_job(jm, result_set_stored,
                                        mock_buildapi_running_url,
                                        mock_buildapi_builds4h_url,
                                        mock_log_parser):
    """A buildapi running job transitions into a completed job."""
    RunningJobsProcess().run()
    running_rows = jm.get_dhub().execute(proc="jobs_test.selects.jobs")
    assert len(running_rows) == 1

    # The first job in the sample data overwrites the running job we just
    # ingested, leaving 32 jobs rather than 33.
    Builds4hJobsProcess().run()
    final_rows = jm.get_dhub().execute(proc="jobs_test.selects.jobs")
    assert len(final_rows) == 32

    # Every job should now be completed, including the one that
    # transitioned from running.
    for row in final_rows:
        assert row['state'] == 'completed'
def test_ingest_running_to_complete_job(jm, mock_buildapi_running_url,
                                        mock_buildapi_builds4h_url,
                                        mock_log_parser,
                                        mock_get_resultset):
    """A buildapi running job transitions into a completed job."""
    RunningJobsProcess().run()
    running_rows = jm.get_dhub().execute(proc="jobs_test.selects.jobs")
    assert len(running_rows) == 1

    # The first job in the sample data overwrites the running job we just
    # ingested, leaving 32 jobs rather than 33.
    Builds4hJobsProcess().run()
    final_rows = jm.get_dhub().execute(proc="jobs_test.selects.jobs")
    assert len(final_rows) == 32

    # Every job should now be completed, including the one that
    # transitioned from running.
    for row in final_rows:
        assert row['state'] == 'completed'
def test_ingest_running_job_fields(push_stored, failure_classifications,
                                   mock_buildapi_running_url,
                                   mock_log_parser):
    """A new buildapi running job creates a new row in the job table."""
    RunningJobsProcess().run()

    assert Job.objects.count() == 1
    # A real (non-epoch-zero) start time must have been recorded.
    assert time.mktime(Job.objects.all()[0].start_time.timetuple()) > 0
def test_ingest_running_job_fields(result_set_stored, failure_classifications,
                                   mock_buildapi_running_url,
                                   mock_log_parser):
    """A new buildapi running job creates a new row in the job table."""
    RunningJobsProcess().run()

    assert Job.objects.count() == 1
    # A real (non-epoch-zero) start time must have been recorded.
    assert time.mktime(Job.objects.all()[0].start_time.timetuple()) > 0
def test_ingest_running_job_fields(jm, result_set_stored,
                                   mock_buildapi_running_url,
                                   mock_log_parser):
    """A new buildapi running job creates a new row in the job table."""
    etl_process = RunningJobsProcess()
    etl_process.run()

    stored_obj = jm.get_dhub().execute(proc="jobs_test.selects.jobs")

    assert len(stored_obj) == 1
    # BUG FIX: the original used ``is not 0``, which tests object identity
    # rather than value and only passed by accident of CPython small-int
    # interning. Value comparison is what is meant here.
    assert stored_obj[0]["start_timestamp"] != 0
def test_ingest_running_job_fields(jm, mock_buildapi_running_url,
                                   mock_log_parser, mock_get_resultset):
    """A new buildapi running job creates a new row in the job table."""
    etl_process = RunningJobsProcess()
    etl_process.run()

    stored_obj = jm.get_dhub().execute(proc="jobs_test.selects.jobs")

    assert len(stored_obj) == 1
    # BUG FIX: the original used ``is not 0``, which tests object identity
    # rather than value and only passed by accident of CPython small-int
    # interning. Value comparison is what is meant here.
    assert stored_obj[0]["start_timestamp"] != 0
def test_ingest_running_job_fields(jm, initial_data,
                                   mock_buildapi_running_url,
                                   mock_post_json, mock_log_parser,
                                   mock_get_resultset, mock_fetch_json):
    """A new buildapi running job creates a new row in the job table."""
    etl_process = RunningJobsProcess()
    etl_process.run()

    stored_obj = jm.get_dhub().execute(proc="jobs_test.selects.jobs")
    jm.disconnect()

    assert len(stored_obj) == 1
    # BUG FIX: the original used ``is not 0``, which tests object identity
    # rather than value and only passed by accident of CPython small-int
    # interning. Value comparison is what is meant here.
    assert stored_obj[0]["start_timestamp"] != 0
def test_ingest_running_jobs(jm, initial_data, mock_buildapi_running_url,
                             mock_post_json_data, mock_log_parser,
                             mock_get_resultset, mock_get_remote_content):
    """A new buildapi running job creates a new row in the job table."""
    from treeherder.etl.buildapi import RunningJobsProcess

    RunningJobsProcess().run()

    rows = jm.get_jobs_dhub().execute(proc="jobs_test.selects.jobs")
    jm.disconnect()

    assert len(rows) == 1
def test_ingest_running_jobs(push_stored, failure_classifications,
                             mock_buildapi_running_url, mock_log_parser):
    """A new buildapi running job creates a new row in the job table."""
    etl_process = RunningJobsProcess()

    # First run ingests the job and records its id in the cache.
    assert etl_process.run() is True
    assert cache.get(CACHE_KEYS['running']) == {24767134}

    # A second run sees the cached id and ingests nothing new.
    assert etl_process.run() is False
    assert Job.objects.count() == 1
def test_ingest_running_jobs(jm, result_set_stored,
                             mock_buildapi_running_url, mock_log_parser):
    """A new buildapi running job creates a new row in the job table."""
    etl_process = RunningJobsProcess()

    # First run ingests the job and records its id in the cache.
    assert etl_process.run() is True
    assert cache.get(CACHE_KEYS['running']) == {24767134}

    # A second run sees the cached id and ingests nothing new.
    assert etl_process.run() is False

    rows = jm.get_dhub().execute(proc="jobs_test.selects.jobs")
    assert len(rows) == 1
def test_ingest_running_to_complete_job(jm, initial_data,
                                        mock_buildapi_running_url,
                                        mock_buildapi_builds4h_url,
                                        mock_post_json_data,
                                        mock_log_parser,
                                        mock_get_resultset,
                                        mock_get_remote_content):
    """A buildapi running job transitions into a completed job.

    Also ensures a running job does NOT go through the objectstore.
    """
    from treeherder.etl.buildapi import RunningJobsProcess
    from treeherder.etl.buildapi import Builds4hJobsProcess

    RunningJobsProcess().run()

    running_rows = jm.get_jobs_dhub().execute(
        proc="jobs_test.selects.jobs")
    objectstore_rows = jm.get_os_dhub().execute(
        proc="objectstore_test.selects.all")

    # Running jobs bypass the objectstore and land directly in the jobs
    # table without needing process_objects.
    assert len(objectstore_rows) == 0
    assert len(running_rows) == 1

    # The first job in the sample data overwrites the running job we just
    # ingested, leaving 20 jobs rather than 21.
    Builds4hJobsProcess().run()
    jm.process_objects(20)

    final_rows = jm.get_jobs_dhub().execute(
        proc="jobs_test.selects.jobs")
    jm.disconnect()

    assert len(final_rows) == 20

    # Every job should now be completed, including the one that
    # transitioned from running.
    for row in final_rows:
        assert row['state'] == 'completed'
def test_ingest_running_jobs(result_set_stored, failure_classifications,
                             mock_buildapi_running_url, mock_log_parser):
    """A new buildapi running job creates a new row in the job table."""
    etl_process = RunningJobsProcess()

    # First run ingests the job and records its id in the cache.
    assert etl_process.run() is True
    assert cache.get(CACHE_KEYS['running']) == {24767134}

    # A second run sees the cached id and ingests nothing new.
    assert etl_process.run() is False
    assert Job.objects.count() == 1
def test_ingest_running_jobs(jm, mock_buildapi_running_url,
                             mock_log_parser, mock_get_resultset):
    """A new buildapi running job creates a new row in the job table."""
    etl_process = RunningJobsProcess()

    # First run ingests the job and records its id in the cache.
    assert etl_process.run() is True
    assert cache.get(CACHE_KEYS['running']) == {24767134}

    # A second run sees the cached id and ingests nothing new.
    assert etl_process.run() is False

    rows = jm.get_dhub().execute(proc="jobs_test.selects.jobs")
    assert len(rows) == 1
def test_ingest_running_jobs(jm, initial_data, mock_buildapi_running_url,
                             mock_post_json, mock_log_parser,
                             mock_get_resultset, mock_get_remote_content):
    """A new buildapi running job creates a new row in the job table."""
    etl_process = RunningJobsProcess()

    # First run ingests the job and records its id in the cache.
    new_jobs_were_added = etl_process.run()
    assert new_jobs_were_added is True
    # Idiom fix: use a set literal instead of the dated set([...]) call.
    assert cache.get(CACHE_KEYS['running']) == {24767134}

    # A second run sees the cached id and ingests nothing new.
    new_jobs_were_added = etl_process.run()
    assert new_jobs_were_added is False

    stored_obj = jm.get_dhub().execute(proc="jobs_test.selects.jobs")
    jm.disconnect()
    assert len(stored_obj) == 1
def test_ingest_running_jobs(jm, initial_data, mock_buildapi_running_url,
                             mock_post_json_data, mock_log_parser,
                             mock_get_resultset, mock_get_remote_content):
    """A new buildapi running job creates a new row in the job table."""
    from treeherder.etl.buildapi import RunningJobsProcess

    RunningJobsProcess().run()

    rows = jm.get_jobs_dhub().execute(proc="jobs_test.selects.jobs")
    jm.disconnect()

    assert len(rows) == 1