def _handle(self, *args, **options):
    if len(args) != 2:
        raise CommandError("Need to specify (only) branch and changeset")
    (project, changeset) = args

    # get reference to repo
    rdm = RefDataManager()
    repos = filter(lambda x: x['name'] == project,
                   rdm.get_all_repository_info())
    if not repos:
        raise CommandError("No project found named '%s'" % project)
    repo = repos[0]

    # make sure all tasks are run synchronously / immediately
    settings.CELERY_ALWAYS_EAGER = True

    # get hg pushlog
    pushlog_url = '%s/json-pushes/?full=1&version=2' % repo['url']

    # ingest this particular revision for this project
    process = HgPushlogProcess()
    # Use the actual push SHA, in case the changeset specified was a tag
    # or branch name (eg tip). HgPushlogProcess returns the full SHA.
    push_sha = process.run(pushlog_url, project, changeset=changeset)[:12]

    Builds4hJobsProcess().run(project_filter=project,
                              revision_filter=push_sha,
                              job_group_filter=options['filter_job_group'])
    PendingJobsProcess().run(project_filter=project,
                             revision_filter=push_sha,
                             job_group_filter=options['filter_job_group'])
    RunningJobsProcess().run(project_filter=project,
                             revision_filter=push_sha,
                             job_group_filter=options['filter_job_group'])
def _handle(self, *args, **options):
    if len(args) != 2:
        raise CommandError("Need to specify (only) branch and changeset")
    (project, changeset) = args

    # get reference to repo
    rdm = RefDataManager()
    repos = filter(lambda x: x['name'] == project,
                   rdm.get_all_repository_info())
    if not repos:
        raise CommandError("No project found named '%s'" % project)
    repo = repos[0]

    # make sure all tasks are run synchronously / immediately
    settings.CELERY_ALWAYS_EAGER = True

    # get hg pushlog
    pushlog_url = '%s/json-pushes/?full=1' % repo['url']

    # ingest this particular revision for this project
    process = HgPushlogProcess()
    process.run(pushlog_url, project, changeset=changeset)

    self._process_all_objects_for_project(project)

    Builds4hJobsProcess().run(filter_to_project=project,
                              filter_to_revision=changeset)
    PendingJobsProcess().run(filter_to_project=project,
                             filter_to_revision=changeset)
    RunningJobsProcess().run(filter_to_project=project,
                             filter_to_revision=changeset)

    self._process_all_objects_for_project(project)
def test_ingest_pending_jobs_1_missing_resultset(jm, initial_data,
                                                 sample_resultset,
                                                 test_repository,
                                                 mock_buildapi_pending_missing1_url,
                                                 mock_post_json,
                                                 mock_get_resultset,
                                                 mock_get_remote_content,
                                                 activate_responses):
    """
    Ensure the pending job with the missing resultset is queued for refetching
    """
    etl_process = PendingJobsProcess()
    _do_missing_resultset_test(jm, etl_process)
def test_ingest_pending_jobs(push_stored,
                             failure_classifications,
                             mock_buildapi_pending_url,
                             mock_log_parser):
    """
    a new buildapi pending job creates a new obj in the job table
    """
    etl_process = PendingJobsProcess()
    new_jobs_were_added = etl_process.run()
    assert new_jobs_were_added is True
    assert cache.get(CACHE_KEYS['pending']) == {24575179}
    new_jobs_were_added = etl_process.run()
    assert new_jobs_were_added is False

    assert Job.objects.count() == 1
def test_ingest_pending_jobs(jm, initial_data,
                             mock_buildapi_pending_url,
                             mock_post_json_data,
                             mock_log_parser,
                             mock_get_resultset,
                             mock_get_remote_content):
    """
    a new buildapi pending job creates a new obj in the job table
    """
    from treeherder.etl.buildapi import PendingJobsProcess
    etl_process = PendingJobsProcess()
    etl_process.run()

    stored_obj = jm.get_jobs_dhub().execute(proc="jobs_test.selects.jobs")

    jm.disconnect()

    assert len(stored_obj) == 1
def _handle(self, *args, **options):
    project = options['project']
    changeset = options['changeset']

    if not options['last_n_pushes'] and not changeset:
        raise CommandError('must specify --last-n-pushes or a positional '
                           'changeset argument')

    # get reference to repo
    repo = Repository.objects.get(name=project, active_status='active')

    if options['last_n_pushes']:
        last_push_id = last_push_id_from_server(repo)
        fetch_push_id = max(1, last_push_id - options['last_n_pushes'])
        logger.info('last server push id: %d; fetching push %d and newer'
                    % (last_push_id, fetch_push_id))
    else:
        fetch_push_id = None

    # make sure all tasks are run synchronously / immediately
    settings.CELERY_ALWAYS_EAGER = True

    # get hg pushlog
    pushlog_url = '%s/json-pushes/?full=1&version=2' % repo.url

    # ingest this particular revision for this project
    process = HgPushlogProcess()
    # Use the actual push SHA, in case the changeset specified was a tag
    # or branch name (eg tip). HgPushlogProcess returns the full SHA.
    push_sha = process.run(pushlog_url, project, changeset=changeset,
                           last_push_id=fetch_push_id)

    # Only perform additional processing if fetching a single changeset,
    # because we only have the sha1 of the tip-most push in "last N pushes"
    # mode and can't filter appropriately.
    if not fetch_push_id:
        group_filter = options['filter_job_group']
        Builds4hJobsProcess().run(project_filter=project,
                                  revision_filter=push_sha,
                                  job_group_filter=group_filter)
        PendingJobsProcess().run(project_filter=project,
                                 revision_filter=push_sha,
                                 job_group_filter=group_filter)
        RunningJobsProcess().run(project_filter=project,
                                 revision_filter=push_sha,
                                 job_group_filter=group_filter)
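# A usage sketch, assuming the handler above backs a Django management command;
# the command name "ingest_push", its add_arguments() registration, and the
# revision value below are illustrative assumptions, not taken from the code.
from django.core.management import call_command

# Ingest a single changeset, then its builds4h/pending/running buildapi jobs:
call_command('ingest_push', 'mozilla-central', '5fdb785cb90a')

# Ingest the last 10 pushes; per-revision job ingestion is skipped in this mode:
call_command('ingest_push', 'mozilla-central', last_n_pushes=10)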
def test_ingest_pending_jobs(jm, result_set_stored,
                             mock_buildapi_pending_url,
                             mock_log_parser):
    """
    a new buildapi pending job creates a new obj in the job table
    """
    etl_process = PendingJobsProcess()
    new_jobs_were_added = etl_process.run()
    assert new_jobs_were_added is True
    assert cache.get(CACHE_KEYS['pending']) == {24575179}
    new_jobs_were_added = etl_process.run()
    assert new_jobs_were_added is False

    stored_obj = jm.get_dhub().execute(proc="jobs_test.selects.jobs")
    assert len(stored_obj) == 1
def test_ingest_pending_jobs(jm, initial_data,
                             mock_buildapi_pending_url,
                             mock_post_json,
                             mock_log_parser,
                             mock_get_resultset,
                             mock_get_remote_content):
    """
    a new buildapi pending job creates a new obj in the job table
    """
    etl_process = PendingJobsProcess()
    new_jobs_were_added = etl_process.run()
    assert new_jobs_were_added is True
    assert cache.get(CACHE_KEYS['pending']) == set([24575179])
    new_jobs_were_added = etl_process.run()
    assert new_jobs_were_added is False

    stored_obj = jm.get_dhub().execute(proc="jobs_test.selects.jobs")
    jm.disconnect()
    assert len(stored_obj) == 1
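# A minimal, standalone sketch of the deduplication behaviour the tests above
# assert on: already-ingested job ids are remembered (the real code keeps them
# under CACHE_KEYS['pending'] in Django's cache), so a second run over the same
# buildapi payload adds nothing new. A plain dict stands in for the cache here,
# and run_pending()/_seen are illustrative names, not Treeherder APIs.
_seen = {'pending': set()}

def run_pending(job_ids):
    """Return True if any of job_ids had not been ingested on a prior run."""
    new_ids = set(job_ids) - _seen['pending']
    _seen['pending'] |= new_ids
    return bool(new_ids)

assert run_pending([24575179]) is True   # first run ingests the pending job
assert run_pending([24575179]) is False  # second run is a no-op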
def fetch_buildapi_pending():
    """
    Fetches the buildapi pending jobs API and loads the results
    """
    PendingJobsProcess().run()
def fetch_buildapi_pending():
    """
    Fetches the buildapi pending jobs API and loads the results into the
    objectstore ingestion endpoint
    """
    PendingJobsProcess().run()
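# A hedged sketch of how a task like fetch_buildapi_pending is typically run
# on a schedule with Celery beat; the dotted task path and the one-minute
# interval below are assumptions for illustration, not taken from the code above.
from datetime import timedelta

CELERYBEAT_SCHEDULE = {
    'fetch-buildapi-pending': {
        'task': 'treeherder.etl.tasks.fetch_buildapi_pending',  # hypothetical path
        'schedule': timedelta(minutes=1),
    },
}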