def task_priority_etl_to_solr(guid, buid, name):
    """Priority ETL of a single job source into Solr; retries the task on failure."""
    try:
        import_jobs.update_job_source(guid, buid, name, clear_cache=True)
    except Exception as e:
        logging.error("Error loading jobs for jobsource: %s", guid)
        logging.exception(e)
        raise task_priority_etl_to_solr.retry(exc=e)
def task_etl_to_solr(guid, buid, name):
    """ETL a single job source into Solr; retries the task on failure."""
    try:
        import_jobs.update_job_source(guid, buid, name)
    except Exception as e:
        logging.error("Error loading jobs for jobsource: %s", guid)
        logging.exception(e)
        raise task_etl_to_solr.retry()
def task_priority_etl_to_solr(guid, buid, name):
    """Priority ETL of a job source into Solr, recording the import result."""
    try:
        import_jobs.update_job_source(guid, buid, name)
        BusinessUnit.clear_cache(int(buid))
        ImportRecord(buid=int(buid), success=True).save()
    except Exception as e:
        logging.error("Error loading jobs for jobsource: %s", guid)
        logging.exception(e)
        ImportRecord(buid=int(buid), success=False).save()
        raise task_priority_etl_to_solr.retry(exc=e)
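# The tasks above depend on a task queue's retry mechanism (the
# ``raise task.retry(exc=e)`` idiom matches Celery's API). Below is a
# minimal, hypothetical sketch of how such a task could be declared and
# queued; the decorator arguments, retry limits, and the name
# ``example_etl_to_solr`` are assumptions for illustration, not taken
# from the project's actual configuration.
import logging

from celery import shared_task


@shared_task(bind=True, default_retry_delay=300, max_retries=3)
def example_etl_to_solr(self, guid, buid, name):
    """Hypothetical task mirroring the retry pattern used above."""
    try:
        # ``import_jobs`` is the project's own module, referenced here only
        # to mirror the call made by the real tasks.
        import_jobs.update_job_source(guid, buid, name)
    except Exception as e:
        logging.exception("Error loading jobs for jobsource: %s", guid)
        # retry() raises celery.exceptions.Retry, re-queueing this task.
        raise self.retry(exc=e)

# A caller would typically queue the work asynchronously, e.g.:
#   example_etl_to_solr.delay(guid, buid, name)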
def test_update_job_source(self, mock_jobsfs):
    mock_jobsfs.return_value = open(self.zipfile, 'rb')

    count = self.conn.search('*:*').hits
    self.assertEqual(count, 0,
                     "Jobs for buid in solr before the test. "
                     "Cannot guarantee correct behavior.")
    self.assertEqual(self.businessunit.associated_jobs, 4,
                     "Initial job count does not match the factory.")

    update_job_source(self.guid, self.buid, self.name)

    # The expected count is one lower than the number of jobs in the feed
    # because one job is filtered out by its include_in_index_bit.
    count = self.conn.search('buid:%s' % self.buid).hits
    self.assertEqual(count, 27,
                     "Expected 27 jobs in solr after update_job_source, found %s" % count)

    associated_jobs = BusinessUnit.objects.get(id=self.buid).associated_jobs
    self.assertEqual(associated_jobs, 27,
                     "Job count not updated after import: expected 27, was %s" % associated_jobs)