def setUp(self):
    """Build a one-site scenario-damage job fixture backed by the KVS."""
    self.job = self.setup_classic_job(
        inputs=[("fragility", ""), ("exposure", "")])
    # Register the job as current, then clear any stale KVS data for it.
    kvs.mark_job_as_current(self.job.id)
    kvs.cache_gc(self.job.id)

    self.site = Site(1.0, 1.0)
    site_block = Block(self.job.id, BLOCK_ID, [self.site])
    site_block.to_kvs()

    # this region contains a single site, that is exactly
    # a site with longitude == 1.0 and latitude == 1.0
    config = {
        "REGION_VERTEX": "1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0",
        "REGION_GRID_SPACING": "0.5",
        "BASE_PATH": ".",
        "OUTPUT_DIR": ".",
    }

    self.job_ctxt = JobContext(config, self.job.id, oq_job=self.job)
    self.em = self._store_em()
    self._store_gmvs([0.40, 0.30, 0.45, 0.35, 0.40])
    self.calculator = ScenarioDamageRiskCalculator(self.job_ctxt)

    # just stubbing out some preprocessing stuff...
    for stub in ("store_exposure_assets", "store_fragility_model",
                 "partition"):
        setattr(ScenarioDamageRiskCalculator, stub, lambda self: None)
def tearDown(self):
    """Remove generated config files and purge KVS data for both jobs."""
    for path in self.generated_files:
        # Best-effort cleanup: the file may already be gone.
        try:
            os.remove(path)
        except OSError:
            pass
    for job_key in ('::JOB::1::', '::JOB::2::'):
        kvs.cache_gc(job_key)
def cleanup_after_job(job_id):
    """
    Release the resources used by an openquake job.

    :param job_id: the job id
    :type job_id: int
    """
    logging.info('Cleaning up after job %s', job_id)

    # Drop every KVS key belonging to this job.
    kvs.cache_gc(job_id)
def clear_job_data(job_id): """ Clear KVS cache data for the given job. This is done by searching in the KVS for keys matching a job key (derived from the job_id) and deleting each result. Invoked by the -j or --job command line arg. :param job_id: job ID as an integer """ try: job_id = int(job_id) except ValueError: print 'Job ID should be an integer.' print 'Use the --list option to show current jobs.' raise LOG.info('Attempting to clear cache data for job %s...' % job_id) result = kvs.cache_gc(job_id) if result is None: LOG.info('Job %s not found.' % job_id) else: LOG.info('Removed %s keys.' % result)
def test_gc_nonexistent_job(self):
    """
    If we try to run garbage collection on a nonexistent job, the
    result of :py:function:`openquake.kvs.cache_gc` should be None.
    """
    gc_result = kvs.cache_gc('1234nonexistent')
    self.assertTrue(gc_result is None)
def test_gc_dataless_job(self):
    """
    Test that :py:function:`openquake.kvs.cache_gc` returns 0 (to
    indicate that the job existed but there was nothing to delete).

    The job key should be removed from CURRENT_JOBS.
    """
    in_current = self.client.sismember(
        kvs.tokens.CURRENT_JOBS, self.dataless_job)
    self.assertTrue(in_current)

    self.assertEqual(0, kvs.cache_gc(self.dataless_job))

    # make sure the job was deleted from CURRENT_JOBS
    in_current = self.client.sismember(
        kvs.tokens.CURRENT_JOBS, self.dataless_job)
    self.assertFalse(in_current)
def test_gc_some_job_data(self):
    """
    Test that all job data is cleared and the job key is removed from
    CURRENT_JOBS.
    """
    deleted_count = kvs.cache_gc(self.test_job)

    # 3 things should have been deleted
    self.assertEqual(3, deleted_count)

    # make sure each piece of data was deleted
    remaining = [key for key in
                 (self.gmf1_key, self.gmf2_key, self.vuln_key)
                 if self.client.exists(key)]
    self.assertEqual([], remaining)

    # make sure the job was deleted from CURRENT_JOBS
    self.assertFalse(
        self.client.sismember(kvs.tokens.CURRENT_JOBS, self.test_job))
def tearDown(self):
    """Re-register the job as current, then purge its KVS data."""
    job = self.job_id
    kvs.mark_job_as_current(job)
    kvs.cache_gc(job)
def setUp(self):
    """Mark the test job as current and clear any leftover KVS data."""
    job = self.job_id
    kvs.mark_job_as_current(job)
    kvs.cache_gc(job)