def log_percent_complete(job_id, ctype):
    """Log a message when the percentage completed changed for a calculation.

    :param int job_id: identifier of the job in question
    :param str ctype: calculation type, one of: hazard, risk
    :returns: the integer percentage complete (0..100), 0 when no progress
        has been recorded yet, or -1 for an unknown calculation type
    """
    if ctype not in ("hazard", "risk"):
        LOG.warn("Unknown calculation type: '%s'" % ctype)
        return -1

    key = "nhzrd_total" if ctype == "hazard" else "nrisk_total"
    total = stats.pk_get(job_id, key)
    key = "nhzrd_done" if ctype == "hazard" else "nrisk_done"
    done = stats.pk_get(job_id, key)

    if done <= 0 or total <= 0:
        return 0

    # Store the percentage complete as an integer in order to avoid
    # reporting the same percentage more than once.  Cap it at 100 so a
    # 'done' counter that overshoots 'total' (e.g. retried tasks) never
    # reports more than 100% complete.
    percent_complete = min(int(done * 100.0 / total), 100)

    # Get the last value reported
    lvr = stats.pk_get(job_id, "lvr")

    # Only report the percentage completed if it is above the last value
    # shown
    if percent_complete > lvr:
        LOG.progress("%s %3d%% complete" % (ctype, percent_complete),
                     indent=True)
        stats.pk_set(job_id, "lvr", percent_complete)

    return percent_complete
def log_percent_complete(job_id, ctype):
    """Emit a progress log line when the completed percentage has grown.

    :param int job_id: identifier of the job in question
    :param str ctype: calculation type, one of: hazard, risk
    """
    if ctype not in ("hazard", "risk"):
        LOG.warn("Unknown calculation type: '%s'" % ctype)
        return -1

    if ctype == "hazard":
        total_key, done_key = "nhzrd_total", "nhzrd_done"
    else:
        total_key, done_key = "nrisk_total", "nrisk_done"
    total = stats.pk_get(job_id, total_key)
    done = stats.pk_get(job_id, done_key)

    if done <= 0 or total <= 0:
        return 0

    # Percentages are tracked as integers so that the same value is never
    # reported more than once.
    one_percent = total / 100.0
    percent_complete = int(done / one_percent)

    # The last value reported so far.
    lvr = stats.pk_get(job_id, "lvr")

    # Report only when we moved past the last percentage shown.
    if percent_complete > lvr:
        LOG.progress("%s %3d%% complete" % (ctype, percent_complete),
                     indent=True)
        stats.pk_set(job_id, "lvr", percent_complete)

    return percent_complete
def test_pk_get_with_existing_incremental(self):
    """The correct value is obtained for an existing predefined key."""
    the_job = 92
    counter = "cblock"
    kvs_key = stats.key_name(the_job, *stats.STATS_KEYS[counter])
    stats.delete_job_counters(the_job)
    conn = self.connect()
    conn.set(kvs_key, 929)
    # NOTE(review): pk_get()'s return value is discarded here; the
    # assertion below only checks that the stored kvs value is left
    # unchanged by the read.
    stats.pk_get(the_job, counter)
    self.assertEqual("929", conn.get(kvs_key))
def test_initialize_progress(self):
    # Tests that the progress counters have been initialized properly.
    self.calculator.pre_execute()
    total = 2  # expected
    self.calculator._initialize_progress(total)
    # The total counter reflects the value passed in.
    self.assertEqual(total, stats.pk_get(
        self.calculator.job.id, "nrisk_total"))
    self.assertEqual({'VF': 2}, self.calculator.taxonomies)
    # Nothing is done yet.
    done = stats.pk_get(self.calculator.job.id, "nrisk_done")
    self.assertEqual(0, done)
def test_initialize_progress(self):
    # Tests that the progress counters have been initialized properly.
    self.calculator.pre_execute()
    total = 2  # expected
    self.calculator._initialize_progress(total)
    job_id = self.calculator.job.id
    # The total counter reflects the value passed in.
    self.assertEqual(total, stats.pk_get(job_id, "nrisk_total"))
    self.assertEqual({'VF': 2}, self.calculator.taxonomies_asset_count)
    # Nothing is done yet.
    self.assertEqual(0, stats.pk_get(job_id, "nrisk_done"))
def test_initialize_pr_data(self):
    # The total/done counters for progress reporting are initialized
    # correctly.
    self.calc.initialize_sources()
    self.calc.initialize_realizations(
        rlz_callbacks=[self.calc.initialize_hazard_curve_progress])
    first, second = models.LtRealization.objects.filter(
        hazard_calculation=self.job.hazard_calculation.id).order_by("id")
    first.completed_items = 11
    first.save()

    self.calc.initialize_pr_data()

    self.assertEqual(
        first.total_items + second.total_items,
        stats.pk_get(self.calc.job.id, "nhzrd_total"))
    self.assertEqual(
        first.completed_items + second.completed_items,
        stats.pk_get(self.calc.job.id, "nhzrd_done"))
def test_initialize_pr_data_with_ses(self):
    # The total/done counters are initialized correctly for SES records.
    hc = self.job.hazard_calculation
    # Initialize sources as a setup for the test:
    self.calc.initialize_sources()
    self.calc.initialize_realizations(
        rlz_callbacks=[self.calc.initialize_ses_db_records])
    first, second = models.LtRealization.objects.filter(
        hazard_calculation=hc).order_by("id")
    first.completed_items = 12
    first.save()

    self.calc.initialize_pr_data()

    self.assertEqual(
        first.total_items + second.total_items,
        stats.pk_get(self.calc.job.id, "nhzrd_total"))
    self.assertEqual(
        first.completed_items + second.completed_items,
        stats.pk_get(self.calc.job.id, "nhzrd_done"))
def test_failure_stats(self):
    """
    The failure counter is incremented when the wrapped function
    terminates by raising an exception.
    """
    @stats.count_progress("r")
    def boom(job_id, items):
        raise NotImplementedError

    # No counter exists before the call ...
    self.assertIsNone(stats.pk_get(22, "nrisk_failed"))
    # ... the wrapped function raises ...
    self.assertRaises(NotImplementedError, boom, 22, range(6))
    # ... and afterwards the failure counter reflects the item count.
    self.assertEqual(6, stats.pk_get(22, "nrisk_failed"))
def test_success_stats(self):
    """
    The success counter is incremented when the wrapped function
    terminates without raising an exception.
    """
    @stats.count_progress("h")
    def quiet(job_id, items):
        return 999

    # No counter exists before the call ...
    self.assertIsNone(stats.pk_get(11, "nhzrd_done"))
    # ... the wrapped function succeeds ...
    self.assertEqual(999, quiet(11, range(5)))
    # ... and afterwards the done counter reflects the item count.
    self.assertEqual(5, stats.pk_get(11, "nhzrd_done"))
def test_initialize_pr_data_with_gmf(self):
    # The total/done counters are initialized correctly for GMF records.
    hc = self.job.hazard_calculation
    # Initialize sources as a setup for the test:
    self.calc.initialize_sources()
    self.calc.initialize_realizations(
        rlz_callbacks=[self.calc.initialize_ses_db_records])
    first, second = models.LtRealization.objects.filter(
        hazard_calculation=hc).order_by("id")
    first.completed_items = 13
    first.save()

    self.calc.initialize_pr_data()

    self.assertEqual(
        first.total_items + second.total_items,
        stats.pk_get(self.calc.job.id, "nhzrd_total"))
    self.assertEqual(
        first.completed_items + second.completed_items,
        stats.pk_get(self.calc.job.id, "nhzrd_done"))
def test_gc_clears_stats(self):
    # redis garbage collection should clear stats counters as well
    counters = {'nhzrd_total': 10, 'nhzrd_done': 7, 'nhzrd_failed': 3}
    for name, value in counters.items():
        stats.pk_set(self.test_job, name, value)
    # Sanity check: the counters are actually set.
    for name, value in counters.items():
        self.assertEqual(value, stats.pk_get(self.test_job, name))

    # 6 keys should be deleted, including the stats keys:
    self.assertEqual(6, kvs.cache_gc(self.test_job))

    # explicitly test that the stats keys are deleted
    for name in counters:
        self.assertIsNone(stats.pk_get(self.test_job, name))