def test_complex_fault_demo_hazard_nrml_written_once(self):
    """
    Run the `complex_fault_demo_hazard` demo and verify that the
    NRML files are written only once.
    """

    def filter_multi():
        """Filter and return files that were written more than once."""
        counts = defaultdict(int)
        # `fname` rather than `file` so the builtin is not shadowed.
        for fname in stats.kvs_op("lrange", key, 0, -1):
            counts[fname] += 1
        return [(f, c) for f, c in counts.iteritems() if c > 1]

    job_cfg = helpers.demo_file(os.path.join(
        "complex_fault_demo_hazard", "config.gem"))

    helpers.run_job(job_cfg, ['--output-type=xml'])

    self.job = models.OqJob.objects.latest("id")

    # Check curve writes first, then map writes; a missing key means
    # the corresponding debug counter was never created.
    key = stats.key_name(
        self.job.id, *stats.STATS_KEYS["hcls_xmlcurvewrites"])
    if key:
        multi_writes = filter_multi()
        self.assertFalse(multi_writes, str(multi_writes))
    key = stats.key_name(
        self.job.id, *stats.STATS_KEYS["hcls_xmlmapwrites"])
    if key:
        multi_writes = filter_multi()
        self.assertFalse(multi_writes, str(multi_writes))
def test_complex_fault_demo_hazard_nrml_written_once(self):
    """
    Run the `complex_fault_demo_hazard` demo and verify that the
    NRML files are written only once.
    """

    def filter_multi():
        """Filter and return files that were written more than once."""
        counts = defaultdict(int)
        # `fname` rather than `file` so the builtin is not shadowed.
        for fname in stats.kvs_op("lrange", key, 0, -1):
            counts[fname] += 1
        return [(f, c) for f, c in counts.iteritems() if c > 1]

    job_cfg = helpers.demo_file(os.path.join(
        "complex_fault_demo_hazard", "config.gem"))

    helpers.run_job(job_cfg, output="xml")

    self.job = models.OqCalculation.objects.latest("id")

    # Check curve writes first, then map writes; a missing key means
    # the corresponding debug counter was never created.
    key = stats.key_name(
        self.job.id, *stats.STATS_KEYS["hcls_xmlcurvewrites"])
    if key:
        multi_writes = filter_multi()
        self.assertFalse(multi_writes, str(multi_writes))
    key = stats.key_name(
        self.job.id, *stats.STATS_KEYS["hcls_xmlmapwrites"])
    if key:
        multi_writes = filter_multi()
        self.assertFalse(multi_writes, str(multi_writes))
def test_get_value_with_non_existent_total(self):
    """`None` is returned for a non-existent total counter."""
    params = (57, "h", "d/c/z", "t")
    conn = self.connect()
    # Neither the raw kvs entry nor the stats API should yield a value.
    self.assertIs(None, conn.get(stats.key_name(*params)))
    self.assertIs(None, stats.get_counter(*params))
def test_get_value_with_existent_total(self):
    """The expected value is returned for an existent total counter."""
    params = (58, "h", "d/d/z", "t")
    stored = "582"
    conn = self.connect()
    conn.set(stats.key_name(*params), stored)
    # get_counter() converts the stored string to an int.
    self.assertEqual(int(stored), stats.get_counter(*params))
def test_set_total(self):
    """
    The total value is set for the given key
    """
    conn = self.connect()
    stats.set_total(33, "h", "a/b/c", 123)
    # Totals live under a 't' (totals) counter-type key; values are
    # stored as strings in the kvs.
    total_key = stats.key_name(33, "h", "a/b/c", "t")
    self.assertEqual("123", conn.get(total_key))
def test_pk_set_with_existing_total(self):
    """The value is set correctly for an existing predefined key."""
    job_id, pkey = 71, "blocks"
    stats.delete_job_counters(job_id)
    conn = self.connect()
    stats.pk_set(job_id, pkey, 717)
    # The predefined key maps to a concrete kvs key via STATS_KEYS.
    counter_key = stats.key_name(job_id, *stats.STATS_KEYS[pkey])
    self.assertEqual("717", conn.get(counter_key))
def test_pk_inc_with_existing_incremental(self):
    """The value is incremented for an existing predefined key."""
    job_id, pkey = 82, "cblock"
    stats.delete_job_counters(job_id)
    conn = self.connect()
    stats.pk_inc(job_id, pkey)
    # Starting from a clean slate, a single increment yields "1".
    counter_key = stats.key_name(job_id, *stats.STATS_KEYS[pkey])
    self.assertEqual("1", conn.get(counter_key))
def test_pk_get_with_existing_debug_and_debug_stats_enabled(self):
    """The value is obtained correctly for an existing debug counter."""
    job_id, pkey = 94, "hcls_xmlcurvewrites"
    stats.delete_job_counters(job_id)
    # Debug counters are only read when debug stats are enabled.
    with helpers.patch("openquake.utils.stats.debug_stats_enabled") as dse:
        dse.return_value = True
        counter_key = stats.key_name(job_id, *stats.STATS_KEYS[pkey])
        self.connect().set(counter_key, 949)
        self.assertEqual(949, stats.pk_get(job_id, pkey))
def test_pk_get_with_existing_incremental(self):
    """The correct value is obtained for an existing predefined key."""
    job_id = 92
    pkey = "cblock"
    key = stats.key_name(job_id, *stats.STATS_KEYS[pkey])
    stats.delete_job_counters(job_id)
    kvs = self.connect()
    kvs.set(key, 929)
    # The original discarded pk_get()'s return value and merely re-read
    # the raw key it had just set, so a broken pk_get() would still
    # pass. Assert the value actually obtained (pk_get() returns an
    # int, matching the debug-counter test's expectation).
    self.assertEqual(929, stats.pk_get(job_id, pkey))
    # Reading the counter must leave the underlying entry untouched.
    self.assertEqual("929", kvs.get(key))
def test_incr_counter(self):
    """
    The counter is incremented for the given key
    """
    params = (44, "h", "d/x/z", "i")
    conn = self.connect()
    counter_key = stats.key_name(*params)
    # Snapshot the current value (missing counter counts as zero).
    before = conn.get(counter_key)
    before = int(before) if before else 0
    stats.incr_counter(*params[:-1])
    after = int(conn.get(counter_key))
    self.assertEqual(1, after - before)
def test_get_value_with_debug_stats_enabled_but_no_value(self):
    """
    `None` is returned for a debug counter if debug stats are enabled
    but the counter has no value.
    """
    params = (61, "h", "d/g/z", "d")
    stats.delete_job_counters(params[0])
    with helpers.patch("openquake.utils.stats.debug_stats_enabled") as dse:
        dse.return_value = True
        conn = self.connect()
        # No value was ever written: both access paths must see None.
        self.assertIs(None, conn.get(stats.key_name(*params)))
        self.assertIs(None, stats.get_counter(*params))
def test_get_value_with_debug_stats_enabled(self):
    """
    The correct value is returned for a debug counter if debug stats
    are enabled.
    """
    params = (60, "h", "d/f/z", "d")
    stored = "603"
    with helpers.patch("openquake.utils.stats.debug_stats_enabled") as dse:
        dse.return_value = True
        conn = self.connect()
        conn.set(stats.key_name(*params), stored)
        # get_counter() converts the stored string to an int.
        self.assertEqual(int(stored), stats.get_counter(*params))
def test_delete_job_counters_resets_counters(self):
    """
    The progress indication counters for a given job are reset.
    """
    conn = self.connect()
    specs = [(66, "h", "g/h/i", "i"), (66, "h", "j/k/l", "i")]
    for spec in specs:
        stats.incr_counter(*spec[:-1])
    stats.delete_job_counters(66)
    # After the reset a single increment must take each counter
    # back to "1".
    for spec in specs:
        stats.incr_counter(*spec[:-1])
        self.assertEqual("1", conn.get(stats.key_name(*spec)))
def test_success_stats(self):
    """
    The success counter is incremented when the wrapped function
    terminates without raising an exception.
    """

    @stats.progress_indicator
    def no_exception(job_id):
        return 999

    conn = self.connect()
    counter_key = stats.key_name(11, no_exception.__name__)
    # Snapshot the current value (missing counter counts as zero).
    before = conn.get(counter_key)
    before = int(before) if before else 0
    # Invoke the decorated function, then check the counter delta.
    self.assertEqual(999, no_exception(11))
    self.assertEqual(1, int(conn.get(counter_key)) - before)
def test_failure_stats(self):
    """
    The failure counter is incremented when the wrapped function
    raises an exception.
    """
    @stats.progress_indicator
    def raise_exception(job_id):
        raise NotImplementedError
    kvs = self.connect()
    # Failure counters live under the base key with a ":f" suffix.
    key = stats.key_name(22, raise_exception.__name__) + ":f"
    # Snapshot the current value (missing counter counts as zero).
    previous_value = kvs.get(key)
    previous_value = int(previous_value) if previous_value else 0
    # Call the wrapped function.
    self.assertRaises(NotImplementedError, raise_exception, 22)
    value = int(kvs.get(key))
    self.assertEqual(1, (value - previous_value))
def test_success_stats(self):
    """
    The success counter is incremented when the wrapped function
    terminates without raising an exception.
    """
    area = "aaa"

    @stats.progress_indicator(area)
    def no_exception(job_id):
        return 999

    conn = self.connect()
    # Success counters use the plain function name, 'i' counter type.
    counter_key = stats.key_name(11, area, no_exception.__name__, "i")
    before = conn.get(counter_key)
    before = int(before) if before else 0
    # Invoke the decorated function, then check the counter delta.
    self.assertEqual(999, no_exception(11))
    self.assertEqual(1, int(conn.get(counter_key)) - before)
def test_failure_stats(self):
    """
    The failure counter is incremented when the wrapped function
    raises an exception.
    """
    area = "bbb"

    @stats.progress_indicator(area)
    def raise_exception(job_id):
        raise NotImplementedError

    conn = self.connect()
    # Failure counters use the function name with a "-failures" suffix.
    counter_key = stats.key_name(
        22, area, raise_exception.__name__ + "-failures", "i")
    before = conn.get(counter_key)
    before = int(before) if before else 0
    # Invoke the decorated function, then check the counter delta.
    self.assertRaises(NotImplementedError, raise_exception, 22)
    self.assertEqual(1, int(conn.get(counter_key)) - before)
def _maintain_debug_stats(self):
    """Capture the file written if debug statistics are turned on."""
    debug_key = stats.key_name(
        config.Config().job_id, *stats.STATS_KEYS["hcls_xmlcurvewrites"])
    # key_name() can yield a falsy result (presumably when debug stats
    # are disabled — confirm in stats module); only record then.
    if debug_key:
        stats.kvs_op("rpush", debug_key, self.path)