Example #1
    def execute(self):
        """Entry point to trigger the computation."""

        random_generator = java.jclass(
            "Random")(int(self.job_ctxt.params["GMF_RANDOM_SEED"]))

        encoder = json.JSONEncoder()
        kvs_client = kvs.get_client()

        num_calculations = self._number_of_calculations()
        self.initialize_pr_data(num_calculations=num_calculations)

        for cnum in xrange(num_calculations):
            try:
                gmf = self.compute_ground_motion_field(random_generator)
                stats.pk_inc(self.job_ctxt.job_id, "nhzrd_done", 1)
            except:
                # Count failure
                stats.pk_inc(self.job_ctxt.job_id, "nhzrd_failed", 1)
                raise
            logs.log_percent_complete(self.job_ctxt.job_id, "hazard")
            imt = self.job_ctxt.params["INTENSITY_MEASURE_TYPE"]
            self._serialize_gmf(gmf, imt, cnum)

            for gmv in gmf_to_dict(gmf, imt):
                site = shapes.Site(gmv["site_lon"], gmv["site_lat"])

                key = kvs.tokens.ground_motion_values_key(
                    self.job_ctxt.job_id, site)
                kvs_client.rpush(key, encoder.encode(gmv))
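
The try/except bookkeeping here is the progress-counter idiom used throughout these examples: every work unit bumps either the "nhzrd_done" or the "nhzrd_failed" counter, and monitoring code derives progress from their sum. A minimal sketch of the same idiom over a plain redis-py client, assuming Redis as the KVS backend; the flat key layout and the `track` helper are illustrative, not part of the openquake.utils.stats API:

    import redis

    def track(conn, job_id, work):
        """Run `work`, bumping a done or a failed counter either way."""
        try:
            result = work()
            conn.incr("oqs:%s:nhzrd_done" % job_id)    # success counter
            return result
        except Exception:
            conn.incr("oqs:%s:nhzrd_failed" % job_id)  # failure counter
            raise                                      # counted, not swallowed

    # Usage sketch (hypothetical work unit):
    #     track(redis.Redis(), 42, lambda: expensive_calculation())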
Example #2
File: core.py Project: pslh/oq-engine
    def execute(self, kvs_keys_purged=None):  # pylint: disable=W0221
        """
        Trigger the calculation and serialization of hazard curves, mean hazard
        curves/maps and quantile curves.

        :param kvs_keys_purged: a list passed only by tests that check the
            kvs keys used/purged in the course of the job.
        :returns: the keys used in the course of the calculation (for the sake
            of testability).
        """
        sites = self.job_ctxt.sites_to_compute()
        realizations = self.job_ctxt["NUMBER_OF_LOGIC_TREE_SAMPLES"]

        LOG.info("Going to run classical PSHA hazard for %s realizations "
                 "and %s sites" % (realizations, len(sites)))

        stats.pk_set(self.job_ctxt.job_id, "hcls_sites", len(sites))
        stats.pk_set(self.job_ctxt.job_id, "hcls_realizations", realizations)

        block_size = config.hazard_block_size()
        stats.pk_set(self.job_ctxt.job_id, "block_size", block_size)

        blocks = range(0, len(sites), block_size)
        stats.pk_set(self.job_ctxt.job_id, "blocks", len(blocks))
        stats.pk_set(self.job_ctxt.job_id, "cblock", 0)

        for start in blocks:
            stats.pk_inc(self.job_ctxt.job_id, "cblock")
            end = start + block_size
            data = sites[start:end]

            LOG.debug("> curves!")
            self.do_curves(
                data,
                realizations,
                serializer=self.serialize_hazard_curve_of_realization)

            LOG.debug("> means!")
            # mean curves
            self.do_means(data,
                          realizations,
                          curve_serializer=self.serialize_mean_hazard_curves,
                          map_func=general.compute_mean_hazard_maps,
                          map_serializer=self.serialize_mean_hazard_map)

            LOG.debug("> quantiles!")
            # quantile curves
            quantiles = self.quantile_levels
            self.do_quantiles(
                data,
                realizations,
                quantiles,
                curve_serializer=self.serialize_quantile_hazard_curves,
                map_func=general.compute_quantile_hazard_maps,
                map_serializer=self.serialize_quantile_hazard_map)

            # Done with this block, purge intermediate results from kvs.
            release_data_from_kvs(self.job_ctxt.job_id, data, realizations,
                                  quantiles, self.poes_hazard_maps,
                                  kvs_keys_purged)
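
The block loop above is plain stride slicing: range(0, len(sites), block_size) yields the start index of each block and sites[start:end] extracts it. The same pattern in isolation, as a generic sketch:

    def iter_blocks(items, block_size):
        """Yield consecutive slices of `items`, each at most `block_size` long."""
        for start in range(0, len(items), block_size):
            yield items[start:start + block_size]

    # list(iter_blocks([1, 2, 3, 4, 5, 6, 7], 3)) == [[1, 2, 3], [4, 5, 6], [7]]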
Example #3
    def test_pk_inc_with_existing_incremental(self):
        """The value is incremented for an existing predefined key."""
        job_id = 82
        pkey = "cblock"
        key = stats.key_name(job_id, *stats.STATS_KEYS[pkey])

        stats.delete_job_counters(job_id)
        kvs = self.connect()
        stats.pk_inc(job_id, pkey)
        self.assertEqual("1", kvs.get(key))
Example #4
    def test_pk_inc_with_existing_debug_and_debug_stats_enabled(self):
        """The value is incremented correctly for an existing debug counter."""
        job_id = 84
        pkey = "hcls_xmlcurvewrites"
        stats.delete_job_counters(job_id)
        with helpers.patch("openquake.utils.stats.debug_stats_enabled") as dse:
            dse.return_value = True
            stats.pk_inc(job_id, pkey)
            key = stats.key_name(job_id, *stats.STATS_KEYS[pkey])
            kvs = self.connect()
            self.assertEqual("1", kvs.get(key))
Example #5
    def test_pk_inc_with_existing_debug_and_debug_stats_off(self):
        """
        The debug counter value is not incremented when debug stats are off.
        """
        job_id = 85
        pkey = "hcls_xmlcurvewrites"
        stats.delete_job_counters(job_id)
        with helpers.patch("openquake.utils.stats.debug_stats_enabled") as dse:
            dse.return_value = False
            stats.pk_inc(job_id, pkey)
            kvs = self.connect()
            key = stats._KEY_TEMPLATE % ((job_id,) + stats.STATS_KEYS[pkey])
            self.assertIs(None, kvs.get(key))
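
Taken together, this test and the previous one fix the gating behavior: pk_inc consults debug_stats_enabled() before touching a debug counter, so with debug stats off the key is never created and a subsequent GET returns None. A sketch of that guard, reusing the illustrative pk_inc above; the DEBUG_KEYS set and the always-False predicate are stand-ins:

    DEBUG_KEYS = set(["hcls_xmlcurvewrites"])  # illustrative: debug-only counters

    def debug_stats_enabled():
        """Stand-in predicate; the real check reads the engine configuration."""
        return False

    def pk_inc_gated(job_id, pkey, amount=1):
        """Increment `pkey`, skipping debug counters when debug stats are off."""
        if pkey in DEBUG_KEYS and not debug_stats_enabled():
            return                  # counter never created; GET yields None
        pk_inc(job_id, pkey, amount)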
Example #6
File: core.py Project: bwyss/oq-engine
    def do_curves(self,
                  sites,
                  realizations,
                  serializer=None,
                  the_task=compute_hazard_curve):
        """Trigger the calculation of hazard curves, serialize as requested.

        The calculated curves will only be serialized if the `serializer`
        parameter is not `None`.

        :param sites: The sites for which to calculate hazard curves.
        :type sites: list of :py:class:`openquake.shapes.Site`
        :param realizations: The number of realizations to calculate
        :type realizations: :py:class:`int`
        :param serializer: A serializer for the calculated hazard curves,
            receives the KVS keys of the calculated hazard curves in
            its single parameter.
        :type serializer: a callable with a single parameter: list of strings
        :param the_task: The `celery` task to use for the hazard curve
            calculation, it takes the following parameters:
                * job ID
                * the sites for which to calculate the hazard curves
                * the logic tree realization number
        :type the_task: a callable taking three parameters
        :returns: KVS keys of the calculated hazard curves.
        :rtype: list of string
        """
        source_model_generator = random.Random()
        source_model_generator.seed(
            self.job_ctxt["SOURCE_MODEL_LT_RANDOM_SEED"])

        gmpe_generator = random.Random()
        gmpe_generator.seed(self.job_ctxt["GMPE_LT_RANDOM_SEED"])

        stats.pk_set(self.job_ctxt.job_id, "hcls_crealization", 0)

        for realization in xrange(0, realizations):
            stats.pk_inc(self.job_ctxt.job_id, "hcls_crealization")
            LOG.info("Calculating hazard curves for realization %s" %
                     realization)
            self.store_source_model(source_model_generator.getrandbits(32))
            # The GMPE map draws from the GMPE-specific generator.
            self.store_gmpe_map(gmpe_generator.getrandbits(32))

            tf_args = dict(job_id=self.job_ctxt.job_id,
                           realization=realization)
            ath_args = dict(sites=sites, rtype="curve", datum=realization)
            utils_tasks.distribute(the_task, ("sites", [[s] for s in sites]),
                                   tf_args=tf_args,
                                   ath=serializer,
                                   ath_args=ath_args)
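
Seeding dedicated random.Random instances, as do_curves does, makes logic-tree sampling reproducible: two generators seeded alike produce the same sequence of 32-bit draws, unaffected by any other use of the random module. A quick standalone check:

    import random

    gen_a = random.Random()
    gen_a.seed(42)
    gen_b = random.Random()
    gen_b.seed(42)

    # Identically seeded generators yield identical 32-bit draws.
    assert ([gen_a.getrandbits(32) for _ in range(3)] ==
            [gen_b.getrandbits(32) for _ in range(3)])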
Example #7
File: ath_test.py Project: bwyss/oq-engine
    def test_remaining_tasks_in_block_nonzero_start_count(self):
        # Same as `test_remaining_tasks_in_block` (next example), but with
        # start_count set to something > 0 (to simulate a mid-calculation block).

        incr_count = lambda: stats.pk_inc(self.job_id, "nhzrd_done")

        # Just for variety, set 5 successful and 5 failed task counters:
        for _ in xrange(5):
            stats.pk_inc(self.job_id, "nhzrd_done")
        for _ in xrange(5):
            stats.pk_inc(self.job_id, "nhzrd_failed")

        # count starts at 10
        gen = remaining_tasks_in_block(self.job_id, 4, 10)

        self.assertEqual(4, gen.next())
        incr_count()
        self.assertEqual(3, gen.next())
        incr_count()
        incr_count()
        self.assertEqual(1, gen.next())
        incr_count()
        self.assertRaises(StopIteration, gen.next)
Example #8
File: ath_test.py Project: bwyss/oq-engine
    def test_remaining_tasks_in_block(self):
        # Tasks should be submitted to workers for one block (of sites) at a
        # time. For each block, we want to look at Redis counters to determine
        # when the block is finished calculating.
        # `remaining_tasks_in_block` is a generator that yields the remaining
        # number of tasks in a block. When there are no more tasks left in the
        # block, a `StopIteration` is raised.
        gen = remaining_tasks_in_block(self.job_id, 4, 0)

        incr_count = lambda: stats.pk_inc(self.job_id, "nhzrd_done")

        self.assertEqual(4, gen.next())
        incr_count()
        self.assertEqual(3, gen.next())
        incr_count()
        incr_count()
        self.assertEqual(1, gen.next())
        incr_count()
        self.assertRaises(StopIteration, gen.next)
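
The two generator tests above specify remaining_tasks_in_block exactly: given a block size and the done+failed total at the moment the block was submitted, it yields the number of still-outstanding tasks and is exhausted once the counters have advanced by a full block. A sketch consistent with that behavior, assuming a completed_task_count helper that sums the two counters (sketched at the end of this page):

    def remaining_tasks_in_block(job_id, block_size, start_count):
        """Yield the number of tasks still outstanding in the current block."""
        while True:
            done_in_block = completed_task_count(job_id) - start_count
            remaining = block_size - done_in_block
            if remaining <= 0:
                return              # exhausts the generator (StopIteration)
            yield remaining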
Example #9
File: ath_test.py Project: bwyss/oq-engine
    def test_complete_task_count_success(self):
        stats.pk_inc(self.job_id, "nhzrd_done")
        self.assertEqual(1, completed_task_count(self.job_id))
Example #10
File: ath_test.py Project: bwyss/oq-engine
    def test_complete_task_count_failures(self):
        stats.pk_inc(self.job_id, "nhzrd_failed")
        self.assertEqual(1, completed_task_count(self.job_id))
Example #11
File: ath_test.py Project: bwyss/oq-engine
    def test_complete_task_count_success_and_fail(self):
        # Test `completed_task_count` with both success and failure counters:
        stats.pk_inc(self.job_id, "nhzrd_done")
        stats.pk_inc(self.job_id, "nhzrd_failed")
        self.assertEqual(2, completed_task_count(self.job_id))
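
These last three tests say completed_task_count is simply the sum of the success and failure counters, with missing counters treated as zero. A sketch using the illustrative key_name/STATS_KEYS helpers from the pk_inc sketch above, assuming redis-py:

    import redis

    def completed_task_count(job_id):
        """Sum the done and failed task counters for `job_id`."""
        conn = redis.Redis()
        total = 0
        for pkey in ("nhzrd_done", "nhzrd_failed"):
            value = conn.get(key_name(job_id, *STATS_KEYS[pkey]))
            total += int(value) if value is not None else 0
        return total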