Code example #1
File: core.py Project: pslh/oq-engine
    def execute(self, kvs_keys_purged=None):  # pylint: disable=W0221
        """
        Trigger the calculation and serialization of hazard curves, mean hazard
        curves/maps and quantile curves.

        :param kvs_keys_purged: a list passed only by tests that check the
            kvs keys used/purged in the course of the job.
        :returns: the keys used in the course of the calculation (for the sake
            of testability).
        """
        sites = self.job_ctxt.sites_to_compute()
        realizations = self.job_ctxt["NUMBER_OF_LOGIC_TREE_SAMPLES"]

        LOG.info("Going to run classical PSHA hazard for %s realizations "
                 "and %s sites" % (realizations, len(sites)))

        stats.pk_set(self.job_ctxt.job_id, "hcls_sites", len(sites))
        stats.pk_set(self.job_ctxt.job_id, "hcls_realizations", realizations)

        block_size = config.hazard_block_size()
        stats.pk_set(self.job_ctxt.job_id, "block_size", block_size)

        blocks = range(0, len(sites), block_size)
        stats.pk_set(self.job_ctxt.job_id, "blocks", len(blocks))
        stats.pk_set(self.job_ctxt.job_id, "cblock", 0)

        for start in blocks:
            stats.pk_inc(self.job_ctxt.job_id, "cblock")
            end = start + block_size
            data = sites[start:end]

            LOG.debug("> curves!")
            self.do_curves(
                data,
                realizations,
                serializer=self.serialize_hazard_curve_of_realization)

            LOG.debug("> means!")
            # mean curves
            self.do_means(data,
                          realizations,
                          curve_serializer=self.serialize_mean_hazard_curves,
                          map_func=general.compute_mean_hazard_maps,
                          map_serializer=self.serialize_mean_hazard_map)

            LOG.debug("> quantiles!")
            # quantile curves
            quantiles = self.quantile_levels
            self.do_quantiles(
                data,
                realizations,
                quantiles,
                curve_serializer=self.serialize_quantile_hazard_curves,
                map_func=general.compute_quantile_hazard_maps,
                map_serializer=self.serialize_quantile_hazard_map)

            # Done with this block, purge intermediate results from kvs.
            release_data_from_kvs(self.job_ctxt.job_id, data, realizations,
                                  quantiles, self.poes_hazard_maps,
                                  kvs_keys_purged)
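
The execute() method above walks the site list in fixed-size blocks via
range(0, len(sites), block_size). A minimal standalone sketch of that chunking
pattern (the site names and block size are made up for illustration):

    sites = ["site0", "site1", "site2", "site3", "site4"]
    block_size = 2

    for start in range(0, len(sites), block_size):
        block = sites[start:start + block_size]  # the last block may be short
        print(start, block)
    # 0 ['site0', 'site1']
    # 2 ['site2', 'site3']
    # 4 ['site4']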
Code example #2
File: logs.py Project: arbeit/openquake-packages
def log_percent_complete(job_id, ctype):
    """Log a message when the percentage completed changed for a calculation.

    :param int job_id: identifier of the job in question
    :param str ctype: calculation type, one of: hazard, risk
    """
    if ctype not in ("hazard", "risk"):
        LOG.warn("Unknown calculation type: '%s'" % ctype)
        return -1

    key = "nhzrd_total" if ctype == "hazard" else "nrisk_total"
    total = stats.pk_get(job_id, key)
    key = "nhzrd_done" if ctype == "hazard" else "nrisk_done"
    done = stats.pk_get(job_id, key)

    if done <= 0 or total <= 0:
        return 0

    percent = total / 100.0
    # Store percentage complete as well as the last value reported as integers
    # in order to avoid reporting the same percentage more than once.
    percent_complete = int(done / percent)
    # Get the last value reported
    lvr = stats.pk_get(job_id, "lvr")

    # Only report the percentage completed if it is above the last value shown
    if percent_complete > lvr:
        log_progress("%s %3d%% complete" % (ctype, percent_complete), 2)
        stats.pk_set(job_id, "lvr", percent_complete)

    return percent_complete
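
To make the arithmetic above concrete: the function computes how many work
items correspond to one percent, truncates done / percent to an integer, and
only logs when that integer exceeds the last value reported ("lvr"). With the
counter values used in one of the tests further below:

    total, done = 366, 46       # values from a logs_test.py example below
    percent = total / 100.0     # 3.66 work items per percent
    print(int(done / percent))  # -> 12; not logged again if lvr is already 12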
Code example #3
    def test_pk_set_with_existing_incremental(self):
        """The value is set correctly for an existing predefined key."""
        job_id = 72
        pkey = "cblock"
        key = stats.key_name(job_id, *stats.STATS_KEYS[pkey])

        stats.delete_job_counters(job_id)
        kvs = self.connect()
        stats.pk_set(job_id, pkey, 727)
        self.assertEqual("727", kvs.get(key))
Code example #4
File: utils_stats_test.py Project: pslh/oq-engine
    def test_pk_set_with_existing_debug_and_debug_stats_off(self):
        """The debug counter value is not set when debug stats are off."""
        job_id = 75
        pkey = "hcls_xmlcurvewrites"
        stats.delete_job_counters(job_id)
        with helpers.patch("openquake.utils.stats.debug_stats_enabled") as dse:
            dse.return_value = False
            stats.pk_set(job_id, pkey, 757)
            key = stats._KEY_TEMPLATE % ((job_id,) + stats.STATS_KEYS[pkey])
            kvs = self.connect()
            self.assertIs(None, kvs.get(key))
Code example #5
File: utils_stats_test.py Project: pslh/oq-engine
    def test_pk_set_with_existing_debug_and_debug_stats_enabled(self):
        """The value is set correctly for an existing debug counter."""
        job_id = 74
        pkey = "hcls_xmlcurvewrites"
        stats.delete_job_counters(job_id)
        with helpers.patch("openquake.utils.stats.debug_stats_enabled") as dse:
            dse.return_value = True
            stats.pk_set(job_id, pkey, 747)
            key = stats.key_name(job_id, *stats.STATS_KEYS[pkey])
            kvs = self.connect()
            self.assertEqual("747", kvs.get(key))
Code example #6
File: utils_stats_test.py Project: matley/oq-engine
    def test_get_progress_timing_data_with_stale_increment_ts(self):
        # The progress counter increment time stamp exists but is not used
        # since the time stamp in the *executing* `JobPhaseStats` record is
        # more recent.
        tstamp = datetime.utcnow() - timedelta(minutes=9)
        stats.pk_set(self.job.id, "lvr_ts", tstamp.strftime("%s"))
        tstamp = datetime.utcnow() - timedelta(minutes=8)
        jps = JobPhaseStats(oq_job=self.job, ctype="hazard",
                            job_status="executing")
        jps.start_time = tstamp
        jps.save()
        actual, timeout = stats.get_progress_timing_data(self.job)
        self.assertTrue(approx_equal(480, actual, 5))
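
One caveat in this test: strftime("%s") is a platform-specific extension (it
is not part of the C standard) and formats the naive datetime in local time
even though it came from utcnow(). A portable way to turn a naive UTC datetime
into a Unix timestamp:

    import calendar
    from datetime import datetime

    tstamp = datetime.utcnow()
    unix_ts = calendar.timegm(tstamp.utctimetuple())  # interprets tstamp as UTC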
Code example #7
File: core.py Project: bwyss/oq-engine
    def do_curves(self,
                  sites,
                  realizations,
                  serializer=None,
                  the_task=compute_hazard_curve):
        """Trigger the calculation of hazard curves, serialize as requested.

        The calculated curves will only be serialized if the `serializer`
        parameter is not `None`.

        :param sites: The sites for which to calculate hazard curves.
        :type sites: list of :py:class:`openquake.shapes.Site`
        :param realizations: The number of realizations to calculate
        :type realizations: :py:class:`int`
        :param serializer: A serializer for the calculated hazard curves,
            receives the KVS keys of the calculated hazard curves in
            its single parameter.
        :type serializer: a callable with a single parameter: list of strings
        :param the_task: The `celery` task to use for the hazard curve
            calculation; it takes the following parameters:
                * job ID
                * the sites for which to calculate the hazard curves
                * the logic tree realization number
        :type the_task: a callable taking three parameters
        :returns: KVS keys of the calculated hazard curves.
        :rtype: list of string
        """
        source_model_generator = random.Random()
        source_model_generator.seed(
            self.job_ctxt["SOURCE_MODEL_LT_RANDOM_SEED"])

        gmpe_generator = random.Random()
        gmpe_generator.seed(self.job_ctxt["GMPE_LT_RANDOM_SEED"])

        stats.pk_set(self.job_ctxt.job_id, "hcls_crealization", 0)

        for realization in xrange(0, realizations):
            stats.pk_inc(self.job_ctxt.job_id, "hcls_crealization")
            LOG.info("Calculating hazard curves for realization %s" %
                     realization)
            self.store_source_model(source_model_generator.getrandbits(32))
            # Draw the GMPE seed from gmpe_generator; the original code passed
            # source_model_generator here, leaving gmpe_generator seeded but
            # unused.
            self.store_gmpe_map(gmpe_generator.getrandbits(32))

            tf_args = dict(job_id=self.job_ctxt.job_id,
                           realization=realization)
            ath_args = dict(sites=sites, rtype="curve", datum=realization)
            utils_tasks.distribute(the_task, ("sites", [[s] for s in sites]),
                                   tf_args=tf_args,
                                   ath=serializer,
                                   ath_args=ath_args)
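
Seeding dedicated random.Random instances, as do_curves() does for the
source-model and GMPE logic trees, makes every realization reproducible for a
given pair of seeds. A small illustration of that property:

    import random

    def bits(seed, n=3):
        rng = random.Random()
        rng.seed(seed)
        return [rng.getrandbits(32) for _ in range(n)]

    assert bits(42) == bits(42)  # same seed -> same sequence of draws
    assert bits(42) != bits(43)  # different seeds diverge (overwhelmingly likely)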
Code example #8
    def initialize_pr_data(self, **kwargs):
        """
        Record the total/completed number of work items.

        This is needed for the purpose of providing an indication of progress
        to the end user."""
        num_calculations = kwargs.get("num_calculations")
        assert num_calculations, "Invalid 'num_calculations' parameter"
        stats.pk_set(self.job_ctxt.job_id, "lvr", 0)
        stats.pk_set(self.job_ctxt.job_id, "nhzrd_total", num_calculations)
        stats.pk_set(self.job_ctxt.job_id, "nhzrd_done", 0)
Code example #9
File: logs_test.py Project: bwyss/oq-engine
    def test_log_percent_complete_with_zero_percent_done(self):
        # nothing is reported since the percentage complete value is zero
        job_id = 13
        stats.pk_set(job_id, "nhzrd_total", 100)
        stats.pk_set(job_id, "nhzrd_done", 0)
        stats.pk_set(job_id, "lvr", -1)

        with mock.patch("openquake.logs.log_progress") as lpm:
            rv = logs.log_percent_complete(job_id, "hazard")
            self.assertEqual(0, rv)
            self.assertEqual(0, lpm.call_count)
Code example #10
File: logs_test.py Project: bwyss/oq-engine
    def test_log_percent_complete_with_almost_same_percentage_value(self):
        # nothing new is reported since the computed percentage (12.6,
        # truncated to 12) does not exceed the last value reported (12).
        job_id = 12
        stats.pk_set(job_id, "nhzrd_total", 366)
        stats.pk_set(job_id, "nhzrd_done", 46)
        stats.pk_set(job_id, "lvr", 12)

        with mock.patch("openquake.logs.log_progress") as lpm:
            rv = logs.log_percent_complete(job_id, "hazard")
            self.assertEqual(12, rv)
            self.assertEqual(0, lpm.call_count)
Code example #11
File: general.py Project: arbeit/openquake-packages
    def initialize_pr_data(self):
        """Record the total/completed number of work items.

        This is needed for the purpose of providing an indication of progress
        to the end user."""
        stats.pk_set(self.job.id, "lvr", 0)
        rs = models.LtRealization.objects.filter(
            hazard_calculation=self.job.hazard_calculation)
        total = rs.aggregate(Sum("total_sources"))
        done = rs.aggregate(Sum("completed_sources"))
        stats.pk_set(self.job.id, "nhzrd_total", total.values().pop())
        # aggregate() returns a dict such as {"completed_sources__sum": N};
        # compare the summed value, not the dict itself.
        done_sum = done.values().pop()
        if done_sum > 0:
            stats.pk_set(self.job.id, "nhzrd_done", done_sum)
Code example #12
File: logs_test.py Project: bwyss/oq-engine
    def test_log_percent_complete_with_new_percentage_value(self):
        # the percentage complete is reported since it exceeds the last value
        # reported
        job_id = 14
        stats.pk_set(job_id, "nhzrd_total", 100)
        stats.pk_set(job_id, "nhzrd_done", 20)
        stats.pk_set(job_id, "lvr", 12)

        with mock.patch("openquake.logs.log_progress") as lpm:
            rv = logs.log_percent_complete(job_id, "hazard")
            self.assertEqual(20, rv)
            self.assertEqual(1, lpm.call_count)
            self.assertEqual("hazard  20% complete",
                             lpm.call_args_list[0][0][0])
Code example #13
    def initialize_pr_data(self, **kwargs):
        """
        Record the total/completed number of work items for the classical,
        event-based and uhs calculators.

        This is needed for the purpose of providing an indication of progress
        to the end user."""
        sites = kwargs.get("sites")
        assert sites, "Invalid 'sites' parameter"
        realizations = kwargs.get("realizations")
        assert realizations, "Invalid 'realizations' parameter"
        stats.pk_set(self.job_ctxt.job_id, "lvr", 0)
        stats.pk_set(self.job_ctxt.job_id, "nhzrd_total",
                     len(sites) * realizations)
        stats.pk_set(self.job_ctxt.job_id, "nhzrd_done", 0)