Example #1
0
    def execute(self):
        """Run the UHS calculation.

        For each realization (logic tree sample), sample the source model
        and GMPE logic trees, split the sites of interest into blocks, and
        distribute Celery tasks to carry out the UHS computation.
        """
        ctxt = self.job_ctxt
        sites = ctxt.sites_to_compute()
        block_size = config.hazard_block_size()
        profile = ctxt.oq_job_profile

        # Seeded RNGs so logic tree sampling is reproducible for the job.
        source_rnd = random.Random(profile.source_model_lt_random_seed)
        gmpe_rnd = random.Random(profile.gmpe_lt_random_seed)

        for realization in xrange(profile.realizations):

            # Sample the source model and GMPE logic trees and store the
            # results in the KVS:
            general.store_source_model(
                ctxt.job_id, source_rnd.getrandbits(32),
                ctxt.params, self.lt_processor)
            general.store_gmpe_map(
                ctxt.job_id, gmpe_rnd.getrandbits(32), self.lt_processor)

            for block in block_splitter(sites, block_size):

                task_args = dict(job_id=ctxt.job_id, realization=realization)

                done_so_far = completed_task_count(ctxt.job_id)

                handler_args = dict(job_id=ctxt.job_id,
                                    num_tasks=len(block),
                                    start_count=done_so_far)

                utils_tasks.distribute(
                    compute_uhs_task, ('site', block), tf_args=task_args,
                    ath=uhs_task_handler, ath_args=handler_args)
Example #2
0
    def execute(self):
        """Run the UHS calculation.

        For each realization (logic tree sample), sample the source model
        and GMPE logic trees, split the sites of interest into blocks, and
        distribute Celery tasks to carry out the UHS computation.
        """
        ctxt = self.job_ctxt
        sites = ctxt.sites_to_compute()
        block_size = config.hazard_block_size()
        profile = ctxt.oq_job_profile

        # Record progress bookkeeping before any tasks are spawned.
        self.initialize_pr_data(
            sites=sites, realizations=profile.realizations)

        # Seeded RNGs so logic tree sampling is reproducible for the job.
        source_rnd = random.Random(profile.source_model_lt_random_seed)
        gmpe_rnd = random.Random(profile.gmpe_lt_random_seed)

        for realization in xrange(profile.realizations):

            # Sample the source model and GMPE logic trees and store the
            # results in the KVS:
            general.store_source_model(
                ctxt.job_id, source_rnd.getrandbits(32),
                ctxt.params, self.lt_processor)
            general.store_gmpe_map(
                ctxt.job_id, gmpe_rnd.getrandbits(32), self.lt_processor)

            for block in block_splitter(sites, block_size):

                task_args = dict(job_id=ctxt.job_id, realization=realization)

                done_so_far = completed_task_count(ctxt.job_id)

                handler_args = dict(job_id=ctxt.job_id,
                                    num_tasks=len(block),
                                    start_count=done_so_far)

                utils_tasks.distribute(
                    compute_uhs_task, ('site', block), tf_args=task_args,
                    ath=uhs_task_handler, ath_args=handler_args)
Example #3
0
def store_hazard_logic_trees(a_job):
    """Store the source model and GMPE logic trees in the KVS so that they
    can be read by the Java code. This is basically what the @preload
    decorator does.

    :param a_job:
        :class:`openquake.engine.JobContext` instance.
    """
    lt_proc = LogicTreeProcessor(a_job['BASE_PATH'],
                                 a_job['SOURCE_MODEL_LOGIC_TREE_FILE_PATH'],
                                 a_job['GMPE_LOGIC_TREE_FILE_PATH'])

    # One independently seeded RNG per logic tree keeps the sampling
    # reproducible for a given pair of seeds.
    src_rnd = random.Random(a_job['SOURCE_MODEL_LT_RANDOM_SEED'])
    gmpe_rnd = random.Random(a_job['GMPE_LT_RANDOM_SEED'])

    store_source_model(a_job.job_id, src_rnd.getrandbits(32),
                       a_job.params, lt_proc)
    store_gmpe_map(a_job.job_id, gmpe_rnd.getrandbits(32), lt_proc)
Example #4
0
def store_hazard_logic_trees(a_job):
    """Store the source model and GMPE logic trees in the KVS so that they
    can be read by the Java code. This is basically what the @preload
    decorator does.

    :param a_job:
        :class:`openquake.engine.JobContext` instance.
    """
    processor = LogicTreeProcessor(
        a_job['BASE_PATH'],
        a_job['SOURCE_MODEL_LOGIC_TREE_FILE_PATH'],
        a_job['GMPE_LOGIC_TREE_FILE_PATH'])

    # Independently seeded RNGs make logic tree sampling reproducible
    # for a given pair of seeds.
    source_rnd = random.Random(a_job['SOURCE_MODEL_LT_RANDOM_SEED'])
    gmpe_rnd = random.Random(a_job['GMPE_LT_RANDOM_SEED'])

    store_source_model(a_job.job_id, source_rnd.getrandbits(32),
                       a_job.params, processor)
    store_gmpe_map(a_job.job_id, gmpe_rnd.getrandbits(32), processor)
Example #5
0
    def distribute_disagg(self, sites, realizations, poes, result_dir):
        """Compute disaggregation by splitting up the calculation over sites,
        realizations, and PoE values.

        All tasks for every (realization, poe, site) combination are spawned
        asynchronously first; results are then collected in a second pass.

        :param sites:
            List of :class:`openquake.shapes.Site` objects
        :param realizations:
            Number of logic tree samples; tasks are spawned for realizations
            1 through N, inclusive
        :type realizations:
            int
        :param poes:
            Probability of Exceedence levels for the calculation
        :type poes:
            List of floats
        :param result_dir:
            Path where full disaggregation results should be stored
        :returns:
            Result data in the following form::
                [(realization_1, poe_1,
                  [(site_1, gmv_1, matrix_path_1),
                   (site_2, gmv_2, matrix_path_2)]
                 ),
                 (realization_1, poe_2,
                  [(site_1, gmv_1, matrix_path_3),
                   (site_2, gmv_2, matrix_path_4)]
                 ),
                 ...
                 (realization_N, poe_N,
                  [(site_1, gmv_1, matrix_path_N-1),
                   (site_2, gmv_2, matrix_path_N)]
                 ),
                ]

            A single matrix result in this form looks like this::
                [(1, 0.1,
                  [(Site(0.0, 0.0), 0.2257,
                    '/var/lib/openquake/disagg-results/job-372/some_guid.h5'),]
                 ),
                ]
        :raises RuntimeError:
            If any of the distributed computation tasks fails
        """
        # accumulates the final results of this method
        full_da_results = []

        # accumulates task data across the realization and poe loops
        task_data = []

        # Independently seeded RNGs so logic tree sampling is reproducible
        # for a given pair of seeds.
        src_model_rnd = random.Random()
        src_model_rnd.seed(self.job_ctxt['SOURCE_MODEL_LT_RANDOM_SEED'])
        gmpe_rnd = random.Random()
        gmpe_rnd.seed(self.job_ctxt['GMPE_LT_RANDOM_SEED'])

        for rlz in xrange(1, realizations + 1):  # 1 to N, inclusive
            # cache the source model and gmpe model in the KVS
            # so the Java code can access it

            general.store_source_model(self.job_ctxt.job_id,
                                       src_model_rnd.getrandbits(32),
                                       self.job_ctxt.params, self.calc)
            general.store_gmpe_map(self.job_ctxt.job_id,
                                   gmpe_rnd.getrandbits(32), self.calc)

            # Fan out one async Celery task per (poe, site) combination.
            for poe in poes:
                task_site_pairs = []
                for site in sites:
                    a_task = compute_disagg_matrix_task.delay(
                        self.job_ctxt.job_id, site, rlz, poe, result_dir)

                    task_site_pairs.append((a_task, site))

                task_data.append((rlz, poe, task_site_pairs))

        # Second pass: block on each task and collect its result.
        for rlz, poe, task_site_pairs in task_data:

            # accumulates all data for a given (realization, poe) pair
            rlz_poe_data = []
            for a_task, site in task_site_pairs:
                a_task.wait()
                if not a_task.successful():
                    # Any single task failure aborts the whole calculation.
                    msg = (
                        "Full Disaggregation matrix computation task"
                        " for job %s with task_id=%s, realization=%s, PoE=%s,"
                        " site=%s has failed with the following error: %s")
                    msg %= (self.job_ctxt.job_id, a_task.task_id, rlz, poe,
                            site, a_task.result)
                    LOG.critical(msg)
                    raise RuntimeError(msg)
                else:
                    # On success the task result is a (gmv, matrix_path) pair.
                    gmv, matrix_path = a_task.result
                    rlz_poe_data.append((site, gmv, matrix_path))

            full_da_results.append((rlz, poe, rlz_poe_data))

        return full_da_results
Example #6
0
    def distribute_disagg(self, sites, realizations, poes, result_dir):
        """Compute disaggregation by splitting up the calculation over sites,
        realizations, and PoE values.

        All tasks for every (realization, poe, site) combination are spawned
        asynchronously first; results are then collected in a second pass.

        :param sites:
            List of :class:`openquake.shapes.Site` objects
        :param realizations:
            Number of logic tree samples; tasks are spawned for realizations
            1 through N, inclusive
        :type realizations:
            int
        :param poes:
            Probability of Exceedence levels for the calculation
        :type poes:
            List of floats
        :param result_dir:
            Path where full disaggregation results should be stored
        :returns:
            Result data in the following form::
                [(realization_1, poe_1,
                  [(site_1, gmv_1, matrix_path_1),
                   (site_2, gmv_2, matrix_path_2)]
                 ),
                 (realization_1, poe_2,
                  [(site_1, gmv_1, matrix_path_3),
                   (site_2, gmv_2, matrix_path_4)]
                 ),
                 ...
                 (realization_N, poe_N,
                  [(site_1, gmv_1, matrix_path_N-1),
                   (site_2, gmv_2, matrix_path_N)]
                 ),
                ]

            A single matrix result in this form looks like this::
                [(1, 0.1,
                  [(Site(0.0, 0.0), 0.2257,
                    '/var/lib/openquake/disagg-results/job-372/some_guid.h5'),]
                 ),
                ]
        :raises RuntimeError:
            If any of the distributed computation tasks fails
        """
        # accumulates the final results of this method
        full_da_results = []

        # accumulates task data across the realization and poe loops
        task_data = []

        # Independently seeded RNGs so logic tree sampling is reproducible
        # for a given pair of seeds.
        src_model_rnd = random.Random()
        src_model_rnd.seed(self.calc_proxy['SOURCE_MODEL_LT_RANDOM_SEED'])
        gmpe_rnd = random.Random()
        gmpe_rnd.seed(self.calc_proxy['GMPE_LT_RANDOM_SEED'])

        for rlz in xrange(1, realizations + 1):  # 1 to N, inclusive
            # cache the source model and gmpe model in the KVS
            # so the Java code can access it

            store_source_model(self.calc_proxy.job_id,
                               src_model_rnd.getrandbits(32),
                               self.calc_proxy.params, self.calc)
            store_gmpe_map(self.calc_proxy.job_id, gmpe_rnd.getrandbits(32),
                           self.calc)

            # Fan out one async Celery task per (poe, site) combination.
            for poe in poes:
                task_site_pairs = []
                for site in sites:
                    a_task = compute_disagg_matrix_task.delay(
                        self.calc_proxy.job_id, site, rlz, poe, result_dir)

                    task_site_pairs.append((a_task, site))

                task_data.append((rlz, poe, task_site_pairs))

        # Second pass: block on each task and collect its result.
        for rlz, poe, task_site_pairs in task_data:

            # accumulates all data for a given (realization, poe) pair
            rlz_poe_data = []
            for a_task, site in task_site_pairs:
                a_task.wait()
                if not a_task.successful():
                    # Any single task failure aborts the whole calculation.
                    msg = (
                        "Full Disaggregation matrix computation task"
                        " for job %s with task_id=%s, realization=%s, PoE=%s,"
                        " site=%s has failed with the following error: %s")
                    msg %= (
                        self.calc_proxy.job_id, a_task.task_id, rlz, poe,
                        site, a_task.result)
                    LOG.critical(msg)
                    raise RuntimeError(msg)
                else:
                    # On success the task result is a (gmv, matrix_path) pair.
                    gmv, matrix_path = a_task.result
                    rlz_poe_data.append((site, gmv, matrix_path))

            full_da_results.append((rlz, poe, rlz_poe_data))

        return full_da_results