Code example #1
File: utils_tasks_test.py Project: luisera/oq-engine
 def test_type_error(self):
     try:
         tasks.map_reduce(just_say_hello, range(5),
                          lambda lst, val: lst + [val], [])
     except TypeError as exc:
         # the message depends on the OQ_NO_DISTRIBUTE flag
         self.assertIn('int', str(exc))
     else:
         raise Exception("Exception not raised.")
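A note on this test: range(5) yields bare ints, whereas the other examples (see test_single_item in example #5) pass a sequence of argument tuples. Presumably each item is unpacked into the task call, so a bare int fails with a TypeError whose message mentions 'int'; the exact wording differs depending on whether OQ_NO_DISTRIBUTE makes the task run locally or through the distribution layer. A rough sketch of the failure mode (not the engine's actual code):

def just_say_hello(value):   # hypothetical stand-in for the task defined in the test module
    return 'hello'

just_say_hello(*1)           # TypeError: a bare int cannot be unpacked as an argument tuple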
Code example #2
File: core.py Project: monellid/oq-engine
    def compute_risk(self):
        """
        Generate the GMFs and optionally the hazard curves too, then
        compute the risk.
        """
        getter_builders = []
        risk_models = []
        with self.monitor('associating assets<->sites'):
            for risk_model in self.risk_models.itervalues():
                logs.LOG.info('associating assets<->sites for taxonomy %s',
                              risk_model.taxonomy)
                try:
                    with db.transaction.commit_on_success(using='job_init'):
                        gbuilder = GetterBuilder(self.rc, risk_model.taxonomy)
                        getter_builders.append(gbuilder)
                        risk_models.append(risk_model)
                except AssetSiteAssociationError as e:
                    logs.LOG.warn(str(e))
                    continue

        # notice that here the commit is really needed, since
        # combine_builders saves the loss curve containers in the db
        with db.transaction.commit_on_success(using='job_init'):
            outputdict = writers.combine_builders(
                [ob(self) for ob in self.output_builders])

        args = []
        # compute the risk by splitting by sites
        for sites in split_site_collection(
                self.hc.site_collection, self.concurrent_tasks):
            args.append((self.job.id, sites, self.rc,
                         risk_models, getter_builders, outputdict,
                         self.calculator_parameters))
        self.acc = tasks.map_reduce(event_based_fr, args, self.agg_result, {})
Code example #3
File: base.py Project: marmarques/oq-engine
 def execute(self):
     """
     Method responsible for the distribution strategy. It divides
     the considered exposure into chunks of homogeneous assets
     (i.e. having the same taxonomy).
     """
     def agg(acc, otm):
         return otm.aggregate_results(self.agg_result, acc)
     run = self.run_subtasks
     name = run.__name__ + '[%s]' % self.core_calc_task.__name__
     self.acc = tasks.map_reduce(
         run, self.task_arg_gen(), agg, self.acc, name)
Code example #4
File: core.py Project: MohsenKohrangi/oq-engine
    def full_disaggregation(self):
        """
        Run the disaggregation phase after hazard curve finalization.
        """
        super(DisaggHazardCalculator, self).post_execute()
        hc = self.hc
        tl = self.hc.truncation_level
        mag_bin_width = self.hc.mag_bin_width
        eps_edges = numpy.linspace(-tl, tl, self.hc.num_epsilon_bins + 1)
        logs.LOG.info('%d epsilon bins from %s to %s', len(eps_edges) - 1,
                      min(eps_edges), max(eps_edges))

        arglist = []
        self.bin_edges = {}
        curves_dict = dict((site.id, self.get_curves(site))
                           for site in self.hc.site_collection)

        for job_id, sitecol, srcs, lt_model, gsim_by_rlz, task_no in \
                self.task_arg_gen():

            trt_num = dict((trt, i) for i, trt in enumerate(
                           lt_model.get_tectonic_region_types()))
            infos = list(models.LtModelInfo.objects.filter(
                         lt_model=lt_model))

            max_mag = max(i.max_mag for i in infos)
            min_mag = min(i.min_mag for i in infos)
            mag_edges = mag_bin_width * numpy.arange(
                int(numpy.floor(min_mag / mag_bin_width)),
                int(numpy.ceil(max_mag / mag_bin_width) + 1))
            logs.LOG.info('%d mag bins from %s to %s', len(mag_edges) - 1,
                          min_mag, max_mag)

            for site in self.hc.site_collection:
                curves = curves_dict[site.id]
                if not curves:
                    continue  # skip zero-valued hazard curves
                bb = self.bb_dict[lt_model.id, site.id]
                if not bb:
                    logs.LOG.info(
                        'location %s was too far, skipping disaggregation',
                        site.location)
                    continue

                dist_edges, lon_edges, lat_edges = bb.bins_edges(
                    hc.distance_bin_width, hc.coordinate_bin_width)
                logs.LOG.info(
                    '%d dist bins from %s to %s', len(dist_edges) - 1,
                    min(dist_edges), max(dist_edges))
                logs.LOG.info('%d lon bins from %s to %s', len(lon_edges) - 1,
                              bb.west, bb.east)
                logs.LOG.info('%d lat bins from %s to %s', len(lat_edges) - 1,
                              bb.south, bb.north)

                self.bin_edges[lt_model.id, site.id] = (
                    mag_edges, dist_edges, lon_edges, lat_edges, eps_edges)

            arglist.append((self.job.id, sitecol, srcs, lt_model, gsim_by_rlz,
                            trt_num, curves_dict, self.bin_edges))

        self.initialize_percent(compute_disagg, arglist)
        res = tasks.map_reduce(compute_disagg, arglist, self.agg_result, {})
        self.save_disagg_results(res)  # dictionary key -> probability array
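The bin-edge arithmetic above is easy to verify in isolation; with made-up values mag_bin_width = 0.5, min_mag = 4.2, max_mag = 6.9 (illustrative only, not taken from any real calculation):

import numpy

mag_bin_width, min_mag, max_mag = 0.5, 4.2, 6.9   # illustrative values only
mag_edges = mag_bin_width * numpy.arange(
    int(numpy.floor(min_mag / mag_bin_width)),
    int(numpy.ceil(max_mag / mag_bin_width) + 1))
# -> array([4. , 4.5, 5. , 5.5, 6. , 6.5, 7. ]): 7 edges, i.e. 6 magnitude bins
#    whose outermost edges enclose [min_mag, max_mag]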
Code example #5
File: utils_tasks_test.py Project: luisera/oq-engine
 def test_single_item(self):
     expected = ["hello"] * 5
     result = tasks.map_reduce(
         just_say_hello, [(i, ) for i in range(5)],
         lambda lst, val: lst + [val], [])
     self.assertEqual(expected, result)
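Taken together, these examples suggest the calling convention tasks.map_reduce(task, task_args, agg, acc[, name]): task_args is a sequence of argument tuples, agg folds each task result into the accumulator acc, and the optional name (example #3) appears to be used only for labelling the run. With distribution disabled (OQ_NO_DISTRIBUTE) the behaviour should reduce to a plain sequential fold; a minimal sketch under that assumption, not the engine's implementation:

def map_reduce_sketch(task, task_args, agg, acc):
    # Sequential equivalent of the pattern used above: run the task on each
    # argument tuple and fold its result into the accumulator.
    for args in task_args:
        acc = agg(acc, task(*args))
    return acc

# Mirrors test_single_item in example #5:
result = map_reduce_sketch(
    lambda i: 'hello', [(i,) for i in range(5)],
    lambda lst, val: lst + [val], [])
assert result == ['hello'] * 5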