示例#1
0
 def __init__(self, dstore, full_lt, pgetter, srcidx):
     """
     Store the datastore, the full logic tree and the quantities
     derived from the et_ids array (rlzs_by_gsim dictionaries and
     G-slices) needed by the calculation.
     """
     self.datastore = dstore
     self.full_lt = full_lt
     # read the et_ids array once and derive everything else from it
     et_ids = dstore['et_ids'][:]
     rbg_list = full_lt.get_rlzs_by_gsim_list(et_ids)
     self.et_ids = et_ids
     self.rlzs_by_gsim_list = rbg_list
     self.slice_by_g = getters.get_slice_by_g(rbg_list)
     # bound method building hazard curves from a probability map
     self.get_hcurves = pgetter.get_hcurves
     self.srcidx = srcidx
     self.data = []  # accumulator list, filled elsewhere
示例#2
0
 def execute(self):
     """
     Compute the conditional spectrum
     """
     oq = self.oqparam
     self.full_lt = self.datastore['full_lt']
     self.trts = list(self.full_lt.gsim_lt.values)
     # exactly one PoE is expected in poes_disagg for this calculator;
     # unpacking raises if zero or several are given
     [self.poe] = oq.poes_disagg
     self.imts = list(oq.imtls)
     self.M = len(self.imts)
     # read the ruptures from the parent datastore, if any
     dstore = (self.datastore.parent if self.datastore.parent
               else self.datastore)
     totrups = len(dstore['rup/mag'])
     logging.info('Reading {:_d} ruptures'.format(totrups))
     # lightweight per-rupture record: group ID, number of affected
     # sites (used as the weight) and original index
     rdt = [('grp_id', U16), ('nsites', U16), ('idx', U32)]
     rdata = numpy.zeros(totrups, rdt)
     rdata['idx'] = numpy.arange(totrups)
     rdata['grp_id'] = dstore['rup/grp_id'][:]
     rdata['nsites'] = dstore['rup/nsites'][:]
     totweight = rdata['nsites'].sum()
     et_ids = dstore['et_ids'][:]
     rlzs_by_gsim = self.full_lt.get_rlzs_by_gsim_list(et_ids)
     self.slice_by_g = getters.get_slice_by_g(rlzs_by_gsim)
     L = oq.imtls.size
     # shape (total gsims across groups, N, L); N presumably the
     # number of sites — TODO confirm where self.N is set
     poes_shape = (sum(len(rbg) for rbg in rlzs_by_gsim), self.N, L)
     self.datastore.create_dset('poes', float, poes_shape)
     G = max(len(rbg) for rbg in rlzs_by_gsim)
     # cap the task weight so that a single task stays under ~2 GB
     maxw = 2 * 1024**3 / (16 * G * self.M)  # at max 2 GB
     maxweight = min(
         numpy.ceil(totweight / (oq.concurrent_tasks or 1)), maxw)
     U = 0  # largest block weight seen
     Ta = 0  # number of tasks generated
     cmakers = read_cmakers(self.datastore)
     self.datastore.swmr_on()  # enable SWMR before spawning tasks
     smap = parallel.Starmap(conditional_spectrum, h5=self.datastore.hdf5)
     # IMPORTANT!! we rely on the fact that the classical part
     # of the calculation stores the ruptures in chunks of constant
     # grp_id, therefore it is possible to build (start, stop) slices
     for block in general.block_splitter(rdata, maxweight,
                                         operator.itemgetter('nsites'),
                                         operator.itemgetter('grp_id')):
         Ta += 1
         grp_id = block[0]['grp_id']
         # NOTE(review): G, U and Ta are updated in this loop but never
         # read afterwards in this block — possibly leftover bookkeeping
         G = len(rlzs_by_gsim[grp_id])
         cmaker = cmakers[grp_id]
         U = max(U, block.weight)
         # contiguous slice of rupture indices for this group
         slc = slice(block[0]['idx'], block[-1]['idx'] + 1)
         smap.submit((dstore, slc, cmaker))
     results = smap.reduce(self.agg_result)
     return results
示例#3
0
    def post_execute(self, pmap_by_key):
        """
        Collect the hazard curves by realization and export them.

        :param pmap_by_key:
            a dictionary key -> hazard curves
        """
        # count ruptures per magnitude from the rup_* datasets
        # (present, per the comment below, only in few-site runs)
        nr = {
            name: len(dset['mag'])
            for name, dset in self.datastore.items() if name.startswith('rup_')
        }
        if nr:  # few sites, log the number of ruptures per magnitude
            logging.info('%s', nr)
        oq = self.oqparam
        et_ids = self.datastore['et_ids'][:]
        rlzs_by_gsim_list = self.full_lt.get_rlzs_by_gsim_list(et_ids)
        slice_by_g = getters.get_slice_by_g(rlzs_by_gsim_list)
        data = []  # rows (group key, trt, extreme poe) for disagg_by_grp
        weights = [rlz.weight for rlz in self.realizations]
        pgetter = getters.PmapGetter(self.datastore, weights,
                                     self.sitecol.sids, oq.imtls)
        logging.info('Saving _poes')
        # map source_id -> ordinal position in the source_info dataset
        enum = enumerate(self.datastore['source_info']['source_id'])
        srcid = {source_id: i for i, source_id in enum}
        with self.monitor('saving probability maps'):
            for key, pmap in pmap_by_key.items():
                if isinstance(key, str):  # disagg_by_src
                    # string keys are source IDs: store the hazard
                    # curves in the column of that source
                    rlzs_by_gsim = rlzs_by_gsim_list[pmap.grp_id]
                    self.datastore['disagg_by_src'][..., srcid[key]] = (
                        pgetter.get_hcurves(pmap, rlzs_by_gsim))
                elif pmap:  # pmap can be missing if the group is filtered away
                    # key is the group ID
                    trt = self.full_lt.trt_by_et[et_ids[key][0]]
                    # avoid saving PoEs == 1
                    base.fix_ones(pmap)
                    sids = sorted(pmap)
                    arr = numpy.array([pmap[sid].array for sid in sids])
                    self.datastore['_poes'][sids, :, slice_by_g[key]] = arr
                    # largest PoE in the group, kept for the summary
                    extreme = max(
                        get_extreme_poe(pmap[sid].array, oq.imtls)
                        for sid in pmap)
                    data.append((key, trt, extreme))
        # only in a standalone calculation (no parent hazard):
        # save the per-group summary and compute the statistics
        if oq.hazard_calculation_id is None and '_poes' in self.datastore:
            self.datastore['disagg_by_grp'] = numpy.array(
                sorted(data), grp_extreme_dt)
            self.datastore.swmr_on()  # needed
            self.calc_stats()
示例#4
0
    def test_case_56(self):
        # test with oversampling
        # there are 6 potential paths 1A 1B 1C 2A 2B 2C
        # 10 rlzs are being sampled: 1C 1A 1B 1A 1C 1A 2B 2A 2B 2A
        # rlzs_by_g is 135 2 4, 79 68 i.e. 1A*3 1B*1 1C*1, 2A*2 2B*2
        self.run_calc(case_56.__file__, 'job.ini', concurrent_tasks='0')
        [fname] = export(('hcurves/mean', 'csv'), self.calc.datastore)
        self.assertEqualFiles('expected/hcurves.csv', fname)

        # the _poes dataset must have been saved with 3 dimensions
        # (was a no-op expression statement: the shape was read but
        # never checked)
        self.assertEqual(len(self.calc.datastore['_poes'].shape), 3)
        full_lt = self.calc.datastore['full_lt']
        rlzs_by_grp = full_lt.get_rlzs_by_grp()
        numpy.testing.assert_equal(rlzs_by_grp['grp-00'],
                                   [[1, 3, 5], [2], [0, 4]])
        numpy.testing.assert_equal(rlzs_by_grp['grp-01'], [[7, 9], [6, 8]])
        # there are two slices 0:3 and 3:5 with length 3 and 2 respectively
        # (previously unpacked but never asserted)
        slc0, slc1 = get_slice_by_g(rlzs_by_grp)
        self.assertEqual((slc0.stop - slc0.start, slc1.stop - slc1.start),
                         (3, 2))
        [(trt, gsims)] = full_lt.get_gsims_by_trt().items()
        self.assertEqual(len(gsims), 3)