Example #1
0
 def _gen_riskinputs(self, kind, eps, num_events):
     """
     Yield one RiskInput per block of (at most 1000) assets on a site.

     :param kind: 'poe' (hazard curves) or 'gmf' (ground motion fields)
     :param eps: matrix of epsilons indexed by asset ordinal, or None
     :param num_events: number of events (unused here)
     """
     hazard_store = self.can_read_parent() or self.datastore
     for sid, site_assets in enumerate(self.assetcol.assets_by_site()):
         if not len(site_assets):
             continue
         # pick the hazard getter matching the requested kind
         if kind == 'poe':  # hcurves, shape (R, N)
             getter = PmapGetter(hazard_store, self.rlzs_assoc, [sid])
             getter.num_rlzs = self.R
         else:  # gmf
             getter = GmfDataGetter(hazard_store, [sid], self.R,
                                    self.oqparam.imtls)
         if hazard_store is self.datastore:
             # read the hazard data in the controller node
             getter.init()
         else:
             # the datastore must be closed to avoid the HDF5 fork bug
             assert hazard_store.hdf5 == (), (
                 '%s is not closed!' % hazard_store)
         for chunk in general.block_splitter(site_assets, 1000):
             # dictionary of epsilons for the reduced assets
             reduced_eps = {}
             if eps is not None and len(eps):
                 for ass in chunk:
                     reduced_eps[ass.ordinal] = eps[ass.ordinal]
             yield riskinput.RiskInput(getter, [chunk], reduced_eps)
Example #2
0
 def _gen_riskinputs(self, kind, eps, num_events):
     """
     Yield one RiskInput per block of assets on a site and store a
     summary array 'riskinput_info' (site id, number of assets) in the
     datastore.

     :param kind: 'poe' (hazard curves) or 'gmf' (ground motion fields)
     :param eps: matrix of epsilons indexed by asset ordinal, or None
     :param num_events: number of events (unused here)
     """
     rinfo_dt = numpy.dtype([('sid', U16), ('num_assets', U16)])
     rinfo = []
     assets_by_site = self.assetcol.assets_by_site()
     dstore = self.can_read_parent() or self.datastore
     for sid, assets in enumerate(assets_by_site):
         if len(assets) == 0:
             continue
         # build the riskinputs
         if kind == 'poe':  # hcurves, shape (R, N)
             getter = PmapGetter(dstore, self.rlzs_assoc, [sid])
             getter.num_rlzs = self.R
         else:  # gmf
             getter = GmfDataGetter(dstore, [sid], self.R)
         if dstore is self.datastore:
             # read the hazard data in the controller node
             getter.init()
         else:
             # the datastore must be closed to avoid the HDF5 fork bug
             assert dstore.hdf5 == (), '%s is not closed!' % dstore
         for block in general.block_splitter(
                 assets, self.oqparam.assets_per_site_limit):
             # dictionary of epsilons for the reduced assets
             reduced_eps = {
                 ass.ordinal: eps[ass.ordinal]
                 for ass in block if eps is not None and len(eps)
             }
             yield riskinput.RiskInput(getter, [block], reduced_eps)
         # record the total number of assets on this site; the previous
         # code used len(block), i.e. only the size of the LAST block
         # yielded by block_splitter, which underestimates num_assets
         # whenever a site has more assets than assets_per_site_limit
         rinfo.append((sid, len(assets)))
         if len(assets) >= TWO16:
             # num_assets is stored as U16, so this count would overflow
             logging.error('There are %d assets on site #%d!', len(assets),
                           sid)
     self.datastore['riskinput_info'] = numpy.array(rinfo, rinfo_dt)
Example #3
0
 def build_riskinputs(self, kind, eps=None, num_events=0):
     """
     :param kind:
         kind of hazard getter, can be 'poe' or 'gmf'
     :param eps:
         a matrix of epsilons (or None)
     :param num_events:
         how many events there are
     :returns:
         a list of RiskInputs objects, sorted by IMT.
     """
     logging.info('There are %d realizations', self.R)
     imtls = self.oqparam.imtls
     # the risk IMTs must overlap with the hazard IMTs, otherwise
     # no risk can be computed from the available hazard
     if not set(self.oqparam.risk_imtls) & set(imtls):
         rsk = ', '.join(self.oqparam.risk_imtls)
         haz = ', '.join(imtls)
         raise ValueError('The IMTs in the risk models (%s) are disjoint '
                          "from the IMTs in the hazard (%s)" % (rsk, haz))
     num_tasks = self.oqparam.concurrent_tasks or 1
     if not hasattr(self, 'assetcol'):
         # lazily read the asset collection from the datastore
         self.assetcol = self.datastore['assetcol']
     self.riskmodel.taxonomy = self.assetcol.tagcol.taxonomy
     assets_by_site = self.assetcol.assets_by_site()
     with self.monitor('building riskinputs', autoflush=True):
         riskinputs = []
         # weight each site by its number of assets so the blocks
         # built below are balanced by asset count
         sid_weight_pairs = [(sid, len(assets))
                             for sid, assets in enumerate(assets_by_site)]
         blocks = general.split_in_blocks(sid_weight_pairs,
                                          num_tasks,
                                          weight=operator.itemgetter(1))
         # read from the parent datastore when one is available
         dstore = self.can_read_parent() or self.datastore
         for block in blocks:
             sids = numpy.array([sid for sid, _weight in block])
             reduced_assets = assets_by_site[sids]
             # dictionary of epsilons for the reduced assets
             reduced_eps = {}
             for assets in reduced_assets:
                 for ass in assets:
                     if eps is not None and len(eps):
                         reduced_eps[ass.ordinal] = eps[ass.ordinal]
             # build the riskinputs
             if kind == 'poe':  # hcurves, shape (R, N)
                 getter = PmapGetter(dstore, sids, self.rlzs_assoc)
                 getter.num_rlzs = self.R
             else:  # gmf
                 getter = GmfDataGetter(dstore, sids, self.R, num_events)
             if dstore is self.datastore:
                 # read the hazard data in the controller node
                 logging.info('Reading hazard')
                 getter.init()
             else:
                 # the datastore must be closed to avoid the HDF5 fork bug
                 assert dstore.hdf5 == (), '%s is not closed!' % dstore
             ri = riskinput.RiskInput(getter, reduced_assets, reduced_eps)
             # keep only risk inputs carrying some weight
             if ri.weight > 0:
                 riskinputs.append(ri)
         assert riskinputs
         logging.info('Built %d risk inputs', len(riskinputs))
         return riskinputs
Example #4
0
 def _gen_riskinputs(self, kind, eps, num_events):
     """
     Yield RiskInput objects, one per block of sites, with sites
     weighted by the amount of hazard data they carry.

     :param kind: 'poe' (hazard curves) or 'gmf' (ground motion fields)
     :param eps: a matrix of epsilons indexed by asset ordinal, or None
     :param num_events: number of events, passed to the GMF getter
     """
     num_tasks = self.oqparam.concurrent_tasks or 1
     assets_by_site = self.assetcol.assets_by_site()
     if kind == 'poe':
         indices = None
     else:
         # per-site indices into the gmf_data dataset
         indices = self.datastore['gmf_data/indices'].value
     dstore = self.can_read_parent() or self.datastore
     sid_weight = []
     for sid, assets in enumerate(assets_by_site):
         if len(assets) == 0:
             continue
         elif indices is None:
             # 'poe' case: weight by number of assets only
             weight = len(assets)
         else:
             idx = indices[sid]
             if indices.dtype.names:  # engine < 3.2
                 num_gmfs = sum(stop - start for start, stop in idx)
             else:  # engine >= 3.2
                 num_gmfs = (idx[1] - idx[0]).sum()
             # sites with zero GMFs still get a nonzero weight
             weight = len(assets) * (num_gmfs or 1)
         sid_weight.append((sid, weight))
     for block in general.split_in_blocks(sid_weight,
                                          num_tasks,
                                          weight=operator.itemgetter(1)):
         sids = numpy.array([sid for sid, _weight in block])
         reduced_assets = assets_by_site[sids]
         # dictionary of epsilons for the reduced assets
         reduced_eps = {}
         for assets in reduced_assets:
             for ass in assets:
                 if eps is not None and len(eps):
                     reduced_eps[ass.ordinal] = eps[ass.ordinal]
         # build the riskinputs
         if kind == 'poe':  # hcurves, shape (R, N)
             getter = PmapGetter(dstore, self.rlzs_assoc, sids)
             getter.num_rlzs = self.R
         else:  # gmf
             getter = GmfDataGetter(dstore, sids, self.R, num_events,
                                    self.oqparam.imtls)
         if dstore is self.datastore:
             # read the hazard data in the controller node
             getter.init()
         else:
             # the datastore must be closed to avoid the HDF5 fork bug
             assert dstore.hdf5 == (), '%s is not closed!' % dstore
         ri = riskinput.RiskInput(getter, reduced_assets, reduced_eps)
         # attach the block weight for use downstream
         ri.weight = block.weight
         yield ri
Example #5
0
    def test_case_11(self):
        """Check the exported curves for case_11, then cross-check that
        the mean curve equals the weighted combination of the realization
        curves returned by PmapGetter."""
        expected = [
            'hazard_curve-mean.csv',
            'hazard_curve-smltp_b1_b2-gsimltp_b1.csv',
            'hazard_curve-smltp_b1_b3-gsimltp_b1.csv',
            'hazard_curve-smltp_b1_b4-gsimltp_b1.csv',
            'quantile_curve-0.1.csv',
            'quantile_curve-0.9.csv',
        ]
        self.assert_curves_ok(expected, case_11.__file__)

        # checking PmapGetter.get_pcurve
        dstore = self.calc.datastore
        weights = self.calc.weights
        getter = PmapGetter(dstore, weights)
        poes = getter.get_hcurves(getter.init())[0]
        mean = dstore.sel('hcurves-stats', stat='mean', sid=0)
        warr = numpy.array([w['weight'] for w in weights])
        recomputed = poes.T @ warr
        aac(recomputed.flat, mean.flat)
        check_disagg_by_src(dstore)