예제 #1
0
    def export_curves_rlzs(self, aids, key):
        """
        Export the loss curves for the given assets.

        :param aids: indices of the assets of interest
        :param key: 'rlz-XXX' for a single realization, 'rlzs' for all
        :returns: a dictionary key -> record of dtype loss_curve_dt
        """
        single = key.startswith('rlz-')
        if 'loss_curves-rlzs' in self.dstore:  # classical_risk
            curves = self.dstore['loss_curves-rlzs'][aids]  # shape (A, R)
            if single:
                return {key: curves[:, int(key[4:])]}
            # key == 'rlzs': one entry per realization
            return {'rlz-%03d' % r: curves[:, r] for r in range(self.R)}

        # otherwise event_based: rebuild the curves from the loss ratios
        cbuilder = riskinput.read_composite_risk_model(
            self.dstore).curve_builder
        avalues = self.assetcol.values(aids)
        lrgetter = riskinput.LossRatiosGetter(self.dstore, aids)
        if single:
            r = int(key[4:])
            return {'rlz-%03d' % r: cbuilder.build_curves(
                avalues, lrgetter.get(r))}
        # key == 'rlzs': return a dictionary with all realizations;
        # this may be disabled in the future unless an asset is specified
        return {'rlz-%03d' % r: cbuilder.build_curves(
            avalues, lrgetter.get(r)) for r in range(self.R)}
예제 #2
0
 def execute(self):
     """
     Build the loss maps (and, with more than one realization, the
     statistical loss maps and loss curves) from the stored loss
     ratios, by distributing blocks of assets to parallel workers.
     """
     oq = self.oqparam
     R = len(self.loss_builder.weights)
     # build loss maps
     if 'all_loss_ratios' in self.datastore and oq.conditional_loss_poes:
         assetcol = self.assetcol
         stats = oq.risk_stats()
         builder = self.loss_builder
         A = len(assetcol)  # number of assets
         S = len(stats)  # number of statistics
         P = len(builder.return_periods)
         # create loss_maps datasets
         self.datastore.create_dset('loss_maps-rlzs',
                                    self.loss_maps_dt, (A, R),
                                    fillvalue=None)
         if R > 1:
             # with multiple realizations also store the statistical
             # loss maps and curves, labelled with the statistics names
             self.datastore.create_dset('loss_maps-stats',
                                        self.loss_maps_dt, (A, S),
                                        fillvalue=None)
             self.datastore.set_attrs(
                 'loss_maps-stats',
                 stats=[encode(name) for (name, func) in stats])
             self.datastore.create_dset('curves-stats',
                                        oq.loss_dt(), (A, S, P),
                                        fillvalue=None)
             self.datastore.set_attrs(
                 'curves-stats',
                 return_periods=builder.return_periods,
                 stats=[encode(name) for (name, func) in stats])
         mon = self.monitor('loss maps')
         # with celery the workers can only read the parent datastore
         # if it lives on a shared directory
         read_access = (bool(config.directory.shared_dir)
                        if config.distribution.oq_distribute == 'celery'
                        else True)
         # lazy mode: defer reading the loss ratios to the workers
         lazy = (oq.hazard_calculation_id
                 and 'all_loss_ratios' in self.datastore.parent
                 and read_access)
         logging.info('Instantiating LossRatiosGetters')
         with self.monitor('building lrgetters',
                           measuremem=True,
                           autoflush=True):
             allargs = []
             for aids in split_in_blocks(range(A), oq.concurrent_tasks):
                 dstore = self.datastore.parent if lazy else self.datastore
                 getter = riskinput.LossRatiosGetter(dstore, aids, lazy)
                 # a lazy getter will read the loss_ratios from the workers
                 # an eager getter reads the loss_ratios upfront
                 allargs.append((assetcol.values(aids), builder, getter,
                                 stats, oq.conditional_loss_poes, mon))
         if lazy:
             # avoid OSError: Can't read data (Wrong b-tree signature)
             self.datastore.parent.close()
         parallel.Starmap(build_curves_maps,
                          allargs).reduce(self.save_curves_maps)
         if lazy:  # the parent was closed, reopen it
             self.datastore.parent.open()
예제 #3
0
    def execute(self):
        """
        Build the loss maps (per realization and, with more than one
        realization, their statistics) from the stored loss ratios,
        then build an aggregate loss curve per realization.
        """
        # build loss maps
        if ('all_loss_ratios' in self.datastore
                and self.oqparam.conditional_loss_poes):
            assetcol = self.assetcol
            rlzs = self.rlzs_assoc.realizations
            stats = self.oqparam.risk_stats()
            builder = self.riskmodel.curve_builder
            A = len(assetcol)  # number of assets
            R = len(self.datastore['realizations'])
            # create loss_maps datasets
            self.datastore.create_dset('loss_maps-rlzs',
                                       builder.loss_maps_dt, (A, R),
                                       fillvalue=None)
            if R > 1:
                # statistics only make sense with multiple realizations
                self.datastore.create_dset('loss_maps-stats',
                                           builder.loss_maps_dt,
                                           (A, len(stats)),
                                           fillvalue=None)
            mon = self.monitor('loss maps')
            if self.oqparam.hazard_calculation_id and (
                    'asset_loss_table' in self.datastore.parent):
                Starmap = parallel.Starmap  # we can parallelize fully
                lrgetter = riskinput.LossRatiosGetter(self.datastore.parent)
                # avoid OSError: Can't read data (Wrong b-tree signature)
                self.datastore.parent.close()
            else:  # there is a single datastore
                # we cannot read from it in parallel while writing
                Starmap = parallel.Sequential
                lrgetter = riskinput.LossRatiosGetter(self.datastore)
            Starmap.apply(build_loss_maps,
                          (assetcol, builder, lrgetter, rlzs, stats, mon),
                          self.oqparam.concurrent_tasks).reduce(
                              self.save_loss_maps)
            if self.oqparam.hazard_calculation_id:
                # the parent was closed above, reopen it
                self.datastore.parent.open()

        # build an aggregate loss curve per realization
        if 'agg_loss_table' in self.datastore:
            self.build_agg_curve()
예제 #4
0
def export_asset_loss_table(ekey, dstore):
    """
    Export in parallel the asset loss table from the datastore.

    NB1: for large calculation this may run out of memory
    NB2: due to an heisenbug in the parallel reading of .hdf5 files this works
    reliably only if the datastore has been created by a different process

    The recommendation is: *do not use this exporter*: rather, study its source
    code and write what you need. Every postprocessing is different.

    :param ekey: export key, a pair (datastore key, file format)
    :param dstore: the datastore to export from
    :returns: a list with the single name of the written .hdf5 file
    """
    key, fmt = ekey
    oq = dstore['oqparam']
    assetcol = dstore['assetcol']
    arefs = dstore['asset_refs'].value
    avals = assetcol.values()
    loss_types = dstore.get_attr('all_loss_ratios', 'loss_types').split()
    # one float32 column per loss type, plus insured columns if requested
    dtlist = [(lt, F32) for lt in loss_types]
    if oq.insured_losses:
        for lt in loss_types:
            dtlist.append((lt + '_ins', F32))
    # composite dtype pairing each realization index with its losses
    lrs_dt = numpy.dtype([('rlzi', U16), ('losses', dtlist)])
    fname = dstore.export_path('%s.%s' % ekey)
    monitor = performance.Monitor(key, fname)
    lrgetter = riskinput.LossRatiosGetter(dstore)
    aids = range(len(assetcol))
    # one task per block of asset ordinals
    allargs = [(lrgetter, list(block), monitor)
               for block in split_in_blocks(aids, oq.concurrent_tasks)]
    dstore.close()  # avoid OSError: Can't read data (Wrong b-tree signature)
    L = len(loss_types)
    with hdf5.File(fname, 'w') as f:
        nbytes = 0
        total = numpy.zeros(len(dtlist), F32)
        for pairs in parallel.Starmap(get_loss_ratios, allargs):
            for aid, data in pairs:
                asset = assetcol[aid]
                avalue = avals[aid]
                for l, lt in enumerate(loss_types):
                    aval = avalue[lt]
                    # convert loss ratios into absolute losses; the
                    # insured columns (if any) are offset by L
                    for i in range(oq.insured_losses + 1):
                        data['ratios'][:, l + L * i] *= aval
                # NOTE(review): aref is presumably bytes, since it is
                # concatenated to a bytes prefix — confirm against dstore
                aref = arefs[asset.idx]
                f[b'asset_loss_table/' + aref] = data.view(lrs_dt)
                total += data['ratios'].sum(axis=0)
                nbytes += data.nbytes
        f['asset_loss_table'].attrs['loss_types'] = ' '.join(loss_types)
        f['asset_loss_table'].attrs['total'] = total
        f['asset_loss_table'].attrs['nbytes'] = nbytes
    return [fname]
예제 #5
0
    def execute(self):
        """
        Build the loss maps (per realization and, with more than one
        realization, their statistics) from the stored loss ratios by
        distributing blocks of assets to parallel workers, then build
        an aggregate loss curve per realization.
        """
        oq = self.oqparam
        # build loss maps
        if 'all_loss_ratios' in self.datastore and oq.conditional_loss_poes:
            assetcol = self.assetcol
            stats = oq.risk_stats()
            builder = self.riskmodel.curve_builder
            A = len(assetcol)  # number of assets
            weights = self.datastore['realizations']['weight']
            R = len(weights)  # number of realizations
            # create loss_maps datasets
            self.datastore.create_dset('loss_maps-rlzs',
                                       self.loss_maps_dt, (A, R),
                                       fillvalue=None)
            if R > 1:
                # statistics only make sense with multiple realizations
                self.datastore.create_dset('loss_maps-stats',
                                           self.loss_maps_dt, (A, len(stats)),
                                           fillvalue=None)
            mon = self.monitor('loss maps')
            # lazy mode: defer reading the loss ratios to the workers
            lazy = (oq.hazard_calculation_id
                    and 'all_loss_ratios' in self.datastore.parent)
            logging.info('Instantiating LossRatiosGetters')
            with self.monitor('building lrgetters',
                              measuremem=True,
                              autoflush=True):
                allargs = []
                for aids in split_in_blocks(range(A), oq.concurrent_tasks):
                    dstore = self.datastore.parent if lazy else self.datastore
                    getter = riskinput.LossRatiosGetter(dstore, aids, lazy)
                    # a lazy getter will read the loss_ratios from the workers
                    # an eager getter reads the loss_ratios upfront
                    allargs.append((assetcol.values(aids), builder, getter,
                                    weights, stats, mon))
            if lazy:
                # avoid OSError: Can't read data (Wrong b-tree signature)
                self.datastore.parent.close()
            parallel.Starmap(build_loss_maps,
                             allargs).reduce(self.save_loss_maps)
            if lazy:  # the parent was closed, reopen it
                self.datastore.parent.open()

        # build an aggregate loss curve per realization
        if 'agg_loss_table' in self.datastore:
            self.build_agg_curve()
예제 #6
0
    def export_curves_rlzs(self, aids, key):
        """
        Export the loss curves for the given assets.

        :param aids: indices of the assets of interest
        :param key: 'rlz-XXX' for a single realization, or a false
                    value to get all realizations
        :returns: a dictionary key -> record of dtype loss_curve_dt
        """
        if 'loss_curves-rlzs' in self.dstore:  # classical_risk
            data = self.dstore['loss_curves-rlzs'][aids]  # shape (A, R)
            if key:
                rlzi = int(key[4:])  # strip the 'rlz-' prefix
                return {key: data[:, rlzi]}
            # no key: return all realizations
            return {'rlz-%03d' % rlzi: data[:, rlzi] for rlzi in range(self.R)}

        # otherwise event_based
        builder = self.dstore['riskmodel'].curve_builder
        assets = [self.assetcol[aid] for aid in aids]
        rlzi = int(key[4:]) if key else None  # strip rlz-
        # NB: test `is not None`, not truthiness: rlzi == 0 (i.e. key
        # 'rlz-000') is a valid single realization and must not fall
        # through to the all-realizations branch
        if rlzi is not None:
            ratios = riskinput.LossRatiosGetter(self.dstore).get(aids, rlzi)
            return {rlzi: builder.build_curves(assets, ratios, rlzi)}
        # return a dictionary with all realizations
        ratios = riskinput.LossRatiosGetter(self.dstore).get(aids, None)
        return {
            'rlz-%03d' % rlzi: builder.build_curves(assets, ratios, rlzi)
            for rlzi in range(self.R)
        }