Example #1
def main(calc_id: int, aggregate_by):
    """
    Re-run the postprocessing after an event based risk calculation
    """
    parent = util.read(calc_id)
    oqp = parent['oqparam']
    aggby = aggregate_by.split(',')
    for tagname in aggby:
        if tagname not in oqp.aggregate_by:
            raise ValueError('%r not in %s' % (tagname, oqp.aggregate_by))
    job_id = logs.init('job', level=logging.INFO)
    dic = dict(
        calculation_mode='reaggregate',
        description=oqp.description + '[aggregate_by=%s]' % aggregate_by,
        user_name=getpass.getuser(), is_running=1, status='executing',
        pid=os.getpid(), hazard_calculation_id=job_id)
    logs.dbcmd('update_job', job_id, dic)
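    # default to a local process pool unless distribution is explicitly off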
    if os.environ.get('OQ_DISTRIBUTE') not in ('no', 'processpool'):
        os.environ['OQ_DISTRIBUTE'] = 'processpool'
    with logs.handle(job_id, logging.INFO):
        oqp.hazard_calculation_id = parent.calc_id
        parallel.Starmap.init()
        prc = PostRiskCalculator(oqp, job_id)
        try:
            prc.run(aggregate_by=aggby)
            engine.expose_outputs(prc.datastore)
            logs.dbcmd('finish', job_id, 'complete')
        except Exception:
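            # record the failure in the job table; note that the exception
            # itself is swallowed here and does not propagate to the caller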
            logs.dbcmd('finish', job_id, 'failed')
        finally:
            parallel.Starmap.shutdown()
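Since `main` is a plain function, it can be driven directly from Python. A minimal invocation sketch, assuming a completed event based risk calculation with hypothetical id 1234 whose job.ini listed NAME_1 in aggregate_by, and a running DbServer:

main(1234, 'NAME_1')  # re-aggregate the parent calculation by NAME_1 only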
Example #2
def main(calc_id: int, aggregate_by):
    """
    Re-run the postprocessing after an event based risk calculation
    """
    parent = datastore.read(calc_id)
    oqp = parent['oqparam']
    aggby = aggregate_by.split(',')
    for tagname in aggby:
        if tagname not in oqp.aggregate_by:
            raise ValueError('%r not in %s' % (tagname, oqp.aggregate_by))
    dic = dict(calculation_mode='reaggregate',
               description=oqp.description +
               '[aggregate_by=%s]' % aggregate_by,
               user_name=getpass.getuser(),
               is_running=1,
               status='executing',
               pid=os.getpid(),
               hazard_calculation_id=calc_id)
    log = logs.init('job', dic, logging.INFO)
    if os.environ.get('OQ_DISTRIBUTE') not in ('no', 'processpool'):
        os.environ['OQ_DISTRIBUTE'] = 'processpool'
    with log:
        oqp.hazard_calculation_id = parent.calc_id
        parallel.Starmap.init()
        prc = PostRiskCalculator(oqp, log.calc_id)
        prc.run(aggregate_by=aggby)
        engine.expose_outputs(prc.datastore)
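Example #2 is a later revision of the same entry point: the job record dictionary is passed directly to logs.init, which returns a log context exposing calc_id, so the separate logs.dbcmd('update_job', ...) call and the try/except/finally status bookkeeping of Example #1 are absorbed by the `with log:` block. A minimal sketch of the newer pattern (the description string is made up; a running DbServer is assumed):

log = logs.init('job', dict(calculation_mode='reaggregate',
                            description='hypothetical reaggregation'))
with log:  # log.calc_id identifies the newly created job
    print('running as calculation', log.calc_id)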
Example #3
    def post_execute(self, dummy):
        """
        Compute and store average losses from the risk_by_event dataset,
        and then loss curves and maps.
        """
        oq = self.oqparam

        # sanity check on the risk_by_event
        alt = self.datastore.read_df('risk_by_event', 'event_id')
        K = self.datastore['risk_by_event'].attrs.get('K', 0)
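        # the table holds at most one row per event, per loss type and per
        # aggregation key (the K tag combinations plus the total, hence K + 1)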
        upper_limit = self.E * self.L * (K + 1)
        size = len(alt)
        assert size <= upper_limit, (size, upper_limit)
        if oq.avg_losses:
            for r in range(self.R):
                self.avg_losses[:, r] *= self.avg_ratio[r]
            self.datastore['avg_losses-rlzs'] = self.avg_losses
            stats.set_rlzs_stats(self.datastore,
                                 'avg_losses',
                                 asset_id=self.assetcol['id'],
                                 loss_type=oq.loss_names)

        # save agg_losses
        units = self.datastore['cost_calculator'].get_units(oq.loss_names)
        if oq.calculation_mode == 'scenario_risk':  # compute agg_losses
            alt['rlz_id'] = self.rlzs[alt.index.to_numpy()]
            agglosses = numpy.zeros((K + 1, self.R, self.L), F32)
            for (agg_id, rlz_id,
                 loss_id), df in alt.groupby(['agg_id', 'rlz_id', 'loss_id']):
                agglosses[agg_id, rlz_id,
                          loss_id] = (df.loss.sum() * self.avg_ratio[rlz_id])
            self.datastore['agg_losses-rlzs'] = agglosses
            stats.set_rlzs_stats(self.datastore,
                                 'agg_losses',
                                 agg_id=K,
                                 loss_types=oq.loss_names,
                                 units=units)
            logging.info('Total portfolio loss\n' +
                         views.view('portfolio_loss', self.datastore))
        else:  # event_based_risk, run post_risk
            prc = PostRiskCalculator(oq, self.datastore.calc_id)
            if hasattr(self, 'exported'):
                prc.exported = self.exported
            with prc.datastore:
                prc.run(exports='')

        if (oq.investigation_time or not oq.avg_losses
                or 'agg_losses-rlzs' not in self.datastore):
            return

        # sanity check on agg_losses vs the sum of the avg_losses; thanks to
        # the early return above this point is only reached for scenario_risk,
        # where agglosses has been computed
        sumlosses = self.avg_losses.sum(axis=0)
        if not numpy.allclose(agglosses[K], sumlosses, rtol=1E-6):
            url = ('https://docs.openquake.org/oq-engine/advanced/'
                   'addition-is-non-associative.html')
            logging.warning(
                'Due to rounding errors inherent in floating-point arithmetic,'
                ' agg_losses != sum(avg_losses): %s != %s\nsee %s',
                agglosses[K].mean(), sumlosses.mean(), url)
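The groupby-into-array step above is easy to get wrong; here is a self-contained sketch of the same aggregation pattern on a toy event loss table (all names and numbers are made up, only numpy and pandas are required):

import numpy
import pandas

# toy event loss table: one row per (event, aggregation, loss type)
alt = pandas.DataFrame(dict(event_id=[0, 0, 1, 1],
                            agg_id=[0, 1, 0, 1],
                            rlz_id=[0, 0, 1, 1],
                            loss_id=[0, 0, 0, 0],
                            loss=[10., 20., 30., 40.]))
K, R, L = 1, 2, 1  # aggregation keys, realizations, loss types
avg_ratio = numpy.array([.5, .5])  # e.g. 1 / effective number of events
agglosses = numpy.zeros((K + 1, R, L), numpy.float32)
for (agg_id, rlz_id, loss_id), df in alt.groupby(
        ['agg_id', 'rlz_id', 'loss_id']):
    agglosses[agg_id, rlz_id, loss_id] = df.loss.sum() * avg_ratio[rlz_id]
print(agglosses[:, :, 0])  # [[ 5. 15.] [10. 20.]]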
Example #4
    def post_execute(self, dummy):
        """
        Compute and store average losses from the losses_by_event dataset,
        and then loss curves and maps.
        """
        oq = self.oqparam
        self.datastore.create_dframe('avg_gmf', self.avg_gmf.items())
        prc = PostRiskCalculator(oq, self.datastore.calc_id)
        prc.datastore.parent = self.datastore.parent
        prc.run()
Example #5
    def post_execute(self, dummy):
        """
        Compute and store average losses from the losses_by_event dataset,
        and then loss curves and maps.
        """
        oq = self.oqparam
        if oq.avg_losses:
            self.datastore['avg_losses-stats'].attrs['stats'] = [b'mean']
        prc = PostRiskCalculator(oq, self.datastore.calc_id)
        prc.datastore.parent = self.datastore.parent
        prc.run()
Example #6
def post_risk(ebr_id):
    """
    Generate loss curves and maps from an event loss table
    """
    dbserver.ensure_on()
    dstore = util.read(ebr_id)
    oq = dstore['oqparam']
    prc = PostRiskCalculator(oq)
    prc.datastore.parent = dstore
    prc.run()
    logging.info('Generated %s', prc.datastore.filename)
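A minimal invocation sketch, assuming 1234 is the (hypothetical) id of a completed event based risk calculation readable via util.read:

post_risk(1234)  # builds loss curves and maps in a new child datastore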
Example #7
    def test_recompute(self):
        # test recomputing aggregate loss curves with post_risk
        self.run_calc(recompute.__file__, 'job.ini')
        parent = self.calc.datastore
        # the parent has aggregate_by = NAME_1, NAME_2, taxonomy
        oq = parent['oqparam']
        oq.__dict__['aggregate_by'] = ['NAME_1']
        job_id = logs.init('nojob', logging.INFO)  # requires the DbServer
        prc = PostRiskCalculator(oq, job_id)
        oq.hazard_calculation_id = parent.calc_id
        with mock.patch.dict(os.environ, {'OQ_DISTRIBUTE': 'no'}):
            prc.run()
Example #8
def recompute_losses(calc_id, aggregate_by):
    """Re-run the postprocessing after an event based risk calculation"""
    parent = util.read(calc_id)
    oqp = parent['oqparam']
    aggby = aggregate_by.split(',')
    for tagname in aggby:
        if tagname not in oqp.aggregate_by:
            raise ValueError('%r not in %s' % (tagname, oqp.aggregate_by))
    job_id = logs.init('job', level=logging.INFO)
    if os.environ.get('OQ_DISTRIBUTE') not in ('no', 'processpool'):
        os.environ['OQ_DISTRIBUTE'] = 'processpool'
    with logs.handle(job_id, logging.INFO):
        oqp.hazard_calculation_id = calc_id
        parallel.Starmap.init()
        prc = PostRiskCalculator(oqp, job_id)
        try:
            prc.run(aggregate_by=aggby)
        finally:
            parallel.Starmap.shutdown()
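The try/finally here guarantees that the worker pool is torn down even if the calculation fails. The same discipline with only the standard library, as a rough analogy for what Starmap.init/shutdown bracket:

from multiprocessing import Pool

pool = Pool()  # plays the role of parallel.Starmap.init()
try:
    pool.map(abs, [-1, -2, -3])  # stands in for prc.run(...)
finally:
    pool.close()  # plays the role of parallel.Starmap.shutdown()
    pool.join()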
Example #9
    def test_recompute(self):
        # test recomputing aggregate loss curves with post_risk
        # this is starting from a ruptures.csv file
        out = self.run_calc(recompute.__file__, 'job.ini', exports='csv')
        [fname] = out['agg_losses-rlzs', 'csv']
        self.assertEqualFiles('expected/agg_losses.csv', fname, delta=1E-5)
        [fname] = out['agg_curves-rlzs', 'csv']
        self.assertEqualFiles('expected/agg_curves.csv', fname, delta=1E-5)

        parent = self.calc.datastore
        # the parent has aggregate_by = NAME_1, NAME_2, taxonomy
        oq = parent['oqparam']
        oq.__dict__['aggregate_by'] = ['NAME_1']
        job_id = logs.init('nojob', logging.INFO)  # requires the DbServer
        prc = PostRiskCalculator(oq, job_id)
        oq.hazard_calculation_id = parent.calc_id
        with mock.patch.dict(os.environ, {'OQ_DISTRIBUTE': 'no'}):
            prc.run()
        [fname] = export(('agg_losses-rlzs', 'csv'), prc.datastore)
        self.assertEqualFiles('expected/recomputed_losses.csv', fname,
                              delta=1E-5)
Example #10
    def test_recompute(self):
        # test recomputing aggregate loss curves with post_risk
        # this is starting from a ruptures.csv file
        out = self.run_calc(recompute.__file__, 'job.ini', exports='csv')
        [fname] = out['aggrisk', 'csv']
        self.assertEqualFiles('expected/agg_losses.csv', fname, delta=1E-5)
        [fname] = out['aggcurves', 'csv']
        self.assertEqualFiles('expected/aggcurves.csv', fname, delta=1E-5)

        parent = self.calc.datastore
        # the parent has aggregate_by = NAME_1, NAME_2, taxonomy
        oq = parent['oqparam']
        oq.__dict__['aggregate_by'] = ['NAME_1']
        log = logs.init('job', {'calculation_mode': 'post_risk',
                                'description': 'test recompute'})
        prc = PostRiskCalculator(oq, log.calc_id)
        prc.assetcol = self.calc.assetcol
        oq.hazard_calculation_id = parent.calc_id
        with mock.patch.dict(os.environ, {'OQ_DISTRIBUTE': 'no'}), log:
            prc.run()
        [fname] = export(('aggrisk', 'csv'), prc.datastore)
        self.assertEqualFiles('expected/recomputed_losses.csv', fname,
                              delta=1E-5)
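Compared with Example #9, this revision of the test reads the renamed outputs aggrisk and aggcurves instead of agg_losses-rlzs and agg_curves-rlzs, passes a job dictionary to logs.init, attaches the asset collection explicitly, and runs the calculator inside the log context.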
Example #11
    def build_aggcurves(self):
        prc = PostRiskCalculator(self.oqparam, self.datastore.calc_id)
        prc.assetcol = self.assetcol
        if hasattr(self, 'exported'):
            prc.exported = self.exported
        with prc.datastore:
            prc.run(exports='')

        # save agg_curves-stats
        if self.R > 1 and 'aggcurves' in self.datastore:
            save_curve_stats(self.datastore)