import numpy

from openquake.baselib.general import AccumDict
from openquake.risklib import riskmodels

F32 = numpy.float32


def event_based_risk(riskinputs, param, monitor):
    """
    :param riskinputs:
        :class:`openquake.risklib.riskinput.RiskInput` objects
    :param param:
        a dictionary of parameters
    :param monitor:
        :class:`openquake.baselib.performance.Monitor` instance
    :yields:
        dictionaries of numpy arrays of shape (L, R)
    """
    crmodel = monitor.read('crmodel')
    L = len(crmodel.lti)
    tempname = param['tempname']
    for ri in riskinputs:
        with monitor('getting hazard'):
            ri.hazard_getter.init()
            hazard = ri.hazard_getter.get_hazard()
        mon = monitor('build risk curves', measuremem=False)
        A = len(ri.aids)
        R = ri.hazard_getter.num_rlzs
        try:
            avg = numpy.zeros((A, R, L), F32)
        except MemoryError:
            raise MemoryError(
                'Building array avg of shape (%d, %d, %d)' % (A, R, L))
        result = dict(aids=ri.aids, avglosses=avg)
        acc = AccumDict()  # accumulator eidx -> agglosses
        aid2idx = {aid: idx for idx, aid in enumerate(ri.aids)}
        if 'builder' in param:
            builder = param['builder']
            P = len(builder.return_periods)
            all_curves = numpy.zeros((A, R, P), builder.loss_dt)
        # update the result dictionary and the agg array with each output
        for out in ri.gen_outputs(crmodel, monitor, tempname, hazard):
            if len(out.eids) == 0:  # this happens for sites with no events
                continue
            r = out.rlzi
            agglosses = numpy.zeros((len(out.eids), L), F32)
            for l, loss_type in enumerate(crmodel.loss_types):
                loss_ratios = out[loss_type]
                if loss_ratios is None:  # GMFs below the minimum_intensity
                    continue
                avalues = riskmodels.get_values(loss_type, ri.assets)
                for a, asset in enumerate(ri.assets):
                    aval = avalues[a]
                    aid = asset['ordinal']
                    idx = aid2idx[aid]
                    ratios = loss_ratios[a]  # length E
                    # average losses
                    avg[idx, r, l] = (
                        ratios.sum(axis=0) * param['ses_ratio'] * aval)
                    # agglosses
                    agglosses[:, l] += ratios * aval
                    if 'builder' in param:
                        with mon:  # this is the heaviest part
                            try:
                                all_curves[idx, r][loss_type] = (
                                    builder.build_curve(aval, ratios, r))
                            except ValueError:
                                pass  # not enough events to compute the curve
            # NB: I could yield the agglosses per output, but then I would
            # have millions of small outputs with big data transfer and slow
            # saving time
            acc += dict(zip(out.eids, agglosses))
        if 'builder' in param:
            clp = param['conditional_loss_poes']
            result['curves-rlzs'], result['curves-stats'] = builder.pair(
                all_curves, param['stats'])
            if R > 1 and param['individual_curves'] is False:
                del result['curves-rlzs']
            if clp:
                result['loss_maps-rlzs'], result['loss_maps-stats'] = (
                    builder.build_maps(all_curves, clp, param['stats']))
                if R > 1 and param['individual_curves'] is False:
                    del result['loss_maps-rlzs']
        # store info about the GMFs, must be done at the end
        result['agglosses'] = (numpy.array(list(acc)),
                               numpy.array(list(acc.values())))
        yield result
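# For reference, the per-asset arithmetic in the inner loop above reduces to
# two numpy operations: the average loss is the sum of the event loss ratios,
# rescaled by the time-normalisation factor param['ses_ratio'] and by the
# asset value, while `agglosses` keeps the per-event contributions. Below is
# a minimal sketch with made-up numbers; `ratios`, `aval` and `ses_ratio` are
# hypothetical values, not inputs from a real calculation.
def _demo_average_and_aggregate_losses():
    ratios = numpy.array([.25, 0., .5, .125], F32)  # E=4 event loss ratios
    aval = 1000.  # hypothetical asset value for one loss type
    ses_ratio = .5  # hypothetical time-normalisation factor
    avg_loss = ratios.sum(axis=0) * ses_ratio * aval  # as in the inner loop
    assert avg_loss == 437.5
    agg = ratios * aval  # per-event contribution to agglosses[:, l]
    assert list(agg) == [250., 0., 500., 125.]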
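# The line `acc += dict(zip(out.eids, agglosses))` relies on AccumDict
# summing values on key collisions: when the same event ID appears in more
# than one output, its loss arrays are added elementwise, which is what
# makes yielding a single accumulated result per RiskInput possible. A toy
# illustration (the event IDs and losses are made up):
def _demo_agglosses_accumulation():
    acc = AccumDict()  # event ID -> aggregate losses of shape (L,)
    acc += {0: numpy.array([10., 5.], F32), 1: numpy.array([2., 0.], F32)}
    acc += {1: numpy.array([3., 1.], F32), 2: numpy.array([7., 7.], F32)}
    assert list(acc[1]) == [5., 1.]  # event 1 accumulated across outputs
    # the (eids, losses) pair stored in result['agglosses'] at the end
    eids = numpy.array(list(acc))             # array([0, 1, 2])
    losses = numpy.array(list(acc.values()))  # shape (3, 2)
    return eids, losses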