def get_loss_builder(dstore, return_periods=None, loss_dt=None):
    """
    Build a loss-curves/maps builder from an event based risk datastore.

    :param dstore: datastore for an event based risk calculation
    :param return_periods: optional return periods overriding oqparam's
    :param loss_dt: optional loss dtype overriding ``oqparam.loss_dt()``
    :returns: a LossCurvesMapsBuilder instance
    :raises ValueError: if ``collect_rlzs`` is set and the effective time
        stored in ``gmf_data`` disagrees with the one computed here
    """
    oqparam = dstore['oqparam']
    rlz_weights = dstore['weights'][()]
    try:
        stored_time = dstore['gmf_data'].attrs['effective_time']
    except KeyError:
        # no gmf_data group (or no attribute): skip the consistency check
        stored_time = None
    num_rlzs = len(rlz_weights) if oqparam.collect_rlzs else 1
    effective_time = (oqparam.investigation_time *
                      oqparam.ses_per_logic_tree_path * num_rlzs)
    if oqparam.collect_rlzs:
        # sanity check against the effective time recorded at hazard level
        if stored_time and stored_time != effective_time:
            raise ValueError('The effective time stored in gmf_data is %d, '
                             'which is inconsistent with %d' % (
                                 stored_time, effective_time))
        # all realizations are collapsed into a single one
        events_per_rlz = numpy.array([len(dstore['events'])])
        rlz_weights = numpy.ones(1)
    else:
        events_per_rlz = numpy.bincount(dstore['events']['rlz_id'])
    # return_periods precedence: argument > oqparam > computed default;
    # keep the computation inside the `or` chain so it runs only if needed
    periods = (return_periods or oqparam.return_periods or
               scientific.return_periods(effective_time,
                                         events_per_rlz.max()))
    return scientific.LossCurvesMapsBuilder(
        oqparam.conditional_loss_poes, numpy.array(periods),
        loss_dt or oqparam.loss_dt(), rlz_weights,
        dict(enumerate(events_per_rlz)), effective_time,
        oqparam.risk_investigation_time)
def get_loss_builder(dstore, return_periods=None, loss_dt=None):
    """
    Build a loss-curves/maps builder from an event based risk datastore.

    :param dstore: datastore for an event based risk calculation
    :param return_periods: optional return periods overriding oqparam's
    :param loss_dt: optional loss dtype overriding ``oqparam.loss_dt()``
    :returns: a LossCurvesMapsBuilder instance
    """
    oqparam = dstore['oqparam']
    rlz_weights = dstore['weights'][()]
    effective_time = (oqparam.investigation_time *
                      oqparam.ses_per_logic_tree_path)
    # {rlz_id: number of events for that realization}
    events_per_rlz = countby(dstore['events'][()], 'rlz')
    # return_periods precedence: argument > oqparam > computed default;
    # keep the computation inside the `or` chain so it runs only if needed
    periods = (return_periods or oqparam.return_periods or
               scientific.return_periods(effective_time,
                                         max(events_per_rlz.values())))
    return scientific.LossCurvesMapsBuilder(
        oqparam.conditional_loss_poes, numpy.array(periods),
        loss_dt or oqparam.loss_dt(), rlz_weights, events_per_rlz,
        effective_time, oqparam.risk_investigation_time)