def build_loss_tables(dstore):
    """
    Compute the total losses by rupture and losses by rlzi.

    Stores two datasets in `dstore`:

    - ``losses_by_rlzi``: float32 array of shape (R, L, T...) with the
      event losses aggregated by realization index
    - ``rup_loss_table``: structured array with one row per rupture and
      one column per loss type, sorted by the first loss type
    """
    oq = dstore['oqparam']
    R = dstore['csm_info'].get_num_rlzs()
    lbe = dstore['losses_by_event'][()]
    loss = lbe['loss']  # shape (E, L, T...)
    shp = (R, ) + lbe.dtype['loss'].shape  # shape (R, L, T...)
    lbr = numpy.zeros(shp, F32)  # losses by rlz
    losses_by_rlz = fast_agg(lbe['rlzi'], loss)
    # some realizations may have no events, hence the partial assignment
    lbr[:len(losses_by_rlz)] = losses_by_rlz
    dstore['losses_by_rlzi'] = lbr
    rup_id = dstore['events']['rup_id']
    if len(shp) > 2:
        # reduce (E, L, T...) to (E, L) by summing the tag axes only;
        # the original range(1, len(shp) - 1) wrongly collapsed the
        # loss-type axis, so losses_by_rupid[:, li] indexed tag values
        loss = loss.sum(axis=tuple(range(2, len(shp))))
    losses_by_rupid = fast_agg(rup_id[lbe['event_id']], loss)
    lst = [('rup_id', U32)] + [(name, F32) for name in oq.loss_names]
    tbl = numpy.zeros(len(losses_by_rupid), lst)
    tbl['rup_id'] = numpy.arange(len(tbl))
    for li, name in enumerate(oq.loss_names):
        tbl[name] = losses_by_rupid[:, li]
    tbl.sort(order=oq.loss_names[0])
    dstore['rup_loss_table'] = tbl
def aggregate_by(self, tagnames, array):
    """
    Aggregate the rows of `array` by the given tags.

    :param tagnames: a list of valid tag names
    :param array: an array with the same length as the asset collection
    :returns: an array of aggregate values with the proper shape
    """
    unknown = set(tagnames) - set(self.tagcol.tagnames)
    if unknown:
        raise ValueError('Unknown tagname(s) %s' % unknown)
    A, *shp = array.shape
    if A != len(self):
        raise ValueError('The array must have length %d, got %d' %
                         (len(self), A))
    if not tagnames:
        # no tags: plain sum over the assets
        return array.sum(axis=0)
    if len(tagnames) == 1:
        # fast track for single-tag aggregation
        # for the Canada exposure it is 30x faster
        # fast_agg(assets['taxonomy'], values) => 47.6 ms
        # fast_agg2(assets[['taxonomy']], values) => 1.4 s
        tagname, = tagnames
        avalues = general.fast_agg(self.array[tagname], array)[1:]
        tags = [(idx + 1, ) for idx in range(len(avalues))]
    else:
        # multi-tag aggregation
        tags, avalues = general.fast_agg2(self.array[tagnames], array)
    # one cell per tag combination; index 0 (missing tag) is excluded
    shape = [len(getattr(self.tagcol, tagname)) - 1
             for tagname in tagnames]
    dtype = (F32, tuple(shp)) if shp else F32
    arr = numpy.zeros(shape, dtype)
    for tag, aval in zip(tags, avalues):
        pos = tuple(t - 1 for t in tag)
        arr[pos] = aval
    return arr
def view_events_by_mag(token, dstore):
    """
    Show how many events there are for each magnitude
    """
    rupdic = group_array(dstore['ruptures'][()], 'mag')
    # number of events per rupture id (one-argument fast_agg = counting)
    n_events = fast_agg(dstore['events']['rup_id'])
    counts = {mag: sum(n_events[rid] for rid in grp['id'])
              for mag, grp in rupdic.items()}
    return numpy.array(list(counts.items()), dt('mag num_events'))
def view_mean_perils(token, dstore):
    """
    For instance `oq show mean_perils`
    """
    oq = dstore['oqparam']
    pdcols = dstore.get_attr('gmf_data', '__pdcolumns__').split()
    # skip the first two columns and the ground-motion-value columns
    perils = [col for col in pdcols[2:] if not col.startswith('gmv_')]
    N = len(dstore['sitecol/sids'])
    sid = dstore['gmf_data/sid'][:]
    out = numpy.zeros(N, [(per, float) for per in perils])
    if oq.number_of_logic_tree_samples:
        # sampling: all events have equal weight, plain mean over events
        E = len(dstore['events'])
        for peril in perils:
            out[peril] = fast_agg(sid, dstore['gmf_data/' + peril][:]) / E
    else:
        # full enumeration: weight each event by its realization weight
        rlz_weights = dstore['weights'][:]
        ev_weights = rlz_weights[dstore['events']['rlz_id']]
        totw = ev_weights.sum()  # num_gmfs
        # the per-row weights do not depend on the peril: read the eids
        # and build the weights once instead of once per peril
        weights = ev_weights[dstore['gmf_data/eid'][:]]
        for peril in perils:
            data = dstore['gmf_data/' + peril][:]
            out[peril] = fast_agg(sid, data * weights) / totw
    return out
def build_loss_tables(dstore):
    """
    Compute the total losses by rupture
    """
    oq = dstore['oqparam']
    R = dstore['full_lt'].get_num_rlzs()
    lbe = dstore['losses_by_event'][()]
    loss = lbe['loss']  # shape (E, L, T...)
    shp = (R, ) + lbe.dtype['loss'].shape
    rup_id = dstore['events']['rup_id']
    if len(shp) > 2:
        # collapse the tag axes so that loss has shape (E, L)
        loss = loss.sum(axis=tuple(range(2, len(shp))))
    losses_by_rupid = general.fast_agg(rup_id[lbe['event_id']], loss)
    dtlist = [('rup_id', U32)] + [(lt, F32) for lt in oq.loss_names]
    tbl = numpy.zeros(len(losses_by_rupid), dtlist)
    tbl['rup_id'] = numpy.arange(len(tbl))
    for li, lt in enumerate(oq.loss_names):
        tbl[lt] = losses_by_rupid[:, li]
    tbl.sort(order=oq.loss_names[0])
    dstore['rup_loss_table'] = tbl