def build_loss_tables(dstore):
    """
    Compute the total losses by rupture and losses by rlzi.
    """
    oq = dstore['oqparam']
    num_rlzs = dstore['csm_info'].get_num_rlzs()
    lbe = dstore['losses_by_event'][()]
    loss = lbe['loss']
    shp = (num_rlzs,) + lbe.dtype['loss'].shape
    # accumulate the event losses per realization
    losses_by_rlz = numpy.zeros(shp, F32)
    rlz_ids, rlz_sums = fast_agg2(lbe['rlzi'], loss)
    losses_by_rlz[rlz_ids] = rlz_sums
    dstore['losses_by_rlzi'] = losses_by_rlz
    rup_id = dstore['events']['rup_id']
    if len(shp) > 2:
        # collapse every intermediate axis, keeping only the loss type axis
        loss = loss.sum(axis=tuple(range(1, len(shp) - 1)))
    rupids, losses_by_rupid = fast_agg2(rup_id[lbe['event_id']], loss)
    dtlist = [('rup_id', U32)] + [(name, F32) for name in oq.loss_names]
    tbl = numpy.zeros(len(rupids), dtlist)
    tbl['rup_id'] = rupids
    for li, name in enumerate(oq.loss_names):
        tbl[name] = losses_by_rupid[:, li]
    # order the table by the first loss type
    tbl.sort(order=oq.loss_names[0])
    dstore['rup_loss_table'] = tbl
def aggregate_by(self, tagnames, array):
    """
    Aggregate an array of per-asset values by the given tag names.

    :param tagnames: a list of valid tag names
    :param array: an array with the same length as the asset collection
    :returns: an array of aggregate values with the proper shape
    :raises ValueError: if a tag name is unknown or the array length
        does not match the number of assets
    """
    missing = set(tagnames) - set(self.tagcol.tagnames)
    if missing:
        raise ValueError('Unknown tagname(s) %s' % missing)
    A, *shp = array.shape
    if A != len(self):
        raise ValueError('The array must have length %d, got %d' %
                         (len(self), A))
    if not tagnames:
        # no tags: plain total over all assets
        return array.sum(axis=0)
    elif len(tagnames) == 1:
        # fast track for single-tag aggregation
        # for the Canada exposure it is 30x faster
        # fast_agg(assets['taxonomy'], values) => 47.6 ms
        # fast_agg2(assets[['taxonomy']], values) => 1.4 s
        [tagname] = tagnames
        # [1:] drops group 0 — presumably the "missing tag" index; the
        # tag indices below are 1-based to match (verify against tagcol)
        avalues = general.fast_agg(self.array[tagname], array)[1:]
        tags = [(i + 1, ) for i in range(len(avalues))]
    else:  # multi-tag aggregation
        tags, avalues = general.fast_agg2(self.array[tagnames], array)
    # output shape excludes the index-0 slot of each tag axis,
    # hence the -1 here and the i - 1 shift when storing below
    shape = [len(getattr(self.tagcol, tagname)) - 1
             for tagname in tagnames]
    arr = numpy.zeros(shape, (F32, tuple(shp)) if shp else F32)
    for tag, aval in zip(tags, avalues):
        arr[tuple(i - 1 for i in tag)] = aval
    return arr
def view_events_by_mag(token, dstore):
    """
    Show how many events there are for each magnitude
    """
    ruptures = dstore['ruptures'][()]
    # count the events belonging to each rupture
    rup_ids, n_events = fast_agg2(dstore['events']['rup_id'])
    num_evs = dict(zip(rup_ids, n_events))
    counts = {}
    for mag, grp in group_array(ruptures, 'mag').items():
        counts[mag] = sum(num_evs[rid] for rid in grp['rup_id'])
    return rst_table(counts.items(), ['mag', 'num_events'])
def fast_agg(keys, values, correl, li, acc):
    """
    :param keys: an array of N uint64 numbers encoding (event_id, agg_id)
    :param values: an array of (N, D) floats
    :param correl: True if there is asset correlation
    :param li: loss type index
    :param acc: dictionary unique key -> array(L, D)
    """
    unique_keys, sums = general.fast_agg2(keys, values)
    if correl:
        # with correlation, column 0 holds summed stddevs:
        # square in place to restore the variances
        sums[:, 0] **= 2
    for key, row in zip(unique_keys, sums):
        acc[key][li] += row