def extract_aggregate_by(dstore, what):
    """
    Yield pairs ``(<stat>, <ArrayWrapper>)`` aggregated by the given tags.

    /extract/aggregate_by/taxonomy,occupancy/curves/structural
    yield pairs (<stat>, <array of shape (T, O, S, P)>)

    /extract/aggregate_by/taxonomy,occupancy/avg_losses/structural
    yield pairs (<stat>, <array of shape (T, O, S)>)
    """
    pieces = what.split('/')
    if len(pieces) == 3:
        tagnames, name, loss_type = pieces
    else:  # missing '/' at the end: no loss type selected
        tagnames, name = pieces
        loss_type = ''
    assert name in ('avg_losses', 'curves'), name
    tagnames = tagnames.split(',')
    assetcol = dstore['assetcol']
    oq = dstore['oqparam']
    dset, stats = _get(dstore, name)
    for idx, stat in enumerate(stats):
        # slice out the requested statistic (and loss type, if any)
        values = (dset[:, idx, oq.lti[loss_type]] if loss_type
                  else dset[:, idx])
        wrapped = ArrayWrapper(assetcol.aggregate_by(tagnames, values), {})
        for tagname in tagnames:
            setattr(wrapped, tagname, getattr(assetcol.tagcol, tagname))
        if not loss_type:
            # no single loss type selected: expose them all as an extra axis
            wrapped.extra = ('loss_type', ) + oq.loss_dt().names
        if name == 'curves':
            wrapped.return_period = dset.attrs['return_periods']
            wrapped.tagnames = encode(tagnames + ['return_period'])
        else:
            wrapped.tagnames = encode(tagnames)
        yield decode(stat), wrapped
def extract_aggregate(dstore, what):
    """
    /extract/aggregate/avg_losses?
    kind=mean&loss_type=structural&tag=taxonomy&tag=occupancy

    :returns: an ArrayWrapper with the losses aggregated by the given tags
    """
    name, qstring = what.split('?', 1)
    info = get_info(dstore)
    qdic = parse(qstring, info)
    suffix = '-rlzs' if qdic['rlzs'] else '-stats'
    tagnames = qdic.get('tag', [])
    assetcol = dstore['assetcol']
    loss_types = info['loss_types']
    ltypes = qdic.get('loss_type', [])  # list of indices
    if ltypes:
        lti = ltypes[0]
        # names of the loss types matching the requested index
        lts = [ltname for ltname, i in loss_types.items() if i == lti]
        array = dstore[name + suffix][:, qdic['k'][0], lti]
        # FIX: pass the loss type names themselves; the original passed
        # (lts, ), i.e. a 1-tuple containing a list, whereas the else-branch
        # passes an iterable of names (loss_types)
        aw = ArrayWrapper(assetcol.aggregate_by(tagnames, array), {},
                          tuple(lts))
    else:
        array = dstore[name + suffix][:, qdic['k'][0]]
        aw = ArrayWrapper(assetcol.aggregate_by(tagnames, array), {},
                          loss_types)
    for tagname in tagnames:
        setattr(aw, tagname, getattr(assetcol.tagcol, tagname))
    aw.tagnames = encode(tagnames)
    return aw
def extract_aggregate(dstore, what):
    """
    /extract/aggregate/avg_losses?
    kind=mean&loss_type=structural&tag=taxonomy&tag=occupancy

    :returns: an ArrayWrapper of losses aggregated by the given tags
    """
    name, qstring = what.split('?', 1)
    info = get_info(dstore)
    qdic = parse(qstring, info)
    if qdic['rlzs']:
        dset = dstore[name + '-rlzs']
    else:
        dset = dstore[name + '-stats']
    tagnames = qdic.get('tag', [])
    assetcol = dstore['assetcol']
    ltypes = qdic.get('loss_type', [])
    k = qdic['k'][0]
    # select the requested statistic/realization (and loss type, if given)
    data = dset[:, k, ltypes[0]] if ltypes else dset[:, k]
    aw = ArrayWrapper(assetcol.aggregate_by(tagnames, data), {})
    for tagname in tagnames:
        setattr(aw, tagname, getattr(assetcol.tagcol, tagname))
    aw.tagnames = encode(tagnames)
    if not ltypes:
        # no loss type selected: the array keeps a loss_type axis
        aw.extra = ('loss_type', ) + tuple(info['loss_types'])
    return aw
def extract_aggregate(dstore, what):
    """
    /extract/aggregate/avg_losses?
    kind=mean&loss_type=structural&tag=taxonomy&tag=occupancy

    :returns: losses aggregated by the given tags, as an ArrayWrapper
    """
    name, qstring = what.split('?', 1)
    info = get_info(dstore)
    qdic = parse(qstring, info)
    tagnames = qdic.get('tag', [])
    ltypes = qdic.get('loss_type', [])
    assetcol = dstore['assetcol']
    key = name + ('-rlzs' if qdic['rlzs'] else '-stats')
    kidx = qdic['k'][0]
    if ltypes:
        # a single loss type was requested: slice it out
        array = dstore[key][:, kidx, ltypes[0]]
    else:
        array = dstore[key][:, kidx]
    aggregated = assetcol.aggregate_by(tagnames, array)
    aw = ArrayWrapper(aggregated, {})
    for tagname in tagnames:
        setattr(aw, tagname, getattr(assetcol.tagcol, tagname))
    aw.tagnames = encode(tagnames)
    if not ltypes:
        # keep the loss_type axis visible in the output metadata
        aw.extra = ('loss_type',) + tuple(info['loss_types'])
    return aw