def __init__(self, dstore, sids=None, rlzs_assoc=None):
    """
    Store the datastore and build the per-group probability maps
    restricted to the requested site IDs.

    :param dstore: a datastore exposing 'csm_info', 'oqparam', 'sitecol'
        and possibly a 'poes' group
    :param sids: site IDs to restrict to (default: all complete sites)
    :param rlzs_assoc: a realizations association object; read from
        'csm_info' when not given
    """
    self.rlzs_assoc = rlzs_assoc or dstore['csm_info'].get_rlzs_assoc()
    self.dstore = dstore
    self.weights = [rlz.weight for rlz in self.rlzs_assoc.realizations]
    self.num_levels = len(self.dstore['oqparam'].imtls.array)
    self.sids = dstore['sitecol'].complete.sids if sids is None else sids
    self.nbytes = 0
    self._pmap_by_grp = {}  # grp string -> ProbabilityMap
    if 'poes' not in self.dstore:
        return
    # build probability maps restricted to the given sids
    for grp_name, dset in self.dstore['poes'].items():
        idx_by_sid = {sid: i for i, sid in enumerate(dset.attrs['sids'])}
        num_lvls, num_inner = dset.shape[1:]
        pmap = probability_map.ProbabilityMap(num_lvls, num_inner)
        for sid in self.sids:
            row = idx_by_sid.get(sid)
            if row is not None:  # sid stored for this group
                pmap[sid] = probability_map.ProbabilityCurve(dset[row])
        self._pmap_by_grp[grp_name] = pmap
        self.nbytes += pmap.nbytes
def init(self):
    """
    Read the poes and set the .data attribute with the hazard curves
    """
    if hasattr(self, '_pmap_by_grp'):
        # already initialized by a previous call
        return self._pmap_by_grp
    # open the datastore, accepting either a path or a datastore object
    if isinstance(self.dstore, str):
        self.dstore = hdf5.File(self.dstore, 'r')
    else:
        self.dstore.open('r')
    if self.sids is None:
        self.sids = self.dstore['sitecol'].sids
    oq = self.dstore['oqparam']
    self.imtls = oq.imtls
    self.poes = self.poes or oq.poes
    self.rlzs_by_grp = self.dstore['full_lt'].get_rlzs_by_grp()
    self._pmap_by_grp = {}
    if 'poes' in self.dstore:
        # keep only the curves belonging to the requested site IDs
        wanted = set(self.sids)
        for grp_name, dset in self.dstore['poes'].items():
            arr = dset['array']
            num_lvls, num_gsims = arr.shape[1:]
            pmap = probability_map.ProbabilityMap(num_lvls, num_gsims)
            for row, sid in enumerate(dset['sids'][()]):
                if sid in wanted:
                    pmap[sid] = probability_map.ProbabilityCurve(arr[row])
            self._pmap_by_grp[grp_name] = pmap
            self.nbytes += pmap.nbytes
    return self._pmap_by_grp
def init(self):
    """
    Populate ._pmap_by_grp and the .data dictionary sid -> rlzi -> imls.
    """
    if hasattr(self, 'data'):
        # already initialized by a previous call
        return
    self.dstore.open()
    self._pmap_by_grp = {}
    if 'poes' in self.dstore:
        # build probability maps restricted to the given sids
        for grp_name, dset in self.dstore['poes'].items():
            idx_by_sid = {sid: i for i, sid in enumerate(dset.attrs['sids'])}
            num_lvls, num_inner = dset.shape[1:]
            pmap = probability_map.ProbabilityMap(num_lvls, num_inner)
            for sid in self.sids:
                row = idx_by_sid.get(sid)
                if row is not None:  # sid stored for this group
                    pmap[sid] = probability_map.ProbabilityCurve(dset[row])
            self._pmap_by_grp[grp_name] = pmap
            self.nbytes += pmap.nbytes
    self.imtls = self.dstore['oqparam'].imtls
    self.data = collections.OrderedDict()
    try:
        hcurves = self.get_hcurves(self.imtls)  # shape (R, N)
    except IndexError:  # no data
        return
    for sid, curves_by_rlz in zip(self.sids, hcurves.T):
        self.data[sid] = {
            rlzi: [curve[imt] for imt in self.imtls]  # imls per IMT
            for rlzi, curve in enumerate(curves_by_rlz)}
def get_pmap_by_grp(self, sids=None):
    """
    Return (and cache) the probability maps restricted to the given sites.

    :param sids: an array of site IDs; must not be None on the first
        call, when the cache is populated
    :returns: a dictionary of probability maps by source group
    """
    if self._pmap_by_grp is None:  # populate the cache
        self._pmap_by_grp = {}
        # store the sids once, before the loop (it was redundantly
        # re-assigned on every group iteration)
        self.sids = sids
        for grp, dset in self.dstore['poes'].items():
            sid2idx = {sid: i for i, sid in enumerate(dset.attrs['sids'])}
            L, I = dset.shape[1:]
            pmap = probability_map.ProbabilityMap(L, I)
            for sid in sids:
                try:
                    idx = sid2idx[sid]
                except KeyError:  # site not stored for this group
                    continue
                else:
                    pmap[sid] = probability_map.ProbabilityCurve(dset[idx])
            self._pmap_by_grp[grp] = pmap
            self.nbytes += pmap.nbytes
    else:  # make sure the cache refers to the right sids
        assert sids is None or (sids == self.sids).all()
    return self._pmap_by_grp
def combine_pmaps(self, pmap_by_grp):
    """
    :param pmap_by_grp: dictionary group string -> probability map
    :returns: a list of probability maps, one per realization
    """
    first_grp = list(pmap_by_grp)[0]  # pmap_by_grp must be non-empty
    num_levels = pmap_by_grp[first_grp].shape_y
    pmaps = [probability_map.ProbabilityMap(num_levels, 1)
             for _ in self.realizations]
    rows_by_grp = self.by_grp()
    # fold each extracted gsim map into every realization it affects
    for grp_name, grp_pmap in pmap_by_grp.items():
        for gsim_idx, rlzis in rows_by_grp[grp_name]:
            extracted = grp_pmap.extract(gsim_idx)
            for rlzi in rlzis:
                pmaps[rlzi] |= extracted
    return pmaps
def combine_pmaps(self, pmap_by_grp):
    """
    :param pmap_by_grp: dictionary group string -> probability map
    :returns: a list of probability maps, one per realization
    """
    pmaps = [probability_map.ProbabilityMap(self.num_levels, 1)
             for _ in self.weights]
    for rec in self.assoc_by_grp:
        grp = 'grp-%02d' % rec['grp_id']
        if grp not in pmap_by_grp:
            continue  # no curves computed for this group
        extracted = pmap_by_grp[grp].extract(rec['gsim_idx'])
        for rlzi in rec['rlzis']:
            pmaps[rlzi] |= extracted
    return pmaps
def get(self, sids, rlzi):
    """
    :param sids: an array of S site IDs
    :param rlzi: a realization index
    :returns: the hazard curves for the given realization
    """
    pmap_by_grp = self.get_pmap_by_grp(sids)
    pmap = probability_map.ProbabilityMap(self.num_levels, 1)
    for rec in self.assoc_by_grp:
        grp = 'grp-%02d' % rec['grp_id']
        # the manual scan-and-break loop re-implemented the membership
        # test; accumulate only when the realization belongs to this
        # (group, gsim) combination
        if grp in pmap_by_grp and rlzi in rec['rlzis']:
            pmap |= pmap_by_grp[grp].extract(rec['gsim_idx'])
    return pmap
def get(self, rlzi, grp=None):
    """
    :param rlzi: a realization index
    :param grp: None (all groups) or a string of the form "grp-XX"
    :returns: the hazard curves for the given realization
    """
    self.init()
    assert self.sids is not None
    pmap = probability_map.ProbabilityMap(len(self.imtls.array), 1)
    grps = [grp] if grp is not None else sorted(self._pmap_by_grp)
    for grp in grps:
        for gsim_idx, rlzis in enumerate(self.rlzs_by_grp[grp]):
            # the manual scan-and-break loop re-implemented the
            # membership test; accumulate only the gsim maps
            # associated to the realization
            if rlzi in rlzis:
                pmap |= self._pmap_by_grp[grp].extract(gsim_idx)
    return pmap
def get(self, rlzi, grp=None):
    """
    :param rlzi: a realization index
    :param grp: None (all groups) or a string of the form "grp-XX"
    :returns: the hazard curves for the given realization
    """
    assert self.sids is not None
    pmap = probability_map.ProbabilityMap(self.num_levels, 1)
    grps = [grp] if grp is not None else sorted(self._pmap_by_grp)
    array = self.rlzs_assoc.by_grp()
    for grp in grps:
        for gsim_idx, rlzis in array[grp]:
            # the manual scan-and-break loop re-implemented the
            # membership test; accumulate only the gsim maps
            # associated to the realization
            if rlzi in rlzis:
                pmap |= self._pmap_by_grp[grp].extract(gsim_idx)
    return pmap
def pmap_by_grp(self):
    """
    :returns: dictionary "grp-XXX" -> ProbabilityMap instance
    """
    if hasattr(self, '_pmap_by_grp'):  # already called
        return self._pmap_by_grp
    # populate _pmap_by_grp
    self._pmap_by_grp = {}
    if 'poes' in self.dstore:
        # build probability maps restricted to the given sids
        ok_sids = set(self.sids)
        for grp, dset in self.dstore['poes'].items():
            ds = dset['array']
            L, G = ds.shape[1:]
            pmap = probability_map.ProbabilityMap(L, G)
            # Dataset.value was deprecated and removed in h5py 3.0;
            # the [()] indexing form reads the full array instead
            for idx, sid in enumerate(dset['sids'][()]):
                if sid in ok_sids:
                    pmap[sid] = probability_map.ProbabilityCurve(ds[idx])
            self._pmap_by_grp[grp] = pmap
            self.nbytes += pmap.nbytes
    return self._pmap_by_grp
def pmap_by_grp(self):
    """
    :returns: dictionary "grp-XXX" -> ProbabilityMap instance
    """
    if hasattr(self, '_pmap_by_grp'):
        # the cache was already built by a previous call
        return self._pmap_by_grp
    self._pmap_by_grp = {}
    if 'poes' in self.dstore:
        # keep only the curves belonging to the requested site IDs
        for grp_name, dset in self.dstore['poes'].items():
            idx_by_sid = {sid: i for i, sid in enumerate(dset.attrs['sids'])}
            num_lvls, num_inner = dset.shape[1:]
            pmap = probability_map.ProbabilityMap(num_lvls, num_inner)
            for sid in self.sids:
                row = idx_by_sid.get(sid)
                if row is not None:  # sid stored for this group
                    pmap[sid] = probability_map.ProbabilityCurve(dset[row])
            self._pmap_by_grp[grp_name] = pmap
            self.nbytes += pmap.nbytes
    return self._pmap_by_grp