# Example #1
 def save_bin_edges(self):
     """
     Save the disaggregation bin edges into the datastore under
     'disagg-bins/{mags,dists,lons,lats,eps}'.

     :raises ValueError:
         if the disaggregation matrix implied by the bins would have
         more than 1 million elements
     """
     b = self.bin_edges  # quintet (mag, dist, lon, lat, eps) edges
     T = len(self.trts)
     # bins per dimension for site 0, plus the tectonic region type axis;
     # renamed loop variable to avoid shadowing the builtin `bin`
     shape = [len(edges) - 1 for edges in disagg.get_bins(b, 0)] + [T]
     matrix_size = numpy.prod(shape)  # 6D
     if matrix_size > 1E6:
         raise ValueError(
             'The disaggregation matrix is too large '
             '(%d elements): fix the binning!' % matrix_size)
     self.datastore['disagg-bins/mags'] = b[0]
     self.datastore['disagg-bins/dists'] = b[1]
     for sid in self.sitecol.sids:
         # longitude/latitude edges are stored per site
         self.datastore['disagg-bins/lons/sid-%d' % sid] = b[2][sid]
         self.datastore['disagg-bins/lats/sid-%d' % sid] = b[3][sid]
     self.datastore['disagg-bins/eps'] = b[4]
# Example #2
def compute_disagg(sitecol, sources, cmaker, iml2s, trti, bin_edges,
                   oqparam, monitor):
    # see https://bugs.launchpad.net/oq-engine/+bug/1279247 for an explanation
    # of the algorithm used
    """
    :param sitecol:
        a :class:`openquake.hazardlib.site.SiteCollection` instance
    :param sources:
        list of hazardlib source objects
    :param cmaker:
        a :class:`openquake.hazardlib.gsim.base.ContextMaker` instance
    :param iml2s:
        a list of N arrays of shape (M, P)
    :param trti:
        tectonic region type index (an integer, used as a result key)
    :param bin_edges:
        a quintet (mag_edges, dist_edges, lon_edges, lat_edges, eps_edges)
    :param oqparam:
        the parameters in the job.ini file
    :param monitor:
        monitor of the currently running job
    :returns:
        a dictionary with the keys 'trti' and 'num_ruptures' plus
        composite keys (sid, rlzi, poe, imt) mapping to probability arrays
    """
    result = {'trti': trti, 'num_ruptures': 0}
    # all the time is spent in collect_bin_data
    ruptures = []
    for src in sources:
        ruptures.extend(src.iter_ruptures())
    for sid, iml2 in zip(sitecol.sids, iml2s):
        # disaggregate one site at a time
        singlesitecol = sitecol.filtered([sid])
        bin_data = disagg.collect_bin_data(
            ruptures, singlesitecol, cmaker, iml2,
            oqparam.truncation_level, oqparam.num_epsilon_bins, monitor)
        if bin_data:  # dictionary poe, imt, rlzi -> pne
            bins = disagg.get_bins(bin_edges, sid)
            for (poe, imt, rlzi), matrix in disagg.build_disagg_matrix(
                    bin_data, bins, monitor).items():
                result[sid, rlzi, poe, imt] = matrix
        # NOTE(review): executed even when bin_data is falsy — presumably
        # bin_data.mags is then empty; confirm against collect_bin_data
        result['num_ruptures'] += len(bin_data.mags)
    return result  # sid, rlzi, poe, imt, iml -> array
# Example #3
 def save_bin_edges(self):
     """
     Save the disaggregation bin edges into the datastore under
     'disagg-bins/{mags,dists,lons,lats,eps}', checking the matrix
     size per site.

     :raises ValueError:
         if the disaggregation matrix for some site would have more
         than 1 million elements
     """
     b = self.bin_edges  # quintet (mag, dist, lon, lat, eps) edges
     for sid in self.sitecol.sids:
         bins = disagg.get_bins(b, sid)
         # renamed loop variable to avoid shadowing the builtin `bin`
         shape = [len(edges) - 1 for edges in bins] + [len(self.trts)]
         logging.info('disagg_matrix_shape=%s, site=#%d', str(shape), sid)
         matrix_size = numpy.prod(shape)
         if matrix_size > 1E6:
             # fixed typo in the error message ('binnning' -> 'binning')
             raise ValueError(
                 'The disaggregation matrix for site #%d is too large '
                 '(%d elements): fix the binning!' % (sid, matrix_size))
     self.datastore['disagg-bins/mags'] = b[0]
     self.datastore['disagg-bins/dists'] = b[1]
     for sid in self.sitecol.sids:
         # longitude/latitude edges are stored per site
         self.datastore['disagg-bins/lons/sid-%d' % sid] = b[2][sid]
         self.datastore['disagg-bins/lats/sid-%d' % sid] = b[3][sid]
     self.datastore['disagg-bins/eps'] = b[4]
# Example #4
 def save_bin_edges(self):
     """
     Save the disaggregation bin edges into the datastore under
     'disagg-bins/{mags,dists,lons,lats,eps}', checking the matrix
     size per site.

     :returns:
         a dictionary bin name -> number of bins (for the last site)
     :raises ValueError:
         if the disaggregation matrix for some site would have more
         than 1 million elements
     """
     b = self.bin_edges  # quintet (mag, dist, lon, lat, eps) edges
     T = len(self.trts)
     shape_dic = {}  # defined up front so the return works with no sites
     for sid in self.sitecol.sids:
         bins = disagg.get_bins(b, sid)
         # renamed loop variable to avoid shadowing the builtin `bin`
         shape = [len(edges) - 1 for edges in bins] + [T]
         shape_dic = dict(zip(BIN_NAMES, shape))
         if sid == 0:  # log only once, for the first site
             logging.info('nbins=%s for site=#%d', shape_dic, sid)
         matrix_size = numpy.prod(shape)  # 6D
         if matrix_size > 1E6:
             raise ValueError(
                 'The disaggregation matrix for site #%d is too large '
                 '(%d elements): fix the binning!' % (sid, matrix_size))
     self.datastore['disagg-bins/mags'] = b[0]
     self.datastore['disagg-bins/dists'] = b[1]
     for sid in self.sitecol.sids:
         # longitude/latitude edges are stored per site
         self.datastore['disagg-bins/lons/sid-%d' % sid] = b[2][sid]
         self.datastore['disagg-bins/lats/sid-%d' % sid] = b[3][sid]
     self.datastore['disagg-bins/eps'] = b[4]
     return shape_dic
# Example #5
def compute_disagg(dstore, idxs, cmaker, iml3, trti, bin_edges, oq, monitor):
    # see https://bugs.launchpad.net/oq-engine/+bug/1279247 for an explanation
    # of the algorithm used
    """
    Disaggregate the hazard, one site at a time.

    :param dstore:
        a DataStore instance
    :param idxs:
        an array of indices to ruptures
    :param cmaker:
        a :class:`openquake.hazardlib.gsim.base.ContextMaker` instance
    :param iml3:
        an ArrayWrapper of shape (N, P, Z) with an attribute imt
    :param trti:
        tectonic region type index
    :param bin_edges:
        a quintet (mag_edges, dist_edges, lon_edges, lat_edges, eps_edges)
    :param oq:
        job parameters (read for investigation_time and num_epsilon_bins)
    :param monitor:
        monitor of the currently running job
    :yields:
        dictionaries with the keys 'trti', 'imti' and sid -> matrix
        (this is a generator, one item per site with a non-empty matrix)
    """
    with monitor('reading rupdata', measuremem=True):
        dstore.open('r')
        sitecol = dstore['sitecol']
        # columns of the rupture table, restricted to the given indices
        rupdata = {k: dstore['rup/' + k][idxs] for k in dstore['rup']}
    RuptureContext.temporal_occurrence_model = PoissonTOM(
        oq.investigation_time)
    pne_mon = monitor('disaggregate_pne', measuremem=False)
    mat_mon = monitor('build_disagg_matrix', measuremem=True)
    gmf_mon = monitor('disagg mean_std', measuremem=False)
    for sid, iml2 in zip(sitecol.sids, iml3):
        singlesite = sitecol.filtered([sid])
        bins = disagg.get_bins(bin_edges, sid)
        # map each z-level to its gsim, skipping realizations for which
        # the ContextMaker has no gsim (KeyError is deliberately ignored)
        gsim_by_z = {}
        for z in range(iml3.shape[-1]):
            try:
                gsim = cmaker.gsim_by_rlzi[iml3.rlzs[sid, z]]
            except KeyError:
                pass
            else:
                gsim_by_z[z] = gsim
        ctxs = []
        # indices of the ruptures within the integration distance;
        # NOTE(review): keys ending in '_' appear to hold per-site arrays
        # (they are indexed by sid below) — confirm against the rup table
        ok, = numpy.where(
            rupdata['rrup_'][:, sid] <= cmaker.maximum_distance(cmaker.trt))
        for ridx in ok:  # consider only the ruptures close to the site
            ctx = RuptureContext((par, rupdata[par][ridx])
                                 for par in rupdata if not par.endswith('_'))
            for par in rupdata:
                if par.endswith('_'):
                    # keep a 1-element array for the current site only
                    setattr(ctx, par[:-1], rupdata[par][ridx, [sid]])
            ctxs.append(ctx)
        if not ctxs:
            continue
        eps3 = disagg._eps3(cmaker.trunclevel, oq.num_epsilon_bins)
        # bins dimensions plus (P, Z) from the iml2 array
        matrix = numpy.zeros([len(b) - 1 for b in bins] + list(iml2.shape))
        for z, gsim in gsim_by_z.items():
            with gmf_mon:
                ms = disagg.get_mean_stdv(singlesite, ctxs, iml3.imt, gsim)
            bdata = disagg.disaggregate(
                ms, ctxs, iml3.imt, iml2[:, z], eps3, pne_mon)
            if bdata.pnes.sum():  # skip all-zero contributions
                with mat_mon:
                    matrix[..., z] = disagg.build_disagg_matrix(bdata, bins)
        if matrix.any():
            yield {'trti': trti, 'imti': iml3.imti, sid: matrix}