Example #1
    def setUp(self):

        fname = gettemp(ampl_func)
        df = read_csv(fname, {
            'ampcode': ampcode_dt,
            None: numpy.float64
        },
                      index='ampcode')
        self.df = AmplFunction(df)

        # Set GMMs
        gmmA = BooreAtkinson2008()

        # Set parameters
        # a single source-site distance, in km
        dsts = [10.]
        imts = [PGA(), SA(1.0)]
        sites = Dummy.get_site_collection(len(dsts), vs30=760.0)
        self.mag = 5.5
        rup = Dummy.get_rupture(mag=self.mag)
        ctx = full_context(sites, rup)
        ctx.rjb = numpy.array(dsts)
        ctx.rrup = numpy.array(dsts)
        self.rrup = ctx.rrup

        # Compute GM on rock
        self.cmaker = ContextMaker('TRT', [gmmA],
                                   dict(imtls={str(im): [0]
                                               for im in imts}))
        [self.meastd] = self.cmaker.get_mean_stds([ctx], const.StdDev.TOTAL)
Example #2
 def test_mutually_exclusive_ruptures(self):
     # Test the calculation of hazard curves using mutually exclusive
     # ruptures for a single source
     gsim_by_trt = [SadighEtAl1997()]
     rupture = _create_rupture(10., 6.)
     data = [(rupture, PMF([(0.7, 0), (0.3, 1)])),
             (rupture, PMF([(0.6, 0), (0.4, 1)]))]
     data[0][0].weight = 0.5
     data[1][0].weight = 0.5
     src = NonParametricSeismicSource('0', 'test', "Active Shallow Crust",
                                      data)
     src.id = 0
     src.grp_id = 0
     src.trt_smr = 0
     src.mutex_weight = 1
     group = SourceGroup(src.tectonic_region_type, [src], 'test', 'mutex',
                         'mutex')
     param = dict(imtls=self.imtls,
                  src_interdep=group.src_interdep,
                  rup_interdep=group.rup_interdep,
                  grp_probability=group.grp_probability)
     cmaker = ContextMaker(src.tectonic_region_type, gsim_by_trt, param)
     crv = classical(group, self.sites, cmaker)['pmap'][0]
     npt.assert_almost_equal(numpy.array([0.35000, 0.32497, 0.10398]),
                             crv.array[:, 0],
                             decimal=4)
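A quick sanity check on the first expected value (plain arithmetic, not part of the test): with two mutually exclusive ruptures weighted 0.5 each and one-occurrence probabilities 0.3 and 0.4, and assuming the lowest intensity level is always exceeded when a rupture occurs, the hazard is

# hedged check of crv.array[0, 0] above
print(0.5 * 0.3 + 0.5 * 0.4)  # 0.35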
Example #3
def sample_ruptures(group,
                    src_filter=filters.source_site_noop_filter,
                    gsims=(),
                    param=(),
                    monitor=Monitor()):
    """
    :param group:
        a SourceGroup or a sequence of sources of the same group
    :param src_filter:
        a source site filter (default noop filter)
    :param gsims:
        a list of GSIMs for the current tectonic region model
    :param param:
        a dictionary of additional parameters (by default
        ses_per_logic_tree_path=1, samples=1, seed=42, filter_distance=1000)
    :param monitor:
        monitor instance
    :returns:
        a dictionary with eb_ruptures, num_events, num_ruptures, calc_times
    """
    if not param:
        param = dict(ses_per_logic_tree_path=1,
                     samples=1,
                     seed=42,
                     filter_distance=1000)
    if getattr(group, 'src_interdep', None) == 'mutex':
        prob = {src: sw for src, sw in zip(group, group.srcs_weights)}
    else:
        prob = {src: 1 for src in group}
    eb_ruptures = []
    calc_times = []
    rup_mon = monitor('making contexts', measuremem=False)
    # Compute and save stochastic event sets
    num_ruptures = 0
    eids = numpy.zeros(0)
    cmaker = ContextMaker(gsims, src_filter.integration_distance,
                          param['filter_distance'], monitor)
    for src, s_sites in src_filter(group):
        t0 = time.time()
        num_ruptures += src.num_ruptures
        num_occ_by_rup = _sample_ruptures(src, prob[src],
                                          param['ses_per_logic_tree_path'],
                                          param['samples'], param['seed'])
        # NB: the number of occurrences is very low, << 1, so it is
        # more efficient to filter only the ruptures that occur, i.e.
        # to call sample_ruptures *before* the filtering
        for ebr in _build_eb_ruptures(src, num_occ_by_rup, cmaker, s_sites,
                                      param['seed'], rup_mon):
            eb_ruptures.append(ebr)
        eids = set_eids(eb_ruptures)
        src_id = src.source_id.split(':', 1)[0]
        dt = time.time() - t0
        calc_times.append((src_id, src.nsites, eids, dt))
    dic = dict(eb_ruptures=eb_ruptures,
               num_events=len(eids),
               calc_times=calc_times,
               num_ruptures=num_ruptures)
    return dic
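A minimal usage sketch, assuming `grp` is a SourceGroup and `gsims` a list of GSIM instances built elsewhere (both names are illustrative, not defined above); with no `param` the documented defaults apply:

dic = sample_ruptures(grp, gsims=gsims)
print(dic['num_events'], dic['num_ruptures'], len(dic['eb_ruptures']))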
Example #4
    def acc0(self):
        """
        Initial accumulator, a dict grp_id -> ProbabilityMap(L, G)
        """
        zd = AccumDict()
        num_levels = len(self.oqparam.imtls.array)
        rparams = {
            'grp_id', 'occurrence_rate', 'weight', 'probs_occur', 'lon_',
            'lat_', 'rrup_'
        }
        gsims_by_trt = self.full_lt.get_gsims_by_trt()
        n = len(self.full_lt.sm_rlzs)
        trts = list(self.full_lt.gsim_lt.values)
        for sm in self.full_lt.sm_rlzs:
            for grp_id in self.full_lt.grp_ids(sm.ordinal):
                trt = trts[grp_id // n]
                gsims = gsims_by_trt[trt]
                cm = ContextMaker(trt, gsims)
                rparams.update(cm.REQUIRES_RUPTURE_PARAMETERS)
                for dparam in cm.REQUIRES_DISTANCES:
                    rparams.add(dparam + '_')
                zd[grp_id] = ProbabilityMap(num_levels, len(gsims))
        zd.eff_ruptures = AccumDict(accum=0)  # trt -> eff_ruptures
        if self.few_sites:
            self.rparams = sorted(rparams)
            for k in self.rparams:
                # variable length arrays
                if k == 'grp_id':
                    self.datastore.create_dset('rup/' + k, U16)
                elif k == 'probs_occur':  # vlen
                    self.datastore.create_dset('rup/' + k, hdf5.vfloat32)
                elif k.endswith('_'):  # array of shape (U, N)
                    self.datastore.create_dset('rup/' + k,
                                               F32,
                                               shape=(None, self.N),
                                               compression='gzip')
                else:
                    self.datastore.create_dset('rup/' + k, F32)
        else:
            self.rparams = {}
        self.by_task = {}  # task_no => src_ids
        self.totrups = 0  # total number of ruptures before collapsing
        self.maxradius = 0
        self.gidx = {
            tuple(grp_ids): i
            for i, grp_ids in enumerate(self.datastore['grp_ids'])
        }

        # estimate max memory per core
        max_num_gsims = max(len(gsims) for gsims in gsims_by_trt.values())
        max_num_grp_ids = max(len(grp_ids) for grp_ids in self.gidx)
        pmapbytes = self.N * num_levels * max_num_gsims * max_num_grp_ids * 8
        if pmapbytes > TWO32:
            logging.warning(TOOBIG % (self.N, num_levels, max_num_gsims,
                                      max_num_grp_ids, humansize(pmapbytes)))
        logging.info(MAXMEMORY % (self.N, num_levels, max_num_gsims,
                                  max_num_grp_ids, humansize(pmapbytes)))
        return zd
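The memory estimate above is plain arithmetic: each ProbabilityMap cell is a float64 (8 bytes), multiplied over sites, levels, GSIMs and group ids. A hedged example with hypothetical sizes:

# 10,000 sites x 200 levels x 4 GSIMs x 3 group ids x 8 bytes
print(10_000 * 200 * 4 * 3 * 8)  # 1.92e9 bytes (~1.8 GiB), below the 2**32 warning threshold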
Example #5
def compute_hazard(sources, src_filter, rlzs_by_gsim, param, monitor):
    """
    :param sources: a list with a single UCERF source
    :param src_filter: a SourceFilter instance
    :param rlzs_by_gsim: a dictionary gsim -> rlzs
    :param param: extra parameters
    :param monitor: a Monitor instance
    :returns: an AccumDict grp_id -> EBRuptures
    """
    [src] = sources
    res = AccumDict()
    res.calc_times = []
    serial = 1
    sampl_mon = monitor('sampling ruptures', measuremem=True)
    filt_mon = monitor('filtering ruptures', measuremem=False)
    res.trt = DEFAULT_TRT
    ebruptures = []
    background_sids = src.get_background_sids(src_filter)
    sitecol = src_filter.sitecol
    cmaker = ContextMaker(rlzs_by_gsim, src_filter.integration_distance)
    for sample in range(param['samples']):
        for ses_idx, ses_seed in param['ses_seeds']:
            seed = sample * TWO16 + ses_seed
            with sampl_mon:
                rups, n_occs = generate_event_set(src, background_sids,
                                                  src_filter, seed)
            with filt_mon:
                for rup, n_occ in zip(rups, n_occs):
                    rup.serial = serial
                    try:
                        rup.sctx, rup.dctx = cmaker.make_contexts(sitecol, rup)
                        indices = rup.sctx.sids
                    except FarAwayRupture:
                        continue
                    events = []
                    for _ in range(n_occ):
                        events.append((0, src.src_group_id, ses_idx, sample))
                    if events:
                        evs = numpy.array(events, stochastic.event_dt)
                        ebruptures.append(EBRupture(rup, src.id, indices, evs))
                        serial += 1
    res.num_events = len(stochastic.set_eids(ebruptures))
    res['ruptures'] = {src.src_group_id: ebruptures}
    if param['save_ruptures']:
        res.ruptures_by_grp = {src.src_group_id: ebruptures}
    else:
        res.events_by_grp = {
            src.src_group_id: event_based.get_events(ebruptures)
        }
    res.eff_ruptures = {src.src_group_id: src.num_ruptures}
    if param.get('gmf'):
        getter = getters.GmfGetter(rlzs_by_gsim, ebruptures, sitecol,
                                   param['oqparam'], param['min_iml'],
                                   param['samples'])
        res.update(getter.compute_gmfs_curves(monitor))
    return res
Example #6
 def acc0(self):
     """
     Initial accumulator, a dict et_id -> ProbabilityMap(L, G)
     """
     zd = AccumDict()
     rparams = {'grp_id', 'occurrence_rate', 'clon_', 'clat_', 'rrup_'}
     gsims_by_trt = self.full_lt.get_gsims_by_trt()
     for trt, gsims in gsims_by_trt.items():
         cm = ContextMaker(trt, gsims)
         rparams.update(cm.REQUIRES_RUPTURE_PARAMETERS)
         for dparam in cm.REQUIRES_DISTANCES:
             rparams.add(dparam + '_')
     zd.eff_ruptures = AccumDict(accum=0)  # trt -> eff_ruptures
     mags = set()
     for trt, dset in self.datastore['source_mags'].items():
         mags.update(dset[:])
     mags = sorted(mags)
     if self.few_sites:
         self.rdt = [('nsites', U16)]
         dparams = ['sids_']
         for rparam in rparams:
             if rparam.endswith('_'):
                 dparams.append(rparam)
             elif rparam == 'grp_id':
                 self.rdt.append((rparam, U32))
             else:
                 self.rdt.append((rparam, F32))
         self.rdt.append(('idx', U32))
         self.rdt.append(('probs_occur', hdf5.vfloat64))
         for mag in mags:
             name = 'mag_%s/' % mag
             self.datastore.create_dset(name + 'rctx',
                                        self.rdt, (None, ),
                                        compression='gzip')
             for dparam in dparams:
                 dt = hdf5.vuint32 if dparam == 'sids_' else hdf5.vfloat32
                 self.datastore.create_dset(name + dparam,
                                            dt, (None, ),
                                            compression='gzip')
     self.by_task = {}  # task_no => src_ids
     self.totrups = 0  # total number of ruptures before collapsing
     self.maxradius = 0
     self.Ns = len(self.csm.source_info)
     if self.oqparam.disagg_by_src:
         sources = self.get_source_ids()
         self.datastore.create_dset(
             'disagg_by_src', F32,
             (self.N, self.R, self.M, self.L1, self.Ns))
         self.datastore.set_shape_attrs('disagg_by_src',
                                        site_id=self.N,
                                        rlz_id=self.R,
                                        imt=list(self.oqparam.imtls),
                                        lvl=self.L1,
                                        src_id=sources)
     return zd
Example #7
 def acc0(self):
     """
     Initial accumulator, a dict et_id -> ProbabilityMap(L, G)
     """
     zd = AccumDict()
     params = {
         'grp_id', 'occurrence_rate', 'clon_', 'clat_', 'rrup_', 'nsites',
         'probs_occur_', 'sids_', 'src_id'
     }
     gsims_by_trt = self.full_lt.get_gsims_by_trt()
     for trt, gsims in gsims_by_trt.items():
         cm = ContextMaker(trt, gsims)
         params.update(cm.REQUIRES_RUPTURE_PARAMETERS)
         for dparam in cm.REQUIRES_DISTANCES:
             params.add(dparam + '_')
     zd.eff_ruptures = AccumDict(accum=0)  # trt -> eff_ruptures
     mags = set()
     for trt, dset in self.datastore['source_mags'].items():
         mags.update(dset[:])
     mags = sorted(mags)
     if self.few_sites:
         for param in params:
             if param == 'sids_':
                 dt = hdf5.vuint16
             elif param == 'probs_occur_':
                 dt = hdf5.vfloat64
             elif param.endswith('_'):
                 dt = hdf5.vfloat32
             elif param == 'src_id':
                 dt = U32
             elif param in {'nsites', 'grp_id'}:
                 dt = U16
             else:
                 dt = F32
             self.datastore.create_dset('rup/' + param,
                                        dt, (None, ),
                                        compression='gzip')
         dset = self.datastore.getitem('rup')
         dset.attrs['__pdcolumns__'] = ' '.join(params)
     self.by_task = {}  # task_no => src_ids
     self.totrups = 0  # total number of ruptures before collapsing
     self.maxradius = 0
     self.Ns = len(self.csm.source_info)
     if self.oqparam.disagg_by_src:
         sources = self.get_source_ids()
         self.datastore.create_dset(
             'disagg_by_src', F32,
             (self.N, self.R, self.M, self.L1, self.Ns))
         self.datastore.set_shape_attrs('disagg_by_src',
                                        site_id=self.N,
                                        rlz_id=self.R,
                                        imt=list(self.oqparam.imtls),
                                        lvl=self.L1,
                                        src_id=sources)
     return zd
Example #8
def ucerf_classical(rupset_idx, ucerf_source, src_filter, gsims, monitor):
    """
    :param rupset_idx:
        indices of the rupture sets
    :param ucerf_source:
        an object taking the place of a source for UCERF
    :param src_filter:
        a source filter returning the sites affected by the source
    :param gsims:
        a list of GSIMs
    :param monitor:
        a monitor instance
    :returns:
        a ProbabilityMap
    """
    t0 = time.time()
    truncation_level = monitor.oqparam.truncation_level
    imtls = monitor.oqparam.imtls
    ucerf_source.src_filter = src_filter  # so that .iter_ruptures() works
    grp_id = ucerf_source.src_group_id
    mag = ucerf_source.mags[rupset_idx].max()
    ridx = set()
    for idx in rupset_idx:
        ridx.update(ucerf_source.get_ridx(idx))
    ucerf_source.rupset_idx = rupset_idx
    ucerf_source.num_ruptures = nruptures = len(rupset_idx)

    # prefilter the sites close to the rupture set
    s_sites = ucerf_source.get_rupture_sites(ridx, src_filter, mag)
    if s_sites is None:  # return an empty probability map
        pm = ProbabilityMap(len(imtls.array), len(gsims))
        acc = AccumDict({grp_id: pm})
        acc.calc_times = {
            ucerf_source.source_id:
            numpy.array([nruptures, 0, time.time() - t0, 1])
        }
        acc.eff_ruptures = {grp_id: 0}
        return acc

    # compute the ProbabilityMap
    cmaker = ContextMaker(gsims,
                          src_filter.integration_distance,
                          monitor=monitor)
    imtls = DictArray(imtls)
    pmap = cmaker.poe_map(ucerf_source, s_sites, imtls, truncation_level)
    nsites = len(s_sites)
    acc = AccumDict({grp_id: pmap})
    acc.calc_times = {
        ucerf_source.source_id:
        numpy.array([nruptures * nsites, nsites,
                     time.time() - t0, 1])
    }
    acc.eff_ruptures = {grp_id: ucerf_source.num_ruptures}
    return acc
Example #9
 def create_dsets(self):
     """
     Store some empty datasets in the datastore
     """
     self.init_poes()
     params = {
         'grp_id', 'occurrence_rate', 'clon_', 'clat_', 'rrup_',
         'probs_occur_', 'sids_', 'src_id'
     }
     gsims_by_trt = self.full_lt.get_gsims_by_trt()
     for trt, gsims in gsims_by_trt.items():
         cm = ContextMaker(trt, gsims, self.oqparam)
         params.update(cm.REQUIRES_RUPTURE_PARAMETERS)
         for dparam in cm.REQUIRES_DISTANCES:
             params.add(dparam + '_')
     mags = set()
     for trt, dset in self.datastore['source_mags'].items():
         mags.update(dset[:])
     mags = sorted(mags)
     if self.few_sites:
         descr = []  # (param, dt)
         for param in params:
             if param == 'sids_':
                 dt = hdf5.vuint16
             elif param == 'probs_occur_':
                 dt = hdf5.vfloat64
             elif param.endswith('_'):
                 dt = hdf5.vfloat32
             elif param == 'src_id':
                 dt = U32
             elif param == 'grp_id':
                 dt = U16
             else:
                 dt = F32
             descr.append((param, dt))
         self.datastore.create_df('rup', descr, 'gzip')
     self.by_task = AccumDict(accum=AccumDict())
     # task_no => effrups, effsites, srcids
     self.Ns = len(self.csm.source_info)
     self.rel_ruptures = AccumDict(accum=0)  # grp_id -> rel_ruptures
     # NB: the relevant ruptures are less than the effective ruptures,
     # which are a preclassical concept
     if self.oqparam.disagg_by_src:
         sources = self.get_source_ids()
         self.datastore.create_dset(
             'disagg_by_src', F32,
             (self.N, self.R, self.M, self.L1, self.Ns))
         self.datastore.set_shape_descr('disagg_by_src',
                                        site_id=self.N,
                                        rlz_id=self.R,
                                        imt=list(self.oqparam.imtls),
                                        lvl=self.L1,
                                        src_id=sources)
Example #10
 def acc0(self):
     """
     Initial accumulator, a dict grp_id -> ProbabilityMap(L, G)
     """
     zd = AccumDict()  # populated in get_args
     params = {
         'grp_id', 'occurrence_rate', 'clon_', 'clat_', 'rrup_', 'nsites',
         'probs_occur_', 'sids_', 'src_id'
     }
     gsims_by_trt = self.full_lt.get_gsims_by_trt()
     for trt, gsims in gsims_by_trt.items():
         cm = ContextMaker(trt, gsims)
         params.update(cm.REQUIRES_RUPTURE_PARAMETERS)
         for dparam in cm.REQUIRES_DISTANCES:
             params.add(dparam + '_')
     zd.eff_ruptures = AccumDict(accum=0)  # trt -> eff_ruptures
     mags = set()
     for trt, dset in self.datastore['source_mags'].items():
         mags.update(dset[:])
     mags = sorted(mags)
     if self.few_sites:
         descr = []  # (param, dt)
         for param in params:
             if param == 'sids_':
                 dt = hdf5.vuint16
             elif param == 'probs_occur_':
                 dt = hdf5.vfloat64
             elif param.endswith('_'):
                 dt = hdf5.vfloat32
             elif param == 'src_id':
                 dt = U32
             elif param in {'nsites', 'grp_id'}:
                 dt = U16
             else:
                 dt = F32
             descr.append((param, dt))
         self.datastore.create_dframe('rup', descr, 'gzip')
     self.by_task = {}  # task_no => src_ids
     self.maxradius = 0
     self.Ns = len(self.csm.source_info)
     if self.oqparam.disagg_by_src:
         sources = self.get_source_ids()
         self.datastore.create_dset(
             'disagg_by_src', F32,
             (self.N, self.R, self.M, self.L1, self.Ns))
         self.datastore.set_shape_attrs('disagg_by_src',
                                        site_id=self.N,
                                        rlz_id=self.R,
                                        imt=list(self.oqparam.imtls),
                                        lvl=self.L1,
                                        src_id=sources)
     return zd
Example #11
def compute_ruptures(sources, src_filter, gsims, param, monitor):
    """
    :param sources: a list with a single UCERF source
    :param src_filter: a SourceFilter instance
    :param gsims: a list of GSIMs
    :param param: extra parameters
    :param monitor: a Monitor instance
    :returns: an AccumDict grp_id -> EBRuptures
    """
    [src] = sources
    res = AccumDict()
    res.calc_times = []
    serial = 1
    sampl_mon = monitor('sampling ruptures', measuremem=True)
    filt_mon = monitor('filtering ruptures', measuremem=False)
    res.trt = DEFAULT_TRT
    ebruptures = []
    background_sids = src.get_background_sids(src_filter)
    sitecol = src_filter.sitecol
    cmaker = ContextMaker(gsims, src_filter.integration_distance)
    for sample in range(param['samples']):
        for ses_idx, ses_seed in param['ses_seeds']:
            seed = sample * TWO16 + ses_seed
            with sampl_mon:
                rups, n_occs = generate_event_set(src, background_sids,
                                                  src_filter, seed)
            with filt_mon:
                for rup, n_occ in zip(rups, n_occs):
                    rup.serial = serial
                    rup.seed = seed
                    try:
                        rup.sctx, rup.dctx = cmaker.make_contexts(sitecol, rup)
                        indices = rup.sctx.sids
                    except FarAwayRupture:
                        continue
                    events = []
                    for _ in range(n_occ):
                        events.append((0, src.src_group_id, ses_idx, sample))
                    if events:
                        evs = numpy.array(events, stochastic.event_dt)
                        ebruptures.append(EBRupture(rup, indices, evs))
                        serial += 1
    res.num_events = len(stochastic.set_eids(ebruptures))
    res[src.src_group_id] = ebruptures
    if not param['save_ruptures']:
        res.events_by_grp = {
            grp_id: event_based.get_events(res[grp_id])
            for grp_id in res
        }
    res.eff_ruptures = {src.src_group_id: src.num_ruptures}
    return res
Example #12
def compute_hazard(sources, src_filter, rlzs_by_gsim, param, monitor):
    """
    :param sources: a list with a single UCERF source
    :param src_filter: a SourceFilter instance
    :param rlzs_by_gsim: a dictionary gsim -> rlzs
    :param param: extra parameters
    :param monitor: a Monitor instance
    :returns: an AccumDict grp_id -> EBRuptures
    """
    [src] = sources
    res = AccumDict()
    res.calc_times = []
    serial = 1
    sampl_mon = monitor('sampling ruptures', measuremem=True)
    filt_mon = monitor('filtering ruptures', measuremem=False)
    res.trt = DEFAULT_TRT
    background_sids = src.get_background_sids(src_filter)
    sitecol = src_filter.sitecol
    cmaker = ContextMaker(rlzs_by_gsim, src_filter.integration_distance)
    num_ses = param['ses_per_logic_tree_path']
    samples = getattr(src, 'samples', 1)
    n_occ = AccumDict(accum=numpy.zeros((samples, num_ses), numpy.uint16))
    with sampl_mon:
        for sam_idx in range(samples):
            for ses_idx, ses_seed in param['ses_seeds']:
                seed = sam_idx * TWO16 + ses_seed
                rups, occs = generate_event_set(src, background_sids,
                                                src_filter, seed)
                for rup, occ in zip(rups, occs):
                    n_occ[rup][sam_idx, ses_idx] = occ
                    rup.serial = serial
                    serial += 1
    with filt_mon:
        rlzs = numpy.concatenate(list(rlzs_by_gsim.values()))
        ebruptures = stochastic.build_eb_ruptures(src, rlzs, num_ses, cmaker,
                                                  sitecol, n_occ.items())
    res.num_events = sum(ebr.multiplicity for ebr in ebruptures)
    res['ruptures'] = {src.src_group_id: ebruptures}
    if param['save_ruptures']:
        res.ruptures_by_grp = {src.src_group_id: ebruptures}
    else:
        res.events_by_grp = {
            src.src_group_id: event_based.get_events(ebruptures)
        }
    res.eff_ruptures = {src.src_group_id: src.num_ruptures}
    if param.get('gmf'):
        getter = getters.GmfGetter(rlzs_by_gsim, ebruptures, sitecol,
                                   param['oqparam'], param['min_iml'], samples)
        res.update(getter.compute_gmfs_curves(monitor))
    return res
Example #13
    def test_case_11(self):
        # compute the limit distances for a GMV of 0.01g
        self.run_calc(case_11.__file__, 'job.ini')
        self.calc.rup.tectonic_region_type = 'Subduction Deep'
        oq = self.calc.oqparam
        gsim_lt = self.calc.datastore['csm_info/gsim_lt']
        mags = {'Subduction Deep': [self.calc.rup.mag]}
        dist = gsim_lt.get_integration_distance(mags, oq)
        aae(dist['Subduction Deep'], 162.24509294)

        cmaker = ContextMaker(
            'Subduction Deep', self.calc.gsims, oq.maximum_distance)
        dist = cmaker.get_limit_distance(
            self.calc.sitecol, self.calc.rup, oq.imtls, oq.minimum_intensity)
        aae(dist, 163.5109114)
Example #14
def sample_ruptures(sources,
                    src_filter=source_site_noop_filter,
                    gsims=(),
                    param=(),
                    monitor=Monitor()):
    """
    :param sources:
        a sequence of sources of the same group
    :param src_filter:
        a source site filter
    :param gsims:
        a list of GSIMs for the current tectonic region model (can be empty)
    :param param:
        a dictionary of additional parameters (by default
        ses_per_logic_tree_path=1 and filter_distance=1000)
    :param monitor:
        monitor instance
    :returns:
        a dictionary with eb_ruptures and calc_times
    """
    if not param:
        param = dict(ses_per_logic_tree_path=1, filter_distance=1000)
    eb_ruptures = []
    # AccumDict of arrays with 3 elements weight, nsites, calc_time
    calc_times = AccumDict(accum=numpy.zeros(3, numpy.float32))
    # Compute and save stochastic event sets
    cmaker = ContextMaker(gsims, src_filter.integration_distance, param,
                          monitor)
    for src, sites in src_filter(sources):
        mutex_weight = getattr(src, 'mutex_weight', 1)
        samples = getattr(src, 'samples', 1)
        t0 = time.time()
        with cmaker.ir_mon:
            ruptures = list(src.iter_ruptures())
        num_occ_by_rup = _sample_ruptures(src, mutex_weight,
                                          param['ses_per_logic_tree_path'],
                                          samples, ruptures)
        # NB: the number of occurrences is very low, << 1, so it is
        # more efficient to filter only the ruptures that occur, i.e.
        # to call sample_ruptures *before* the filtering
        ebrs = list(
            _build_eb_ruptures(src, num_occ_by_rup, cmaker, sites, monitor))
        eb_ruptures.extend(ebrs)
        eids = set_eids(ebrs)
        dt = time.time() - t0
        calc_times[src.id] += numpy.array([len(eids), src.nsites, dt])
    dic = dict(eb_ruptures=eb_ruptures, calc_times=calc_times)
    return dic
Example #15
 def test_make_pmap(self):
     trunclevel = 3
     imtls = DictArray({'PGA': [0.01]})
     gsims = [valid.gsim('AkkarBommer2010')]
     ctxs = []
     for occ_rate in (.001, .002):
         ctx = RuptureContext()
         ctx.mag = 5.5
         ctx.rake = 90
         ctx.occurrence_rate = occ_rate
          ctx.sids = numpy.array([0])  # site IDs are integers
         ctx.vs30 = numpy.array([760.])
         ctx.rrup = numpy.array([100.])
         ctx.rjb = numpy.array([99.])
         ctxs.append(ctx)
     cmaker = ContextMaker('TRT', gsims,
                           dict(imtls=imtls, truncation_level=trunclevel))
     pmap = _make_pmap(ctxs, cmaker, 50.)
     numpy.testing.assert_almost_equal(pmap[0].array, 0.066381)
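A Poisson plausibility check (ordinary hazard arithmetic, not part of the test): over the 50-year investigation time, the exceedance probability is bounded above by the case where every rupture occurrence exceeds the level:

import math
# 1 - exp(-t * sum of rates): the PoE if each occurrence always exceeded PGA 0.01g
print(1 - math.exp(-50 * (0.001 + 0.002)))  # ~0.1393, an upper bound on 0.066381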
Example #16
def sample_ruptures(sources, param, src_filter=source_site_noop_filter,
                    monitor=Monitor()):
    """
    :param sources:
        a sequence of sources of the same group
    :param param:
        a dictionary of additional parameters including gsims,
        ses_per_logic_tree_path and filter_distance
    :param src_filter:
        a source site filter
    :param monitor:
        monitor instance
    :yields:
        AccumDicts with rup_array, calc_times and eff_ruptures
    """
    # AccumDict of arrays with 3 elements weight, nsites, calc_time
    calc_times = AccumDict(accum=numpy.zeros(3, numpy.float32))
    # Compute and save stochastic event sets
    cmaker = ContextMaker(param['gsims'],
                          src_filter.integration_distance,
                          param, monitor)
    num_ses = param['ses_per_logic_tree_path']
    eff_ruptures = 0
    grp_id = sources[0].src_group_id
    eb_ruptures = []
    for src, sites in src_filter(sources):
        t0 = time.time()
        if len(eb_ruptures) > MAX_RUPTURES:
            yield AccumDict(rup_array=get_rup_array(eb_ruptures),
                            calc_times={},
                            eff_ruptures={})
            eb_ruptures.clear()
        ebrs = build_eb_ruptures(src, num_ses, cmaker, sites)
        n_occ = sum(ebr.n_occ for ebr in ebrs)
        eb_ruptures.extend(ebrs)
        eff_ruptures += src.num_ruptures
        dt = time.time() - t0
        calc_times[src.id] += numpy.array([n_occ, src.nsites, dt])
    yield AccumDict(rup_array=get_rup_array(eb_ruptures)
                    if eb_ruptures else (),
                    calc_times=calc_times,
                    eff_ruptures={grp_id: eff_ruptures})
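Unlike the earlier variants, this version is a generator that flushes a partial AccumDict whenever more than MAX_RUPTURES ruptures have accumulated, so the caller iterates over it. A sketch, assuming `sources` and `param` are prepared as documented and `process` is a hypothetical consumer:

for acc in sample_ruptures(sources, param):
    # each acc carries rup_array, calc_times and eff_ruptures
    process(acc)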
Example #17
 def acc0(self):
     """
     Initial accumulator, a dict grp_id -> ProbabilityMap(L, G)
     """
     csm_info = self.csm.info
     zd = AccumDict()
     num_levels = len(self.oqparam.imtls.array)
     rparams = {'grp_id', 'srcidx', 'occurrence_rate',
                'weight', 'probs_occur', 'sid_', 'lon_', 'lat_'}
     for grp in self.csm.src_groups:
         gsims = csm_info.gsim_lt.get_gsims(grp.trt)
         cm = ContextMaker(grp.trt, gsims)
         rparams.update(cm.REQUIRES_RUPTURE_PARAMETERS)
         for dparam in cm.REQUIRES_DISTANCES:
             rparams.add(dparam + '_')
         zd[grp.id] = ProbabilityMap(num_levels, len(gsims))
     zd.eff_ruptures = AccumDict()  # grp_id -> eff_ruptures
     zd.nsites = AccumDict()  # src.id -> nsites
     self.rparams = sorted(rparams)
     return zd
Example #18
 def acc0(self):
     """
     Initial accumulator, a dict grp_id -> ProbabilityMap(L, G)
     """
     zd = AccumDict()
     num_levels = len(self.oqparam.imtls.array)
     rparams = {'grp_id', 'srcidx', 'occurrence_rate',
                'weight', 'probs_occur', 'sid_', 'lon_', 'lat_'}
     gsims_by_trt = self.csm_info.get_gsims_by_trt()
     for sm in self.csm_info.source_models:
         for grp in sm.src_groups:
             gsims = gsims_by_trt[grp.trt]
             cm = ContextMaker(grp.trt, gsims)
             rparams.update(cm.REQUIRES_RUPTURE_PARAMETERS)
             for dparam in cm.REQUIRES_DISTANCES:
                 rparams.add(dparam + '_')
             zd[grp.id] = ProbabilityMap(num_levels, len(gsims))
     zd.eff_ruptures = AccumDict(accum=0)  # grp_id -> eff_ruptures
     self.rparams = sorted(rparams)
     self.sources_by_task = {}  # task_no => src_ids
     return zd
Example #19
    def __init__(self, database, gmpes, imts):
        """
        Shakemap results are stored in an HDF5 database
        :param str database:
            Path to database
        :param list gmpes:
            List of gmpes (as strings)
        :param list imts:
            List of IMTs (as strings)
        """

        self.gmpes = [GSIM_SET[gmpe]() for gmpe in gmpes]
        self.imts = [from_string(imt) for imt in imts]
        self.db_file = database
        self.context = ContextMaker(self.gmpes)
        # Determine the site attributes
        self.site_attribs = []
        for gmpe in self.gmpes:
            for site_attrib in gmpe.REQUIRES_SITES_PARAMETERS:
                if site_attrib not in self.site_attribs:
                    self.site_attribs.append(site_attrib)
Example #20
 def acc0(self):
     """
     Initial accumulator, a dict grp_id -> ProbabilityMap(L, G)
     """
     zd = AccumDict()
     num_levels = len(self.oqparam.imtls.array)
     rparams = {
         'grp_id', 'srcidx', 'occurrence_rate', 'weight', 'probs_occur',
         'sid_', 'lon_', 'lat_', 'rrup_'
     }
     gsims_by_trt = self.csm_info.get_gsims_by_trt()
     for sm in self.csm_info.source_models:
         for grp in sm.src_groups:
             gsims = gsims_by_trt[grp.trt]
             cm = ContextMaker(grp.trt, gsims)
             rparams.update(cm.REQUIRES_RUPTURE_PARAMETERS)
             for dparam in cm.REQUIRES_DISTANCES:
                 rparams.add(dparam + '_')
             zd[grp.id] = ProbabilityMap(num_levels, len(gsims))
     zd.eff_ruptures = AccumDict(accum=0)  # grp_id -> eff_ruptures
     self.rparams = sorted(rparams)
     for k in self.rparams:
         # variable length arrays
         vlen = k.endswith('_') or k == 'probs_occur'
         if k == 'grp_id':
             dt = U16
         elif k == 'sid_':
             dt = hdf5.vuint16
         elif vlen:
             dt = hdf5.vfloat32
         else:
             dt = F32
         self.datastore.create_dset('rup/' + k, dt)
     rparams = [p for p in self.rparams if not p.endswith('_')]
     dparams = [p[:-1] for p in self.rparams if p.endswith('_')]
     logging.info('Scalar parameters %s', rparams)
     logging.info('Vector parameters %s', dparams)
     self.sources_by_task = {}  # task_no => src_ids
     self.totrups = 0  # total number of ruptures before collapsing
     return zd
Example #21
 def acc0(self):
     """
     Initial accumulator, a dict grp_id -> ProbabilityMap(L, G)
     """
     zd = AccumDict()
     num_levels = len(self.oqparam.imtls.array)
     rparams = {
         'grp_id', 'occurrence_rate', 'weight', 'probs_occur', 'sid_',
         'lon_', 'lat_', 'rrup_'
     }
     gsims_by_trt = self.full_lt.get_gsims_by_trt()
     n = len(self.full_lt.sm_rlzs)
     trts = list(self.full_lt.gsim_lt.values)
     for sm in self.full_lt.sm_rlzs:
         for grp_id in self.full_lt.grp_ids(sm.ordinal):
             trt = trts[grp_id // n]
             gsims = gsims_by_trt[trt]
             cm = ContextMaker(trt, gsims)
             rparams.update(cm.REQUIRES_RUPTURE_PARAMETERS)
             for dparam in cm.REQUIRES_DISTANCES:
                 rparams.add(dparam + '_')
             zd[grp_id] = ProbabilityMap(num_levels, len(gsims))
     zd.eff_ruptures = AccumDict(accum=0)  # trt -> eff_ruptures
     self.rparams = sorted(rparams)
     for k in self.rparams:
         # variable length arrays
         vlen = k.endswith('_') or k == 'probs_occur'
         if k == 'grp_id':
             dt = U16
         elif k == 'sid_':
             dt = hdf5.vuint16
         elif vlen:
             dt = hdf5.vfloat32
         else:
             dt = F32
         self.datastore.create_dset('rup/' + k, dt)
     self.by_task = {}  # task_no => src_ids
     self.totrups = 0  # total number of ruptures before collapsing
     return zd
Example #22
def build_ruptures(sources, src_filter, param, monitor):
    """
    :param sources: a list with a single UCERF source
    :param src_filter: a SourceFilter instance
    :param param: extra parameters
    :param monitor: a Monitor instance
    :returns: an AccumDict grp_id -> EBRuptures
    """
    [src] = sources
    res = AccumDict()
    res.calc_times = []
    sampl_mon = monitor('sampling ruptures', measuremem=True)
    filt_mon = monitor('filtering ruptures', measuremem=False)
    res.trt = DEFAULT_TRT
    background_sids = src.get_background_sids(src_filter)
    sitecol = src_filter.sitecol
    cmaker = ContextMaker(param['gsims'], src_filter.integration_distance)
    num_ses = param['ses_per_logic_tree_path']
    samples = getattr(src, 'samples', 1)
    n_occ = AccumDict(accum=0)
    t0 = time.time()
    with sampl_mon:
        for sam_idx in range(samples):
            for ses_idx, ses_seed in param['ses_seeds']:
                seed = sam_idx * TWO16 + ses_seed
                rups, occs = generate_event_set(src, background_sids,
                                                src_filter, ses_idx, seed)
                for rup, occ in zip(rups, occs):
                    n_occ[rup] += occ
    tot_occ = sum(n_occ.values())
    dic = {'eff_ruptures': {src.src_group_id: src.num_ruptures}}
    with filt_mon:
        eb_ruptures = stochastic.build_eb_ruptures(src, num_ses, cmaker,
                                                   sitecol, n_occ.items())
        dic['rup_array'] = (stochastic.get_rup_array(eb_ruptures)
                            if eb_ruptures else ())
    dt = time.time() - t0
    dic['calc_times'] = {src.id: numpy.array([tot_occ, len(sitecol), dt], F32)}
    return dic
Example #23
    def pt_src_are(self, pt_src, gsim, weight, lnSA, monitor):
        """
        Returns the vector-valued Annual Rate of Exceedance for one single point-source

        :param pt_src: single instance of class "openquake.hazardlib.source.area.PointSource"
        :param gsim: tuple, containing (only one?) instance of Openquake GSIM class
        :param: weight, weight to be multiplied to ARE estimate
        :param lnSA: list, natural logarithm of acceleration values for each spectral period.
            Note : Values should be ordered in the same order than self.periods
        """
        annual_rate = 0

        # Loop over ruptures:
        # i.e. one rupture for each combination of (mag, nodal plane, hypocentral depth):
        for r in pt_src.iter_ruptures():
        # NOTE: if accounting for "pointsource_distance" in the ini file, one
        # should use the point_ruptures() method below, which yields one
        # rupture per magnitude (neglecting floating and the combinations of
        # nodal plane and hypocentral depth):
        ## for r in pt_src.point_ruptures():
            # Note: Seismicity rate evenly distributed over all point sources
            #       Seismicity rate also accounts for FMD (i.e. decreasing for
            #         increasing magnitude value)

            # Filter the site collection with respect to the rupture and prepare context objects:
            context_maker = ContextMaker(r.tectonic_region_type, gsim)
            site_ctx, dist_ctx = context_maker.make_contexts(self.sites, r)
            rup_ctx = RuptureContext()
            rup_ctx.mag = r.mag
            rup_ctx.rake = r.rake
            assert len(gsim) == 1

            annual_rate += r.occurrence_rate * weight * self.gm_poe(gsim[0],
                                                                    dist_ctx,
                                                                    rup_ctx,
                                                                    site_ctx,
                                                                    lnSA)
        return annual_rate
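In effect the loop accumulates the weighted annual rate of exceedance, ARE = weight * sum over ruptures of (occurrence_rate * P(GM > level | rupture)). A compact restatement with hypothetical numbers standing in for r.occurrence_rate and self.gm_poe(...):

import numpy
rates = numpy.array([1e-3, 2e-3])   # per-rupture occurrence rates
poes = numpy.array([0.10, 0.05])    # P(GM > level | rupture)
weight = 0.5
print(weight * (rates * poes).sum())  # same accumulation as the loop above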
Example #24
def sample_ruptures(sources,
                    param,
                    src_filter=source_site_noop_filter,
                    monitor=Monitor()):
    """
    :param sources:
        a sequence of sources of the same group
    :param param:
        a dictionary of additional parameters including rlzs_by_gsim,
        ses_per_logic_tree_path and filter_distance
    :param src_filter:
        a source site filter
    :param monitor:
        monitor instance
    :returns:
        a dictionary with eb_ruptures and calc_times
    """
    eb_ruptures = []
    # AccumDict of arrays with 3 elements weight, nsites, calc_time
    calc_times = AccumDict(accum=numpy.zeros(3, numpy.float32))
    # Compute and save stochastic event sets
    cmaker = ContextMaker(param['rlzs_by_gsim'],
                          src_filter.integration_distance, param, monitor)
    num_ses = param['ses_per_logic_tree_path']
    rlzs = numpy.concatenate(list(param['rlzs_by_gsim'].values()))
    for src, sites in src_filter(sources):
        t0 = time.time()
        # NB: the number of occurrences is very low, << 1, so it is
        # more efficient to filter only the ruptures that occur, i.e.
        # to call sample_ruptures *before* the filtering
        ebrs = build_eb_ruptures(src, rlzs, num_ses, cmaker, sites)
        n_evs = sum(ebr.multiplicity for ebr in ebrs)
        eb_ruptures.extend(ebrs)
        dt = time.time() - t0
        calc_times[src.id] += numpy.array([n_evs, src.nsites, dt])
    dic = dict(eb_ruptures=eb_ruptures, calc_times=calc_times)
    return dic
Example #25
    def acc0(self):
        """
        Initial accumulator, a dict grp_id -> ProbabilityMap(L, G)
        """
        zd = AccumDict()
        num_levels = len(self.oqparam.imtls.array)
        rparams = {
            'grp_id', 'occurrence_rate', 'weight', 'probs_occur', 'clon_',
            'clat_', 'rrup_'
        }
        gsims_by_trt = self.full_lt.get_gsims_by_trt()
        n = len(self.full_lt.sm_rlzs)
        trts = list(self.full_lt.gsim_lt.values)
        for sm in self.full_lt.sm_rlzs:
            for grp_id in self.full_lt.grp_ids(sm.ordinal):
                trt = trts[grp_id // n]
                gsims = gsims_by_trt[trt]
                cm = ContextMaker(trt, gsims)
                rparams.update(cm.REQUIRES_RUPTURE_PARAMETERS)
                for dparam in cm.REQUIRES_DISTANCES:
                    rparams.add(dparam + '_')
        zd.eff_ruptures = AccumDict(accum=0)  # trt -> eff_ruptures
        if self.few_sites:
            self.rparams = sorted(rparams)
            for k in self.rparams:
                # variable length arrays
                if k == 'grp_id':
                    self.datastore.create_dset('rup/' + k, U16)
                elif k == 'probs_occur':  # vlen
                    self.datastore.create_dset('rup/' + k, hdf5.vfloat64)
                elif k.endswith('_'):  # array of shape (U, N)
                    self.datastore.create_dset('rup/' + k,
                                               F32,
                                               shape=(None, self.N),
                                               compression='gzip')
                else:
                    self.datastore.create_dset('rup/' + k, F32)
        else:
            self.rparams = {}
        self.by_task = {}  # task_no => src_ids
        self.totrups = 0  # total number of ruptures before collapsing
        self.maxradius = 0
        self.gidx = {
            tuple(grp_ids): i
            for i, grp_ids in enumerate(self.datastore['grp_ids'])
        }

        # estimate max memory per core
        max_num_gsims = max(len(gsims) for gsims in gsims_by_trt.values())
        max_num_grp_ids = max(len(grp_ids) for grp_ids in self.gidx)
        pmapbytes = self.N * num_levels * max_num_gsims * max_num_grp_ids * 8
        if pmapbytes > TWO32:
            logging.warning(TOOBIG % (self.N, num_levels, max_num_gsims,
                                      max_num_grp_ids, humansize(pmapbytes)))
        logging.info(MAXMEMORY % (self.N, num_levels, max_num_gsims,
                                  max_num_grp_ids, humansize(pmapbytes)))

        self.Ns = len(self.csm.source_info)
        if self.oqparam.disagg_by_src:
            self.M = len(self.oqparam.imtls)
            self.L1 = num_levels // self.M
            sources = encode([src_id for src_id in self.csm.source_info])
            size, msg = get_array_nbytes(
                dict(N=self.N, R=self.R, M=self.M, L1=self.L1, Ns=self.Ns))
            if size > TWO32:
                raise RuntimeError(
                    'The matrix disagg_by_src is too large: %s' % msg)
            self.datastore.create_dset(
                'disagg_by_src', F32,
                (self.N, self.R, self.M, self.L1, self.Ns))
            self.datastore.set_shape_attrs('disagg_by_src',
                                           site_id=self.N,
                                           rlz_id=self.R,
                                           imt=list(self.oqparam.imtls),
                                           lvl=self.L1,
                                           src_id=sources)
        return zd
Example #26
def disaggregation(sources,
                   site,
                   imt,
                   iml,
                   gsim_by_trt,
                   truncation_level,
                   n_epsilons,
                   mag_bin_width,
                   dist_bin_width,
                   coord_bin_width,
                   source_filter=filters.nofilter,
                   **kwargs):
    """
    Compute "Disaggregation" matrix representing conditional probability of an
    intensity mesaure type ``imt`` exceeding, at least once, an intensity
    measure level ``iml`` at a geographical location ``site``, given rupture
    scenarios classified in terms of:

    - rupture magnitude
    - Joyner-Boore distance from rupture surface to site
    - longitude and latitude of the surface projection of a rupture's point
      closest to ``site``
    - epsilon: number of standard deviations by which an intensity measure
      level deviates from the median value predicted by a GSIM, given the
      rupture parameters
    - rupture tectonic region type

    In other words, the disaggregation matrix makes it possible to compute
    the probability that a scenario with the specified properties (e.g.,
    magnitude, or magnitude and distance) causes one or more exceedances
    of a given hazard level.

    For more detailed information about the disaggregation, see for instance
    "Disaggregation of Seismic Hazard", Paolo Bazzurro, C. Allin Cornell,
    Bulletin of the Seismological Society of America, Vol. 89, pp. 501-520,
    April 1999.

    :param sources:
        Seismic source model, as for
        :mod:`PSHA <openquake.hazardlib.calc.hazard_curve>` calculator it
        should be an iterator of seismic sources.
    :param site:
        :class:`~openquake.hazardlib.site.Site` of interest to calculate
        disaggregation matrix for.
    :param imt:
        Instance of :mod:`intensity measure type <openquake.hazardlib.imt>`
        class.
    :param iml:
        Intensity measure level. A float value in units of ``imt``.
    :param gsim_by_trt:
        Tectonic region type to GSIM objects mapping.
    :param truncation_level:
        Float, number of standard deviations for truncation of the intensity
        distribution.
    :param n_epsilons:
        Integer number of epsilon histogram bins in the result matrix.
    :param mag_bin_width:
        Magnitude discretization step, width of one magnitude histogram bin.
    :param dist_bin_width:
        Distance histogram discretization step, in km.
    :param coord_bin_width:
        Longitude and latitude histograms discretization step,
        in decimal degrees.
    :param source_filter:
        Optional source-site filter function. See
        :mod:`openquake.hazardlib.calc.filters`.

    :returns:
        A tuple of two items. The first is itself a tuple of bin edge
        information for (in the specified order) magnitude, distance,
        longitude, latitude, epsilon and tectonic region types.

        The second item is a 6D array representing the full disaggregation
        matrix. Dimensions are in the same order as the bin edges in the
        first item of the result tuple. The matrix can be used directly by
        pmf-extractor functions.
    """
    trts = sorted(set(src.tectonic_region_type for src in sources))
    trt_num = dict((trt, i) for i, trt in enumerate(trts))
    rlzs_by_gsim = {gsim_by_trt[trt]: [0] for trt in trts}
    by_trt = groupby(sources, operator.attrgetter('tectonic_region_type'))
    bdata = {}  # by trt, magi
    sitecol = SiteCollection([site])
    iml2 = numpy.array([[iml]])
    eps3 = _eps3(truncation_level, n_epsilons)

    rups = AccumDict(accum=[])
    cmaker = {}  # trt -> cmaker
    for trt, srcs in by_trt.items():
        tom = srcs[0].temporal_occurrence_model
        cmaker[trt] = ContextMaker(
            trt, rlzs_by_gsim, {
                'truncation_level': truncation_level,
                'maximum_distance': source_filter.integration_distance,
                'imtls': {
                    str(imt): [iml]
                }
            })
        rups[trt].extend(cmaker[trt].from_srcs(srcs, sitecol))
    min_mag = min(r.mag for rs in rups.values() for r in rs)
    max_mag = max(r.mag for rs in rups.values() for r in rs)
    mag_bins = mag_bin_width * numpy.arange(
        int(numpy.floor(min_mag / mag_bin_width)),
        int(numpy.ceil(max_mag / mag_bin_width) + 1))

    for trt in cmaker:
        for magi, ctxs in enumerate(_magbin_groups(rups[trt], mag_bins)):
            set_mean_std(ctxs, cmaker[trt])
            bdata[trt, magi] = disaggregate(ctxs, tom, [0], {imt: iml2}, eps3)

    if sum(len(bd.dists) for bd in bdata.values()) == 0:
        warnings.warn(
            'No ruptures have contributed to the hazard at site %s' % site,
            RuntimeWarning)
        return None, None

    min_dist = min(bd.dists.min() for bd in bdata.values())
    max_dist = max(bd.dists.max() for bd in bdata.values())
    dist_bins = dist_bin_width * numpy.arange(
        int(numpy.floor(min_dist / dist_bin_width)),
        int(numpy.ceil(max_dist / dist_bin_width) + 1))
    lon_bins, lat_bins = lon_lat_bins(site.location.x, site.location.y,
                                      max_dist, coord_bin_width)
    eps_bins = numpy.linspace(-truncation_level, truncation_level,
                              n_epsilons + 1)
    bin_edges = (mag_bins, dist_bins, lon_bins, lat_bins, eps_bins)
    matrix = numpy.zeros(
        (len(mag_bins) - 1, len(dist_bins) - 1, len(lon_bins) - 1,
         len(lat_bins) - 1, len(eps_bins) - 1, len(trts)))  # 6D
    for trt, magi in bdata:
        mat7 = _build_disagg_matrix(bdata[trt, magi], bin_edges[1:])
        matrix[magi, ..., trt_num[trt]] = mat7[..., 0, 0, 0]
    return bin_edges + (trts, ), matrix
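The bin-edge construction is simple floor/ceil arithmetic; a hedged check with hypothetical magnitude extremes:

import numpy
mag_bin_width = 0.5
min_mag, max_mag = 5.05, 6.95  # hypothetical values
mag_bins = mag_bin_width * numpy.arange(
    int(numpy.floor(min_mag / mag_bin_width)),
    int(numpy.ceil(max_mag / mag_bin_width) + 1))
print(mag_bins)  # [5.  5.5 6.  6.5 7. ], spanning both extremes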
Example #27
    def get_mean_and_stddevs(self, sites, rup, dists, imt, stddev_types):
        """
        Calculate and return the mean value of the intensity distribution
        and its standard deviation.

        Method must be implemented by subclasses.

        :param sites:
            Instance of :class:`openquake.hazardlib.site.SiteCollection`
            with parameters of sites
            collection assigned to respective values as numpy arrays.
            Only those attributes that are listed in the class's
            :attr:`REQUIRES_SITES_PARAMETERS` set are available.
        :param rup:
            Instance of :class:`openquake.hazardlib.source.rupture.BaseRupture`
            with parameters of a rupture
            assigned to respective values. Only those attributes that are
            listed in the class's :attr:`REQUIRES_RUPTURE_PARAMETERS` set
            are available.
        :param dists:
            Instance of :class:`DistancesContext` with values of distance
            measures between the rupture and each site of the collection
            assigned to respective values as numpy arrays. Only those
            attributes that are listed in the class's
            :attr:`REQUIRES_DISTANCES` set are available.
        :param imt:
            An instance (not a class) of intensity measure type.
            See :mod:`openquake.hazardlib.imt`.
        :param stddev_types:
            List of standard deviation types, constants from
            :class:`openquake.hazardlib.const.StdDev`.
            The result should include standard deviation values for each of
            the types in this list.

        :returns:
            Method should return a tuple of two items. First item should be
            a numpy array of floats -- mean values of respective component
            of a chosen intensity measure type, and the second should be
            a list of numpy arrays of standard deviation values for the same
            single component of the same single intensity measure type, one
            array for each type in ``stddev_types`` parameter, preserving
            the order.

        Combining the interfaces for mean and standard deviation values in a
        single method makes it possible to avoid redoing intermediate
        calculations shared between the mean and stddev formulae, without
        resorting to internal state (which would effectively make the GSIM
        non-reentrant).

        However, it is advisable to split the calculation of mean and stddev
        values and have ``get_mean_and_stddevs()`` merely combine the two
        (possibly computing interim steps).
        """
        # mean and stddevs by calling the underlying .compute method
        N = len(sites)
        mean = numpy.zeros((1, N))
        sig = numpy.zeros((1, N))
        tau = numpy.zeros((1, N))
        phi = numpy.zeros((1, N))
        if sites is not rup or dists is not rup:
            # convert three old-style contexts to a single new-style context
            ctx = full_context(sites, rup, dists)
        else:
            ctx = rup  # rup is already a good object
        if self.compute.__annotations__.get("ctx") is numpy.recarray:
            cmaker = ContextMaker('*', [self], {'imtls': {imt: [0]}})
            ctx = cmaker.recarray([ctx])
        self.compute(ctx, [imt], mean, sig, tau, phi)
        stddevs = []
        for stddev_type in stddev_types:
            if stddev_type == const.StdDev.TOTAL:
                stddevs.append(sig[0])
            elif stddev_type == const.StdDev.INTER_EVENT:
                stddevs.append(tau[0])
            elif stddev_type == const.StdDev.INTRA_EVENT:
                stddevs.append(phi[0])
        return mean[0], stddevs
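A hedged usage sketch of this backward-compatibility shim, reusing the context style from Example #15 (import paths assumed from a recent hazardlib; exact behavior depends on the version):

import numpy
from openquake.hazardlib import const
from openquake.hazardlib.imt import PGA
from openquake.hazardlib.contexts import RuptureContext
from openquake.hazardlib.gsim.boore_atkinson_2008 import BooreAtkinson2008

ctx = RuptureContext()
ctx.mag, ctx.rake = 5.5, 90
ctx.sids = numpy.array([0])
ctx.vs30 = numpy.array([760.])
ctx.rrup = numpy.array([100.])
ctx.rjb = numpy.array([99.])
# the same object passed three times takes the fast path (no full_context call)
mean, [sigma] = BooreAtkinson2008().get_mean_and_stddevs(
    ctx, ctx, ctx, PGA(), [const.StdDev.TOTAL])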
Example #28
    def acc0(self):
        """
        Initial accumulator, a dict grp_id -> ProbabilityMap(L, G)
        """
        zd = AccumDict()
        num_levels = len(self.oqparam.imtls.array)
        rparams = {'gidx', 'occurrence_rate', 'clon_', 'clat_', 'rrup_'}
        gsims_by_trt = self.full_lt.get_gsims_by_trt()
        n = len(self.full_lt.sm_rlzs)
        trts = list(self.full_lt.gsim_lt.values)
        for sm in self.full_lt.sm_rlzs:
            for grp_id in self.full_lt.grp_ids(sm.ordinal):
                trt = trts[grp_id // n]
                gsims = gsims_by_trt[trt]
                cm = ContextMaker(trt, gsims)
                rparams.update(cm.REQUIRES_RUPTURE_PARAMETERS)
                for dparam in cm.REQUIRES_DISTANCES:
                    rparams.add(dparam + '_')
        zd.eff_ruptures = AccumDict(accum=0)  # trt -> eff_ruptures
        mags = set()
        for trt, dset in self.datastore['source_mags'].items():
            mags.update(dset[:])
        mags = sorted(mags)
        if self.few_sites:
            self.rdt = [('nsites', U16)]
            dparams = ['sids_']
            for rparam in rparams:
                if rparam.endswith('_'):
                    dparams.append(rparam)
                elif rparam == 'gidx':
                    self.rdt.append((rparam, U32))
                else:
                    self.rdt.append((rparam, F32))
            self.rdt.append(('idx', U32))
            self.rdt.append(('probs_occur', hdf5.vfloat64))
            for mag in mags:
                name = 'mag_%s/' % mag
                self.datastore.create_dset(name + 'rctx',
                                           self.rdt, (None, ),
                                           compression='gzip')
                for dparam in dparams:
                    dt = hdf5.vuint32 if dparam == 'sids_' else hdf5.vfloat32
                    self.datastore.create_dset(name + dparam,
                                               dt, (None, ),
                                               compression='gzip')
        self.by_task = {}  # task_no => src_ids
        self.totrups = 0  # total number of ruptures before collapsing
        self.maxradius = 0

        # estimate max memory per core
        max_num_gsims = max(len(gsims) for gsims in gsims_by_trt.values())
        max_num_grp_ids = max(
            len(grp_ids) for grp_ids in self.datastore['grp_ids'])
        pmapbytes = self.N * num_levels * max_num_gsims * max_num_grp_ids * 8
        if pmapbytes > TWO32:
            logging.warning(TOOBIG % (self.N, num_levels, max_num_gsims,
                                      max_num_grp_ids, humansize(pmapbytes)))
        logging.info(MAXMEMORY % (self.N, num_levels, max_num_gsims,
                                  max_num_grp_ids, humansize(pmapbytes)))

        self.Ns = len(self.csm.source_info)
        if self.oqparam.disagg_by_src:
            sources = self.get_source_ids()
            self.datastore.create_dset(
                'disagg_by_src', F32,
                (self.N, self.R, self.M, self.L1, self.Ns))
            self.datastore.set_shape_attrs('disagg_by_src',
                                           site_id=self.N,
                                           rlz_id=self.R,
                                           imt=list(self.oqparam.imtls),
                                           lvl=self.L1,
                                           src_id=sources)
        return zd