Example #1
    def __init__(self, trt, gsims, param=None, monitor=Monitor()):
        param = param or {}  # empty in the gmpe-smtk
        self.af = param.get('af', None)
        self.max_sites_disagg = param.get('max_sites_disagg', 10)
        self.collapse_level = param.get('collapse_level', False)
        self.trt = trt
        self.gsims = gsims
        self.single_site_opt = numpy.array(
            [hasattr(gsim, 'get_mean_std1') for gsim in gsims])
        self.maximum_distance = (param.get('maximum_distance')
                                 or MagDepDistance({}))
        self.investigation_time = param.get('investigation_time')
        self.trunclevel = param.get('truncation_level')
        self.num_epsilon_bins = param.get('num_epsilon_bins', 1)
        self.grp_id = param.get('grp_id', 0)
        self.effect = param.get('effect')
        self.task_no = getattr(monitor, 'task_no', 0)
        for req in self.REQUIRES:
            reqset = set()
            for gsim in gsims:
                reqset.update(getattr(gsim, 'REQUIRES_' + req))
            setattr(self, 'REQUIRES_' + req, reqset)
        # self.pointsource_distance is a dict mag -> dist, possibly empty
        psd = param.get('pointsource_distance')
        if hasattr(psd, 'ddic'):
            self.pointsource_distance = psd.ddic.get(trt, {})
        else:
            self.pointsource_distance = {}
        if 'imtls' in param:
            self.imtls = param['imtls']
        elif 'hazard_imtls' in param:
            self.imtls = DictArray(param['hazard_imtls'])
        else:
            self.imtls = {}
        self.imts = [imt_module.from_string(imt) for imt in self.imtls]
        self.reqv = param.get('reqv')
        if self.reqv is not None:
            self.REQUIRES_DISTANCES.add('repi')
        self.mon = monitor
        self.ctx_mon = monitor('make_contexts', measuremem=False)
        self.loglevels = DictArray(self.imtls) if self.imtls else {}
        self.shift_hypo = param.get('shift_hypo')
        with warnings.catch_warnings():
            # avoid RuntimeWarning: divide by zero encountered in log
            warnings.simplefilter("ignore")
            for imt, imls in self.imtls.items():
                if imt != 'MMI':
                    self.loglevels[imt] = numpy.log(imls)

        # instantiate monitors
        self.gmf_mon = monitor('computing mean_std', measuremem=False)
        self.poe_mon = monitor('get_poes', measuremem=False)
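A standalone sketch of the loglevels computation above: numpy.log returns -inf and emits a RuntimeWarning for zero intensity levels, which is exactly what the catch_warnings block suppresses (the levels below are made up for illustration).

import warnings
import numpy

imls = numpy.array([0.0, 0.1, 0.2])  # a zero level triggers the warning
with warnings.catch_warnings():
    # avoid RuntimeWarning: divide by zero encountered in log
    warnings.simplefilter("ignore")
    loglevels = numpy.log(imls)
print(loglevels)  # [-inf -2.30258509 -1.60943791]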
Example #2
 def __init__(self, trt, gsims, param=None, monitor=Monitor()):
     param = param or {}
     self.max_sites_disagg = param.get('max_sites_disagg', 10)
     self.collapse_level = param.get('collapse_level', False)
     self.point_rupture_bins = param.get('point_rupture_bins', 20)
     self.trt = trt
     self.gsims = gsims
     self.maximum_distance = (param.get('maximum_distance')
                              or IntegrationDistance({}))
     self.trunclevel = param.get('truncation_level')
     self.effect = param.get('effect')
     for req in self.REQUIRES:
         reqset = set()
         for gsim in gsims:
             reqset.update(getattr(gsim, 'REQUIRES_' + req))
         setattr(self, 'REQUIRES_' + req, reqset)
     # self.pointsource_distance is a dict mag -> dist, possibly empty
     if param.get('pointsource_distance'):
         self.pointsource_distance = param['pointsource_distance'][trt]
     else:
         self.pointsource_distance = {}
     self.filter_distance = 'rrup'
     if 'imtls' in param:
         self.imtls = param['imtls']
     elif 'hazard_imtls' in param:
         self.imtls = DictArray(param['hazard_imtls'])
     else:
         self.imtls = {}
     self.imts = [imt_module.from_string(imt) for imt in self.imtls]
     self.reqv = param.get('reqv')
     if self.reqv is not None:
         self.REQUIRES_DISTANCES.add('repi')
     if hasattr(gsims, 'items'):
         # gsims is actually a dict rlzs_by_gsim
         # since the ContextMaker must be used on ruptures with the
         # same TRT, given a realization there is a single gsim
         self.gsim_by_rlzi = {}
         for gsim, rlzis in gsims.items():
             for rlzi in rlzis:
                 self.gsim_by_rlzi[rlzi] = gsim
     self.mon = monitor
     self.ctx_mon = monitor('make_contexts', measuremem=False)
     self.loglevels = DictArray(self.imtls)
     self.shift_hypo = param.get('shift_hypo')
     with warnings.catch_warnings():
         # avoid RuntimeWarning: divide by zero encountered in log
         warnings.simplefilter("ignore")
         for imt, imls in self.imtls.items():
             if imt != 'MMI':
                 self.loglevels[imt] = numpy.log(imls)
Example #3
    def __init__(self, trt, gsims, param=None, monitor=Monitor()):
        param = param or {}
        self.af = param.get('af', None)
        self.max_sites_disagg = param.get('max_sites_disagg', 10)
        self.split_sources = param.get('split_sources', True)
        self.collapse_level = param.get('collapse_level', False)
        self.point_rupture_bins = param.get('point_rupture_bins', 20)
        self.trt = trt
        self.gsims = gsims
        self.maximum_distance = (param.get('maximum_distance')
                                 or MagDepDistance({}))
        self.trunclevel = param.get('truncation_level')
        self.effect = param.get('effect')
        for req in self.REQUIRES:
            reqset = set()
            for gsim in gsims:
                reqset.update(getattr(gsim, 'REQUIRES_' + req))
            setattr(self, 'REQUIRES_' + req, reqset)
        # self.pointsource_distance is a dict mag -> dist, possibly empty
        if param.get('pointsource_distance'):
            self.pointsource_distance = param['pointsource_distance'][trt]
        else:
            self.pointsource_distance = {}
        self.filter_distance = 'rrup'
        if 'imtls' in param:
            self.imtls = param['imtls']
        elif 'hazard_imtls' in param:
            self.imtls = DictArray(param['hazard_imtls'])
        else:
            self.imtls = {}
        self.imts = [imt_module.from_string(imt) for imt in self.imtls]
        self.reqv = param.get('reqv')
        if self.reqv is not None:
            self.REQUIRES_DISTANCES.add('repi')
        self.mon = monitor
        self.ctx_mon = monitor('make_contexts', measuremem=False)
        self.loglevels = DictArray(self.imtls)
        self.shift_hypo = param.get('shift_hypo')
        with warnings.catch_warnings():
            # avoid RuntimeWarning: divide by zero encountered in log
            warnings.simplefilter("ignore")
            for imt, imls in self.imtls.items():
                if imt != 'MMI':
                    self.loglevels[imt] = numpy.log(imls)

        # instantiate monitors
        self.gmf_mon = monitor('computing mean_std', measuremem=False)
        self.poe_mon = monitor('get_poes', measuremem=False)
Example #4
def export_hcurves_csv(ekey, dstore):
    """
    Exports the hazard curves into several .csv files

    :param ekey: export key, i.e. a pair (datastore key, fmt)
    :param dstore: datastore object
    """
    oq = dstore['oqparam']
    info = get_info(dstore)
    R = dstore['full_lt'].get_num_rlzs()
    sitecol = dstore['sitecol']
    sitemesh = get_mesh(sitecol)
    key, kind, fmt = get_kkf(ekey)
    fnames = []
    comment = dstore.metadata
    hmap_dt = oq.hmap_dt()
    for kind in oq.get_kinds(kind, R):
        fname = hazard_curve_name(dstore, (key, fmt), kind)
        comment.update(kind=kind, investigation_time=oq.investigation_time)
        if (key in ('hmaps', 'uhs') and oq.uniform_hazard_spectra or
                oq.hazard_maps):
            hmap = extract(dstore, 'hmaps?kind=' + kind)[kind]
        if key == 'uhs' and oq.poes and oq.uniform_hazard_spectra:
            uhs_curves = calc.make_uhs(hmap, info)
            writers.write_csv(
                fname, util.compose_arrays(sitemesh, uhs_curves),
                comment=comment)
            fnames.append(fname)
        elif key == 'hmaps' and oq.poes and oq.hazard_maps:
            fnames.extend(
                export_hmaps_csv(ekey, fname, sitemesh,
                                 hmap.flatten().view(hmap_dt), comment))
        elif key == 'hcurves':
            # shape (N, R|S, M, L1)
            if ('amplification' in oq.inputs and
                    oq.amplification_method == 'convolution'):
                imtls = DictArray(
                    {imt: oq.soil_intensities for imt in oq.imtls})
            else:
                imtls = oq.imtls
            for imt, imls in imtls.items():
                hcurves = extract(
                    dstore, 'hcurves?kind=%s&imt=%s' % (kind, imt))[kind]
                fnames.append(
                    export_hcurves_by_imt_csv(
                        ekey, kind, fname, sitecol, hcurves, imt, imls,
                        comment))
    return sorted(fnames)
Example #5
def get_pmap_from_nrml(oqparam, fname):
    """
    :param oqparam:
        an :class:`openquake.commonlib.oqvalidation.OqParam` instance
    :param fname:
        an XML file containing hazard curves
    :returns:
        sitecol, curve array
    """
    hcurves_by_imt = {}
    oqparam.hazard_imtls = imtls = collections.OrderedDict()
    for hcurves in nrml.read(fname):
        imt = hcurves['IMT']
        oqparam.investigation_time = hcurves['investigationTime']
        if imt == 'SA':
            imt += '(%s)' % hcurves['saPeriod']
        imtls[imt] = ~hcurves.IMLs
        data = sorted((~node.Point.pos, ~node.poEs) for node in hcurves[1:])
        hcurves_by_imt[imt] = numpy.array([d[1] for d in data])
    lons, lats = [], []
    for xy, poes in data:
        lons.append(xy[0])
        lats.append(xy[1])
    mesh = geo.Mesh(numpy.array(lons), numpy.array(lats))
    sitecol = get_site_collection(oqparam, mesh)
    num_levels = sum(len(v) for v in imtls.values())
    array = numpy.zeros((len(sitecol), num_levels))
    imtls = DictArray(imtls)
    for imt_ in hcurves_by_imt:
        array[:, imtls.slicedic[imt_]] = hcurves_by_imt[imt_]
    return sitecol, ProbabilityMap.from_array(array, sitecol.sids)
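The assignment `array[:, imtls.slicedic[imt_]]` relies on DictArray keeping all levels in a single flat array with one slice per IMT; a minimal toy sketch of that idea (an assumed reimplementation, not the real openquake.baselib class):

import numpy

class MiniDictArray:
    # toy stand-in for DictArray: one flat array plus per-key slices
    def __init__(self, dic):
        self.slicedic = {}
        start = 0
        for key, values in dic.items():
            self.slicedic[key] = slice(start, start + len(values))
            start += len(values)
        self.array = numpy.concatenate(
            [numpy.asarray(v, float) for v in dic.values()])

    def __getitem__(self, key):
        return self.array[self.slicedic[key]]

imtls = MiniDictArray({'PGA': [0.1, 0.2], 'SA(1.0)': [0.1, 0.2, 0.4]})
assert len(imtls.array) == 5
assert list(imtls['SA(1.0)']) == [0.1, 0.2, 0.4]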
Example #6
 def test_single_site(self):
     # NB: the performance of get_mean_std is totally dominated by two
     # concomitant factors:
     # 1) source splitting (do not split the area source)
     # 2) collect the contexts in a single array
     # together they give a 200x speedup
     # numba is totally useless
     site = Site(Point(0, 0),
                 vs30=760.,
                 z1pt0=48.0,
                 z2pt5=0.607,
                 vs30measured=True)
     sitecol = SiteCollection([site])
     imtls = {"PGA": [.123]}
     for period in numpy.arange(.1, 1.3, .1):
         imtls['SA(%.2f)' % period] = [.123]
     assert len(imtls) == 13  # PGA plus 12 SA periods
     oq = unittest.mock.Mock(imtls=DictArray(imtls),
                             maximum_distance=MagDepDistance.new('300'))
     mon = Monitor()
     hcurve = calc_hazard_curve(sitecol, asource, [ExampleA2021()], oq, mon)
     for child in mon.children:
         print(child)
     got = hcurve.array[:, 0]
     exp = [
         0.103379, 0.468937, 0.403896, 0.278772, 0.213645, 0.142985,
         0.103438, 0.079094, 0.062861, 0.051344, 0.04066, 0.031589, 0.024935
     ]
     numpy.testing.assert_allclose(got, exp, atol=1E-5)
Example #7
 def imtls(self):
     """
     Returns a DictArray with the hazard intensity measure types and
     levels, if given, or the risk ones.
     """
     imtls = self.hazard_imtls or self.risk_imtls
     return DictArray(imtls) if imtls else {}
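The `or` fallback works because an empty dict is falsy in Python: the hazard levels win when present, otherwise the risk levels are used. A tiny illustration with made-up values:

hazard_imtls = {}
risk_imtls = {'PGA': [0.1, 0.2]}
imtls = hazard_imtls or risk_imtls
assert imtls is risk_imtls  # empty hazard dict falls back to the risk one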
Example #8
    def test_resampling(self):
        path = os.path.dirname(os.path.abspath(__file__))

        # Read AF
        f_af = os.path.join(path, 'data', 'convolution', 'amplification.csv')
        df_af = read_csv(f_af, {'ampcode': ampcode_dt, None: numpy.float64},
                         index='ampcode')

        # Read hc
        f_hc = os.path.join(path, 'data', 'convolution', 'hazard_curve.csv')
        df_hc = pd.read_csv(f_hc, skiprows=1)

        # Get imls from the hc
        imls = []
        pattern = 'poe-(\\d*\\.\\d*)'
        for k in df_hc.columns:
            m = re.match(pattern, k)
            if m:
                imls.append(float(m.group(1)))
        imtls = DictArray({'PGA': imls})

        # Create a list with one ProbabilityCurve instance
        poes = numpy.squeeze(df_hc.iloc[0, 3:].to_numpy())
        tmp = numpy.expand_dims(poes, 1)
        pcurve = ProbabilityCurve(tmp)

        soil_levels = numpy.array(list(numpy.geomspace(0.001, 2, 50)))
        a = Amplifier(imtls, df_af, soil_levels)
        res = a.amplify(b'MQ15', pcurve)

        tmp = 'hazard_curve_expected.csv'
        fname_expected = os.path.join(path, 'data', 'convolution', tmp)
        expected = numpy.loadtxt(fname_expected)

        numpy.testing.assert_allclose(numpy.squeeze(res.array), expected)
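The pattern `poe-(\d*\.\d*)` pulls the intensity level out of header names such as poe-0.001; a standalone check with invented column names:

import re

pattern = 'poe-(\\d*\\.\\d*)'
columns = ['lon', 'lat', 'ampcode', 'poe-0.001', 'poe-0.01', 'poe-0.1']
imls = []
for k in columns:
    m = re.match(pattern, k)
    if m:
        imls.append(float(m.group(1)))
assert imls == [0.001, 0.01, 0.1]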
Example #9
def export_hcurves_csv(ekey, dstore):
    """
    Exports the hazard curves into several .csv files

    :param ekey: export key, i.e. a pair (datastore key, fmt)
    :param dstore: datastore object
    """
    oq = dstore['oqparam']
    rlzs_assoc = dstore['csm_info'].get_rlzs_assoc()
    sitecol = dstore['sitecol']
    sitemesh = get_mesh(sitecol)
    key, kind, fmt = get_kkf(ekey)
    fnames = []
    if oq.poes:
        pdic = DictArray({imt: oq.poes for imt in oq.imtls})
    for kind, hcurves in calc.PmapGetter(dstore).items(kind):
        fname = hazard_curve_name(dstore, (key, fmt), kind, rlzs_assoc)
        comment = _comment(rlzs_assoc, kind, oq.investigation_time)
        if key == 'uhs' and oq.poes and oq.uniform_hazard_spectra:
            uhs_curves = calc.make_uhs(hcurves, oq.imtls, oq.poes,
                                       len(sitemesh))
            writers.write_csv(fname,
                              util.compose_arrays(sitemesh, uhs_curves),
                              comment=comment)
            fnames.append(fname)
        elif key == 'hmaps' and oq.poes and oq.hazard_maps:
            hmap = calc.make_hmap(hcurves, oq.imtls, oq.poes)
            fnames.extend(
                export_hazard_csv(ekey, fname, sitemesh, hmap, pdic, comment))
        elif key == 'hcurves':
            fnames.extend(
                export_hcurves_by_imt_csv(ekey, kind, rlzs_assoc, fname,
                                          sitecol, hcurves, oq))
    return sorted(fnames)
Example #10
def export_hmaps_xml_json(ekey, dstore):
    key, kind, fmt = get_kkf(ekey)
    oq = dstore['oqparam']
    sitecol = dstore['sitecol']
    sitemesh = get_mesh(sitecol)
    rlzs_assoc = dstore['csm_info'].get_rlzs_assoc()
    fnames = []
    writercls = hazard_writers.HazardMapXMLWriter
    pdic = DictArray({imt: oq.poes for imt in oq.imtls})
    nsites = len(sitemesh)
    for kind, hcurves in PmapGetter(dstore).items():
        hmaps = calc.make_hmap(
            hcurves, oq.imtls, oq.poes).convert(pdic, nsites)
        if kind.startswith('rlz-'):
            rlz = rlzs_assoc.realizations[int(kind[4:])]
            smlt_path = '_'.join(rlz.sm_lt_path)
            gsimlt_path = rlz.gsim_rlz.uid
        else:
            smlt_path = ''
            gsimlt_path = ''
        for imt in oq.imtls:
            for j, poe in enumerate(oq.poes):
                suffix = '-%s-%s' % (poe, imt)
                fname = hazard_curve_name(
                    dstore, ekey, kind + suffix, rlzs_assoc)
                data = [HazardMap(site[0], site[1], _extract(hmap, imt, j))
                        for site, hmap in zip(sitemesh, hmaps)]
                writer = writercls(
                    fname, investigation_time=oq.investigation_time,
                    imt=imt, poe=poe,
                    smlt_path=smlt_path, gsimlt_path=gsimlt_path)
                writer.serialize(data)
                fnames.append(fname)
    return sorted(fnames)
Example #11
 def imtls(self):
     """
     Returns a DictArray with the hazard intensity measure types and
     levels, if given, or the risk ones.
     """
     imtls = getattr(self, 'hazard_imtls', None) or self.risk_imtls
     return DictArray(imtls)
Example #12
    def test01(self):

        fname = gettemp(ampl_func)
        df = read_csv(fname, {'ampcode': ampcode_dt, None: numpy.float64},
                      index='ampcode')
        sitecode = b'A'

        imls_soil = numpy.log(numpy.logspace(-2, 0, num=20))
        self.cmaker.loglevels = ll = DictArray(
            {'PGA': imls_soil, 'SA(1.0)': imls_soil})
        self.cmaker.af = AmplFunction.from_dframe(df)
        self.cmaker.truncation_level = tl = 3

        # The output has shape (1, x, 2): 1 site, x = the number of
        # intensity measure levels times 2 (two IMTs), and 2 GMMs
        tmp = _get_poes(self.meastd, ll, tl)

        # This function is rather slow at the moment
        ctx = unittest.mock.Mock(mag=self.mag, rrup=self.rrup, sids=[0],
                                 sites=dict(ampcode=[sitecode]))
        res = get_poes_site(self.meastd, self.cmaker, ctx)

        if False:
            import matplotlib.pyplot as plt
            plt.plot(numpy.exp(imls_soil), res[0, 0:len(imls_soil), 0], '-o',
                     label='soil')
            plt.plot(numpy.exp(imls_soil), tmp[0, 0:len(imls_soil), 0], '-o',
                     label='rock')
            plt.legend()
            plt.xscale('log')
            plt.yscale('log')
            plt.grid(which='both')
            plt.show()
Example #13
    def setUp(self):
        # simple logic tree with 3 realizations
        #    ___/ b11 (w=.2)
        #  _/   \ b12 (w=.2)
        #   \____ b02 (w=.6)
        self.bs0 = bs0 = lt.BranchSet('abGRAbsolute')
        bs0.branches = [
            lt.Branch('bs0', 'b01', .4, (4.6, 1.1)),
            lt.Branch('bs0', 'b02', .6, (4.4, 0.9))
        ]

        self.bs1 = bs1 = lt.BranchSet('maxMagGRAbsolute')
        bs1.branches = [
            lt.Branch('bs1', 'b11', .5, 7.0),
            lt.Branch('bs1', 'b12', .5, 7.6)
        ]
        bs0.branches[0].bset = bs1

        # setup sitecol, srcfilter, gsims, imtls
        sitecol = site.SiteCollection(
            [site.Site(Point(0, 0), numpy.array([760.]))])
        self.srcfilter = calc.filters.SourceFilter(sitecol, {'default': 200})
        self.gsims = [valid.gsim('ToroEtAl2002')]
        self.imtls = DictArray({'PGA': valid.logscale(.01, 1, 5)})
        self.sg = sourceconverter.SourceGroup(ps.tectonic_region_type, [ps])
Example #14
def calc_hazard_curves(
        groups, ss_filter, imtls, gsim_by_trt, truncation_level=None,
        apply=Sequential.apply):
    """
    Compute hazard curves on a list of sites, given a set of seismic source
    groups and a dictionary of ground shaking intensity models (one per
    tectonic region type).

    The probability of ground motion exceedance is computed differently
    depending on whether the sources are independent or mutually exclusive.

    :param groups:
        A sequence of groups of seismic source objects (instances of
        :class:`~openquake.hazardlib.source.base.BaseSeismicSource`).
    :param ss_filter:
        A source filter over the site collection or the site collection itself
    :param imtls:
        Dictionary mapping intensity measure type strings
        to lists of intensity measure levels.
    :param gsim_by_trt:
        Dictionary mapping tectonic region types (members
        of :class:`openquake.hazardlib.const.TRT`) to
        :class:`~openquake.hazardlib.gsim.base.GMPE` or
        :class:`~openquake.hazardlib.gsim.base.IPE` objects.
    :param truncation_level:
        Float, number of standard deviations for truncation of the intensity
        distribution.
    :param apply:
        The apply function used to process the source groups,
        ``Sequential.apply`` by default
    :returns:
        An array of size N, where N is the number of sites, whose elements
        are records with fields given by the intensity measure types; the
        size of each field is given by the number of levels in ``imtls``.
    """
    # ensure backward compatibility, i.e. support passing a plain list
    # of sources
    if not isinstance(groups[0], SourceGroup):  # sent a list of sources
        dic = groupby(groups, operator.attrgetter('tectonic_region_type'))
        groups = [SourceGroup(trt, dic[trt], 'src_group', 'indep', 'indep')
                  for trt in dic]
    if hasattr(ss_filter, 'sitecol'):  # a filter, as it should be
        sitecol = ss_filter.sitecol
    else:  # backward compatibility, a site collection was passed
        sitecol = ss_filter
        ss_filter = SourceFilter(sitecol, {})

    imtls = DictArray(imtls)
    pmap = ProbabilityMap(len(imtls.array), 1)
    # Processing groups with homogeneous tectonic region
    for group in groups:
        if group.src_interdep == 'mutex':  # do not split the group
            pmap |= pmap_from_grp(
                group, ss_filter, imtls, gsim_by_trt, truncation_level)
        else:  # split the group and apply `pmap_from_grp` in parallel
            pmap |= apply(
                pmap_from_grp,
                (group, ss_filter, imtls, gsim_by_trt, truncation_level),
                weight=operator.attrgetter('weight')).reduce(operator.or_)
    return pmap.convert(imtls, len(sitecol.complete))
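The backward-compatibility branch only groups a flat source list by tectonic region type before wrapping each group in a SourceGroup; the grouping step in isolation (FakeSource and the hand-rolled groupby below are stand-ins for the real classes and for baselib's groupby):

import operator

def groupby(items, keyfunc):  # simplified stand-in for baselib's groupby
    out = {}
    for item in items:
        out.setdefault(keyfunc(item), []).append(item)
    return out

class FakeSource:
    def __init__(self, trt):
        self.tectonic_region_type = trt

sources = [FakeSource('Active Shallow Crust'),
           FakeSource('Stable Shallow Crust'),
           FakeSource('Active Shallow Crust')]
dic = groupby(sources, operator.attrgetter('tectonic_region_type'))
assert len(dic['Active Shallow Crust']) == 2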
Example #15
def ucerf_classical(rupset_idx, ucerf_source, src_filter, gsims, monitor):
    """
    :param rupset_idx:
        indices of the rupture sets
    :param ucerf_source:
        an object taking the place of a source for UCERF
    :param src_filter:
        a source filter returning the sites affected by the source
    :param gsims:
        a list of GSIMs
    :param monitor:
        a monitor instance
    :returns:
        a ProbabilityMap
    """
    t0 = time.time()
    truncation_level = monitor.oqparam.truncation_level
    imtls = monitor.oqparam.imtls
    ucerf_source.src_filter = src_filter  # so that .iter_ruptures() works

    # prefilter the sites close to the rupture set
    with h5py.File(ucerf_source.control.source_file, "r") as hdf5:
        mag = hdf5[ucerf_source.idx_set["mag_idx"]][rupset_idx].max()
        ridx = set()
        # find the combination of rupture sections used in this model
        rup_index_key = "/".join(
            [ucerf_source.idx_set["geol_idx"], "RuptureIndex"])
        # determine which of the rupture sections used in this set of indices
        rup_index = hdf5[rup_index_key]
        for i in rupset_idx:
            ridx.update(rup_index[i])
        s_sites = ucerf_source.get_rupture_sites(hdf5, ridx, src_filter, mag)
        if s_sites is None:  # return an empty probability map
            pm = ProbabilityMap(len(imtls.array), len(gsims))
            pm.calc_times = []  # TODO: fix .calc_times
            pm.eff_ruptures = {ucerf_source.src_group_id: 0}
            pm.grp_id = ucerf_source.src_group_id
            return pm

    # compute the ProbabilityMap by using hazardlib.calc.hazard_curve.poe_map
    ucerf_source.rupset_idx = rupset_idx
    ucerf_source.num_ruptures = nruptures = len(rupset_idx)
    cmaker = ContextMaker(gsims, src_filter.integration_distance)
    imtls = DictArray(imtls)
    ctx_mon = monitor('making contexts', measuremem=False)
    pne_mons = [
        monitor('%s.get_poes' % gsim, measuremem=False) for gsim in gsims
    ]
    pmap = poe_map(ucerf_source, s_sites, imtls, cmaker, truncation_level,
                   ctx_mon, pne_mons)
    nsites = len(s_sites)
    pmap.calc_times = [(ucerf_source.source_id, nruptures * nsites, nsites,
                        time.time() - t0)]
    pmap.grp_id = ucerf_source.src_group_id
    pmap.eff_ruptures = {pmap.grp_id: ucerf_source.num_ruptures}
    return pmap
Example #16
def ucerf_classical(rupset_idx, ucerf_source, src_filter, gsims, monitor):
    """
    :param rupset_idx:
        indices of the rupture sets
    :param ucerf_source:
        an object taking the place of a source for UCERF
    :param src_filter:
        a source filter returning the sites affected by the source
    :param gsims:
        a list of GSIMs
    :param monitor:
        a monitor instance
    :returns:
        a ProbabilityMap
    """
    t0 = time.time()
    truncation_level = monitor.oqparam.truncation_level
    imtls = monitor.oqparam.imtls
    ucerf_source.src_filter = src_filter  # so that .iter_ruptures() works
    grp_id = ucerf_source.src_group_id
    mag = ucerf_source.mags[rupset_idx].max()
    ridx = set()
    for idx in rupset_idx:
        ridx.update(ucerf_source.get_ridx(idx))
    ucerf_source.rupset_idx = rupset_idx
    ucerf_source.num_ruptures = nruptures = len(rupset_idx)

    # prefilter the sites close to the rupture set
    s_sites = ucerf_source.get_rupture_sites(ridx, src_filter, mag)
    if s_sites is None:  # return an empty probability map
        pm = ProbabilityMap(len(imtls.array), len(gsims))
        acc = AccumDict({grp_id: pm})
        acc.calc_times = {
            ucerf_source.source_id:
            numpy.array([nruptures, 0, time.time() - t0, 1])
        }
        acc.eff_ruptures = {grp_id: 0}
        return acc

    # compute the ProbabilityMap
    cmaker = ContextMaker(gsims, src_filter.integration_distance)
    imtls = DictArray(imtls)
    ctx_mon = monitor('make_contexts', measuremem=False)
    poe_mon = monitor('get_poes', measuremem=False)
    pmap = cmaker.poe_map(ucerf_source, s_sites, imtls, truncation_level,
                          ctx_mon, poe_mon)
    nsites = len(s_sites)
    acc = AccumDict({grp_id: pmap})
    acc.calc_times = {
        ucerf_source.source_id:
        numpy.array([nruptures * nsites, nsites,
                     time.time() - t0, 1])
    }
    acc.eff_ruptures = {grp_id: ucerf_source.num_ruptures}
    return acc
Example #17
def pmap_from_grp(sources,
                  source_site_filter,
                  imtls,
                  gsims,
                  truncation_level=None,
                  bbs=(),
                  monitor=Monitor()):
    """
    Compute the hazard curves for a set of sources belonging to the same
    tectonic region type for all the GSIMs associated to that TRT.
    The arguments are the same as in :func:`calc_hazard_curves`, except
    for ``gsims``, which is a list of GSIM instances.

    :returns: a ProbabilityMap instance
    """
    if isinstance(sources, SourceGroup):
        group = sources
        sources = group.sources
        trt = sources[0].tectonic_region_type
    else:  # list of sources
        trt = sources[0].tectonic_region_type
        group = SourceGroup(trt, sources, 'src_group', 'indep', 'indep')
    try:
        maxdist = source_site_filter.integration_distance[trt]
    except (KeyError, TypeError):  # no per-TRT integration distance
        maxdist = source_site_filter.integration_distance
    if hasattr(gsims, 'keys'):  # dictionary trt -> gsim
        gsims = [gsims[trt]]
    with GroundShakingIntensityModel.forbid_instantiation():
        imtls = DictArray(imtls)
        cmaker = ContextMaker(gsims, maxdist)
        ctx_mon = monitor('making contexts', measuremem=False)
        pne_mon = monitor('computing poes', measuremem=False)
        disagg_mon = monitor('get closest points', measuremem=False)
        src_indep = group.src_interdep == 'indep'
        pmap = ProbabilityMap(len(imtls.array), len(gsims))
        pmap.calc_times = []  # pairs (src_id, delta_t)
        pmap.grp_id = sources[0].src_group_id
        for src, s_sites in source_site_filter(sources):
            t0 = time.time()
            poemap = poe_map(src, s_sites, imtls, cmaker, truncation_level,
                             bbs, group.rup_interdep == 'indep', ctx_mon,
                             pne_mon, disagg_mon)
            if src_indep:  # usual composition of probabilities
                pmap |= poemap
            else:  # mutually exclusive probabilities
                weight = float(group.srcs_weights[src.source_id])
                for sid in poemap:
                    pmap[sid] += poemap[sid] * weight
            pmap.calc_times.append(
                (src.source_id, len(s_sites), time.time() - t0))
        # storing the number of contributing ruptures too
        pmap.eff_ruptures = {pmap.grp_id: pne_mon.counts}
        return pmap
Example #18
 def setUp(self):
     testfile = os.path.join(DATA, 'source_group_cluster.xml')
     sc = SourceConverter(area_source_discretization=10.,
                          investigation_time=1.)
     # This provides a SourceModel
     self.sg = getattr(nrml.to_python(testfile, sc), 'src_groups')
     self.imtls = DictArray({'PGA': [0.01, 0.1, 0.2, 0.3, 1.0]})
     gsim = SadighEtAl1997()
     self.gsim_by_trt = {TRT.ACTIVE_SHALLOW_CRUST: gsim}
     site = Site(Point(1.0, -0.1), 800, z1pt0=30., z2pt5=1.)
     self.sites = SiteCollection([site])
Example #19
def export_hmaps_npz(ekey, dstore):
    oq = dstore['oqparam']
    mesh = get_mesh(dstore['sitecol'])
    pdic = DictArray({imt: oq.poes for imt in oq.imtls})
    fname = dstore.export_path('%s.%s' % ekey)
    dic = {}
    for kind, hcurves in calc.PmapGetter(dstore).items():
        hmap = calc.make_hmap(hcurves, oq.imtls, oq.poes)
        dic[kind] = convert_to_array(hmap, mesh, pdic)
    savez(fname, **dic)
    return [fname]
Example #20
 def test(self):
     source_model = os.path.join(os.path.dirname(__file__), 'nankai.xml')
     groups = nrml.parse(source_model, SourceConverter(
         investigation_time=50., rupture_mesh_spacing=2.))
     site = Site(Point(135.68, 35.68), 800, True, z1pt0=100., z2pt5=1.)
     s_filter = SourceFilter(SiteCollection([site]), None)
     imtls = DictArray({'PGV': [20, 40, 80]})
     gsim_by_trt = {'Subduction Interface': SiMidorikawa1999SInter()}
     hcurves = calc_hazard_curves_ext(groups, s_filter, imtls, gsim_by_trt)
     npt.assert_almost_equal([0.91149953, 0.12548556, 0.00177583],
                             hcurves['PGV'][0])
Example #21
 def test(self):
     source_model = os.path.join(os.path.dirname(__file__), 'nankai.xml')
     groups = nrml.to_python(source_model, SourceConverter(
         investigation_time=50., rupture_mesh_spacing=2.))
     site = Site(Point(135.68, 35.68), 800, True, z1pt0=100., z2pt5=1.)
     s_filter = SourceFilter(SiteCollection([site]), {})
     imtls = DictArray({'PGV': [20, 40, 80]})
     gsim_by_trt = {'Subduction Interface': SiMidorikawa1999SInter()}
     hcurves = calc_hazard_curves(groups, s_filter, imtls, gsim_by_trt)
     npt.assert_almost_equal(
         [1.1262869e-01, 3.9968668e-03, 3.1005840e-05],
         hcurves['PGV'][0])
Example #22
def export_hmaps_np(ekey, dstore):
    oq = dstore['oqparam']
    sitecol = dstore['sitecol']
    mesh = get_mesh(sitecol)
    pdic = DictArray({imt: oq.poes for imt in oq.imtls})
    fname = dstore.export_path('%s.%s' % ekey)
    dic = {}
    for kind, hcurves in PmapGetter(dstore).items():
        hmap = calc.make_hmap(hcurves, oq.imtls, oq.poes)
        dic[kind] = calc.convert_to_array(hmap, len(mesh), pdic)
    save_np(fname, dic, mesh, ('vs30', F32, sitecol.vs30),
            investigation_time=oq.investigation_time)
    return [fname]
Example #23
 def setUp(self):
     self.src1 = _create_non_param_sourceA(15., 6.3,
                                           PMF([(0.6, 0), (0.4, 1)]))
     self.src2 = _create_non_param_sourceA(10., 6.0,
                                           PMF([(0.7, 0), (0.3, 1)]))
     self.src3 = _create_non_param_sourceA(10., 6.0,
                                           PMF([(0.7, 0), (0.3, 1)]),
                                           "Geothermal")
     site = Site(Point(0.0, 0.0), 800, z1pt0=100., z2pt5=1.)
     self.sites = SiteCollection([site])
     self.imtls = DictArray({'PGA': [0.01, 0.1, 0.3]})
     gsim = SadighEtAl1997()
     self.gsim_by_trt = {"Active Shallow Crust": gsim}
Example #24
 def setUp(self):
     self.src1 = _create_non_param_sourceA(15., 6.3,
                                           PMF([(0.6, 0), (0.4, 1)]))
     self.src2 = _create_non_param_sourceA(10., 6.0,
                                           PMF([(0.7, 0), (0.3, 1)]))
     self.src3 = _create_non_param_sourceA(10., 6.0,
                                           PMF([(0.7, 0), (0.3, 1)]),
                                           TRT.GEOTHERMAL)
     site = Site(Point(0.0, 0.0), 800, True, z1pt0=100., z2pt5=1.)
     s_filter = SourceFilter(SiteCollection([site]), {})
     self.sites = s_filter
     self.imtls = DictArray({'PGA': [0.01, 0.1, 0.3]})
     self.gsim_by_trt = {TRT.ACTIVE_SHALLOW_CRUST: SadighEtAl1997()}
Example #25
def extract_hmaps(dstore, what):
    """
    Extracts hazard maps. Use it as /extract/hmaps/mean or
    /extract/hmaps/rlz-0, etc
    """
    oq = dstore['oqparam']
    sitecol = dstore['sitecol']
    mesh = get_mesh(sitecol)
    pdic = DictArray({imt: oq.poes for imt in oq.imtls})
    dic = {}
    for kind, hcurves in getters.PmapGetter(dstore).items(what):
        hmap = calc.make_hmap(hcurves, oq.imtls, oq.poes)
        dic[kind] = calc.convert_to_array(hmap, len(mesh), pdic)
    return hazard_items(dic, mesh, investigation_time=oq.investigation_time)
Example #26
 def _parse_header(self, header):
     fields = []  # pairs (name, dtype), for instance ('PGA', F32)
     cols = []  # pairs (name, float), for instance ('PGA', 0.1)
     for col in header:
         if '-' in col:  # for instance PGA-0.1
             cols.append(col.split('-', 1))
         else:  # for lon and lat
             fields.append((col, F32))
     imtls = {}
     for imt, imls in groupby(cols, operator.itemgetter(0),
                              lambda g: [F32(r[1]) for r in g]).items():
         fields.append((imt, (F32, len(imls))))
         imtls[imt] = imls
     return DictArray(imtls), fields
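What _parse_header builds, in miniature: IMT-level columns such as PGA-0.1 are split once on '-' and grouped by IMT (header names invented for the example):

header = ['lon', 'lat', 'PGA-0.1', 'PGA-0.2', 'SA(1.0)-0.1']
imtls = {}
for col in header:
    if '-' in col:  # an IMT-level column
        imt, iml = col.split('-', 1)
        imtls.setdefault(imt, []).append(float(iml))
assert imtls == {'PGA': [0.1, 0.2], 'SA(1.0)': [0.1]}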
Example #27
 def __init__(self, trt, gsims, param=None, monitor=Monitor()):
     param = param or {}
     self.max_sites_disagg = param.get('max_sites_disagg', 10)
     self.trt = trt
     self.gsims = gsims
     self.maximum_distance = (
         param.get('maximum_distance') or IntegrationDistance({}))
     self.trunclevel = param.get('truncation_level')
     for req in self.REQUIRES:
         reqset = set()
         for gsim in gsims:
             reqset.update(getattr(gsim, 'REQUIRES_' + req))
         setattr(self, 'REQUIRES_' + req, reqset)
     self.collapse_factor = param.get('collapse_factor', 3)
     self.max_radius = param.get('max_radius')
     self.pointsource_distance = param.get('pointsource_distance')
     filter_distance = param.get('filter_distance')
     if filter_distance is None:
         if 'rrup' in self.REQUIRES_DISTANCES:
             filter_distance = 'rrup'
         elif 'rjb' in self.REQUIRES_DISTANCES:
             filter_distance = 'rjb'
         else:
             filter_distance = 'rrup'
     self.filter_distance = filter_distance
     self.imtls = param.get('imtls', {})
     self.imts = [imt_module.from_string(imt) for imt in self.imtls]
     self.reqv = param.get('reqv')
     self.REQUIRES_DISTANCES.add(self.filter_distance)
     if self.reqv is not None:
         self.REQUIRES_DISTANCES.add('repi')
     if hasattr(gsims, 'items'):
         # gsims is actually a dict rlzs_by_gsim
         # since the ContextMaker must be used on ruptures with the
         # same TRT, given a realization there is a single gsim
         self.gsim_by_rlzi = {}
         for gsim, rlzis in gsims.items():
             for rlzi in rlzis:
                 self.gsim_by_rlzi[rlzi] = gsim
     self.ctx_mon = monitor('make_contexts', measuremem=False)
     self.poe_mon = monitor('get_poes', measuremem=False)
     self.pne_mon = monitor('composing pnes', measuremem=False)
     self.gmf_mon = monitor('computing mean_std', measuremem=False)
     self.loglevels = DictArray(self.imtls)
     with warnings.catch_warnings():
         # avoid RuntimeWarning: divide by zero encountered in log
         warnings.simplefilter("ignore")
         for imt, imls in self.imtls.items():
             self.loglevels[imt] = numpy.log(imls)
Example #28
 def test_two_sites(self):
     site1 = Site(Point(0, 0), vs30=760., z1pt0=48.0, z2pt5=0.607,
                  vs30measured=True)
     site2 = Site(Point(0, 0.5), vs30=760., z1pt0=48.0, z2pt5=0.607,
                  vs30measured=True)
     sitecol = SiteCollection([site1, site2])
     srcfilter = SourceFilter(sitecol, IntegrationDistance.new('200'))
     imtls = {"PGA": [.123]}
     for period in numpy.arange(.1, .5, .1):
         imtls['SA(%.2f)' % period] = [.123]
     assert len(imtls) == 5  # PGA plus 4 SA periods
     gsim_by_trt = {'Stable Continental Crust': ExampleA2021()}
     hcurves = calc_hazard_curves(
         [asource], srcfilter, DictArray(imtls), gsim_by_trt)
     print(hcurves)
Example #29
 def test(self):
     # mutually exclusive ruptures
     d = os.path.dirname(os.path.dirname(__file__))
     tmps = 'nonparametric-source-mutex-ruptures.xml'
     source_model = os.path.join(d, 'source_model', tmps)
     groups = nrml.to_python(source_model, SourceConverter(
         investigation_time=50., rupture_mesh_spacing=2.))
     site = Site(Point(143.5, 39.5), 800, z1pt0=100., z2pt5=1.)
     sitecol = SiteCollection([site])
     imtls = DictArray({'PGA': [0.01, 0.1, 0.2, 0.5]})
     gsim_by_trt = {'Some TRT': Campbell2003()}
     hcurves = calc_hazard_curves(groups, sitecol, imtls, gsim_by_trt)
     # expected results obtained with an ipython notebook
     expected = [4.3998728e-01, 1.1011728e-01, 7.5495312e-03, 8.5812844e-06]
     npt.assert_almost_equal(hcurves['PGA'][0], expected)
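As Example #17 shows with `poemap[sid] * weight`, mutually exclusive components are combined as a weighted sum of the individual curves rather than as a product of non-exceedance probabilities; schematically, with invented numbers:

import numpy

curves = [numpy.array([0.44, 0.11]), numpy.array([0.20, 0.05])]
weights = [0.7, 0.3]
mutex = sum(w * c for w, c in zip(weights, curves))      # mutually exclusive
indep = 1 - numpy.prod([1 - c for c in curves], axis=0)  # independent case
print(mutex, indep)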
Example #30
 def test_make_pmap(self):
     trunclevel = 3
     imtls = DictArray({'PGA': [0.01]})
     gsims = [valid.gsim('AkkarBommer2010')]
     ctxs = []
     for occ_rate in (.001, .002):
         ctx = RuptureContext()
         ctx.mag = 5.5
         ctx.rake = 90
         ctx.occurrence_rate = occ_rate
         ctx.sids = numpy.array([0.])
         ctx.vs30 = numpy.array([760.])
         ctx.rrup = numpy.array([100.])
         ctx.rjb = numpy.array([99.])
         ctxs.append(ctx)
     pmap = make_pmap(ctxs, gsims, imtls, trunclevel, 50.)
     numpy.testing.assert_almost_equal(pmap[0].array, 0.066381)
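The value checked above follows the standard Poissonian composition: a rupture with occurrence rate lam over investigation time T contributes a non-exceedance factor exp(-lam * T * P(exc|rup)), and independent contributions multiply. A schematic sketch with placeholder conditional probabilities (the real ones come from the GSIM):

import numpy

def poisson_poe(rates_and_poes, time_span):
    # P(at least one exceedance) = 1 - prod_i exp(-rate_i * T * poe_i)
    pne = numpy.prod([numpy.exp(-rate * time_span * poe)
                      for rate, poe in rates_and_poes])
    return 1 - pne

print(poisson_poe([(0.001, 0.9), (0.002, 0.8)], 50.))  # placeholder poes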