Example no. 1
 def compute_gmfs_curves(self, monitor):
     """
     :returns: a dict with keys gmfdata, hcurves and, when GMFs are computed, times and sig_eps
     """
     oq = self.oqparam
     mon = monitor('getting ruptures', measuremem=True)
     hcurves = {}  # key -> poes
     if oq.hazard_curves_from_gmfs:
         hc_mon = monitor('building hazard curves', measuremem=False)
         gmfdata = self.get_gmfdata(mon)  # returned later
         if len(gmfdata) == 0:
             return dict(gmfdata=(), hcurves=hcurves)
         for (sid, rlz), df in gmfdata.groupby(['sid', 'rlz']):
             with hc_mon:
                 poes = calc.gmvs_to_poes(
                     df, oq.imtls, oq.ses_per_logic_tree_path)
                 for m, imt in enumerate(oq.imtls):
                     hcurves[rsi2str(rlz, sid, imt)] = poes[m]
     if not oq.ground_motion_fields:
         return dict(gmfdata=(), hcurves=hcurves)
     if not oq.hazard_curves_from_gmfs:
         gmfdata = self.get_gmfdata(mon)
     if len(gmfdata) == 0:
         return dict(gmfdata=[])
     times = numpy.array([tup + (monitor.task_no,) for tup in self.times],
                         time_dt)
     times.sort(order='rup_id')
     res = dict(gmfdata=strip_zeros(gmfdata), hcurves=hcurves, times=times,
                sig_eps=numpy.array(self.sig_eps, self.sig_eps_dt))
     return res
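
The hazard-curve branch above delegates the GMV-to-PoE conversion to calc.gmvs_to_poes, applied to the GMFs of one (site, realization) pair at a time. The snippet below is a minimal self-contained sketch of that conversion, assuming a Poissonian count of exceedances over the stochastic event sets; the name gmvs_to_poes_sketch and the formula are illustrative, not the engine's actual implementation.

    import numpy

    def gmvs_to_poes_sketch(gmvs, imls, num_ses):
        # count how many simulated GMVs reach or exceed each intensity level
        n_exceed = numpy.array([(gmvs >= iml).sum() for iml in imls])
        # turn the exceedance rate over num_ses event sets into a probability
        # of exceedance under a Poissonian assumption
        return 1. - numpy.exp(-n_exceed / num_ses)

    # usage: 1000 simulated PGA values checked against three intensity levels
    rng = numpy.random.default_rng(42)
    gmvs = rng.lognormal(mean=-2.0, sigma=0.6, size=1000)
    print(gmvs_to_poes_sketch(gmvs, imls=[0.05, 0.1, 0.2], num_ses=1000))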
Example no. 2
 def compute_gmfs_curves(self, monitor):
     """
     :returns: a dict with keys gmfdata, hcurves and, when GMFs are computed, times and sig_eps
     """
     oq = self.oqparam
     mon = monitor('getting ruptures', measuremem=True)
     hcurves = {}  # key -> poes
     if oq.hazard_curves_from_gmfs:
         hc_mon = monitor('building hazard curves', measuremem=False)
         gmfdata = self.get_gmfdata(mon)  # returned later
         hazard = self.get_hazard_by_sid(data=gmfdata)
         for sid, hazardr in hazard.items():
             dic = general.group_array(hazardr, 'rlz')
             for rlzi, array in dic.items():
                 with hc_mon:
                     poes = gmvs_to_poes(
                         array['gmv'].T, oq.imtls,
                         oq.ses_per_logic_tree_path)
                     for m, imt in enumerate(oq.imtls):
                         hcurves[rsi2str(rlzi, sid, imt)] = poes[m]
     if not oq.ground_motion_fields:
         return dict(gmfdata=(), hcurves=hcurves)
     if not oq.hazard_curves_from_gmfs:
         gmfdata = self.get_gmfdata(mon)
     if len(gmfdata) == 0:
         return dict(gmfdata=[])
     times = numpy.array([tup + (monitor.task_no,) for tup in self.times],
                         time_dt)
     times.sort(order='rup_id')
     res = dict(gmfdata=gmfdata, hcurves=hcurves, times=times,
                sig_eps=numpy.array(self.sig_eps, self.sig_eps_dt))
     return res
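
Example no. 2 differs from the first in grouping the hazard by site and then by realization with general.group_array over structured arrays, instead of a pandas groupby. The helper below sketches that kind of field-based grouping on a plain numpy structured array; group_by_field is a hypothetical stand-in, not the engine's general.group_array.

    import numpy

    def group_by_field(arr, field):
        # group the rows of a structured array by the values of one field
        return {value: arr[arr[field] == value]
                for value in numpy.unique(arr[field])}

    records = numpy.array([(0, 0.12), (1, 0.30), (0, 0.08)],
                          dtype=[('rlz', 'u4'), ('gmv', 'f4')])
    for rlz, rows in group_by_field(records, 'rlz').items():
        print(rlz, rows['gmv'])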
Example no. 3
    def test_case_1(self):
        out = self.run_calc(case_1.__file__, 'job.ini', exports='csv,xml')

        # make sure ses_id >= 65536 is valid
        high_ses = (self.calc.datastore['events']['ses_id'] >= 65536).sum()
        self.assertGreater(high_ses, 1000)

        [fname] = out['ruptures', 'xml']  # just check that it exists

        [fname] = export(('hcurves', 'csv'), self.calc.datastore)
        self.assertEqualFiles('expected/hazard_curve-smltp_b1-gsimltp_b1.csv',
                              fname)

        export(('hcurves', 'xml'), self.calc.datastore)  # check it works

        [fname] = out['hcurves', 'xml']
        self.assertEqualFiles(
            'expected/hazard_curve-smltp_b1-gsimltp_b1-PGA.xml', fname)

        # compute hcurves in post-processing and compare with the in-process ones
        # take advantage of the fact that there is a single site
        df = self.calc.datastore.read_df('gmf_data', 'sid')
        dt = self.calc.datastore['oqparam'].gmf_data_dt()
        gmvs = numpy.zeros(len(df), dt)
        gmvs['sid'] = 0
        gmvs['eid'] = df.eid.to_numpy()
        for col in df.columns:
            if col.startswith('gmv_'):
                gmvs[col] = df[col].to_numpy()
        oq = self.calc.datastore['oqparam']
        poes = gmvs_to_poes(gmvs, oq.imtls, oq.ses_per_logic_tree_path)
        hcurve = self.calc.datastore['hcurves-stats'][0, 0]  # shape (M, L)
        aae(poes, hcurve)

        # test gsim_by_imt
        out = self.run_calc(case_1.__file__,
                            'job.ini',
                            ses_per_logic_tree_path='30',
                            gsim_logic_tree_file='gsim_by_imt_logic_tree.xml',
                            exports='csv')

        # testing event_info
        einfo = dict(extract(self.calc.datastore, 'event_info/0'))
        self.assertEqual(einfo['trt'], 'active shallow crust')
        self.assertEqual(einfo['rupture_class'],
                         'ParametricProbabilisticRupture')
        self.assertEqual(einfo['surface_class'], 'PlanarSurface')
        self.assertEqual(einfo['seed'], 73073755)
        self.assertEqual(
            str(einfo['gsim']), '[MultiGMPE."PGA".AkkarBommer2010]\n'
            '[MultiGMPE."SA(0.1)".SadighEtAl1997]')
        self.assertEqual(einfo['rlzi'], 0)
        self.assertEqual(einfo['et_id'], 0)
        aae(einfo['occurrence_rate'], 0.6)
        aae(einfo['hypo'], [0., 0., 4.])

        [fname, _, _] = out['gmf_data', 'csv']
        self.assertEqualFiles('expected/gsim_by_imt.csv', fname)
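
The post-processing check in this test rebuilds a structured array with the gmf_data dtype from the exported DataFrame before feeding it to gmvs_to_poes. A stripped-down version of that pattern, with an illustrative dtype and data rather than the datastore's actual layout, could look like this:

    import numpy
    import pandas

    # hypothetical two-column gmf_data frame for a single-site calculation
    df = pandas.DataFrame({'eid': [0, 1, 2], 'gmv_0': [0.11, 0.05, 0.27]})

    # rebuild the structured array expected by the post-processing step
    dt = numpy.dtype([('sid', 'u4'), ('eid', 'u4'), ('gmv_0', 'f4')])
    gmvs = numpy.zeros(len(df), dt)
    gmvs['sid'] = 0                      # there is only one site
    gmvs['eid'] = df.eid.to_numpy()
    for col in df.columns:
        if col.startswith('gmv_'):       # copy every ground-motion column
            gmvs[col] = df[col].to_numpy()
    print(gmvs)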
Example no. 4
    def test_case_1(self):
        out = self.run_calc(case_1.__file__, 'job.ini', exports='csv,xml')

        etime = self.calc.datastore.get_attr('gmf_data', 'effective_time')
        self.assertEqual(etime, 80000.)  # ses_per_logic_tree_path = 80000
        imts = self.calc.datastore.get_attr('gmf_data', 'imts')
        self.assertEqual(imts, 'PGA')
        self.check_avg_gmf()

        # make sure ses_id >= 65536 is valid
        high_ses = (self.calc.datastore['events']['ses_id'] >= 65536).sum()
        self.assertGreater(high_ses, 1000)

        [fname] = out['ruptures', 'xml']  # just check that it exists

        [fname] = export(('hcurves', 'csv'), self.calc.datastore)
        self.assertEqualFiles('expected/hazard_curve-smltp_b1-gsimltp_b1.csv',
                              fname)

        export(('hcurves', 'xml'), self.calc.datastore)  # check it works

        [fname] = out['hcurves', 'xml']
        self.assertEqualFiles(
            'expected/hazard_curve-smltp_b1-gsimltp_b1-PGA.xml', fname)

        # compute hcurves in post-processing and compare with the in-process ones
        # take advantage of the fact that there is a single site
        df = self.calc.datastore.read_df('gmf_data', 'sid')
        oq = self.calc.datastore['oqparam']
        poes = gmvs_to_poes(df, oq.imtls, oq.ses_per_logic_tree_path)
        hcurve = self.calc.datastore['hcurves-stats'][0, 0]  # shape (M, L)
        aac(poes, hcurve)

        # test gsim_by_imt
        out = self.run_calc(case_1.__file__,
                            'job.ini',
                            ses_per_logic_tree_path='30',
                            gsim_logic_tree_file='gsim_by_imt_logic_tree.xml',
                            exports='csv')

        # testing event_info
        einfo = dict(extract(self.calc.datastore, 'event_info/0'))
        self.assertEqual(einfo['trt'], 'active shallow crust')
        self.assertEqual(einfo['rupture_class'],
                         'ParametricProbabilisticRupture')
        self.assertEqual(einfo['surface_class'], 'PlanarSurface')
        self.assertEqual(einfo['seed'], 1483155045)
        self.assertEqual(
            str(einfo['gsim']), '[MultiGMPE."PGA".AkkarBommer2010]\n'
            '[MultiGMPE."SA(0.1)".SadighEtAl1997]')
        self.assertEqual(einfo['rlzi'], 0)
        self.assertEqual(einfo['trt_smrlz'], 0)
        aac(einfo['occurrence_rate'], 0.6)
        aac(einfo['hypo'], [0., 0., 4.])

        [fname, _, _] = out['gmf_data', 'csv']
        self.assertEqualFiles('expected/gsim_by_imt.csv', fname)
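
Compared with Example no. 3, this variant also verifies the effective_time and imts attributes stored on the gmf_data dataset. The sketch below shows how such metadata can be written and read back as HDF5 attributes with h5py; the file name and dataset layout are illustrative, not the engine's datastore.

    import h5py
    import numpy

    with h5py.File('demo.hdf5', 'w') as f:
        dset = f.create_dataset('gmf_data', data=numpy.zeros(3))
        dset.attrs['effective_time'] = 80000.   # e.g. ses_per_logic_tree_path
        dset.attrs['imts'] = 'PGA'
    with h5py.File('demo.hdf5', 'r') as f:
        attrs = f['gmf_data'].attrs
        print(attrs['effective_time'], attrs['imts'])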
Example no. 5
def event_based(proxies, full_lt, oqparam, dstore, monitor):
    """
    Compute GMFs and optionally hazard curves
    """
    alldata = AccumDict(accum=[])
    sig_eps = []
    times = []  # rup_id, nsites, dt
    hcurves = {}  # key -> poes
    trt_smr = proxies[0]['trt_smr']
    fmon = monitor('filtering ruptures', measuremem=False)
    cmon = monitor('computing gmfs', measuremem=False)
    with dstore:
        trt = full_lt.trts[trt_smr // len(full_lt.sm_rlzs)]
        srcfilter = SourceFilter(dstore['sitecol'],
                                 oqparam.maximum_distance(trt))
        rupgeoms = dstore['rupgeoms']
        rlzs_by_gsim = full_lt._rlzs_by_gsim(trt_smr)
        param = vars(oqparam).copy()
        param['imtls'] = oqparam.imtls
        param['min_iml'] = oqparam.min_iml
        param['maximum_distance'] = oqparam.maximum_distance(trt)
        cmaker = ContextMaker(trt, rlzs_by_gsim, param)
        min_mag = getdefault(oqparam.minimum_magnitude, trt)
        for proxy in proxies:
            t0 = time.time()
            with fmon:
                if proxy['mag'] < min_mag:
                    continue
                sids = srcfilter.close_sids(proxy, trt)
                if len(sids) == 0:  # filtered away
                    continue
                proxy.geom = rupgeoms[proxy['geom_id']]
                ebr = proxy.to_ebr(cmaker.trt)  # after the geometry is set
                try:
                    computer = GmfComputer(ebr,
                                           srcfilter.sitecol.filtered(sids),
                                           cmaker, oqparam.correl_model,
                                           oqparam.cross_correl,
                                           oqparam._amplifier,
                                           oqparam._sec_perils)
                except FarAwayRupture:
                    continue
            with cmon:
                data = computer.compute_all(sig_eps)
            dt = time.time() - t0
            times.append((computer.ebrupture.id, len(computer.ctx.sids), dt))
            for key in data:
                alldata[key].extend(data[key])
    for key, val in sorted(alldata.items()):
        if key in 'eid sid rlz':  # integer id columns
            alldata[key] = U32(alldata[key])
        else:
            alldata[key] = F32(alldata[key])
    gmfdata = strip_zeros(pandas.DataFrame(alldata))
    if len(gmfdata) and oqparam.hazard_curves_from_gmfs:
        hc_mon = monitor('building hazard curves', measuremem=False)
        for (sid, rlz), df in gmfdata.groupby(['sid', 'rlz']):
            with hc_mon:
                poes = calc.gmvs_to_poes(df, oqparam.imtls,
                                         oqparam.ses_per_logic_tree_path)
                for m, imt in enumerate(oqparam.imtls):
                    hcurves[rsi2str(rlz, sid, imt)] = poes[m]
    times = numpy.array([tup + (monitor.task_no, ) for tup in times], time_dt)
    times.sort(order='rup_id')
    if not oqparam.ground_motion_fields:
        gmfdata = ()
    return dict(gmfdata=gmfdata,
                hcurves=hcurves,
                times=times,
                sig_eps=numpy.array(sig_eps, sig_eps_dt(oqparam.imtls)))
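
The task accumulates the per-rupture GMF columns in an AccumDict of lists, casts the id columns to 32-bit integers and everything else to 32-bit floats, and only then builds the gmfdata DataFrame. A minimal sketch of the same pattern with a plain defaultdict (the data and column names are illustrative, not the engine's AccumDict output):

    from collections import defaultdict
    import numpy
    import pandas

    U32 = numpy.uint32
    F32 = numpy.float32

    # accumulate per-column GMF lists rupture by rupture
    alldata = defaultdict(list)
    for data in ({'sid': [0, 1], 'eid': [7, 7], 'gmv_0': [0.12, 0.03]},
                 {'sid': [0], 'eid': [9], 'gmv_0': [0.25]}):
        for key, vals in data.items():
            alldata[key].extend(vals)

    # cast id columns to uint32, ground-motion columns to float32
    for key, val in alldata.items():
        alldata[key] = U32(val) if key in ('eid', 'sid', 'rlz') else F32(val)
    print(pandas.DataFrame(alldata))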