def test_case_7a(self):  # case with preimported exposure
    # run the hazard part, then the risk part on top of it
    self.run_calc(case_7a.__file__, 'job_h.ini')
    self.run_calc(case_7a.__file__, 'job_r.ini',
                  hazard_calculation_id=str(self.calc.datastore.calc_id))

    # event loss table export
    [elt_fname] = export(('risk_by_event', 'csv'), self.calc.datastore)
    self.assertEqualFiles('expected/agg_losses.csv', elt_fname, delta=1E-4)
    elt_rup_ids = set(read_csv(elt_fname, {None: '<S50'})['rup_id'])

    # aggregate curves export
    [curves_fname] = export(('agg_curves-rlzs', 'csv'), self.calc.datastore)
    self.assertEqualFiles('expected/agg_curves.csv', curves_fname, delta=1E-4)

    # check that the IDs in risk_by_event.csv exist in ruptures.csv
    # this is using extract/rupture_info internally
    [rup_fname] = export(('ruptures', 'csv'), self.calc.datastore)
    exported_rup_ids = set(read_csv(rup_fname, {None: '<S50'})['rup_id'])
    self.assertTrue(elt_rup_ids <= exported_rup_ids,
                    'There are non-existing rupture IDs'
                    ' in the event loss table!')

    # check that the exported ruptures can be re-imported
    text = extract(self.calc.datastore, 'ruptures').array
    rups = get_ruptures(gettemp(text))
    aac(rups['n_occ'], [1, 1, 1, 1])
def test_case_29(self):  # non parametric source with 2 KiteSurfaces
    # Runs the source first in event_based mode to verify the rupture
    # export/import round trip and the QGIS-facing boundary geometry,
    # then reruns it as a classical calculation.

    # first test that the exported ruptures can be re-imported
    self.run_calc(case_29.__file__, 'job.ini',
                  calculation_mode='event_based',
                  ses_per_logic_tree_path='10')
    csv = extract(self.calc.datastore, 'ruptures').array
    rups = get_ruptures(general.gettemp(csv))
    self.assertEqual(len(rups), 1)

    # check what QGIS will be seeing
    # (boundaries are stored gzipped; decode to a WKT multipolygon string)
    aw = extract(self.calc.datastore, 'rupture_info')
    poly = gzip.decompress(aw.boundaries).decode('ascii')
    self.assertEqual(poly, '''POLYGON((0.17961 0.00000, 0.13492 0.00000, 0.08980 0.00000, 0.04512 0.00000, 0.00000 0.00000, 0.00000 0.04054, 0.00000 0.08109, 0.00000 0.12163, 0.00000 0.16217, 0.00000 0.20272, 0.00000 0.24326, 0.00000 0.28381, 0.04512 0.28381, 0.08980 0.28381, 0.13492 0.28381, 0.17961 0.28381, 0.17961 0.24326, 0.17961 0.20272, 0.17961 0.16217, 0.17961 0.12163, 0.17961 0.08109, 0.17961 0.04054, 0.17961 0.00000, 0.17961 0.10000, 0.13492 0.10000, 0.08980 0.10000, 0.04512 0.10000, 0.00000 0.10000, 0.00000 0.14054, 0.00000 0.18109, 0.00000 0.22163, 0.00000 0.26217, 0.00000 0.30272, 0.00000 0.34326, 0.00000 0.38381, 0.04512 0.38381, 0.08980 0.38381, 0.13492 0.38381, 0.17961 0.38381, 0.17961 0.34326, 0.17961 0.30272, 0.17961 0.26217, 0.17961 0.22163, 0.17961 0.18109, 0.17961 0.14054, 0.17961 0.10000))''')

    # then perform a classical calculation
    self.assert_curves_ok(['hazard_curve-PGA.csv'], case_29.__file__)
def _get_ebruptures(fname, conv=None, ses_seed=None):
    """
    Build EBRuptures from a rupture file.

    :param fname: path to a rupture file (XML or CSV)
    :param conv: RuptureConverter instance, used for XML ruptures
    :param ses_seed: used for XML ruptures
    :returns: a list of one or more EBRuptures
    """
    if fname.endswith('.xml'):
        # a single-rupture NRML file; conv must be passed by the caller
        [rup_node] = nrml.read(fname)
        rup = conv.convert_node(rup_node)
        rup.tectonic_region_type = '*'  # no TRT for scenario ruptures
        rup.rup_id = ses_seed
        ebrs = [EBRupture(rup, 'NA', 0, id=rup.rup_id, scenario=True)]
        return ebrs

    assert fname.endswith('.csv'), fname
    aw = get_ruptures(fname)
    ebrs = []
    for i, rec in enumerate(aw.array):
        # rebuild each rupture from its record and stored geometry
        rupture = _get_rupture(rec, aw.geoms[i], aw.trts[rec['trt_smr']])
        ebr = EBRupture(rupture, rec['source_id'], rec['trt_smr'],
                        rec['n_occ'], rec['id'], rec['e0'])
        ebrs.append(ebr)
    return ebrs
def _read_scenario_ruptures(self):
    """
    Read the rupture(s) from the ``rupture_model`` input (XML or CSV),
    store their geometries and the fake logic tree in the datastore and
    import the corresponding ruptures/events.
    """
    oq = self.oqparam
    gsim_lt = readinput.get_gsim_lt(self.oqparam)
    G = gsim_lt.get_num_paths()  # number of GSIM logic tree paths
    if oq.calculation_mode.startswith('scenario'):
        ngmfs = oq.number_of_ground_motion_fields
    if oq.inputs['rupture_model'].endswith('.xml'):
        # check the number of branchsets
        bsets = len(gsim_lt._ltnode)
        if bsets > 1:
            raise InvalidFile(
                '%s for a scenario calculation must contain a single '
                'branchset, found %d!' % (oq.inputs['job_ini'], bsets))
        [(trt, rlzs_by_gsim)] = gsim_lt.get_rlzs_by_gsim_trt().items()
        self.cmaker = ContextMaker(
            trt, rlzs_by_gsim,
            {'maximum_distance': oq.maximum_distance(trt),
             'minimum_distance': oq.minimum_distance,
             'truncation_level': oq.truncation_level,
             'imtls': oq.imtls})
        rup = readinput.get_rupture(oq)
        if self.N > oq.max_sites_disagg:  # many sites, split rupture
            # one copy of the rupture per GMF, with distinct IDs/e0s
            ebrs = [EBRupture(copyobj(rup, rup_id=rup.rup_id + i),
                              'NA', 0, G, e0=i * G, scenario=True)
                    for i in range(ngmfs)]
        else:  # keep a single rupture with a big occupation number
            ebrs = [EBRupture(rup, 'NA', 0, G * ngmfs,
                              rup.rup_id, scenario=True)]
        srcfilter = SourceFilter(self.sitecol, oq.maximum_distance(trt))
        aw = get_rup_array(ebrs, srcfilter)
        if len(aw) == 0:
            raise RuntimeError(
                'The rupture is too far from the sites! Please check the '
                'maximum_distance and the position of the rupture')
    elif oq.inputs['rupture_model'].endswith('.csv'):
        aw = get_ruptures(oq.inputs['rupture_model'])
        if len(gsim_lt.values) == 1:  # fix for scenario_damage/case_12
            aw['trt_smr'] = 0  # a single TRT
        if oq.calculation_mode.startswith('scenario'):
            # rescale n_occ by ngmfs and nrlzs
            aw['n_occ'] *= ngmfs * gsim_lt.get_num_paths()
    else:
        raise InvalidFile("Something wrong in %s" % oq.inputs['job_ini'])
    rup_array = aw.array
    hdf5.extend(self.datastore['rupgeoms'], aw.geom)
    if len(rup_array) == 0:
        # NOTE(review): in the CSV branch `rup` is unbound here, so this
        # error path would itself raise NameError — confirm intended
        raise RuntimeError(
            'There are no sites within the maximum_distance'
            ' of %s km from the rupture' % oq.maximum_distance(
                rup.tectonic_region_type)(rup.mag))
    # scenario calculations have no real source model: fake the logic tree
    fake = logictree.FullLogicTree.fake(gsim_lt)
    self.realizations = fake.get_realizations()
    self.datastore['full_lt'] = fake
    self.store_rlz_info({})  # store weights
    self.save_params()
    imp = calc.RuptureImporter(self.datastore)
    imp.import_rups_events(rup_array, get_rupture_getters)