def test_missing_cost_types(self):
    job_ini = general.gettemp('''\
[general]
description = Exposure with missing cost_types
calculation_mode = scenario
exposure_file = %s''' % os.path.basename(self.exposure4))
    oqparam = readinput.get_oqparam(job_ini)
    with self.assertRaises(InvalidFile) as ctx:
        readinput.get_sitecol_assetcol(oqparam, cost_types=['structural'])
    self.assertIn("is missing", str(ctx.exception))
def test_grid_site_model_exposure(self):
    oq = readinput.get_oqparam('job.ini', case_16)
    oq.region_grid_spacing = 15
    sitecol, assetcol, discarded = readinput.get_sitecol_assetcol(oq)
    self.assertEqual(len(sitecol), 141)  # 10 sites were discarded silently
    self.assertEqual(len(assetcol), 151)
    self.assertEqual(len(discarded), 0)  # no assets were discarded
def read_exposure(self, haz_sitecol):  # after load_risk_model
    """
    Read the exposure, the risk models and update the attributes
    .sitecol, .assetcol
    """
    oq = self.oqparam
    with self.monitor('reading exposure'):
        self.sitecol, self.assetcol, discarded = (
            readinput.get_sitecol_assetcol(
                oq, haz_sitecol, self.crmodel.loss_types))
    if len(discarded):
        self.datastore['discarded'] = discarded
        if hasattr(self, 'rup'):
            # this is normal for the case of scenario from rupture
            logging.info('%d assets were discarded because too far '
                         'from the rupture; use `oq show discarded` '
                         'to show them and `oq plot_assets` to plot '
                         'them' % len(discarded))
        elif not oq.discard_assets:  # raise an error
            self.datastore['sitecol'] = self.sitecol
            self.datastore['assetcol'] = self.assetcol
            raise RuntimeError(
                '%d assets were discarded; use `oq show discarded` to'
                ' show them and `oq plot_assets` to plot them' %
                len(discarded))
    self.policy_name = ''
    self.policy_dict = {}
    if oq.inputs.get('insurance'):
        k, v = zip(*oq.inputs['insurance'].items())
        self.load_insurance_data(k, v)
    return readinput.exposure
def read_exposure(self, haz_sitecol=None):  # after load_risk_model
    """
    Read the exposure, the riskmodel and update the attributes
    .sitecol, .assetcol
    """
    with self.monitor('reading exposure', autoflush=True):
        self.sitecol, self.assetcol, discarded = (
            readinput.get_sitecol_assetcol(
                self.oqparam, haz_sitecol, self.riskmodel.loss_types))
    if len(discarded):
        self.datastore['discarded'] = discarded
        if hasattr(self, 'rup'):
            # this is normal for the case of scenario from rupture
            logging.info('%d assets were discarded because too far '
                         'from the rupture; use `oq show discarded` '
                         'to show them and `oq plot_assets` to plot '
                         'them' % len(discarded))
        elif not self.oqparam.discard_assets:  # raise an error
            self.datastore['sitecol'] = self.sitecol
            self.datastore['assetcol'] = self.assetcol
            raise RuntimeError(
                '%d assets were discarded; use `oq show discarded` to'
                ' show them and `oq plot_assets` to plot them' %
                len(discarded))
    # reduce the riskmodel to the relevant taxonomies
    taxonomies = set(taxo for taxo in self.assetcol.tagcol.taxonomy
                     if taxo != '?')
    if len(self.riskmodel.taxonomies) > len(taxonomies):
        logging.info('Reducing risk model from %d to %d taxonomies',
                     len(self.riskmodel.taxonomies), len(taxonomies))
        self.riskmodel = self.riskmodel.reduce(taxonomies)
    return readinput.exposure
def read_exposure(self, haz_sitecol=None):
    """
    Read the exposure, the riskmodel and update the attributes
    .sitecol, .assetcol
    """
    with self.monitor('reading exposure', autoflush=True):
        self.sitecol, self.assetcol, discarded = (
            readinput.get_sitecol_assetcol(self.oqparam, haz_sitecol,
                                           self.riskmodel.loss_types))
    if len(discarded):
        self.datastore['discarded'] = discarded
        msg = ('%d sites with assets were discarded; use '
               '`oq plot_assets` to see them' % len(discarded))
        if hasattr(self, 'rup') or self.oqparam.discard_assets:
            # just log a warning in case of scenario from rupture
            # or when discard_assets is set to True
            logging.warn(msg)
        else:  # raise an error
            self.datastore['sitecol'] = self.sitecol
            self.datastore['assetcol'] = self.assetcol
            raise RuntimeError(msg)
    readinput.exposure = None  # reset the global
    # reduce the riskmodel to the relevant taxonomies
    taxonomies = set(taxo for taxo in self.assetcol.tagcol.taxonomy
                     if taxo != '?')
    if len(self.riskmodel.taxonomies) > len(taxonomies):
        logging.info('Reducing risk model from %d to %d taxonomies',
                     len(self.riskmodel.taxonomies), len(taxonomies))
        self.riskmodel = self.riskmodel.reduce(taxonomies)
def test_exposure_only(self):
    oq = readinput.get_oqparam('job.ini', case_16)
    del oq.inputs['site_model']
    sitecol, assetcol, discarded = readinput.get_sitecol_assetcol(oq)
    self.assertEqual(len(sitecol), 148)
    self.assertEqual(len(assetcol), 151)
    self.assertEqual(len(discarded), 0)

    # test agg_value
    arr = assetcol.agg_value()
    assert_allclose(arr, [3.6306637e+09])
    arr = assetcol.agg_value('taxonomy')
    assert_allclose(arr,
                    [[4.9882240e+06], [1.1328099e+08], [4.2222912e+08],
                     [1.6412870e+07], [5.0686808e+07], [2.5343402e+07],
                     [1.5254313e+09], [6.6375590e+06], [8.3206810e+08],
                     [1.6412871e+07], [3.9439158e+08], [1.6734690e+07],
                     [6.7582400e+06], [1.3613027e+08], [4.3124016e+07],
                     [9.4132640e+06], [1.0620092e+07]])
    arr = assetcol.agg_value('occupancy')
    assert_allclose(assetcol.agg_value('occupancy'), [[3.6306644e+09]])
    arr = assetcol.agg_value('taxonomy', 'occupancy')
    self.assertEqual(arr.shape, (17, 1, 1))
def test_grid_site_model_exposure(self):
    oq = readinput.get_oqparam(
        'job.ini', case_16, region_grid_spacing='15')
    sitecol, assetcol, discarded = readinput.get_sitecol_assetcol(oq)
    self.assertEqual(len(sitecol), 148)  # 3 sites were discarded silently
    self.assertEqual(len(assetcol), 151)
    self.assertEqual(len(discarded), 0)  # no assets were discarded
def test_exposure_only(self):
    oq = readinput.get_oqparam('job.ini', case_16)
    del oq.inputs['site_model']
    sitecol, assetcol, discarded = readinput.get_sitecol_assetcol(oq)
    self.assertEqual(len(sitecol), 148)
    self.assertEqual(len(assetcol), 151)
    self.assertEqual(len(discarded), 0)
def read_exposure(self, haz_sitecol=None):
    """
    Read the exposure, the riskmodel and update the attributes
    .sitecol, .assetcol
    """
    with self.monitor('reading exposure', autoflush=True):
        self.sitecol, self.assetcol = readinput.get_sitecol_assetcol(
            self.oqparam, haz_sitecol, self.riskmodel.loss_types)
    readinput.exposure = None  # reset the global
def read_exposure(self):
    """
    Read the exposure, the riskmodel and update the attributes
    .exposure, .sitecol, .assets_by_site, .taxonomies.
    """
    logging.info('Reading the exposure')
    with self.monitor('reading exposure', autoflush=True):
        self.exposure = readinput.get_exposure(self.oqparam)
        self.sitecol, self.assetcol = (readinput.get_sitecol_assetcol(
            self.oqparam, self.exposure))
        # NB: using hdf5.vstr would fail for large exposures;
        # the datastore could become corrupt, and also ultra-strange things
        # may happen (i.e. having the sitecol saved inside asset_refs!!)
        arefs = numpy.array(self.exposure.asset_refs)
        self.datastore['asset_refs'] = arefs
        self.datastore.set_attrs('asset_refs', nbytes=arefs.nbytes)
def read_config_file(cfg):
    gmf_file = cfg['input']['gmf_file']
    gmf_file_gmpe_rate = cfg['input']['gmf_file_gmpe_rate']
    job_ini = cfg['input']['job_ini']
    oq_param = get_oqparam(job_ini)
    get_risk_model(oq_param)  # read risk functions and set imtls
    haz_sitecol = get_site_collection(oq_param)
    sites, assets_by_site, _ = get_sitecol_assetcol(oq_param, haz_sitecol)
    gsimlt = get_gsim_lt(oq_param)
    gsim_list = [br.uncertainty for br in gsimlt.branches]
    cinfo = source.CompositionInfo.fake(gsimlt)
    mean_shift_inter_residuals = float(
        cfg['input']['mean_shift_inter_residuals'])
    realizations_inter = int(cfg['input']['realizations_inter'])
    realizations_intra = int(cfg['input']['realizations_intra'])
    intra_files_name = cfg['input']['intra_files_name']
    intra_files = cfg['input']['intra_files'].split()
    csv_rate_gmf_file = cfg['output']['csv_rate_gmf_file']
    seed = int(cfg['input']['seed'])
    return (gmf_file, gmf_file_gmpe_rate, sites, gsim_list, cinfo, oq_param,
            mean_shift_inter_residuals, realizations_inter,
            realizations_intra, intra_files_name, intra_files,
            csv_rate_gmf_file, seed)
def test_exposure_only(self):
    oq = readinput.get_oqparam('job.ini', case_16)
    del oq.inputs['site_model']
    sitecol, assetcol, discarded = readinput.get_sitecol_assetcol(oq)
    self.assertEqual(len(sitecol), 148)
    self.assertEqual(len(assetcol), 151)
    self.assertEqual(len(discarded), 0)

    # test agg_value
    arr = assetcol.agg_value()
    assert_allclose(arr, [3.6306637e+09])
    arr = assetcol.agg_value('taxonomy')
    assert_allclose(arr,
                    [[0.0000000e+00], [4.9882240e+06], [1.1328099e+08],
                     [4.2222912e+08], [1.6412870e+07], [5.0686808e+07],
                     [2.5343402e+07], [1.5254313e+09], [6.6375590e+06],
                     [8.3206810e+08], [1.6412871e+07], [3.9439158e+08],
                     [1.6734690e+07], [6.7582400e+06], [1.3613027e+08],
                     [4.3124016e+07], [9.4132640e+06], [1.0620092e+07]])
    arr = assetcol.agg_value('occupancy')
    assert_allclose(assetcol.agg_value('occupancy'),
                    [[0.0000000e+00], [3.6306644e+09]])
    arr = assetcol.agg_value('taxonomy', 'occupancy')
    self.assertEqual(arr.shape, (18, 2, 1))
def main(cfg_file):
    startTime = datetime.now()
    cfg = configparser.ConfigParser()
    cfg.read(cfg_file)
    (oq_param, source_model_file, matrixMagsMin, matrixMagsMax,
     matrixMagsStep, matrixDistsMin, matrixDistsMax, matrixDistsStep,
     limitIM, imt_filtering, trunc_level, im_filter, gmf_file,
     gmf_file_gmpe_rate, rup_mesh_spac, complex_mesh_spac, mfd_bin,
     area_discre, limit_max_mag, limit_min_mag) = read_config_file(cfg)

    # Set up the source model configuration
    conv1 = SourceConverter(1.0,  # Investigation time
                            rup_mesh_spac,  # Rupture mesh spacing
                            complex_fault_mesh_spacing=complex_mesh_spac,
                            width_of_mfd_bin=mfd_bin,
                            area_source_discretization=area_discre)
    # Parse the source model
    if source_model_file:  # only one source model file
        source_model = to_python(source_model_file, conv1)
    else:  # source model has many files (in this case 2 - adapt for more)
        source_model_file2 = "demo_data/SA_RA_CATAL1_05.xml"
        source_model2 = to_python(source_model_file2, conv1)
        source_model = source_model + source_model2

    # Calculate total number of ruptures in the erf
    # num_rup = 0
    # rate_rup = []
    # for a in range(len(source_model)):
    #     model_trt = source_model[a]
    #     for b in range(len(model_trt)):
    #         num_rup = num_rup + len(list(model_trt[b].iter_ruptures()))
    #         for rup in model_trt[b].iter_ruptures():
    #             rate_rup.append(rup.occurrence_rate)
    # print(num_rup)
    # print(sum(rate_rup))
    # print(rate_rup[0:10])

    # If exposure model is provided:
    haz_sitecol = get_site_collection(oq_param)
    sites, assets_by_site, _ = get_sitecol_assetcol(oq_param, haz_sitecol)
    # print(list(sites)[0:10])
    # np.savetxt('sites.csv', list(zip(sites.lons, sites.lats)))
    # If region coordinates are provided:
    # sites = get_site_collection(oq_param)

    gsimlt = get_gsim_lt(oq_param)
    gsim_list = [br.uncertainty for br in gsimlt.branches]
    GMPEmatrix = build_gmpe_table(matrixMagsMin, matrixMagsMax,
                                  matrixMagsStep, matrixDistsMin,
                                  matrixDistsMax, matrixDistsStep,
                                  imt_filtering, limitIM, gsim_list,
                                  limit_max_mag, limit_min_mag)

    # Calculate minimum distance between rupture and assets
    # Import exposure from .ini file
    depths = np.zeros(len(sites))
    exposureCoords = Mesh(sites.lons, sites.lats, depths)
    # To calculate Joyner-Boore distance:
    exposurePoints = (exposureCoords, exposureCoords)
    recMeshExposure = RectangularMesh.from_points_list(exposurePoints)
    imts = ['PGA', 'SA(0.3)']
    cmake = ContextMaker(gsim_list)

    filter1 = SourceFilter(sites, oq_param.maximum_distance)

    if im_filter == 'True':  # Here we consider the IM and the MaxDist filter
        gmfs_median = calculate_gmfs_filter(source_model, gsimlt, filter1,
                                            cmake, gsim_list,
                                            recMeshExposure, matrixMagsMin,
                                            matrixMagsStep, matrixDistsMin,
                                            matrixDistsStep, GMPEmatrix,
                                            imts, trunc_level)
    else:  # No IM filter, just the MaxDist filter
        gmfs_median = calc_gmfs_no_IM_filter(source_model, imts, gsim_list,
                                             trunc_level, gsimlt,
                                             filter1, cmake)

    print("%s Ground Motion Fields" % len(gmfs_median))

    save_gmfs(gmf_file, gmf_file_gmpe_rate, gmfs_median, exposureCoords,
              gsim_list, imts)
    print(datetime.now() - startTime)
def test_site_model_exposure(self):
    oq = readinput.get_oqparam('job.ini', case_16)
    sitecol, assetcol, discarded = readinput.get_sitecol_assetcol(oq)
    self.assertEqual(len(sitecol), 148)
    self.assertEqual(len(assetcol), 151)
    self.assertEqual(len(discarded), 0)