def test_exposure_zero_number(self):
    """An asset whose 'number' attribute is 0 must be rejected."""
    oqparam = mock.Mock()
    oqparam.calculation_mode = 'scenario_damage'
    oqparam.inputs = {'exposure': self.exposure0}
    oqparam.region_constraint = (
        'POLYGON((78.0 31.5, 89.5 31.5, 89.5 25.5, 78.0 25.5, '
        '78.0 31.5))')
    oqparam.time_event = None
    oqparam.ignore_missing_costs = []
    with self.assertRaises(ValueError) as cm:
        readinput.get_exposure(oqparam)
    expected = ("node assets: Could not convert number->compose"
                "(positivefloat,nonzero): '0' is zero, line 17")
    self.assertIn(expected, str(cm.exception))
def test_exposure_missing_number(self):
    """An asset lacking the mandatory 'number' attribute raises KeyError."""
    oqparam = mock.Mock()
    oqparam.calculation_mode = 'scenario_damage'
    oqparam.inputs = {'exposure': self.exposure}
    oqparam.region_constraint = (
        'POLYGON((78.0 31.5, 89.5 31.5, 89.5 25.5, 78.0 25.5, '
        '78.0 31.5))')
    oqparam.time_event = None
    oqparam.ignore_missing_costs = []
    with self.assertRaises(KeyError) as cm:
        readinput.get_exposure(oqparam)
    self.assertIn("node asset: 'number', line 17 of", str(cm.exception))
def test_exposure_invalid_asset_id(self):
    """An asset ID containing a space must raise a ValueError."""
    oqparam = mock.Mock()
    oqparam.base_path = '/'
    oqparam.calculation_mode = 'scenario_damage'
    oqparam.all_cost_types = ['structural']
    oqparam.inputs = {'exposure': self.exposure1}
    oqparam.region_constraint = (
        'POLYGON((78.0 31.5, 89.5 31.5, 89.5 25.5, 78.0 25.5, '
        '78.0 31.5))')
    oqparam.time_event = None
    oqparam.ignore_missing_costs = []
    with self.assertRaises(ValueError) as cm:
        readinput.get_exposure(oqparam)
    expected = ("Invalid ID 'a 1': the only accepted chars are "
                "a-zA-Z0-9_-, line 11")
    self.assertIn(expected, str(cm.exception))
def test_invalid_asset_id(self):
    """An asset ID containing a space must raise a ValueError."""
    oqparam = mock.Mock()
    oqparam.base_path = '/'
    oqparam.calculation_mode = 'scenario_damage'
    oqparam.all_cost_types = ['structural']
    oqparam.inputs = {'exposure': self.exposure1}
    oqparam.region_constraint = (
        'POLYGON((78.0 31.5, 89.5 31.5, 89.5 25.5, 78.0 25.5, '
        '78.0 31.5))')
    oqparam.time_event = None
    oqparam.ignore_missing_costs = []
    with self.assertRaises(ValueError) as cm:
        readinput.get_exposure(oqparam)
    expected = ("Invalid ID 'a 1': the only accepted chars are "
                "a-zA-Z0-9_-, line 11")
    self.assertIn(expected, str(cm.exception))
def test_wrong_cost_type(self):
    """An unknown cost type 'aggregate' must raise a ValueError."""
    oqparam = mock.Mock()
    oqparam.base_path = '/'
    oqparam.calculation_mode = 'scenario_risk'
    oqparam.all_cost_types = ['structural']
    oqparam.region_constraint = (
        'POLYGON((68.0 31.5, 69.5 31.5, 69.5 25.5, 68.0 25.5, '
        '68.0 31.5))')
    oqparam.inputs = {'exposure': self.exposure2,
                      'structural_vulnerability': None}
    with self.assertRaises(ValueError) as cm:
        readinput.get_exposure(oqparam)
    expected = ("Got 'aggregate', expected "
                "aggregated|per_area|per_asset, line 7")
    self.assertIn(expected, str(cm.exception))
def test_exposure_wrong_cost_type(self):
    """An unknown cost type 'aggregate' must raise a ValueError."""
    oqparam = mock.Mock()
    oqparam.base_path = '/'
    oqparam.calculation_mode = 'scenario_risk'
    oqparam.all_cost_types = ['structural']
    oqparam.region_constraint = (
        'POLYGON((68.0 31.5, 69.5 31.5, 69.5 25.5, 68.0 25.5, '
        '68.0 31.5))')
    oqparam.inputs = {'exposure': self.exposure2,
                      'structural_vulnerability': None}
    with self.assertRaises(ValueError) as cm:
        readinput.get_exposure(oqparam)
    expected = ("Got 'aggregate', expected "
                "aggregated|per_area|per_asset, line 7")
    self.assertIn(expected, str(cm.exception))
def test_missing_number(self):
    """An asset lacking the mandatory 'number' attribute must fail."""
    oqparam = mock.Mock()
    oqparam.base_path = '/'
    oqparam.calculation_mode = 'scenario_damage'
    oqparam.all_cost_types = ['occupants']
    oqparam.inputs = {'exposure': [self.exposure]}
    oqparam.region = (
        'POLYGON((78.0 31.5, 89.5 31.5, 89.5 25.5, 78.0 25.5, '
        '78.0 31.5))')
    oqparam.time_event = None
    oqparam.ignore_missing_costs = []
    oqparam.aggregate_by = []
    with self.assertRaises(Exception) as cm:
        readinput.get_exposure(oqparam)
    self.assertIn("node asset: 'number', line 17 of", str(cm.exception))
def test_invalid_taxonomy(self):
    """A taxonomy containing whitespace must raise a ValueError."""
    oqparam = mock.Mock()
    oqparam.base_path = '/'
    oqparam.calculation_mode = 'scenario_damage'
    oqparam.all_cost_types = ['structural']
    oqparam.inputs = {'exposure': self.exposure3}
    oqparam.region_constraint = (
        'POLYGON((78.0 31.5, 89.5 31.5, 89.5 25.5, 78.0 25.5, '
        '78.0 31.5))')
    oqparam.time_event = None
    oqparam.insured_losses = False
    oqparam.ignore_missing_costs = []
    with self.assertRaises(ValueError) as cm:
        readinput.get_exposure(oqparam)
    self.assertIn("'RM ' contains whitespace chars, line 11",
                  str(cm.exception))
def test_exposure_no_insured_data(self):
    """With insured_losses on, a cost without 'deductible' raises KeyError."""
    oqparam = mock.Mock()
    oqparam.base_path = '/'
    oqparam.calculation_mode = 'scenario_risk'
    oqparam.all_cost_types = ['structural']
    oqparam.insured_losses = True
    oqparam.inputs = {'exposure': self.exposure,
                      'structural_vulnerability': None}
    oqparam.region_constraint = (
        'POLYGON((78.0 31.5, 89.5 31.5, 89.5 25.5, 78.0 25.5, '
        '78.0 31.5))')
    oqparam.time_event = None
    oqparam.ignore_missing_costs = []
    with self.assertRaises(KeyError) as cm:
        readinput.get_exposure(oqparam)
    self.assertIn("node cost: 'deductible', line 14", str(cm.exception))
def test_no_insured_data(self):
    """With insured_losses on, a cost without 'deductible' raises KeyError."""
    oqparam = mock.Mock()
    oqparam.base_path = '/'
    oqparam.calculation_mode = 'scenario_risk'
    oqparam.all_cost_types = ['structural']
    oqparam.insured_losses = True
    oqparam.inputs = {'exposure': self.exposure,
                      'structural_vulnerability': None}
    oqparam.region_constraint = (
        'POLYGON((78.0 31.5, 89.5 31.5, 89.5 25.5, 78.0 25.5, '
        '78.0 31.5))')
    oqparam.time_event = None
    oqparam.ignore_missing_costs = []
    with self.assertRaises(KeyError) as cm:
        readinput.get_exposure(oqparam)
    self.assertIn("node cost: 'deductible', line 14", str(cm.exception))
def test_invalid_taxonomy(self):
    """A taxonomy containing whitespace must raise a ValueError."""
    oqparam = mock.Mock()
    oqparam.base_path = '/'
    oqparam.calculation_mode = 'scenario_damage'
    oqparam.all_cost_types = ['structural']
    oqparam.inputs = {'exposure': [self.exposure3]}
    oqparam.region = (
        'POLYGON((78.0 31.5, 89.5 31.5, 89.5 25.5, 78.0 25.5, '
        '78.0 31.5))')
    oqparam.time_event = None
    oqparam.insured_losses = False
    oqparam.ignore_missing_costs = []
    oqparam.aggregate_by = []
    with self.assertRaises(ValueError) as cm:
        readinput.get_exposure(oqparam)
    self.assertIn("'RM ' contains whitespace chars, line 11",
                  str(cm.exception))
def test_no_assets(self):
    """A region containing no assets must raise a RuntimeError."""
    oqparam = mock.Mock()
    oqparam.base_path = '/'
    oqparam.calculation_mode = 'scenario_risk'
    oqparam.all_cost_types = ['structural']
    oqparam.insured_losses = True
    oqparam.inputs = {'exposure': self.exposure,
                      'structural_vulnerability': None}
    oqparam.region_constraint = (
        'POLYGON((68.0 31.5, 69.5 31.5, 69.5 25.5, 68.0 25.5, '
        '68.0 31.5))')
    oqparam.time_event = None
    oqparam.ignore_missing_costs = []
    with self.assertRaises(RuntimeError) as cm:
        readinput.get_exposure(oqparam)
    self.assertIn('Could not find any asset within the region!',
                  str(cm.exception))
def test_exposure_no_assets(self):
    """A region containing no assets must raise a RuntimeError."""
    oqparam = mock.Mock()
    oqparam.base_path = '/'
    oqparam.calculation_mode = 'scenario_risk'
    oqparam.all_cost_types = ['structural']
    oqparam.insured_losses = True
    oqparam.inputs = {'exposure': self.exposure,
                      'structural_vulnerability': None}
    oqparam.region_constraint = (
        'POLYGON((68.0 31.5, 69.5 31.5, 69.5 25.5, 68.0 25.5, '
        '68.0 31.5))')
    oqparam.time_event = None
    oqparam.ignore_missing_costs = []
    with self.assertRaises(RuntimeError) as cm:
        readinput.get_exposure(oqparam)
    self.assertIn('Could not find any asset within the region!',
                  str(cm.exception))
def test_zero_number(self):
    """An asset whose 'number' is 0.0 must be rejected."""
    oqparam = mock.Mock()
    oqparam.base_path = '/'
    oqparam.calculation_mode = 'scenario_damage'
    oqparam.all_cost_types = ['structural']
    oqparam.insured_losses = False
    oqparam.inputs = {'exposure': [self.exposure0]}
    oqparam.region = (
        'POLYGON((78.0 31.5, 89.5 31.5, 89.5 25.5, 78.0 25.5, '
        '78.0 31.5))')
    oqparam.time_event = None
    oqparam.ignore_missing_costs = []
    oqparam.aggregate_by = []
    with self.assertRaises(ValueError) as cm:
        readinput.get_exposure(oqparam)
    self.assertIn("'0.0' is zero, line 17", str(cm.exception))
def setUpClass(cls):
    """Load job_loss.ini once and prepare sitecol, assets and riskmodel."""
    cls.oqparam = readinput.get_oqparam('job_loss.ini', pkg=case_2)
    cls.oqparam.insured_losses = True
    exposure = readinput.get_exposure(cls.oqparam)
    cls.sitecol, cls.assets_by_site = readinput.get_sitecol_assets(
        cls.oqparam, exposure)
    rmdict = riskmodels.get_risk_models(cls.oqparam)
    cls.riskmodel = readinput.get_risk_model(cls.oqparam, rmdict)
def _info(name, filtersources, weightsources):
    # Print information about `name`, which may be a calculator name,
    # the literal string 'gsims', an XML file, or a job .ini/.zip.
    if name in base.calculators:
        # calculator name: show its docstring
        print(textwrap.dedent(base.calculators[name].__doc__.strip()))
    elif name == 'gsims':
        # list all the available ground shaking intensity models
        for gs in gsim.get_available_gsims():
            print(gs)
    elif name.endswith('.xml'):
        print(nrml.read(name).to_str())
    elif name.endswith(('.ini', '.zip')):
        oqparam = readinput.get_oqparam(name)
        if 'exposure' in oqparam.inputs:
            expo = readinput.get_exposure(oqparam)
            sitecol, assets_by_site = readinput.get_sitecol_assets(
                oqparam, expo)
        elif filtersources or weightsources:
            # a site collection is needed only to filter/weight the sources
            sitecol = readinput.get_site_collection(oqparam)
        else:
            sitecol = None
        if 'source_model_logic_tree' in oqparam.inputs:
            print('Reading the source model...')
            if weightsources:
                sp = source.SourceFilterWeighter
            elif filtersources:
                sp = source.SourceFilter
            else:
                sp = source.BaseSourceProcessor  # do nothing
            csm = readinput.get_composite_source_model(oqparam, sitecol, sp)
            assoc = csm.get_rlzs_assoc()
            dstore = datastore.Fake(vars(oqparam), rlzs_assoc=assoc,
                                    composite_source_model=csm,
                                    sitecol=sitecol)
            _print_info(dstore, filtersources, weightsources)
    else:
        print("No info for '%s'" % name)
def test_zero_number(self):
    """An asset whose 'number' is 0 must be rejected with a clear error."""
    oqparam = mock.Mock()
    oqparam.base_path = '/'
    oqparam.calculation_mode = 'scenario_damage'
    oqparam.all_cost_types = ['structural']
    oqparam.insured_losses = False
    oqparam.inputs = {'exposure': [self.exposure0]}
    oqparam.region = (
        'POLYGON((78.0 31.5, 89.5 31.5, 89.5 25.5, 78.0 25.5, '
        '78.0 31.5))')
    oqparam.time_event = None
    oqparam.ignore_missing_costs = []
    oqparam.aggregate_by = []
    with self.assertRaises(ValueError) as cm:
        readinput.get_exposure(oqparam)
    expected = ("Could not convert number->compose"
                "(positivefloat,nonzero): '0' is zero, line 17")
    self.assertIn(expected, str(cm.exception))
def pre_execute(self):
    """
    Read the site collection and initialize GmfComputer, tags and seeds
    """
    oq = self.oqparam
    if 'exposure' not in oq.inputs:
        # no exposure: build the site collection directly
        self.sitecol = readinput.get_site_collection(oq)
    else:
        logging.info('Reading the exposure')
        exposure = readinput.get_exposure(oq)
        logging.info('Reading the site collection')
        self.sitecol, _assets = readinput.get_sitecol_assets(oq, exposure)
    self._init_tags()
def read_exposure_sitecol(self):
    """
    Read the exposure (if any) and then the site collection,
    possibly extracted from the exposure.
    """
    inputs = self.oqparam.inputs
    if 'gmfs' in inputs and self.oqparam.sites:
        # hazard sites given explicitly together with precomputed GMFs
        haz_sitecol = self.sitecol = readinput.get_site_collection(
            self.oqparam)
    if 'scenario_' in self.oqparam.calculation_mode:
        self.gmfs = get_gmfs(self)
        haz_sitecol = self.sitecol
    if 'exposure' in inputs:
        logging.info('Reading the exposure')
        with self.monitor('reading exposure', autoflush=True):
            self.exposure = readinput.get_exposure(self.oqparam)
            self.sitecol, self.assets_by_site = (
                readinput.get_sitecol_assets(self.oqparam, self.exposure))
            self.cost_types = self.exposure.cost_types
            self.taxonomies = numpy.array(
                sorted(self.exposure.taxonomies), '|S100')
        num_assets = self.count_assets()
        # pick the hazard site collection, if any
        if self.datastore.parent:
            haz_sitecol = self.datastore.parent['sitecol']
        elif 'gmfs' in inputs:
            pass  # haz_sitecol is already defined
        # TODO: think about the case hazard_curves in inputs
        else:
            haz_sitecol = None
        if haz_sitecol is not None and haz_sitecol != self.sitecol:
            # associate the assets to the hazard sites
            with self.monitor('assoc_assets_sites'):
                self.sitecol, self.assets_by_site = \
                    self.assoc_assets_sites(haz_sitecol.complete)
            ok_assets = self.count_assets()
            num_sites = len(self.sitecol)
            logging.warn('Associated %d assets to %d sites, %d discarded',
                         ok_assets, num_sites, num_assets - ok_assets)
    elif (self.datastore.parent and 'exposure' in
          OqParam.from_(self.datastore.parent.attrs).inputs):
        logging.info('Re-using the already imported exposure')
    else:  # no exposure
        logging.info('Reading the site collection')
        with self.monitor('reading site collection', autoflush=True):
            self.sitecol = readinput.get_site_collection(self.oqparam)
    # save mesh and asset collection
    self.save_mesh()
    if hasattr(self, 'assets_by_site'):
        self.assetcol = riskinput.build_asset_collection(
            self.assets_by_site, self.oqparam.time_event)
def read_exposure(self):
    """
    Read the exposure, the riskmodel and update the attributes
    .exposure, .sitecol, .assets_by_site, .taxonomies.
    """
    logging.info('Reading the exposure')
    with self.monitor('reading exposure', autoflush=True):
        self.exposure = readinput.get_exposure(self.oqparam)
        refs = numpy.array(self.exposure.asset_refs, hdf5.vstr)
        self.datastore['asset_refs'] = refs
        self.datastore.set_attrs('asset_refs', nbytes=refs.nbytes)
        self.cost_calculator = readinput.get_cost_calculator(self.oqparam)
        self.sitecol, self.assets_by_site = readinput.get_sitecol_assets(
            self.oqparam, self.exposure)
def read_exposure(self, haz_sitecol=None):
    """
    Read the exposure, the riskmodel and update the attributes
    .exposure, .sitecol, .assetcol

    :param haz_sitecol: if given, the hazard site collection; the assets
        are then associated to its sites within asset_hazard_distance
    """
    logging.info('Reading the exposure')
    with self.monitor('reading exposure', autoflush=True):
        self.exposure = readinput.get_exposure(self.oqparam)
        mesh, assets_by_site = (readinput.get_mesh_assets_by_site(
            self.oqparam, self.exposure))
    if haz_sitecol:
        tot_assets = sum(len(assets) for assets in assets_by_site)
        all_sids = haz_sitecol.complete.sids
        sids = set(haz_sitecol.sids)
        # associate the assets to the hazard sites
        asset_hazard_distance = self.oqparam.asset_hazard_distance
        siteobjects = geo.utils.GeographicObjects(
            Site(sid, lon, lat) for sid, lon, lat in zip(
                haz_sitecol.sids, haz_sitecol.lons, haz_sitecol.lats))
        assets_by_sid = general.AccumDict(accum=[])
        for assets in assets_by_site:
            if len(assets):
                # all assets of a site share the same location
                lon, lat = assets[0].location
                site, distance = siteobjects.get_closest(lon, lat)
                if site.sid in sids and distance <= asset_hazard_distance:
                    # keep the assets, otherwise discard them
                    assets_by_sid += {site.sid: list(assets)}
        if not assets_by_sid:
            raise AssetSiteAssociationError(
                'Could not associate any site to any assets within the '
                'asset_hazard_distance of %s km' % asset_hazard_distance)
        mask = numpy.array([sid in assets_by_sid for sid in all_sids])
        assets_by_site = [assets_by_sid[sid] for sid in all_sids]
        num_assets = sum(len(assets) for assets in assets_by_site)
        logging.info('Associated %d/%d assets to the hazard sites',
                     num_assets, tot_assets)
        self.sitecol = haz_sitecol.complete.filter(mask)
    else:  # use the exposure sites as hazard sites
        self.sitecol = readinput.get_site_collection(self.oqparam, mesh)
    self.assetcol = asset.AssetCollection(
        self.exposure.asset_refs, assets_by_site, self.exposure.tagcol,
        self.exposure.cost_calculator, self.oqparam.time_event,
        occupancy_periods=hdf5.array_of_vstr(
            sorted(self.exposure.occupancy_periods)))
    logging.info('Considering %d assets on %d sites',
                 len(self.assetcol), len(self.sitecol))
def expo2csv(job_ini):
    """
    Convert an exposure in XML format into CSV format

    :param job_ini: path to a job.ini file referencing the exposure
    """
    oq = readinput.get_oqparam(job_ini)
    exposure = readinput.get_exposure(oq)
    rows = []
    # build the CSV header: fixed fields, then per-cost-type columns,
    # then occupancy periods and tag names
    header = ['asset_ref', 'number', 'area', 'taxonomy', 'lon', 'lat']
    for costname in exposure.cost_types['name']:
        if costname != 'occupants':
            header.append(costname)
            header.append(costname + '-deductible')
            header.append(costname + '-insured_limit')
    header.extend(exposure.occupancy_periods)
    header.extend(exposure.tagnames)
    # one row per asset, mirroring the header layout
    for asset, asset_ref in zip(exposure.assets, exposure.asset_refs):
        row = [asset_ref.decode('utf8'), asset.number, asset.area,
               asset.taxonomy, asset.location[0], asset.location[1]]
        for costname in exposure.cost_types['name']:
            if costname != 'occupants':
                row.append(asset.values[costname])
                # '?' marks a missing deductible/insured limit
                row.append(asset.deductibles.get(costname, '?'))
                row.append(asset.insurance_limits.get(costname, '?'))
        for time_event in exposure.occupancy_periods:
            row.append(asset.value(time_event))
        for tagname, tagidx in zip(exposure.tagnames, asset.tagidxs):
            row.append(tagidx)
        rows.append(row)
    with performance.Monitor('expo2csv') as mon:
        # save exposure data as csv
        csvname = oq.inputs['exposure'].replace('.xml', '.csv')
        print('Saving %s' % csvname)
        with open(csvname, 'w') as f:
            writer = csv.writer(f)
            writer.writerow(header)
            for row in rows:
                writer.writerow(row)
        # save exposure header as xml, pointing at the csv just written
        head = nrml.read(oq.inputs['exposure'], stop='assets')
        xmlname = oq.inputs['exposure'].replace('.xml', '-header.xml')
        print('Saving %s' % xmlname)
        head[0].assets.text = os.path.basename(csvname)
        with open(xmlname, 'wb') as f:
            nrml.write(head, f)
    print(mon)
def read_exposure(self):
    """
    Read the exposure, the riskmodel and update the attributes
    .exposure, .sitecol, .assets_by_site, .cost_types, .taxonomies.
    """
    logging.info('Reading the exposure')
    with self.monitor('reading exposure', autoflush=True):
        self.exposure = readinput.get_exposure(self.oqparam)
        self.sitecol, self.assets_by_site = readinput.get_sitecol_assets(
            self.oqparam, self.exposure)
        if len(self.exposure.cost_types):
            self.cost_types = self.exposure.cost_types
        self.taxonomies = numpy.array(
            sorted(self.exposure.taxonomies), '|S100')
        self.datastore['time_events'] = sorted(self.exposure.time_events)
def read_exposure(self):
    """
    Read the exposure, the riskmodel and update the attributes
    .exposure, .sitecol, .assets_by_site, .taxonomies.
    """
    logging.info('Reading the exposure')
    with self.monitor('reading exposure', autoflush=True):
        self.exposure = readinput.get_exposure(self.oqparam)
        self.sitecol, self.assetcol = readinput.get_sitecol_assetcol(
            self.oqparam, self.exposure)
        # NB: using hdf5.vstr would fail for large exposures;
        # the datastore could become corrupt, and also ultra-strange things
        # may happen (i.e. having the sitecol saved inside asset_refs!!)
        asset_refs = numpy.array(self.exposure.asset_refs)
        self.datastore['asset_refs'] = asset_refs
        self.datastore.set_attrs('asset_refs', nbytes=asset_refs.nbytes)
def read_exposure(self):
    """
    Read the exposure, the riskmodel and update the attributes
    .exposure, .sitecol, .assets_by_site, .cost_types, .taxonomies.
    """
    oq = self.oqparam
    logging.info("Reading the exposure")
    with self.monitor("reading exposure", autoflush=True):
        self.exposure = readinput.get_exposure(oq)
        refs = numpy.array(self.exposure.asset_refs)
        self.datastore["asset_refs"] = refs
        self.datastore.set_attrs("asset_refs", nbytes=refs.nbytes)
        # the cost calculator is the last item returned by the lazy reader
        self.cost_calculator = readinput.get_exposure_lazy(
            oq.inputs["exposure"], set(oq.all_cost_types))[-1]
        self.sitecol, self.assets_by_site = readinput.get_sitecol_assets(
            oq, self.exposure)
        if len(self.exposure.cost_types):
            self.cost_types = self.exposure.cost_types
def read_exposure_sitecol(self):
    """
    Read the exposure (if any) and then the site collection,
    possibly extracted from the exposure.
    """
    logging.info('Reading the site collection')
    with self.monitor('reading site collection', autoflush=True):
        haz_sitecol = readinput.get_site_collection(self.oqparam)
    inputs = self.oqparam.inputs
    if 'exposure' in inputs:
        logging.info('Reading the exposure')
        with self.monitor('reading exposure', autoflush=True):
            self.exposure = readinput.get_exposure(self.oqparam)
            self.sitecol, self.assets_by_site = (
                readinput.get_sitecol_assets(self.oqparam, self.exposure))
            if len(self.exposure.cost_types):
                self.cost_types = self.exposure.cost_types
            self.taxonomies = numpy.array(
                sorted(self.exposure.taxonomies), '|S100')
        num_assets = self.count_assets()
        # a parent datastore overrides the hazard site collection
        if self.datastore.parent:
            haz_sitecol = self.datastore.parent['sitecol']
        if haz_sitecol is not None and haz_sitecol != self.sitecol:
            # associate the assets to the hazard sites
            with self.monitor('assoc_assets_sites'):
                self.sitecol, self.assets_by_site = \
                    self.assoc_assets_sites(haz_sitecol.complete)
            ok_assets = self.count_assets()
            num_sites = len(self.sitecol)
            logging.warn('Associated %d assets to %d sites, %d discarded',
                         ok_assets, num_sites, num_assets - ok_assets)
    elif (self.datastore.parent and 'exposure' in
          OqParam.from_(self.datastore.parent.attrs).inputs):
        logging.info('Re-using the already imported exposure')
    else:  # no exposure
        self.sitecol = haz_sitecol
    # save mesh and asset collection
    self.save_mesh()
    if hasattr(self, 'assets_by_site'):
        self.assetcol = riskinput.build_asset_collection(
            self.assets_by_site, self.oqparam.time_event)
        # sanity check on the user-specified assets
        spec = set(self.oqparam.specific_assets)
        unknown = spec - set(self.assetcol['asset_ref'])
        if unknown:
            raise ValueError('The specific asset(s) %s are not in the '
                             'exposure' % ', '.join(unknown))
def pre_execute(self):
    """
    Set the attributes .riskmodel, .sitecol, .assets_by_site
    """
    oq = self.oqparam
    self.riskmodel = readinput.get_risk_model(oq)
    self.exposure = readinput.get_exposure(oq)
    logging.info('Read an exposure with %d assets of %d taxonomies',
                 len(self.exposure.assets), len(self.exposure.taxonomies))
    known = set(self.riskmodel.get_taxonomies())
    missing = self.exposure.taxonomies - known
    if missing:
        raise RuntimeError('The exposure contains the taxonomies %s '
                           'which are not in the risk model' % missing)
    self.sitecol, self.assets_by_site = readinput.get_sitecol_assets(
        oq, self.exposure)
    logging.info('Extracted %d unique sites from the exposure',
                 len(self.sitecol))
def read_exposure_sitecol(self):
    """
    Read the exposure (if any) and then the site collection,
    possibly extracted from the exposure.
    """
    if 'exposure' in self.oqparam.inputs:
        logging.info('Reading the exposure')
        with self.monitor('reading exposure', autoflush=True):
            self.exposure = readinput.get_exposure(self.oqparam)
            self.sitecol, self.assets_by_site = (
                readinput.get_sitecol_assets(self.oqparam, self.exposure))
            self.cost_types = self.exposure.cost_types
            self.taxonomies = numpy.array(
                sorted(self.exposure.taxonomies), '|S100')
        num_assets = self.count_assets()
        mesh = readinput.get_mesh(self.oqparam)
        if self.datastore.parent:
            # fall back to the mesh stored in the parent calculation
            parent_mesh = self.datastore.parent['sitemesh'].value
            if mesh is None:
                mesh = Mesh(parent_mesh['lon'], parent_mesh['lat'])
        if mesh is not None:
            # associate the assets to the given hazard sites
            sites = readinput.get_site_collection(self.oqparam, mesh)
            with self.monitor('assoc_assets_sites'):
                self.sitecol, self.assets_by_site = \
                    self.assoc_assets_sites(sites)
            ok_assets = self.count_assets()
            num_sites = len(self.sitecol)
            logging.warn('Associated %d assets to %d sites, %d discarded',
                         ok_assets, num_sites, num_assets - ok_assets)
        if (self.is_stochastic and self.datastore.parent and
                self.datastore.parent['sitecol'] != self.sitecol):
            logging.warn(
                'The hazard sites are different from the risk sites %s!=%s'
                % (self.datastore.parent['sitecol'], self.sitecol))
    else:  # no exposure
        logging.info('Reading the site collection')
        with self.monitor('reading site collection', autoflush=True):
            self.sitecol = readinput.get_site_collection(self.oqparam)
    # save mesh and asset collection
    self.save_mesh()
    if hasattr(self, 'assets_by_site'):
        self.assetcol = riskinput.build_asset_collection(
            self.assets_by_site, self.oqparam.time_event)
def read_exposure_sitecol(self):
    """
    Read the exposure (if any) and then the site collection,
    possibly extracted from the exposure.
    """
    inputs = self.oqparam.inputs
    if 'exposure' in inputs:
        logging.info('Reading the exposure')
        with self.monitor('reading exposure', autoflush=True):
            self.exposure = readinput.get_exposure(self.oqparam)
            self.sitecol, self.assets_by_site = (
                readinput.get_sitecol_assets(self.oqparam, self.exposure))
            self.cost_types = self.exposure.cost_types
            self.taxonomies = numpy.array(
                sorted(self.exposure.taxonomies), '|S100')
        num_assets = self.count_assets()
        # determine the hazard site collection, if any
        if self.datastore.parent:
            haz_sitecol = self.datastore.parent['sitecol']
        elif 'gmfs' in inputs:
            haz_sitecol = readinput.get_site_collection(self.oqparam)
        # TODO: think about the case hazard_curves in inputs
        else:
            haz_sitecol = None
        if haz_sitecol is not None and haz_sitecol != self.sitecol:
            # associate the assets to the hazard sites
            with self.monitor('assoc_assets_sites'):
                self.sitecol, self.assets_by_site = \
                    self.assoc_assets_sites(haz_sitecol.complete)
            ok_assets = self.count_assets()
            num_sites = len(self.sitecol)
            logging.warn('Associated %d assets to %d sites, %d discarded',
                         ok_assets, num_sites, num_assets - ok_assets)
    elif (self.datastore.parent and 'exposure' in
          self.datastore.parent['oqparam'].inputs):
        logging.info('Re-using the already imported exposure')
    else:  # no exposure
        logging.info('Reading the site collection')
        with self.monitor('reading site collection', autoflush=True):
            self.sitecol = readinput.get_site_collection(self.oqparam)
    # save mesh and asset collection
    self.save_mesh()
    if hasattr(self, 'assets_by_site'):
        self.assetcol = riskinput.build_asset_collection(
            self.assets_by_site, self.oqparam.time_event)
def read_exposure(self):
    """
    Read the exposure, the riskmodel and update the attributes
    .exposure, .sitecol, .assets_by_site, .cost_types, .taxonomies.
    """
    logging.info('Reading the exposure')
    with self.monitor('reading exposure', autoflush=True):
        self.exposure = readinput.get_exposure(self.oqparam)
        asset_refs = numpy.array(self.exposure.asset_refs)
        self.datastore['asset_refs'] = asset_refs
        self.datastore.set_attrs('asset_refs', nbytes=asset_refs.nbytes)
        wanted = set(self.oqparam.all_cost_types)
        exposure_xml = self.oqparam.inputs['exposure']
        # the cost calculator is the last item returned by the lazy reader
        self.cost_calculator = readinput.get_exposure_lazy(
            exposure_xml, wanted)[-1]
        self.sitecol, self.assets_by_site = readinput.get_sitecol_assets(
            self.oqparam, self.exposure)
        if len(self.exposure.cost_types):
            self.cost_types = self.exposure.cost_types
def _info(name, filtersources, weightsources):
    # Print information about `name`, which may be a calculator name,
    # the literal string 'gsims', an XML file, or a job .ini/.zip.
    if name in base.calculators:
        print(textwrap.dedent(base.calculators[name].__doc__.strip()))
    elif name == 'gsims':
        for gs in gsim.get_available_gsims():
            print(gs)
    elif name.endswith('.xml'):
        print(nrml.read(name).to_str())
    elif name.endswith(('.ini', '.zip')):
        oqparam = readinput.get_oqparam(name)
        if 'exposure' in oqparam.inputs:
            expo = readinput.get_exposure(oqparam)
            sitecol, assets_by_site = readinput.get_sitecol_assets(
                oqparam, expo)
        elif filtersources or weightsources:
            sitecol, assets_by_site = readinput.get_site_collection(
                oqparam), []
        else:
            sitecol, assets_by_site = None, []
        if 'source_model_logic_tree' in oqparam.inputs:
            print('Reading the source model...')
            if weightsources:
                sp = source.SourceFilterWeighter
            elif filtersources:
                sp = source.SourceFilter
            else:
                sp = source.BaseSourceProcessor  # do nothing
            csm = readinput.get_composite_source_model(oqparam, sitecol, sp)
            assoc = csm.get_rlzs_assoc()
            dstore = datastore.Fake(vars(oqparam), rlzs_assoc=assoc,
                                    composite_source_model=csm,
                                    sitecol=sitecol)
            _print_info(dstore, filtersources, weightsources)
        if len(assets_by_site):
            assetcol = riskinput.build_asset_collection(assets_by_site)
            # NOTE(review): `groupby` presumably reduces each group to a
            # count here, since `num` is formatted with %d -- verify
            dic = groupby(assetcol, operator.attrgetter('taxonomy'))
            for taxo, num in dic.items():
                print('taxonomy #%d, %d assets' % (taxo, num))
            print('total assets = %d' % len(assetcol))
    else:
        print("No info for '%s'" % name)
def pre_execute(self):
    """
    Read the site collection and the sources.
    """
    oq = self.oqparam
    if 'exposure' not in oq.inputs:
        logging.info('Reading the site collection')
        self.sitecol = readinput.get_site_collection(oq)
    else:
        logging.info('Reading the exposure')
        exposure = readinput.get_exposure(oq)
        self.sitecol, _assets = readinput.get_sitecol_assets(oq, exposure)
    logging.info('Reading the effective source models')
    source_models = list(
        readinput.get_effective_source_models(oq, self.sitecol))
    # flatten the hierarchy source_model > trt_model > source
    self.all_sources = [
        src
        for src_model in source_models
        for trt_model in src_model.trt_models
        for src in trt_model]
    self.job_info = readinput.get_job_info(oq, source_models, self.sitecol)
def _info(name, filtersources, weightsources):
    # Print information about `name`, which may be a calculator name,
    # the literal string 'gsims', an XML file, or a job .ini/.zip.
    if name in base.calculators:
        print(textwrap.dedent(base.calculators[name].__doc__.strip()))
    elif name == 'gsims':
        for gs in gsim.get_available_gsims():
            print(gs)
    elif name.endswith('.xml'):
        print(nrml.read(name).to_str())
    elif name.endswith(('.ini', '.zip')):
        oqparam = readinput.get_oqparam(name)
        if 'exposure' in oqparam.inputs:
            expo = readinput.get_exposure(oqparam)
            sitecol, assets_by_site = readinput.get_sitecol_assets(
                oqparam, expo)
        elif filtersources or weightsources:
            sitecol, assets_by_site = readinput.get_site_collection(
                oqparam), []
        else:
            sitecol, assets_by_site = None, []
        if 'source_model_logic_tree' in oqparam.inputs:
            print('Reading the source model...')
            if weightsources:
                sp = source.SourceFilterWeighter
            elif filtersources:
                sp = source.SourceFilter
            else:
                sp = source.BaseSourceProcessor  # do nothing
            csm = readinput.get_composite_source_model(oqparam, sitecol, sp)
            assoc = csm.get_rlzs_assoc()
            _print_info(
                dict(rlzs_assoc=assoc, oqparam=oqparam,
                     composite_source_model=csm, sitecol=sitecol),
                filtersources, weightsources)
        if len(assets_by_site):
            assetcol = riskinput.build_asset_collection(assets_by_site)
            # NOTE(review): `groupby` presumably reduces each group to a
            # count here, since `num` is formatted with %d -- verify
            dic = groupby(assetcol, operator.attrgetter('taxonomy'))
            for taxo, num in dic.items():
                print('taxonomy #%d, %d assets' % (taxo, num))
            print('total assets = %d' % len(assetcol))
    else:
        print("No info for '%s'" % name)
def _info(name, filtersources, weightsources):
    # Print information about `name`, which may be a calculator name,
    # the literal string "gsims", an XML file, or a job .ini/.zip.
    if name in base.calculators:
        print(textwrap.dedent(base.calculators[name].__doc__.strip()))
    elif name == "gsims":
        for gs in gsim.get_available_gsims():
            print(gs)
    elif name.endswith(".xml"):
        print(nrml.read(name).to_str())
    elif name.endswith((".ini", ".zip")):
        oqparam = readinput.get_oqparam(name)
        if "exposure" in oqparam.inputs:
            expo = readinput.get_exposure(oqparam)
            sitecol, assets_by_site = readinput.get_sitecol_assets(oqparam, expo)
        elif filtersources or weightsources:
            sitecol, assets_by_site = readinput.get_site_collection(oqparam), []
        else:
            sitecol, assets_by_site = None, []
        if "source_model_logic_tree" in oqparam.inputs:
            print("Reading the source model...")
            if weightsources:
                sp = source.SourceFilterWeighter
            elif filtersources:
                sp = source.SourceFilter
            else:
                sp = source.BaseSourceProcessor  # do nothing
            csm = readinput.get_composite_source_model(oqparam, sitecol, sp)
            assoc = csm.get_rlzs_assoc()
            dstore = datastore.Fake(vars(oqparam), rlzs_assoc=assoc,
                                    composite_source_model=csm,
                                    sitecol=sitecol)
            _print_info(dstore, filtersources, weightsources)
        if len(assets_by_site):
            assetcol = riskinput.build_asset_collection(assets_by_site)
            # NOTE(review): `groupby` presumably reduces each group to a
            # count here, since `num` is formatted with %d -- verify
            dic = groupby(assetcol, operator.attrgetter("taxonomy"))
            for taxo, num in dic.items():
                print("taxonomy #%d, %d assets" % (taxo, num))
            print("total assets = %d" % len(assetcol))
    else:
        print("No info for '%s'" % name)
def _info(name, filtersources, weightsources):
    # Print information about `name`, which may be a calculator name,
    # the literal string 'gsims', an XML file, or a job .ini/.zip.
    if name in base.calculators:
        print(textwrap.dedent(base.calculators[name].__doc__.strip()))
    elif name == 'gsims':
        for gs in gsim.get_available_gsims():
            print(gs)
    elif name.endswith('.xml'):
        print(nrml.read(name).to_str())
    elif name.endswith(('.ini', '.zip')):
        oqparam = readinput.get_oqparam(name)
        if 'exposure' in oqparam.inputs:
            expo = readinput.get_exposure(oqparam)
            sitecol, assets_by_site = readinput.get_sitecol_assets(
                oqparam, expo)
        elif filtersources or weightsources:
            sitecol, assets_by_site = readinput.get_site_collection(
                oqparam), []
        else:
            sitecol, assets_by_site = None, []
        if 'source_model_logic_tree' in oqparam.inputs:
            print('Reading the source model...')
            if weightsources:
                sp = source.SourceFilterWeighter
            elif filtersources:
                sp = source.SourceFilter
            else:
                sp = source.BaseSourceProcessor  # do nothing
            csm = readinput.get_composite_source_model(oqparam, sitecol, sp)
            assoc = csm.get_rlzs_assoc()
            _print_info(assoc, oqparam, csm, sitecol,
                        filtersources, weightsources)
        if len(assets_by_site):
            print('assets = %d'
                  % sum(len(assets) for assets in assets_by_site))
    else:
        print("No info for '%s'" % name)
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # You should have received a copy of the GNU Affero General Public License # along with OpenQuake. If not, see <http://www.gnu.org/licenses/>. """ This is an example of script which is useful if you want to play with the RiskInput objects. You can enable the pdb and see what is inside the objects. """ from __future__ import print_function import sys from openquake.commonlib import readinput from openquake.commonlib.calculators.calc import calc_gmfs if __name__ == '__main__': job_ini = sys.argv[1:] o = readinput.get_oqparam(job_ini) exposure = readinput.get_exposure(o) sitecol, assets_by_site = readinput.get_sitecol_assets(o, exposure) risk_model = readinput.get_risk_model(o) gmfs_by_imt = calc_gmfs(o, sitecol) for imt in gmfs_by_imt: ri = risk_model.build_input(imt, gmfs_by_imt[imt], assets_by_site) print(ri) #for out in risk_model.gen_outputs([ri]): # print out # import pdb; pdb.set_trace()
def setUpClass(cls):
    """Load the hazard+risk job once and prepare sitecol, assets, riskmodel."""
    cls.oqparam = readinput.get_oqparam(
        'job_haz.ini,job_risk.ini', pkg=case_2)
    exposure = readinput.get_exposure(cls.oqparam)
    cls.sitecol, cls.assets_by_site = readinput.get_sitecol_assets(
        cls.oqparam, exposure)
    cls.riskmodel = readinput.get_risk_model(cls.oqparam)
# GNU General Public License for more details. # You should have received a copy of the GNU Affero General Public License # along with OpenQuake. If not, see <http://www.gnu.org/licenses/>. """ This is an example of script which is useful if you want to play with the RiskInput objects. You can enable the pdb and see what is inside the objects. """ from __future__ import print_function import sys from openquake.commonlib import readinput from openquake.calculators.calc import calc_gmfs if __name__ == '__main__': job_ini = sys.argv[1:] o = readinput.get_oqparam(job_ini) exposure = readinput.get_exposure(o) sitecol, assets_by_site = readinput.get_sitecol_assets(o, exposure) risk_model = readinput.get_risk_model(o) gmfs_by_imt = calc_gmfs(o, sitecol) for imt in gmfs_by_imt: ri = risk_model.build_input(imt, gmfs_by_imt[imt], assets_by_site) print(ri) #for out in risk_model.gen_outputs([ri]): # print out # import pdb; pdb.set_trace()
def expo2csv(job_ini):
    """
    Convert an exposure in XML format into CSV format

    :param job_ini: path to a job.ini file referencing the exposure
    """
    oq = readinput.get_oqparam(job_ini)
    exposure = readinput.get_exposure(oq)
    rows = []
    # build the CSV header: id/lon/lat/number, optional area, then
    # per-cost-type columns, occupancy periods and tag names
    header = ['id', 'lon', 'lat', 'number']
    area = exposure.area['type'] != '?'
    if area:
        header.append('area')
    for costname in exposure.cost_types['name']:
        if costname != 'occupants':
            header.append(costname)
            if exposure.deductible_is_absolute is not None:
                header.append(costname + '-deductible')
            if exposure.insurance_limit_is_absolute is not None:
                header.append(costname + '-insured_limit')
            if exposure.retrofitted:
                header.append('retrofitted')
    header.extend(exposure.occupancy_periods)
    header.extend(exposure.tagcol.tagnames)
    # one row per asset, mirroring the header layout
    for asset, asset_ref in zip(exposure.assets, exposure.asset_refs):
        row = [asset_ref.decode('utf8'), asset.location[0],
               asset.location[1], asset.number]
        if area:
            row.append(asset.area)
        for costname in exposure.cost_types['name']:
            if costname != 'occupants':
                row.append(asset.values[costname])
                if exposure.deductible_is_absolute is not None:
                    row.append(asset.deductibles[costname])
                if exposure.insurance_limit_is_absolute is not None:
                    row.append(asset.insurance_limits[costname])
                if exposure.retrofitted:
                    row.append(asset._retrofitted)
        for time_event in exposure.occupancy_periods:
            row.append(asset.values['occupants_' + time_event])
        for tagname, tagidx in zip(exposure.tagcol.tagnames, asset.tagidxs):
            # translate the tag index back into the tag string
            tags = getattr(exposure.tagcol, tagname)
            row.append(tags[tagidx])
        rows.append(row)
    with performance.Monitor('expo2csv') as mon:
        # save exposure data as csv
        csvname = oq.inputs['exposure'].replace('.xml', '.csv')
        print('Saving %s' % csvname)
        with codecs.open(csvname, 'wb', encoding='utf8') as f:
            writer = csv.writer(f)
            writer.writerow(header)
            for row in rows:
                writer.writerow(row)
        # save exposure header as xml, pointing at the csv just written
        head = nrml.read(oq.inputs['exposure'], stop='assets')
        xmlname = oq.inputs['exposure'].replace('.xml', '-header.xml')
        print('Saving %s' % xmlname)
        head[0].assets.text = os.path.basename(csvname)
        with open(xmlname, 'wb') as f:
            nrml.write(head, f)
    print(mon)