def read_shakemap(self, haz_sitecol, assetcol):
    """
    Enabled only if there is a shakemap_id parameter in the job.ini.
    Download, unzip, parse USGS shakemap files and build a
    corresponding set of GMFs which are then filtered with the hazard
    site collection and stored in the datastore.
    """
    oq = self.oqparam
    E = oq.number_of_ground_motion_fields
    oq.risk_imtls = oq.imtls or self.datastore.parent['oqparam'].imtls
    logging.info('Getting/reducing shakemap')
    with self.monitor('getting/reducing shakemap'):
        # for instance for the test case_shakemap the haz_sitecol
        # has sids in range(0, 26) while sitecol.sids is
        # [8, 9, 10, 11, 13, 15, 16, 17, 18];
        # the total assetcol has 26 assets on the total sites
        # and the reduced assetcol has 9 assets on the reduced sites
        smap = oq.shakemap_id if oq.shakemap_id else numpy.load(
            oq.inputs['shakemap'])
        sitecol, shakemap, discarded = get_sitecol_shakemap(
            smap, oq.imtls, haz_sitecol,
            oq.asset_hazard_distance['default'], oq.discard_assets)
        if len(discarded):
            self.datastore['discarded'] = discarded
        assetcol.reduce_also(sitecol)
    logging.info('Building GMFs')
    with self.monitor('building/saving GMFs'):
        imts, gmfs = to_gmfs(
            shakemap, oq.spatial_correlation, oq.cross_correlation,
            oq.site_effects, oq.truncation_level, E, oq.random_seed,
            oq.imtls)
        save_gmf_data(self.datastore, sitecol, gmfs, imts)
    return sitecol, assetcol
def read_shakemap(self, haz_sitecol, assetcol):
    """
    Enabled only if there is a shakemap_id parameter in the job.ini.
    Download, unzip, parse USGS shakemap files and build a
    corresponding set of GMFs which are then filtered with the hazard
    site collection and stored in the datastore.
    """
    oq = self.oqparam
    E = oq.number_of_ground_motion_fields
    oq.risk_imtls = oq.imtls or self.datastore.parent['oqparam'].imtls
    extra = self.riskmodel.get_extra_imts(oq.risk_imtls)
    if extra:
        logging.warning('There are risk functions for not available IMTs '
                        'which will be ignored: %s' % extra)
    logging.info('Getting/reducing shakemap')
    with self.monitor('getting/reducing shakemap'):
        smap = oq.shakemap_id if oq.shakemap_id else numpy.load(
            oq.inputs['shakemap'])
        sitecol, shakemap = get_sitecol_shakemap(
            smap, oq.imtls, haz_sitecol,
            oq.asset_hazard_distance or oq.region_grid_spacing)
        assetcol = assetcol.reduce_also(sitecol)
    logging.info('Building GMFs')
    with self.monitor('building/saving GMFs'):
        gmfs = to_gmfs(shakemap, oq.cross_correlation, oq.site_effects,
                       oq.truncation_level, E, oq.random_seed, oq.imtls)
        save_gmf_data(self.datastore, sitecol, gmfs)
    events = numpy.zeros(E, readinput.stored_event_dt)
    events['eid'] = numpy.arange(E, dtype=U64)
    self.datastore['events'] = events
    return sitecol, assetcol
def read_shakemap(self):
    """
    Enabled only if there is a shakemap_id parameter in the job.ini.
    Download, unzip, parse USGS shakemap files and build a
    corresponding set of GMFs which are then filtered with the hazard
    site collection and stored in the datastore.
    """
    oq = self.oqparam
    E = oq.number_of_ground_motion_fields
    haz_sitecol = self.datastore.parent['sitecol']
    logging.info('Getting/reducing shakemap')
    with self.monitor('getting/reducing shakemap'):
        smap = oq.shakemap_id if oq.shakemap_id else numpy.load(
            oq.inputs['shakemap'])
        self.sitecol, shakemap = get_sitecol_shakemap(
            smap, haz_sitecol,
            oq.asset_hazard_distance or oq.region_grid_spacing)
    logging.info('Building GMFs')
    with self.monitor('building/saving GMFs'):
        gmfs = to_gmfs(shakemap, oq.site_effects, oq.truncation_level,
                       E, oq.random_seed)
        save_gmf_data(self.datastore, self.sitecol, gmfs)
    events = numpy.zeros(E, readinput.stored_event_dt)
    events['eid'] = numpy.arange(E, dtype=U64)
    self.datastore['events'] = events
def read_shakemap(self, haz_sitecol, assetcol):
    """
    Enabled only if there is a shakemap_id parameter in the job.ini.
    Download, unzip, parse USGS shakemap files and build a
    corresponding set of GMFs which are then filtered with the hazard
    site collection and stored in the datastore.
    """
    oq = self.oqparam
    E = oq.number_of_ground_motion_fields
    oq.risk_imtls = oq.imtls or self.datastore.parent['oqparam'].imtls
    extra = self.riskmodel.get_extra_imts(oq.risk_imtls)
    if extra:
        logging.warning('There are risk functions for not available IMTs '
                        'which will be ignored: %s' % extra)
    logging.info('Getting/reducing shakemap')
    with self.monitor('getting/reducing shakemap'):
        smap = oq.shakemap_id if oq.shakemap_id else numpy.load(
            oq.inputs['shakemap'])
        sitecol, shakemap, discarded = get_sitecol_shakemap(
            smap, oq.imtls, haz_sitecol,
            oq.asset_hazard_distance['default'], oq.discard_assets)
        if len(discarded):
            self.datastore['discarded'] = discarded
        assetcol = assetcol.reduce_also(sitecol)
    logging.info('Building GMFs')
    with self.monitor('building/saving GMFs'):
        imts, gmfs = to_gmfs(
            shakemap, oq.spatial_correlation, oq.cross_correlation,
            oq.site_effects, oq.truncation_level, E, oq.random_seed,
            oq.imtls)
        save_gmf_data(self.datastore, sitecol, gmfs, imts)
    return sitecol, assetcol
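# In the variants above, the line
#     smap = oq.shakemap_id if oq.shakemap_id else numpy.load(oq.inputs['shakemap'])
# accepts either a USGS shakemap id (which triggers download and parsing) or
# the path to a structured array previously dumped with numpy.save. The
# self-contained sketch below illustrates only that second input path with
# plain numpy; the field names are hypothetical stand-ins and do not match
# the engine's real shakemap dtype.
import numpy

toy_dt = numpy.dtype([('lon', float), ('lat', float),
                      ('val_PGA', float), ('std_PGA', float)])
toy = numpy.array([(84.0, 28.0, 0.25, 0.6),
                   (84.1, 28.0, 0.22, 0.6)], toy_dt)
numpy.save('/tmp/shakemap.npy', toy)    # what oq.inputs['shakemap'] would point to
smap = numpy.load('/tmp/shakemap.npy')  # the array then passed to get_sitecol_shakemap
print(smap['val_PGA'])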
def test_gmfs(self):
    f1 = os.path.join(CDIR, 'ghorka_grid.xml')
    f2 = os.path.join(CDIR, 'ghorka_uncertainty.xml')
    array = get_shakemap_array(f1, f2)
    sitecol, shakemap = get_sitecol_shakemap(array, imt_dt.names)
    n = 4  # number of sites
    self.assertEqual(len(sitecol), n)
    gmf_by_imt = mean_gmf(shakemap)
    aae(gmf_by_imt, [0.0047045, 0.0184625, 0.0346171, 0.0175625])
def test_gmfs(self):
    f1 = os.path.join(CDIR, 'ghorka_grid.xml')
    f2 = os.path.join(CDIR, 'ghorka_uncertainty.xml')
    array = get_shakemap_array(f1, f2)
    sitecol, shakemap = get_sitecol_shakemap(array, imt_dt.names)
    n = 4  # number of sites
    self.assertEqual(len(sitecol), n)
    gmf_by_imt = mean_gmf(shakemap)
    aae(gmf_by_imt, [0.0058806, 0.0230781, 0.0432714, 0.0219532])
def test_gmfs(self):
    f1 = os.path.join(CDIR, 'ghorka_grid.xml')
    f2 = os.path.join(CDIR, 'ghorka_uncertainty.xml')
    array = get_shakemap_array(f1, f2)
    sitecol, shakemap = get_sitecol_shakemap(array, imt_dt.names)
    n = 4  # number of sites
    self.assertEqual(len(sitecol), n)
    gmf_by_imt, _ = mean_std(shakemap, site_effects=True)
    aae(gmf_by_imt, [[0.005391, 0.0223217, 0.0399937, 0.0183143],
                     [0.0061, 0.025619, 0.0487997, 0.0225788],
                     [0.0060717, 0.0253156, 0.0478506, 0.0219296],
                     [0.007087, 0.0298716, 0.0622145, 0.0290721]])
def test_gmfs(self):
    f1 = os.path.join(CDIR, 'ghorka_grid.xml')
    f2 = os.path.join(CDIR, 'ghorka_uncertainty.xml')
    array = get_shakemap_array(f1, f2)
    sitecol, shakemap = get_sitecol_shakemap(array, imt_dt.names)
    n = 4  # number of sites
    self.assertEqual(len(sitecol), n)
    gmf_by_imt, _ = mean_std(shakemap, site_effects=True)
    aae(gmf_by_imt, [[0.0062040, 0.0262588, 0.0497097, 0.0239060],
                     [0.0069831, 0.0298023, 0.0602146, 0.0294691],
                     [0.0069507, 0.0296108, 0.0594237, 0.0286251],
                     [0.0080306, 0.0341960, 0.0762080, 0.0379360]])
def test_from_files(self):
    # files provided by Vitor Silva, without site amplification
    f1 = os.path.join(CDIR, 'test_shaking.xml')
    f2 = os.path.join(CDIR, 'test_uncertainty.xml')
    array = get_shakemap_array(f1, f2)
    sitecol, shakemap = get_sitecol_shakemap(array, imt_dt.names)
    n = 4  # number of sites
    self.assertEqual(len(sitecol), n)
    gmf_by_imt, std_by_imt = mean_std(shakemap, site_effects=False)
    # PGA, SA(0.3), SA(1.0), SA(3.0)
    aae(gmf_by_imt, [[0.0975815, 0.2442196, 0.0286512, 0.6358019],
                     [0.2023841, 0.5013746, 0.0297236, 0.6544367],
                     [0.3010831, 0.5986038, 0.0305651, 0.6575208],
                     [0.3868380, 0.9331248, 0.0296789, 0.6393688]])
    aae(std_by_imt, [[0.5922380, 0.6723980, 0.6325073, 0.6445988],
                     [0.6077153, 0.6661571, 0.6296381, 0.668559],
                     [0.6146356, 0.6748830, 0.6714424, 0.6613612],
                     [0.5815353, 0.6460007, 0.6491335, 0.6603457]])
def test_from_files(self):
    # files provided by Vitor Silva, without site amplification
    f1 = os.path.join(CDIR, 'test_shaking.xml')
    f2 = os.path.join(CDIR, 'test_uncertainty.xml')
    array = get_shakemap_array(f1, f2)
    sitecol, shakemap = get_sitecol_shakemap(array, imt_dt.names)
    n = 4  # number of sites
    self.assertEqual(len(sitecol), n)
    gmf_by_imt, std_by_imt = mean_std(shakemap, site_effects=False)
    # PGA, SA(0.3), SA(1.0), SA(3.0)
    aae(gmf_by_imt, [[0.1168263, 0.3056736, 0.0356231, 0.7957914],
                     [0.2422977, 0.6275377, 0.0369565, 0.8191154],
                     [0.3604619, 0.7492331, 0.0380028, 0.8229756],
                     [0.4631292, 1.1679310, 0.0369009, 0.8002559]])
    aae(std_by_imt, [[0.5922380, 0.6723980, 0.6325073, 0.6445988],
                     [0.6077153, 0.6661571, 0.6296381, 0.668559],
                     [0.6146356, 0.6748830, 0.6714424, 0.6613612],
                     [0.5815353, 0.6460007, 0.6491335, 0.6603457]])
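# The test snippets above rely on module-level scaffolding that is not shown
# here. A minimal sketch of the assumed setup follows: aae as an alias for
# numpy.testing.assert_almost_equal and CDIR as the directory holding the XML
# fixtures are conventional guesses, while imt_dt, mean_gmf, mean_std,
# get_shakemap_array and get_sitecol_shakemap come from the engine's shakemap
# modules (their exact import paths vary between releases, so they are left
# as a comment rather than invented here).
import os
from numpy.testing import assert_almost_equal as aae  # assumed alias

# from openquake.hazardlib... import (  # assumed location of the helpers
#     imt_dt, mean_gmf, mean_std, get_shakemap_array, get_sitecol_shakemap)

CDIR = os.path.dirname(__file__)  # directory containing the *.xml test files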
def read_shakemap(self, haz_sitecol, assetcol):
    """
    Enabled only if there is a shakemap_id parameter in the job.ini.
    Download, unzip, parse USGS shakemap files and build a
    corresponding set of GMFs which are then filtered with the hazard
    site collection and stored in the datastore.
    """
    oq = self.oqparam
    E = oq.number_of_ground_motion_fields
    oq.risk_imtls = oq.imtls or self.datastore.parent['oqparam'].imtls
    logging.info('Getting/reducing shakemap')
    with self.monitor('getting/reducing shakemap'):
        # for instance for the test case_shakemap the haz_sitecol
        # has sids in range(0, 26) while sitecol.sids is
        # [8, 9, 10, 11, 13, 15, 16, 17, 18];
        # the total assetcol has 26 assets on the total sites
        # and the reduced assetcol has 9 assets on the reduced sites
        smap = oq.shakemap_id if oq.shakemap_id else numpy.load(
            oq.inputs['shakemap'])
        sitecol, shakemap, discarded = get_sitecol_shakemap(
            smap, oq.imtls, haz_sitecol,
            oq.asset_hazard_distance['default'], oq.discard_assets)
        if len(discarded):
            self.datastore['discarded'] = discarded
        assetcol.reduce_also(sitecol)
    logging.info('Building GMFs')
    with self.monitor('building/saving GMFs'):
        imts, gmfs = to_gmfs(
            shakemap, oq.spatial_correlation, oq.cross_correlation,
            oq.site_effects, oq.truncation_level, E, oq.random_seed,
            oq.imtls)
        N, E, M = gmfs.shape
        events = numpy.zeros(E, rupture.events_dt)
        events['id'] = numpy.arange(E, dtype=U32)
        self.datastore['events'] = events
        # convert into an array of dtype gmv_data_dt
        lst = [(sitecol.sids[s], ei) + tuple(gmfs[s, ei])
               for s in numpy.arange(N, dtype=U32)
               for ei, event in enumerate(events)]
        oq.hazard_imtls = {imt: [0] for imt in imts}
        data = numpy.array(lst, oq.gmf_data_dt())
        create_gmf_data(self.datastore, len(imts), data=data)
    return sitecol, assetcol
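# The list comprehension in the last variant flattens the (N, E, M) array of
# ground-motion values into one record per (site, event) pair before it is
# stored via create_gmf_data. The self-contained toy below reproduces that
# reshaping with plain numpy; the record dtype is a simplified stand-in for
# the engine's oq.gmf_data_dt().
import numpy

N, E, M = 2, 3, 2                         # sites, events, IMTs (toy sizes)
gmfs = numpy.arange(N * E * M, dtype=float).reshape(N, E, M)
sids = numpy.array([8, 9], numpy.uint32)  # site ids, as in sitecol.sids

toy_dt = numpy.dtype([('sid', numpy.uint32), ('eid', numpy.uint32),
                      ('gmv_PGA', float), ('gmv_SA03', float)])
rows = [(sids[s], ei) + tuple(gmfs[s, ei])
        for s in range(N) for ei in range(E)]
data = numpy.array(rows, toy_dt)
print(data)  # N * E records of (sid, eid, gmv_PGA, gmv_SA03)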