def test_read_with_cent(self):
    """Test read_footprints when an explicit Centroids object is supplied."""
    # centroids come from an Excel sheet with a non-default layout
    excel_vars = copy.deepcopy(DEF_VAR_EXCEL)
    excel_vars['sheet_name'] = 'fp_centroids-test'
    excel_vars['col_name']['region_id'] = 'iso_n3'
    centroids = Centroids()
    centroids.read_excel(
        os.path.join(DATA_DIR, 'fp_centroids-test.xls'),
        var_names=excel_vars)

    storms = StormEurope()
    storms.read_footprints(TEST_NCS, centroids=centroids)

    # two events on the 9944-point grid; 6401 centroids carry a region id
    self.assertEqual(storms.intensity.shape, (2, 9944))
    self.assertEqual(
        np.count_nonzero(~np.isnan(storms.centroids.region_id)),
        6401)
def test_set_ssi(self):
    """Test set_ssi with both dawkins and wisc_gust methodology."""
    haz = StormEurope()
    haz.read_footprints(TEST_NCS)

    # dawkins methodology
    haz.set_ssi(method='dawkins')
    expected_dawkins = np.asarray([1.44573572e+09, 6.16173724e+08])
    self.assertTrue(np.allclose(haz.ssi, expected_dawkins))

    # wisc_gust methodology
    haz.set_ssi(method='wisc_gust')
    expected_gust = np.asarray([1.42124571e+09, 5.86870673e+08])
    self.assertTrue(np.allclose(haz.ssi, expected_gust))

    # custom threshold, not restricted to land points
    haz.set_ssi(threshold=20, on_land=False)
    expected_custom = np.asarray([2.96582030e+09, 1.23980294e+09])
    self.assertTrue(np.allclose(haz.ssi, expected_custom))
def test_generate_prob_storms(self):
    """Test the probabilistic storm generator; calls _hist2prob as well
    as Centroids.set_region_id()"""
    hist = StormEurope()
    hist.read_footprints(TEST_NCS)
    prob = hist.generate_prob_storms()

    # here, we don't rasterise; we check if the centroids lie in a
    # polygon. that is to say, it's not the majority of a raster pixel,
    # but the centroid's location that is decisive
    self.assertEqual(np.count_nonzero(hist.centroids.region_id), 6190)
    self.assertEqual(prob.size, 60)
    self.assertEqual(np.count_nonzero(prob.orig), 2)
    self.assertEqual(prob.centroids.size, 3054)
    self.assertIsInstance(prob.intensity, sparse.csr.csr_matrix)
def test_read_with_ref(self):
    """Test read_footprints while passing in a reference raster."""
    storms = StormEurope()
    storms.read_footprints(TEST_NCS, ref_raster=TEST_NCS[1])

    self.assertEqual(storms.tag.haz_type, 'WS')
    self.assertEqual(storms.units, 'm/s')
    # two events, both dated 26 Dec 1999; first event is storm Lothar
    self.assertEqual(storms.event_id.size, 2)
    self.assertEqual(storms.date.size, 2)
    self.assertEqual(dt.datetime.fromordinal(storms.date[0]).year, 1999)
    self.assertEqual(dt.datetime.fromordinal(storms.date[0]).month, 12)
    self.assertEqual(dt.datetime.fromordinal(storms.date[0]).day, 26)
    self.assertEqual(storms.event_id[0], 1)
    self.assertEqual(storms.event_name[0], 'Lothar')
    # assertIsInstance instead of assertTrue(isinstance(...)): consistent
    # with the sibling tests and gives a clearer failure message
    self.assertIsInstance(storms.intensity, sparse.csr.csr_matrix)
    self.assertIsInstance(storms.fraction, sparse.csr.csr_matrix)
    self.assertEqual(storms.intensity.shape, (2, 9944))
    self.assertEqual(storms.fraction.shape, (2, 9944))
def test_read_footprints(self):
    """Test read_footprints function, using two small test files"""
    storms = StormEurope()
    storms.read_footprints(TEST_NCS, description='test_description')

    self.assertEqual(storms.tag.haz_type, 'WS')
    self.assertEqual(storms.units, 'm/s')
    # two events; the first one is storm Lothar, 26 Dec 1999
    self.assertEqual(storms.event_id.size, 2)
    self.assertEqual(storms.date.size, 2)
    first_day = dt.datetime.fromordinal(storms.date[0])
    self.assertEqual(first_day.year, 1999)
    self.assertEqual(first_day.month, 12)
    self.assertEqual(first_day.day, 26)
    self.assertEqual(storms.event_id[0], 1)
    self.assertEqual(storms.event_name[0], 'Lothar')
    # intensity and fraction are sparse matrices on the 9944-point grid
    self.assertIsInstance(storms.intensity, sparse.csr.csr_matrix)
    self.assertIsInstance(storms.fraction, sparse.csr.csr_matrix)
    self.assertEqual(storms.intensity.shape, (2, 9944))
    self.assertEqual(storms.fraction.shape, (2, 9944))
def test_generate_prob_storms(self):
    """Test the probabilistic storm generator; calls _hist2prob as well
    as Centroids.set_region_id()"""
    hist = StormEurope()
    hist.read_footprints(WS_DEMO_NC)
    prob = hist.generate_prob_storms()

    # here, we don't rasterise; we check if the centroids lie in a
    # polygon. that is to say, it's not the majority of a raster pixel,
    # but the centroid's location that is decisive
    self.assertEqual(np.count_nonzero(hist.centroids.region_id), 6402)
    self.assertEqual(prob.size, 60)
    # each synthetic event carries a 1/330 frequency; totals are preserved
    self.assertTrue(
        np.allclose((1 / prob.frequency).astype(int), 330))
    self.assertAlmostEqual(hist.frequency.sum(), prob.frequency.sum())
    self.assertEqual(np.count_nonzero(prob.orig), 2)
    self.assertEqual(prob.centroids.size, 3054)
    self.assertIsInstance(prob.intensity, sparse.csr.csr_matrix)
def test_Forecast_init_raise(self):
    """Test that the Forecast constructor raises on invalid input."""
    # hazard with several event dates
    storms = StormEurope()
    storms.read_footprints(WS_DEMO_NC, description='test_description')
    # minimal three-point exposure in Switzerland (region id 756)
    frame = {
        'latitude': np.array([1, 2, 3]),
        'longitude': np.array([1, 2, 3]),
    }
    frame['value'] = np.ones_like(frame['latitude']) * 100000
    frame['deductible'] = np.zeros_like(frame['latitude'])
    frame[INDICATOR_IMPF + 'WS'] = np.ones_like(frame['latitude'])
    frame['region_id'] = np.ones_like(frame['latitude'], dtype=int) * 756
    expo = Exposures(gpd.GeoDataFrame(data=frame))
    # empty vulnerability set; the constructor must already reject the input
    impf_set = ImpactFuncSet()
    # create and calculate Forecast
    with self.assertRaises(ValueError):
        Forecast({dt.datetime(2018, 1, 1): storms}, expo, impf_set)
def test_Forecast_calc_properties(self):
    """Test calc and property functions from the Forecast class."""
    # hazard: one COSMO-E run (2018-01-01) for the 2018-01-03 event
    hazard = StormEurope()
    hazard.read_cosmoe_file(
        HAZ_DIR.joinpath('storm_europe_cosmoe_forecast_vmax_testfile.nc'),
        run_datetime=dt.datetime(2018, 1, 1),
        event_date=dt.datetime(2018, 1, 3))
    # exposure: uniform values on the hazard centroids, region id 756 (CHE)
    frame = {
        'latitude': hazard.centroids.lat,
        'longitude': hazard.centroids.lon,
    }
    frame['value'] = np.ones_like(frame['latitude']) * 100000
    frame['deductible'] = np.zeros_like(frame['latitude'])
    frame[INDICATOR_IMPF + 'WS'] = np.ones_like(frame['latitude'])
    frame['region_id'] = np.ones_like(frame['latitude'], dtype=int) * 756
    exposure = Exposures(gpd.GeoDataFrame(data=frame))
    # vulnerability: Welker windstorm impact function
    impf = ImpfStormEurope()
    impf.set_welker()
    impf_set = ImpactFuncSet()
    impf_set.append(impf)
    # create and calculate Forecast
    forecast = Forecast({dt.datetime(2018, 1, 1): hazard},
                        exposure,
                        impf_set)
    forecast.calc()
    # check derived properties and summary values
    self.assertEqual(len(forecast.run_datetime), 1)
    self.assertEqual(forecast.run_datetime[0], dt.datetime(2018, 1, 1))
    self.assertEqual(forecast.event_date, dt.datetime(2018, 1, 3))
    self.assertEqual(forecast.lead_time().days, 2)
    self.assertEqual(forecast.summary_str(),
                     'WS_NWP_run2018010100_event20180103_Switzerland')
    self.assertAlmostEqual(forecast.ai_agg(), 26.347, places=1)
    self.assertAlmostEqual(forecast.ei_exp()[1], 7.941, places=1)
    self.assertEqual(len(forecast.hazard), 1)
    self.assertIsInstance(forecast.hazard[0], StormEurope)
    self.assertIsInstance(forecast.exposure, Exposures)
    self.assertIsInstance(forecast.vulnerability, ImpactFuncSet)
def test_icon_read(self):
    """test reading from icon grib"""
    haz = StormEurope()
    haz.read_icon_grib(dt.datetime(2021, 1, 28),
                       dt.datetime(2021, 1, 28),
                       model_name='test',
                       grib_dir=CONFIG.hazard.test_data.str(),
                       delete_raw_data=False)

    self.assertEqual(haz.tag.haz_type, 'WS')
    self.assertEqual(haz.units, 'm/s')
    # 40 ensemble members, all dated 2021-01-28
    self.assertEqual(haz.event_id.size, 40)
    self.assertEqual(haz.date.size, 40)
    first_day = dt.datetime.fromordinal(haz.date[0])
    self.assertEqual(first_day.year, 2021)
    self.assertEqual(first_day.month, 1)
    self.assertEqual(first_day.day, 28)
    self.assertEqual(haz.event_id[-1], 40)
    self.assertEqual(haz.event_name[-1], '2021-01-28_ens40')
    # sparse matrices over the 49-point test grid
    self.assertIsInstance(haz.intensity, sparse.csr.csr_matrix)
    self.assertIsInstance(haz.fraction, sparse.csr.csr_matrix)
    self.assertEqual(haz.intensity.shape, (40, 49))
    self.assertAlmostEqual(haz.intensity.max(), 17.276321, places=3)
    self.assertEqual(haz.fraction.shape, (40, 49))

    # mismatching run datetime must warn once and raise
    logger = logging.getLogger('climada.hazard.storm_europe')
    with mock.patch.object(logger, 'warning') as mock_logger:
        with self.assertRaises(ValueError):
            haz.read_icon_grib(dt.datetime(2021, 1, 28, 6),
                               dt.datetime(2021, 1, 28),
                               model_name='test',
                               grib_dir=CONFIG.hazard.test_data.str(),
                               delete_raw_data=False)
    mock_logger.assert_called_once()
def test_from_footprints(self):
    """Test from_footprints constructor with one and with two small test files."""
    # single footprint file: exactly one event (Lothar, 26 Dec 1999)
    storms = StormEurope.from_footprints(WS_DEMO_NC[0], description='test_description')
    self.assertEqual(storms.tag.haz_type, 'WS')
    self.assertEqual(storms.units, 'm/s')
    self.assertEqual(storms.event_id.size, 1)
    self.assertEqual(storms.date.size, 1)
    self.assertEqual(dt.datetime.fromordinal(storms.date[0]).year, 1999)
    self.assertEqual(dt.datetime.fromordinal(storms.date[0]).month, 12)
    self.assertEqual(dt.datetime.fromordinal(storms.date[0]).day, 26)
    self.assertEqual(storms.event_id[0], 1)
    self.assertEqual(storms.event_name[0], 'Lothar')
    self.assertIsInstance(storms.intensity, sparse.csr.csr_matrix)
    self.assertIsInstance(storms.fraction, sparse.csr.csr_matrix)
    self.assertEqual(storms.intensity.shape, (1, 9944))
    self.assertEqual(storms.fraction.shape, (1, 9944))
    self.assertEqual(storms.frequency[0], 1.0)

    # two footprint files: two events. (This marker was a bare string
    # literal -- a no-op expression statement -- in the original; it is
    # kept as a comment.)
    storms = StormEurope.from_footprints(WS_DEMO_NC, description='test_description')
    self.assertEqual(storms.tag.haz_type, 'WS')
    self.assertEqual(storms.units, 'm/s')
    self.assertEqual(storms.event_id.size, 2)
    self.assertEqual(storms.date.size, 2)
    self.assertEqual(dt.datetime.fromordinal(storms.date[0]).year, 1999)
    self.assertEqual(dt.datetime.fromordinal(storms.date[0]).month, 12)
    self.assertEqual(dt.datetime.fromordinal(storms.date[0]).day, 26)
    self.assertEqual(storms.event_id[0], 1)
    self.assertEqual(storms.event_name[0], 'Lothar')
    self.assertIsInstance(storms.intensity, sparse.csr.csr_matrix)
    self.assertIsInstance(storms.fraction, sparse.csr.csr_matrix)
    self.assertEqual(storms.intensity.shape, (2, 9944))
    self.assertEqual(storms.fraction.shape, (2, 9944))
def test_icon_read(self):
    """test reading from icon grib"""
    # for this test the forecast file is supposed to be already downloaded from the dwd
    # another download would fail because the files are available for 24h only
    # instead, we download it as a test dataset through the climada data api
    client = Client()
    dataset = client.get_dataset_info(name='test_storm_europe_icon_2021012800',
                                      status='test_dataset')
    download_dir, _ = client.download_dataset(dataset)
    haz = StormEurope.from_icon_grib(
        dt.datetime(2021, 1, 28),
        dt.datetime(2021, 1, 28),
        model_name='test',
        grib_dir=download_dir,
        delete_raw_data=False)

    self.assertEqual(haz.tag.haz_type, 'WS')
    self.assertEqual(haz.units, 'm/s')
    # 40 ensemble members, all dated 2021-01-28
    self.assertEqual(haz.event_id.size, 40)
    self.assertEqual(haz.date.size, 40)
    first_day = dt.datetime.fromordinal(haz.date[0])
    self.assertEqual(first_day.year, 2021)
    self.assertEqual(first_day.month, 1)
    self.assertEqual(first_day.day, 28)
    self.assertEqual(haz.event_id[-1], 40)
    self.assertEqual(haz.event_name[-1], '2021-01-28_ens40')
    self.assertIsInstance(haz.intensity, sparse.csr.csr_matrix)
    self.assertIsInstance(haz.fraction, sparse.csr.csr_matrix)
    self.assertEqual(haz.intensity.shape, (40, 49))
    self.assertAlmostEqual(haz.intensity.max(), 17.276321, places=3)
    self.assertEqual(haz.fraction.shape, (40, 49))

    # mismatching run datetime must log a single warning and raise
    with self.assertLogs('climada.hazard.storm_europe', level='WARNING') as cm:
        with self.assertRaises(ValueError):
            haz = StormEurope.from_icon_grib(
                dt.datetime(2021, 1, 28, 6),
                dt.datetime(2021, 1, 28),
                model_name='test',
                grib_dir=CONFIG.hazard.test_data.str(),
                delete_raw_data=False)
    self.assertEqual(len(cm.output), 1)
    self.assertIn('event definition is inaccuratly implemented', cm.output[0])
def test_cosmoe_read(self):
    """test reading from cosmo-e netcdf"""
    haz = StormEurope()
    haz.read_cosmoe_file(
        DATA_DIR.joinpath('storm_europe_cosmoe_forecast_vmax_testfile.nc'),
        run_datetime=dt.datetime(2018, 1, 1),
        event_date=dt.datetime(2018, 1, 3))

    self.assertEqual(haz.tag.haz_type, 'WS')
    self.assertEqual(haz.units, 'm/s')
    # 21 ensemble members, all dated 2018-01-03
    self.assertEqual(haz.event_id.size, 21)
    self.assertEqual(haz.date.size, 21)
    event_day = dt.datetime.fromordinal(haz.date[0])
    self.assertEqual(event_day.year, 2018)
    self.assertEqual(event_day.month, 1)
    self.assertEqual(event_day.day, 3)
    self.assertEqual(haz.event_id[-1], 21)
    self.assertEqual(haz.event_name[-1], '2018-01-03_ens21')
    # sparse matrices over the 25-point test grid
    self.assertIsInstance(haz.intensity, sparse.csr.csr_matrix)
    self.assertIsInstance(haz.fraction, sparse.csr.csr_matrix)
    self.assertEqual(haz.intensity.shape, (21, 25))
    self.assertAlmostEqual(haz.intensity.max(), 36.426735, places=3)
    self.assertEqual(haz.fraction.shape, (21, 25))
def test_centroids_from_nc(self):
    """Test if centroids can be constructed correctly"""
    cent = StormEurope._centroids_from_nc(TEST_NCS[0])

    # assertIsInstance instead of assertTrue(isinstance(...)): consistent
    # with the sibling tests and gives a clearer failure message
    self.assertIsInstance(cent, Centroids)
    self.assertEqual(cent.size, 9944)
def test_Forecast_plot(self):
    """Test plotting functions from the Forecast class"""
    # hazard: two identical COSMO-E forecasts, centroids shifted by
    # (+0.6 lat, -1.2 lon), registered under two different run datetimes
    haz1 = StormEurope()
    haz1.read_cosmoe_file(
        HAZ_DIR.joinpath('storm_europe_cosmoe_forecast_vmax_testfile.nc'),
        run_datetime=dt.datetime(2018, 1, 1),
        event_date=dt.datetime(2018, 1, 3))
    haz1.centroids.lat += 0.6
    haz1.centroids.lon -= 1.2
    haz2 = StormEurope()
    haz2.read_cosmoe_file(
        HAZ_DIR.joinpath('storm_europe_cosmoe_forecast_vmax_testfile.nc'),
        run_datetime=dt.datetime(2018, 1, 1),
        event_date=dt.datetime(2018, 1, 3))
    haz2.centroids.lat += 0.6
    haz2.centroids.lon -= 1.2
    # exposure: uniform values on the (unshifted) haz1 centroids,
    # region id 756 (Switzerland)
    data = {}
    data['latitude'] = haz1.centroids.lat
    data['longitude'] = haz1.centroids.lon
    data['value'] = np.ones_like(data['latitude']) * 100000
    data['deductible'] = np.zeros_like(data['latitude'])
    data[INDICATOR_IMPF + 'WS'] = np.ones_like(data['latitude'])
    data['region_id'] = np.ones_like(data['latitude'], dtype=int) * 756
    expo = Exposures(gpd.GeoDataFrame(data=data))
    # vulnerability
    # generate vulnerability
    impact_function = ImpfStormEurope()
    impact_function.set_welker()
    impact_function_set = ImpactFuncSet()
    impact_function_set.append(impact_function)
    # create and calculate Forecast
    forecast = Forecast(
        {
            dt.datetime(2018, 1, 2): haz1,
            dt.datetime(2017, 12, 31): haz2
        },
        expo,
        impact_function_set)
    forecast.calc()
    # test plotting functions (save_fig=False/close_fig=True keeps the
    # test from leaving figures or files behind)
    forecast.plot_imp_map(run_datetime=dt.datetime(2017, 12, 31),
                          save_fig=False, close_fig=True)
    forecast.plot_hist(run_datetime=dt.datetime(2017, 12, 31),
                       save_fig=False, close_fig=True)
    forecast.plot_exceedence_prob(run_datetime=dt.datetime(2017, 12, 31),
                                  threshold=5000, save_fig=False,
                                  close_fig=True)
    # create a file containing the polygons of Swiss cantons using natural earth
    cantons_file = CONFIG.local_data.save_dir.dir() / 'CHE_cantons.shp'
    adm1_shape_file = shapereader.natural_earth(resolution='10m',
                                                category='cultural',
                                                name='admin_1_states_provinces')
    if not cantons_file.exists():
        with fiona.open(adm1_shape_file, 'r') as source:
            with fiona.open(cantons_file, 'w', **source.meta) as sink:
                for f in source:
                    if f['properties']['adm0_a3'] == 'CHE':
                        sink.write(f)
    # polygon-level warning map with explicit damage thresholds
    forecast.plot_warn_map(str(cantons_file),
                           decision_level='polygon',
                           thresholds=[100000, 500000,
                                       1000000, 5000000],
                           probability_aggregation='mean',
                           area_aggregation='sum',
                           title="Building damage warning",
                           explain_text="warn level based on aggregated damages",
                           save_fig=False, close_fig=True)
    # point-level warning map with fractional aggregation parameters
    forecast.plot_warn_map(str(cantons_file),
                           decision_level='exposure_point',
                           thresholds=[1, 1000,
                                       5000, 5000000],
                           probability_aggregation=0.2,
                           area_aggregation=0.2,
                           title="Building damage warning",
                           explain_text="warn level based on aggregated damages",
                           run_datetime=dt.datetime(2017, 12, 31),
                           save_fig=False, close_fig=True)
    forecast.plot_hexbin_ei_exposure()
    plt.close()
    # invalid decision_level must raise
    with self.assertRaises(ValueError):
        forecast.plot_warn_map(str(cantons_file),
                               decision_level='test_fail',
                               probability_aggregation=0.2,
                               area_aggregation=0.2,
                               title="Building damage warning",
                               explain_text="warn level based on aggregated damages",
                               save_fig=False, close_fig=True)
    plt.close()
    # invalid probability_aggregation must raise
    with self.assertRaises(ValueError):
        forecast.plot_warn_map(str(cantons_file),
                               decision_level='exposure_point',
                               probability_aggregation='test_fail',
                               area_aggregation=0.2,
                               title="Building damage warning",
                               explain_text="warn level based on aggregated damages",
                               save_fig=False, close_fig=True)
    plt.close()
    # invalid area_aggregation must raise
    with self.assertRaises(ValueError):
        forecast.plot_warn_map(str(cantons_file),
                               decision_level='exposure_point',
                               probability_aggregation=0.2,
                               area_aggregation='test_fail',
                               title="Building damage warning",
                               explain_text="warn level based on aggregated damages",
                               save_fig=False, close_fig=True)
    plt.close()