def from_excel(cls, file_name, description=''):
    """Read csv or xls or xlsx file following climada's template.

    Parameters
    ----------
    file_name : str, optional
        file name(s) or folder name containing the files to read
    description : str or list(str), optional
        one description of the data or a description of each data file

    Returns
    -------
    ent : climada.entity.Entity
        The entity from excel file
    """
    # Exposures come straight from the spreadsheet; the tag records provenance.
    exposures = Exposures(pd.read_excel(file_name))
    exposures.tag = Tag()
    exposures.tag.file_name = str(file_name)
    exposures.tag.description = description

    # Remaining entity components are read from the same template file.
    disc_rates = DiscRates.from_excel(file_name, description)
    impact_funcs = ImpactFuncSet.from_excel(file_name, description)
    measures = MeasureSet.from_excel(file_name, description)

    return cls(
        exposures=exposures,
        disc_rates=disc_rates,
        impact_func_set=impact_funcs,
        measure_set=measures,
    )
def test_change_exposures_if_pass(self):
    """Test _change_exposures_if: measure maps impact-function id 1 to 3
    while coordinates, values and tags stay untouched."""
    meas = Measure()
    meas.imp_fun_map = '1to3'
    meas.haz_type = 'TC'

    # Build the two impact functions referenced by the 1->3 map.
    # Fix: the original built the second function (id 3) but never appended
    # it, leaving the set with only id 1.
    imp_set = ImpactFuncSet()
    imp_tc = ImpactFunc()
    imp_tc.haz_type = 'TC'
    imp_tc.id = 1
    imp_tc.intensity = np.arange(10, 100, 10)
    imp_tc.mdd = np.arange(10, 100, 10)
    imp_tc.paa = np.arange(10, 100, 10)
    imp_set.append(imp_tc)

    imp_tc = ImpactFunc()
    imp_tc.haz_type = 'TC'
    imp_tc.id = 3
    imp_tc.intensity = np.arange(10, 100, 10)
    imp_tc.mdd = np.arange(10, 100, 10) * 2
    imp_tc.paa = np.arange(10, 100, 10) * 2
    imp_set.append(imp_tc)

    exp = Exposures()
    exp.read_hdf5(EXP_DEMO_H5)

    new_exp = meas._change_exposures_if(exp)

    # Metadata and coordinates are preserved.
    self.assertEqual(new_exp.ref_year, exp.ref_year)
    self.assertEqual(new_exp.value_unit, exp.value_unit)
    self.assertEqual(new_exp.tag.file_name, exp.tag.file_name)
    self.assertEqual(new_exp.tag.description, exp.tag.description)
    self.assertTrue(np.array_equal(new_exp.gdf.value.values, exp.gdf.value.values))
    self.assertTrue(np.array_equal(new_exp.gdf.latitude.values, exp.gdf.latitude.values))
    self.assertTrue(np.array_equal(new_exp.gdf.longitude.values, exp.gdf.longitude.values))

    # Original keeps id 1 everywhere; the copy is remapped to id 3.
    self.assertTrue(np.array_equal(exp.gdf[INDICATOR_IF + 'TC'].values,
                                   np.ones(new_exp.gdf.shape[0])))
    self.assertTrue(np.array_equal(new_exp.gdf[INDICATOR_IF + 'TC'].values,
                                   np.ones(new_exp.gdf.shape[0]) * 3))
def read_excel(self, file_name, description=''):
    """Read csv or xls or xlsx file following climada's template.

    Parameters:
        file_name (str, optional): file name(s) or folder name containing
            the files to read
        description (str or list(str), optional): one description of the
            data or a description of each data file

    Raises:
        ValueError
    """
    # Exposures from the spreadsheet plus a provenance tag.
    self.exposures = Exposures(pd.read_excel(file_name))
    self.exposures.tag = Tag()
    # Store as str for consistency with the classmethod constructor,
    # which writes str(file_name) into the tag.
    self.exposures.tag.file_name = str(file_name)
    self.exposures.tag.description = description

    # Remaining entity components read from the same template file.
    self.disc_rates = DiscRates()
    self.disc_rates.read_excel(file_name, description)
    self.impact_funcs = ImpactFuncSet()
    self.impact_funcs.read_excel(file_name, description)
    self.measures = MeasureSet()
    self.measures.read_excel(file_name, description)
def test_no_impact_fail(self):
    """Reading must raise KeyError when the impact-id variable name is wrong."""
    var_names = copy.deepcopy(DEF_VAR_MAT)
    var_names['var_name']['imp'] = 'no valid value'
    exposures = Exposures()
    with self.assertRaises(KeyError):
        exposures.read_mat(ENT_TEST_MAT, var_names=var_names)
def test_concat_fail(self):
    """Concatenating a list containing a raw ndarray must raise TypeError."""
    bad_sources = [self.dummy, self.dummy.gdf, self.dummy.gdf.values, self.dummy]
    with self.assertRaises(TypeError):
        Exposures.concat(bad_sources)
def set_countries(self, countries, ref_year=2016, res_km=None, from_hr=None,
                  admin_file='admin_0_countries', **kwargs):
    """ Model countries using values at reference year. If GDP or income
    group not available for that year, consider the value of the closest
    available year.

    Parameters:
        countries (list or dict): list of country names (admin0 or subunits)
            or dict with key = admin0 name and value = [admin1 names]
        ref_year (int, optional): reference year. Default: 2016
        res_km (float, optional): approx resolution in km. Default:
            nightlights resolution.
        from_hr (bool, optional): force to use higher resolution image,
            independently of its year of acquisition.
        admin_file (str): file name, admin_0_countries or admin_0_map_subunits
        kwargs (optional): 'gdp' and 'inc_grp' dictionaries with keys the
            country ISO_alpha3 code. 'poly_val' polynomial transformation
            [1,x,x^2,...] to apply to nightlight (DEF_POLY_VAL used if not
            provided). If provided, these are used.
    """
    # Shapefile attribute names differ between the country and subunit files.
    admin_key_dict = {'admin_0_countries': ['ADMIN', 'ADM0_A3'],
                      'admin_0_map_subunits': ['SUBUNIT', 'SU_A3']}

    shp_file = shapereader.natural_earth(resolution='10m', category='cultural',
                                         name=admin_file)
    shp_file = shapereader.Reader(shp_file)

    # Resolve country geometries and economic indicators for ref_year.
    cntry_info, cntry_admin1 = country_iso_geom(countries, shp_file,
                                                admin_key_dict[admin_file])
    fill_econ_indicators(ref_year, cntry_info, shp_file, **kwargs)

    nightlight, coord_nl, fn_nl, res_fact, res_km = get_nightlight(
        ref_year, cntry_info, res_km, from_hr)

    tag = Tag()
    bkmrbl_list = []

    # One exposures GeoDataFrame per country; description lines accumulate.
    for cntry_iso, cntry_val in cntry_info.items():
        bkmrbl_list.append(
            self._set_one_country(cntry_val, nightlight, coord_nl, res_fact,
                                  res_km, cntry_admin1[cntry_iso], **kwargs))
        tag.description += ("{} {:d} GDP: {:.3e} income group: {:d} \n").\
            format(cntry_val[1], cntry_val[3], cntry_val[4], cntry_val[5])

    Exposures.__init__(self, gpd.GeoDataFrame(
        pd.concat(bkmrbl_list, ignore_index=True)), crs=DEF_CRS)

    # set metadata
    self.ref_year = ref_year
    self.tag = tag
    self.tag.file_name = fn_nl
    self.value_unit = 'USD'
    # Raster metadata derived from the point extent and nightlight resolution
    # (coord_nl[0, 1] is the grid step; negative y-step for north-up rasters).
    rows, cols, ras_trans = pts_to_raster_meta(
        (self.longitude.min(), self.latitude.min(),
         self.longitude.max(), self.latitude.max()),
        (coord_nl[0, 1], -coord_nl[0, 1]))
    self.meta = {'width': cols, 'height': rows, 'crs': self.crs,
                 'transform': ras_trans}
def test_no_unit_pass(self):
    """A missing value-unit variable falls back to the default 'USD'."""
    var_names = copy.deepcopy(DEF_VAR_MAT)
    var_names['var_name']['uni'] = 'no valid unit'
    exposures = Exposures()
    exposures.read_mat(ENT_TEST_MAT, var_names=var_names)
    # Default unit applies when the .mat file variable is absent.
    self.assertEqual('USD', exposures.value_unit)
def test_no_category_pass(self):
    """A missing category-id variable leaves the column out of the GeoDataFrame."""
    var_names = copy.deepcopy(DEF_VAR_MAT)
    var_names['var_name']['cat'] = 'no valid category'
    exposures = Exposures()
    exposures.read_mat(ENT_TEST_MAT, var_names=var_names)
    self.assertTrue('category_id' not in exposures.gdf)
def test_no_region_pass(self):
    """A missing region-id variable leaves the column out of the GeoDataFrame."""
    var_names = copy.deepcopy(DEF_VAR_MAT)
    var_names['var_name']['reg'] = 'no valid region'
    exposures = Exposures()
    exposures.read_mat(ENT_TEST_MAT, var_names=var_names)
    self.assertTrue('region_id' not in exposures.gdf)
def test_no_assigned_pass(self):
    """A missing centroid-assignment variable leaves no centr_ column."""
    var_names = copy.deepcopy(DEF_VAR_MAT)
    var_names['var_name']['ass'] = 'no valid assign'
    exposures = Exposures()
    exposures.read_mat(ENT_TEST_MAT, var_names=var_names)
    self.assertTrue('centr_' not in exposures.gdf)
def test_no_refyear_pass(self):
    """A missing reference-year variable falls back to the default 2018."""
    var_names = copy.deepcopy(DEF_VAR_MAT)
    var_names['var_name']['ref'] = 'no valid ref'
    exposures = Exposures()
    exposures.read_mat(ENT_TEST_MAT, var_names=var_names)
    self.assertEqual(2018, exposures.ref_year)
def set_countries(self, countries=None, reg=None, ref_year=2000, path=None):
    """Model countries using values at reference year. If GDP or income
    group not available for that year, consider the value of the closest
    available year.

    Parameters:
        countries (list): list of country names ISO3
        reg (list): list of region names (used when countries is empty)
        ref_year (int, optional): reference year. Default: 2000
        path (string): path to exposure dataset (ISIMIP)

    Raises:
        NameError: if path is missing or does not exist
        ValueError: if neither countries nor reg is given
        KeyError: if a country/region/year combination cannot be resolved
    """
    # Avoid mutable default arguments; None is normalized to an empty list.
    countries = [] if countries is None else countries
    reg = [] if reg is None else reg

    gdp2a_list = []
    tag = Tag()

    if path is None:
        LOGGER.error('No path for exposure data set')
        raise NameError('No path for exposure data set')
    if not os.path.exists(path):
        LOGGER.error('Invalid path %s', path)
        raise NameError('Invalid path %s' % path)

    try:
        if not countries:
            if reg:
                natISO = region2isos(reg)
                countries = np.array(natISO)
            else:
                LOGGER.error('set_countries requires countries or reg')
                raise ValueError('set_countries requires countries or reg')
        for cntr_ind in range(len(countries)):
            gdp2a_list.append(self._set_one_country(countries[cntr_ind],
                                                    ref_year, path))
            tag.description += ("{} GDP2Asset \n").format(countries[cntr_ind])
        Exposures.__init__(self, gpd.GeoDataFrame(
            pd.concat(gdp2a_list, ignore_index=True)))
    except KeyError:
        LOGGER.error('Exposure countries: %s or reg %s could not be set, '
                     'check ISO3 or reference year %s',
                     countries, reg, ref_year)
        # Re-raise the original exception to keep its message and traceback.
        raise

    # set metadata
    self.tag = tag
    self.ref_year = ref_year
    self.value_unit = 'USD'
    self.tag.description = 'GDP2Asset ' + str(self.ref_year)
    self.crs = DEF_CRS

    # set meta: raster grid derived from point extent at fixed resolution
    res = 0.0416666
    rows, cols, ras_trans = pts_to_raster_meta((self.longitude.min(),
                                                self.latitude.min(),
                                                self.longitude.max(),
                                                self.latitude.max()), res)
    self.meta = {'width': cols, 'height': rows, 'crs': self.crs,
                 'transform': ras_trans}
def test_no_coord_fail(self):
    """from_mat must raise KeyError when latitude or longitude names are wrong."""
    var_names = copy.deepcopy(DEF_VAR_MAT)

    # Invalid latitude variable name.
    var_names['var_name']['lat'] = 'no valid Latitude'
    with self.assertRaises(KeyError):
        Exposures.from_mat(ENT_TEST_MAT, var_names=var_names)

    # Valid latitude but invalid longitude variable name.
    var_names['var_name']['lat'] = 'nLatitude'
    var_names['var_name']['lon'] = 'no valid Longitude'
    with self.assertRaises(KeyError):
        Exposures.from_mat(ENT_TEST_MAT, var_names=var_names)
def set_countries(self, countries, ref_year=2016, res_km=None, from_hr=None,
                  **kwargs):
    """ Model countries using values at reference year. If GDP or income
    group not available for that year, consider the value of the closest
    available year.

    Parameters:
        countries (list or dict): list of country names (admin0) or dict
            with key = admin0 name and value = [admin1 names]
        ref_year (int, optional): reference year. Default: 2016
        res_km (float, optional): approx resolution in km. Default:
            nightlights resolution.
        from_hr (bool, optional): force to use higher resolution image,
            independently of its year of acquisition.
        kwargs (optional): 'gdp' and 'inc_grp' dictionaries with keys the
            country ISO_alpha3 code. 'poly_val' polynomial transformation
            [1,x,x^2,...] to apply to nightlight (DEF_POLY_VAL used if not
            provided). If provided, these are used.
    """
    shp_file = shapereader.natural_earth(resolution='10m',
                                         category='cultural',
                                         name='admin_0_countries')
    shp_file = shapereader.Reader(shp_file)

    # Resolve country geometries and economic indicators for ref_year.
    cntry_info, cntry_admin1 = country_iso_geom(countries, shp_file)
    fill_econ_indicators(ref_year, cntry_info, shp_file, **kwargs)

    nightlight, coord_nl, fn_nl, res_fact, res_km = get_nightlight(
        ref_year, cntry_info, res_km, from_hr)

    tag = Tag()
    bkmrbl_list = []

    # One exposures GeoDataFrame per country; description lines accumulate.
    for cntry_iso, cntry_val in cntry_info.items():
        bkmrbl_list.append(self._set_one_country(cntry_val, nightlight,
                                                 coord_nl, res_fact, res_km,
                                                 cntry_admin1[cntry_iso],
                                                 **kwargs))
        tag.description += ("{} {:d} GDP: {:.3e} income group: {:d} \n").\
            format(cntry_val[1], cntry_val[3], cntry_val[4], cntry_val[5])

    Exposures.__init__(
        self, gpd.GeoDataFrame(pd.concat(bkmrbl_list, ignore_index=True)))

    # set metadata
    self.ref_year = ref_year
    self.tag = tag
    self.tag.file_name = fn_nl
    self.value_unit = 'USD'
    self.crs = {'init': 'epsg:4326'}
def test_read_demo_pass(self): """Read one single excel file""" # Read demo excel file expo = Exposures() expo.read_mat(ENT_TEST_MAT) # Check results n_expos = 50 self.assertEqual(expo.gdf.index.shape, (n_expos, )) self.assertEqual(expo.gdf.index[0], 0) self.assertEqual(expo.gdf.index[n_expos - 1], n_expos - 1) self.assertEqual(expo.gdf.value.shape, (n_expos, )) self.assertEqual(expo.gdf.value[0], 13927504367.680632) self.assertEqual(expo.gdf.value[n_expos - 1], 12624818493.687229) self.assertEqual(expo.gdf.deductible.shape, (n_expos, )) self.assertEqual(expo.gdf.deductible[0], 0) self.assertEqual(expo.gdf.deductible[n_expos - 1], 0) self.assertEqual(expo.gdf.cover.shape, (n_expos, )) self.assertEqual(expo.gdf.cover[0], 13927504367.680632) self.assertEqual(expo.gdf.cover[n_expos - 1], 12624818493.687229) self.assertIn('int', str(expo.gdf.if_.dtype)) self.assertEqual(expo.gdf.if_.shape, (n_expos, )) self.assertEqual(expo.gdf.if_[0], 1) self.assertEqual(expo.gdf.if_[n_expos - 1], 1) self.assertIn('int', str(expo.gdf.category_id.dtype)) self.assertEqual(expo.gdf.category_id.shape, (n_expos, )) self.assertEqual(expo.gdf.category_id[0], 1) self.assertEqual(expo.gdf.category_id[n_expos - 1], 1) self.assertIn('int', str(expo.gdf.centr_.dtype)) self.assertEqual(expo.gdf.centr_.shape, (n_expos, )) self.assertEqual(expo.gdf.centr_[0], 47) self.assertEqual(expo.gdf.centr_[n_expos - 1], 46) self.assertTrue('region_id' not in expo.gdf) self.assertEqual(expo.gdf.latitude.shape, (n_expos, )) self.assertEqual(expo.gdf.latitude[0], 26.93389900000) self.assertEqual(expo.gdf.latitude[n_expos - 1], 26.34795700000) self.assertEqual(expo.gdf.longitude[0], -80.12879900000) self.assertEqual(expo.gdf.longitude[n_expos - 1], -80.15885500000) self.assertEqual(expo.ref_year, 2016) self.assertEqual(expo.value_unit, 'USD') self.assertEqual(expo.tag.file_name, str(ENT_TEST_MAT))
def test_set_gdf(self):
    """Test setting the GeoDataFrame"""
    empty_gdf = gpd.GeoDataFrame()
    gdf_without_geometry = good_exposures().gdf

    # Build a gdf that carries a geometry column and a non-default CRS.
    exp_with_geom = good_exposures()
    exp_with_geom.set_crs(crs='epsg:3395')
    exp_with_geom.set_geometry_points()
    gdf_with_geometry = exp_with_geom.gdf

    probe = Exposures()

    # A plain DataFrame is rejected.
    self.assertRaises(ValueError, probe.set_gdf, pd.DataFrame())

    # An empty gdf resets to the default CRS, with no gdf-level CRS.
    probe.set_gdf(empty_gdf)
    self.assertTrue(probe.gdf.equals(gpd.GeoDataFrame()))
    self.assertTrue(u_coord.equal_crs(DEF_CRS, probe.crs))
    self.assertIsNone(probe.gdf.crs)

    # A gdf with geometry carries its CRS into the exposures.
    probe.set_gdf(gdf_with_geometry)
    self.assertTrue(probe.gdf.equals(gdf_with_geometry))
    self.assertTrue(u_coord.equal_crs('epsg:3395', probe.crs))
    self.assertTrue(u_coord.equal_crs('epsg:3395', probe.gdf.crs))

    # A gdf without geometry falls back to the default CRS.
    probe.set_gdf(gdf_without_geometry)
    self.assertTrue(probe.gdf.equals(good_exposures().gdf))
    self.assertTrue(u_coord.equal_crs(DEF_CRS, probe.crs))
    self.assertIsNone(probe.gdf.crs)
def test_change_all_exposures_pass(self):
    """Test _change_all_exposures method"""
    meas = Measure()
    meas.exposures_set = EXP_DEMO_H5

    # Reference exposures: what the measure should swap in.
    ref_exp = Exposures()
    ref_exp.read_hdf5(EXP_DEMO_H5)

    # Input exposures with arbitrary coordinates; they get replaced.
    dummy_exp = Exposures()
    dummy_exp.gdf['latitude'] = np.ones(10)
    dummy_exp.gdf['longitude'] = np.ones(10)

    new_exp = meas._change_all_exposures(dummy_exp)

    self.assertEqual(new_exp.ref_year, ref_exp.ref_year)
    self.assertEqual(new_exp.value_unit, ref_exp.value_unit)
    self.assertEqual(new_exp.tag.file_name, ref_exp.tag.file_name)
    self.assertEqual(new_exp.tag.description, ref_exp.tag.description)

    for column in ('value', 'latitude', 'longitude'):
        self.assertTrue(np.array_equal(new_exp.gdf[column].values,
                                       ref_exp.gdf[column].values))
def test_cutoff_hazard_pass(self):
    """Test _cutoff_hazard_damage"""
    # Load the 'Seawall' measure from the test entity file.
    meas = MeasureSet()
    meas.read_mat(ENT_TEST_MAT)
    act_1 = meas.get_measure(name='Seawall')[0]

    haz = Hazard('TC')
    haz.read_mat(HAZ_TEST_MAT)

    # Exposures need the hazard-specific impact-function column name.
    exp = Exposures()
    exp.read_mat(ENT_TEST_MAT)
    exp.rename(columns={'if_': 'if_TC'}, inplace=True)
    exp.check()

    imp_set = ImpactFuncSet()
    imp_set.read_mat(ENT_TEST_MAT)

    new_haz = act_1._cutoff_hazard_damage(exp, imp_set, haz)

    # The cutoff must operate on a copy, not mutate the input hazard.
    self.assertFalse(id(new_haz) == id(haz))

    # Event indices expected to keep non-zero intensity (fixture-specific).
    pos_no_null = np.array([
        6249, 7697, 9134, 13500, 13199, 5944, 9052, 9050, 2429, 5139, 9053,
        7102, 4096, 1070, 5948, 1076, 5947, 7432, 5949, 11694, 5484, 6246,
        12147, 778, 3326, 7199, 12498, 11698, 6245, 5327, 4819, 8677, 5970,
        7101, 779, 3894, 9051, 5976, 3329, 5978, 4282, 11697, 7193, 5351,
        7310, 7478, 5489, 5526, 7194, 4283, 7191, 5328, 4812, 5528, 5527,
        5488, 7475, 5529, 776, 5758, 4811, 6223, 7479, 7470, 5480, 5325,
        7477, 7318, 7317, 11696, 7313, 13165, 6221
    ])
    # Mark the kept events with -1 so the remaining positive indices are the
    # events whose intensity must have been zeroed out.
    all_haz = np.arange(haz.intensity.shape[0])
    all_haz[pos_no_null] = -1
    pos_null = np.argwhere(all_haz > 0).reshape(-1)
    for i_ev in pos_null:
        self.assertEqual(new_haz.intensity[i_ev, :].max(), 0)
def _change_all_exposures(self, exposures):
    """Change exposures to provided exposures_set.

    Parameters:
        exposures (Exposures): exposures instance

    Returns:
        Exposures

    Raises:
        ValueError: if exposures_set is neither NULL_STR, a file path
            string, nor an Exposures instance
    """
    # NULL_STR marks "no replacement": keep the incoming exposures.
    if isinstance(self.exposures_set, str) and self.exposures_set == NULL_STR:
        return exposures

    if isinstance(self.exposures_set, str):
        # Interpret the string as an HDF5 file path.
        LOGGER.debug('Setting new exposures %s', self.exposures_set)
        new_exp = Exposures()
        new_exp.read_hdf5(self.exposures_set)
        new_exp.check()
    elif isinstance(self.exposures_set, Exposures):
        # Deep-copy so the measure's template is never mutated by callers.
        LOGGER.debug('Setting new exposures. ')
        new_exp = copy.deepcopy(self.exposures_set)
        new_exp.check()
    else:
        LOGGER.error('Wrong input exposures.')
        # Fix: carry the message in the exception instead of raising bare.
        raise ValueError('Wrong input exposures.')

    # Warn (but do not fail) when the replacement covers different locations.
    if not np.array_equal(np.unique(exposures.latitude.values),
                          np.unique(new_exp.latitude.values)) or \
            not np.array_equal(np.unique(exposures.longitude.values),
                               np.unique(new_exp.longitude.values)):
        LOGGER.warning('Exposures locations have changed.')

    return new_exp
def test_assign_raster_pass(self):
    """ Test assign_centroids with raster hazard """
    exp = Exposures()
    exp['longitude'] = np.array([-69.235, -69.2427, -72, -68.8016496, 30])
    exp['latitude'] = np.array([10.235, 10.226, 2, 9.71272097, 50])
    exp.crs = DEF_CRS

    haz = Hazard('FL')
    haz.set_raster([HAZ_DEMO_FL], window=Window(10, 20, 50, 60))

    exp.assign_centroids(haz)

    # Points inside the raster map to cell indices; -1 marks points outside.
    expected = [51, 100, -1, 3000 - 1, -1]
    for pos, centroid in enumerate(expected):
        self.assertEqual(exp[INDICATOR_CENTR + 'FL'][pos], centroid)
def test_calc_sector_total_impact(self):
    """Test running total impact calculations."""
    sup = SupplyChain()
    sup.read_wiod16(year='test', range_rows=(5, 117),
                    range_cols=(4, 116), col_iso3=2, col_sectors=1)

    # Tropical cyclone over Florida and Caribbean
    hazard = Hazard('TC')
    hazard.read_mat(HAZ_TEST_MAT)

    # Read demo entity values
    # Set the entity default file to the demo one
    exp = Exposures()
    exp.read_hdf5(EXP_DEMO_H5)
    exp.check()
    exp.gdf.region_id = 840  # assign right id for USA
    exp.assign_centroids(hazard)

    impf_tc = IFTropCyclone()
    impf_tc.set_emanuel_usa()
    impf_set = ImpactFuncSet()
    impf_set.append(impf_tc)
    impf_set.check()

    sup.calc_sector_direct_impact(hazard, exp, impf_set)
    sup.calc_indirect_impact(io_approach='ghosh')
    sup.calc_total_impact()

    # Fix: these compare tuples of ints, so use assertEqual;
    # assertAlmostEqual only worked via its initial equality shortcut.
    self.assertEqual((sup.years.shape[0], sup.mriot_data.shape[0]),
                     sup.total_impact.shape)
    self.assertEqual((sup.mriot_data.shape[0], ),
                     sup.total_aai_agg.shape)
def set_countries(self, countries=None, reg=None, ref_year=2000, path=None):
    """ Model countries using values at reference year. If GDP or income
    group not available for that year, consider the value of the closest
    available year.

    Parameters:
        countries (list): list of country names ISO3
        reg (list): list of region names (used when countries is empty)
        ref_year (int, optional): reference year. Default: 2000
        path (string): path to exposure dataset

    Raises:
        NameError: if path is missing or does not exist
        ValueError: if neither countries nor reg is given
        KeyError: if a country/region/year combination cannot be resolved
    """
    # Avoid mutable default arguments; None is normalized to an empty list.
    countries = [] if countries is None else countries
    reg = [] if reg is None else reg

    gdp2a_list = []
    tag = Tag()

    if path is None:
        LOGGER.error('No path for exposure data set')
        raise NameError('No path for exposure data set')
    if not os.path.exists(path):
        # Fix: lazy %-style logging instead of string concatenation.
        LOGGER.error('Invalid path %s', path)
        raise NameError('Invalid path %s' % path)

    try:
        if not countries:
            if reg:
                # Map region names to country ISO codes via the lookup table.
                natID_info = pd.read_csv(NAT_REG_ID)
                natISO = natID_info["ISO"][np.isin(natID_info["Reg_name"],
                                                   reg)]
                countries = np.array(natISO)
            else:
                LOGGER.error('set_countries requires countries or reg')
                raise ValueError('set_countries requires countries or reg')
        for cntr_ind in range(len(countries)):
            gdp2a_list.append(self._set_one_country(countries[cntr_ind],
                                                    ref_year, path))
            tag.description += ("{} GDP2Asset \n").format(countries[cntr_ind])
        Exposures.__init__(self, gpd.GeoDataFrame(
            pd.concat(gdp2a_list, ignore_index=True)))
    except KeyError:
        LOGGER.error('Exposure countries: %s or reg %s could not be set, '
                     'check ISO3 or reference year %s',
                     countries, reg, ref_year)
        # Re-raise the original exception to keep its message and traceback.
        raise

    # set metadata
    self.ref_year = ref_year
    self.value_unit = 'USD'
    self.tag = tag
    self.crs = DEF_CRS
def test_ctx_osm_pass(self):
    """Test basemap function using osm images"""
    myexp = Exposures()
    myexp.gdf['latitude'] = np.array([30, 40, 50])
    myexp.gdf['longitude'] = np.array([0, 0, 0])
    myexp.gdf['value'] = np.array([1, 1, 1])
    myexp.check()
    try:
        myexp.plot_basemap(url=ctx.sources.OSM_A)
    except urllib.error.HTTPError:
        # Fix: use self.fail with a message instead of the opaque
        # assertEqual(1, 0) failure marker.
        self.fail('plot_basemap raised HTTPError when fetching OSM tiles')
def setUp(self):
    """Build a 10-point dummy Exposures in EPSG:3395 for the tests."""
    lat_bounds = (27.5, 30)
    lon_bounds = (-18, -12)

    dummy = Exposures(crs='epsg:3395')
    dummy.gdf['value'] = np.arange(0, 1.0e6, 1.0e5)
    dummy.gdf['latitude'] = np.linspace(lat_bounds[0], lat_bounds[1], 10)
    dummy.gdf['longitude'] = np.linspace(lon_bounds[0], lon_bounds[1], 10)
    dummy.gdf['region_id'] = np.ones(10)
    dummy.gdf['impf_TC'] = np.ones(10)
    dummy.ref_year = 2015
    dummy.value_unit = 'XSD'

    self.dummy = dummy
def test_cutoff_hazard_region_pass(self):
    """Test _cutoff_hazard_damage in specific region"""
    # Load the 'Seawall' measure and restrict it to region id 1.
    meas = MeasureSet.from_mat(ENT_TEST_MAT)
    act_1 = meas.get_measure(name='Seawall')[0]
    act_1.exp_region_id = [1]

    haz = Hazard.from_mat(HAZ_TEST_MAT)

    # Split exposures into region 0 (first 10 points) and region 1 (rest).
    exp = Exposures.from_mat(ENT_TEST_MAT)
    exp.gdf['region_id'] = np.zeros(exp.gdf.shape[0])
    exp.gdf.region_id.values[10:] = 1
    exp.check()

    imp_set = ImpactFuncSet.from_mat(ENT_TEST_MAT)

    new_haz = act_1._cutoff_hazard_damage(exp, imp_set, haz)

    # The cutoff must operate on a copy, not mutate the input hazard.
    self.assertFalse(id(new_haz) == id(haz))

    # Event indices expected to keep non-zero intensity (fixture-specific).
    pos_no_null = np.array([
        6249, 7697, 9134, 13500, 13199, 5944, 9052, 9050, 2429, 5139, 9053,
        7102, 4096, 1070, 5948, 1076, 5947, 7432, 5949, 11694, 5484, 6246,
        12147, 778, 3326, 7199, 12498, 11698, 6245, 5327, 4819, 8677, 5970,
        7101, 779, 3894, 9051, 5976, 3329, 5978, 4282, 11697, 7193, 5351,
        7310, 7478, 5489, 5526, 7194, 4283, 7191, 5328, 4812, 5528, 5527,
        5488, 7475, 5529, 776, 5758, 4811, 6223, 7479, 7470, 5480, 5325,
        7477, 7318, 7317, 11696, 7313, 13165, 6221
    ])
    # Mark the kept events with -1 so remaining positive indices are the
    # events whose intensity must be zero at the region-0 centroids.
    all_haz = np.arange(haz.intensity.shape[0])
    all_haz[pos_no_null] = -1
    pos_null = np.argwhere(all_haz > 0).reshape(-1)
    centr_null = np.unique(exp.gdf.centr_[exp.gdf.region_id == 0])
    for i_ev in pos_null:
        self.assertEqual(new_haz.intensity[i_ev, centr_null].max(), 0)
def _cutoff_hazard_damage(self, exposures, if_set, hazard):
    """Cutoff of hazard events which generate damage with a frequency higher
    than hazard_freq_cutoff.

    Parameters:
        exposures (Exposures): exposures instance
        if_set (ImpactFuncSet): impact functions instance
        hazard (Hazard): hazard instance

    Returns:
        Hazard: deep copy of ``hazard`` with the intensity of the selected
            high-frequency damaging events set to zero
    """
    # A cutoff of 0 means the measure does not modify the hazard.
    if self.hazard_freq_cutoff == 0:
        return hazard

    LOGGER.debug('Cutting events whose damage have a frequency > %s.',
                 self.hazard_freq_cutoff)
    # Imported locally — presumably to avoid a circular import; verify.
    from climada.engine.impact import Impact
    imp = Impact()
    exp_imp = exposures
    if self.exp_region_id != 0:
        # compute impact only in selected region
        exp_imp = exposures[exposures.region_id == self.exp_region_id]
        exp_imp = Exposures(exp_imp)
    imp.calc(exp_imp, if_set, hazard)

    # Work on a copy so the caller's hazard is never mutated.
    new_haz = copy.deepcopy(hazard)
    # Sort events by damage (descending) and accumulate their frequencies;
    # events past the cutoff in exceedance frequency are zeroed out.
    sort_idxs = np.argsort(imp.at_event)[::-1]
    exceed_freq = np.cumsum(imp.frequency[sort_idxs])
    cutoff = exceed_freq > self.hazard_freq_cutoff
    sel_haz = sort_idxs[cutoff]
    # lil format allows efficient row assignment; convert back to csr after.
    new_haz_inten = new_haz.intensity.tolil()
    new_haz_inten[sel_haz, :] = np.zeros((sel_haz.size,
                                          new_haz.intensity.shape[1]))
    new_haz.intensity = new_haz_inten.tocsr()
    return new_haz
def test_constructoer_pass(self):
    """ Test initialization with input GeiDataFrame """
    # NOTE(review): method name has a typo ('constructoer'); kept as-is
    # since renaming would change the discovered test identifier.
    source_gdf = gpd.GeoDataFrame()
    source_gdf['value'] = np.zeros(10)
    source_gdf.ref_year = 2015

    built_exp = Exposures(source_gdf)

    # The constructor must carry over both the attribute and the data.
    self.assertEqual(built_exp.ref_year, 2015)
    self.assertTrue(np.array_equal(built_exp.value, np.zeros(10)))
def test_assign_raster_same_pass(self):
    """Test assign_centroids with raster hazard"""
    window = Window(10, 20, 50, 60)

    # Exposures and hazard built from the same raster window must map
    # one-to-one onto the hazard centroids.
    exp = Exposures.from_raster(HAZ_DEMO_FL, window=window)
    exp.check()
    haz = Hazard.from_raster([HAZ_DEMO_FL], haz_type='FL', window=window)

    exp.assign_centroids(haz)

    expected = np.arange(haz.centroids.size, dtype=int)
    np.testing.assert_array_equal(exp.gdf[INDICATOR_CENTR + 'FL'].values,
                                  expected)
def test_concat_pass(self):
    """Test concat function with fake data."""
    self.dummy.check()

    # Mix of Exposures, GeoDataFrame and plain DataFrame sources.
    sources = [
        self.dummy,
        self.dummy.gdf,
        pd.DataFrame(self.dummy.gdf.values, columns=self.dummy.gdf.columns),
        self.dummy,
    ]
    catexp = Exposures.concat(sources)

    # Four 10-row sources stack into 40 rows; the input is untouched.
    self.assertEqual(self.dummy.gdf.shape, (10, 5))
    self.assertEqual(catexp.gdf.shape, (40, 5))
    self.assertTrue(u_coord.equal_crs(catexp.crs, 'epsg:3395'))
def test_assign_raster_same_pass(self):
    """ Test assign_centroids with raster hazard """
    window = Window(10, 20, 50, 60)

    # Exposures and hazard built from the same raster window must map
    # one-to-one onto the hazard centroids.
    exp = Exposures()
    exp.set_from_raster(HAZ_DEMO_FL, window=window)
    exp.check()
    haz = Hazard('FL')
    haz.set_raster([HAZ_DEMO_FL], window=window)

    exp.assign_centroids(haz)

    expected = np.arange(haz.centroids.size, dtype=int)
    self.assertTrue(
        np.array_equal(exp[INDICATOR_CENTR + 'FL'].values, expected))