def _select_exact_area(countries=None, reg=None):
    """Extract grid-point coordinates of selected countries or regions
    from the NatID grid.

    If countries are given, only those countries are cut out; if only
    reg is given, the whole region is cut out.

    Parameters:
        countries (list): country ISO3 codes
        reg (list): region codes

    Raises:
        KeyError

    Returns:
        tuple: (centroids, country_isos, natIDs)
    """
    grid_lat, grid_lon = get_region_gridpoints(
        countries=countries, regions=reg, basemap="isimip", resolution=150)

    # Region selection takes precedence; otherwise fall back to the
    # country list (empty list when neither is supplied).
    country_isos = region2isos(reg) if reg else (countries or [])

    natIDs = country_iso2natid(country_isos)

    centroids = Centroids()
    centroids.set_lat_lon(grid_lat, grid_lon)
    centroids.id = np.arange(centroids.lon.shape[0])
    return centroids, country_isos, natIDs
def set_countries(self, countries=None, reg=None, ref_year=2000, path=None):
    """Model countries using asset values at a reference year. If GDP or
    income group is not available for that year, the value of the closest
    available year is used.

    Parameters:
        countries (list): list of country names ISO3
        reg (list): list of region codes; used to derive the country list
            when ``countries`` is empty
        ref_year (int, optional): reference year. Default: 2000
        path (str): path to exposure dataset (ISIMIP)

    Raises:
        NameError: if ``path`` is missing or is not a file
        ValueError: if neither ``countries`` nor ``reg`` is given
        KeyError: if a country, region or the reference year cannot be
            resolved in the dataset
    """
    # Avoid mutable default arguments; [] preserves the original falsy
    # semantics of the checks below.
    countries = [] if countries is None else countries
    reg = [] if reg is None else reg

    gdp2a_list = []
    tag = Tag()

    if path is None:
        LOGGER.error('No path for exposure data set')
        raise NameError
    if not Path(path).is_file():
        LOGGER.error('Invalid path %s', path)
        raise NameError

    try:
        if not countries:
            # Guard clause: without an explicit country list we need at
            # least a region to resolve countries from.
            if not reg:
                LOGGER.error('set_countries requires countries or reg')
                raise ValueError
            countries = np.array(region2isos(reg))

        for country in countries:
            gdp2a_list.append(self._set_one_country(country, ref_year, path))
            tag.description += f"{country} GDP2Asset \n"
    except KeyError:
        LOGGER.error(
            'Exposure countries: %s or reg %s could not be set, check ISO3 or'
            ' reference year %s', countries, reg, ref_year)
        raise

    # Use the ref_year being set here; self.ref_year is only assigned by
    # Exposures.__init__ below and may be stale or unset at this point.
    tag.description += 'GDP2Asset ' + str(ref_year)

    Exposures.__init__(self, data=Exposures.concat(gdp2a_list).gdf,
                       ref_year=ref_year, tag=tag, value_unit='USD')

    # Set raster meta from the point bounds; res is the grid step in
    # degrees (~2.5 arc-minutes, i.e. 1/24 degree — TODO confirm source).
    res = 0.0416666
    rows, cols, ras_trans = pts_to_raster_meta(
        (self.gdf.longitude.min(), self.gdf.latitude.min(),
         self.gdf.longitude.max(), self.gdf.latitude.max()), res)
    self.meta = {
        'width': cols,
        'height': rows,
        'crs': self.crs,
        'transform': ras_trans
    }
def set_from_nc(self, dph_path=None, frc_path=None, origin=False,
                centroids=None, countries=None, reg=None, shape=None,
                ISINatIDGrid=False, years=None):
    """Wrapper to fill hazard from nc_flood file.

    Intensity is read from the depth file, fraction from the fraction
    file; both are expected to share the same grid and time axis.

    Parameters:
        dph_path (string): Flood file to read (depth)
        frc_path (string): Flood file to read (fraction)
        origin (bool): Historical or probabilistic event
        centroids (Centroids): centroids to extract
        countries (list of countries ISO3): selection of countries
            (reg must be None!)
        reg (list of regions): can be set with region code if whole
            areas are considered (if not None, countries and centroids
            are ignored)
        shape (string): path to a shape file; its first geometry is used
            to clip the raster
        ISINatIDGrid (Bool): Indicates whether ISIMIP_NatIDGrid is used
        years (int list): years that are considered

    raises:
        NameError
    """
    if years is None:
        years = [2000]
    # Validate both input paths before touching any file.
    if dph_path is None:
        LOGGER.error('No flood-depth-path set')
        raise NameError
    if frc_path is None:
        LOGGER.error('No flood-fraction-path set')
        raise NameError
    if not Path(dph_path).exists():
        LOGGER.error('Invalid flood-file path %s', dph_path)
        raise NameError
    if not Path(frc_path).exists():
        LOGGER.error('Invalid flood-file path %s', frc_path)
        raise NameError

    # Read only the time axis to map requested years to band indices.
    with xr.open_dataset(dph_path) as flood_dph:
        time = flood_dph.time.data

    event_index = self._select_event(time, years)
    # Raster bands are 1-based, event indices 0-based.
    bands = event_index + 1

    if countries or reg:
        # centroids as points
        if ISINatIDGrid:
            # Cut to the exact NatID grid points of the selection, then
            # sample the rasters at those points.
            dest_centroids = RiverFlood._select_exact_area(countries, reg)[0]
            meta_centroids = copy.copy(dest_centroids)
            meta_centroids.set_lat_lon_to_meta()

            self.set_raster(files_intensity=[dph_path],
                            files_fraction=[frc_path],
                            band=bands.tolist(),
                            transform=meta_centroids.meta['transform'],
                            width=meta_centroids.meta['width'],
                            height=meta_centroids.meta['height'],
                            resampling=Resampling.nearest)
            # Convert lon/lat to integer column/row indices of the read
            # raster. NOTE(review): transform[4] is presumably the
            # (negative) row step of a north-up raster — confirm.
            x_i = ((dest_centroids.lon - self.centroids.meta['transform'][2]) /
                   self.centroids.meta['transform'][0]).astype(int)
            y_i = ((dest_centroids.lat - self.centroids.meta['transform'][5]) /
                   self.centroids.meta['transform'][4]).astype(int)

            # Flatten (row, col) into the 1-D pixel axis of the band
            # matrices and keep only the selected grid points.
            fraction = self.fraction[:, y_i * self.centroids.meta['width'] + x_i]
            intensity = self.intensity[:, y_i * self.centroids.meta['width'] + x_i]

            self.centroids = dest_centroids
            self.intensity = sp.sparse.csr_matrix(intensity)
            self.fraction = sp.sparse.csr_matrix(fraction)
        else:
            if reg:
                iso_codes = region2isos(reg)
                # envelope containing counties
                cntry_geom = get_land_geometry(iso_codes)
                self.set_raster(files_intensity=[dph_path],
                                files_fraction=[frc_path],
                                band=bands.tolist(),
                                geometry=cntry_geom)
                # self.centroids.set_meta_to_lat_lon()
            else:
                cntry_geom = get_land_geometry(countries)
                self.set_raster(files_intensity=[dph_path],
                                files_fraction=[frc_path],
                                band=bands.tolist(),
                                geometry=cntry_geom)
                # self.centroids.set_meta_to_lat_lon()
    elif shape:
        # Clip to the first geometry of the given shape file; returns
        # early, skipping the common attribute setup below.
        shapes = gpd.read_file(shape)
        rand_geom = shapes.geometry[0]

        self.set_raster(files_intensity=[dph_path],
                        files_fraction=[frc_path],
                        band=bands.tolist(),
                        geometry=rand_geom)
        return
    elif not centroids:
        # centroids as raster: read the whole files as-is
        self.set_raster(files_intensity=[dph_path],
                        files_fraction=[frc_path],
                        band=bands.tolist())
        # self.centroids.set_meta_to_lat_lon()
    else:  # use given centroids
        # if centroids.meta or grid_is_regular(centroids)[0]:
        # TODO: implement case when meta or regulargrid is defined
        #     centroids.meta or grid_is_regular(centroidsxarray)[0]:
        #     centroids>flood --> error
        #     reprojection, resampling.average (centroids< flood)
        #     (transform)
        #     reprojection change resampling"""
        # else:
        if centroids.meta:
            centroids.set_meta_to_lat_lon()
        metafrc, fraction = read_raster(frc_path, band=bands.tolist())
        metaint, intensity = read_raster(dph_path, band=bands.tolist())
        # Nearest-pixel lookup of each centroid in the raster grid (same
        # index arithmetic as the NatID branch above).
        x_i = ((centroids.lon - metafrc['transform'][2]) /
               metafrc['transform'][0]).astype(int)
        y_i = ((centroids.lat - metafrc['transform'][5]) /
               metafrc['transform'][4]).astype(int)
        fraction = fraction[:, y_i * metafrc['width'] + x_i]
        intensity = intensity[:, y_i * metaint['width'] + x_i]
        self.centroids = centroids
        self.intensity = sp.sparse.csr_matrix(intensity)
        self.fraction = sp.sparse.csr_matrix(fraction)

    # Common hazard attributes for all non-shape branches.
    self.units = 'm'
    self.tag.file_name = str(dph_path) + ';' + str(frc_path)
    self.event_id = np.arange(self.intensity.shape[0])
    self.event_name = list(map(str, years))

    if origin:
        self.orig = np.ones(self.size, bool)
    else:
        self.orig = np.zeros(self.size, bool)

    # Uniform frequency across the selected events.
    self.frequency = np.ones(self.size) / self.size

    # Re-open the depth file to turn the selected time stamps into
    # proleptic-Gregorian ordinals.
    with xr.open_dataset(dph_path) as flood_dph:
        self.date = np.array([dt.datetime(flood_dph.time[i].dt.year,
                                          flood_dph.time[i].dt.month,
                                          flood_dph.time[i].dt.day).toordinal()
                              for i in event_index])