def biomass_worker(driver, biomass, out_name, distance='hav'):
    """Worker function for parallel execution.

    Computes the biomass emissions (AGB and BGB) via ``biomass_emissions``.
    The pixel resolution in square meter is derived from the tile transform
    and forwarded as ``area``. The result is stored on disk as a raster
    image using the metadata profile of the driver tile.

    Args:
        driver (str or Path): Path to Proximate Deforestation Driver tile.
        biomass (str or Path): Path to Above-ground Woody Biomass Density
            stratum.
        out_name (str or Path): Path plus name of out file.
        distance (str, optional): Default is Haversine equation.
    """
    with open(driver, 'r') as src_driver, open(biomass, 'r') as src_biomass:
        driver_data = src_driver.read(1)
        biomass_data = src_biomass.read(1)
        profile = src_driver.profile
        transform = src_driver.transform

    # pixel edge lengths in meter along x and y, measured from the tile origin
    measure = Distance(distance)
    origin = (transform.xoff, transform.yoff)
    width_m = measure(origin, (transform.xoff + transform.a, transform.yoff))
    height_m = measure(origin, (transform.xoff, transform.yoff + transform.e))

    emissions = biomass_emissions(driver_data, biomass_data,
                                  area=round(width_m * height_m))

    # write updates the dtype corresponding to the array dtype
    write(emissions, out_name, **profile)
def soc_worker(driver, soc, intact, out_name, forest_type):
    """Worker function for parallel execution.

    Computes the soil organic carbon emissions via ``soc_emissions``. The
    pixel resolution in square meter is derived from the tile transform.
    If an intact forest stratum is given, it is read and forwarded as an
    additional mask. The result is stored on disk as a raster image using
    the metadata profile of the driver tile.

    Args:
        driver (str or Path): Path to Proximate Deforestation Driver tile.
        soc (str or Path): Path to soil organic carbon content stratum.
        intact (str or Path or None): Path to intact forest stratum; any
            falsy value skips the intact mask.
        out_name (str or Path): Path plus name of out file.
        forest_type: Forwarded to ``soc_emissions``.
    """
    with open(driver, 'r') as h1, open(soc, 'r') as h2:
        driver_data = h1.read(1)
        soc_data = h2.read(1)
        profile = h1.profile
        transform = h1.transform

    # pixel edge lengths in meter along x and y
    haversine = Distance('hav')
    x = haversine((transform.xoff, transform.yoff),
                  (transform.xoff + transform.a, transform.yoff))
    y = haversine((transform.xoff, transform.yoff),
                  (transform.xoff, transform.yoff + transform.e))
    area = round(x * y)

    if intact:
        with open(intact, 'r') as h3:
            intact_data = h3.read(1)
        emissions = soc_emissions(driver_data, soc_data, intact=intact_data,
                                  area=area, forest_type=forest_type)
    else:
        emissions = soc_emissions(driver_data, soc_data, area=area,
                                  forest_type=forest_type)

    # write updates the dtype corresponding to the array dtype
    write(emissions, out_name, **profile)
def alignment_worker(template_stratum, strata, ifl, crs, out_path):
    """Worker function to parallelize alignment process.

    First, create a warp profile for ``template_stratum`` with ``crs`` and
    apply it to all strata sets in ``strata`` (should contain a path to the
    template stratum as well). Next, rasterize the IFL stratum with the same
    profile. Finally, round the strata bounds, clip all strata to them and
    write the final product.

    Args:
        template_stratum (Path): Template raster stratum.
        strata (dict): Strata which will be aligned with the template
            stratum. Dict key must be a string and value should be a list of
            paths to strata.
        ifl (geopandas.GeoDataFrame): The Intact Forest Landscape stratum as
            vector layer.
        crs (rasterio.crs.CRS): Each stratum will be reprojected to this CRS.
        out_path (Path): Final and intermediate layers will stored here.
    """
    # warp profile derived from the template stratum and the requested CRS
    profile = make_warp_profile(template_stratum, crs)
    # intermediate strata are written below out_path
    profile['out'] = out_path

    # reproject every stratum set according to the warp profile
    aligned = raster_alignment(strata, **profile)

    # burn the IFL vector layer into a raster with matching geometry
    shape = (profile['height'], profile['width'])
    ifl_data = rasterize_vector(ifl, profile['transform'], profile['bounds'],
                                shape)
    ifl_name = 'ifl{:x}.tif'.format(id(ifl_data))
    aligned['ifl'] = write(ifl_data, str(out_path / ifl_name), **profile)

    # clip all strata to bounds rounded to whole degrees
    profile['bounds'] = round_bounds(profile['bounds'])
    raster_clip(aligned, **profile)
def classification_worker(gl30, gfc_treecover, gfc_gain, gfc_loss, out_name,
                          distance='hav'):
    """Worker for parallel execution of the proximate deforestation driver
    classification.

    Args:
        gl30 (str or Path): Path to GlobeLand30 stratum
        gfc_treecover (str or Path): Path to Global Forest Change treecover
            2000 stratum
        gfc_gain (str or Path): Path to Global Forest Change treecover 2000
            gain stratum
        gfc_loss (str or Path): Path to Global Forest Change treecover 2000
            loss stratum
        out_name (str of Path): Store stratum under this path with this name
        distance (str): Algorithm to use for pixel resolution computation
    """
    with open(gl30, 'r') as h_lc, open(gfc_treecover, 'r') as h_tc, \
            open(gfc_gain, 'r') as h_gain, open(gfc_loss, 'r') as h_loss:
        landcover_data = h_lc.read(1)
        treecover_data = h_tc.read(1)
        gain_data = h_gain.read(1)
        loss_data = h_loss.read(1)
        transform = h_lc.transform
        profile = h_lc.profile

    # compute cell size for this tile
    measure = Distance(distance)
    origin = (transform.xoff, transform.yoff)
    x_size = measure(origin, (transform.xoff + transform.a, transform.yoff))
    y_size = measure(origin, (transform.xoff, transform.yoff + transform.e))

    try:
        driver = superimpose(landcover_data, treecover_data, gain_data,
                             loss_data)
        reclassified = reclassify(driver, res=(x_size, y_size))
        # overwrite only cells where reclassification produced a class
        np.copyto(driver, reclassified, where=reclassified > 0)
        write(driver, out_name, **profile)
    except ValueError as err:
        LOGGER.error('Strata %s error %s', out_name, str(err))
def raster_alignment(strata, **kwargs):
    """Documentation pending

    Reprojects every stratum set in ``strata`` according to the warp
    profile; sets containing several files are merged first. Failures are
    logged and the affected stratum is skipped.

    Args:
        strata (dict): Strata
        **kwargs (dict): Warp profile

    Returns:
        dict: A dict of str
    """
    out = {}
    for key, paths in strata.items():
        if not paths:
            LOGGER.warning('Strata %s is empty', key)
            continue

        # unique name for the intermediate stratum
        token = abs(hash(''.join(paths) + str(time())))
        tmp_name = str(kwargs['out'] / '{}{:x}.tif'.format(key, token))

        if len(paths) == 1:
            # single stratum: reproject with the warp profile
            try:
                out[key] = reproject_like(*paths, tmp_name, **kwargs)
            except Exception:
                LOGGER.error('Failed strata %s includes these files %s',
                             key, paths)
        else:
            # several strata: merge them, then write with the warp profile
            try:
                merged, _ = merge_from(paths, bounds=kwargs['bounds'],
                                       res=kwargs['res'])
                out[key] = write(merged, tmp_name, **kwargs)
            except Exception:
                LOGGER.error('Failed strata %s includes these files %s',
                             key, paths)
    return out
def raster_clip(to_clip, bounds, **kwargs):
    """Documentation pending

    Clips every raster in ``to_clip`` to ``bounds`` and writes it under a
    name derived from the stratum key and the bounds orientation.

    Args:
        to_clip:
        bounds:
        **kwargs:

    Returns:
        dict:
    """
    orientation = int_to_orient(bounds.left, bounds.top)
    clipped = {}
    for key, raster_path in to_clip.items():
        out_name = str(kwargs['out'] / '{}_{}.tif'.format(key, orientation))
        data, transform = clip_raster(raster_path, bounds)
        # the clip shifts the origin, so the profile transform must follow
        kwargs['transform'] = transform
        clipped[key] = write(data, out_name, **kwargs)
    return clipped
import os

import numpy as np

import pca
import raster

# input indicator rasters (LST, NDBSI, NDVI, WET) for the RSEI computation
rasters = [
    '/mnt/AllThings/JSY/RSEI/www/lst.tif',
    '/mnt/AllThings/JSY/RSEI/www/ndbsi.tif',
    '/mnt/AllThings/JSY/RSEI/www/ndvi.tif',
    '/mnt/AllThings/JSY/RSEI/www/wet.tif'
]

rasterObjs, arrs, mask, projection, pixelResolution, intersectedBbox = raster.read(
    rasters)

# pca transform
values, vectors, projected = pca.pcaFnc(arrs)

# min-max normalization for each projected band
# (band_min/band_max instead of min/max to avoid shadowing the builtins)
for bidx in range(projected.shape[0]):
    band_min = np.min(projected[bidx])
    band_max = np.max(projected[bidx])
    # NOTE(review): a constant band (band_max == band_min) divides by zero
    # here — confirm inputs always have spread before hardening
    projected[bidx] = (projected[bidx] - band_min) / (band_max - band_min)

# save pca bands, replacing any previous output
outfile = '/home/kikat/pca.tif'
if os.path.exists(outfile):
    os.remove(outfile)
raster.write(projected, mask, projection, pixelResolution, intersectedBbox,
             np.finfo(arrs.dtype).min, outfile)