def download(geojson, asset, EXECUTION_ID, logger):
    """
    Download dataset from GEE assets.
    """
    logger.debug("Entering download function.")

    d = ee.Image(asset)

    task = util.export_to_cloudstorage(d.int16(), d.projection(), geojson,
                                       'download', logger, EXECUTION_ID)
    task.join()

    logger.debug("Setting up results JSON.")
    u = URLList(task.get_URL_base(), task.get_files())
    gee_results = CloudResults('Raw data download', __version__, [], u)
    results_schema = CloudResultsSchema()
    json_results = results_schema.dump(gee_results)

    return json_results
def run(params, logger):
    """Calculate productivity trajectory."""
    logger.debug("Loading parameters.")
    year_start = params.get('year_start', 2001)
    year_end = params.get('year_end', 2015)
    geojson = params.get('geojson', None)
    method = params.get('method', 'ndvi_trend')
    ndvi_gee_dataset = params.get('ndvi_gee_dataset', None)
    climate_gee_dataset = params.get('climate_gee_dataset', None)

    logger.debug("Loading geojson.")
    if geojson is None:
        raise GEEIOError("Must specify an input area")
    else:
        geojson = json.loads(geojson)

    if ndvi_gee_dataset is None:
        raise GEEIOError("Must specify an NDVI dataset")

    # Check the ENV. Are we running this locally or in prod?
    if params.get('ENV') == 'dev':
        EXECUTION_ID = str(random.randint(1000000, 99999999))
    else:
        EXECUTION_ID = params.get('EXECUTION_ID', None)

    logger.debug("Running main script.")
    output = productivity_trajectory(year_start, year_end, method,
                                     ndvi_gee_dataset, climate_gee_dataset,
                                     logger)

    ndvi_projection = ee.Image(ndvi_gee_dataset).projection()

    task = util.export_to_cloudstorage(
        output.unmask(-32768).int16(), ndvi_projection, geojson,
        'prod_trajectory', logger, EXECUTION_ID)
    task.join()

    logger.debug("Setting up results JSON.")
    d = [
        BandInfo("Productivity trajectory (trend)", 1, no_data_value=-32768,
                 add_to_map=True,
                 metadata={'year_start': year_start, 'year_end': year_end}),
        BandInfo("Productivity trajectory (significance)", 2,
                 no_data_value=-32768, add_to_map=True,
                 metadata={'year_start': year_start, 'year_end': year_end})
    ]
    u = URLList(task.get_URL_base(), task.get_files())
    gee_results = CloudResults('prod_trajectory', __version__, d, u)
    results_schema = CloudResultsSchema()
    json_results = results_schema.dump(gee_results)

    return json_results.data
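# Illustrative only: a minimal sketch of the 'params' dict that run() reads,
# based on the keys handled above. The NDVI asset path is hypothetical, and
# actually calling run() also requires an initialized Earth Engine session.
def _example_run_params():
    """Return a sample params dict for run() (illustration only)."""
    return {
        'year_start': 2001,
        'year_end': 2015,
        # run() expects geojson as a JSON string and calls json.loads() on it
        'geojson': '{"type": "Polygon", '
                   '"coordinates": [[[0, 0], [1, 0], [1, 1], [0, 0]]]}',
        'method': 'ndvi_trend',
        'ndvi_gee_dataset': 'users/example/ndvi_annual',  # hypothetical asset
        'climate_gee_dataset': None,
        'ENV': 'dev',  # 'dev' generates a random EXECUTION_ID locally
    }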
def land_cover(year_baseline, year_target, geojson, trans_matrix,
               remap_matrix, EXECUTION_ID, logger):
    """
    Calculate land cover indicator.
    """
    logger.debug("Entering land_cover function.")

    # Land cover
    lc = ee.Image(
        "users/geflanddegradation/toolbox_datasets/lcov_esacc_1992_2015")
    lc = lc.where(lc.eq(9999), -32768)
    lc = lc.updateMask(lc.neq(-32768))

    # Target land cover map reclassified to the 7 UNCCD classes
    lc_tg_raw = lc.select('y{}'.format(year_target))
    lc_tg_remapped = lc_tg_raw.remap(remap_matrix[0], remap_matrix[1])

    # Baseline land cover map reclassified to the 7 UNCCD classes
    lc_bl_raw = lc.select('y{}'.format(year_baseline))
    lc_bl_remapped = lc_bl_raw.remap(remap_matrix[0], remap_matrix[1])

    # Compute transition map (first digit for baseline land cover, and second
    # digit for target year land cover)
    lc_tr = lc_bl_remapped.multiply(10).add(lc_tg_remapped)

    # Definition of land cover transitions as degradation (-1), improvement
    # (1), or no relevant change (0)
    lc_dg = lc_tr.remap(
        [11, 12, 13, 14, 15, 16, 17,
         21, 22, 23, 24, 25, 26, 27,
         31, 32, 33, 34, 35, 36, 37,
         41, 42, 43, 44, 45, 46, 47,
         51, 52, 53, 54, 55, 56, 57,
         61, 62, 63, 64, 65, 66, 67,
         71, 72, 73, 74, 75, 76, 77],
        trans_matrix)

    # Remap persistence classes so they are sequential. This makes it easier
    # to assign a clear color ramp in QGIS.
    lc_tr = lc_tr.remap(
        [11, 12, 13, 14, 15, 16, 17,
         21, 22, 23, 24, 25, 26, 27,
         31, 32, 33, 34, 35, 36, 37,
         41, 42, 43, 44, 45, 46, 47,
         51, 52, 53, 54, 55, 56, 57,
         61, 62, 63, 64, 65, 66, 67,
         71, 72, 73, 74, 75, 76, 77],
        [1, 12, 13, 14, 15, 16, 17,
         21, 2, 23, 24, 25, 26, 27,
         31, 32, 3, 34, 35, 36, 37,
         41, 42, 43, 4, 45, 46, 47,
         51, 52, 53, 54, 5, 56, 57,
         61, 62, 63, 64, 65, 6, 67,
         71, 72, 73, 74, 75, 76, 7])

    lc_out = lc_bl_remapped \
        .addBands(lc_tg_remapped) \
        .addBands(lc_tr) \
        .addBands(lc_dg) \
        .addBands(lc_bl_raw) \
        .addBands(lc_tg_raw)

    # Export land cover degradation image to cloud storage
    task = util.export_to_cloudstorage(
        lc_out.unmask(-32768).int16(), lc.projection(), geojson, 'land_cover',
        logger, EXECUTION_ID)
    task.join()

    logger.debug("Setting up results JSON.")
    d = [
        BandInfo("Land cover (7 class)", 1, no_data_value=-32768,
                 add_to_map=True, metadata={'year': year_baseline}),
        BandInfo("Land cover (7 class)", 2, no_data_value=-32768,
                 add_to_map=True, metadata={'year': year_target}),
        BandInfo("Land cover transitions", 3, no_data_value=-32768,
                 add_to_map=True,
                 metadata={'year_baseline': year_baseline,
                           'year_target': year_target}),
        BandInfo("Land cover degradation", 4, no_data_value=-32768,
                 add_to_map=True,
                 metadata={'year_baseline': year_baseline,
                           'year_target': year_target}),
        BandInfo("Land cover (ESA classes)", 5, no_data_value=-32768,
                 metadata={'year': year_baseline}),
        BandInfo("Land cover (ESA classes)", 6, no_data_value=-32768,
                 metadata={'year': year_target})
    ]
    u = URLList(task.get_URL_base(), task.get_files())
    gee_results = CloudResults('land_cover', __version__, d, u)
    results_schema = CloudResultsSchema()
    json_results = results_schema.dump(gee_results)

    return json_results
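# Worked example (illustration only, not used above): land_cover() encodes
# each pixel's transition as a two-digit code, baseline class * 10 + target
# class, so forest (1) to cropland (3) becomes 13, and cropland persistence
# becomes 33 (later remapped to 3 so persistence classes are sequential).
# The plain-Python sketch below reproduces that arithmetic.
def _example_transition_code(baseline_class, target_class):
    """Return the two-digit transition code used in land_cover()."""
    return baseline_class * 10 + target_class

# e.g. _example_transition_code(1, 3) == 13  (forest -> cropland)
#      _example_transition_code(3, 3) == 33  (cropland persistence)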
def soc(year_start, year_end, fl, geojson, remap_matrix, dl_annual_soc,
        dl_annual_lc, EXECUTION_ID, logger):
    """
    Calculate SOC indicator.
    """
    logger.debug("Entering soc function.")

    # Soil organic carbon
    # soc = ee.Image("users/geflanddegradation/toolbox_datasets/soc_sgrid_30cm")
    soc = ee.Image(
        "users/geflanddegradation/toolbox_datasets/soc_sgrid_30cm_unccd_20180111"
    )
    soc_t0 = soc.updateMask(soc.neq(-32768))

    # Land cover - note it needs to be reprojected to match soc so that it
    # can be output to cloud storage in the same stack
    lc = ee.Image("users/geflanddegradation/toolbox_datasets/lcov_esacc_1992_2015") \
        .select(ee.List.sequence(year_start - 1992, year_end - 1992, 1)) \
        .reproject(crs=soc.projection())
    lc = lc.where(lc.eq(9999), -32768)
    lc = lc.updateMask(lc.neq(-32768))

    if fl == 'per pixel':
        # Setup a raster of climate regimes to use for coding Fl automatically
        climate = ee.Image("users/geflanddegradation/toolbox_datasets/ipcc_climate_zones") \
            .remap([0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12],
                   [0, 2, 1, 2, 1, 2, 1, 2, 1, 5, 4, 4, 3])
        clim_fl = climate.remap([0, 1, 2, 3, 4, 5],
                                [0, 0.8, 0.69, 0.58, 0.48, 0.64])

    # Create empty stacks to store annual land cover maps
    stack_lc = ee.Image().select()

    # Create empty stacks to store annual soc maps
    stack_soc = ee.Image().select()

    # Loop through all the years in the period of analysis to compute changes
    # in SOC
    for k in range(year_end - year_start):
        # Land cover map reclassified to UNCCD 7 classes (1: forest, 2:
        # grassland, 3: cropland, 4: wetland, 5: artificial, 6: bare, 7:
        # water)
        lc_t0 = lc.select(k).remap(remap_matrix[0], remap_matrix[1])
        lc_t1 = lc.select(k + 1).remap(remap_matrix[0], remap_matrix[1])

        if k == 0:
            # Compute transition map (first digit for baseline land cover,
            # and second digit for target year land cover)
            lc_tr = lc_t0.multiply(10).add(lc_t1)

            # Compute raster to register years since transition
            tr_time = ee.Image(2).where(lc_t0.neq(lc_t1), 1)
        else:
            # Update time since last transition. Add 1 if land cover remains
            # constant, and reset to 1 if land cover changed.
            tr_time = tr_time.where(lc_t0.eq(lc_t1), tr_time.add(ee.Image(1))) \
                .where(lc_t0.neq(lc_t1), ee.Image(1))

            # Compute transition map (first digit for baseline land cover,
            # and second digit for target year land cover), but only update
            # where changes actually occurred.
            lc_tr_temp = lc_t0.multiply(10).add(lc_t1)
            lc_tr = lc_tr.where(lc_t0.neq(lc_t1), lc_tr_temp)

        # Stock change factor for land use - note the 99 and -99 will be
        # recoded using the chosen Fl option
        lc_tr_fl_0 = lc_tr.remap(
            [11, 12, 13, 14, 15, 16, 17,
             21, 22, 23, 24, 25, 26, 27,
             31, 32, 33, 34, 35, 36, 37,
             41, 42, 43, 44, 45, 46, 47,
             51, 52, 53, 54, 55, 56, 57,
             61, 62, 63, 64, 65, 66, 67,
             71, 72, 73, 74, 75, 76, 77],
            [1, 1, 99, 1, 0.1, 0.1, 1,
             1, 1, 99, 1, 0.1, 0.1, 1,
             -99, -99, 1, 1 / 0.71, 0.1, 0.1, 1,
             1, 1, 0.71, 1, 0.1, 0.1, 1,
             2, 2, 2, 2, 1, 1, 1,
             2, 2, 2, 2, 1, 1, 1,
             1, 1, 1, 1, 1, 1, 1])

        if fl == 'per pixel':
            lc_tr_fl = lc_tr_fl_0.where(lc_tr_fl_0.eq(99), clim_fl) \
                .where(lc_tr_fl_0.eq(-99), ee.Image(1).divide(clim_fl))
        else:
            lc_tr_fl = lc_tr_fl_0.where(lc_tr_fl_0.eq(99), fl) \
                .where(lc_tr_fl_0.eq(-99), ee.Image(1).divide(fl))

        # Stock change factor for management regime (all transitions map to 1)
        lc_tr_fm = lc_tr.remap(
            [11, 12, 13, 14, 15, 16, 17,
             21, 22, 23, 24, 25, 26, 27,
             31, 32, 33, 34, 35, 36, 37,
             41, 42, 43, 44, 45, 46, 47,
             51, 52, 53, 54, 55, 56, 57,
             61, 62, 63, 64, 65, 66, 67,
             71, 72, 73, 74, 75, 76, 77],
            [1] * 49)

        # Stock change factor for input of organic matter (all transitions
        # map to 1)
        lc_tr_fo = lc_tr.remap(
            [11, 12, 13, 14, 15, 16, 17,
             21, 22, 23, 24, 25, 26, 27,
             31, 32, 33, 34, 35, 36, 37,
             41, 42, 43, 44, 45, 46, 47,
             51, 52, 53, 54, 55, 56, 57,
             61, 62, 63, 64, 65, 66, 67,
             71, 72, 73, 74, 75, 76, 77],
            [1] * 49)

        if k == 0:
            soc_chg = (soc_t0.subtract(
                soc_t0.multiply(lc_tr_fl).multiply(lc_tr_fm).multiply(lc_tr_fo)
            )).divide(20)

            # Compute final SOC stock for the period
            soc_t1 = soc_t0.subtract(soc_chg)

            # Add land cover and SOC to the stacks from both dates for the
            # first period
            stack_lc = stack_lc.addBands(lc_t0).addBands(lc_t1)
            stack_soc = stack_soc.addBands(soc_t0).addBands(soc_t1)
        else:
            # Compute annual change in SOC (updates from previous period
            # based on transition and time < 20 years)
            soc_chg = soc_chg.where(
                lc_t0.neq(lc_t1),
                (stack_soc.select(k).subtract(stack_soc.select(k)
                                              .multiply(lc_tr_fl)
                                              .multiply(lc_tr_fm)
                                              .multiply(lc_tr_fo))).divide(20)) \
                .where(tr_time.gt(20), 0)

            # Compute final SOC for the period
            socn = stack_soc.select(k).subtract(soc_chg)

            # Add land cover and SOC to the stacks only for the last year in
            # the period
            stack_lc = stack_lc.addBands(lc_t1)
            stack_soc = stack_soc.addBands(socn)

    # Compute SOC percent change for the analysis period
    soc_pch = ((stack_soc.select(year_end - year_start)
                .subtract(stack_soc.select(0)))
               .divide(stack_soc.select(0))) \
        .multiply(100)

    logger.debug("Setting up results JSON.")
    soc_out = soc_pch
    d = [
        BandInfo("Soil organic carbon (degradation)", 1, no_data_value=-32768,
                 add_to_map=True,
                 metadata={'year_start': year_start, 'year_end': year_end})
    ]

    if not dl_annual_soc:
        # Output percent change and initial and final SOC layers
        soc_out = soc_out.addBands(stack_soc.select(0)).addBands(
            stack_soc.select(year_end - year_start))
        d.extend([
            BandInfo("Soil organic carbon", 2, no_data_value=-32768,
                     add_to_map=True, metadata={'year': year_start}),
            BandInfo("Soil organic carbon", 3, no_data_value=-32768,
                     add_to_map=True, metadata={'year': year_end})
        ])
    else:
        # Output percent change and annual SOC layers
        soc_out = soc_out.addBands(stack_soc)
        for year in range(year_start, year_end + 1):
            if (year == year_start) or (year == year_end):
                add_to_map = True
            else:
                add_to_map = False
            d.extend([
                BandInfo("Soil organic carbon", len(d) + 1,
                         no_data_value=-32768, add_to_map=add_to_map,
                         metadata={'year': year})
            ])

    if not dl_annual_lc:
        # Output initial and final land cover layers
        soc_out = soc_out.addBands(stack_lc.select(0)).addBands(
            stack_lc.select(year_end - year_start))
        d.extend([
            BandInfo("Land cover (7 class)", len(d) + 1, no_data_value=-32768,
                     add_to_map=True, metadata={'year': year_start}),
            BandInfo("Land cover (7 class)", len(d) + 2, no_data_value=-32768,
                     add_to_map=True, metadata={'year': year_end})
        ])
    else:
        # Output annual land cover layers
        soc_out = soc_out.addBands(stack_lc)
        for year in range(year_start, year_end + 1):
            if (year == year_start) or (year == year_end):
                add_to_map = True
            else:
                add_to_map = False
            d.extend([
                BandInfo("Land cover (7 class)", len(d) + 1,
                         no_data_value=-32768, add_to_map=add_to_map,
                         metadata={'year': year})
            ])

    task = util.export_to_cloudstorage(
        soc_out.unmask(-32768).int16(), soc_out.projection(), geojson,
        'soil_organic_carbon', logger, EXECUTION_ID)
    task.join()

    u = URLList(task.get_URL_base(), task.get_files())
    gee_results = CloudResults('soil_organic_carbon', __version__, d, u)
    results_schema = CloudResultsSchema()
    json_results = results_schema.dump(gee_results)

    return json_results
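# Worked example (illustration only, not used above): the per-pixel SOC update
# in soc() follows the stock change approach used there, i.e. the new
# equilibrium stock is soc * Fl * Fm * Fo and the difference from the current
# stock is spread linearly over 20 years. The numbers below are made up.
def _example_soc_annual_change(soc_t0, fl, fm, fo):
    """Return (annual_change, soc_after_one_year) for a single pixel."""
    soc_chg = (soc_t0 - soc_t0 * fl * fm * fo) / 20.0
    return soc_chg, soc_t0 - soc_chg

# e.g. a pixel with soc_t0 = 60 t C/ha, fl = 0.58 (one of the climate-dependent
# Fl values defined above) and fm = fo = 1 loses (60 - 60 * 0.58) / 20 = 1.26
# t C/ha per year for the first 20 years after the transition.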
def productivity_performance(year_start, year_end, ndvi_gee_dataset, geojson,
                             EXECUTION_ID, logger):
    """Calculate productivity performance indicator."""
    logger.debug("Entering productivity_performance function.")

    ndvi_1yr = ee.Image(ndvi_gee_dataset)
    ndvi_1yr = ndvi_1yr.where(ndvi_1yr.eq(9999), -32768)
    ndvi_1yr = ndvi_1yr.updateMask(ndvi_1yr.neq(-32768))

    # Land cover data from ESA CCI
    lc = ee.Image(
        "users/geflanddegradation/toolbox_datasets/lcov_esacc_1992_2015")
    lc = lc.where(lc.eq(9999), -32768)
    lc = lc.updateMask(lc.neq(-32768))

    # Global agroecological zones from IIASA
    soil_tax_usda = ee.Image(
        "users/geflanddegradation/toolbox_datasets/soil_tax_usda_sgrid")

    # Make sure the bounding box of the poly is used, and not the geodesic
    # version, for the clipping
    poly = ee.Geometry(geojson, opt_geodesic=False)

    # Compute mean NDVI for the period
    ndvi_avg = ndvi_1yr \
        .select(ee.List(['y{}'.format(i) for i in range(year_start, year_end + 1)])) \
        .reduce(ee.Reducer.mean()).rename(['ndvi']).clip(poly)

    # Handle the case of a year_start that isn't included in the CCI data
    if year_start > 2015:
        lc_year_start = 2015
    elif year_start < 1992:
        lc_year_start = 1992
    else:
        lc_year_start = year_start

    # Reclassify lc to IPCC classes
    lc_t0 = lc.select('y{}'.format(lc_year_start)) \
        .remap([10, 11, 12, 20, 30, 40, 50, 60, 61, 62, 70, 71, 72, 80, 81,
                82, 90, 100, 160, 170, 110, 130, 180, 190, 120, 121, 122,
                140, 150, 151, 152, 153, 200, 201, 202, 210],
               [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17,
                18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32,
                33, 34, 35, 36])

    # Create a binary mask
    mask = ndvi_avg.neq(0)

    # Define MODIS projection attributes
    modis_proj = ee.Image(
        "users/geflanddegradation/toolbox_datasets/ndvi_modis_2001_2016"
    ).projection()

    # Reproject land cover, soil_tax_usda and AVHRR to MODIS resolution
    lc_proj = lc_t0.reproject(crs=modis_proj)
    soil_tax_usda_proj = soil_tax_usda.reproject(crs=modis_proj)
    ndvi_avg_proj = ndvi_avg.reproject(crs=modis_proj)

    # Define the unit of analysis as the intersection of soil_tax_usda and
    # land cover
    units = soil_tax_usda_proj.multiply(100).add(lc_proj)

    # Create a 2 band raster to compute the 90th percentile per unit (analysis
    # restricted by mask and study area)
    ndvi_id = ndvi_avg_proj.addBands(units).updateMask(mask)

    # Compute 90th percentile by unit
    perc90 = ndvi_id.reduceRegion(
        reducer=ee.Reducer.percentile([90]).group(groupField=1,
                                                  groupName='code'),
        geometry=poly,
        scale=ee.Number(modis_proj.nominalScale()).getInfo(),
        maxPixels=1e15)

    # Extract the cluster IDs and the 90th percentile
    groups = ee.List(perc90.get("groups"))
    ids = groups.map(lambda d: ee.Dictionary(d).get('code'))
    perc = groups.map(lambda d: ee.Dictionary(d).get('p90'))

    # Remap the units raster using the 90th percentile value of each unit
    raster_perc = units.remap(ids, perc)

    # Compute the ratio of observed NDVI to the 90th percentile for that class
    obs_ratio = ndvi_avg_proj.divide(raster_perc)

    # Aggregate obs_ratio to the original NDVI data resolution (for MODIS this
    # step does not change anything)
    obs_ratio_2 = obs_ratio.reduceResolution(reducer=ee.Reducer.mean(),
                                             maxPixels=2000) \
        .reproject(crs=ndvi_1yr.projection())

    # Create the final degradation output layer (-32768 is background, 0 is
    # not degraded, -1 is degraded)
    lp_perf_deg = ee.Image(-32768).where(obs_ratio_2.gte(0.5), 0) \
        .where(obs_ratio_2.lte(0.5), -1)

    lp_perf = lp_perf_deg.addBands(obs_ratio_2.multiply(10000)) \
        .addBands(units)

    task = util.export_to_cloudstorage(
        lp_perf.unmask(-32768).int16(), ndvi_1yr.projection(), geojson,
        'prod_performance', logger, EXECUTION_ID)
    task.join()

    logger.debug("Setting up results JSON.")
    d = [
        BandInfo("Productivity performance (degradation)", 1,
                 no_data_value=-32768, add_to_map=True,
                 metadata={'year_start': year_start, 'year_end': year_end}),
        BandInfo("Productivity performance (ratio)", 2,
                 no_data_value=-32768, add_to_map=False,
                 metadata={'year_start': year_start, 'year_end': year_end}),
        BandInfo("Productivity performance (units)", 3,
                 no_data_value=-32768, add_to_map=False,
                 metadata={'year_start': year_start})
    ]
    u = URLList(task.get_URL_base(), task.get_files())
    gee_results = CloudResults('prod_performance', __version__, d, u)
    results_schema = CloudResultsSchema()
    json_results = results_schema.dump(gee_results)

    return json_results