def analyze(threshold, geojson):
    """For a given Hansen tree cover threshold mask on soil carbon data
    and a geometry, return a dictionary of total t/ha.
    """
    try:
        d = {}
        hansen_asset = SETTINGS.get('gee').get('assets').get('hansen')
        soil_carbon_asset = SETTINGS.get('gee').get('assets').get(
            'soils_30m')
        region = get_region(geojson)
        reduce_args = {
            'reducer': ee.Reducer.sum().unweighted(),
            'geometry': region,
            'bestEffort': True,
            'scale': 30
        }
        # Mask to pixels above the tree cover threshold
        tc_mask = ee.Image(hansen_asset).select('tree_' + str(threshold)).gt(0)
        # Convert t/ha to tonnes per pixel (pixelArea is m2; 10,000 m2 = 1 ha)
        sc = ee.Image(soil_carbon_asset).multiply(
            ee.Image.pixelArea().divide(10000)).mask(tc_mask)
        # Sum the soil carbon value over the region
        sc_value = sc.reduceRegion(**reduce_args).getInfo()
        d['total_soil_carbon'] = sc_value
        return d
    except Exception as error:
        logging.error(str(error))
        raise soilCarbonError(message='Error in soil carbon analysis')
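# Usage sketch (hypothetical, not from the source): how a caller might invoke
# analyze() above. The GeoJSON polygon is made up, and the call assumes ee has
# already been initialised with valid credentials.
sample_geojson = {
    'type': 'Polygon',
    'coordinates': [[[-51.0, -1.0], [-50.0, -1.0], [-50.0, 0.0],
                     [-51.0, 0.0], [-51.0, -1.0]]]
}
# threshold is a Hansen tree cover percentage (e.g. 10, 30, 75)
result = analyze(30, sample_geojson)
print(result.get('total_soil_carbon'))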
def _gee_biomass(geom, thresh):
    image1 = get_thresh_image(
        str(thresh),
        SETTINGS.get('gee').get('assets').get('biomassloss_v1').get(
            'hansen_loss_thresh'))
    image2 = ee.Image(
        SETTINGS.get('gee').get('assets').get('biomassloss_v1').get(
            'biomass_2000'))
    region = get_region(geom)
    # Reducer arguments
    reduce_args = {
        'reducer': ee.Reducer.sum(),
        'geometry': region,
        'bestEffort': True,
        'scale': 90
    }
    # Calculate stats: divide by 10,000 to convert m2 to ha, and by 255
    # because that is the pixel value where the loss mask is true
    area_stats = image2.multiply(image1) \
        .divide(10000 * 255.0) \
        .multiply(ee.Image.pixelArea()) \
        .reduceRegion(**reduce_args)
    carbon_stats = image2.multiply(
        ee.Image.pixelArea().divide(10000)).reduceRegion(**reduce_args)
    area_results = area_stats.combine(carbon_stats).getInfo()
    return area_results
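# Worked unit check (a sketch with hypothetical values): image2 holds biomass
# in t/ha and image1 is a loss mask whose "true" pixels hold the value 255.
biomass_t_per_ha = 120.0      # hypothetical biomass density
loss_mask_value = 255.0       # mask pixel value where loss is true
pixel_area_m2 = 90.0 * 90.0   # one pixel at the 90 m analysis scale
# (t/ha * mask / (m2-per-ha * mask-true-value)) * m2 = tonnes in the pixel
tonnes = biomass_t_per_ha * loss_mask_value / (10000 * 255.0) * pixel_area_m2
print(tonnes)  # 97.2 t of biomass lost in this pixel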
def analyze(threshold, geojson):
    """For a given Hansen threshold mask on WHRC biomass data and geometry
    return a dictionary of total t/ha.
    """
    try:
        d = {}
        hansen_asset = SETTINGS.get('gee').get('assets').get('hansen')
        biomass_asset = SETTINGS.get('gee').get('assets').get('whrc_biomass')
        region = get_region(geojson)
        reduce_args = {
            'reducer': ee.Reducer.sum().unweighted(),
            'geometry': region,
            'bestEffort': True,
            'scale': 30
        }
        # Mask to pixels above the tree cover threshold, renaming the band
        tc_mask = ee.Image(hansen_asset).select(
            ['tree_' + str(threshold)], ['tree_cover']).gt(0)
        biomass = ee.ImageCollection(biomass_asset).max().multiply(
            ee.Image.pixelArea().divide(10000)).mask(tc_mask)
        tree_cover = tc_mask.multiply(
            ee.Image.pixelArea().divide(10000)).mask(tc_mask)
        # Identify thresholded biomass and tree cover values
        biomass_value = biomass.reduceRegion(**reduce_args).getInfo()
        tree_cover_value = tree_cover.reduceRegion(**reduce_args).getInfo()
        d['biomass'] = biomass_value.get('b1', 0)
        d['tree_cover'] = tree_cover_value.get('tree_cover', 0)
        return d
    except Exception as error:
        logging.error(str(error))
        raise WHRCBiomassError(message='Error in WHRC Biomass Analysis')
def analyze(geojson):
    """For a given geometry and mangrove biomass data
    return a dictionary of total t/ha.
    """
    try:
        d = {}
        biomass_asset = SETTINGS.get('gee').get('assets').get(
            'mangrove_biomass')
        region = get_region(geojson)
        reduce_args = {
            'reducer': ee.Reducer.sum().unweighted(),
            'geometry': region,
            'bestEffort': True,
            'scale': 30
        }
        # Convert t/ha to tonnes per pixel
        biomass = ee.Image(biomass_asset).multiply(
            ee.Image.pixelArea().divide(10000))
        # Sum the total biomass value over the region
        biomass_value = biomass.reduceRegion(**reduce_args).getInfo()
        d['biomass'] = biomass_value
        return d
    except Exception as error:
        logging.error(str(error))
        raise MangroveBiomassError(
            message='Error in Mangrove Biomass Analysis')
def analyze(geojson):
    """For a given geometry and population data
    return a dictionary of the total number of people in the region.
    """
    try:
        d = {}
        # The asset stores the number of people per cell
        population_asset = SETTINGS.get('gee').get('assets').get(
            'population')
        region = get_region(geojson)
        reduce_args = {
            'reducer': ee.Reducer.sum().unweighted(),
            'geometry': region,
            'bestEffort': True,
            'scale': 30
        }
        # The asset already holds persons per cell, so no per-area scaling
        # is applied here; a density (persons/ha) asset would instead need
        # .multiply(ee.Image.pixelArea().divide(1e4)) to convert m2 to ha
        population = ee.Image(population_asset)
        # Total population value within the region
        population_value = population.reduceRegion(**reduce_args).getInfo()
        d['population'] = population_value
        return d
    except Exception as error:
        logging.error(str(error))
        raise PopulationError(message='Error in Population Analysis')
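# Sketch (an assumption, not the service's behaviour): if the population
# asset were a density grid in persons/ha rather than persons per cell, the
# per-pixel count would be recovered like this before reducing.
def density_to_count(density_image):
    # persons/ha * (m2 per pixel / m2 per ha) = persons per pixel
    return density_image.multiply(ee.Image.pixelArea().divide(1e4))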
def analyze(geojson, start_date, end_date):
    """Forma250 microservice class.

    This service uses the latest image in the
    'projects/wri-datalab/FormaGlobalGFW' image collection. The bands of
    that image contain:
      'alert_delta': the percent of clearing per pixel that occurred
        within the last 3 months,
      'alert_near_term_delta': the percent of clearing that occurred
        within the last 1 month,
      'alert_date': the first date when the delta passed the threshold of
        an ecogroup,
      'alert_clearing': the percent of clearing in the pixel over the
        past year, and
      'alert_accuracy': the error of clearing based on historical
        performance.

    We use alert_date to identify the alert_delta pixels that fall within
    a specific date range and mask out the rest. We then calculate both an
    area in ha weighted by the fractional alert_delta percent, and a
    simple count of pixels where clearing occurred over the date range.
    """
    try:
        region = get_region(geojson)
        asset_id = SETTINGS.get('gee').get('assets').get('forma250GFW')
        logging.info(asset_id)
        ic = ee.ImageCollection(asset_id).sort('system:time_start', False)
        latest = ee.Image(ic.first())
        alert_date_band = latest.select('alert_date')
        milisec_date_start = ee.Date(start_date).millis()
        milisec_date_end = ee.Date(end_date).millis()
        date_mask = alert_date_band.gte(milisec_date_start).And(
            alert_date_band.lte(milisec_date_end))
        reduce_sum_args = {
            'reducer': ee.Reducer.sum().unweighted(),
            'geometry': region,
            'bestEffort': True,
            'scale': 231.65635826395828,
            'crs': "EPSG:4326",
            'maxPixels': 9999999999
        }
        # alert_delta is a percent, so divide by 100 before weighting by area
        area_m2 = latest.select('alert_delta').mask(date_mask).divide(
            100).multiply(ee.Image.pixelArea()).reduceRegion(
                **reduce_sum_args).getInfo()
        alert_area_ha = squaremeters_to_ha(area_m2['alert_delta'])
        tmp_counts = date_mask.gt(0).reduceRegion(
            **reduce_sum_args).getInfo()
        alert_counts = int(tmp_counts['alert_date'])
        # Note: we need to pass the area from the geojson object to area_ha,
        # and also add the 'area_ha_loss' key/value into the json that is
        # passed to the front-end.
        return {
            'area_ha_loss': alert_area_ha,
            'alert_counts': alert_counts
        }
    except Exception as error:
        logging.error(str(error))
        raise FormaError(message='Error in Forma250 Analysis')
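# Sketch (pure Python, for illustration): alert_date stores epoch
# milliseconds, which is what ee.Date(...).millis() produces server-side.
# This mirrors that conversion locally for a 'YYYY-MM-DD' string.
from datetime import datetime, timezone

def to_epoch_millis(date_str):
    dt = datetime.strptime(date_str, '%Y-%m-%d').replace(tzinfo=timezone.utc)
    return int(dt.timestamp() * 1000)

print(to_epoch_millis('2017-01-01'))  # 1483228800000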
def latest():
    """Gets the date of the latest image."""
    try:
        asset_id = SETTINGS.get('gee').get('assets').get('forma250GFW')
        logging.info(asset_id)
        ic = ee.ImageCollection(asset_id)
        # get(-1) indexes from the end of the list, i.e. the last image
        latest_im = ic.toList(ic.size()).get(-1).getInfo()
        latest_date = latest_im['properties']['date']
        logging.info('Retrieving latest date: ')
        logging.info(latest_date)
        return {'latest': latest_date}
    except Exception as error:
        logging.error(str(error))
        raise FormaError(message='Error in Forma250 Analysis')
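# Alternative sketch (an equivalent approach, not the service's code):
# sorting by 'system:time_start' avoids materialising the whole collection
# as a list just to take its last element.
def latest_by_sort(asset_id):
    ic = ee.ImageCollection(asset_id).sort('system:time_start', False)
    return ee.Image(ic.first()).get('date').getInfo()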
def _gee_hansen(geojson, thresh):
    image = get_thresh_image(
        str(thresh),
        SETTINGS.get('gee').get('assets').get('biomassloss_v1').get(
            'hansen_loss_thresh'))
    region = get_region(geojson)
    # Reducer arguments
    reduce_args = {
        'reducer': ee.Reducer.sum(),
        'geometry': region,
        'bestEffort': True,
        'scale': 90
    }
    # Calculate stats: divide by 10,000 to convert m2 to ha, and by 255
    # because that is the pixel value where the loss mask is true
    area_stats = image.divide(10000 * 255.0) \
        .multiply(ee.Image.pixelArea()) \
        .reduceRegion(**reduce_args)
    return area_stats.getInfo()
def analyze(threshold,
            geojson,
            begin,
            end,
            aggregate_values=True,
            n_divisions='n4',
            method='reduce_regions',
            num_pixels=10000,
            best_effort=False):
    try:
        # Set constants for the analysis; make all objects server-side
        # returns ee.Number
        num_pixels = ee.Number(num_pixels).long()
        threshold = ee.Number(threshold)
        # Two-digit years (e.g. 15 for 2015) match the lossyear band values
        begin = ee.Number.parse(ee.Date(begin).format('yy').slice(0, 2))
        end = ee.Number.parse(ee.Date(end).format('yy').slice(0, 2))
        # returns ee.String
        n_divisions = ee.String(n_divisions)
        lossyear_band = ee.String('lossyear_').cat(threshold.format())
        treecover2000_band = ee.String('treecover2000_').cat(
            threshold.format())
        treecover2010_band = ee.String('treecover2010_').cat(
            threshold.format())
        gain20002012_band = ee.String('gain20002012')
        asset_id = ee.String(
            SETTINGS.get('gee').get('assets').get('hansen_optimised'))

        # Get the feature collection of geometries
        # returns ee.FeatureCollection
        region_nosplit = get_region(geojson)

        def divide_geometry_ntimes(feature, n_divisions=n_divisions):
            """Optionally divide each of the fc's geometries n times
            and return the resulting fc.

            returns ee.FeatureCollection
            """
            fc4 = ee.FeatureCollection(divide_geometry(feature))
            fc16 = ee.FeatureCollection(fc4.map(divide_geometry)).flatten()
            fc64 = ee.FeatureCollection(
                fc16.map(divide_geometry)).flatten()
            fc256 = ee.FeatureCollection(
                fc64.map(divide_geometry)).flatten()
            out = ee.Dictionary({
                'n0': ee.FeatureCollection(feature),
                'n4': fc4,
                'n16': fc16,
                'n64': fc64,
                'n256': fc256,
            })
            return ee.FeatureCollection(out.get(n_divisions))

        region_split = ee.FeatureCollection(region_nosplit).map(
            divide_geometry_ntimes).flatten()

        # Choose whether to split the region: region_split or region_nosplit
        # is used depending on the (truthy) value of n_divisions
        # returns ee.FeatureCollection
        region_fc = ee.Algorithms.If(n_divisions, region_split,
                                     region_nosplit)
        n_features = ee.Number(ee.FeatureCollection(region_fc).size())

        def get_area(f):
            return ee.Feature(
                None, {
                    'area_ha': ee_squaremeters_to_ha(f.geometry().area()),
                    # a 30 m pixel covers 900 m2
                    'px': f.geometry().area().divide(30.0 * 30.0)
                })

        area_feature = ee.FeatureCollection(region_fc).map(get_area)
        total_area = area_feature.aggregate_sum('area_ha')

        # If the geometry was split, divide numPixels by the number of splits
        num_pixels = ee.Algorithms.If(
            n_divisions,
            ee.Number(num_pixels).divide(n_features).long(), num_pixels)

        # Get the optimised Hansen asset; all bands are binary except lossyear
        hansen_optimised = ee.Image(asset_id)

        # Identify year 2000 tree cover at the given threshold
        # returns ee.Image
        treecover2000_image = ee.Image(
            hansen_optimised.select(treecover2000_band))
        treecover2000_image = treecover2000_image.updateMask(
            treecover2000_image)
        # returns ee.Number
        extent2000 = get_extent_fc(treecover2000_image,
                                   region_fc,
                                   method=method,
                                   bestEffort=best_effort,
                                   scale=False,
                                   numPixels=num_pixels)
        extent2000 = ee_squaremeters_to_ha(extent2000)

        # Identify 2010 tree cover at the given threshold
        # returns ee.Image
        treecover2010_image = ee.Image(
            hansen_optimised.select(treecover2010_band))
        treecover2010_image = treecover2010_image.updateMask(
            treecover2010_image)
        # returns ee.Number
        extent2010 = get_extent_fc(treecover2010_image,
                                   region_fc,
                                   method=method,
                                   bestEffort=best_effort,
                                   scale=False,
                                   numPixels=num_pixels)
        extent2010 = ee_squaremeters_to_ha(extent2010)

        # Identify tree gain over the data collection period
        # returns ee.Image
        # NOTE: gain is NOT thresholded by treecover2000!
        # FIXME: it makes no sense to export thresholded gain!
        gain20002012_image = ee.Image(
            hansen_optimised.select(gain20002012_band))
        gain20002012_image = gain20002012_image.updateMask(
            gain20002012_image)
        # returns ee.Number
        gain = get_extent_fc(gain20002012_image,
                             region_fc,
                             method=method,
                             bestEffort=best_effort,
                             scale=False,
                             numPixels=num_pixels)
        gain = ee_squaremeters_to_ha(gain)

        # Identify loss
        # returns ee.Image
        lossyear_image = ee.Image(hansen_optimised.select(lossyear_band))
        lossyear_image = lossyear_image.updateMask(lossyear_image)
        # Select loss pixels from the begin year to the end year (0-18)
        # returns ee.Image
        loss_image_ag = lossyear_image.gte(begin).And(
            lossyear_image.lte(end))
        # returns ee.Number
        loss_ag = get_extent_fc(loss_image_ag,
                                region_fc,
                                method=method,
                                bestEffort=best_effort,
                                scale=False,
                                numPixels=num_pixels)
        loss_ag = ee_squaremeters_to_ha(loss_ag)

        # Identify the loss area per year from the begin year to the end
        # year (inclusive): add one subset image per year to a collection,
        # then map a reducer over the image collection
        # returns ee.List
        year_list = ee.List.sequence(begin, end, 1)

        # returns ee.ImageCollection
        def tmp_f(year):
            year = ee.Number(year)
            return lossyear_image \
                .updateMask(lossyear_image.eq(ee.Image.constant(year))) \
                .divide(year) \
                .set({'year': ee.Number(2000).add(year)})

        yearly_loss_collection = ee.ImageCollection(year_list.map(tmp_f))

        # returns ee.FeatureCollection
        def reduceFunction(img):
            tmp = get_extent_fc(img,
                                region_fc,
                                method=method,
                                bestEffort=best_effort,
                                scale=False,
                                numPixels=num_pixels)
            out = ee_squaremeters_to_ha(tmp)
            year = ee.Number(img.get('year')).format('%.0f')
            return ee.Feature(None, {'year': year, 'loss': out})

        output = yearly_loss_collection.map(reduceFunction)
        # returns ee.Dictionary
        loss_years = ee.Dictionary.fromLists(
            output.aggregate_array('year'),
            output.aggregate_array('loss'))

        # Choose which loss type to return: loss_ag or loss_years is used
        # depending on the value of the Boolean aggregate_values
        loss = ee.Algorithms.If(aggregate_values, loss_ag, loss_years)

        # Create a dictionary of results
        # returns ee.Dictionary
        d = ee.Dictionary({
            'areaHa': total_area,
            'loss_start_year': begin,
            'loss_end_year': end,
            'treeExtent': extent2000,
            'treeExtent2010': extent2010,
            'gain': gain,
            'loss': loss
        })
        # Evaluate the dictionary of results
        tmp_d = d.getInfo()
        return tmp_d
    except Exception as error:
        logging.error(str(error))
        raise HansenError(message='Error in Hansen Analysis')
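# Sketch (pure Python, for illustration): the lossyear band encodes years as
# offsets from 2000 (1-18), and the service derives the same offsets from the
# two-digit year produced by ee.Date(...).format('yy'). This mirrors that
# conversion locally.
def to_loss_offset(date_str):
    # '2015-01-01' -> 15
    return int(date_str.split('-')[0]) - 2000

assert to_loss_offset('2001-01-01') == 1
assert to_loss_offset('2018-12-31') == 18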
import ee from flask import Flask from gfwanalysis.config import SETTINGS from gfwanalysis.routes.api import error from gfwanalysis.routes.api.v1 import hansen_endpoints_v1, forma250_endpoints_v1, \ biomass_loss_endpoints_v1, landsat_tiles_endpoints_v1, histogram_endpoints_v1, \ landcover_endpoints_v1, sentinel_tiles_endpoints_v1, highres_tiles_endpoints_v1, \ recent_tiles_endpoints_v1, whrc_biomass_endpoints_v1, mangrove_biomass_endpoints_v1, \ population_endpoints_v1, soil_carbon_endpoints_v1, mc_analysis_endpoints_v1, \ recent_tiles_classifier_v1, composite_service_v1, geodescriber_endpoints_v1 from gfwanalysis.routes.api.v2 import biomass_loss_endpoints_v2, landsat_tiles_endpoints_v2, nlcd_landcover_endpoints_v2 from gfwanalysis.utils.files import load_config_json logging.basicConfig( level=SETTINGS.get('logging', {}).get('level'), format='%(asctime)s - %(name)s - %(levelname)s - %(message)s', datefmt='%Y%m%d-%H:%M%p', ) # Initializing GEE gee = SETTINGS.get('gee') ee_user = gee.get('service_account') private_key_file = gee.get('privatekey_file') if private_key_file: logging.info( f'Initializing EE with privatekey.json credential file: {ee_user} | {private_key_file}' ) credentials = ee.ServiceAccountCredentials(ee_user, private_key_file) ee.Initialize(credentials) ee.data.setDeadline(60000)
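# Sketch (an assumption, not the application's code): with no key file
# configured, the client library can fall back to locally persisted
# credentials (e.g. created by `earthengine authenticate`).
else:
    logging.info('Initializing EE with default persisted credentials')
    ee.Initialize()
    ee.data.setDeadline(60000)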
"""EE LANDSAT TILE URL SERVICE""" import json import logging import ee import redis from gfwanalysis.config import SETTINGS from gfwanalysis.errors import LandsatTilesError r = redis.StrictRedis.from_url(url=SETTINGS.get('redis').get('url')) class RedisService(object): @staticmethod def check_year_mapid(year): text = r.get(year) if text is not None: return json.loads(text) return None @staticmethod def get(year): text = r.get(year) if text is not None: return text return None @staticmethod def set_year_mapid(year, mapid, token):
def analyze(geojson):
    """Analyze NLCD Landcover."""
    try:
        logging.info('[nlcd-landcover-service]: Initialize analysis')
        # nlcd land cover
        d = {}
        # Extract integer years to work with for the time range
        valid_years = [{
            'id': 'NLCD2001',
            'year': 2001
        }, {
            'id': 'NLCD2006',
            'year': 2006
        }, {
            'id': 'NLCD2011',
            'year': 2011
        }, {
            'id': 'NLCD2016',
            'year': 2016
        }]
        # Gather assets
        band_name = 'landcover'
        landcover_asset = SETTINGS.get('gee').get('assets').get(
            'us_landcover')
        image_list = [
            ee.Image(f"{landcover_asset}/{year.get('id')}")
            for year in valid_years
        ]
        us_landcover = ee.ImageCollection(image_list).select(band_name)
        region = get_region(geojson)
        scale = 30
        logging.info(
            f'[nlcd-landcover-service]: built assets for analysis, using {band_name}'
        )
        # Calculate landcover with a collection operation method
        stats = us_landcover.map(
            ImageColIntersect(region, scale,
                              ee.Reducer.frequencyHistogram())).getInfo()
        logging.info(f'[nlcd-landcover-service]: retrieved {stats}')
        # Format the data structure: convert 30 m pixel counts to hectares
        data = [{
            'id': feat['id'],
            'stats': {
                k: v * 30 * 30 * 1e-4
                for k, v in feat['properties'][band_name].items()
            }
        } for feat in stats['features']]
        tmp = {}
        for el in data:
            year = [y['year'] for y in valid_years if y['id'] == el['id']][0]
            tmp[year] = lookup('nlcd_landcover', el['stats'], False)
        d['nlcd_landcover'] = tmp
        return d
    except Exception as error:
        logging.error(str(error))
        raise NLCDLandcoverError(message='Error in LandCover Analysis')
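# Worked conversion (pure Python, hypothetical counts): frequencyHistogram
# returns per-class pixel counts; at 30 m resolution each pixel covers
# 900 m2, and 1e-4 converts m2 to ha.
histogram = {'41': 1250.0, '82': 400.0}  # hypothetical class: pixel-count
hectares = {k: v * 30 * 30 * 1e-4 for k, v in histogram.items()}
print(hectares)  # {'41': 112.5, '82': 36.0}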
def analyze(threshold, geojson, begin, end):
    """Take WHRC carbon and Hansen loss data and return the amount of
    biomass, carbon and CO2 lost over time.
    """
    try:
        logging.info(
            f'[biomass-loss-service]: with properties passed: {threshold}, {begin}, {end}')
        # Hansen tree cover loss by year
        d = {}
        # Extract integer years to work with for the time range
        start_year = int(begin.split('-')[0]) - 2000
        end_year = int(end.split('-')[0]) - 2000
        # Gather assets
        band_name = f'loss_{threshold}'
        hansen_asset = SETTINGS.get('gee').get('assets').get('hansen')
        biomass_asset = SETTINGS.get('gee').get('assets').get('whrc_biomass')
        hansen = ee.Image(hansen_asset).select(band_name)
        biomass = ee.ImageCollection(biomass_asset).max()
        region = get_region(geojson)
        logging.info(
            f'[biomass-loss-service]: built assets for analysis, using Hansen band {band_name}')
        # Reducer arguments
        reduce_args = {
            'reducer': ee.Reducer.sum().unweighted(),
            'geometry': region,
            'bestEffort': True,
            'scale': 30,
            'tileScale': 16
        }
        # Mask the Hansen data with itself (important to prevent over-counting)
        hansen = hansen.mask(hansen)

        # Calculate annual biomass loss with a collection operation method
        def reduceFunction(img):
            out = img.reduceRegion(**reduce_args)
            return ee.Feature(None, img.toDictionary().combine(out))

        # Calculate annual biomass loss: add one masked image per year to a
        # collection (converting t/ha to tonnes per pixel by multiplying by
        # pixelArea/10000), then map the reducer over it
        collectionG = ee.ImageCollection([
            biomass.multiply(ee.Image.pixelArea().divide(10000)).mask(
                hansen.updateMask(hansen.eq(year))).set({'year': 2000 + year})
            for year in range(start_year, end_year + 1)
        ])
        output = collectionG.map(reduceFunction).getInfo()
        # Convert to carbon and CO2 values and add to the output dictionary
        biomass_to_carbon = 0.5
        carbon_to_co2 = 3.67
        d['biomassLossByYear'] = {}
        d['cLossByYear'] = {}
        d['co2LossByYear'] = {}
        for row in output.get('features'):
            yr = row.get('properties').get('year')
            d['biomassLossByYear'][yr] = row.get('properties').get('b1')
            d['cLossByYear'][yr] = float(
                f"{d.get('biomassLossByYear').get(yr) * biomass_to_carbon :3.2f}")
            d['co2LossByYear'][yr] = float(
                f"{d.get('cLossByYear').get(yr) * carbon_to_co2 :3.2f}")
        # Calculate total biomass loss
        d['biomassLoss'] = sum(
            [d.get('biomassLossByYear').get(y) for y in d.get('biomassLossByYear')])
        return d
    except Exception as error:
        logging.error(str(error))
        raise BiomassLossError(message='Error in BiomassLoss Analysis')
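# Worked conversion (pure Python, hypothetical value): biomass is taken to be
# roughly 50% carbon, and the CO2/C molar mass ratio (44/12) is ~3.67, the
# factors used above.
biomass_loss_t = 1000.0
carbon_t = biomass_loss_t * 0.5   # 500.0 t C
co2_t = carbon_t * 3.67           # 1835.0 t CO2
print(round(carbon_t, 2), round(co2_t, 2))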