def export_forest_fire(self, geojsons, task_name, crs, logger, execution_id=None, proj=None):
    """Export this image to cloud storage, one export task per geojson.

    Args:
        geojsons: iterable of geojson dicts defining the export regions.
        task_name: label inserted into each output file name (may be falsy,
            in which case it is omitted from the name).
        crs: coordinate reference system for the exported rasters.
        logger: logger used for progress messages.
        execution_id: unique run identifier; a random one is generated
            when not supplied.
        proj: projection; defaults to this image's own projection.
            NOTE(review): not passed to the export itself — confirm it is
            intentionally unused here.

    Returns:
        Serialized ``CloudResults`` (dict) holding the download urls of
        all completed tasks.
    """
    if not execution_id:
        execution_id = str(random.randint(1000000, 99999999))
    # (Removed a no-op `else: execution_id = execution_id` branch.)
    if not proj:
        proj = self.image.projection()

    tasks = []
    # Number output files from 1 so names are stable and human-readable.
    for n, geojson in enumerate(geojsons, start=1):
        if task_name:
            out_name = '{}_{}_{}'.format(execution_id, task_name, n)
        else:
            out_name = '{}_{}'.format(execution_id, n)
        export = {
            'image': self.image,
            'description': out_name,
            'fileNamePrefix': out_name,
            'bucket': BUCKET,
            'maxPixels': 1e13,
            'crs': crs,
            'scale': 30,
            'region': get_coords(geojson)
        }
        t = gee_task(ee.batch.Export.image.toCloudStorage(**export),
                     out_name, logger)
        tasks.append(t)

    logger.debug("Exporting to cloud storage.")
    urls = []
    for task in tasks:
        # join() blocks until the GEE task finishes.
        task.join()
        urls.extend(task.get_urls())

    gee_results = CloudResults(task_name, self.band_info, urls)
    results_schema = CloudResultsSchema()
    json_results = results_schema.dump(gee_results)
    return json_results
def run(params, logger):
    """Run the urban-change analysis over the supplied geojsons.

    Reads thresholds and geojsons from ``params``, verifies the total
    requested area is within limits, runs the ``urban`` script, merges
    the per-area results, and returns them serialized.
    """
    logger.debug("Loading parameters.")
    un_adju = params.get('un_adju', None)
    isi_thr = float(params.get('isi_thr', None))
    ntl_thr = float(params.get('ntl_thr', None))
    wat_thr = float(params.get('wat_thr', None))
    cap_ope = float(params.get('cap_ope', None))
    pct_suburban = float(params.get('pct_suburban', None))
    pct_urban = float(params.get('pct_urban', None))
    geojsons = json.loads(params.get('geojsons', None))
    crs = params.get('crs', None)

    # Check the ENV. Are we running this locally or in prod?
    if params.get('ENV') == 'dev':
        EXECUTION_ID = str(random.randint(1000000, 99999999))
    else:
        EXECUTION_ID = params.get('EXECUTION_ID', None)

    logger.debug("Checking total area of supplied geojsons:")
    area = 0
    for geojson in geojsons:
        aoi = ee.Geometry.MultiPolygon(get_coords(geojson))
        # area() is in square meters; convert to square kilometers.
        area += aoi.area().getInfo() / (1000 * 1000)

    # QGIS code limits area of bounding box to 25,000 sq km, so we shouldn't
    # ever have bounding boxes exceeding that area, but add an additional check
    # here (with an error margin of 10,000 sq km...) just in case.
    if area > 35000:
        logger.debug("Area ({:.6n} km sq) is too large - failing task".format(area))
        # Raise with an explicit message instead of a bare `raise Exception`.
        raise Exception("Area ({:.6n} km sq) is too large".format(area))
    else:
        logger.debug("Processing total area of {:.6n} km sq".format(area))

    logger.debug("Running main script.")
    out = urban(isi_thr, ntl_thr, wat_thr, cap_ope, pct_suburban,
                pct_urban, un_adju, crs, geojsons, EXECUTION_ID, logger)

    schema = CloudResultsSchema()
    logger.debug("Deserializing")
    # Merge the urls from every per-area result into the first one.
    final_output = schema.load(out[0])
    for o in out[1:]:
        this_out = schema.load(o)
        final_output.urls.extend(this_out.urls)

    logger.debug("Serializing")
    # Now serialize the output again and return it
    return schema.dump(final_output)
def run(params, logger):
    """Run the forest-change (loss/gain/cover) analysis.

    For each supplied geojson, computes whichever of the loss, gain and
    cover layers were requested, merges them into one output, exports it,
    and returns the serialized, merged CloudResults.
    """
    logger.debug("Loading parameters.")
    calc_gain = params.get('calc_gain')
    calc_loss = params.get('calc_loss')
    calc_cover = params.get('calc_cover')
    geojsons = json.loads(params.get('geojsons'))
    logger.debug(geojsons)
    # Output CRS is fixed to WGS 84 (EPSG:4326).
    crs = 'GEOGCS["WGS 84",DATUM["WGS_1984",SPHEROID["WGS 84",6378137,298.257223563,AUTHORITY["EPSG","7030"]],AUTHORITY["EPSG","6326"]],PRIMEM["Greenwich",0,AUTHORITY["EPSG","8901"]],UNIT["degree",0.0174532925199433,AUTHORITY["EPSG","9122"]],AUTHORITY["EPSG","4326"]]'

    # Check the ENV. Are we running this locally or in prod?
    if params.get('ENV') == 'dev':
        EXECUTION_ID = str(random.randint(1000000, 99999999))
    else:
        EXECUTION_ID = params.get('EXECUTION_ID', None)

    logger.debug("Running main script.")
    outs = []
    for geojson in geojsons:
        this_out = None
        if calc_loss:
            loss = forest_loss(geojson, EXECUTION_ID, logger)
            if not this_out:
                this_out = loss
            else:
                this_out.merge(loss)
        if calc_gain:
            gain = forest_gain(geojson, EXECUTION_ID, logger)
            if not this_out:
                this_out = gain
            else:
                this_out.merge(gain)
        if calc_cover:
            cover = forest_cover(geojson, EXECUTION_ID, logger)
            if not this_out:
                this_out = cover
            else:
                this_out.merge(cover)
        # Fail with a clear message instead of an AttributeError on None
        # when no layer at all was requested.
        if this_out is None:
            raise Exception(
                "At least one of calc_loss, calc_gain or calc_cover "
                "must be set")
        outs.append(this_out.export([geojson], 'Forest Change', crs,
                                    logger, EXECUTION_ID))

    # First need to deserialize the data that was prepared for output from
    # the productivity functions, so that new urls can be appended
    schema = CloudResultsSchema()
    logger.debug("Deserializing")
    final_output = schema.load(outs[0])
    for o in outs[1:]:
        this_out = schema.load(o)
        final_output.urls.extend(this_out.urls)

    logger.debug("Serializing")
    # Now serialize the output again and return it
    return schema.dump(final_output)
def run(params, logger):
    """Run the productivity indicators or the JRC LPD download.

    In 'Trends.Earth productivity' mode computes the requested
    trajectory/performance/state indicators per geojson and exports them;
    in 'JRC LPD' mode downloads the precomputed LPD layer. Returns the
    serialized CloudResults.
    """
    logger.debug("Loading parameters.")
    prod_mode = params.get('prod_mode')
    calc_traj = params.get('calc_traj')
    calc_state = params.get('calc_state')
    calc_perf = params.get('calc_perf')
    prod_traj_year_initial = int(params.get('prod_traj_year_initial'))
    prod_traj_year_final = int(params.get('prod_traj_year_final'))
    prod_perf_year_initial = int(params.get('prod_perf_year_initial'))
    prod_perf_year_final = int(params.get('prod_perf_year_final'))
    prod_state_year_bl_start = int(params.get('prod_state_year_bl_start'))
    prod_state_year_bl_end = int(params.get('prod_state_year_bl_end'))
    prod_state_year_tg_start = int(params.get('prod_state_year_tg_start'))
    prod_state_year_tg_end = int(params.get('prod_state_year_tg_end'))
    geojsons = json.loads(params.get('geojsons'))
    crs = params.get('crs')
    prod_traj_method = params.get('trajectory_method')
    ndvi_gee_dataset = params.get('ndvi_gee_dataset')
    climate_gee_dataset = params.get('climate_gee_dataset')

    # Check the ENV. Are we running this locally or in prod?
    if params.get('ENV') == 'dev':
        EXECUTION_ID = str(random.randint(1000000, 99999999))
    else:
        EXECUTION_ID = params.get('EXECUTION_ID', None)

    logger.debug("Running productivity indicators.")
    proj = ee.Image(ndvi_gee_dataset).projection()

    if prod_mode == 'Trends.Earth productivity':
        outs = []
        for geojson in geojsons:
            # NOTE(review): each indicator below is passed
            # geojsons[0]['coordinates'] even though we iterate over every
            # geojson — confirm this is intentional and not meant to be
            # geojson['coordinates'].
            this_out = None
            if calc_traj:
                traj = productivity_trajectory(
                    geojsons[0]['coordinates'],
                    int(prod_traj_year_initial), int(prod_traj_year_final),
                    prod_traj_method, ndvi_gee_dataset,
                    climate_gee_dataset, logger)
                if not this_out:
                    this_out = traj
            if calc_perf:
                perf = productivity_performance(
                    geojsons[0]['coordinates'],
                    prod_perf_year_initial, prod_perf_year_final,
                    ndvi_gee_dataset, geojson, EXECUTION_ID, logger)
                if not this_out:
                    this_out = perf
                else:
                    this_out.merge(perf)
            if calc_state:
                state = productivity_state(
                    geojsons[0]['coordinates'],
                    prod_state_year_bl_start, prod_state_year_bl_end,
                    prod_state_year_tg_start, prod_state_year_tg_end,
                    ndvi_gee_dataset, EXECUTION_ID, logger)
                if not this_out:
                    this_out = state
                else:
                    this_out.merge(state)
            outs.append(
                this_out.export([geojson], 'productivity', crs, logger,
                                EXECUTION_ID, proj))

        # First need to deserialize the data that was prepared for output
        # from the productivity functions, so that new urls can be appended
        schema = CloudResultsSchema()
        logger.debug("Deserializing")
        final_output = schema.load(outs[0])
        for o in outs[1:]:
            this_out = schema.load(o)
            final_output.urls.extend(this_out.urls)

        logger.debug("Serializing")
        # Now serialize the output again and return it
        return schema.dump(final_output)
    elif prod_mode == 'JRC LPD':
        out = download(
            'users/geflanddegradation/toolbox_datasets/lpd_300m_longlat',
            'Land Productivity Dynamics (LPD)', 'one time', None, None,
            EXECUTION_ID, logger)
        return out.export(geojsons, 'productivity', crs, logger,
                          EXECUTION_ID, proj)
    else:
        raise Exception(
            'Unknown productivity mode "{}" chosen'.format(prod_mode))
def run(params, logger):
    """Compute the SDG 15.3.1 sub-indicators.

    Runs the productivity indicators (either Trends.Earth or the JRC LPD
    layer depending on ``prod_mode``), then the land cover and soil
    organic carbon indicators, merges everything into one output per
    geojson, exports it, and returns the serialized CloudResults.
    """
    logger.debug("Loading parameters.")
    prod_mode = params.get('prod_mode')
    prod_traj_year_initial = params.get('prod_traj_year_initial')
    prod_traj_year_final = params.get('prod_traj_year_final')
    prod_perf_year_initial = params.get('prod_perf_year_initial')
    prod_perf_year_final = params.get('prod_perf_year_final')
    prod_state_year_bl_start = params.get('prod_state_year_bl_start')
    prod_state_year_bl_end = params.get('prod_state_year_bl_end')
    prod_state_year_tg_start = params.get('prod_state_year_tg_start')
    prod_state_year_tg_end = params.get('prod_state_year_tg_end')
    lc_year_initial = params.get('lc_year_initial')
    lc_year_final = params.get('lc_year_final')
    soc_year_initial = params.get('soc_year_initial')
    soc_year_final = params.get('soc_year_final')
    geojsons = json.loads(params.get('geojsons'))
    crs = params.get('crs')
    prod_traj_method = params.get('prod_traj_method')
    ndvi_gee_dataset = params.get('ndvi_gee_dataset')
    climate_gee_dataset = params.get('climate_gee_dataset')
    fl = params.get('fl')
    trans_matrix = params.get('trans_matrix')
    remap_matrix = params.get('remap_matrix')

    if len(trans_matrix) != 49:
        raise GEEIOError("Transition matrix must be a list with 49 entries")
    if (len(remap_matrix) != 2 or len(remap_matrix[0]) != 37
            or len(remap_matrix[1]) != 37):
        # Fixed error message: this check validates the remap matrix, not
        # the transition matrix.
        raise GEEIOError("Remap matrix must be a list of two lists with 37 entries each")

    # Check the ENV. Are we running this locally or in prod?
    if params.get('ENV') == 'dev':
        EXECUTION_ID = str(random.randint(1000000, 99999999))
    else:
        EXECUTION_ID = params.get('EXECUTION_ID', None)

    proj = ee.Image(ndvi_gee_dataset).projection()

    logger.debug("Running productivity indicators.")
    if prod_mode == 'Trends.Earth productivity':
        outs = []
        for geojson in geojsons:
            # Need to loop over the geojsons, since performance takes in a
            # geojson.
            # TODO: pass performance a second geojson defining the entire
            # extent of all input geojsons so that the performance is
            # calculated the same over all input areas.
            out = productivity_trajectory(prod_traj_year_initial,
                                          prod_traj_year_final,
                                          prod_traj_method,
                                          ndvi_gee_dataset,
                                          climate_gee_dataset, logger)
            prod_perf = productivity_performance(prod_perf_year_initial,
                                                 prod_perf_year_final,
                                                 ndvi_gee_dataset, geojson,
                                                 EXECUTION_ID, logger)
            out.merge(prod_perf)
            prod_state = productivity_state(prod_state_year_bl_start,
                                            prod_state_year_bl_end,
                                            prod_state_year_tg_start,
                                            prod_state_year_tg_end,
                                            ndvi_gee_dataset,
                                            EXECUTION_ID, logger)
            out.merge(prod_state)

            logger.debug("Running land cover indicator.")
            lc = land_cover(lc_year_initial, lc_year_final, trans_matrix,
                            remap_matrix, EXECUTION_ID, logger)
            lc.selectBands(['Land cover (degradation)',
                            'Land cover (7 class)'])
            out.merge(lc)

            logger.debug("Running soil organic carbon indicator.")
            soc_out = soc(soc_year_initial, soc_year_final, fl,
                          remap_matrix, False, EXECUTION_ID, logger)
            soc_out.selectBands(['Soil organic carbon (degradation)',
                                 'Soil organic carbon'])
            out.merge(soc_out)

            out.setVisible(['Soil organic carbon (degradation)',
                            'Land cover (degradation)',
                            'Productivity trajectory (significance)',
                            'Productivity state (degradation)',
                            'Productivity performance (degradation)',
                            'Land Productivity Dynamics (LPD)'])
            outs.append(out.export([geojson], 'sdg_sub_indicators', crs,
                                   logger, EXECUTION_ID, proj))

        # First need to deserialize the data that was prepared for output
        # from the productivity functions, so that new urls can be appended
        schema = CloudResultsSchema()
        logger.debug("Deserializing")
        final_prod = schema.load(outs[0])
        for o in outs[1:]:
            this_out = schema.load(o)
            final_prod.urls.extend(this_out.urls)

        logger.debug("Serializing")
        # Now serialize the output again so the remaining layers can be
        # added to it
        return schema.dump(final_prod)
    elif prod_mode == 'JRC LPD':
        out = download(
            'users/geflanddegradation/toolbox_datasets/lpd_300m_longlat',
            'Land Productivity Dynamics (LPD)', 'one time', None, None,
            EXECUTION_ID, logger)
        # Save as int16 to be compatible with other data
        out.image = out.image.int16()

        logger.debug("Running land cover indicator.")
        lc = land_cover(lc_year_initial, lc_year_final, trans_matrix,
                        remap_matrix, EXECUTION_ID, logger)
        lc.selectBands(['Land cover (degradation)',
                        'Land cover (7 class)'])
        out.merge(lc)

        logger.debug("Running soil organic carbon indicator.")
        soc_out = soc(soc_year_initial, soc_year_final, fl, remap_matrix,
                      False, EXECUTION_ID, logger)
        soc_out.selectBands(['Soil organic carbon (degradation)',
                             'Soil organic carbon'])
        out.merge(soc_out)

        out.setVisible(['Soil organic carbon (degradation)',
                        'Land cover (degradation)',
                        'Productivity trajectory (significance)',
                        'Productivity state (degradation)',
                        'Productivity performance (degradation)',
                        'Land Productivity Dynamics (LPD)'])
        return out.export(geojsons, 'sdg_sub_indicators', crs, logger,
                          EXECUTION_ID, proj)
    else:
        raise Exception('Unknown productivity mode "{}" chosen'.format(prod_mode))