def main(ini_path=None, overwrite_flag=False, delay_time=0, gee_key_file=None,
         max_ready=-1, reverse_flag=False):
    """Compute annual Tcorr images from gridded images

    Parameters
    ----------
    ini_path : str
        Input file path.
    overwrite_flag : bool, optional
        If True, overwrite existing files (the default is False).
    delay_time : float, optional
        Delay time in seconds between starting export tasks (or checking the
        number of queued tasks, see "max_ready" parameter).  The default is 0.
    gee_key_file : str, None, optional
        Earth Engine service account JSON key file (the default is None).
    max_ready : int, optional
        Maximum number of queued "READY" tasks.  The default is -1, which
        implies no limit to the number of tasks that will be submitted.
    reverse_flag : bool, optional
        If True, process WRS2 tiles in reverse order.

    """
    logging.info('\nCompute annual Tcorr images from gridded images')

    wrs2_coll_id = 'projects/earthengine-legacy/assets/' \
                   'projects/usgs-ssebop/wrs2_descending_custom'
    wrs2_tile_field = 'WRS2_TILE'
    # CGM - Which format should we use for the WRS2 tile?
    wrs2_tile_fmt = 'p{:03d}r{:03d}'
    # wrs2_tile_fmt = '{:03d}{:03d}'
    wrs2_tile_re = re.compile(r'p?(\d{1,3})r?(\d{1,3})')

    # List of path/rows to skip
    wrs2_skip_list = [
        'p049r026',  # Vancouver Island, Canada
        # 'p047r031',  # North California coast
        'p042r037',  # San Nicolas Island, California
        # 'p041r037',  # South California coast
        'p040r038', 'p039r038', 'p038r038',  # Mexico (by California)
        'p037r039', 'p036r039', 'p035r039',  # Mexico (by Arizona)
        'p034r039', 'p033r039',  # Mexico (by New Mexico)
        'p032r040',  # Mexico (West Texas)
        'p029r041', 'p028r042', 'p027r043', 'p026r043',  # Mexico (South Texas)
        'p019r040', 'p018r040',  # West Florida coast
        'p016r043', 'p015r043',  # South Florida coast
        'p014r041', 'p014r042', 'p014r043',  # East Florida coast
        'p013r035', 'p013r036',  # North Carolina Outer Banks
        'p013r026', 'p012r026',  # Canada (by Maine)
        'p011r032',  # Rhode Island coast
    ]
    wrs2_path_skip_list = [9, 49]
    wrs2_row_skip_list = [25, 24, 43]
    mgrs_skip_list = []

    export_id_fmt = 'tcorr_gridded_{product}_{wrs2}_annual'
    asset_id_fmt = '{coll_id}/{wrs2}'

    # Read config file
    ini = configparser.ConfigParser(interpolation=None)
    ini.read_file(open(ini_path, 'r'))
    # ini = utils.read_ini(ini_path)

    # try:
    model_name = 'SSEBOP'
    #     # model_name = ini['INPUTS']['et_model'].upper()
    # except KeyError:
    #     raise ValueError('"et_model" parameter was not set in INI')
    # except Exception as e:
    #     raise e

    try:
        tmax_source = ini[model_name]['tmax_source']
    except KeyError:
        raise ValueError('"tmax_source" parameter was not set in INI')
    except Exception as e:
        raise e

    try:
        tcorr_source = ini[model_name]['tcorr_source']
    except KeyError:
        raise ValueError('"tcorr_source" parameter was not set in INI')
    except Exception as e:
        raise e

    try:
        tcorr_annual_coll_id = '{}_annual'.format(ini['EXPORT']['export_coll'])
    except KeyError:
        raise ValueError('"export_coll" parameter was not set in INI')
    except Exception as e:
        raise e

    try:
        study_area_coll_id = str(ini['INPUTS']['study_area_coll'])
    except KeyError:
        raise ValueError('"study_area_coll" parameter was not set in INI')
    except Exception as e:
        raise e

    try:
        mgrs_ftr_coll_id = str(ini['EXPORT']['mgrs_ftr_coll'])
    except KeyError:
        raise ValueError('"mgrs_ftr_coll" parameter was not set in INI')
    except Exception as e:
        raise e

    # Optional parameters
    try:
        study_area_property = str(ini['INPUTS']['study_area_property'])
    except KeyError:
        study_area_property = None
        logging.debug('  study_area_property: not set in INI, defaulting to None')
    except Exception as e:
        raise e

    try:
        study_area_features = str(ini['INPUTS']['study_area_features'])
        study_area_features = sorted([
            x.strip() for x in study_area_features.split(',')])
    except KeyError:
        study_area_features = []
        logging.debug('  study_area_features: not set in INI, defaulting to []')
    except Exception as e:
        raise e

    try:
        wrs2_tiles = str(ini['INPUTS']['wrs2_tiles'])
        wrs2_tiles = [x.strip() for x in wrs2_tiles.split(',')]
        wrs2_tiles = sorted([x.lower() for x in wrs2_tiles if x])
    except KeyError:
        wrs2_tiles = []
        logging.debug('  wrs2_tiles: not set in INI, defaulting to []')
    except Exception as e:
        raise e

    try:
        mgrs_tiles = str(ini['EXPORT']['mgrs_tiles'])
        mgrs_tiles = sorted([x.strip() for x in mgrs_tiles.split(',')])
        # CGM - Remove empty strings caused by trailing or extra commas
        mgrs_tiles = [x.upper() for x in mgrs_tiles if x]
        logging.debug(f'  mgrs_tiles: {mgrs_tiles}')
    except KeyError:
        mgrs_tiles = []
        logging.debug('  mgrs_tiles: not set in INI, defaulting to []')
    except Exception as e:
        raise e

    try:
        utm_zones = str(ini['EXPORT']['utm_zones'])
        utm_zones = sorted([int(x.strip()) for x in utm_zones.split(',')])
        logging.debug(f'  utm_zones: {utm_zones}')
    except KeyError:
        utm_zones = []
        logging.debug('  utm_zones: not set in INI, defaulting to []')
    except Exception as e:
        raise e

    # TODO: Add try/except blocks and default values?
    # TODO: Filter Tcorr scene collection based on collections parameter
    # collections = [x.strip() for x in ini['INPUTS']['collections'].split(',')]
    # cloud_cover = float(ini['INPUTS']['cloud_cover'])
    # min_pixel_count = float(ini['TCORR']['min_pixel_count'])
    min_scene_count = float(ini['TCORR']['min_scene_count'])

    # Limit by year
    month_list = list(range(1, 13))
    # try:
    #     month_list = sorted(list(utils.parse_int_set(ini['TCORR']['months'])))
    # except:
    #     logging.info('\nTCORR "months" parameter not set in the INI,'
    #                  '\n  Defaulting to all months (1-12)\n')
    #     month_list = list(range(1, 13))
    try:
        year_list = sorted(list(utils.parse_int_set(ini['TCORR']['years'])))
    except:
        logging.info('\nTCORR "years" parameter not set in the INI,'
                     '\n  Defaulting to all available years\n')
        year_list = []

    # For now only support reading specific Tmax sources
    if (tmax_source.upper() not in ['DAYMET_MEDIAN_V2'] and
            not re.match(r'^projects/.+/tmax/.+_(mean|median)_\d{4}_\d{4}(_\w+)?',
                         tmax_source)):
        raise ValueError(f'unsupported tmax_source: {tmax_source}')

    # if (tmax_name.upper() == 'CIMIS' and
    #         ini['INPUTS']['end_date'] < '2003-10-01'):
    #     logging.error(
    #         '\nCIMIS is not currently available before 2003-10-01, exiting\n')
    #     sys.exit()
    # elif (tmax_name.upper() == 'DAYMET' and
    #         ini['INPUTS']['end_date'] > '2020-12-31'):
    #     logging.warning(
    #         '\nDAYMET is not currently available past 2020-12-31, '
    #         'using median Tmax values\n')
    #     # sys.exit()
    # # elif (tmax_name.upper() == 'TOPOWX' and
    # #         ini['INPUTS']['end_date'] > '2017-12-31'):
    # #     logging.warning(
    # #         '\nDAYMET is not currently available past 2017-12-31, '
    # #         'using median Tmax values\n')
    # #     # sys.exit()

    logging.info('\nInitializing Earth Engine')
    if gee_key_file:
        logging.info(f'  Using service account key file: {gee_key_file}')
        # The "EE_ACCOUNT" parameter is not used if the key file is valid
        ee.Initialize(ee.ServiceAccountCredentials('x', key_file=gee_key_file))
    else:
        ee.Initialize()

    logging.debug('\nTmax properties')
    tmax_coll = ee.ImageCollection(tmax_source)
    tmax_mask = ee.Image(tmax_coll.first()).select([0]).multiply(0)
    logging.debug(f'  {tmax_source}')

    # Get the Tcorr image collection properties
    logging.debug('\nTcorr scene collection')
    tcorr_coll_id = '{}'.format(ini['EXPORT']['export_coll'])

    # if not ee.data.getInfo(tcorr_annual_coll_id.rsplit('/', 1)[0]):
    #     logging.info('\nExport collection does not exist and will be built'
    #                  '\n  {}'.format(tcorr_annual_coll_id.rsplit('/', 1)[0]))
    #     input('Press ENTER to continue')
    #     ee.data.createAsset({'type': 'FOLDER'},
    #                         tcorr_annual_coll_id.rsplit('/', 1)[0])
    if not ee.data.getInfo(tcorr_annual_coll_id):
        logging.info('\nExport collection does not exist and will be built'
                     '\n  {}'.format(tcorr_annual_coll_id))
        input('Press ENTER to continue')
        ee.data.createAsset({'type': 'IMAGE_COLLECTION'}, tcorr_annual_coll_id)

    # Get current running tasks
    tasks = utils.get_ee_tasks()
    ready_task_count = sum(1 for t in tasks.values() if t['state'] == 'READY')
    # ready_task_count = delay_task(ready_task_count, delay_time, max_ready)
    if logging.getLogger().getEffectiveLevel() == logging.DEBUG:
        utils.print_ee_tasks(tasks)
        input('ENTER')

    # Get current asset list
    logging.debug('\nGetting GEE asset list')
    asset_list = utils.get_ee_assets(tcorr_annual_coll_id)
    if logging.getLogger().getEffectiveLevel() == logging.DEBUG:
        pprint.pprint(asset_list[:10])

    # Get list of MGRS tiles that intersect the study area
    logging.info('\nBuilding export list')
    export_list = mgrs_export_tiles(
        study_area_coll_id=study_area_coll_id,
        mgrs_coll_id=mgrs_ftr_coll_id,
        study_area_property=study_area_property,
        study_area_features=study_area_features,
        mgrs_tiles=mgrs_tiles,
        mgrs_skip_list=mgrs_skip_list,
        utm_zones=utm_zones,
        wrs2_tiles=wrs2_tiles,
    )
    if not export_list:
        logging.error('\nEmpty export list, exiting')
        return False
    # pprint.pprint(export_list)
    # input('ENTER')

    # Build the complete/filtered WRS2 list
    wrs2_tile_list = list(set(
        wrs2 for tile_info in export_list
        for wrs2 in tile_info['wrs2_tiles']))
    if wrs2_skip_list:
        wrs2_tile_list = [wrs2 for wrs2 in wrs2_tile_list
                          if wrs2 not in wrs2_skip_list]
    if wrs2_path_skip_list:
        wrs2_tile_list = [wrs2 for wrs2 in wrs2_tile_list
                          if int(wrs2[1:4]) not in wrs2_path_skip_list]
    if wrs2_row_skip_list:
        wrs2_tile_list = [wrs2 for wrs2 in wrs2_tile_list
                          if int(wrs2[5:8]) not in wrs2_row_skip_list]
    wrs2_tile_list = sorted(wrs2_tile_list, reverse=not reverse_flag)
    # wrs2_tile_count = len(wrs2_tile_list)

    # Get the list of WRS2 tiles that intersect the data area and study area
    wrs2_coll = ee.FeatureCollection(wrs2_coll_id) \
        .filter(ee.Filter.inList(wrs2_tile_field, wrs2_tile_list))
    wrs2_info = wrs2_coll.getInfo()['features']

    for wrs2_ftr in sorted(wrs2_info,
                           key=lambda k: k['properties']['WRS2_TILE'],
                           reverse=reverse_flag):
        wrs2_tile = wrs2_ftr['properties'][wrs2_tile_field]
        wrs2_path, wrs2_row = map(int, wrs2_tile_re.findall(wrs2_tile)[0])
        logging.info(f'{wrs2_tile}')

        export_id = export_id_fmt.format(
            product=tmax_source.split('/')[-1], wrs2=wrs2_tile)
        logging.debug(f'  Export ID: {export_id}')

        asset_id = asset_id_fmt.format(
            coll_id=tcorr_annual_coll_id, wrs2=wrs2_tile)
        asset_short_id = asset_id.replace(
            'projects/earthengine-legacy/assets/', '')
        logging.debug(f'  Asset ID: {asset_id}')

        if overwrite_flag:
            if export_id in tasks.keys():
                logging.info('  Task already submitted, cancelling')
                ee.data.cancelTask(tasks[export_id]['id'])
            # This is intentionally not an "elif" so that a task can be
            # cancelled and an existing image/file/asset can be removed
            if asset_id in asset_list or asset_short_id in asset_list:
                logging.info('  Asset already exists, removing')
                ee.data.deleteAsset(asset_id)
        else:
            if export_id in tasks.keys():
                logging.info('  Task already submitted, skipping')
                continue
            elif asset_id in asset_list or asset_short_id in asset_list:
                logging.info('  Asset already exists, skipping')
                continue

        # TODO: Move to separate function or outside loop
        export_crs = 'EPSG:{}'.format(wrs2_ftr['properties']['EPSG'])
        wrs2_extent = ee.Geometry(wrs2_ftr['geometry']) \
            .bounds(1, ee.Projection(export_crs)) \
            .coordinates().get(0).getInfo()
        wrs2_extent = [
            min([x[0] for x in wrs2_extent]),
            min([x[1] for x in wrs2_extent]),
            max([x[0] for x in wrs2_extent]),
            max([x[1] for x in wrs2_extent])]
        logging.debug(f'  WRS2 Extent: {wrs2_extent}')

        # Adjust the image extent to the coarse resolution grid
        # EXPORT_GEO = [5000, 0, 15, 0, -5000, 15]
        export_cs = EXPORT_GEO[0]
        export_extent = [
            round(math.floor((wrs2_extent[0] - EXPORT_GEO[2]) / export_cs) *
                  export_cs + EXPORT_GEO[2], 8),
            round(math.floor((wrs2_extent[1] - EXPORT_GEO[5]) / export_cs) *
                  export_cs + EXPORT_GEO[5], 8),
            round(math.ceil((wrs2_extent[2] - EXPORT_GEO[2]) / export_cs) *
                  export_cs + EXPORT_GEO[2], 8),
            round(math.ceil((wrs2_extent[3] - EXPORT_GEO[5]) / export_cs) *
                  export_cs + EXPORT_GEO[5], 8),
        ]
        export_geo = [export_cs, 0, export_extent[0],
                      0, -export_cs, export_extent[3]]
        export_shape = [
            int(abs(export_extent[2] - export_extent[0]) / EXPORT_GEO[0]),
            int(abs(export_extent[3] - export_extent[1]) / EXPORT_GEO[0])]
        logging.debug(f'  Export CRS: {export_crs}')
        logging.debug(f'  Export Geo: {export_geo}')
        logging.debug(f'  Export Extent: {export_extent}')
        logging.debug(f'  Export Shape: {export_shape}')

        tcorr_coll = ee.ImageCollection(tcorr_coll_id) \
            .filterMetadata('wrs2_tile', 'equals', wrs2_tile) \
            .filter(ee.Filter.inList('year', year_list)) \
            .filterMetadata('tcorr_index', 'equals', 1) \
            .filterMetadata('tcorr_coarse_count', 'greater_than', 0) \
            .select(['tcorr'])
        #     .filterMetadata('tcorr_pixel_count', 'not_less_than', min_pixel_count) \
        # TODO: Should the CLOUD_COVER_LAND filter be re-applied here?
        #     .filterMetadata('CLOUD_COVER_LAND', 'less_than', cloud_cover)
        #     .filterDate(start_date, end_date)
        #     .filterBounds(ee.Geometry(wrs2_ftr['geometry']))
        tcorr_count = tcorr_coll.size()

        # mask_img = ee.Image.constant(0).reproject(export_crs, export_geo)

        # Compute the gridded Tcorr climo image and count
        reducer = ee.Reducer.mean() \
            .combine(ee.Reducer.count(), sharedInputs=True)
        tcorr_img = tcorr_coll.reduce(reducer).rename(['tcorr', 'count'])
        count_img = tcorr_img.select(['count'])
        output_img = tcorr_img.updateMask(count_img.gte(min_scene_count))

        # # Compute stats from the image properties
        # tcorr_stats = ee.List(tcorr_coll.aggregate_array('tcorr_value')) \
        #     .reduce(reducer)
        # tcorr_stats = ee.Dictionary(tcorr_stats) \
        #     .combine({'median': 0, 'count': 0}, overwrite=False)
        # tcorr = ee.Number(tcorr_stats.get('median'))
        # count = ee.Number(tcorr_stats.get('count'))
        # index = count.lt(min_scene_count) \
        #     .multiply(TCORR_INDICES['NODATA'] - TCORR_INDICES['ANNUAL']) \
        #     .add(TCORR_INDICES['ANNUAL'])
        # # index = ee.Algorithms.If(count.gte(min_scene_count), 6, 9)

        # # Clip the mask image to the Landsat footprint
        # # Change mask values to 1 if count >= threshold
        # # Mask values of 0 will be set to nodata
        # mask_img = tmax_mask.add(count.gte(min_scene_count)) \
        #     .clip(ee.Geometry(wrs2_ftr['geometry']))
        # output_img = ee.Image(
        #     [mask_img.multiply(tcorr), mask_img.multiply(count)]) \
        #     .rename(['tcorr', 'count']) \
        #     .updateMask(mask_img.unmask(0))

        # # Write an empty image if the pixel count is too low
        # # CGM: Check/test if this can be combined into a single If()
        # tcorr_img = ee.Algorithms.If(
        #     count.gte(min_scene_count),
        #     tmax_mask.add(tcorr), mask_img.updateMask(0))
        # count_img = ee.Algorithms.If(
        #     count.gte(min_scene_count),
        #     tmax_mask.add(count), mask_img.updateMask(0))
        # # Clip to the Landsat image footprint
        # output_img = ee.Image([tcorr_img, count_img]) \
        #     .rename(['tcorr', 'count'])

        # Clip to the Landsat image footprint
        # output_img = output_img.clip(ee.Geometry(wrs2_ftr['geometry']))
        # Clear the transparency mask
        # output_img = output_img.updateMask(output_img.unmask(0))

        output_img = output_img.set({
            'date_ingested': datetime.datetime.today().strftime('%Y-%m-%d'),
            'model_name': model_name,
            'model_version': ssebop.__version__,
            # 'system:time_start': utils.millis(start_dt),
            # 'tcorr_value': tcorr,
            'tcorr_index': TCORR_INDICES['ANNUAL'],
            'tcorr_scene_count': tcorr_count,
            'tcorr_source': tcorr_source,
            'tmax_source': tmax_source,
            'wrs2_path': wrs2_path,
            'wrs2_row': wrs2_row,
            'wrs2_tile': wrs2_tile,
            'years': ','.join(map(str, year_list)),
            # 'year_start': year_list[0],
            # 'year_end': year_list[-1],
        })
        # pprint.pprint(output_img.getInfo())
        # input('ENTER')

        logging.debug('  Building export task')
        task = ee.batch.Export.image.toAsset(
            image=output_img,
            description=export_id,
            assetId=asset_id,
            crs=export_crs,
            crsTransform='[' + ','.join(list(map(str, export_geo))) + ']',
            dimensions='{0}x{1}'.format(*export_shape),
        )
        logging.info('  Starting export task')
        utils.ee_task_start(task)

        # Pause before starting the next export task
        utils.delay_task(delay_time, max_ready)
        logging.debug('')
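
# A minimal, standalone sketch of the export-extent snapping used in main()
# above, assuming EXPORT_GEO = [5000, 0, 15, 0, -5000, 15] (a 5 km grid with
# a 15 m origin offset), as noted in the inline comment, and the module-level
# "math" import.  The helper name is hypothetical and is not called anywhere.
def _snap_extent_sketch(extent, geo=(5000, 0, 15, 0, -5000, 15)):
    """Expand [xmin, ymin, xmax, ymax] outward to the coarse export grid.

    For example, an xmin of 213418.2 snaps down to
    math.floor((213418.2 - 15) / 5000) * 5000 + 15 = 210015.0
    """
    cs = geo[0]
    return [
        math.floor((extent[0] - geo[2]) / cs) * cs + geo[2],
        math.floor((extent[1] - geo[5]) / cs) * cs + geo[5],
        math.ceil((extent[2] - geo[2]) / cs) * cs + geo[2],
        math.ceil((extent[3] - geo[5]) / cs) * cs + geo[5],
    ]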
def main(ini_path=None, overwrite_flag=False, delay=0, key=None,
         reverse_flag=False):
    """Compute daily dT images

    Parameters
    ----------
    ini_path : str
        Input file path.
    overwrite_flag : bool, optional
        If True, generate new images (but with different export dates) even if
        the dates already have images.  If False, only generate images for
        dates that are missing.  The default is False.
    delay : float, optional
        Delay time between each export task (the default is 0).
    key : str, optional
        File path to an Earth Engine JSON key file (the default is None).
    reverse_flag : bool, optional
        If True, process dates in reverse order.

    """
    logging.info('\nCompute daily dT images')

    ini = read_ini(ini_path)

    model_name = 'SSEBOP'
    # model_name = ini['INPUTS']['et_model'].upper()

    if ini[model_name]['dt_source'].upper() == 'CIMIS':
        daily_coll_id = 'projects/climate-engine/cimis/daily'
    elif ini[model_name]['dt_source'].upper() == 'DAYMET':
        daily_coll_id = 'NASA/ORNL/DAYMET_V3'
    elif ini[model_name]['dt_source'].upper() == 'GRIDMET':
        daily_coll_id = 'IDAHO_EPSCOR/GRIDMET'
    else:
        raise ValueError('dt_source must be CIMIS, DAYMET, or GRIDMET')

    # Check dates
    if (ini[model_name]['dt_source'].upper() == 'CIMIS' and
            ini['INPUTS']['end_date'] < '2003-10-01'):
        logging.error(
            '\nCIMIS is not currently available before 2003-10-01, exiting\n')
        sys.exit()
    elif (ini[model_name]['dt_source'].upper() == 'DAYMET' and
            ini['INPUTS']['end_date'] > '2017-12-31'):
        logging.warning('\nDAYMET is not currently available past 2017-12-31, '
                        'using median Tmax values\n')
        # sys.exit()
    # elif (ini[model_name]['dt_source'].upper() == 'TOPOWX' and
    #         ini['INPUTS']['end_date'] > '2017-12-31'):
    #     logging.warning(
    #         '\nDAYMET is not currently available past 2017-12-31, '
    #         'using median Tmax values\n')
    #     # sys.exit()

    logging.info('\nInitializing Earth Engine')
    if key:
        logging.info('  Using service account key file: {}'.format(key))
        # The "EE_ACCOUNT" parameter is not used if the key file is valid
        ee.Initialize(ee.ServiceAccountCredentials('deadbeef', key_file=key))
    else:
        ee.Initialize()

    # Output dT daily image collection
    dt_daily_coll_id = '{}/{}_daily'.format(
        ini['EXPORT']['export_coll'], ini[model_name]['dt_source'].lower())

    # Get an input image to set the dT values to
    logging.debug('\nInput properties')
    dt_name = ini[model_name]['dt_source']
    dt_source = dt_name.split('_', 1)[0]
    # dt_version = dt_name.split('_', 1)[1]
    daily_coll = ee.ImageCollection(daily_coll_id)
    dt_img = ee.Image(daily_coll.first()).select([0])
    dt_mask = dt_img.multiply(0)
    logging.debug('  Collection: {}'.format(daily_coll_id))
    logging.debug('  Source: {}'.format(dt_source))
    # logging.debug('  Version: {}'.format(dt_version))

    logging.debug('\nExport properties')
    export_proj = dt_img.projection().getInfo()
    export_geo = export_proj['transform']
    if 'crs' in export_proj.keys():
        export_crs = export_proj['crs']
    elif 'wkt' in export_proj.keys():
        export_crs = re.sub(r',\s+', ',', export_proj['wkt'])
    export_shape = dt_img.getInfo()['bands'][0]['dimensions']
    export_extent = [
        export_geo[2], export_geo[5] + export_shape[1] * export_geo[4],
        export_geo[2] + export_shape[0] * export_geo[0], export_geo[5]]
    logging.debug('  CRS: {}'.format(export_crs))
    logging.debug('  Extent: {}'.format(export_extent))
    logging.debug('  Geo: {}'.format(export_geo))
    logging.debug('  Shape: {}'.format(export_shape))

    # Get current asset list
    if ini['EXPORT']['export_dest'].upper() == 'ASSET':
        logging.debug('\nGetting asset list')
        # DEADBEEF - daily is hardcoded in the asset_id for now
        asset_list = utils.get_ee_assets(dt_daily_coll_id)
    else:
        raise ValueError('invalid export destination: {}'.format(
            ini['EXPORT']['export_dest']))

    # Get current running tasks
    tasks = utils.get_ee_tasks()
    if logging.getLogger().getEffectiveLevel() == logging.DEBUG:
        utils.print_ee_tasks()
        input('ENTER')

    # Limit by year and month
    try:
        month_list = sorted(list(utils.parse_int_set(ini['INPUTS']['months'])))
    except:
        logging.info('\nINPUTS "months" parameter not set in the INI,'
                     '\n  Defaulting to all months (1-12)\n')
        month_list = list(range(1, 13))
    # try:
    #     year_list = sorted(list(utils.parse_int_set(ini['INPUTS']['years'])))
    # except:
    #     logging.info('\nINPUTS "years" parameter not set in the INI,'
    #                  '\n  Defaulting to all available years\n')
    #     year_list = []

    # Group asset IDs by image date
    asset_id_dict = defaultdict(list)
    for asset_id in asset_list:
        asset_dt = datetime.datetime.strptime(
            asset_id.split('/')[-1].split('_')[0], '%Y%m%d')
        asset_id_dict[asset_dt.strftime('%Y-%m-%d')].append(asset_id)
    # pprint.pprint(asset_id_dict)

    iter_start_dt = datetime.datetime.strptime(
        ini['INPUTS']['start_date'], '%Y-%m-%d')
    iter_end_dt = datetime.datetime.strptime(
        ini['INPUTS']['end_date'], '%Y-%m-%d')
    logging.debug('Start Date: {}'.format(iter_start_dt.strftime('%Y-%m-%d')))
    logging.debug('End Date:   {}\n'.format(iter_end_dt.strftime('%Y-%m-%d')))

    for export_dt in sorted(utils.date_range(iter_start_dt, iter_end_dt),
                            reverse=reverse_flag):
        export_date = export_dt.strftime('%Y-%m-%d')

        # if ((month_list and export_dt.month not in month_list) or
        #         (year_list and export_dt.year not in year_list)):
        if month_list and export_dt.month not in month_list:
            logging.debug(f'Date: {export_date} - month not in INI - skipping')
            continue
        elif export_date >= datetime.datetime.today().strftime('%Y-%m-%d'):
            logging.debug(f'Date: {export_date} - unsupported date - skipping')
            continue
        logging.info(f'Date: {export_date}')

        export_id = ini['EXPORT']['export_id_fmt'] \
            .format(
                product=dt_name.lower(),
                date=export_dt.strftime('%Y%m%d'),
                export=datetime.datetime.today().strftime('%Y%m%d'),
                dest=ini['EXPORT']['export_dest'].lower())
        logging.debug('  Export ID: {}'.format(export_id))

        if ini['EXPORT']['export_dest'] == 'ASSET':
            asset_id = '{}/{}_{}'.format(
                dt_daily_coll_id, export_dt.strftime('%Y%m%d'),
                datetime.datetime.today().strftime('%Y%m%d'))
            logging.debug('  Asset ID: {}'.format(asset_id))

        if overwrite_flag:
            if export_id in tasks.keys():
                logging.debug('  Task already submitted, cancelling')
                ee.data.cancelTask(tasks[export_id])
            # This is intentionally not an "elif" so that a task can be
            # cancelled and an existing image/file/asset can be removed
            if (ini['EXPORT']['export_dest'].upper() == 'ASSET' and
                    asset_id in asset_list):
                logging.debug('  Asset already exists, removing')
                ee.data.deleteAsset(asset_id)
        else:
            if export_id in tasks.keys():
                logging.debug('  Task already submitted, skipping')
                continue
            elif (ini['EXPORT']['export_dest'].upper() == 'ASSET' and
                    asset_id in asset_list):
                logging.debug(
                    '  Asset with current export date already exists, '
                    'skipping')
                continue
            elif len(asset_id_dict[export_date]) > 0:
                logging.debug(
                    '  Asset with earlier export date already exists, '
                    'skipping')
                continue

        # Compute dT using a fake Landsat image
        # The system:time_start property is the only needed value
        model_obj = ssebop.Image(
            ee.Image.constant([0, 0]).rename(['ndvi', 'lst']).set({
                'system:time_start': utils.millis(export_dt),
                'system:index': 'LC08_043033_20170716',
                'system:id': 'LC08_043033_20170716'}),
            dt_source=dt_source.upper(),
            elev_source='SRTM',
            dt_min=ini['SSEBOP']['dt_min'],
            dt_max=ini['SSEBOP']['dt_max'],
        )

        # Cast to float and set properties
        dt_img = model_obj.dt.float() \
            .set({
                'system:time_start': utils.millis(export_dt),
                'date_ingested': datetime.datetime.today().strftime('%Y-%m-%d'),
                'date': export_dt.strftime('%Y-%m-%d'),
                'year': int(export_dt.year),
                'month': int(export_dt.month),
                'day': int(export_dt.day),
                'doy': int(export_dt.strftime('%j')),
                'model_name': model_name,
                'model_version': ssebop.__version__,
                'dt_source': dt_source.upper(),
                # 'dt_version': dt_version.upper(),
            })

        # Build export tasks
        if ini['EXPORT']['export_dest'] == 'ASSET':
            logging.debug('  Building export task')
            task = ee.batch.Export.image.toAsset(
                image=ee.Image(dt_img),
                description=export_id,
                assetId=asset_id,
                crs=export_crs,
                crsTransform='[' + ','.join(list(map(str, export_geo))) + ']',
                dimensions='{0}x{1}'.format(*export_shape),
            )
            logging.info('  Starting export task')
            utils.ee_task_start(task)

        # Pause before starting next task
        utils.delay_task(delay_time=delay)
        logging.debug('')
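
# A minimal sketch of the '<image_date>_<export_date>' asset naming that the
# grouping and skip logic in main() above relies on, assuming the module-level
# "datetime" import.  The helper name is hypothetical and is not called here.
def _parse_dt_asset_id(asset_id):
    """Return (image_date, export_date) from an ID like '.../20170716_20210315'."""
    image_str, export_str = asset_id.split('/')[-1].split('_')[:2]
    return (datetime.datetime.strptime(image_str, '%Y%m%d'),
            datetime.datetime.strptime(export_str, '%Y%m%d'))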
def main(ini_path=None):
    """Remove earlier versions of daily dT images

    Parameters
    ----------
    ini_path : str
        Input file path.

    """
    logging.info('\nRemove earlier versions of daily dT images')

    ini = read_ini(ini_path)

    model_name = 'SSEBOP'
    # model_name = ini['INPUTS']['et_model'].upper()

    start_dt = datetime.datetime.strptime(
        ini['INPUTS']['start_date'], '%Y-%m-%d')
    end_dt = datetime.datetime.strptime(
        ini['INPUTS']['end_date'], '%Y-%m-%d')
    logging.debug('Start Date: {}'.format(start_dt.strftime('%Y-%m-%d')))
    logging.debug('End Date:   {}\n'.format(end_dt.strftime('%Y-%m-%d')))

    try:
        dt_source = str(ini[model_name]['dt_source'])
        logging.debug('\ndt_source:\n  {}'.format(dt_source))
    except KeyError:
        logging.error('  dt_source: must be set in INI')
        sys.exit()
    if dt_source.upper() not in ['CIMIS', 'DAYMET', 'GRIDMET']:
        raise ValueError('dt_source must be CIMIS, DAYMET, or GRIDMET')

    # Output dT daily image collection
    dt_daily_coll_id = '{}/{}_daily'.format(
        ini['EXPORT']['export_coll'], ini[model_name]['dt_source'].lower())
    logging.debug('  {}'.format(dt_daily_coll_id))

    logging.info('\nInitializing Earth Engine')
    ee.Initialize()
    # Make a trivial request to confirm the connection
    ee.Number(1).getInfo()

    # Get list of existing images/files
    logging.debug('\nGetting GEE asset list')
    asset_list = utils.get_ee_assets(dt_daily_coll_id)
    logging.debug('Displaying first 10 images in collection')
    logging.debug(asset_list[:10])

    # Filter asset list by INI start_date and end_date
    logging.debug('\nFiltering by INI start_date and end_date')
    asset_re = re.compile(r'[\w_]+/(\d{8})_\d{8}')
    asset_list = [
        asset_id for asset_id in asset_list
        if start_dt <= datetime.datetime.strptime(
            asset_re.findall(asset_id)[0], '%Y%m%d') <= end_dt]
    if not asset_list:
        logging.info('Empty asset ID list after filter by start/end date, '
                     'exiting')
        return True
    logging.debug('Displaying first 10 images in collection')
    logging.debug(asset_list[:10])

    # Group asset IDs by image date
    asset_id_dict = defaultdict(list)
    for asset_id in asset_list:
        asset_dt = datetime.datetime.strptime(
            asset_id.split('/')[-1].split('_')[0], '%Y%m%d')
        asset_id_dict[asset_dt.strftime('%Y-%m-%d')].append(asset_id)
    # pprint.pprint(asset_id_dict)

    # Remove all but the last image when sorted by export date
    logging.info('\nRemoving assets')
    for date_str, id_list in asset_id_dict.items():
        # logging.debug('{}'.format(date_str))
        if len(id_list) >= 2:
            for asset_id in sorted(id_list)[:-1]:
                logging.info('  Delete: {}'.format(asset_id))
                try:
                    ee.data.deleteAsset(asset_id)
                except Exception as e:
                    logging.info('    Unhandled exception, skipping')
                    logging.debug(e)
                    continue
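
# A minimal, standalone sketch of the de-duplication rule applied in main()
# above: group by image date and keep only the most recent export (the last
# ID when sorted, since the export date is the trailing '_YYYYMMDD' suffix).
# Assumes the module-level defaultdict import; the helper name is hypothetical.
def _assets_to_delete(asset_ids):
    """Return every asset except the latest export for each image date."""
    by_date = defaultdict(list)
    for asset_id in asset_ids:
        by_date[asset_id.split('/')[-1].split('_')[0]].append(asset_id)
    return [asset_id for id_list in by_date.values()
            for asset_id in sorted(id_list)[:-1]]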
def main(tmax_source, statistic, year_start, year_end, doy_list=range(1, 367),
         gee_key_file=None, delay_time=0, max_ready=-1, overwrite_flag=False,
         elr_flag=False, reverse_flag=False):
    """Tmax Climatology Assets

    Parameters
    ----------
    tmax_source : {'CIMIS', 'DAYMET_V3', 'DAYMET_V4', 'GRIDMET'}
        Maximum air temperature source keyword.
    statistic : {'median', 'mean'}
        Climatology statistic.
    year_start : int
        Start year.
    year_end : int
        End year (inclusive).
    doy_list : list(int), optional
        Days of year to process (the default is 1-366).
    gee_key_file : str, None, optional
        File path to a service account JSON key file.
    delay_time : float, optional
        Delay time in seconds between starting export tasks (or checking the
        number of queued tasks, see "max_ready" parameter).  The default is 0.
    max_ready : int, optional
        Maximum number of queued "READY" tasks.  The default is -1, which
        implies no limit to the number of tasks that will be submitted.
    overwrite_flag : bool, optional
        If True, overwrite existing files (the default is False).
    elr_flag : bool, optional
        If True, apply Elevation Lapse Rate (ELR) adjustment
        (the default is False).
    reverse_flag : bool, optional
        If True, process days in reverse order (the default is False).

    Returns
    -------
    None

    Notes
    -----
    The collection is built/filtered using "day of year" based on the
    system:time_start.
    The DOY 366 collection is built by selecting only the DOY 365 images
    (so the DOY 366 image should be a copy of the DOY 365 image).

    Daymet calendar definition
    https://daac.ornl.gov/DAYMET/guides/Daymet_Daily_V4.html
    The Daymet calendar is based on a standard calendar year.
    All Daymet years, including leap years, have 365 days.
    For leap years, the Daymet data include leap day (February 29) and
    December 31 is discarded from leap years to maintain a 365-day year.

    """
    logging.info(f'\nGenerating {tmax_source} {statistic} asset')

    tmax_folder = 'projects/earthengine-legacy/assets/projects/usgs-ssebop/tmax'

    # CGM - Intentionally not setting the time_start
    # time_start_year = 1980

    if statistic.lower() not in ['median', 'mean']:
        raise ValueError(f'unsupported statistic: {statistic}')

    logging.info('\nInitializing Earth Engine')
    if gee_key_file and os.path.isfile(gee_key_file):
        logging.info(
            '  Using service account key file: {}'.format(gee_key_file))
        # The "EE_ACCOUNT" doesn't seem to be used if the key file is valid
        ee.Initialize(ee.ServiceAccountCredentials('', key_file=gee_key_file))
    else:
        ee.Initialize()

    # CGM - Should we set default start/end years if they are not set by the user?
    if tmax_source.upper() in ['DAYMET_V3', 'DAYMET_V4']:
        tmax_coll = ee.ImageCollection('NASA/ORNL/' + tmax_source.upper()) \
            .select(['tmax']).map(c_to_k)
    elif tmax_source.upper() == 'CIMIS':
        tmax_coll = ee.ImageCollection('projects/climate-engine/cimis/daily') \
            .select(['Tx'], ['tmax']).map(c_to_k)
    elif tmax_source.upper() == 'GRIDMET':
        tmax_coll = ee.ImageCollection('IDAHO_EPSCOR/GRIDMET') \
            .select(['tmmx'], ['tmax'])
    # elif tmax_source.upper() == 'TOPOWX':
    #     tmax_coll = ee.ImageCollection('TOPOWX') \
    #         .select(['tmmx'], ['tmax'])
    else:
        logging.error('Unsupported tmax_source: {}'.format(tmax_source))
        return False

    if elr_flag:
        id_flag = 'elr'
        coll_id = f'{tmax_folder}/' \
                  f'{tmax_source.lower()}_{statistic}_{year_start}_{year_end}_{id_flag}'
    else:
        coll_id = f'{tmax_folder}/' \
                  f'{tmax_source.lower()}_{statistic}_{year_start}_{year_end}'

    tmax_info = ee.Image(tmax_coll.first()).getInfo()
    tmax_proj = ee.Image(tmax_coll.first()).projection().getInfo()
    if 'wkt' in tmax_proj.keys():
        tmax_crs = tmax_proj['wkt'].replace(' ', '').replace('\n', '')
    else:
        # TODO: Add support for projections with a "crs" key instead of "wkt"
        raise Exception('unsupported projection type')

    if tmax_source.upper() in ['DAYMET_V3', 'DAYMET_V4']:
        # TODO: Check if the DAYMET_V4 grid is aligned to DAYMET_V3
        # Custom smaller extent for DAYMET focused on CONUS
        extent = [-1999750, -1890500, 2500250, 1109500]
        dimensions = [4500, 3000]
        transform = [1000, 0, -1999750, 0, -1000, 1109500]
        # Custom medium extent for DAYMET of CONUS, Mexico, and southern Canada
        # extent = [-2099750, -3090500, 2900250, 1909500]
        # dimensions = [5000, 5000]
        # transform = [1000, 0, -2099750, 0, -1000, 1909500]
    else:
        transform = tmax_proj['transform']
        dimensions = tmax_info['bands'][0]['dimensions']
    logging.info('  CRS: {}'.format(tmax_crs))
    logging.info('  Transform: {}'.format(transform))
    logging.info('  Dimensions: {}\n'.format(dimensions))

    # Build the export collection if it doesn't exist
    if not ee.data.getInfo(coll_id):
        logging.info('\nImage collection does not exist and will be built'
                     '\n  {}'.format(coll_id))
        input('Press ENTER to continue')
        ee.data.createAsset({'type': 'ImageCollection'}, coll_id)
        # # Switch type string if use_cloud_api=True
        # ee.data.createAsset({'type': 'IMAGE_COLLECTION'}, coll_id)

    # Get current running assets
    # CGM: This is currently returning the asset IDs without earthengine-legacy
    assets = utils.get_ee_assets(coll_id)
    # assets = [asset_id.replace('projects/earthengine-legacy/assets/', '')
    #           for asset_id in assets]

    # Get current running tasks
    tasks = utils.get_ee_tasks()
    if logging.getLogger().getEffectiveLevel() == logging.DEBUG:
        logging.debug('  Tasks: {}'.format(len(tasks)))
        input('ENTER')

    for doy in sorted(doy_list, reverse=reverse_flag):
        logging.info('DOY: {:03d}'.format(doy))

        # CGM - Intentionally not setting the time_start
        # What year should we use for the system:time_start?
        # time_start_dt = datetime.datetime.strptime(
        #     '{}_{:03d}'.format(time_start_year, doy), '%Y_%j')
        # logging.debug('  Time Start Date: {}'.format(
        #     time_start_dt.strftime('%Y-%m-%d')))

        asset_id = '{}/{:03d}'.format(coll_id, doy)
        asset_short_id = asset_id.replace(
            'projects/earthengine-legacy/assets/', '')
        export_id = 'tmax_{}_{}_{}_{}_day{:03d}'.format(
            tmax_source.lower(), statistic, year_start, year_end, doy)
        logging.debug('  Asset ID: {}'.format(asset_id))
        logging.debug('  Export ID: {}'.format(export_id))

        if overwrite_flag:
            if export_id in tasks.keys():
                logging.info('  Task already submitted, cancelling')
                ee.data.cancelTask(tasks[export_id])
            if asset_short_id in assets or asset_id in assets:
                logging.info('  Asset already exists, removing')
                ee.data.deleteAsset(asset_id)
        else:
            if export_id in tasks.keys():
                logging.info('  Task already submitted, skipping')
                continue
            elif asset_short_id in assets:
                logging.info('  Asset already exists, skipping')
                continue

        # Filter the Tmax collection to the target day of year
        if doy < 366:
            tmax_doy_coll = tmax_coll \
                .filter(ee.Filter.calendarRange(doy, doy, 'day_of_year')) \
                .filter(ee.Filter.calendarRange(year_start, year_end, 'year'))
        else:
            # Compute DOY 366 as a copy of the DOY 365 values
            tmax_doy_coll = tmax_coll \
                .filter(ee.Filter.calendarRange(365, 365, 'day_of_year')) \
                .filter(ee.Filter.calendarRange(year_start, year_end, 'year'))

        # Compute the Tmax climo image
        if statistic.lower() == 'median':
            tmax_img = ee.Image(tmax_doy_coll.median())
        elif statistic.lower() == 'mean':
            tmax_img = ee.Image(tmax_doy_coll.mean())

        # Fill interior water holes with the mean of the surrounding cells
        # Use the filled image as the source to the where since tmax is nodata
        # CGM - Check if this is needed for DAYMET_V4
        if tmax_source.upper() in ['DAYMET_V3', 'DAYMET_V4']:
            filled_img = tmax_img.focal_mean(4000, 'circle', 'meters') \
                .reproject(tmax_crs, transform)
            tmax_img = filled_img.where(tmax_img.gt(0), tmax_img)
            # tmax_img = filled_img.where(tmax_img, tmax_img)

        if elr_flag:
            # MF - Could eventually make the DEM source (keyword-based)
            #   an input argument.
            srtm = ee.Image('CGIAR/SRTM90_V4')
            srtm_proj = srtm.projection().getInfo()
            srtm_crs = srtm_proj['crs']
            # MF - The SRTM image has crs, not wkt.
            # if 'crs' in srtm_proj.keys():
            #     srtm_crs = srtm_proj['crs'].replace(' ', '').replace('\n', '')
            # else:
            #     # TODO: Add support for projections with a "crs" key
            #     raise Exception('unsupported projection type')

            # MF - This should be properly defined at L238(?)
            # srtm_proj20km = srtm_proj.scale(200, 200)

            # Reduce DEM to median of ~20km cells
            srtm_median = srtm.reduceResolution(
                reducer=ee.Reducer.median(), maxPixels=65536)
            # Smooth median DEM with 5x5 pixel radius
            srtm_median_5x5 = srtm_median.reduceNeighborhood(
                ee.Reducer.mean(), ee.Kernel.square(radius=5, units='pixels'))
            # Reproject to ~20km
            srtm_median_20km = srtm_median_5x5.reproject(
                crs=srtm_crs, scale=200)

            # Final ELR mask: (DEM - (medDEM.add(100))).gt(0)
            srtm_diff = srtm.subtract(srtm_median_20km.add(100))
            srtm_diff_positive = srtm.subtract(srtm_median_20km.add(100)).gt(0)

            # Reproject to match the Tmax source projection
            srtm_diff = srtm_diff.reproject(
                crs=tmax_crs, crsTransform=transform)
            srtm_diff_positive = srtm_diff_positive.reproject(
                crs=tmax_crs, crsTransform=transform)
            srtm_diff_final = srtm_diff.mask(srtm_diff_positive)

            elr_adjust = ee.Image(tmax_img).expression(
                '(temperature - (0.005 * (elr_layer)))',
                {'temperature': tmax_img, 'elr_layer': srtm_diff_final})

            tmax_img = tmax_img.where(srtm_diff_final, elr_adjust)

        tmax_img = tmax_img.set({
            'date_ingested': datetime.datetime.today().strftime('%Y-%m-%d'),
            'doy': int(doy),
            # 'doy': ee.String(ee.Number(doy).format('%03d')),
            'year_start': year_start,
            'year_end': year_end,
            'years': tmax_doy_coll.size(),
            # CGM - Intentionally not setting the time_start
            # 'system:time_start': ee.Date(
            #     time_start_dt.strftime('%Y-%m-%d')).millis()
        })

        # Build export tasks
        logging.debug('  Building export task')
        task = ee.batch.Export.image.toAsset(
            tmax_img,
            description=export_id,
            assetId=asset_id,
            dimensions='{0}x{1}'.format(*dimensions),
            crs=tmax_crs,
            crsTransform='[' + ','.join(map(str, transform)) + ']',
            maxPixels=int(1E10),
        )
        # task = ee.batch.Export.image.toCloudStorage(
        #     tmax_img,
        #     description=export_id,
        #     bucket='tmax_',
        #     fileNamePrefix=export_id,
        #     dimensions='{0}x{1}'.format(*dimensions),
        #     crs=tmax_crs,
        #     crsTransform='[' + ','.join(map(str, transform)) + ']',
        #     maxPixels=int(1E10),
        #     fileFormat='GeoTIFF',
        #     formatOptions={'cloudOptimized': True},
        # )

        logging.info('  Starting export task')
        utils.ee_task_start(task)

        # Pause before starting the next task
        utils.delay_task(delay_time, max_ready)
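
# The ELR branch in main() above can be summarized as (a sketch, using the
# 0.005 coefficient from the expression string):
#     tmax_adj = tmax - 0.005 * (elev - (median_elev_20km + 100))
# applied only where elev > median_elev_20km + 100, i.e. a lapse rate of
# roughly 5 K per 1000 m for pixels sitting more than 100 m above the
# ~20 km median elevation.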