        'scale': 10,
        'region': roi_geometry
    }
    # Export task descriptions may not contain spaces, so use an underscore.
    task = ee.batch.Export.image(s2_median, 's2_median_%d' % number, task_config)
    task.start()
    return 1

listOfMarkers = ee.FeatureCollection([
    ee.Feature(ee.Geometry.Point([6.8115234375, 60.88836817267309]), {"system:index": "0"}),
    ee.Feature(ee.Geometry.Point([15.1611328125, 57.469327688204295]), {"system:index": "1"}),
    ee.Feature(ee.Geometry.Point([27.0703125, 62.95584745563692]), {"system:index": "2"})
])
listOfMarkers = listOfMarkers.sort('system:index')

# Convert the FeatureCollection of marker points to a list;
# the maximum number of markers is 100.
listOfMarkersList = listOfMarkers.toList(100)
numberOfMarkers = listOfMarkersList.size().getInfo()

# Loop through the list of markers to export the individual ROIs.
i = 0
while i < numberOfMarkers:
    exportROIimages(ee.Feature(listOfMarkersList.get(i)), i)
    i += 1
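# The loop above starts one export task per marker without monitoring them.
# A minimal hedged sketch for polling task state (wait_for_tasks is not part
# of the original script):
import time

def wait_for_tasks(poll_interval=30):
    # Block until no Earth Engine tasks are READY or RUNNING.
    while True:
        states = [t.status()['state'] for t in ee.batch.Task.list()]
        if not any(s in ('READY', 'RUNNING') for s in states):
            break
        time.sleep(poll_interval)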
# %%
# Add Earth Engine dataset
# LSIB: Large Scale International Boundary Polygons, Simplified
# dataset = ee.FeatureCollection('USDOS/LSIB_SIMPLE/2017')
# styleParams = {
#     'fillColor': 'b5ffb4',
#     'color': '00909F',
#     'width': 3.0,
# }
# countries = dataset.style(**styleParams)
# Map.addLayer(countries, {}, 'USDOS/LSIB_SIMPLE/2017')

# LSIB: Large Scale International Boundary Polygons, Detailed
dataset = ee.FeatureCollection('USDOS/LSIB/2013')
visParams = {
    'palette': ['f5ff64', 'b5ffb4', 'beeaff', 'ffc0e8', '8e8dff', 'adadad'],
    'min': 0.0,
    'max': 894.0,
    'opacity': 0.8,
}

# Paint each country polygon with its numeric ISO code.
image = ee.Image().float().paint(dataset, 'iso_num')
Map.addLayer(image, visParams, 'USDOS/LSIB/2013')
# Map.addLayer(dataset, {}, 'for Inspector', False)

# %%
"""
## Display Earth Engine data layers
"""
def getHistoricalMap(self, shape, startYear, endYear, startMonth, endMonth,
                     method='discrete', climatology=True, month=None,
                     defringe=True, pcnt_perm=40, pcnt_temp=8,
                     water_thresh=0.35, ndvi_thresh=0.5, hand_thresh=30,
                     cloud_thresh=10, algorithm='SWT', wcolor='#00008b'):

    # def spatialSelect(feature):
    #     test = ee.Algorithms.If(geom.contains(feature.geometry()), feature, None)
    #     return ee.Feature(test)
    # countries = landShp.filterBounds(geom).map(spatialSelect, True)

    if shape:
        shape = shape.replace('["', '[')
        shape = shape.replace('"]', ']')
        shape = shape.replace('","', ',')
        shape = ee.FeatureCollection(eval(shape))
    else:
        shape = self.REGION

    if climatology:
        if month is None:
            raise ValueError(
                'Month needs to be defined to calculate climatology')

    if algorithm == 'SWT':
        iniTime = '{}-01-01'.format(startYear)
        endTime = '{}-12-31'.format(endYear)
        # Get images. (The original referenced an undefined `geom` and
        # `countries` here; `shape` appears to be the intended region.)
        images = getLandsatCollection(shape, iniTime, endTime, climatology,
                                      month, defringe, cloud_thresh)
        # Height Above Nearest Drainage (HAND)
        HAND = ee.Image('users/arjenhaag/SERVIR-Mekong/HAND_MERIT')
        # Get HAND mask
        HAND_mask = HAND.gt(float(hand_thresh))

        water = SurfaceWaterAlgorithm(shape, images, pcnt_perm, pcnt_temp,
                                      water_thresh, ndvi_thresh,
                                      HAND_mask).clip(shape)
        waterMap = self.getTileLayerUrl(
            water.updateMask(water.eq(2)).visualize(
                min=0, max=2, palette='#ffffff,#9999ff,' + wcolor))
    elif algorithm == 'JRC':
        water = self.JRCAlgorithm(shape, startYear, endYear, startMonth,
                                  endMonth, method).clip(shape)
        # water = JRCAlgorithm(geom, iniTime, endTime).clip(countries)
        waterMap = self.getTileLayerUrl(
            water.visualize(min=0, max=1, bands='water',
                            palette='#ffffff,' + wcolor))
    else:
        raise NotImplementedError(
            'Selected algorithm string not available. Options are: "SWT" or "JRC"')

    return waterMap
def appendBand(current, previous):
    # Rename the band
    previous = ee.Image(previous)
    # This dataset only has a single layer; regions are encoded with a number
    # indicating agricultural ground.
    current = current.select([0])
    # Append it to the result (Note: only return current item on first element/iteration)
    accum = ee.Algorithms.If(ee.Algorithms.IsEqual(previous, None),
                             current,
                             previous.addBands(ee.Image(current)))
    # Return the accumulation
    return accum

# County shape layer in Google Earth Engine Feature Collection.
# The public instance is fine but it is important that the
county_region = ee.FeatureCollection(
    'ft:1S4EB6319wWW2sWQDPhDvmSBIVrD3iEmCLYB7nMM')

imgcoll = ee.ImageCollection('MODIS/051/MCD12Q1') \
    .filterBounds(ee.Geometry.Rectangle(-106.5, 50, -64, 23)) \
    .filterDate('2002-12-31', '2016-8-4')
img = imgcoll.iterate(appendBand)
img = ee.Image(img)

# Clamp the stacked image to the [-100, 16000] range.
img_0 = ee.Image(ee.Number(-100))
img_16000 = ee.Image(ee.Number(16000))
img = img.min(img_16000)
img = img.max(img_0)

for loc1, loc2, lat, lon in locations.values:
    fname = '{}_{}'.format(int(loc1), int(loc2))
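# Aside (not in the original script): the iterate()-based band stacking above
# can also be expressed with the built-in helper, e.g.
#   stacked = imgcoll.toBands()
# although the resulting band names differ from the appendBand() version.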
# %%
Map = emap.Map(center=[40, -100], zoom=4)
Map.add_basemap('ROADMAP')  # Add Google Map
Map

# %%
"""
## Add Earth Engine Python script
"""

# %%
# Add Earth Engine dataset
Map.setCenter(-122.45, 37.75, 13)

bart = ee.FeatureCollection('ft:1xCCZkVn8DIkB7i7RVkvsYWxAxsdsQZ6SbD9PCXw')
parks = ee.FeatureCollection('ft:10KC6VfBWMUvNcuxU7mbSEg__F_4UVe9uDkCldBw')

buffered_bart = bart.map(lambda f: f.buffer(2000))

join_filter = ee.Filter.withinDistance(2000, '.geo', None, '.geo')
close_parks = ee.Join.simple().apply(parks, bart, join_filter)

Map.addLayer(buffered_bart, {'color': 'b0b0b0'}, "BART Stations")
Map.addLayer(close_parks, {'color': '008000'}, "Parks")

# %%
"""
## Display Earth Engine data layers
"""

# %%
# In[21]:

# geemap.js_snippet_to_py(js_snippet, add_new_cell=True, import_ee=True, import_geemap=True, show_map=True)

# ### Load in collections and required images

# In[3]:

# load collections and required images
collection = ee.ImageCollection('MODIS/006/MOD09A1').filterDate('2000-01-01', '2020-12-31')
forestmask = ee.Image("users/marcogirardello/phenoutils/mask_unchanged_500m")
smallgrid = ee.FeatureCollection('users/marcogirardello/phenoutils/grid_export_phenology3')

# ### <span style="color:blue">Pre-processing step 1: filtering data by quality flags and calculating NDVI.</span>
# This includes snow, cloud, fire, cloud shadows and the land/water mask

# In[4]:

# end and start date of period of interest
start_date = ee.Date.fromYMD(2001, 1, 1)
end_date = ee.Date.fromYMD(2019, 12, 31)

# In[5]:
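# The NDVI calculation itself is not in this excerpt. A minimal sketch (not
# the notebook's exact function), assuming MOD09A1 surface-reflectance band
# names (sur_refl_b02 = NIR, sur_refl_b01 = red):
def addNDVI(image):
    ndvi = image.normalizedDifference(['sur_refl_b02', 'sur_refl_b01']).rename('ndvi')
    return image.addBands(ndvi)

collection_ndvi = collection.filterDate(start_date, end_date).map(addNDVI)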
# %%
Map = folium.Map(location=[40, -100], zoom_start=4)
Map.setOptions('HYBRID')

# %%
'''
## Add Earth Engine Python script
'''

# %%
# Load input imagery: Landsat 7 5-year composite.
image = ee.Image('LANDSAT/LE7_TOA_5YEAR/2008_2012')

# Load a FeatureCollection of counties in Maine.
maineCounties = ee.FeatureCollection('TIGER/2016/Counties') \
    .filter(ee.Filter.eq('STATEFP', '23'))

# Add reducer output to the Features in the collection.
maineMeansFeatures = image.reduceRegions(**{
    'collection': maineCounties,
    'reducer': ee.Reducer.mean(),
    'scale': 30,
})

feature = ee.Feature(maineMeansFeatures.first()).select(image.bandNames())
# print(feature.getInfo())
properties = feature.propertyNames()

# Print the first feature, to illustrate the result.
print(feature.toDictionary(properties).getInfo())
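# A hedged follow-up sketch: the per-county means could be exported to Drive
# as CSV (the description below is an assumption, not from the original):
task = ee.batch.Export.table.toDrive(
    collection=maineMeansFeatures,
    description='maine_county_means',
    fileFormat='CSV')
task.start()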
## Create an interactive map
The default basemap is `Google Maps`. [Additional basemaps](https://github.com/giswqs/geemap/blob/master/geemap/basemaps.py) can be added using the `Map.add_basemap()` function.
"""

# %%
Map = geemap.Map(center=[40, -100], zoom=4)
Map

# %%
"""
## Add Earth Engine Python script
"""

# %%
# Add Earth Engine dataset
dataset = ee.FeatureCollection('TIGER/2010/Tracts_DP1')
visParams = {
    'min': 0,
    'max': 4000,
    'opacity': 0.8,
}

# Turn the strings into numbers
dataset = dataset.map(
    lambda f: f.set('shape_area', ee.Number.parse(f.get('dp0010001'))))

# Map.setCenter(-103.882, 43.036, 8)
image = ee.Image().float().paint(dataset, 'dp0010001')

Map.addLayer(image, visParams, 'TIGER/2010/Tracts_DP1')
# Map.addLayer(dataset, {}, 'for Inspector', False)
import ee
import geemap

# Create a map centered at (lat, lon).
Map = geemap.Map(center=[40, -100], zoom=4)

# Get a single feature.
countries = ee.FeatureCollection("USDOS/LSIB_SIMPLE/2017")
country = countries.filter(ee.Filter.eq('country_na', 'Ukraine'))
Map.addLayer(country, {'color': 'orange'}, 'feature collection layer')

# TEST: center feature on a map
Map.centerObject(country)

# Display the map.
Map
#!/usr/bin/env python
"""Buffer Example.

Display the area within 2 kilometers of any San Francisco BART station.
"""

import ee
import geemap

# Create a map centered at (lat, lon).
Map = geemap.Map(center=[40, -100], zoom=4)

Map.setCenter(-122.4, 37.7, 11)
bart_stations = ee.FeatureCollection('GOOGLE/EE/DEMOS/bart-locations')
buffered = bart_stations.map(lambda f: f.buffer(2000))
unioned = buffered.union()

Map.addLayer(unioned, {'color': '800080'}, "BART stations")

# Display the map.
Map
import ee
import config
import oauth2client

ee.Initialize(config.EE_CREDENTIALS)

IMAGE_COLLECTION = ee.ImageCollection('JRC/GSW1_0/MonthlyHistory')
watershed = ee.FeatureCollection(
    "ft:1vTonxuDFs7rBkt02H3ZzFy1SSFsNPhlPlRE15pVr", "geometry")
lulc = ee.Image("users/servirmekong/california/RegionalLC")
pal = '6f6f6f,aec3d4,b1f9ff,111149,287463,152106,c3aa69,9ad2a5,7db087,486f50,387242,115420,cc0013,8dc33b,ffff00,a1843b,cec2a5,674c06,3bc3b2,f4a460,800080'


def showLandCover(mylegend):
    print("legend", mylegend)
    mymask = ee.Image(0)
    # Enable all checked boxes.
    for value in mylegend:
        print(value)
        tempmask = lulc.eq(ee.Number(int(value)))
        mymask = mymask.add(tempmask)
    print("returning")
    return lulc.updateMask(mymask).getMapId({
        'min': '0',
        'max': '20',
        'palette': pal
    })
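# A hedged usage sketch: the dictionary returned by getMapId() can be turned
# into a tile URL template (the 'tile_fetcher' key is available in recent
# earthengine-api versions; older releases expose 'mapid'/'token' instead):
mapid = showLandCover(['0', '1', '2'])
tile_url = mapid['tile_fetcher'].url_format
print(tile_url)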
# %%
Map = emap.Map(center=[40, -100], zoom=4)
Map.add_basemap('ROADMAP')  # Add Google Map
Map

# %%
"""
## Add Earth Engine Python script
"""

# %%
# Add Earth Engine dataset
# Create a FeatureCollection from an Earth Engine Table.

# Load census roads.
roads = ee.FeatureCollection('TIGER/2016/Roads')

# Get only interstates.
interstates = roads.filter(ee.Filter.eq('rttyp', 'I'))

# Get only surface roads.
surfaceRoads = roads.filter(ee.Filter.eq('rttyp', 'M'))

# Display the roads in different colors.
Map.addLayer(surfaceRoads, {'color': 'gray'}, 'surface roads')
Map.addLayer(interstates, {'color': 'red'}, 'interstates')

# %%
"""
## Display Earth Engine data layers
"""
def main(ini_path=None, overwrite_flag=False, delay_time=0, gee_key_file=None,
         max_ready=-1, reverse_flag=False):
    """Compute monthly Tcorr images by WRS2 tile

    Parameters
    ----------
    ini_path : str
        Input file path.
    overwrite_flag : bool, optional
        If True, overwrite existing files (the default is False).
    delay_time : float, optional
        Delay time in seconds between starting export tasks (or checking the
        number of queued tasks, see "max_ready" parameter).  The default is 0.
    gee_key_file : str, None, optional
        Earth Engine service account JSON key file (the default is None).
    max_ready : int, optional
        Maximum number of queued "READY" tasks.  The default is -1, which
        implies no limit to the number of tasks that will be submitted.
    reverse_flag : bool, optional
        If True, process WRS2 tiles in reverse order.

    """
    logging.info('\nCompute monthly Tcorr images by WRS2 tile')

    # TODO: Read from INI
    study_area_extent = [-124, 35, -119, 42]
    # study_area_extent = [-121.7, 39, -121.7, 39]
    # study_area_extent = None

    ini = utils.read_ini(ini_path)

    model_name = 'SSEBOP'
    # model_name = ini['INPUTS']['et_model'].upper()

    tmax_name = ini[model_name]['tmax_source']

    export_id_fmt = 'tcorr_scene_{product}_{wrs2}_month{month:02d}'
    asset_id_fmt = '{coll_id}/{wrs2}_month{month:02d}'

    tcorr_monthly_coll_id = '{}/{}_monthly'.format(
        ini['EXPORT']['export_coll'], tmax_name.lower())

    wrs2_coll_id = 'projects/earthengine-legacy/assets/' \
                   'projects/usgs-ssebop/wrs2_descending_custom'
    wrs2_tile_field = 'WRS2_TILE'
    # wrs2_path_field = 'PATH'
    # wrs2_row_field = 'ROW'

    try:
        wrs2_tiles = str(ini['INPUTS']['wrs2_tiles'])
        wrs2_tiles = [x.strip() for x in wrs2_tiles.split(',')]
        wrs2_tiles = sorted([x.lower() for x in wrs2_tiles if x])
    except KeyError:
        wrs2_tiles = []
        logging.debug('  wrs2_tiles: not set in INI, defaulting to []')
    except Exception as e:
        raise e

    try:
        study_area_extent = str(ini['INPUTS']['study_area_extent']) \
            .replace('[', '').replace(']', '').split(',')
        study_area_extent = [float(x.strip()) for x in study_area_extent]
    except KeyError:
        study_area_extent = None
        logging.debug('  study_area_extent: not set in INI')
    except Exception as e:
        raise e

    # TODO: Add try/except blocks and default values?
    collections = [x.strip() for x in ini['INPUTS']['collections'].split(',')]
    cloud_cover = float(ini['INPUTS']['cloud_cover'])
    min_pixel_count = float(ini['TCORR']['min_pixel_count'])
    min_scene_count = float(ini['TCORR']['min_scene_count'])

    if (tmax_name.upper() == 'CIMIS' and
            ini['INPUTS']['end_date'] < '2003-10-01'):
        logging.error(
            '\nCIMIS is not currently available before 2003-10-01, exiting\n')
        sys.exit()
    elif (tmax_name.upper() == 'DAYMET' and
            ini['INPUTS']['end_date'] > '2018-12-31'):
        logging.warning('\nDAYMET is not currently available past 2018-12-31, '
                        'using median Tmax values\n')
        # sys.exit()
    # elif (tmax_name.upper() == 'TOPOWX' and
    #         ini['INPUTS']['end_date'] > '2017-12-31'):
    #     logging.warning(
    #         '\nDAYMET is not currently available past 2017-12-31, '
    #         'using median Tmax values\n')
    #     # sys.exit()

    # Extract the model keyword arguments from the INI
    # Set the property name to lower case and try to cast values to numbers
    model_args = {
        k.lower(): float(v) if utils.is_number(v) else v
        for k, v in dict(ini[model_name]).items()}
    # et_reference_args = {
    #     k: model_args.pop(k)
    #     for k in [k for k in model_args.keys() if k.startswith('et_reference_')]}

    logging.info('\nInitializing Earth Engine')
    if gee_key_file:
        logging.info('  Using service account key file: {}'.format(gee_key_file))
        # The "EE_ACCOUNT" parameter is not used if the key file is valid
        ee.Initialize(ee.ServiceAccountCredentials('x', key_file=gee_key_file),
                      use_cloud_api=True)
    else:
        ee.Initialize(use_cloud_api=True)

    logging.debug('\nTmax properties')
    tmax_source = tmax_name.split('_', 1)[0]
    tmax_version = tmax_name.split('_', 1)[1]
    tmax_coll_id = 'projects/earthengine-legacy/assets/' \
                   'projects/usgs-ssebop/tmax/{}'.format(tmax_name.lower())
    tmax_coll = ee.ImageCollection(tmax_coll_id)
    tmax_mask = ee.Image(tmax_coll.first()).select([0]).multiply(0)
    logging.debug('  Collection: {}'.format(tmax_coll_id))
    logging.debug('  Source: {}'.format(tmax_source))
    logging.debug('  Version: {}'.format(tmax_version))

    logging.debug('\nExport properties')
    export_info = utils.get_info(ee.Image(tmax_mask))
    if 'daymet' in tmax_name.lower():
        # Custom smaller extent for DAYMET focused on CONUS
        export_extent = [-1999750, -1890500, 2500250, 1109500]
        export_shape = [4500, 3000]
        export_geo = [1000, 0, -1999750, 0, -1000, 1109500]
        # Custom medium extent for DAYMET of CONUS, Mexico, and southern Canada
        # export_extent = [-2099750, -3090500, 2900250, 1909500]
        # export_shape = [5000, 5000]
        # export_geo = [1000, 0, -2099750, 0, -1000, 1909500]
        export_crs = export_info['bands'][0]['crs']
    else:
        export_crs = export_info['bands'][0]['crs']
        export_geo = export_info['bands'][0]['crs_transform']
        export_shape = export_info['bands'][0]['dimensions']
    # export_geo = ee.Image(tmax_mask).projection().getInfo()['transform']
    # export_crs = ee.Image(tmax_mask).projection().getInfo()['crs']
    # export_shape = ee.Image(tmax_mask).getInfo()['bands'][0]['dimensions']
    export_extent = [
        export_geo[2], export_geo[5] + export_shape[1] * export_geo[4],
        export_geo[2] + export_shape[0] * export_geo[0], export_geo[5]]
    export_geom = ee.Geometry.Rectangle(
        export_extent, proj=export_crs, geodesic=False)
    logging.debug('  CRS: {}'.format(export_crs))
    logging.debug('  Extent: {}'.format(export_extent))
    logging.debug('  Geo: {}'.format(export_geo))
    logging.debug('  Shape: {}'.format(export_shape))

    if study_area_extent is None:
        if 'daymet' in tmax_name.lower():
            # CGM - For now force DAYMET to a slightly smaller "CONUS" extent
            study_area_extent = [-125, 25, -65, 49]
            # study_area_extent = [-125, 25, -65, 52]
        elif 'cimis' in tmax_name.lower():
            study_area_extent = [-124, 35, -119, 42]
        else:
            # TODO: Make sure output from bounds is in WGS84
            study_area_extent = tmax_mask.geometry().bounds().getInfo()
        logging.debug(f'\nStudy area extent not set in INI, '
                      f'default to {study_area_extent}')
    study_area_geom = ee.Geometry.Rectangle(
        study_area_extent, proj='EPSG:4326', geodesic=False)

    if not ee.data.getInfo(tcorr_monthly_coll_id):
        logging.info('\nExport collection does not exist and will be built'
                     '\n  {}'.format(tcorr_monthly_coll_id))
        input('Press ENTER to continue')
        ee.data.createAsset({'type': 'IMAGE_COLLECTION'}, tcorr_monthly_coll_id)

    # Get current asset list
    logging.debug('\nGetting GEE asset list')
    asset_list = utils.get_ee_assets(tcorr_monthly_coll_id)
    # if logging.getLogger().getEffectiveLevel() == logging.DEBUG:
    #     pprint.pprint(asset_list[:10])

    # Get current running tasks
    tasks = utils.get_ee_tasks()
    if logging.getLogger().getEffectiveLevel() == logging.DEBUG:
        logging.debug('  Tasks: {}\n'.format(len(tasks)))
        input('ENTER')

    # if cron_flag:
    #     # CGM - This seems like a silly way of getting the date as a datetime
    #     # Why am I doing this and not using the commented out line?
    #     end_dt = datetime.date.today().strftime('%Y-%m-%d')
    #     end_dt = datetime.datetime.strptime(end_dt, '%Y-%m-%d')
    #     end_dt = end_dt + datetime.timedelta(days=-4)
    #     # end_dt = datetime.datetime.today() + datetime.timedelta(days=-1)
    #     start_dt = end_dt + datetime.timedelta(days=-64)
    # else:
    start_dt = datetime.datetime.strptime(
        ini['INPUTS']['start_date'], '%Y-%m-%d')
    end_dt = datetime.datetime.strptime(
        ini['INPUTS']['end_date'], '%Y-%m-%d')
    start_date = start_dt.strftime('%Y-%m-%d')
    end_date = end_dt.strftime('%Y-%m-%d')
    next_date = (end_dt + datetime.timedelta(days=1)).strftime('%Y-%m-%d')
    logging.debug('Start Date: {}'.format(start_date))
    logging.debug('End Date:   {}\n'.format(end_date))

    # Limit by year and month
    try:
        month_list = sorted(list(utils.parse_int_set(ini['TCORR']['months'])))
    except:
        logging.info('\nTCORR "months" parameter not set in the INI,'
                     '\n  Defaulting to all months (1-12)\n')
        month_list = list(range(1, 13))
    try:
        year_list = sorted(list(utils.parse_int_set(ini['TCORR']['years'])))
    except:
        logging.info('\nTCORR "years" parameter not set in the INI,'
                     '\n  Defaulting to all available years\n')
        year_list = []

    # Get the list of WRS2 tiles that intersect the data area and study area
    wrs2_coll = ee.FeatureCollection(wrs2_coll_id) \
        .filterBounds(export_geom) \
        .filterBounds(study_area_geom)
    if wrs2_tiles:
        wrs2_coll = wrs2_coll.filter(
            ee.Filter.inList(wrs2_tile_field, wrs2_tiles))
    wrs2_info = wrs2_coll.getInfo()['features']

    for wrs2_ftr in sorted(wrs2_info,
                           key=lambda k: k['properties']['WRS2_TILE'],
                           reverse=reverse_flag):
        wrs2_tile = wrs2_ftr['properties'][wrs2_tile_field]
        logging.info('{}'.format(wrs2_tile))

        wrs2_path = int(wrs2_tile[1:4])
        wrs2_row = int(wrs2_tile[5:8])
        # wrs2_path = wrs2_ftr['properties']['PATH']
        # wrs2_row = wrs2_ftr['properties']['ROW']

        for month in month_list:
            logging.info('Month: {}'.format(month))

            export_id = export_id_fmt.format(
                product=tmax_name.lower(), wrs2=wrs2_tile, month=month)
            logging.debug('  Export ID: {}'.format(export_id))

            asset_id = asset_id_fmt.format(
                coll_id=tcorr_monthly_coll_id, wrs2=wrs2_tile, month=month)
            logging.debug('  Asset ID: {}'.format(asset_id))

            if overwrite_flag:
                if export_id in tasks.keys():
                    logging.debug('  Task already submitted, cancelling')
                    ee.data.cancelTask(tasks[export_id]['id'])
                # This is intentionally not an "elif" so that a task can be
                # cancelled and an existing image/file/asset can be removed
                if asset_id in asset_list:
                    logging.debug('  Asset already exists, removing')
                    ee.data.deleteAsset(asset_id)
            else:
                if export_id in tasks.keys():
                    logging.debug('  Task already submitted, exiting')
                    continue
                elif asset_id in asset_list:
                    logging.debug('  Asset already exists, skipping')
                    continue

            # CGM: I couldn't find a way to build this from the Collection class
            # TODO: Will need to be changed/updated for SR collection
            # TODO: Add code to handle real time collections
            landsat_coll = ee.ImageCollection([])
            if 'LANDSAT/LC08/C01/T1_TOA' in collections:
                l8_coll = ee.ImageCollection('LANDSAT/LC08/C01/T1_TOA') \
                    .filterMetadata('WRS_PATH', 'equals', wrs2_path) \
                    .filterMetadata('WRS_ROW', 'equals', wrs2_row) \
                    .filterMetadata('CLOUD_COVER_LAND', 'less_than', cloud_cover) \
                    .filterMetadata('DATA_TYPE', 'equals', 'L1TP') \
                    .filter(ee.Filter.gt('system:time_start',
                                         ee.Date('2013-03-24').millis())) \
                    .filter(ee.Filter.calendarRange(month, month, 'month'))
                #     .filterDate(start_date, next_date)
                landsat_coll = landsat_coll.merge(l8_coll)
            if 'LANDSAT/LE07/C01/T1_TOA' in collections:
                l7_coll = ee.ImageCollection('LANDSAT/LE07/C01/T1_TOA') \
                    .filterMetadata('WRS_PATH', 'equals', wrs2_path) \
                    .filterMetadata('WRS_ROW', 'equals', wrs2_row) \
                    .filterMetadata('CLOUD_COVER_LAND', 'less_than', cloud_cover) \
                    .filterMetadata('DATA_TYPE', 'equals', 'L1TP') \
                    .filter(ee.Filter.calendarRange(month, month, 'month'))
                #     .filterDate(start_date, next_date)
                landsat_coll = landsat_coll.merge(l7_coll)
            if 'LANDSAT/LT05/C01/T1_TOA' in collections:
                l5_coll = ee.ImageCollection('LANDSAT/LT05/C01/T1_TOA') \
                    .filterMetadata('WRS_PATH', 'equals', wrs2_path) \
                    .filterMetadata('WRS_ROW', 'equals', wrs2_row) \
                    .filterMetadata('CLOUD_COVER_LAND', 'less_than', cloud_cover) \
                    .filterMetadata('DATA_TYPE', 'equals', 'L1TP') \
                    .filter(ee.Filter.lt('system:time_start',
                                         ee.Date('2011-12-31').millis())) \
                    .filter(ee.Filter.calendarRange(month, month, 'month'))
                #     .filterDate(start_date, next_date)
                landsat_coll = landsat_coll.merge(l5_coll)
            # if 'LANDSAT/LT04/C01/T1_TOA' in collections:
            #     l4_coll = ee.ImageCollection('LANDSAT/LT04/C01/T1_TOA') \
            #         .filterMetadata('WRS_PATH', 'equals', wrs2_path) \
            #         .filterMetadata('WRS_ROW', 'equals', wrs2_row) \
            #         .filterMetadata('CLOUD_COVER_LAND', 'less_than', cloud_cover) \
            #         .filterMetadata('DATA_TYPE', 'equals', 'L1TP') \
            #         .filter(ee.Filter.calendarRange(month, month, 'month'))
            #     #     .filterDate(start_date, next_date)
            #     landsat_coll = landsat_coll.merge(l4_coll)

            def tcorr_img_func(landsat_img):
                # TODO: Will need to be changed for SR
                t_obj = ssebop.Image.from_landsat_c1_toa(
                    landsat_img, **model_args)
                t_stats = ee.Dictionary(t_obj.tcorr_stats) \
                    .combine({'tcorr_p5': 0, 'tcorr_count': 0}, overwrite=False)
                tcorr = ee.Number(t_stats.get('tcorr_p5'))
                count = ee.Number(t_stats.get('tcorr_count'))
                return tmax_mask.add(tcorr) \
                    .rename(['tcorr']) \
                    .set({
                        'system:time_start': ee.Image(landsat_img).get(
                            'system:time_start'),
                        'tcorr_value': tcorr,
                        'tcorr_pixel_count': count,
                        'scene_id': t_obj._scene_id,
                    })

            # Filter the Tcorr image collection based on the pixel counts
            tcorr_coll = ee.ImageCollection(landsat_coll.map(tcorr_img_func)) \
                .filterMetadata('tcorr_pixel_count', 'not_less_than',
                                min_pixel_count)

            # Use a common reducer for the image and property stats
            reducer = ee.Reducer.median() \
                .combine(ee.Reducer.count(), sharedInputs=True)

            # Compute stats from the collection images
            # This might be used when Tcorr is spatial
            # tcorr_img = tcorr_coll.reduce(reducer).rename(['tcorr', 'count'])
            # Compute stats from the image properties
            tcorr_stats = ee.List(tcorr_coll.aggregate_array('tcorr_value')) \
                .reduce(reducer)
            tcorr_stats = ee.Dictionary(tcorr_stats) \
                .combine({'median': 0, 'count': 0}, overwrite=False)
            tcorr = ee.Number(tcorr_stats.get('median'))
            count = ee.Number(tcorr_stats.get('count'))
            index = ee.Algorithms.If(count.gte(min_scene_count), 1, 9)

            # Write an empty image if the pixel count is too low
            # CGM: Check/test if this can be combined into a single If()
            tcorr_img = ee.Algorithms.If(
                count.gte(min_scene_count),
                tmax_mask.add(tcorr), tmax_mask.updateMask(0))
            count_img = ee.Algorithms.If(
                count.gte(min_scene_count),
                tmax_mask.add(count), tmax_mask.updateMask(0))

            # Clip to the Landsat image footprint
            output_img = ee.Image([tcorr_img, count_img]) \
                .rename(['tcorr', 'count']) \
                .clip(ee.Geometry(wrs2_ftr['geometry']))
            # Clear the transparency mask
            output_img = output_img.updateMask(output_img.unmask(0))

            output_img = output_img.set({
                'date_ingested': datetime.datetime.today().strftime('%Y-%m-%d'),
                'model_name': model_name,
                'model_version': ssebop.__version__,
                'month': int(month),
                # 'system:time_start': utils.millis(start_dt),
                'tcorr_value': tcorr,
                'tcorr_index': index,
                'tcorr_scene_count': count,
                'tmax_source': tmax_source.upper(),
                'tmax_version': tmax_version.upper(),
                'wrs2_path': wrs2_path,
                'wrs2_row': wrs2_row,
                'wrs2_tile': wrs2_tile,
                'years': ','.join(map(str, year_list)),
                # 'year_start': year_list[0],
                # 'year_end': year_list[-1],
            })
            # pprint.pprint(output_img.getInfo())
            # input('ENTER')

            logging.debug('  Building export task')
            task = ee.batch.Export.image.toAsset(
                image=output_img,
                description=export_id,
                assetId=asset_id,
                crs=export_crs,
                crsTransform='[' + ','.join(list(map(str, export_geo))) + ']',
                dimensions='{0}x{1}'.format(*export_shape),
            )

            logging.info('  Starting export task')
            utils.ee_task_start(task)

            # Pause before starting the next export task
            utils.delay_task(delay_time, max_ready)
            logging.debug('')
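# The next snippet maps addNDVI and cloudMask over a Landsat 8 TOA collection,
# but neither helper is defined in the excerpt. Minimal hedged sketches,
# assuming Landsat 8 TOA band names and the built-in simple cloud score:
def addNDVI(image):
    # normalizedDifference names the band 'nd', which matches the
    # 'nd_mean' selection after the mean reduction below.
    return image.addBands(image.normalizedDifference(['B5', 'B4']))

def cloudMask(image):
    # Mask pixels whose simple cloud score exceeds 20 (assumed threshold).
    scored = ee.Algorithms.Landsat.simpleCloudScore(image)
    return image.updateMask(scored.select('cloud').lt(20))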
# Load a Landsat collection, map the NDVI and cloud masking functions over it.
collection = ee.ImageCollection('LANDSAT/LC08/C01/T1_TOA') \
    .filterBounds(ee.Geometry.Point([-122.262, 37.8719])) \
    .filterDate('2014-03-01', '2014-05-31') \
    .map(addNDVI) \
    .map(cloudMask)

# Reduce the collection to the mean of each pixel and display.
meanImage = collection.reduce(ee.Reducer.mean())
vizParams = {'bands': ['B5_mean', 'B4_mean', 'B3_mean'], 'min': 0, 'max': 0.5}
Map.setCenter(-122.262, 37.8719, 10)
Map.addLayer(meanImage, vizParams, 'mean')

# Load a region in which to compute the mean and display it.
counties = ee.FeatureCollection('TIGER/2016/Counties')
santaClara = ee.Feature(
    counties.filter(ee.Filter.eq('NAME', 'Santa Clara')).first())
Map.addLayer(ee.Image().paint(santaClara, 0, 2),
             {'palette': 'yellow'}, 'Santa Clara')

# Get the mean of NDVI in the region.
mean = meanImage.select(['nd_mean']).reduceRegion(**{
    'reducer': ee.Reducer.mean(),
    'geometry': santaClara.geometry(),
    'scale': 30
})

# Print mean NDVI for the region.
print('Santa Clara spring mean NDVI:', mean.get('nd_mean').getInfo())
Map

# %%
"""
## Add Earth Engine Python script
"""

# %%
# Add Earth Engine dataset
# Example of FeatureCollection.reduceToImage()

# Define a feature collection with a value we want to average.
fc = ee.FeatureCollection([
    ee.Feature(
        ee.Geometry.Rectangle(-122.4550, 37.8035, -122.4781, 37.7935),
        {'value': 0}),
    ee.Feature(
        ee.Geometry.Polygon([[-122.4427, 37.8027], [-122.4587, 37.7987],
                             [-122.4440, 37.7934]]),
        {'value': 1})
])

# Reduce the collection to an image, where each pixel
# is the mean of the 'value' property in all features
# intersecting that pixel.
image_reduced = fc.reduceToImage(['value'], ee.Reducer.mean())

Map.setCenter(-122.4561, 37.7983, 14)
Map.addLayer(image_reduced, {
    'min': 0,
    'max': 1,
    'palette': ['008800', '00FF00']
}, "Image")
import ee
import requests
import wget  # used by downloadsummary() below
from datetime import datetime
from urllib import request


def gee_to_drive(s1, s2):
    today = ee.Date(datetime.now())
    woreda = ee.FeatureCollection("users/ramcharankankanala/Final")
    gpm = ee.ImageCollection("NASA/GPM_L3/IMERG_V06")
    lstTerra8 = ee.ImageCollection("MODIS/006/MOD11A2").filterDate(
        '2001-06-26', today)
    brdfReflect = ee.ImageCollection("MODIS/006/MCD43A4")
    brdfQA = ee.ImageCollection("MODIS/006/MCD43A2")

    # string1 = str(input('Start date:'))
    # string2 = str(input('End date:'))
    string1 = s1
    string2 = s2
    reqStartDate = ee.Date(string1)
    reqEndDate = ee.Date(string2)
    # print(reqStartDate)

    lstEarliestDate = lstTerra8.first().date()
    # print(lstEarliestDate)
    # Filter collection to dates from beginning to requested
    priorLstImgcol = lstTerra8.filterDate(lstEarliestDate, reqStartDate)
    # Get the latest (max) date of this collection of earlier images
    lstPrevMax = priorLstImgcol.reduceColumns(ee.Reducer.max(),
                                              ["system:time_start"])
    lstStartDate = ee.Date(lstPrevMax.get('max'))
    # print('lstStartDate', lstStartDate)

    gpmAllMax = gpm.reduceColumns(ee.Reducer.max(), ["system:time_start"])
    gpmAllEndDateTime = ee.Date(gpmAllMax.get('max'))
    gpmAllEndDate = ee.Date.fromYMD(**{
        'year': gpmAllEndDateTime.get('year'),
        'month': gpmAllEndDateTime.get('month'),
        'day': gpmAllEndDateTime.get('day')
    })
    precipStartDate = ee.Date(
        ee.Algorithms.If(
            gpmAllEndDate.millis().lt(reqStartDate.millis()),
            # if data ends before requested start, take last data date
            gpmAllEndDate,
            # otherwise use requested date as normal
            reqStartDate))
    # print('precipStartDate', precipStartDate)

    brdfAllMax = brdfReflect.reduceColumns(ee.Reducer.max(),
                                           ["system:time_start"])
    brdfAllEndDate = ee.Date(brdfAllMax.get('max'))
    brdfStartDate = ee.Date(
        ee.Algorithms.If(
            brdfAllEndDate.millis().lt(reqStartDate.millis()),
            # if data ends before requested start, take last data date
            brdfAllEndDate,
            # otherwise use requested date as normal
            reqStartDate))
    # print('brdfStartDate', brdfStartDate)

    # Step 2: Precipitation
    # Step 2a: Precipitation filtering and dates
    # Filter gpm by date, using modified start if necessary
    gpmFiltered = gpm.filterDate(
        precipStartDate,
        reqEndDate.advance(1, 'day')).select('precipitationCal')
    # Calculate date of most recent measurement for gpm
    # (in modified requested window)
    gpmMax = gpmFiltered.reduceColumns(ee.Reducer.max(), ["system:time_start"])
    gpmEndDate = ee.Date(gpmMax.get('max'))
    precipEndDate = gpmEndDate
    # print('precipEndDate ', precipEndDate)

    precipDays = precipEndDate.difference(precipStartDate, 'day')
    precipDatesPrep = ee.List.sequence(0, precipDays, 1)
    precipDatesPrep.getInfo()

    def makePrecipDates(n):
        return precipStartDate.advance(n, 'day')

    precipDates = precipDatesPrep.map(makePrecipDates)
    # precipDates.getInfo()

    def calcDailyPrecip(curdate):
        curyear = ee.Date(curdate).get('year')
        curdoy = ee.Date(curdate).getRelative('day', 'year').add(1)
        totprec = gpmFiltered.select('precipitationCal').filterDate(
            ee.Date(curdate),
            ee.Date(curdate).advance(1, 'day')).sum().multiply(0.5) \
            .rename('totprec')
        return totprec.set('doy', curdoy).set('year', curyear) \
            .set('system:time_start', curdate)

    dailyPrecipExtended = ee.ImageCollection.fromImages(
        precipDates.map(calcDailyPrecip))
    dailyPrecip = dailyPrecipExtended.filterDate(
        reqStartDate, precipEndDate.advance(1, 'day'))
    precipSummary = dailyPrecip.filterDate(reqStartDate,
                                           reqEndDate.advance(1, 'day'))

    def sumZonalPrecip(image):
        # To get the doy and year, we convert the metadata to grids
        # and then summarize
        image2 = image.addBands(
            [image.metadata('doy').int(), image.metadata('year').int()])
        # Reduce by regions to get zonal means for each county
        output = image2.select(
            ['year', 'doy', 'totprec'],
            ['year', 'doy', 'totprec']).reduceRegions(**{
                'collection': woreda,
                'reducer': ee.Reducer.mean(),
                'scale': 1000
            })
        return output

    # Map the zonal statistics function over the filtered precip data
    precipWoreda = precipSummary.map(sumZonalPrecip)
    # Flatten the results for export
    precipFlat = precipWoreda.flatten()

    # Step 3a: Calculate LST variables
    # Filter Terra LST by altered LST start date
    lstFiltered = lstTerra8.filterDate(
        lstStartDate,
        reqEndDate.advance(8, 'day')).filterBounds(woreda).select(
            'LST_Day_1km', 'QC_Day', 'LST_Night_1km', 'QC_Night')

    def filterLstQA(image):
        qaday = image.select(['QC_Day'])
        qanight = image.select(['QC_Night'])
        dayshift = qaday.rightShift(6)
        nightshift = qanight.rightShift(6)
        daymask = dayshift.lte(2)
        nightmask = nightshift.lte(2)
        outimage = ee.Image(image.select(['LST_Day_1km', 'LST_Night_1km']))
        outmask = ee.Image([daymask, nightmask])
        return outimage.updateMask(outmask)

    lstFilteredQA = lstFiltered.map(filterLstQA)

    def rescaleLst(image):
        lst_day = image.select('LST_Day_1km').multiply(0.02).subtract(
            273.15).rename('lst_day')
        lst_night = image.select('LST_Night_1km').multiply(0.02).subtract(
            273.15).rename('lst_night')
        lst_mean = image.expression(
            '(day + night) / 2', {
                'day': lst_day.select('lst_day'),
                'night': lst_night.select('lst_night')
            }).rename('lst_mean')
        return image.addBands(lst_day).addBands(lst_night).addBands(lst_mean)

    lstVars = lstFilteredQA.map(rescaleLst)

    lstRange = lstVars.reduceColumns(ee.Reducer.max(), ["system:time_start"])
    lstEndDate = ee.Date(lstRange.get('max')).advance(7, 'day')
    lstDays = lstEndDate.difference(lstStartDate, 'day')
    lstDatesPrep = ee.List.sequence(0, lstDays, 1)

    def makeLstDates(n):
        return lstStartDate.advance(n, 'day')

    lstDates = lstDatesPrep.map(makeLstDates)

    def calcDailyLst(curdate):
        curyear = ee.Date(curdate).get('year')
        curdoy = ee.Date(curdate).getRelative('day', 'year').add(1)
        moddoy = curdoy.divide(8).ceil().subtract(1).multiply(8).add(1)
        basedate = ee.Date.fromYMD(curyear, 1, 1)
        moddate = basedate.advance(moddoy.subtract(1), 'day')
        lst_day = lstVars.select('lst_day').filterDate(
            moddate, moddate.advance(1, 'day')).first().rename('lst_day')
        lst_night = lstVars.select('lst_night').filterDate(
            moddate, moddate.advance(1, 'day')).first().rename('lst_night')
        lst_mean = lstVars.select('lst_mean').filterDate(
            moddate, moddate.advance(1, 'day')).first().rename('lst_mean')
        return lst_day.addBands(lst_night).addBands(lst_mean) \
            .set('doy', curdoy).set('year', curyear) \
            .set('system:time_start', curdate)

    dailyLstExtended = ee.ImageCollection.fromImages(
        lstDates.map(calcDailyLst))
    dailyLst = dailyLstExtended.filterDate(reqStartDate,
                                           lstEndDate.advance(1, 'day'))
    lstSummary = dailyLst.filterDate(reqStartDate,
                                     reqEndDate.advance(1, 'day'))

    def sumZonalLst(image):
        # To get the doy and year, we convert the metadata to grids
        # and then summarize
        image2 = image.addBands(
            [image.metadata('doy').int(), image.metadata('year').int()])
        # Reduce by regions to get zonal means for each county
        output = image2.select(
            ['doy', 'year', 'lst_day', 'lst_night', 'lst_mean'],
            ['doy', 'year', 'lst_day', 'lst_night', 'lst_mean']).reduceRegions(**{
                'collection': woreda,
                'reducer': ee.Reducer.mean(),
                'scale': 1000
            })
        return output

    # Map the zonal statistics function over the filtered lst data
    lstWoreda = lstSummary.map(sumZonalLst)
    # Flatten the results for export
    lstFlat = lstWoreda.flatten()

    # Step 4: BRDF / Spectral Indices
    # Step 4a: Calculate spectral indices
    # Filter BRDF-adjusted reflectance by date
    brdfReflectVars = brdfReflect.filterDate(
        brdfStartDate,
        reqEndDate.advance(1, 'day')).filterBounds(woreda).select(
            ['Nadir_Reflectance_Band1', 'Nadir_Reflectance_Band2',
             'Nadir_Reflectance_Band3', 'Nadir_Reflectance_Band4',
             'Nadir_Reflectance_Band5', 'Nadir_Reflectance_Band6',
             'Nadir_Reflectance_Band7'],
            ['red', 'nir', 'blue', 'green', 'swir1', 'swir2', 'swir3'])

    # Filter BRDF QA by date
    brdfReflectQA = brdfQA.filterDate(
        brdfStartDate,
        reqEndDate.advance(1, 'day')).filterBounds(woreda).select(
            ['BRDF_Albedo_Band_Quality_Band1', 'BRDF_Albedo_Band_Quality_Band2',
             'BRDF_Albedo_Band_Quality_Band3', 'BRDF_Albedo_Band_Quality_Band4',
             'BRDF_Albedo_Band_Quality_Band5', 'BRDF_Albedo_Band_Quality_Band6',
             'BRDF_Albedo_Band_Quality_Band7', 'BRDF_Albedo_LandWaterType'],
            ['qa1', 'qa2', 'qa3', 'qa4', 'qa5', 'qa6', 'qa7', 'water'])

    idJoin = ee.Filter.equals(leftField='system:time_end',
                              rightField='system:time_end')
    # Define the join
    innerJoin = ee.Join.inner('NBAR', 'QA')
    # Apply the join
    brdfJoined = innerJoin.apply(brdfReflectVars, brdfReflectQA, idJoin)

    def addQABands(image):
        nbar = ee.Image(image.get('NBAR'))
        qa = ee.Image(image.get('QA')).select(['qa2'])
        water = ee.Image(image.get('QA')).select(['water'])
        return nbar.addBands([qa, water])

    brdfMerged = ee.ImageCollection(brdfJoined.map(addQABands))

    def filterBrdf(image):
        # Right now, only using QA info for the NIR band
        qaband = image.select(['qa2'])
        wband = image.select(['water'])
        # Use the server-side And(); the Python "and" keyword would
        # silently return only the second operand.
        qamask = qaband.lte(2).And(wband.eq(1))
        nir_r = image.select('nir').multiply(0.0001).rename('nir_r')
        red_r = image.select('red').multiply(0.0001).rename('red_r')
        swir1_r = image.select('swir1').multiply(0.0001).rename('swir1_r')
        swir2_r = image.select('swir2').multiply(0.0001).rename('swir2_r')
        blue_r = image.select('blue').multiply(0.0001).rename('blue_r')
        return image.addBands(nir_r).addBands(red_r).addBands(
            swir1_r).addBands(swir2_r).addBands(blue_r).updateMask(qamask)

    brdfFilteredVars = brdfMerged.map(filterBrdf)

    def calcBrdfIndices(image):
        curyear = ee.Date(image.get("system:time_start")).get('year')
        curdoy = ee.Date(image.get("system:time_start")).getRelative(
            'day', 'year').add(1)
        ndvi = image.normalizedDifference(['nir_r', 'red_r']).rename('ndvi')
        savi = image.expression(
            '1.5 * (nir - red) / (nir + red + 0.5)', {
                'nir': image.select('nir_r'),
                'red': image.select('red_r')
            }).rename('savi')
        evi = image.expression(
            '2.5 * (nir - red) / (nir + 6 * red - 7.5 * blue + 1)', {
                'nir': image.select('nir_r'),
                'red': image.select('red_r'),
                'blue': image.select('blue_r')
            }).rename('evi')
        ndwi5 = image.normalizedDifference(['nir_r', 'swir1_r']).rename('ndwi5')
        ndwi6 = image.normalizedDifference(['nir_r', 'swir2_r']).rename('ndwi6')
        return image.addBands(ndvi).addBands(savi).addBands(evi).addBands(
            ndwi5).addBands(ndwi6).set('doy', curdoy).set('year', curyear)

    brdfFilteredVars = brdfFilteredVars.map(calcBrdfIndices)

    brdfRange = brdfFilteredVars.reduceColumns(ee.Reducer.max(),
                                               ["system:time_start"])
    brdfEndDate = ee.Date(brdfRange.get('max'))
    brdfDays = brdfEndDate.difference(brdfStartDate, 'day')
    brdfDatesPrep = ee.List.sequence(0, brdfDays, 1)

    def makeBrdfDates(n):
        return brdfStartDate.advance(n, 'day')

    brdfDates = brdfDatesPrep.map(makeBrdfDates)

    def calcDailyBrdf(curdate):
        curyear = ee.Date(curdate).get('year')
        curdoy = ee.Date(curdate).getRelative('day', 'year').add(1)
        brdfTemp = brdfFilteredVars.filterDate(
            ee.Date(curdate), ee.Date(curdate).advance(1, 'day'))
        brdfSize = brdfTemp.size()
        outimg = ee.Image(
            ee.Algorithms.If(
                brdfSize.eq(0),
                # No image for this day: fill with fully-masked constant bands.
                ee.Image.constant(0).selfMask()
                .addBands(ee.Image.constant(0).selfMask())
                .addBands(ee.Image.constant(0).selfMask())
                .addBands(ee.Image.constant(0).selfMask())
                .addBands(ee.Image.constant(0).selfMask())
                .rename(['ndvi', 'evi', 'savi', 'ndwi5', 'ndwi6'])
                .set('doy', curdoy).set('year', curyear)
                .set('system:time_start', curdate),
                brdfTemp.first()))
        return outimg

    dailyBrdfExtended = ee.ImageCollection.fromImages(
        brdfDates.map(calcDailyBrdf))
    dailyBrdf = dailyBrdfExtended.filterDate(reqStartDate,
                                             brdfEndDate.advance(1, 'day'))
    brdfSummary = dailyBrdf.filterDate(reqStartDate,
                                       reqEndDate.advance(1, 'day'))

    # Function to calculate zonal statistics for spectral indices by county
    def sumZonalBrdf(image):
        # To get the doy and year, we convert the metadata to grids
        # and then summarize
        image2 = image.addBands(
            [image.metadata('doy').int(), image.metadata('year').int()])
        # Reduce by regions to get zonal means for each county
        output = image2.select(
            ['doy', 'year', 'ndvi', 'savi', 'evi', 'ndwi5', 'ndwi6'],
            ['doy', 'year', 'ndvi', 'savi', 'evi', 'ndwi5', 'ndwi6']
        ).reduceRegions(**{
            'collection': woreda,
            'reducer': ee.Reducer.mean(),
            'scale': 1000
        })
        return output

    # Map the zonal statistics function over the filtered spectral index data
    brdfWoreda = brdfSummary.map(sumZonalBrdf)
    # Flatten the results for export
    brdfFlat = brdfWoreda.flatten()

    # NOTE: the three download helpers below rely on exportSummaries(),
    # which is not defined in this excerpt.
    def downloadsummary():
        link = exportSummaries()
        url1 = link[0]
        url2 = link[1]
        url3 = link[2]
        print('precipURL:', url1)
        print('lstURL:', url2)
        print('brdfURL:', url3)
        wget.download(link[0], string1 + 'to' + string2 + 'precipFlat.csv')
        wget.download(link[1], string1 + 'to' + string2 + 'lstFlat.csv')
        wget.download(link[2], string1 + 'to' + string2 + 'brdfFlat.csv')
        print("Data downloaded to local drive")

    def datatolocaldrive():
        link = exportSummaries()
        url1 = link[0]
        url2 = link[1]
        url3 = link[2]
        print('precipURL:', url1)
        print('lstURL:', url2)
        print('brdfURL:', url3)
        r = requests.get(url1, allow_redirects=True)
        with open(string1 + 'to' + string2 + 'precipFlat.csv', 'wb') as f:
            f.write(r.content)
        r1 = requests.get(url2, allow_redirects=True)
        with open(string1 + 'to' + string2 + 'lstFlat.csv', 'wb') as f1:
            f1.write(r1.content)
        r2 = requests.get(url3, allow_redirects=True)
        with open(string1 + 'to' + string2 + 'brdfFlat.csv', 'wb') as f2:
            f2.write(r2.content)

    def datatolocal():
        link = exportSummaries()
        url1 = link[0]
        url2 = link[1]
        url3 = link[2]
        print('precipURL:', url1)
        print('lstURL:', url2)
        print('brdfURL:', url3)
        request.urlretrieve(url1, string1 + 'to' + string2 + 'precipFlat.csv')
        request.urlretrieve(url2, string1 + 'to' + string2 + 'lstFlat.csv')
        request.urlretrieve(url3, string1 + 'to' + string2 + 'brdfFlat.csv')

    def ExportToDrive():
        props1 = {
            'driveFolder': 'Ethiopiadata',
            'driveFileNamePrefix': 'precip' + string1 + 'to' + string2,
            'selectors': [
                'NewPCODE', 'R_NAME', 'W_NAME', 'Z_NAME', 'doy', 'year',
                'totprec'
            ],
            'fileFormat': 'CSV'
        }
        task1 = ee.batch.Export.table(
            precipFlat, 'Export_precip' + string1 + 'to' + string2, props1)
        props2 = {
            'driveFolder': 'Ethiopiadata',
            'driveFileNamePrefix': 'lst' + string1 + 'to' + string2,
            'selectors': [
                'NewPCODE', 'R_NAME', 'W_NAME', 'Z_NAME', 'doy', 'year',
                'lst_day', 'lst_night', 'lst_mean'
            ],
            'fileFormat': 'CSV'
        }
        task2 = ee.batch.Export.table(
            lstFlat, 'Export_lst' + string1 + 'to' + string2, props2)
        props3 = {
            'driveFolder': 'Ethiopiadata',
            'driveFileNamePrefix': 'brdf' + string1 + 'to' + string2,
            'selectors': [
                'NewPCODE', 'R_NAME', 'W_NAME', 'Z_NAME', 'doy', 'year',
                'ndvi', 'savi', 'evi', 'ndwi5', 'ndwi6'
            ],
            'fileFormat': 'CSV'
        }
        task3 = ee.batch.Export.table(
            brdfFlat, 'Export_brdf' + string1 + 'to' + string2, props3)

        task1.start()
        task2.start()
        task3.start()
        print("Data will export to the Ethiopiadata folder in Google Drive; "
              "this can take a while depending on the date range.")

    # downloadsummary()
    ExportToDrive()
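# The next snippet references `redwoods`, which is not defined in the excerpt.
# A hedged placeholder ROI (the coordinates are an assumption, chosen near
# the California redwoods):
redwoods = ee.Geometry.Rectangle(-124.0866, 41.7627, -124.0375, 41.7899)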
# Load input NAIP imagery and build a mosaic.
naipCollection = ee.ImageCollection('USDA/NAIP/DOQQ') \
    .filterBounds(redwoods) \
    .filterDate('2012-01-01', '2012-12-31')
naip = naipCollection.mosaic()

# Compute NDVI from the NAIP imagery.
naipNDVI = naip.normalizedDifference(['N', 'R'])

# Compute standard deviation (SD) as texture of the NDVI.
texture = naipNDVI.reduceNeighborhood(**{
    'reducer': ee.Reducer.stdDev(),
    'kernel': ee.Kernel.circle(7),
})

# Display the results.
Map.centerObject(ee.FeatureCollection(redwoods), 12)
Map.addLayer(naip, {}, 'NAIP input imagery')
Map.addLayer(naipNDVI, {'min': -1, 'max': 1, 'palette': ['FF0000', '00FF00']},
             'NDVI')
Map.addLayer(texture, {'min': 0, 'max': 0.3}, 'SD of NDVI')

# %%
"""
## Display Earth Engine data layers
"""

# %%
Map.addLayerControl()  # This line is not needed for ipyleaflet-based Map.
Map
y2011 = []
y2012 = []
y2015 = []
for row in out.iterrows():
    geom = ee.Geometry.Point(row[1]['longitude'],
                             row[1]['latitude']).buffer(7500)
    feat = ee.Feature(geom, {'hh_refno': row[1]['hh_refno'],
                             'year': str(row[1]['year'])})
    if row[1]['year'] == 2011:
        y2011.append(feat)
    elif row[1]['year'] == 2012:
        y2012.append(feat)
    elif row[1]['year'] == 2015:
        y2015.append(feat)
    else:
        raise ValueError('Invalid year. Must be 2011, 2012, or 2015')

# Split the large years into chunks of at most 3000 features so each
# FeatureCollection stays a manageable size.
y2011 = ee.FeatureCollection(y2011)
y2012a = ee.FeatureCollection(y2012[:3000])
y2012b = ee.FeatureCollection(y2012[3000:6000])
y2012c = ee.FeatureCollection(y2012[6000:9000])
y2012d = ee.FeatureCollection(y2012[9000:12000])
y2012e = ee.FeatureCollection(y2012[12000:])
y2015a = ee.FeatureCollection(y2015[:3000])
y2015b = ee.FeatureCollection(y2015[3000:6000])
y2015c = ee.FeatureCollection(y2015[6000:9000])
y2015d = ee.FeatureCollection(y2015[9000:12000])
y2015e = ee.FeatureCollection(y2015[12000:])

y2011 = [y2011]
y2012 = [y2012a, y2012b, y2012c, y2012d, y2012e]
y2015 = [y2015a, y2015b, y2015c, y2015d, y2015e]
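# A hedged generic helper that avoids the manual slicing above (chunk_fc is
# not part of the original script):
def chunk_fc(features, size=3000):
    # Split a Python list of ee.Feature objects into FeatureCollections
    # of at most `size` features each.
    return [ee.FeatureCollection(features[i:i + size])
            for i in range(0, len(features), size)]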
'''
Created on Aug 11, 2019

@author: nbiswas
'''
import ee, os

ee.Initialize()

allreservoirs = ee.FeatureCollection(
    "users/nbiswas/grand1p3_reservoirs_saafseasia")
# Note: the variable is named L8 but the asset ID is a Landsat 5 collection.
L8 = ee.ImageCollection("LANDSAT/LT05/C01/T1")
# geometry = table2

Date_Start = ee.Date('1984-01-01')
Date_End = ee.Date('2013-12-31')
cloud_thresh = 20


def clipimage(img):
    # `bgeometry` is expected to be defined elsewhere in the script.
    return img.clip(bgeometry)


def getQABits(image, start, end, newName):
    # Compute the bits we need to extract.
    pattern = 0
    for i in range(start, end + 1):
        pattern += 2 ** i
    # Return a single band image of the extracted QA bits,
    # giving the band a new name.
    return image.select([0], [newName]).bitwiseAnd(pattern).rightShift(start)
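# A hedged usage sketch for getQABits (the band name and bit position are
# assumptions; check the QA documentation for the collection you use):
def maskClouds(img):
    cloud = getQABits(img.select('BQA'), 4, 4, 'cloud')
    return img.updateMask(cloud.eq(0))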
def wrs2_tile_export_generator(study_area_path, wrs2_coll, cell_size=30,
                               output_crs=None, output_osr=None,
                               wrs2_tile_list=[], wrs2_tile_field='WRS2_TILE',
                               snap_x=15, snap_y=15, wrs2_buffer=0,
                               n_max=1000, simplify_buffer=1000):
    """Generate WRS2 tile image metadata for the study area geometry

    Args:
        study_area_path (str): File path of the study area shapefile
        wrs2_coll (str): WRS2 Landsat footprint asset ID.
            (should default to "projects/eeflux/wrs2_descending_custom")
        cell_size (float): Cell size [m].  Defaults to 30.
        output_crs (str): Output CRS (for setting 'crs' parameter in EE
            calls).  Defaults to None.
        output_osr (osr.SpatialReference): Output coordinate system.
            Defaults to None.
        wrs2_tile_list (list): User defined WRS2 tile subset
        wrs2_tile_field (str): WRS2 tile field name in the fusion table.
            Defaults to 'WRS2_TILE'.
        snap_x (float): X snap coordinate [m].  Defaults to 15.
        snap_y (float): Y snap coordinate [m].  Defaults to 15.
        wrs2_buffer (float): WRS2 footprint buffer distance [m].
            Defaults to 0.
        n_max (int): Maximum number of WRS2 tiles to join to feature.
            Defaults to 1000.
        simplify_buffer (float): Study area buffer/simplify distance [m].
            Defaults to 1000.

    Yields:
        dict: export information
    """
    logging.info('\nReading study area shapefile')
    logging.info('  {}'.format(study_area_path))
    study_area_ds = ogr.Open(study_area_path, 0)
    study_area_lyr = study_area_ds.GetLayer()
    study_area_osr = study_area_lyr.GetSpatialRef()
    study_area_proj = study_area_osr.ExportToWkt()
    # study_area_osr = study_area_osr.ExportToProj4()
    # logging.debug('  Projection: {}'.format(study_area_proj))

    # Convert WKT to EE WKT
    # study_area_crs = re.sub(
    #     '\s+', '', ee.Projection(study_area_proj).wkt().getInfo())
    study_area_crs = str(study_area_proj)
    logging.debug('  Study area projection: {}'.format(study_area_crs))

    # Get the dissolved/unioned geometry of the study area
    output_geom = ogr.Geometry(ogr.wkbMultiPolygon)
    # shape_list = []
    for study_area_ftr in study_area_lyr:
        # Union each feature
        output_geom = output_geom.Union(study_area_ftr.GetGeometryRef())
    study_area_ds = None

    # Project the study area geometry to EPSG:3857
    # so units will be meters for buffering and simplifying
    temp_crs = 'EPSG:3857'
    temp_osr = osr.SpatialReference()
    temp_osr.ImportFromEPSG(3857)
    output_tx = osr.CoordinateTransformation(study_area_osr, temp_osr)
    output_geom.Transform(output_tx)

    # Buffer/simplify values are assuming the geometry units are in meters
    output_simplify = output_geom.Buffer(simplify_buffer) \
        .SimplifyPreserveTopology(simplify_buffer)

    # Generate an EE feature
    output_ee_geom = ee.Geometry(
        json.loads(output_simplify.ExportToJson()), temp_crs, False)

    # Pre-filter the WRS2 descending collection
    # with the buffered study area geometry,
    # then buffer the WRS2 descending collection
    if wrs2_buffer:
        wrs2_coll = ee.FeatureCollection(wrs2_coll) \
            .filterBounds(output_ee_geom.buffer(wrs2_buffer, 1)) \
            .map(lambda ftr: ftr.buffer(wrs2_buffer, 1))
    else:
        wrs2_coll = ee.FeatureCollection(wrs2_coll) \
            .filterBounds(output_ee_geom)

    # Join intersecting geometries
    join_coll = ee.Join.saveAll(matchesKey='scenes').apply(
        ee.FeatureCollection([ee.Feature(output_ee_geom)]), wrs2_coll,
        ee.Filter.intersects(leftField='.geo', rightField='.geo',
                             maxError=10))

    # It is not necessary to map over the join collection
    # since there is only one study area feature
    output_wrs2_tiles = ee.List(ee.Feature(join_coll.first()).get('scenes'))

    def wrs2_bounds(ftr):
        crs = ee.String('EPSG:').cat(
            ee.Number(ee.Feature(ftr).get('EPSG')).format('%d'))
        extent = ee.Feature(ftr).geometry() \
            .bounds(1, ee.Projection(crs)).coordinates().get(0)
        # extent = ee.Array(extent).transpose().toList()
        # extent = ee.List([
        #     ee.List(extent.get(0)).reduce(ee.Reducer.min()),
        #     ee.List(extent.get(1)).reduce(ee.Reducer.min()),
        #     ee.List(extent.get(0)).reduce(ee.Reducer.max()),
        #     ee.List(extent.get(1)).reduce(ee.Reducer.max())
        # ])
        return ee.Feature(None, {
            'crs': crs,
            'extent': extent,
            'wrs2_tile': ee.Feature(ftr).get(wrs2_tile_field)
        })

    output_list = output_wrs2_tiles.map(wrs2_bounds).getInfo()

    for output_info in output_list:
        wrs2_tile = output_info['properties']['wrs2_tile']
        if wrs2_tile_list and wrs2_tile not in wrs2_tile_list:
            logging.debug(
                '  WRS2 tile {} not in INI WRS2 tiles, skipping'.format(
                    wrs2_tile))
            continue

        # Use output CRS if it was set, otherwise use WRS2 tile CRS
        if output_crs is None:
            wrs2_tile_crs = output_info['properties']['crs']
        else:
            wrs2_tile_crs = output_crs

        output_extent = output_info['properties']['extent']
        output_extent = [
            min([x[0] for x in output_extent]),
            min([x[1] for x in output_extent]),
            max([x[0] for x in output_extent]),
            max([x[1] for x in output_extent])
        ]

        # Adjust extent to the cell size
        adjust_size = 2 * cell_size
        output_extent[0] = math.floor(
            (output_extent[0] - snap_x) / adjust_size) * adjust_size + snap_x
        output_extent[1] = math.floor(
            (output_extent[1] - snap_y) / adjust_size) * adjust_size + snap_y
        output_extent[2] = math.ceil(
            (output_extent[2] - snap_x) / adjust_size) * adjust_size + snap_x
        output_extent[3] = math.ceil(
            (output_extent[3] - snap_y) / adjust_size) * adjust_size + snap_y

        output_geo = [
            cell_size, 0, output_extent[0], 0, -cell_size, output_extent[3]
        ]
        # output_geom = extent_geom(output_extent)

        output_shape = '{0}x{1}'.format(
            int(abs(output_extent[2] - output_extent[0]) / cell_size),
            int(abs(output_extent[3] - output_extent[1]) / cell_size))
        max_pixels = 2 * reduce(mul, map(int, output_shape.split('x')))

        yield {
            'crs': wrs2_tile_crs,
            'extent': output_extent,
            'geo': output_geo,
            # 'geojson': json.loads(output_geom.ExportToJson()),
            'index': wrs2_tile,
            'maxpixels': max_pixels,
            'wrs2_tiles': [wrs2_tile],
            'shape': output_shape
        }
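# A hedged driver sketch for the generator above (the shapefile path is a
# placeholder, not from the original):
if __name__ == '__main__':
    for tile_info in wrs2_tile_export_generator(
            'study_area.shp', 'projects/eeflux/wrs2_descending_custom'):
        logging.info('%s %s', tile_info['index'], tile_info['shape'])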
import ee
import time
import pandas as pd

from pull_MODIS import export_oneimage, appendBand_0

ee.Initialize()

locations = pd.read_csv('locations_major.csv')

county_region = ee.FeatureCollection(
    'ft:18Ayj5e7JxxtTPm1BdMnnzWbZMrxMB49eqGDTsaSp')

imgcoll = ee.ImageCollection('MODIS/051/MCD12Q1') \
    .filterBounds(ee.Geometry.Rectangle(-106.5, 50, -64, 23)) \
    .filterDate('2001-12-31', '2015-12-31')
img = imgcoll.iterate(appendBand_0)
img = ee.Image(img)

# img_0 = ee.Image(ee.Number(0))
# img_5000 = ee.Image(ee.Number(5000))
#
# img = img.min(img_5000)
# img = img.max(img_0)

# img = ee.Image(ee.Number(100))
# img = ee.ImageCollection('LC8_L1T').mosaic()

for loc1, loc2, lat, lon in locations.values:
    file_name = '{}_{}'.format(int(loc1), int(loc2))

    offset = 0.11
    scale = 500
The default basemap is `Google Maps`. [Additional basemaps](https://github.com/giswqs/geemap/blob/master/geemap/basemaps.py) can be added using the `Map.add_basemap()` function.
"""

# %%
Map = geemap.Map(center=[40, -100], zoom=4)
Map

# %%
"""
## Add Earth Engine Python script
"""

# %%
# Add Earth Engine dataset
collection = ee.ImageCollection('USDA/NAIP/DOQQ')

fromFT = ee.FeatureCollection('ft:1CLldB-ULPyULBT2mxoRNv7enckVF0gCQoD2oH7XP')
polys = fromFT.geometry()
# polys = ee.Geometry.Polygon(
#     [[[-99.29615020751953, 46.725459351792374],
#       [-99.2116928100586, 46.72404725733022],
#       [-99.21443939208984, 46.772037733479884],
#       [-99.30267333984375, 46.77321343419932]]])

centroid = polys.centroid()
lng, lat = centroid.getInfo()['coordinates']
print("lng = {}, lat = {}".format(lng, lat))

lng_lat = ee.Geometry.Point(lng, lat)
naip = collection.filterBounds(polys)
naip_2015 = naip.filterDate('2015-01-01', '2015-12-31')
ppr = naip_2015.mosaic()
## Create an interactive map
The default basemap is `Google Maps`. [Additional basemaps](https://github.com/giswqs/geemap/blob/master/geemap/basemaps.py) can be added using the `Map.add_basemap()` function.
"""

# %%
Map = geemap.Map(center=[40, -100], zoom=4)
Map

# %%
"""
## Add Earth Engine Python script
"""

# %%
# Add Earth Engine dataset
fc = ee.FeatureCollection('TIGER/2018/States')
print(fc.first().getInfo())

new_fc = fc.select(['STUSPS', 'NAME', 'ALAND'], ['abbr', 'name', 'area'])
print(new_fc.first().getInfo())

propertyNames = new_fc.first().propertyNames()
print(propertyNames.getInfo())

# %%
"""
## Display Earth Engine data layers
"""

# %%
# Import modules.
import ee, datetime

# Check user credentials.
ee.Initialize()

# Load in AOI fusion table.
ft2 = ee.FeatureCollection("ft:137bhlHOilFr5iTTuSMWJo7jRLCnSCHQaICzLzmJQ")

# Load in flood scene.
flood = ee.Image(
    'COPERNICUS/S1_GRD/S1A_IW_GRDH_1SDV_20160103T062204_20160103T062229_009326_00D7AC_C9F2'
)

# Determine orbit/track metadata.
im_meta1 = flood.getInfo()
im_meta2 = im_meta1['properties']
direction = im_meta2['orbitProperties_pass']
orbit = im_meta2['relativeOrbitNumber_start']
print('Orbit Direction: ', direction)
print('Track Number: ', orbit)

# Load in Sentinel-1 collection.
collection = ee.ImageCollection("COPERNICUS/S1_GRD")

# Filter the S1 collection. (ee.Filter.eq replaces the deprecated
# ee.Filter().eq form; `filtered`/`img_list` avoid shadowing the
# built-in filter/list names.)
filtered = collection.filterDate(datetime.datetime(2015, 7, 3),
                                 datetime.datetime(2015, 11, 5)) \
    .filterBounds(ft2) \
    .filter(ee.Filter.eq('transmitterReceiverPolarisation', 'VH')) \
    .filter(ee.Filter.eq('instrumentMode', 'IW')) \
    .filter(ee.Filter.eq('orbitProperties_pass', direction)) \
    .filter(ee.Filter.eq('relativeOrbitNumber_start', orbit))

# It would be useful to get the number of images in the collection and use
# it in the toList() command; theoretically there could be hundreds of
# images going into this calculation in the future.
img_list = filtered.toList(100)
metalist = img_list.getInfo()
for image in metalist:
    props = image['properties']
    print(props['system:index'])

# Create median reference image.
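# The excerpt ends at the comment above. A hedged one-line continuation,
# assuming the reference image is simply the per-pixel median of the
# filtered stack:
reference = filtered.median()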
The default basemap is `Google Satellite`. [Additional basemaps](https://github.com/giswqs/geemap/blob/master/geemap/geemap.py#L13) can be added using the `Map.add_basemap()` function.
"""

# %%
Map = emap.Map(center=[40, -100], zoom=4)
Map.add_basemap('ROADMAP')  # Add Google Map
Map

# %%
"""
## Add Earth Engine Python script
"""

# %%
# Add Earth Engine dataset
HUC10 = ee.FeatureCollection("USGS/WBD/2017/HUC10")
HUC08 = ee.FeatureCollection('USGS/WBD/2017/HUC08')
roi = HUC08.filter(ee.Filter.eq('name', 'Pipestem'))

Map.centerObject(roi, 10)
Map.addLayer(ee.Image().paint(roi, 0, 1), {}, 'HUC8')

bound = ee.Geometry(roi.geometry()).bounds()
Map.addLayer(ee.Image().paint(bound, 0, 1), {'palette': 'red'},
             "Minimum bounding geometry")

# %%
"""
## Display Earth Engine data layers
"""
import ee
from datetime import datetime
import pandas as pd

ee.Initialize()

# Import the shapefile with water bodies larger than 100 ha.
table = ee.FeatureCollection(
    "users/mvcastro1975/massa_dagua/massa_dagua_maior_100ha")

# Import the dataset with monthly water recurrence.
monthly_hist = ee.ImageCollection('JRC/GSW1_0/MonthlyHistory').select('water')


def retorna_lista(img, featCol):
    # Use the date of the image being reduced (the original referenced an
    # undefined `image_fst` here).
    data_analise = img.date().format()

    data_count = img.reduceRegions(featCol, ee.Reducer.count(), 500)
    data_count = data_count.map(
        lambda feat: feat.set('data_analise', data_analise))

    data_mean = img.reduceRegions(featCol, ee.Reducer.mean(), 500)
    data_mean = data_mean.map(
        lambda feat: feat.set('data_analise', data_analise))

    data_median = img.reduceRegions(featCol, ee.Reducer.median(), 500)
    data_median = data_median.map(
        lambda feat: feat.set('data_analise', data_analise))

    return [data_count.getInfo(), data_mean.getInfo(), data_median.getInfo()]
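# A hedged usage sketch (not part of the original): reduce the first monthly
# image over all water bodies and flatten the count output into a DataFrame.
count, mean, median = retorna_lista(ee.Image(monthly_hist.first()), table)
df_count = pd.DataFrame([f['properties'] for f in count['features']])
print(df_count.head())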
"""Buffer Example. Display the area within 2 kilometers of any San Francisco BART station. """ import ee import ee.mapclient ee.Initialize() ee.mapclient.centerMap(-122.4, 37.7, 11) bart_stations = ee.FeatureCollection( 'ft:1xCCZkVn8DIkB7i7RVkvsYWxAxsdsQZ6SbD9PCXw') buffered = bart_stations.map(lambda f: f.buffer(2000)) unioned = buffered.union() ee.mapclient.addToMap(unioned, {'color': '800080'})
# %%
Map = folium.Map(location=[40, -100], zoom_start=4)
Map.setOptions('HYBRID')

# %%
'''
## Add Earth Engine Python script
'''

# %%
def addArea(feature):
    return feature.set({'areaHa': feature.geometry().area().divide(100 * 100)})

# Load watersheds from a data table.
sheds = ee.FeatureCollection('USGS/WBD/2017/HUC06')

# This function computes the feature's geometry area and adds it as a property
# (original JavaScript version):
# addArea = function(feature) {
#   return feature.set({areaHa: feature.geometry().area().divide(100 * 100)})
# }

# Map the area getting function over the FeatureCollection.
areaAdded = sheds.map(addArea)

# Print the first feature from the collection with the added property.
print('First feature:', areaAdded.first().getInfo())

# %%
def __init__(self):
    """Initialize the environment."""

    # Initialize the Earth Engine object, using the authentication credentials.
    ee.Initialize()
    self.timeString = time.strftime("%Y%m%d_%H%M%S")

    # SEASONS:
    # '0': Dry Cool: Nov - Feb (305 - 59)
    # '1': Dry Hot: Mar - Apr (60 - 181)
    # '2': Rainy: May - Oct (182 - 304)
    startjulian = {'drycool': 305, 'dryhot': 60, 'rainy': 182}
    endjulian = {'drycool': 59, 'dryhot': 181, 'rainy': 304}

    # set dates
    self.startYear = int(args.year)
    self.endYear = int(args.year)
    self.startJulian = startjulian[args.season]
    self.endJulian = endjulian[args.season]

    if args.season == 'drycool':
        self.startYear = int(args.year) - 1

    self.NgheAn = [[103.876, 18.552], [105.806, 18.552], [105.806, 19.999],
                   [103.876, 19.999], [103.876, 18.552]]
    # self.NgheAn = [[100.876, 18.552], [105.806, 18.552], [105.806, 22.999],
    #                [100.876, 22.999], [100.876, 18.552]]

    collectionName = "projects/servir-mekong/usgs_sr_composites/" + args.season
    self.mekongRegion = ee.FeatureCollection(
        'ft:1LEGeqwlBCAlN61ie5ol24NdUDqB1MgpFR_sJNWQJ')
    self.collection = ee.ImageCollection(collectionName)

    # variables for the TDOM filter
    self.applyTDOM = True
    self.TDOMyears = 25
    self.shadowSumBands = ['nir', 'swir1']
    self.zScoreThresh = -0.8
    self.shadowSumThresh = 0.35
    self.dilatePixels = 2

    # users/servirmekong/usgs_sr_composites/drycool
    self.outputName = args.season + str(self.startYear) + "_" + str(self.endYear)

    # variable to filter cloud threshold
    self.metadataCloudCoverMax = 40

    # threshold for landsatCloudScore
    self.cloudThreshold = 10
    self.hazeThresh = 200

    # apply a filter to filter for high values
    self.filterPercentile = True
    self.filterPercentileYears = 25
    # percentiles to filter for bad data
    self.lowPercentile = 2
    self.highPercentile = 80

    # whether to use image collections
    self.useL4 = True
    self.useL5 = True
    self.useL7 = True
    self.useL7scanline = False
    self.useL8 = True

    # On May 31, 2003 the Scan Line Corrector (SLC) in the ETM+ instrument failed
    self.l7Failed = ee.Date.fromYMD(2003, 5, 31)

    # apply cloud masks
    self.maskSR = True

    # get indices
    self.calcIndices = True

    # bands for tasseled cap (maybe move)
    self.tcInputBands = ee.List(['blue', 'green', 'red', 'nir', 'swir1', 'swir2'])

    # bands to select
    self.bandNamesLandsat = ee.List(
        ['blue', 'green', 'red', 'nir', 'swir1', 'thermal', 'swir2',
         'sr_atmos_opacity', 'pixel_qa', 'radsat_qa'])
    # bands for export
    self.exportBands = ee.List(
        ['blue', 'green', 'red', 'nir', 'swir1', 'thermal', 'swir2'])
    # bands for dividing
    self.divideBands = ee.List(['blue', 'green', 'red', 'nir', 'swir1', 'swir2'])
    # bands for stdDev
    self.stdDevBands = ee.List(
        ['blue', 'green', 'red', 'nir', 'swir1', 'thermal', 'swir2'])
    #     ,'ND_nir_red','ND_nir_swir2','ND_green_swir1'
    self.stdDevExportsBands = ee.List(
        ['blue_stdev', 'green_stdev', 'red_stdev', 'nir_stdev', 'swir1_stdev',
         'thermal_stdev', 'swir2_stdev'])
    #     ,'ND_nir_red','ND_nir_swir2','ND_green_swir1'

    # calculate stdDev for indices
    self.stdIndiceDevBands = ee.List(
        ["ND_nir_swir2", "ND_green_swir1", "ND_nir_red"])
    self.stdIndiceDevBandsExport = ee.List(
        ["ND_nir_swir2_stdDev", "ND_green_swir1_stdDev", "ND_nir_red_stdDev"])

    # apply defringe
    self.defringe = True

    # pixel size
    self.pixSize = 30

    # user ID
    # self.userID = "users/servirmekong/assemblage/"
    # self.userID = "projects/servir-mekong/temp/nghean_medoid_"
    # self.userID = "projects/servir-mekong/usgs_sr_composites/" + args.season + "/"
    self.userID = "projects/servir-mekong/usgs_sr_composites/" + args.season + "/SC_"

    self.landsat4count = 0
    self.landsat5count = 0
    self.landsat7count = 0
    self.landsat8count = 0

    # define the landsat bands
    # Define the Landsat band indices per sensor (surface reflectance collections).
    self.sensorBandDictLandsatSR = ee.Dictionary({
        'L8': ee.List([1, 2, 3, 4, 5, 7, 6, 9, 10, 11]),
        'L7': ee.List([0, 1, 2, 3, 4, 5, 6, 7, 9, 10]),
        'L5': ee.List([0, 1, 2, 3, 4, 5, 6, 7, 9, 10]),
        'L4': ee.List([0, 1, 2, 3, 4, 5, 6, 7, 9, 10])})

    # Compositing method flags; just placeholders for now.
    self.calcMedoid = False
    self.calcMedian = True
    self.calcMean = False

    self.fillGaps = True
    self.fillGapYears = 25

    # Threshold for defringing Landsat 5 and 7.
    self.fringeCountThreshold = 279

    # 41x41 defringe kernel: each non-zero row holds a run of nine 1s that
    # shifts one column to the left as the rows descend, separated by all-zero
    # rows. Built with list arithmetic instead of the original hard-coded
    # matrix; the weights are identical.
    def fringeRow(offset):
        # A 41-element row: `offset` zeros, nine ones, then trailing zeros.
        return [0] * offset + [1] * 9 + [0] * (41 - offset - 9)

    zeroRow = [0] * 41
    weights = ([fringeRow(21)] * 3 + [zeroRow]
               + [fringeRow(20)] * 3 + [zeroRow]
               + [fringeRow(19)] * 3 + [zeroRow]
               + [fringeRow(18)] * 3 + [zeroRow]
               + [fringeRow(17)] * 3 + [zeroRow]
               + [fringeRow(16)] + [zeroRow]
               + [fringeRow(15)] * 3 + [zeroRow]
               + [fringeRow(14)] * 3 + [zeroRow]
               + [fringeRow(13)] * 3 + [zeroRow]
               + [fringeRow(12)] * 3 + [zeroRow]
               + [fringeRow(11)] * 3)
    self.k = ee.Kernel.fixed(41, 41, weights)
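# A hedged sketch of how the defringe pieces above are typically combined (the
# actual defringe method lives elsewhere in this script): convolve the image's
# validity mask with the kernel k to count valid neighbors along the fringe
# direction, then drop pixels with too few. `env` stands in for an instance of
# the class defined above; the function name is illustrative only.
def defringeSketch(img, env):
    # Per-pixel validity: minimum of the band masks.
    m = img.mask().reduce(ee.Reducer.min())
    # Count valid neighbors within the kernel footprint.
    counts = m.reduceNeighborhood(ee.Reducer.sum(), env.k, 'kernel')
    # Keep only pixels with at least fringeCountThreshold valid neighbors.
    return img.updateMask(counts.gte(env.fringeCountThreshold))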
def aggregate_daily(image_coll, start_date, end_date, agg_type='mean'):
    """Aggregate images by day

    The primary purpose of this function is to join separate Landsat images
    from the same path into a single daily image.

    Parameters
    ----------
    image_coll : ee.ImageCollection
        Input image collection.
    start_date : date, number, string
        Start date.  Needs to be an EE readable date (i.e. ISO Date string
        or milliseconds).
    end_date : date, number, string
        End date.  Needs to be an EE readable date (i.e. ISO Date string
        or milliseconds).
    agg_type : {'mean'}, optional
        Aggregation type (the default is 'mean').

    Returns
    -------
    ee.ImageCollection

    Notes
    -----
    This function should be used to mosaic Landsat images from the same path
    but different rows.
    Aggregation is currently hardcoded to 'mean'.
    system:time_start of the returned images will be 0 UTC (not the image
    acquisition time).

    """
    # Build a collection of date "features" to join to.
    date_list = ee.List.sequence(
        ee.Date(start_date).millis(),
        ee.Date(end_date).millis(),
        # ee.Date(end_date).advance(1, 'day').millis(),
        24 * 3600 * 1000)

    def set_date(time):
        return ee.Feature(None, {
            'system:index': ee.Date(time).format('yyyy-MM-dd'),
            'system:time_start': ee.Number(time).int64(),
            'DATE': ee.Date(time).format('yyyy-MM-dd')})

    # Add a date property to each image in the collection.
    def set_image_date(img):
        return ee.Image(img.set(
            'DATE', ee.Date(img.get('system:time_start')).format('yyyy-MM-dd')))

    # Join each date feature to all images acquired on that date.
    join_coll = ee.FeatureCollection(
        ee.Join.saveAll('join').apply(
            ee.FeatureCollection(date_list.map(set_date)),
            ee.ImageCollection(image_coll.map(set_image_date)),
            ee.Filter.equals(leftField='DATE', rightField='DATE')))

    def aggregate_func(ftr):
        # The composite image time will be 0 UTC (not the Landsat overpass time).
        # Note: system_properties is expected to be defined at module level
        # (e.g. ['system:index', 'system:time_start']).
        # if agg_type.lower() == 'mean':
        return ee.Image(
            ee.ImageCollection.fromImages(ftr.get('join')).mean()
                .copyProperties(ftr, system_properties + ['DATE']))

    return ee.ImageCollection(join_coll.map(aggregate_func))
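# A minimal usage sketch for aggregate_daily. The collection ID, date range,
# and WRS_PATH value are illustrative assumptions, not from the original
# module; the WRS_PATH filter restricts the input to images from a single
# path so the daily mean mosaics same-day, adjacent-row scenes.
daily_coll = aggregate_daily(
    ee.ImageCollection('LANDSAT/LC08/C01/T1_SR')
        .filterDate('2017-06-01', '2017-09-01')
        .filter(ee.Filter.eq('WRS_PATH', 43)),
    start_date='2017-06-01',
    end_date='2017-09-01')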