def export_special(roi, description):
    """Export the all-years sum of irrigated classifications to Google Drive.

    Remaps IrrMapper class 0 (irrigated) to 1 and all other classes to 0,
    sums the result, and exports the summed image over the bounds of *roi*.

    :param roi: Earth Engine asset path of a FeatureCollection defining the
        export region.
    :param description: export name append str (currently unused; the Drive
        task name is fixed to 'IrrMapper_V2_sum_years').
    :return: None; starts an asynchronous EE export task.
    """
    fc = ee.FeatureCollection(roi)
    # Client-side coordinates of the ROI bounding box, used as the export region.
    roi_mask = fc.geometry().bounds().getInfo()['coordinates']
    image_list = list_assets('users/dgketchum/IrrMapper/version_2')

    coll = ee.ImageCollection(image_list)
    # Remap: class 0 (irrigated) -> 1, classes 1-3 -> 0, then sum across the
    # collection. Renamed from `sum` to avoid shadowing the builtin.
    # NOTE(review): the original also built `sum.mask(sum.lt(3))` into an
    # unused variable and exported the UNMASKED sum; the dead masking code
    # was removed here without changing what gets exported.
    total = ee.ImageCollection(
        coll.mosaic().select('classification').remap(
            [0, 1, 2, 3], [1, 0, 0, 0])).sum().toDouble()

    task = ee.batch.Export.image.toDrive(
        total,
        description='IrrMapper_V2_sum_years',
        region=roi_mask,
        scale=30,
        maxPixels=1e13)
    task.start()
def first_detection():
    # this doesn't work, but it works in Code Editor
    #
    # Intent (as far as the code shows): for each target state, build one
    # band per year (1986-2016) where irrigated pixels (class 0) carry the
    # year value and everything else is 0, then reduce across bands with
    # minMax to get the first/last year a pixel was classified irrigated,
    # and export the result to an EE asset.
    # NOTE(review): `.rename('min', 'max')` after `reduce(ee.Reducer.minMax())`
    # assumes the reducer yields exactly two bands in that order — unverified
    # here, and per the author's comment this path fails outside the Code
    # Editor. The trailing `break` limits the run to the first state only.
    for state in TARGET_STATES:
        bounds = os.path.join(BOUNDARIES, state)
        roi = ee.FeatureCollection(bounds)
        # Client-side ROI bounding-box coordinates for the export region.
        mask = roi.geometry().bounds().getInfo()['coordinates']
        image_list = list_assets('users/dgketchum/classy')
        out_images = []
        for yr in range(1986, 2017):
            # Assets for this year are identified by a '<year>' suffix.
            yr_img = [x for x in image_list if x.endswith(str(yr))]
            coll = ee.ImageCollection(yr_img)
            # Irrigated (class 0) -> the year value; all other classes -> 0.
            classed = coll.mosaic().select('classification').remap(
                [0, 1, 2, 3], [yr, 0, 0, 0]).rename('{}_min'.format(yr))
            out_images.append(classed)
        coll = ee.ImageCollection(out_images)
        img = coll.reduce(ee.Reducer.minMax()).rename('min', 'max')
        # Debug: dump the server-side image metadata before exporting.
        pprint(img.getInfo())
        task = ee.batch.Export.image.toAsset(image=img,
                                             description='{}'.format(state),
                                             assetId=os.path.join(
                                                 ASSET_ROOT, '{}'.format(state)),
                                             fileNamePrefix='{}'.format(state),
                                             region=mask,
                                             scale=30,
                                             maxPixels=1e10)
        print(state)
        task.start()
        # Deliberate early exit: only the first state is processed.
        break
def export_raster():
    """Export one annual IrrMapper RF classification image per year to EE assets.

    For each year 1987-2021, mosaics that year's version-2 assets, remaps
    class 0 (irrigated) to 1 and all other classes to 0, masks out zeros,
    and exports the single-band 'classification' image as an asset.

    :return: None; starts one asynchronous EE export task per year.
    """
    target_bn = 'projects/ee-dgketchum/assets/IrrMapper/IrrMapper_RF'
    image_list = list_assets('users/dgketchum/IrrMapper/version_2')
    for yr in range(1987, 2022):
        images = [x for x in image_list if x.endswith(str(yr))]
        coll = ee.ImageCollection(images)
        _properties = {
            'image_id': 'IrrMapper_RF_{}'.format(yr),
            'system:time_start': ee.Date.fromYMD(yr, 1, 1),
            'system:time_end': ee.Date.fromYMD(yr, 12, 31)}
        # FIX: remap() is an ee.Image method — mosaic the collection first,
        # matching the coll.mosaic().select().remap() pattern used by the
        # other export functions in this module. The original called remap()
        # directly on the ImageCollection, which fails at compute time.
        img = coll.mosaic().select('classification').remap(
            [0, 1, 2, 3], [1, 0, 0, 0])
        # Keep only irrigated pixels (value 1); zeros become masked/no-data.
        img = img.updateMask(
            img.neq(0)).rename('classification').set(_properties)
        id_ = os.path.join(target_bn, '{}'.format(yr))
        task = ee.batch.Export.image.toAsset(
            image=img,
            description='IrrMapper_RF_{}'.format(yr),
            assetId=id_,
            pyramidingPolicy={'.default': 'mode'},
            scale=30,
            maxPixels=1e13)
        task.start()
        print(yr)
def reduce_classification(tables, years=None, description=None, cdl_mask=False, min_years=0):
    """
    Reduce Regions, i.e. zonal stats: takes a statistic from a raster within the bounds of a vector.
    Use this to get e.g. irrigated area within a county, HUC, or state. This can mask based on
    Crop Data Layer, and can mask data where the sum of irrigated years is less than min_years.
    This will output a .csv to GCS wudr bucket.
    :param tables: vector data over which to take raster statistics
    :param years: years over which to run the stats
    :param description: export name append str
    :param cdl_mask: if True, mask to CDL 2013 cultivated pixels
    :param min_years: mask out pixels irrigated in fewer than this many years
    :return: None; starts one asynchronous EE export task per year
    """
    sum_mask = None
    image_list = list_assets('users/dgketchum/IrrMapper/version_2')
    fc = ee.FeatureCollection(tables)

    if min_years > 0:
        coll = ee.ImageCollection(image_list)
        # Irrigated-year count per pixel (class 0 -> 1, others -> 0, summed).
        # Renamed from `sum` to avoid shadowing the builtin.
        irr_sum = ee.ImageCollection(
            coll.mosaic().select('classification').remap(
                [0, 1, 2, 3], [1, 0, 0, 0])).sum()
        sum_mask = irr_sum.lt(min_years)

    for yr in years:
        # Skip census years without classifications.
        if yr not in [2002, 2007, 2012]:
            yr_img = [x for x in image_list if x.endswith(str(yr))]
            coll = ee.ImageCollection(yr_img)
            tot = coll.mosaic().select('classification').remap(
                [0, 1, 2, 3], [1, 0, 0, 0])

            if cdl_mask and min_years > 0:
                # cultivated/uncultivated band only available 2013 to 2017
                cdl = ee.Image('USDA/NASS/CDL/2013')
                cultivated = cdl.select('cultivated')
                cdl_crop_mask = cultivated.eq(2)
                tot = tot.mask(cdl_crop_mask).mask(sum_mask)
            elif min_years > 0:
                tot = tot.mask(sum_mask)
            elif cdl_mask:
                cdl = ee.Image('USDA/NASS/CDL/2013')
                cultivated = cdl.select('cultivated')
                cdl_crop_mask = cultivated.eq(2)
                tot = tot.mask(cdl_crop_mask)

            # Convert the 0/1 classification to irrigated area (m^2) per pixel.
            tot = tot.multiply(ee.Image.pixelArea())
            reduce = tot.reduceRegions(collection=fc,
                                       reducer=ee.Reducer.sum(),
                                       scale=30)
            task = ee.batch.Export.table.toCloudStorage(
                reduce,
                description='{}_area_{}_'.format(description, yr),
                bucket='wudr',
                fileNamePrefix='{}_area_{}_'.format(description, yr),
                fileFormat='CSV')
            task.start()
            print(yr)
def export_raster(roi, description):
    """Export one annual IrrMapper classification mosaic per year to Drive.

    NOTE(review): this redefines `export_raster` with a different signature
    than the earlier asset-export version in this module; at import time the
    later definition wins — consider renaming one of them.

    :param roi: Earth Engine asset path of a FeatureCollection defining the
        export region.
    :param description: export name append str, embedded in the file name.
    :return: None; starts one asynchronous EE export task per year.
    """
    fc = ee.FeatureCollection(roi)
    # Client-side ROI bounding-box coordinates for the export region.
    mask = fc.geometry().bounds().getInfo()['coordinates']
    image_list = list_assets('users/dgketchum/IrrMapper/version_2')
    for yr in range(1986, 2019):
        yr_img = [x for x in image_list if x.endswith(str(yr))]
        coll = ee.ImageCollection(yr_img)
        # mosaic() already yields a single ee.Image; the original wrapped it
        # in an ImageCollection only to call .first() — removed the no-op.
        img = coll.mosaic().select('classification')
        task = ee.batch.Export.image.toDrive(
            img,
            description='IrrMapper_V2_{}_{}'.format(description, yr),
            folder='Irrigation',
            region=mask,
            scale=30,
            maxPixels=1e13,
            fileNamePrefix='IrrMapper_V2_{}_{}'.format(description, yr))
        task.start()
        print(yr)
def request_validation_extract(file_prefix='validation'):
    """Sample the annual classification rasters at independent validation points.

    Rather than holding data out of the Random Forest training, the full
    training set is used to train the classifier, and this function later
    samples the classified rasters at a separate set of points with known
    classes to independently measure accuracy. (Out-of-bag validation or a
    scikit-learn k-folds setup would be alternative approaches.) One CSV per
    year is exported to the GCS 'wudr' bucket.

    :param file_prefix: prefix for the exported CSV file names
    :return: None; starts one asynchronous EE export task per year
    """
    region = ee.FeatureCollection(ROI)
    points = ee.FeatureCollection(VALIDATION_POINTS).filterBounds(region)
    assets = list_assets('users/dgketchum/IrrMapper/version_2')

    epoch = datetime.utcfromtimestamp(0)
    for yr in YEARS:
        annual = ee.ImageCollection(
            [a for a in assets if a.endswith(str(yr))])
        classified = annual.mosaic().select('classification')

        # The points carry their year as epoch milliseconds in the 'YEAR'
        # property; build the matching value for Jan 1 of this year.
        jan_first = datetime.strptime('{}-01-01'.format(yr), '%Y-%m-%d')
        millis = (jan_first - epoch).total_seconds() * 1000
        yr_points = points.filter(ee.Filter.eq('YEAR', ee.Number(millis)))

        sampled = classified.sampleRegions(
            collection=yr_points,
            properties=['POINT_TYPE', 'YEAR'],
            scale=30)

        name = '{}_{}'.format(file_prefix, yr)
        task = ee.batch.Export.table.toCloudStorage(
            sampled,
            description=name,
            bucket='wudr',
            fileNamePrefix=name,
            fileFormat='CSV')
        task.start()
        print(yr)
def get_ndvi_stats(tables, years, out_name):
    """Export per-feature mean NDVI plus annual irrigation bands as KML.

    Builds an NDVI series image over the features, appends one 'irr_<year>'
    band per year (IrrMapper class 0 remapped to 1, all other classes to 0),
    reduces the stack by mean over each feature, and exports the table to
    the GCS 'wudr' bucket in KML format.

    :param tables: asset path of the FeatureCollection to reduce over
    :param years: iterable of years to append irrigation bands for
    :param out_name: export description and file name prefix
    :return: None; starts an asynchronous EE export task
    """
    features = ee.FeatureCollection(tables)
    stack = get_ndvi_series(years, features)
    assets = list_assets('users/dgketchum/IrrMapper/version_2')

    for year in years:
        start, end = '{}-01-01'.format(year), '{}-12-31'.format(year)
        annual = ee.ImageCollection(assets).filterDate(start, end)
        irrigated = annual.mosaic().select('classification').remap(
            [0, 1, 2, 3], [1, 0, 0, 0])
        stack = stack.addBands(irrigated.rename('irr_{}'.format(year)))

    means = stack.reduceRegions(collection=features,
                                reducer=ee.Reducer.mean(),
                                scale=30)

    task = ee.batch.Export.table.toCloudStorage(
        means,
        description='{}'.format(out_name),
        bucket='wudr',
        fileNamePrefix='{}'.format(out_name),
        fileFormat='KML')
    task.start()