gcs_bucket = gcs_client.bucket(os.environ.get("GEE_STAGING_BUCKET")) # initialize ee (Google Earth Engine Python API) for uploading to GEE auth = ee.ServiceAccountCredentials( os.getenv('GEE_SERVICE_ACCOUNT'), os.getenv('GOOGLE_APPLICATION_CREDENTIALS')) ee.Initialize(auth) logger.info('Uploading processed data to Google Cloud Storage.') gcs_uris = util_cloud.gcs_upload(processed_data_file, dataset_name, gcs_bucket=gcs_bucket) logger.info('Uploading processed data to Google Earth Engine.') # generate bands component of GEE upload manifest mf_bands = util_cloud.gee_manifest_bands(data_dict, dataset_name) # upload processed data file to GEE asset_name = f'projects/resource-watch-gee/{dataset_name}' manifest = util_cloud.gee_manifest_complete(asset_name, gcs_uris[0], mf_bands) logger.debug(manifest) task_id = util_cloud.gee_ingest(manifest, public=True) util_cloud.gcs_remove(gcs_uris, gcs_bucket=gcs_bucket) logger.info('Files deleted from Google Cloud Storage.') ''' Upload original data and processed data to Amazon S3 storage ''' # amazon storage info aws_bucket = 'wri-projects' s3_prefix = 'resourcewatch/raster/'
# NOTE(review): the line below is the tail of a util_cloud.gcs_upload(...) call
# whose opening is outside this chunk — left untouched. Presumably it assigns
# gcs_uris (used below); verify against the preceding lines of the file.
image_name, gcs_bucket=gcsBucket)

# define asset name for this image under the resource-watch-gee project
asset_name = 'projects/resource-watch-gee/{}/{}'.format(
    dataset_name, image_name)

# define band dictionary for manifest upload, with the missing data, b1 as
# band name, and pyramiding policy as mean
upload_data_dict = OrderedDict()
upload_data_dict[image_name] = {
    # missing_data value is looked up per source file; may be None if
    # data_file is not a key of data_dict — TODO confirm
    'missing_data': [
        data_dict.get(data_file),
    ],
    'pyramiding_policy': 'MEAN',
    'band_ids': ['b1']
}

# upload to google earth engine: build band manifest, complete it with the
# staged GCS URI, then start ingestion (public=False keeps the asset private)
mf_bands = util_cloud.gee_manifest_bands(upload_data_dict, dataset_name=image_name)
manifest = util_cloud.gee_manifest_complete(asset_name, gcs_uris[0], mf_bands)
task_id = util_cloud.gee_ingest(manifest, public=False)

# remove from google cloud storage (staging copies no longer needed)
util_cloud.gcs_remove(gcs_uris, gcs_bucket=gcsBucket)

'''
Upload original data and processed data to Amazon S3 storage
'''
# initialize AWS variables
aws_bucket = 'wri-projects'
s3_prefix = 'resourcewatch/raster/'

logger.info('Uploading original data to S3.')
# Upload raw data file to S3