# Upload processed data file to GEE as a single asset under the RW project folder.
asset_name = f'projects/resource-watch-gee/{dataset_name}'

# Name bands according to variable names in the original netcdf.
# NOTE(review): renamed from `vars` — that name shadows the builtin vars().
var_names = ['b0']
# One manifest entry per band; enumerate gives the tileset band index directly
# (avoids repeated list.index() lookups, which also misbehave on duplicates).
bands = [{
    'id': var,
    'tileset_band_index': index,
    'pyramiding_policy': pyramiding_policy,
    'missing_data': {
        'values': [nodata_value]
    }
} for index, var in enumerate(var_names)]

# create complete manifest for asset upload (first GCS URI holds the processed file)
manifest = util_cloud.gee_manifest_complete(asset_name, gcs_uris[0], bands)

# upload processed data file to GEE; returns the ingestion task id
task_id = util_cloud.gee_ingest(manifest, public=True)

# remove staged files from Google Cloud Storage now that GEE has ingested them
util_cloud.gcs_remove(gcs_uris, gcs_bucket=gcsBucket)
# use logger consistently with the rest of this block (was print())
logger.info('Files deleted from Google Cloud Storage.')

'''
Upload original data and processed data to Amazon S3 storage
'''
# initialize AWS variables
aws_bucket = 'wri-projects'
s3_prefix = 'resourcewatch/raster/'

logger.info('Uploading original data to S3.')
# Upload raw data file to S3
# Upload each processed data file from GCS to GEE as its own asset.
band_ids = ['b1']
# collected ingestion task ids, one per uploaded file
# NOTE(review): kept the name `task_id` (a list) — downstream code may reference it.
task_id = []
for uri in gcs_uris:
    # Asset/tileset name = filename minus its extension. splitext handles any
    # extension length (the original [:-4] assumed a 3-character extension).
    tileset = os.path.splitext(os.path.basename(uri))[0]
    asset_name = f'projects/resource-watch-gee/{dataset_name}/{tileset}'
    # create the band manifest for this asset; enumerate gives the band index
    # directly instead of repeated list.index() lookups
    mf_bands = [{
        'id': band_id,
        'tileset_band_index': index,
        'tileset_id': tileset,
        'pyramidingPolicy': pyramiding_policy
    } for index, band_id in enumerate(band_ids)]
    # create complete manifest for asset upload
    manifest = util_cloud.gee_manifest_complete(asset_name, uri, mf_bands)
    # upload the file from GCS to GEE and record its task id
    task = util_cloud.gee_ingest(manifest)
    # use logger consistently with the rest of this block (was print())
    logger.info('%s uploaded to GEE', asset_name)
    task_id.append(task)

# remove staged files from Google Cloud Storage now that GEE has ingested them
util_cloud.gcs_remove(gcs_uris, gcs_bucket=gcsBucket)
logger.info('Files deleted from Google Cloud Storage.')

'''
Upload original data and processed data to Amazon S3 storage
'''
# initialize AWS variables
aws_bucket = 'wri-projects'
s3_prefix = 'resourcewatch/raster/'