# Build one manifest band entry per band id, all sharing the same
# missing-data values and pyramiding policy.
# (assumes band_ids, missing_values, tileset_id, pyramiding_policy, asset_name,
# gcs_uri, task_id, gcsBucket are set earlier in this script — not visible here)
mf_bands = [{
    'id': band_id,
    # enumerate gives the position directly; band_ids.index() was O(n) per band
    # and wrong if an id repeats
    'tileset_band_index': index,
    'missing_data': {'values': missing_values},
    'tileset_id': tileset_id,
    # NOTE(review): was 'pyramidingPolicy' (camelCase); the sibling upload
    # scripts pass 'pyramiding_policy' to the same util_cloud helpers, so the
    # camelCase key was almost certainly ignored — normalized for consistency.
    # Confirm against util_cloud.gee_manifest_complete.
    'pyramiding_policy': pyramiding_policy,
} for index, band_id in enumerate(band_ids)]

# create complete manifest for asset upload
manifest = util_cloud.gee_manifest_complete(asset_name, gcs_uri[0], mf_bands)

# upload the file from Google Cloud Storage to Google Earth Engine
task = util_cloud.gee_ingest(manifest)
print(asset_name + ' uploaded to GEE')
task_id.append(task)

# remove files from Google Cloud Storage
util_cloud.gcs_remove(gcs_uri, gcs_bucket=gcsBucket)
logger.info('Files deleted from Google Cloud Storage.')

'''
Upload original data and processed data to Amazon S3 storage
'''
# initialize AWS variables
aws_bucket = 'wri-public-data'
s3_prefix = 'resourcewatch/raster/'

# Copy the raw data into a zipped file to upload to S3
# name bands according to variable names in original netcdf
# NOTE(review): renamed from `vars`, which shadowed the built-in vars();
# only referenced in the band comprehension below — confirm no later use.
band_names = ['b0']

# Build one manifest band entry per variable; enumerate replaces the
# O(n^2) list.index() lookup and is correct even with duplicate names.
# (assumes pyramiding_policy, nodata_value, asset_name, gcs_uris, gcsBucket,
# data_dir, dataset_name are set earlier in this script — not visible here)
bands = [{
    'id': var,
    'tileset_band_index': index,
    'pyramiding_policy': pyramiding_policy,
    'missing_data': {'values': [nodata_value]},
} for index, var in enumerate(band_names)]

# create manifest for asset upload
manifest = util_cloud.gee_manifest_complete(asset_name, gcs_uris[0], bands)

# upload processed data file to GEE
task_id = util_cloud.gee_ingest(manifest, public=True)

# remove files from Google Cloud Storage
util_cloud.gcs_remove(gcs_uris, gcs_bucket=gcsBucket)
print('Files deleted from Google Cloud Storage.')

'''
Upload original data and processed data to Amazon S3 storage
'''
# initialize AWS variables
aws_bucket = 'wri-projects'
s3_prefix = 'resourcewatch/raster/'

logger.info('Uploading original data to S3.')
# Upload raw data file to S3
# Copy the raw data into a zipped file to upload to S3
raw_data_dir = os.path.join(data_dir, dataset_name + '.zip')
dataset_name, image_name)

# define band dictionary for manifest upload, with the missing data, b1 as
# band name, and pyramiding policy as mean
# (keys are image names; values are per-band manifest options consumed by
# util_cloud.gee_manifest_bands — presumably; helper not visible here)
upload_data_dict = OrderedDict()
upload_data_dict[image_name] = {
    # NOTE(review): here missing_data is a bare list, unlike the sibling
    # scripts' {'values': [...]} shape — gee_manifest_bands presumably wraps
    # it; confirm against that helper
    'missing_data': [
        data_dict.get(data_file),
    ],
    'pyramiding_policy': 'MEAN',
    'band_ids': ['b1']
}

# upload to google earth engine:
# build band entries, assemble the full manifest, then start the ingest task
mf_bands = util_cloud.gee_manifest_bands(upload_data_dict, dataset_name=image_name)
manifest = util_cloud.gee_manifest_complete(asset_name, gcs_uris[0], mf_bands)
task_id = util_cloud.gee_ingest(manifest, public=False)

# remove from google cloud storage now that GEE has ingested the file
util_cloud.gcs_remove(gcs_uris, gcs_bucket=gcsBucket)

'''
Upload original data and processed data to Amazon S3 storage
'''
# initialize AWS variables
aws_bucket = 'wri-projects'
s3_prefix = 'resourcewatch/raster/'

logger.info('Uploading original data to S3.')
# Upload raw data file to S3
# Copy the raw data into a zipped file to upload to S3
uploaded = util_cloud.aws_upload(raw_data_file, aws_bucket, s3_prefix + os.path.basename(raw_data_file))