Example no. 1
def upload(user, source_path, destination_path, metadata_path=None, nodata_value=None, bucket_name=None, band_names=None):
    """
    Uploads content of a given directory to GEE. The function first uploads an asset to Google Cloud Storage (GCS)
    and then uses ee.data.startIngestion to put it into GEE, Due to GCS intermediate step, users is asked for
    Google's account name and password.

    In case any exception happens during the upload, the function will repeat the call a given number of times, after
    which the error will be propagated further.

    :param user: name of a Google account
    :param source_path: path to a directory
    :param destination_path: where to upload (absolute path)
    :param metadata_path: (optional) path to file with metadata
    :param nodata_value: (optinal) value to burn into raster for missind data in the image
    :return:
    """
    submitted_tasks_id = {}

    __verify_path_for_upload(destination_path)

    path = os.path.join(os.path.expanduser(source_path), '*.tif')
    all_images_paths = glob.glob(path)

    if len(all_images_paths) == 0:
        print(str(path)+' does not contain any tif images.')
        sys.exit(1)

    metadata = load_metadata_from_csv(metadata_path) if metadata_path else None

    if user is not None:
        password = getpass.getpass()
        google_session = __get_google_auth_session(user, password)
    else:
        storage_client = storage.Client()

    __create_image_collection(destination_path)

    images_for_upload_path = __find_remaining_assets_for_upload(all_images_paths, destination_path)
    no_images = len(images_for_upload_path)

    if no_images == 0:
        print('No images found that match '+str(path)+'. Exiting...')
        sys.exit(1)

    failed_asset_writer = FailedAssetsWriter()

    for current_image_no, image_path in enumerate(images_for_upload_path):
        print('Processing image '+str(current_image_no+1)+' out of '+str(no_images)+': '+str(image_path))
        filename = __get_filename_from_path(path=image_path)

        asset_full_path = destination_path + '/' + filename

        if metadata and filename not in metadata:
            print("No metadata exists for image "+str(filename)+": it will not be ingested")
            failed_asset_writer.writerow([filename, 0, 'Missing metadata'])
            continue

        properties = metadata[filename] if metadata else None

        try:
            if user is not None:
                gsid = __upload_file_gee(session=google_session,
                                         file_path=image_path)
            else:
                gsid = __upload_file_gcs(storage_client, bucket_name, image_path)

            asset_request = __create_asset_request(asset_full_path, gsid, properties, nodata_value, band_names or [])

            task_id = __start_ingestion_task(asset_request)
            submitted_tasks_id[task_id] = filename
            __periodic_check(current_image=current_image_no, period=20, tasks=submitted_tasks_id, writer=failed_asset_writer)
        except Exception as e:
            print('Upload of '+str(filename)+' has failed.')
            failed_asset_writer.writerow([filename, 0, str(e)])

    __check_for_failed_tasks_and_report(tasks=submitted_tasks_id, writer=failed_asset_writer)
    failed_asset_writer.close()
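
A minimal usage sketch for the variant above; the account, paths, bucket name and band names below are hypothetical placeholders, not values taken from the source:

# Hypothetical invocation of the upload() variant above; account, paths,
# bucket and band names are placeholders, not values from the source.
if __name__ == '__main__':
    upload(user='someone@gmail.com',
           source_path='~/imagery/tiles',
           destination_path='users/someone/my_collection',
           metadata_path='~/imagery/metadata.csv',
           nodata_value=0,
           band_names=['B1', 'B2', 'B3'])

    # Service-account / GCS route instead of an interactive Google login:
    # upload(user=None, source_path='~/imagery/tiles',
    #        destination_path='users/someone/my_collection',
    #        bucket_name='my-staging-bucket')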
Example no. 2
def upload(user, source_path, destination_path, metadata_path=None, nodata_value=None, bucket_name=None):
    submitted_tasks_id = {}

    __verify_path_for_upload(destination_path)

    path = os.path.join(os.path.expanduser(source_path), '*.tif')
    all_images_paths = glob.glob(path)
    if len(all_images_paths) == 0:
        print('%s does not contain any tif images.' % path)
        sys.exit(1)

    metadata = load_metadata_from_csv(metadata_path) if metadata_path else None

    if user is not None:
        password = getpass.getpass()
        google_session = __get_google_auth_session(user, password)
    else:
        storage_client = storage.Client()

    __create_image_collection(destination_path)

    images_for_upload_path = __find_remaining_assets_for_upload(all_images_paths, destination_path)
    no_images = len(images_for_upload_path)

    if no_images == 0:
        print('No images found that match %s. Exiting...' % path)
        sys.exit(1)

    failed_asset_writer = FailedAssetsWriter()

    for current_image_no, image_path in enumerate(images_for_upload_path):
        print('Processing image '+str(current_image_no+1)+' out of '+str(no_images)+': '+str(image_path))
        filename = __get_filename_from_path(path=image_path)

        asset_full_path = destination_path + '/' + filename

        if metadata and filename not in metadata:
            print("No metadata exists for image "+str(filename)+": it will not be ingested")
            failed_asset_writer.writerow([filename, 0, 'Missing metadata'])
            continue

        properties = metadata[filename] if metadata else None
        try:
            if user is not None:
                gsid = __upload_file_gee(session=google_session,
                                         file_path=image_path)
            else:
                gsid = __upload_file_gcs(storage_client, bucket_name, image_path)

            # Determine column types from the metadata CSV so each property
            # can be cast to the matching type before ingestion.
            df = pd.read_csv(metadata_path)
            all_string_cols = (df.applymap(type) == str).all(0)
            slist = [ind for ind, val in all_string_cols.items() if val]
            intcol = list(df.select_dtypes(include=['int64']).columns)
            floatcol = list(df.select_dtypes(include=['float64']).columns)
            with open(metadata_path, 'r') as f:
                reader = csv.DictReader(f, delimiter=",")
                for i, line in enumerate(reader):
                    if line["id_no"] == os.path.basename(image_path).split('.')[0]:
                        j = {}
                        for integer in intcol:
                            j[integer] = int(line[integer])
                        for s in slist:
                            j[s] = str(line[s])
                        for flt in floatcol:
                            j[flt] = float(line[flt])
                        main_payload = {"id": asset_full_path,
                                        "tilesets": [{"sources": [{"primaryPath": gsid, "additionalPaths": []}]}],
                                        "properties": j,
                                        "missingData": {"value": nodata_value}}
                        # 'lp' is assumed to be a module-level local directory path defined elsewhere.
                        with open(os.path.join(lp, 'data.json'), 'w') as outfile:
                            json.dump(main_payload, outfile)
                        subprocess.call("earthengine upload image --manifest " + '"' + os.path.join(lp, 'data.json') + '"',
                                        shell=True)
        except Exception as e:
            print(e)
            print('Upload of '+str(filename)+' has failed.')
            failed_asset_writer.writerow([filename, 0, str(e)])

    __check_for_failed_tasks_and_report(tasks=submitted_tasks_id, writer=failed_asset_writer)
    failed_asset_writer.close()
Example no. 3
def upload(user,
           source_path,
           destination_path,
           metadata_path=None,
           multipart_upload=False,
           nodata_value=None):
    """
    Uploads content of a given directory to GEE. The function first uploads an asset to Google Cloud Storage (GCS)
    and then uses ee.data.startIngestion to put it into GEE, Due to GCS intermediate step, users is asked for
    Google's account name and password.

    In case any exception happens during the upload, the function will repeat the call a given number of times, after
    which the error will be propagated further.

    :param user: name of a Google account
    :param source_path: path to a directory
    :param destination_path: where to upload (absolute path)
    :param metadata_path: (optional) path to file with metadata
    :param multipart_upload: (optional) alternative mode op upload - use if the other one fails
    :param nodata_value: (optinal) value to burn into raster for missind data in the image
    :return:
    """
    submitted_tasks_id = {}

    __verify_path_for_upload(destination_path)

    path = os.path.join(os.path.expanduser(source_path), '*.tif')
    all_images_paths = glob.glob(path)

    if len(all_images_paths) == 0:
        logging.error('%s does not contain any tif images.', path)
        sys.exit(1)

    metadata = load_metadata_from_csv(metadata_path) if metadata_path else None

    password = getpass.getpass()
    google_session = __get_google_auth_session(user, password)

    __create_image_collection(destination_path)

    images_for_upload_path = __find_remaining_assets_for_upload(
        all_images_paths, destination_path)
    no_images = len(images_for_upload_path)

    if no_images == 0:
        logging.error('No images found that match %s. Exiting...', path)
        sys.exit(1)

    failed_asset_writer = FailedAssetsWriter()

    for current_image_no, image_path in enumerate(images_for_upload_path):
        logging.info('Processing image %d out of %d: %s', current_image_no + 1,
                     no_images, image_path)
        filename = __get_filename_from_path(path=image_path)

        asset_full_path = destination_path + '/' + filename

        if metadata and filename not in metadata:
            logging.warning(
                "No metadata exists for image %s: it will not be ingested",
                filename)
            failed_asset_writer.writerow([filename, 0, 'Missing metadata'])
            continue

        properties = metadata[filename] if metadata else None

        try:
            task_id = __upload_to_gcs_and_start_ingestion_task(
                asset_full_path, google_session, image_path, properties,
                multipart_upload, nodata_value)
            submitted_tasks_id[task_id] = filename
            __periodic_check(current_image=current_image_no,
                             period=20,
                             tasks=submitted_tasks_id,
                             writer=failed_asset_writer)
        except Exception as e:
            logging.exception('Upload of %s has failed.', filename)
            failed_asset_writer.writerow([filename, 0, str(e)])

    __check_for_failed_tasks_and_report(tasks=submitted_tasks_id,
                                        writer=failed_asset_writer)
    failed_asset_writer.close()
Example no. 4
def upload(user,
           source_path,
           destination_path,
           manifest=None,
           metadata_path=None,
           multipart_upload=False,
           nodata_value=None,
           bucket_name=None):
    """
    Uploads content of a given directory to GEE. The function first uploads an asset to Google Cloud Storage (GCS)
    and then uses ee.data.startIngestion to put it into GEE, Due to GCS intermediate step, users is asked for
    Google's account name and password.

    In case any exception happens during the upload, the function will repeat the call a given number of times, after
    which the error will be propagated further.

    :param user: name of a Google account
    :param source_path: path to a directory
    :param destination_path: where to upload (absolute path)
    :param metadata_path: (optional) path to file with metadata
    :param multipart_upload: (optional) alternative mode op upload - use if the other one fails
    :param nodata_value: (optinal) value to burn into raster for missind data in the image
    :return:
    """
    submitted_tasks_id = {}

    __verify_path_for_upload(destination_path)

    path = os.path.join(os.path.expanduser(source_path), '*.tif')
    all_images_paths = glob.glob(path)
    if len(all_images_paths) == 0:
        logging.error('%s does not contain any tif images.', path)
        sys.exit(1)

    metadata = load_metadata_from_csv(metadata_path) if metadata_path else None

    if user is not None:
        password = getpass.getpass()
        google_session = __get_google_auth_session(user, password)
    else:
        storage_client = storage.Client()

    __create_image_collection(destination_path)

    images_for_upload_path = __find_remaining_assets_for_upload(
        all_images_paths, destination_path)
    no_images = len(images_for_upload_path)

    if no_images == 0:
        logging.error('No images found that match %s. Exiting...', path)
        sys.exit(1)

    failed_asset_writer = FailedAssetsWriter()

    for current_image_no, image_path in enumerate(images_for_upload_path):
        logging.info('Processing image %d out of %d: %s', current_image_no + 1,
                     no_images, image_path)
        filename = __get_filename_from_path(path=image_path)

        asset_full_path = destination_path + '/' + filename

        if metadata and filename not in metadata:
            logging.warning(
                "No metadata exists for image %s: it will not be ingested",
                filename)
            failed_asset_writer.writerow([filename, 0, 'Missing metadata'])
            continue

        properties = metadata[filename] if metadata else None
        try:
            if user is not None:
                gsid = __upload_file_gee(session=google_session,
                                         file_path=image_path,
                                         use_multipart=multipart_upload)
            else:
                gsid = __upload_file_gcs(storage_client, bucket_name,
                                         image_path)

            asset_request = __create_asset_request(asset_full_path, gsid,
                                                   properties, nodata_value)

            if manifest == "PSO":
                with open(metadata_path, 'r') as myfile:
                    head = myfile.readlines()[0:1]
                    delim = str(head).split(',')
                    headlist = list(delim)
                with open(metadata_path, 'r') as f:
                    reader = csv.DictReader(f, delimiter=",")
                    for i, line in enumerate(reader):
                        absolute = (
                            "earthengine upload image " + "--asset_id=" +
                            destination_path + '/' + filename + ' ' + ' -p ' +
                            '"' + "(string)" + "id_no" + '=' + filename + '"' +
                            ' -p ' + '"' + "(number)" + headlist[1] + '=' +
                            line['system:time_start'] + '"' + ' -p ' + '"' +
                            "(string)" + headlist[2] + '=' + line['platform'] +
                            '"' + ' -p ' + '"' + "(string)" + headlist[3] +
                            '=' + line['satType'] + '"' + ' -p ' + '"' +
                            "(string)" + headlist[4] + '=' + line['satID'] +
                            '"' + ' -p ' + '"' + "(number)" + headlist[5] +
                            '=' + line['tileID'] + '"' + ' -p ' + '"' +
                            "(number)" + headlist[6] + '=' + line['numBands'] +
                            '"' + ' -p ' + '"' + "(number)" + headlist[7] +
                            '=' + line['cloudcover'] + '"' + ' -p ' + '"' +
                            "(number)" + headlist[8] + '=' + line['incAngle'] +
                            '"' + ' -p ' + '"' + "(number)" + headlist[9] +
                            '=' + line['illAzAngle'] + '"' + ' -p ' + '"' +
                            "(number)" + headlist[10] + '=' +
                            line['illElvAngle'] + '"' + ' -p ' + '"' +
                            "(number)" + headlist[11] + '=' + line['azAngle'] +
                            '"' + ' -p ' + '"' + "(number)" + headlist[12] +
                            '=' + line['spcAngle'] + '"' + ' -p ' + '"' +
                            "(number)" + headlist[13] + '=' + line['rsf'] +
                            '"' + ' -p ' + '"' + "(number)" + headlist[14] +
                            '=' + line['refCoeffB1'] + '"' + ' -p ' + '"' +
                            "(number)" + headlist[15] + '=' +
                            line['refCoeffB2'] + '"' + ' -p ' + '"' +
                            "(number)" + headlist[16] + '=' +
                            line['refCoeffB3'] + '"' + ' -p ' + '"' +
                            "(number)" + "refCoeffB4" + '=' +
                            line['refCoeffB4'] + '"' + ' --nodata_value=0')
                    b = absolute + ' ' + gsid
                    # Executes the command-line call to start the ingestion
                    # process; shell=True because the command is built as a
                    # single shell string with quoted -p flags.
                    print(subprocess.check_output(b, shell=True))
            elif manifest == "PS4B_SR":
                with open(metadata_path, 'r') as myfile:
                    head = myfile.readlines()[0:1]
                    delim = str(head).split(',')
                    headlist = list(delim)
                with open(metadata_path, 'r') as f:
                    reader = csv.DictReader(f, delimiter=",")
                    for i, line in enumerate(reader):
                        absolute = (
                            "earthengine upload image " + "--asset_id=" +
                            destination_path + '/' + filename + ' ' + ' -p ' +
                            '"' + "(string)" + "id_no" + '=' + filename + '"' +
                            ' -p ' + '"' + "(string)" + headlist[1] + '=' +
                            line['platform'] + '"' + ' -p ' + '"' +
                            "(string)" + headlist[2] + '=' + line['satType'] +
                            '"' + ' -p ' + '"' + "(string)" + headlist[3] +
                            '=' + line['satID'] + '"' + ' -p ' + '"' +
                            "(number)" + headlist[4] + '=' + line['numBands'] +
                            '"' + ' -p ' + '"' + "(number)" + headlist[5] +
                            '=' + line['cloudcover'] + '"' + ' -p ' + '"' +
                            "(number)" + headlist[6] + '=' +
                            line['system:time_start'] + '"' + ' -p ' + '"' +
                            "(string)" + headlist[7] + '=' + line['AtmModel'] +
                            '"' + ' -p ' + '"' + "(string)" + headlist[8] +
                            '=' + line['Aerosol_Model'] + '"' + ' -p ' + '"' +
                            "(string)" + headlist[9] + '=' +
                            line['AOT_Method'] + '"' + ' -p ' + '"' +
                            "(number)" + headlist[10] + '=' + line['AOT_Std'] +
                            '"' + ' -p ' + '"' + "(number)" + headlist[11] +
                            '=' + line['AOT_Used'] + '"' + ' -p ' + '"' +
                            "(string)" + headlist[12] + '=' +
                            line['AOT_Status'] + '"' + ' -p ' + '"' +
                            "(number)" + headlist[13] + '=' +
                            line['AOT_MeanQual'] + '"' + ' -p ' + '"' +
                            "(number)" + headlist[14] + '=' +
                            line['LUTS_Version'] + '"' + ' -p ' + '"' +
                            "(number)" + headlist[15] + '=' +
                            line['SolarZenAngle'] + '"' + ' -p ' + '"' +
                            "(number)" + headlist[16] + '=' +
                            line['AOT_Coverage'] + '"' + ' -p ' + '"' +
                            "(string)" + headlist[17] + '=' +
                            line['AOT_Source'] + '"' + ' -p ' + '"' +
                            "(string)" + headlist[18] + '=' +
                            line['AtmCorr_Alg'] + '"' + ' -p ' + '"' +
                            "(number)" + headlist[19] + '=' +
                            line['incAngle'] + '"' + ' -p ' + '"' +
                            "(number)" + headlist[20] + '=' +
                            line['illAzAngle'] + '"' + ' -p ' + '"' +
                            "(number)" + headlist[21] + '=' +
                            line['illElvAngle'] + '"' + ' -p ' + '"' +
                            "(number)" + headlist[22] + '=' + line['azAngle'] +
                            '"' + ' -p ' + '"' + "(number)" + 'spcAngle' +
                            '=' + line['spcAngle'] + '"' + ' --nodata_value=0')
                    b = absolute + ' ' + gsid
                    # Executes the command-line call to start the ingestion
                    # process; shell=True because the command is built as a
                    # single shell string with quoted -p flags.
                    print(subprocess.check_output(b, shell=True))
            else:
                task_id = __start_ingestion_task(asset_request)
                submitted_tasks_id[task_id] = filename
                __periodic_check(current_image=current_image_no,
                                 period=20,
                                 tasks=submitted_tasks_id,
                                 writer=failed_asset_writer)
        except Exception as e:
            logging.exception('Upload of %s has failed.', filename)
            failed_asset_writer.writerow([filename, 0, str(e)])

    __check_for_failed_tasks_and_report(tasks=submitted_tasks_id,
                                        writer=failed_asset_writer)
    failed_asset_writer.close()
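
The two manifest branches above build the earthengine upload image command by concatenating one -p "(type)name=value" flag per metadata column. A more maintainable equivalent is sketched below under the assumption that the column-to-type mapping is known up front; the helper name and the example columns are illustrative and not part of the original code:

# Illustrative helper, not in the original module: builds the -p property
# flags that the PSO/PS4B_SR branches above assemble by hand.
def _build_property_flags(line, typed_columns):
    # typed_columns: list of (column_name, 'string' | 'number') pairs,
    # assumed to match the metadata CSV header.
    flags = []
    for column, kind in typed_columns:
        flags.append('-p "({0}){1}={2}"'.format(kind, column, line[column]))
    return ' '.join(flags)

# Hypothetical usage inside the CSV loop (columns are placeholders):
# cmd = ('earthengine upload image --asset_id=' + asset_full_path + ' '
#        + _build_property_flags(line, [('platform', 'string'),
#                                       ('cloudcover', 'number')])
#        + ' --nodata_value=0 ' + gsid)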
Example no. 5
def upload(user,
           source_path,
           destination_path,
           manifest=None,
           metadata_path=None,
           multipart_upload=False,
           nodata_value=None,
           bucket_name=None):
    """
    Uploads content of a given directory to GEE. The function first uploads an asset to Google Cloud Storage (GCS)
    and then uses ee.data.startIngestion to put it into GEE, Due to GCS intermediate step, users is asked for
    Google's account name and password.

    In case any exception happens during the upload, the function will repeat the call a given number of times, after
    which the error will be propagated further.

    :param user: name of a Google account
    :param source_path: path to a directory
    :param destination_path: where to upload (absolute path)
    :param metadata_path: (optional) path to file with metadata
    :param multipart_upload: (optional) alternative mode op upload - use if the other one fails
    :param nodata_value: (optinal) value to burn into raster for missind data in the image
    :return:
    """
    submitted_tasks_id = {}

    __verify_path_for_upload(destination_path)

    path = os.path.join(os.path.expanduser(source_path), '*.tif')
    all_images_paths = glob.glob(path)
    if len(all_images_paths) == 0:
        logging.error('%s does not contain any tif images.', path)
        sys.exit(1)

    metadata = load_metadata_from_csv(metadata_path) if metadata_path else None

    if user is not None:
        password = getpass.getpass()
        google_session = __get_google_auth_session(user, password)
    else:
        storage_client = storage.Client()

    __create_image_collection(destination_path)

    images_for_upload_path = __find_remaining_assets_for_upload(
        all_images_paths, destination_path)
    no_images = len(images_for_upload_path)

    if no_images == 0:
        logging.error('No images found that match %s. Exiting...', path)
        sys.exit(1)

    failed_asset_writer = FailedAssetsWriter()

    for current_image_no, image_path in enumerate(images_for_upload_path):
        logging.info('Processing image %d out of %d: %s', current_image_no + 1,
                     no_images, image_path)
        filename = __get_filename_from_path(path=image_path)

        asset_full_path = destination_path + '/' + filename

        if metadata and filename not in metadata:
            logging.warning(
                "No metadata exists for image %s: it will not be ingested",
                filename)
            failed_asset_writer.writerow([filename, 0, 'Missing metadata'])
            continue

        properties = metadata[filename] if metadata else None
        if manifest == "PSO":
            data = manifest_lib.data_pso
        elif manifest == "PSO_DN":
            data = manifest_lib.data_psodn
        elif manifest == "PSO_V":
            data = manifest_lib.data_psov
        elif manifest == "PS4B":
            data = manifest_lib.data_ps4b
        elif manifest == "PS4B_SR":
            data = manifest_lib.data_ps4bsr
        elif manifest == "PS4B_DN":
            data = manifest_lib.data_ps4bdn
        elif manifest == "PS3B":
            data = manifest_lib.data_ps3b
        elif manifest == "PS3B_DN":
            data = manifest_lib.data_ps3bdn
        elif manifest == "PS3B_V":
            data = manifest_lib.data_ps3bv
        elif manifest == "REO":
            data = manifest_lib.data_reo
        elif manifest == "REO_V":
            data = manifest_lib.data_reov
        else:
            print("No Manifest Provided")
            sys.exit()
        try:
            if user is not None:
                gsid = __upload_file_gee(session=google_session,
                                         file_path=image_path,
                                         use_multipart=multipart_upload)
            else:
                gsid = __upload_file_gcs(storage_client, bucket_name,
                                         image_path)

            asset_request = __create_asset_request(asset_full_path, gsid,
                                                   properties, nodata_value)

            # Determine column types from the metadata CSV so each property
            # gets cast to the matching type in the manifest.
            df = pd.read_csv(metadata_path)
            stringcol = list(df.select_dtypes(include=['object']).columns)
            intcol = list(df.select_dtypes(include=['int64']).columns)
            floatcol = list(df.select_dtypes(include=['float64']).columns)
            with open(metadata_path, 'r') as f:
                reader = csv.DictReader(f, delimiter=",")
                for i, line in enumerate(reader):
                    if line["id_no"] == os.path.basename(image_path).split(
                            '.')[0]:
                        for integer in intcol:
                            try:
                                data['properties'][integer] = int(line[integer])
                            except Exception as e:
                                print(e)
                        for s in stringcol:
                            try:
                                data['properties'][s] = str(line[s])
                            except Exception as e:
                                print(e)
                        for flt in floatcol:
                            try:
                                data['properties'][flt] = float(line[flt])
                            except Exception as e:
                                print(e)
                        data['id'] = destination_path + '/' + line["id_no"]
                        data['tilesets'][0]['sources'][0]['primaryPath'] = gsid
                        # 'lp' is assumed to be a module-level local directory
                        # path defined elsewhere in the original module.
                        with open(os.path.join(lp, 'data.json'),
                                  'w') as outfile:
                            json.dump(data, outfile)
                        subprocess.call("earthengine upload_manifest " + '"' +
                                        os.path.join(lp, 'data.json') + '"',
                                        shell=True)
        except Exception as e:
            logging.exception('Upload of %s has failed.', filename)
            failed_asset_writer.writerow([filename, 0, str(e)])

    __check_for_failed_tasks_and_report(tasks=submitted_tasks_id,
                                        writer=failed_asset_writer)
    failed_asset_writer.close()
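
The manifest templates pulled from manifest_lib above are only mutated, never shown. The sketch below is an assumed minimal shape of such a template, inferred solely from the fields the code touches (id, tilesets[0].sources[0].primaryPath, properties); any field beyond those is not taken from the source:

# Assumed minimal structure of a manifest_lib template such as data_pso,
# inferred only from the keys the code above reads and writes.
data_example = {
    "id": "",  # overwritten with destination_path + '/' + id_no
    "tilesets": [
        {"sources": [{"primaryPath": "", "additionalPaths": []}]}
    ],
    "properties": {},  # filled per column from the metadata CSV
}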
Example no. 6
def selupload(user,
              source_path,
              destination_path,
              manifest=None,
              metadata_path=None,
              nodata_value=None,
              bucket_name=None):
    submitted_tasks_id = {}

    __verify_path_for_upload(destination_path)

    path = os.path.join(os.path.expanduser(source_path), '*.tif')
    all_images_paths = glob.glob(path)
    if len(all_images_paths) == 0:
        print('%s does not contain any tif images.' % path)
        sys.exit(1)

    metadata = load_metadata_from_csv(metadata_path) if metadata_path else None

    if user is not None:
        password = getpass.getpass()
        google_session = __get_google_auth_session(user, password)
    else:
        storage_client = storage.Client()

    __create_image_collection(destination_path)

    images_for_upload_path = __find_remaining_assets_for_upload(
        all_images_paths, destination_path)
    no_images = len(images_for_upload_path)

    if no_images == 0:
        print('No images found that match %s. Exiting...' % path)
        sys.exit(1)

    failed_asset_writer = FailedAssetsWriter()

    for current_image_no, image_path in enumerate(images_for_upload_path):
        print('Processing image ' + str(current_image_no + 1) + ' out of ' +
              str(no_images) + ': ' + str(image_path))
        filename = __get_filename_from_path(path=image_path)

        asset_full_path = destination_path + '/' + filename

        if metadata and filename not in metadata:
            print("No metadata exists for image " + str(filename) +
                  ": it will not be ingested")
            failed_asset_writer.writerow([filename, 0, 'Missing metadata'])
            continue

        properties = metadata[filename] if metadata else None
        if manifest == "PSO":
            data = manifest_lib.data_pso
        elif manifest == "PSO_DN":
            data = manifest_lib.data_psodn
        elif manifest == "PSO_V":
            data = manifest_lib.data_psov
        elif manifest == "PS4B":
            data = manifest_lib.data_ps4b
        elif manifest == "PS4B_SR":
            data = manifest_lib.data_ps4bsr
        elif manifest == "PS4B_DN":
            data = manifest_lib.data_ps4bdn
        elif manifest == "PS3B":
            data = manifest_lib.data_ps3b
        elif manifest == "PS3B_DN":
            data = manifest_lib.data_ps3bdn
        elif manifest == "PS3B_V":
            data = manifest_lib.data_ps3bv
        elif manifest == "REO":
            data = manifest_lib.data_reo
        elif manifest == "REO_V":
            data = manifest_lib.data_reov
        else:
            print("No Manifest Provided")
            sys.exit()
        try:
            if user is not None:
                gsid = __upload_file_gee(session=google_session,
                                         file_path=image_path)
            else:
                gsid = __upload_file_gcs(storage_client, bucket_name,
                                         image_path)

            asset_request = __create_asset_request(asset_full_path, gsid,
                                                   properties, nodata_value)

            # Determine column types from the metadata CSV so each property
            # gets cast to the matching type in the manifest.
            df = pd.read_csv(metadata_path)
            stringcol = list(df.select_dtypes(include=['object']).columns)
            intcol = list(df.select_dtypes(include=['int64']).columns)
            floatcol = list(df.select_dtypes(include=['float64']).columns)
            with open(metadata_path, 'r') as f:
                reader = csv.DictReader(f, delimiter=",")
                for i, line in enumerate(reader):
                    if line["id_no"] == os.path.basename(image_path).split(
                            '.')[0]:
                        for integer in intcol:
                            try:
                                data['properties'][integer] = int(line[integer])
                            except Exception as e:
                                print(e)
                        for s in stringcol:
                            try:
                                data['properties'][s] = str(line[s])
                            except Exception as e:
                                print(e)
                        for flt in floatcol:
                            try:
                                data['properties'][flt] = float(line[flt])
                            except Exception as e:
                                print(e)
                        data['id'] = destination_path + '/' + line["id_no"]
                        data['tilesets'][0]['sources'][0]['primaryPath'] = gsid
                        # 'lp' is assumed to be a module-level local directory
                        # path defined elsewhere in the original module.
                        with open(os.path.join(lp, 'data.json'),
                                  'w') as outfile:
                            json.dump(data, outfile)
                        subprocess.call("earthengine upload_manifest " + '"' +
                                        os.path.join(lp, 'data.json') + '"',
                                        shell=True)
        except Exception as e:
            print('Upload of ' + str(filename) + ' has failed.')
            failed_asset_writer.writerow([filename, 0, str(e)])

    __check_for_failed_tasks_and_report(tasks=submitted_tasks_id,
                                        writer=failed_asset_writer)
    failed_asset_writer.close()
Example no. 7
def upload(user,
           source_path,
           destination_path=None,
           metadata_path=None,
           collection_name=None,
           multipart_upload=False,
           nodata_value=None):
    """
    Uploads content of a given directory to GEE. The function first uploads an asset to Google Cloud Storage (GCS)
    and then uses ee.data.startIngestion to put it into GEE, Due to GCS intermediate step, users is asked for
    Google's account name and password.

    In case any exception happens during the upload, the function will repeat the call a given number of times, after
    which the error will be propagated further.

    :param user: name of a Google account
    :param source_path: path to a directory
    :param destination_path: where to upload (absolute path)
    :param metadata_path: (optional) path to file with metadata
    :param collection_name: (optional) name to be given for the uploaded collection
    :return:
    """

    metadata = metadata_loader.load_metadata_from_csv(
        metadata_path) if metadata_path else None

    password = getpass.getpass()
    google_session = __get_google_auth_session(user, password)

    absolute_directory_path_for_upload = __get_absolute_path_for_upload(
        collection_name, destination_path)
    helper_functions.create_image_collection(
        absolute_directory_path_for_upload)

    path = os.path.join(os.path.expanduser(source_path), '*.tif')
    all_images_paths = glob.glob(path)
    no_images = len(all_images_paths)

    images_for_upload_path = __find_remaining_assets_for_upload(
        all_images_paths, absolute_directory_path_for_upload)

    for current_image_no, image_path in enumerate(images_for_upload_path):
        logging.info('Processing image %d out of %d: %s', current_image_no + 1,
                     no_images, image_path)
        filename = helper_functions.get_filename_from_path(path=image_path)

        asset_full_path = absolute_directory_path_for_upload + '/' + filename

        if metadata and filename not in metadata:
            logging.warning(
                "No metadata exists for image %s: it will not be ingested",
                filename)
            with open('assets_missing_metadata.log',
                      'a') as missing_metadata_file:
                missing_metadata_file.write(image_path + '\n')
            continue

        properties = metadata[filename] if metadata else None

        try:
            r = __upload_to_gcs_and_start_ingestion_task(
                current_image_no, asset_full_path, google_session, image_path,
                properties, multipart_upload, nodata_value)
        except Exception as e:
            logging.exception('Upload of %s has failed.', filename)