Example #1
def lightning_on_processing_node_removed(sender, processing_node_id, **kwargs):
    # Remove the node id from the plugin's stored list; ids we never
    # tracked are ignored via the ValueError handler below
    node_ids = ds.get_json('nodes', [])
    try:
        node_ids.remove(processing_node_id)
        logger.info("Removing lightning node {}".format(str(processing_node_id)))
        ds.set_json('nodes', node_ids)
    except ValueError:
        pass
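For context, a minimal sketch of how such a handler is wired to a Django-style signal; the signal object below is hypothetical, not part of the example:

from django.dispatch import Signal

processing_node_removed = Signal()  # hypothetical signal definition
processing_node_removed.connect(lightning_on_processing_node_removed)

# Sending the signal invokes the handler with the keyword arguments it expects:
processing_node_removed.send(sender=None, processing_node_id=42)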
Example #2
def ddb_cleanup(sender, task_id, **kwargs):

    from app.plugins import logger

    # When a task is removed, simply remove clutter
    # When a task is re-processed, make sure we can re-share it if we shared a task previously

    logger.info("Cleaning up DroneDB datastore for task {}".format(
        str(task_id)))

    datastore = get_current_plugin().get_global_data_store()
    status_key = get_status_key(task_id)

    logger.info("Info task {0} ({1})".format(str(task_id), status_key))

    datastore.del_key(status_key)
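get_status_key is not shown here; a plausible sketch (the actual key format is an assumption):

def get_status_key(task_id):
    # Hypothetical key scheme; the plugin's real format may differ
    return 'task_{}_status'.format(str(task_id))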
Example #3
    def refresh_token(self):
        if self.public:
            logger.info("Cannot refresh token.")
            return False

        try:
            response = self.wrapped_call('POST', self.__refresh_url)
            self.token = response.json()['token']

            if self.update_token is not None:
                self.update_token(self.token)

            return True
        except Exception as e:
            raise Exception("Failed to refresh token.") from e
Example #4
    def _read_folder(self, path, recursive=0, extensions=None):
        # recursive > 0: descend into subfolders; recursive < 0: list
        # subfolders without counting their images; recursive == 0: record
        # subfolder names only
        if not path.endswith('/'):
            path = path + '/'
        logger.info(" read folder:" + path)
        name = pathlib.Path(path).name
        files = self._get_files(path)

        alldirs = []
        if recursive != 0 and path != '/':
            parent = str(pathlib.Path(path).parent)
            alldirs += [Folder('[/..] ' + parent, parent, 0)]

        if len(files) == 0:
            return alldirs

        contents = []
        folders = []
        for f in files:
            if f[0] == '.':
                continue
            if f[-1] == '/':
                if recursive > 0:
                    # Descend into the subfolder
                    alldirs += self._read_folder(path + f,
                                                 recursive - 1,
                                                 extensions=extensions)
                elif recursive < 0:
                    # Add the folder without counting its images (-1 = unknown)
                    alldirs += [Folder(f[0:-1], path + f, -1)]
                else:
                    # Just record the folder name without descending into it
                    folders += [f]
            else:
                ext = pathlib.Path(f).suffix.lower()
                if extensions is None or ext in extensions:
                    contents += [f]

        # Skip the current folder if it has no images and no subfolders
        if len(contents) or len(folders):
            alldirs += [Folder(name, path, len(contents))]
        logger.info(" read folder entries: " + str(len(alldirs)))
        return alldirs
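Folder is used above (and serialized in Example #8) but not defined in these snippets; a minimal sketch consistent with its call sites, Folder(name, path, image_count), where -1 means the count is unknown (the serialize() output shape is an assumption):

class Folder:
    def __init__(self, name, path, images_count):
        self.name = name
        self.path = path
        self.images_count = images_count  # -1 = unknown

    def serialize(self):
        # Hypothetical shape; Example #8 only requires that serialize()
        # returns something JSON-serializable
        return {'name': self.name, 'path': self.path,
                'images_count': self.images_count}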
Example #5
    def connect_dict(self, options, user_id):
        """Connect to the server if necessary, the connection can be re-used by other methods
        (not required if each request is self-contained)

        This method takes a dict containing connection details:
        "webdav_hostname", "webdav_login", "webdav_password"
        """
        if self._client:
            try:
                self._client.info("/")
            except WebDavException as e:
                logger.info("WebDAV client exception, re-connecting: " + str(e))
                self._client = None

        if self._client is None and options:
            # Dummy field used only to decrypt the stored password
            es = ServerTokenField(self.name, user_id)
            options['webdav_password'] = es.decrypt_value(
                options['webdav_password'])
            self._client = Client(options)
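An illustrative options dict for connect_dict (field names come from the docstring above; values are made up, and webdav_password is expected to hold the encrypted value that ServerTokenField decrypts):

options = {
    'webdav_hostname': 'https://dav.example.com',
    'webdav_login': 'alice',
    'webdav_password': '<encrypted password>',
}
client.connect_dict(options, user_id)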
Example #6
def import_files(task_id, files):
    logger.info("Will import {} files".format(len(files)))
    task = models.Task.objects.get(pk=task_id)
    task.create_task_directories()
    task.save()
    
    try:
        downloaded_total = 0
        for file in files:
            download_file(task, file)
            task.check_if_canceled()
            # Count the file before updating so progress can reach 1.0
            downloaded_total += 1
            models.Task.objects.filter(pk=task.id).update(
                upload_progress=(float(downloaded_total) / float(len(files))))

    except (requests.exceptions.Timeout, requests.exceptions.ConnectionError) as e:
        raise NodeServerError(e)

    task.refresh_from_db()
    task.pending_action = None
    task.processing_time = 0
    task.partial = False
    task.save()
Example #7
def import_files(task_id, files):
    import requests
    from app import models
    from app.plugins import logger

    def download_file(task, file):
        path = task.task_path(file['name'])
        download_stream = requests.get(file['url'], stream=True, timeout=60)

        with open(path, 'wb') as fd:
            for chunk in download_stream.iter_content(4096):
                fd.write(chunk)

        models.ImageUpload.objects.create(task=task, image=path)

    logger.info("Will import {} files".format(len(files)))
    task = models.Task.objects.get(pk=task_id)
    task.create_task_directories()
    task.save()

    try:
        downloaded_total = 0
        for file in files:
            download_file(task, file)
            task.check_if_canceled()
            # Count the file before updating so progress can reach 1.0
            downloaded_total += 1
            models.Task.objects.filter(pk=task.id).update(
                upload_progress=(float(downloaded_total) / float(len(files))))

    except (requests.exceptions.Timeout,
            requests.exceptions.ConnectionError) as e:
        raise NodeServerError(e)

    task.refresh_from_db()
    task.pending_action = None
    task.processing_time = 0
    task.partial = False
    task.save()
Example #8
    def get(self, request, platform_name, root):
        if not len(root):
            root = '/'
        platform = get_platform_by_name(platform_name)

        if platform is None:
            return Response(
                {'error': "Failed to find a platform with the name '{}'".format(platform_name)},
                status=status.HTTP_400_BAD_REQUEST)

        ds = get_current_plugin().get_user_data_store(request.user)

        server_url_field = platform.get_server_url_field()
        server_url = server_url_field.get_stored_value(ds)

        if server_url == "":
            return Response(
                {
                    'error':
                    'You can\'t ask for the folders when there is no server configured'
                },
                status=status.HTTP_412_PRECONDITION_FAILED)

        # Open connection (if any, e.g. WebDAV)
        logger.info("Connecting to " + platform_name)
        platform.connect(ds, request.user.id)
        logger.info("Getting folder list")
        folders = platform.list_folders_in_server(server_url, root)

        return Response(
            {'folders': [folder.serialize() for folder in folders]},
            status=status.HTTP_200_OK)
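For a successful request, the response body would look roughly like this (values are made up; the entry shape assumes the Folder sketch after Example #4):

{'folders': [{'name': 'flight-1', 'path': '/photos/flight-1/', 'images_count': 24}]}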
Example #9
    def get_files_list(self, orgSlug, dsSlug, folder=None):

        try:

            params = {'path': '' if folder is None else folder}

            logger.info(self.__get_files_list_url.format(orgSlug, dsSlug))

            # Get the folders
            response = self.wrapped_call('GET',
                                         self.__get_files_list_url.format(
                                             orgSlug, dsSlug),
                                         params=params)

            # Exclude folders (type 1 denotes a folder)
            files = filter(lambda itm: itm['type'] != 1, response.json())

            return [{
                'path': o['path'],
                # Extract the file name from its path
                'name': o['path'].split('/')[-1],
                'type': o['type'],
                'size': o['size'],
                'url': self.__download_file_url.format(orgSlug, dsSlug,
                                                       o['path'])
            } for o in files]

        except Exception as e:
            raise Exception("Failed to get files list.") from e
Example #10
def upload_to_ion(
    task_id,
    asset_type,
    token,
    asset_path,
    name,
    description="",
    attribution="",
    options=None,
):
    # Avoid a mutable default argument
    options = options or {}

    import sys
    import time
    import logging
    import requests
    from os import path
    from shutil import rmtree
    from enum import Enum
    from app.plugins import logger
    from plugins.cesiumion.api_views import (
        get_asset_info,
        set_asset_info,
        AssetType,
        ASSET_TO_OUTPUT,
        ASSET_TO_SOURCE,
        ASSET_TO_FILE,
        pluck,
        )
    from plugins.cesiumion.model_tools import (
        to_ion_texture_model, 
        IonInvalidZip,
        )
    from plugins.cesiumion.globals import ION_API_URL

    class LoggerAdapter(logging.LoggerAdapter):
        def __init__(self, prefix, logger):
            super().__init__(logger, {})
            self.prefix = prefix

        def process(self, msg, kwargs):
            return "[%s] %s" % (self.prefix, msg), kwargs

    class TaskUploadProgress(object):
        def __init__(self, file_path, task_id, asset_type, logger=None, log_step_size=0.05):
            self._task_id = task_id
            self._asset_type = asset_type
            self._logger = logger

            self._uploaded_bytes = 0
            self._total_bytes = float(path.getsize(file_path))
            self._asset_info = get_asset_info(task_id, asset_type)

            self._last_log = 0
            self._log_step_size = log_step_size

        @property
        def asset_info(self):
            return self._asset_info

        def __call__(self, total_bytes):
            self._uploaded_bytes += total_bytes
            # Clamp to 1.0 in case more bytes are reported than the file size
            progress = min(self._uploaded_bytes / self._total_bytes, 1.0)

            self._asset_info["upload"]["progress"] = progress
            if self._logger is not None and progress - self._last_log > self._log_step_size:
                self._logger.info(f"Upload progress: {progress * 100}%")
                self._last_log = progress

            set_asset_info(self._task_id, self._asset_type, self._asset_info)

    asset_logger = LoggerAdapter(prefix=f"Task {task_id} {asset_type}", logger=logger)
    asset_type = AssetType[asset_type]
    asset_info = get_asset_info(task_id, asset_type)
    del_directory = None

    try:
        import boto3
    except ImportError:
        import subprocess

        asset_logger.info(f"Manually installing boto3...")
        subprocess.call([sys.executable, "-m", "pip", "install", "boto3"])
        import boto3

    try:
        # Convert textured models to the layout ion expects before upload
        if asset_type == AssetType.TEXTURED_MODEL:
            try:
                asset_path, del_directory = to_ion_texture_model(asset_path)
                logger.info("Created ion texture model!")
            except IonInvalidZip:
                logger.info("Non geo-referenced texture model, using default file.")
            except Exception as e:
                logger.warning("Failed to convert to ion texture model")
                logger.warning(e)

        headers = {"Authorization": f"Bearer {token}"}
        data = {
            "name": name,
            "description": description,
            "attribution": attribution,
            "type": ASSET_TO_OUTPUT[asset_type],
            "options": {**options, "sourceType": ASSET_TO_SOURCE[asset_type]},
        }

        # Create Asset Request
        asset_logger.info(f"Creating asset of type {asset_type}")
        res = requests.post(f"{ION_API_URL}/assets", json=data, headers=headers)
        res.raise_for_status()
        ion_info, upload_meta, on_complete = pluck(
            res.json(), "assetMetadata", "uploadLocation", "onComplete"
        )
        ion_id = ion_info["id"]
        access_key, secret_key, token, endpoint, bucket, file_prefix = pluck(
            upload_meta,
            "accessKey",
            "secretAccessKey",
            "sessionToken",
            "endpoint",
            "bucket",
            "prefix",
        )

        # Upload
        asset_logger.info("Starting upload")
        upload_stats = TaskUploadProgress(asset_path, task_id, asset_type, asset_logger)
        key = path.join(file_prefix, ASSET_TO_FILE[asset_type])
        boto3.client(
            "s3",
            endpoint_url=endpoint,
            aws_access_key_id=access_key,
            aws_secret_access_key=secret_key,
            aws_session_token=token,
        ).upload_file(asset_path, Bucket=bucket, Key=key, Callback=upload_stats)
        asset_info = upload_stats.asset_info
        asset_info["id"] = ion_id
        asset_info["upload"]["active"] = False
        asset_info["process"]["active"] = True
        set_asset_info(task_id, asset_type, asset_info)

        # On Complete Handler
        asset_logger.info("Upload complete")
        method, url, fields = pluck(on_complete, "method", "url", "fields")
        res = requests.request(method, url=url, headers=headers, data=fields)
        res.raise_for_status()

        # Processing Status Refresh
        asset_logger.info("Starting processing")
        refresh = True
        while refresh:
            res = requests.get(f"{ION_API_URL}/assets/{ion_id}", headers=headers)
            res.raise_for_status()

            state, percent_complete = pluck(res.json(), "status", "percentComplete")
            progress = float(percent_complete) / 100
            if "ERROR" in state.upper():
                asset_info["error"] = f"Processing failed"
                asset_logger.info("Processing failed...")
                refresh = False
            if progress >= 1:
                refresh = False

            if asset_info["process"]["progress"] != progress:
                asset_info["process"]["progress"] = progress
                asset_logger.info(f"Processing {percent_complete}% - {state}")
                set_asset_info(task_id, asset_type, asset_info)
            time.sleep(2)

        asset_logger.info("Processing complete")
        asset_info["process"]["progress"] = 1
        asset_info["process"]["active"] = False
    except requests.exceptions.HTTPError as e:
        if e.response.status_code == 401:
            asset_info["error"] = "Invalid ion token!"
        elif e.response.status_code == 404:
            asset_info["error"] = "Missing permisssions on ion token!"
        else:
            asset_info["error"] = str(e)
        asset_logger.error(e)
    except Exception as e:
        asset_info["error"] = str(e)
        asset_logger.error(e)

    if del_directory is not None:
        rmtree(del_directory)

    set_asset_info(task_id, asset_type, asset_info)
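pluck is imported from the plugin's api_views; a plausible sketch consistent with how it is called above (the real implementation may differ):

def pluck(dic, *keys):
    # Return the values for the requested keys, None for any missing ones
    return [dic.get(k) for k in keys]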
Example #11
def share_to_ddb(pk, settings, files):

    from app.plugins import logger

    status_key = get_status_key(pk)
    datastore = get_current_plugin().get_global_data_store()

    registry_url, username, password, token = settings

    ddb = DroneDB(registry_url, username, password, token)

    # Initialize the share session; returns a token used by the uploads below
    share_token = ddb.share_init()

    status = datastore.get_json(status_key)

    status['totalFiles'] = len(files)
    status['totalSize'] = sum(i['size'] for i in files)

    datastore.set_json(status_key, status)

    for file in files:

        # check that file exists
        if not os.path.exists(file['path']):
            logger.info("File {} does not exist".format(file['path']))
            continue

        attempt = 0

        while attempt < 3:
            try:

                attempt += 1

                up = ddb.share_upload(share_token, file['path'], file['name'])

                logger.info("Uploaded " + file['name'] +
                            " to Dronedb (hash: " + up['hash'] + ")")

                status['uploadedFiles'] += 1
                status['uploadedSize'] += file['size']

                datastore.set_json(status_key, status)

                break

            except Exception as e:

                if attempt == 3:
                    logger.error("Error uploading file {}: {}".format(
                        file['name'], str(e)))
                    status['error'] = "Error uploading file {}: {}".format(
                        file['name'], str(e))
                    status['status'] = 2  # Error
                    datastore.set_json(status_key, status)
                    return
                else:
                    logger.info(
                        "Error uploading file {}: {}. Retrying...".format(
                            file['name'], str(e)))
                    time.sleep(5)
                    continue

    res = ddb.share_commit(share_token)

    status['status'] = 3  # Done
    status['shareUrl'] = registry_url + res['url']

    logger.info("Shared on url " + status['shareUrl'])

    datastore.set_json(status_key, status)
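The inline retry loop above can be factored into a reusable helper; a generic sketch, not part of the plugin:

def with_retries(fn, attempts=3, delay=5):
    for attempt in range(1, attempts + 1):
        try:
            return fn()
        except Exception:
            if attempt == attempts:
                raise
            time.sleep(delay)

# e.g.: up = with_retries(lambda: ddb.share_upload(share_token, file['path'], file['name']))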
Example #12
def upload_to_ion(
    task_id,
    asset_type,
    token,
    asset_path,
    name,
    description="",
    attribution="",
    options=None,
):
    # Avoid a mutable default argument
    options = options or {}

    asset_logger = LoggerAdapter(prefix=f"Task {task_id} {asset_type}",
                                 logger=logger)
    asset_type = AssetType[asset_type]
    asset_info = get_asset_info(task_id, asset_type)
    del_directory = None

    try:
        import boto3
    except ImportError:
        import subprocess

        asset_logger.info(f"Manually installing boto3...")
        subprocess.call([sys.executable, "-m", "pip", "install", "boto3"])
        import boto3

    try:
        # Convert textured models to the layout ion expects before upload
        if asset_type == AssetType.TEXTURED_MODEL:
            try:
                asset_path, del_directory = to_ion_texture_model(asset_path)
                logger.info("Created ion texture model!")
            except IonInvalidZip:
                logger.info(
                    "Non geo-referenced texture model, using default file.")
            except Exception as e:
                logger.warning("Failed to convert to ion texture model")
                logger.warning(e)

        headers = {"Authorization": f"Bearer {token}"}
        data = {
            "name": name,
            "description": description,
            "attribution": attribution,
            "type": ASSET_TO_OUTPUT[asset_type],
            "options": {
                **options, "sourceType": ASSET_TO_SOURCE[asset_type]
            },
        }

        # Create Asset Request
        asset_logger.info(f"Creating asset of type {asset_type}")
        res = requests.post(f"{ION_API_URL}/assets",
                            json=data,
                            headers=headers)
        res.raise_for_status()
        ion_info, upload_meta, on_complete = pluck(res.json(), "assetMetadata",
                                                   "uploadLocation",
                                                   "onComplete")
        ion_id = ion_info["id"]
        access_key, secret_key, token, endpoint, bucket, file_prefix = pluck(
            upload_meta,
            "accessKey",
            "secretAccessKey",
            "sessionToken",
            "endpoint",
            "bucket",
            "prefix",
        )

        # Upload
        asset_logger.info("Starting upload")
        upload_stats = TaskUploadProgress(asset_path, task_id, asset_type,
                                          asset_logger)
        key = path.join(file_prefix, ASSET_TO_FILE[asset_type])
        boto3.client(
            "s3",
            endpoint_url=endpoint,
            aws_access_key_id=access_key,
            aws_secret_access_key=secret_key,
            aws_session_token=token,
        ).upload_file(asset_path,
                      Bucket=bucket,
                      Key=key,
                      Callback=upload_stats)
        asset_info = upload_stats.asset_info
        asset_info["id"] = ion_id
        asset_info["upload"]["active"] = False
        asset_info["process"]["active"] = True
        set_asset_info(task_id, asset_type, asset_info)

        # On Complete Handler
        asset_logger.info("Upload complete")
        method, url, fields = pluck(on_complete, "method", "url", "fields")
        res = requests.request(method, url=url, headers=headers, data=fields)
        res.raise_for_status()

        # Processing Status Refresh
        asset_logger.info("Starting processing")
        refresh = True
        while refresh:
            res = requests.get(f"{ION_API_URL}/assets/{ion_id}",
                               headers=headers)
            res.raise_for_status()

            state, percent_complete = pluck(res.json(), "status",
                                            "percentComplete")
            progress = float(percent_complete) / 100
            if "ERROR" in state.upper():
                asset_info["error"] = f"Processing failed"
                asset_logger.info("Processing failed...")
                refresh = False
            if progress >= 1:
                refresh = False

            if asset_info["process"]["progress"] != progress:
                asset_info["process"]["progress"] = progress
                asset_logger.info(f"Processing {percent_complete}% - {state}")
                set_asset_info(task_id, asset_type, asset_info)
            time.sleep(2)

        asset_logger.info("Processing complete")
        asset_info["process"]["progress"] = 1
        asset_info["process"]["active"] = False
    except requests.exceptions.HTTPError as e:
        if e.response.status_code == 401:
            asset_info["error"] = "Invalid ion token!"
        elif e.response.status_code == 404:
            asset_info["error"] = "Missing permisssions on ion token!"
        else:
            asset_info["error"] = str(e)
        asset_logger.error(e)
    except Exception as e:
        asset_info["error"] = str(e)
        asset_logger.error(e)

    if del_directory is not None:
        rmtree(del_directory)

    set_asset_info(task_id, asset_type, asset_info)
Example #13
def import_files(task_id, files, user_id, connection_details=None):
    import requests
    from app import models
    from app.plugins import logger

    connection = None
    if connection_details and connection_details['type'] == 'webdav':
        #Use webdav connection
        from coreplugins.cloudimport.extensions.cloud_webdav import CloudWebDAV
        connection = CloudWebDAV('', '')
        connection.connect_dict(connection_details, user_id)

    def download_file(task, file):
        path = task.task_path(file['name'])
        if connection:
            max_attempts = 10
            for i in range(max_attempts):
                try:
                    connection.download(file['url'], path)
                    break
                except Exception as e:
                    logger.error("Exception occurred downloading file: " +
                                 file['url'] + " : " + str(e))
                    logger.info("Attempting to re-connect...")
                    import time
                    time.sleep(2)
                    connection.connect_dict(connection_details, user_id)
        else:
            download_stream = requests.get(file['url'],
                                           stream=True,
                                           timeout=60)

            with open(path, 'wb') as fd:
                for chunk in download_stream.iter_content(4096):
                    fd.write(chunk)

        models.ImageUpload.objects.create(task=task, image=path)

    logger.info("Will import {} files".format(len(files)))
    task = models.Task.objects.get(pk=task_id)
    task.create_task_directories()
    task.save()

    try:
        downloaded_total = 0
        for file in files:
            download_file(task, file)
            task.check_if_canceled()
            # Count the file before updating so progress can reach 1.0
            downloaded_total += 1
            models.Task.objects.filter(pk=task.id).update(
                upload_progress=(float(downloaded_total) / float(len(files))))

    except (requests.exceptions.Timeout,
            requests.exceptions.ConnectionError) as e:
        raise NodeServerError(e)

    task.refresh_from_db()
    task.pending_action = None
    task.processing_time = 0
    task.partial = False
    task.save()