Example #1
0
    def post(self, request, project_pk=None, pk=None):
        """Import images into a task from a folder on an external platform.

        Expects 'selectedFolderUrl' and 'platform' in the request data.
        Validates the platform and folder URL, records the source folder in
        the global data store, and schedules the import in the background.
        Returns an empty 200 response on success.
        """
        task = self.get_and_check_task(request, pk)

        # Read form data
        folder_url = request.data.get('selectedFolderUrl', None)
        platform_name = request.data.get('platform', None)

        # Make sure both values are set
        if folder_url is None or platform_name is None:
            return Response(
                {'error': 'Folder URL and platform name must be set.'},
                status=status.HTTP_400_BAD_REQUEST)

        # Fetch the platform by name
        platform = get_platform_by_name(platform_name)

        # Make sure that the platform actually exists before using it
        # (the original checked this only after calling methods on it)
        if platform is None:
            return Response(
                {
                    'error':
                    'Failed to find a platform with the name \'{}\''.format(
                        platform_name)
                },
                status=status.HTTP_400_BAD_REQUEST)

        connection_details = None
        if hasattr(platform, 'get_credentials'):
            # Use webdav connection - get the connection info from user data
            ds = get_current_plugin().get_user_data_store(request.user)
            connection_details = platform.get_credentials(ds, request.user.id)
            # Connect for the folder verify step
            platform.connect(ds, request.user.id)

        # Verify that the folder url is valid
        if platform.verify_folder_url(folder_url) is None:
            return Response({'error': 'Invalid URL'},
                            status=status.HTTP_400_BAD_REQUEST)

        # Get the files from the folder
        files = platform.import_from_folder(folder_url)

        # Update the task with the new information
        task.console_output += "Importing {} images...\n".format(len(files))
        task.images_count = len(files)
        task.pending_action = pending_actions.IMPORT
        task.save()

        # Associate the folder url with the project and task
        combined_id = "{}_{}".format(project_pk, pk)
        get_current_plugin().get_global_data_store().set_string(
            combined_id, platform.basepath + folder_url)

        # Start importing the files in the background
        serialized = [file.serialize() for file in files]
        run_function_async(import_files, task.id, serialized, request.user.id,
                           connection_details)

        return Response({}, status=status.HTTP_200_OK)
Example #2
0
    def get(self, request, platform_name):
        """List the folders available on the named platform's configured server.

        Returns 400 when the platform is unknown and 412 when the user has not
        configured a server URL for it yet.
        """
        platform = get_platform_by_name(platform_name)

        if platform is None:
            return Response(
                {
                    'error':
                    'Failed to find a platform with the name \'{}\''.format(
                        platform_name)
                },
                status=status.HTTP_400_BAD_REQUEST)

        ds = get_current_plugin().get_user_data_store(request.user)

        server_url_field = platform.get_server_url_field()
        server_url = server_url_field.get_stored_value(ds)

        # An unchanged default means no server was ever configured
        if server_url == server_url_field.default_value:
            return Response(
                {
                    'error':
                    'You can\'t ask for the folders when there is no server configured'
                },
                status=status.HTTP_412_PRECONDITION_FAILED)

        folders = platform.list_folders_in_server(server_url)

        return Response(
            {'folders': [folder.serialize() for folder in folders]},
            status=status.HTTP_200_OK)
Example #3
0
    def post(self, request, pk=None):
        """Start a GRASS job that computes an elevation map for a task.

        Requires a DSM asset; a DTM is additionally required when the
        'reference' parameter is 'ground'. Returns the celery task id of the
        scheduled GRASS script, or an error payload.
        """
        task = self.get_and_check_task(request, pk)
        plugin = get_current_plugin()

        if task.dsm_extent is None:
            return Response({'error': 'No DSM layer is available.'},
                            status=status.HTTP_400_BAD_REQUEST)

        reference = request.data.get('reference', 'global')
        if reference.lower() == 'ground' and task.dtm_extent is None:
            return Response(
                {
                    'error':
                    'No DTM layer is available. You need one to set the ground as reference.'
                },
                status=status.HTTP_400_BAD_REQUEST)

        try:
            context = grass.create_context({
                'auto_cleanup': False,
                'location': 'epsg:3857'
            })
            dsm = os.path.abspath(task.get_asset_download_path("dsm.tif"))
            # The DTM is only needed when elevations are measured from ground
            dtm = os.path.abspath(task.get_asset_download_path(
                "dtm.tif")) if reference.lower() == 'ground' else None
            epsg = int(request.data.get('epsg', '3857'))
            interval = request.data.get('interval', '5')
            # Renamed from 'format' to avoid shadowing the builtin
            output_format = request.data.get('format', 'GPKG')
            supported_formats = ['GPKG', 'ESRI Shapefile', 'DXF', 'GeoJSON']
            if output_format not in supported_formats:
                raise GrassEngineException(
                    "Invalid format {} (must be one of: {})".format(
                        output_format, ",".join(supported_formats)))
            noise_filter_size = float(request.data.get('noise_filter_size', 2))

            current_dir = os.path.dirname(os.path.abspath(__file__))
            context.add_param('dsm_file', dsm)
            context.add_param('interval', interval)
            context.add_param('format', output_format)
            context.add_param('noise_filter_size', noise_filter_size)
            context.add_param('epsg', epsg)
            context.add_param('python_script_path',
                              os.path.join(current_dir, "elevationmap.py"))
            context.add_param('python_path', plugin.get_python_packages_path())

            if dtm is not None:
                context.add_param('dtm', '--dtm {}'.format(dtm))
            else:
                context.add_param('dtm', '')
            context.set_location(dsm)

            celery_task_id = execute_grass_script.delay(
                os.path.join(current_dir, "calc_elevation_map.grass"),
                context.serialize()).task_id

            return Response({'celery_task_id': celery_task_id},
                            status=status.HTTP_200_OK)
        except GrassEngineException as e:
            # NOTE(review): errors are reported with HTTP 200 so the frontend
            # can display them; kept for backward compatibility.
            return Response({'error': str(e)}, status=status.HTTP_200_OK)
Example #4
0
def get_settings(request):
    """Fetch the stored DroneDB settings for the requesting user.

    Returns a (registry_url, username, password, token) tuple. A missing
    registry URL falls back to DEFAULT_HUB_URL; the other values fall back
    to None.
    """
    store = get_current_plugin().get_user_data_store(request.user)

    registry_url = store.get_string('registry_url') or DEFAULT_HUB_URL
    username, password, token = (
        store.get_string(key) or None
        for key in ('username', 'password', 'token'))

    return registry_url, username, password, token
Example #5
0
    def post(self, request, project_pk=None, pk=None):
        """Import images into a task from a DroneDB dataset/folder URL.

        Expects 'ddb_url' in the request data. Lists the dataset's valid
        image files, records the source in the global data store, and
        schedules the import in the background.
        """
        task = self.get_and_check_task(request, pk)

        # Read form data
        ddb_url = request.data.get('ddb_url', None)

        if ddb_url is None:
            return Response({'error': 'DroneDB url must be set.'},
                            status=status.HTTP_400_BAD_REQUEST)

        # NOTE(review): relies on parse_url returning a mapping whose values
        # are ordered exactly (registry_url, orgSlug, dsSlug, folder) — verify
        registry_url, orgSlug, dsSlug, folder = parse_url(ddb_url).values()

        _, username, password, token = get_settings(request)
        ddb = DroneDB(registry_url, username, password, token,
                      lambda token: update_token(request, token))

        # Get the files from the folder, keeping only supported image paths
        rawfiles = ddb.get_files_list(orgSlug, dsSlug, folder)
        files = [file for file in rawfiles if is_valid(file['path'])]

        # Make sure the dataset/folder actually contains usable images
        if not files:
            return Response({'error': 'Empty dataset or folder.'},
                            status=status.HTTP_400_BAD_REQUEST)

        # Update the task with the new information
        task.console_output += "Importing {} images...\n".format(len(files))
        task.images_count = len(files)
        task.pending_action = pending_actions.IMPORT
        task.save()

        # Associate the folder url with the project and task
        combined_id = "{}_{}".format(project_pk, pk)

        datastore = get_current_plugin().get_global_data_store()
        datastore.set_json(
            combined_id, {
                "ddbUrl": ddb_url,
                "token": ddb.token,
                "ddbWebUrl": "{}/r/{}/{}/{}".format(
                    to_web_protocol(registry_url), orgSlug, dsSlug,
                    folder.rstrip('/'))
            })

        # Start importing the files in the background
        serialized = {'token': ddb.token, 'files': files}
        run_function_async(import_files, task.id, serialized)

        return Response({}, status=status.HTTP_200_OK)
Example #6
0
    def get(self, request, project_pk=None, pk=None):
        """Return the imported folder URL associated with a task, if any.

        Responds with {'folder_url': ...} when one was stored by a previous
        import, or an empty payload otherwise (both with HTTP 200).
        """

        # Assert that task exists (permission check side effect only)
        self.get_and_check_task(request, pk)

        # Check if there is an imported url associated with the project and task
        combined_id = "{}_{}".format(project_pk, pk)
        folder_url = get_current_plugin().get_global_data_store().get_string(
            combined_id, default=None)

        if folder_url is None:
            return Response({}, status=status.HTTP_200_OK)
        else:
            return Response({'folder_url': folder_url},
                            status=status.HTTP_200_OK)
Example #7
0
    def get(self, request, project_pk=None, pk=None):
        """Return the DroneDB web URL associated with a task, if any.

        Responds with {'ddbUrl': ...} when a previous import stored one, or
        {'ddbWebUrl': None} otherwise (both with HTTP 200).
        """

        # Assert that task exists (permission check side effect only)
        self.get_and_check_task(request, pk)

        # Check if there is an imported url associated with the project and task
        combined_id = "{}_{}".format(project_pk, pk)
        data = get_current_plugin().get_global_data_store().get_json(
            combined_id, default=None)

        if data is None or 'ddbWebUrl' not in data:
            return Response({'ddbWebUrl': None}, status=status.HTTP_200_OK)
        else:
            return Response({'ddbUrl': data['ddbWebUrl']},
                            status=status.HTTP_200_OK)
Example #8
0
    def post(self, request, pk):
        """Kick off a background share of a task's assets to DroneDB.

        Initializes the progress record in the global data store, collects
        the shareable asset files, and schedules the upload as a celery
        task. Returns the initial progress record with HTTP 200.
        """
        task = self.get_and_check_task(request, pk)

        status_key = get_status_key(pk)

        datastore = get_current_plugin().get_global_data_store()

        data = {
            'status': 1,  # Running
            'shareUrl': None,
            'uploadedFiles': 0,
            'totalFiles': 0,
            'uploadedSize': 0,
            'totalSize': 0,
            'error': None
        }

        datastore.set_json(status_key, data)

        settings = get_settings(request)

        # Only share the assets DroneDB understands
        available_assets = [
            task.get_asset_file_or_zipstream(f)[0]
            for f in list(set(task.available_assets) & set(DRONEDB_ASSETS))
        ]

        # Also include the individual files of the textured model, if present
        if 'textured_model.zip' in task.available_assets:
            texturing_dir = task.assets_path('odm_texturing')
            texture_files = [
                join(texturing_dir, f) for f in listdir(texturing_dir)
                if isfile(join(texturing_dir, f))
            ]
            available_assets.extend(texture_files)

        assets_path = task.assets_path()

        # 'name' is the file path relative to the assets directory
        files = [{
            'path': f,
            'name': f[len(assets_path) + 1:],
            'size': os.path.getsize(f)
        } for f in available_assets]

        share_to_ddb.delay(pk, settings, files)

        return Response(data, status=status.HTTP_200_OK)
Example #9
0
def ddb_cleanup(sender, task_id, **kwargs):
    """Drop the DroneDB status entry associated with *task_id*.

    Invoked when a task is removed (to clear clutter) or re-processed
    (so a previously shared task can be shared again).
    """

    from app.plugins import logger

    logger.info("Cleaning up DroneDB datastore for task {}".format(
        str(task_id)))

    store = get_current_plugin().get_global_data_store()
    key = get_status_key(task_id)

    logger.info("Info task {0} ({1})".format(str(task_id), key))

    store.del_key(key)
Example #10
0
    def serialize(self, **kwargs):
        """Serialize this platform for the API.

        When a 'user' kwarg is provided and that user has a server URL
        stored, the payload is typed 'library' and includes the stored URL;
        otherwise it is typed 'platform'.
        """
        base_payload = {
            'name': self.name,
            'folder_url_example': self.folder_url_example
        }
        # .get() tolerates a missing 'user' key (original raised KeyError)
        if kwargs.get('user') is not None:
            ds = get_current_plugin().get_user_data_store(kwargs['user'])
            server_url_field = self.get_server_url_field()
            stored_value = server_url_field.get_stored_value(ds)
            if stored_value != server_url_field.default_value:
                # If the user is set, and there is a server url set, then consider this platform as
                # a library. Otherwise, consider it a plain platform
                base_payload['type'] = 'library'
                base_payload[server_url_field.key] = stored_value
                return base_payload

        base_payload['type'] = 'platform'
        return base_payload
Example #11
0
    def serialize(self, **kwargs):
        """Serialize this platform for the API.

        When a 'user' kwarg is provided, the payload is typed 'library' and
        includes the user's stored server URL, username, and token; otherwise
        it is typed 'platform'.
        """
        base_payload = {
            'name': self.name,
            'folder_url_example': self.folder_url_example
        }
        # .get() tolerates a missing 'user' key (original raised KeyError)
        if kwargs.get('user') is not None:
            ds = get_current_plugin().get_user_data_store(kwargs['user'])
            server_url_field = self.get_server_url_field()
            server_url = server_url_field.get_stored_value(ds)
            server_user_field = self.get_server_user_field()
            server_user = server_user_field.get_stored_value(ds)
            server_token_field = self.get_server_token_field(kwargs['user'].id)
            server_token = server_token_field.get_stored_value(ds)
            base_payload['type'] = 'library'
            base_payload[server_url_field.key] = server_url
            base_payload[server_user_field.key] = server_user
            base_payload[server_token_field.key] = server_token
            return base_payload

        base_payload['type'] = 'platform'
        return base_payload
Example #12
0
    def get(self, request, pk):
        """Return the DroneDB share status for a task (idle defaults if none)."""

        # Permission/existence check; the task object itself is not needed
        self.get_and_check_task(request, pk)

        status_key = get_status_key(pk)
        store = get_current_plugin().get_global_data_store()

        idle_state = {
            'status': 0,  # Idle
            'shareUrl': None,
            'uploadedFiles': 0,
            'totalFiles': 0,
            'uploadedSize': 0,
            'totalSize': 0,
            'error': None
        }
        info = store.get_json(status_key, idle_state)

        return Response(info, status=status.HTTP_200_OK)
    def get(self, request, platform_name, root):
        """List the folders under *root* on the platform's configured server.

        Returns 400 when the platform is unknown and 412 when the user has
        not configured a server URL for it yet.
        """
        # Default to the server root when no path was supplied
        if not root:
            root = '/'
        platform = get_platform_by_name(platform_name)

        if platform is None:
            return Response(
                {
                    'error':
                    'Failed to find a platform with the name \'{}\''.format(
                        platform_name)
                },
                status=status.HTTP_400_BAD_REQUEST)

        ds = get_current_plugin().get_user_data_store(request.user)

        server_url_field = platform.get_server_url_field()
        server_url = server_url_field.get_stored_value(ds)

        if server_url == "":
            return Response(
                {
                    'error':
                    'You can\'t ask for the folders when there is no server configured'
                },
                status=status.HTTP_412_PRECONDITION_FAILED)

        # Open connection (if any, eg: WebDAV)
        logger.info("Connecting..." + platform_name)
        platform.connect(ds, request.user.id)
        logger.info("Getting folder list")
        folders = platform.list_folders_in_server(server_url, root)

        return Response(
            {'folders': [folder.serialize() for folder in folders]},
            status=status.HTTP_200_OK)
Example #14
0
    def post(self, request, pk=None):
        """Start a GRASS change-detection job between two tasks.

        The 'role' parameter decides whether *pk* is the reference or the
        compared task; 'other_task' supplies the other id. Returns the
        celery task id of the scheduled GRASS script, or an error payload.
        """

        role = request.data.get('role', 'reference')
        if role == 'reference':
            reference_pk = pk
            compare_task_pk = request.data.get('other_task', None)
        else:
            reference_pk = request.data.get('other_task', None)
            compare_task_pk = pk

        reference_task = self.get_and_check_task(request, reference_pk)
        if compare_task_pk is None:
            return Response({'error': 'You must select a task to compare to.'},
                            status=status.HTTP_400_BAD_REQUEST)
        compare_task = self.get_and_check_task(request, compare_task_pk)

        reference_pc = os.path.abspath(
            reference_task.get_asset_download_path("georeferenced_model.laz"))
        reference_dsm = os.path.abspath(
            reference_task.get_asset_download_path("dsm.tif"))
        reference_dtm = os.path.abspath(
            reference_task.get_asset_download_path("dtm.tif"))

        compare_pc = os.path.abspath(
            compare_task.get_asset_download_path("georeferenced_model.laz"))
        compare_dsm = os.path.abspath(
            compare_task.get_asset_download_path("dsm.tif"))
        compare_dtm = os.path.abspath(
            compare_task.get_asset_download_path("dtm.tif"))

        plugin = get_current_plugin()

        # We store the aligned DEMs on the persistent folder, to avoid recalculating them in the future
        aligned_dsm = plugin.get_persistent_path("{}_{}_dsm.tif".format(
            pk, compare_task_pk))
        aligned_dtm = plugin.get_persistent_path("{}_{}_dtm.tif".format(
            pk, compare_task_pk))

        try:
            context = grass.create_context({
                'auto_cleanup': False,
                'location': 'epsg:3857',
                'python_path': plugin.get_python_packages_path()
            })
            # Renamed from 'format' to avoid shadowing the builtin
            output_format = request.data.get('format', 'GPKG')
            epsg = int(request.data.get('epsg', '3857'))
            supported_formats = ['GPKG', 'ESRI Shapefile', 'DXF', 'GeoJSON']
            if output_format not in supported_formats:
                raise GrassEngineException(
                    "Invalid format {} (must be one of: {})".format(
                        output_format, ",".join(supported_formats)))
            min_area = float(request.data.get('min_area', 40))
            min_height = float(request.data.get('min_height', 5))
            resolution = float(request.data.get('resolution', 0.5))
            display_type = request.data.get('display_type', 'contour')
            # NOTE(review): passed along as the string 'true'/'false'
            can_align_and_rasterize = request.data.get('align', 'false')

            current_dir = os.path.dirname(os.path.abspath(__file__))
            context.add_param('reference_pc', reference_pc)
            context.add_param('compare_pc', compare_pc)
            context.add_param('reference_dsm', reference_dsm)
            context.add_param('reference_dtm', reference_dtm)
            context.add_param('compare_dsm', compare_dsm)
            context.add_param('compare_dtm', compare_dtm)
            context.add_param('aligned_dsm', aligned_dsm)
            context.add_param('aligned_dtm', aligned_dtm)
            context.add_param('format', output_format)
            context.add_param('epsg', epsg)
            context.add_param('display_type', display_type)
            context.add_param('resolution', resolution)
            context.add_param('min_area', min_area)
            context.add_param('min_height', min_height)
            context.add_param('can_align_and_rasterize',
                              can_align_and_rasterize)

            celery_task_id = execute_grass_script.delay(
                os.path.join(current_dir, "changedetection.py"),
                context.serialize()).task_id

            return Response({'celery_task_id': celery_task_id},
                            status=status.HTTP_200_OK)
        except GrassEngineException as e:
            # NOTE(review): errors are reported with HTTP 200 so the frontend
            # can display them; kept for backward compatibility.
            return Response({'error': str(e)}, status=status.HTTP_200_OK)
Example #15
0
 def get_current_plugin_test(self):
     """Return the current plugin instance (thin test helper wrapper)."""
     return get_current_plugin()
Example #16
0
def update_token(request, token):
    """Persist a refreshed DroneDB token in the requesting user's data store."""
    store = get_current_plugin().get_user_data_store(request.user)
    store.set_string('token', token)
Example #17
0
def share_to_ddb(pk, settings, files):
    """Upload a task's asset files to DroneDB, tracking progress.

    Progress is written to the plugin's global data store under the task's
    status key so the UI can poll it. Each file is retried up to 3 times;
    on repeated failure the job is marked as errored (status 2) and
    aborted, otherwise the share is committed and marked done (status 3).
    """

    from app.plugins import logger

    status_key = get_status_key(pk)
    datastore = get_current_plugin().get_global_data_store()

    registry_url, username, password, token = settings

    ddb = DroneDB(registry_url, username, password, token)

    # Init share (to check)
    share_token = ddb.share_init()

    # Renamed from 'status' to avoid shadowing the DRF status module
    share_status = datastore.get_json(status_key)

    share_status['totalFiles'] = len(files)
    share_status['totalSize'] = sum(i['size'] for i in files)

    datastore.set_json(status_key, share_status)

    for entry in files:

        # Skip files that disappeared from disk
        if not os.path.exists(entry['path']):
            logger.info("File {} does not exist".format(entry['path']))
            continue

        attempt = 0

        while attempt < 3:
            try:
                attempt += 1

                up = ddb.share_upload(share_token, entry['path'],
                                      entry['name'])

                logger.info("Uploaded " + entry['name'] +
                            " to Dronedb (hash: " + up['hash'] + ")")

                share_status['uploadedFiles'] += 1
                share_status['uploadedSize'] += entry['size']

                datastore.set_json(status_key, share_status)

                break

            except Exception as e:

                if attempt == 3:
                    # Give up on this share entirely after 3 failed attempts
                    logger.error("Error uploading file {}: {}".format(
                        entry['name'], str(e)))
                    share_status['error'] = "Error uploading file {}: {}".format(
                        entry['name'], str(e))
                    share_status['status'] = 2  # Error
                    datastore.set_json(status_key, share_status)
                    return
                else:
                    logger.info(
                        "Error uploading file {}: {}. Retrying...".format(
                            entry['name'], str(e)))
                    time.sleep(5)
                    continue

    res = ddb.share_commit(share_token)

    share_status['status'] = 3  # Done
    share_status['shareUrl'] = registry_url + res['url']

    logger.info("Shared on url " + share_status['shareUrl'])

    datastore.set_json(status_key, share_status)