Example #1
0
 def addGeoJSON(projectId: int,
                feature: Dict,
                original_path=None) -> List[Feature]:
     """
     Add a GeoJSON Feature or FeatureCollection to a project.

     :param projectId: int project the new feature(s) belong to
     :param feature: dict GeoJSON payload (Feature or FeatureCollection)
     :param original_path: unused, kept for signature compatibility
     :return: List[Feature] the newly persisted features
     :raises InvalidGeoJSON: if the payload cannot be parsed as GeoJSON or
         is neither a Feature nor a FeatureCollection
     """
     try:
         # round-trip through the geojson library to validate the structure
         data = geojson.loads(json.dumps(feature))
     except ValueError:
         raise InvalidGeoJSON
     features = []
     if data["type"] == "Feature":
         feat = Feature.fromGeoJSON(data)
         feat.project_id = projectId
         # strip out image_src, thumb_src if they are there from the old hazmapper geojson
         feat = FeaturesService._importHazmapperV1Images(feat)
         db_session.add(feat)
         features.append(feat)
     elif data["type"] == "FeatureCollection":
         fc = geojson.FeatureCollection(data)
         # renamed loop variable so it no longer shadows the "feature" parameter
         for item in fc.features:
             feat = Feature.fromGeoJSON(item)
             feat.project_id = projectId
             feat = FeaturesService._importHazmapperV1Images(feat)
             db_session.add(feat)
             features.append(feat)
     else:
         raise InvalidGeoJSON(
             "Valid GeoJSON must be either a Feature or FeatureCollection.")
     db_session.commit()
     return features
Example #2
0
    def fromFileObj(pointCloudId: int, fileObj: IO, fileName: str):
        """
        Add a point cloud file

        When point cloud file has been processed, a feature will be created/updated with feature
        asset containing processed point cloud

        Different processing steps are applied asynchronously by default.

        :param pointCloudId: int
        :param fileObj: IO
        :param fileName: str
        :return: processingTask: Task
        :raises InvalidCoordinateReferenceSystem: if the file has no
            coordinate reference system (the staged file is removed first)
        """
        PointCloudService.check_file_extension(fileName)

        point_cloud = PointCloudService.get(pointCloudId)

        # stage the uploaded file under the point cloud's originals directory
        file_path = PointCloudService.putPointCloudInOriginalsFileDir(
            point_cloud.path, fileObj, fileName)

        try:
            result = check_point_cloud.apply_async(args=[file_path])
            # block until the validity check completes so errors surface here
            result.get()
        except InvalidCoordinateReferenceSystem as e:
            # don't keep an unusable file around
            os.remove(file_path)
            logger.error("Point cloud file ({}) missing required coordinate reference system".format(file_path))
            raise e

        result = get_point_cloud_info.apply_async(args=[pointCloudId])
        point_cloud.files_info = json.dumps(result.get())

        db_session.add(point_cloud)
        db_session.commit()

        return PointCloudService._process_point_clouds(pointCloudId)
Example #3
0
    def fromShapefile(projectId: int,
                      fileObj: IO,
                      metadata: Dict,
                      additional_files: List[IO],
                      original_path=None) -> List[Feature]:
        """ Create features from shapefile

        :param projectId: int
        :param fileObj: file descriptor for the .shp file
        :param additional_files: file descriptors for all the other non-.shp files
        :param metadata: Dict of <key, val> pairs   [IGNORED]
        :param original_path: str path of original file location  [IGNORED]
        :return: List[Feature] one feature per shapefile record
        """
        features = []
        # each (geometry, properties) record in the shapefile becomes one feature
        for geom, properties in VectorService.process_shapefile(
                fileObj, additional_files):
            feat = Feature()
            feat.project_id = projectId
            # convert any 3D geometry to 2D before storing (srid 4326)
            feat.the_geom = from_shape(geometries.convert_3D_2D(geom),
                                       srid=4326)
            feat.properties = properties
            db_session.add(feat)
            features.append(feat)

        db_session.commit()
        return features
Example #4
0
    def addOverlay(projectId: int, fileObj: IO, bounds: List[float],
                   label: str) -> Overlay:
        """
        Create an image overlay for a project.

        :param projectId: int
        :param fileObj: IO image file for the overlay
        :param bounds: List [minLon, minLat, maxLon, maxLat]
        :param label: str
        :return: Overlay the newly persisted overlay
        """

        imdata = ImageService.processOverlay(fileObj)
        ov = Overlay()
        ov.label = label
        ov.minLon = bounds[0]
        ov.minLat = bounds[1]
        ov.maxLon = bounds[2]
        ov.maxLat = bounds[3]
        ov.project_id = projectId
        ov.uuid = uuid.uuid4()
        asset_path = os.path.join(str(make_project_asset_dir(projectId)),
                                  str(ov.uuid) + '.jpeg')
        ov.path = get_asset_relative_path(asset_path)
        # save the full-size image plus a thumbnail alongside it
        imdata.original.save(asset_path, 'JPEG')
        imdata.thumb.save(
            pathlib.Path(asset_path).with_suffix(".thumb.jpeg"), "JPEG")
        db_session.add(ov)
        db_session.commit()
        return ov
Example #5
0
    def _process_point_clouds(pointCloudId: int) -> Task:
        """
        Kick off asynchronous processing of a point cloud's files.

        :param pointCloudId: int
        :return: processingTask: Task
        """
        process_id = celery_uuid()

        task = Task()
        task.process_id = process_id
        task.status = "RUNNING"
        task.description = "Processing point cloud #{}".format(pointCloudId)

        # attach the tracking task to the point cloud and persist both
        point_cloud = PointCloudService.get(pointCloudId)
        point_cloud.task = task
        db_session.add(task)
        db_session.add(point_cloud)
        db_session.commit()

        logger.info("Starting potree processing task (#{}:  '{}') for point cloud (#{}).".format(
            task.id, process_id, pointCloudId))

        # Process asynchronously lidar file and add a feature asset
        convert_to_potree.apply_async(args=[pointCloudId], task_id=process_id)

        return task
Example #6
0
def feature_fixture():
    # load a canned GeoJSON feature and persist it under project 1
    fixture_file = os.path.join(os.path.dirname(__file__),
                                'fixtures/properties.json')
    with open(fixture_file, 'rb') as fixture:
        feat = Feature.fromGeoJSON(json.loads(fixture.read()))
        feat.project_id = 1
        db_session.add(feat)
        db_session.commit()
        yield feat
Example #7
0
 def on_failure(self, exc, task_id, args, kwargs, einfo):
     """
     Celery failure hook: mark the matching Task row as FAILED.

     :param exc: exception raised by the task
     :param task_id: celery task id (matches Task.process_id)
     :param args: original task arguments (used for logging only)
     :param kwargs: original task keyword arguments (unused)
     :param einfo: exception info (unused)
     """
     logger.info("Task ({}, point cloud {}) failed: {}".format(
         task_id, args, exc))
     failed_task = db_session.query(Task).filter(
         Task.process_id == task_id).first()
     # guard: the Task row may be missing (e.g. deleted before this hook ran);
     # previously this raised AttributeError inside the failure handler
     if failed_task is None:
         logger.error("No Task found with process_id {}".format(task_id))
         return
     failed_task.status = "FAILED"
     failed_task.description = ""
     db_session.add(failed_task)
     db_session.commit()
Example #8
0
 def fromLatLng(projectId: int, lat: float, lng: float,
                metadata: Dict) -> Feature:
     """Create and persist a Point feature at the given coordinates."""
     feature = Feature()
     feature.project_id = projectId
     # note the lng/lat order: Point takes (x, y)
     feature.the_geom = from_shape(Point(lng, lat), srid=4326)
     feature.properties = metadata or {}
     db_session.add(feature)
     db_session.commit()
     return feature
Example #9
0
def projects_fixture2():
    # a second project owned by the existing "test1" user
    owner = db_session.query(User).filter(User.username == "test1").first()
    proj = Project(name="test2", description="description2")
    proj.users.append(owner)
    proj.tenant_id = owner.tenant_id
    db_session.add(proj)
    db_session.commit()
    yield proj

    # remove any assets created for this project during the test
    shutil.rmtree(get_project_asset_dir(proj.id), ignore_errors=True)
Example #10
0
    def create(username: str, tenant: str, jwt: str = None) -> User:
        """
        Create and persist a new user, optionally storing a jwt.

        :rtype: User
        """
        new_user = User(username=username, tenant_id=tenant)
        if jwt:
            new_user.jwt = jwt
        db_session.add(new_user)
        db_session.commit()
        return new_user
Example #11
0
 def create(user: User, status: AnyStr, message: AnyStr) -> Notification:
     """Persist a notification for *user*; roll back on any failure."""
     notification = Notification(username=user.username,
                                 tenant_id=user.tenant_id,
                                 status=status,
                                 message=message)
     try:
         db_session.add(notification)
         db_session.commit()
     except Exception:
         db_session.rollback()
         raise
     return notification
Example #12
0
def addRandomMarkers():
    # seed project for the hard-coded "jmeiring" user on the designsafe tenant
    proj = Project(name="test", description="test", tenant_id="designsafe")
    user = db_session.query(User).filter(User.username == "jmeiring").first()
    proj.users.append(user)
    db_session.add(user)
    # scatter 10k random point features across the globe
    for _ in range(10000):
        lon = random.uniform(-180, 180)
        lat = random.uniform(-90, 90)
        marker = Feature(the_geom=from_shape(Point(lon, lat), srid=4326))
        marker.project = proj
        db_session.add(marker)
    db_session.commit()
Example #13
0
def _update_point_cloud_task(pointCloudId: int,
                             description: str = None,
                             status: str = None):
    """Update the description and/or status of a point cloud's task."""
    pc_task = pointcloud.PointCloudService.get(pointCloudId).task
    if description is not None:
        pc_task.description = description
    if status is not None:
        pc_task.status = status
    try:
        db_session.add(pc_task)
        db_session.commit()
    except Exception:
        db_session.rollback()
        raise
Example #14
0
    def fromImage(projectId: int,
                  fileObj: IO,
                  metadata: Dict,
                  original_path: str = None) -> Feature:
        """
        Create a Point feature from a georeferenced image.

        :param projectId: int
        :param fileObj: file containing a georeferenced image (closed on exit)
        :param metadata: dict stored as the feature's properties
        :param original_path: str original path of the image, stored on the asset
        :return: Feature the newly persisted feature with its image asset
        """
        imdata = ImageService.processImage(fileObj)
        point = Point(imdata.coordinates)
        f = Feature()
        f.project_id = projectId
        f.the_geom = from_shape(point, srid=4326)
        f.properties = metadata

        asset_uuid = uuid.uuid4()
        base_filepath = make_project_asset_dir(projectId)
        # the resized image and its thumbnail share the asset uuid
        # (previously the same .jpeg path was computed twice)
        asset_path = os.path.join(base_filepath, str(asset_uuid) + '.jpeg')
        thumbnail_path = os.path.join(base_filepath,
                                      str(asset_uuid) + ".thumb.jpeg")

        fa = FeatureAsset(
            uuid=asset_uuid,
            asset_type="image",
            original_path=original_path,
            display_path=original_path,
            path=get_asset_relative_path(asset_path),
            feature=f,
        )
        f.assets.append(fa)
        try:
            imdata.thumb.save(thumbnail_path, "JPEG")
            imdata.resized.save(asset_path, "JPEG")
        except BaseException:
            # don't leave partially written assets behind; re-raise unchanged
            # (BaseException keeps the old bare-except cleanup semantics)
            if os.path.exists(thumbnail_path):
                os.remove(thumbnail_path)
            if os.path.exists(asset_path):
                os.remove(asset_path)
            raise
        finally:
            fileObj.close()
        db_session.add(f)
        db_session.commit()
        return f
Example #15
0
    def makeObservable(proj: Project, user: User, watch_content: bool):
        """
        Makes a project an observable project
        Requires project's system_path, system_id, tenant_id to exist
        :param proj: Project
        :param user: User
        :param watch_content: bool
        :return: None
        :raises ObservableProjectAlreadyExists: if an observable project
            already exists for this system/path
        """
        folder_name = Path(proj.system_path).name
        name = proj.system_id + '/' + folder_name

        # TODO: Handle no storage system found
        # return value was unused; the call is kept for its lookup side effect
        AgaveUtils(user.jwt).systemsGet(proj.system_id)

        obs = ObservableDataProject(system_id=proj.system_id,
                                    path=proj.system_path,
                                    watch_content=watch_content)

        # sync the project's user list with the storage system's roles
        users = get_system_users(proj.tenant_id, user.jwt, proj.system_id)
        logger.info(
            "Updating project:{} to have the following users: {}".format(
                name, users))
        project_users = [
            UserService.getOrCreateUser(u, tenant=proj.tenant_id)
            for u in users
        ]
        proj.users = project_users

        obs.project = proj

        try:
            db_session.add(obs)
            db_session.commit()
        except IntegrityError as e:
            db_session.rollback()
            logger.exception(
                "User:{} tried to create an observable project that already exists: '{}'"
                .format(user.username, name))
            # chain the cause so the IntegrityError isn't lost
            raise ObservableProjectAlreadyExists(
                "'{}' project already exists".format(name)) from e

        if watch_content:
            import_from_agave.apply_async(args=[
                obs.project.tenant_id, user.id, obs.system_id, obs.path,
                obs.project_id
            ])
Example #16
0
    def addTileServer(projectId: int, data: Dict):
        """
        Create a tile server for a project from a dict of attributes.

        :param projectId: int
        :param data: Dict
        :return: ts: TileServer
        """
        tile_server = TileServer()

        # copy every provided attribute straight onto the model
        for attribute, value in data.items():
            setattr(tile_server, attribute, value)
        tile_server.project_id = projectId

        db_session.add(tile_server)
        db_session.commit()
        return tile_server
Example #17
0
def observable_projects_fixture():
    # observable project owned by the "test1" user, watching /testPath
    owner = db_session.query(User).filter(User.username == "test1").first()
    proj = Project(name="test_observable",
                   description="description",
                   tenant_id=owner.tenant_id)
    observable = ObservableDataProject(system_id="testSystem",
                                       path="/testPath",
                                       watch_content=True)
    observable.project = proj
    proj.users.append(owner)
    db_session.add(observable)
    db_session.add(proj)
    db_session.commit()
    yield observable

    # remove any assets created for this project during the test
    shutil.rmtree(get_project_asset_dir(proj.id), ignore_errors=True)
Example #18
0
    def fromGPX(projectId: int,
                fileObj: IO,
                metadata: Dict,
                original_path=None) -> Feature:
        """Create a feature from the first track of a GPX file."""
        # TODO: Fiona should support reading from the file directly, this MemoryFile business
        #  should not be needed
        with fiona.io.MemoryFile(fileObj) as memfile:
            with memfile.open(layer="tracks") as tracks:
                first_track = tracks[0]
                geom = shape(first_track["geometry"])
                feature = Feature()
                feature.project_id = projectId
                feature.the_geom = from_shape(geometries.convert_3D_2D(geom),
                                              srid=4326)
                feature.properties = metadata or {}
                db_session.add(feature)
                db_session.commit()
                return feature
Example #19
0
    def create(projectId: int, data: dict, user: User) -> PointCloud:
        """
        Create a PointCloud for a user.
        :param projectId: int
        :param data: dict
        :param user: User
        :return: PointCloud
        """
        # lay out <project assets>/<uuid>/<originals dir> on disk first
        pc_uuid = uuid.uuid4()
        pc_asset_path = os.path.join(make_project_asset_dir(projectId),
                                     str(pc_uuid))
        originals_dir = os.path.join(pc_asset_path,
                                     PointCloudService.ORIGINAL_FILES_DIR)
        pathlib.Path(originals_dir).mkdir(parents=True, exist_ok=True)

        point_cloud = PointCloud(**data)
        point_cloud.project_id = projectId
        point_cloud.tenant_id = user.tenant_id
        point_cloud.uuid = pc_uuid
        point_cloud.path = get_asset_relative_path(pc_asset_path)

        db_session.add(point_cloud)
        db_session.commit()
        return point_cloud
Example #20
0
def refresh_observable_projects():
    """Re-sync users and re-import content for every observable project."""
    try:
        observable_projects = db_session.query(ObservableDataProject).all()
        for index, obs_proj in enumerate(observable_projects):
            # we need a user with a jwt for importing
            importing_user = next(
                (u for u in obs_proj.project.users if u.jwt))
            logger.info(
                "Refreshing observable project ({}/{}): observer:{} system:{} path:{}"
                .format(index, len(observable_projects), importing_user,
                        obs_proj.system_id, obs_proj.path))
            current_user_names = set(
                [u.username for u in obs_proj.project.users])

            # we need to add any users who have been added to the system roles
            # (note that we do not delete any that are no longer listed on system roles; we only add users)
            system_users = set(
                get_system_users(obs_proj.project.tenant_id,
                                 importing_user.jwt, obs_proj.system_id))
            updated_user_names = system_users.union(current_user_names)
            if updated_user_names != current_user_names:
                logger.info("Updating to add the following users:{}   "
                            "Updated user list is now: {}".format(
                                updated_user_names - current_user_names,
                                updated_user_names))
                obs_proj.project.users = [
                    UserService.getOrCreateUser(
                        u, tenant=obs_proj.project.tenant_id)
                    for u in updated_user_names
                ]
                db_session.add(obs_proj)
                db_session.commit()

            # perform the importing
            if obs_proj.watch_content:
                import_from_agave(obs_proj.project.tenant_id,
                                  importing_user.id, obs_proj.system_id,
                                  obs_proj.path, obs_proj.project.id)
    except Exception:
        logger.exception(
            "Unhandled exception when importing observable project")
        db_session.rollback()
Example #21
0
    def create(data: dict, user: User) -> Project:
        """
        Create a new map project for a user.
        :param data: dict with a 'project' key of Project attributes, plus
            optional 'observable' and 'watch_content' flags
        :param user: User
        :return: Project
        """
        project = Project(**data['project'])

        project.tenant_id = user.tenant_id
        project.users.append(user)

        if data.get('observable', False):
            try:
                ProjectsService.makeObservable(
                    project, user, data.get('watch_content', False))
            except Exception as e:
                logger.exception("{}".format(e))
                # bare raise keeps the original traceback (raise e truncated it)
                raise

        db_session.add(project)
        db_session.commit()

        return project
Example #22
0
def task_fixture():
    # a completed task row for tests to reference
    completed = Task(process_id="1234",
                     status="SUCCESS",
                     description="description")
    db_session.add(completed)
    db_session.commit()
    yield completed
Example #23
0
def import_from_agave(tenant_id: str, userId: int, systemId: str, path: str,
                      projectId: int):
    """
    Recursively import supported files from an Agave/Tapis directory into a project.

    Walks the listing of ``systemId:path``: recurses into subdirectories
    (skipping ``.Trash``), skips files with disallowed extensions, and imports
    the rest. Files inside a RApp project folder get their metadata (including
    geolocation) from the tapis meta service; other geospatial files are
    imported directly. Each successfully imported file is recorded via
    ImportsService so it is not imported again. Per-file errors are logged
    and notified but do not stop the overall import.

    :param tenant_id: str tenant used for metadata lookups
    :param userId: int id of the user whose jwt is used for Agave calls
    :param systemId: str Agave system to read from
    :param path: str directory path on the system
    :param projectId: int project to add features to
    """
    user = db_session.query(User).get(userId)
    client = AgaveUtils(user.jwt)
    logger.info("Importing for project:{} directory:{}/{} for user:{}".format(
        projectId, systemId, path, user.username))
    listing = client.listing(systemId, path)
    # First item is always a reference to self
    files_in_directory = listing[1:]
    filenames_in_directory = [str(f.path) for f in files_in_directory]
    for item in files_in_directory:
        if item.type == "dir" and not str(item.path).endswith("/.Trash"):
            import_from_agave(tenant_id, userId, systemId, item.path,
                              projectId)
        # skip any junk files that are not allowed
        # (directories fall through to this check too and are skipped here)
        if item.path.suffix.lower().lstrip(
                '.') not in FeaturesService.ALLOWED_EXTENSIONS:
            continue
        else:
            try:
                # first check if there already is a file in the DB
                item_system_path = os.path.join(item.system,
                                                str(item.path).lstrip("/"))
                targetFile = ImportsService.getImport(projectId, systemId,
                                                      str(item.path))
                if targetFile:
                    logger.info("Already imported {}".format(item_system_path))
                    continue

                # If its a RApp project folder, grab the metadata from tapis meta service
                if is_member_of_rapp_project_folder(item_system_path):
                    logger.info("RApp: importing:{} for user:{}".format(
                        item_system_path, user.username))
                    # RApp files must additionally be a feature-asset type
                    if item.path.suffix.lower().lstrip(
                            '.'
                    ) not in FeaturesService.ALLOWED_GEOSPATIAL_FEATURE_ASSET_EXTENSIONS:
                        logger.info("{path} is unsupported; skipping.".format(
                            path=item_system_path))
                        continue

                    logger.info("{} {} {}".format(item_system_path,
                                                  item.system, item.path))

                    try:
                        meta = get_metadata_using_service_account(
                            tenant_id, item.system, item.path)
                    except MissingServiceAccount:
                        logger.error(
                            "No service account. Unable to get metadata for {}:{}"
                            .format(item.system, item.path))
                        # NOTE(review): returns {} while every other exit path
                        # returns None — callers appear to ignore the return
                        # value; confirm
                        return {}

                    logger.debug(
                        "metadata from service account for file:{} : {}".
                        format(item_system_path, meta))

                    if not meta:
                        logger.info("No metadata for {}; skipping file".format(
                            item_system_path))
                        continue
                    geolocation = meta.get("geolocation")
                    if not geolocation:
                        logger.info("No geolocation for:{}; skipping".format(
                            item_system_path))
                        continue
                    lat, lon = _parse_rapid_geolocation(geolocation)
                    tmpFile = client.getFile(systemId, item.path)
                    # create a placeholder point feature at the parsed
                    # location, then attach the file to it as an asset
                    feat = FeaturesService.fromLatLng(projectId, lat, lon, {})
                    feat.properties = meta
                    db_session.add(feat)
                    tmpFile.filename = Path(item.path).name
                    try:
                        FeaturesService.createFeatureAsset(
                            projectId,
                            feat.id,
                            tmpFile,
                            original_path=item_system_path)
                    except:
                        # remove newly-created placeholder feature if we fail to create an asset
                        FeaturesService.delete(feat.id)
                        raise RuntimeError("Unable to create feature asset")
                    NotificationsService.create(
                        user, "success",
                        "Imported {f}".format(f=item_system_path))
                    tmpFile.close()
                elif item.path.suffix.lower().lstrip(
                        '.') in FeaturesService.ALLOWED_GEOSPATIAL_EXTENSIONS:
                    logger.info("importing:{} for user:{}".format(
                        item_system_path, user.username))
                    tmpFile = client.getFile(systemId, item.path)
                    tmpFile.filename = Path(item.path).name
                    # sidecar files (e.g. shapefile companions) in the same dir
                    additional_files = get_additional_files(
                        systemId, item.path, client, filenames_in_directory)
                    FeaturesService.fromFileObj(
                        projectId,
                        tmpFile, {},
                        original_path=item_system_path,
                        additional_files=additional_files)
                    NotificationsService.create(
                        user, "success",
                        "Imported {f}".format(f=item_system_path))
                    tmpFile.close()
                else:
                    continue
                # Save the row in the database that marks this file as already imported so it doesn't get added again
                targetFile = ImportsService.createImportedFile(
                    projectId, systemId, str(item.path), item.lastModified)
                db_session.add(targetFile)
                db_session.commit()

            except Exception as e:
                # a failed file is logged/notified and skipped; the loop goes on
                db_session.rollback()
                logger.error(
                    "Could not import for user:{} from agave:{}/{}".format(
                        user.username, systemId, path))
                NotificationsService.create(
                    user, "error",
                    "Error importing {f}".format(f=item_system_path))
                logger.exception(e)
                continue
Example #24
0
def import_point_clouds_from_agave(userId: int, files, pointCloudId: int):
    """
    Import point cloud files from Agave/Tapis and run the potree conversion.

    Each entry in *files* (dicts with "system" and "path" keys) is fetched,
    staged in the point cloud's originals directory and checked. If any file
    fails, every file staged by this call is removed, the task is marked
    FAILED and the user is notified. When all files import cleanly, the
    potree converter is run synchronously and the user is notified of the
    outcome.

    :param userId: int id of the user whose jwt is used for Agave calls
    :param files: iterable of dicts with "system" and "path" keys
    :param pointCloudId: int point cloud to import into
    """
    user = db_session.query(User).get(userId)
    client = AgaveUtils(user.jwt)

    point_cloud = pointcloud.PointCloudService.get(pointCloudId)
    celery_task_id = celery_uuid()

    # tracking task for the whole import + conversion
    task = Task()
    task.process_id = celery_task_id
    task.status = "RUNNING"

    point_cloud.task = task
    db_session.add(point_cloud)

    new_asset_files = []
    failed_message = None
    for file in files:
        _update_point_cloud_task(pointCloudId,
                                 description="Importing file ({}/{})".format(
                                     len(new_asset_files) + 1, len(files)))

        # NOTE(review): relies on _update_point_cloud_task having updated this
        # same Task instance (via the session identity map) so that
        # task.description here is the message just set — confirm
        NotificationsService.create(user, "success", task.description)

        system_id = file["system"]
        path = file["path"]

        try:
            tmp_file = client.getFile(system_id, path)
            tmp_file.filename = Path(path).name
            file_path = pointcloud.PointCloudService.putPointCloudInOriginalsFileDir(
                point_cloud.path, tmp_file, tmp_file.filename)
            tmp_file.close()

            # save file path as we might need to delete it if there is a problem
            new_asset_files.append(file_path)

            # check if file is okay
            check_point_cloud.apply(args=[file_path], throw=True)

        except InvalidCoordinateReferenceSystem:
            logger.error("Could not import point cloud file due to missing"
                         " coordinate reference system: {}:{}".format(
                             system_id, path))
            failed_message = 'Error importing {}: missing coordinate reference system'.format(
                path)
        except Exception as e:
            logger.error(
                "Could not import point cloud file for user:{} from tapis: {}/{} : {}"
                .format(user.username, system_id, path, e))
            failed_message = 'Unknown error importing {}:{}'.format(
                system_id, path)

        # a single bad file aborts the whole import: remove everything staged
        # by this call, mark the task FAILED, notify and stop
        if failed_message:
            for file_path in new_asset_files:
                logger.info("removing {}".format(file_path))
                os.remove(file_path)
            _update_point_cloud_task(pointCloudId,
                                     description=failed_message,
                                     status="FAILED")
            NotificationsService.create(user, "error", failed_message)
            return

    _update_point_cloud_task(pointCloudId,
                             description="Running potree converter",
                             status="RUNNING")

    point_cloud.files_info = json.dumps(get_point_cloud_info(pointCloudId))
    try:
        db_session.add(point_cloud)
        db_session.add(task)
        db_session.commit()
    except:
        db_session.rollback()
        raise
    NotificationsService.create(
        user, "success",
        "Running potree converter (for point cloud {}).".format(pointCloudId))

    try:
        # run the conversion synchronously under the pre-assigned task id
        convert_to_potree.apply(args=[pointCloudId],
                                task_id=celery_task_id,
                                throw=True)
        NotificationsService.create(
            user, "success",
            "Completed potree converter (for point cloud {}).".format(
                pointCloudId))
    except:
        logger.exception("point cloud:{} conversion failed for user:{}".format(
            pointCloudId, user.username))
        _update_point_cloud_task(pointCloudId, description="", status="FAILED")
        NotificationsService.create(
            user, "error",
            "Processing failed for point cloud ({})!".format(pointCloudId))
        return
Example #25
0
def public_projects_fixture(projects_fixture):
    # flip the shared project fixture to public for the duration of the test
    projects_fixture.public = True
    db_session.add(projects_fixture)
    db_session.commit()
    yield projects_fixture
Example #26
0
def convert_to_potree(self, pointCloudId: int) -> None:
    """
    Use the potree converter to convert a LAS/LAZ file to potree format

    On success the processed output replaces the point cloud's feature asset,
    the feature geometry is set to the 2D bounding box of the inputs, and the
    task is marked FINISHED.

    :param self: bound task instance — presumably a celery task; TODO confirm
    :param pointCloudId: int
    :return: None
    """
    from geoapi.models import Feature, FeatureAsset
    from geoapi.services.point_cloud import PointCloudService

    point_cloud = PointCloudService.get(pointCloudId)

    # originals dir is the converter input; processed dir is a temp output
    # that gets moved into the asset path at the end
    path_to_original_point_clouds = get_asset_path(
        point_cloud.path, PointCloudService.ORIGINAL_FILES_DIR)
    path_temp_processed_point_cloud_path = get_asset_path(
        point_cloud.path, PointCloudService.PROCESSED_DIR)

    # only files with recognized lidar extensions are considered
    input_files = [
        get_asset_path(path_to_original_point_clouds, file)
        for file in os.listdir(path_to_original_point_clouds)
        if pathlib.Path(file).suffix.lstrip('.').lower() in
        PointCloudService.LIDAR_FILE_EXTENSIONS
    ]

    # 2D bounding box of all inputs; becomes the feature geometry below
    outline = get_bounding_box_2d(input_files)

    command = [
        "PotreeConverter", "--verbose", "-i", path_to_original_point_clouds,
        "-o", path_temp_processed_point_cloud_path, "--overwrite",
        "--generate-page", "index"
    ]
    # append any user-supplied converter flags
    if point_cloud.conversion_parameters:
        command.extend(point_cloud.conversion_parameters.split())
    logger.info("Processing point cloud (#{}):  {}".format(
        pointCloudId, " ".join(command)))
    subprocess.run(command, check=True, capture_output=True, text=True)

    # Create preview viewer html (with no menu and no nsf logo)
    with open(
            os.path.join(path_temp_processed_point_cloud_path, "preview.html"),
            'w+') as preview:
        with open(
                os.path.join(path_temp_processed_point_cloud_path,
                             "index.html"), 'r') as viewer:
            content = viewer.read()
            # strip the nsf logo div from the generated page
            content = re.sub(r"<div class=\"nsf_logo\"(.+?)</div>",
                             '',
                             content,
                             flags=re.DOTALL)
            # hide the sidebar toggle in the preview
            content = content.replace("viewer.toggleSidebar()",
                                      "$('.potree_menu_toggle').hide()")
            preview.write(content)

    # reuse the existing feature or create one with a point_cloud asset
    if point_cloud.feature_id:
        feature = point_cloud.feature
    else:
        feature = Feature()
        feature.project_id = point_cloud.project_id

        asset_uuid = uuid.uuid4()
        base_filepath = make_project_asset_dir(point_cloud.project_id)
        asset_path = os.path.join(base_filepath, str(asset_uuid))
        fa = FeatureAsset(uuid=asset_uuid,
                          asset_type="point_cloud",
                          path=get_asset_relative_path(asset_path),
                          feature=feature)
        feature.assets.append(fa)
        point_cloud.feature = feature

    feature.the_geom = from_shape(geometries.convert_3D_2D(outline), srid=4326)
    point_cloud.task.status = "FINISHED"
    point_cloud.task.description = ""

    # replace any previous processed output with the new conversion
    # NOTE(review): assumes assets[0] is the point_cloud asset — confirm
    point_cloud_asset_path = get_asset_path(feature.assets[0].path)
    shutil.rmtree(point_cloud_asset_path, ignore_errors=True)
    shutil.move(path_temp_processed_point_cloud_path, point_cloud_asset_path)

    try:
        db_session.add(point_cloud)
        db_session.add(feature)
        db_session.commit()
    except:
        db_session.rollback()
        raise