Example #1
def create(user: User, status: str, message: str) -> Notification:
    note = Notification(username=user.username,
                        tenant_id=user.tenant_id,
                        status=status,
                        message=message)
    try:
        db_session.add(note)
        db_session.commit()
        return note
    except Exception:
        db_session.rollback()
        raise
Example #2
def _update_point_cloud_task(pointCloudId: int,
                             description: Optional[str] = None,
                             status: Optional[str] = None):
    task = pointcloud.PointCloudService.get(pointCloudId).task
    if description is not None:
        task.description = description
    if status is not None:
        task.status = status
    try:
        db_session.add(task)
        db_session.commit()
    except Exception:
        db_session.rollback()
        raise
Example #3
    def makeObservable(proj: Project, user: User, watch_content: bool):
        """
        Makes a project an observable project
        Requires project's system_path, system_id, tenant_id to exist
        :param proj: Project
        :param user: User
        :param watch_content: bool
        :return: None
        """
        folder_name = Path(proj.system_path).name
        name = proj.system_id + '/' + folder_name

        # TODO: Handle no storage system found
        system = AgaveUtils(user.jwt).systemsGet(proj.system_id)

        obs = ObservableDataProject(system_id=proj.system_id,
                                    path=proj.system_path,
                                    watch_content=watch_content)

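        # Sync the project's users with the users listed in the storage system's roles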
        users = get_system_users(proj.tenant_id, user.jwt, proj.system_id)
        logger.info(
            "Updating project:{} to have the following users: {}".format(
                name, users))
        project_users = [
            UserService.getOrCreateUser(u, tenant=proj.tenant_id)
            for u in users
        ]
        proj.users = project_users

        obs.project = proj

        try:
            db_session.add(obs)
            db_session.commit()
        except IntegrityError:
            db_session.rollback()
            logger.exception(
                "User:{} tried to create an observable project that already exists: '{}'"
                .format(user.username, name))
            raise ObservableProjectAlreadyExists(
                "'{}' project already exists".format(name))

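        # Queue an initial asynchronous import of the directory contents when watching is enabled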
        if watch_content:
            import_from_agave.apply_async(args=[
                obs.project.tenant_id, user.id, obs.system_id, obs.path,
                obs.project_id
            ])
Example #4
def import_file_from_agave(userId: int, systemId: str, path: str,
                           projectId: int):
    user = db_session.query(User).get(userId)
    client = AgaveUtils(user.jwt)
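    # Download the file, create a feature from it, and notify the user of the outcome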
    try:
        tmpFile = client.getFile(systemId, path)
        tmpFile.filename = Path(path).name
        additional_files = get_additional_files(systemId, path, client)
        FeaturesService.fromFileObj(projectId,
                                    tmpFile, {},
                                    original_path=path,
                                    additional_files=additional_files)
        NotificationsService.create(user, "success",
                                    "Imported {f}".format(f=path))
        tmpFile.close()
    except Exception:
        db_session.rollback()
        logger.exception("Could not import file from agave: {} :: {}".format(
            systemId, path))
        NotificationsService.create(user, "error",
                                    "Error importing {f}".format(f=path))
        raise
Example #5
def refresh_observable_projects():
    try:
        obs = db_session.query(ObservableDataProject).all()
        for i, o in enumerate(obs, start=1):
            # we need a user with a jwt for importing
            importing_user = next((u for u in o.project.users if u.jwt))
            logger.info(
                "Refreshing observable project ({}/{}): observer:{} system:{} path:{}"
                .format(i, len(obs), importing_user, o.system_id, o.path))
            current_user_names = {u.username for u in o.project.users}

            # we need to add any users who have been added to the system roles
            # (note that we do not delete any that are no longer listed on system roles; we only add users)
            system_users = set(
                get_system_users(o.project.tenant_id, importing_user.jwt,
                                 o.system_id))
            updated_user_names = system_users.union(current_user_names)
            if updated_user_names != current_user_names:
                logger.info("Updating to add the following users:{}   "
                            "Updated user list is now: {}".format(
                                updated_user_names - current_user_names,
                                updated_user_names))
                o.project.users = [
                    UserService.getOrCreateUser(u, tenant=o.project.tenant_id)
                    for u in updated_user_names
                ]
                db_session.add(o)
                db_session.commit()

            # perform the importing
            if o.watch_content:
                import_from_agave(o.project.tenant_id, importing_user.id,
                                  o.system_id, o.path, o.project.id)
    except Exception:
        logger.exception(
            "Unhandled exception when importing observable project")
        db_session.rollback()
Example #6
def import_from_agave(tenant_id: str, userId: int, systemId: str, path: str,
                      projectId: int):
    user = db_session.query(User).get(userId)
    client = AgaveUtils(user.jwt)
    logger.info("Importing for project:{} directory:{}/{} for user:{}".format(
        projectId, systemId, path, user.username))
    listing = client.listing(systemId, path)
    # First item is always a reference to self
    files_in_directory = listing[1:]
    filenames_in_directory = [str(f.path) for f in files_in_directory]
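    # Walk the listing: recurse into subdirectories (skipping .Trash) and import supported files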
    for item in files_in_directory:
        if item.type == "dir" and not str(item.path).endswith("/.Trash"):
            import_from_agave(tenant_id, userId, systemId, item.path,
                              projectId)
        # skip any junk files that are not allowed
        if item.path.suffix.lower().lstrip(
                '.') not in FeaturesService.ALLOWED_EXTENSIONS:
            continue
        else:
            try:
                # first check if there already is a file in the DB
                item_system_path = os.path.join(item.system,
                                                str(item.path).lstrip("/"))
                targetFile = ImportsService.getImport(projectId, systemId,
                                                      str(item.path))
                if targetFile:
                    logger.info("Already imported {}".format(item_system_path))
                    continue

                # If it's a RApp project folder, grab the metadata from the tapis meta service
                if is_member_of_rapp_project_folder(item_system_path):
                    logger.info("RApp: importing:{} for user:{}".format(
                        item_system_path, user.username))
                    if item.path.suffix.lower().lstrip(
                            '.'
                    ) not in FeaturesService.ALLOWED_GEOSPATIAL_FEATURE_ASSET_EXTENSIONS:
                        logger.info("{path} is unsupported; skipping.".format(
                            path=item_system_path))
                        continue

                    logger.info("{} {} {}".format(item_system_path,
                                                  item.system, item.path))

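                    # Fetch the file's metadata via the tenant's service account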
                    try:
                        meta = get_metadata_using_service_account(
                            tenant_id, item.system, item.path)
                    except MissingServiceAccount:
                        logger.error(
                            "No service account. Unable to get metadata for {}:{}"
                            .format(item.system, item.path))
                        return {}

                    logger.debug(
                        "metadata from service account for file:{} : {}".
                        format(item_system_path, meta))

                    if not meta:
                        logger.info("No metadata for {}; skipping file".format(
                            item_system_path))
                        continue
                    geolocation = meta.get("geolocation")
                    if not geolocation:
                        logger.info("No geolocation for:{}; skipping".format(
                            item_system_path))
                        continue
                    lat, lon = _parse_rapid_geolocation(geolocation)
                    tmpFile = client.getFile(systemId, item.path)
                    feat = FeaturesService.fromLatLng(projectId, lat, lon, {})
                    feat.properties = meta
                    db_session.add(feat)
                    tmpFile.filename = Path(item.path).name
                    try:
                        FeaturesService.createFeatureAsset(
                            projectId,
                            feat.id,
                            tmpFile,
                            original_path=item_system_path)
                    except Exception:
                        # remove newly-created placeholder feature if we fail to create an asset
                        FeaturesService.delete(feat.id)
                        raise RuntimeError("Unable to create feature asset")
                    NotificationsService.create(
                        user, "success",
                        "Imported {f}".format(f=item_system_path))
                    tmpFile.close()
                elif item.path.suffix.lower().lstrip(
                        '.') in FeaturesService.ALLOWED_GEOSPATIAL_EXTENSIONS:
                    logger.info("importing:{} for user:{}".format(
                        item_system_path, user.username))
                    tmpFile = client.getFile(systemId, item.path)
                    tmpFile.filename = Path(item.path).name
                    additional_files = get_additional_files(
                        systemId, item.path, client, filenames_in_directory)
                    FeaturesService.fromFileObj(
                        projectId,
                        tmpFile, {},
                        original_path=item_system_path,
                        additional_files=additional_files)
                    NotificationsService.create(
                        user, "success",
                        "Imported {f}".format(f=item_system_path))
                    tmpFile.close()
                else:
                    continue
                # Save the row in the database that marks this file as already imported so it doesn't get added again
                targetFile = ImportsService.createImportedFile(
                    projectId, systemId, str(item.path), item.lastModified)
                db_session.add(targetFile)
                db_session.commit()

            except Exception:
                db_session.rollback()
                logger.exception(
                    "Could not import for user:{} from agave:{}/{}".format(
                        user.username, systemId, path))
                NotificationsService.create(
                    user, "error",
                    "Error importing {f}".format(f=item_system_path))
                continue
Example #7
def import_point_clouds_from_agave(userId: int, files, pointCloudId: int):
    user = db_session.query(User).get(userId)
    client = AgaveUtils(user.jwt)

    point_cloud = pointcloud.PointCloudService.get(pointCloudId)
    celery_task_id = celery_uuid()

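    # Track progress in a Task row tied to the pre-generated celery task id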
    task = Task()
    task.process_id = celery_task_id
    task.status = "RUNNING"

    point_cloud.task = task
    db_session.add(point_cloud)

    new_asset_files = []
    failed_message = None
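    # Stage each requested file; any failure aborts the import and removes files staged so far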
    for file in files:
        _update_point_cloud_task(pointCloudId,
                                 description="Importing file ({}/{})".format(
                                     len(new_asset_files) + 1, len(files)))

        NotificationsService.create(user, "success", task.description)

        system_id = file["system"]
        path = file["path"]

        try:
            tmp_file = client.getFile(system_id, path)
            tmp_file.filename = Path(path).name
            file_path = pointcloud.PointCloudService.putPointCloudInOriginalsFileDir(
                point_cloud.path, tmp_file, tmp_file.filename)
            tmp_file.close()

            # save file path as we might need to delete it if there is a problem
            new_asset_files.append(file_path)

            # check if file is okay
            check_point_cloud.apply(args=[file_path], throw=True)

        except InvalidCoordinateReferenceSystem:
            logger.error("Could not import point cloud file due to missing"
                         " coordinate reference system: {}:{}".format(
                             system_id, path))
            failed_message = 'Error importing {}: missing coordinate reference system'.format(
                path)
        except Exception as e:
            logger.error(
                "Could not import point cloud file for user:{} from tapis: {}/{} : {}"
                .format(user.username, system_id, path, e))
            failed_message = 'Unknown error importing {}:{}'.format(
                system_id, path)

        if failed_message:
            for file_path in new_asset_files:
                logger.info("removing {}".format(file_path))
                os.remove(file_path)
            _update_point_cloud_task(pointCloudId,
                                     description=failed_message,
                                     status="FAILED")
            NotificationsService.create(user, "error", failed_message)
            return

    _update_point_cloud_task(pointCloudId,
                             description="Running potree converter",
                             status="RUNNING")

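    # Record file info and persist the task state before launching the converter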
    point_cloud.files_info = json.dumps(get_point_cloud_info(pointCloudId))
    try:
        db_session.add(point_cloud)
        db_session.add(task)
        db_session.commit()
    except Exception:
        db_session.rollback()
        raise
    NotificationsService.create(
        user, "success",
        "Running potree converter (for point cloud {}).".format(pointCloudId))

    try:
        convert_to_potree.apply(args=[pointCloudId],
                                task_id=celery_task_id,
                                throw=True)
        NotificationsService.create(
            user, "success",
            "Completed potree converter (for point cloud {}).".format(
                pointCloudId))
    except Exception:
        logger.exception("point cloud:{} conversion failed for user:{}".format(
            pointCloudId, user.username))
        _update_point_cloud_task(pointCloudId, description="", status="FAILED")
        NotificationsService.create(
            user, "error",
            "Processing failed for point cloud ({})!".format(pointCloudId))
        return
Example #8
def convert_to_potree(self, pointCloudId: int) -> None:
    """
    Use the potree converter to convert a LAS/LAZ file to potree format
    :param pointCloudId: int
    :return: None
    """
    from geoapi.models import Feature, FeatureAsset
    from geoapi.services.point_cloud import PointCloudService

    point_cloud = PointCloudService.get(pointCloudId)

    path_to_original_point_clouds = get_asset_path(
        point_cloud.path, PointCloudService.ORIGINAL_FILES_DIR)
    path_temp_processed_point_cloud_path = get_asset_path(
        point_cloud.path, PointCloudService.PROCESSED_DIR)

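    # Gather the original lidar files to convert, filtering by allowed extensions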
    input_files = [
        get_asset_path(path_to_original_point_clouds, file)
        for file in os.listdir(path_to_original_point_clouds)
        if pathlib.Path(file).suffix.lstrip('.').lower() in
        PointCloudService.LIDAR_FILE_EXTENSIONS
    ]

    outline = get_bounding_box_2d(input_files)

    command = [
        "PotreeConverter", "--verbose", "-i", path_to_original_point_clouds,
        "-o", path_temp_processed_point_cloud_path, "--overwrite",
        "--generate-page", "index"
    ]
    if point_cloud.conversion_parameters:
        command.extend(point_cloud.conversion_parameters.split())
    logger.info("Processing point cloud (#{}):  {}".format(
        pointCloudId, " ".join(command)))
    subprocess.run(command, check=True, capture_output=True, text=True)

    # Create the preview viewer html (with no menu and no NSF logo)
    with open(
            os.path.join(path_temp_processed_point_cloud_path, "preview.html"),
            'w+') as preview:
        with open(
                os.path.join(path_temp_processed_point_cloud_path,
                             "index.html"), 'r') as viewer:
            content = viewer.read()
            content = re.sub(r"<div class=\"nsf_logo\"(.+?)</div>",
                             '',
                             content,
                             flags=re.DOTALL)
            content = content.replace("viewer.toggleSidebar()",
                                      "$('.potree_menu_toggle').hide()")
            preview.write(content)

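    # Reuse the point cloud's existing feature, or create a new feature with a point_cloud asset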
    if point_cloud.feature_id:
        feature = point_cloud.feature
    else:
        feature = Feature()
        feature.project_id = point_cloud.project_id

        asset_uuid = uuid.uuid4()
        base_filepath = make_project_asset_dir(point_cloud.project_id)
        asset_path = os.path.join(base_filepath, str(asset_uuid))
        fa = FeatureAsset(uuid=asset_uuid,
                          asset_type="point_cloud",
                          path=get_asset_relative_path(asset_path),
                          feature=feature)
        feature.assets.append(fa)
        point_cloud.feature = feature

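    # Store the 2D bounding box as the feature geometry and mark the conversion task finished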
    feature.the_geom = from_shape(geometries.convert_3D_2D(outline), srid=4326)
    point_cloud.task.status = "FINISHED"
    point_cloud.task.description = ""

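    # Swap the freshly converted output into the feature's asset directory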
    point_cloud_asset_path = get_asset_path(feature.assets[0].path)
    shutil.rmtree(point_cloud_asset_path, ignore_errors=True)
    shutil.move(path_temp_processed_point_cloud_path, point_cloud_asset_path)

    try:
        db_session.add(point_cloud)
        db_session.add(feature)
        db_session.commit()
    except Exception:
        db_session.rollback()
        raise