Example #1
def _read(self):
    '''
    Read the project from the given project directory. Checks that the item
    and annotation directories exist and that the dataset is not empty.
    Consistency checks: every video must have an annotation, and the
    correspondence must be one-to-one.
    '''
    super(VideoProject, self)._read()
    self._key_id_map = KeyIdMap()
    self._key_id_map.load_json(self._get_key_id_map_path())
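
For orientation, _read is not called directly; it runs when an existing project
directory is opened. A minimal sketch, assuming the directory already holds a
downloaded project and using OpenMode.READ as the counterpart of the
OpenMode.CREATE mode seen in the download examples below:

# Hypothetical path; opening in READ mode triggers _read(), which loads
# the meta, the datasets, and the key_id_map from disk.
project_fs = VideoProject('/data/video_project', OpenMode.READ)
print(project_fs.meta)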
Example #2
def append(self,
           dataset_id,
           ann: PointcloudEpisodeAnnotation,
           frame_to_pointcloud_ids,
           key_id_map: KeyIdMap = None):
    if key_id_map is None:
        # create for internal purposes (to link figures and tags to objects)
        key_id_map = KeyIdMap()

    figures = []
    pointcloud_ids = []
    for i, frame in enumerate(ann.frames):
        pointcloud_id = frame_to_pointcloud_ids.get(i)
        if pointcloud_id is None:  # skip unmapped frames
            continue
        for fig in frame.figures:
            figures.append(fig)
            pointcloud_ids.append(pointcloud_id)

    self._api.pointcloud_episode.object.append_to_dataset(
        dataset_id, ann.objects, key_id_map)
    self._api.pointcloud_episode.figure.append_to_dataset(
        dataset_id, figures, pointcloud_ids, key_id_map)
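
A hypothetical call site for this method, assuming it is exposed as
api.pointcloud_episode.annotation.append (Example #4 below calls it that way)
and that pcl_infos came back from api.pointcloud_episode.upload_paths:

# Map frame indices to the ids of already-uploaded point clouds, then
# attach the whole episode annotation in one call; key_id_map is created
# internally when omitted.
frame_to_pcl_ids = {info.frame: info.id for info in pcl_infos}
api.pointcloud_episode.annotation.append(dataset_id, episode_annotation,
                                         frame_to_pcl_ids)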
Example #3
def download_pointcloud_episode_project(api,
                                        project_id,
                                        dest_dir,
                                        dataset_ids=None,
                                        download_pcd=True,
                                        download_related_images=True,
                                        download_annotations=True,
                                        log_progress=False,
                                        batch_size=10):
    key_id_map = KeyIdMap()
    project_fs = PointcloudEpisodeProject(dest_dir, OpenMode.CREATE)
    meta = ProjectMeta.from_json(api.project.get_meta(project_id))
    project_fs.set_meta(meta)

    datasets_infos = []
    if dataset_ids is not None:
        for ds_id in dataset_ids:
            datasets_infos.append(api.dataset.get_info_by_id(ds_id))
    else:
        datasets_infos = api.dataset.get_list(project_id)

    for dataset in datasets_infos:
        dataset_fs = project_fs.create_dataset(dataset.name)
        pointclouds = api.pointcloud_episode.get_list(dataset.id)

        if download_annotations:
            # Download annotation to project_path/dataset_path/annotation.json
            ann_json = api.pointcloud_episode.annotation.download(dataset.id)
            annotation = dataset_fs.annotation_class.from_json(
                ann_json, meta, key_id_map)
            dataset_fs.set_ann(annotation)

            # frames --> pointcloud mapping to project_path/dataset_path/frame_pointcloud_map.json
            frame_name_map = api.pointcloud_episode.get_frame_name_map(
                dataset.id)
            frame_pointcloud_map_path = dataset_fs.get_frame_pointcloud_map_path()
            dump_json_file(frame_name_map, frame_pointcloud_map_path)

        # Download data
        ds_progress = None
        if log_progress:
            ds_progress = Progress(
                'Downloading dataset: {!r}'.format(dataset.name),
                total_cnt=len(pointclouds))

        for batch in batched(pointclouds, batch_size=batch_size):
            pointcloud_ids = [pointcloud_info.id for pointcloud_info in batch]
            pointcloud_names = [
                pointcloud_info.name for pointcloud_info in batch
            ]

            for pointcloud_id, pointcloud_name in zip(pointcloud_ids,
                                                      pointcloud_names):
                pointcloud_file_path = dataset_fs.generate_item_path(
                    pointcloud_name)
                if download_pcd:
                    api.pointcloud_episode.download_path(
                        pointcloud_id, pointcloud_file_path)
                else:
                    touch(pointcloud_file_path)

                if download_related_images:
                    related_images_path = dataset_fs.get_related_images_path(
                        pointcloud_name)
                    related_images = api.pointcloud_episode.get_list_related_images(
                        pointcloud_id)
                    for rimage_info in related_images:
                        name = rimage_info[ApiField.NAME]
                        rimage_id = rimage_info[ApiField.ID]

                        path_img = os.path.join(related_images_path, name)
                        path_json = os.path.join(related_images_path,
                                                 name + ".json")

                        api.pointcloud_episode.download_related_image(
                            rimage_id, path_img)
                        dump_json_file(rimage_info, path_json)

                dataset_fs.add_item_file(pointcloud_name,
                                         pointcloud_file_path,
                                         _validate_item=False)
            if log_progress:
                ds_progress.iters_done_report(len(batch))

    project_fs.set_key_id_map(key_id_map)
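
A minimal sketch of calling this downloader. The ids and paths are
hypothetical, and it assumes the usual pattern of building the api object from
environment credentials with sly.Api.from_env():

import supervisely_lib as sly

api = sly.Api.from_env()  # assumes server address and token are in the env

download_pointcloud_episode_project(api,
                                    project_id=123,  # hypothetical id
                                    dest_dir='/data/episodes',
                                    download_pcd=True,
                                    download_related_images=True,
                                    download_annotations=True,
                                    log_progress=True)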
Example #4
def upload_pointcloud_episode_project(directory,
                                      api,
                                      workspace_id,
                                      project_name=None,
                                      log_progress=False):
    # STEP 0 — create project remotely
    project_locally = PointcloudEpisodeProject.read_single(directory)
    project_name = project_locally.name if project_name is None else project_name

    if api.project.exists(workspace_id, project_name):
        project_name = api.project.get_free_name(workspace_id, project_name)

    project_remotely = api.project.create(workspace_id, project_name,
                                          ProjectType.POINT_CLOUD_EPISODES)
    api.project.update_meta(project_remotely.id,
                            project_locally.meta.to_json())

    uploaded_objects = KeyIdMap()
    for dataset_locally in project_locally.datasets:
        ann_json_path = dataset_locally.get_ann_path()

        if os.path.isfile(ann_json_path):
            ann_json = load_json_file(ann_json_path)
            episode_annotation = PointcloudEpisodeAnnotation.from_json(
                ann_json, project_locally.meta)
        else:
            episode_annotation = PointcloudEpisodeAnnotation()

        dataset_remotely = api.dataset.create(
            project_remotely.id,
            dataset_locally.name,
            description=episode_annotation.description,
            change_name_if_conflict=True)

        # STEP 1 — upload episodes
        items_infos = {'names': [], 'paths': [], 'metas': []}

        for item_name in dataset_locally:
            item_path, related_images_dir = dataset_locally.get_item_paths(
                item_name)
            frame_idx = dataset_locally.get_frame_idx(item_name)

            item_meta = {"frame": frame_idx}

            items_infos['names'].append(item_name)
            items_infos['paths'].append(item_path)
            items_infos['metas'].append(item_meta)

        ds_progress = Progress(
            'Uploading pointclouds: {!r}'.format(dataset_remotely.name),
            total_cnt=len(dataset_locally)) if log_progress else None
        pcl_infos = api.pointcloud_episode.upload_paths(
            dataset_remotely.id,
            names=items_infos['names'],
            paths=items_infos['paths'],
            metas=items_infos['metas'],
            progress_cb=ds_progress.iters_done_report
            if log_progress else None)

        # STEP 2 — upload annotations
        frame_to_pcl_ids = {
            pcl_info.frame: pcl_info.id
            for pcl_info in pcl_infos
        }
        api.pointcloud_episode.annotation.append(dataset_remotely.id,
                                                 episode_annotation,
                                                 frame_to_pcl_ids,
                                                 uploaded_objects)

        # STEP 3 — upload photo context
        img_infos = {'img_paths': [], 'img_metas': []}

        # STEP 3.1 — upload images
        for pcl_info in pcl_infos:
            related_items = dataset_locally.get_related_images(pcl_info.name)
            images_paths_for_frame = [
                img_path for img_path, _ in related_items
            ]

            img_infos['img_paths'].extend(images_paths_for_frame)

        img_progress = Progress(
            'Uploading photo context: {!r}'.format(dataset_remotely.name),
            total_cnt=len(img_infos['img_paths'])) if log_progress else None

        images_hashes = api.pointcloud_episode.upload_related_images(
            img_infos['img_paths'],
            progress_cb=img_progress.iters_done_report
            if log_progress else None)

        # STEP 3.2 — upload images metas
        images_hashes_iterator = iter(images_hashes)
        for pcl_info in pcl_infos:
            related_items = dataset_locally.get_related_images(pcl_info.name)

            for _, meta_json in related_items:
                img_hash = next(images_hashes_iterator)
                img_infos['img_metas'].append({
                    ApiField.ENTITY_ID: pcl_info.id,
                    ApiField.NAME: meta_json[ApiField.NAME],
                    ApiField.HASH: img_hash,
                    ApiField.META: meta_json[ApiField.META]
                })

        api.pointcloud_episode.add_related_images(img_infos['img_metas'])

    return project_remotely.id, project_remotely.name
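
A sketch of the reverse direction, assuming /data/episodes holds a project
produced by download_pointcloud_episode_project above; the workspace id is
hypothetical:

project_id, project_name = upload_pointcloud_episode_project(
    '/data/episodes',
    api,
    workspace_id=42,               # hypothetical workspace
    project_name='episodes-copy',  # renamed automatically on conflict
    log_progress=True)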
Example #5
def download_video_project(api,
                           project_id,
                           dest_dir,
                           dataset_ids=None,
                           download_videos=True,
                           log_progress=False):
    '''
    Download the project with the given id into the destination directory.
    :param api: Api class object
    :param project_id: int
    :param dest_dir: str
    :param dataset_ids: list of integers
    :param download_videos: bool
    :param log_progress: bool
    '''
    LOG_BATCH_SIZE = 1

    key_id_map = KeyIdMap()

    project_fs = VideoProject(dest_dir, OpenMode.CREATE)

    meta = ProjectMeta.from_json(api.project.get_meta(project_id))
    project_fs.set_meta(meta)

    datasets_infos = []
    if dataset_ids is not None:
        for ds_id in dataset_ids:
            datasets_infos.append(api.dataset.get_info_by_id(ds_id))
    else:
        datasets_infos = api.dataset.get_list(project_id)

    for dataset in datasets_infos:
        dataset_fs = project_fs.create_dataset(dataset.name)
        videos = api.video.get_list(dataset.id)

        ds_progress = None
        if log_progress:
            ds_progress = Progress(
                'Downloading dataset: {!r}'.format(dataset.name),
                total_cnt=len(videos))
        for batch in batched(videos, batch_size=LOG_BATCH_SIZE):
            video_ids = [video_info.id for video_info in batch]
            video_names = [video_info.name for video_info in batch]

            ann_jsons = api.video.annotation.download_bulk(
                dataset.id, video_ids)

            for video_id, video_name, ann_json in zip(video_ids, video_names,
                                                      ann_jsons):
                if video_name != ann_json[ApiField.VIDEO_NAME]:
                    raise RuntimeError(
                        "Error in api.video.annotation.download_batch: broken order"
                    )

                video_file_path = dataset_fs.generate_item_path(video_name)
                if download_videos:
                    api.video.download_path(video_id, video_file_path)
                else:
                    touch(video_file_path)

                dataset_fs.add_item_file(video_name,
                                         video_file_path,
                                         ann=VideoAnnotation.from_json(
                                             ann_json, project_fs.meta,
                                             key_id_map),
                                         _validate_item=False)

            if ds_progress is not None:
                ds_progress.iters_done_report(len(batch))

    project_fs.set_key_id_map(key_id_map)
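
One plausible call, with hypothetical ids, that fetches the annotations of two
datasets while skipping the heavy video files (empty placeholders are touched
instead):

download_video_project(api,
                       project_id=123,          # hypothetical id
                       dest_dir='/data/videos',
                       dataset_ids=[456, 457],  # hypothetical dataset ids
                       download_videos=False,
                       log_progress=True)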
Example #6
def _create(self):
    '''
    Creates a leaf directory and an empty meta.json file. Raises an error if
    the project directory already exists and is not empty.
    '''
    super()._create()
    self.set_key_id_map(KeyIdMap())
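
Like _read, _create is invoked indirectly: it runs when a project is opened in
create mode, as the download helpers above do. A minimal sketch with a
hypothetical path:

# Opening in CREATE mode triggers _create(), which writes meta.json and
# an empty key_id_map into the new directory.
project_fs = VideoProject('/data/new_project', OpenMode.CREATE)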
Example #7
def download_pointcloud_project(api,
                                project_id,
                                dest_dir,
                                dataset_ids=None,
                                download_items=True,
                                log_progress=False):
    LOG_BATCH_SIZE = 1

    key_id_map = KeyIdMap()

    project_fs = PointcloudProject(dest_dir, OpenMode.CREATE)

    meta = ProjectMeta.from_json(api.project.get_meta(project_id))
    project_fs.set_meta(meta)

    datasets_infos = []
    if dataset_ids is not None:
        for ds_id in dataset_ids:
            datasets_infos.append(api.dataset.get_info_by_id(ds_id))
    else:
        datasets_infos = api.dataset.get_list(project_id)

    for dataset in datasets_infos:
        dataset_fs = project_fs.create_dataset(dataset.name)
        pointclouds = api.pointcloud.get_list(dataset.id)

        ds_progress = None
        if log_progress:
            ds_progress = Progress(
                'Downloading dataset: {!r}'.format(dataset.name),
                total_cnt=len(pointclouds))
        for batch in batched(pointclouds, batch_size=LOG_BATCH_SIZE):
            pointcloud_ids = [pointcloud_info.id for pointcloud_info in batch]
            pointcloud_names = [
                pointcloud_info.name for pointcloud_info in batch
            ]

            ann_jsons = api.pointcloud.annotation.download_bulk(
                dataset.id, pointcloud_ids)

            for pointcloud_id, pointcloud_name, ann_json in zip(
                    pointcloud_ids, pointcloud_names, ann_jsons):
                if pointcloud_name != ann_json[ApiField.NAME]:
                    raise RuntimeError(
                        "Error in api.video.annotation.download_batch: broken order"
                    )

                pointcloud_file_path = dataset_fs.generate_item_path(
                    pointcloud_name)
                if download_items:
                    api.pointcloud.download_path(pointcloud_id,
                                                 pointcloud_file_path)

                    related_images_path = dataset_fs.get_related_images_path(
                        pointcloud_name)
                    related_images = api.pointcloud.get_list_related_images(
                        pointcloud_id)
                    for rimage_info in related_images:
                        name = rimage_info[ApiField.NAME]

                        if not has_valid_ext(name):
                            # fix cases like '.png.json'
                            new_name = get_file_name(name)
                            if has_valid_ext(new_name):
                                name = new_name
                                rimage_info[ApiField.NAME] = name
                            else:
                                raise RuntimeError(
                                    'Something is wrong with photo context '
                                    'filenames. Please contact support.')

                        rimage_id = rimage_info[ApiField.ID]

                        path_img = os.path.join(related_images_path, name)
                        path_json = os.path.join(related_images_path,
                                                 name + ".json")

                        api.pointcloud.download_related_image(
                            rimage_id, path_img)
                        dump_json_file(rimage_info, path_json)

                else:
                    touch(pointcloud_file_path)

                dataset_fs.add_item_file(pointcloud_name,
                                         pointcloud_file_path,
                                         ann=PointcloudAnnotation.from_json(
                                             ann_json, project_fs.meta,
                                             key_id_map),
                                         _validate_item=False)

            if ds_progress is not None:
                ds_progress.iters_done_report(len(batch))

    project_fs.set_key_id_map(key_id_map)
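
A sketch of calling the point-cloud downloader; the id and path are
hypothetical, and api is assumed to be an authenticated Api instance as in the
earlier examples:

download_pointcloud_project(api,
                            project_id=123,  # hypothetical id
                            dest_dir='/data/pointclouds',
                            download_items=True,
                            log_progress=True)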
Example #8
def upload_pointcloud_project(directory,
                              api,
                              workspace_id,
                              project_name=None,
                              log_progress=False):
    project_fs = PointcloudProject.read_single(directory)
    if project_name is None:
        project_name = project_fs.name

    if api.project.exists(workspace_id, project_name):
        project_name = api.project.get_free_name(workspace_id, project_name)

    project = api.project.create(workspace_id, project_name,
                                 ProjectType.POINT_CLOUDS)
    api.project.update_meta(project.id, project_fs.meta.to_json())

    uploaded_objects = KeyIdMap()
    for dataset_fs in project_fs:
        dataset = api.dataset.create(project.id,
                                     dataset_fs.name,
                                     change_name_if_conflict=True)

        ds_progress = None
        if log_progress:
            ds_progress = Progress('Uploading dataset: {!r}'.format(
                dataset.name),
                                   total_cnt=len(dataset_fs))

        for item_name in dataset_fs:

            item_path, related_images_dir, ann_path = dataset_fs.get_item_paths(
                item_name)
            related_items = dataset_fs.get_related_images(item_name)

            item_meta = {}
            try:
                _, meta = related_items[0]
                timestamp = meta[ApiField.META]['timestamp']
                if timestamp:
                    item_meta = {"timestamp": timestamp}
            except (KeyError, IndexError):
                pass

            pointcloud = api.pointcloud.upload_path(dataset.id, item_name,
                                                    item_path, item_meta)

            # read and validate the item annotation
            ann_json = load_json_file(ann_path)
            ann = PointcloudAnnotation.from_json(ann_json, project_fs.meta)

            # ignore the existing key_id_map because new objects will be created
            api.pointcloud.annotation.append(pointcloud.id, ann,
                                             uploaded_objects)

            # upload related_images if exist
            if len(related_items) != 0:
                rimg_infos = []
                for img_path, meta_json in related_items:
                    img = api.pointcloud.upload_related_image(img_path)[0]
                    rimg_infos.append({
                        ApiField.ENTITY_ID: pointcloud.id,
                        ApiField.NAME: meta_json[ApiField.NAME],
                        ApiField.HASH: img,
                        ApiField.META: meta_json[ApiField.META]
                    })

                api.pointcloud.add_related_images(rimg_infos)
            if log_progress:
                ds_progress.iters_done_report(1)
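
And the matching upload sketch, assuming /data/pointclouds holds a project
produced by download_pointcloud_project above; the workspace id is
hypothetical:

upload_pointcloud_project('/data/pointclouds',
                          api,
                          workspace_id=42,          # hypothetical workspace
                          project_name='pcd-copy',  # renamed on conflict
                          log_progress=True)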