Example #1
0
def add_pointclouds_to_project():
    """Import point-cloud files from a Supervisely import task into a project.

    Reads the task configuration from ``TaskPaths.TASK_CONFIG_PATH``, uploads
    every file with a supported extension by hash into a POINT_CLOUDS project,
    then attaches any "related images" (photo context plus optional JSON meta)
    found under a ``related_images`` directory next to each item.

    Raises:
        RuntimeError: if no file with a supported extension was imported, or a
            related-image JSON meta is malformed.
    """
    task_config = load_json_file(TaskPaths.TASK_CONFIG_PATH)

    task_id = task_config['task_id']
    append_to_existing_project = task_config['append_to_existing_project']

    server_address = task_config['server_address']
    token = task_config['api_token']

    api = sly.Api(server_address, token)

    task_info = api.task.get_info_by_id(task_id)
    api.add_additional_field('taskId', task_id)
    api.add_header('x-task-id', str(task_id))

    workspace_id = task_info["workspaceId"]
    project_name = task_config.get('project_name')
    if project_name is None:
        project_name = task_config["res_names"]["project"]

    # When appending, the target project must already exist with the right type.
    project_info = None
    if append_to_existing_project is True:
        project_info = api.project.get_info_by_name(workspace_id, project_name, expected_type=sly.ProjectType.POINT_CLOUDS, raise_error=True)

    files_list = api.task.get_import_files_list(task_id)

    # Index related images: item_dir -> item_name_processed -> [img or json file info].
    # related_files gives O(1) "is this file a related image?" lookups below.
    related_items_info = {}
    related_files = {}
    for file_info in files_list:
        original_path = file_info["filename"]
        if 'related_images' in original_path:
            related_files[original_path] = file_info
            item_dir = original_path.split('/related_images')[0]
            # The subdirectory name encodes the owning item's file name with
            # dots replaced by "_" (mirrored by item_context_dir below).
            item_name_processed = os.path.basename(os.path.dirname(original_path))
            related_items_info.setdefault(item_dir, {}).setdefault(item_name_processed, []).append(file_info)

    added_items = []
    for file_info in files_list:
        ds_info = None
        original_path = file_info["filename"]
        if original_path in related_files:
            continue  # related images are attached later, not imported as items

        try:
            file_name = sly.fs.get_file_name_with_ext(original_path)
            ext = sly.fs.get_file_ext(original_path)

            file_hash = file_info["hash"]

            if is_valid_ext(ext):
                # Create the project/dataset lazily, only once a valid file is seen.
                if project_info is None:
                    project_info = api.project.create(workspace_id, project_name, type=sly.ProjectType.POINT_CLOUDS, change_name_if_conflict=True)
                if ds_info is None:
                    ds_name = get_dataset_name(original_path)
                    ds_info = api.dataset.get_or_create(project_info.id, ds_name)
                item_name = api.pointcloud.get_free_name(ds_info.id, file_name)
                item_info = api.pointcloud.upload_hash(ds_info.id, item_name, file_hash)
                added_items.append((ds_info, item_info, original_path))
        except Exception as e:
            # Best-effort import: a broken file must not abort the whole task.
            sly.logger.warning("File skipped {!r}: error occurred during processing {!r}".format(original_path, str(e)))

    # Attach related images for all added items.
    for ds_info, item_info, import_path in added_items:
        item_dir = os.path.dirname(import_path)
        item_import_name = sly.fs.get_file_name_with_ext(import_path)
        item_context_dir = item_import_name.replace(".", "_")

        if item_dir not in related_items_info:
            continue
        if item_context_dir not in related_items_info[item_dir]:
            continue

        # Download all context files for this item into a temporary directory.
        files = related_items_info[item_dir][item_context_dir]
        temp_dir = os.path.join(sly.TaskPaths.DATA_DIR, item_context_dir)
        sly.fs.mkdir(temp_dir)
        context_img_to_hash = {}
        for file_import_info in files:
            original_path = file_import_info["filename"]
            save_name = sly.fs.get_file_name_with_ext(original_path)
            api.task.download_import_file(task_id, original_path, os.path.join(temp_dir, save_name))
            context_img_to_hash[os.path.join(temp_dir, save_name)] = file_import_info

        # Pair each downloaded image with its optional "<image>.json" meta.
        context_items = []
        files = sly.fs.list_files(temp_dir, sly.image.SUPPORTED_IMG_EXTS)
        for file in files:
            img_meta_path = os.path.join(temp_dir, sly.fs.get_file_name_with_ext(file) + ".json")
            img_meta = {}
            if sly.fs.file_exists(img_meta_path):
                img_meta = load_json_file(img_meta_path)
                # Fix: qualify with sly.image for consistency with
                # sly.image.SUPPORTED_IMG_EXTS above (bare "image" may be unbound).
                if not sly.image.has_valid_ext(img_meta[ApiField.NAME]):
                    raise RuntimeError('Wrong format: name field contains path with unsupported extension')
                if img_meta[ApiField.NAME] != sly.fs.get_file_name_with_ext(file):
                    raise RuntimeError('Wrong format: name field contains wrong image path')
            context_items.append((file, img_meta, context_img_to_hash[file]['hash']))

        if len(context_items) != 0:
            rimg_infos = []
            for img_path, meta_json, img_hash in context_items:
                rimg_infos.append({ApiField.ENTITY_ID: item_info.id,
                                   ApiField.NAME: meta_json.get(ApiField.NAME, sly.fs.get_file_name_with_ext(img_path)),
                                   ApiField.HASH: img_hash,
                                   ApiField.META: meta_json.get(ApiField.META, {})})
            api.pointcloud.add_related_images(rimg_infos)

        sly.fs.remove_dir(temp_dir)

    if project_info is not None:
        sly.logger.info('PROJECT_CREATED', extra={'event_type': sly.EventType.PROJECT_CREATED, 'project_id': project_info.id})
    else:
        temp_str = "Project"
        if append_to_existing_project is True:
            temp_str = "Dataset"
        raise RuntimeError("{} wasn't created: 0 files with supported formats were found. Supported formats: {!r}"
                           .format(temp_str, ALLOWED_POINTCLOUD_EXTENSIONS))
Example #2
0
def add_videos_to_project():
    """Import video files from a Supervisely import task into a VIDEOS project.

    Each import file may contain several video streams; every acceptable
    stream is uploaded (by hash) as a separate item. On Community instances,
    streams that would require server-side processing are skipped.

    Raises:
        RuntimeError: if no stream with a supported codec/container was added.
    """
    task_config = load_json_file(TaskPaths.TASK_CONFIG_PATH)

    task_id = task_config['task_id']
    append_to_existing_project = task_config['append_to_existing_project']

    server_address = task_config['server_address']
    token = task_config['api_token']

    # Edition matters below: Community has codec/container restrictions.
    instance_type = task_config.get("instance_type", sly.ENTERPRISE)

    api = sly.Api(server_address, token)

    task_info = api.task.get_info_by_id(task_id)
    api.add_additional_field('taskId', task_id)
    api.add_header('x-task-id', str(task_id))

    workspace_id = task_info["workspaceId"]
    project_name = task_config.get('project_name')
    if project_name is None:
        project_name = task_config["res_names"]["project"]

    # When appending, the target project must already exist and be of type VIDEOS.
    project_info = None
    if append_to_existing_project is True:
        project_info = api.project.get_info_by_name(
            workspace_id,
            project_name,
            expected_type=sly.ProjectType.VIDEOS,
            raise_error=True)

    files_list = api.task.get_import_files_list(task_id)
    for file_info in files_list:
        original_path = file_info["filename"]
        try:
            file_name = sly.fs.get_file_name_with_ext(original_path)
            all_streams = file_info["meta"]["streams"]
            hash = file_info["hash"]
            ds_info = None

            video_streams = get_video_streams(all_streams)
            for stream_info in video_streams:
                stream_index = stream_info["index"]

                # Community Edition cannot accept streams that need transcoding.
                if instance_type == sly.COMMUNITY:
                    if _check_video_requires_processing(
                            file_info, stream_info) is True:
                        warn_video_requires_processing(file_name)
                        continue

                # Create the project/dataset lazily: only once the first
                # uploadable stream is found.
                if project_info is None:
                    project_info = api.project.create(
                        workspace_id,
                        project_name,
                        type=sly.ProjectType.VIDEOS,
                        change_name_if_conflict=True)

                if ds_info is None:
                    ds_name = get_dataset_name(original_path)
                    ds_info = api.dataset.get_or_create(
                        project_info.id, ds_name)

                # On name collision, derive a unique name from the stream index.
                item_name = file_name
                info = api.video.get_info_by_name(ds_info.id, item_name)
                if info is not None:
                    item_name = gen_video_stream_name(file_name, stream_index)
                    sly.logger.warning(
                        "Name {!r} already exists in dataset {!r}: renamed to {!r}"
                        .format(file_name, ds_info.name, item_name))

                _ = api.video.upload_hash(ds_info.id, item_name, hash,
                                          stream_index)
        except Exception as e:
            # Best-effort import: a broken file must not abort the whole task.
            sly.logger.warning(
                "File skipped {!r}: error occurred during processing {!r}".
                format(original_path, str(e)))

    if project_info is not None:
        sly.logger.info('PROJECT_CREATED',
                        extra={
                            'event_type': sly.EventType.PROJECT_CREATED,
                            'project_id': project_info.id
                        })
    else:
        temp_str = "Project"
        if append_to_existing_project is True:
            temp_str = "Dataset"
        raise RuntimeError(
            "{} wasn't created: 0 files with supported codecs ({}) and containers ({}). It is a limitation for Community Edition (CE)."
            .format(temp_str, _SUPPORTED_CODECS, _SUPPORTED_CONTAINERS))
    pass
Example #3
0
def add_pointclouds_to_project():
    """Convert *.ply files from a Supervisely import task to *.pcd and upload them.

    Each .ply file is downloaded, converted to an ASCII .pcd with Open3D, and
    uploaded into a POINT_CLOUDS project; related context images found under a
    ``related_images`` directory are then attached to the uploaded items.

    Raises:
        RuntimeError: if no .ply file was successfully imported.
    """
    task_config = load_json_file(TaskPaths.TASK_CONFIG_PATH)

    task_id = task_config['task_id']
    append_to_existing_project = task_config['append_to_existing_project']

    server_address = task_config['server_address']
    token = task_config['api_token']

    api = sly.Api(server_address, token)

    task_info = api.task.get_info_by_id(task_id)
    api.add_additional_field('taskId', task_id)
    api.add_header('x-task-id', str(task_id))

    workspace_id = task_info["workspaceId"]
    project_name = task_config.get('project_name')
    if project_name is None:
        project_name = task_config["res_names"]["project"]

    project_info = None
    if append_to_existing_project is True:
        # Fix: this importer produces point clouds, so an existing project must
        # be of type POINT_CLOUDS (was erroneously sly.ProjectType.VIDEOS).
        project_info = api.project.get_info_by_name(workspace_id, project_name, expected_type=sly.ProjectType.POINT_CLOUDS, raise_error=True)

    files_list = api.task.get_import_files_list(task_id)

    # Index related images: item_dir -> item_name_processed -> [img or json file info].
    # related_files gives O(1) "is this file a related image?" lookups below.
    related_items_info = {}
    related_files = {}
    for file_info in files_list:
        original_path = file_info["filename"]
        if 'related_images' in original_path:
            related_files[original_path] = file_info
            item_dir = original_path.split('/related_images')[0]
            # The subdirectory name encodes the owning item's file name with
            # dots replaced by "_" (mirrored by item_context_dir below).
            item_name_processed = os.path.basename(os.path.dirname(original_path))
            related_items_info.setdefault(item_dir, {}).setdefault(item_name_processed, []).append(file_info)

    project_dir = os.path.join(sly.TaskPaths.DATA_DIR, project_name)

    progress = sly.Progress('Processing', len(files_list), sly.logger)
    added_items = []
    for file_info in files_list:
        ds_info = None
        original_path = file_info["filename"]
        if original_path in related_files:
            # Related images are attached later, not imported as items.
            progress.iter_done_report()
            continue

        try:
            file_name = sly.fs.get_file_name_with_ext(original_path)
            ext = sly.fs.get_file_ext(original_path)

            if ext == '.ply':
                # Create the project/dataset lazily, only once a .ply is seen.
                if project_info is None:
                    project_info = api.project.create(workspace_id, project_name, type=sly.ProjectType.POINT_CLOUDS, change_name_if_conflict=True)
                if ds_info is None:
                    ds_name = get_dataset_name(original_path)
                    ds_info = api.dataset.get_or_create(project_info.id, ds_name)

                # NOTE(review): relies on original_path starting with a path
                # separator so plain concatenation nests it under project_dir
                # -- confirm against the import-file path format.
                save_path = project_dir + original_path

                api.task.download_import_file(task_id, original_path, save_path)
                points = o3d.io.read_point_cloud(save_path)

                # Convert .ply -> ASCII .pcd next to the downloaded file; see
                # https://stackoverflow.com/questions/51350493/convertion-of-ply-format-to-pcd-format/62488893#62488893
                # https://stackoverflow.com/questions/61774682/converting-from-ply-to-pcd-format
                new_file_name = sly.fs.get_file_name(file_name) + ".pcd"
                pcd_save_path = os.path.join(os.path.dirname(save_path), new_file_name)
                o3d.io.write_point_cloud(pcd_save_path, points, write_ascii=True)

                item_name = api.pointcloud.get_free_name(ds_info.id, new_file_name)
                item_info = api.pointcloud.upload_path(ds_info.id, item_name, pcd_save_path)

                added_items.append((ds_info, item_info, original_path))
            else:
                raise ValueError("Plugin supports only *.ply files.")

        except Exception as e:
            # Best-effort import: a broken file must not abort the whole task.
            sly.logger.warning("File skipped {!r}: error occurred during processing - {!r}".format(original_path, str(e)))

        progress.iter_done_report()

    # Attach related images for all added items.
    for ds_info, item_info, import_path in added_items:
        item_dir = os.path.dirname(import_path)
        item_import_name = sly.fs.get_file_name_with_ext(import_path)
        item_context_dir = item_import_name.replace(".", "_")

        if item_dir not in related_items_info:
            continue
        if item_context_dir not in related_items_info[item_dir]:
            continue

        # Download all context files for this item into a temporary directory.
        files = related_items_info[item_dir][item_context_dir]
        temp_dir = os.path.join(sly.TaskPaths.DATA_DIR, item_context_dir)
        sly.fs.mkdir(temp_dir)
        context_img_to_hash = {}
        for file_import_info in files:
            original_path = file_import_info["filename"]
            save_name = sly.fs.get_file_name_with_ext(original_path)
            api.task.download_import_file(task_id, original_path, os.path.join(temp_dir, save_name))
            context_img_to_hash[os.path.join(temp_dir, save_name)] = file_import_info

        # Pair each downloaded image with its optional "<image>.json" meta.
        context_items = []
        files = sly.fs.list_files(temp_dir, sly.image.SUPPORTED_IMG_EXTS)
        for file in files:
            img_meta_path = os.path.join(temp_dir, sly.fs.get_file_name_with_ext(file) + ".json")
            img_meta = {}
            if sly.fs.file_exists(img_meta_path):
                img_meta = load_json_file(img_meta_path)
            context_items.append((file, img_meta, context_img_to_hash[file]['hash']))

        if len(context_items) != 0:
            rimg_infos = []
            for img_path, meta_json, img_hash in context_items:
                rimg_infos.append({ApiField.ENTITY_ID: item_info.id,
                                   ApiField.NAME: meta_json.get(ApiField.NAME, sly.fs.get_file_name_with_ext(img_path)),
                                   ApiField.HASH: img_hash,
                                   ApiField.META: meta_json.get(ApiField.META, {})})
            api.pointcloud.add_related_images(rimg_infos)

        sly.fs.remove_dir(temp_dir)

    if project_info is not None:
        sly.logger.info('PROJECT_CREATED', extra={'event_type': sly.EventType.PROJECT_CREATED, 'project_id': project_info.id})
    else:
        temp_str = "Project"
        if append_to_existing_project is True:
            temp_str = "Dataset"
        raise RuntimeError("{} wasn't created: 0 files with supported formats were found. Supported formats: {!r}"
                           .format(temp_str, ALLOWED_POINTCLOUD_EXTENSIONS))
Example #4
0
def add_images_to_project():
    """Import image files from a Supervisely import task into an IMAGES project.

    Depending on task options, images are either re-uploaded by hash directly
    or first downloaded and normalized (EXIF orientation / alpha channel) and
    then uploaded by path. Existing images in an appended project are kept and
    name collisions are resolved with a random suffix.

    Raises:
        RuntimeError: if the task has no import files.
    """
    sly.fs.ensure_base_path(sly.TaskPaths.RESULTS_DIR)

    task_config = load_json_file(TaskPaths.TASK_CONFIG_PATH)

    task_id = task_config['task_id']
    append_to_existing_project = task_config['append_to_existing_project']
    server_address = task_config['server_address']
    token = task_config['api_token']

    # Images only need downloading when some normalization has to be applied.
    convert_options = task_config.get('options', {})
    normalize_exif = convert_options.get('normalize_exif', True)
    remove_alpha_channel = convert_options.get('remove_alpha_channel', True)
    need_download = normalize_exif or remove_alpha_channel

    api = sly.Api(server_address, token, retry_count=5)

    task_info = api.task.get_info_by_id(task_id)
    api.add_additional_field('taskId', task_id)
    api.add_header('x-task-id', str(task_id))

    workspace_id = task_info["workspaceId"]
    project_name = task_config.get('project_name', None)
    if project_name is None:
        project_name = task_config["res_names"]["project"]

    files_list = api.task.get_import_files_list(task_id)
    if len(files_list) == 0:
        raise RuntimeError("There are no import files")

    if append_to_existing_project is True:
        project_info = api.project.get_info_by_name(
            workspace_id,
            project_name,
            expected_type=sly.ProjectType.IMAGES,
            raise_error=True)
    else:
        project_info = api.project.create(workspace_id,
                                          project_name,
                                          type=sly.ProjectType.IMAGES,
                                          change_name_if_conflict=True)

    # dataset name -> {image name -> hash or None}; None marks images that
    # already exist in the project and must not be re-uploaded.
    dataset_to_item = defaultdict(dict)
    for dataset in api.dataset.get_list(project_info.id):
        for image_info in api.image.get_list(dataset.id):
            dataset_to_item[dataset.name][image_info.name] = None

    for file_info in files_list:
        original_path = file_info["filename"]
        try:
            sly.image.validate_ext(original_path)
            item_hash = file_info["hash"]
            ds_name = get_dataset_name(original_path)
            item_name = sly.fs.get_file_name_with_ext(original_path)

            if item_name in dataset_to_item[ds_name]:
                # Name collision: keep both files by appending a random suffix.
                temp_name = sly.fs.get_file_name(original_path)
                temp_ext = sly.fs.get_file_ext(original_path)
                new_item_name = "{}_{}{}".format(temp_name, sly.rand_str(5),
                                                 temp_ext)
                sly.logger.warning(
                    "Name {!r} already exists in dataset {!r}: renamed to {!r}"
                    .format(item_name, ds_name, new_item_name))
                item_name = new_item_name
            dataset_to_item[ds_name][item_name] = item_hash
        except Exception as e:
            # Best-effort import: a broken file must not abort the whole task.
            sly.logger.warning(
                "File skipped {!r}: error occurred during processing {!r}".
                format(original_path, str(e)))

    for ds_name, ds_items in dataset_to_item.items():
        ds_info = api.dataset.get_or_create(project_info.id, ds_name)

        # Collect only the new images (hash is None for pre-existing ones).
        names = []
        hashes = []
        for name, item_hash in ds_items.items():
            if item_hash is None:
                continue  # existing image => skip
            names.append(name)
            hashes.append(item_hash)

        # One local temp path per image, derived from its hash
        # ("/" is not filesystem-safe, so it is replaced).
        paths = [
            os.path.join(sly.TaskPaths.RESULTS_DIR,
                         h.replace("/", "a") + sly.image.DEFAULT_IMG_EXT)
            for h in hashes
        ]
        progress = sly.Progress('Dataset: {!r}'.format(ds_name), len(ds_items))

        for batch_names, batch_hashes, batch_paths in zip(
                sly.batched(names, 10), sly.batched(hashes, 10),
                sly.batched(paths, 10)):
            if need_download is True:
                # Download, normalize (read/write applies EXIF orientation and
                # optionally drops the alpha channel), then re-upload by path.
                res_batch_names = []
                res_batch_paths = []
                api.image.download_paths_by_hashes(batch_hashes, batch_paths)
                for name, path in zip(batch_names, batch_paths):
                    try:
                        img = sly.image.read(path, remove_alpha_channel)
                        sly.image.write(path, img, remove_alpha_channel)
                        res_batch_names.append(name)
                        res_batch_paths.append(path)
                    except Exception as e:
                        sly.logger.warning("Skip image {!r}: {}".format(
                            name, str(e)),
                                           extra={'file_path': path})
                api.image.upload_paths(ds_info.id, res_batch_names,
                                       res_batch_paths)

                # Free disk space as we go instead of cleaning up at the end.
                for path in res_batch_paths:
                    sly.fs.silent_remove(path)
                progress.iters_done_report(len(batch_names))
            else:
                # No processing required: upload by hash without downloading.
                api.image.upload_hashes(ds_info.id,
                                        batch_names,
                                        batch_hashes,
                                        progress_cb=progress.iters_done_report)

    if project_info is not None:
        sly.logger.info('PROJECT_CREATED',
                        extra={
                            'event_type': sly.EventType.PROJECT_CREATED,
                            'project_id': project_info.id
                        })
    else:
        temp_str = "Project"
        if append_to_existing_project is True:
            temp_str = "Dataset"
        # Fix: the original raise was missing .format(temp_str), so the message
        # contained a literal "{}" placeholder.
        raise RuntimeError("{} wasn't created: 0 files were added".format(temp_str))