Example #1
    def _convert_sample(self, sample_info):
        log_dct = sample_info._asdict()  # ok: namedtuple._asdict() is public, documented API
        # logger.trace('Will process sample.', extra=log_dct)

        sample_data = sample_info.ia_data
        sly.copy_file(sample_data['src_img_path'],
                      sample_info.img_path)  # img is ready

        ann = self._get_ann(sample_data['segm_path'],
                            sample_data.get('inst_path'), log_dct)
        packed_ann = ann.pack()
        sly.json_dump(packed_ann, sample_info.ann_path)  # ann is ready
Example #2
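    # Assumed module-level imports for this snippet: from PIL import Image,
    # import pyexiv2, import os.path as osp.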
    def convert(self):
        in_datasets = self._find_in_datasets()

        # map input structure to output
        out_pr = sly.ProjectStructure(self.settings['res_names']['project'])

        for ds_name, ds_path in in_datasets:
            img_fnames = sly.ImportImgLister.list_images(ds_path)
            for name_with_ext in img_fnames:
                img_name, img_ext = osp.splitext(name_with_ext)
                src_img_path = osp.join(ds_path, name_with_ext)
                dt = {
                    'src_img_path': src_img_path,
                    'image_ext': img_ext,
                }
                out_pr.add_item(ds_name, img_name, dt)
            logger.info(
                'Found source dataset with raw images: "{}", {} sample(s).'.
                format(ds_name, len(img_fnames)))

        out_pr_fs = sly.ProjectFS(self.out_dir, out_pr)
        out_pr_fs.make_dirs()

        res_meta = sly.ProjectMeta()  # empty
        res_meta.to_dir(out_pr_fs.project_path)

        progress = sly.progress_counter_import(out_pr.name, out_pr.image_cnt)
        for sample_info in out_pr_fs:
            sample_data = sample_info.ia_data
            src_img_path = sample_data['src_img_path']
            sly.copy_file(src_img_path, sample_info.img_path)  # img is ready

            image = Image.open(sample_info.img_path)
            exif_data = pyexiv2.metadata.ImageMetadata(sample_info.img_path)
            exif_data.read()

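            # EXIF orientation 1 means "no transform"; for anything else, bake the
            # rotation/flip into the pixels and reset the tag so viewers don't apply it twice.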
            if exif_data.get_orientation() != 1:
                logger.debug('Image with flip/rot EXIF',
                             extra={
                                 'orientation': exif_data.get_orientation(),
                                 'image_path': sample_info.img_path
                             })
                image = sly.image_transpose_exif(image)
                image.save(sample_info.img_path)
                exif_data['Exif.Image.Orientation'] = pyexiv2.ExifTag(
                    'Exif.Image.Orientation', 1)
                exif_data.modified = True
                exif_data.write()

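            # PIL reports size as (width, height), matching the _wh suffix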
            imsize_wh = image.size
            ann = sly.Annotation.new_with_objects(imsize_wh, [])
            sly.json_dump(ann.pack(), sample_info.ann_path)  # ann is ready
            progress.iter_done_report()
Example #3
    def _convert_sample(self, sample_info):
        sample_data = sample_info.ia_data

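        # Parse the source annotation first; any failure is wrapped in AnnConvException
        # so the caller can skip this sample and continue with the rest.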
        try:
            ann = self._load_citysc_annotation(sample_data['orig_ann_path'])
            ann['tags'].append(sample_data['tag_name'])
            packed_ann = ann.pack()
        except Exception:
            raise AnnConvException()  # ok: the caller may continue with another sample

        self.tags.add(sample_data['tag_name'])
        sly.json_dump(packed_ann, sample_info.ann_path)  # ann is ready
        sly.copy_file(sample_data['orig_img_path'], sample_info.img_path)  # img is ready
Example #4
    def _download_images_from_remote(self, pr_writer, image_id_to_ds, img_infos):
        if len(img_infos) == 0:
            return

        infos_with_paths = [(info, pr_writer.get_img_path(image_id_to_ds[info.id], info.title, info.ext))
                            for info in img_infos]
        hash2path = {x[0].hash: x[1] for x in infos_with_paths}  # for unique hashes
        unique_hashes = list(hash2path.keys())

        ready_paths = []
        ready_hashes = []
        progress = sly.ProgressCounter('Download remote images', len(unique_hashes), ext_logger=self.logger)

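        # close_fh is a closure over img_hash (set in the download loop below);
        # it records the path/hash pair only if the file passes the integrity check.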
        def close_fh(fh):
            fpath = fh.file_path
            if fh.close_and_check():
                ready_paths.append(fpath)
                ready_hashes.append(img_hash)
                progress.iter_done_report()
            else:
                self.logger.warning('file was skipped while downloading',
                                    extra={'img_path': fpath, 'img_hash': img_hash})

        # download by unique hashes
        for batch_img_hashes in sly.batched(unique_hashes, constants.BATCH_SIZE_DOWNLOAD_IMAGES):
            file_handler = None
            img_hash = None
            for chunk in self.api.get_stream_with_data('DownloadImages',
                                                       api_proto.ChunkImage,
                                                       api_proto.ImagesHashes(images_hashes=batch_img_hashes)):
                if chunk.image.hash:  # non-empty hash means beginning of new image
                    if file_handler is not None:
                        close_fh(file_handler)
                    img_hash = chunk.image.hash
                    self.logger.trace('download_images', extra={'img_hash': img_hash})
                    dst_fpath = hash2path[img_hash]
                    file_handler = ChunkedFileWriter(file_path=dst_fpath)

                file_handler.write(chunk.chunk)

            close_fh(file_handler)  # must not be None: the batch contained at least one image

        # process non-unique hashes
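        # Duplicates share a hash: copy the already-downloaded file instead of fetching it again.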
        for info, dst_path in infos_with_paths:
            origin_path = hash2path[info.hash]
            if (origin_path != dst_path) and osp.isfile(origin_path):
                sly.ensure_base_path(dst_path)
                sly.copy_file(origin_path, dst_path)

        self._write_images_to_agent_storage(ready_paths, ready_hashes)
Example #5
from copy import deepcopy

import supervisely_lib as sly  # assumed import name of the legacy Supervisely SDK

logger = sly.logger  # assumed: the SDK exposes a preconfigured logger


def main():
    logger.info('Hello world.')

    # It isn't necessary, but let's suppose that our data is laid out as for a Supervisely task:
    # input in '/sly_task_data/data' and results in '/sly_task_data/results'.
    # TaskPaths provides these paths.
    task_paths = sly.TaskPaths()

    in_pr_dir = task_paths.project_dir  # the path includes the project name

    in_pr_meta = sly.ProjectMeta.from_dir(in_pr_dir)
    # Now we've read the input project's meta.
    logger.info('Input project meta: {} class(es).'.format(
        len(in_pr_meta.classes)))

    in_pr_fs = sly.ProjectFS.from_disk(
        *sly.ProjectFS.split_dir_project(in_pr_dir))
    # Now we've read the project structure.
    logger.info(
        'Input project: "{}" contains {} dataset(s) and {} image(s).'.format(
            in_pr_fs.pr_structure.name, len(in_pr_fs.pr_structure.datasets),
            in_pr_fs.image_cnt))

    # It's convenient to create the output project structure and store source file paths in ia_data.
    out_pr_structure = sly.ProjectStructure(
        'my_new_project')  # rename project... just for fun
    for item_descr in in_pr_fs:  # iterate over input project
        new_ia_data = {
            'src_ann_path': item_descr.ann_path,
            'src_img_path': item_descr.img_path,
            **item_descr.ia_data  # contains 'image_ext' which is required to write images
        }
        out_pr_structure.add_item(item_descr.ds_name, item_descr.image_name,
                                  new_ia_data)
    # ProjectFS will provide output file paths
    out_pr_fs = sly.ProjectFS(task_paths.results_dir, out_pr_structure)

    # We will add the rectangle to each annotation.
    new_class_title = 'new-region'
    rect_to_add = sly.Rect(left=20, top=20, right=50, bottom=100)
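    # The rectangle is in pixel coordinates; it will be cropped to the image bounds below.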

    # Ok, start processing.
    out_pr_fs.make_dirs()  # create all directories required for writing
    for item_descr in out_pr_fs:  # iterate over output project
        logger.info('Processing sample',
                    extra={
                        'dataset': item_descr.ds_name,
                        'image_name': item_descr.image_name
                    })

        # Copy image unchanged.
        sly.copy_file(item_descr.ia_data['src_img_path'], item_descr.img_path)

        # Read annotation.
        ann_packed = sly.json_load(item_descr.ia_data['src_ann_path'])
        ann = sly.Annotation.from_packed(ann_packed, in_pr_meta)

        # Add the new figure to the annotation.
        # The figure constructor returns an iterable of new figures
        # (e.g., a line cropped by the image bounds may produce several lines),
        # but here we'll get at most one figure,
        # or no figures at all if the image is smaller than 20x20.
        new_figures = sly.FigureRectangle.from_rect(new_class_title,
                                                    ann.image_size_wh,
                                                    rect_to_add)
        ann['objects'].extend(new_figures)

        # Save annotation.
        sly.json_dump(ann.pack(), item_descr.ann_path)

    # Don't forget to create and save the output project meta.
    # We'll keep the existing meta and add a new class with shape "rectangle".
    out_pr_meta = deepcopy(in_pr_meta)
    out_pr_meta.classes.add({
        'title': new_class_title,
        'shape': 'rectangle',
        'color': '#FFFF00'
    })
    # Then store the meta.
    out_pr_meta.to_dir(out_pr_fs.project_path)

    logger.info('Done.')
Example #6
    def convert(self):
        # determine the directory level by locating the project meta file
        meta_p = sly.ProjectMeta.find_in_dir(self.in_dir)
        if meta_p:
            # meta_file in input dir
            in_project_root_dir, pr_name_stub = sly.ProjectFS.split_dir_project(self.in_dir)
        else:
            # meta file in subdir of input dir
            possible_projects = sly.get_subdirs(self.in_dir)
            if len(possible_projects) != 1:
                raise RuntimeError('Wrong input project structure, or multiple projects are passed.')
            in_project_root_dir, pr_name_stub = self.in_dir, possible_projects[0]

        # read the project if possible
        try:
            in_fs = sly.ProjectFS.from_disk(in_project_root_dir, pr_name_stub)
            in_pr_meta = sly.ProjectMeta.from_dir(in_fs.project_path)
        except Exception:
            logger.error('Unable to read input meta.', exc_info=False)
            raise

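        # Validate the structure: every top-level directory must be a dataset,
        # and the ann/img file counts must match the expected sample count.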
        in_possible_datasets = sly.get_subdirs(in_fs.project_path)
        in_datasets = list(in_fs.pr_structure.datasets.keys())
        if len(in_possible_datasets) != len(in_datasets):
            raise RuntimeError('Excess top-level directories without data (wrong img-ann structure?).')
        for ds_name, the_ds in in_fs.pr_structure.datasets.items():
            req_cnt = the_ds.image_cnt
            dataset_path = in_fs.dataset_path(ds_name)
            anns_path = in_fs.dataset_anns_path(dataset_path)
            imgs_path = in_fs.dataset_imgs_path(dataset_path)
            for subdir in (anns_path, imgs_path):
                items_cnt = len(list(os.scandir(subdir)))
                if items_cnt != req_cnt:
                    raise RuntimeError('Excess files or directories in dataset subdirectory.')

        found_exts = {x.ia_data['image_ext'] for x in in_fs}
        if not all(x in sly.ImportImgLister.extensions for x in found_exts):
            raise RuntimeError('Found image(s) with unsupported types (by extension).')
        sample_cnt = in_fs.pr_structure.image_cnt
        if sample_cnt == 0:
            raise RuntimeError('Empty project, no samples.')

        logger.info('Found source structure: Supervisely format, {} sample(s).'.format(sample_cnt))

        out_pr = deepcopy(in_fs.pr_structure)
        out_pr.name = self.settings['res_names']['project']
        out_pr_fs = sly.ProjectFS(self.out_dir, out_pr)
        out_pr_fs.make_dirs()

        in_pr_meta.to_dir(out_pr_fs.project_path)

        progress = sly.progress_counter_import(out_pr.name, out_pr.image_cnt)
        for s in out_pr_fs:
            try:
                src_img_path = in_fs.img_path(s.ds_name, s.image_name)
                sly.copy_file(src_img_path, s.img_path)  # img is ready

                src_ann_path = in_fs.ann_path(s.ds_name, s.image_name)
                packed_ann = sly.json_load(src_ann_path)
                _ = sly.Annotation.from_packed(packed_ann, in_pr_meta)  # parse only to validate the annotation
                sly.copy_file(src_ann_path, s.ann_path)  # ann is ready
            except Exception:
                logger.error('Error occurred while processing input sample', exc_info=False, extra={
                             'dataset_name': s.ds_name, 'image_name': s.image_name,
                             })
                raise
            progress.iter_done_report()
Example #7
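    # Copy a stored object to its destination, creating parent directories as needed.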
    def _read_obj_impl(self, st_path, dst_path):
        sly.ensure_base_path(dst_path)
        sly.copy_file(st_path, dst_path)