Example #1
    def _check_paths_valid(self, src_img_dir: str):
        check_dir_exists(src_img_dir)
        img_filename_list = []
        duplicate_img_filename_list = []
        # Track seen filenames so that duplicates can be reported all at once.
        for frame in self:
            img_filename = get_filename(frame.img_path)
            if img_filename not in img_filename_list:
                img_filename_list.append(img_filename)
            else:
                duplicate_img_filename_list.append(frame.img_path)
            img_path = f'{src_img_dir}/{img_filename}'
            check_file_exists(img_path)
            # Companion NDDS images (class segmentation, depth, instance
            # segmentation), when present.
            if frame.cs_img_path:
                check_file_exists(
                    f'{src_img_dir}/{get_filename(frame.cs_img_path)}')
            if frame.depth_img_path:
                check_file_exists(
                    f'{src_img_dir}/{get_filename(frame.depth_img_path)}')
            if frame.is_img_path:
                check_file_exists(
                    f'{src_img_dir}/{get_filename(frame.is_img_path)}')
        if len(duplicate_img_filename_list) > 0:
            logger.error(
                f'Found the following duplicate image filenames in {self.__class__.__name__}:\n{duplicate_img_filename_list}'
            )
            raise Exception('Duplicate image filenames found.')
Example #2
    @classmethod
    def load_from_dir(cls,
                      img_dir: str,
                      json_dir: str,
                      show_pbar: bool = True) -> NDDS_Frame_Handler:
        check_dir_exists(json_dir)
        check_dir_exists(img_dir)

        img_pathlist = get_valid_image_paths(img_dir)
        json_path_list = [
            path for path in get_all_files_of_extension(dir_path=json_dir,
                                                        extension='json')
            if not get_filename(path).startswith('_')
        ]
        json_path_list.sort()
        handler = NDDS_Frame_Handler()
        if show_pbar:
            pbar = tqdm(total=len(json_path_list), unit='ann(s)', leave=True)
            pbar.set_description(f'Loading {cls.__name__}')
        for json_path in json_path_list:
            check_file_exists(json_path)
            json_rootname = get_rootname_from_path(json_path)
            matching_img_path = None
            matching_cs_img_path = None
            matching_depth_img_path = None
            matching_is_img_path = None
            # Match the json rootname against the main image and its companion
            # .cs/.depth/.is images.
            for img_path in img_pathlist:
                img_rootname = '.'.join(get_filename(img_path).split('.')[:-1])
                if img_rootname == json_rootname:
                    matching_img_path = img_path
                elif img_rootname == f'{json_rootname}.cs':
                    matching_cs_img_path = img_path
                elif img_rootname == f'{json_rootname}.depth':
                    matching_depth_img_path = img_path
                elif img_rootname == f'{json_rootname}.is':
                    matching_is_img_path = img_path
                if matching_img_path and matching_cs_img_path and matching_depth_img_path and matching_is_img_path:
                    break
            if matching_img_path is None:
                logger.error(
                    f"Couldn't find image file that matches rootname of {get_filename(json_path)} in {img_dir}"
                )
                raise FileNotFoundError
            frame = NDDS_Frame(
                img_path=matching_img_path,
                ndds_ann=NDDS_Annotation.load_from_path(json_path),
                cs_img_path=matching_cs_img_path,
                depth_img_path=matching_depth_img_path,
                is_img_path=matching_is_img_path)
            handler.append(frame)
            if show_pbar:
                pbar.update()
        return handler
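
A minimal usage sketch for the loader above, assuming NDDS_Frame_Handler is importable from your project (the import path and directory names are placeholders, not part of the original example):

# Placeholder import path and directories; adjust to your own project layout.
from annotation_utils.ndds.structs import NDDS_Frame_Handler

handler = NDDS_Frame_Handler.load_from_dir(
    img_dir='/data/ndds/scene0',   # holds <root>.png plus <root>.cs/.depth/.is companions
    json_dir='/data/ndds/scene0',  # holds the per-frame NDDS json annotations
    show_pbar=True)
for frame in handler:
    print(frame.img_path, frame.cs_img_path, frame.depth_img_path, frame.is_img_path)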
Example #3
    def _check_paths_valid(self, src_img_dir: str):
        check_dir_exists(src_img_dir)
        img_filename_list = []
        duplicate_img_filename_list = []
        # Track seen filenames so that duplicates can be reported all at once.
        for ann in self:
            img_filename = get_filename(ann.img_path)
            if img_filename not in img_filename_list:
                img_filename_list.append(img_filename)
            else:
                duplicate_img_filename_list.append(ann.img_path)
            img_path = f'{src_img_dir}/{img_filename}'
            check_file_exists(img_path)
        if len(duplicate_img_filename_list) > 0:
            logger.error(f'Found the following duplicate image filenames in LabelmeAnnotationHandler:\n{duplicate_img_filename_list}')
            raise Exception('Duplicate image filenames found.')
Example #4
    @classmethod
    def load_from_dir(cls,
                      json_dir: str,
                      img_dir: str = None,
                      camera_config_path: str = None,
                      obj_config_path: str = None,
                      show_pbar: bool = False) -> NDDS_Dataset:
        """Loads NDDS_Dataset object from a directory path.

        Arguments:
            json_dir {str} -- [Path to directory with all of the NDDS annotation json files.]

        Keyword Arguments:
            img_dir {str} -- [Path to directory with all of the NDDS image files.] (default: json_dir)
            camera_config_path {str} -- [Path to the camera configuration json file.] (default: f'{json_dir}/_camera_settings.json')
            obj_config_path {str} -- [Path to the object configuration json file.] (default: f'{json_dir}/_object_settings.json')
            show_pbar {bool} -- [Show the progress bar.] (default: False)

        Returns:
            NDDS_Dataset -- [NDDS_Dataset object]
        """
        check_dir_exists(json_dir)
        if img_dir is None:
            img_dir = json_dir
        else:
            check_dir_exists(img_dir)
        camera_config_path = camera_config_path if camera_config_path is not None else f'{json_dir}/_camera_settings.json'
        check_file_exists(camera_config_path)
        obj_config_path = obj_config_path if obj_config_path is not None else f'{json_dir}/_object_settings.json'
        check_file_exists(obj_config_path)

        return NDDS_Dataset(
            camera_config=CameraConfig.load_from_path(camera_config_path),
            obj_config=ObjectSettings.load_from_path(obj_config_path),
            frames=NDDS_Frame_Handler.load_from_dir(img_dir=img_dir,
                                                    json_dir=json_dir,
                                                    show_pbar=show_pbar))
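
A hedged usage sketch for this loader, following the defaults described in its docstring (the import path and directories are placeholders, not part of the original example):

# Placeholder import path and directories; adjust to your own project layout.
from annotation_utils.ndds.structs import NDDS_Dataset

# With only json_dir given, img_dir defaults to json_dir and the config files
# default to _camera_settings.json / _object_settings.json inside it.
dataset = NDDS_Dataset.load_from_dir(json_dir='/data/ndds/scene0', show_pbar=True)

# Images kept in a separate directory can be pointed to explicitly.
dataset = NDDS_Dataset.load_from_dir(
    json_dir='/data/ndds/scene0',
    img_dir='/data/ndds/scene0_images',
    camera_config_path='/data/ndds/scene0/_camera_settings.json',
    obj_config_path='/data/ndds/scene0/_object_settings.json')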
Example #5
    @classmethod
    def from_dict(cls,
                  collection_dict: dict,
                  check_paths: bool = True) -> DatasetConfigCollection:
        check_required_keys(collection_dict,
                            required_keys=[
                                'collection_dir', 'dataset_names',
                                'dataset_specific'
                            ])
        collection_dir = collection_dict['collection_dir']
        check_type(collection_dir, valid_type_list=[str])
        dataset_names = collection_dict['dataset_names']
        check_type(dataset_names, valid_type_list=[list])
        check_type_from_list(dataset_names, valid_type_list=[str])
        dataset_specific = collection_dict['dataset_specific']
        check_type(dataset_specific, valid_type_list=[dict])
        collection_tag = collection_dict.get('tag', None)
        check_type(collection_tag, valid_type_list=[type(None), str])
        check_required_keys(
            dataset_specific,
            required_keys=['img_dir', 'ann_path', 'ann_format'])
        img_dir = dataset_specific['img_dir']
        check_type(img_dir, valid_type_list=[str, list])
        if type(img_dir) is list:
            check_type_from_list(img_dir, valid_type_list=[str])
            check_list_length(img_dir, correct_length=len(dataset_names))
        ann_path = dataset_specific['ann_path']
        check_type(ann_path, valid_type_list=[str, list])
        if type(ann_path) is list:
            check_type_from_list(ann_path, valid_type_list=[str])
            check_list_length(ann_path, correct_length=len(dataset_names))
        ann_format = dataset_specific['ann_format']
        check_type(ann_format, valid_type_list=[str, list])
        if type(ann_format) is list:
            check_type_from_list(ann_format, valid_type_list=[str])
            check_list_length(ann_format, correct_length=len(dataset_names))
        dataset_tag = dataset_specific.get('tag', None)
        check_type(dataset_tag, valid_type_list=[type(None), str, list])
        if type(dataset_tag) is list:
            check_type_from_list(dataset_tag,
                                 valid_type_list=[type(None), str])
            check_list_length(dataset_tag, correct_length=len(dataset_names))

        dataset_config_list = []
        for i in range(len(dataset_names)):
            if type(img_dir) is str:
                img_dir0 = img_dir
            elif type(img_dir) is list:
                if i >= len(img_dir):
                    raise IndexError
                img_dir0 = img_dir[i]
            else:
                raise Exception

            if type(ann_path) is str:
                ann_path0 = ann_path
            elif type(ann_path) is list:
                if i >= len(ann_path):
                    raise IndexError
                ann_path0 = ann_path[i]
            else:
                raise Exception

            if type(ann_format) is str:
                ann_format0 = ann_format
            elif type(ann_format) is list:
                if i >= len(ann_format):
                    raise IndexError
                ann_format0 = ann_format[i]
            else:
                raise Exception

            if type(dataset_tag) is str or dataset_tag is None:
                dataset_tag0 = dataset_tag
            elif type(dataset_tag) is list:
                if i >= len(dataset_tag):
                    raise IndexError
                dataset_tag0 = dataset_tag[i]
            else:
                raise Exception

            img_dir1 = rel_to_abs_path(
                f'{collection_dir}/{dataset_names[i]}/{img_dir0}')
            ann_path1 = rel_to_abs_path(
                f'{collection_dir}/{dataset_names[i]}/{ann_path0}')
            if check_paths:
                check_dir_exists(img_dir1)
                check_file_exists(ann_path1)
            config = DatasetConfig(img_dir=img_dir1,
                                   ann_path=ann_path1,
                                   ann_format=ann_format0,
                                   tag=dataset_tag0)
            dataset_config_list.append(config)
        return DatasetConfigCollection(dataset_config_list=dataset_config_list,
                                       tag=collection_tag)
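
A sketch of the dictionary shape from_dict expects, based purely on the keys and types it checks above (the import path, directory names, and the 'coco' format value are illustrative placeholders; check_paths=False keeps the placeholder paths from being validated):

# Placeholder import path; adjust to wherever DatasetConfigCollection lives in your project.
from annotation_utils.dataset.config import DatasetConfigCollection

collection_dict = {
    'collection_dir': '/data/collections/parts',
    'dataset_names': ['part_a', 'part_b'],
    'dataset_specific': {
        'img_dir': 'img',            # a single str applies to every dataset; a list must match len(dataset_names)
        'ann_path': 'output.json',
        'ann_format': 'coco',        # may also be a per-dataset list
        'tag': ['first', 'second']   # optional; None, str, or a per-dataset list
    },
    'tag': 'parts_collection'        # optional collection-level tag
}
collection = DatasetConfigCollection.from_dict(collection_dict, check_paths=False)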
Example #6
    @classmethod
    def load_from_dir(cls, load_dir: str) -> LabelmeAnnotationHandler:
        check_dir_exists(load_dir)
        json_path_list = get_all_files_of_extension(
            dir_path=load_dir, extension='json')
        return cls.load_from_pathlist(json_path_list)
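
A similarly hedged call sketch for this Labelme loader (the import path and directory are placeholders, not part of the original example):

# Placeholder import path and directory; adjust to your own project layout.
from annotation_utils.labelme.annotation import LabelmeAnnotationHandler

handler = LabelmeAnnotationHandler.load_from_dir(load_dir='/data/labelme_anns')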