def push_tail(self, path_part: Path):
    """Return a new Path with *path_part* appended as the last component.

    Accepts either a Path or a str; a str is wrapped in a Path first.
    """
    check_type(path_part, valid_type_list=[Path, str])
    tail = Path(path_part) if type(path_part) is str else path_part
    return Path.from_split(self.split() + [tail.path_str])
def __init__(self, sprite: Sprite, position: Position, velocity: Velocity = None):
    """Sprite entity with a velocity.

    Parameters:
        sprite: visual representation (validated by the parent __init__).
        position: initial position (validated by the parent __init__).
        velocity: movement vector; defaults to Velocity.stationary().
    """
    super().__init__(sprite=sprite, position=position)
    # BUG FIX: a single shared Velocity.stationary() instance was evaluated
    # once at def time (mutable-default hazard); build a fresh one per call.
    velocity = Velocity.stationary() if velocity is None else velocity
    # BUG FIX: check_type was called as check_type(velocity, Velocity),
    # passing a bare type where every other call site in this file passes
    # valid_type_list=[...].
    check_type(velocity, valid_type_list=[Velocity])
    self.velocity = velocity
def __init__(self, visual_grid: VisualGrid, collision_grid: CollisionGrid):
    """Pair a visual grid with its collision grid; the shapes must agree."""
    for grid, grid_cls in ((visual_grid, VisualGrid), (collision_grid, CollisionGrid)):
        check_type(grid, valid_type_list=[grid_cls])
    if visual_grid.shape != collision_grid.shape:
        logger.error(f'visual_grid.shape == {visual_grid.shape} != {collision_grid.shape} == collision_grid.shape')
        raise Exception
    self.visual_grid = visual_grid
    self.collision_grid = collision_grid
def __init__(self, sprite: Sprite, position: Position):
    """Place *sprite* at *position*; width/height are cached from the sprite."""
    check_type(sprite, valid_type_list=[Sprite])
    check_type(position, valid_type_list=[Position])
    self.sprite = sprite
    self.width, self.height = sprite.width, sprite.height
    self.position = position
def __setitem__(self: H, idx: int, value: T):
    """Assign into the backing list by int index or slice.

    Raises TypeError for any other index type.
    """
    # NOTE(review): for a slice, *value* is presumably a list of items, yet it
    # is type-checked against self.obj_type directly — confirm intended.
    check_type(value, valid_type_list=[self.obj_type])
    idx_type = type(idx)
    if idx_type is int:
        self.obj_list[idx] = value
    elif idx_type is slice:
        self.obj_list[idx.start:idx.stop:idx.step] = value
    else:
        logger.error(f'Expected int or slice. Got type(idx)={type(idx)}')
        raise TypeError
def __init__(self: H, obj_types: List[type], obj_list: List[T] = None, random_order: bool = False):
    """Handler over objects that may belong to any of *obj_types*.

    If *obj_list* is given, every element is validated against obj_types.
    """
    check_type(obj_types, valid_type_list=[list])
    check_type_from_list(obj_types, valid_type_list=[type, BaseModeMeta])
    self.obj_types = obj_types
    if obj_list is None:
        self.obj_list = []
    else:
        check_type_from_list(obj_list, valid_type_list=obj_types)
        self.obj_list = obj_list
    self.random_order = random_order
def __init__(self, exported_object_classes: List[str] = None, exported_objects: ExportedObjectHandler = None):
    """Container of exported object class names and their handler.

    Both parameters default to empty collections when None.
    """
    super().__init__()
    if exported_object_classes is None:
        exported_object_classes = []
    check_type_from_list(exported_object_classes, valid_type_list=[str])
    self.exported_object_classes = exported_object_classes
    if exported_objects is None:
        exported_objects = ExportedObjectHandler()
    check_type(exported_objects, valid_type_list=[ExportedObjectHandler])
    self.exported_objects = exported_objects
def from_dict(cls, intrinsic_param_dict: dict) -> Camera:
    """Build a Camera from a dict with keys 'f', 'c', 'T'.

    An empty dict yields None instead of a Camera.
    """
    check_type(intrinsic_param_dict, valid_type_list=[dict])
    if not intrinsic_param_dict:
        return None
    check_required_keys(intrinsic_param_dict, required_keys=['f', 'c', 'T'])
    check_type_from_list(list(intrinsic_param_dict.values()), valid_type_list=[list])
    return Camera(
        f=intrinsic_param_dict['f'],
        c=intrinsic_param_dict['c'],
        T=intrinsic_param_dict['T']
    )
def _root_src_tail2dst_head(src_path: Path, dst_path: Path) -> bool:
    """Pop the tail of *src_path* and prepend it to *dst_path* in place.

    Returns True on success; False when src_path is empty or the popped
    tail is the empty path. Note: src_path is mutated by pop_tail even
    on the empty-tail failure path (matches original behavior).
    """
    check_type(src_path, valid_type_list=[Path])
    check_type(dst_path, valid_type_list=[Path])
    if len(src_path) == 0:
        return False
    tail = src_path.pop_tail()
    if tail == Path(''):
        return False
    dst_path.path_str = (tail + dst_path).to_str()
    return True
def _root_src_head2dst_tail(cls, src_path: Path, dst_path: Path) -> bool:
    """Pop the head of *src_path* and append it to *dst_path* in place.

    Returns True on success; False when src_path is empty or the popped
    head is the empty path. Note: src_path is mutated by pop_head even
    on the empty-head failure path (matches original behavior).
    """
    check_type(src_path, valid_type_list=[Path])
    check_type(dst_path, valid_type_list=[Path])
    if len(src_path) == 0:
        return False
    head = src_path.pop_head()
    if head == Path(''):
        return False
    dst_path.path_str = (dst_path + head).to_str()
    return True
def append_contained(self, new_contained_instance: ObjectInstance):
    """Append a contained ObjectInstance after uniqueness validation.

    Only 'bbox'/'seg' containers may contain instances; the contained
    instance may be 'bbox', 'seg', or 'kpt'. Raises Exception when the
    new instance duplicates this container's instance_id, an already
    contained instance_id, or an existing (instance_type, instance_name)
    pair; in the last case, logs a field-by-field diff of the two
    NDDS annotation objects before raising.
    """
    check_value(self.instance_type, valid_value_list=['bbox', 'seg'])
    check_type(new_contained_instance, valid_type_list=[ObjectInstance])
    check_value(new_contained_instance.instance_type, valid_value_list=['bbox', 'seg', 'kpt'])
    # Check Instance Id: a container cannot contain itself.
    if new_contained_instance.ndds_ann_obj.instance_id == self.ndds_ann_obj.instance_id:
        logger.error(f'new_contained_instance.ndds_ann_obj.instance_id == self.ndds_ann_obj.instance_id')
        logger.error(f'new_contained_instance: {new_contained_instance}')
        logger.error(f'self: {self}')
        raise Exception
    # Reject an instance_id already present in the contained list.
    if new_contained_instance.ndds_ann_obj.instance_id in [
        contained_instance.ndds_ann_obj.instance_id for contained_instance in self.contained_instance_list
    ]:
        logger.error(
            f'new_contained_instance.ndds_ann_obj.instance_id in ' + \
            f'[contained_instance.ndds_ann_obj.instance_id for contained_instance in self.contained_instance_list] == True'
        )
        logger.error(f'new_contained_instance: {new_contained_instance}')
        logger.error(f'self: {self}')
        raise Exception
    # Check (instance_type, instance_name) pair: must be unique among contained.
    if (new_contained_instance.instance_type, new_contained_instance.instance_name) in [
        (contained_instance.instance_type, contained_instance.instance_name) for contained_instance in self.contained_instance_list
    ]:
        logger.error(
            f'(new_contained_instance.instance_type, new_contained_instance.instance_name)=' + \
            f'{(new_contained_instance.instance_type, new_contained_instance.instance_name)} ' + \
            f'pair already exists in self.contained_instance_list'
        )
        logger.error(f'Existing pairs:')
        found_inst = None
        # Locate the clashing instance while logging every existing pair.
        for inst in self.contained_instance_list:
            logger.error(f'\t(inst.instance_type, inst.instance_name)={(inst.instance_type, inst.instance_name)}')
            if (inst.instance_type, inst.instance_name) == (new_contained_instance.instance_type, new_contained_instance.instance_name):
                found_inst = inst.copy()
                found_inst = ObjectInstance.buffer(found_inst)
        logger.error(f'\n')
        if new_contained_instance.ndds_ann_obj != found_inst.ndds_ann_obj:
            # logger.error(f'new_contained_instance:\n{new_contained_instance}')
            # logger.error(f'found_inst:\n{found_inst}')
            # Log which constructor fields differ between the two annotations.
            for key in NDDS_Annotation_Object.get_constructor_params():
                if new_contained_instance.ndds_ann_obj.__dict__[key] != found_inst.ndds_ann_obj.__dict__[key]:
                    logger.error(f'Found difference in key={key}')
                    logger.error(f'\tnew_contained_instance.ndds_ann_obj.__dict__[{key}]:\n\t{new_contained_instance.ndds_ann_obj.__dict__[key]}')
                    logger.error(f'\tfound_inst.ndds_ann_obj.__dict__[{key}]:\n\t{found_inst.ndds_ann_obj.__dict__[key]}')
        else:
            logger.error(f"The two instance's ndds_ann_obj are identical.")
        raise Exception
    self.contained_instance_list.append(new_contained_instance)
def __setitem__(self, idx: int, value: Path):
    """Replace the path component(s) at *idx* with *value* (Path or str).

    Accepts an int index (replaces one component) or a slice (replaces
    the covered components with value's components). Raises TypeError
    for any other index type.
    """
    check_type(value, valid_type_list=[Path, str])
    # BUG FIX: the original converted str -> Path into `value0` but then
    # used `value.path_str` / `value.split()`, which raises AttributeError
    # whenever a str was passed.
    value0 = Path(value) if type(value) is str else value
    path_parts = self.split()
    if type(idx) is int:
        path_parts[idx] = value0.path_str
    elif type(idx) is slice:
        path_parts[idx.start:idx.stop:idx.step] = value0.split()
    else:
        logger.error(f'Expected int or slice. Got type(idx)={type(idx)}')
        raise TypeError
    # BUG FIX: the original mutated the temporary list returned by split()
    # and never wrote it back, so __setitem__ had no effect on self.
    self.path_str = Path.from_split(path_parts).to_str()
def __init__(self, name: str, horizontal_fov: int, intrinsic_settings: IntrinsicSettings, captured_image_size: CapturedImageSize):
    """Camera settings: name, horizontal FOV, intrinsics, and capture size."""
    super().__init__()
    for attr_name, attr_val, attr_cls in [
        ('name', name, str),
        ('horizontal_fov', horizontal_fov, int),
        ('intrinsic_settings', intrinsic_settings, IntrinsicSettings),
        ('captured_image_size', captured_image_size, CapturedImageSize),
    ]:
        check_type(attr_val, valid_type_list=[attr_cls])
        setattr(self, attr_name, attr_val)
def __init__(
    self,
    class_name: str,
    segmentation_class_id: int,
    segmentation_instance_id: int,
    fixed_model_transform: np.ndarray,
    cuboid_dimensions: list
):
    """Exported-object metadata: class/instance ids, 4x4 transform, cuboid dims."""
    super().__init__()
    for attr_name, attr_val, attr_cls in [
        ('class_name', class_name, str),
        ('segmentation_class_id', segmentation_class_id, int),
        ('segmentation_instance_id', segmentation_instance_id, int),
    ]:
        check_type(attr_val, valid_type_list=[attr_cls])
        setattr(self, attr_name, attr_val)
    check_type(fixed_model_transform, valid_type_list=[np.ndarray])
    # The transform must be a homogeneous 4x4 matrix.
    if fixed_model_transform.shape != (4, 4):
        logger.error(f'fixed_model_transform.shape == {fixed_model_transform.shape} != (4, 4)')
        raise Exception
    self.fixed_model_transform = fixed_model_transform
    check_type(cuboid_dimensions, valid_type_list=[list])
    self.cuboid_dimensions = cuboid_dimensions
def _get_ann_format_list(self, dataset_paths: list, ann_format) -> list:
    """Normalize *ann_format* (str or list) into one format per dataset path.

    A str is broadcast to every dataset; a list must match len(dataset_paths).
    """
    check_type(item=ann_format, valid_type_list=[str, list])
    if type(ann_format) is str:
        check_value(item=ann_format, valid_value_list=self.valid_ann_formats)
        return [ann_format] * len(dataset_paths)
    if type(ann_format) is not list:
        raise Exception
    check_value_from_list(item_list=ann_format, valid_value_list=self.valid_ann_formats)
    if len(ann_format) != len(dataset_paths):
        logger.error(
            f"type(ann_format) is list but len(ann_format) == {len(ann_format)} != {len(dataset_paths)} == len(dataset_paths)"
        )
        raise Exception
    check_type_from_list(item_list=ann_format, valid_type_list=[str])
    return ann_format
def _get_ann_path_list(self, dataset_paths: list, ann_path) -> list:
    """Build the annotation path for each dataset path.

    A str *ann_path* is joined onto every dataset path; a list is zipped
    pairwise and must match len(dataset_paths).
    """
    check_type(item=ann_path, valid_type_list=[str, list])
    if type(ann_path) is str:
        return [f"{dataset_path}/{ann_path}" for dataset_path in dataset_paths]
    if type(ann_path) is not list:
        raise Exception
    if len(ann_path) != len(dataset_paths):
        logger.error(
            f"type(ann_path) is list but len(ann_path) == {len(ann_path)} != {len(dataset_paths)} == len(dataset_paths)"
        )
        raise Exception
    check_type_from_list(item_list=ann_path, valid_type_list=[str])
    return [
        f"{dataset_path}/{ann_path_path}"
        for dataset_path, ann_path_path in zip(dataset_paths, ann_path)
    ]
def _get_img_dir_list(self, dataset_paths: list, img_dir) -> list:
    """Build the image directory for each dataset path.

    A str *img_dir* is joined onto every dataset path; a list is zipped
    pairwise and must match len(dataset_paths).
    """
    check_type(item=img_dir, valid_type_list=[str, list])
    if type(img_dir) is str:
        return [f"{dataset_path}/{img_dir}" for dataset_path in dataset_paths]
    if type(img_dir) is not list:
        raise Exception
    if len(img_dir) != len(dataset_paths):
        logger.error(
            f"type(img_dir) is list but len(img_dir) == {len(img_dir)} != {len(dataset_paths)} == len(dataset_paths)"
        )
        raise Exception
    check_type_from_list(item_list=img_dir, valid_type_list=[str])
    return [
        f"{dataset_path}/{img_dir_path}"
        for dataset_path, img_dir_path in zip(dataset_paths, img_dir)
    ]
def __init__(self, r: int, b: int, g: int):
    """Integer color triple.

    NOTE(review): the parameter order is (r, b, g), not the conventional
    (r, g, b) — positional callers depend on it, so it is preserved here;
    confirm whether this ordering is intentional.
    """
    super().__init__()
    for channel in (r, g, b):
        check_type(channel, valid_type_list=[int])
    self.r, self.b, self.g = r, b, g
def __init__(self, x: int, y: int, window: Window):
    """Point tied to a window; validates that it lies inside the frame."""
    super().__init__()
    for val, val_cls in ((x, int), (y, int), (window, Window)):
        check_type(val, valid_type_list=[val_cls])
    self.x, self.y = x, y
    self.window = window
    self._check_in_frame()
def __init__(self, bound_obj, coco_cat: COCO_Category, kpt_list: List[Keypoint2D] = None, kpt_label_list: List[str] = None, img_filename: str = None):
    """Annotation object bound to a BBox or Polygon with optional keypoints.

    kpt_list and kpt_label_list must be provided together (same length) or
    both omitted; each (kpt, label) pair is passed to self.register.
    Raises Exception on a missing/mismatched keypoint-label pairing.
    """
    check_type(bound_obj, valid_type_list=[BBox, Polygon])
    self.bound_obj = bound_obj
    check_type(coco_cat, valid_type_list=[COCO_Category])
    self.coco_cat = coco_cat
    if kpt_list is not None:
        check_type_from_list(kpt_list, valid_type_list=[Keypoint2D])
        self.kpt_list = kpt_list
    else:
        self.kpt_list = []
    # BUG FIX: kpt_label_list was only initialized in the kpt_list-is-None
    # branch, so when keypoints were provided the attribute did not exist
    # before self.register(...) ran. Initialize it unconditionally.
    # NOTE(review): assumes register() appends to self.kpt_label_list —
    # confirm against the register implementation.
    self.kpt_label_list = []
    self.img_filename = img_filename
    if kpt_list is not None or kpt_label_list is not None:
        if kpt_list is None or kpt_label_list is None:
            logger.error(
                f'Must provide both kpt_list and kpt_label_list, or neither.'
            )
            logger.error(
                f'kpt_list: {kpt_list}\nkpt_label_list: {kpt_label_list}')
            logger.error(f'Ground truth labels: {self.coco_cat.keypoints}')
            if self.img_filename is not None:
                logger.error(f'Image filename: {self.img_filename}')
            raise Exception
        if len(kpt_list) != len(kpt_label_list):
            logger.error(
                f'len(kpt_list) == {len(kpt_list)} != {len(kpt_label_list)} == len(kpt_label_list)'
            )
            raise Exception
        for kpt, label in zip(kpt_list, kpt_label_list):
            self.register(kpt=kpt, label=label)
    self.postponed_kpt_list = []
    self.postponed_kpt_label_list = []
def __init__(self, x: float, y: float, color: GL_Color = GL_Color(255, 0, 0)):
    """Colored 2D point; coordinates may be float or int, color defaults to red."""
    super().__init__()
    for coord in (x, y):
        check_type(coord, valid_type_list=[float, int])
    check_type(color, valid_type_list=[GL_Color])
    self.__x, self.__y = x, y
    self.__color = color
def __init__(self: H, obj_type: type, obj_list: List[T] = None):
    """Handler over objects of a single type; validates any provided list."""
    check_type(obj_type, valid_type_list=[type])
    self.obj_type = obj_type
    if obj_list is None:
        self.obj_list = []
    else:
        check_type_from_list(obj_list, valid_type_list=[obj_type])
        self.obj_list = obj_list
def y(self, y: float):
    """Setter: validate (float or int) and store the y coordinate."""
    check_type(y, valid_type_list=[float, int])
    self.__y = y
def x(self, x: float):
    """Setter: validate (float or int) and store the x coordinate."""
    check_type(x, valid_type_list=[float, int])
    self.__x = x
def append(self, ann: LabelmeAnnotation):
    """Validate *ann* as a LabelmeAnnotation and append it to the list."""
    check_type(ann, valid_type_list=[LabelmeAnnotation])
    self.labelme_ann_list.append(ann)
def __setitem__(self, idx: int, value: ID_Mapper):
    """Validate *value* as an ID_Mapper and assign it at *idx*."""
    check_type(value, valid_type_list=[ID_Mapper])
    self.id_maps[idx] = value
def from_dict(cls, collection_dict: dict, check_paths: bool = True) -> DatasetConfigCollection:
    """Build a DatasetConfigCollection from a config dict.

    Expects top-level keys 'collection_dir', 'dataset_names',
    'dataset_specific' (optional 'tag'); 'dataset_specific' must carry
    'img_dir', 'ann_path', 'ann_format' (optional 'tag'), each either a
    single str or a per-dataset list matching len(dataset_names).
    Per-dataset paths are resolved as collection_dir/dataset_name/value;
    when *check_paths* is True, each image dir and annotation file must
    exist. Raises Exception (after logging) on any validation failure.
    """
    check_required_keys(collection_dict, required_keys=[
        'collection_dir', 'dataset_names', 'dataset_specific'
    ])
    collection_dir = collection_dict['collection_dir']
    check_type(collection_dir, valid_type_list=[str])
    dataset_names = collection_dict['dataset_names']
    check_type(dataset_names, valid_type_list=[list])
    check_type_from_list(dataset_names, valid_type_list=[str])
    dataset_specific = collection_dict['dataset_specific']
    check_type(dataset_specific, valid_type_list=[dict])
    # Optional collection-level tag.
    collection_tag = None if 'tag' not in collection_dict else collection_dict[
        'tag']
    check_type(collection_tag, valid_type_list=[type(None), str])
    check_required_keys(
        dataset_specific, required_keys=['img_dir', 'ann_path', 'ann_format'])
    # Each dataset-specific field may be a single str (broadcast) or a
    # list with one entry per dataset.
    img_dir = dataset_specific['img_dir']
    check_type(img_dir, valid_type_list=[str, list])
    if type(img_dir) is list:
        check_type_from_list(img_dir, valid_type_list=[str])
        check_list_length(img_dir, correct_length=len(dataset_names))
    ann_path = dataset_specific['ann_path']
    check_type(ann_path, valid_type_list=[str, list])
    if type(ann_path) is list:
        check_type_from_list(ann_path, valid_type_list=[str])
        check_list_length(ann_path, correct_length=len(dataset_names))
    ann_format = dataset_specific['ann_format']
    check_type(ann_format, valid_type_list=[str, list])
    if type(ann_format) is list:
        check_type_from_list(ann_format, valid_type_list=[str])
        check_list_length(ann_format, correct_length=len(dataset_names))
    # Optional per-dataset tag; list entries may individually be None.
    dataset_tag = None if 'tag' not in dataset_specific else dataset_specific[
        'tag']
    check_type(dataset_tag, valid_type_list=[type(None), str, list])
    if type(dataset_tag) is list:
        check_type_from_list(dataset_tag, valid_type_list=[type(None), str])
        check_list_length(dataset_tag, correct_length=len(dataset_names))
    dataset_config_list = []
    for i in range(len(dataset_names)):
        # Resolve the i-th value of each field (broadcast str or list entry).
        if type(img_dir) is str:
            img_dir0 = img_dir
        elif type(img_dir) is list:
            if i >= len(img_dir):
                raise IndexError
            img_dir0 = img_dir[i]
        else:
            raise Exception
        if type(ann_path) is str:
            ann_path0 = ann_path
        elif type(ann_path) is list:
            if i >= len(ann_path):
                raise IndexError
            ann_path0 = ann_path[i]
        else:
            raise Exception
        if type(ann_format) is str:
            ann_format0 = ann_format
        elif type(ann_format) is list:
            if i >= len(ann_format):
                raise IndexError
            ann_format0 = ann_format[i]
        else:
            raise Exception
        if type(dataset_tag) is str or dataset_tag is None:
            dataset_tag0 = dataset_tag
        elif type(dataset_tag) is list:
            if i >= len(dataset_tag):
                raise IndexError
            dataset_tag0 = dataset_tag[i]
        else:
            raise Exception
        # Resolve paths relative to the collection and dataset directories.
        img_dir1 = rel_to_abs_path(
            f'{collection_dir}/{dataset_names[i]}/{img_dir0}')
        ann_path1 = rel_to_abs_path(
            f'{collection_dir}/{dataset_names[i]}/{ann_path0}')
        if check_paths:
            check_dir_exists(img_dir1)
            check_file_exists(ann_path1)
        config = DatasetConfig(img_dir=img_dir1,
                               ann_path=ann_path1,
                               ann_format=ann_format0,
                               tag=dataset_tag0)
        dataset_config_list.append(config)
    return DatasetConfigCollection(dataset_config_list=dataset_config_list,
                                   tag=collection_tag)
def __init__(self, a: int, b: int):
    """Store the validated integer pair (a, b)."""
    for val in (a, b):
        check_type(val, valid_type_list=[int])
    self.a, self.b = a, b
def check_valid_config(self, collection_dict_list: list):
    """Validate a list of collection config dicts, raising on any defect.

    Each dict must contain exactly the keys in self.main_required_keys;
    its 'dataset_specific' dict must contain exactly the keys in
    self.specific_required_keys. 'img_dir'/'ann_path'/'ann_format' may
    each be a str or a per-dataset list whose length matches
    'dataset_names'; ann_format values must come from
    self.valid_ann_formats. All failures are logged then raised.
    """
    check_type(item=collection_dict_list, valid_type_list=[list])
    for i, collection_dict in enumerate(collection_dict_list):
        check_type(item=collection_dict, valid_type_list=[dict])
        # No unknown keys allowed at the top level.
        check_value_from_list(item_list=list(collection_dict.keys()),
                              valid_value_list=self.main_required_keys)
        # Every required top-level key must be present.
        for required_key in self.main_required_keys:
            if required_key not in collection_dict.keys():
                logger.error(
                    f"collection_dict at index {i} is missing required key: {required_key}"
                )
                raise Exception
        collection_dir = collection_dict['collection_dir']
        check_type(item=collection_dir, valid_type_list=[str])
        dataset_names = collection_dict['dataset_names']
        check_type(item=dataset_names, valid_type_list=[list])
        check_type_from_list(item_list=dataset_names, valid_type_list=[str])
        dataset_specific = collection_dict['dataset_specific']
        check_type(item=dataset_specific, valid_type_list=[dict])
        # No unknown keys allowed in dataset_specific either.
        check_value_from_list(item_list=list(dataset_specific.keys()),
                              valid_value_list=self.specific_required_keys)
        for required_key in self.specific_required_keys:
            if required_key not in dataset_specific.keys():
                logger.error(
                    f"dataset_specific at index {i} is missing required key: {required_key}"
                )
                raise Exception
        img_dir = dataset_specific['img_dir']
        ann_path = dataset_specific['ann_path']
        ann_format = dataset_specific['ann_format']
        check_type_from_list(item_list=[img_dir, ann_path, ann_format],
                             valid_type_list=[str, list])
        # List-valued fields must align one-to-one with dataset_names.
        if type(img_dir) is list and len(img_dir) != len(dataset_names):
            logger.error(f"Length mismatch at index: {i}")
            logger.error(
                f"type(img_dir) is list but len(img_dir) == {len(img_dir)} != {len(dataset_names)} == len(dataset_names)"
            )
            raise Exception
        if type(ann_path) is list and len(ann_path) != len(dataset_names):
            logger.error(f"Length mismatch at index: {i}")
            logger.error(
                f"type(ann_path) is list but len(ann_path) == {len(ann_path)} != {len(dataset_names)} == len(dataset_names)"
            )
            raise Exception
        if type(ann_format) is list and len(ann_format) != len(
                dataset_names):
            logger.error(f"Length mismatch at index: {i}")
            logger.error(
                f"type(ann_format) is list but len(ann_format) == {len(ann_format)} != {len(dataset_names)} == len(dataset_names)"
            )
            raise Exception
        # ann_format values must be among the supported formats.
        if type(ann_format) is str:
            check_value(item=ann_format,
                        valid_value_list=self.valid_ann_formats)
        elif type(ann_format) is list:
            check_value_from_list(item_list=ann_format,
                                  valid_value_list=self.valid_ann_formats)
        else:
            raise Exception
def append(self: H, item: T):
    """Validate *item* against the handler's obj_type, then append it."""
    check_type(item, valid_type_list=[self.obj_type])
    self.obj_list.append(item)