# Example: Load a Labelme dataset and define the COCO categories (with keypoints) used for conversion.
# (import paths assume the standard annotation_utils / common_utils package layout)
from annotation_utils.labelme.structs import LabelmeAnnotationHandler
from annotation_utils.coco.structs import COCO_Category_Handler, COCO_Category
from common_utils.file_utils import file_exists

# Load Labelme Handler
json_dir = '/path/to/labelme/json/dir'
labelme_handler = LabelmeAnnotationHandler.load_from_dir(load_dir=json_dir)

# Define COCO Categories Before Conversion
if not file_exists('categories_example.json'):
    # Save a new categories json if it doesn't already exist.
    categories = COCO_Category_Handler()
    categories.append( # Standard Keypoint Example
        COCO_Category(
            id=len(categories),
            supercategory='pet',
            name='dog',
            keypoints=[ # The keypoint labels are defined here
                'left_eye', 'right_eye', # 0, 1
                'mouth_left', 'mouth_center', 'mouth_right' # 2, 3, 4
            ],
            skeleton=[ # The connections between keypoints are defined with indices here
                [0, 1], [2, 3], [3, 4]
            ]
        )
    )
    categories.append( # Simple Keypoint Example
        COCO_Category.from_label_skeleton(
            id=len(categories),
            supercategory='pet',
            name='cat',
            label_skeleton=[ # The connections between keypoints are defined with labels here
                ['left_eye', 'right_eye'],
                ['mouth_left', 'mouth_center'],
                ['mouth_center', 'mouth_right']
            ]
        )
    )
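The skeleton entries above are pairs of indices into the keypoints list. As a quick sanity check (plain Python, no library calls; the lists below simply mirror the 'dog' category defined above), each connection can be printed by keypoint name:

keypoints = ['left_eye', 'right_eye', 'mouth_left', 'mouth_center', 'mouth_right']
skeleton = [[0, 1], [2, 3], [3, 4]]
for start_idx, end_idx in skeleton:
    print(f'{keypoints[start_idx]} <-> {keypoints[end_idx]}')
# left_eye <-> right_eye
# mouth_left <-> mouth_center
# mouth_center <-> mouth_right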
# Example: Convert an NDDS segmentation dataset (crescent) to a COCO dataset.
# (import paths assume the standard annotation_utils package layout)
from annotation_utils.ndds.structs import NDDS_Dataset
from annotation_utils.coco.structs import COCO_Dataset, COCO_Category_Handler, COCO_Category

src_dir = '/path/to/ndds/dataset' # source NDDS dataset directory
dst_dir = '/path/to/output/dir' # destination directory for the converted COCO dataset

# Load NDDS Dataset
ndds_dataset = NDDS_Dataset.load_from_dir(json_dir=src_dir, show_pbar=True)

# Fix NDDS Dataset naming so that it follows convention.
# (This is not necessary if the NDDS dataset already follows the naming convention.)
for frame in ndds_dataset.frames:
    # Fix Naming Convention
    for ann_obj in frame.ndds_ann.objects:
        if ann_obj.class_name.startswith('crescent'):
            obj_type, obj_name = 'seg', 'crescent'
            instance_name = '0'
            ann_obj.class_name = f'{obj_type}_{obj_name}_{instance_name}'

# Convert To COCO Dataset
crescent_categories = COCO_Category_Handler()
crescent_categories.append(COCO_Category(id=len(crescent_categories), name='crescent'))
dataset = COCO_Dataset.from_ndds(
    ndds_dataset=ndds_dataset,
    categories=crescent_categories,
    naming_rule='type_object_instance_contained',
    show_pbar=True,
    bbox_area_threshold=-1,
    allow_unfound_seg=True
)
dataset.move_images(
    dst_img_dir=dst_dir,
    preserve_filenames=True,
    overwrite_duplicates=False,
    update_img_paths=True,
    overwrite=True,
    show_pbar=True
)
dataset.save_to_path(f'{dst_dir}/output.json', overwrite=True)
# dataset.display_preview(show_details=True)
dataset.save_video(save_path=f'{dst_dir}/preview.mp4')
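After the conversion, it can help to re-open the saved annotation file and confirm the round-trip. A minimal sketch, assuming COCO_Dataset exposes a load_from_path counterpart to the save_to_path call above (verify against the installed version of annotation_utils):

# Assumed API: load_from_path mirroring save_to_path.
reloaded = COCO_Dataset.load_from_path(f'{dst_dir}/output.json')
print(f'{len(reloaded.images)} images, {len(reloaded.annotations)} annotations')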
# Example: Define a single-category handler and save it to disk for later reuse.
from annotation_utils.coco.structs import COCO_Category_Handler, COCO_Category

handler = COCO_Category_Handler()
handler.append(COCO_Category(id=len(handler), name='garbage'))

output_dir = '/home/clayton/workspace/prj/data_keep/data/ndds/categories'
handler.save_to_path(f'{output_dir}/garbage.json', overwrite=True)
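The saved json can then be loaded by later conversion scripts instead of rebuilding the handler in code. A minimal sketch, assuming COCO_Category_Handler exposes a load_from_path counterpart to save_to_path and supports list-style iteration (both are assumptions to verify against the installed version):

# Assumed API: load_from_path mirroring save_to_path, list-style iteration.
garbage_categories = COCO_Category_Handler.load_from_path(f'{output_dir}/garbage.json')
for category in garbage_categories:
    print(category.id, category.name)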
# Example: Convert an NDDS segmentation dataset (cup) to a COCO dataset.
# (import paths assume the standard annotation_utils package layout)
from annotation_utils.ndds.structs import NDDS_Dataset
from annotation_utils.coco.structs import COCO_Dataset, COCO_Category_Handler, COCO_Category

src_dir = '/path/to/ndds/dataset' # source NDDS dataset directory
dst_dir = '/path/to/output/dir' # destination directory for the converted COCO dataset

# Load NDDS Dataset
ndds_dataset = NDDS_Dataset.load_from_dir(json_dir=src_dir, show_pbar=True)

# Fix NDDS Dataset naming so that it follows convention.
# (This is not necessary if the NDDS dataset already follows the naming convention.)
for frame in ndds_dataset.frames:
    # Fix Naming Convention
    for ann_obj in frame.ndds_ann.objects:
        if ann_obj.class_name.startswith('cup'):
            obj_type, obj_name = 'seg', 'cup'
            instance_name = '0'
            ann_obj.class_name = f'{obj_type}_{obj_name}_{instance_name}'

# Convert To COCO Dataset
cup_categories = COCO_Category_Handler()
cup_categories.append(COCO_Category(id=len(cup_categories), name='cup'))
dataset = COCO_Dataset.from_ndds(
    ndds_dataset=ndds_dataset,
    categories=cup_categories,
    naming_rule='type_object_instance_contained',
    show_pbar=True,
    bbox_area_threshold=-1,
    allow_unfound_seg=True
)
dataset.move_images(
    dst_img_dir=dst_dir,
    preserve_filenames=True,
    overwrite_duplicates=False,
    update_img_paths=True,
    overwrite=True,
    show_pbar=True
)
dataset.save_to_path(f'{dst_dir}/output.json', overwrite=True)
# dataset.display_preview(show_details=True)
dataset.save_video(save_path=f'{dst_dir}/preview.mp4')
# Excerpt from a larger bolt conversion script: the naming-fix loop above this point
# assembles obj_type/obj_name/instance_name/contained_name and a delete_idx_list.
            ann_obj.class_name = f'{obj_type}_{obj_name}_{instance_name}_{contained_name}'

for idx in delete_idx_list[::-1]:
    del ndds_dataset.frames[idx]
    print(f'ID deleted {idx}')

# Bolt ROI Dataset Creation
logger.info('Creating Bolt ROI Dataset')
bolt_roi_categories = COCO_Category_Handler()
print(f'Bolt_roi_categories: {bolt_roi_categories}')
bolt_roi_categories.append(
    COCO_Category(
        id=len(bolt_roi_categories),
        name='bolt-roi'
    )
)
print(f'Bolt_roi_categories: {bolt_roi_categories}')
bolt_roi_dataset = COCO_Dataset.from_ndds(
    ndds_dataset=ndds_dataset,
    categories=bolt_roi_categories,
    naming_rule='type_object_instance_contained',
    delimiter='_',
    ignore_unspecified_categories=True,
    show_pbar=True,
    bbox_area_threshold=1,
    default_visibility_threshold=0.01,
    visibility_threshold_dict={'bolt-roi': 0.01},
    allow_unfound_seg=False,
    class_merge_map={
from logger import logger
from annotation_utils.coco.structs import COCO_Category

coco_cat = COCO_Category(id=0, supercategory=1.0, name=2)
assert type(coco_cat.supercategory) is str
assert type(coco_cat.name) is str
coco_cat0 = COCO_Category.from_dict(coco_cat.to_dict())
assert type(coco_cat0.supercategory) is str
assert type(coco_cat0.name) is str

coco_cat = COCO_Category(id=0, supercategory='a', name='b')
assert type(coco_cat.supercategory) is str
assert type(coco_cat.name) is str
coco_cat0 = COCO_Category.from_dict(coco_cat.to_dict())
assert type(coco_cat0.supercategory) is str
assert type(coco_cat0.name) is str

logger.good('Test passed')
# Example: Build a COCO_Dataset from scratch with categories, licenses, and images.
from annotation_utils.coco.structs import COCO_Dataset, \
    COCO_License, COCO_Image, COCO_Annotation, COCO_Category
from common_utils.common_types.bbox import BBox
from logger import logger

dataset = COCO_Dataset.new(description='Test')
dataset.categories.append(
    COCO_Category(id=len(dataset.categories), supercategory='test_category', name='category_a'))
dataset.categories.append(
    COCO_Category(id=len(dataset.categories), supercategory='test_category', name='category_b'))
dataset.categories.append(
    COCO_Category(id=len(dataset.categories), supercategory='test_category', name='category_c'))
for i in range(10):
    dataset.licenses.append(
        COCO_License(url=f'test_license_{i}', name=f'Test License {i}', id=len(dataset.licenses)))
for i in range(20):
    dataset.images.append(
        COCO_Image(license_id=i % len(dataset.licenses), file_name=f'{i}.jpg',
                   coco_url=f'/path/to/{i}.jpg', height=500, width=500,
# Example: Convert an NDDS bolt dataset to a COCO dataset, merging mark classes into the bolt ROI.
# (import paths assume the standard annotation_utils package layout)
from annotation_utils.ndds.structs import NDDS_Dataset
from annotation_utils.coco.structs import COCO_Dataset, COCO_Category_Handler, COCO_Category

# Load NDDS Dataset
ndds_dataset = NDDS_Dataset.load_from_dir(
    json_dir='/home/clayton/workspace/prj/data_keep/data/ndds/type1',
    show_pbar=True
)

# Fix NDDS Dataset naming so that it follows convention.
# (This is not necessary if the NDDS dataset already follows the naming convention.)
for frame in ndds_dataset.frames:
    for ann_obj in frame.ndds_ann.objects:
        if ann_obj.class_name == 'bolt':
            obj_type, obj_name = 'seg', 'bolt-roi'
            ann_obj.class_name = f'{obj_type}_{obj_name}'

# Define COCO Categories
categories = COCO_Category_Handler()
categories.append(COCO_Category(id=len(categories), name='bolt-roi'))

# Convert To COCO Dataset
dataset = COCO_Dataset.from_ndds(
    ndds_dataset=ndds_dataset,
    categories=categories,
    naming_rule='type_object_instance_contained',
    delimiter='_',
    ignore_unspecified_categories=True,
    show_pbar=True,
    bbox_area_threshold=1,
    default_visibility_threshold=0.10,
    visibility_threshold_dict={'bolt-roi': 0.01},
    allow_unfound_seg=True,
    class_merge_map={
        'mark1': 'seg_bolt-roi',
        'mark2': 'seg_bolt-roi',
from annotation_utils.coco.structs import COCO_Category_Handler, COCO_Category

handler = COCO_Category_Handler()
handler.append(
    COCO_Category(supercategory='screw', name='screw', keypoints=[], skeleton=[], id=len(handler)))
handler.append(
    COCO_Category(supercategory='hole', name='hole', keypoints=[], skeleton=[], id=len(handler)))
handler.save_to_path('interphone_ng_object_categories.json')
# Example: Define a category via from_label_skeleton and save it to disk.
from annotation_utils.coco.structs import COCO_Category_Handler, COCO_Category

handler = COCO_Category_Handler()
handler.append(
    COCO_Category.from_label_skeleton(
        supercategory='', # Supercategory Name
        name='bracket_light', # Object Name
        id=len(handler),
        label_skeleton=[] # Keypoint label pairs would go here (left empty in this example)
    )
)
handler.save_to_path(save_path='hien_all_2_symbol_categories.json', overwrite=True) # Save Categories