# NOTE(review): this line is a whitespace-mangled fragment that begins in the
# middle of an if/elif chain (the opening `if` is outside the visible source)
# and is therefore left byte-identical rather than reformatted.
# What it does, in order:
#   1. Renames hsr / single-letter keypoint NDDS class names to the
#      'type_object_instance_contained' convention; unknown names are logged
#      via logger.error and raise a bare Exception.
#   2. Converts the NDDS dataset to COCO using `hsr_categories`
#      (allow_same_instance_for_contained=True, color_interval=5).
#   3. Moves images into dst_dir, saves output.json, and renders a preview
#      video with masks (kpt_idx_offset=-1 — presumably 1-based keypoint
#      indices in the source; verify against the category file).
obj_type, obj_name = 'seg', 'hsr' instance_name = '0' ann_obj.class_name = f'{obj_type}_{obj_name}_{instance_name}' elif ann_obj.class_name.lower() in list('abcdefghijkl'): obj_type, obj_name = 'kpt', 'hsr' instance_name, contained_name = '0', ann_obj.class_name ann_obj.class_name = f'{obj_type}_{obj_name}_{instance_name}_{contained_name}' else: logger.error(f'Unknown ann_obj.class_name: {ann_obj.class_name}') raise Exception # Convert To COCO Dataset dataset = COCO_Dataset.from_ndds(ndds_dataset=ndds_dataset, categories=hsr_categories, naming_rule='type_object_instance_contained', show_pbar=True, bbox_area_threshold=1, allow_same_instance_for_contained=True, color_interval=5) dataset.move_images(dst_dir, preserve_filenames=True, update_img_paths=True, overwrite=True, show_pbar=True) dataset.save_to_path(f'{dst_dir}/output.json', overwrite=True) dataset.save_video( save_path=f'{dst_dir}/preview_with_mask.avi', fps=5, show_details=True, kpt_idx_offset=-1, )
# Register the single 'bolt-roi' category (the handler itself is created
# outside this fragment).
bolt_roi_categories.append(
    COCO_Category(
        id=len(bolt_roi_categories),
        name='bolt-roi'
    )
)
print(f"Bolt_roi_categories :{bolt_roi_categories}")

# Convert the NDDS dataset to COCO. The three mark segmentations are merged
# into the single bolt-roi segmentation via class_merge_map.
bolt_roi_dataset = COCO_Dataset.from_ndds(
    ndds_dataset=ndds_dataset,
    categories=bolt_roi_categories,
    naming_rule='type_object_instance_contained',
    delimiter='_',
    ignore_unspecified_categories=True,
    show_pbar=True,
    bbox_area_threshold=1,
    default_visibility_threshold=0.01,
    visibility_threshold_dict={'bolt-roi': 0.01},
    allow_unfound_seg=False,
    class_merge_map={
        'seg_mark-inner_0': 'seg_bolt-roi_0',
        'seg_mark-middle_0': 'seg_bolt-roi_0',
        'seg_mark-outer_0': 'seg_bolt-roi_0'
    }
)

# Write images and annotations under {target_dst_dir}/bolt_roi.
bolt_roi_dst_dir = f'{target_dst_dir}/bolt_roi'
make_dir_if_not_exists(bolt_roi_dst_dir)
bolt_roi_dataset.move_images(
    dst_img_dir=bolt_roi_dst_dir,
    preserve_filenames=True,
    overwrite_duplicates=False,
    update_img_paths=True,
    overwrite=True,
    show_pbar=True
)
bolt_roi_dataset.save_to_path(f'{bolt_roi_dst_dir}/output.json', overwrite=True)
# Fix NDDS Dataset naming so that it follows convention. (This is not
# necessary if the NDDS dataset already follows the naming convention.)
for frame in ndds_dataset.frames:
    # Fix Naming Convention: any class starting with 'crescent' becomes
    # the single segmentation instance 'seg_crescent_0'.
    for ann_obj in frame.ndds_ann.objects:
        if ann_obj.class_name.startswith('crescent'):
            obj_type, obj_name = 'seg', 'crescent'
            instance_name = '0'
            ann_obj.class_name = f'{obj_type}_{obj_name}_{instance_name}'

# Convert To COCO Dataset with a single 'crescent' category.
crescent_categories = COCO_Category_Handler()
crescent_categories.append(
    COCO_Category(id=len(crescent_categories), name='crescent'))
dataset = COCO_Dataset.from_ndds(
    ndds_dataset=ndds_dataset,
    categories=crescent_categories,
    naming_rule='type_object_instance_contained',
    show_pbar=True,
    bbox_area_threshold=-1,  # negative threshold: keep every bbox
    allow_unfound_seg=True)

# Move images into dst_dir and save the annotation json + preview video.
dataset.move_images(
    dst_img_dir=dst_dir,
    preserve_filenames=True,
    overwrite_duplicates=False,
    update_img_paths=True,
    overwrite=True,
    show_pbar=True)
dataset.save_to_path(f'{dst_dir}/output.json', overwrite=True)
# dataset.display_preview(show_details=True)
dataset.save_video(save_path=f'{dst_dir}/preview.mp4', fps=5, show_details=True)
# NOTE(review): this fragment begins inside the per-frame/per-object loop
# (the enclosing for/if is outside the visible source); it is reconstructed
# flat here — restore loop nesting when the surrounding code is visible.
logger.error(f'ann_obj.class_name: {ann_obj.class_name}')
# raise Exception

# Delete Duplicate Objects
frame.ndds_ann.objects.delete_duplicates(verbose=True, verbose_ref=frame.img_path)
# ndds_dataset.save_to_path(save_path=f'{coco_data_dir}/hook_fixed_ndds.json', overwrite=True)

# Convert To COCO Dataset using the tropicana category file.
dataset = COCO_Dataset.from_ndds(
    ndds_dataset=ndds_dataset,
    # categories=COCO_Category_Handler.load_from_path(f'/home/jitesh/3d/data/categories/hook_7ckpt.json'),
    categories=COCO_Category_Handler.load_from_path(
        f'/home/jitesh/3d/data/categories/tropicana.json'),
    naming_rule='type_object_instance_contained',
    ignore_unspecified_categories=True,
    show_pbar=True,
    bbox_area_threshold=1,
    default_visibility_threshold=-1,  # negative: no visibility filtering
    allow_unfound_seg=True,
)

# Prepare the output directory layout: {coco_data_dir}/img and /json.
make_dir_if_not_exists(coco_data_dir)
img_path = f'{coco_data_dir}/img'
# BUG FIX: this previously repeated make_dir_if_not_exists(coco_data_dir)
# (copy-paste duplicate), so the image directory that move_images writes
# into was never created.
make_dir_if_not_exists(img_path)
ann_dir = f'{coco_data_dir}/json'
make_dir_if_not_exists(ann_dir)
dataset.move_images(
    dst_img_dir=img_path,
    preserve_filenames=False,
    update_img_paths=True,
    overwrite=True,
    show_pbar=True)
# NOTE(review): this fragment runs inside a per-frame loop (`frame` is bound
# outside the visible source); the save/convert steps at the bottom are
# top-level one-time statements.
for ann_obj in frame.ndds_ann.objects:
    if ann_obj.class_name.startswith('hsr'):
        # 'hsrN' -> 'seg_hsr_N'
        obj_type, obj_name = 'seg', 'hsr'
        instance_name = ann_obj.class_name.replace('hsr', '')
        ann_obj.class_name = f'{obj_type}_{obj_name}_{instance_name}'
    elif ann_obj.class_name.startswith('point'):
        # assumes a two-character suffix after 'point' (letter + digit),
        # e.g. 'pointa0' -> instance '0', contained keypoint 'a' — TODO confirm
        obj_type, obj_name = 'kpt', 'hsr'
        temp = ann_obj.class_name.replace('point', '')
        instance_name, contained_name = temp[1], temp[0]
        ann_obj.class_name = f'{obj_type}_{obj_name}_{instance_name}_{contained_name}'
    else:
        logger.error(f'ann_obj.class_name: {ann_obj.class_name}')
        raise Exception

# Delete Duplicate Objects
frame.ndds_ann.objects.delete_duplicates(verbose=True, verbose_ref=frame.img_path)

# Persist the renamed NDDS dataset, then convert to COCO and preview.
ndds_dataset.save_to_path(save_path='hsr_fixed_ndds.json', overwrite=True)
dataset = COCO_Dataset.from_ndds(
    ndds_dataset=ndds_dataset,
    categories=COCO_Category_Handler.load_from_path(
        '/home/clayton/workspace/prj/data_keep/data/ndds/categories/hsr.json'),
    naming_rule='type_object_instance_contained',
    show_pbar=True,
    bbox_area_threshold=1)
dataset.save_to_path('hsr_ndds2coco_test.json', overwrite=True)
dataset.display_preview(show_details=True)
# Load the raw NDDS dataset from its json directory.
ndds_dataset = NDDS_Dataset.load_from_dir(
    json_dir='/home/clayton/workspace/prj/data_keep/data/ndds/NewMap2',
    show_pbar=True)

# Fix NDDS Dataset naming so that it follows convention. (This is not
# necessary if the NDDS dataset already follows the naming convention.)
for frame in ndds_dataset.frames:
    for ann_obj in frame.ndds_ann.objects:
        # Note: Part numbers should be specified in the obj_type string
        # (seg0/seg1 distinguish the two cone parts).
        if ann_obj.class_name == 'colorcone1':
            obj_type, obj_name = 'seg0', 'cone'
            ann_obj.class_name = f'{obj_type}_{obj_name}'
        elif ann_obj.class_name == 'colorcone2':
            obj_type, obj_name = 'seg1', 'cone'
            ann_obj.class_name = f'{obj_type}_{obj_name}'

# Persist the renamed NDDS dataset.
ndds_dataset.save_to_path(save_path='cone_fixed_ndds.json', overwrite=True)

# Convert To COCO Dataset and preview the result.
dataset = COCO_Dataset.from_ndds(
    ndds_dataset=ndds_dataset,
    categories=COCO_Category_Handler.load_from_path(
        '/home/clayton/workspace/prj/data_keep/data/ndds/categories/cone.json'
    ),
    naming_rule='type_object_instance_contained',
    delimiter='_',
    ignore_unspecified_categories=True,
    show_pbar=True)
dataset.save_to_path('cone_ndds2coco_test.json', overwrite=True)
dataset.display_preview(show_details=True)
# NOTE(review): this fragment begins inside a per-frame loop (`frame` is
# bound outside the visible source); indentation below is reconstructed.
for ann_obj in frame.ndds_ann.objects:
    # 'bolt' -> 'seg_bolt-roi' (no instance suffix in this script).
    if ann_obj.class_name == 'bolt':
        obj_type, obj_name = 'seg', 'bolt-roi'
        ann_obj.class_name = f'{obj_type}_{obj_name}'

# Define COCO Categories
categories = COCO_Category_Handler()
categories.append(COCO_Category(id=len(categories), name='bolt-roi'))

# Convert To COCO Dataset; the three mark classes are merged into the
# bolt-roi segmentation via class_merge_map.
dataset = COCO_Dataset.from_ndds(
    ndds_dataset=ndds_dataset,
    categories=categories,
    naming_rule='type_object_instance_contained',
    delimiter='_',
    ignore_unspecified_categories=True,
    show_pbar=True,
    bbox_area_threshold=1,
    default_visibility_threshold=0.10,
    visibility_threshold_dict={'bolt-roi': 0.01},
    allow_unfound_seg=True,
    class_merge_map={
        'mark1': 'seg_bolt-roi',
        'mark2': 'seg_bolt-roi',
        'mark3': 'seg_bolt-roi'
    })

# Save COCO Dataset
dataset.save_to_path('bolt-roi_dataset.json', overwrite=True)

# Preview Dataset
dataset.display_preview(show_details=True)
# NOTE(review): this line is a whitespace-mangled fragment. It begins in the
# middle of an if/elif chain, contains a dangling `else:` whose matching `if`
# is outside the visible source (presumably an "if result_json does not yet
# exist" cache check — verify), and ends mid-call inside
# dataset.save_visualization(...). It is left byte-identical.
# What it does, in order:
#   1. Renames measurement-part NDDS classes ('part_nine0', '90part_zero9',
#      and an unseen first branch) to 'seg_<name>_0'.
#   2. Converts to COCO using measure_all.json categories
#      (exclude_invalid_polygons=True) and saves to result_json; otherwise
#      (else branch) reloads the previously saved result_json.
#   3. Removes the 'measure' category, sorts images by file_name, and starts
#      saving a visualization to 'measure_test_vis' (call truncated here).
obj_type, obj_name, instance_name = 'seg', '80part0', '0' ann_obj.class_name = f'{obj_type}_{obj_name}_{instance_name}' elif ann_obj.class_name == 'part_nine0': obj_type, obj_name, instance_name = 'seg', '90part9', '0' ann_obj.class_name = f'{obj_type}_{obj_name}_{instance_name}' elif ann_obj.class_name == '90part_zero9': obj_type, obj_name, instance_name = 'seg', '90part0', '0' ann_obj.class_name = f'{obj_type}_{obj_name}_{instance_name}' # Convert To COCO Dataset dataset = COCO_Dataset.from_ndds( ndds_dataset=ndds_dataset, categories=COCO_Category_Handler.load_from_path( '/home/clayton/workspace/prj/data_keep/data/ndds/categories/measure_all.json' ), naming_rule='type_object_instance_contained', delimiter='_', ignore_unspecified_categories=True, show_pbar=True, bbox_area_threshold=1, exclude_invalid_polygons=True) dataset.save_to_path(result_json) else: dataset = COCO_Dataset.load_from_path(result_json) dataset.remove_categories_by_name(category_names=['measure']) dataset.images.sort(attr_name='file_name') dataset.save_visualization(save_dir='measure_test_vis', preserve_filenames=True, show_details=True, bbox_thickness=1,