def to_dict(self) -> dict:
    """Serialize this instance to a dict of constructor-parameter attributes.

    Attributes found in ``self.__dict__`` that are not constructor
    parameters (derived/cached state) are skipped, so the result can be
    fed back through the constructor.

    Returns:
        dict: attribute name -> value, for each constructor parameter.
    """
    # Hoist the loop invariant: the original recomputed
    # self.get_constructor_params() twice per attribute.
    constructor_params = self.get_constructor_params()
    result = {}
    for key, val in self.__dict__.items():
        # Debug trace of the membership test — presumably temporary; TODO confirm.
        logger.cyan(
            f'{key} in {constructor_params}: {key in constructor_params}'
        )
        if key in constructor_params:
            result[key] = val
    return result
def test(self):
    """Demo of BGR_Interval splitting and containment; each result is logged."""
    # Two sample colors and three sample intervals to probe with.
    probe_a = BGR(50, 25, 99)
    probe_b = BGR(50, 200, 99)
    whole_span = BGR_Interval.from_bgr_pair(BGR(0, 0, 0), BGR(100, 100, 100))
    inner_span = BGR_Interval.from_bgr_pair(BGR(25, 25, 25), BGR(50, 50, 50))
    wide_span = BGR_Interval.from_bgr_pair(BGR(25, 25, 25), BGR(50, 125, 50))

    # Split the full interval at the first probe color.
    lower_half, upper_half = whole_span.split_at(bgr=probe_a)
    logger.cyan(f"bgr_interval: {whole_span}")
    logger.cyan(f"Split at {probe_a}")
    logger.yellow(f"Result: {lower_half}, {upper_half}")

    # Point-containment queries, plain and detailed.
    logger.yellow(f"bgr_interval.contains({probe_a}): {whole_span.contains(probe_a)}")
    logger.yellow(f"bgr_interval.contains({probe_b}): {whole_span.contains(probe_b)}")
    logger.yellow(f"bgr_interval.contains_detailed({probe_b}): {whole_span.contains_detailed(probe_b)}")

    # Interval-containment queries against the split halves and the samples.
    logger.yellow(f"bgr_interval.contains_bgr_interval(left_bgr_interval): {whole_span.contains_bgr_interval(lower_half)}")
    logger.yellow(f"bgr_interval.contains_bgr_interval(right_bgr_interval): {whole_span.contains_bgr_interval(upper_half)}")
    logger.purple(f"bgr_interval.contains_bgr_interval_detailed(bgr_interval0): {whole_span.contains_bgr_interval_detailed(inner_span)}")
    logger.purple(f"bgr_interval.contains_bgr_interval_detailed(bgr_interval1): {whole_span.contains_bgr_interval_detailed(wide_span)}")
# Strip the duplicate annotation objects flagged for this frame.
# Delete in DESCENDING index order: removing an element shifts every later
# index down by one, so ascending deletion would delete the wrong objects
# (or raise IndexError) whenever del_idx_list holds more than one index.
# If del_idx_list was already descending, this is behavior-identical.
for i in sorted(del_idx_list, reverse=True):
    logger.info(f'frame.img_path: {frame.img_path}')
    logger.info(
        f'Deleted duplicate of {frame.ndds_ann.objects[i].class_name}')
    del frame.ndds_ann.objects[i]

# Walk every frame and report the nesting of its labeled objects:
# handler -> frame -> labeled_obj -> instance -> contained_instance.
for frame in handler:
    logger.green(f'frame.img_path: {frame.img_path}')
    labeled_obj_handler = frame.to_labeled_obj_handler()
    logger.yellow(f'len(labeled_obj_handler): {len(labeled_obj_handler)}')
    for labeled_obj in labeled_obj_handler:
        logger.cyan(
            f'len(labeled_obj.instances): {len(labeled_obj.instances)}')
        for instance in labeled_obj.instances:
            logger.blue(
                f'len(instance.contained_instance_list): {len(instance.contained_instance_list)}'
            )
            for contained_instance in instance.contained_instance_list:
                logger.white(
                    f'{contained_instance.instance_type}: {contained_instance.instance_name} '
                )
from logger import logger
from annotation_utils.coco.structs import COCO_Dataset

# Toyota HSR simulation capture (2020-02-28); annotation json and images
# live in the same directory, so share one root constant.
_data_root = '/home/clayton/workspace/prj/data_keep/data/toyota/dataset/sim/20200228/28_02_2020_11_18_30_coco-data'

dataset = COCO_Dataset.load_from_path(
    json_path=f'{_data_root}/HSR-coco.json',
    img_dir=_data_root
)

# Dump each category's label skeleton (skeleton indices offset by 1).
for coco_cat in dataset.categories:
    logger.purple(f'name: {coco_cat.name}')
    label_skeleton = coco_cat.get_label_skeleton(skeleton_idx_offset=1)
    logger.cyan(f'label_skeleton: {label_skeleton}')
from logger import logger
from annotation_utils.coco.structs import COCO_License_Handler, COCO_License

# Seed three licenses a/b/c with ids 0-2, then append a fourth.
license0, license1, license2 = (
    COCO_License(url=f'url_{tag}', id=idx, name=f'license_{tag}')
    for idx, tag in enumerate(['a', 'b', 'c'])
)
license_handler = COCO_License_Handler([license0, license1, license2])
license_handler.append(COCO_License(url='url_d', id=3, name='license_d'))
logger.purple(license_handler.license_list)

# Work on a copy: delete one entry, then overwrite the slot that took its place.
license_handler0 = license_handler.copy()
del license_handler0[1]
license_handler0[1] = COCO_License(url='url_x', id=99, name='license_x')
for coco_license in license_handler0:
    logger.cyan(coco_license)
logger.blue(len(license_handler0))

# Sort by name and show the new order.
license_handler0.sort(attr_name='name')
for coco_license in license_handler0:
    logger.cyan(coco_license)

# Shuffle and show the randomized order.
logger.info('Shuffle')
license_handler0.shuffle()
for coco_license in license_handler0:
    logger.cyan(coco_license)

# Lookup by id still works regardless of ordering.
coco_license = license_handler0.get_obj_from_id(3)
logger.purple(f'coco_license: {coco_license}')
logger.purple(
    f'license_handler0.to_dict_list():\n{license_handler0.to_dict_list()}')
license_handler0.save_to_path('license_handler.json', overwrite=True)
import time

from logger import logger
from common_utils.path_utils import find_shortest_common_rel_path

# Paths that all end in '.../c/d.png' but diverge earlier, so the shortest
# common relative path is expected to be 'c/d.png'.
path_list = [
    '/path/to/dir/a/b/c/d.png',
    'path/lskdjf/to/dir/a/b/c/d.png',
    'path/to/a/dir/a/b/c/d.png',
    'lksjdfljksdlkfjlsdkfj/c/d.png'
]

# perf_counter_ns is a monotonic clock, so the delta cannot go negative or
# jump if the wall clock is adjusted mid-run (time.time_ns offers no such
# guarantee for interval measurement).
time0 = time.perf_counter_ns()
result = find_shortest_common_rel_path(path_list)
time1 = time.perf_counter_ns()

assert result == 'c/d.png'
logger.cyan(result)
logger.purple(f'{time1-time0} ns elapsed')  # fixed typo: was "ellapsed"
def from_polygon_list_to_merge(self, polygon_list: list) -> Polygon:
    """Merge a list of polygons into a single Polygon via shapely unions.

    NOTE(review): this method is clearly mid-debugging — it plots candidate
    geometries with matplotlib/geopandas, logs every step, and calls
    sys.exit() before the final union/return can run, so the return value
    is currently unreachable. Confirm intent before relying on it.

    NOTE(review): indentation was reconstructed from a collapsed source
    line — verify the nesting against version control before merging.

    Args:
        polygon_list (list): project Polygon objects; entries with fewer
            than 3 vertices are discarded before merging.

    Returns:
        Polygon: self.from_shapely(...) of the unioned geometry (dead code
            at present, see the sys.exit() note above).
    """
    # Local imports: only needed for this debug-heavy path.
    from shapely.geometry import MultiPolygon as ShapelyMultiPolygon
    import matplotlib.pyplot as plt
    import geopandas as gpd

    # Keep only polygons with at least 3 vertices (size()[0] is the vertex
    # count — presumably; TODO confirm against Polygon.size()).
    valid_polygon_list = []
    for polygon in polygon_list:
        if polygon.size()[0] > 2:  # Filter out polygons with less than 3 vertices.
            valid_polygon_list.append(polygon)
    # logger.red(valid_polygon_list)

    # Incrementally union the valid polygons left-to-right.
    merged_polygon = None
    for i, valid_polygon in enumerate(valid_polygon_list):
        if merged_polygon is None:
            # First polygon seeds the accumulator.
            merged_polygon = valid_polygon.to_shapely()
            logger.yellow(
                f"{i+1}/{len(valid_polygon_list)}: type(merged_polygon): {type(merged_polygon)}"
            )
        else:
            if merged_polygon.intersects(valid_polygon.to_shapely()):
                logger.green(f"intersects!")
            else:
                logger.red(f"Not intersects!")
                # ShapelyPolygon comes from an import outside this block.
                if type(merged_polygon) is ShapelyPolygon:
                    logger.cyan(f"Flag0")
                    # Debug visualization of the disjoint pair.
                    polygons = gpd.GeoSeries(merged_polygon)
                    new_polygon = gpd.GeoSeries(valid_polygon.to_shapely())
                    polygons.plot()
                    new_polygon.plot()
                    plt.show()
                    # Invalid geometry would make union results undefined.
                    if not merged_polygon.is_valid:
                        logger.error(f"merged_polygon is not valid")
                        raise Exception
                    if not valid_polygon.to_shapely().is_valid:
                        logger.error(f"New polygon is not valid")
                        raise Exception
                    if merged_polygon.intersects(valid_polygon.to_shapely()):
                        merged_polygon = merged_polygon.union(
                            valid_polygon.to_shapely())
                    else:
                        # NOTE(review): cascaded_union is deprecated in
                        # modern shapely in favor of shapely.ops.unary_union.
                        merged_polygon = cascaded_union(
                            [merged_polygon, valid_polygon.to_shapely()])
                        if type(merged_polygon) is ShapelyMultiPolygon:
                            # Disjoint union produced a MultiPolygon; force
                            # it back to a single Polygon via its hull.
                            logger.cyan(f"Hull")
                            merged_polygon = merged_polygon.convex_hull
                            if type(merged_polygon) is ShapelyPolygon:
                                logger.green(f"Fixed!")
                            elif type(merged_polygon) is ShapelyMultiPolygon:
                                logger.error(f"Not Fixed!")
                                raise Exception
                            else:
                                logger.error(
                                    f"Unknown type: {type(merged_polygon)}")
                                raise Exception
                elif type(merged_polygon) is ShapelyMultiPolygon:
                    logger.error(
                        f"Polygon turned into MultiPolygon in shapely!")
                    raise Exception
                else:
                    logger.error(
                        f"type(merged_polygon): {type(merged_polygon)}")
                    raise Exception
        # Per-iteration progress trace.
        logger.yellow(
            f"{i+1}/{len(valid_polygon_list)}: type(merged_polygon): {type(merged_polygon)}"
        )
        # logger.yellow(f"{i+1}/{len(valid_polygon_list)}: type(merged_polygon.exterior): {type(merged_polygon.exterior)}")
        logger.blue(
            f"{i+1}/{len(valid_polygon_list)}: valid_polygon.size(): {valid_polygon.size()}"
        )
    # NOTE(review): debug leftover — this aborts the whole process and makes
    # the union/return below unreachable.
    import sys
    sys.exit()
    union = cascaded_union([
        valid_polygon.to_shapely() for valid_polygon in valid_polygon_list
    ])
    return self.from_shapely(union)
from logger import logger
from common_utils.common_types.point import Point2D_List
from annotation_utils.labelme.structs import LabelmeAnnotation, LabelmeShape

# Load an existing labelme annotation, tack a test rectangle onto it, dump
# every shape's metadata, and save the result under a new filename.
ann = LabelmeAnnotation.load_from_path(
    '/home/clayton/workspace/test/labelme_testing/orig_cat.json')

bbox_shape = LabelmeShape(
    label='test_bbox',
    points=Point2D_List.from_list([[50, 50], [100, 100]], demarcation=True),
    shape_type='rectangle',
)
ann.shapes.append(shape=bbox_shape)

for shape in ann.shapes:
    logger.purple(f'shape.label: {shape.label}')
    logger.purple(f'shape.shape_type: {shape.shape_type}')
    logger.cyan(
        f'shape.points.to_numpy().shape: {shape.points.to_numpy().shape}')

ann.save_to_path('/home/clayton/workspace/test/labelme_testing/cat.json')