# Delete in reverse index order so that each deletion does not shift the
# indices of the entries still waiting to be deleted.
for i in sorted(del_idx_list, reverse=True):
    logger.info(f'frame.img_path: {frame.img_path}')
    logger.info(
        f'Deleted duplicate of {frame.ndds_ann.objects[i].class_name}')
    del frame.ndds_ann.objects[i]

# for frame in handler:
#     logger.cyan(frame.img_path)
#     logger.purple([ann_obj.class_name for ann_obj in frame.ndds_ann.objects])

# import sys
# sys.exit()

for frame in handler:
    logger.green(f'frame.img_path: {frame.img_path}')
    class_names = [ann_obj.class_name for ann_obj in frame.ndds_ann.objects]

    labeled_obj_handler = frame.to_labeled_obj_handler()
    # logger.purple(f'labeled_obj_handler:\n{labeled_obj_handler}')
    logger.yellow(f'len(labeled_obj_handler): {len(labeled_obj_handler)}')
    for labeled_obj in labeled_obj_handler:
        logger.cyan(
            f'len(labeled_obj.instances): {len(labeled_obj.instances)}')
        for instance in labeled_obj.instances:
            logger.blue(
                f'len(instance.contained_instance_list): {len(instance.contained_instance_list)}'
            )
            for contained_instance in instance.contained_instance_list:
                logger.white(
                    f'{contained_instance.instance_type}: {contained_instance.instance_name} '
                )
# BlenderProc dataset
coco_data_path = "/home/pasonatech/blender_proc/BlenderProc-master/examples/crescent_test/collage_merged_img"

from detectron2.data import DatasetCatalog, MetadataCatalog
from detectron2.data.datasets import register_coco_instances

register_coco_instances(
    name="marker",
    metadata={},
    json_file=f'{coco_data_path}/coco_annotations.json',
    image_root=coco_data_path,
)

MetadataCatalog.get("marker").thing_classes = ['1']
abc_metadata_train = MetadataCatalog.get("marker")
logger.purple(abc_metadata_train)
dataset_dicts = DatasetCatalog.get("marker")
logger.blue(dataset_dicts)

# Fine-tuning
from detectron2.engine import DefaultTrainer
from detectron2.config import get_cfg
from detectron2.model_zoo import get_config_file, get_checkpoint_url

#model_config_path = "COCO-InstanceSegmentation/mask_rcnn_R_50_FPN_3x.yaml"
model_config_path = "COCO-Detection/faster_rcnn_R_50_FPN_1x.yaml"


cfg = get_cfg()
cfg.merge_from_file(get_config_file(model_config_path))
cfg.DATASETS.TRAIN = ("marker",)
cfg.DATASETS.TEST = ()
cfg.DATALOADER.NUM_WORKERS = 2
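
# The config above stops at the dataloader settings. Below is a minimal sketch
# of how such a setup is typically completed and training launched with
# detectron2's standard workflow; the solver values are illustrative, not
# taken from the original.
import os

cfg.MODEL.WEIGHTS = get_checkpoint_url(model_config_path)  # pretrained COCO weights
cfg.MODEL.ROI_HEADS.NUM_CLASSES = 1  # single "marker" class
cfg.SOLVER.IMS_PER_BATCH = 2
cfg.SOLVER.BASE_LR = 0.00025
cfg.SOLVER.MAX_ITER = 300

os.makedirs(cfg.OUTPUT_DIR, exist_ok=True)
trainer = DefaultTrainer(cfg)
trainer.resume_or_load(resume=False)
trainer.train()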
# Example #3
    def to_dict0(self) -> dict:
        # TODO: Work In Progress
        img_dir_list = [
            Path(config.img_dir).abs() for config in self.dataset_config_list
        ]
        ann_path_list = [
            Path(config.ann_path).abs() for config in self.dataset_config_list
        ]
        ann_format_list = [
            config.ann_format for config in self.dataset_config_list
        ]
        img_container_dir = Path.get_longest_container_dir(img_dir_list)
        ann_container_dir = Path.get_longest_container_dir(ann_path_list)
        collection_dir = Path.get_longest_container_dir(
            [img_container_dir, ann_container_dir])
        dataset_names = [
            img_dir.replace(f'{collection_dir.path_str}/', '') \
                if img_dir != collection_dir else Path('') \
                for img_dir in img_dir_list
        ]

        # One empty relative path placeholder per dataset.
        rel_img_dir_list = [Path('') for _ in dataset_names]

        logger.purple(f'Before dataset_names: {dataset_names}')
        logger.purple(f'Before rel_img_dir_list: {rel_img_dir_list}')

        while True:
            logger.blue(f'Flag0')
            # Adjust dataset_names tails
            tail_moved = Path.tail2head(dataset_names, rel_img_dir_list)

            if not tail_moved:
                break

        while True:
            logger.blue(f'Flag1')
            # Adjust dataset_names heads
            head_moved = Path.head2tail(dataset_names, collection_dir)

            if not head_moved:
                break

        logger.purple(f'After dataset_names: {dataset_names}')
        logger.purple(f'After rel_img_dir_list: {rel_img_dir_list}')

        rel_img_dir_list = [
            rel_img_dir if rel_img_dir is not None else Path('')
            for rel_img_dir in rel_img_dir_list
        ]
        rel_ann_path_list = [
            ann_path.replace(f'{collection_dir}/{dataset_name}/', '') \
                if dataset_name != Path('') else ann_path.replace(f'{collection_dir}/', '') \
                for ann_path, dataset_name in zip(ann_path_list, dataset_names)
        ]

        dataset_names = [
            dataset_name.path_str if dataset_name.path_str != '' else '.'
            for dataset_name in dataset_names
        ]
        rel_img_dir = rel_img_dir_list[0].path_str if len(
            list(dict.fromkeys(rel_img_dir_list))) == 1 else [
                rel_img_dir.path_str for rel_img_dir in rel_img_dir_list
            ]
        if type(rel_img_dir) is str:
            rel_img_dir = rel_img_dir if rel_img_dir != '' else '.'
        elif type(rel_img_dir) is list:
            rel_img_dir = [
                dir_path if dir_path != '' else '.' for dir_path in rel_img_dir
            ]
        else:
            raise TypeError(
                f'Unexpected type for rel_img_dir: {type(rel_img_dir)}')
        rel_ann_path = rel_ann_path_list[0].path_str if len(
            list(dict.fromkeys(rel_ann_path_list))) == 1 else [
                rel_ann_path.path_str for rel_ann_path in rel_ann_path_list
            ]
        ann_format = ann_format_list[0] if len(
            list(dict.fromkeys(ann_format_list))) == 1 else ann_format_list

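        # Illustrative shape of the returned dict (paths are hypothetical,
        # assuming two datasets under one collection directory):
        # {
        #     'collection_dir': '/data/collection',
        #     'dataset_names': ['dataset_a', 'dataset_b'],
        #     'dataset_specific': {
        #         'img_dir': 'img',
        #         'ann_path': 'ann/coco.json',
        #         'ann_format': 'coco'
        #     }
        # }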
        return {
            'collection_dir': collection_dir.path_str,
            'dataset_names': dataset_names,
            'dataset_specific': {
                'img_dir': rel_img_dir,
                'ann_path': rel_ann_path,
                'ann_format': ann_format
            }
        }
from logger import logger
from annotation_utils.coco.structs import COCO_License_Handler, COCO_License

license0 = COCO_License(url='url_a', id=0, name='license_a')
license1 = COCO_License(url='url_b', id=1, name='license_b')
license2 = COCO_License(url='url_c', id=2, name='license_c')
license_handler = COCO_License_Handler([license0, license1, license2])

license_handler.append(COCO_License(url='url_d', id=3, name='license_d'))
logger.purple(license_handler.license_list)
license_handler0 = license_handler.copy()
del license_handler0[1]
license_handler0[1] = COCO_License(url='url_x', id=99, name='license_x')
for coco_license in license_handler0:
    logger.cyan(coco_license)
logger.blue(len(license_handler0))
license_handler0.sort(attr_name='name')
for coco_license in license_handler0:
    logger.cyan(coco_license)

logger.info('Shuffle')
license_handler0.shuffle()
for coco_license in license_handler0:
    logger.cyan(coco_license)

coco_license = license_handler0.get_obj_from_id(3)
logger.purple(f'coco_license: {coco_license}')

logger.purple(
    f'license_handler0.to_dict_list():\n{license_handler0.to_dict_list()}')
license_handler0.save_to_path('license_handler.json', overwrite=True)
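
# Round-trip sketch: assuming COCO_License_Handler exposes a load_from_path
# counterpart to save_to_path (as DatasetConfigCollectionHandler does in a
# later example); if it does not, this is only the shape such a check takes.
license_handler1 = COCO_License_Handler.load_from_path('license_handler.json')
for coco_license in license_handler1:
    logger.cyan(coco_license)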
# Example #5
    def from_polygon_list_to_merge(self, polygon_list: list) -> Polygon:
        from shapely.geometry import Polygon as ShapelyPolygon
        from shapely.geometry import MultiPolygon as ShapelyMultiPolygon
        from shapely.ops import cascaded_union
        import matplotlib.pyplot as plt
        import geopandas as gpd

        valid_polygon_list = []
        for polygon in polygon_list:
            if polygon.size()[0] > 2:  # Filter out polygons with fewer than 3 vertices.
                valid_polygon_list.append(polygon)
        # logger.red(valid_polygon_list)
        merged_polygon = None
        for i, valid_polygon in enumerate(valid_polygon_list):
            if merged_polygon is None:
                merged_polygon = valid_polygon.to_shapely()
                logger.yellow(
                    f"{i+1}/{len(valid_polygon_list)}: type(merged_polygon): {type(merged_polygon)}"
                )
            else:
                if merged_polygon.intersects(valid_polygon.to_shapely()):
                    logger.green(f"intersects!")
                else:
                    logger.red(f"Not intersects!")
                if type(merged_polygon) is ShapelyPolygon:
                    logger.cyan(f"Flag0")
                    polygons = gpd.GeoSeries(merged_polygon)
                    new_polygon = gpd.GeoSeries(valid_polygon.to_shapely())
                    polygons.plot()
                    new_polygon.plot()
                    plt.show()
                    if not merged_polygon.is_valid:
                        logger.error(f"merged_polygon is not valid")
                        raise Exception
                    if not valid_polygon.to_shapely().is_valid:
                        logger.error(f"New polygon is not valid")
                        raise Exception
                    if merged_polygon.intersects(valid_polygon.to_shapely()):
                        merged_polygon = merged_polygon.union(
                            valid_polygon.to_shapely())
                    else:
                        merged_polygon = cascaded_union(
                            [merged_polygon,
                             valid_polygon.to_shapely()])
                    if type(merged_polygon) is ShapelyMultiPolygon:
                        logger.cyan(f"Hull")
                        merged_polygon = merged_polygon.convex_hull
                        if type(merged_polygon) is ShapelyPolygon:
                            logger.green(f"Fixed!")
                        elif type(merged_polygon) is ShapelyMultiPolygon:
                            logger.error(f"Not Fixed!")
                            raise Exception
                        else:
                            logger.error(
                                f"Unknown type: {type(merged_polygon)}")
                            raise Exception
                elif type(merged_polygon) is ShapelyMultiPolygon:
                    logger.error(
                        f"Polygon turned into MultiPolygon in shapely!")
                    raise Exception
                else:
                    logger.error(
                        f"type(merged_polygon): {type(merged_polygon)}")
                    raise Exception

                logger.yellow(
                    f"{i+1}/{len(valid_polygon_list)}: type(merged_polygon): {type(merged_polygon)}"
                )
                # logger.yellow(f"{i+1}/{len(valid_polygon_list)}: type(merged_polygon.exterior): {type(merged_polygon.exterior)}")
            logger.blue(
                f"{i+1}/{len(valid_polygon_list)}: valid_polygon.size(): {valid_polygon.size()}"
            )

        union = cascaded_union([
            valid_polygon.to_shapely() for valid_polygon in valid_polygon_list
        ])
        return self.from_shapely(union)
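
# cascaded_union is deprecated in recent Shapely releases in favor of
# shapely.ops.unary_union. A self-contained sketch of the same merge on two
# toy squares (coordinates are illustrative):
from shapely.geometry import Polygon as ShapelyPolygon
from shapely.ops import unary_union

a = ShapelyPolygon([(0, 0), (2, 0), (2, 2), (0, 2)])
b = ShapelyPolygon([(1, 1), (3, 1), (3, 3), (1, 3)])
merged = unary_union([a, b])
print(type(merged).__name__, merged.area)  # Polygon 7.0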
from logger import logger
from annotation_utils.dataset.config import DatasetConfigCollectionHandler

handler = DatasetConfigCollectionHandler.load_from_path(
    '/home/clayton/workspace/prj/data_keep/data/toyota/dataset/config/json/box_hsr_kpt_train.json'
)
logger.purple(handler.to_dict_list())
for collection in handler:
    for config in collection:
        logger.blue(config)
handler.save_to_path('test.yaml', overwrite=True)
handler0 = DatasetConfigCollectionHandler.load_from_path('test.yaml')
handler0.save_to_path('test0.yaml', overwrite=True)

with open('test.yaml') as fp, open('test0.yaml') as fp0:
    for i, (line, line0) in enumerate(zip(fp, fp0)):
        logger.white(f'{i}: {line.strip()}')
        logger.white(f'{i}: {line0.strip()}')
        assert line.strip() == line0.strip()
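
# A stricter, byte-level alternative to the line-by-line check above (the
# loop tolerates leading/trailing-whitespace differences; filecmp does not):
import filecmp
assert filecmp.cmp('test.yaml', 'test0.yaml', shallow=False)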