def test_crop_covered_segments(self):
    """CropCoveredSegments must cut the polygon-covered area out of the mask."""
    source = Dataset.from_iterable([
        DatasetItem(id=1, image=np.zeros((5, 5, 3)),
            annotations=[
                # The mask is partially covered by the polygon
                Mask(np.array(
                    [[0, 0, 1, 1, 1],
                     [0, 0, 1, 1, 1],
                     [1, 1, 1, 1, 1],
                     [1, 1, 1, 0, 0],
                     [1, 1, 1, 0, 0]],
                ), z_order=0),
                Polygon([1, 1, 4, 1, 4, 4, 1, 4], z_order=1),
            ]),
    ])

    expected = Dataset.from_iterable([
        DatasetItem(id=1, image=np.zeros((5, 5, 3)),
            annotations=[
                # The covered pixels are removed from the mask
                Mask(np.array(
                    [[0, 0, 1, 1, 1],
                     [0, 0, 0, 0, 1],
                     [1, 0, 0, 0, 1],
                     [1, 0, 0, 0, 0],
                     [1, 1, 1, 0, 0]],
                ), z_order=0),
                Polygon([1, 1, 4, 1, 4, 4, 1, 4], z_order=1),
            ]),
    ])

    actual = transforms.CropCoveredSegments(source)

    compare_datasets(self, expected, actual)
def test_reindex(self):
    """Converting with reindex=True must renumber item and annotation ids from 1."""
    source = Dataset.from_iterable([
        DatasetItem(id=2, image=np.ones((4, 2, 3)),
            annotations=[
                Polygon([0, 0, 4, 0, 4, 4], label=0, id=5),
            ],
            attributes={'id': 22}),
    ], categories=[str(i) for i in range(10)])

    expected = Dataset.from_iterable([
        DatasetItem(id=2, image=np.ones((4, 2, 3)),
            annotations=[
                # id/group reset by reindexing
                Polygon([0, 0, 4, 0, 4, 4], label=0, id=1, group=1,
                    attributes={'is_crowd': False}),
            ],
            attributes={'id': 1}),
    ], categories=[str(i) for i in range(10)])

    with TestDir() as test_dir:
        self._test_save_and_load(source,
            partial(CocoConverter.convert, reindex=True),
            test_dir, target_dataset=expected)
def __iter__(self):
    """Yield a single 'train' item covering the keypoint-grouping cases."""
    items = [
        DatasetItem(id=1, subset='train', image=np.zeros((5, 5, 3)),
            annotations=[
                # Full instance annotations: polygon + keypoints
                Points([0, 0, 0, 2, 4, 1], [0, 1, 2],
                    label=3, group=1, id=1),
                Polygon([0, 0, 4, 0, 4, 4], label=3, group=1, id=1),

                # Full instance annotations: bbox + keypoints
                Points([1, 2, 3, 4, 2, 3], group=2, id=2),
                Bbox(1, 2, 2, 2, group=2, id=2),

                # Solitary keypoints
                Points([1, 2, 0, 2, 4, 1], label=5, id=3),

                # Some other solitary annotations (bug #1387)
                Polygon([0, 0, 4, 0, 4, 4], label=3, id=4),

                # Solitary keypoints with no label
                Points([0, 0, 1, 2, 3, 4], [0, 1, 2], id=5),
            ]),
    ]
    return iter(items)
def test_can_convert_polygons_to_mask(self):
    """Saving with segmentation_mode='mask' must merge grouped polygons into one mask."""
    source = Dataset.from_iterable([
        DatasetItem(id=1, image=np.zeros((6, 10, 3)),
            annotations=[
                Polygon([0, 0, 4, 0, 4, 4], label=3, id=4, group=4),
                Polygon([5, 0, 9, 0, 5, 5], label=3, id=4, group=4),
            ]),
    ], categories=[str(i) for i in range(10)])

    expected = Dataset.from_iterable([
        DatasetItem(id=1, image=np.zeros((6, 10, 3)),
            annotations=[
                Mask(np.array([
                    [0, 1, 1, 1, 0, 1, 1, 1, 1, 0],
                    [0, 0, 1, 1, 0, 1, 1, 1, 0, 0],
                    [0, 0, 0, 1, 0, 1, 1, 0, 0, 0],
                    [0, 0, 0, 0, 0, 1, 0, 0, 0, 0],
                    [0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
                    [0, 0, 0, 0, 0, 0, 0, 0, 0, 0]],
                    # only internal fragment (without the border),
                    # but not everywhere...
                ), attributes={'is_crowd': True},
                    label=3, id=4, group=4),
            ],
            attributes={'id': 1}),
    ], categories=[str(i) for i in range(10)])

    with TestDir() as test_dir:
        self._test_save_and_load(source,
            partial(CocoInstancesConverter.convert, segmentation_mode='mask'),
            test_dir, target_dataset=expected)
def test_polygons_to_masks(self):
    """PolygonsToMasks must rasterize each polygon into its own mask."""
    source = Dataset.from_iterable([
        DatasetItem(id=1, image=np.zeros((5, 10, 3)),
            annotations=[
                Polygon([0, 0, 4, 0, 4, 4]),
                Polygon([5, 0, 9, 0, 5, 5]),
            ]),
    ])

    expected = Dataset.from_iterable([
        DatasetItem(id=1, image=np.zeros((5, 10, 3)),
            annotations=[
                Mask(np.array([
                    [0, 0, 0, 0, 0, 1, 1, 1, 1, 0],
                    [0, 0, 0, 0, 0, 1, 1, 1, 0, 0],
                    [0, 0, 0, 0, 0, 1, 1, 0, 0, 0],
                    [0, 0, 0, 0, 0, 1, 0, 0, 0, 0],
                    [0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
                ])),
                Mask(np.array([
                    [0, 1, 1, 1, 0, 0, 0, 0, 0, 0],
                    [0, 0, 1, 1, 0, 0, 0, 0, 0, 0],
                    [0, 0, 0, 1, 0, 0, 0, 0, 0, 0],
                    [0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
                    [0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
                ])),
            ]),
    ])

    actual = transforms.PolygonsToMasks(source)

    compare_datasets(self, expected, actual)
def test_mask_to_polygons(self):
    """MasksToPolygons must split a mask into one polygon per connected region."""
    source = Dataset.from_iterable([
        DatasetItem(id=1, image=np.zeros((5, 10, 3)),
            annotations=[
                Mask(np.array([
                    [0, 1, 1, 1, 0, 1, 1, 1, 1, 0],
                    [0, 0, 1, 1, 0, 1, 1, 1, 0, 0],
                    [0, 0, 0, 1, 0, 1, 1, 0, 0, 0],
                    [0, 0, 0, 0, 0, 1, 0, 0, 0, 0],
                    [0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
                ])),
            ]),
    ])

    expected = Dataset.from_iterable([
        DatasetItem(id=1, image=np.zeros((5, 10, 3)),
            annotations=[
                Polygon([3.0, 2.5, 1.0, 0.0, 3.5, 0.0, 3.0, 2.5]),
                Polygon([5.0, 3.5, 4.5, 0.0, 8.0, 0.0, 5.0, 3.5]),
            ]),
    ])

    actual = transforms.MasksToPolygons(source)

    compare_datasets(self, expected, actual)
def test_can_save_and_load_bboxes(self):
    """ICDAR text-localization round-trip must keep bboxes, polygons and texts."""
    expected = Dataset.from_iterable([
        DatasetItem(id='a/b/1', subset='train',
            image=np.ones((10, 15, 3)),
            annotations=[
                Bbox(1, 3, 6, 10),
                Bbox(0, 1, 3, 5, attributes={'text': 'word 0'}),
            ]),
        DatasetItem(id=2, subset='train',
            image=np.ones((10, 15, 3)),
            annotations=[
                Polygon([0, 0, 3, 0, 4, 7, 1, 8],
                    attributes={'text': 'word 1'}),
                Polygon([1, 2, 5, 3, 6, 8, 0, 7]),
            ]),
        DatasetItem(id=3, subset='train',
            image=np.ones((10, 15, 3)),
            annotations=[
                Polygon([2, 2, 8, 3, 7, 10, 2, 9],
                    attributes={'text': 'word_2'}),
                Bbox(0, 2, 5, 9, attributes={'text': 'word_3'}),
            ]),
    ])

    with TestDir() as test_dir:
        self._test_save_and_load(expected,
            partial(IcdarTextLocalizationConverter.convert, save_images=True),
            test_dir, 'icdar_text_localization')
def __iter__(self):
    """Yield one item holding two plain polygons."""
    items = [
        DatasetItem(id=1, image=np.zeros((5, 10, 3)),
            annotations=[
                Polygon([0, 0, 4, 0, 4, 4]),
                Polygon([5, 0, 9, 0, 5, 5]),
            ]),
    ]
    return iter(items)
def __iter__(self):
    """Yield one item holding the two polygons produced from a split mask."""
    items = [
        DatasetItem(id=1, image=np.zeros((5, 10, 3)),
            annotations=[
                Polygon([3.0, 2.5, 1.0, 0.0, 3.5, 0.0, 3.0, 2.5]),
                Polygon([5.0, 3.5, 4.5, 0.0, 8.0, 0.0, 5.0, 3.5]),
            ]),
    ]
    return iter(items)
def __iter__(self):
    """Yield items across subsets s1-s3 with varied annotation types and frames."""
    items = [
        DatasetItem(id=0, subset='s1', image=np.zeros((5, 10, 3)),
            annotations=[
                Polygon([0, 0, 4, 0, 4, 4], label=1, group=4,
                    attributes={'occluded': True}),
                Points([1, 1, 3, 2, 2, 3], label=2,
                    attributes={'occluded': False, 'a1': 'x', 'a2': 42}),
                Label(1),
                Label(2, attributes={'a1': 'y', 'a2': 44}),
            ],
            attributes={'frame': 0}),
        DatasetItem(id=1, subset='s1',
            annotations=[
                PolyLine([0, 0, 4, 0, 4, 4], label=3, group=4,
                    attributes={'occluded': False}),
                Bbox(5, 0, 1, 9, label=3, group=4,
                    attributes={'occluded': False}),
            ],
            attributes={'frame': 1}),
        DatasetItem(id=2, subset='s2', image=np.ones((5, 10, 3)),
            annotations=[
                Polygon([0, 0, 4, 0, 4, 4], z_order=1, label=3, group=4,
                    attributes={'occluded': False}),
            ],
            attributes={'frame': 0}),
        # Item with a lazily-loaded image of a known size
        DatasetItem(id=3, subset='s3',
            image=Image(path='3.jpg', size=(2, 4)),
            attributes={'frame': 0}),
    ]
    return iter(items)
def test_can_save_dataset_with_cyrillic_and_spaces_in_filename(self):
    """LabelMe round-trip must keep items whose ids contain Cyrillic and spaces."""
    source = Dataset.from_iterable([
        DatasetItem(id='кириллица с пробелом', subset='train',
            image=np.ones((16, 16, 3)),
            annotations=[
                Polygon([0, 4, 4, 4, 5, 6], label=3,
                    attributes={'occluded': True,
                        'a1': 'qwe', 'a2': True, 'a3': 123}),
            ]),
    ], categories={
        AnnotationType.label: LabelCategories.from_iterable(
            'label_' + str(label) for label in range(10)),
    })

    expected = Dataset.from_iterable([
        DatasetItem(id='кириллица с пробелом', subset='train',
            image=np.ones((16, 16, 3)),
            annotations=[
                # LabelMe keeps only the used label and adds a 'username'
                Polygon([0, 4, 4, 4, 5, 6], label=0, id=0,
                    attributes={'occluded': True, 'username': '',
                        'a1': 'qwe', 'a2': True, 'a3': 123}),
            ]),
    ], categories={
        AnnotationType.label: LabelCategories.from_iterable(['label_3']),
    })

    with TestDir() as test_dir:
        self._test_save_and_load(source,
            partial(LabelMeConverter.convert, save_images=True),
            test_dir, target_dataset=expected, require_images=True)
def test_remap_labels(self):
    """RemapLabels with default='keep' must remap listed labels and keep the rest."""
    source = Dataset.from_iterable([
        DatasetItem(id=1, annotations=[
            # Should be remapped
            Label(1),
            Bbox(1, 2, 3, 4, label=2),
            Mask(image=np.array([1]), label=3),

            # Should be kept
            Polygon([1, 1, 2, 2, 3, 4], label=4),
            PolyLine([1, 3, 4, 2, 5, 6]),
        ]),
    ], categories={
        AnnotationType.label: LabelCategories.from_iterable(
            'label%s' % i for i in range(5)),
        AnnotationType.mask: MaskCategories(
            colormap=mask_tools.generate_colormap(5)),
    })

    expected = Dataset.from_iterable([
        DatasetItem(id=1, annotations=[
            Label(1),
            Bbox(1, 2, 3, 4, label=0),
            Mask(image=np.array([1]), label=1),
            Polygon([1, 1, 2, 2, 3, 4], label=2),
            PolyLine([1, 3, 4, 2, 5, 6], label=None),
        ]),
    ], categories={
        AnnotationType.label: LabelCategories.from_iterable(
            ['label0', 'label9', 'label4']),
        # Colors of the dropped labels disappear from the colormap
        AnnotationType.mask: MaskCategories(colormap={
            k: v for k, v in mask_tools.generate_colormap(5).items()
            if k in {0, 1, 3, 4}
        }),
    })

    actual = transforms.RemapLabels(source, mapping={
        'label1': 'label9',
        'label2': 'label0',
        'label3': 'label9',
    }, default='keep')

    compare_datasets(self, expected, actual)
def __iter__(self):
    """Yield one item with two polygons grouped into a single instance."""
    items = [
        DatasetItem(id=1, image=np.zeros((6, 10, 3)),
            annotations=[
                Polygon([0, 0, 4, 0, 4, 4], label=3, id=4, group=4),
                Polygon([5, 0, 9, 0, 5, 5], label=3, id=4, group=4),
            ]),
    ]
    return iter(items)
def __iter__(self):
    """Yield items with unknown attributes and a label-less shape for export checks."""
    items = [
        DatasetItem(id=0, subset='s1', image=np.zeros((5, 10, 3)),
            annotations=[
                Polygon([0, 0, 4, 0, 4, 4], label=1, group=4,
                    attributes={'occluded': True}),
                Polygon([5, 0, 9, 0, 5, 5], label=2, group=4,
                    attributes={'unknown': 'bar'}),
                Points([1, 1, 3, 2, 2, 3], label=2,
                    attributes={'a1': 'x', 'a2': 42}),
                Label(1),
                Label(2, attributes={'a1': 'y', 'a2': 44}),
            ]),
        DatasetItem(id=1, subset='s1',
            annotations=[
                PolyLine([0, 0, 4, 0, 4, 4], label=3, id=4, group=4),
                Bbox(5, 0, 1, 9, label=3, id=4, group=4),
            ]),
        DatasetItem(id=2, subset='s2', image=np.ones((5, 10, 3)),
            annotations=[
                Polygon([0, 0, 4, 0, 4, 4], label=3, group=4,
                    attributes={'z_order': 1, 'occluded': False}),
                PolyLine([5, 0, 9, 0, 5, 5]),  # will be skipped as no label
            ]),
        DatasetItem(id=3, subset='s3',
            image=Image(path='3.jpg', size=(2, 4))),
    ]
    return iter(items)
def __iter__(self):
    """Yield keypoint items paired with polygons, one instance per group."""
    items = [
        DatasetItem(id=1, subset='train', image=np.zeros((5, 5, 3)),
            annotations=[
                Points([0, 0, 0, 2, 4, 1], [0, 1, 2], label=3,
                    group=1, id=1, attributes={'is_crowd': False}),
                Polygon([0, 0, 4, 0, 4, 4], label=3,
                    group=1, id=1, attributes={'is_crowd': False}),

                Points([1, 2, 3, 4, 2, 3],
                    group=2, id=2, attributes={'is_crowd': False}),
                Polygon([1, 2, 3, 2, 3, 4, 1, 4],
                    group=2, id=2, attributes={'is_crowd': False}),
            ]),
        DatasetItem(id=2, subset='train',
            annotations=[
                Points([1, 2, 0, 2, 4, 1], label=5,
                    group=3, id=3, attributes={'is_crowd': False}),
                Polygon([0, 1, 4, 1, 4, 2, 0, 2], label=5,
                    group=3, id=3, attributes={'is_crowd': False}),
            ]),
        DatasetItem(id=3, subset='val',
            annotations=[
                Points([0, 0, 1, 2, 3, 4], [0, 1, 2],
                    group=3, id=3, attributes={'is_crowd': False}),
                Polygon([1, 2, 3, 2, 3, 4, 1, 4],
                    group=3, id=3, attributes={'is_crowd': False}),
            ]),
    ]
    return iter(items)
def test_can_crop_covered_segments(self):
    """Saving with crop_covered=True must cut overlapped parts out of the mask."""
    source = Dataset.from_iterable([
        DatasetItem(id=1, image=np.zeros((5, 5, 3)),
            annotations=[
                Mask(np.array(
                    [[0, 0, 1, 1, 1],
                     [0, 0, 1, 1, 1],
                     [1, 1, 0, 1, 1],
                     [1, 1, 1, 0, 0],
                     [1, 1, 1, 0, 0]],
                ), label=2, id=1, z_order=0),
                Polygon([1, 1, 4, 1, 4, 4, 1, 4],
                    label=1, id=2, z_order=1),
            ]),
    ], categories=[str(i) for i in range(10)])

    expected = Dataset.from_iterable([
        DatasetItem(id=1, image=np.zeros((5, 5, 3)),
            annotations=[
                Mask(np.array(
                    [[0, 0, 1, 1, 1],
                     [0, 0, 0, 0, 1],
                     [1, 0, 0, 0, 1],
                     [1, 0, 0, 0, 0],
                     [1, 1, 1, 0, 0]],
                ), attributes={'is_crowd': True},
                    label=2, id=1, group=1),
                Polygon([1, 1, 4, 1, 4, 4, 1, 4],
                    label=1, id=2, group=2,
                    attributes={'is_crowd': False}),
            ],
            attributes={'id': 1}),
    ], categories=[str(i) for i in range(10)])

    with TestDir() as test_dir:
        self._test_save_and_load(source,
            partial(CocoInstancesConverter.convert, crop_covered=True),
            test_dir, target_dataset=expected)
def __iter__(self):
    """Yield one item with the non-crowd polygons of a converted mask."""
    items = [
        DatasetItem(id=1, image=np.zeros((5, 10, 3)),
            annotations=[
                Polygon([3.0, 2.5, 1.0, 0.0, 3.5, 0.0, 3.0, 2.5],
                    label=3, id=4, group=4,
                    attributes={'is_crowd': False}),
                Polygon([5.0, 3.5, 4.5, 0.0, 8.0, 0.0, 5.0, 3.5],
                    label=3, id=4, group=4,
                    attributes={'is_crowd': False}),
            ]),
    ]
    return iter(items)
def _parse_shape_ann(cls, ann, categories):
    """Build a single annotation object from a raw shape record.

    :param ann: dict-like shape record with at least a 'type' key;
        other keys ('id', 'points', 'label', ...) are optional.
    :param categories: categories mapping used to resolve the label name
        into a label index and to look up the declared attributes.
    :return: a PolyLine, Polygon, Points or Bbox annotation.
    :raises NotImplementedError: for an unrecognized shape type.
    """
    ann_id = ann.get('id')
    ann_type = ann['type']

    attributes = ann.get('attributes', {})
    # A built-in flag is carried over only when the label set declares it,
    # defaulting to False when the record omits the field.
    if 'occluded' in categories[AnnotationType.label].attributes:
        attributes['occluded'] = ann.get('occluded', False)
    if 'outside' in categories[AnnotationType.label].attributes:
        attributes['outside'] = ann.get('outside', False)
    if 'keyframe' in categories[AnnotationType.label].attributes:
        attributes['keyframe'] = ann.get('keyframe', False)

    group = ann.get('group')

    label = ann.get('label')
    # find() returns (index, entry); only the index is needed here
    label_id = categories[AnnotationType.label].find(label)[0]

    z_order = ann.get('z_order', 0)
    points = ann.get('points', [])

    if ann_type == 'polyline':
        return PolyLine(points, label=label_id, z_order=z_order,
            id=ann_id, attributes=attributes, group=group)

    elif ann_type == 'polygon':
        return Polygon(points, label=label_id, z_order=z_order,
            id=ann_id, attributes=attributes, group=group)

    elif ann_type == 'points':
        return Points(points, label=label_id, z_order=z_order,
            id=ann_id, attributes=attributes, group=group)

    elif ann_type == 'box':
        # Boxes arrive as two corner points (x1, y1, x2, y2);
        # convert to the (x, y, w, h) form Bbox expects.
        x, y = points[0], points[1]
        w, h = points[2] - x, points[3] - y
        return Bbox(x, y, w, h, label=label_id, z_order=z_order,
            id=ann_id, attributes=attributes, group=group)

    else:
        raise NotImplementedError("Unknown annotation type '%s'" % ann_type)
def __iter__(self):
    """Yield expected COCO items: one polygon instance and two crowd masks."""
    items = [
        DatasetItem(id=1, subset='train', image=np.ones((4, 4, 3)),
            annotations=[
                Polygon([0, 1, 2, 1, 2, 3, 0, 3],
                    attributes={'is_crowd': False},
                    label=2, group=1, id=1),
            ]),
        DatasetItem(id=2, subset='train', image=np.ones((4, 4, 3)),
            annotations=[
                Mask(np.array([
                    [0, 1, 0, 0],
                    [0, 1, 0, 0],
                    [0, 1, 1, 1],
                    [0, 0, 0, 0]],
                ), attributes={'is_crowd': True},
                    label=4, group=3, id=3),
            ]),
        DatasetItem(id=3, subset='val', image=np.ones((4, 4, 3)),
            annotations=[
                Mask(np.array([
                    [0, 0, 0, 0],
                    [1, 1, 1, 0],
                    [1, 1, 0, 0],
                    [0, 0, 0, 0]],
                ), attributes={'is_crowd': True},
                    label=4, group=3, id=3),
            ]),
    ]
    return iter(items)
def download_labeled_image(task_api, task_id:int, frame_id:int, outdir:str):
    """Download one frame of a task, build a dataset from its shapes,
    and save a rendered (annotated) image into *outdir*.

    :param task_api: client object exposing download_frame/get_id/get_jobs
        (presumably a CVAT-like task API; exact type not visible here)
    :param task_id: id of the task to read
    :param frame_id: frame number within the task
    :param outdir: directory the rendered image is written to
    """
    filename, img = task_api.download_frame(task_id, frame_id, outdir, save=False)
    # label.id -> label object, for attribute/name lookups below
    labels = dict([(label.id, label) for label in task_api.get_id(task_id).labels])
    jobs = task_api.get_jobs(task_id)
    # Jobs whose [start_frame, stop_frame] range contains the frame.
    # NOTE(review): '&' works here because both sides are bools, but 'and'
    # is the conventional operator for this.
    job_id_with_frame = [job.id for job in jobs if (int(job.start_frame) <= frame_id) & (frame_id <= int(job.stop_frame))]
    # NOTE(review): 'job_api' is not defined in this function or its
    # parameters — presumably a module-level client object; verify.
    job_annos_list = [job_api.get_annotations(job_id) for job_id in job_id_with_frame]
    # First shape on this frame per job.
    # NOTE(review): the [0] raises IndexError when a job has no shape on
    # this frame; also only ONE shape per job is taken — confirm intended.
    annos = [[anno for anno in job_annos.shapes if anno.frame==frame_id][0] for job_annos in job_annos_list]
    categories=[label.name for label in labels.values()]
    items = []
    for anno in annos:
        # spec_id -> attribute name for this annotation's label
        attriname = dict([ (attr.id, attr.name) for attr in labels[anno.label_id].attributes ])
        attris = dict([ (attriname[attr.spec_id], attr.value) for attr in anno.attributes ])
        # Only rectangles and polygons are converted; other shape types
        # are silently dropped.
        if anno.type=='rectangle':
            # points come as (x1, y1, x2, y2); Bbox wants (x, y, w, h)
            x,y,x2,y2 = anno.points
            items.append(Bbox(x,y,x2-x, y2-y,attributes=attris, label=categories.index(labels[anno.label_id].name)))
        elif anno.type=='polygon':
            items.append(Polygon(anno.points,attributes=attris, label=categories.index(labels[anno.label_id].name)))
    # image = {'data': np.asarray(img),
    #          'path': str(Path(outdir)/filename)}
    dataset = Dataset.from_iterable([
        DatasetItem(id=filename, annotations=items, image=np.array(img))], categories=categories)
    customset = customDataset(dataset)
    # NOTE(review): every image_data is saved to the same path, so later
    # iterations overwrite earlier ones — confirm there is only one item.
    for image_data in customset._imageDatas:
        image_data.drawItem('s','s').saveImg(Path(outdir)/filename)
def test_can_load_image(self):
    """CvatImporter must parse the dummy image dataset into the expected items."""
    expected = Dataset.from_iterable([
        DatasetItem(id='img0', subset='train', image=np.ones((8, 8, 3)),
            annotations=[
                Bbox(0, 2, 4, 2, label=0, z_order=1,
                    attributes={'occluded': True, 'a1': True, 'a2': 'v3'}),
                PolyLine([1, 2, 3, 4, 5, 6, 7, 8],
                    attributes={'occluded': False}),
            ],
            attributes={'frame': 0}),
        DatasetItem(id='img1', subset='train', image=np.ones((10, 10, 3)),
            annotations=[
                Polygon([1, 2, 3, 4, 6, 5], z_order=1,
                    attributes={'occluded': False}),
                Points([1, 2, 3, 4, 5, 6], label=1, z_order=2,
                    attributes={'occluded': False}),
            ],
            attributes={'frame': 1}),
    ], categories={
        AnnotationType.label: LabelCategories.from_iterable([
            ['label1', '', {'a1', 'a2'}],
            ['label2'],
        ]),
    })

    parsed = CvatImporter()(DUMMY_IMAGE_DATASET_DIR).make_dataset()

    compare_datasets(self, expected, parsed)
def __iter__(self):
    """Yield two 'train' items with z-ordered shape annotations."""
    items = [
        DatasetItem(id=0, subset='train', image=np.ones((8, 8, 3)),
            annotations=[
                Bbox(0, 2, 4, 2, label=0, z_order=1,
                    attributes={'occluded': True, 'a1': True, 'a2': 'v3'}),
                PolyLine([1, 2, 3, 4, 5, 6, 7, 8], z_order=0,
                    attributes={'occluded': False}),
            ]),
        DatasetItem(id=1, subset='train', image=np.ones((10, 10, 3)),
            annotations=[
                Polygon([1, 2, 3, 4, 6, 5], z_order=1,
                    attributes={'occluded': False}),
                Points([1, 2, 3, 4, 5, 6], label=1, z_order=2,
                    attributes={'occluded': False}),
            ]),
    ]
    return iter(items)
def test_transform_labels(self):
    """AnnsToLabels must collapse all annotations into their set of Labels."""
    source = Dataset.from_iterable([
        DatasetItem(id=1, annotations=[
            Label(1),
            Bbox(1, 2, 3, 4, label=2),
            Bbox(1, 3, 3, 3),  # no label -> contributes nothing
            Mask(image=np.array([1]), label=3),
            Polygon([1, 1, 2, 2, 3, 4], label=4),
            PolyLine([1, 3, 4, 2, 5, 6], label=5),
        ]),
    ], categories=['label%s' % i for i in range(6)])

    expected = Dataset.from_iterable([
        DatasetItem(id=1, annotations=[
            Label(1), Label(2), Label(3), Label(4), Label(5),
        ]),
    ], categories=['label%s' % i for i in range(6)])

    actual = transforms.AnnsToLabels(source)

    compare_datasets(self, expected, actual)
def test_shapes_to_boxes(self):
    """ShapesToBoxes must replace every shape with its bounding box."""
    source = Dataset.from_iterable([
        DatasetItem(id=1, image=np.zeros((5, 5, 3)),
            annotations=[
                Mask(np.array(
                    [[0, 0, 1, 1, 1],
                     [0, 0, 0, 0, 1],
                     [1, 0, 0, 0, 1],
                     [1, 0, 0, 0, 0],
                     [1, 1, 1, 0, 0]],
                ), id=1),
                Polygon([1, 1, 4, 1, 4, 4, 1, 4], id=2),
                PolyLine([1, 1, 2, 1, 2, 2, 1, 2], id=3),
                Points([2, 2, 4, 2, 4, 4, 2, 4], id=4),
            ]),
    ])

    expected = Dataset.from_iterable([
        DatasetItem(id=1, image=np.zeros((5, 5, 3)),
            annotations=[
                Bbox(0, 0, 4, 4, id=1),
                Bbox(1, 1, 3, 3, id=2),
                Bbox(1, 1, 1, 1, id=3),
                Bbox(2, 2, 2, 2, id=4),
            ]),
    ])

    actual = transforms.ShapesToBoxes(source)

    compare_datasets(self, expected, actual)
def __iter__(self):
    """Yield the expected LabelMe item with grouped boxes and masks."""
    items = [
        DatasetItem(id=1, subset='train', image=np.ones((16, 16, 3)),
            annotations=[
                Bbox(0, 4, 4, 8, label=0, group=2, id=0,
                    attributes={'occluded': False, 'username': ''}),
                Polygon([0, 4, 4, 4, 5, 6], label=1, id=1,
                    attributes={'occluded': True, 'username': '',
                        'a1': 'qwe', 'a2': True, 'a3': 123}),
                Mask(np.array([[0, 1], [1, 0], [1, 1]]),
                    group=2, id=2,
                    attributes={'occluded': False, 'username': '******'}),
                Bbox(1, 2, 3, 4, group=1, id=3,
                    attributes={'occluded': False, 'username': ''}),
                Mask(np.array([[0, 0], [0, 0], [1, 1]]),
                    group=1, id=4,
                    attributes={'occluded': True, 'username': ''}),
            ]),
    ]
    return iter(items)
def test_can_import_instances(self):
    """COCO instances import must yield a polygon and an RLE crowd mask."""
    expected = Dataset.from_iterable([
        DatasetItem(id='000000000001', image=np.ones((10, 5, 3)),
            subset='val', attributes={'id': 1},
            annotations=[
                Polygon([0, 0, 1, 0, 1, 2, 0, 2], label=0,
                    id=1, group=1,
                    attributes={'is_crowd': False, 'x': 1, 'y': 'hello'}),
                Mask(np.array([[1, 0, 0, 1, 0]] * 5 + [[1, 1, 1, 1, 0]] * 5),
                    label=0, id=2, group=2,
                    attributes={'is_crowd': True}),
            ]),
    ], categories=['TEST'])

    dataset = Dataset.import_from(
        osp.join(DUMMY_DATASET_DIR, 'coco_instances'), 'coco')

    compare_datasets(self, expected, dataset)
def crop_segments(cls, segment_anns, img_width, img_height):
    """Crop segments covered by other segments with a higher z_order.

    :param segment_anns: polygon and mask annotations of one image
    :param img_width: image width in pixels
    :param img_height: image height in pixels
    :return: new annotations with overlapped regions removed; polygons may
        split into several Polygon objects, masks become RleMask objects.
    """
    # Process in z-order so that later (higher) segments cover earlier ones
    segment_anns = sorted(segment_anns, key=lambda x: x.z_order)

    segments = []
    for s in segment_anns:
        if s.type == AnnotationType.polygon:
            segments.append(s.points)
        elif s.type == AnnotationType.mask:
            # Masks are passed as RLE; reuse an existing RLE when available
            if isinstance(s, RleMask):
                rle = s.rle
            else:
                rle = mask_tools.mask_to_rle(s.image)
            segments.append(rle)

    # Returns, per input segment, either a list of polygons or a mask image
    segments = mask_tools.crop_covered_segments(
        segments, img_width, img_height)

    new_anns = []
    for ann, new_segment in zip(segment_anns, segments):
        fields = {'z_order': ann.z_order, 'label': ann.label,
            'id': ann.id, 'group': ann.group, 'attributes': ann.attributes
        }
        if ann.type == AnnotationType.polygon:
            # A split polygon becomes several parts; give them a shared
            # group so they still read as one instance
            if fields['group'] is None:
                fields['group'] = cls._make_group_id(
                    segment_anns + new_anns, fields['id'])
            for polygon in new_segment:
                new_anns.append(Polygon(points=polygon, **fields))
        else:
            # Re-encode the cropped mask image as a compressed RLE
            rle = mask_tools.mask_to_rle(new_segment)
            rle = mask_utils.frPyObjects(rle, *rle['size'])
            new_anns.append(RleMask(rle=rle, **fields))

    return new_anns
def test_can_convert_masks_to_polygons(self):
    """Saving with segmentation_mode='polygons' must vectorize the mask."""
    source = Dataset.from_iterable([
        DatasetItem(id=1, image=np.zeros((5, 10, 3)),
            annotations=[
                Mask(np.array([
                    [0, 1, 1, 1, 0, 1, 1, 1, 1, 0],
                    [0, 0, 1, 1, 0, 1, 1, 1, 0, 0],
                    [0, 0, 0, 1, 0, 1, 1, 0, 0, 0],
                    [0, 0, 0, 0, 0, 1, 0, 0, 0, 0],
                    [0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
                ]), label=3, id=4, group=4),
            ]),
    ], categories=[str(i) for i in range(10)])

    expected = Dataset.from_iterable([
        DatasetItem(id=1, image=np.zeros((5, 10, 3)),
            annotations=[
                # One polygon per connected component of the mask
                Polygon([3.0, 2.5, 1.0, 0.0, 3.5, 0.0, 3.0, 2.5],
                    label=3, id=4, group=4,
                    attributes={'is_crowd': False}),
                Polygon([5.0, 3.5, 4.5, 0.0, 8.0, 0.0, 5.0, 3.5],
                    label=3, id=4, group=4,
                    attributes={'is_crowd': False}),
            ],
            attributes={'id': 1}),
    ], categories=[str(i) for i in range(10)])

    with TestDir() as test_dir:
        self._test_save_and_load(source,
            partial(CocoInstancesConverter.convert,
                segmentation_mode='polygons'),
            test_dir, target_dataset=expected)
def test_merge_instance_segments(self):
    """MergeInstanceSegments must fuse grouped shapes into a single mask."""
    source = Dataset.from_iterable([
        DatasetItem(id=1, image=np.zeros((5, 5, 3)),
            annotations=[
                Mask(np.array([
                    [0, 0, 1, 1, 1],
                    [0, 0, 0, 0, 1],
                    [1, 0, 0, 0, 1],
                    [1, 0, 0, 0, 0],
                    [1, 1, 1, 0, 0]],
                ), z_order=0, group=1),
                Polygon([1, 1, 4, 1, 4, 4, 1, 4], z_order=1, group=1),
                Polygon([0, 0, 0, 2, 2, 2, 2, 0], z_order=1),
            ]),
    ])

    expected = Dataset.from_iterable([
        DatasetItem(id=1, image=np.zeros((5, 5, 3)),
            annotations=[
                # Group 1: mask merged with its polygon
                Mask(np.array([
                    [0, 0, 1, 1, 1],
                    [0, 1, 1, 1, 1],
                    [1, 1, 1, 1, 1],
                    [1, 1, 1, 1, 0],
                    [1, 1, 1, 0, 0]],
                ), z_order=0, group=1),
                # Ungrouped polygon rasterized on its own
                Mask(np.array([
                    [1, 1, 0, 0, 0],
                    [1, 1, 0, 0, 0],
                    [0, 0, 0, 0, 0],
                    [0, 0, 0, 0, 0],
                    [0, 0, 0, 0, 0]],
                ), z_order=1),
            ]),
    ])

    actual = transforms.MergeInstanceSegments(source, include_polygons=True)

    compare_datasets(self, expected, actual)
def convert_mask(mask):
    """Vectorize a mask annotation into equivalent Polygon annotations.

    Every other field (label, z_order, id, attributes, group) is copied
    unchanged onto each produced polygon.
    """
    converted = []
    for contour in mask_tools.mask_to_polygons(mask.image):
        converted.append(Polygon(points=contour,
            label=mask.label, z_order=mask.z_order,
            id=mask.id, attributes=mask.attributes,
            group=mask.group))
    return converted