    def xtest_read_real_json(self):
        '''
        References one file of coco2017 to check whether big imports cause
        any trouble. If they do, try increasing the memory max setting in
        Docker first! (Prefixed with "x" so the test runner skips this slow test.)
        '''
        import time

        subject = ImportCoco(self.dataset)

        start_time = time.time()
        subject.read_file(
            os.path.join('/data/coco2017', 'instances_train2017.json'))
        print("--- %s seconds for read_file ---" % (time.time() - start_time))

        start_time = time.time()
        print(subject.stats())
        print("--- %s seconds for stats ---" % (time.time() - start_time))

        start_time = time.time()
        subject.save()
        print("--- %s seconds for save ---" % (time.time() - start_time))

        print(Annotation.objects.count())
        print(Annotation.boundingbox_objects.count())
        print(Annotation.segmentation_objects.count())
    def test_stats_cleaned(self):
        '''
        We only need to import stuff that belongs to defined images,
        so we can ignore the rest.
        '''
        subject = ImportCoco(self.dataset)
        subject.data = subject.convert(self._incorrect_data())

        result = subject.stats()

        self.assertEqual(
            result, {
                'images': 1,
                'images_all': 1,
                'annotations': 1,
                'annotations_all': 2,
                'categories': 1,
                'categories_all': 2,
                'licenses': 1,
                'licenses_all': 2
            })
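    # A minimal sketch, not taken from the original source, of what a fixture
    # like _incorrect_data() could return to satisfy the assertions above:
    # one defined image, plus one annotation, category and license that
    # reference it, and one of each that dangle and should be ignored by the
    # cleanup. All ids, field names and the helper name itself are assumptions
    # for illustration only.
    def _incorrect_data_sketch(self):
        return {
            'images': [
                {'id': 1, 'file_name': 'image1.jpg', 'width': 640,
                 'height': 480, 'license': 1},
            ],
            'annotations': [
                # kept: refers to the defined image 1
                {'id': 1, 'image_id': 1, 'category_id': 1,
                 'bbox': [0, 0, 10, 10]},
                # ignored: refers to an undefined image
                {'id': 2, 'image_id': 99, 'category_id': 2,
                 'bbox': [0, 0, 10, 10]},
            ],
            'categories': [
                {'id': 1, 'name': 'person'},   # kept: used by a kept annotation
                {'id': 2, 'name': 'bicycle'},  # ignored: only the dangling
                                               # annotation uses it
            ],
            'licenses': [
                {'id': 1, 'name': 'CC BY'},    # kept: used by image 1
                {'id': 2, 'name': 'CC0'},      # ignored: unused
            ],
        }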