class CocoExporterTests(unittest.TestCase):
    """Smoke tests for COCO export backed by the bundled FDDB sample data."""

    def setUp(self):
        # Start every test from a clean scratch directory.
        rmtree(TMP_DIR, ignore_errors=True)
        self.ds = FDDB(FDDB_DIR)

    def test_sanity(self):
        """Exporting to the 'coco' format should complete without raising."""
        self.ds.export(TMP_DIR, 'coco')
def main():
    """Convert a space-delimited prediction CSV into FDDB fold output files.

    CLI flags:
      --input       CSV of predictions: filename score xmin ymin xmax ymax
      --data_dir    root of the FDDB dataset (used to enumerate folds)
      --output_dir  destination for the 'fold-<id>-out.txt' files
    """
    parser = argparse.ArgumentParser()
    parser.add_argument('--input', dest='input', action='store', required=True,
                        help='input csv containing the predictions')
    parser.add_argument('--data_dir', dest='data_dir', action='store', required=True, help='')
    parser.add_argument('--output_dir', dest='output_dir', action='store', required=True, help='')
    args = parser.parse_args()

    ensure_dir(args.output_dir)
    predictions = _load_predictions(args.input)
    _write_fold_outputs(FDDB(args.data_dir), predictions, args.output_dir)


def _load_predictions(input_csv):
    """Parse the prediction CSV into {image_id: [(x, y, w, h, score), ...]}.

    Raises ValueError when a row's filename does not match FILENAME_RE
    (previously this crashed with an opaque StopIteration from
    next(re.finditer(...))).
    """
    predictions = defaultdict(list)
    with open(input_csv, 'r') as csvfile:
        for row in csv.reader(csvfile, delimiter=' '):
            match = re.search(FILENAME_RE, row[0])
            if match is None:
                raise ValueError('filename %r does not match FILENAME_RE' % row[0])
            _id = match.group(1)
            score = float(row[1])
            xmin = float(row[2])
            ymin = float(row[3])
            xmax = float(row[4])
            ymax = float(row[5])
            # FDDB result format is: left_x top_y width height detection_score
            predictions[_id].append(
                (xmin, ymin, xmax - xmin, ymax - ymin, score))
    return predictions


def _write_fold_outputs(ds, predictions, output_dir):
    """Write one 'fold-<id>-out.txt' per FDDB fold.

    For each image listed in the fold file: the image name, the number of
    detections (0 when absent), then one detection per line.
    """
    for fold_id, fold_file in ds.folds():
        target_file = os.path.join(output_dir, 'fold-{}-out.txt'.format(fold_id))
        with open(fold_file, 'r') as src, open(target_file, 'w') as dest:
            for img_filename in src:
                img_filename = img_filename.strip()
                dest.write('{}\n'.format(img_filename))
                # .get avoids inserting empty lists into the defaultdict.
                pred = predictions.get(img_filename, [])
                dest.write('{}\n'.format(len(pred)))
                for p in pred:
                    dest.write('{} {} {} {} {}\n'.format(*p))
def create_dataset(dataset_name, data_dir):
    """Instantiate a dataset wrapper by name.

    A falsy ``dataset_name`` defaults to 'mafa'. The wrapper module is
    imported lazily so only the requested dataset's dependencies load.

    Raises:
        ValueError: if ``dataset_name`` is not a known dataset.
    """
    registry = {
        'widerface': ('morghulis.widerface', 'Wider'),
        'fddb': ('morghulis.fddb', 'FDDB'),
        'afw': ('morghulis.afw', 'AFW'),
        'pascal_faces': ('morghulis.pascal_faces', 'PascalFaces'),
        'mafa': ('morghulis.mafa', 'Mafa'),
        'caltech': ('morghulis.caltech_faces', 'CaltechFaces'),
        'ufdd': ('morghulis.ufdd', 'UFDD'),
    }
    name = dataset_name or 'mafa'
    if name not in registry:
        logging.error('Invalid dataset name %s', name)
        raise ValueError('Invalid dataset name %s' % name)
    module_path, class_name = registry[name]
    # fromlist makes __import__ return the leaf submodule, not the package.
    module = __import__(module_path, fromlist=[class_name])
    return getattr(module, class_name)(data_dir)
def main():
    """CLI entry point: export a face dataset to a target annotation format.

    Flags:
      --dataset     'widerface', 'fddb' or 'afw'
      --format      'darknet', 'tensorflow', 'caffe' or 'coco'
      --data_dir    where the raw dataset lives
      --output_dir  where the converted annotations are written

    Raises:
        ValueError: if --dataset names an unknown dataset.
    """
    parser = argparse.ArgumentParser()
    parser.add_argument('--dataset', dest='dataset', action='store', required=True,
                        help='widerface or fddb')
    parser.add_argument('--format', dest='format', action='store', required=True,
                        help='darknet, tensorflow, caffe or coco')
    parser.add_argument('--data_dir', dest='data_dir', action='store', required=True, help='')
    parser.add_argument('--output_dir', dest='output_dir', action='store', required=True, help='')
    args = parser.parse_args()
    dataset = args.dataset
    _format = args.format
    data_dir = args.data_dir
    output_dir = args.output_dir
    if dataset == 'widerface':
        from morghulis.widerface import Wider
        ds = Wider(data_dir)
    elif dataset == 'fddb':
        from morghulis.fddb import FDDB
        ds = FDDB(data_dir)
    elif dataset == 'afw':
        from morghulis.afw import AFW
        ds = AFW(data_dir)
    else:
        logging.error('Invalid dataset name %s', dataset)
        # Bug fix: previously control fell through to ds.export() and crashed
        # with UnboundLocalError; fail fast like the sibling download CLI.
        raise ValueError('Invalid dataset name %s' % dataset)
    ds.export(output_dir, _format)
def main():
    """CLI entry point: download the requested face dataset into --output_dir.

    Raises:
        ValueError: if --dataset names an unknown dataset.
    """
    parser = argparse.ArgumentParser()
    parser.add_argument('--dataset', dest='dataset', action='store', required=True,
                        help='widerface, fddb or afw')
    parser.add_argument('--output_dir', dest='output_dir', action='store', required=True, help='')
    args = parser.parse_args()
    dataset = args.dataset
    output_dir = args.output_dir

    loaders = {
        'widerface': ('morghulis.widerface', 'Wider'),
        'fddb': ('morghulis.fddb', 'FDDB'),
        'afw': ('morghulis.afw', 'AFW'),
        'pascal_faces': ('morghulis.pascal_faces', 'PascalFaces'),
    }
    if dataset not in loaders:
        logging.error('Invalid dataset name %s', dataset)
        raise ValueError('Invalid dataset name %s' % dataset)
    module_path, class_name = loaders[dataset]
    # fromlist makes __import__ return the leaf submodule, not the package.
    wrapper_cls = getattr(__import__(module_path, fromlist=[class_name]), class_name)
    ds = wrapper_cls(output_dir)
    ds.download()
def main():
    """Convert tfrecord predictions into FDDB fold output files.

    Flags:
      --input       tfrecord containing the predictions
      --data_dir    root of the FDDB dataset (used to enumerate folds)
      --output_dir  destination for the 'fold-<id>-out.txt' files
    """
    parser = argparse.ArgumentParser()
    parser.add_argument('--input', dest='input', action='store', required=True,
                        help='input tfrecord containing the predictions')
    parser.add_argument('--data_dir', dest='data_dir', action='store', required=True, help='')
    parser.add_argument('--output_dir', dest='output_dir', action='store', required=True, help='')
    args = parser.parse_args()

    out_dir = args.output_dir
    ensure_dir(out_dir)
    detections = extract_predictions_from_tf_record(args.input)
    dataset = FDDB(args.data_dir)
    for fold_id, fold_file in dataset.folds():
        result_path = os.path.join(out_dir, 'fold-{}-out.txt'.format(fold_id))
        with open(fold_file, 'r') as fold_in, open(result_path, 'w') as fold_out:
            for line in fold_in:
                name = line.strip()
                fold_out.write('{}\n'.format(name))
                if name in detections:
                    boxes = detections[name]
                    fold_out.write('{}\n'.format(len(boxes)))
                    for box in boxes:
                        fold_out.write('{} {} {} {} {}\n'.format(*box))
                else:
                    fold_out.write('{}\n'.format(0))
class FDDBTests(unittest.TestCase):
    """Unit tests for the FDDB dataset wrapper over the bundled sample data."""

    def setUp(self):
        self.fddb = FDDB(FDDB_DIR)

    def _get_image(self, img='2002/08/01/big/img_1468'):
        # First image whose filename contains the given fragment;
        # raises IndexError if the sample is missing from the fixture.
        matches = [candidate for candidate in self.fddb.images()
                   if img in candidate.filename]
        return matches[0]

    def test_train_set(self):
        """The sample fixture contains exactly 7 images."""
        all_images = list(self.fddb.images())
        self.assertEqual(7, len(all_images))

    def test_faces(self):
        sample = self._get_image()
        self.assertEqual(5, len(sample.faces))

    def test_face_details(self):
        """Ellipse annotation fields of the first face parse exactly."""
        sample = self._get_image()
        self.assertEqual(5, len(sample.faces))
        first_face = sample.faces[0]
        self.assertEqual((7.294545, 96.341818), first_face.center)
        self.assertEqual(16.087936, first_face.w)
        self.assertEqual(19.110234, first_face.h)
        self.assertEqual(-1.469174, first_face.angle)
        self.assertEqual(9.555117, first_face.major_axis_radius)
        self.assertEqual(8.043968, first_face.minor_axis_radius)
        self.assertEqual(0, first_face.invalid)

    def test_image(self):
        sample = self._get_image()
        self.assertEqual(334, sample.width)
        self.assertEqual(450, sample.height)
        self.assertEqual('JPEG', sample.format)

    @unittest.skip("skipping because it takes too long")
    def test_download(self):
        self.fddb = FDDB(os.path.join(TMP_DIR, 'fddb_download/'))
        self.fddb.download()
def setUp(self):
    """Reset TMP_DIR and wire a TensorflowExporter to the FDDB fixture."""
    rmtree(TMP_DIR, ignore_errors=True)
    fixture = FDDB(FDDB_DIR)
    self.dataset = fixture
    self.tfExporter = TensorflowExporter(fixture)
def setUp(self):
    """Reset TMP_DIR and wire a DarknetExporter to the FDDB fixture."""
    rmtree(TMP_DIR, ignore_errors=True)
    source = FDDB(FDDB_DIR)
    self.fddb = source
    self.darknetExporter = DarknetExporter(source)
def test_download(self):
    """Downloading into a fresh directory should complete without raising."""
    download_root = os.path.join(TMP_DIR, 'fddb_download/')
    self.fddb = FDDB(download_root)
    self.fddb.download()
def setUp(self):
    """Point the test fixture at the bundled FDDB sample data."""
    sample_dir = FDDB_DIR
    self.fddb = FDDB(sample_dir)
def setUp(self):
    """Start each test from a clean TMP_DIR with a fresh FDDB dataset."""
    rmtree(TMP_DIR, ignore_errors=True)
    self.ds = FDDB(FDDB_DIR)