Example 1
import logging


# Factory: maps a dataset name to its wrapper class; falls back to 'mafa' when empty.
def create_dataset(dataset_name, data_dir):
    if not dataset_name:
        dataset_name = 'mafa'
    if dataset_name == 'widerface':
        from morghulis.widerface import Wider
        ds = Wider(data_dir)
    elif dataset_name == 'fddb':
        from morghulis.fddb import FDDB
        ds = FDDB(data_dir)
    elif dataset_name == 'afw':
        from morghulis.afw import AFW
        ds = AFW(data_dir)
    elif dataset_name == 'pascal_faces':
        from morghulis.pascal_faces import PascalFaces
        ds = PascalFaces(data_dir)
    elif dataset_name == 'mafa':
        from morghulis.mafa import Mafa
        ds = Mafa(data_dir)
    elif dataset_name == 'caltech':
        from morghulis.caltech_faces import CaltechFaces
        ds = CaltechFaces(data_dir)
    elif dataset_name == 'ufdd':
        from morghulis.ufdd import UFDD
        ds = UFDD(data_dir)
    else:
        logging.error('Invalid dataset name %s', dataset_name)
        raise ValueError('Invalid dataset name %s' % dataset_name)
    return ds
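
A minimal usage sketch (the dataset name and path here are illustrative; any of the names handled above works the same way):

ds = create_dataset('fddb', '/data/fddb')  # '/data/fddb' is a hypothetical path
ds.download()  # download() as used in Example 2 below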
Example 2
import argparse
import logging


def main():
    parser = argparse.ArgumentParser()
    parser.add_argument('--dataset',
                        dest='dataset',
                        action='store',
                        required=True,
                        help='widerface, fddb, afw or pascal_faces')
    parser.add_argument('--output_dir',
                        dest='output_dir',
                        action='store',
                        required=True,
                        help='directory the dataset will be downloaded into')
    args = parser.parse_args()
    dataset = args.dataset
    output_dir = args.output_dir

    if dataset == 'widerface':
        from morghulis.widerface import Wider
        ds = Wider(output_dir)
    elif dataset == 'fddb':
        from morghulis.fddb import FDDB
        ds = FDDB(output_dir)
    elif dataset == 'afw':
        from morghulis.afw import AFW
        ds = AFW(output_dir)
    elif dataset == 'pascal_faces':
        from morghulis.pascal_faces import PascalFaces
        ds = PascalFaces(output_dir)
    else:
        logging.error('Invalid dataset name %s', dataset)
        raise ValueError('Invalid dataset name %s' % dataset)

    ds.download()
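
From a shell this would be invoked as something like python download.py --dataset fddb --output_dir /data/fddb (script name and path are illustrative). The same effect without the argparse wrapper is a two-liner:

from morghulis.fddb import FDDB

FDDB('/data/fddb').download()  # illustrative path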
Example 3
import argparse
import logging


def main():
    parser = argparse.ArgumentParser()
    parser.add_argument('--dataset', dest='dataset', action='store', required=True, help='widerface, fddb or afw')
    parser.add_argument('--format', dest='format', action='store', required=True, help='darknet, tensorflow, caffe or coco')
    parser.add_argument('--data_dir', dest='data_dir', action='store', required=True, help='directory containing the dataset')
    parser.add_argument('--output_dir', dest='output_dir', action='store', required=True, help='directory the export is written to')
    args = parser.parse_args()
    dataset = args.dataset
    _format = args.format
    data_dir = args.data_dir
    output_dir = args.output_dir

    if dataset == 'widerface':
        from morghulis.widerface import Wider
        ds = Wider(data_dir)
    elif dataset == 'fddb':
        from morghulis.fddb import FDDB
        ds = FDDB(data_dir)
    elif dataset == 'afw':
        from morghulis.afw import AFW
        ds = AFW(data_dir)
    else:
        logging.error('Invalid dataset name %s', dataset)
        raise ValueError('Invalid dataset name %s' % dataset)

    ds.export(output_dir, _format)
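
Again the CLI wrapper reduces to a direct call; the help text above names darknet, tensorflow, caffe and coco as supported formats (paths here are illustrative):

from morghulis.widerface import Wider

Wider('/data/widerface').export('/tmp/widerface-darknet', 'darknet')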
Example 4

import argparse
import csv
import os
import re
from collections import defaultdict

from morghulis.fddb import FDDB

# FILENAME_RE and ensure_dir are module-level helpers defined elsewhere in the source file.
def main():
    parser = argparse.ArgumentParser()
    parser.add_argument('--input',
                        dest='input',
                        action='store',
                        required=True,
                        help='input csv containing the predictions')
    parser.add_argument('--data_dir',
                        dest='data_dir',
                        action='store',
                        required=True,
                        help='FDDB data directory')
    parser.add_argument('--output_dir',
                        dest='output_dir',
                        action='store',
                        required=True,
                        help='directory the fold output files are written to')
    args = parser.parse_args()
    input_csv = args.input
    data_dir = args.data_dir
    output_dir = args.output_dir
    ensure_dir(output_dir)

    predictions = defaultdict(list)
    with open(input_csv, 'r') as csvfile:
        reader = csv.reader(csvfile, delimiter=' ')
        for row in reader:
            match = next(re.finditer(FILENAME_RE, row[0]))
            _id = match.group(1)
            score = float(row[1])
            xmin = float(row[2])
            ymin = float(row[3])
            xmax = float(row[4])
            ymax = float(row[5])
            # left_x top_y width height detection_score
            predictions[_id].append(
                (xmin, ymin, xmax - xmin, ymax - ymin, score))

    ds = FDDB(data_dir)
    for fold_id, fold_file in ds.folds():
        target_file = os.path.join(output_dir,
                                   'fold-{}-out.txt'.format(fold_id))

        with open(fold_file, 'r') as src, open(target_file, 'w') as dest:
            for img_filename in src:
                img_filename = img_filename.strip()
                dest.write('{}\n'.format(img_filename))
                if img_filename in predictions:
                    pred = predictions[img_filename]
                    dest.write('{}\n'.format(len(pred)))
                    for p in pred:
                        dest.write('{} {} {} {} {}\n'.format(*p))
                else:
                    dest.write('{}\n'.format(0))
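
For an image with two detections, the loop above emits a block in the FDDB evaluation layout: the image name, the detection count, then one 'left_x top_y width height score' line per box. With illustrative values:

2002/08/11/big/img_591
2
78.0 29.0 55.0 81.0 0.97
201.0 110.0 40.0 60.0 0.63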
Example 5
import argparse
import os

from morghulis.fddb import FDDB

# ensure_dir and extract_predictions_from_tf_record are helpers defined elsewhere in the source file.
def main():
    parser = argparse.ArgumentParser()
    parser.add_argument('--input',
                        dest='input',
                        action='store',
                        required=True,
                        help='input tfrecord containing the predictions')
    parser.add_argument('--data_dir',
                        dest='data_dir',
                        action='store',
                        required=True,
                        help='FDDB data directory')
    parser.add_argument('--output_dir',
                        dest='output_dir',
                        action='store',
                        required=True,
                        help='directory the fold output files are written to')
    args = parser.parse_args()
    input_file = args.input
    data_dir = args.data_dir
    output_dir = args.output_dir
    ensure_dir(output_dir)

    predictions = extract_predictions_from_tf_record(input_file)
    ds = FDDB(data_dir)
    for fold_id, fold_file in ds.folds():
        target_file = os.path.join(output_dir,
                                   'fold-{}-out.txt'.format(fold_id))

        with open(fold_file, 'r') as src, open(target_file, 'w') as dest:
            for img_filename in src:
                img_filename = img_filename.strip()
                dest.write('{}\n'.format(img_filename))
                if img_filename in predictions:
                    pred = predictions[img_filename]
                    dest.write('{}\n'.format(len(pred)))
                    for p in pred:
                        dest.write('{} {} {} {} {}\n'.format(*p))
                else:
                    dest.write('{}\n'.format(0))
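
A plausible shape for the extract_predictions_from_tf_record helper, assuming object-detection-style records (the feature keys below are assumptions, not the library's actual keys):

import tensorflow as tf
from collections import defaultdict

def extract_predictions_from_tf_record(path):
    # Parses detections out of a tfrecord into {image_id: [(x, y, w, h, score), ...]}.
    predictions = defaultdict(list)
    for record in tf.compat.v1.io.tf_record_iterator(path):
        example = tf.train.Example.FromString(record)
        feat = example.features.feature
        image_id = feat['image/filename'].bytes_list.value[0].decode('utf-8')
        xs = feat['detection/x'].float_list.value
        ys = feat['detection/y'].float_list.value
        ws = feat['detection/width'].float_list.value
        hs = feat['detection/height'].float_list.value
        scores = feat['detection/score'].float_list.value
        for box in zip(xs, ys, ws, hs, scores):
            predictions[image_id].append(box)
    return predictions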
Example 6
    def setUp(self):
        rmtree(TMP_DIR, ignore_errors=True)
        self.dataset = FDDB(FDDB_DIR)
        self.tfExporter = TensorflowExporter(self.dataset)

    def setUp(self):
        rmtree(TMP_DIR, ignore_errors=True)
        self.fddb = FDDB(FDDB_DIR)
        self.darknetExporter = DarknetExporter(self.fddb)

    def test_download(self):
        self.fddb = FDDB(os.path.join(TMP_DIR, 'fddb_download/'))
        self.fddb.download()

    def setUp(self):
        self.fddb = FDDB(FDDB_DIR)

    def setUp(self):
        rmtree(TMP_DIR, ignore_errors=True)
        self.ds = FDDB(FDDB_DIR)
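
These setUp/test fragments come from separate test classes in the repo's test suite. Expanded into a minimal self-contained test case (TMP_DIR, the class name and the assertion are illustrative):

import os
import unittest
from shutil import rmtree

from morghulis.fddb import FDDB

TMP_DIR = '/tmp/morghulis_test'  # illustrative

class FDDBDownloadTest(unittest.TestCase):
    def setUp(self):
        rmtree(TMP_DIR, ignore_errors=True)

    def test_download(self):
        fddb = FDDB(os.path.join(TMP_DIR, 'fddb_download/'))
        fddb.download()
        self.assertTrue(os.path.isdir(TMP_DIR))  # loose sanity check

if __name__ == '__main__':
    unittest.main()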