Esempio n. 1
0
    def download(self):
        """Download and extract the train/test images and annotations.

        Creates ``self.target_dir`` if needed, then downloads each of the
        four archives from Google Drive and unzips it into the target dir.
        Each ``*_DATA``/``*_ANNO`` constant is a (filename, drive id) pair.
        """
        ensure_dir(self.target_dir)

        # FIX: the original log messages said "baidu", but every download
        # below goes through Google Drive — messages corrected to match.
        # Also renamed the archive variables: the train archives were
        # misleadingly stored in a variable called `test_zip`.
        log.info('downloading the train images from google drive...')
        log.warning('You might want to download using a better client')
        train_zip = os.path.join(self.target_dir, TRAIN_DATA[0])
        self.download_file_from_google_drive(TRAIN_DATA[1], train_zip)
        self.extract_zip_file(train_zip, self.target_dir)

        log.info('downloading the train annotations from google drive...')
        train_anno_zip = os.path.join(self.target_dir, TRAIN_ANNO[0])
        self.download_file_from_google_drive(TRAIN_ANNO[1], train_anno_zip)
        self.extract_zip_file(train_anno_zip, self.target_dir)

        log.info('downloading the test images from google drive...')
        test_zip = os.path.join(self.target_dir, TEST_DATA[0])
        self.download_file_from_google_drive(TEST_DATA[1], test_zip)
        self.extract_zip_file(test_zip, self.target_dir)

        log.info('downloading the test annotations from google drive...')
        test_anno_zip = os.path.join(self.target_dir, TEST_ANNO[0])
        self.download_file_from_google_drive(TEST_ANNO[1], test_anno_zip)
        self.extract_zip_file(test_anno_zip, self.target_dir)

        log.info('done')
    def download(self):
        """Fetch the training/validation image archives plus the
        annotations and eval-tools zips, unpacking everything into
        ``self.target_dir``.
        """
        ensure_dir(self.target_dir)

        # Image sets come from Google Drive, keyed by (filename, drive id).
        log.info('downloading the training images from google drive...')
        archive = os.path.join(self.target_dir, TRAIN_DATA[0])
        self.download_file_from_google_drive(TRAIN_DATA[1], archive)
        self.extract_zip_file(archive, self.target_dir)

        log.info('downloading the validation images from google drive...')
        archive = os.path.join(self.target_dir, VAL_DATA[0])
        self.download_file_from_google_drive(VAL_DATA[1], archive)
        self.extract_zip_file(archive, self.target_dir)

        # Annotations and tooling are plain HTTP downloads; the helper
        # returns the name of the file it saved.
        log.info('downloading the bounding boxes annotations...')
        anno_name = self.download_file_from_web_server(ANNOTATIONS_URL,
                                                       self.target_dir)
        self.extract_zip_file(os.path.join(self.target_dir, anno_name),
                              self.target_dir)

        log.info('downloading eval tools...')
        tools_name = self.download_file_from_web_server(EVAL_TOOLS_URL,
                                                        self.target_dir)
        self.extract_zip_file(os.path.join(self.target_dir, tools_name),
                              self.target_dir)

        log.info('done')
Esempio n. 3
0
 def download(self):
     """Download the dataset archive and unpack it into ``self.target_dir``."""
     ensure_dir(self.target_dir)
     log.info('Downloading dataset...')
     archive_name = self.download_file_from_web_server(IMAGES_URL, self.target_dir)
     log.info('Finished download. Unzipping...')
     archive_path = os.path.join(self.target_dir, archive_name)
     self.extract_zip_file(archive_path, self.target_dir)
     log.info('done')
Esempio n. 4
0
 def export(self, target_dir):
     """Write the train and validation splits as
     ``<dataset>_<split>.json`` files inside *target_dir*.
     """
     ensure_dir(target_dir)
     name_fmt = '{}_{}.json'
     # Export each split sequentially: train first, then val.
     train_out = os.path.join(target_dir,
                              name_fmt.format(self.dataset.name, 'train'))
     self._export(train_out, self.dataset.train_set())
     val_out = os.path.join(target_dir,
                            name_fmt.format(self.dataset.name, 'val'))
     self._export(val_out, self.dataset.val_set())
    def download(self):
        """Download the PASCAL faces tarball from Google Drive and
        extract it into ``self.target_dir``.
        """
        ensure_dir(self.target_dir)

        log.info('downloading the pascal faces from google drive...')
        # TRAINVAL_DATA is a (filename, google-drive id) pair.
        archive = os.path.join(self.target_dir, TRAINVAL_DATA[0])
        self.download_file_from_google_drive(TRAINVAL_DATA[1], archive)
        self.extract_tar_file(archive, self.target_dir)

        log.info('done')
def main():
    """Convert a whitespace-separated predictions CSV into FDDB
    per-fold result files (fold-<id>-out.txt) under --output_dir.
    """
    parser = argparse.ArgumentParser()
    parser.add_argument('--input',
                        dest='input',
                        action='store',
                        required=True,
                        help='input csv containing the predictions')
    parser.add_argument('--data_dir',
                        dest='data_dir',
                        action='store',
                        required=True,
                        help='')
    parser.add_argument('--output_dir',
                        dest='output_dir',
                        action='store',
                        required=True,
                        help='')
    args = parser.parse_args()
    ensure_dir(args.output_dir)

    # Group detections by image id. Each stored entry is
    # (left_x, top_y, width, height, detection_score).
    predictions = defaultdict(list)
    with open(args.input, 'r') as csvfile:
        for row in csv.reader(csvfile, delimiter=' '):
            # The image id is the first capture group of FILENAME_RE.
            match = next(re.finditer(FILENAME_RE, row[0]))
            image_id = match.group(1)
            score = float(row[1])
            left, top = float(row[2]), float(row[3])
            right, bottom = float(row[4]), float(row[5])
            predictions[image_id].append(
                (left, top, right - left, bottom - top, score))

    dataset = FDDB(args.data_dir)
    for fold_id, fold_file in dataset.folds():
        out_path = os.path.join(args.output_dir,
                                'fold-{}-out.txt'.format(fold_id))
        with open(fold_file, 'r') as src, open(out_path, 'w') as dest:
            for line in src:
                name = line.strip()
                dest.write('{}\n'.format(name))
                # Images without predictions still get an explicit 0 count.
                if name in predictions:
                    boxes = predictions[name]
                    dest.write('{}\n'.format(len(boxes)))
                    for box in boxes:
                        dest.write('{} {} {} {} {}\n'.format(*box))
                else:
                    dest.write('{}\n'.format(0))
Esempio n. 7
0
    def download(self):
        """Download the validation images and the bounding-box
        annotations from Google Drive and unzip both into
        ``self.target_dir``.
        """
        ensure_dir(self.target_dir)

        # Each *_DATA constant is indexed as [0]=filename, [1]=drive id.
        downloads = (
            ('downloading the validation images from google drive...',
             VAL_DATA),
            ('downloading the bounding boxes annotations...',
             ANNOTATIONS_DATA),
        )
        for message, item in downloads:
            log.info(message)
            archive = os.path.join(self.target_dir, item[0])
            self.download_file_from_google_drive(item[1], archive)
            self.extract_zip_file(archive, self.target_dir)

        log.info('done')
Esempio n. 8
0
    def _export(self, target_dir, dataset_name='train'):
        """Export one WIDER split to darknet layout: a ``<split>.txt``
        image listing plus one ``labels/.../<image>.txt`` file per image
        containing ``0 <converted bbox>`` lines (class 0 = face).
        """
        log.info('Converting %s data', dataset_name)
        images_root = os.path.join(target_dir, 'images/')
        annotations_root = os.path.join(target_dir, 'labels/')
        ensure_dir(annotations_root)

        listing_path = os.path.join(target_dir, '{}.txt'.format(dataset_name))
        with open(listing_path, 'w') as full:
            # e.g. dataset_name='train' -> self.widerface.train_set()
            split = getattr(self.widerface, '{}_set'.format(dataset_name))()
            for image in split:
                # Images without any annotated face are skipped entirely.
                if not len(image.faces) > 0:
                    continue
                path = image.copy_to(images_root, include_subdirs=True)
                full.write('{}\n'.format(path))

                stem, _ = os.path.splitext(path)
                _, basename = os.path.split(stem)
                # Mirror the image's subdirectory under labels/.
                if image.subdir:
                    annotation_dir = os.path.join(annotations_root, image.subdir)
                else:
                    annotation_dir = annotations_root
                ensure_dir(annotation_dir)
                label_path = os.path.join(annotation_dir, basename + '.txt')
                with open(label_path, 'w') as anno:
                    for face in image.faces:
                        bbox = self._convert(image.size, face)
                        anno.write('0 ' + ' '.join(str(a) for a in bbox) + '\n')
Esempio n. 9
0
    def download(self):
        """Download the FDDB image and annotation tarballs and extract
        them under ``self.target_dir``.
        """
        ensure_dir(self.target_dir)

        log.info('Downloading images')
        images_tar = self.download_file_from_web_server(
            IMAGES_URL, self.target_dir)
        log.info('Finished download. Extracting.')
        # Images are unpacked into an originalPics/ subdirectory.
        self.extract_tar_file(
            os.path.join(self.target_dir, images_tar),
            os.path.join(self.target_dir, 'originalPics/'))

        log.info('Downloading annotations')
        annotations_tar = self.download_file_from_web_server(
            ANNOTATIONS_URL, self.target_dir)
        log.info('Finished download. Extracting.')
        # Annotations go straight into the target directory root.
        self.extract_tar_file(
            os.path.join(self.target_dir, annotations_tar),
            self.target_dir)

        log.info('done')
Esempio n. 10
0
def main():
    """Write FDDB per-fold result files from predictions stored in a
    tfrecord (--input), one ``fold-<id>-out.txt`` per fold.
    """
    parser = argparse.ArgumentParser()
    parser.add_argument('--input',
                        dest='input',
                        action='store',
                        required=True,
                        help='input tfrecord containing the predictions')
    parser.add_argument('--data_dir',
                        dest='data_dir',
                        action='store',
                        required=True,
                        help='')
    parser.add_argument('--output_dir',
                        dest='output_dir',
                        action='store',
                        required=True,
                        help='')
    args = parser.parse_args()
    ensure_dir(args.output_dir)

    # Mapping of image filename -> list of detection tuples.
    predictions = extract_predictions_from_tf_record(args.input)
    dataset = FDDB(args.data_dir)
    for fold_id, fold_file in dataset.folds():
        out_path = os.path.join(args.output_dir,
                                'fold-{}-out.txt'.format(fold_id))
        with open(fold_file, 'r') as src, open(out_path, 'w') as dest:
            for line in src:
                name = line.strip()
                dest.write('{}\n'.format(name))
                # Images with no predictions get an explicit 0 count.
                if name in predictions:
                    boxes = predictions[name]
                    dest.write('{}\n'.format(len(boxes)))
                    for box in boxes:
                        dest.write('{} {} {} {} {}\n'.format(*box))
                else:
                    dest.write('{}\n'.format(0))
def main():
    """Convert detections from a tfrecord into WIDER-style per-image
    result files, one ``<event>/<image>.txt`` file per image under
    --output_dir.
    """
    parser = argparse.ArgumentParser()
    parser.add_argument('--input',
                        dest='input',
                        action='store',
                        required=True,
                        help='input tf record containing the predictions')
    parser.add_argument('--data_dir',
                        dest='data_dir',
                        action='store',
                        required=True,
                        help='')
    parser.add_argument('--output_dir',
                        dest='output_dir',
                        action='store',
                        required=True,
                        help='')
    args = parser.parse_args()

    tfrecord = args.input
    data_dir = args.data_dir
    output_dir = args.output_dir
    ds = Wider(data_dir)
    events = ds.events()

    # BUG FIX: the loop previously bound each item to `i` while the body
    # used the undefined names `filename` and `bboxes`, raising NameError
    # on the first iteration. Unpack the detection pair instead.
    # NOTE(review): assumes read_detections_from yields (filename, bboxes)
    # pairs — confirm against its definition.
    for filename, bboxes in read_detections_from(tfrecord):
        # The event id is the leading underscore-delimited token.
        event_id = filename.split('_')[0]
        event = events[event_id]
        item_name = os.path.splitext(os.path.basename(filename))[0]
        result_filename = '{}/{}.txt'.format(event, item_name)
        target_file = os.path.join(output_dir, result_filename)
        ensure_dir(target_file)
        with open(target_file, 'w') as dest:
            # WIDER format: image name, detection count, then one
            # "x y w h score"-style line per detection.
            dest.write('{}\n'.format(item_name))
            total = len(bboxes)
            dest.write('{}\n'.format(total))
            for bbox in bboxes:
                dest.write('{} {} {} {} {}\n'.format(*bbox))
Esempio n. 12
0
    def _prepare(target_dir):
        """Create the darknet workspace layout under *target_dir* plus
        the ``obj.names`` and ``obj.data`` configuration files.
        """
        log.info('Preparing target dir: %s', target_dir)
        # Standard darknet workspace subdirectories.
        for subdir in ('images/', 'labels/', 'backup/', 'results/'):
            ensure_dir(os.path.join(target_dir, subdir))

        log.info('Creating obj.names')
        # Single-class setup: the only label is "face".
        with open(os.path.join(target_dir, 'obj.names'), 'w') as obj_names:
            obj_names.write('face\n')

        log.info('Creating obj.data')
        config_lines = ('classes = 1\n',
                        'train = train.txt\n',
                        'valid = val.txt\n',
                        'names = obj.names\n',
                        'backup = backup/\n')
        with open(os.path.join(target_dir, 'obj.data'), 'w') as obj_data:
            obj_data.writelines(config_lines)
Esempio n. 13
0
import logging
import sys
import unittest

import os

from morghulis.afw import AFW
from morghulis.os_utils import ensure_dir

# Log to stdout at DEBUG so test output and log lines interleave.
logging.basicConfig(stream=sys.stdout, level=logging.DEBUG)
log = logging.getLogger(__name__)

# Sample AFW data shipped next to this test file.
AFW_DIR = os.path.dirname(__file__) + '/AFW_sample/'
# Scratch directory for tests that download data; created up front.
TMP_DIR = '/opt/project/.tmp/'
ensure_dir(TMP_DIR)


class AFWTests(unittest.TestCase):
    """Smoke tests for the AFW dataset wrapper."""

    def setUp(self):
        self.afw = AFW(AFW_DIR)

    def test_train_set(self):
        # Walk every image and each of its faces, logging them so a
        # failure in iteration surfaces here.
        for image in self.afw.images():
            log.debug(image)
            for face in image.faces:
                log.debug(face)

    @unittest.skip("skipping because it takes too long")
    def test_download(self):
        # Downloads the full dataset into the scratch directory.
        self.afw = AFW(os.path.join(TMP_DIR, 'afw_download/'))
        self.afw.download()
Esempio n. 14
0
 def export(self, target_dir):
     """Prepare *target_dir* and export both the train and val splits."""
     ensure_dir(target_dir)
     self._prepare(target_dir)
     for split in ('train', 'val'):
         self._export(target_dir, split)
 def export(self, target_dir):
     """Create *target_dir*, lay out its directory structure and export
     the dataset into it.
     """
     ensure_dir(target_dir)
     self._prepare(target_dir)
     self._export(target_dir)
 def download(self):
     """Download the image tarball and unpack it into ``self.target_dir``."""
     ensure_dir(self.target_dir)
     archive_name = self.download_file_from_web_server(IMAGES_URL, self.target_dir)
     archive_path = os.path.join(self.target_dir, archive_name)
     self.extract_tar_file(archive_path, self.target_dir)
     log.info('done')
 def export(self, target_dir):
     """Export every image of the dataset to a single
     ``<dataset>.json`` file inside *target_dir*.
     """
     ensure_dir(target_dir)
     json_name = '{}.json'.format(self.dataset.name)
     self._export(os.path.join(target_dir, json_name),
                  self.dataset.images())
Esempio n. 18
0
 def export(self, output_dir):
     """Export the train and val splits into *output_dir*."""
     ensure_dir(output_dir)
     for split in ('train', 'val'):
         self._export(output_dir, split)
 def export(self, output_dir):
     os_utils.ensure_dir(output_dir)
     self._export(output_dir, 'train')