Example #1
0
def test_get():
    """Test `get` function"""

    # Each dataset's 'dirpath_data' entry should resolve to its expected path
    expected_dirpaths = {
        'imagenet': '/data/imagenet',
        'deep_lesion': '/data/deep_lesion',
    }
    for dataset_name, dirpath_expected in expected_dirpaths.items():
        assert dev_env.get(dataset_name, 'dirpath_data') == dirpath_expected
#! /usr/bin/env python
"""Download mappings from synset IDs to synset names"""

import os
import subprocess

import pandas as pd

from utils import dev_env

# Root of the ImageNet data directory; presumably read from a per-developer
# config by `dev_env.get` — confirm against utils.dev_env
DIRPATH_DATA = dev_env.get('imagenet', 'dirpath_data')
# Destination directory for the downloaded synset list files
DIRPATH_SYNSET_LISTS = os.path.join(DIRPATH_DATA, 'synset_lists')
# Tarball bundling the ILSVRC12 synset ID / word-mapping lists
SYNSETS_URL = 'http://dl.caffe.berkeleyvision.org/caffe_ilsvrc12.tar.gz'


# NOTE(review): this function appears truncated in this view — the lines
# following the `cmd = ...` assignment (presumably the subprocess call and the
# untar/cleanup steps described in the docstring) are not visible here.
def download_synset_lists():
    """Download relevant synset lists

    These lists include the synset IDs for the classification and detection
    challenges, including the mapping from ID to word.

    This function downloads a tarred directory containing these lists to
    DIRPATH_SYNSET_LISTS, and then untars it to grab the relevant lists. It
    removes unused lists, and also translates the `det_synset_words.txt` and
    `synset_words.txt` to CSVs for ease of use later.
    """

    # e.g. 'caffe_ilsvrc12.tar.gz' — the final path component of SYNSETS_URL
    fname_tarfile = os.path.basename(SYNSETS_URL)
    fpath_tarfile = os.path.join(DIRPATH_SYNSET_LISTS, fname_tarfile)

    # `wget -P` saves into DIRPATH_SYNSET_LISTS; NOTE(review): if this string
    # is later run with shell=True, prefer a list of args with shell=False
    cmd = 'wget {} -P {}'.format(SYNSETS_URL, DIRPATH_SYNSET_LISTS)
[1] Dropbox source: https://nihcc.app.box.com/v/DeepLesion
[2] Background information (bitly link for brevity): https://bit.ly/2uJutbH

Reference script: `batch_download_zips.py` in [1]
"""

import os
import subprocess
from urllib import request

from tqdm import tqdm

from utils import dev_env

# Root of the DeepLesion data directory; presumably read from a per-developer
# config by `dev_env.get` — confirm against utils.dev_env
DIRPATH_DATA = dev_env.get('deep_lesion', 'dirpath_data')
# Where the downloaded image zipfiles are stored
DIRPATH_ZIPS = os.path.join(DIRPATH_DATA, 'image_zipfiles')
# Where the extracted images are stored
DIRPATH_IMAGES = os.path.join(DIRPATH_DATA, 'images')

ZIPFILE_URLS = [
    'https://nihcc.box.com/shared/static/sp5y2k799v4x1x77f7w1aqp26uyfq7qz.zip',
    'https://nihcc.box.com/shared/static/l9e1ys5e48qq8s409ua3uv6uwuko0y5c.zip',
    'https://nihcc.box.com/shared/static/48jotosvbrw0rlke4u88tzadmabcp72r.zip',
    'https://nihcc.box.com/shared/static/xa3rjr6nzej6yfgzj9z6hf97ljpq1wkm.zip',
    'https://nihcc.box.com/shared/static/58ix4lxaadjxvjzq4am5ehpzhdvzl7os.zip',
    'https://nihcc.box.com/shared/static/cfouy1al16n0linxqt504n3macomhdj8.zip',
    'https://nihcc.box.com/shared/static/z84jjstqfrhhlr7jikwsvcdutl7jnk78.zip',
    'https://nihcc.box.com/shared/static/6viu9bqirhjjz34xhd1nttcqurez8654.zip',
    'https://nihcc.box.com/shared/static/9ii2xb6z7869khz9xxrwcx1393a05610.zip',
    'https://nihcc.box.com/shared/static/2c7y53eees3a3vdls5preayjaf0mc3bn.zip',
    'https://nihcc.box.com/shared/static/2zsqpzru46wsp0f99eaag5yiad42iezz.zip',
Example #4
0
#! /usr/bin/env python
"""Plot images from the provided `df_fpaths_images`"""

import argparse
from concurrent.futures import as_completed, ProcessPoolExecutor
import multiprocessing
import os

import imageio
import matplotlib.pyplot as plt
import pandas as pd
from tqdm import tqdm

from utils import dev_env

# Root of the ImageNet data directory; presumably read from a per-developer
# config by `dev_env.get` — confirm against utils.dev_env
DIRPATH_IMAGENET = dev_env.get('imagenet', 'dirpath_data')
# Text file mapping synset IDs to human-readable descriptions
FPATH_SYNSET_WORDS = os.path.join(DIRPATH_IMAGENET, 'synset_lists',
                                  'synset_words.txt')

# Use half the available cores for the process pool
N_PROCESSES = multiprocessing.cpu_count() // 2


def add_synset_descriptions(df_fpaths_images):
    """Add the synset description to each training image

    :param df_fpaths_images: rows hold a filepath and sysnet ID for a single
     image
    :type df_fpaths_images: pandas.DataFrame
    :return: df_fpaths_images with a 'synset_description' column added
    :rtype: pandas.DataFrame
    """
Example #5
0
location: /data/mpii/annotations/mpii_human_pose_v1_u12_2

Reference Implementations:
https://github.com/princeton-vl/pose-hg-train/blob/master/src/misc/convert_annot.py
https://github.com/princeton-vl/pose-hg-train/blob/master/src/misc/mpii.py
"""

import os

import pandas as pd
from scipy.io import loadmat
from tqdm import tqdm

from utils import dev_env

# Root of the MPII data directory; presumably read from a per-developer
# config by `dev_env.get` — confirm against utils.dev_env
DIRPATH_MPII = dev_env.get('mpii', 'dirpath_data')
DIRPATH_ANNOTATIONS = os.path.join(DIRPATH_MPII, 'annotations')
# Source MATLAB annotations file (note the _u12_2 directory holds the
# _u12_1.mat file — names differ as written here)
FPATH_ANNOTATIONS_MAT = os.path.join(DIRPATH_ANNOTATIONS,
                                     'mpii_human_pose_v1_u12_2',
                                     'mpii_human_pose_v1_u12_1.mat')
# Parsed annotations are cached here as a pickled DataFrame
FPATH_ANNOTATIONS_PICKLE = os.path.join(DIRPATH_ANNOTATIONS,
                                        'mpii_human_pose_v1_u12_2',
                                        'df_annotations.pickle')


def parse_annotations(annotations):
    """Parse the provided annotations

    For details on the structure of the annotations, see the 'Annotation
    description' section at http://human-pose.mpi-inf.mpg.de/#download.
#! /usr/bin/env python
"""Download the mpii dataset

Reference: http://human-pose.mpi-inf.mpg.de/#download
"""

import os
import subprocess

from utils import dev_env

# Official MPII download mirror; see http://human-pose.mpi-inf.mpg.de/#download
BASE_URL = 'https://datasets.d2.mpi-inf.mpg.de/andriluka14cvpr/'
ANNOTATIONS_URL = BASE_URL + 'mpii_human_pose_v1_u12_2.zip'
IMAGES_URL = BASE_URL + 'mpii_human_pose_v1.tar.gz'

# Root of the MPII data directory; presumably read from a per-developer
# config by `dev_env.get` — confirm against utils.dev_env
DIRPATH_DATA = dev_env.get('mpii', 'dirpath_data')
DIRPATH_ANNOTATIONS = os.path.join(DIRPATH_DATA, 'annotations')
DIRPATH_IMAGES = os.path.join(DIRPATH_DATA, 'images')


# NOTE(review): this function appears truncated in this view — the download
# and unzip steps described in the docstring are not visible past the path
# construction below.
def download_annotations():
    """Download the mpii annotations

    The zipfile containing the annotations will be downloaded from
    ANNOTATIONS_URL, saved to DIRPATH_ANNOTATIONS, and unzipped.
    """

    # e.g. 'mpii_human_pose_v1_u12_2.zip' — final path component of the URL
    fname_annotations_zipfile = os.path.basename(ANNOTATIONS_URL)
    fpath_annotations_zipfile = os.path.join(DIRPATH_ANNOTATIONS,
                                             fname_annotations_zipfile)