def get_threat_heatmaps(mode):
    """Build (or load a cached copy of) per-scan threat segmentation masks.

    On first run (no 'done' marker file): reads hand-labeled segmentation
    images, converts each scan's image into 3 binary masks, applies any
    revision_v0 overrides, and stores the result in 'data.hdf5' under 'th'.
    Later runs reopen the cached dataset read-only.

    Returns the h5py dataset of shape x.shape + (3,); the backing HDF5 file
    is deliberately left open so the returned dataset stays usable.
    """
    if not os.path.exists('done'):
        names, labels, x = get_aps_data_hdf5(mode)
        f = h5py.File('data.hdf5', 'w')
        # One extra trailing axis: 3 mask channels per scan.
        th = f.create_dataset('th', x.shape + (3, ))

        with read_input_dir('hand_labeling/threat_segmentation/base'):
            for i, (name, label,
                    data) in tqdm.tqdm(enumerate(zip(names, labels, x)),
                                       total=len(x)):
                # Every scan must have at least one base segmentation image.
                files = glob.glob(name + '*')
                assert files, 'missing hand segmentation for %s' % name

                image = imageio.imread(files[0])
                masks = [
                    _get_mask(image, SEGMENTATION_COLORS[ci])
                    for ci in range(3)
                ]
                # Revised labelings override the matching base mask channel.
                with read_input_dir(
                        'hand_labeling/threat_segmentation/revision_v0'):
                    for revision in glob.glob(name + '*'):
                        # Revision files appear to be named
                        # '<name>_<zone-label>.<ext>'; map the 1-based zone
                        # label to its position among this scan's positive
                        # labels (assumes at most 3 threats per scan, since
                        # `masks` has only 3 entries — TODO confirm).
                        rlabel = int(revision.split('_')[1].split('.')[0])
                        rci = [i + 1 for i in range(17)
                               if label[i]].index(rlabel)
                        rimage = imageio.imread(revision)
                        masks[rci] = _get_mask(rimage, SEGMENTATION_COLORS[0])

                th[i] = np.stack(masks, axis=-1)

        open('done', 'w').close()
    else:
        f = h5py.File('data.hdf5', 'r')
        th = f['th']
    return th
# Example #2
def get_mask_training_data():
    """Assemble projection images plus hand-labeled masks, caching to disk.

    The first call builds 'data.hdf5' (6-channel images: 5 projection
    channels followed by a red-pixel mask channel) and pickles
    (names, labels) to 'pkl'; subsequent calls load from the cache.

    Returns (names, labels, dset) where dset is the h5py dataset backed by
    a read-only file that is intentionally left open.
    """
    if not os.path.exists('done'):
        names, labels, dset_in = get_a3d_projection_data('sample_large', 97)
        hfile = h5py.File('data.hdf5', 'w')
        dset = hfile.create_dataset('dset', (len(dset_in), 330, 256, 6))
        index_of = {name: i for i, name in enumerate(names)}

        with read_input_dir('hand_labeling/a3d_projections'):
            for fname in tqdm.tqdm(glob.glob('*')):
                name, angle = fname.replace('.png', '').split('_')
                angle = int(angle)
                if angle != 0:
                    angle = 16 - angle

                image = imageio.imread(fname)
                # Pixels painted pure red mark the hand-labeled mask.
                mask = np.all(image == [255, 0, 0], axis=-1)
                row = index_of[name]
                dset[row, ..., :-1] = dset_in[row, angle]
                dset[row, ..., -1] = mask

        hfile.close()
        with open('pkl', 'wb') as pkl_file:
            pickle.dump((names, labels), pkl_file)
        open('done', 'w').close()

    with open('pkl', 'rb') as f:
        names, labels = pickle.load(f)
    f = h5py.File('data.hdf5', 'r')
    dset = f['dset']
    return names, labels, dset
# Example #3
def render_synthetic_zone_data(mode):
    """Render synthetic depth/zone images via Blender and cache in HDF5.

    mode selects how many meshes to render: 'sample' (10), 'sample_large'
    (100) or 'all'.  Returns the h5py dataset 'dset' of shape
    (num_meshes, 16, 330, 256, 2): channel 0 is the grayscale depth render,
    channel 1 the zone label map.  The backing file stays open.
    """
    assert mode in ('all', 'sample_large', 'sample')
    if os.path.exists('done'):
        f = h5py.File('data.hdf5', 'r')
        return f['dset']

    with read_input_dir('makehuman/generated'):
        cwd = os.getcwd()
        mesh_paths = sorted('%s/%s' % (cwd, m) for m in glob.glob('*.mhx2'))
    limits = {'sample_large': 100, 'sample': 10}
    if mode in limits:
        mesh_paths = mesh_paths[:limits[mode]]

    with read_input_dir('hand_labeling/blender'):
        texture_path = os.getcwd() + '/zones.png'
    with read_input_dir('scripts/blender'):
        script_path = os.getcwd() + '/render_synthetic_data.py'

    angles = 16
    config = {
        'num_angles': angles,
        'texture_path': texture_path,
        'mesh_paths': mesh_paths
    }
    with open('config.json', 'w') as f:
        json.dump(config, f)
    subprocess.check_call(
        ['blender', '--python', script_path, '--background'])

    f = h5py.File('data.hdf5', 'w')
    dset = f.create_dataset('dset', (len(mesh_paths), angles, 330, 256, 2))

    for i, file in enumerate(tqdm.tqdm(glob.glob('*_depth.png'))):
        angle = int(file.split('_')[-2])
        # Depth render into channel 0, zone labels into channel 1.
        depth = skimage.color.rgb2gray(skimage.io.imread(file))
        dset[i // angles, angle, ..., 0] = depth
        zones = skimage.io.imread(file.replace('depth', 'zones'))
        dset[i // angles, angle, ..., 1] = _convert_colors_to_label(
            zones[..., :3])

    open('done', 'w').close()
    return dset
# Example #4
def get_train_headers(filetype):
    """Load (and memoize on disk) the header of every competition file.

    filetype must be 'a3d' or 'aps'.  Returns a dict mapping file id (the
    filename without its extension) to the parsed header; the result is
    cached in 'headers.pickle' after the first call.
    """
    assert filetype in ('a3d', 'aps')
    if os.path.exists('headers.pickle'):
        with open('headers.pickle', 'rb') as f:
            return pickle.load(f)

    headers = {}
    with read_input_dir('competition_data/%s' % filetype):
        for file in glob.glob('*'):
            headers[file.split('.')[0]] = read_header(file)
    with open('headers.pickle', 'wb') as f:
        pickle.dump(headers, f)
    return headers
def write_aps_hand_labeling_revision_v0(mode):
    """Export scans listed in revision_v0.txt as PNG strips for relabeling.

    Each line of revision_v0.txt is '<5-char name> <comma-separated zone
    labels>'.  For every matching scan, writes one '<name>_<label>.png' per
    listed label, containing all angles of the scan concatenated
    horizontally and normalized by the scan's max value.

    Raises AssertionError on duplicate names in the revision file.
    """
    names, _, x = get_aps_data_hdf5(mode)
    todo = {}
    with read_input_dir('hand_labeling/threat_segmentation'):
        with open('revision_v0.txt', 'r') as f:
            for line in f:
                name, labels = line[:5], line[6:]
                assert name not in todo, "duplicate revision names"
                # Renamed the comprehension variable: the original reused
                # `x`, shadowing the dataset name above.
                todo[name] = [int(tok) for tok in labels.split(', ')]
    for name, data in tqdm.tqdm(zip(names, x), total=len(x)):
        labels = todo.get(name[:5], [])
        if not labels:
            continue
        # Hoisted out of the per-label loop: the concatenated strip depends
        # only on the scan data, not on which label is being written, so
        # computing it per label wasted O(labels * pixels) work.
        images = np.concatenate(np.rollaxis(data, 2), axis=1) / data.max()
        images = np.repeat(images[..., np.newaxis], 3, axis=-1)
        for label in labels:
            filename = '%s_%s' % (name, label)
            skimage.io.imsave('%s.png' % filename, images)
# Example #6
def _get_data_generator(filetype, keep):
    """Return a zero-argument generator factory over competition files.

    filetype must be 'a3d' or 'aps'.  keep(i, file_id) is called with each
    file's shuffled index and id (name without extension) and decides
    whether the file is included.  The returned callable yields
    (file_id, data) pairs with a tqdm progress bar.
    """
    assert filetype in ('a3d', 'aps')

    loc = 'competition_data/%s' % filetype

    with read_input_dir(loc):
        files = sorted(glob.glob('*'))
        # Seeded shuffle keeps the ordering reproducible across runs.
        random.seed(0)
        random.shuffle(files)
    kept = []
    for i, file in enumerate(files):
        if keep(i, file.split('.')[0]):
            kept.append(file)
    files = kept

    def gen():
        for file in tqdm.tqdm(files):
            with read_input_dir(loc):
                data = read_data(file)
            yield file.split('.')[0], data

    return gen
# Example #7
def generate_random_models(n_models):
    """Write n_models random MakeHuman '.mhm' files to the working dir.

    Scans the existing passenger models for the observed [min, max] range
    of every modifier, then samples each modifier uniformly from its range.
    Seeded, so the output is deterministic for a given model corpus.
    """
    with read_input_dir('makehuman/passengers'):
        ranges = defaultdict(lambda: [float('inf'), float('-inf')])
        for file in glob.glob('*.mhm'):
            with open(file, 'r') as f:
                # Lines 5..-5 of a .mhm file are expected to be
                # 'modifier <name> <value>' triples.
                for modifier in f.readlines()[4:-5]:
                    _, m, x = modifier.split(' ')
                    x = float(x)
                    lo, hi = ranges[m]
                    ranges[m][0] = min(lo, x)
                    ranges[m][1] = max(hi, x)

    np.random.seed(0)
    for i in range(n_models):
        lines = ['version v1.1.1']
        for modifier, (lo, hi) in ranges.items():
            val = np.random.uniform(lo, hi)
            lines.append('modifier %s %s' % (modifier, val))
        lines.append('skeleton game_engine.mhskel')
        with open('%s.mhm' % i, 'w') as f:
            f.write('\n'.join(lines))
# Example #8
def spatial_pool_zones(gen):
    """Yield spatially pooled zone arrays for each item produced by *gen*.

    Items are buffered into batches of up to one per CPU core.  Each batch
    entry is written to a random '<id>.in' file and handed to a parallel
    'spatial_pooling' subprocess whose '<id>.out' output is read back as a
    float32 array of shape (16, 330, 256, 19).  Channel 0 of the last axis
    is dropped before yielding (presumably background — confirm against
    the binary).  Results are yielded in input order; raises Exception if
    any subprocess exits non-zero.
    """
    max_procs = multiprocessing.cpu_count()
    batch = []
    with read_input_dir('scripts'):
        exe = os.getcwd() + '/spatial_pooling'
    # Best-effort cleanup of leftovers from a previous (possibly crashed)
    # run; errors from `rm` are deliberately ignored.
    subprocess.call('rm *.in', shell=True)
    subprocess.call('rm *.out', shell=True)

    def flush_batch():
        # Launch one subprocess per buffered item, then collect in order.
        filenames = []
        procs = []
        for data in batch:
            # Random file id avoids collisions between parallel workers.
            random_string = ''.join(
                random.choice(string.ascii_uppercase) for _ in range(64))
            with open('%s.in' % random_string, 'wb') as f:
                f.write(data.astype('float32').tobytes())
            filenames.append(random_string)
            proc = subprocess.Popen(
                [exe, '%s.in' % random_string,
                 '%s.out' % random_string])
            procs.append(proc)

        ret = []
        for proc, file in zip(procs, filenames):
            retcode = proc.wait()
            if retcode != 0:
                raise Exception('failed to do spatial pooling')
            data = np.fromfile('%s.out' % file, dtype='float32').reshape(
                (16, 330, 256, 19))
            # Remove the temp files only after a successful run.
            subprocess.check_call(['rm', '%s.in' % file])
            subprocess.check_call(['rm', '%s.out' % file])
            ret.append(data[..., 1:])
        batch.clear()
        return ret

    for data in gen:
        batch.append(data)
        if len(batch) == max_procs:
            yield from flush_batch()
    # Flush whatever remains after the input is exhausted.
    yield from flush_batch()
# Example #9
 # NOTE(review): this appears to be a stray duplicate of the `gen` closure
 # defined inside `_get_data_generator` above.  At module level the
 # one-space indent is an IndentationError, and `files`, `loc`,
 # `read_input_dir` and `read_data` are unresolved free variables here —
 # likely a copy/paste artifact; confirm and remove.
 def gen():
     for file in tqdm.tqdm(files):
         with read_input_dir(loc):
             data = read_data(file)
         yield file.split('.')[0], data
def render_synthetic_body_zone_data(mode):
    """Render synthetic a3daps body-zone images and cache them in HDF5.

    mode: 'sample' (10 meshes, 8 angles), 'sample_large' (100 meshes) or
    'all' (every mesh, 64 angles).  Rendering is resumable: meshes that
    already have all 2 * num_angles PNGs ('skin' + 'color' per angle) are
    skipped before Blender is invoked for the rest.  The PNGs are then
    packed into 'data.hdf5' ('x': (num_angles * num_meshes, 2, 256, 256),
    channel 0 grayscale skin render, channel 1 zone labels) with per-image
    angles saved to 'y.npy'; a 'done' marker makes later calls load the
    cache instead.

    Returns (x, angles): the open h5py dataset and the int32 angle array.
    """
    assert mode in ('sample', 'sample_large', 'all')

    num_angles = 64
    image_size = 256
    with read_input_dir('makehuman/a3daps/meshes'):
        mesh_paths = sorted(
            ['%s/%s' % (os.getcwd(), x) for x in glob.glob('*.mhx2')])
        # Seeded shuffle so the sample subsets are stable across runs.
        random.seed(0)
        random.shuffle(mesh_paths)
    if mode == 'sample':
        mesh_paths = mesh_paths[:10]
        num_angles = 8
    elif mode == 'sample_large':
        mesh_paths = mesh_paths[:100]

    # A mesh counts as fully rendered when it has 2 PNGs (skin + color)
    # for every angle; only the rest go back to Blender.
    done = Counter([x.split('_')[0] for x in glob.glob('*.png')])
    done = set([x for x, y in done.items() if y == 2 * num_angles])
    todo = [
        x for x in mesh_paths if x.split('/')[-1].split('.')[0] not in done
    ]

    if todo:
        with read_input_dir('makehuman/a3daps/textures'):
            texture_path = os.getcwd() + '/colors.png'
        with read_input_dir('scripts/a3daps/makehuman'):
            script_path = os.getcwd() + '/render_synthetic_body_zone_data.py'

        with open('config.json', 'w') as f:
            json.dump(
                {
                    'num_angles': num_angles,
                    'texture_path': texture_path,
                    'mesh_paths': todo
                }, f)
        subprocess.call(['blender', '--python', script_path, '--background'])
    # NOTE(review): subprocess.call ignores Blender's exit status; an
    # incomplete render is only caught by this file-count assertion.
    assert len(glob.glob('*.png')) == 2 * num_angles * len(mesh_paths)

    if not os.path.exists('done'):
        f = h5py.File('data.hdf5', 'w')
        x = f.create_dataset(
            'x', (num_angles * len(mesh_paths), 2, image_size, image_size))
        angles = np.zeros(num_angles * len(mesh_paths), dtype='int32')

        for i, file in enumerate(tqdm.tqdm(glob.glob('*_skin.png'))):
            # Filenames look like '<mesh>_<angle>_skin.png' — the angle is
            # the second '_'-separated field (assumes no '_' in mesh names).
            color_file = file.replace('skin', 'color')
            image = skimage.color.rgb2gray(skimage.io.imread(file))
            image = skimage.transform.resize(image, (image_size, image_size))
            color = skimage.io.imread(color_file)
            color = skimage.transform.resize(color, (image_size, image_size))
            labels = _convert_colors_to_label(color[..., 0:3])
            x[i, 0, ...] = image
            x[i, 1, ...] = labels
            angles[i] = int(file.split('_')[1])

        np.save('y.npy', angles)
        open('done', 'w').close()
    else:
        f = h5py.File('data.hdf5', 'r')
        x = f['x']
        angles = np.load('y.npy')
    return x, angles