Example #1
def cli(  # pylint: disable=too-many-branches, too-many-locals, too-many-statements, too-many-arguments
        train_prefix,
        part_rangestart,
        finalize=False,
        debug_run=False):
    """Run a RotatingTree experiment."""
    rangestart = part_rangestart
    pref = 'forest'
    pref += '_' + str(part_rangestart)
    if finalize:
        pref += '_final'
    if debug_run:
        pref += '_debug'
    out_fp = path.join(OUT_DIR, pref + '.z')
    LOGGER.info("Running for configuration `%s`.", out_fp)
    LOGGER.info("Loading data...")
    train_dta, train_annot, val_dta, val_annot = get_data(  # pylint: disable=unused-variable
        train_prefix, part_rangestart, finalize, debug_run)
    # Checks.
    if 0 < rangestart < 82:
        # Rotation matrices.
        assert train_annot.max() <= 1.
        assert train_annot.min() >= -1.
        assert val_annot.max() <= 1.
        assert val_annot.min() >= -1.
    import sklearn.ensemble
    rf = sklearn.ensemble.RandomForestRegressor(n_jobs=available_cpu_count())
    LOGGER.info("Fitting...")
    rf.fit(train_dta, train_annot)
    LOGGER.info("Writing results...")
    joblib.dump(rf, out_fp, compress=True)
    LOGGER.info("Done.")
Example #2
def sample_shots(  # pylint: disable=too-many-arguments
        input_folder,
        out_folder,
        num_shots_per_body=7,
        only_missing=False,
        num_threads=-1,
        use_light=False,
        factor=1.):
    """Sample body images with visibilities."""
    _LOGGER.info("Sampling 3D body shots.")
    if num_threads == -1:
        num_threads = available_cpu_count()
    else:
        assert num_threads > 0
    if not _path.exists(out_folder):
        _os.mkdir(out_folder)
    _np.random.seed(1)
    bodies = _glob(_path.join(input_folder, '*.pkl'))
    _LOGGER.info("%d bodies detected.", len(bodies))
    with _pymp.Parallel(num_threads, if_=num_threads > 1) as p:
        for body_idx in p.iterate(tqdm.tqdm(range(len(bodies)))):
            body_filename = bodies[body_idx]
            # Locate the matching visibility rendering, falling back through
            # the known filename variants.
            vis_filename = body_filename + '_vis.png'
            if not _path.exists(vis_filename):
                vis_filename = body_filename + '_vis_0.png'
            if not _path.exists(vis_filename):
                # Try something else.
                vis_filename = body_filename[:-9]
            out_names = [
                _os.path.join(
                    out_folder,
                    _path.basename(body_filename) + '.' + str(map_idx) +
                    '.png') for map_idx in range(num_shots_per_body)
            ]
            if only_missing and all(_path.exists(fname) for fname in out_names):
                continue
            vis_im = _cv2.imread(vis_filename)
            assert vis_im is not None, 'visualization not found: %s' % (
                vis_filename)
            renderings = render_body_impl(body_filename,
                                          [vis_im.shape[1], vis_im.shape[0]],
                                          num_shots_per_body,
                                          quiet=False,
                                          use_light=use_light,
                                          factor=factor)
            for map_idx, vmap in enumerate(renderings):
                _cv2.imwrite(out_names[map_idx], vmap[:, :, ::-1])
    _LOGGER.info("Done.")
Example #3
def main(image_list_file,  # pylint: disable=too-many-locals, too-many-statements, too-many-branches, too-many-arguments
         data_folder,
         result_label_folder,
         n_labels,
         as_nolr=False,
         ev_31_as_6=False):
    """Perform the evaluation for previously written results scoremaps."""
    LOGGER.info("Evaluating segmentation in folder `%s`.", result_label_folder)
    if 'voc' in image_list_file:
        voc_mode = True
        ev_31_as_6 = True
        n_labels = 7
        LOGGER.info("Using VOC part segmentation style.")
    else:
        voc_mode = False
    if ev_31_as_6:
        assert n_labels == 7
    if as_nolr:
        assert n_labels == 7
    classIOUs = np.zeros((n_labels,))
    overallIOU = 0.
    overallAccuracy = 0.
    # Parallel stats.
    TP = pymp.shared.array((n_labels,), dtype='float32')
    FP = pymp.shared.array((n_labels,), dtype='float32')
    FN = pymp.shared.array((n_labels,), dtype='float32')
    imgTP = pymp.shared.array((1,), dtype='float32')
    imgPixels = pymp.shared.array((1,), dtype='float32')
    stat_lock = pymp.shared.lock()
    warned = False
    with open(image_list_file, 'r') as inf:
        image_list = inf.readlines()
    # NOTE: the `False and ...` guard keeps `if_` constantly False, so the
    # loop below actually runs serially despite the shared arrays and lock.
    with pymp.Parallel(available_cpu_count(), if_=False and available_cpu_count() > 1) as p:
        for imgnames in p.iterate(tqdm.tqdm(image_list), element_timeout=20):  # pylint: disable=too-many-nested-blocks
            imgname = imgnames.split(" ")[0].strip()[1:]
            old_applied_scale = float(imgnames.split(" ")[2].strip())
            gtname = imgnames.split(" ")[1].strip()[1:]
            gt_file = path.join(data_folder, gtname)
            gtLabels = np.array(Image.open(gt_file))
            if gtLabels.ndim == 3:
                if not warned:
                    LOGGER.warn("Three-layer ground truth detected. Using first.")
                    warned = True
                gtLabels = gtLabels[:, :, 0]
            gtLabels = scipy.misc.imresize(gtLabels, 1. / old_applied_scale,
                                           interp='nearest')
            if as_nolr:
                gtLabels[gtLabels == 4] = 3
                gtLabels[gtLabels == 6] = 5
            LOGGER.debug("Evaluating `%s`...", imgname)
            result_file = path.join(result_label_folder,
                                    path.basename(imgname) + '.npy')
            result_probs = np.load(result_file)
            result_probs = np.array(
                [scipy.misc.imresize(layer, 1. / old_applied_scale, interp='bilinear', mode='F')
                 for layer in result_probs])
            if result_probs.shape[0] > n_labels and not (
                    ev_31_as_6 and result_probs.shape[0] == 32):
                LOGGER.warning('Result has invalid labels: %s!',
                               str(result_probs.shape[0]))
                continue
            if result_probs.min() < 0 or result_probs.max() > 1.:
                LOGGER.warning('Invalid result probabilities: min `%f`, max `%f`!',
                               result_probs.min(), result_probs.max())
                continue
            else:
                MAP = np.argmax(result_probs, axis=0)
                if as_nolr:
                    MAP[MAP == 4] = 3
                    MAP[MAP == 6] = 5
                if ev_31_as_6:
                    if voc_mode:
                        groups_to_use = VOC_REGION_GROUPS
                    else:
                        groups_to_use = six_region_groups
                    for classID in range(1, 32):
                        new_id = -1
                        for group_idx, group in enumerate(groups_to_use):
                            for grelem in group:
                                if list(regions.reverse_mapping.keys()).index(grelem) == classID - 1:  # pylint: disable=no-member
                                    new_id = group_idx + 1
                        assert new_id > 0
                        if not voc_mode:
                            gtLabels[gtLabels == classID] = new_id
                        MAP[MAP == classID] = new_id
                # Accumulate per-class confusion statistics; pixels whose
                # ground truth is 255 are treated as "ignore".
                for classID in range(n_labels):
                    classGT = np.equal(gtLabels, classID)
                    classResult = np.equal(MAP, classID)
                    classResult[np.equal(gtLabels, 255)] = 0
                    with stat_lock:
                        TP[classID] += np.count_nonzero(classGT & classResult)
                        FP[classID] += np.count_nonzero(classResult & ~classGT)
                        FN[classID] += np.count_nonzero(~classResult & classGT)
                imgResult = MAP
                imgGT = gtLabels
                imgResult[np.equal(MAP, 255)] = 0
                imgGT[np.equal(gtLabels, 255)] = 0
                with stat_lock:
                    imgTP[0] += np.count_nonzero(np.equal(imgGT, imgResult))
                    imgPixels[0] += np.size(imgGT)
    # Per-class IOU = TP / (TP + FP + FN).
    for classID in range(n_labels):
        classIOUs[classID] = TP[classID] / (TP[classID] + FP[classID] + FN[classID])
    if as_nolr:
        # Classes 4 and 6 were merged into 3 and 5 above; drop their entries.
        classIOUs = classIOUs[[0, 1, 2, 3, 5]]
    overallIOU = np.mean(classIOUs)
    overallAccuracy = imgTP[0] / imgPixels[0]
    if n_labels == 32:
        region_names = ['background'] + list(regions.reverse_mapping.values())[:-1]  # pylint: disable=no-member
        LOGGER.info("Class IOUs:")
        for region_name, class_iou in zip(region_names, classIOUs):
            LOGGER.info('%s: %f', region_name, class_iou)
    else:
        LOGGER.info('Class IOUs: %s.', str(classIOUs))
    LOGGER.info('Overall IOU: %s.', str(overallIOU))
    LOGGER.info('Overall Accuracy: %s.', str(overallAccuracy))
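
This example, too, assumes a module header that is not shown. Note that scipy.misc.imresize only exists in older SciPy releases (it was removed in SciPy 1.3), so the snippet is tied to a legacy environment. An assumed header could be:

import logging
from os import path

import numpy as np
import pymp
import scipy.misc  # imresize was removed in SciPy 1.3; requires an older release
import tqdm
from PIL import Image

LOGGER = logging.getLogger(__name__)

# Project-specific pieces referenced but not defined here:
#   regions, six_region_groups, VOC_REGION_GROUPS  -- body-region label definitions
#   available_cpu_count()                          -- CPU-count helper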