Example #1
def run(name):
    inliers_threshold = 50.  # inlier residual threshold
    n_samples = 5000         # number of minimal sample sets to draw
    epsilon = 0              # a contrario test parameter

    print(name)

    path = '../data/{0}/'.format(name)
    data = load(path)

    model_class = fundamental.Fundamental
    img_size = data['img2'].shape[:2]
    # NFA background probability derived from the image dimensions (2 * diagonal / area)
    nfa_proba = (2. * np.linalg.norm(img_size) / np.prod(img_size))

    # uniformly sample minimal subsets and build model hypotheses from them
    sampler = sampling.UniformSampler(n_samples)
    generator = sampling.ModelGenerator(model_class, data['data'], sampler)
    # generator = multigs.ModelGenerator(model_class, data['data'], n_samples)
    # a contrario model validation and global inlier thresholding
    min_sample_size = model_class().min_sample_size
    ac_tester = ac.ImageTransformNFA(epsilon, nfa_proba, min_sample_size)
    thresholder = membership.GlobalThresholder(inliers_threshold)

    # fix the random seed for reproducibility
    seed = 0
    # seed = np.random.randint(0, np.iinfo(np.uint32).max)
    print('seed:', seed)
    np.random.seed(seed)

    prefix = name
    test_transformations.test(model_class, data, prefix, generator,
                              thresholder, ac_tester, name)

    plt.close('all')
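This snippet assumes the usual module-level imports (numpy as np, matplotlib.pyplot as plt) together with the project's sampling, membership, ac, fundamental, and test_transformations modules and the load() helper used above. A minimal way to drive it, where the dataset name is a hypothetical placeholder for a directory under ../data/:

if __name__ == '__main__':
    run('castle')  # 'castle' is a placeholder dataset name, not one shipped with the code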
Example #2
def run(subsampling=1, inliers_threshold=0.1, run_regular=True):
    log_filename = 'logs/piazza_bra_s{0}.txt'.format(subsampling)
    logger = utils.Logger(log_filename)
    sys.stdout = logger

    sigma = 1          # bandwidth of the Gaussian local sampler
    epsilon = 0        # a contrario test parameter
    local_ratio = 3.   # local thresholding ratio; 1/local_ratio is the NFA background probability

    name = 'Piazza_Bra'
    dirname = '../data/' + name + '/'

    mat = scipy.io.loadmat(dirname + 'Samantha_Bra.mat')
    data = mat['Points']

    # subsample the input points
    points_considered = np.arange(0, data.shape[0], subsampling)
    data = data[points_considered, :]

    n_samples = data.shape[0] * 2
    # sample minimal subsets in local neighborhoods around each point
    sampler = sampling.GaussianLocalSampler(sigma, n_samples)
    ransac_gen = sampling.ModelGenerator(plane.Plane, data, sampler)
    thresholder = membership.LocalThresholder(inliers_threshold,
                                              ratio=local_ratio)
    min_sample_size = plane.Plane().min_sample_size
    ac_tester = ac.BinomialNFA(epsilon, 1. / local_ratio, min_sample_size)

    seed = 0
    # seed = np.random.randint(0, np.iinfo(np.uint32).max)
    print('seed:', seed)
    np.random.seed(seed)

    output_prefix = name + '_n{0}'.format(data.shape[0])
    test_3d.test(plane.Plane, data, output_prefix, ransac_gen, thresholder,
                 ac_tester, run_regular=run_regular)

    plt.close('all')

    sys.stdout = logger.stdout
    logger.close()

    return log_filename
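A hedged usage sketch; it assumes the Piazza_Bra data and a logs/ directory exist, and the subsampling factors are purely illustrative:

if __name__ == '__main__':
    for s in [1, 2, 5]:  # illustrative subsampling factors
        log_file = run(subsampling=s)
        print('results written to', log_file)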
Example #3
def run(types, inliers_threshold=0.02, local_ratio=3., restimate_gt=False):

    # Sampling ratio with respect to the number of elements
    sampling_factor = 10
    # a contrario test parameters
    epsilon = 0.

    config = {'Star5': line.Line,
              'Star11': line.Line,
              'Stairs': line.Line,
              'Circles': circle.Circle,
              }

    stats_list = []
    mat = scipy.io.loadmat('../data/JLinkageExamples.mat')
    for example in mat.keys():
        # keep only the .mat variables whose name starts with one of the requested types
        for c in types:
            if example.find(c) == 0:
                ex_type = c
                break
        else:
            continue

        model_class = config[ex_type]
        data = mat[example].T

        min_sample_size = model_class().min_sample_size
        n_samples = data.shape[0] * sampling_factor * min_sample_size

        sampler = sampling.UniformSampler(n_samples)
        generator = sampling.ModelGenerator(model_class, data, sampler)

        proba = 1. / local_ratio
        ac_tester = ac.BinomialNFA(epsilon, proba, min_sample_size)
        thresholder = membership.LocalThresholder(inliers_threshold,
                                                  ratio=local_ratio)

        # read the number of ground-truth groups from the variable name; default to 4
        match = re.match('[a-zA-Z]+[0-9]*_', example)
        try:
            match = re.search('[0-9]+', match.group())
            n_groups = int(match.group())
        except AttributeError:
            n_groups = 4
        if restimate_gt:
            gt_groups = ground_truth(data, n_groups, model_class=model_class,
                                     thresholder=thresholder)
        else:
            gt_groups = ground_truth(data, n_groups)

        seed = 0
        # seed = np.random.randint(0, np.iinfo(np.uint32).max)
        print('seed:', seed)
        np.random.seed(seed)

        output_prefix = example
        if restimate_gt:
            dir_name = 'test_2d_restimate_gt'
        else:
            dir_name = 'test_2d_given_gt'

        res = test(model_class, data, output_prefix, generator, thresholder,
                   ac_tester, gt_groups, dir_name=dir_name)
        stats_list.append(res)

        print('-'*40)
        plt.close('all')

    reg_list, comp_list = zip(*stats_list)

    print('Statistics of regular bi-clustering')
    test_utils.compute_stats(reg_list)
    print('Statistics of compressed bi-clustering')
    test_utils.compute_stats(comp_list)
    print('-'*40)
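A possible invocation, assuming JLinkageExamples.mat is in place; the type lists below only select which variables of the .mat file are processed:

if __name__ == '__main__':
    run(['Star5', 'Star11', 'Stairs'])    # line examples, given ground truth
    run(['Circles'], restimate_gt=True)   # circle examples, re-estimated ground truth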
Example #4
def evaluate_york(res_dir_name, run_with_lsd=False):
    # RANSAC parameter
    inliers_threshold = np.pi * 1e-2

    logger = test_utils.Logger('logs/' + res_dir_name + '.txt')
    sys.stdout = logger

    # path to the York Urban dataset (hard-coded; adjust to the local setup)
    dir_name = '/Users/mariano/Documents/datasets/YorkUrbanDB/'
    sampling_factor = 4
    epsilon = 0
    local_ratio = 3.

    ac_tester = ac.BinomialNFA(epsilon, 1. / local_ratio,
                               vp.VanishingPoint().min_sample_size)
    thresholder = membership.LocalThresholder(inliers_threshold,
                                              ratio=local_ratio)

    stats_list = []
    for i, example in enumerate(os.listdir(dir_name)):
        if not os.path.isdir(dir_name + example):
            continue
        img_name = dir_name + '{0}/{0}.jpg'.format(example)
        gray_image = PIL.Image.open(img_name).convert('L')

        gt_name = dir_name + '{0}/{0}LinesAndVP.mat'.format(example)
        mat = scipy.io.loadmat(gt_name)
        gt_lines = mat['lines']
        # ground-truth endpoints are stored as consecutive rows; pair them into segments
        gt_segments = [lsd.Segment(gt_lines[k, :], gt_lines[k + 1, :])
                       for k in range(0, len(gt_lines), 2)]
        gt_segments = np.array(gt_segments)
        gt_association = np.squeeze(mat['vp_association'])

        if run_with_lsd:
            # detect segments with LSD and map the ground-truth associations onto them
            segments = lsd.compute(gray_image)
            segments = np.array(segments)
            gt_groups = ground_truth(gt_association, gt_segments, segments,
                                     thresholder=thresholder)
        else:
            # work directly on the ground-truth segments
            segments = gt_segments
            gt_groups = [gt_association == v for v in np.unique(gt_association)]

        sampler = sampling.UniformSampler(len(segments) * sampling_factor)
        ransac_gen = sampling.ModelGenerator(vp.VanishingPoint, segments,
                                             sampler)

        seed = 0
        # seed = np.random.randint(0, np.iinfo(np.uint32).max)
        print('seed:', seed)
        np.random.seed(seed)

        res = test(gray_image, segments, res_dir_name, example, ransac_gen,
                   thresholder, ac_tester, gt_groups=gt_groups)
        stats_list.append(res)

        print('-'*40)
        plt.close('all')

    reg_list, comp_list = zip(*stats_list)

    print('Statistics of regular bi-clustering')
    test_utils.compute_stats(reg_list)
    print('Statistics of compressed bi-clustering')
    test_utils.compute_stats(comp_list)

    sys.stdout = logger.stdout
    logger.close()
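A minimal entry point, assuming the YorkUrbanDB path above is valid; the results-directory name is a hypothetical placeholder:

if __name__ == '__main__':
    evaluate_york('york_gt_segments', run_with_lsd=False)  # placeholder directory name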
Example #5
def run(subsampling=1, inliers_threshold=0.1, run_regular=True):
    log_filename = 'logs/pozzoveggiani_s{0}.txt'.format(subsampling)
    logger = utils.Logger(log_filename)
    sys.stdout = logger

    sigma = 1
    epsilon = 0
    local_ratio = 3

    name = 'PozzoVeggiani'
    dirname = '../data/' + name + '/'

    mat = scipy.io.loadmat(dirname + 'Results.mat')
    # 3D points, camera projection matrices and per-camera point visibility
    data = mat['Points'].T
    proj_mat = mat['Pmat']
    visibility = mat['Visibility']

    # Removing far away points for display
    keep = functools.reduce(
        np.logical_and,
        [data[:, 0] > -10, data[:, 0] < 20, data[:, 2] > 10, data[:, 2] < 45])
    data = data[keep, :]
    visibility = visibility[keep, :]
    # Re-order dimensions and invert vertical direction to get upright data
    data[:, 1] *= -1
    data = np.take(data, [0, 2, 1], axis=1)
    proj_mat[:, 1, :] *= -1
    proj_mat = np.take(proj_mat, [0, 2, 1, 3], axis=1)

    # subsample the input points
    points_considered = np.arange(0, data.shape[0], subsampling)
    data = data[points_considered, :]
    visibility = visibility[points_considered, :]

    n_samples = data.shape[0] * 2
    sampler = sampling.GaussianLocalSampler(sigma, n_samples)
    generator = sampling.ModelGenerator(plane.Plane, data, sampler)
    thresholder = membership.LocalThresholder(inliers_threshold,
                                              ratio=local_ratio)
    min_sample_size = plane.Plane().min_sample_size
    ac_tester = ac.BinomialNFA(epsilon, 1. / local_ratio, min_sample_size)

    projector = Projector(data, visibility, proj_mat, dirname)

    seed = 0
    # seed = np.random.randint(0, np.iinfo(np.uint32).max)
    print('seed:', seed)
    np.random.seed(seed)

    output_prefix = name + '_n{0}'.format(data.shape[0])
    test_3d.test(plane.Plane,
                 data,
                 output_prefix,
                 generator,
                 thresholder,
                 ac_tester,
                 plotter=projector,
                 run_regular=run_regular)

    plt.close('all')

    sys.stdout = logger.stdout
    logger.close()

    return log_filename
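As with the Piazza_Bra example, a minimal driver might look like the following (the arguments are illustrative):

if __name__ == '__main__':
    log_file = run(subsampling=2, run_regular=False)
    print('results written to', log_file)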