Example #1
def pc_generate_cyflannKDTree(pc_xyz):
    # conda install -y -c conda-forge cyflann
    try:
        import cyflann
    except ImportError:
        raise ImportError("cyflann not installed.")
    cyflann.set_distance_type('euclidean')
    pc_xyz_cyflannKDTree_tree = cyflann.FLANNIndex()
    pc_xyz_cyflannKDTree_tree.build_index(pc_xyz, algorithm='kdtree_single')
    return pc_xyz_cyflannKDTree_tree
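
As a usage sketch (the random point cloud and the neighbour count are illustrative assumptions, not part of the original snippet), the returned index can be queried with nn_index:

import numpy as np

pc_xyz = np.random.rand(1000, 3)            # hypothetical XYZ point cloud
tree = pc_generate_cyflannKDTree(pc_xyz)
ids, sq_dists = tree.nn_index(pc_xyz, 5)    # FLANN returns squared euclidean distances
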
Example #2
import pickle

import numpy as np
import cyflann


def test_pickle():
    data = np.random.normal(scale=100, size=(1000, 3))
    query = np.random.normal(scale=100, size=(100, 3))

    idx = cyflann.FLANNIndex(algorithm='kdtree_single')
    idx.build_index(data)
    res_i, res_dists = idx.nn_index(query, 2)

    s = pickle.dumps(idx)
    del idx
    idx = pickle.loads(s)

    res_i2, res_dists2 = idx.nn_index(query, 2)
    assert np.all(res_i == res_i2)
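
Since the test shows FLANNIndex survives pickle.dumps/pickle.loads, the same mechanism gives straightforward disk persistence; a minimal sketch (the file name is illustrative):

import pickle

import numpy as np
import cyflann

data = np.random.normal(scale=100, size=(1000, 3))
idx = cyflann.FLANNIndex(algorithm='kdtree_single')
idx.build_index(data)

with open('flann_index.pkl', 'wb') as f:   # hypothetical path
    pickle.dump(idx, f)
with open('flann_index.pkl', 'rb') as f:
    restored = pickle.load(f)

ids, dists = restored.nn_index(data[:10], 2)
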
Example #3
import tempfile

import cyflann

# _not_passed, DEFAULT_TEMPDIR, and register_read_ndarray are assumed to be
# defined at module level alongside this helper.
def flann_from_typedbytes(inp, tempdir=_not_passed):
    if tempdir is _not_passed:
        tempdir = DEFAULT_TEMPDIR

    register_read_ndarray(inp)
    length = inp.read_int()
    pts = inp._read()
    index_bytes = inp.read_bytestring()

    with tempfile.NamedTemporaryFile(dir=tempdir) as f:
        f.write(index_bytes)
        f.flush()
        del index_bytes
        index = cyflann.FLANNIndex()
        index.load_index(f.name, pts)

    return index
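
The temporary-file dance above exists because FLANN's loader reads from a named file, so the serialized bytes must touch disk before load_index can see them. The same round trip can be exercised directly; a minimal sketch, assuming cyflann mirrors pyflann's save_index(filename) (only load_index appears in the snippet itself):

import tempfile

import numpy as np
import cyflann

pts = np.random.normal(size=(100, 2))
idx = cyflann.FLANNIndex(algorithm='kdtree_single')
idx.build_index(pts)

with tempfile.NamedTemporaryFile() as f:
    idx.save_index(f.name)             # assumed API, mirroring pyflann
    restored = cyflann.FLANNIndex()
    restored.load_index(f.name, pts)   # load_index needs the original points
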
Example #4
import numpy as np
import pyflann
import cyflann


# dim and k are supplied by the test runner's parametrization
# (see the sketch after this test).
def test_normal(dim, k):
    data = np.random.normal(scale=100, size=(1000, dim))
    query = np.random.normal(scale=100, size=(100, dim))

    py = pyflann.FLANN(algorithm='kdtree_single')
    cy = cyflann.FLANNIndex(algorithm='kdtree_single')

    py.build_index(data)
    cy.build_index(data)

    py_ids, py_dists = py.nn_index(query, k)
    cy_ids, cy_dists = cy.nn_index(query, k)

    assert np.all(py_ids == cy_ids), \
           "{}/{} different".format(np.sum(py_ids != cy_ids), py_ids.size)
    assert np.allclose(py_dists, cy_dists, atol=1e-5, rtol=1e-4), \
           "max distance {}".format(np.abs(py_dists - cy_dists).max())
Example #5
from functools import partial

import numpy as np
import pyflann
import cyflann


# check_match is assumed to be a module-level helper comparing the two result
# sets; the yields below make this a nose-style generator test.
def test_normal():
    for dim in [1, 4, 10]:
        for k in [1, 2, 5]:
            data = np.random.normal(scale=100, size=(1000, dim))
            query = np.random.normal(scale=100, size=(100, dim))

            py = pyflann.FLANN(algorithm='kdtree_single')
            cy = cyflann.FLANNIndex(algorithm='kdtree_single')

            py.build_index(data)
            cy.build_index(data)

            py_ids, py_dists = py.nn_index(query, k)
            cy_ids, cy_dists = cy.nn_index(query, k)

            f = partial(check_match, py_ids, py_dists, cy_ids, cy_dists)
            f.description = 'normal vs pyflann - dim {} - k {}'.format(dim, k)
            yield f
Example #6
from contextlib import closing
from functools import partial
from StringIO import StringIO  # Python 2; io.BytesIO is the Python 3 analogue

import numpy as np
import typedbytes as tb
import cyflann

# register_write, register_read, and _check_flann are assumed to be
# module-level helpers of the original test module.
def test_typedbytes_flann():
    pts = np.random.normal(size=(100, 2))
    for algorithm in ['kdtree_single', 'linear']:
        idx = cyflann.FLANNIndex(algorithm=algorithm)
        idx.build_index(pts)

        with closing(StringIO()) as sio:
            out = tb.Output(sio)
            register_write(out)
            out.write(idx)

            sio.seek(0)
            inp = tb.Input(sio)
            register_read(inp)
            idx2 = inp.read()

            fn = partial(_check_flann, idx, idx2)
            fn.description = "flann typedbytes io - {}".format(algorithm)
            yield fn
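
The yield-based style in Examples #5 and #6 is the nose generator-test idiom, which modern pytest no longer supports. A self-contained sketch of the same pattern with pytest.mark.parametrize, here checking a pickle round trip per algorithm (the test name is illustrative):

import pickle

import numpy as np
import cyflann
import pytest

@pytest.mark.parametrize('algorithm', ['kdtree_single', 'linear'])
def test_pickle_roundtrip(algorithm):
    pts = np.random.normal(size=(100, 2))
    idx = cyflann.FLANNIndex(algorithm=algorithm)
    idx.build_index(pts)

    idx2 = pickle.loads(pickle.dumps(idx))

    ids, _ = idx.nn_index(pts, 3)
    ids2, _ = idx2.nn_index(pts, 3)
    assert np.all(ids == ids2)
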
Example #7
    def fit(self, x, y, f=0.005, iterr=3, order=1):
        """
        Locally smoothed regression with the LOWESS algorithm.

        Parameters
        ----------
        x: float [n, dim] array
            Values of x for which f(x) is known (e.g. measured). The shape of
            this is (n, dim), where dim is the number of dimensions and n is
            the number of distinct coordinates sampled.

        y: float [n, ] array
            The known values of f(x) at these points. This has shape (n,).

        f: float
            Bandwidth or smoothing parameter. Determines how much of the data
            is used to fit each local polynomial. 0.1 means 10% of the data is
            used to fit a single data point. Default: 0.005

        iterr: int
            Determines how often a robust weighted fit is conducted.
            iterr > 1: apply the robustification procedure from [Cleveland79],
            page 831. Default: 3

        order: int
            The degree of the smoothing functions. 1 is locally linear,
            2 locally quadratic, etc. Default: 1
        """
        self.x = x
        self.y = y
        
        n = y.size
        x_dim = x.shape[-1]
        # Neighbourhood size per local fit; ceil comes from the math module.
        self.r = int(ceil(f * n))
        
        timer = time.time()
        X = x
        #nbrs = NearestNeighbors(n_neighbors=self.r, algorithm='ball_tree', n_jobs=-1).fit(X)
        #distances, self.indices = nbrs.kneighbors(X)
        #tree = spatial.KDTree(X)
        #distances, self.indices = tree.query(X, k=self.r)
        cy = cyflann.FLANNIndex(algorithm='kdtree_single')
        cy.build_index(X)
        self.indices, distances = cy.nn_index(X, self.r)
        # FLANN returns squared euclidean distances, so take the square root.
        distances = np.sqrt(distances)
        time_for_kNN = time.time() - timer
        print "%0.2fsec needed for kNN" % time_for_kNN
        
        # Tricube weights from the distance to each point's r-th neighbour.
        self.w = np.clip(distances / distances[:, -1][:, None], 0.0, 1.0)
        self.w = (1 - self.w ** 3) ** 3
        
        # Enumerate the unique (sorted) index combinations that define the
        # polynomial terms of the local model of the given order.
        positions = [range(x_dim + 1)] * order
        permutations = list(itertools.product(*positions))
        sorted_permutations = [sorted(a) for a in permutations]
        self.permutation_indeces = [list(b) for b in set(tuple(b) for b in sorted_permutations)]
        self.permutation_indeces.sort()

        yest = np.zeros(n)
        self.delta = np.ones(n)
        for iteration in range(iterr):

            # Process each voxel in parallel; parallelization is a
            # project-specific helper not shown in this snippet.
            p = parallelization(display=True)
            yest_list = p.start(self.single_voxel_fit, n, range(n))
            yest = np.asarray(yest_list)
            
            # Bisquare robustness weights, following [Cleveland79].
            residuals = y - yest
            s = np.median(np.abs(residuals))
            self.delta = np.clip(residuals / (6.0 * s), -1, 1)
            self.delta = (1 - self.delta ** 2) ** 2

        return yest
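
As a usage sketch, assuming the method above belongs to a class named, say, Lowess (the class definition and its single_voxel_fit method are not shown in this snippet):

import numpy as np

x = np.linspace(0, 10, 500)[:, None]              # (n, dim) with dim = 1
y = np.sin(x[:, 0]) + 0.3 * np.random.randn(500)  # noisy samples of f(x)

model = Lowess()                                  # hypothetical class name
yest = model.fit(x, y, f=0.05, iterr=3, order=1)  # smoothed estimate of y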