Code example #1
File: metric.py  Project: sbojarovski/py-mapper
import numpy as np
# pdist (SciPy-compatible) and the helpers neighborhood_graph, ncomp and
# graph_distance are defined elsewhere in metric.py.


def intrinsic_metric(data,
                     k=1,
                     eps=0.,
                     metricpar={},
                     allow_disconnected=False,
                     verbose=True,
                     callback=None):
    r'''Intrinsic metric: shortest-path distances in the neighborhood
    graph of the data set.'''
    data = np.array(data)
    if data.ndim == 1:
        # dissimilarity matrix
        assert metricpar == {}, ('No optional parameter is allowed for a '
                                 'dissimilarity matrix.')
        D = data
    else:
        # vector data
        D = pdist(data, **metricpar)
    G = neighborhood_graph(D, k, eps, verbose=verbose, callback=callback)

    if not allow_disconnected:
        c = ncomp(*G)
        if c > 1:
            raise AssertionError('The neighborhood graph is disconnected. '
                                 'It has {0} components.'.format(c))
    return graph_distance(*G, callback=callback)
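A minimal usage sketch, assuming the function above is importable from a
mapper.metric module (the import path is an assumption, not part of the
listing):

import numpy as np
from mapper.metric import intrinsic_metric  # hypothetical import path

points = np.random.rand(50, 3)      # 50 points in R^3
# Shortest-path distances in the 6-nearest-neighbor graph of the sample.
d = intrinsic_metric(points, k=6)
# graph_distance presumably returns a condensed distance vector in the
# same layout as scipy.spatial.distance.pdist, i.e. of length 50*49//2.
print(d.shape)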
Code example #2
File: metric.py  Project: sbojarovski/py-mapper
# Excerpt from the tail of metric.py: the ``try:`` matching this ``except``
# and the ``if`` matching the ``else:`` below lie above the excerpt, as do
# the imports (sys, numpy as np) and the pure-Python neighborhood_graph.
    except ImportError:
        sys.stderr.write('Intrinsic metric is not available.\n')
else:
    '''Test neighborhood_graph from cmappertools against the Python version.'''
    import cmappertools

    seed = np.random.randint(10000000000)
    print("Seed: {0}".format(seed))
    np.random.seed(seed)

    for i in range(100):
        print("Test {0}/100:".format(i + 1))
        N = np.random.randint(20, 200)
        NN = N * (N - 1) // 2         # length of a condensed distance vector
        D = np.random.rand(NN)        # random dissimilarity matrix
        eps = np.random.rand(1)
        k = np.random.randint(1, N)

        G = neighborhood_graph(D, k, eps)
        H = cmappertools.neighborhood_graph(D, k, eps)

        D = cmappertools.graph_distance(*G)
        E = cmappertools.graph_distance(*H)

        assert np.all(D == E), np.max(np.abs(D - E))
    print("OK.")

# Local variables:
# mode: Python
# End:
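The test draws NN = N*(N-1)//2 random dissimilarities, which is exactly the
length of a condensed distance vector in SciPy's pdist layout. A small
sketch of that layout (standard SciPy convention, independent of mapper):

import numpy as np
from scipy.spatial.distance import pdist, squareform

# The condensed vector stores the upper triangle row by row; the pair
# (i, j) with i < j sits at index N*i - i*(i+1)//2 + (j - i - 1).
N = 5
X = np.random.rand(N, 2)
d = pdist(X)                        # length N*(N-1)//2 == 10
i, j = 1, 3
idx = N * i - i * (i + 1) // 2 + (j - i - 1)
assert d[idx] == squareform(d)[i, j]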
Code example #3
File: filters.py  Project: Sandy4321/mapper
import numpy as np
import scipy.sparse
import scipy.sparse.linalg as spla
# pdist, n_obs, neighborhood_graph, ncomp, zero_filter and Laplacian are
# helpers defined in the surrounding mapper modules.


def graph_Laplacian(data, eps, n=1, k=1, weighted_edges=False, sigma_eps=1.,
                    normalized=True,
                    metricpar={}, verbose=True,
                    callback=None):
    r'''Graph Laplacian of the neighborhood graph.

* First, if *k* is 1, form the *eps*-neighborhood graph of the data set: vertices are the data points; two points are connected if their distance is at most *eps*.

* Alternatively, if *k* is greater than 1, form the neighborhood graph by
  connecting each point to its :math:`k` nearest neighbors. Each point counts
  as its own first nearest neighbor, so feasible values start at :math:`k=2`.

* If *weighted_edges* is ``False``, each edge gets weight 1. Otherwise, each
  edge is weighted with

  .. math::

    \exp\left(-\frac{d^2}{2\sigma^2}\right),

  where :math:`\sigma=\mathtt{eps}\cdot\mathtt{sigma\_eps}` and :math:`d` is
  the distance between the two points.

* Form the graph Laplacian. The graph Laplacian is a self-adjoint operator on
  the real vector space spanned by the vertices and can thus be described by a
  symmetric matrix :math:`L`:

  If *normalized* is false, :math:`L` is closely related to the adjacency matrix of the graph: it has entries :math:`-w(i,j)` whenever nodes :math:`i` and :math:`j` are connected by an edge of weight :math:`w(i,j)` and zero if there is no edge. The :math:`i`-th diagonal entry holds the degree :math:`\deg(i)` of the corresponding vertex, so that row and column sums are zero.

  If *normalized* is true, each row :math:`i` of :math:`L` is additionally scaled by :math:`1/\sqrt{\deg(i)}`, and so is each column. This destroys the zero row and column sums but preserves symmetry.

* Return the :math:`n`-th eigenvector of the graph Laplacian. The index is 0-based: the 0-th eigenvector is constant on all vertices and corresponds to the eigenvalue 0, so :math:`n=1` returns the Fiedler vector, the eigenvector for the second-smallest eigenvalue.

The normalized variant seems to give consistently better results, so it is always chosen in the GUI. However, this experience is based on only a few examples, so do not hesitate to try the non-normalized version if there is a reason for it.

Reference: [R9]_; see especially Section 6.3 for normalization.'''
    assert n >= 1, 'The rank of the eigenvector must be positive.'
    assert isinstance(k, int)
    assert k >= 1
    if data.ndim == 1:
        # dissimilarity matrix
        assert metricpar == {}, ('No optional parameter is allowed for a '
                                 'dissimilarity matrix.')
        D = data
        N = n_obs(D)
    else:
        # vector data
        D = pdist(data, **metricpar)
        N = len(data)
    if callback:
        callback('Computing: neighborhood graph.')
    rowstart, targets, weights = \
        neighborhood_graph(D, k, eps, diagonal=True,
                           verbose=verbose, callback=callback)

    c = ncomp(rowstart, targets)
    if c > 1:
        print('The neighborhood graph has {0} components. '
              'Returning zero values.'.format(c))
        return zero_filter(data)

    weights = Laplacian(rowstart, targets, weights, weighted_edges,
                        eps, sigma_eps, normalized)

    L = scipy.sparse.csr_matrix((weights, targets, rowstart))
    del weights, targets, rowstart

    if callback:
        callback('Computing: eigenvectors.')

    assert n < N, ('The rank of the eigenvector must be smaller than the '
                   'number of data points.')

    if hasattr(spla, 'eigsh'):
        w, v = spla.eigsh(L, k=n+1, which='SA')
    else:  # for SciPy < 0.9.0
        w, v = spla.eigen_symmetric(L, k=n+1, which='SA')
    # Strange: computing more eigenvectors seems faster.
    # w, v = spla.eigsh(L, k=n+1, sigma=0., which='LM')
    if verbose:
        print('Eigenvalues: {0}.'.format(w))
    order = np.argsort(w)
    if w[order[0]] < 0 and w[order[1]] < abs(w[order[0]]):
        raise RuntimeError('Negative eigenvalue of the graph Laplacian '
                           'found: {0}'.format(w))

    return v[:, order[n]]
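To make the docstring's construction concrete, here is a hedged sketch (it
does not use mapper's internal Laplacian helper) that builds the normalized
Laplacian of a tiny unweighted path graph with SciPy and extracts the
Fiedler vector via the same eigsh(..., which='SA') call as above:

import numpy as np
import scipy.sparse
import scipy.sparse.linalg as spla

# Path graph 0-1-2-3 with unit edge weights.
A = scipy.sparse.csr_matrix(np.array(
    [[0., 1., 0., 0.],
     [1., 0., 1., 0.],
     [0., 1., 0., 1.],
     [0., 0., 1., 0.]]))
deg = np.asarray(A.sum(axis=1)).ravel()
L = scipy.sparse.diags(deg) - A                  # rows and columns sum to 0
D_inv_sqrt = scipy.sparse.diags(1.0 / np.sqrt(deg))
L_norm = D_inv_sqrt @ L @ D_inv_sqrt             # symmetric normalization

w, v = spla.eigsh(L_norm, k=2, which='SA')       # two smallest eigenvalues
order = np.argsort(w)
fiedler = v[:, order[1]]                         # n = 1: the Fiedler vector
# For a path graph the Fiedler vector is monotone along the path, so it
# recovers the vertex order 0, 1, 2, 3 (up to sign).
print(w[order], fiedler)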