Example No. 1
import numpy as np
import scipy.spatial.distance as ssd
from numpy import linalg


def pcosine(u, v):
    """Computes the Cosine distance (positive space) between 1-D arrays.

    The Cosine distance (positive space) between `u` and `v` is defined as

    .. math::

        d(u, v) = 1 - \\left| \\frac{u \\cdot v}{||u||_2 ||v||_2} \\right|

    where :math:`u \\cdot v` is the dot product of :math:`u` and :math:`v`.

    Parameters
    ----------
    u : array
        Input array.
    v : array
        Input array.

    Returns
    -------
    cosine : float
        Cosine distance between `u` and `v`.

    """

    # validate vectors like scipy does
    u = ssd._validate_vector(u)
    v = ssd._validate_vector(v)

    dist = 1. - np.abs(np.dot(u, v) / (linalg.norm(u) * linalg.norm(v)))

    return dist
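
A minimal usage sketch (the vectors below are illustrative, not from the original source):

a = np.array([1., 0., 0.])
b = np.array([0., 1., 0.])
print(pcosine(a, b))   # orthogonal vectors -> 1.0
print(pcosine(a, -a))  # anti-parallel vectors -> 0.0, since the absolute value folds them together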
Example No. 2
from numpy.linalg import norm
from scipy.spatial.distance import _validate_vector


def wt_euclidean(self, u, v, w):
    """Euclidean distance between `u` and `v`, with each component
    difference scaled by the corresponding entry of `w`.
    """
    u = _validate_vector(u)
    v = _validate_vector(v)
    dist = norm(w * (u - v))
    return dist
Example No. 3
from math import sqrt
from typing import Union

import numpy as np
import scipy.spatial.distance as scipy_dist


def mahalanobis(x: Union[list, tuple, np.ndarray, float],
                mean: Union[list, tuple, np.ndarray, float],
                cov: Union[list, tuple, np.ndarray, float]) -> float:
    """
    Computes the Mahalanobis distance of the state vector `x` from the
    Gaussian distribution with mean `mean` and covariance `cov`. This can be
    thought of as the number of standard deviations `x` is from the mean;
    i.e., a return value of 3 means `x` is 3 std from the mean.

    Parameters
    ----------
    x: (N,) array_like, or float,
        Input state vector
    mean: (N,) array_like, or float,
        mean of multivariate Gaussian
    cov: (N, N) array_like  or float,
        covariance of the multivariate Gaussian

    Returns
    -------
    mahalanobis: float,
        The Mahalanobis distance between vectors `x` and `mean`
    
    Examples
    --------
    >>> mahalanobis(x=3., mean=3.5, cov=4.**2) # univariate case
    0.125
    >>> mahalanobis(x=3., mean=6, cov=1) # univariate, 3 std away
    3.0
    >>> mahalanobis([1., 2], [1.1, 3.5], [[1., .1],[.1, 13]])
    0.42533327058913922
    """

    _x = scipy_dist._validate_vector(x)
    _mean = scipy_dist._validate_vector(mean)

    if _x.shape != _mean.shape:
        raise ValueError("length of input vectors must be the same")

    y = _x - _mean
    S = np.atleast_2d(cov)

    dist = float(np.dot(np.dot(y.T, np.linalg.inv(S)), y))
    return sqrt(dist)
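
As a cross-check, the public `scipy.spatial.distance.mahalanobis` (which takes the *inverse* covariance) should agree with the multivariate doctest above; a minimal sketch:

import numpy as np
from scipy.spatial.distance import mahalanobis as scipy_mahalanobis

cov = np.array([[1., .1], [.1, 13.]])
print(scipy_mahalanobis([1., 2.], [1.1, 3.5], np.linalg.inv(cov)))  # ~0.4253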
Example No. 4
import numpy as np
from scipy.spatial.distance import _validate_vector, euclidean


def seuclidean(u, v, V):
    """Standardized Euclidean distance between 1-D arrays `u` and `v`.

    `V` is a 1-D array of component variances. NaN entries in `u` and `v`
    are masked out by `_validate_and_mask` (see Examples No. 7 and 8).
    """
    u = _validate_and_mask(u)
    v = _validate_and_mask(v)
    V = _validate_vector(V, dtype=np.float64)
    if V.shape[0] != u.shape[0] or u.shape[0] != v.shape[0]:
        raise TypeError('V must be a 1-D array of the same dimension '
                        'as u and v.')
    return euclidean(u, v, w=1/V)
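
A minimal usage sketch (values chosen for illustration; assumes the helpers from Examples No. 7 and 8 are defined alongside):

u = [1.0, 2.0, 3.0]
v = [2.0, 2.0, 5.0]
V = [1.0, 4.0, 4.0]         # per-component variances
print(seuclidean(u, v, V))  # sqrt(1/1 + 0/4 + 4/4) = sqrt(2)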
Example No. 5
from numpy.linalg import norm
from scipy.spatial.distance import _validate_vector


def wt_euclidean(u, v, w):
    """Weighted Euclidean distance: component differences are scaled by `w`."""
    u = _validate_vector(u)
    v = _validate_vector(v)
    dist = norm(w * (u - v))
    return dist
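
A minimal usage sketch (weights chosen for illustration). Because the weights are applied before taking the norm, this convention effectively squares them, unlike `scipy.spatial.distance.euclidean`, which weights the squared differences:

w = [2.0, 1.0, 1.0]
print(wt_euclidean([0., 0., 0.], [1., 1., 1.], w))  # norm([-2, -1, -1]) = sqrt(6)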
Example No. 6
import numpy as np
from scipy.spatial.distance import _validate_vector


def _validate_weights(w, dtype=np.double):
    """Validate a weight vector and reject negative entries."""
    w = _validate_vector(w, dtype=dtype)
    if np.any(w < 0):
        raise ValueError("Input weights should be all non-negative")
    return w
Example No. 7
def _validate_and_mask(x, **kwargs):
    """Validate `x` and mask any NaN entries (see `_mask_vector` below)."""
    return _mask_vector(_validate_vector(x, **kwargs))
Example No. 8
import numpy as np
import numpy.ma as ma
from scipy.spatial.distance import _validate_vector

def _mask_vector(x):
    """Return `x` as a masked array if it contains NaNs, otherwise as-is."""
    x = _validate_vector(x)
    if np.isnan(x).any():
        return ma.array(x, mask=np.isnan(x))
    return x
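
A minimal usage sketch (array values are illustrative):

print(_mask_vector([1.0, 2.0, 3.0]))      # no NaNs: plain ndarray [1. 2. 3.]
print(_mask_vector([1.0, np.nan, 3.0]))   # NaN masked out: [1.0 -- 3.0]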