Exemplo n.º 1
0
Arquivo: gs.py Projeto: akamaus/piq
def witness(
        features: np.ndarray, sample_size: int = 64, gamma: Optional[float] = None) \
        -> Tuple[np.ndarray, np.ndarray]:
    """Compute persistence intervals of a feature set via the witness complex.

    Args:
        features: Array of shape (N_samples, data_dim) holding the dataset.
        sample_size: Number of landmark points to draw.
        gamma: Scale factor for the maximal persistence value. When omitted it
            defaults to ``1.0 / 128 * N_imgs / 5000``.

    Returns:
        The dimension-1 persistence intervals and the maximal persistence value.
    """
    n_points = features.shape[0]
    if gamma is None:
        gamma = 1.0 / 128 * n_points / 5000

    # Draw `sample_size` landmark indices at random (with replacement).
    np.random.seed()
    landmark_idx = np.random.choice(n_points, sample_size)

    distances, max_dist = lmrk_table(
        witnesses=features, landmarks=features[landmark_idx])
    alpha_max = max_dist * gamma

    complex_ = gudhi.WitnessComplex(distances)
    tree = complex_.create_simplex_tree(
        max_alpha_square=alpha_max, limit_dimension=2)

    # persistence() mutates the simplex tree in place; its return value is unused.
    tree.persistence(homology_coeff_field=2)
    return tree.persistence_intervals_in_dimension(1), alpha_max
def pseudo_alpha_beta_witness_complex(n_lm, alpha_beta, n_landmarks):
    """Greedily pair landmarks by sweeping a sequence of (alpha, beta) thresholds.

    For each ``(alpha, beta)`` pair, the nearest-landmark table is filtered at
    ``alpha``, a 1-skeleton witness complex is built with
    ``max_alpha_square=beta``, and any pairings found in its skeleton are
    accumulated.  The sweep stops as soon as every landmark index has been
    paired; if the thresholds run out first, a notice is printed.

    Args:
        n_lm: Nearest-landmark table accepted by ``gudhi.WitnessComplex``.
        alpha_beta: Sequence of ``(alpha, beta)`` threshold pairs, tried in order.
        n_landmarks: Total number of landmark indices to pair off.

    Returns:
        The accumulated list of pairings.
    """
    indices_remaining = list(range(n_landmarks))
    pairings = []

    # for/else replaces the original manual while-loop with an index counter
    # and a dead `else: pass` branch; the `else` fires only when the thresholds
    # are exhausted before every landmark is paired.
    for alpha, beta in alpha_beta:
        # Keep only table entries within the current alpha threshold.
        n_lm_current = filter_alpha(n_lm, alpha)

        witness_complex = gudhi.WitnessComplex(
            nearest_landmark_table=n_lm_current)
        simplex_tree = witness_complex.create_simplex_tree(
            max_alpha_square=beta, limit_dimension=1)
        skeleton = simplex_tree.get_skeleton(1)

        indices_remaining, pairings = get_pairings(skeleton, indices_remaining,
                                                   pairings)
        if not indices_remaining:
            break
    else:
        print('Stopped early')

    return pairings
Exemplo n.º 3
0
def witness(
        features: np.ndarray, sample_size: int = 64, gamma: Optional[float] = None) \
        -> Tuple[np.ndarray, np.ndarray]:
    """Compute the persistence intervals for the dataset of features using the witness complex.

    Args:
        features: Array of shape (N_samples, data_dim) representing the dataset.
        sample_size: Number of landmarks to use on each iteration.
        gamma: Parameter determining maximum persistence value. Default is `1.0 / 128 * N_imgs / 5000`

    Returns:
        A list of persistence intervals and the maximal persistence value.

    Raises:
        ImportError: If GUDHI is not installed.
    """
    # Import gudhi lazily so the module remains importable without it.
    try:
        import gudhi
    except ImportError as e:
        # Native `raise ... from e` (Python 3 exception chaining) replaces the
        # old `six.raise_from` shim, which itself failed with a confusing
        # second ImportError whenever `six` was not installed either.
        raise e.__class__(
            "You are likely missing your GUDHI installation, "
            "you should visit http://gudhi.gforge.inria.fr/python/latest/installation.html "
            "for further instructions.\nIf you use conda, you can use\nconda install -c conda-forge gudhi"
        ) from e

    N = features.shape[0]
    if gamma is None:
        gamma = 1.0 / 128 * N / 5000

    # Randomly sample `sample_size` points from X (reseeds the global RNG first).
    np.random.seed()
    idx = np.random.choice(N, sample_size)
    landmarks = features[idx]

    distances, max_dist = lmrk_table(witnesses=features, landmarks=landmarks)
    wc = gudhi.WitnessComplex(distances)
    alpha_max = max_dist * gamma
    st = wc.create_simplex_tree(max_alpha_square=alpha_max, limit_dimension=2)

    # persistence() mutates the simplex tree in place.
    st.persistence(homology_coeff_field=2)
    intervals = st.persistence_intervals_in_dimension(1)
    return intervals, alpha_max
Exemplo n.º 4
0
Arquivo: gs.py Projeto: aiedward/piq
def witness(features: np.ndarray, sample_size: int = 64, gamma: Optional[float] = None) \
        -> Tuple[np.ndarray, np.ndarray]:
    """Compute the persistence intervals for the dataset of features using the witness complex.

    Args:
        features: Array with shape (N_samples, data_dim) representing the dataset.
        sample_size: Number of landmarks to use on each iteration.
        gamma: Parameter determining maximum persistence value. Default is `1.0 / 128 * N_imgs / 5000`

    Returns:
        A list of persistence intervals and the maximal persistence value.

    Raises:
        ImportError: If GUDHI is not installed.
    """
    try:
        import gudhi
    except ImportError:
        # Bug fix: the original message told users to "install scipy" even
        # though GUDHI is the missing dependency; name the right package.
        raise ImportError("GUDHI is required for computation of the Geometry Score but not installed. "
                          "Please install GUDHI using the following command: pip install --user gudhi")

    # Warn (but do not fail) on older GUDHI releases.
    recommended_gudhi_version = "3.2"
    if _version_tuple(gudhi.__version__) < _version_tuple(recommended_gudhi_version):
        warn(f'GUDHI of version {gudhi.__version__} is used while version >= {recommended_gudhi_version} is '
             f'recommended. Consider updating GUDHI to avoid potential problems.')

    N = features.shape[0]
    if gamma is None:
        gamma = 1.0 / 128 * N / 5000

    # Randomly sample `sample_size` points from X (reseeds the global RNG first).
    np.random.seed()
    idx = np.random.choice(N, sample_size)
    landmarks = features[idx]

    distances, max_dist = lmrk_table(witnesses=features, landmarks=landmarks)
    wc = gudhi.WitnessComplex(distances)
    alpha_max = max_dist * gamma
    st = wc.create_simplex_tree(max_alpha_square=alpha_max, limit_dimension=2)

    # persistence() mutates the simplex tree in place.
    st.persistence(homology_coeff_field=2)
    intervals = st.persistence_intervals_in_dimension(1)
    return intervals, alpha_max
Exemplo n.º 5
0
def witness(X, gamma=1.0 / 128, L_0=64):
    """Compute the persistence intervals for dataset ``X`` using the witness complex.

    Args:
      X: 2d array representing the dataset.
      gamma: parameter determining the maximal persistence value.
      L_0: int, number of landmarks to use.

    Returns:
      A list of persistence intervals and the maximal persistence value.
    """
    L = random_landmarks(X, L_0)
    # Pointless alias `W = X` removed; all points act as witnesses.
    lmrk_tab, max_dist = lmrk_table(X, L)
    wc = gudhi.WitnessComplex(lmrk_tab)
    alpha_max = max_dist * gamma
    st = wc.create_simplex_tree(max_alpha_square=alpha_max, limit_dimension=2)
    # persistence() must run before intervals can be queried; its full diagram
    # (previously bound to an unused local `big_diag`) is not needed here.
    st.persistence(homology_coeff_field=2)
    diag = st.persistence_intervals_in_dimension(1)
    return diag, alpha_max
# NOTE(review): `pers_pairs` is not defined in this chunk — presumably produced
# by an earlier section of the file; confirm before running standalone.
print(pers_pairs)


# Summarize a previously built simplex tree. NOTE(review): this `simplex_tree`
# is read before the reassignment below, so it must come from an earlier
# section — TODO confirm.
result_str = 'Witness complex is of dimension ' + repr(simplex_tree.dimension()) + ' - ' + \
    repr(simplex_tree.num_simplices()) + ' simplices - ' + \
    repr(simplex_tree.num_vertices()) + ' vertices.'
print(result_str)

# Hand-written nearest-landmark table: one row per witness, each entry a
# [landmark_id, distance] pair in increasing-distance order (assumed — verify
# against the gudhi.WitnessComplex input format).
nlt = [
    [[0,1],[3,9],[1,29],[2,34]],
    [[1,1],[0,16],[2,26],[3,39]],
    [[2,4],[1,9],[3,29],[0,34]],
    [[3,4],[2,9],[0,29],[1,34]]
]

# Build a witness complex from the table and collect the edges (2-element
# simplices) of its 1-skeleton whose filtration value is exactly 0.
wcomp = gudhi.WitnessComplex(nlt)
simplex_tree = wcomp.create_simplex_tree(max_alpha_square=50)
skeleton = simplex_tree.get_skeleton(1)
pairs = []
for element in skeleton:
    if len(element[0]) == 2:
        if element[1] == 0:
            pairs.append(element[0])
print('Skeleton W-LIST: {}'.format(skeleton))
print('Pairs W-LIST: {}'.format(pairs))




# NOTE(review): `SwissRoll` is defined elsewhere (not in this chunk); the
# sampler is presumably consumed by later sections — confirm.
dataset_sampler = SwissRoll()