def mds_decomp(self, noise=False):
    """Classical multidimensional scaling of the distance matrix.

    Double-centres the (optionally noise-perturbed) distance matrix and
    returns its eigendecomposition.

    :param noise: passed through to ``self.get_dm`` — presumably toggles
        noise added to the distance matrix (confirm against ``get_dm``).
    :return: result of ``eigen`` applied to the double-centred matrix.
    """
    distances = self.get_dm(noise)
    centred = double_centre(distances)
    return eigen(centred)
def spectral_decomp(self, prune=None, local_scale=None, noise=False,
                    verbosity=0, logic='or', **kwargs):
    """Spectral decomposition of the graph Laplacian of the distance matrix.

    Use prune to remove links between distant points:
        prune is None: no pruning
        prune={int > 0}: prunes links beyond `prune` nearest neighbours
        prune='estimate': searches for the smallest value that retains a
            fully connected graph

    :param local_scale: None (use estimated scale), 'median' (outer product
        of row medians), or an int k (k-nearest-neighbour scale via kscale).
    :param noise: passed through to ``self.get_dm``.
    :param verbosity: > 0 prints the chosen parameters and intermediates.
    :param logic: combination rule forwarded to binsearch_mask / kmask.
    :param kwargs: forwarded to ``laplace``.
    :return: eigendecomposition of the Laplacian (vectors are in columns).
    :raises ValueError: if ``prune`` is not None, a positive int, or 'estimate'.
    """
    matrix = self.get_dm(noise)
    # Smallest k that keeps the graph fully connected; also gives a
    # local scale estimate to fall back on.
    kp, mask, est_scale = binsearch_mask(matrix, logic=logic)
    ks = kp  # ks and kp are the scaling and pruning parameters
    est_ks = ks

    # ADJUST MASK
    if prune is None:
        # No pruning: keep every link.
        kp = len(matrix) - 1
        mask = np.ones(matrix.shape, dtype=bool)
    elif isinstance(prune, int) and prune > 0:
        kp = prune
        mask = kmask(matrix, prune, logic=logic)
    elif prune != 'estimate':
        raise ValueError(
            "'prune' should be None, a positive integer value, or 'estimate', not {}"
            .format(prune))

    # ADJUST SCALE
    if local_scale is not None:
        if local_scale == 'median':
            ks = 'median'
            dist = np.median(matrix, axis=1)
            scale = np.outer(dist, dist)
        elif isinstance(local_scale, int):
            ks = local_scale
            scale = kscale(matrix, local_scale)
        else:
            scale = est_scale
    else:
        scale = est_scale

    # ZeroDivisionError safety check: if any scale entry is ~0, fall back
    # to the estimated scale/parameters from binsearch_mask.
    if not (scale > 1e-5).all():
        if verbosity > 0:
            print('Rescaling to avoid zero-div error')
        scale = est_scale
        ks = est_ks
    assert (scale > 1e-5).all()

    aff = affinity(matrix, mask, scale)
    laplacian = laplace(aff, **kwargs)
    if verbosity > 0:
        print('Pruning parameter: {0}'.format(kp))
        print('Scaling parameter: {0}'.format(ks))
        print('Mask, scale, affinity matrix and laplacian:')
        print(mask)
        print(scale)
        print(aff)
        # BUG FIX: previously printed `laplace` (the function object)
        # instead of the computed Laplacian matrix.
        print(laplacian)
    return eigen(laplacian)  # vectors are in columns
def spectral_decomp(self, prune=None, local_scale=None, noise=False,
                    verbosity=0, logic='or', **kwargs):
    """Spectral decomposition of the graph Laplacian of the distance matrix.

    NOTE(review): this is a near-verbatim duplicate of an earlier
    ``spectral_decomp`` definition in this file and shadows it at class
    creation time — one of the two should be removed.

    Use prune to remove links between distant points:
        prune is None: no pruning
        prune={int > 0}: prunes links beyond `prune` nearest neighbours
        prune='estimate': searches for the smallest value that retains a
            fully connected graph

    :param local_scale: None (use estimated scale), 'median' (outer product
        of row medians), or an int k (k-nearest-neighbour scale via kscale).
    :param noise: passed through to ``self.get_dm``.
    :param verbosity: > 0 prints the chosen parameters and intermediates.
    :param logic: combination rule forwarded to binsearch_mask / kmask.
    :param kwargs: forwarded to ``laplace``.
    :return: eigendecomposition of the Laplacian (vectors are in columns).
    :raises ValueError: if ``prune`` is not None, a positive int, or 'estimate'.
    """
    matrix = self.get_dm(noise)
    # Smallest k that keeps the graph fully connected; also gives a
    # local scale estimate to fall back on.
    kp, mask, est_scale = binsearch_mask(matrix, logic=logic)
    ks = kp  # ks and kp are the scaling and pruning parameters
    est_ks = ks

    # ADJUST MASK
    if prune is None:
        # No pruning: keep every link.
        kp = len(matrix) - 1
        mask = np.ones(matrix.shape, dtype=bool)
    elif isinstance(prune, int) and prune > 0:
        kp = prune
        mask = kmask(matrix, prune, logic=logic)
    elif prune != 'estimate':
        raise ValueError(
            "'prune' should be None, a positive integer value, or 'estimate', not {}"
            .format(prune))

    # ADJUST SCALE
    if local_scale is not None:
        if local_scale == 'median':
            ks = 'median'
            dist = np.median(matrix, axis=1)
            scale = np.outer(dist, dist)
        elif isinstance(local_scale, int):
            ks = local_scale
            scale = kscale(matrix, local_scale)
        else:
            scale = est_scale
    else:
        scale = est_scale

    # ZeroDivisionError safety check: if any scale entry is ~0, fall back
    # to the estimated scale/parameters from binsearch_mask.
    if not (scale > 1e-5).all():
        if verbosity > 0:
            print('Rescaling to avoid zero-div error')
        scale = est_scale
        ks = est_ks
    assert (scale > 1e-5).all()

    aff = affinity(matrix, mask, scale)
    laplacian = laplace(aff, **kwargs)
    if verbosity > 0:
        print('Pruning parameter: {0}'.format(kp))
        print('Scaling parameter: {0}'.format(ks))
        print('Mask, scale, affinity matrix and laplacian:')
        print(mask)
        print(scale)
        print(aff)
        # BUG FIX: previously printed `laplace` (the function object)
        # instead of the computed Laplacian matrix.
        print(laplacian)
    return eigen(laplacian)  # vectors are in columns