def __init__(self, resolution: float = 1, modularity: str = 'dugue', tol_optimization: float = 1e-3,
             tol_aggregation: float = 1e-3, n_aggregations: int = -1, shuffle_nodes: bool = False,
             sort_clusters: bool = True, return_membership: bool = True, return_aggregate: bool = True,
             random_state: Optional[Union[np.random.RandomState, int]] = None, verbose: bool = False):
    super(Louvain, self).__init__(sort_clusters=sort_clusters, return_membership=return_membership,
                                  return_aggregate=return_aggregate)
    VerboseMixin.__init__(self, verbose)
    self.resolution = np.float32(resolution)
    self.modularity = modularity
    self.tol = np.float32(tol_optimization)
    self.tol_aggregation = tol_aggregation
    self.n_aggregations = n_aggregations
    self.shuffle_nodes = shuffle_nodes
    self.random_state = check_random_state(random_state)
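# Minimal usage sketch for the constructor above. It assumes this is
# scikit-network's Louvain (sknetwork.clustering.Louvain) and uses the
# bundled karate_club toy graph; those imports are assumptions, only the
# parameter names come from the signature above.
import numpy as np
from sknetwork.clustering import Louvain
from sknetwork.data import karate_club

adjacency = karate_club()                      # CSR adjacency of a small undirected graph
louvain = Louvain(resolution=1, tol_optimization=1e-3, random_state=42)
labels = louvain.fit_transform(adjacency)      # one cluster label per node
print(np.unique(labels))                       # cluster indices found at resolution 1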
def __init__(self, adjacency: sparse.csr_matrix, damping_factor: float = 0.85, personalization=None,
             fb_mode: bool = False, verbose: bool = False):
    VerboseMixin.__init__(self, verbose)
    n1, n2 = adjacency.shape
    restart_prob: np.ndarray = restart_probability(n1, personalization)
    if fb_mode:
        # forward-backward mode: treat the bipartite graph as undirected and pad the restart vector
        restart_prob = np.hstack((restart_prob, np.zeros(n2)))
        adjacency = bipartite2undirected(adjacency)
    LinearOperator.__init__(self, shape=adjacency.shape, dtype=float)
    n = adjacency.shape[0]
    out_degrees = adjacency.dot(np.ones(n))
    damping_matrix = damping_factor * sparse.eye(n, format='csr')
    if fb_mode:
        # the second block of nodes added by the bipartite construction keeps a factor of 1 (no restart from them)
        damping_matrix.data[n1:] = 1
    self.a = (damping_matrix.dot(transition_matrix(adjacency))).T.tocsr()
    self.b = (np.ones(n) - damping_factor * out_degrees.astype(bool)) * restart_prob
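# The operator above encodes the damped random walk with restart that PageRank
# iterates on. The function below is a self-contained illustration of that
# fixed-point computation by plain power iteration with a uniform restart;
# it is a sketch of the standard formulation, not the library's solver, and
# handles dangling nodes explicitly.
import numpy as np
from scipy import sparse

def pagerank_power(adjacency: sparse.csr_matrix, damping_factor: float = 0.85,
                   n_iter: int = 100) -> np.ndarray:
    n = adjacency.shape[0]
    out_degrees = np.asarray(adjacency.sum(axis=1)).ravel()
    # row-stochastic transition matrix; dangling nodes keep a zero row here
    transition = sparse.diags(np.where(out_degrees > 0, 1 / np.maximum(out_degrees, 1), 0)).dot(adjacency)
    restart = np.ones(n) / n
    scores = restart.copy()
    for _ in range(n_iter):
        dangling_mass = scores[out_degrees == 0].sum()
        scores = damping_factor * (transition.T.dot(scores) + dangling_mass * restart) \
                 + (1 - damping_factor) * restart
    return scores / scores.sum()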
def __init__(self, algorithm: BaseRanking, n_jobs: Optional[int] = None, verbose: bool = False):
    super(RankClassifier, self).__init__()
    VerboseMixin.__init__(self, verbose)
    self.algorithm = algorithm
    self.n_jobs = check_n_jobs(n_jobs)
    self.verbose = verbose
def __init__(self, verbose: bool = False, n_iter: int = 0, n_jobs: Optional[int] = None):
    super(MultiDiff, self).__init__()
    VerboseMixin.__init__(self, verbose)
    self.verbose = verbose
    self.n_iter = n_iter
    self.n_jobs = check_n_jobs(n_jobs)
def __init__(self, n_iter: int = 10, damping_factor: Optional[float] = None, verbose: bool = False):
    super(Dirichlet, self).__init__()
    VerboseMixin.__init__(self, verbose)
    self.n_iter = n_iter
    if damping_factor is None:
        damping_factor = 1.
    check_is_proba(damping_factor, 'Damping factor')
    self.damping_factor = damping_factor
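# Illustration of what the two parameters above control in a Dirichlet-style
# ranking: seed nodes keep a fixed temperature while every other node moves
# toward the mean temperature of its neighbors, damped by damping_factor,
# for n_iter steps. A self-contained sketch under those assumptions, not the
# library's fit method.
import numpy as np
from scipy import sparse

def dirichlet_scores(adjacency: sparse.csr_matrix, seeds: dict,
                     n_iter: int = 10, damping_factor: float = 1.) -> np.ndarray:
    n = adjacency.shape[0]
    degrees = np.asarray(adjacency.sum(axis=1)).ravel()
    averaging = sparse.diags(1 / np.maximum(degrees, 1)).dot(adjacency)
    seed_index = np.array(list(seeds.keys()))
    seed_values = np.array(list(seeds.values()), dtype=float)
    scores = np.zeros(n)
    scores[seed_index] = seed_values
    for _ in range(n_iter):
        scores = damping_factor * averaging.dot(scores) + (1 - damping_factor) * scores
        scores[seed_index] = seed_values          # boundary condition stays fixed
    return scores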
def __init__(self, engine: str = 'default', algorithm: Union[str, Optimizer] = 'default', resolution: float = 1,
             tol: float = 1e-3, agg_tol: float = 1e-3, max_agg_iter: int = -1, shuffle_nodes: bool = False,
             sorted_cluster: bool = True, random_state: Optional[Union[np.random.RandomState, int]] = None,
             verbose: bool = False):
    super(Louvain, self).__init__()
    VerboseMixin.__init__(self, verbose)
    self.random_state = check_random_state(random_state)
    if algorithm == 'default':
        self.algorithm = GreedyModularity(resolution, tol, engine=check_engine(engine))
    elif isinstance(algorithm, Optimizer):
        self.algorithm = algorithm
    else:
        raise TypeError("Algorithm must be 'default' or a valid Optimizer.")
    if not isinstance(max_agg_iter, int):
        raise TypeError('The maximum number of aggregation iterations must be an integer.')
    self.agg_tol = agg_tol
    self.max_agg_iter = max_agg_iter
    self.shuffle_nodes = shuffle_nodes
    self.sorted_cluster = sorted_cluster
    self.iteration_count_ = None
    self.aggregate_graph_ = None
def test_prints(self):
    verbose = VerboseMixin(verbose=True)
    verbose.log.print('There are', 4, 'seasons in a year')
    self.assertEqual(str(verbose.log), 'There are 4 seasons in a year\n')
def __init__(self, n_iter: int = 10, verbose: bool = False):
    super(Diffusion, self).__init__()
    VerboseMixin.__init__(self, verbose)
    self.n_iter = n_iter
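# Companion sketch to the Dirichlet illustration above. A plain diffusion lets
# the seed temperatures spread for n_iter averaging steps without re-imposing
# them at each step, which is consistent with this constructor exposing only
# n_iter; the unclamped variant shown here is an assumption for illustration,
# not the library's implementation.
import numpy as np
from scipy import sparse

def diffusion_scores(adjacency: sparse.csr_matrix, seeds: dict, n_iter: int = 10) -> np.ndarray:
    n = adjacency.shape[0]
    degrees = np.asarray(adjacency.sum(axis=1)).ravel()
    averaging = sparse.diags(1 / np.maximum(degrees, 1)).dot(adjacency)
    scores = np.zeros(n)
    scores[np.array(list(seeds.keys()))] = np.array(list(seeds.values()), dtype=float)
    for _ in range(n_iter):
        scores = averaging.dot(scores)            # heat spreads; seeds are not clamped
    return scores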