def community_pooling(cluster, data):
    """Pool all members of each cluster into a single node.

    Each pooled node is assigned:
      - the max over the cluster members for every feature (``scatter_max``)
      - the mean position of the cluster members (``scatter_mean``)

    Args:
        cluster (Tensor): cluster index of each node.
        data (Data or Batch): graph (or batch of graphs) to pool; may carry
            optional ``pos``, ``pos2D``, ``internal_edge_index``/``_attr``
            and ``cluster0``/``cluster1`` attributes.

    Returns:
        Data or Batch: the pooled graph(s), preserving whichever optional
        attributes the input carried.
    """
    # determine which optional attributes the input carries
    has_internal_edges = hasattr(data, 'internal_edge_index')
    has_pos2D = hasattr(data, 'pos2D')
    has_pos = hasattr(data, 'pos')
    has_cluster = hasattr(data, 'cluster0')

    cluster, perm = consecutive_cluster(cluster)
    cluster = cluster.to(data.x.device)

    # pool the node features (max over each cluster)
    x, _ = scatter_max(data.x, cluster, dim=0)

    # pool the edges
    edge_index, edge_attr = pool_edge(cluster, data.edge_index, data.edge_attr)

    # pool internal edges if necessary
    if has_internal_edges:
        internal_edge_index, internal_edge_attr = pool_edge(
            cluster, data.internal_edge_index, data.internal_edge_attr)

    # pool the positions (mean over each cluster)
    # BUGFIX: default to None so the Batch/Data constructors below do not
    # raise NameError when the input has no 'pos' attribute
    pos = None
    if has_pos:
        pos = scatter_mean(data.pos, cluster, dim=0)
    if has_pos2D:
        pos2D = scatter_mean(data.pos2D, cluster, dim=0)

    if has_cluster:
        c0, c1 = data.cluster0, data.cluster1

    # pool batch
    if hasattr(data, 'batch'):
        batch = None if data.batch is None else pool_batch(perm, data.batch)
        data = Batch(batch=batch, x=x, edge_index=edge_index,
                     edge_attr=edge_attr, pos=pos)
        if has_internal_edges:
            data.internal_edge_index = internal_edge_index
            data.internal_edge_attr = internal_edge_attr
        # BUGFIX: re-attach pos2D in the batch branch too (was only done in
        # the non-batch branch, silently dropping the attribute for batches)
        if has_pos2D:
            data.pos2D = pos2D
        if has_cluster:
            data.cluster0 = c0
            data.cluster1 = c1
    else:
        data = Data(x=x, edge_index=edge_index, edge_attr=edge_attr, pos=pos)
        if has_internal_edges:
            data.internal_edge_index = internal_edge_index
            data.internal_edge_attr = internal_edge_attr
        if has_pos2D:
            data.pos2D = pos2D
        if has_cluster:
            data.cluster0 = c0
            data.cluster1 = c1

    return data
def community_pooling(cluster, data):
    """Pools features and edges of all cluster members.

    All cluster members are pooled into a single node that is assigned:
      - the max cluster value for each feature
      - the average cluster nodes position

    Args:
        cluster (Tensor): cluster index of each node.
        data (Data or Batch): graph (or batch of graphs) to pool.

    Returns:
        Data or Batch: pooled graph(s).
    """
    # determine which optional attributes the input carries
    has_internal_edges = hasattr(data, 'internal_edge_index')
    has_pos2D = hasattr(data, 'pos2D')
    has_pos = hasattr(data, 'pos')
    has_cluster = hasattr(data, 'cluster0')

    cluster, perm = consecutive_cluster(cluster)
    cluster = cluster.to(data.x.device)

    # pool the node features (max over each cluster)
    x, _ = scatter_max(data.x, cluster, dim=0)

    # pool the edges
    edge_index, edge_attr = pool_edge(cluster, data.edge_index, data.edge_attr)

    # pool internal edges if necessary
    if has_internal_edges:
        internal_edge_index, internal_edge_attr = pool_edge(
            cluster, data.internal_edge_index, data.internal_edge_attr)

    # pool the positions (mean over each cluster)
    # BUGFIX: default to None so the Batch/Data constructors below do not
    # raise NameError when the input has no 'pos' attribute
    pos = None
    if has_pos:
        pos = scatter_mean(data.pos, cluster, dim=0)
    if has_pos2D:
        pos2D = scatter_mean(data.pos2D, cluster, dim=0)

    if has_cluster:
        c0, c1 = data.cluster0, data.cluster1

    # pool batch
    if hasattr(data, 'batch'):
        batch = None if data.batch is None else pool_batch(perm, data.batch)
        data = Batch(batch=batch, x=x, edge_index=edge_index,
                     edge_attr=edge_attr, pos=pos)
        if has_internal_edges:
            data.internal_edge_index = internal_edge_index
            data.internal_edge_attr = internal_edge_attr
        # BUGFIX: re-attach pos2D in the batch branch too (was only done in
        # the non-batch branch, silently dropping the attribute for batches)
        if has_pos2D:
            data.pos2D = pos2D
        if has_cluster:
            data.cluster0 = c0
            data.cluster1 = c1
    else:
        data = Data(x=x, edge_index=edge_index, edge_attr=edge_attr, pos=pos)
        if has_internal_edges:
            data.internal_edge_index = internal_edge_index
            data.internal_edge_attr = internal_edge_attr
        if has_pos2D:
            data.pos2D = pos2D
        if has_cluster:
            data.cluster0 = c0
            data.cluster1 = c1

    return data
def community_pooling(cluster, data):
    """Pools features and edges of all cluster members.

    All cluster members are pooled into a single node that is assigned:
      - the max cluster value for each feature
      - the average cluster nodes position

    Args:
        cluster (Tensor): cluster index of each node.
        data (Data or Batch): graph (or batch of graphs) to pool.

    Returns:
        Data or Batch: pooled graph(s).

    Example:
        >>> import torch
        >>> from torch_geometric.data import Data, Batch
        >>> edge_index = torch.tensor([[0, 1, 1, 2, 3, 4, 4, 5],
        ...                            [1, 0, 2, 1, 4, 3, 5, 4]],
        ...                           dtype=torch.long)
        >>> x = torch.tensor([[0], [1], [2], [3], [4], [5]],
        ...                  dtype=torch.float)
        >>> data = Data(x=x, edge_index=edge_index)
        >>> data.pos = torch.tensor(np.random.rand(data.num_nodes, 3))
        >>> c = community_detection(data.edge_index, data.num_nodes)
        >>> batch = Batch().from_data_list([data, data])
        >>> cluster = community_detection(batch.edge_index, batch.num_nodes)
        >>> new_batch = community_pooling(cluster, batch)
    """
    # determine which optional attributes the input carries
    has_internal_edges = hasattr(data, 'internal_edge_index')
    has_pos2D = hasattr(data, 'pos2D')
    has_pos = hasattr(data, 'pos')
    has_cluster = hasattr(data, 'cluster0')

    cluster, perm = consecutive_cluster(cluster)
    cluster = cluster.to(data.x.device)

    # pool the node features (max over each cluster)
    x, _ = scatter_max(data.x, cluster, dim=0)

    # pool the edges
    edge_index, edge_attr = pool_edge(cluster, data.edge_index, data.edge_attr)

    # pool internal edges if necessary
    if has_internal_edges:
        internal_edge_index, internal_edge_attr = pool_edge(
            cluster, data.internal_edge_index, data.internal_edge_attr)

    # pool the positions (mean over each cluster)
    # BUGFIX: default to None so the Batch/Data constructors below do not
    # raise NameError when the input has no 'pos' attribute
    pos = None
    if has_pos:
        pos = scatter_mean(data.pos, cluster, dim=0)
    if has_pos2D:
        pos2D = scatter_mean(data.pos2D, cluster, dim=0)

    if has_cluster:
        c0, c1 = data.cluster0, data.cluster1

    # pool batch
    if hasattr(data, 'batch'):
        batch = None if data.batch is None else pool_batch(perm, data.batch)
        data = Batch(batch=batch, x=x, edge_index=edge_index,
                     edge_attr=edge_attr, pos=pos)
        if has_internal_edges:
            data.internal_edge_index = internal_edge_index
            data.internal_edge_attr = internal_edge_attr
        # BUGFIX: re-attach pos2D in the batch branch too (was only done in
        # the non-batch branch, silently dropping the attribute for batches)
        if has_pos2D:
            data.pos2D = pos2D
        if has_cluster:
            data.cluster0 = c0
            data.cluster1 = c1
    else:
        data = Data(x=x, edge_index=edge_index, edge_attr=edge_attr, pos=pos)
        if has_internal_edges:
            data.internal_edge_index = internal_edge_index
            data.internal_edge_attr = internal_edge_attr
        if has_pos2D:
            data.pos2D = pos2D
        if has_cluster:
            data.cluster0 = c0
            data.cluster1 = c1

    return data