import numpy as np
import tensorflow as tf
from sklearn.model_selection import train_test_split

from kgcnn.data.datasets.cora import CoraDataset
from kgcnn.literature.GCN import make_gcn
from kgcnn.utils.adj import precompute_adjacency_scaled, convert_scaled_adjacency_to_list, make_adjacency_undirected_logical_or
from kgcnn.utils.data import ragged_tensor_from_nested_numpy
from kgcnn.utils.learning import lr_lin_reduction

# Download and load Dataset
dataset = CoraDataset()
A_data, X_data, y_data = dataset.get_graph()
# Make node features dense
nodes = X_data.todense()
# Precompute scaled and undirected (symmetric) adjacency matrix
A_scaled = precompute_adjacency_scaled(
    make_adjacency_undirected_logical_or(A_data))
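# For reference, the "scaled" adjacency used by GCN is the symmetric
# normalization D^-1/2 (A + I) D^-1/2 from Kipf & Welling; the helper above is
# assumed to compute an equivalent matrix. A tiny standalone numpy sketch of
# that normalization on a 3-node path graph:
_a = np.array([[0., 1., 0.],
               [1., 0., 1.],
               [0., 1., 0.]])
_a_tilde = _a + np.eye(3)                                   # add self-loops
_d_inv_sqrt = np.diag(1.0 / np.sqrt(_a_tilde.sum(axis=1)))  # D^-1/2
_a_scaled_demo = _d_inv_sqrt @ _a_tilde @ _d_inv_sqrt       # symmetric normalization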
# Use edge_indices and weights instead of adj_matrix
edge_index, edge_weight = convert_scaled_adjacency_to_list(A_scaled)
edge_weight = np.expand_dims(edge_weight, axis=-1)
# Change labels to one-hot-encoding
labels = np.expand_dims(y_data, axis=-1)
labels = np.array(labels == np.arange(70), dtype="float32")
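# The line above one-hot encodes by broadcasting: each label in the (N, 1)
# column is compared against the class range 0..69, giving an (N, 70) 0/1
# matrix. Tiny standalone check of the same trick with 3 classes:
_demo = np.array(np.array([[1], [0]]) == np.arange(3), dtype="float32")
assert np.array_equal(_demo, np.array([[0., 1., 0.], [1., 0., 0.]], dtype="float32"))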

# Make test/train split
# Since only one graph in the dataset
# Use a mask to hide test nodes labels
inds = np.arange(len(y_data))
ind_train, ind_val = train_test_split(inds, test_size=0.10, random_state=42)
val_mask = np.zeros_like(y_data)
train_mask = np.zeros_like(y_data)
val_mask[ind_val] = 1
train_mask[ind_train] = 1
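# A hedged sketch of how this example typically continues: pack the single
# graph into ragged tensors and train with the masks as Keras sample weights.
# The input order [nodes, edge_weights, edge_indices], the make_gcn
# hyperparameters and the lr_lin_reduction arguments are version-specific
# assumptions, so the model calls are only indicated in comments.
xtrain = [
    ragged_tensor_from_nested_numpy([nodes]),        # node features, batch of one graph
    ragged_tensor_from_nested_numpy([edge_weight]),  # edge weights from the scaled adjacency
    ragged_tensor_from_nested_numpy([edge_index]),   # edge index list
]
ytrain = np.expand_dims(labels, axis=0)              # labels with a batch dimension
train_mask_batch = np.expand_dims(train_mask, axis=0)
val_mask_batch = np.expand_dims(val_mask, axis=0)
# model = make_gcn(...)  # build the GCN for this input shape
# model.compile(loss="categorical_crossentropy",
#               optimizer=tf.keras.optimizers.Adam(1e-3),
#               weighted_metrics=["categorical_accuracy"])
# model.fit(xtrain, ytrain, sample_weight=train_mask_batch, batch_size=1,
#           epochs=300, validation_data=(xtrain, ytrain, val_mask_batch))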

# Example 2

# Imports assumed from the other examples on this page (kgcnn paths are version-specific).
import numpy as np
from sklearn.model_selection import train_test_split

from kgcnn.data.cora.cora import cora_graph
from kgcnn.utils.adj import precompute_adjacency_scaled, convert_scaled_adjacency_to_list, make_adjacency_undirected_logical_or, sort_edge_indices, make_adjacency_from_edge_indices


# Map label to class
def get_label_name(label):
    return [
        "Case_Based", "Genetic_Algorithms", "Neural_Networks",
        "Probabilistic_Methods", "Reinforcement_Learning", "Rule_Learning",
        "Theory"
    ][label]
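
# Quick usage check: label index 2 corresponds to the "Neural_Networks" class.
assert get_label_name(2) == "Neural_Networks"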


nodes, edge_index, labels, class_label_mapping = cora_graph()
nodes = nodes[:, 1:]  # Remove IDs
edge_index = sort_edge_indices(edge_index)
adj_matrix = make_adjacency_from_edge_indices(edge_index)
adj_matrix = precompute_adjacency_scaled(
    make_adjacency_undirected_logical_or(adj_matrix))
edge_index, edge_weight = convert_scaled_adjacency_to_list(adj_matrix)
edge_weight = np.expand_dims(edge_weight, axis=-1)
labels = np.expand_dims(labels, axis=-1)
labels = np.array(labels == np.arange(7), dtype="float32")

# Make test/train split
# Since only one graph in the dataset
# Use a mask to hide test nodes labels
inds = np.arange(len(labels))
ind_train, ind_val = train_test_split(inds, test_size=0.10, random_state=0)
val_mask = np.zeros_like(inds)
train_mask = np.zeros_like(inds)
val_mask[ind_val] = 1
train_mask[ind_train] = 1
val_mask = np.expand_dims(val_mask, axis=0)  # One graph in batch
train_mask = np.expand_dims(train_mask, axis=0)  # One graph in batch

# Example 3
import numpy as np
import tensorflow as tf
from sklearn.model_selection import train_test_split

from kgcnn.data.cora.cora import cora_graph
from kgcnn.literature.GCN import getmodelGCN
from kgcnn.utils.adj import precompute_adjacency_scaled, scaled_adjacency_to_list, make_undirected
from kgcnn.utils.data import ragged_tensor_from_nested_numpy
from kgcnn.utils.learning import lr_lin_reduction

# Download and load Dataset
A_data, X_data, y_data = cora_graph()
# Make node features dense
nodes = X_data.todense()
# Precompute scaled and undirected (symmetric) adjacency matrix
A_scaled = precompute_adjacency_scaled(make_undirected(A_data))
# Use indices and weights instead of A
edge_index, edge_weight = scaled_adjacency_to_list(A_scaled)
edge_weight = np.expand_dims(edge_weight, axis=-1)
# Change labels to one-hot-encoding
labels = np.expand_dims(y_data, axis=-1)
labels = np.array(labels == np.arange(70), dtype="float32")

# Make test/train split
# Since only one graph in the dataset
# Use a mask to hide test nodes labels
inds = np.arange(len(y_data))
ind_train, ind_val = train_test_split(inds, test_size=0.10, random_state=42)
val_mask = np.zeros_like(y_data)
train_mask = np.zeros_like(y_data)
val_mask[ind_val] = 1
train_mask[ind_train] = 1
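# The snippet ends before model construction and training. As a hedged
# stand-in for kgcnn's lr_lin_reduction (whose exact signature is
# version-specific), a plain Keras LearningRateScheduler with a hand-written
# linear decay can be wired up like this:
LR_START, LR_STOP, EPOCHS = 1e-3, 1e-5, 300

def linear_lr(epoch, lr=None):
    # Linearly interpolate the learning rate from LR_START to LR_STOP.
    return LR_START + (LR_STOP - LR_START) * min(epoch, EPOCHS) / EPOCHS

lr_callback = tf.keras.callbacks.LearningRateScheduler(linear_lr)
# lr_callback is then passed to model.fit(..., callbacks=[lr_callback]) for the
# GCN built with getmodelGCN, together with the masks as sample weights.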