Example no. 1
import shutil

from spektral.utils import logging
from spektral.models import GCN  # import path assumed; adjust to the installed Spektral version


def test_logging_functions():
    # Exercise the logging utilities: directory creation, message logging, tic/toc timing.
    log_dir = logging.init_logging(name="test")
    logging.log("test")
    logging.tic(message="test")
    logging.toc(message="test")

    # Build a small model only to exercise model_to_str.
    model = GCN(1)
    model.build([(10, 2), (10, 10)])
    logging.model_to_str(model)

    # Remove the log directory created by init_logging.
    shutil.rmtree(log_dir)
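
The tic/toc pair above appears to act as a MATLAB-style timer, with tic starting
the clock and toc logging the elapsed time; a minimal, self-contained usage
sketch under that assumption:

import time

from spektral.utils import logging

log_dir = logging.init_logging(name="timing-demo")
logging.tic(message="Long-running step")
time.sleep(1)  # stand-in for the work being timed
logging.toc(message="Long-running step")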

# Imports for the names used below (Spektral 0.x API; the Keras import paths
# may differ depending on the installed version).
from sklearn.model_selection import train_test_split
from tensorflow.keras.layers import Input
from tensorflow.keras.regularizers import l2

from spektral.datasets import delaunay
from spektral.layers import GraphConv
from spektral.utils import localpooling_filter
from spektral.utils.logging import init_logging

# Load data
adj, x, y = delaunay.generate_data(return_type='numpy', classes=[0, 5])

# Parameters
N = x.shape[-2]  # Number of nodes in the graphs
F = x.shape[-1]  # Original feature dimensionality
n_classes = y.shape[-1]  # Number of classes
l2_reg = 5e-4  # L2 regularization rate
learning_rate = 1e-3  # Learning rate for Adam
epochs = 200  # Number of training epochs
batch_size = 32  # Batch size
es_patience = 10  # Patience for early stopping
log_dir = init_logging()  # Create log directory and file

# Preprocessing
fltr = localpooling_filter(adj.copy())
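
# For reference, localpooling_filter computes the symmetrically normalized
# adjacency with self-loops used by GCN-style convolutions:
#     A_hat = D^{-1/2} (A + I) D^{-1/2}
# A minimal NumPy sketch of the same operation (illustrative only; the helper
# name normalized_adjacency is not part of Spektral):
import numpy as np

def normalized_adjacency(a):
    # Add self-loops, then symmetrically normalize. Works on a single (N, N)
    # matrix or on a batch of shape (batch, N, N).
    a_tilde = a + np.eye(a.shape[-1])
    d_inv_sqrt = np.power(a_tilde.sum(axis=-1), -0.5)
    return a_tilde * d_inv_sqrt[..., None] * d_inv_sqrt[..., None, :]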

# Train/test split
fltr_train, fltr_test, x_train, x_test, y_train, y_test = train_test_split(
    fltr, x, y, test_size=0.1)

# Model definition
X_in = Input(shape=(N, F))
filter_in = Input(shape=(N, N))

gc1 = GraphConv(32, activation='relu',
                kernel_regularizer=l2(l2_reg))([X_in, filter_in])