# NOTE(review): the first statement of this chunk is truncated — its left-hand
# side lies before the visible source (it converts numpy arrays to ragged
# tensors for a batch of one graph). Kept verbatim, commented, for reference:
#   ... edge_weight ]), ragged_tensor_from_nested_numpy([edge_index])  # One graph in batch
# TODO(review): restore the full assignment from the original file.

# Set training data. But requires mask and batch-dimension of 1
xtrain = nodes, edges, edge_indices
ytrain = np.expand_dims(labels, axis=0)  # One graph in batch

# Build the GCN for node classification (Cora: 1433 node features, 7 classes).
model = make_gcn(
    input_node_shape=[None, 1433],
    input_edge_shape=[None, 1],
    # Output
    output_embedd={"output_mode": 'node'},
    output_mlp={
        "use_bias": [True, True, False],
        "units": [64, 16, 7],
        "activation": ['relu', 'relu', 'softmax']
    },
    # model specs
    depth=3,
    gcn_args={
        "units": 124,  # NOTE(review): 124 is unusual — confirm not a typo for 128
        "use_bias": True,
        "activation": "relu",
        "has_unconnected": True
    })

# Set learning rate and epochs
learning_rate_start = 1e-3
learning_rate_stop = 1e-4
epo = 300
epomin = 260
epostep = 10
# Targets get an explicit trailing feature axis for binary-crossentropy training.
ytrain = np.expand_dims(labels_train, axis=-1)
ytest = np.expand_dims(labels_test, axis=-1)

# Shared hyper-parameters for both model variants below.
model_args = {"input_node_shape": [None, 14],
              "input_edge_shape": [None, 1],
              "input_embedd": {'input_node_vocab': 55, "input_node_embedd": 64},
              # Output
              "output_embedd": {"output_mode": 'graph', "output_type": 'padded'},
              "output_mlp": {"use_bias": [True, True, False], "units": [140, 70, 1],
                             "activation": ['relu', 'relu', 'sigmoid']},
              # model specs
              "depth": 3,
              "gcn_args": {"units": 64, "use_bias": True, "activation": "relu",
                           "has_unconnected": True, "is_sorted": False,
                           "pooling_method": 'segment_mean'}
              }

model = make_gcn(**model_args)
model_node_weights = make_gcn_node_weights(**model_args)

# Set learning rate and epochs
learning_rate_start = 1e-3
learning_rate_stop = 1e-4
epo = 150
epomin = 100
epostep = 10

# Compile model with optimizer and loss.
# FIX: use `learning_rate=` — the `lr` alias is deprecated in tf.keras
# optimizers and removed entirely in Keras 3.
optimizer = tf.keras.optimizers.Adam(learning_rate=learning_rate_start)
cbks = tf.keras.callbacks.LearningRateScheduler(
    lr_lin_reduction(learning_rate_start, learning_rate_stop, epomin, epo))
model.compile(loss='binary_crossentropy',
              optimizer=optimizer,
              weighted_metrics=['accuracy'])
# NOTE(review): only `model` is compiled here — `model_node_weights` is
# presumably compiled later or outside this chunk; confirm.
# Add a trailing feature axis to the test labels.
ytest = np.expand_dims(labels_test, axis=-1)

# Graph-level GCN with embedding input and padded output.
model = make_gcn(
    input_node_shape=[None, 14],
    input_edge_shape=[None, 1],
    input_embedd={'input_node_vocab': 55,
                  "input_node_embedd": 64},
    # Output
    output_embedd={"output_mode": 'graph',
                   "output_type": 'padded'},
    output_mlp={"use_bias": [True, True, False],
                "units": [140, 70, 1],
                "activation": ['relu', 'relu', 'sigmoid']},
    # model specs
    depth=3,
    gcn_args={"units": 64,
              "use_bias": True,
              "activation": "relu",
              "has_unconnected": True,
              "is_sorted": False,
              "pooling_method": 'segment_mean'})

# Set learning rate and epochs