def __init__(self, params):
    """Build the FF3 graph: three leaky-ReLU dense hidden layers and a 6-unit linear head.

    Defaults for the hidden widths can be overridden via --graph_params.
    """
    super(KerasGraphFF3, self).__init__(params)
    # Declare graph_params defaults, then merge overrides from --graph_params.
    for key in ("ff_hidden_1", "ff_hidden_2", "ff_hidden_3"):
        self.graph_params[key] = 128
    self.graph_params = update_params(self.graph_params, self._flags.graph_params, "graph")
    # Register Keras layers in _tracked_layers so their variables are tracked.
    self._tracked_layers["add_layer"] = tf.keras.layers.Add()
    self._tracked_layers["flatten_1"] = tf.keras.layers.Flatten()
    # The three hidden Dense layers differ only in their index.
    for idx in (1, 2, 3):
        layer_name = "ff_layer_{}".format(idx)
        self._tracked_layers[layer_name] = tf.keras.layers.Dense(
            self.graph_params["ff_hidden_{}".format(idx)],
            activation=tf.nn.leaky_relu,
            name=layer_name)
    self._tracked_layers["last_layer"] = tf.keras.layers.Dense(
        6, activation=None, name="last_layer")
    self._tracked_layers["flatten_2"] = tf.keras.layers.Flatten()
def __init__(self, params):
    """Set conv + multi-FF graph defaults (v0.1) and merge --graph_params overrides."""
    super(GraphConv2MultiFF, self).__init__(params)
    # v0.1
    defaults = {
        "mid_layer_activation": "leaky_relu",
        "conv_layer_activation": "leaky_relu",
        "input_dropout": 0.0,
        "batch_norm": False,
        "dense_layers": [256, 128],
    }
    # Assign one-by-one so only __setitem__ on graph_params is exercised.
    for key, value in defaults.items():
        self.graph_params[key] = value
    self.graph_params = update_params(self.graph_params, self._flags.graph_params, "graph")
def __init__(self, params):
    """Set triangle-variant graph defaults (v0.2) and merge --graph_params overrides.

    Raise the version comment if adjusting default values.
    """
    super(GraphConv2MultiFFTriangle, self).__init__(params)
    # v0.2 -- raise version if adjusting default values
    defaults = {
        "dense_layers": [512, 256, 128, 64, 32],
        "nhidden_dense_final": 6,
        "mid_layer_activation": "leaky_relu",
        "conv_layer_activation": "leaky_relu",
        "batch_norm": False,
        "edge_classifier": False,
        "nhidden_max_edges": 6,
    }
    # Assign one-by-one so only __setitem__ on graph_params is exercised.
    for key, value in defaults.items():
        self.graph_params[key] = value
    self.graph_params = update_params(self.graph_params, self._flags.graph_params, "graph")
def __init__(self, params):
    """Set multi-FF graph defaults (v0.2) and merge --graph_params overrides."""
    super(GraphMultiFF, self).__init__(params)
    # v0.2
    defaults = {
        "mid_layer_activation": "leaky_relu",
        "batch_norm": False,
        "dense_layers": [512, 256, 128, 64],
        # e.g. [0.0, 0.0]: dropout rate after each dense layer; empty = none.
        "dense_dropout": [],
        "input_dropout": 0.01,
        "abs_as_input": False,
    }
    # Assign one-by-one so only __setitem__ on graph_params is exercised.
    for key, value in defaults.items():
        self.graph_params[key] = value
    self.graph_params = update_params(self.graph_params, self._flags.graph_params, "graph")
def __init__(self, params):
    """Set multi-FF graph defaults (v0.3) and merge --graph_params overrides.

    NOTE(review): a second ``GraphMultiFF.__init__`` appears earlier in this
    file (v0.2) -- presumably two revisions coexist; verify which is live.
    """
    super(GraphMultiFF, self).__init__(params)
    # v0.3
    # Input feature width: 3 normally, 4 when phi is supplied as complex parts.
    self.fc_size_0 = 4 if self._flags.complex_phi else 3
    defaults = {
        "dense_layers": [512, 256, 128, 64, 32],
        "input_dropout": 0.0,
        "ff_dropout": 0.0,
        "uniform_noise": 0.0,
        "normal_noise": 0.0,
        "nhidden_dense_final": 6,
        "edge_classifier": False,
        "batch_norm": False,
        "nhidden_max_edges": 6,
    }
    # Assign one-by-one so only __setitem__ on graph_params is exercised.
    for key, value in defaults.items():
        self.graph_params[key] = value
    self.graph_params = update_params(self.graph_params, self._flags.graph_params, "graph")
def print_params(self):
    """Refresh graph_params from --graph_params and print every key/value pair.

    Side effect: re-runs update_params, so flag overrides are re-applied
    before printing.
    """
    self.graph_params = update_params(self.graph_params, self._flags.graph_params, "graph")
    print("graph_params:")
    # Iterate keys directly: the original used enumerate() over the dict and
    # discarded the index, which only obscured plain key iteration.
    for key in self.graph_params:
        print("  {}: {}".format(key, self.graph_params[key]))
def update_params(self):
    """Updating of the default params if provided via flags as a dict"""
    # Delegates to the module-level update_params() helper; the method name
    # shadows it only as an attribute, so this call still resolves to the
    # module-level function.
    self._optimizer_params = update_params(
        self._optimizer_params, self._flags.optimizer_params, "Optimizer")