def __init__(
    self,
    n_labels,
    hidden_states=64,
    activation='relu',
    output_activation="softmax",
    use_bias=False,
    dropout_rate=0.5,
    n_input_channels=None,
    **kwargs,
):
    """Two-stage GCN node classifier: Dropout -> GCNConv(hidden) -> Dropout -> GCNConv(n_labels).

    Args:
        n_labels: Width of the output GCN layer (number of classes).
        hidden_states: Width of the hidden GCN layer.
        activation: Activation for the hidden GCN layer.
        output_activation: Activation for the output GCN layer.
        use_bias: Whether both GCN layers use a bias term.
        dropout_rate: Rate shared by both Dropout layers.
        n_input_channels: Stored only; not used in construction here.
        **kwargs: Forwarded to the parent constructor.
    """
    super().__init__(**kwargs)
    # Keep the full configuration on the instance for later inspection.
    self.n_labels = n_labels
    self.hidden_states = hidden_states
    self.activation = activation
    self.output_activation = output_activation
    self.use_bias = use_bias
    self.dropout_rate = dropout_rate
    self.n_input_channels = n_input_channels
    # NOTE(review): regularization (e.g. kernel_regularizer on the GCN layers)
    # is still unimplemented, per the original TODO.
    # Hidden stage — attribute names (_d0/_gcn0) are presumably referenced by
    # call() elsewhere in the file, so they are kept as-is.
    self._d0 = Dropout(dropout_rate)
    self._gcn0 = GCNConv(hidden_states, activation=activation, use_bias=use_bias)
    # Output stage mapping hidden features to per-node class scores.
    self.d1 = Dropout(dropout_rate)
    self.gcn1 = GCNConv(n_labels, activation=output_activation, use_bias=use_bias)
def __init__(self, n_out=7, hidden_states=64):
    """Graph model: one ECCConv, a widening 4-layer GCN stack, three global
    poolings, and a Dense decoder with batch normalization.

    Args:
        n_out: Width of the final Dense output layer.
        hidden_states: Base width of the hidden layers. FIX: the original body
            read a bare name ``hidden_states`` that was neither a parameter nor
            defined locally (NameError unless a module-level global existed);
            it is now an explicit keyword, matching the sibling constructors.
    """
    super().__init__()
    # Edge-conditioned convolution with a 3-layer edge network, then a GCN
    # stack whose width doubles each layer: h, 2h, 4h, 8h.
    self.ECC1 = ECCConv(
        hidden_states,
        [hidden_states, hidden_states, hidden_states],
        n_out=hidden_states,
        activation="relu",
    )
    self.GCN1 = GCNConv(hidden_states, activation="relu")
    self.GCN2 = GCNConv(hidden_states * 2, activation="relu")
    self.GCN3 = GCNConv(hidden_states * 4, activation="relu")
    self.GCN4 = GCNConv(hidden_states * 8, activation="relu")
    # Three complementary global readouts (max / mean / sum).
    self.Pool1 = GlobalMaxPool()
    self.Pool2 = GlobalAvgPool()
    self.Pool3 = GlobalSumPool()
    # Decoder widths 16h, 8h, 4h, 2h, 2h; one BatchNormalization per layer.
    self.decode = [Dense(size * hidden_states) for size in [16, 8, 4, 2, 2]]
    self.norm_layers = [BatchNormalization() for _ in range(len(self.decode))]
    self.d2 = Dense(n_out)
def __init__(self, n_out=4, hidden_states=64, gat_layers=2, gat_activation='relu',
             decode_layers=3, decode_activation='relu', regularization=None,
             dropout=0.2, batch_norm=True, forward=True, edgeconv=True):
    """Configurable GCN model: optional ECCConv front-end, a stack of widening
    GCN layers, three global poolings, and a Dense decoder with optional
    batch normalization.

    Args:
        n_out: Width of the final Dense layer.
        hidden_states: Base hidden width; layer i has width hidden_states * 2**i.
        gat_layers: Number of GCNConv layers.
        gat_activation: Activation for the GCNConv layers (string or callable).
        decode_layers: Number of Dense decoder layers.
        decode_activation: Activation for the decoder; a string is resolved
            via tf.keras.activations.get.
        regularization: kernel_regularizer applied to ECC/GCN layers.
        dropout: Dropout rate for the decoder dropout layers.
        batch_norm: If True use BatchNormalization in the decoder, otherwise
            the no-op ``no_norm`` (defined elsewhere in this file).
        forward: Stored flag, presumably consumed by call() elsewhere.
        edgeconv: NEW (backward-compatible) flag controlling the ECCConv
            front-end. FIX: the original read ``self.edgeconv`` without ever
            setting it (AttributeError).
    """
    super().__init__()
    self.n_out = n_out
    self.hidden_states = hidden_states
    # FIX: original assigned from the undefined name ``conv_activation``
    # (NameError); the parameter is ``gat_activation``.
    self.gat_activation = gat_activation
    self.forward = forward
    self.dropout = dropout
    self.gat_layers = gat_layers
    self.regularize = regularization
    self.edgeconv = edgeconv
    # Resolve a string activation name into a callable once, up front.
    if type(decode_activation) == str:
        self.decode_activation = tf.keras.activations.get(decode_activation)
    else:
        self.decode_activation = decode_activation
    self.batch_norm = batch_norm
    # Define layers of the model
    if self.edgeconv:
        self.ECC1 = ECCConv(hidden_states,
                            [hidden_states, hidden_states, hidden_states],
                            n_out=hidden_states, activation="relu",
                            kernel_regularizer=self.regularize)
    # FIX: original referenced never-assigned ``self.conv_activation`` and
    # ``self.conv_layers``; use the gat_* attributes actually set above.
    # Widths double per layer: h, 2h, 4h, ...
    self.GCNs = [GCNConv(hidden_states * int(i),
                         activation=self.gat_activation,
                         kernel_regularizer=self.regularize)
                 for i in 2 ** np.arange(self.gat_layers)]
    # Three complementary global readouts (max / mean / sum).
    self.Pool1 = GlobalMaxPool()
    self.Pool2 = GlobalAvgPool()
    self.Pool3 = GlobalSumPool()
    # Decoder stack with per-layer dropout and (optional) batch norm.
    self.decode = [Dense(i * hidden_states, activation=self.decode_activation)
                   for i in 2 ** np.arange(decode_layers)]
    self.dropout_layers = [Dropout(dropout) for _ in range(len(self.decode))]
    if self.batch_norm:
        self.norm_layers = [BatchNormalization() for _ in range(len(self.decode))]
    else:
        # ``no_norm`` is assumed to be a no-op defined elsewhere in this file.
        self.norm_layers = [no_norm for _ in range(len(self.decode))]
    self.final = Dense(n_out)
def __init__(self, n_out=6, hidden_states=64, forward=False, dropout=0.5):
    """Graph regression model with two two-unit output heads.

    Builds one ECCConv, a widening 4-layer GCN stack, two global poolings,
    a Dense decoder with dropout and batch norm, and two separate heads
    ("angles" and "sigs"), each ending in a Dense(2).

    Args:
        n_out: Stored by convention in sibling constructors but unused here;
            the heads fix their own output widths.
        hidden_states: Base width of the hidden layers.
        forward: Stored flag, presumably consumed by call() elsewhere.
        dropout: Rate for the decoder Dropout layers.
    """
    super().__init__()
    self.forward = forward
    # Edge-conditioned convolution with a 2-layer edge network.
    self.ECC1 = ECCConv(
        hidden_states,
        [hidden_states, hidden_states],
        n_out=hidden_states,
        activation="relu",
    )
    # GCN widths double per layer: h, 2h, 4h, 8h.
    self.GCN1 = GCNConv(hidden_states, activation="relu")
    self.GCN2 = GCNConv(hidden_states * 2, activation="relu")
    self.GCN3 = GCNConv(hidden_states * 4, activation="relu")
    self.GCN4 = GCNConv(hidden_states * 8, activation="relu")
    # Global readouts (max and mean; sum pooling intentionally disabled).
    self.Pool1 = GlobalMaxPool()
    self.Pool2 = GlobalAvgPool()
    # self.Pool3 = GlobalSumPool()
    # Decoder widths 16h, 16h, 8h with matching dropout and batch norm.
    decoder_widths = [16, 16, 8]
    self.decode = [Dense(w * hidden_states) for w in decoder_widths]
    self.drop_w = [Dropout(dropout) for _ in decoder_widths]
    self.norm_layers = [BatchNormalization() for _ in decoder_widths]
    # Two-layer "angles" head -> 2 outputs.
    self.angles = [Dense(hidden_states) for _ in range(2)]
    self.angles_out = Dense(2)
    # Two-layer "sigs" head -> 2 outputs.
    self.sigs = [Dense(hidden_states) for _ in range(2)]
    self.sigs_out = Dense(2)
def __init__(self, n_out=4, hidden_states=64, n_GCN=2, GCN_activation=None,
             decode_activation=None, regularize=None, dropout=0.2,
             forward=True, ECC=True):
    """Configurable GCN model: optional ECCConv front-end, n_GCN widening
    GCN layers, three global poolings, and a Dense decoder.

    Args:
        n_out: Width of the final Dense layer.
        hidden_states: Base hidden width; layer i has width hidden_states * 2**i.
        n_GCN: Number of GCNConv layers (also sets the decoder depth).
        GCN_activation: Activation for the GCN layers; defaults to a fresh
            LeakyReLU(alpha=0.2) per instance.
        decode_activation: Stored decoder activation; defaults to a fresh
            LeakyReLU(alpha=0.2) per instance. NOTE(review): the decoder
            Dense layers below are built without it — presumably applied in
            call(); confirm against the rest of the file.
        regularize: kernel_regularizer for the ECC/GCN layers.
        dropout: Rate for the decoder Dropout layers.
        forward: Stored flag, presumably consumed by call() elsewhere.
        ECC: If True, build the ECCConv front-end.
    """
    super().__init__()
    # FIX: the original signature used LeakyReLU(alpha=0.2) instances as
    # default argument values — evaluated once at definition time, so every
    # model built with the defaults shared the same activation layer objects.
    # Build them per instance instead (defaults are now None: backward
    # compatible, same effective activation).
    if GCN_activation is None:
        GCN_activation = LeakyReLU(alpha=0.2)
    if decode_activation is None:
        decode_activation = LeakyReLU(alpha=0.2)
    self.n_out = n_out
    self.hidden_states = hidden_states
    self.conv_activation = GCN_activation
    self.forward = forward
    self.dropout = dropout
    self.n_GCN = n_GCN
    # FIX: the original assigned self.ECC = ECC twice; once is enough.
    self.ECC = ECC
    self.regularize = regularize
    self.decode_activation = decode_activation
    # Define layers of the model
    if self.ECC:
        self.ECC1 = ECCConv(hidden_states,
                            [hidden_states, hidden_states, hidden_states],
                            n_out=hidden_states, activation="relu",
                            kernel_regularizer=self.regularize)
    # GCN widths double per layer: h, 2h, 4h, ...
    self.GCNs = [
        GCNConv(hidden_states * int(i), activation=GCN_activation,
                kernel_regularizer=self.regularize)
        for i in 2 ** np.arange(n_GCN)
    ]
    # Three complementary global readouts (max / mean / sum).
    self.Pool1 = GlobalMaxPool()
    self.Pool2 = GlobalAvgPool()
    self.Pool3 = GlobalSumPool()
    # Decoder stack mirrors the GCN widths, with dropout and batch norm.
    self.decode = [Dense(i * hidden_states) for i in 2 ** np.arange(n_GCN)]
    self.dropout_layers = [Dropout(dropout) for _ in range(len(self.decode))]
    self.norm_layers = [BatchNormalization() for _ in range(len(self.decode))]
    self.final = Dense(n_out)