Exemplo n.º 1
0
 def __init__(self, n_out = 4, hidden_states=64, gat_layers=2, gat_activation='relu', decode_layers=3, decode_activation='relu', regularization=None, dropout=0.2, batch_norm=True, forward=True, edgeconv=False):
     """Build a graph-conv model: optional ECC edge conv, a GCN stack of
     doubling widths, three global poolings, and a dense decode head.

     Args:
         n_out: width of the final output Dense layer.
         hidden_states: base layer width; conv/decode widths scale from it.
         gat_layers: number of GCNConv layers (width doubles per layer).
         gat_activation: activation for the GCNConv layers.
         decode_layers: number of decode Dense layers (width doubles per layer).
         decode_activation: decode activation; a string is resolved via
             tf.keras.activations.get, a callable is used as-is.
         regularization: kernel regularizer passed to the conv layers.
         dropout: dropout rate for the decode stack.
         batch_norm: if True use BatchNormalization between decode layers,
             else the module-level `no_norm` placeholder.
         forward: flag stored on the instance (used outside this method).
         edgeconv: if True, build the ECCConv front layer. New parameter
             (default False keeps prior behavior reachable): the original
             read `self.edgeconv` without ever assigning it, which raised
             AttributeError.
     """
     super().__init__()
     self.n_out = n_out
     self.hidden_states = hidden_states
     # BUG FIX: the original assigned the undefined name `conv_activation`
     # (NameError); the parameter is called `gat_activation`.
     self.gat_activation = gat_activation
     self.forward = forward
     self.dropout = dropout
     self.gat_layers = gat_layers
     self.regularize = regularization
     self.edgeconv = edgeconv
     if isinstance(decode_activation, str):
         self.decode_activation = tf.keras.activations.get(decode_activation)
     else:
         self.decode_activation = decode_activation
     self.batch_norm = batch_norm
     # Define layers of the model
     if self.edgeconv:
         self.ECC1 = ECCConv(hidden_states, [hidden_states, hidden_states, hidden_states], n_out=hidden_states, activation="relu", kernel_regularizer=self.regularize)
     # BUG FIX: the original read `self.conv_activation`/`self.conv_layers`,
     # which are never set on this class; use the gat_* attributes instead.
     self.GCNs = [GCNConv(hidden_states * int(i), activation=self.gat_activation, kernel_regularizer=self.regularize) for i in 2 ** np.arange(self.gat_layers)]
     self.Pool1 = GlobalMaxPool()
     self.Pool2 = GlobalAvgPool()
     self.Pool3 = GlobalSumPool()
     self.decode = [Dense(i * hidden_states, activation=self.decode_activation) for i in 2 ** np.arange(decode_layers)]
     self.dropout_layers = [Dropout(dropout) for _ in range(len(self.decode))]
     if self.batch_norm:
         self.norm_layers = [BatchNormalization() for _ in range(len(self.decode))]
     else:
         self.norm_layers = [no_norm for _ in range(len(self.decode))]
     self.final = Dense(n_out)
Exemplo n.º 2
0
 def __init__(self, n_out=4):
     """Assemble three GAT attention layers, three global poolings, and a
     dense decode head ending in an ``n_out``-wide output layer.

     NOTE(review): `hidden_states` is not defined in this method —
     presumably a module-level constant; confirm against the full file.
     """
     super().__init__()
     # Define layers of the model
     # Required keyword is channels/hidden states.
     self.att1 = GATConv(hidden_states, attn_heads=2, dropout_rate=0.4,
                         activation="relu", return_attn_coef=False)
     # attn_heads is the time-limiting keyword, watch out with it.
     self.att2 = GATConv(hidden_states // 2, attn_heads=3, dropout_rate=0.1,
                         activation="relu")
     # hidden_states has to be pretty low as well.
     self.att3 = GATConv(hidden_states * 2, attn_heads=4, dropout_rate=0.7,
                         activation="relu")
     # Good results with all three poolings.
     self.Pool1 = GlobalAvgPool()
     self.Pool2 = GlobalSumPool()
     self.Pool3 = GlobalMaxPool()  # important for angle fitting
     self.decode = [Dense(width * hidden_states) for width in (16, 8, 4)]
     self.norm_layers = [BatchNormalization() for _ in self.decode]
     self.d2 = Dense(n_out)
Exemplo n.º 3
0
 def __init__(self, n_out = 7):
     """Set up an ECC edge conv, four GCN layers of doubling width, three
     global poolings, and a five-layer dense decode head.

     NOTE(review): `hidden_states` is presumably a module-level constant —
     it is not defined in this method; confirm against the full file.
     """
     super().__init__()
     # Define layers of the model
     self.ECC1 = ECCConv(hidden_states, [hidden_states] * 3,
                         n_out=hidden_states, activation="relu")
     # GCN widths: 1x, 2x, 4x, 8x hidden_states.
     self.GCN1, self.GCN2, self.GCN3, self.GCN4 = (
         GCNConv(hidden_states * width, activation="relu")
         for width in (1, 2, 4, 8))
     self.Pool1 = GlobalMaxPool()
     self.Pool2 = GlobalAvgPool()
     self.Pool3 = GlobalSumPool()
     self.decode = [Dense(width * hidden_states) for width in (16, 8, 4, 2, 2)]
     self.norm_layers = [BatchNormalization() for _ in self.decode]
     self.d2 = Dense(n_out)
Exemplo n.º 4
0
    def __init__(self, n_out = 3, n_sigs=2, K=[1,2,3], agg_method='sum', hidden_states=64, glob=True, conv_layers=1, conv_activation='relu', decode_layers=2, decode_activation=1, regularization=None, dropout=0.2, batch_norm=True, forward=True):
        """Build an SGConv/GraphSage model with separate regression heads for
        log-energy, zenith, azimuth, their scales, and their uncertainties.

        Args:
            n_out: number of primary regression targets (stored only).
            n_sigs: number of uncertainty outputs (stored only).
            K: hop counts, one SGConv layer per entry. NOTE: the default is
                a shared mutable list; it is only iterated here, never mutated.
            agg_method: aggregation method passed to each SGConv.
            hidden_states: base layer width.
            glob: flag stored on the instance (used outside this method).
            conv_layers: number of GraphSage layers (widths 2x, 4x, ...).
            decode_layers: number of decode Dense layers (shrinking widths).
            decode_activation: a string (resolved via tf.keras.activations.get),
                a callable (used as-is), or the sentinel default 1, which
                selects the module-level `d_act`.
            regularization: kernel regularizer for the GraphSage layers.
            dropout: dropout rate for SGConv and the decode stack.
            batch_norm: BatchNormalization vs. the `no_norm` placeholder.
            forward: flag stored on the instance.
        """
        super().__init__()
        self.n_out = n_out
        self.n_sigs = n_sigs
        self.hidden_states = hidden_states
        self.conv_activation = conv_activation
        self.forward = forward
        self.dropout = dropout
        self.glob = glob
        self.Ks = K
        self.agg_method = agg_method
        self.conv_layers = conv_layers
        self.regularize = regularization
        if isinstance(decode_activation, str):
            self.decode_activation = tf.keras.activations.get(decode_activation)
        elif callable(decode_activation):
            # BUG FIX: the original unconditionally fell back to the
            # module-level `d_act` here, silently discarding a
            # caller-supplied callable activation.
            self.decode_activation = decode_activation
        else:
            # Sentinel default (1): use the module-level default activation.
            self.decode_activation = d_act
        self.batch_norm = batch_norm
        # Define layers of the model

        # One SGConv message-passing layer per hop count in Ks.
        # (Loop variable renamed: the original shadowed the parameter `K`.)
        self.MPs      = [SGConv(hidden_states, hidden_states, K=k, agg_method=self.agg_method, dropout = dropout) for k in self.Ks]

        # GraphSage widths: 2x, 4x, ... * hidden_states.
        self.GCNs    = [GraphSageConv(hidden_states*int(i), activation=self.conv_activation, kernel_regularizer=self.regularize) for i in 2*2**np.arange(self.conv_layers)]

        self.Pool1   = GlobalMaxPool()
        self.Pool2   = GlobalAvgPool()
        self.Pool3   = GlobalSumPool()

        # Decode widths shrink: 2*2**(decode_layers+1) down to 2*2**2.
        self.decode  = [Dense(i * hidden_states) for i in  2*2**np.arange(decode_layers+1,1,-1)]
        self.dropout_layers  = [Dropout(dropout) for _ in range(len(self.decode))]
        if self.batch_norm:
          self.norm_layers  = [BatchNormalization() for _ in range(len(self.decode))]
        else:
          self.norm_layers =  [no_norm for _ in range(len(self.decode))]

        # Two-layer MLP heads per target plus 1-wide output layers.
        self.loge     = [Dense(hidden_states) for _ in range(2)]
        self.loge_out = Dense(1)
        self.zeni     = [Dense(hidden_states) for _ in range(2)]
        self.zeni_out = Dense(1)
        self.azi     = [Dense(hidden_states) for _ in range(2)]
        self.azi_out = Dense(1)
        self.zeni_scale= Dense(1)
        self.azi_scale= Dense(1)
        self.sig_zeni     = [Dense(hidden_states) for _ in range(2)]
        self.sig_zeni_out  = Dense(1)
        self.sig_azi     = [Dense(hidden_states) for _ in range(2)]
        self.sig_azi_out  = Dense(1)
Exemplo n.º 5
0
    def __init__(self, edgeconv, edgenorm, hidden_states=64, edgetype=0, forward=True, K=[1,2], agg_method='min',regularization=None, dropout=0.025):
        """Configure a fixed three-output model: optional edge normalization,
        SGConv hop layers, optional ECC edge conv, a GraphSage stack, global
        poolings, and decode/regression heads for log-energy and angles.

        NOTE(review): `d_act` and `no_norm` are presumably module-level
        names — not visible here; confirm against the full file.
        """
        super().__init__()
        # Fixed hyper-parameters for this model variant.
        self.n_out = 3
        self.n_sigs = 2
        self.hidden_states = hidden_states
        self.conv_activation = 'relu'
        self.forward = forward
        self.dropout = dropout
        self.Ks = K
        self.agg_method = agg_method
        self.conv_layers = 2
        self.decode_layers = 2
        self.edgeconv = edgeconv
        self.edgenorm = edgenorm
        self.edgetype = edgetype
        self.regularize = regularization
        self.decode_activation = d_act
        self.batch_norm = True
        # Define layers of the model
        if self.edgenorm:
            self.norm_edge = BatchNormalization()
        # One SGConv message-passing layer per hop count in Ks.
        self.MPs = [SGConv(self.hidden_states, self.hidden_states, K=k,
                           agg_method=self.agg_method, dropout=self.dropout)
                    for k in self.Ks]

        if self.edgeconv:
            self.ECC1 = ECCConv(self.hidden_states,
                                [self.hidden_states] * 3,
                                n_out=self.hidden_states,
                                activation="relu",
                                kernel_regularizer=self.regularize)

        # GraphSage widths: 4x, 8x * hidden_states.
        self.GCNs = [GraphSageConv(self.hidden_states * int(width),
                                   activation=self.conv_activation,
                                   kernel_regularizer=self.regularize)
                     for width in 4 * 2 ** np.arange(self.conv_layers)]

        self.Pool1 = GlobalMaxPool()
        self.Pool2 = GlobalAvgPool()
        self.Pool3 = GlobalSumPool()

        # Decode widths shrink from 2*2**(decode_layers+1) down to 2*2**2.
        self.decode = [Dense(width * self.hidden_states)
                       for width in 2 * 2 ** np.arange(self.decode_layers + 1, 1, -1)]
        self.dropout_layers = [Dropout(self.dropout) for _ in self.decode]
        if self.batch_norm:
            self.norm_layers = [BatchNormalization() for _ in self.decode]
        else:
            self.norm_layers = [no_norm for _ in self.decode]

        # Regression heads: log-energy, (zenith, azimuth) pair, and sigmas.
        self.loge = [Dense(self.hidden_states) for _ in range(2)]
        self.loge_out = Dense(1)
        self.angles = [Dense(self.hidden_states) for _ in range(2)]
        self.angles_out = Dense(2)
        self.angle_scale = Dense(2)
        if self.n_sigs > 0:
            self.sigs = [Dense(self.hidden_states) for _ in range(2)]
            self.sigs_out = Dense(self.n_sigs)
Exemplo n.º 6
0
 def __init__(self,
              n_out=4,
              hidden_states=64,
              n_GCN=2,
              GCN_activation=LeakyReLU(alpha=0.2),
              decode_activation=LeakyReLU(alpha=0.2),
              regularize=None,
              dropout=0.2,
              forward=True,
              ECC=True):
     """Build an optional-ECC + GCN model with three global poolings and a
     dense decode head.

     Args:
         n_out: width of the final output Dense layer.
         hidden_states: base layer width.
         n_GCN: number of GCNConv layers; also sets the number of decode
             layers (widths 1x, 2x, 4x, ... * hidden_states for both).
         GCN_activation: activation for the GCNConv layers.
             NOTE(review): the default is a LeakyReLU *instance* evaluated
             once at definition time and shared across all constructions —
             confirm this sharing is intended.
         decode_activation: stored decode activation (same shared-default
             caveat as GCN_activation).
         regularize: kernel regularizer for ECC/GCN layers.
         dropout: dropout rate for the decode stack.
         forward: flag stored on the instance (used outside this method).
         ECC: if True, build the ECCConv front layer.
     """
     super().__init__()
     self.n_out = n_out
     self.hidden_states = hidden_states
     self.conv_activation = GCN_activation
     self.forward = forward
     self.dropout = dropout
     self.n_GCN = n_GCN
     # BUG FIX: the original assigned `self.ECC = ECC` twice; once is enough.
     self.ECC = ECC
     self.regularize = regularize
     self.decode_activation = decode_activation
     # Define layers of the model
     if self.ECC:
         self.ECC1 = ECCConv(hidden_states,
                             [hidden_states, hidden_states, hidden_states],
                             n_out=hidden_states,
                             activation="relu",
                             kernel_regularizer=self.regularize)
     # GCN widths: 1x, 2x, 4x, ... * hidden_states.
     self.GCNs = [
         GCNConv(hidden_states * int(i),
                 activation=GCN_activation,
                 kernel_regularizer=self.regularize)
         for i in 2**np.arange(n_GCN)
     ]
     self.Pool1 = GlobalMaxPool()
     self.Pool2 = GlobalAvgPool()
     self.Pool3 = GlobalSumPool()
     self.decode = [Dense(i * hidden_states) for i in 2**np.arange(n_GCN)]
     self.dropout_layers = [
         Dropout(dropout) for _ in range(len(self.decode))
     ]
     self.norm_layers = [
         BatchNormalization() for _ in range(len(self.decode))
     ]
     self.final = Dense(n_out)
Exemplo n.º 7
0
 def __init__(self, n_out = 6, hidden_states = 64, forward = False, dropout = 0.5):
     """Set up an ECC edge conv, four GCN layers of doubling width, max/avg
     pooling, a dense decode stack, and angle/sigma regression heads."""
     super().__init__()
     self.forward = forward
     # Define layers of the model
     self.ECC1 = ECCConv(hidden_states, [hidden_states, hidden_states],
                         n_out=hidden_states, activation="relu")
     # GCN widths: 1x, 2x, 4x, 8x hidden_states.
     self.GCN1, self.GCN2, self.GCN3, self.GCN4 = (
         GCNConv(hidden_states * width, activation="relu")
         for width in (1, 2, 4, 8))
     self.Pool1 = GlobalMaxPool()
     self.Pool2 = GlobalAvgPool()
     # self.Pool3   = GlobalSumPool()
     self.decode = [Dense(width * hidden_states) for width in (16, 16, 8)]
     self.drop_w = [Dropout(dropout) for _ in self.decode]
     self.norm_layers = [BatchNormalization() for _ in self.decode]
     # Heads: two-layer MLPs for the (zenith, azimuth) pair and its sigmas.
     self.angles = [Dense(hidden_states) for _ in range(2)]
     self.angles_out = Dense(2)
     self.sigs = [Dense(hidden_states) for _ in range(2)]
     self.sigs_out = Dense(2)
Exemplo n.º 8
0
    def __init__(self,
                 n_out=3,
                 n_sigs=2,
                 hidden_states=64,
                 conv_layers=2,
                 decode_layers=3,
                 conv_activation='relu',
                 decode_activation=1,
                 regularization=None,
                 dropout=0.03):
        """Build an SGConv + GraphSage model with log-energy, angle, and
        uncertainty regression heads.

        Args:
            n_out: number of primary regression targets (stored only).
            n_sigs: width of the uncertainty output layer.
            hidden_states: base layer width.
            conv_layers: number of GraphSage layers (widths 2x, 4x, ...).
            decode_layers: number of decode Dense layers (shrinking widths).
            conv_activation: activation for the GraphSage layers.
            decode_activation: a string (resolved via
                tf.keras.activations.get), a callable (used as-is), or the
                sentinel default 1, which selects the module-level `d_act`.
            regularization: kernel regularizer for the GraphSage layers.
            dropout: dropout rate for SGConv and the decode stack.
        """
        super().__init__()
        self.n_out = n_out
        self.n_sigs = n_sigs
        self.hidden_states = hidden_states
        self.conv_activation = conv_activation
        self.dropout = dropout
        self.conv_layers = conv_layers
        self.regularize = regularization
        if isinstance(decode_activation, str):
            self.decode_activation = tf.keras.activations.get(
                decode_activation)
        elif callable(decode_activation):
            # BUG FIX: the original unconditionally fell back to the
            # module-level `d_act` here, silently discarding a
            # caller-supplied callable activation.
            self.decode_activation = decode_activation
        else:
            # Sentinel default (1): use the module-level default activation.
            self.decode_activation = d_act

        # Define layers of the model

        # Two-hop mean-aggregation message passing.
        self.hop2mean = SGConv(hidden_states,
                               hidden_states,
                               K=2,
                               agg_method='mean',
                               dropout=dropout)

        self.norm_edge = BatchNormalization()

        # GraphSage widths: 2x, 4x, ... * hidden_states.
        self.GCNs = [
            GraphSageConv(hidden_states * int(i),
                          activation=self.conv_activation,
                          kernel_regularizer=self.regularize)
            for i in 2 * 2**np.arange(self.conv_layers)
        ]

        self.Pool1 = GlobalMaxPool()
        self.Pool2 = GlobalAvgPool()
        self.Pool3 = GlobalSumPool()

        # Decode widths shrink: int(1.5 * 2**k) * hidden_states for
        # k = decode_layers+1 down to 2.
        self.decode = [
            Dense(int(i) * hidden_states)
            for i in 1.5 * 2**np.arange(decode_layers + 1, 1, -1)
        ]
        self.dropout_layers = [
            Dropout(dropout) for _ in range(len(self.decode))
        ]
        self.norm_layers = [
            BatchNormalization() for _ in range(len(self.decode))
        ]

        # Regression heads: log-energy, (zenith, azimuth), and sigmas.
        self.loge = [Dense(hidden_states) for _ in range(2)]
        self.loge_out = Dense(1)
        self.angles = [Dense(hidden_states) for _ in range(2)]
        self.angles_out = Dense(2)
        self.angle_scale = Dense(2)

        self.sigs = [Dense(hidden_states) for _ in range(2)]
        self.sigs_out = Dense(n_sigs)
Exemplo n.º 9
0
    def __init__(self,
                 n_out=3,
                 n_sigs=2,
                 hidden_states=64,
                 conv_layers=2,
                 glob=True,
                 conv_activation='relu',
                 decode_layers=3,
                 decode_activation=d_act,
                 regularization=None,
                 dropout=0.2,
                 batch_norm=True,
                 forward=True,
                 edgeconv=True,
                 edgenorm=True):
        """Build an MP + optional-ECC + GraphSage model with log-energy,
        angle, and uncertainty regression heads.

        Args:
            n_out: number of primary regression targets (stored only).
            n_sigs: width of the uncertainty head; heads are built only
                when n_sigs > 0.
            hidden_states: base layer width.
            conv_layers: number of GraphSage layers (widths 2x, 4x, ...).
            glob: flag stored on the instance (used outside this method).
            conv_activation: activation for the GraphSage layers.
            decode_layers: number of decode Dense layers (shrinking widths).
            decode_activation: a string (resolved via
                tf.keras.activations.get) or a callable; defaults to the
                module-level `d_act`.
            regularization: kernel regularizer for ECC/GraphSage layers.
            dropout: dropout rate for the MP layer and decode stack.
            batch_norm: BatchNormalization vs. the `no_norm` placeholder.
            forward: flag stored on the instance.
            edgeconv: if True, build the ECCConv front layer.
            edgenorm: if True, build a BatchNormalization for edge features.
        """
        super().__init__()
        self.n_out = n_out
        self.n_sigs = n_sigs
        self.hidden_states = hidden_states
        self.conv_activation = conv_activation
        self.forward = forward
        self.dropout = dropout
        self.glob = glob
        self.conv_layers = conv_layers
        self.edgeconv = edgeconv
        self.edgenorm = edgenorm
        self.regularize = regularization
        if isinstance(decode_activation, str):
            self.decode_activation = tf.keras.activations.get(
                decode_activation)
        else:
            # BUG FIX: the original assigned the module-level `d_act` here,
            # silently discarding a caller-supplied callable. Since the
            # parameter's default IS `d_act`, assigning the parameter is
            # backward-compatible.
            self.decode_activation = decode_activation
        self.batch_norm = batch_norm
        # Define layers of the model
        if self.edgenorm:
            self.norm_edge = BatchNormalization()

        self.MP = MP(hidden_states, hidden_states, dropout=dropout)

        if self.edgeconv:
            self.ECC1 = ECCConv(hidden_states,
                                [hidden_states, hidden_states, hidden_states],
                                n_out=hidden_states,
                                activation="relu",
                                kernel_regularizer=self.regularize)

        # GraphSage widths: 2x, 4x, ... * hidden_states.
        self.GCNs = [
            GraphSageConv(hidden_states * int(i),
                          activation=self.conv_activation,
                          kernel_regularizer=self.regularize)
            for i in 2 * 2**np.arange(self.conv_layers)
        ]

        self.Pool1 = GlobalMaxPool()
        self.Pool2 = GlobalAvgPool()
        self.Pool3 = GlobalSumPool()

        # Decode widths shrink: 2*2**(decode_layers+1) down to 2*2**2.
        self.decode = [
            Dense(i * hidden_states)
            for i in 2 * 2**np.arange(decode_layers + 1, 1, -1)
        ]
        self.dropout_layers = [
            Dropout(dropout) for _ in range(len(self.decode))
        ]
        if self.batch_norm:
            self.norm_layers = [
                BatchNormalization() for _ in range(len(self.decode))
            ]
        else:
            self.norm_layers = [no_norm for _ in range(len(self.decode))]

        # Regression heads: log-energy, (zenith, azimuth), and sigmas.
        self.loge = [Dense(hidden_states) for _ in range(2)]
        self.loge_out = Dense(1)
        self.angles = [Dense(hidden_states) for _ in range(2)]
        self.angles_out = Dense(2)
        self.angle_scale = Dense(2)
        if n_sigs > 0:
            self.sigs = [Dense(hidden_states) for _ in range(2)]
            self.sigs_out = Dense(n_sigs)