Example #1
 def __init__(self,
              output_size,
              activation='relu',
              attn_activation='leakyrelu',
              use_bias=True,
              sparse=False,
              initializers=None,
              partitioners=None,
              regularizers=None,
              custom_getter=None,
              name="graph_attn"):
     super(GraphAttentionLayer, self).__init__(output_size,
                                               use_bias=use_bias,
                                               initializers=initializers,
                                               partitioners=partitioners,
                                               regularizers=regularizers,
                                               custom_getter=custom_getter,
                                               name=name)
     self._sparse = sparse
     self._activ = tfutils.get_tf_activ(activation)
     self._attn_activ = tfutils.get_tf_activ(attn_activation)
     # (name, size) pairs naming this layer's weight variables; the actual
     # tensors start out as None placeholders until the layer is built.
     self.weight_keys = {("w", output_size), ("u", output_size), ("f1", 1),
                         ("f2", 1)}
     self.bias_keys = set()
     self.weights = {x[0]: None for x in self.weight_keys}
     if use_bias:
         self.bias_keys = {("b", output_size), ("c", output_size),
                           ("d1", 1), ("d2", 1)}
         for x in self.bias_keys:
             self.weights[x[0]] = None
     self.possible_keys = self.get_possible_initializer_keys(
         use_bias=use_bias)
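
A minimal construction sketch, assuming GraphAttentionLayer is importable from the surrounding project; the output size, activations, and name below are illustrative values, not taken from the source:

 # Dense (non-sparse) attention layer with 64 output units and biases.
 attn = GraphAttentionLayer(output_size=64,
                            activation='relu',
                            attn_activation='leakyrelu',
                            use_bias=True,
                            sparse=False,
                            name="graph_attn_0")
 # Keys for which initializers/partitioners/regularizers may be supplied.
 print(attn.possible_keys)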
Example #2
 def __init__(self,
              output_size,
              activation='relu',
              use_bias=True,
              initializers=None,
              partitioners=None,
              regularizers=None,
              custom_getter=None,
              name="graph_skip"):
     super().__init__(output_size,
                      use_bias=use_bias,
                      initializers=initializers,
                      partitioners=partitioners,
                      regularizers=regularizers,
                      custom_getter=custom_getter,
                      name=name)
     self._activ = tfutils.get_tf_activ(activation)
     self._weight = {
         "w": None,
         "u": None,
     }
     self._bias = {
         "b": None,
         "c": None,
     }
     self.possible_keys = self.get_possible_initializer_keys(
         use_bias=use_bias)
Example #3
 def SingleLayerMLP(self, layer_lens):
     # Returns a factory that builds an snt.nets.MLP with the module's
     # shared initializers, regularizers, and activation settings.
     return lambda: snt.nets.MLP(layer_lens,
                                 activate_final=True,
                                 regularizers=self.regularizers,
                                 initializers=self.initializers,
                                 custom_getter=self.custom_getter,
                                 use_bias=self.use_bias,
                                 activation=tfutils.get_tf_activ(
                                     self.arch.activ))
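
Returning a zero-argument lambda rather than the MLP itself defers construction to the caller. A hedged usage sketch, where builder stands for an instance of the class above and node_features for a hypothetical [num_nodes, feature_dim] float tensor:

 mlp_factory = builder.SingleLayerMLP([128])  # factory for a one-layer, 128-unit MLP
 mlp = mlp_factory()                          # constructs the snt.nets.MLP module
 outputs = mlp(node_features)                 # MLP variables are created on this first call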
Example #4
 def __init__(self,
              output_size,
              activation='relu',
              use_bias=True,
              initializers=None,
              partitioners=None,
              regularizers=None,
              custom_getter=None,
              name="graph_conv"):
     super(GraphConvLayer, self).__init__(output_size,
                                          use_bias=use_bias,
                                          initializers=initializers,
                                          partitioners=partitioners,
                                          regularizers=regularizers,
                                          custom_getter=custom_getter,
                                          name=name)
     self._activ = tfutils.get_tf_activ(activation)
     self._w = None
     self._b = None
     self.possible_keys = self.get_possible_initializer_keys(
         use_bias=use_bias)