Example #1
    def __init__(self,
                 input_dim,
                 output_dim,
                 model_size="small",
                 neigh_input_dim=None,
                 dropout=0.,
                 bias=False,
                 act=tf.nn.relu,
                 name=None,
                 concat=False,
                 **kwargs):
        super(MeanPoolingAggregator, self).__init__(**kwargs)

        self.dropout = dropout
        self.bias = bias
        self.act = act
        self.concat = concat

        if neigh_input_dim is None:
            neigh_input_dim = input_dim

        if name is not None:
            name = '/' + name
        else:
            name = ''

        if model_size == "small":
            hidden_dim = self.hidden_dim = 512
        elif model_size == "big":
            hidden_dim = self.hidden_dim = 1024

        self.mlp_layers = []
        self.mlp_layers.append(
            Dense(input_dim=neigh_input_dim,
                  output_dim=hidden_dim,
                  act=tf.nn.relu,
                  dropout=dropout,
                  sparse_inputs=False,
                  logging=self.logging))

        with tf.variable_scope(self.name + name + '_vars'):
            self.vars['neigh_weights'] = glorot([hidden_dim, output_dim],
                                                name='neigh_weights')

            self.vars['self_weights'] = glorot([input_dim, output_dim],
                                               name='self_weights')
            if self.bias:
                self.vars['bias'] = zeros([self.output_dim], name='bias')

        if self.logging:
            self._log_vars()

        self.input_dim = input_dim
        self.output_dim = output_dim
        self.neigh_input_dim = neigh_input_dim
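
All of the examples on this page call glorot and zeros helpers defined elsewhere in the project. A minimal sketch of what they typically look like in GraphSAGE-style code (the project's own definitions may differ; the shape[-1] fallback for 1-D shapes such as the alpha vector in Example #5 is an assumption):

    import numpy as np
    import tensorflow as tf  # TensorFlow 1.x, as in the examples

    def glorot(shape, name=None):
        # Glorot & Bengio (2010) uniform initializer; shape[-1] also
        # covers 1-D shapes such as glorot([num_attr]) in Example #5.
        init_range = np.sqrt(6.0 / (shape[0] + shape[-1]))
        initial = tf.random_uniform(shape, minval=-init_range,
                                    maxval=init_range, dtype=tf.float32)
        return tf.Variable(initial, name=name)

    def zeros(shape, name=None):
        # All-zeros variable, used here for bias terms.
        initial = tf.zeros(shape, dtype=tf.float32)
        return tf.Variable(initial, name=name)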
Example #2
    def __init__(self,
                 input_dim,
                 output_dim,
                 model_size="small",
                 neigh_input_dim=None,
                 dropout=0.,
                 bias=False,
                 act=tf.nn.relu,
                 name=None,
                 concat=False,
                 **kwargs):
        super(SeqAggregator, self).__init__(**kwargs)

        self.dropout = dropout
        self.bias = bias
        self.act = act
        self.concat = concat

        if neigh_input_dim is None:
            neigh_input_dim = input_dim

        if name is not None:
            name = '/' + name
        else:
            name = ''

        if model_size == "small":
            hidden_dim = self.hidden_dim = 128
        elif model_size == "big":
            hidden_dim = self.hidden_dim = 256

        with tf.variable_scope(self.name + name + '_vars'):
            self.vars['neigh_weights'] = glorot([hidden_dim, output_dim],
                                                name='neigh_weights')

            self.vars['self_weights'] = glorot([input_dim, output_dim],
                                               name='self_weights')
            if self.bias:
                self.vars['bias'] = zeros([self.output_dim], name='bias')

        if self.logging:
            self._log_vars()

        self.input_dim = input_dim
        self.output_dim = output_dim
        self.neigh_input_dim = neigh_input_dim
        self.cell = tf.contrib.rnn.BasicLSTMCell(self.hidden_dim)
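
The BasicLSTMCell created above is normally unrolled over each node's neighbor sequence in the aggregator's call method. A sketch of that step, assuming a [batch, num_neighbors, neigh_input_dim] tensor named neigh_vecs (the name and the use of the last output are assumptions):

    def run_cell_over_neighbors(cell, neigh_vecs, batch_size):
        # Unroll the LSTM across the neighbor axis (TF 1.x API).
        initial_state = cell.zero_state(batch_size, tf.float32)
        outputs, _ = tf.nn.dynamic_rnn(cell, neigh_vecs,
                                       initial_state=initial_state,
                                       dtype=tf.float32)
        # Keep the final hidden state as the neighborhood summary.
        return outputs[:, -1, :]  # [batch, hidden_dim]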
Example #3
    def __init__(self,
                 input_dim,
                 output_dim,
                 neigh_input_dim=None,
                 dropout=0.,
                 bias=False,
                 act=tf.nn.relu,
                 name=None,
                 concat=False,
                 **kwargs):
        super(MeanAggregator, self).__init__(**kwargs)

        # print('========== init MeanAggregator =========')
        # import traceback
        # traceback.print_stack()

        self.dropout = dropout
        self.bias = bias
        self.act = act
        self.concat = concat

        if neigh_input_dim is None:
            neigh_input_dim = input_dim

        if name is not None:
            name = '/' + name
        else:
            name = ''

        with tf.variable_scope(self.name + name + '_vars'):
            self.vars['neigh_weights'] = glorot([neigh_input_dim, output_dim],
                                                name='neigh_weights')
            self.vars['self_weights'] = glorot([input_dim, output_dim],
                                               name='self_weights')

            # self.vars['alpha'] = glorot([9], name='alpha')  # added; surprisingly, the 9 is hard-coded!

            if self.bias:
                self.vars['bias'] = zeros([self.output_dim], name='bias')

        if self.logging:
            self._log_vars()

        self.input_dim = input_dim
        self.output_dim = output_dim
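
For reference, the neigh_weights and self_weights above are typically applied like this in a MeanAggregator's call method (a sketch; the tensor names are assumptions):

    # neigh_vecs: [batch, num_neighbors, neigh_input_dim]
    # self_vecs:  [batch, input_dim]
    neigh_means = tf.reduce_mean(neigh_vecs, axis=1)
    from_neighs = tf.matmul(neigh_means, self.vars['neigh_weights'])
    from_self = tf.matmul(self_vecs, self.vars['self_weights'])
    # Either summed or concatenated, depending on self.concat.
    output = self.act(tf.add_n([from_self, from_neighs]))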
Example #4
    def config_embeds(self, adj_info, features, embed_dim):
        zero_padding = tf.zeros([1, embed_dim], name="zero_padding")
        embeds = glorot([len(adj_info), embed_dim], name="node_embeddings")
        self.embeds = tf.concat([zero_padding, embeds], axis=0)
        if features is None:
            self.features = self.embeds
        else:
            self.features = tf.Variable(tf.constant(features,
                                                    dtype=tf.float32),
                                        trainable=False)
            self.features = tf.concat([self.embeds, self.features], axis=1)
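
Prepending the zero_padding row means index 0 can serve as a padding id for ragged neighbor lists. A usage sketch (the ids are made up):

    ids = tf.constant([[1, 4, 0],    # 0 marks padded slots
                       [2, 0, 0]])
    # Padded positions look up the zero_padding row of the embedding table.
    neigh_feats = tf.nn.embedding_lookup(self.features, ids)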
Example #5
    def __init__(self,
                 input_dim,
                 output_dim,
                 neigh_input_dim=None,
                 dropout=0.,
                 bias=False,
                 act=tf.nn.relu,
                 name=None,
                 concat=False,
                 **kwargs):
        super(CrossAggregator, self).__init__(**kwargs)

        self.dropout = dropout
        self.bias = bias
        self.act = act
        self.concat = concat
        if neigh_input_dim is None:
            neigh_input_dim = input_dim

        if name is not None:
            name = '/' + name
        else:
            name = ''

        # hard-coded values
        atten_dim = 64
        num_attr = 9  # Jiaqi says column 10 is initialized to zero; with 9, gather_nd would return zero values

        with tf.variable_scope(self.name + name + '_vars'):
            self.vars['neigh_weights'] = glorot([neigh_input_dim, output_dim],
                                                name='neigh_weights')
            self.vars['self_weights'] = glorot([input_dim, output_dim],
                                               name='self_weights')

            # self.vars['initial_weights'] = glorot([102, output_dim],
            #                                       name='initial_weights')
            self.vars['alpha'] = glorot([num_attr], name='alpha')
            self.vars['self_atten'] = glorot([input_dim, atten_dim],
                                             name='self_atten')
            self.vars['neigh_atten'] = glorot([neigh_input_dim, atten_dim],
                                              name='neigh_atten')
            self.vars['v'] = glorot([atten_dim, 1], name='v')

            if self.bias:
                self.vars['bias'] = zeros([self.output_dim], name='bias')

        if self.logging:
            self._log_vars()

        self.input_dim = input_dim
        self.output_dim = output_dim
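
The self_atten, neigh_atten, and v variables form a standard additive-attention scorer over neighbors. A sketch of how such variables are usually combined (this project's actual call method may differ; tensor names are assumptions):

    # self_vecs: [batch, input_dim]; neigh_vecs: [batch, n, neigh_input_dim]
    q = tf.matmul(self_vecs, self.vars['self_atten'])           # [batch, atten_dim]
    k = tf.tensordot(neigh_vecs, self.vars['neigh_atten'],
                     axes=[[2], [0]])                           # [batch, n, atten_dim]
    scores = tf.tensordot(tf.nn.tanh(k + tf.expand_dims(q, 1)),
                          self.vars['v'], axes=[[2], [0]])      # [batch, n, 1]
    atten = tf.nn.softmax(tf.squeeze(scores, axis=-1))          # [batch, n]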
Example #6
    def __init__(self,
                 input_dim,
                 output_dim,
                 neigh_input_dim=None,
                 dropout=0.,
                 bias=False,
                 act=tf.nn.relu,
                 name=None,
                 concat=False,
                 **kwargs):
        super(GCNAggregator, self).__init__(**kwargs)

        self.dropout = dropout
        self.bias = bias
        self.act = act
        self.concat = concat

        if neigh_input_dim is None:
            neigh_input_dim = input_dim

        if name is not None:
            name = '/' + name
        else:
            name = ''

        with tf.variable_scope(self.name + name + '_vars'):
            self.vars['weights'] = glorot([neigh_input_dim, output_dim],
                                          name='neigh_weights')

            # self.vars['alpha'] = glorot([9], name='alpha')  # added

            if self.bias:
                self.vars['bias'] = zeros([self.output_dim], name='bias')

        if self.logging:
            self._log_vars()

        self.input_dim = input_dim
        self.output_dim = output_dim
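
Unlike the other aggregators, GCNAggregator creates a single weight matrix shared by self and neighbor features, which implies the two are averaged together before the transform. A sketch (tensor names are assumptions):

    means = tf.reduce_mean(
        tf.concat([neigh_vecs, tf.expand_dims(self_vecs, axis=1)], axis=1),
        axis=1)
    output = self.act(tf.matmul(means, self.vars['weights']))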
Example #7
    def __init__(self,
                 input_dim,
                 output_dim,
                 neigh_input_dim=None,
                 dropout=0,
                 bias=False,
                 act=tf.nn.tanh,
                 name=None,
                 concat=False,
                 **kwargs):
        super(GeniePathAggregator, self).__init__(**kwargs)

        self.dropout = dropout
        self.bias = bias
        self.act = act
        self.concat = concat

        if neigh_input_dim is None:
            neigh_input_dim = input_dim

        if name is not None:
            name = '/' + name
        else:
            name = ''

        with tf.variable_scope(self.name + name + '_vars'):
            self.vars['W_s'] = glorot([input_dim, output_dim], name='W_s')
            self.vars['W_d'] = glorot([neigh_input_dim, output_dim],
                                      name='W_d')
            self.vars['v'] = glorot([output_dim, 1], name='v')

            self.vars['W_t'] = glorot([input_dim, output_dim], name='W_t')
            self.vars['b_t'] = glorot([1, output_dim], name='b_t')

            self.vars['W_i'] = glorot([input_dim, output_dim], name='W_i')
            self.vars['b_i'] = glorot([1, output_dim], name='b_i')
            self.vars['W_f'] = glorot([input_dim, output_dim], name='W_f')
            self.vars['b_f'] = glorot([1, output_dim], name='b_f')
            self.vars['W_o'] = glorot([input_dim, output_dim], name='W_o')
            self.vars['b_o'] = glorot([1, output_dim], name='b_o')
            self.vars['W_c'] = glorot([input_dim, output_dim], name='W_c')
            self.vars['b_c'] = glorot([1, output_dim], name='b_c')

        if self.logging:
            self._log_vars()

        self.input_dim = input_dim
        self.output_dim = output_dim
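
The W_i/W_f/W_o/W_c gate parameters mirror an LSTM cell, matching the depth-exploration update in the GeniePath paper (Liu et al., 2018). A sketch of the gated update these variables support (the project's actual call method may differ):

    def gated_update(h, c, vars_):
        # Standard LSTM-style gates over the aggregated message h.
        i = tf.nn.sigmoid(tf.matmul(h, vars_['W_i']) + vars_['b_i'])
        f = tf.nn.sigmoid(tf.matmul(h, vars_['W_f']) + vars_['b_f'])
        o = tf.nn.sigmoid(tf.matmul(h, vars_['W_o']) + vars_['b_o'])
        c_tilde = tf.nn.tanh(tf.matmul(h, vars_['W_c']) + vars_['b_c'])
        c = f * c + i * c_tilde
        return o * tf.nn.tanh(c), c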