Example #1
    def __init__(self, input_dims, output_dim, gate_layer=None, combination_type='concat',
                 dropout=0., bias=False, act=tf.nn.relu, name=None, **kwargs):
        super(ConcatAggregator, self).__init__(**kwargs)

        assert combination_type in ('concat', 'add', 'elem-mul'), \
            'Unknown combination type: ' + combination_type
        self.combination_type = combination_type
        self.dropout = dropout
        self.bias = bias
        self.act = act

        self.input_dims = input_dims
        # 'concat' doubles the reported output dimension (two output_dim-sized halves).
        self.output_dim = 2 * output_dim if combination_type == 'concat' else output_dim

        if name is not None:
            name = '/' + name
        else:
            name = ''

        # Build a new gate layer unless a shared one was passed in.
        if gate_layer is None:
            self.gate_layer = Gate(input_dims, name=(self.name + name + '_gate'))
        else:
            self.gate_layer = gate_layer

        with tf.variable_scope(self.name + name + '_vars'):
            # self_weights transforms the node's own features; neigh_weights expects
            # an input of width input_dims[1] + input_dims[2].
            self.vars['self_weights'] = glorot((input_dims[0], output_dim),
                                               name='self_weights')
            self.vars['neigh_weights'] = glorot((input_dims[1] + input_dims[2], output_dim),
                                                name='neigh_weights')
            if self.bias:
                self.vars['bias'] = zeros([self.output_dim], name='bias')

        if self.logging:
            self._log_vars()
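
Both aggregator examples rely on `glorot` and `zeros` initialisation helpers from the surrounding codebase, which are not shown in these excerpts. Below is a minimal sketch of what such helpers typically look like in this TensorFlow 1.x style of code; the exact implementations in the source repository may differ.

import numpy as np
import tensorflow as tf

def glorot(shape, name=None):
    # Glorot/Xavier uniform initialisation; a common definition, assumed here.
    init_range = np.sqrt(6.0 / (shape[0] + shape[-1]))
    initial = tf.random_uniform(shape, minval=-init_range, maxval=init_range,
                                dtype=tf.float32)
    return tf.Variable(initial, name=name)

def zeros(shape, name=None):
    # All-zeros variable, used above for the bias terms.
    initial = tf.zeros(shape, dtype=tf.float32)
    return tf.Variable(initial, name=name)
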
Example #2
    def __init__(self, input_dims, output_dim, gate_layer=None,
                 dropout=0., bias=False, act=tf.nn.relu, name=None, **kwargs):
        super(RandWalkAggregator, self).__init__(**kwargs)

        self.dropout = dropout
        self.bias = bias
        self.act = act

        self.input_dims = input_dims
        self.output_dim = output_dim

        if name is not None:
            name = '/' + name
        else:
            name = ''

        if gate_layer is None:
            self.gate_layer = Gate(input_dims, name=(self.name + name + '_gate'))
        else:
            self.gate_layer = gate_layer

        with tf.variable_scope(self.name + name + '_vars'):
            # Self and link features are both projected to width input_dims[2];
            # vectors of that width are then mapped to output_dim by neigh_weights.
            self.vars['self_weights'] = glorot((input_dims[0], input_dims[2]),
                                               name='self_weights')
            self.vars['neigh_weights'] = glorot((input_dims[2], output_dim),
                                                name='neigh_weights')
            self.vars['link_weights'] = glorot((input_dims[3], input_dims[2]),
                                               name='link_weights')
            if self.bias:
                self.vars['bias'] = zeros([self.output_dim], name='bias')

        if self.logging:
            self._log_vars()
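
For orientation, the sketch below traces how the four entries of input_dims map onto the weight shapes created in RandWalkAggregator.__init__. The concrete sizes, and the meaning attached to each slot, are made up for illustration only.

# Hypothetical sizes; the role of each slot is inferred from the shapes above.
input_dims = (128, 128, 64, 32)
output_dim = 64

shapes = {
    'self_weights':  (input_dims[0], input_dims[2]),   # (128, 64)
    'neigh_weights': (input_dims[2], output_dim),      # (64, 64)
    'link_weights':  (input_dims[3], input_dims[2]),   # (32, 64)
}
print(shapes)
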
Example #3
    def __init__(self, input_dim, output_dim, dropout=0., 
                 act=None, placeholders=None, bias=True, featureless=False,
                 sparse_inputs=False, **kwargs):
        super(Dense, self).__init__(**kwargs)

        self.dropout = dropout

        self.act = act
        self.featureless = featureless
        self.bias = bias
        self.input_dim = input_dim
        self.output_dim = output_dim

        # helper variable for sparse dropout
        self.sparse_inputs = sparse_inputs
        if sparse_inputs:
            self.num_features_nonzero = placeholders['num_features_nonzero']

        with tf.variable_scope(self.name + '_vars'):
            # Xavier-initialised weights with L2 weight decay taken from FLAGS.weight_decay.
            self.vars['weights'] = tf.get_variable(
                'weights', shape=(input_dim, output_dim), dtype=tf.float32,
                initializer=tf.contrib.layers.xavier_initializer(),
                regularizer=tf.contrib.layers.l2_regularizer(FLAGS.weight_decay))
            if self.bias:
                self.vars['bias'] = zeros([output_dim], name='bias')

        if self.logging:
            self._log_vars()
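
Dense.__init__ expects a FLAGS.weight_decay flag and, when sparse_inputs=True, a num_features_nonzero entry in the placeholders dict. A minimal caller-side sketch of that setup under TensorFlow 1.x follows; the flag's default value and the exact placeholder shape are illustrative assumptions, not taken from the source.

import tensorflow as tf

flags = tf.app.flags
flags.DEFINE_float('weight_decay', 5e-4, 'L2 penalty applied to the dense weights.')
FLAGS = flags.FLAGS

placeholders = {
    # Count of non-zero entries in the sparse feature matrix, used by sparse dropout.
    'num_features_nonzero': tf.placeholder(tf.int32),
}
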
Example #4
    def __init__(self, dims, bias=False, act=tf.nn.sigmoid, name=None, **kwargs):

        super(Gate, self).__init__(**kwargs)

        self.bias = bias
        self.act = act

        if name is not None:
            name = '/' + name
        else:
            name = ''

        with tf.variable_scope(self.name + name + '_vars'):
            # One 1-D weight vector per input block (self, neighbour, link).
            self.vars['self_weights'] = glorot((dims[0],), name='self_weights')
            self.vars['neigh_weights'] = glorot((dims[1],), name='neigh_weights')
            self.vars['link_weights'] = glorot((dims[2],), name='link_weights')
            if self.bias:
                self.vars['bias'] = zeros(1, name='bias')

        if self.logging:
            self._log_vars()

        self.input_dims = dims
        self.output_dim = None
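
The Gate variables are one-dimensional weight vectors plus a scalar bias, which suggests a single gating score per example, but the layer's forward pass is not part of these excerpts. The sketch below (with the hypothetical name gate_score_sketch) is one plausible reading of those shapes, offered purely as an illustration rather than the repository's actual implementation.

import tensorflow as tf

def gate_score_sketch(gate_vars, self_vecs, neigh_vecs, link_vecs, act=tf.nn.sigmoid):
    # Weighted sums of each feature block, collapsed to one score per row.
    score = (tf.reduce_sum(self_vecs * gate_vars['self_weights'], axis=-1)
             + tf.reduce_sum(neigh_vecs * gate_vars['neigh_weights'], axis=-1)
             + tf.reduce_sum(link_vecs * gate_vars['link_weights'], axis=-1))
    if 'bias' in gate_vars:
        score += gate_vars['bias']
    return act(score)  # gate value in (0, 1) per example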