Example #1
    def __init__(self,
                 dim_in,
                 dim_out,
                 weight_decay,
                 dropout=0.,
                 act=lambda x: x,
                 bias=True,
                 model_pretrain=None,
                 **kwargs):
        """
        model_pretrain is not None if you want to load the trained model
        model_pretrain[0] is weights
        model_pretrain[1] is bias
        """
        super(Dense, self).__init__(**kwargs)
        self.dropout = dropout
        # `act` may be a string key into F_ACT or a callable (the default
        # identity lambda); support both so the default does not KeyError.
        self.act = F_ACT[act] if isinstance(act, str) else act
        self.bias = bias
        self.dim_in = dim_in
        self.dim_out = dim_out
        self.weight_decay = weight_decay

        with tf.variable_scope(self.name + '_vars'):
            if model_pretrain is None:
                # Fresh Xavier-initialized weights with L2 weight decay.
                self.vars['weights'] = tf.get_variable(
                    'weights',
                    shape=(dim_in, dim_out),
                    dtype=DTYPE,
                    initializer=tf.contrib.layers.xavier_initializer(),
                    regularizer=tf.contrib.layers.l2_regularizer(
                        self.weight_decay))
                if self.bias:
                    self.vars['bias'] = zeros([dim_out], name='bias')
            else:
                # Restore the weight matrix (and bias below) from the
                # pretrained arrays.
                self.vars['weights'] = trained(model_pretrain[0],
                                               name='weights')
                if self.bias:
                    self.vars['bias'] = trained(model_pretrain[1], name='bias')
        if self.logging:
            self._log_vars()
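
Both examples rely on small helpers (zeros, ones, glorot, trained), a DTYPE constant, and an activation table F_ACT defined elsewhere in the repository. As a point of reference, here is a minimal sketch of what these typically look like in GCN-style TensorFlow 1.x codebases; the actual definitions in the repo may differ:

import numpy as np
import tensorflow as tf

DTYPE = tf.float32

# Map activation names to functions; 'I' stands for identity.
F_ACT = {'I': lambda x: x, 'relu': tf.nn.relu}

def zeros(shape, name=None):
    """All-zero variable, typically used for biases."""
    return tf.Variable(tf.zeros(shape, dtype=DTYPE), name=name)

def ones(shape, name=None):
    """All-one variable, typically used for normalization scales."""
    return tf.Variable(tf.ones(shape, dtype=DTYPE), name=name)

def glorot(shape, name=None):
    """Glorot/Xavier uniform initialization."""
    limit = np.sqrt(6.0 / (shape[0] + shape[1]))
    init = tf.random_uniform(shape, minval=-limit, maxval=limit, dtype=DTYPE)
    return tf.Variable(init, name=name)

def trained(array, name=None):
    """Wrap a pretrained numpy array as a TF variable."""
    return tf.Variable(tf.constant(array, dtype=DTYPE), name=name)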
Example #2
    def __init__(self,
                 dim_in,
                 dim_out,
                 dropout=0.,
                 act='relu',
                 order=1,
                 aggr='mean',
                 model_pretrain=None,
                 is_train=True,
                 bias='norm',
                 **kwargs):
        super(HighOrderAggregator, self).__init__(**kwargs)
        self.dropout = dropout
        self.bias = bias
        self.act = F_ACT[act]
        self.order = order
        self.aggr = aggr
        self.is_train = is_train
        if dim_out > 0:
            with tf.variable_scope(self.name + '_vars'):
                if model_pretrain is None:
                    # One Glorot-initialized weight matrix per propagation
                    # order (order 0 acts on the node's own features).
                    for o in range(self.order + 1):
                        _k = 'order{}_weights'.format(o)
                        self.vars[_k] = glorot([dim_in, dim_out], name=_k)
                else:
                    for o in range(self.order + 1):
                        _k = 'order{}_weights'.format(o)
                        self.vars[_k] = trained(model_pretrain[0], name=_k)
                # Biases are always freshly zero-initialized, even when
                # pretrained weights are loaded above.
                for o in range(self.order + 1):
                    _k = 'order{}_bias'.format(o)
                    self.vars[_k] = zeros([dim_out], name=_k)
                if self.bias == 'norm':
                    # Per-order offset/scale pairs, used for a layer-norm
                    # style normalization of each order's output.
                    for o in range(self.order + 1):
                        _k1 = 'order{}_offset'.format(o)
                        _k2 = 'order{}_scale'.format(o)
                        self.vars[_k1] = zeros([1, dim_out], name=_k1)
                        self.vars[_k2] = ones([1, dim_out], name=_k2)
        print('>> layer {}, dim: [{},{}]'.format(self.name, dim_in, dim_out))
        if self.logging:
            self._log_vars()

        self.dim_in = dim_in
        self.dim_out = dim_out
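
The 'norm' bias mode above allocates a per-order offset and scale vector, which suggests a layer-norm style transform in the forward pass (not shown in this snippet). A plausible sketch of how those variables would be applied, assuming TensorFlow 1.x; the function name _layer_norm is hypothetical:

def _layer_norm(x, offset, scale, eps=1e-9):
    # Normalize each row over the feature dimension to zero mean and
    # unit variance, then apply the learned scale and offset.
    mean, variance = tf.nn.moments(x, axes=[1], keep_dims=True)
    return scale * (x - mean) / tf.sqrt(variance + eps) + offset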