Example no. 1
    def __init__(self, n_layers, x_names, input_dim, output_dim, dropout, bias,
                 act=lambda x: x, **kwargs):
        """Fusion layer with matrix gating and triple-vector gating.

        Args:
            n_layers: number of hidden representations to fuse; one extra
                projection matrix is allocated (``n_layers + 1`` in total).
            x_names: pair ``(node_feature_names, neighbor_feature_names)``.
            input_dim: dimensionality of each incoming representation.
            output_dim: dimensionality of the fused output.
            dropout: dropout setting stored for the call path (not used here).
            bias: whether a bias term is used downstream (stored only).
            act: activation applied to the fused output; identity by default.
        """
        super(Fusion, self).__init__(**kwargs)

        self.n_layers = n_layers + 1
        self.input_dim = input_dim
        self.output_dim = output_dim
        self.act = act
        self.dropout = dropout
        self.node_features = x_names[0]
        self.neighbor_features = x_names[1]
        self.bias = bias

        # Internal fusion dimensionality. The original code first assigned
        # output_dim here and immediately overwrote it with input_dim; the
        # dead assignment is removed — the effective value is input_dim.
        self.fusion_dim = self.input_dim

        # Index of the first hidden representation to fuse.
        self.start_h = 0

        # One glorot-initialized projection per fused layer.
        for i in range(self.start_h, self.n_layers):
            self.vars['weights_' + str(i)] = glorot(
                (self.input_dim, self.fusion_dim), name='weights_' + str(i))
        self.vars['weights_final'] = identity(
            (self.fusion_dim, self.fusion_dim), name='weights_final')

        # Matrix gating.
        self.vars['weights_A'] = identity(
            (self.fusion_dim, self.fusion_dim), name='weights_A')

        # Triple-vector gating: two projections plus an attention vector.
        gate_dim = self.fusion_dim
        self.vars['weights_C'] = identity(
            (self.fusion_dim, gate_dim), name='weights_C')
        self.vars['weights_D'] = identity(
            (self.fusion_dim, gate_dim), name='weights_D')
        self.vars['weights_V'] = tanh_init((1, gate_dim), name='weights_V')

        # Final projection down to the requested output dimensionality.
        self.vars['weights'] = glorot(
            (self.fusion_dim, self.output_dim), name='weights_final2')
Example no. 2
    def __init__(self,
                 n_layers,
                 x_names,
                 input_dim,
                 output_dim,
                 dropout,
                 bias,
                 act=lambda x: x,
                 **kwargs):
        """Set up per-layer fusion weight matrices.

        ``x_names`` is a ``(node_features, neighbor_features)`` pair. One
        glorot matrix of shape ``(input_dim, fusion_dim)`` is created for
        each hidden representation from ``start_h`` up to ``n_layers``.
        """
        super(Fusion, self).__init__(**kwargs)

        self.n_layers = n_layers + 1
        self.input_dim = input_dim
        self.output_dim = output_dim
        self.act = act
        self.dropout = dropout
        self.node_features = x_names[0]
        self.neighbor_features = x_names[1]
        self.bias = bias

        # Fused representations live in output_dim-sized space.
        self.fusion_dim = self.output_dim

        # Skip the raw-feature slot when there are no node features and the
        # model is not a spectral one.
        # NOTE(review): self.m_name is presumably set by the parent class's
        # __init__ — confirm before relying on it elsewhere.
        self.start_h = 0
        if len(self.node_features) == 0 and self.m_name not in [
                'krylov', 'cheby'
        ]:
            self.start_h += 1

        for layer in range(self.start_h, self.n_layers):
            key = 'weights_' + str(layer)
            self.vars[key] = glorot((self.input_dim, self.fusion_dim),
                                    name=key)
Example no. 3
    def __init__(self, n_layers, x_names, input_dim, output_dim, dropout, bias,
                 act=lambda x: x, **kwargs):
        """Allocate one glorot matrix per fused layer plus a final projection
        from ``fusion_dim`` to ``output_dim``."""
        super(Fusion, self).__init__(**kwargs)
        self.n_layers = n_layers + 1
        self.input_dim = input_dim
        self.output_dim = output_dim
        self.act = act
        self.dropout = dropout
        self.node_features = x_names[0]
        self.neighbor_features = x_names[1]
        self.bias = bias

        # Fusion happens in input_dim-sized space, starting at layer 0.
        self.start_h = 0
        self.fusion_dim = self.input_dim

        for layer in range(self.start_h, self.n_layers):
            key = 'weights_' + str(layer)
            self.vars[key] = glorot((self.input_dim, self.fusion_dim),
                                    name=key)

        # Final projection down to the requested output size.
        self.vars['weights'] = glorot((self.fusion_dim, self.output_dim),
                                      name='weights_final')
Example no. 4
    def __init__(self, n_layers, x_names, input_dim, output_dim, dropout, bias,
                 act=lambda x: x, **kwargs):
        """Bi-LSTM fusion: one forward and one backward LSTM cell plus an
        attention vector over the concatenation of both directions.

        NOTE(review): ``tf.nn.rnn_cell`` is the TF1-era cell API; if the
        project migrates to TF2 this needs ``tf.keras.layers``.
        """
        super(Fusion, self).__init__(**kwargs)

        self.n_layers = n_layers + 1
        self.input_dim = input_dim
        self.output_dim = output_dim
        self.act = act
        self.dropout = dropout
        self.node_features = x_names[0]
        self.neighbor_features = x_names[1]
        self.bias = bias

        # Hidden size of each LSTM direction (equal to the input size).
        self.fusion_dim = self.input_dim

        self.start_h = 0

        # Forward / backward cells for the bidirectional pass.
        def _make_cell():
            return tf.nn.rnn_cell.LSTMCell(self.fusion_dim, forget_bias=1.0)

        self.lstm_fw_cell = _make_cell()
        self.lstm_bw_cell = _make_cell()

        # Attention over the concatenated (2 * fusion_dim) directions, then
        # the final projection down to output_dim.
        doubled = self.fusion_dim * 2
        self.vars['weights_att'] = glorot((doubled, 1), name='weights_A')
        self.vars['weights'] = glorot((doubled, self.output_dim),
                                      name='weights_final')
Example no. 5
    def __init__(self,
                 layer_id,
                 x_names,
                 dims,
                 dropout,
                 act=tf.nn.relu,
                 nnz_features=None,
                 sparse_inputs=False,
                 bias=False,
                 shared_weights=True,
                 skip_connection=False,
                 add_labels=False,
                 weights=True,
                 **kwargs):
        """Build the per-feature weight matrices for one kernel layer.

        Args:
            layer_id: index of this layer; selects entries of ``dims`` for
                the input ('h') and output dimensionalities.
            x_names: pair ``(node_feature_keys, neighbor_feature_keys)``;
                keys index into ``input_dims`` (seen here: 'x', 'h', 'l').
            dims: dimensionality list — ``dims[0]`` raw-feature size,
                ``dims[layer_id]`` hidden size, ``dims[-1]`` label size,
                ``dims[layer_id + 1]`` this layer's output size.
            dropout: dropout setting stored for the call path.
            act: activation function, ReLU by default.
            nnz_features: stored only; presumably the non-zero count for
                sparse dropout — confirm against the call path.
            sparse_inputs: whether inputs arrive as sparse tensors (stored).
            bias: if True, allocate a bias variable of size ``output_dim``.
            shared_weights: if True, node and neighbor features share one
                weight table; otherwise neighbors get a separate scope.
            skip_connection: stored (under a misspelled attribute — see note).
            add_labels: if True, append the label feature 'l' to the
                neighbor features.
            weights: if False, no weight matrices are created at all.
        """
        super(Kernels_new, self).__init__(**kwargs)

        self.layer_id = layer_id
        self.act = act
        self.dropout = dropout
        # NOTE(review): attribute name is misspelled ("connetion") — kept
        # as-is because sibling methods may read it under this exact name.
        self.skip_connetion = skip_connection
        self.add_labels = add_labels
        self.nnz_features = nnz_features
        self.sparse_inputs = sparse_inputs

        self.weights = weights
        self.bias = bias
        self.shared_weights = shared_weights
        # Populated later by the call path; None until then.
        self.node = None
        self.neighbor = None
        self.g0 = None
        self.g1 = None

        # NOTE(review): "feautures" is a typo, kept for compatibility with
        # the rest of the class.
        self.node_feautures = x_names[0]
        self.neighbor_features = x_names[1]
        if self.add_labels:
            # Labels become an extra neighbor feature keyed 'l'.
            if len(self.neighbor_features) == 0:
                self.neighbor_features = ['l']
            else:
                self.neighbor_features.append('l')

        # Weights initialization: per-feature input dimensionalities.
        self.input_dims = {}
        # Initially 'x' and 'h' save same input. The first layer uses the
        # raw-feature size for 'x' unless m_name is empty.
        # NOTE(review): self.m_name is presumably set by the parent class's
        # __init__ — confirm.
        if self.layer_id == 0 and self.m_name != '':
            self.input_dims['x'] = dims[0]
        else:
            self.input_dims['x'] = dims[1]

        self.input_dims['h'] = dims[layer_id]
        self.input_dims['l'] = dims[-1]
        self.output_dim = dims[layer_id + 1]

        self.node_dims = 0
        self.neigh_dims = 0

        if self.bias:
            self.vars['bias'] = const([self.output_dim])

        # Compute total dimensions by summing the active feature sizes.
        for key in self.node_feautures:
            self.node_dims += self.input_dims[key]
        for key in self.neighbor_features:
            self.neigh_dims += self.input_dims[key]

        self.weights_node = {}
        self.bias_node = None
        self.weights_neigh = {}
        self.bias_neigh = None

        if weights:
            if not shared_weights:
                # Neigh weights: a dedicated variable scope keeps neighbor
                # weights under distinct TF variable names.
                with tf.variable_scope(self.name + "_neighbor_vars"):
                    keys = self.neighbor_features
                    for key in keys:
                        self.weights_neigh[key] = glorot(
                            (self.input_dims[key], self.output_dim),
                            name=key + 'weights')

            # Node weights
            with tf.variable_scope(self.name + "_node_vars"):
                if shared_weights:
                    # Union of node and neighbor keys, node keys first, so
                    # shared features map to a single weight matrix.
                    keys = self.node_feautures + list(
                        set(self.neighbor_features) - set(self.node_feautures))
                else:
                    keys = self.node_feautures
                for key in keys:
                    self.weights_node[key] = glorot(
                        (self.input_dims[key], self.output_dim),
                        name=key + 'weights')