# Example 1
 def __init__(self, in_features, out_features, nbaselayer,
              withbn=False, withloop=False, withgn=False, withnn=False, withse=False, supweight=False,
              activation=F.relu, dropout=True, aggrmethod=None, dense=None):
     """
     The multiple layer GCN with residual connection block.
     :param in_features: the input feature dimension.
     :param out_features: the hidden feature dimension.
     :param nbaselayer: the number of layers in the base block.
     :param withbn: using batch normalization in graph convolution.
     :param withloop: using self feature modeling in graph convolution.
     :param activation: the activation function, default is ReLu.
     :param dropout: the dropout ratio.
     :param aggrmethod: not applied.
     :param dense: not applied.
     """
     super(ResGCNBlock, self).__init__()
     # A residual block always aggregates with "add" and is never dense;
     # the aggrmethod/dense arguments are accepted but ignored.
     base_config = dict(
         in_features=in_features,
         out_features=out_features,
         nbaselayer=nbaselayer,
         withbn=withbn,
         withloop=withloop,
         withgn=withgn,
         withnn=withnn,
         withse=withse,
         supweight=supweight,
         activation=activation,
         dropout=dropout,
         dense=False,
         aggrmethod="add",
     )
     self.model = GraphBaseBlock(**base_config)
# Example 2
 def __init__(self, in_features, out_features, nbaselayer,
              withbn=False, withloop=False, withgn=False, withnn=False, withse=False, supweight=False,
              activation=F.relu, dropout=True, aggrmethod="concat", dense=True):
     """
     The multiple layer GCN with dense connection block.
     :param in_features: the input feature dimension.
     :param out_features: the hidden feature dimension.
     :param nbaselayer: the number of layers in the base block.
     :param withbn: using batch normalization in graph convolution.
     :param withloop: using self feature modeling in graph convolution.
     :param activation: the activation function, default is ReLu.
     :param dropout: the dropout ratio.
     :param aggrmethod: the aggregation function for the output. For denseblock, default is "concat".
     :param dense: default is True, cannot be changed.
     """
     super(DenseGCNBlock, self).__init__()
     # A dense block always passes dense=True; the caller's aggrmethod is
     # forwarded so "concat" (the default) or another mode can be chosen.
     base_config = dict(
         in_features=in_features,
         out_features=out_features,
         nbaselayer=nbaselayer,
         withbn=withbn,
         withloop=withloop,
         withgn=withgn,
         withnn=withnn,
         withse=withse,
         supweight=supweight,
         activation=activation,
         dropout=dropout,
         dense=True,
         aggrmethod=aggrmethod,
     )
     self.model = GraphBaseBlock(**base_config)
# Example 3
class HebbGCNBlock(Module):
    """
    The multiple layer GCN with residual connection block (Hebbian variant).

    Wraps a GraphBaseBlock configured with ishebb=True, dense=False and
    aggrmethod="nores".
    """

    def __init__(self, in_features, out_features, nbaselayer,
                 withbn=False, withloop=False, withgn=False, withnn=False, withse=False, supweight=False, 
                 activation=F.relu, dropout=True, aggrmethod=None, dense=None):
        """
        The multiple layer GCN with residual connection block.
        :param in_features: the input feature dimension.
        :param out_features: the hidden feature dimension.
        :param nbaselayer: the number of layers in the base block.
        :param withbn: using batch normalization in graph convolution.
        :param withloop: using self feature modeling in graph convolution.
        :param activation: the activation function, default is ReLu.
        :param dropout: the dropout ratio.
        :param aggrmethod: not applied.
        :param dense: not applied.
        """
        super(HebbGCNBlock, self).__init__()
        # BUGFIX: __repr__ reads self.aggrmethod, which was never assigned,
        # so repr(instance) raised AttributeError. Record the aggregation
        # mode actually used by the underlying block.
        self.aggrmethod = "nores"
        self.model = GraphBaseBlock(in_features=in_features,
                                    out_features=out_features,
                                    nbaselayer=nbaselayer,
                                    withbn=withbn,
                                    withloop=withloop,
                                    withgn=withgn,
                                    withnn=withnn,
                                    withse=withse,
                                    supweight=supweight,
                                    activation=activation,
                                    dropout=dropout,
                                    dense=False,
                                    ishebb=True, 
                                    aggrmethod="nores")

    def forward(self, input, adj):
        """Run the wrapped GraphBaseBlock on (input, adj) and return its output."""
        return self.model.forward(input, adj)

    def get_outdim(self):
        """Return the output feature dimension of the wrapped block."""
        return self.model.get_outdim()

    def __repr__(self):
        # Summary of the shape pipeline: in - [hidden:layers] > out.
        return "%s %s (%d - [%d:%d] > %d)" % (self.__class__.__name__,
                                              self.aggrmethod,
                                              self.model.in_features,
                                              self.model.hiddendim,
                                              self.model.nhiddenlayer,
                                              self.model.out_features)