Example #1
0
    def __init__(self, opt):
        """Assemble a static-graph DeepGCN from ``opt``: a graph-conv head,
        a residual or dense backbone, a fusion MLP and a prediction head.

        All hyperparameters (widths, activation, norm, block type, heads,
        dropout, class count) are read from the ``opt`` namespace.
        """
        super(DeepGCN, self).__init__()
        width = opt.n_filters
        activation = opt.act
        normalization = opt.norm
        use_bias = opt.bias
        conv_type = opt.conv
        num_heads = opt.n_heads
        self.n_blocks = opt.n_blocks
        growth = 0  # per-block width growth; non-zero only for dense blocks

        # Input feature transform.
        self.head = GraphConv(opt.in_channels, width, conv_type, activation,
                              normalization, use_bias, num_heads)

        block_kind = opt.block.lower()
        if block_kind == 'dense':
            growth = width
            self.backbone = MultiSeq(*[
                DenseGraphBlock(width + i * growth, growth, conv_type,
                                activation, normalization, use_bias, num_heads)
                for i in range(self.n_blocks - 1)])
        else:
            # Residual scaling is 1 for 'res' blocks, 0 for plain stacking.
            scale = 1 if block_kind == 'res' else 0
            self.backbone = MultiSeq(*[
                ResGraphBlock(width, conv_type, activation, normalization,
                              use_bias, num_heads, scale)
                for _ in range(self.n_blocks - 1)])

        # Total width of all concatenated block outputs; the growth term is
        # the arithmetic-series sum for the dense configuration.
        fusion_dims = int(width * self.n_blocks +
                          growth * ((1 + self.n_blocks - 1) * (self.n_blocks - 1) / 2))
        self.fusion_block = MLP([fusion_dims, 1024], activation, None, use_bias)
        # NOTE(review): the prediction head consumes 1 + fusion_dims features
        # although the fusion block outputs 1024 — confirm against forward().
        self.prediction = Seq(*[
            MLP([1 + fusion_dims, 512], activation, normalization, use_bias),
            torch.nn.Dropout(p=opt.dropout),
            MLP([512, 256], activation, normalization, use_bias),
            torch.nn.Dropout(p=opt.dropout),
            MLP([256, opt.n_classes], None, None, use_bias)])
        self.model_init()
Example #2
0
    def __init__(self, opt):
        """Assemble a dynamic-graph DeepGCN from ``opt``: a graph-conv head,
        a stack of res/dense dynamic-kNN blocks, a fusion MLP and a
        prediction head.

        Raises:
            NotImplementedError: if ``opt.block`` is neither 'res' nor 'dense'.
        """
        super(DynDeepGCN, self).__init__()
        width = opt.n_filters
        kernel = opt.kernel_size
        activation = opt.act
        normalization = opt.norm
        use_knn = opt.knn
        use_bias = opt.bias
        eps = opt.epsilon
        # TODO: stochastic — not wired to opt yet, always disabled here.
        stochastic = False
        conv_type = opt.conv
        growth = width

        self.n_blocks = opt.n_blocks
        self.head = GraphConv(opt.in_channels, width, conv_type, activation,
                              normalization, use_bias)

        # Both block types take the same arguments here, so pick the class
        # once and build the backbone with a single comprehension.
        block_kind = opt.block.lower()
        if block_kind == 'res':
            block_cls = ResDynBlock
        elif block_kind == 'dense':
            block_cls = DenseDynBlock
        else:
            raise NotImplementedError(
                '{} is not implemented. Please check.\n'.format(opt.block))
        # Dilation cycles through 1..8 as depth increases (i % 8 + 1).
        self.backbone = MultiSeq(*[
            block_cls(width,
                      kernel,
                      i % 8 + 1,
                      conv_type,
                      activation,
                      normalization,
                      use_bias,
                      stochastic=stochastic,
                      epsilon=eps,
                      knn=use_knn) for i in range(self.n_blocks - 1)
        ])

        # Width of the concatenated head + backbone outputs.
        backbone_dims = width + growth * (self.n_blocks - 1)
        self.fusion_block = MLP([backbone_dims, 1024], activation, None, use_bias)
        # NOTE(review): the prediction head consumes 1 + backbone_dims features
        # although the fusion block outputs 1024 — confirm against forward().
        self.prediction = MultiSeq(*[
            MLP([1 + backbone_dims, 512, 256], activation, None, use_bias),
            MLP([256, opt.n_classes], None, None, use_bias)
        ])

        self.model_init()
Example #3
0
    def __init__(self, opt):
        """Assemble a sparse DeepGCN from ``opt``: a dilated-kNN graph
        builder, a graph-conv head, a res/dense dynamic backbone, a fusion
        MLP and a prediction head.

        Raises:
            NotImplementedError: if ``opt.block`` is neither 'res' nor 'dense'.
        """
        super(SparseDeepGCN, self).__init__()
        width = opt.n_filters
        kernel = opt.kernel_size
        activation = opt.act
        normalization = opt.norm
        use_bias = opt.bias
        eps = opt.epsilon
        stochastic = opt.stochastic
        conv_type = opt.conv
        growth = width

        self.n_blocks = opt.n_blocks

        # Graph construction for the head (dilation fixed at 1).
        self.knn = DilatedKnnGraph(kernel, 1, stochastic, eps)
        self.head = GraphConv(opt.in_channels, width, conv_type, activation,
                              normalization, use_bias)

        # Both block types take the same arguments here, so pick the class
        # once and build the backbone with a single comprehension.
        block_kind = opt.block.lower()
        if block_kind == 'res':
            block_cls = ResDynBlock
        elif block_kind == 'dense':
            block_cls = DenseDynBlock
        else:
            raise NotImplementedError(
                '{} is not implemented. Please check.\n'.format(opt.block))
        # Dilation grows linearly with depth: block i uses dilation 1 + i.
        self.backbone = MultiSeq(*[
            block_cls(width,
                      kernel,
                      1 + i,
                      conv_type,
                      activation,
                      normalization,
                      use_bias,
                      stochastic=stochastic,
                      epsilon=eps) for i in range(self.n_blocks - 1)
        ])

        # Width of the concatenated head + backbone outputs.
        backbone_dims = width + growth * (self.n_blocks - 1)
        self.fusion_block = MLP([backbone_dims, 1024], activation,
                                normalization, use_bias)
        self.prediction = MultiSeq(*[
            MLP([backbone_dims + 1024, 512], activation, normalization, use_bias),
            MLP([512, 256], activation, normalization, use_bias),
            torch.nn.Dropout(p=opt.dropout),
            MLP([256, opt.n_classes], None, None, use_bias)
        ])
        self.model_init()
Example #4
0
    def __init__(self, opt):
        """Assemble a sparse DeepGCN from ``opt``: a dilated-kNN graph
        builder, a graph-conv head, a res/dense/plain dynamic backbone, a
        fusion MLP and a prediction head.

        Any ``opt.block`` value other than 'res'/'dense' falls back to a
        plain (no-skip, undilated) backbone.
        """
        super(SparseDeepGCN, self).__init__()
        width = opt.n_filters
        kernel = opt.k
        activation = opt.act
        normalization = opt.norm
        use_bias = opt.bias
        eps = opt.epsilon
        stochastic = opt.stochastic
        conv_type = opt.conv
        growth = width

        self.n_blocks = opt.n_blocks

        # Graph construction for the head (dilation fixed at 1).
        self.knn = DilatedKnnGraph(kernel, 1, stochastic, eps)
        self.head = GraphConv(opt.in_channels, width, conv_type, activation,
                              normalization, use_bias)

        block_kind = opt.block.lower()
        if block_kind == 'res':
            # Constant width; dilation grows linearly with depth (1 + i).
            self.backbone = MultiSeq(*[
                ResDynBlock(width,
                            kernel,
                            1 + i,
                            conv_type,
                            activation,
                            normalization,
                            use_bias,
                            stochastic=stochastic,
                            epsilon=eps) for i in range(self.n_blocks - 1)
            ])
            fusion_dims = int(width + growth * (self.n_blocks - 1))
        elif block_kind == 'dense':
            # Input width grows by ``growth`` per block; dilation is 1 + i.
            self.backbone = MultiSeq(*[
                DenseDynBlock(width + growth * i,
                              growth,
                              kernel,
                              1 + i,
                              conv_type,
                              activation,
                              normalization,
                              use_bias,
                              stochastic=stochastic,
                              epsilon=eps)
                for i in range(self.n_blocks - 1)
            ])
            # Arithmetic-series sum of the dense per-block widths.
            fusion_dims = int((width + width + growth *
                               (self.n_blocks - 1)) * self.n_blocks // 2)
        else:
            # Use PlainGCN without skip connection and dilated convolution.
            stochastic = False
            self.backbone = MultiSeq(*[
                PlainDynBlock(width,
                              kernel,
                              1,
                              conv_type,
                              activation,
                              normalization,
                              use_bias,
                              stochastic=stochastic,
                              epsilon=eps)
                for _ in range(self.n_blocks - 1)
            ])
            fusion_dims = int(width + growth * (self.n_blocks - 1))

        self.fusion_block = MLP([fusion_dims, 1024], activation,
                                normalization, use_bias)
        self.prediction = MultiSeq(*[
            MLP([fusion_dims + 1024, 512], activation, normalization, use_bias),
            MLP([512, 256], activation, normalization, use_bias, drop=opt.dropout),
            MLP([256, opt.n_classes], None, None, use_bias)
        ])
        self.model_init()