コード例 #1
0
ファイル: wavenet.py プロジェクト: GuideWsp/scalenet
 def __init__(self, indepth, branch=1, active='relu', nclass=1000, method='split'):
     """Build the final classification head(s) over `branch` feature branches.

     Args:
         indepth: per-branch channel counts; len(indepth) must equal `branch`.
         branch: number of incoming feature branches.
         active: activation name handed to Activate().
         nclass: number of output classes.
         method: 'split' -> one independent classifier per branch;
                 'merge' -> pool each branch, then one shared Linear over the
                 concatenated features.
     """
     super(SummaryBlock, self).__init__()
     assert len(indepth) == branch, '各分类分支的通道数必须全部给定,so, len of <indepth> == branch.'
     assert method in self.METHOD, 'Unknown <method> %s for SummaryBlock.' % method
     self.indepth = indepth
     self.branch = branch
     self.active = active
     self.nclass = nclass
     self.method = method
     self.active_fc = True
     if method == 'split':
         # One BN -> act -> pool -> flatten -> fc head per branch.
         for idx, depth in enumerate(indepth, start=1):
             head = nn.Sequential(
                 nn.BatchNorm2d(depth),
                 Activate(active),
                 AdaAvgPool(),
                 ViewLayer(),
                 nn.Linear(depth, nclass))
             setattr(self, 'classifier%s' % idx, head)
     elif method == 'merge':
         # Per-branch pooling only; a single fc consumes all branches at once.
         for idx, depth in enumerate(indepth, start=1):
             pooler = nn.Sequential(
                 nn.BatchNorm2d(depth),
                 Activate(active),
                 AdaAvgPool(),
                 ViewLayer())
             setattr(self, 'pool_view%s' % idx, pooler)
         self.classifier = nn.Linear(sum(indepth), nclass)
     else:
         raise NotImplementedError
コード例 #2
0
ファイル: waveresnet.py プロジェクト: zhjpqq/scalenet
 def __init__(self,
              indepth,
              branch,
              active='relu',
              pool='avg',
              nclass=1000,
              method='split'):
     """Final summary head classifying each of `branch` output branches.

     Args:
         indepth: per-branch channel counts; len(indepth) must equal `branch`.
         branch: number of output branches to summarize.
         active: activation name handed to Activate().
         pool: pooling-mode tag; stored for reference (layers use AdaAvgPool).
         nclass: number of output classes.
         method: 'split' -> one BN->act->pool->flatten->fc head per branch;
                 'merge' -> pool each branch, one shared Linear on sum(indepth).
     """
     super(RockSummary, self).__init__()
     assert len(
         indepth) == branch, '各输出分支的通道数必须全部给定,len of <indepth> == branch.'
     # Error-message typo fixed: '<methond>' -> '<method>'.
     assert method in self.METHOD, 'Unknown <method> %s.' % method
     self.indepth = indepth
     self.branch = branch
     self.active_fc = True
     # Store pool for parity with the other RockSummary variants in this project.
     self.pool = pool
     self.nclass = nclass
     self.method = method
     if method == 'split':
         for b in range(1, branch + 1):
             layer = nn.Sequential(nn.BatchNorm2d(indepth[b - 1]),
                                   Activate(active), AdaAvgPool(),
                                   ViewLayer(),
                                   nn.Linear(indepth[b - 1], nclass))
             setattr(self, 'classifier%s' % b, layer)
     elif method == 'merge':
         for b in range(1, branch + 1):
             layer = nn.Sequential(nn.BatchNorm2d(indepth[b - 1]),
                                   Activate(active), AdaAvgPool(),
                                   ViewLayer())
             setattr(self, 'pool_view%s' % b, layer)
         self.classifier = nn.Linear(sum(indepth), nclass)
     else:
         raise NotImplementedError
コード例 #3
0
ファイル: realnet.py プロジェクト: zhjpqq/scalenet
    def __init__(self, indepth, growth, active='relu', first=False, after=True, down=False,
                 trans='A', reduce=0.5, convkp='T', inmode='nearest', classify=0, nclass=1000,
                 last_branch=1, last_down=False, last_expand=0):
        """One down/up couple: a stride-2 conv followed by a stride-1 conv, with an
        optional auxiliary classifier and transition layers toward the next stage.
        """
        super(SingleCouple, self).__init__()
        assert last_branch <= 2, '<last_branch> of SingleCouple should be <= 2...'
        self.indepth = indepth
        self.growth = growth
        self.active = getattr(nn.functional, active)
        self.first = first
        self.after = after
        self.down = down
        self.trans = trans
        self.reduce = reduce
        self.trans_func = self._trans[trans]
        self.classify = classify
        self.nclass = nclass
        self.last_branch = last_branch
        self.last_down = last_down
        self.last_expand = last_expand
        self.inmode = inmode
        self.convkp = convkp
        # 'T' -> 3x3 conv with padding 1; 'O' -> 1x1 conv with padding 0.
        ksize, pad = {'T': (3, 1), 'O': (1, 0)}[convkp]

        # The very first couple also carries the input channels through conv1.
        conv1_out = indepth + growth if self.first else growth
        self.bn1 = nn.BatchNorm2d(indepth)
        self.conv1 = nn.Conv2d(indepth, conv1_out, 3, stride=2, padding=1, bias=False, dilation=1)
        self.bn2 = nn.BatchNorm2d(indepth + growth)
        self.conv2 = nn.Conv2d(indepth + growth, growth, ksize, stride=1, padding=pad, bias=False, dilation=1)

        if self.classify > 0:
            # Auxiliary classifier fed by this couple's mid features.
            modules = [nn.BatchNorm2d(indepth + growth),
                       Activate(active),
                       AdaAvgPool(),
                       ViewLayer(dim=-1),
                       nn.Linear(indepth + growth, nclass)]
            self.classifier = nn.Sequential(*modules)

        if self.after:
            if self.down:
                shrunk = int(math.floor((indepth + growth) * reduce))
                self.down_res2 = self.trans_func(indepth + growth, shrunk)
                self.down_res1 = self.trans_func(indepth + growth, shrunk)
        elif self.last_down:
            grown = indepth + growth + last_expand
            if self.last_branch >= 1:
                self.down_last2 = self.trans_func(indepth + growth, grown)
            if self.last_branch >= 2:
                self.down_last1 = self.trans_func(indepth + growth, grown)
        elif self.classify > 0 and self.last_branch == 2:
            # The last couple's mid layer is wired into the Summary as a branch
            # output, so its own classifier would duplicate the Summary's
            # classifier — drop it here.
            delattr(self, 'classifier')
            self.classify = 0
            print('Note: 1 xfc will be deleted  because of duplicate with the last-fc!')
コード例 #4
0
ファイル: aeresnet_v2.py プロジェクト: zhjpqq/scalenet
 def __init__(self,
              indepth,
              branch=1,
              active='relu',
              pool='avg',
              nclass=1000):
     """Summary head: one BN -> act -> pool -> flatten -> fc classifier per branch.

     Args:
         indepth: channel count shared by all branches (a single int here).
         branch: number of branches; heads are named classifier1..classifierN.
         active: activation name handed to Activate().
         pool: pooling-mode tag, stored on the module.
         nclass: number of output classes.
     """
     super(RockSummary, self).__init__()
     self.indepth = indepth
     self.branch = branch
     self.active_fc = True
     self.pool = pool
     # Fixed inconsistency: nclass was accepted but never stored, unlike every
     # sibling summary block in this project.
     self.nclass = nclass
     for b in range(1, branch + 1):
         layer = nn.Sequential(nn.BatchNorm2d(indepth), Activate(active),
                               AdaAvgPool(), ViewLayer(),
                               nn.Linear(indepth, nclass))
         setattr(self, 'classifier%s' % b, layer)
コード例 #5
0
ファイル: wavenet.py プロジェクト: GuideWsp/scalenet
    def __init__(self, indepth, growth, bottle=4, active='relu', first=False, after=True, down=False, trans='A',
                 reduce=0.5, classify=0, nclass=1000, last_branch=1, last_down=False, last_expand=0):
        """Encoder-decoder couple: two stride-2 convs down then two transposed
        convs up, optionally with 1x1 bottleneck convs before each main conv,
        an auxiliary classifier on the mid features, and transition layers.

        Args:
            indepth: input channel count.
            growth: channels produced by each main conv stage.
            bottle: bottleneck multiplier; > 0 inserts 1x1 reduction convs of
                `bottle * growth` channels, == 0 uses plain layers only.
            active: activation name (an attribute of nn.functional).
            first: the first couple lets conv1 also carry the input channels.
            after: True when another stage follows (enables down_res transitions).
            down: with `after`, add transitions shrinking channels by `reduce`.
            trans: key into self._trans selecting the transition function.
            reduce: channel-reduction ratio of the down transitions.
            classify: > 0 attaches an auxiliary classifier.
            nclass: class count for the (auxiliary) classifier.
            last_branch: branches exposed by the last couple (<= 4).
            last_down: for the last couple, add down_last transitions instead.
            last_expand: extra channels added by the down_last transitions.
        """
        super(DoubleCouple, self).__init__()
        assert last_branch <= 4, '<last_branch> of DoubleCouple should be <= 4...'
        self.indepth = indepth
        self.growth = growth
        self.bottle = bottle
        self.classify = classify
        self.nclass = nclass
        self.active = getattr(nn.functional, active)
        self.first = first
        self.after = after
        self.down = down
        self.trans = trans
        self.reduce = reduce
        self.trans_func = self._trans[trans]
        self.last_branch = last_branch
        self.last_down = last_down
        self.last_expand = last_expand

        if bottle > 0:
            interdepth = int(bottle * growth)
            # Bugfix: '%' binds tighter than '*', so the original
            # `'... %s !\n' % bottle * growth` formatted only `bottle` and then
            # string-repeated the whole message `growth` times. Parenthesize
            # the product so the message shows bottle * growth.
            assert interdepth == math.ceil(bottle * growth), \
                '<bottle> * <growth> cannot be a fraction number (带小数位), but %s !\n' % (bottle * growth)

            first_outdepth = indepth + growth if self.first else growth
            self.bn1_b = nn.BatchNorm2d(indepth)
            self.conv1_b = nn.Conv2d(indepth, interdepth, 1, stride=1, padding=0, dilation=1, bias=False)
            self.bn1 = nn.BatchNorm2d(interdepth)
            self.conv1 = nn.Conv2d(interdepth, first_outdepth, 3, stride=2, padding=1, bias=False, dilation=1)

            self.bn2_b = nn.BatchNorm2d(indepth + growth)
            self.conv2_b = nn.Conv2d(indepth + growth, interdepth, 1, stride=1, padding=0, dilation=1, bias=False)
            self.bn2 = nn.BatchNorm2d(interdepth)
            self.conv2 = nn.Conv2d(interdepth, growth, 3, stride=2, padding=1, bias=False, dilation=1)

            self.bn3_b = nn.BatchNorm2d(indepth + growth)
            self.conv3_b = nn.Conv2d(indepth + growth, interdepth, 1, stride=1, padding=0, dilation=1, bias=False)
            self.bn3 = nn.BatchNorm2d(interdepth)
            self.deconv3 = nn.ConvTranspose2d(interdepth, growth, 3, stride=2, padding=1,
                                              output_padding=1, bias=False, dilation=1)

            self.bn4_b = nn.BatchNorm2d(indepth + growth)
            self.conv4_b = nn.Conv2d(indepth + growth, interdepth, 1, stride=1, padding=0, dilation=1, bias=False)
            self.bn4 = nn.BatchNorm2d(interdepth)
            self.deconv4 = nn.ConvTranspose2d(interdepth, growth, 3, stride=2, padding=1,
                                              output_padding=1, bias=False, dilation=1)

        elif bottle == 0:
            # Plain variant: no 1x1 bottleneck layers.
            first_outdepth = indepth + growth if self.first else growth
            self.bn1 = nn.BatchNorm2d(indepth)
            self.conv1 = nn.Conv2d(indepth, first_outdepth, 3, stride=2, padding=1, bias=False, dilation=1)
            self.bn2 = nn.BatchNorm2d(indepth + growth)
            self.conv2 = nn.Conv2d(indepth + growth, growth, 3, stride=2, padding=1, bias=False, dilation=1)
            self.bn3 = nn.BatchNorm2d(indepth + growth)
            self.deconv3 = nn.ConvTranspose2d(indepth + growth, growth, 3, stride=2, padding=1,
                                              output_padding=1, bias=False, dilation=1)
            self.bn4 = nn.BatchNorm2d(indepth + growth)
            self.deconv4 = nn.ConvTranspose2d(indepth + growth, growth, 3, stride=2, padding=1,
                                              output_padding=1, bias=False, dilation=1)

        else:
            raise NotImplementedError('<bottle> should be >= 0, but %s' % bottle)

        if self.classify > 0:
            # Auxiliary classifier fed by this couple's mid features.
            self.classifier = nn.Sequential(
                nn.BatchNorm2d(indepth + growth),
                Activate(active),
                AdaAvgPool(),
                ViewLayer(dim=-1),
                nn.Linear(indepth + growth, nclass))

        if self.after:
            if self.down and self.reduce != 1:
                outdepth = int(math.floor((indepth + growth) * reduce))
                self.down_res4 = self.trans_func(indepth + growth, outdepth)
                self.down_res3 = self.trans_func(indepth + growth, outdepth)
                self.down_res2 = self.trans_func(indepth + growth, outdepth)
                self.down_res1 = self.trans_func(indepth + growth, outdepth)

        else:
            if self.last_down:
                outdepth = indepth + growth + last_expand
                if self.last_branch >= 1:
                    self.down_last4 = self.trans_func(indepth + growth, outdepth)
                if self.last_branch >= 2:
                    self.down_last3 = self.trans_func(indepth + growth, outdepth)
                if self.last_branch >= 3:
                    self.down_last2 = self.trans_func(indepth + growth, outdepth)
                if self.last_branch >= 4:
                    self.down_last1 = self.trans_func(indepth + growth, outdepth)
            else:
                if self.classify > 0 and self.last_branch == 4:
                    # The last couple's mid layer is wired into the Summary as a
                    # branch output; its own classifier would duplicate the
                    # Summary's classifier, so remove it.
                    delattr(self, 'classifier')
                    self.classify = 0
                    print('\nNote: 1 xfc will be deleted because of duplicated with the lfc!\n')
コード例 #6
0
ファイル: aeresnet_v2.py プロジェクト: zhjpqq/scalenet
    def __init__(self,
                 branch=3,
                 rock='A',
                 depth=16,
                 blockexp=(1, 1),
                 stages=3,
                 blocks=('D', 'D', 'S'),
                 slink=('A', 'A', 'A'),
                 expand=(1, 2, 4),
                 layers=(2, 2, 2),
                 dfunc=('C', 'C', 'C'),
                 classify=(1, 1, 1),
                 last_down=True,
                 last_expand=1,
                 nclass=10):
        """CIFAR AE-ResNet: a stem ('rock'), `stages` couple stages and a final
        classifier.

        The per-stage hyper-parameters (blocks, slink, expand, layers, dfunc,
        classify) must each provide at least `stages` entries.
        """
        super(CifarAEResNetMix, self).__init__()
        # Error-message typo fixed: 'Pameters' -> 'Parameters'.
        assert stages <= min(len(blocks), len(slink), len(expand),
                             len(layers), len(dfunc), len(classify)), \
            'Hyper Parameters Not Enough to Match Stages:%s!' % stages
        assert sorted(blocks[:stages]) == list(blocks[:stages]), \
            'DoubleCouple must be ahead of SingleCouple! %s' % blocks[:stages]
        # Message fixed to match the actual check (<= 4, not < 4).
        assert stages <= 4, 'cifar stages should be <= 4'
        self.branch = branch
        self.rock = self.rocker[rock]
        self.depth = depth
        self.expand = expand
        self.layers = layers
        self.dfunc = dfunc
        self.classify = classify
        self.last_down = last_down
        self.last_expand = last_expand
        self.stages = stages
        self.blocks = blocks
        self.slink = slink
        self.nclass = nclass

        # Every stage but the last is followed by another stage.
        self.after = [True for _ in range(stages - 1)] + [False]
        fc_indepth = depth * expand[stages - 1]
        if self.last_down:
            # NOTE(review): this multiplies by last_expand while the couples add
            # last_expand to their channel count — confirm the two agree.
            fc_indepth *= last_expand

        self.layer0 = self.rock(depth=depth, branch=branch, dataset='cifar')
        # Build stage1..stageN in a loop instead of four copy-pasted branches;
        # the attribute names and constructor arguments are unchanged.
        for s in range(stages):
            setattr(self, 'stage%d' % (s + 1),
                    self._make_aelayer(self.couple[blocks[s]],
                                       layers[s],
                                       expand[s] * depth,
                                       slink[s],
                                       self.after[s],
                                       last_down,
                                       last_expand,
                                       dfunc=dfunc[s],
                                       cfy=classify[s],
                                       blockexp=blockexp))
        self.classifier = nn.Sequential(nn.BatchNorm2d(fc_indepth), nn.ReLU(),
                                        AdaAvgPool(), ViewLayer(dim=-1),
                                        nn.Linear(fc_indepth, nclass))

        self._init_params()
コード例 #7
0
    def __init__(self,
                 block='P',
                 nblocks=(3, 3, 3),
                 inplanes=16,
                 bottle=1,
                 active='fx',
                 dataset='cifar10'):
        """ Constructor
        Args:
            bottle: =1 a ResPlain block; >1 a dilate ResBottle block; <1, a shrink ResBottle block.
        """
        super(XResNet, self).__init__()
        assert block in self.residual.keys(), 'Unknown residual block: %s.' % block
        assert dataset in self.datasets.keys(), 'Unsupported dataset: %s.' % dataset
        # imagenet variants take 4 stage-counts, cifar variants take 3.
        assert len(nblocks) == (4 if dataset == 'imagenet' else 3), \
            'Assure <nblocks> match with dataset.'

        block = self.residual[block]
        self.block = block
        self.nblocks = nblocks
        self.inplanes = inplanes
        self.bottle = bottle
        self.active = active
        self.dataset = dataset
        nlabels = self.datasets[dataset]

        nplanes = [inplanes, 2 * inplanes, 4 * inplanes, 8 * inplanes]
        # Pad with a dummy count so nblocks[3]/nbottls[3] never raise IndexError.
        nblocks = nblocks if len(nblocks) >= 4 else list(nblocks) + [-1]
        nbottls = [bottle] * len(nblocks)

        self.preproc = PreProc(3, nplanes[0], dataset)

        self.stage_1 = self._make_layer(block,
                                        nplanes[0],
                                        nblocks[0],
                                        nbottls[0],
                                        stride=1)
        self.stage_2 = self._make_layer(block,
                                        nplanes[1],
                                        nblocks[1],
                                        nbottls[1],
                                        stride=2)
        self.stage_3 = self._make_layer(block,
                                        nplanes[2],
                                        nblocks[2],
                                        nbottls[2],
                                        stride=2)
        # Fixed: the original `[make_layer(...), ReturnX()][cond]` evaluated BOTH
        # branches eagerly, constructing a throwaway 4th stage on cifar. The
        # conditional below builds only the branch that is kept.
        if dataset == 'imagenet':
            self.stage_4 = self._make_layer(block,
                                            nplanes[3],
                                            nblocks[3],
                                            nbottls[3],
                                            stride=2)
            out_planes = nplanes[-1]
        else:
            self.stage_4 = ReturnX()
            out_planes = nplanes[-2]

        self.squeeze = nn.Sequential(
            nn.BatchNorm2d(out_planes * block.expansion),
            nn.ReLU(inplace=True), AdaAvgPool(), ViewLayer())

        self.classifier = nn.Linear(out_planes * block.expansion, nlabels)

        self._init_params()
コード例 #8
0
ファイル: dxnet.py プロジェクト: zhjpqq/scalenet
    def __init__(self,
                 indepth,
                 insize,
                 branch,
                 active='relu',
                 pool='avg',
                 nclass=1000,
                 method='split'):
        """Summary head over `branch` output branches with several classifier
        layouts.

        method:
            'split': one BN->act->pool->flatten->fc head per branch.
            'merge': pool each branch, then one shared Linear over sum(indepth).
            'convx': one ConvClassifier per branch, kernel halved per branch.
            'convf'/'convs'/'convt': a single ConvClassifier on branch 1/2/3.
        """
        super(RockSummary, self).__init__()
        assert len(
            indepth) == branch, '各输出分支的通道数必须全部给定,len of <indepth> == branch.'
        # Error-message typo fixed: '<methond>' -> '<method>'.
        assert method in self.METHOD, 'Unknown <method> %s.' % method
        if method in ['convf', 'convs', 'convt']:
            assert insize >= 1, '进行卷积分类,输入特征图尺寸<insize> >= 1.'
        if method == 'convf':
            assert branch >= 1, '对last-1进行卷积分类,输出分支<last-branch>>=1'
        elif method == 'convs':
            assert branch >= 2, '对last-2进行卷积分类,输出分支<last-branch>必须>=2'
        elif method == 'convt':
            assert branch >= 3, '对last-3进行卷积分类,输出分支<last-branch>必须>=3'

        self.indepth = indepth
        self.branch = branch
        self.active_fc = True
        self.nclass = nclass
        insize = int(insize)
        self.insize = insize
        self.method = method
        if method == 'split':
            for b in range(1, branch + 1):
                layer = nn.Sequential(nn.BatchNorm2d(indepth[b - 1]),
                                      Activate(active), AdaAvgPool(),
                                      ViewLayer(),
                                      nn.Linear(indepth[b - 1], nclass))
                setattr(self, 'classifier%s' % b, layer)
        elif method == 'merge':
            for b in range(1, branch + 1):
                layer = nn.Sequential(nn.BatchNorm2d(indepth[b - 1]),
                                      Activate(active), AdaAvgPool(),
                                      ViewLayer())
                setattr(self, 'pool_view%s' % b, layer)
            self.classifier = nn.Linear(sum(indepth), nclass)
        elif method == 'convx':
            # NOTE(review): 'convx' also uses insize but is not covered by the
            # insize >= 1 assertion above — confirm callers guarantee it.
            for b in range(1, branch + 1):
                # Each deeper branch halves the spatial size, so halve the kernel.
                ksize = int(insize * ((1 / 2)**(b - 1)))
                layer = ConvClassifier(indepth[b - 1],
                                       nclass,
                                       ksize=ksize,
                                       stride=1,
                                       padding=0)
                setattr(self, 'classifier%s' % b, layer)
        elif method == 'convf':
            self.classifier = ConvClassifier(indepth[0],
                                             nclass,
                                             ksize=insize // 1,
                                             stride=1,
                                             padding=0)
        elif method == 'convs':
            self.classifier = ConvClassifier(indepth[1],
                                             nclass,
                                             ksize=insize // 2,
                                             stride=1,
                                             padding=0)
        elif method == 'convt':
            self.classifier = ConvClassifier(indepth[2],
                                             nclass,
                                             ksize=insize // 4,
                                             stride=1,
                                             padding=0)
        else:
            # Error-message typo fixed: 'Unknow' -> 'Unknown'.
            raise NotImplementedError('Unknown <method> for SummaryBlock, %s' %
                                      method)