def __init__(self, indepth, branch=1, active='relu', nclass=1000, method='split'):
    """Build the classification head(s) of a SummaryBlock.

    Args:
        indepth: per-branch channel counts; its length must equal `branch`.
        branch:  number of incoming feature branches.
        active:  activation name passed to `Activate`.
        nclass:  number of output classes.
        method:  'split' -> one independent classifier per branch;
                 'merge' -> per-branch pooling, one shared linear classifier.
    """
    super(SummaryBlock, self).__init__()
    assert len(indepth) == branch, '各分类分支的通道数必须全部给定,so, len of <indepth> == branch.'
    assert method in self.METHOD, 'Unknown <method> %s for SummaryBlock.' % method
    self.indepth = indepth
    self.branch = branch
    self.active = active
    self.nclass = nclass
    self.method = method
    self.active_fc = True
    if method == 'split':
        # One full BN -> activate -> pool -> flatten -> linear head per branch,
        # registered as self.classifier1 .. self.classifierN.
        for b in range(1, branch + 1):
            head = nn.Sequential(
                nn.BatchNorm2d(indepth[b - 1]),
                Activate(active),
                AdaAvgPool(),
                ViewLayer(),
                nn.Linear(indepth[b - 1], nclass),
            )
            setattr(self, 'classifier%s' % b, head)
    elif method == 'merge':
        # Per-branch pooling/flattening only; the flattened features are later
        # concatenated and classified by a single shared linear layer.
        for b in range(1, branch + 1):
            pooler = nn.Sequential(
                nn.BatchNorm2d(indepth[b - 1]),
                Activate(active),
                AdaAvgPool(),
                ViewLayer(),
            )
            setattr(self, 'pool_view%s' % b, pooler)
        self.classifier = nn.Linear(sum(indepth), nclass)
    else:
        raise NotImplementedError
def __init__(self, indepth, branch, active='relu', pool='avg', nclass=1000, method='split'):
    """Build the classification head(s) of a RockSummary block.

    Args:
        indepth: per-branch channel counts; its length must equal `branch`.
        branch:  number of incoming feature branches.
        active:  activation name passed to `Activate`.
        pool:    pooling kind; NOTE(review): accepted but not stored/used here — confirm.
        nclass:  number of output classes.
        method:  'split' -> one classifier per branch; 'merge' -> shared classifier.
    """
    super(RockSummary, self).__init__()
    assert len(indepth) == branch, '各输出分支的通道数必须全部给定,len of <indepth> == branch.'
    # Fixed typo in the error message: '<methond>' -> '<method>'.
    assert method in self.METHOD, 'Unknown <method> %s.' % method
    self.indepth = indepth
    self.branch = branch
    self.active_fc = True
    self.nclass = nclass
    self.method = method
    if method == 'split':
        # Independent BN -> activate -> pool -> flatten -> linear head per branch.
        for b in range(1, branch + 1):
            layer = nn.Sequential(nn.BatchNorm2d(indepth[b - 1]),
                                  Activate(active),
                                  AdaAvgPool(),
                                  ViewLayer(),
                                  nn.Linear(indepth[b - 1], nclass))
            setattr(self, 'classifier%s' % b, layer)
    elif method == 'merge':
        # Per-branch pooling only; one shared linear head over the concatenation.
        for b in range(1, branch + 1):
            layer = nn.Sequential(nn.BatchNorm2d(indepth[b - 1]),
                                  Activate(active),
                                  AdaAvgPool(),
                                  ViewLayer())
            setattr(self, 'pool_view%s' % b, layer)
        self.classifier = nn.Linear(sum(indepth), nclass)
    else:
        raise NotImplementedError
def __init__(self, indepth, growth, active='relu', first=False, after=True, down=False,
             trans='A', reduce=0.5, convkp='T', inmode='nearest', classify=0, nclass=1000,
             last_branch=1, last_down=False, last_expand=0):
    """Construct a SingleCouple: one downsampling conv + one refining conv,
    plus optional auxiliary classifier and residual/last-stage transition layers.

    Args:
        indepth:     input channel count.
        growth:      channels added by this couple.
        active:      activation name (looked up on nn.functional and in Activate).
        first:       True for the first couple — conv1 then outputs indepth+growth channels.
        after:       True when another couple follows (use down_res* transitions);
                     False for the last couple (use down_last* transitions).
        down:        whether to build inter-stage reduction transitions.
        trans:       key into self._trans selecting the transition layer type.
        reduce:      channel reduction factor for the transitions.
        convkp:      'T' -> 3x3/pad1 conv2, 'O' -> 1x1/pad0 conv2.
        inmode:      interpolation mode (stored only; presumably used in forward — confirm).
        classify:    >0 builds an auxiliary classifier on the intermediate features.
        nclass:      number of classes for the auxiliary classifier.
        last_branch: number of branches exposed by the last couple (<= 2).
        last_down:   whether the last couple downsamples its outputs.
        last_expand: extra channels added by the last transitions.
    """
    super(SingleCouple, self).__init__()
    assert last_branch <= 2, '<last_branch> of SingleCouple should be <= 2...'
    self.indepth = indepth
    self.growth = growth
    self.active = getattr(nn.functional, active)
    self.first = first
    self.after = after
    self.down = down
    self.trans = trans
    self.reduce = reduce
    self.trans_func = self._trans[trans]
    self.classify = classify
    self.nclass = nclass
    self.last_branch = last_branch
    self.last_down = last_down
    self.last_expand = last_expand
    self.inmode = inmode
    self.convkp = convkp
    # kernel-size / padding pair for conv2: 'T' keeps spatial size with 3x3,
    # 'O' uses a pointwise 1x1.
    kp = {'T': (3, 1), 'O': (1, 0)}[convkp]
    # The first couple must also carry the input channels forward.
    first_outdepth = indepth + growth if self.first else growth
    self.bn1 = nn.BatchNorm2d(indepth)
    self.conv1 = nn.Conv2d(indepth, first_outdepth, 3, stride=2,
                           padding=1, bias=False, dilation=1)
    self.bn2 = nn.BatchNorm2d(indepth + growth)
    self.conv2 = nn.Conv2d(indepth + growth, growth, kp[0], stride=1,
                           padding=kp[1], bias=False, dilation=1)
    if self.classify > 0:
        # Auxiliary classifier over the intermediate (indepth + growth) features.
        self.classifier = nn.Sequential(
            nn.BatchNorm2d(indepth + growth),
            Activate(active),
            AdaAvgPool(),
            ViewLayer(dim=-1),
            nn.Linear(indepth + growth, nclass)
        )
    if self.after:
        if self.down:
            # Reduce channels between stages; two parallel residual paths.
            outdepth = int(math.floor((indepth + growth) * reduce))
            self.down_res2 = self.trans_func(indepth + growth, outdepth)
            self.down_res1 = self.trans_func(indepth + growth, outdepth)
    else:
        if self.last_down:
            # Last couple: optionally expand channels on each exposed branch.
            outdepth = indepth + growth + last_expand
            if self.last_branch >= 1:
                self.down_last2 = self.trans_func(indepth + growth, outdepth)
            if self.last_branch >= 2:
                self.down_last1 = self.trans_func(indepth + growth, outdepth)
        else:
            if self.classify > 0 and self.last_branch == 2:
                # Here the last couple's intermediate layer is exposed as a branch
                # and wired into the Summary head, so this couple's own classifier
                # is removed to avoid duplicating the classifier in the Summary.
                delattr(self, 'classifier')
                self.classify = 0
                print('Note: 1 xfc will be deleted because of duplicate with the last-fc!')
def __init__(self, indepth, branch=1, active='relu', pool='avg', nclass=1000):
    """Attach one BN -> activate -> pool -> flatten -> linear classifier per branch.

    All branches share the same input depth; heads are registered as
    self.classifier1 .. self.classifierN.
    """
    super(RockSummary, self).__init__()
    self.indepth = indepth
    self.branch = branch
    self.active_fc = True
    self.pool = pool
    for idx in range(branch):
        head = nn.Sequential(
            nn.BatchNorm2d(indepth),
            Activate(active),
            AdaAvgPool(),
            ViewLayer(),
            nn.Linear(indepth, nclass),
        )
        setattr(self, 'classifier%s' % (idx + 1), head)
def __init__(self, indepth, growth, bottle=4, active='relu', first=False, after=True,
             down=False, trans='A', reduce=0.5, classify=0, nclass=1000, last_branch=1,
             last_down=False, last_expand=0):
    """Construct a DoubleCouple: two stride-2 convs followed by two stride-2
    deconvs (optionally with 1x1 bottlenecks), plus optional auxiliary
    classifier and residual/last-stage transition layers.

    Args:
        indepth:     input channel count.
        growth:      channels added by this couple.
        bottle:      bottleneck multiplier; >0 inserts 1x1 convs of
                     int(bottle * growth) channels before each 3x3, 0 disables.
        active:      activation name (looked up on nn.functional and in Activate).
        first:       True for the first couple — conv1 also carries input channels.
        after:       True when another couple follows (down_res* transitions);
                     False for the last couple (down_last* transitions).
        down:        whether to build inter-stage reduction transitions.
        trans:       key into self._trans selecting the transition layer type.
        reduce:      channel reduction factor for the transitions.
        classify:    >0 builds an auxiliary classifier on intermediate features.
        nclass:      number of classes for the auxiliary classifier.
        last_branch: number of branches exposed by the last couple (<= 4).
        last_down:   whether the last couple downsamples its outputs.
        last_expand: extra channels added by the last transitions.
    """
    super(DoubleCouple, self).__init__()
    assert last_branch <= 4, '<last_branch> of DoubleCouple should be <= 4...'
    self.indepth = indepth
    self.growth = growth
    self.bottle = bottle
    self.classify = classify
    self.nclass = nclass
    self.active = getattr(nn.functional, active)
    self.first = first
    self.after = after
    self.down = down
    self.trans = trans
    self.reduce = reduce
    self.trans_func = self._trans[trans]
    self.last_branch = last_branch
    self.last_down = last_down
    self.last_expand = last_expand
    if bottle > 0:
        interdepth = int(bottle * growth)
        # BUG FIX: the original message used `'... %s !\n' % bottle * growth`,
        # which %-formats first and then string-repeats the message <growth>
        # times; parenthesize so the product itself is reported.
        assert interdepth == math.ceil(bottle * growth), \
            '<bottle> * <growth> cannot be a fraction number (带小数位), but %s !\n' % (bottle * growth)
        # The first couple must also carry the input channels forward.
        first_outdepth = indepth + growth if self.first else growth
        # Down path: two stride-2 convs, each preceded by a 1x1 bottleneck.
        self.bn1_b = nn.BatchNorm2d(indepth)
        self.conv1_b = nn.Conv2d(indepth, interdepth, 1, stride=1,
                                 padding=0, dilation=1, bias=False)
        self.bn1 = nn.BatchNorm2d(interdepth)
        self.conv1 = nn.Conv2d(interdepth, first_outdepth, 3, stride=2,
                               padding=1, bias=False, dilation=1)
        self.bn2_b = nn.BatchNorm2d(indepth + growth)
        self.conv2_b = nn.Conv2d(indepth + growth, interdepth, 1, stride=1,
                                 padding=0, dilation=1, bias=False)
        self.bn2 = nn.BatchNorm2d(interdepth)
        self.conv2 = nn.Conv2d(interdepth, growth, 3, stride=2,
                               padding=1, bias=False, dilation=1)
        # Up path: two stride-2 deconvs, each preceded by a 1x1 bottleneck.
        self.bn3_b = nn.BatchNorm2d(indepth + growth)
        self.conv3_b = nn.Conv2d(indepth + growth, interdepth, 1, stride=1,
                                 padding=0, dilation=1, bias=False)
        self.bn3 = nn.BatchNorm2d(interdepth)
        self.deconv3 = nn.ConvTranspose2d(interdepth, growth, 3, stride=2, padding=1,
                                          output_padding=1, bias=False, dilation=1)
        self.bn4_b = nn.BatchNorm2d(indepth + growth)
        self.conv4_b = nn.Conv2d(indepth + growth, interdepth, 1, stride=1,
                                 padding=0, dilation=1, bias=False)
        self.bn4 = nn.BatchNorm2d(interdepth)
        self.deconv4 = nn.ConvTranspose2d(interdepth, growth, 3, stride=2, padding=1,
                                          output_padding=1, bias=False, dilation=1)
    elif bottle == 0:
        # Plain (non-bottlenecked) version of the same down/up structure.
        first_outdepth = indepth + growth if self.first else growth
        self.bn1 = nn.BatchNorm2d(indepth)
        self.conv1 = nn.Conv2d(indepth, first_outdepth, 3, stride=2,
                               padding=1, bias=False, dilation=1)
        self.bn2 = nn.BatchNorm2d(indepth + growth)
        self.conv2 = nn.Conv2d(indepth + growth, growth, 3, stride=2,
                               padding=1, bias=False, dilation=1)
        self.bn3 = nn.BatchNorm2d(indepth + growth)
        self.deconv3 = nn.ConvTranspose2d(indepth + growth, growth, 3, stride=2, padding=1,
                                          output_padding=1, bias=False, dilation=1)
        self.bn4 = nn.BatchNorm2d(indepth + growth)
        self.deconv4 = nn.ConvTranspose2d(indepth + growth, growth, 3, stride=2, padding=1,
                                          output_padding=1, bias=False, dilation=1)
    else:
        raise NotImplementedError('<bottle> should be >= 0, but %s' % bottle)
    if self.classify > 0:
        # Auxiliary classifier over the intermediate (indepth + growth) features.
        self.classifier = nn.Sequential(
            nn.BatchNorm2d(indepth + growth),
            Activate(active),
            AdaAvgPool(),
            ViewLayer(dim=-1),
            nn.Linear(indepth + growth, nclass))
    if self.after:
        if self.down and self.reduce != 1:
            # Reduce channels between stages on all four residual paths.
            outdepth = int(math.floor((indepth + growth) * reduce))
            self.down_res4 = self.trans_func(indepth + growth, outdepth)
            self.down_res3 = self.trans_func(indepth + growth, outdepth)
            self.down_res2 = self.trans_func(indepth + growth, outdepth)
            self.down_res1 = self.trans_func(indepth + growth, outdepth)
    else:
        if self.last_down:
            # Last couple: optionally expand channels on each exposed branch.
            outdepth = indepth + growth + last_expand
            if self.last_branch >= 1:
                self.down_last4 = self.trans_func(indepth + growth, outdepth)
            if self.last_branch >= 2:
                self.down_last3 = self.trans_func(indepth + growth, outdepth)
            if self.last_branch >= 3:
                self.down_last2 = self.trans_func(indepth + growth, outdepth)
            if self.last_branch >= 4:
                self.down_last1 = self.trans_func(indepth + growth, outdepth)
        else:
            if self.classify > 0 and self.last_branch == 4:
                # The last couple's intermediate layer is exposed as a branch and
                # wired into the Summary head, so this couple's own classifier is
                # removed to avoid duplicating the classifier in the Summary.
                delattr(self, 'classifier')
                self.classify = 0
                print('\nNote: 1 xfc will be deleted because of duplicated with the lfc!\n')
def __init__(self, indepth, insize, branch, active='relu', pool='avg', nclass=1000, method='split'):
    """Build the classification head(s) of a RockSummary block.

    Args:
        indepth: per-branch channel counts; its length must equal `branch`.
        insize:  spatial size of the first branch's feature map (needed by conv* methods).
        branch:  number of incoming feature branches.
        active:  activation name passed to `Activate`.
        pool:    pooling kind; NOTE(review): accepted but not stored/used here — confirm.
        nclass:  number of output classes.
        method:  'split'  -> one linear classifier per branch;
                 'merge'  -> per-branch pooling, one shared linear classifier;
                 'convx'  -> one conv classifier per branch (kernel halves per branch);
                 'convf'/'convs'/'convt' -> a single conv classifier on the
                 first/second/third branch respectively.
    """
    super(RockSummary, self).__init__()
    assert len(indepth) == branch, '各输出分支的通道数必须全部给定,len of <indepth> == branch.'
    # Fixed typo in the error message: '<methond>' -> '<method>'.
    assert method in self.METHOD, 'Unknown <method> %s.' % method
    if method in ['convf', 'convs', 'convt']:
        assert insize >= 1, '进行卷积分类,输入特征图尺寸<insize> >= 1.'
        if method == 'convf':
            assert branch >= 1, '对last-1进行卷积分类,输出分支<last-branch>>=1'
        elif method == 'convs':
            assert branch >= 2, '对last-2进行卷积分类,输出分支<last-branch>必须>=2'
        elif method == 'convt':
            assert branch >= 3, '对last-3进行卷积分类,输出分支<last-branch>必须>=3'
    self.indepth = indepth
    self.branch = branch
    self.active_fc = True
    self.nclass = nclass
    insize = int(insize)
    self.insize = insize
    self.method = method
    if method == 'split':
        # Independent BN -> activate -> pool -> flatten -> linear head per branch.
        for b in range(1, branch + 1):
            layer = nn.Sequential(nn.BatchNorm2d(indepth[b - 1]),
                                  Activate(active),
                                  AdaAvgPool(),
                                  ViewLayer(),
                                  nn.Linear(indepth[b - 1], nclass))
            setattr(self, 'classifier%s' % b, layer)
    elif method == 'merge':
        # Per-branch pooling only; one shared linear head over the concatenation.
        for b in range(1, branch + 1):
            layer = nn.Sequential(nn.BatchNorm2d(indepth[b - 1]),
                                  Activate(active),
                                  AdaAvgPool(),
                                  ViewLayer())
            setattr(self, 'pool_view%s' % b, layer)
        self.classifier = nn.Linear(sum(indepth), nclass)
    elif method == 'convx':
        # One conv classifier per branch; branch b's feature map is assumed to be
        # insize / 2**(b-1) on a side, so the kernel covers the whole map.
        for b in range(1, branch + 1):
            ksize = int(insize * ((1 / 2) ** (b - 1)))
            layer = ConvClassifier(indepth[b - 1], nclass, ksize=ksize, stride=1, padding=0)
            setattr(self, 'classifier%s' % b, layer)
    elif method == 'convf':
        self.classifier = ConvClassifier(indepth[0], nclass, ksize=insize // 1, stride=1, padding=0)
    elif method == 'convs':
        self.classifier = ConvClassifier(indepth[1], nclass, ksize=insize // 2, stride=1, padding=0)
    elif method == 'convt':
        self.classifier = ConvClassifier(indepth[2], nclass, ksize=insize // 4, stride=1, padding=0)
    else:
        # Fixed typo in the error message: 'Unknow' -> 'Unknown'.
        raise NotImplementedError('Unknown <method> for SummaryBlock, %s' % method)