Example No. 1
class RN18(Net):
    '''
    ResNet18 has a total of 18 layers: 
    Note that some parameters are predetermined. The parameters that need to be specified are quoted in ''.
    For all ResNetBlock modules, the number of output channels of the stage 1 and stage 2 conv2D blocks
    equals 1/4 of that of the final stage.
    Conv1 - kernel:(3x3), pad:(1,1), stride:1, output: 'c1OutChannel'
    Conv2 - kernel:(3x3), pad:(1,1), stride:2, output: 'c2OutChannel'  # H and W reduced by half
    RNB1 - skipMode:identity, output : 'rnb1OutChannel' 
    RNB2 - skipMode:identity, output : same as RNB1
    RNB3 - skipMode:identity, output : same as RNB1
    RNB4 - skipMode:conv, skipStride:2, output : 'rnb4OutChannel' # H and W reduced by half
    RNB5 - skipMode:identity, output : same as RNB4
    pool - per-channel average pooling of RNB5, reducing the output to 'rnb4OutChannel' values;
            'pSize' must be specified and is used as both the stride and the kernel size
    fc - outChannel: 'classNum'
    softmax - final classification layer
    '''
    def __init__(self, para):
        Net.__init__(self, para)
        convPara1 = {
            'instanceName': 'RN18' + '_Conv1',
            'padding': True,
            'padShape': (1, 1),
            'stride': 1,
            'outChannel': para['c1OutChannel'],
            'kernelShape': (3, 3),
            'bias': False
        }
        self.conv1 = Conv2D(convPara1)
        self.norm1 = Normalize({'instanceName': 'RN18' + '_Norm1'})
        self.scale1 = Scale({'instanceName': 'RN18' + '_Scale1'})
        self.activation1 = Activation({
            'instanceName': 'RN18' + '_Activation1',
            'activationType': 'ReLU'
        })
        self.layerList.append(self.conv1)
        self.layerList.append(self.norm1)
        self.layerList.append(self.scale1)
        self.layerList.append(self.activation1)
        convPara2 = {
            'instanceName': 'RN18' + '_Conv2',
            'padding': True,
            'padShape': (1, 1),
            'stride': 2,
            'outChannel': para['c2OutChannel'],
            'kernelShape': (3, 3),
            'bias': False
        }
        self.conv2 = Conv2D(convPara2)
        self.norm2 = Normalize({'instanceName': 'RN18' + '_Norm2'})
        self.scale2 = Scale({'instanceName': 'RN18' + '_Scale2'})
        self.activation2 = Activation({
            'instanceName': 'RN18' + '_Activation2',
            'activationType': 'ReLU'
        })
        self.layerList.append(self.conv2)
        self.layerList.append(self.norm2)
        self.layerList.append(self.scale2)
        self.layerList.append(self.activation2)
        self.rnb1 = ResNetBlock({
            'instanceName': 'RN18' + '_RNB1',
            'skipMode': 'identity',
            'skipStride': 0,
            'stride1': 1,
            'outChannel1': int(para['rnb1OutChannel'] / 4),
            'outChannel2': int(para['rnb1OutChannel'] / 4),
            'outChannel3': para['rnb1OutChannel'],
            'activationType': 'ReLU'
        })
        self.layerList.append(self.rnb1)
        self.rnb2 = ResNetBlock({
            'instanceName': 'RN18' + '_RNB2',
            'skipMode': 'identity',
            'skipStride': 0,
            'stride1': 1,
            'outChannel1': int(para['rnb1OutChannel'] / 4),
            'outChannel2': int(para['rnb1OutChannel'] / 4),
            'outChannel3': para['rnb1OutChannel'],
            'activationType': 'ReLU'
        })
        self.layerList.append(self.rnb2)
        self.rnb3 = ResNetBlock({
            'instanceName': 'RN18' + '_RNB3',
            'skipMode': 'identity',
            'skipStride': 0,
            'stride1': 1,
            'outChannel1': int(para['rnb1OutChannel'] / 4),
            'outChannel2': int(para['rnb1OutChannel'] / 4),
            'outChannel3': para['rnb1OutChannel'],
            'activationType': 'ReLU'
        })
        self.layerList.append(self.rnb3)
        self.rnb4 = ResNetBlock({
            'instanceName': 'RN18' + '_RNB4',
            'skipMode': 'conv',
            'skipStride': 2,
            'stride1': 2,
            'outChannel1': int(para['rnb4OutChannel'] / 4),
            'outChannel2': int(para['rnb4OutChannel'] / 4),
            'outChannel3': para['rnb4OutChannel'],
            'activationType': 'ReLU'
        })
        self.layerList.append(self.rnb4)
        self.rnb5 = ResNetBlock({
            'instanceName': 'RN18' + '_RNB5',
            'skipMode': 'identity',
            'skipStride': 1,
            'stride1': 1,
            'outChannel1': int(para['rnb4OutChannel'] / 4),
            'outChannel2': int(para['rnb4OutChannel'] / 4),
            'outChannel3': para['rnb4OutChannel'],
            'activationType': 'ReLU'
        })
        self.layerList.append(self.rnb5)
        self.pool1 = Pool({
            'instanceName': 'RN18' + '_pool1',
            'poolType': 'ave',
            'stride': para['pSize'],
            'kernelShape': (para['pSize'], para['pSize'])
        })
        self.layerList.append(self.pool1)
        self.fc1 = FullyConnected({
            'instanceName': 'RN18' + '_fc1',
            'outChannel': para['classNum'],
            'bias': True
        })
        self.layerList.append(self.fc1)
        self.softmax = Softmax({'instanceName': 'RN18' + '_softmax'})
        self.layerList.append(self.softmax)

        self.bottomInterface = self.conv1
        self.topInterface = self.softmax
        self.softmax.setNet(self)

    def stack(self, top, bottom):
        # Connect this subnet to its neighbours: `bottom` feeds conv1 from below and `top`
        # receives the softmax output; internally each call is layer.stack(nextLayer, previousLayer).
        self.top = top
        self.bottom = bottom

        self.conv1.stack(self.norm1, bottom)
        self.norm1.stack(self.scale1, self.conv1)
        self.scale1.stack(self.activation1, self.norm1)
        self.activation1.stack(self.conv2, self.scale1)

        self.conv2.stack(self.norm2, self.activation1)
        self.norm2.stack(self.scale2, self.conv2)
        self.scale2.stack(self.activation2, self.norm2)
        self.activation2.stack(self.rnb1, self.scale2)

        self.rnb1.stack(self.rnb2, self.activation2)
        self.rnb2.stack(self.rnb3, self.rnb1)
        self.rnb3.stack(self.rnb4, self.rnb2)
        self.rnb4.stack(self.rnb5, self.rnb3)
        self.rnb5.stack(self.pool1, self.rnb4)
        self.pool1.stack(self.fc1, self.rnb5)
        self.fc1.stack(self.softmax, self.pool1)
        self.softmax.stack(top, self.fc1)
        self.softmax.setSource(bottom)
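
The snippet below is a minimal usage sketch added for context, not part of the listing above: the
parameter keys mirror the docstring, while dataLayer, lossLayer and the concrete channel counts are
illustrative assumptions about the surrounding framework.

# Hypothetical usage sketch for RN18 (dataLayer/lossLayer are placeholder layer objects).
para = {
    'instanceName': 'rn18',    # assumed to be expected by the Net base class
    'c1OutChannel': 64,        # Conv1 output channels
    'c2OutChannel': 64,        # Conv2 output channels (H and W halved)
    'rnb1OutChannel': 256,     # RNB1-RNB3 output channels (their stage 1/2 convs use 256/4 = 64)
    'rnb4OutChannel': 512,     # RNB4-RNB5 output channels (H and W halved again at RNB4)
    'pSize': 8,                # pooling kernel and stride; chosen to match the final feature-map size
    'classNum': 10             # number of classes produced by the fully connected layer
}
net = RN18(para)
# stack(top, bottom): the layer above consumes the softmax output, the layer below feeds conv1.
net.stack(lossLayer, dataLayer)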
Example No. 2
class ResNetBlock(Subnet):
    '''
    On the main path, the first convolution block has a (1x1) kernel, zero padding and a stride of
    'stride1'. The second convolution block has a (3x3) kernel, (1,1) padding and a stride of 1. The
    third convolution block has a (1x1) kernel, zero padding and a stride of 1. The skip path is either
    an identity mapping or a convolution block with a (1x1) kernel and zero padding, whose number of
    output channels is the same as that of the third convolution block on the main path.
    
    Parameters required: 
    'instanceName': name of the block
    'skipMode': selects the operation on the skip path, 'conv' or 'identity'
    'skipStride': stride of the convolution block on the skip path
    'stride1': stride of the first convolution block on the main path
    'outChannel1': number of output channels of the first convolution block on the main path
    'outChannel2': number of output channels of the second convolution block on the main path
    'outChannel3': number of output channels of the third convolution block on the main path
    'activationType': activation function of the non-linear block, 'ReLU' or 'sigmoid'
    '''

    # 'conv' mode has a convolution block on the skip path; 'identity' mode is a strict pass-through.
    skipModes = ['conv', 'identity']

    def __init__(self, para):
        Subnet.__init__(self, para)
        self.layerList = []

        self.fork = Fork2({'instanceName': para['instanceName'] + '_fork'})
        self.layerList.append(self.fork)
        self.skipMode = para['skipMode']
        if self.skipMode == 'conv':
            convPara4 = {
                'instanceName': para['instanceName'] + '_skipConv1',
                'padding': False,
                'padShape': (0, 0),
                'stride': para['skipStride'],
                'outChannel': para['outChannel3'],
                'kernelShape': (1, 1),
                'bias': False
            }
            self.skipConv = Conv2D(convPara4)
            self.skipNorm = Normalize(
                {'instanceName': para['instanceName'] + '_skipNorm'})
            self.skipScale = Scale(
                {'instanceName': para['instanceName'] + '_skipScale'})
            self.layerList.append(self.skipConv)
            self.layerList.append(self.skipNorm)
            self.layerList.append(self.skipScale)

        convPara1 = {
            'instanceName': para['instanceName'] + '_mainConv1',
            'padding': False,
            'padShape': (0, 0),
            'stride': para['stride1'],
            'outChannel': para['outChannel1'],
            'kernelShape': (1, 1),
            'bias': False
        }
        convPara2 = {
            'instanceName': para['instanceName'] + '_mainConv2',
            'padding': True,
            'padShape': (1, 1),
            'stride': 1,
            'outChannel': para['outChannel2'],
            'kernelShape': (3, 3),
            'bias': False
        }
        convPara3 = {
            'instanceName': para['instanceName'] + '_mainConv3',
            'padding': False,
            'padShape': (0, 0),
            'stride': 1,
            'outChannel': para['outChannel3'],
            'kernelShape': (1, 1),
            'bias': False
        }

        self.mainConv1 = Conv2D(convPara1)
        self.mainNorm1 = Normalize(
            {'instanceName': para['instanceName'] + '_mainNorm1'})
        self.mainScale1 = Scale(
            {'instanceName': para['instanceName'] + '_mainScale1'})
        self.mainActivation1 = Activation({
            'instanceName': para['instanceName'] + '_mainReLU1',
            'activationType': para['activationType']
        })
        self.layerList.append(self.mainConv1)
        self.layerList.append(self.mainNorm1)
        self.layerList.append(self.mainScale1)
        self.layerList.append(self.mainActivation1)

        self.mainConv2 = Conv2D(convPara2)
        self.mainNorm2 = Normalize(
            {'instanceName': para['instanceName'] + '_mainNorm2'})
        self.mainScale2 = Scale(
            {'instanceName': para['instanceName'] + '_mainScale2'})
        self.mainActivation2 = Activation({
            'instanceName': para['instanceName'] + '_mainReLU2',
            'activationType': para['activationType']
        })
        self.layerList.append(self.mainConv2)
        self.layerList.append(self.mainNorm2)
        self.layerList.append(self.mainScale2)
        self.layerList.append(self.mainActivation2)

        self.mainConv3 = Conv2D(convPara3)
        self.mainNorm3 = Normalize(
            {'instanceName': para['instanceName'] + '_mainNorm3'})
        self.mainScale3 = Scale(
            {'instanceName': para['instanceName'] + '_mainScale3'})
        self.layerList.append(self.mainConv3)
        self.layerList.append(self.mainNorm3)
        self.layerList.append(self.mainScale3)

        self.sum = Sum2({'instanceName': para['instanceName'] + '_sum'})
        self.activation3 = Activation({
            'instanceName': para['instanceName'] + '_outReLU3',
            'activationType': para['activationType']
        })
        self.layerList.append(self.sum)
        self.layerList.append(self.activation3)
        self.bottomInterface = self.fork
        self.topInterface = self.activation3

    def stack(self, top, bottom):
        self.top = top
        self.bottom = bottom
        # skip path
        if self.skipMode == 'conv':
            self.fork.fork(self.skipConv, self.mainConv1, bottom)
            self.skipConv.stack(self.skipNorm, self.fork.skip)
            self.skipNorm.stack(self.skipScale, self.skipConv)
            self.skipScale.stack(self.sum.skip, self.skipNorm)
        else:
            self.fork.fork(self.sum.skip, self.mainConv1, bottom)
        # main path
        self.mainConv1.stack(self.mainNorm1, self.fork.main)
        self.mainNorm1.stack(self.mainScale1, self.mainConv1)
        self.mainScale1.stack(self.mainActivation1, self.mainNorm1)
        self.mainActivation1.stack(self.mainConv2, self.mainScale1)

        self.mainConv2.stack(self.mainNorm2, self.mainActivation1)
        self.mainNorm2.stack(self.mainScale2, self.mainConv2)
        self.mainScale2.stack(self.mainActivation2, self.mainNorm2)
        self.mainActivation2.stack(self.mainConv3, self.mainScale2)

        self.mainConv3.stack(self.mainNorm3, self.mainActivation2)
        self.mainNorm3.stack(self.mainScale3, self.mainConv3)
        self.mainScale3.stack(self.sum.main, self.mainNorm3)
        # sum
        if self.skipMode == 'conv':
            self.sum.sum(self.activation3, self.skipScale, self.mainScale3)
        else:
            self.sum.sum(self.activation3, self.fork.skip, self.mainScale3)
        self.activation3.stack(top, self.sum)
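
For reference, here is a small configuration sketch added alongside the listing; the channel counts
are illustrative and prevLayer/nextLayer are hypothetical neighbouring layers, not names from the
original code.

# Identity skip: the block input must already have 'outChannel3' channels; 'skipStride' is not
# read by the constructor in this mode but is supplied for consistency with RN18's usage.
identityBlock = ResNetBlock({
    'instanceName': 'rnb_identity',
    'skipMode': 'identity',
    'skipStride': 1,
    'stride1': 1,
    'outChannel1': 64,     # stage 1: 1/4 of the block output
    'outChannel2': 64,     # stage 2: 1/4 of the block output
    'outChannel3': 256,    # stage 3: block output channels
    'activationType': 'ReLU'
})

# Conv skip: a (1x1) convolution on the skip path matches the channel count, and with
# skipStride/stride1 of 2 it also halves H and W.
downsampleBlock = ResNetBlock({
    'instanceName': 'rnb_downsample',
    'skipMode': 'conv',
    'skipStride': 2,
    'stride1': 2,
    'outChannel1': 128,
    'outChannel2': 128,
    'outChannel3': 512,
    'activationType': 'ReLU'
})

# Blocks are wired exactly as in RN18.stack(): block.stack(layerAbove, layerBelow).
identityBlock.stack(nextLayer, prevLayer)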