def __resBlock(self,
                x,
                channels=64,
                kernel_size=(3, 3),
                scale=1.0,
                layer=0):
     nn = ReluLayer(x, name='res%d/ru1' % (layer))
     nn = tl.Conv2d(nn,
                    channels - self.prunedlist[layer],
                    kernel_size,
                    act=tf.nn.relu,
                    name='res%d/c1' % (layer))
     self.op.append(nn.outputs)  # record the first conv's output for later use
     # self.out_t = nn.outputs
     # from the c1 conv, sample a random input patch of shape [3, 3, input_channels]
     # nn.print_layers()
     nn = tl.Conv2d(nn,
                    channels,
                    kernel_size,
                    act=None,
                    name='res%d/c2' % (layer))
     nn = ScaleLayer(nn, scale, name='res%d/scale' % (layer))
     n = tl.ElementwiseLayer([x, nn],
                             tf.add,
                             name='res%d/res_add' % (layer))
     return n
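Note that ReluLayer and ScaleLayer are not built-in TensorLayer layers; the snippets here assume they are defined elsewhere in the repository. A minimal sketch of what they could look like, assuming tl refers to tensorlayer.layers (as the tl.Conv2d calls above suggest) and using LambdaLayer to wrap plain TensorFlow ops:

import tensorflow as tf
import tensorlayer.layers as tl

def ReluLayer(prev_layer, name='relu'):
    # Assumed helper: wraps tf.nn.relu in a LambdaLayer so the activation can be
    # chained like any other TensorLayer layer.
    return tl.LambdaLayer(prev_layer, fn=tf.nn.relu, name=name)

def ScaleLayer(prev_layer, scale, name='scale'):
    # Assumed helper: multiplies the residual branch by a constant factor before
    # it is added back onto the shortcut connection.
    return tl.LambdaLayer(prev_layer, fn=lambda t: t * scale, name=name)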
Example #2
 def _res_block(x, n_features=64, kernel_size=(3, 3), scale=1.0, layer=0):
     """
     a resBlock is defined in the paper as (excuse the ugly ASCII graph)
             x
             |\
             | \
             |  relu
             |  conv2d
             |  relu
             |  conv2d
             | /
             |/
             + (addition here)
             |
             result
     """
     nn = ReluLayer(x, name='res%d/ru1' % layer)
     nn = tl.Conv2d(nn,
                    n_features,
                    kernel_size,
                    act=tf.nn.relu,
                    name='res%d/c1' % layer)
     nn = tl.Conv2d(nn,
                    n_features,
                    kernel_size,
                    act=None,
                    name='res%d/c2' % layer)
     nn = ScaleLayer(nn, scale, name='res%d/scale' % layer)
     n = tl.ElementwiseLayer([x, nn], tf.add, name='res%d/res_add' % layer)
     return n
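Residual blocks like this one are typically stacked to form the network body. A hedged sketch of how _res_block might be chained, EDSR-style; the function name build_body, the block count, and the layer names are illustrative assumptions, not from the source:

import tensorflow as tf
import tensorlayer.layers as tl

def build_body(n, n_blocks=16, n_features=64):
    # Illustrative only: stack n_blocks residual blocks and close with a
    # global skip connection, as EDSR-style super-resolution models do.
    skip = n                               # output of the head convolution
    for i in range(n_blocks):
        n = _res_block(n, n_features=n_features, kernel_size=(3, 3),
                       scale=1.0, layer=i)
    n = tl.Conv2d(n, n_features, (3, 3), act=None, name='body/conv_out')
    n = tl.ElementwiseLayer([n, skip], tf.add, name='body/global_add')
    return n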
Example #3
    def __resBlock(self, x, channels=64, kernel_size=[3, 3], scale=1, layer=0):
        """
           Creates a convolutional residual block
           as defined in the paper. More on
           this inside model.py

           a resBlock is defined in the paper as
           (excuse the ugly ASCII graph)
               x
               |\
               | \
               |  conv2d
               |  relu
               |  conv2d
               | /
               |/
               + (addition here)
               |
               result

            :param x: input tensor to pass through the residual block
            :param channels: number of channels to compute in the block
            :param kernel_size: filter kernel size
            :param scale: scaling factor applied to the residual branch
            :param layer: layer index, used for layer naming
            """
        nn = tl.Conv2d(x, channels, kernel_size, act=tf.nn.relu, name='res%d/c1' % (layer))
        nn = tl.Conv2d(nn, channels, kernel_size, act=None, name='res%d/c2' % (layer))
        nn = ScaleLayer(nn, scale, name='res%d/scale' % (layer))
        n = tl.ElementwiseLayer([x, nn], tf.add, name='res%d/res_add' % (layer))
        return n
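The scale argument implements residual scaling as described in the EDSR paper: the residual branch is multiplied by a small constant (0.1 in the paper) before the addition, which stabilizes training when the number of feature maps is large. A minimal illustrative call; the channel count and layer index are assumptions:

# Illustrative only: residual scaling with factor 0.1 on a wide block.
n = self.__resBlock(x, channels=256, kernel_size=[3, 3], scale=0.1, layer=0)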
Example #4
 def __resBlock(self, x, channels=64, kernel_size=(3, 3), scale=1.0, layer=0):
     nn = ReluLayer(x, name='res%d/ru1' % (layer))
     nn = tl.Conv2d(nn, channels - self.prunedlist[layer], kernel_size, act=tf.nn.relu, name='res%d/c1' % (layer))
     nn = tl.Conv2d(nn, channels, kernel_size, act=None, name='res%d/c2' % (layer))
     nn = ScaleLayer(nn, scale, name='res%d/scale' % (layer))
     n = tl.ElementwiseLayer([x, nn], tf.add, name='res%d/res_add' % (layer))
     # Call the dictionary(X, W2, Y) function here. Note that X is x, i.e. this
     # layer's input; W2 is only the weights of the first convolution in this
     # block; and Y = n - Y', where Y' is the pruned output of the previous layer.
     # In other words, keep adjusting inside the residual block to compensate for
     # the part of the shortcut that pruning cannot influence.

     # Record this layer's beta (append it to the model's beta list), and also
     # record the number of pruned channels, since it affects the filter count of
     # the previous layer in the new model.

     # So the old model is computed above; the new one still has to be computed here.
     return n
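The prunedlist bookkeeping described in the comments above means only the first convolution of each block is narrowed; the second convolution restores the full channel count so the elementwise addition with the shortcut x stays shape-compatible. A minimal sketch of that bookkeeping, assuming one pruned-channel count per residual block; num_blocks is a hypothetical attribute, not from the source:

     # Illustrative bookkeeping, one pruned-channel count per residual block.
     # Block i's first conv then emits (channels - prunedlist[i]) feature maps,
     # while its second conv restores the full `channels`.
     self.prunedlist = [0] * self.num_blocks   # hypothetical num_blocks attribute
     self.beta = []                            # per-block reconstruction coefficients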
Example #5
 def __resBlock(self, x, channels=64, kernel_size=[3, 3], scale=1, layer=0):
     nn = tl.Conv2d(x,
                    channels,
                    kernel_size,
                    act=tf.nn.relu,
                    name='res%d/c1' % (layer))
     nn = tl.Conv2d(nn,
                    channels,
                    kernel_size,
                    act=None,
                    name='res%d/c2' % (layer))
     nn = ScaleLayer(nn, scale, name='res%d/scale' % (layer))
     n = tl.ElementwiseLayer([x, nn],
                             tf.add,
                             name='res%d/res_add' % (layer))
     return n
Example #6
 def __resBlock(self,
                x,
                channels=64,
                kernel_size=(3, 3),
                scale=1.0,
                layer=0):
     nn = ReluLayer(x, name='res%d/ru1' % (layer))
     nn = tl.Conv2d(nn,
                    channels - self.prunedlist[layer],
                    kernel_size,
                    act=tf.nn.relu,
                    name='res%d/c1' % (layer))
     nn = tl.Conv2d(nn,
                    channels,
                    kernel_size,
                    act=None,
                    name='res%d/c2' % (layer))
     nn = ScaleLayer(nn, scale, name='res%d/scale' % (layer))
     n = tl.ElementwiseLayer([x, nn],
                             tf.add,
                             name='res%d/res_add' % (layer))
     return n