Example #1
 def dwconvLayer(self,
                 kernel,
                 multi,
                 stride=1,
                 pad='SAME',
                 activation=-1,
                 batch_norm=False,
                 weight=None):
     with tf.variable_scope('dwconv_' + str(self.layernum)):
         if not isinstance(kernel, list):
             # A scalar kernel size means a square [height, width] kernel.
             kernel = [kernel, kernel]
         self.result = L.conv2Ddw(self.result,
                                  self.inpsize[3],
                                  kernel,
                                  multi,
                                  'dwconv_' + str(self.layernum),
                                  stride=stride,
                                  pad=pad,
                                  weight=weight)
         if batch_norm:
             self.result = L.batch_norm(self.result,
                                        'batch_norm_' + str(self.layernum))
         self.layernum += 1
         self.activate(activation)
         self.inpsize = self.result.get_shape().as_list()
     return [self.result, list(self.inpsize)]
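
The heavy lifting in these examples is done by L.conv2Ddw, whose implementation is not part of this listing. As a rough orientation only, a minimal sketch of such a depthwise-convolution helper in TensorFlow 1.x (the tf.variable_scope API used here) might look like the code below; the helper name, variable names, initializers, and defaults are assumptions, and the optional weight/usebias arguments seen in the examples are omitted for brevity.

import tensorflow as tf

def conv2Ddw(x, inchannel, kernel, multi, name, stride=1, pad='SAME'):
    # Illustrative sketch only, not the actual layers module.
    # kernel is [height, width]; multi is the channel multiplier, so the
    # output carries inchannel * multi feature maps.
    with tf.variable_scope(name):
        w = tf.get_variable('weight',
                            [kernel[0], kernel[1], inchannel, multi],
                            initializer=tf.truncated_normal_initializer(stddev=0.02))
        b = tf.get_variable('bias', [inchannel * multi],
                            initializer=tf.constant_initializer(0.0))
        out = tf.nn.depthwise_conv2d(x, w, [1, stride, stride, 1], pad)
        return tf.nn.bias_add(out, b)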
Example #2
 def dwconvLayer(self,
                 kernel,
                 multi,
                 stride=1,
                 pad='SAME',
                 activation=-1,
                 batch_norm=False,
                 weight=None,
                 usebias=True):
     with tf.variable_scope('dwconv_' + str(self.layernum)):
         if not isinstance(kernel, list):
             kernel = [kernel, kernel]
         self.result = L.conv2Ddw(self.result,
                                  self.inpsize[3],
                                  kernel,
                                  multi,
                                  'dwconv_' + str(self.layernum),
                                  stride=stride,
                                  pad=pad,
                                  weight_data=weight,
                                  usebias=usebias)
         if batch_norm:
             self.result = L.batch_norm(self.result,
                                        'batch_norm_' + str(self.layernum),
                                        training=self.bntraining,
                                        epsilon=self.epsilon)
         self.layernum += 1
         self.inpsize = self.result.get_shape().as_list()
         self.activate(activation)
     return self.result
Example #3
 def dwconvLayer(self,
                 kernel,
                 multi,
                 stride=1,
                 pad='SAME',
                 activation=-1,
                 batch_norm=False):
     with tf.variable_scope('dwconv_' + str(self.layernum)):
         if not isinstance(kernel, list):
             kernel = [kernel, kernel]
         self.result = L.conv2Ddw(self.result,
                                  self.inpsize[3],
                                  kernel,
                                  multi,
                                  'dwconv_' + str(self.layernum),
                                  stride=stride,
                                  pad=pad)
         if batch_norm:
             self.result = L.batch_norm(self.result,
                                        'batch_norm_' + str(self.layernum))
         self.layernum += 1
         if pad == 'VALID':
             self.inpsize[1] -= kernel[0] - stride
             self.inpsize[2] -= kernel[1] - stride
         self.inpsize[1] = self.inpsize[1] // stride
         self.inpsize[2] = self.inpsize[2] // stride
         self.inpsize[3] = self.inpsize[3] * multi
         self.activate(activation)
     return [self.result, list(self.inpsize)]
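
Example #3 differs from the other two in that it tracks the output shape by hand instead of reading it back from the tensor. For 'VALID' padding its arithmetic, (size - (kernel - stride)) // stride, equals the standard formula (size - kernel) // stride + 1; for 'SAME' it uses size // stride, which matches the usual ceil(size / stride) only when the stride divides the size evenly. A small standalone check of that bookkeeping (plain Python; the helper names below are made up for illustration):

import math

def expected_out_size(size, k, stride, pad):
    # Standard convolution output-size formulas.
    if pad == 'VALID':
        return (size - k) // stride + 1
    return math.ceil(size / stride)  # 'SAME'

def example3_out_size(size, k, stride, pad):
    # Mirrors the shape bookkeeping in Example #3.
    if pad == 'VALID':
        size -= k - stride
    return size // stride

for size, k, stride, pad in [(224, 3, 2, 'SAME'), (112, 3, 1, 'VALID'),
                             (56, 5, 2, 'VALID'), (225, 3, 2, 'SAME')]:
    print(size, k, stride, pad,
          example3_out_size(size, k, stride, pad),
          expected_out_size(size, k, stride, pad))

The last case (size 225, stride 2, 'SAME') is the one where the two disagree, illustrating the caveat above.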