def cascade_feature(self, _input, block_num):
    """Build a cascaded atrous (dilated) convolution pyramid over `_input`.

    Applies `self.atrou_num` atrous conv + BN + PReLU layers in sequence,
    each with an exponentially growing dilation rate (2, 4, 8, ...),
    concatenates every intermediate output along the channel axis, then
    projects back to the input channel count with a 1x1 convolution.

    Args:
        _input: input feature tensor; channel count is read from its last axis.
        block_num: pyramid block index. Unused in this variant — the dilation
            schedule here is fixed at 2**i regardless of block_num.

    Returns:
        Tensor with the same channel count as `_input`.
    """
    atrous_layer = []
    out_feature = int(_input.get_shape()[-1])
    output = _input
    for i in range(1, self.atrou_num + 1):
        # Fixed exponential dilation schedule: 2, 4, 8, ... for every block.
        dilate_rate = int(np.power(2, i))
        output = atrous_bn_prelu(output, kernel_size=3, stride=1,
                                 output_channels=out_feature,
                                 dilation_rate=dilate_rate,
                                 is_training=self.is_training,
                                 name='atrous_conv%d' % i)
        atrous_layer.append(output)
    # Concatenate all cascade levels. Generalized from a hard-coded 3-way
    # concat (atrous_layer[0..2]) which raised IndexError for atrou_num < 3
    # and silently dropped levels for atrou_num > 3.
    output = tf.concat(atrous_layer, axis=-1)
    # 1x1 projection back to out_feature channels.
    # NOTE(review): this variant calls _conv2d while the sibling variant
    # calls _conv3d — confirm which dimensionality is intended here.
    output = _conv2d(output, kernel_size=1, stride=1,
                     output_feature=out_feature, use_bias=True,
                     name='pyramid_conv_1x1')
    return output
def cascade_feature(self, _input, block_num):
    """Build a cascaded atrous (dilated) convolution pyramid over `_input`.

    Applies `self.atrou_num` atrous conv + BN + PReLU layers in sequence,
    each with a block-dependent dilation rate 2**(4 - block_num) * i
    (e.g. block 1 -> 8, 16, 24; block 2 -> 4, 8, 12; block 3 -> 2, 4, 6),
    concatenates every intermediate output along the channel axis, then
    projects back to the input channel count with a 1x1 3D convolution.

    NOTE(review): this is a redefinition of `cascade_feature` — defined at
    module/class level twice, this later definition shadows the earlier one.
    Confirm which variant is meant to be active.

    Args:
        _input: input feature tensor; channel count is read from its last axis.
        block_num: pyramid block index; controls the base dilation rate.

    Returns:
        Tensor with the same channel count as `_input`.
    """
    atrous_layer = []
    out_feature = int(_input.get_shape()[-1])
    output = _input
    for i in range(1, self.atrou_num + 1):
        # Dilation grows linearly in i, scaled down for deeper blocks.
        dilate_rate = int(np.power(2, 4 - block_num) * i)
        output = atrous_bn_prelu(output, kernel_size=3, stride=1,
                                 output_channels=out_feature,
                                 dilation_rate=dilate_rate,
                                 is_training=self.is_training,
                                 name='atrous_conv%d' % i)
        atrous_layer.append(output)
    # Concatenate all cascade levels. Generalized from a hard-coded 3-way
    # concat (atrous_layer[0..2]) which raised IndexError for atrou_num < 3
    # and silently dropped levels for atrou_num > 3. The stray debug
    # print of the concatenated tensor was also removed.
    output = tf.concat(atrous_layer, axis=-1)
    # 1x1 projection back to out_feature channels.
    output = _conv3d(output, kernel_size=1, stride=1,
                     output_feature=out_feature, use_bias=True,
                     name='pyramid_conv_1x1')
    return output