def concat_conv_bn_relu_drop(input_, feature, filter_, strides, activation_type=None,
                             norm_type=None, is_train=True, keep_prob=1, replace=1,
                             resnet=True):
    with tf.variable_scope('concat_conv_bn_relu_drop'):
        # Concatenate the decoder input with the matching encoder feature map,
        # then normalize the merged tensor.
        x = crop_and_concat(input_, feature)
        x = get_normalization_fn(x, norm_type, is_train)
        for i in range(replace):
            with tf.variable_scope('conv_' + str(i + 1)):
                if i == 0:
                    # The concatenation doubles the channel count, so double the
                    # input-channel entry (second to last) of the filter shape.
                    _filter = [_f if _i != len(filter_) - 2 else _f * 2
                               for _i, _f in enumerate(filter_)]
                    x = convolution(x, _filter, strides=strides)
                    # Keep a normalized copy of the first convolution's output
                    # as the residual branch.
                    input_ = get_normalization_fn(x, norm_type, is_train, scope='input')
                else:
                    x = convolution(x, filter_, strides=strides)
                    x = get_normalization_fn(x, norm_type, is_train)
                x = get_activation_fn(x, activation_type)
                x = tf.nn.dropout(x, keep_prob=keep_prob)
                if resnet and i == replace - 1:
                    x = resnet_add(x, input_)
        return x
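
# Note: `crop_and_concat` and `resnet_add` are helpers defined elsewhere in this
# repository. As a rough, non-authoritative sketch of what they might look like
# for 5-D NDHWC tensors (TF 1.x, `tf` assumed imported), one could write the
# following; the actual implementations may differ.
def _crop_and_concat_sketch(x1, x2):
    # Center-crop x1 to x2's spatial size, then concatenate on the channel axis.
    x1_shape = tf.shape(x1)
    x2_shape = tf.shape(x2)
    offsets = [0,
               (x1_shape[1] - x2_shape[1]) // 2,
               (x1_shape[2] - x2_shape[2]) // 2,
               (x1_shape[3] - x2_shape[3]) // 2,
               0]
    size = [-1, x2_shape[1], x2_shape[2], x2_shape[3], -1]
    x1_crop = tf.slice(x1, offsets, size)
    return tf.concat([x1_crop, x2], axis=-1)


def _resnet_add_sketch(x, residual):
    # Element-wise residual addition; shapes are assumed to already match.
    return tf.add(x, residual)
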
def _build_network(self):
    with tf.variable_scope('vnet'):
        with tf.variable_scope('input_layer'):
            # Broadcast the single-channel input volume to `init_filter` channels.
            x = tf.tile(self.image_ph, [1, 1, 1, 1, self.init_filter])
            x = get_normalization_fn(x, self.norm_type, self.is_train)
        # Encoder outputs, kept for the decoder's skip connections.
        features = {}
        for level_name, items in net_configs.items():
            with tf.variable_scope(level_name):
                for sub_name, _configs in items.items():
                    n_channels = get_num_channels(x)
                    if 'conv_block' == sub_name:
                        filter_ = _configs['kernel'] + [n_channels, n_channels]
                        x = conv_bn_relu_drop(x, filter_, _configs['strides'],
                                              self.activation_type, self.norm_type,
                                              self.is_train, self.keep_prob,
                                              _configs['replace'], _configs['resnet'])
                        features[level_name] = x
                    elif 'down_block' == sub_name:
                        # Downsampling convolution doubles the channel count.
                        filter_ = _configs['kernel'] + [n_channels, n_channels * 2]
                        x = down_conv_bn_relu(x, filter_, _configs['strides'],
                                              self.activation_type, self.norm_type,
                                              self.is_train)
                    elif 'up_block' == sub_name:
                        # Transposed convolution halves the channel count.
                        filter_ = _configs['kernel'] + [n_channels // 2, n_channels]
                        x = deconv_bn_relu(x, filter_, _configs['strides'],
                                           self.activation_type, self.norm_type,
                                           self.is_train)
                    elif 'concat_conv_block' == sub_name:
                        filter_ = _configs['kernel'] + [n_channels, n_channels]
                        feature = features[_configs['feature']]
                        x = concat_conv_bn_relu_drop(x, feature, filter_,
                                                     _configs['strides'],
                                                     self.activation_type,
                                                     self.norm_type, self.is_train,
                                                     self.keep_prob,
                                                     _configs['replace'],
                                                     _configs['resnet'])
                    else:
                        raise Exception('Unknown operation: ' + sub_name)
        with tf.variable_scope('output_layer'):
            # 1x1x1 convolution mapping `init_filter` channels to per-class logits.
            x = convolution(x, [1, 1, 1, self.init_filter, self.num_classes])
            logits = get_normalization_fn(x, self.norm_type, self.is_train)
    return logits
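
# Illustrative only: `_build_network` walks an ordered `net_configs` mapping of
# level name -> {block name -> block config}, where each config provides the keys
# read above ('kernel', 'strides', 'replace', 'resnet', and 'feature' for concat
# blocks). The entry below is a made-up two-level example showing the expected
# shape of that mapping; the real configuration lives elsewhere in this
# repository, and its kernel sizes, strides, and level names will differ.
import collections

_example_net_configs = collections.OrderedDict([
    ('level_1', collections.OrderedDict([
        ('conv_block', {'kernel': [5, 5, 5], 'strides': [1, 1, 1, 1, 1],
                        'replace': 1, 'resnet': True}),
        ('down_block', {'kernel': [2, 2, 2], 'strides': [1, 2, 2, 2, 1]}),
    ])),
    ('level_1_up', collections.OrderedDict([
        ('up_block', {'kernel': [2, 2, 2], 'strides': [1, 2, 2, 2, 1]}),
        ('concat_conv_block', {'kernel': [5, 5, 5], 'strides': [1, 1, 1, 1, 1],
                               'replace': 1, 'resnet': True,
                               'feature': 'level_1'}),
    ])),
])
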
def deconv_bn_relu(x, filter_, strides, activation_type=None, norm_type=None,
                   is_train=True):
    with tf.variable_scope('deconv_bn_relu'):
        # Transposed convolution (upsampling) followed by normalization and activation.
        x = deconvolution(x, filter_, strides=strides)
        x = get_normalization_fn(x, norm_type, is_train)
        x = get_activation_fn(x, activation_type)
        return x
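
# Note: `deconvolution` is defined elsewhere in this repository. Given that
# `_build_network` builds its filter shape as kernel + [n_channels // 2, n_channels],
# it plausibly wraps tf.nn.conv3d_transpose, whose filter layout is
# [depth, height, width, out_channels, in_channels]. A minimal sketch under that
# assumption (variable names, initializers, and the 'SAME'-padding output shape
# are placeholders, not the repository's actual helper):
def _deconvolution_sketch(x, filter_, strides):
    w = tf.get_variable('W', shape=filter_,
                        initializer=tf.glorot_uniform_initializer())
    b = tf.get_variable('b', shape=[filter_[-2]],
                        initializer=tf.zeros_initializer())
    in_shape = tf.shape(x)
    # With 'SAME' padding the output spatial size is the input size times the stride.
    out_shape = tf.stack([in_shape[0],
                          in_shape[1] * strides[1],
                          in_shape[2] * strides[2],
                          in_shape[3] * strides[3],
                          filter_[-2]])
    y = tf.nn.conv3d_transpose(x, w, out_shape, strides=strides, padding='SAME')
    return tf.nn.bias_add(y, b)
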
def conv_bn_relu_drop(input_, filter_, strides=None, activation_type=None,
                      norm_type=None, is_train=True, keep_prob=1, replace=1,
                      resnet=True):
    with tf.variable_scope('conv_bn_relu_drop'):
        x = input_
        for i in range(replace):
            with tf.variable_scope('conv_' + str(i + 1)):
                # Convolution -> normalization -> activation -> dropout.
                x = convolution(x, filter_, strides=strides,
                                activation_type=activation_type)
                x = get_normalization_fn(x, norm_type, is_train)
                x = get_activation_fn(x, activation_type)
                x = tf.nn.dropout(x, keep_prob=keep_prob)
                if resnet and i == replace - 1:
                    # Residual connection back to the block input.
                    x = resnet_add(x, input_)
        return x
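
# Note: `convolution` is defined elsewhere in this repository (and, judging by the
# call above, also accepts an `activation_type` argument). As a rough sketch only,
# a plain 3-D convolution with filter layout
# [depth, height, width, in_channels, out_channels] could look like this; variable
# names and initializers are placeholders, and the real helper's optional
# activation handling is not reproduced:
def _convolution_sketch(x, filter_, strides=None):
    strides = strides if strides is not None else [1, 1, 1, 1, 1]
    w = tf.get_variable('W', shape=filter_,
                        initializer=tf.glorot_uniform_initializer())
    b = tf.get_variable('b', shape=[filter_[-1]],
                        initializer=tf.zeros_initializer())
    y = tf.nn.conv3d(x, w, strides=strides, padding='SAME')
    return tf.nn.bias_add(y, b)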