Example #1
import tensorflow as tf  # the code below uses the TensorFlow 1.x API (tf.image.resize_images, variable scopes)


def subtract_refine(subtract, layer7, layer8, layer9, phase, drop_conv, n_class=1):
    _, y_width, y_height, y_channel = subtract.get_shape().as_list()  # static shape of the input tensor: width, height and channel count
    seed = tf.reshape(subtract, [-1, y_width, y_height, y_channel])  # reshape the image into the NHWC layout TensorFlow expects


    refine1 = conv_bn_relu_drop(x=layer7, kernal=[1, 1, 128, 64], phase=phase, drop=drop_conv, scope='refine1_1')
    seed1 = tf.image.resize_images(images=seed, size=[int(y_width / 4), int(y_height / 4)], method=0)
    refine1 = crop_and_concat(refine1, seed1)
    refine1 = conv_bn_relu_drop(x=refine1, kernal=[3, 3, 65, 65], phase=phase, drop=drop_conv, scope='refine1_2')
    refine1 = squeeze_excitation_model(refine1, out_dim=65, scope='sem2_1')
    refine1 = deconv_relu(x=refine1, kernal=[3, 3, 32, 65], scope='refine1_deconv')

    refine2 = conv_bn_relu_drop(x=layer8, kernal=[1, 1, 64, 32], phase=phase, drop=drop_conv, scope='refine2_1')
    seed2 = tf.image.resize_images(images=seed, size=[int(y_width / 2), int(y_height / 2)], method=0)
    refine2 = crop_and_concat(refine2, seed2)
    refine2 = conv_bn_relu_drop(x=refine2, kernal=[3, 3, 33, 32], phase=phase, drop=drop_conv, scope='refine2_2')
    refine2 = crop_and_concat(refine1, refine2)
    refine2 = conv_bn_relu_drop(x=refine2, kernal=[3, 3, 64, 64], phase=phase, drop=drop_conv, scope='refine1_3')
    refine2 = squeeze_excitation_model(refine2, out_dim=64, scope='sem2_2')
    refine2 = deconv_relu(x=refine2, kernal=[3, 3, 32, 64], scope='refine2_deconv')

    refine3 = conv_bn_relu_drop(x=layer9, kernal=[1, 1, 32, 1], phase=phase, drop=drop_conv, scope='refine3_1')
    seed3 = tf.image.resize_images(images=seed, size=[int(y_width), int(y_height)], method=0)
    refine3 = crop_and_concat(refine3, seed3)
    refine3 = conv_bn_relu_drop(x=refine3, kernal=[3, 3, 2, 1], phase=phase, drop=drop_conv, scope='refine3_2')
    refine3 = crop_and_concat(refine2, refine3)
    refine3 = conv_bn_relu_drop(x=refine3, kernal=[3, 3, 33, 33], phase=phase, drop=drop_conv, scope='refine3_3')
    refine3 = squeeze_excitation_model(refine3, out_dim=33, scope='sem2_3')


    out1 = conv_sigmoid(x=refine3, kernal=[1, 1, 33, n_class], scope='out1')

    return out1
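

# --- Hypothetical helper sketches (not part of the original snippet) ---------
# The building blocks used above and below (conv_bn_relu_drop, deconv_relu,
# crop_and_concat, max_pooling_2x2, conv_sigmoid, squeeze_excitation_model) are
# not defined in this example. The minimal TF 1.x sketches below only match the
# call signatures used here; the initializers, the dropout convention (drop is
# treated as a keep probability) and the SE reduction ratio are assumptions,
# not the original implementation.


def conv_bn_relu_drop(x, kernal, phase, drop, scope=None):
    # kernal = [k_h, k_w, in_channels, out_channels]: conv -> batch norm -> ReLU -> dropout
    with tf.variable_scope(scope):
        w = tf.get_variable('w', shape=kernal, initializer=tf.truncated_normal_initializer(stddev=0.1))
        b = tf.get_variable('b', shape=[kernal[-1]], initializer=tf.zeros_initializer())
        conv = tf.nn.conv2d(x, w, strides=[1, 1, 1, 1], padding='SAME') + b
        bn = tf.layers.batch_normalization(conv, training=phase)
        return tf.nn.dropout(tf.nn.relu(bn), keep_prob=drop)


def deconv_relu(x, kernal, scope=None):
    # kernal = [k_h, k_w, out_channels, in_channels]: 2x transposed convolution + ReLU
    with tf.variable_scope(scope):
        w = tf.get_variable('w', shape=kernal, initializer=tf.truncated_normal_initializer(stddev=0.1))
        b = tf.get_variable('b', shape=[kernal[-2]], initializer=tf.zeros_initializer())
        in_shape = tf.shape(x)
        out_shape = tf.stack([in_shape[0], in_shape[1] * 2, in_shape[2] * 2, kernal[-2]])
        deconv = tf.nn.conv2d_transpose(x, w, out_shape, strides=[1, 2, 2, 1], padding='SAME')
        return tf.nn.relu(tf.nn.bias_add(deconv, b))


def crop_and_concat(x1, x2):
    # Center-crop x1 to x2's spatial size, then concatenate along the channel axis.
    x1_shape = tf.shape(x1)
    x2_shape = tf.shape(x2)
    offsets = [0, (x1_shape[1] - x2_shape[1]) // 2, (x1_shape[2] - x2_shape[2]) // 2, 0]
    size = [-1, x2_shape[1], x2_shape[2], -1]
    return tf.concat([tf.slice(x1, offsets, size), x2], axis=3)


def max_pooling_2x2(x):
    # 2x2 max pooling with stride 2, halving the spatial resolution.
    return tf.nn.max_pool(x, ksize=[1, 2, 2, 1], strides=[1, 2, 2, 1], padding='SAME')


def conv_sigmoid(x, kernal, scope=None):
    # 1x1 convolution + sigmoid, producing the per-pixel probability map.
    with tf.variable_scope(scope):
        w = tf.get_variable('w', shape=kernal, initializer=tf.truncated_normal_initializer(stddev=0.1))
        b = tf.get_variable('b', shape=[kernal[-1]], initializer=tf.zeros_initializer())
        return tf.nn.sigmoid(tf.nn.conv2d(x, w, strides=[1, 1, 1, 1], padding='SAME') + b)


def squeeze_excitation_model(x, out_dim, ratio=4, scope=None):
    # Squeeze-and-Excitation block: global average pool, bottleneck MLP, channel-wise rescaling.
    with tf.variable_scope(scope):
        squeeze = tf.reduce_mean(x, axis=[1, 2])  # (N, C)
        excitation = tf.layers.dense(squeeze, units=max(out_dim // ratio, 1), activation=tf.nn.relu, name='fc1')
        excitation = tf.layers.dense(excitation, units=out_dim, activation=tf.nn.sigmoid, name='fc2')
        return x * tf.reshape(excitation, [-1, 1, 1, out_dim])

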
def _create_conv_net(X,
                     image_width,
                     image_height,
                     image_channel,
                     phase,
                     drop_conv,
                     n_class=1):
    inputX = tf.reshape(
        X,
        [-1, image_width, image_height, image_channel])  # shape=(?, 32, 32, 1)
    # UNet model
    # layer1->convolution
    layer0 = conv_bn_relu_drop(x=inputX,
                               kernal=[3, 3, image_channel, 32],
                               phase=phase,
                               drop=drop_conv,
                               scope='layer0')
    layer1 = conv_bn_relu_drop(x=layer0,
                               kernal=[3, 3, 32, 32],
                               phase=phase,
                               drop=drop_conv,
                               scope='layer1')
    # print(layer1.get_shape().as_list())
    layer1 = squeeze_excitation_model(layer1, out_dim=32, scope='sem1')
    pool1 = max_pooling_2x2(layer1)
    # layer2->convolution
    layer2 = conv_bn_relu_drop(x=pool1,
                               kernal=[3, 3, 32, 64],
                               phase=phase,
                               drop=drop_conv,
                               scope='layer2_1')
    layer2 = conv_bn_relu_drop(x=layer2,
                               kernal=[3, 3, 64, 64],
                               phase=phase,
                               drop=drop_conv,
                               scope='layer2_2')
    layer2 = squeeze_excitation_model(layer2, out_dim=64, scope='sem2')
    pool2 = max_pooling_2x2(layer2)

    # layer3->convolution
    layer3 = conv_bn_relu_drop(x=pool2,
                               kernal=[3, 3, 64, 128],
                               phase=phase,
                               drop=drop_conv,
                               scope='layer3_1')
    layer3 = conv_bn_relu_drop(x=layer3,
                               kernal=[3, 3, 128, 128],
                               phase=phase,
                               drop=drop_conv,
                               scope='layer3_2')
    layer3 = squeeze_excitation_model(layer3, out_dim=128, scope='sem3')
    pool3 = max_pooling_2x2(layer3)

    # layer4->convolution
    layer4 = conv_bn_relu_drop(x=pool3,
                               kernal=[3, 3, 128, 256],
                               phase=phase,
                               drop=drop_conv,
                               scope='layer4_1')
    layer4 = conv_bn_relu_drop(x=layer4,
                               kernal=[3, 3, 256, 256],
                               phase=phase,
                               drop=drop_conv,
                               scope='layer4_2')
    layer4 = squeeze_excitation_model(layer4, out_dim=256, scope='sem4')
    pool4 = max_pooling_2x2(layer4)

    # layer5->convolution
    layer5 = conv_bn_relu_drop(x=pool4,
                               kernal=[3, 3, 256, 512],
                               phase=phase,
                               drop=drop_conv,
                               scope='layer5_1')
    layer5 = conv_bn_relu_drop(x=layer5,
                               kernal=[3, 3, 512, 512],
                               phase=phase,
                               drop=drop_conv,
                               scope='layer5_2')

    # layer6->deconvolution

    deconv1 = deconv_relu(x=layer5, kernal=[3, 3, 256, 512], scope='deconv1')

    layer6 = crop_and_concat(layer4, deconv1)

    # layer6->convolution
    layer6 = conv_bn_relu_drop(x=layer6,
                               kernal=[3, 3, 512, 256],
                               phase=phase,
                               drop=drop_conv,
                               scope='layer6_1')
    layer6 = conv_bn_relu_drop(x=layer6,
                               kernal=[3, 3, 256, 256],
                               phase=phase,
                               drop=drop_conv,
                               scope='layer6_2')
    layer6 = squeeze_excitation_model(layer6, out_dim=256, scope='sem5')
    deconv2 = deconv_relu(layer6, kernal=[3, 3, 128, 256], scope='deconv2')

    layer7 = crop_and_concat(layer3, deconv2)

    # layer7->convolution
    layer7 = conv_bn_relu_drop(x=layer7,
                               kernal=[3, 3, 256, 128],
                               phase=phase,
                               drop=drop_conv,
                               scope='layer7_1')
    layer7 = conv_bn_relu_drop(x=layer7,
                               kernal=[3, 3, 128, 128],
                               phase=phase,
                               drop=drop_conv,
                               scope='layer7_2')

    layer7 = squeeze_excitation_model(layer7, out_dim=128, scope='sem6')
    # layer8->deconvolution
    deconv3 = deconv_relu(x=layer7, kernal=[3, 3, 64, 128], scope='deconv3')

    layer8 = crop_and_concat(layer2, deconv3)

    # layer8->convolution
    layer8 = conv_bn_relu_drop(x=layer8,
                               kernal=[3, 3, 128, 64],
                               phase=phase,
                               drop=drop_conv,
                               scope='layer8_1')
    layer8 = conv_bn_relu_drop(x=layer8,
                               kernal=[3, 3, 64, 64],
                               phase=phase,
                               drop=drop_conv,
                               scope='layer8_2')
    layer8 = squeeze_excitation_model(layer8, out_dim=64, scope='sem7')
    deconv4 = deconv_relu(layer8, kernal=[3, 3, 32, 64], scope='deconv4')

    layer9 = crop_and_concat(layer1, deconv4)

    # layer9->convolution
    layer9 = conv_bn_relu_drop(x=layer9,
                               kernal=[3, 3, 64, 32],
                               phase=phase,
                               drop=drop_conv,
                               scope='layer9_1')
    layer9 = conv_bn_relu_drop(x=layer9,
                               kernal=[3, 3, 32, 32],
                               phase=phase,
                               drop=drop_conv,
                               scope='layer9_2')
    layer9 = squeeze_excitation_model(layer9, out_dim=32, scope='sem8')
    output = conv_sigmoid(x=layer9, kernal=[1, 1, 32, n_class], scope='output')

    return layer7, layer8, layer9, output
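

# --- Hypothetical usage sketch (not in the original snippet) -----------------
# _create_conv_net builds the SE-UNet trunk and returns the decoder features
# (layer7, layer8, layer9) together with a coarse prediction; subtract_refine
# then fuses those features with a "subtract" seed image of the same spatial
# size as the input (e.g. a difference/prior map) to produce the refined map.
# The 32x32x1 input size follows the shape noted in the comment inside
# _create_conv_net; the batch size and keep probability here are assumptions.
if __name__ == '__main__':
    import numpy as np

    width, height, channels = 32, 32, 1  # must be divisible by 16 (four 2x2 poolings)
    X = tf.placeholder(tf.float32, shape=[None, width, height, channels], name='X')
    subtract = tf.placeholder(tf.float32, shape=[None, width, height, 1], name='subtract')
    phase = tf.placeholder(tf.bool, name='phase')        # True during training (batch norm)
    drop_conv = tf.placeholder(tf.float32, name='drop')  # dropout keep probability

    layer7, layer8, layer9, coarse_out = _create_conv_net(
        X, width, height, channels, phase, drop_conv, n_class=1)
    refined_out = subtract_refine(
        subtract, layer7, layer8, layer9, phase, drop_conv, n_class=1)

    with tf.Session() as sess:
        sess.run(tf.global_variables_initializer())
        feed = {X: np.zeros((2, width, height, channels), np.float32),
                subtract: np.zeros((2, width, height, 1), np.float32),
                phase: False, drop_conv: 1.0}
        coarse, refined = sess.run([coarse_out, refined_out], feed_dict=feed)
        print(coarse.shape, refined.shape)  # (2, 32, 32, 1) (2, 32, 32, 1)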