                           name='Up3')(conv7_1))  # tail of up8: 2x2 up-conv after 2x upsampling of conv7_1
merge8 = keras.layers.Concatenate(name='Concat3')([conv2, up8])   # skip connection from conv2
conv8 = Residual15(96, 32, merge8)
conv8_1 = Residual16(32, 16, conv8)
up9 = Conv2D(16, 2, activation='relu', padding='same',
             kernel_initializer='he_normal',
             name='UpConv4')(UpSampling2D(size=(2, 2), name='Up4')(conv8_1))
merge9 = keras.layers.Concatenate(name='Concat4')([conv1, up9])   # skip connection from conv1
conv9 = Residual17(48, 16, merge9)
conv10 = Residual18(16, 2, conv9)
conv10 = Residual19(2, 1, conv10)
conv11 = Conv2D(1, 1, activation='sigmoid', name='Output')(conv10)  # single-channel mask head

# full-resolution refinement branch, pinned to one GPU
with tf.device('/device:GPU:3'):
    init = initial_conv_block(input, weight_decay=5e-4)
    #x1 = ResidualR(32, 64, init) #192x192x64
    #x1 = ResidualR(64, 64, x1)
    #x1 = ResidualR(64, 64, x1) #192x192x64
    x1 = Conv2D(64, (3, 3), padding='same', kernel_initializer='he_normal')(init)
    x1 = BatchNormalization()(x1)
    x1 = layers.LeakyReLU()(x1)
    x1concat = keras.layers.Concatenate()([x1, conv9])  #192x192x80
    x1se = squeeze_excite_block(x1concat)
    x1conv1 = SeparableConv2D(80, (1, 1),
                              padding='same',
                              kernel_initializer='he_normal')(x1se)  # completion assumed; mirrors block1conv1 below
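# -- Sketch: `squeeze_excite_block` is not defined in this listing. Below is a
# -- minimal squeeze-and-excitation gate (Hu et al., CVPR 2018) as an assumption
# -- of what it computes; the reduction ratio of 16 and channels-last layout are
# -- guesses, and the real helper in this repo may differ.
from tensorflow.keras import layers

def squeeze_excite_block_sketch(tensor, ratio=16):
    """Channel attention: squeeze (global pool) then excite (gated rescale)."""
    filters = tensor.shape[-1]                    # channels-last layout assumed
    se = layers.GlobalAveragePooling2D()(tensor)  # squeeze: per-channel statistics
    se = layers.Reshape((1, 1, filters))(se)
    se = layers.Dense(filters // ratio, activation='relu',
                      kernel_initializer='he_normal', use_bias=False)(se)
    se = layers.Dense(filters, activation='sigmoid',
                      kernel_initializer='he_normal', use_bias=False)(se)
    return layers.Multiply()([tensor, se])        # excite: rescale input channels

# usage would match the calls above, e.g. x1se = squeeze_excite_block_sketch(x1concat)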
# A second, wider variant of the same decoder (duplicate layer names such as
# 'Concat3' and 'Output' indicate a separate model definition):
up8 = Conv2D(128, 2, activation='relu', padding='same',  # filter count inferred from the 256-ch merge below
             kernel_initializer='he_normal',
             name='UpConv3')(UpSampling2D(size=(2, 2), name='Up3')(conv7))
merge8 = keras.layers.Concatenate(name='Concat3')([conv2, up8])
conv8 = Residual15(256, 128, merge8)
#conv8 = Residual16(128, 128, conv8)
up9 = Conv2D(64, 2, activation='relu', padding='same',
             kernel_initializer='he_normal',
             name='UpConv4')(UpSampling2D(size=(2, 2), name='Up4')(conv8))
merge9 = keras.layers.Concatenate(name='Concat4')([conv1, up9])
conv9 = Residual17(128, 64, merge9)
conv10 = Residual18(64, 16, conv9)
conv10 = Residual19(16, 1, conv10)
conv11 = Conv2D(1, 1, activation='sigmoid', name='Output')(conv10)

# ResNet-style stem and first block, fused with the decoder output
conv1r = _conv_bn_relu(filters=64, kernel_size=(7, 7), strides=(1, 1))(input)
block1 = _residual_block(basic_block, filters=64, repetitions=1,
                         is_first_layer=True)(conv1r)
block1concat = keras.layers.Concatenate()([block1, conv9])
block1se = squeeze_excite_block(block1concat)
block1conv1 = Conv2D(64, (1, 1), padding='same',
                     kernel_initializer='he_normal')(block1se)
block1conv1 = BatchNormalization(axis=CHANNEL_AXIS)(block1conv1)
block1conv1 = layers.LeakyReLU()(block1conv1)
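# -- Sketch: the Residual15...Residual19 helpers are defined elsewhere in the
# -- repo. From the call pattern ResidualN(in_filters, out_filters, x) they
# -- appear to be residual blocks that project the shortcut when the channel
# -- count changes; the exact layer order below is an assumption, not the
# -- authors' block.
from tensorflow.keras import layers

def residual_block_sketch(in_filters, out_filters, x):
    """Two 3x3 conv-BN-LeakyReLU stages plus an identity/projection shortcut."""
    shortcut = x
    y = layers.Conv2D(out_filters, (3, 3), padding='same',
                      kernel_initializer='he_normal')(x)
    y = layers.BatchNormalization()(y)
    y = layers.LeakyReLU()(y)
    y = layers.Conv2D(out_filters, (3, 3), padding='same',
                      kernel_initializer='he_normal')(y)
    y = layers.BatchNormalization()(y)
    if in_filters != out_filters:   # 1x1 projection so the add shapes match
        shortcut = layers.Conv2D(out_filters, (1, 1), padding='same',
                                 kernel_initializer='he_normal')(shortcut)
    out = layers.Add()([y, shortcut])
    return layers.LeakyReLU()(out)

# under this assumption, conv8 = Residual15(256, 128, merge8) would read
# conv8 = residual_block_sketch(256, 128, merge8)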