Example #1
0
        def up_layer(x1, x2, in_size1, in_size2, out_size, i):
            """Decoder step of a 3-D U-Net-style network.

            Upsamples `x1` with a strided transposed convolution to the
            spatial shape of the skip connection `x2`, concatenates the two
            along the channel axis, then refines the result with two
            3x3x3 conv -> ReLU -> dropout stages.

            Args:
                x1: feature tensor from the previous (coarser) decoder level.
                x2: skip-connection tensor from the encoder path.
                in_size1: channel count of `x1`.
                in_size2: channel count of `x2` (and of the upsampled map).
                out_size: channel count produced by both refinement convs.
                i: layer index, used only to build variable/op names.

            Returns:
                The refined feature map with `out_size` channels.

            NOTE(review): `utils`, `tf` and `keep_prob` come from the
            enclosing scope, which is not visible in this chunk.
            """
            suffix = "_u_" + str(i)

            # Transposed conv upsamples x1 to x2's spatial shape while
            # mapping in_size1 -> in_size2 channels (filter layout is
            # [d, h, w, out_channels, in_channels] for conv3d_transpose).
            up_w = utils.weight_variable([2, 2, 2, in_size2, in_size1],
                                         name="W" + suffix + "_1")
            up_b = utils.bias_variable([in_size2], name="b" + suffix + "_1")
            upsampled = utils.conv3d_transpose_strided(
                x1, up_w, up_b, output_shape=tf.shape(x2))
            upsampled = tf.nn.relu(upsampled, name="relu_d_" + str(i) + "_1")

            # Fuse the upsampled features with the skip connection along
            # the channel dimension, then refine in two identical stages.
            features = tf.concat([upsampled, x2], -1)
            in_channels = in_size2 * 2  # concat doubled the channels
            for stage in (2, 3):
                tag = suffix + "_" + str(stage)
                w = utils.weight_variable([3, 3, 3, in_channels, out_size],
                                          name="W" + tag)
                b = utils.bias_variable([out_size], name="b" + tag)
                conv = utils.conv3d_basic(features, w, b)
                features = tf.nn.relu(conv, name="relu" + tag)
                features = tf.nn.dropout(features, keep_prob=keep_prob)
                in_channels = out_size  # second stage maps out -> out

            return features