# Example 1 (score: 0)
def decoder(inputs):
    """Map a flat latent code to a 32x32x32x1 volume with transposed 3D convs.

    Upsampling path:
        dense -> 4 x 4 x 4 x 8
        -> 8 x 8 x 8 x 16
        -> 16 x 16 x 16 x 32
        -> 32 x 32 x 32 x 1  (sigmoid, values in [0, 1])

    Args:
        inputs: 2-D tensor of latent vectors, shape (batch, latent_dim)
            — assumed flat; TODO confirm against the encoder's output.

    Returns:
        5-D tensor of shape (batch, 32, 32, 32, 1).
    """
    # Project the latent code to 512 units (= 4*4*4*8) and fold into a volume.
    net = tf.layers.dense(inputs, units=512, activation=tf.nn.relu)
    net = tf.reshape(net, [-1, 4, 4, 4, 8])

    # 4^3 x 8 -> 8^3 x 16
    net = lays.conv3d_transpose(net,
                                16, [4, 4, 4],
                                stride=2,
                                padding='SAME',
                                trainable=True)
    net = lays.batch_norm(net, decay=0.999)
    # 8^3 x 16 -> 16^3 x 32
    net = lays.conv3d_transpose(net,
                                32, [4, 4, 4],
                                stride=2,
                                padding='SAME',
                                trainable=True)
    net = lays.batch_norm(net, decay=0.999)
    # 16^3 x 32 -> 32^3 x 1, squashed to [0, 1] for voxel-occupancy output
    net = lays.conv3d_transpose(net,
                                1, [4, 4, 4],
                                stride=2,
                                padding='SAME',
                                activation_fn=tf.nn.sigmoid,
                                trainable=True)
    return net
def decoder(inputs):
    """Decode a latent vector into a 32x32x32x1 volume.

    Upsampling path:
        dense -> 2 x 2 x 2 x 8
        -> 8 x 8 x 8 x 16   (stride 4)
        -> 16 x 16 x 16 x 32
        -> 32 x 32 x 32 x 1 (sigmoid)
    """
    # Project to 64 units (= 2*2*2*8), then fold into a small seed volume.
    x = tf.layers.dense(inputs, 2 * 2 * 2 * 8, activation=tf.nn.relu)
    x = tf.reshape(x, [-1, 2, 2, 2, 8])

    # 2^3 x 8 -> 8^3 x 16, batch-normalized
    x = lays.conv3d_transpose(x,
                              16, [5, 5, 5],
                              stride=4,
                              padding='SAME',
                              trainable=True)
    x = lays.batch_norm(x, decay=0.9)

    # 8^3 x 16 -> 16^3 x 32, batch-normalized
    x = lays.conv3d_transpose(x,
                              32, [5, 5, 5],
                              stride=2,
                              padding='SAME',
                              trainable=True)
    x = lays.batch_norm(x, decay=0.9)

    # 16^3 x 32 -> 32^3 x 1 with sigmoid output
    x = lays.conv3d_transpose(x,
                              1, [5, 5, 5],
                              stride=2,
                              padding='SAME',
                              activation_fn=tf.nn.sigmoid,
                              trainable=True)
    return x
# Example 3 (score: 0)
 def _3d_rdnn_level_60(self, inputs):
     """Upsample *inputs* to 256 channels with a stride-2 transposed 3D conv.

     Runs under the 'Volume30Up' variable scope; variables are reused when
     self._reuse is set, and regularizer kwargs come from self._reg.
     """
     with tf.variable_scope('Volume30Up', reuse=self._reuse):
         upsampled = layers.conv3d_transpose(inputs,
                                             num_outputs=256,
                                             kernel_size=3,
                                             stride=2,
                                             scope='deconv',
                                             **self._reg)
         return upsampled
# Example 4 (score: 0)
def autoencoder(inputs):
    """3D convolutional autoencoder over 32x32x32x1 volumes.

    Encoder:
        32 x 32 x 32 x 1   -> 16 x 16 x 16 x 32
        16 x 16 x 16 x 32  ->  8 x  8 x  8 x 16
         8 x  8 x  8 x 16  ->  2 x  2 x  2 x  8   (stride 4)
    Decoder mirrors the encoder back to 32 x 32 x 32 x 1.

    Args:
        inputs: 5-D tensor, shape (batch, 32, 32, 32, 1)
            — assumed from the shapes above; TODO confirm with callers.

    Returns:
        (latent_space, reconstruction) where latent_space has shape
        (batch, 2, 2, 2, 8) and reconstruction has shape
        (batch, 32, 32, 32, 1) with tanh output in [-1, 1].
    """
    # Encoder
    net = lays.conv3d(inputs, 32, [5, 5, 5], stride=2, padding='SAME')
    net = lays.conv3d(net, 16, [5, 5, 5], stride=2, padding='SAME')
    net = lays.conv3d(net, 8, [5, 5, 5], stride=4, padding='SAME')
    latent_space = net
    # Decoder
    net = lays.conv3d_transpose(net, 16, [5, 5, 5], stride=4, padding='SAME')
    net = lays.conv3d_transpose(net, 32, [5, 5, 5], stride=2, padding='SAME')
    # NOTE(review): tanh here vs. sigmoid in the decoder-only variants above —
    # implies inputs are scaled to [-1, 1]; confirm the preprocessing.
    net = lays.conv3d_transpose(net, 1, [5, 5, 5], stride=2, padding='SAME', activation_fn=tf.nn.tanh)
    return latent_space, net