Example 1
            def residual_block(net, f_out, activation, share_weights, w1_in, w2_in,
                               scope, use_pre_activation=False, normalization=None, is_training=False):
                with tf.variable_scope(scope):
                    # Without weight sharing, discard the incoming kernels so
                    # conv2d creates fresh variables for this block.
                    if not share_weights:
                        w1_in = None
                        w2_in = None
                    net = pad_borders(net, 3, mode="REFLECT")
                    net, w1_out = conv2d(net,
                                         f_out=f_out,
                                         k_size=3,
                                         stride=1,
                                         activation=activation,
                                         use_preactivation=use_pre_activation,
                                         is_training=is_training,
                                         normalization=normalization,
                                         use_bias=False,
                                         get_weights=True,
                                         set_weight=w1_in,
                                         padding='VALID',
                                         name='conv_1')
                    net = pad_borders(net, 3, mode="REFLECT")
                    net, w2_out = conv2d(net,
                                         f_out=f_out,
                                         k_size=3,
                                         stride=1,
                                         use_preactivation=use_pre_activation,
                                         is_training=is_training,
                                         normalization=normalization,
                                         use_bias=False,
                                         get_weights=True,
                                         set_weight=w2_in,
                                         padding='VALID',
                                         name='conv_2')

                    return net, w1_out, w2_out
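
This block keeps spatial resolution by reflect-padding before each VALID 3x3 convolution and threads kernels in and out (set_weight / get_weights) so callers can reuse the same weights across repeated blocks; note the second convolution deliberately omits the activation. The pad_borders helper is project-specific; a minimal stand-in, assuming it pads each spatial side by (k_size - 1) // 2 so a following VALID convolution preserves height and width (NHWC layout):

    import tensorflow as tf

    def pad_borders(net, k_size, mode="REFLECT"):
        # Pad height and width so the following VALID convolution keeps
        # the spatial dimensions unchanged.
        p = (k_size - 1) // 2
        return tf.pad(net, [[0, 0], [p, p], [p, p], [0, 0]], mode=mode)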
Example 2
        def down_block(input, scope, f_out, k_size=3, activation=tf.nn.relu, normalization=None, is_training=False):

            with tf.variable_scope(scope):
                net = pad_borders(input, k_size, mode="REFLECT")
                net = conv2d(net,
                             f_out=f_out,
                             k_size=3,
                             activation=activation,
                             normalization=normalization,
                             padding='VALID',
                             is_training=is_training,
                             reuse=self.reuse,
                             name='conv_1')
                net = pad_borders(net, k_size, mode="REFLECT")
                net = conv2d(net,
                             f_out=f_out,
                             k_size=3,
                             stride=1,
                             activation=linear,
                             normalization=normalization,
                             padding='VALID',
                             is_training=is_training,
                             reuse=self.reuse,
                             name='conv_2')
                net_conv = net
                net = max_pool(net)
            return net, net_conv
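
The block returns both the pooled tensor and the pre-pool activation (net_conv), so an encoder loop can collect the latter for skip connections. A hypothetical wiring sketch (input_tensor, depth, and the width schedule are assumptions; down_block is the function above):

    skips = []
    net = input_tensor
    for n in range(depth):
        net, pre_pool = down_block(net, scope='down_%d' % n,
                                   f_out=64 * (n + 1))
        skips.append(pre_pool)
    skips.reverse()  # deepest feature first, matching the decoder's order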
Example 3
        def up_block(net,
                     net_down,
                     scope,
                     f_out,
                     k_size=3,
                     activation=tf.nn.relu,
                     normalization=None,
                     is_training=False):

            with tf.variable_scope(scope):

                net = deconv2d(net,
                               f_out=f_out,
                               k_size=2,
                               stride=2,
                               activation=activation,
                               normalization=normalization,
                               padding='SAME',
                               is_training=is_training,
                               use_bias=True,
                               name='deconv_1')

                for layer in net_down:
                    if net.shape == layer.shape:
                        net = tf.concat([net, layer], axis=3)
                        print("Concat")

                net = pad_borders(net, k_size, mode="REFLECT")
                net = conv2d(net,
                             f_out=f_out,
                             k_size=3,
                             activation=activation,
                             normalization=normalization,
                             padding='VALID',
                             is_training=is_training,
                             reuse=self.reuse,
                             use_bias=True,
                             name='conv_1')

                net = pad_borders(net, k_size, mode="REFLECT")
                net = conv2d(net,
                             f_out=f_out,
                             k_size=3,
                             activation=activation,
                             normalization=normalization,
                             padding='VALID',
                             is_training=is_training,
                             reuse=self.reuse,
                             use_bias=True,
                             name='conv_2')

            return net
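
This decoder block upsamples with a stride-2 transposed convolution and concatenates along the channel axis any encoder feature whose shape matches, U-Net style; the shape guard ensures only the one resolution-compatible skip is fused per block. A matching decoder loop, under the same assumptions as the encoder sketch above:

    for n in range(depth):
        net = up_block(net, skips, scope='up_%d' % n,
                       f_out=64 * (depth - n))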
Example 4
        def down_block(input,
                       scope,
                       f_out,
                       k_size=3,
                       activation=tf.nn.relu,
                       normalization=None,
                       is_training=False,
                       is_last_block=False):

            with tf.variable_scope(scope):
                net = pad_borders(input, k_size, mode="REFLECT")
                net = conv2d(net,
                             f_out=f_out,
                             k_size=3,
                             activation=activation,
                             normalization=normalization,
                             padding='VALID',
                             is_training=is_training,
                             reuse=self.reuse,
                             use_bias=True,
                             name='conv_1')

                net = pad_borders(net, k_size, mode="REFLECT")
                net = conv2d(net,
                             f_out=f_out,
                             k_size=3,
                             stride=1,
                             activation=activation,
                             normalization=normalization,
                             padding='VALID',
                             is_training=is_training,
                             reuse=self.reuse,
                             use_bias=True,
                             name='conv_2')

                if f_out >= 512:
                    net = dropout(net, 0.5, is_training)

                net_conv = net
                if not is_last_block:
                    net = max_pool(net)

            return net, net_conv
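
This variant adds two refinements: dropout is enabled only for wide layers (f_out >= 512), and is_last_block suppresses pooling at the bottleneck so the deepest feature keeps its resolution. The dropout helper is project-specific; a minimal stand-in, assuming its second argument is a drop rate (not a keep probability) that is only active during training:

    import tensorflow as tf

    def dropout(net, rate, is_training):
        # tf.layers.dropout drops the given fraction of units while
        # training and is the identity when training=False.
        return tf.layers.dropout(net, rate=rate, training=is_training)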
Example 5
        def up_block(net, net_down, scope, f_out, k_size=3, activation=tf.nn.relu, normalization=None, is_training=False):

            with tf.variable_scope(scope):
                for layer in net_down:
                    if net.shape == layer.shape:
                        net = net + layer
                        print("skip")
                net = tf.image.resize_images(net,
                                             size=(int(net.shape[1] * 2),
                                                   int(net.shape[2] * 2)),
                                             method=tf.image.ResizeMethod.NEAREST_NEIGHBOR)

                net = pad_borders(net, k_size, mode="REFLECT")
                net = conv2d(net,
                             f_out=f_out,
                             k_size=3,
                             activation=activation,
                             normalization=normalization,
                             padding='VALID',
                             is_training=is_training,
                             reuse=self.reuse,
                             name='conv_1')
                net = pad_borders(net, k_size, mode="REFLECT")
                net = conv2d(net,
                             f_out=f_out,
                             k_size=3,
                             activation=linear,
                             normalization=normalization,
                             padding='VALID',
                             is_training=is_training,
                             reuse=self.reuse,
                             name='conv_2')

            return net
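
Unlike Example 3, this up-block fuses the skip feature by addition instead of concatenation, and upsamples with nearest-neighbor resizing followed by convolutions rather than a transposed convolution, a common way to reduce checkerboard artifacts. A self-contained sketch of the resize-then-convolve pattern in plain TF 1.x (upsample_conv is an assumed name; static spatial shapes are assumed):

    import tensorflow as tf

    def upsample_conv(net, f_out, name):
        # Double the spatial dimensions, then let a SAME convolution mix
        # the interpolated pixels.
        h, w = int(net.shape[1]), int(net.shape[2])
        net = tf.image.resize_images(
            net, size=(h * 2, w * 2),
            method=tf.image.ResizeMethod.NEAREST_NEIGHBOR)
        return tf.layers.conv2d(net, f_out, 3, padding='same',
                                activation=tf.nn.relu, name=name)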
Example 6
        def recursive_block(input, f_out, activation, share_weights, scope,
                            use_pre_activation=False, normalization=None, is_training=False, units=3):

            def residual_block(net, f_out, activation, share_weights, w1_in, w2_in,
                               scope, use_pre_activation=False, normalization=None, is_training=False):
                with tf.variable_scope(scope):
                    if not share_weights:
                        w1_in = None
                        w2_in = None
                    net = pad_borders(net, 3, mode="REFLECT")
                    net, w1_out = conv2d(net,
                                         f_out=f_out,
                                         k_size=3,
                                         stride=1,
                                         activation=activation,
                                         use_preactivation=use_pre_activation,
                                         is_training=is_training,
                                         normalization=normalization,
                                         use_bias=False,
                                         get_weights=True,
                                         set_weight=w1_in,
                                         padding='VALID',
                                         name='conv_1')
                    net = pad_borders(net, 3, mode="REFLECT")
                    net, w2_out = conv2d(net,
                                         f_out=f_out,
                                         k_size=3,
                                         stride=1,
                                         use_preactivation=use_pre_activation,
                                         is_training=is_training,
                                         normalization=normalization,
                                         use_bias=False,
                                         get_weights=True,
                                         set_weight=w2_in,
                                         padding='VALID',
                                         name='conv_2')

                    return net, w1_out, w2_out

            with tf.variable_scope(scope):
                net = pad_borders(input, 3, mode="REFLECT")
                net_begin = conv2d(net,
                                   f_out=f_out,
                                   k_size=3,
                                   stride=1,
                                   activation=activation,
                                   normalization=normalization,
                                   use_bias=False,
                                   is_training=is_training,
                                   get_weights=False,
                                   padding='VALID',
                                   name='conv_begin')

                net = net_begin

                w1 = None
                w2 = None
                for n in range(units):
                    net, w1, w2 = residual_block(net,
                                                 f_out,
                                                 activation,
                                                 share_weights,
                                                 use_pre_activation=use_pre_activation,
                                                 normalization=normalization,
                                                 is_training=is_training,
                                                 w1_in=w1,
                                                 w2_in=w2,
                                                 scope=scope + '/ResBlock_' + str(n))
                    net = tf.add(net, net_begin)  # re-anchor every unit to the block input
                return net
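
The recursive block applies the same residual unit as many times as units specifies: the kernels returned by one call are fed into the next, so with share_weights=True every unit reuses a single weight set, and each unit's output is re-anchored to net_begin, a multi-path residual structure in the spirit of DRRN. The same sharing can be sketched without the explicit weight plumbing by letting the variable scope reuse kernels (plain TF 1.x; SAME padding stands in for reflect-pad here):

    import tensorflow as tf

    def shared_unit(net, f_out):
        # AUTO_REUSE creates the conv_1/conv_2 kernels on the first call
        # and reuses the identical variables on every later call.
        with tf.variable_scope('res_shared', reuse=tf.AUTO_REUSE):
            net = tf.layers.conv2d(net, f_out, 3, padding='same',
                                   activation=tf.nn.relu, use_bias=False,
                                   name='conv_1')
            return tf.layers.conv2d(net, f_out, 3, padding='same',
                                    use_bias=False, name='conv_2')

    x = tf.placeholder(tf.float32, [None, 32, 32, 64])
    net_begin = net = x
    for _ in range(3):  # three units, one shared weight set
        net = shared_unit(net, 64) + net_begin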
Example 7
    def model(self, input, is_train=False, reuse=False):
        """
        Create generator model

        # Arguments
            input: Input-Tensor
            is_train: Bool
            reuse: Bool

        # Return
            Tensor of dimension 4D
        """

        def recursive_block(input, f_out, activation, share_weights, scope,
                            use_pre_activation=False, normalization=None, is_training=False, units=3):

            def residual_block(net, f_out, activation, share_weights, w1_in, w2_in,
                               scope, use_pre_activation=False, normalization=None, is_training=False):
                with tf.variable_scope(scope):
                    if not share_weights:
                        w1_in = None
                        w2_in = None
                    net = pad_borders(net, 3, mode="REFLECT")
                    net, w1_out = conv2d(net,
                                         f_out=f_out,
                                         k_size=3,
                                         stride=1,
                                         activation=activation,
                                         use_preactivation=use_pre_activation,
                                         is_training=is_training,
                                         normalization=normalization,
                                         use_bias=False,
                                         get_weights=True,
                                         set_weight=w1_in,
                                         padding='VALID',
                                         name='conv_1')
                    net = pad_borders(net, 3, mode="REFLECT")
                    net, w2_out = conv2d(net,
                                         f_out=f_out,
                                         k_size=3,
                                         stride=1,
                                         use_preactivation=use_pre_activation,
                                         is_training=is_training,
                                         normalization=normalization,
                                         use_bias=False,
                                         get_weights=True,
                                         set_weight=w2_in,
                                         padding='VALID',
                                         name='conv_2')

                    return net, w1_out, w2_out

            with tf.variable_scope(scope):
                net = pad_borders(input, 3, mode="REFLECT")
                net_begin = conv2d(net,
                                   f_out=f_out,
                                   k_size=3,
                                   stride=1,
                                   activation=activation,
                                   normalization=normalization,
                                   use_bias=False,
                                   is_training=is_training,
                                   get_weights=False,
                                   padding='VALID',
                                   name='conv_begin')

                net = net_begin

                w1 = None
                w2 = None
                for n in range(units):
                    net, w1, w2 = residual_block(net,
                                                 f_out,
                                                 activation,
                                                 share_weights,
                                                 use_pre_activation=use_pre_activation,
                                                 normalization=normalization,
                                                 is_training=is_training,
                                                 w1_in=w1,
                                                 w2_in=w2,
                                                 scope=scope + '/ResBlock_' + str(n))
                    net = tf.add(net, net_begin)
                return net

        def res_block(input, scope, f_out, k_size=3, activation=tf.nn.relu, normalization=None, is_training=False):

            with tf.variable_scope(scope):
                net = pad_borders(input, k_size, mode="REFLECT")
                net = conv2d(net,
                             f_out=f_out,
                             k_size=3,
                             activation=activation,
                             normalization=normalization,
                             padding='VALID',
                             is_training=is_training,
                             reuse=self.reuse,
                             name='conv_1')
                net = pad_borders(net, k_size, mode="REFLECT")
                net = conv2d(net,
                             f_out=f_out,
                             k_size=3,
                             activation=linear,
                             normalization=normalization,
                             padding='VALID',
                             is_training=is_training,
                             reuse=self.reuse,
                             name='conv_2')
            return net + input

        with tf.variable_scope(self.params.scope, reuse=tf.AUTO_REUSE):

            net = pad_borders(input, k_size=7, mode="REFLECT")
            net = conv2d(net,
                         self.params.filter_dim,
                         k_size=7,
                         stride=1,
                         activation=self.activation,
                         normalization=self.normalization,
                         padding='VALID',
                         is_training=self.is_training,
                         reuse=self.reuse,
                         name='conv_in')

            net = conv2d(net,
                         self.params.filter_dim * 2,
                         k_size=3,
                         stride=2,
                         activation=self.activation,
                         normalization=self.normalization,
                         is_training=self.is_training,
                         reuse=self.reuse,
                         padding='SAME',
                         name='conv_1')

            net = conv2d(net,
                         self.params.filter_dim * 4,
                         k_size=3,
                         stride=2,
                         activation=self.activation,
                         normalization=self.normalization,
                         is_training=self.is_training,
                         reuse=self.reuse,
                         padding='SAME',
                         name='conv_2')
            net_begin = net
            if self.params.use_recursice_block:

                for i in range(self.params.blocks):
                    net = recursive_block(net,
                                          f_out=self.params.filter_dim * 4,
                                          activation=self.activation,
                                          normalization=self.normalization,
                                          is_training=self.is_training,
                                          share_weights=True,
                                          units=self.params.units,
                                          scope='Recursive_Block_' + str(i))
                net = net + net_begin
            else:
                for n in range(9):
                    net = res_block(net,
                                    scope='res_block' + str(n + 1),
                                    f_out=self.params.filter_dim * 4,
                                    k_size=3,
                                    activation=self.activation,
                                    normalization=self.normalization,
                                    is_training=self.is_training)

            if self.params.use_NN:
                net = upscale_nearest_neighbor(net,
                                               f_size=self.params.filter_dim * 4,
                                               resize_factor=2,
                                               is_training=not self.params.is_training)


                net = conv2d(net,
                             f_out=self.params.filter_dim * 2,
                             k_size=3,
                             stride=1,
                             activation=self.activation,
                             normalization=self.normalization,
                             is_training=self.is_training,
                             reuse=self.reuse,
                             padding='SAME',
                             name='conv_NN_1')
            else:
                net = deconv2d(net,
                               f_out=self.params.filter_dim * 2,
                               k_size=3,
                               stride=2,
                               activation=self.activation,
                               normalization=self.normalization,
                               is_training=self.is_training,
                               reuse=self.reuse,
                               padding='SAME',
                               name='deconv_1')

            if self.params.use_NN:
                net = upscale_nearest_neighbor(net,
                                               f_size=self.params.filter_dim * 2,
                                               resize_factor=2,
                                               is_training=not self.params.is_training)

                net = conv2d(net,
                             f_out=self.params.filter_dim,
                             k_size=3,
                             stride=1,
                             activation=self.activation,
                             normalization=self.normalization,
                             is_training=self.is_training,
                             reuse=self.reuse,
                             padding='SAME',
                             name='conv_NN_2')
            else:
                net = deconv2d(net,
                               f_out=self.params.filter_dim,
                               k_size=3,
                               stride=2,
                               activation=self.activation,
                               normalization=self.normalization,
                               is_training=self.is_training,
                               reuse=self.reuse,
                               padding='SAME',
                               name='deconv_2')

            net = pad_borders(net, k_size=7, mode="REFLECT")
            net = conv2d(net,
                         f_out=3,
                         k_size=7,
                         stride=1,
                         activation=tf.nn.tanh,
                         reuse=self.reuse,
                         padding='VALID',
                         name='conv_out')

            if not self.reuse:
                self.G_A = self.G = net
                self.reuse = True
            else:
                self.G_B = net

        print(' [*] CycleGAN-Generator loaded...')
        return net
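
Taken together, this is the familiar CycleGAN generator layout: a 7x7 input convolution, two stride-2 downsampling convolutions, a middle stage of nine residual blocks (or the recursive blocks above when use_recursice_block is set), two upsampling stages (nearest-neighbor resize plus convolution or transposed convolution, chosen by use_NN), and a 7x7 tanh output convolution. Because the scope is opened with tf.AUTO_REUSE, the first call stores its output as G_A and flips self.reuse, and a second call reuses the same variables and stores G_B. A hypothetical usage, assuming gen is an instance of the surrounding generator class:

    x = tf.placeholder(tf.float32, [None, 256, 256, 3])
    out_a = gen.model(x)       # first call: builds the graph, stores G_A
    out_b = gen.model(out_a)   # second call: reuses the weights, stores G_B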
Example 8
    def model(self, input, is_train=False, reuse=False):
        """
        Create generator model

        # Arguments
            input: Input-Tensor
            is_train: Bool
            reuse: Bool

        # Return
            Tensor of dimension 4D
        """

        def down_block(input, scope, f_out, k_size=3, activation=tf.nn.relu, normalization=None, is_training=False):

            with tf.variable_scope(scope):
                net = pad_borders(input, k_size, mode="REFLECT")
                net = conv2d(net,
                             f_out=f_out,
                             k_size=3,
                             activation=activation,
                             normalization=normalization,
                             padding='VALID',
                             is_training=is_training,
                             reuse=self.reuse,
                             name='conv_1')
                net = pad_borders(net, k_size, mode="REFLECT")
                net = conv2d(net,
                             f_out=f_out,
                             k_size=3,
                             stride=1,
                             activation=linear,
                             normalization=normalization,
                             padding='VALID',
                             is_training=is_training,
                             reuse=self.reuse,
                             name='conv_2')
                net_conv = net
                net = max_pool(net)
            return net, net_conv

        def up_block(net, net_down, scope, f_out, k_size=3, activation=tf.nn.relu, normalization=None, is_training=False):

            with tf.variable_scope(scope):
                for layer in net_down:
                    if net.shape == layer.shape:
                        net = net + layer
                        print("skip")
                net = tf.image.resize_images(net,
                                             size=(int(net.shape[1] * 2),
                                                   int(net.shape[2] * 2)),
                                             method=tf.image.ResizeMethod.NEAREST_NEIGHBOR)

                net = pad_borders(net, k_size, mode="REFLECT")
                net = conv2d(net,
                             f_out=f_out,
                             k_size=3,
                             activation=activation,
                             normalization=normalization,
                             padding='VALID',
                             is_training=is_training,
                             reuse=self.reuse,
                             name='conv_1')
                net = pad_borders(net, k_size, mode="REFLECT")
                net = conv2d(net,
                             f_out=f_out,
                             k_size=3,
                             activation=linear,
                             normalization=normalization,
                             padding='VALID',
                             is_training=is_training,
                             reuse=self.reuse,
                             name='conv_2')

            return net

        with tf.variable_scope(self.params.scope, reuse=tf.AUTO_REUSE):

            if self.params.use_patches:
                input = get_patches(input, patch_size=self.params.patch_size)

            net = pad_borders(input, k_size=7, mode="REFLECT")

            net = conv2d(net,
                         self.params.filter_dim,
                         k_size=7,
                         stride=1,
                         activation=self.activation_down,
                         normalization=self.normalization,
                         padding='VALID',
                         is_training=self.is_training,
                         reuse=self.reuse,
                         name='conv_in')

            layer = []

            f_out_max = 0
            for n in range(1, self.params.depth):
                net, net_conv = down_block(net,
                                           scope='down_block' + str(n + 1),
                                           f_out=self.params.filter_dim * n,
                                           k_size=3,
                                           activation=self.activation_down,
                                           normalization=self.normalization,
                                           is_training=self.is_training)
                f_out_max = self.params.filter_dim * n
                layer.append(net_conv)

            layer.reverse()

            layer_up = []
            for n in range(self.params.depth - 1):
                net = up_block(net,
                               layer,
                               scope='up_block' + str(n + 1),
                               f_out=f_out_max - (self.params.filter_dim * n),
                               k_size=3,
                               activation=self.activation,
                               normalization=self.normalization,
                               is_training=self.is_training)
                layer_up.append(net)

            net = pad_borders(net, k_size=7, mode="REFLECT")
            net = conv2d(net,
                         f_out=3,
                         k_size=7,
                         stride=1,
                         activation=tf.nn.tanh,
                         is_training=self.is_training,
                         reuse=self.reuse,
                         padding='VALID',
                         name='conv_out')

            if not self.reuse:
                self.G_A = self.G = net
                self.reuse = True
            else:
                self.G_B = net

        print(' [*] CycleGAN-Generator loaded...')
        return net
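
This second generator swaps the residual middle for a U-Net-style encoder-decoder: optional patch extraction, depth - 1 down-blocks whose width grows as filter_dim * n, and a mirrored chain of up-blocks whose width shrinks from f_out_max back down, before the usual 7x7 tanh output convolution. For example, with filter_dim = 64 and depth = 4, the encoder widths are 64, 128, 192 and the decoder widths are 192, 128, 64. The skip list collected on the way down is reversed so that the deepest pre-pool feature is considered first on the way up.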