Code example #1
    def build_Gz(self):
        x_input = Input(shape=self.input_shape)

        # save all shareable layers
        self.s_layers[0].append(BasicConvLayer(64, (5, 5), strides=(1, 1), bnorm=False, activation='leaky_relu', leaky_relu_slope=0.1))
        self.s_layers[0].append(ResLayer(64, (3, 3), strides=(1, 1), bnorm=True, activation='leaky_relu', leaky_relu_slope=0.1))

        self.s_layers[1].append(ResLayer(64, (3, 3), strides=(1, 1), bnorm=True, activation='leaky_relu', leaky_relu_slope=0.1))
        self.s_layers[1].append(ResLayer(64, (3, 3), strides=(1, 1), bnorm=True, activation='leaky_relu', leaky_relu_slope=0.1))
        self.s_layers[1].append(BasicConvLayer(64, (3, 3), strides=(1, 1), bnorm=True, activation='leaky_relu', leaky_relu_slope=0.1))
        self.s_layers[1].append(BasicConvLayer(64, (1, 1), strides=(1, 1), bnorm=True, activation='leaky_relu', leaky_relu_slope=0.1))

        # if there is a reference model (pre-defined layer weights)
        if self.share_with is not None and self.n_layers_to_share != (0, 0):
            self.s_layers[0][-self.n_layers_to_share[0]:] = self.share_with.s_layers[0][-self.n_layers_to_share[0]:]
            self.s_layers[1][-self.n_layers_to_share[1]:] = self.share_with.s_layers[1][-self.n_layers_to_share[1]:]

        # apply the shared layers: group 0 to the input, then group 1 to form the d head
        x = x_input
        for layer in self.s_layers[0]:
            x = layer(x)
        x_d = self.s_layers[1][0](x)
        for layer in self.s_layers[1][1:]:
            x_d = layer(x_d)

        # non-shared layers for the g head (the d head gets one more non-shared conv below)
        x_g = ResLayer(64, (3, 3), strides=(1, 1), bnorm=True, activation='leaky_relu', leaky_relu_slope=0.1)(x)
        x_g = ResLayer(64, (3, 3), strides=(1, 1), bnorm=True, activation='leaky_relu', leaky_relu_slope=0.1)(x_g)
        x_g = BasicConvLayer(64, (3, 3), strides=(1, 1), bnorm=True, activation='leaky_relu', leaky_relu_slope=0.1)(x_g)
        x_d = BasicConvLayer(64, (1, 1), strides=(1, 1), bnorm=False, activation='leaky_relu', leaky_relu_slope=0.1)(x_d)

        x_g = Flatten()(x_g)
        x_d = Flatten()(x_d)

        mu_g = Dense(self.z_dims // 2)(x_g)
        mu_g = Activation('linear')(mu_g)
        sigma_g = Dense(self.z_dims // 2)(x_g)
        sigma_g = Activation('linear')(sigma_g)

        mu_d = Dense(self.z_dims // 2)(x_d)
        mu_d = Activation('linear')(mu_d)
        sigma_d = Dense(self.z_dims // 2)(x_d)
        sigma_d = Activation('linear')(sigma_d)

        # use the generated values to sample random z from the latent space
        concatenated_g = Concatenate(axis=-1)([mu_g, sigma_g])
        concatenated_d = Concatenate(axis=-1)([mu_d, sigma_d])
        output_g = Lambda(
            function=lambda x: x[:, :self.z_dims // 2] + (K.exp(x[:, self.z_dims // 2:]) * (K.random_normal(shape=K.shape(x[:, self.z_dims // 2:])))),
            output_shape=(self.z_dims // 2, )
        )(concatenated_g)
        output_d = Lambda(
            function=lambda x: x[:, :self.z_dims // 2] + (K.exp(x[:, self.z_dims // 2:]) * (K.random_normal(shape=K.shape(x[:, self.z_dims // 2:])))),
            output_shape=(self.z_dims // 2, )
        )(concatenated_d)

        concatenated = Concatenate(axis=-1)([output_g, output_d])
        return Model(x_input, concatenated)
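
When share_with is set, the slice assignments on s_layers in build_Gz above replace the last n layers of each group with the corresponding layer objects from the reference model, so both models call, and therefore update, the very same Keras layers. A minimal stand-alone sketch of that mechanism (the Dense layers and variable names are illustrative, not from the original code):

    from keras.layers import Dense

    # layer lists for a "reference" model and a new model
    reference_layers = [Dense(8), Dense(8), Dense(8)]
    own_layers = [Dense(8), Dense(8), Dense(8)]

    # share the last two layers, exactly like the slice assignment in build_Gz
    n_to_share = 2
    own_layers[-n_to_share:] = reference_layers[-n_to_share:]

    # the new list now holds references to the same layer objects,
    # so their weights are shared between the two models
    assert own_layers[-1] is reference_layers[-1]
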
Code example #2
    def build_D(self):
        x_inputs = Input(shape=self.input_shape)

        # image branch: downsample x with strided convolutions
        x = BasicConvLayer(filters=128, kernel_size=(5, 5), strides=(2, 2), bnorm=True)(x_inputs)
        x = BasicConvLayer(filters=128, kernel_size=(7, 7), strides=(2, 2), bnorm=True)(x)
        x = BasicConvLayer(filters=256, kernel_size=(5, 5), strides=(2, 2), bnorm=True)(x)
        x = BasicConvLayer(filters=256, kernel_size=(7, 7), strides=(2, 2), bnorm=True)(x)
        x = BasicConvLayer(filters=512, kernel_size=(4, 4), strides=(1, 1), bnorm=True)(x)
        x = BasicConvLayer(filters=512, kernel_size=(4, 4), strides=(8, 8), bnorm=True)(x)
        x = BasicConvLayer(filters=512, kernel_size=(1, 1), strides=(1, 1), bnorm=True)(x)
        x = Flatten()(x)

        # latent branch: treat z as a 1x1 feature map and apply 1x1 convolutions
        z_inputs = Input(shape=(self.z_dims,))
        z = Reshape((1, 1, self.z_dims))(z_inputs)
        z = BasicConvLayer(filters=1024, kernel_size=(1, 1), dropout=0.2)(z)
        z = BasicConvLayer(filters=1024, kernel_size=(1, 1), dropout=0.2)(z)
        z = Flatten()(z)

        # joint branch: concatenate both feature vectors and score the (x, z) pair
        xz = Concatenate(axis=-1)([x, z])
        xz = Dropout(0.2)(xz)
        xz = Dense(2048)(xz)
        xz = LeakyReLU(0.1)(xz)

        xz = Dropout(0.2)(xz)
        xz = Dense(1)(xz)
        xz = Activation('sigmoid')(xz)

        return Model([x_inputs, z_inputs], xz)
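
A discriminator with both an image input and a latent input scores joint (x, z) pairs. In ALI/BiGAN-style training such a discriminator is typically trained to separate encoder pairs (x, Gz(x)) from generator pairs (Gx(z), z). Below is a minimal, self-contained sketch of that wiring using tiny stand-in models; the names gz, gx, d and the shapes are illustrative and are not the models defined in these examples:

    from keras.layers import Input, Dense, Flatten, Reshape, Concatenate
    from keras.models import Model

    input_shape, z_dims = (8, 8, 1), 16            # illustrative shapes

    # stand-in encoder Gz: x -> z
    xi = Input(shape=input_shape)
    gz = Model(xi, Dense(z_dims)(Flatten()(xi)))

    # stand-in generator Gx: z -> x
    zi = Input(shape=(z_dims,))
    gx = Model(zi, Reshape(input_shape)(Dense(8 * 8 * 1)(zi)))

    # stand-in joint discriminator D(x, z)
    xd, zd = Input(shape=input_shape), Input(shape=(z_dims,))
    d = Model([xd, zd],
              Dense(1, activation='sigmoid')(Concatenate()([Flatten()(xd), zd])))

    # the two kinds of pairs the discriminator has to tell apart
    x_real, z_sample = Input(shape=input_shape), Input(shape=(z_dims,))
    p_encoder = d([x_real, gz(x_real)])            # encoder pair:   (x, Gz(x))
    p_generator = d([gx(z_sample), z_sample])      # generator pair: (Gx(z), z)
    trainer = Model([x_real, z_sample], [p_encoder, p_generator])
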
Code example #3
    def build_D(self):
        x_input = Input(shape=self.input_shape)

        x = BasicConvLayer(32, (5, 5),
                           strides=(1, 1),
                           bnorm=False,
                           dropout=0.2,
                           activation='leaky_relu',
                           leaky_relu_slope=0.01)(x_input)
        x = BasicConvLayer(64, (4, 4),
                           strides=(2, 2),
                           bnorm=True,
                           dropout=0.2,
                           activation='leaky_relu',
                           leaky_relu_slope=0.01)(x)
        x = BasicConvLayer(64, (4, 4),
                           strides=(2, 2),
                           bnorm=True,
                           dropout=0.2,
                           activation='leaky_relu',
                           leaky_relu_slope=0.01)(x)
        x = Flatten()(x)

        # latent branch: z is concatenated with the condition vector before the 1x1 convolutions
        z_input = Input(shape=(self.z_dims, ))
        conditional_input = Input(shape=(self.conditional_dims, ))
        z = Concatenate()([z_input, conditional_input])
        z = Reshape((1, 1, -1))(z)
        z = BasicConvLayer(64, (1, 1),
                           bnorm=False,
                           dropout=0.2,
                           activation='leaky_relu',
                           leaky_relu_slope=0.01)(z)
        z = BasicConvLayer(64, (1, 1),
                           bnorm=False,
                           dropout=0.2,
                           activation='leaky_relu',
                           leaky_relu_slope=0.01)(z)
        z = Flatten()(z)

        xz = Concatenate(axis=-1)([x, z])

        xz = Dense(64)(xz)
        xz = LeakyReLU(0.01)(xz)
        xz = Dropout(0.2)(xz)

        xz = Dense(1)(xz)
        xz = Activation('sigmoid')(xz)

        return Model([x_input, z_input, conditional_input], xz)
Code example #4
    def build_Gx(self):
        z_input = Input(shape=(self.z_dims,))
        orig_channels = self.input_shape[2]

        x = Reshape((1, 1, -1))(z_input)

        x = BasicDeconvLayer(256, (4, 4), strides=(1, 1), bnorm=True, activation='leaky_relu', leaky_relu_slope=0.01)(x)
        x = BasicDeconvLayer(128, (4, 4), strides=(2, 2), bnorm=True, activation='leaky_relu', leaky_relu_slope=0.01)(x)
        x = BasicDeconvLayer(64, (4, 4), strides=(1, 1), bnorm=True, activation='leaky_relu', leaky_relu_slope=0.01)(x)
        x = BasicDeconvLayer(32, (4, 4), strides=(2, 2), bnorm=True, activation='leaky_relu', leaky_relu_slope=0.01)(x)
        x = BasicDeconvLayer(32, (5, 5), strides=(1, 1), bnorm=True, activation='leaky_relu', leaky_relu_slope=0.01)(x)

        x = BasicConvLayer(32, (1, 1), strides=(1, 1), bnorm=True, activation='leaky_relu', leaky_relu_slope=0.01)(x)
        x = BasicConvLayer(orig_channels, (1, 1), activation='sigmoid', bnorm=False)(x)

        return Model(z_input, x, name="Gx")
Code example #5
    def build_Gz(self):
        x_input = Input(shape=self.input_shape)

        x = BasicConvLayer(32, (5, 5),
                           strides=(1, 1),
                           bnorm=True,
                           activation='leaky_relu',
                           leaky_relu_slope=0.1)(x_input)
        x = BasicConvLayer(64, (4, 4),
                           strides=(2, 2),
                           bnorm=True,
                           activation='leaky_relu',
                           leaky_relu_slope=0.1)(x)
        x = BasicConvLayer(128, (4, 4),
                           strides=(1, 1),
                           bnorm=True,
                           activation='leaky_relu',
                           leaky_relu_slope=0.1)(x)
        x = BasicConvLayer(256, (4, 4),
                           strides=(2, 2),
                           bnorm=True,
                           activation='leaky_relu',
                           leaky_relu_slope=0.1)(x)
        x = BasicConvLayer(512, (1, 1),
                           strides=(1, 1),
                           bnorm=True,
                           activation='leaky_relu',
                           leaky_relu_slope=0.1)(x)

        x = Flatten()(x)

        # the outputs are a mean (mu) and a log standard deviation (sigma)
        # describing the distribution that best fits the input
        mu = Dense(self.z_dims)(x)
        mu = Activation('linear')(mu)
        sigma = Dense(self.z_dims)(x)
        sigma = Activation('linear')(sigma)

        # use the generated values to sample random z from the latent space
        concatenated = Concatenate(axis=-1)([mu, sigma])
        output = Lambda(function=lambda x: x[:, :self.z_dims] +
                        (K.exp(x[:, self.z_dims:]) *
                         (K.random_normal(shape=K.shape(x[:, self.z_dims:])))),
                        output_shape=(self.z_dims, ))(concatenated)

        return Model(x_input, output)
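
The Lambda layer above (like the per-head Lambdas in code examples #1 and #10) implements the reparameterization trick: the encoder predicts a mean (mu) and a log standard deviation (sigma), and a latent sample is drawn as z = mu + exp(sigma) * eps with eps ~ N(0, I). A minimal NumPy sketch of the same computation; the shapes and variable names are illustrative, not from the original code:

    import numpy as np

    batch_size, z_dims = 4, 16                     # illustrative sizes
    mu = np.zeros((batch_size, z_dims))            # output of the mu Dense layer
    log_sigma = np.zeros((batch_size, z_dims))     # output of the sigma Dense layer

    eps = np.random.normal(size=log_sigma.shape)   # K.random_normal in the Lambda
    z = mu + np.exp(log_sigma) * eps               # the formula inside the Lambda
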
Code example #6
    def build_D(self):
        # image branch: a randomly initialized MobileNet (weights=None) serves as the feature extractor
        mobile_net = MobileNet(input_shape=self.input_shape, weights=None, include_top=False)
        x = Flatten()(mobile_net.output)

        z_inputs = Input(shape=(self.z_dims,))
        z = Reshape((1, 1, self.z_dims))(z_inputs)
        z = BasicConvLayer(filters=1024, kernel_size=(1, 1), dropout=0.2)(z)
        z = BasicConvLayer(filters=1024, kernel_size=(1, 1), dropout=0.2)(z)
        z = Flatten()(z)

        xz = Concatenate(axis=-1)([x, z])
        xz = Dropout(0.2)(xz)
        xz = Dense(2048)(xz)
        xz = LeakyReLU(0.1)(xz)

        xz = Dropout(0.2)(xz)
        xz = Dense(1)(xz)
        xz = Activation('sigmoid')(xz)

        return Model([mobile_net.input, z_inputs], xz)
Code example #7
    def build_Gx(self):
        z_input = Input(shape=(self.z_dims,))
        orig_channels = self.input_shape[2]

        x = Dense(512)(z_input)
        x = LeakyReLU(0.1)(x)
        x = Dense(512)(x)
        x = LeakyReLU(0.1)(x)

        x = Reshape((4, 4, 32))(x)

        x = BasicDeconvLayer(64, (3, 3), strides=(1, 1), bnorm=False, activation='leaky_relu', leaky_relu_slope=0.1, padding='same')(x)
        x = BasicDeconvLayer(64, (3, 3), strides=(1, 1), bnorm=True, activation='leaky_relu', leaky_relu_slope=0.1, padding='same')(x)
        x = ResDeconvLayer(64, (4, 4), strides=(2, 2), bnorm=True, activation='leaky_relu', leaky_relu_slope=0.1, padding='same')(x)
        x = ResDeconvLayer(64, (4, 4), strides=(2, 2), bnorm=True, activation='leaky_relu', leaky_relu_slope=0.1, padding='same')(x)
        x = ResDeconvLayer(64, (4, 4), strides=(2, 2), bnorm=True, activation='leaky_relu', leaky_relu_slope=0.1, padding='same')(x)
        x = BasicDeconvLayer(64, (3, 3), strides=(1, 1), bnorm=True, activation='leaky_relu', leaky_relu_slope=0.1, padding='same')(x)
        x = BasicDeconvLayer(32, (5, 5), strides=(1, 1), bnorm=True, activation='leaky_relu', leaky_relu_slope=0.1, padding='same')(x)

        x = BasicConvLayer(32, (1, 1), strides=(1, 1), bnorm=False, activation='leaky_relu', leaky_relu_slope=0.1)(x)
        x = BasicConvLayer(orig_channels, (1, 1), activation='sigmoid', bnorm=False)(x)

        return Model(z_input, x)
Code example #8
    def build_D_cycle(self):
        x_input = Input(shape=self.input_shape)

        x = BasicConvLayer(32, (5, 5),
                           strides=(1, 1),
                           bnorm=False,
                           dropout=0.2,
                           activation='leaky_relu',
                           leaky_relu_slope=0.1)(x_input)
        x = BasicConvLayer(64, (4, 4),
                           strides=(2, 2),
                           bnorm=True,
                           dropout=0.2,
                           activation='leaky_relu',
                           leaky_relu_slope=0.1)(x)
        x = BasicConvLayer(128, (4, 4),
                           strides=(2, 2),
                           bnorm=True,
                           dropout=0.2,
                           activation='leaky_relu',
                           leaky_relu_slope=0.1)(x)
        x = Flatten()(x)

        # the second image (x_hat) goes through its own, separately parameterized conv stack
        x_hat_input = Input(shape=self.input_shape)
        x_hat = BasicConvLayer(32, (5, 5),
                               strides=(1, 1),
                               bnorm=False,
                               dropout=0.2,
                               activation='leaky_relu',
                               leaky_relu_slope=0.1)(x_hat_input)
        x_hat = BasicConvLayer(64, (4, 4),
                               strides=(2, 2),
                               bnorm=True,
                               dropout=0.2,
                               activation='leaky_relu',
                               leaky_relu_slope=0.1)(x_hat)
        x_hat = BasicConvLayer(128, (4, 4),
                               strides=(2, 2),
                               bnorm=True,
                               dropout=0.2,
                               activation='leaky_relu',
                               leaky_relu_slope=0.1)(x_hat)
        x_hat = Flatten()(x_hat)

        x_x_hat = Concatenate(axis=-1)([x, x_hat])

        x_x_hat = Dense(128)(x_x_hat)
        x_x_hat = LeakyReLU(0.01)(x_x_hat)
        x_x_hat = Dropout(0.2)(x_x_hat)

        x_x_hat = Dense(1)(x_x_hat)
        x_x_hat = Activation('sigmoid')(x_x_hat)

        return Model([x_input, x_hat_input], x_x_hat)
Code example #9
    def build_Gx(self):
        z_input = Input(shape=(self.z_dims, ))
        orig_channels = self.input_shape[2]

        x = Dense(64, activation='relu')(z_input)
        x = Dense(128, activation='relu')(x)

        x = Reshape((4, 4, 8))(x)

        res_x = x = BasicDeconvLayer(64, (3, 3),
                                     strides=(1, 1),
                                     bnorm=True,
                                     activation='leaky_relu',
                                     leaky_relu_slope=0.01,
                                     padding='same')(x)
        x = BasicDeconvLayer(64, (3, 3),
                             strides=(1, 1),
                             bnorm=True,
                             activation='leaky_relu',
                             leaky_relu_slope=0.01,
                             padding='same',
                             residual=res_x)(x)
        res_x = x = BasicDeconvLayer(64, (3, 3),
                                     strides=(2, 2),
                                     bnorm=True,
                                     activation='leaky_relu',
                                     leaky_relu_slope=0.01,
                                     padding='same')(x)
        x = BasicDeconvLayer(64, (3, 3),
                             strides=(1, 1),
                             bnorm=True,
                             activation='leaky_relu',
                             leaky_relu_slope=0.01,
                             padding='same',
                             residual=res_x)(x)
        res_x = x = BasicDeconvLayer(64, (3, 3),
                                     strides=(1, 1),
                                     bnorm=True,
                                     activation='leaky_relu',
                                     leaky_relu_slope=0.01,
                                     padding='same')(x)
        x = BasicDeconvLayer(64, (3, 3),
                             strides=(1, 1),
                             bnorm=True,
                             activation='leaky_relu',
                             leaky_relu_slope=0.01,
                             padding='same',
                             residual=res_x)(x)
        res_x = x = BasicDeconvLayer(64, (3, 3),
                                     strides=(2, 2),
                                     bnorm=True,
                                     activation='leaky_relu',
                                     leaky_relu_slope=0.01,
                                     padding='same')(x)
        x = BasicDeconvLayer(64, (3, 3),
                             strides=(1, 1),
                             bnorm=True,
                             activation='leaky_relu',
                             leaky_relu_slope=0.01,
                             padding='same',
                             residual=res_x)(x)
        res_x = x = BasicDeconvLayer(64, (3, 3),
                                     strides=(1, 1),
                                     bnorm=True,
                                     activation='leaky_relu',
                                     leaky_relu_slope=0.01,
                                     padding='same')(x)
        x = BasicDeconvLayer(64, (3, 3),
                             strides=(1, 1),
                             bnorm=True,
                             activation='leaky_relu',
                             leaky_relu_slope=0.01,
                             padding='same',
                             residual=res_x)(x)
        res_x = x = BasicDeconvLayer(64, (3, 3),
                                     strides=(2, 2),
                                     bnorm=True,
                                     activation='leaky_relu',
                                     leaky_relu_slope=0.01,
                                     padding='same')(x)
        x = BasicDeconvLayer(64, (3, 3),
                             strides=(1, 1),
                             bnorm=True,
                             activation='leaky_relu',
                             leaky_relu_slope=0.01,
                             padding='same',
                             residual=res_x)(x)
        res_x = x = BasicDeconvLayer(32, (5, 5),
                                     strides=(1, 1),
                                     bnorm=True,
                                     activation='leaky_relu',
                                     leaky_relu_slope=0.01,
                                     padding='same')(x)
        x = BasicDeconvLayer(32, (5, 5),
                             strides=(1, 1),
                             bnorm=True,
                             activation='leaky_relu',
                             leaky_relu_slope=0.01,
                             padding='same',
                             residual=res_x)(x)

        x = BasicConvLayer(32, (1, 1),
                           strides=(1, 1),
                           bnorm=True,
                           activation='leaky_relu',
                           leaky_relu_slope=0.01)(x)
        x = BasicConvLayer(orig_channels, (1, 1),
                           activation='sigmoid',
                           bnorm=False)(x)

        return Model(z_input, x)
Code example #10
    def build_Gz(self):
        x_input = Input(shape=self.input_shape)

        res_x = x = BasicConvLayer(64, (5, 5),
                                   strides=(1, 1),
                                   bnorm=True,
                                   activation='leaky_relu',
                                   leaky_relu_slope=0.1)(x_input)
        x = BasicConvLayer(64, (5, 5),
                           strides=(1, 1),
                           bnorm=True,
                           activation='leaky_relu',
                           leaky_relu_slope=0.1)(x)
        res_x = x = BasicConvLayer(64, (5, 5),
                                   strides=(1, 1),
                                   bnorm=True,
                                   activation='leaky_relu',
                                   leaky_relu_slope=0.1,
                                   residual=res_x)(x)
        x = BasicConvLayer(64, (3, 3),
                           strides=(1, 1),
                           bnorm=True,
                           activation='leaky_relu',
                           leaky_relu_slope=0.1)(x)
        res_x = x = BasicConvLayer(64, (3, 3),
                                   strides=(1, 1),
                                   bnorm=True,
                                   activation='leaky_relu',
                                   leaky_relu_slope=0.1,
                                   residual=res_x)(x)
        x = BasicConvLayer(64, (3, 3),
                           strides=(1, 1),
                           bnorm=True,
                           activation='leaky_relu',
                           leaky_relu_slope=0.1)(x)
        res_x = x = BasicConvLayer(64, (3, 3),
                                   strides=(1, 1),
                                   bnorm=True,
                                   activation='leaky_relu',
                                   leaky_relu_slope=0.1,
                                   residual=res_x)(x)
        x = BasicConvLayer(64, (3, 3),
                           strides=(1, 1),
                           bnorm=True,
                           activation='leaky_relu',
                           leaky_relu_slope=0.1)(x)
        res_x = x = BasicConvLayer(64, (3, 3),
                                   strides=(1, 1),
                                   bnorm=True,
                                   activation='leaky_relu',
                                   leaky_relu_slope=0.1,
                                   residual=res_x)(x)
        x = BasicConvLayer(64, (3, 3),
                           strides=(1, 1),
                           bnorm=True,
                           activation='leaky_relu',
                           leaky_relu_slope=0.1)(x)
        x = BasicConvLayer(64, (3, 3),
                           strides=(1, 1),
                           bnorm=True,
                           activation='leaky_relu',
                           leaky_relu_slope=0.1,
                           residual=res_x)(x)

        res_x = x_g = BasicConvLayer(128, (3, 3),
                                     strides=(2, 2),
                                     bnorm=True,
                                     activation='leaky_relu',
                                     leaky_relu_slope=0.1)(x)
        x_g = BasicConvLayer(128, (3, 3),
                             strides=(1, 1),
                             bnorm=True,
                             activation='leaky_relu',
                             leaky_relu_slope=0.1)(x_g)
        res_x = x_g = BasicConvLayer(128, (3, 3),
                                     strides=(1, 1),
                                     bnorm=True,
                                     activation='leaky_relu',
                                     leaky_relu_slope=0.1,
                                     residual=res_x)(x_g)
        x_g = BasicConvLayer(128, (3, 3),
                             strides=(1, 1),
                             bnorm=True,
                             activation='leaky_relu',
                             leaky_relu_slope=0.1)(x_g)
        res_x = x_g = BasicConvLayer(128, (1, 1),
                                     strides=(1, 1),
                                     bnorm=True,
                                     activation='leaky_relu',
                                     leaky_relu_slope=0.1,
                                     residual=res_x)(x_g)

        res_x = x_d = BasicConvLayer(128, (3, 3),
                                     strides=(2, 2),
                                     bnorm=True,
                                     activation='leaky_relu',
                                     leaky_relu_slope=0.1)(x)
        x_d = BasicConvLayer(128, (3, 3),
                             strides=(1, 1),
                             bnorm=True,
                             activation='leaky_relu',
                             leaky_relu_slope=0.1)(x_d)
        res_x = x_d = BasicConvLayer(128, (3, 3),
                                     strides=(1, 1),
                                     bnorm=True,
                                     activation='leaky_relu',
                                     leaky_relu_slope=0.1,
                                     residual=res_x)(x_d)
        x_d = BasicConvLayer(128, (3, 3),
                             strides=(1, 1),
                             bnorm=True,
                             activation='leaky_relu',
                             leaky_relu_slope=0.1)(x_d)
        res_x = x_d = BasicConvLayer(128, (1, 1),
                                     strides=(1, 1),
                                     bnorm=True,
                                     activation='leaky_relu',
                                     leaky_relu_slope=0.1,
                                     residual=res_x)(x_d)

        x_g = Flatten()(x_g)
        x_d = Flatten()(x_d)

        mu_g = Dense(self.z_dims // 2)(x_g)
        mu_g = Activation('linear')(mu_g)
        sigma_g = Dense(self.z_dims // 2)(x_g)
        sigma_g = Activation('linear')(sigma_g)

        mu_d = Dense(self.z_dims // 2)(x_d)
        mu_d = Activation('linear')(mu_d)
        sigma_d = Dense(self.z_dims // 2)(x_d)
        sigma_d = Activation('linear')(sigma_d)

        # use the generated values to sample random z from the latent space
        concatenated_g = Concatenate(axis=-1)([mu_g, sigma_g])
        concatenated_d = Concatenate(axis=-1)([mu_d, sigma_d])
        output_g = Lambda(
            function=lambda x: x[:, :self.z_dims // 2] +
            (K.exp(x[:, self.z_dims // 2:]) *
             (K.random_normal(shape=K.shape(x[:, self.z_dims // 2:])))),
            output_shape=(self.z_dims // 2, ))(concatenated_g)
        output_d = Lambda(
            function=lambda x: x[:, :self.z_dims // 2] +
            (K.exp(x[:, self.z_dims // 2:]) *
             (K.random_normal(shape=K.shape(x[:, self.z_dims // 2:])))),
            output_shape=(self.z_dims // 2, ))(concatenated_d)

        concatenated = Concatenate(axis=-1)([output_g, output_d])
        return Model(x_input, concatenated)
Code example #11
    def build_D(self):
        x_input = Input(shape=self.input_shape)

        x = BasicConvLayer(32, (5, 5),
                           strides=(1, 1),
                           bnorm=False,
                           dropout=0.2,
                           activation='leaky_relu',
                           leaky_relu_slope=0.01)(x_input)
        res_x = x = BasicConvLayer(64, (3, 3),
                                   strides=(2, 2),
                                   bnorm=True,
                                   dropout=0.2,
                                   activation='leaky_relu',
                                   leaky_relu_slope=0.01)(x)
        x = BasicConvLayer(64, (3, 3),
                           strides=(1, 1),
                           bnorm=True,
                           dropout=0.2,
                           activation='leaky_relu',
                           leaky_relu_slope=0.01,
                           residual=res_x)(x)
        res_x = x = BasicConvLayer(128, (3, 3),
                                   strides=(2, 2),
                                   bnorm=True,
                                   dropout=0.2,
                                   activation='leaky_relu',
                                   leaky_relu_slope=0.01)(x)
        x = BasicConvLayer(128, (3, 3),
                           strides=(1, 1),
                           bnorm=True,
                           dropout=0.2,
                           activation='leaky_relu',
                           leaky_relu_slope=0.01,
                           residual=res_x)(x)
        x = Flatten()(x)

        z_input = Input(shape=(self.z_dims, ))
        z = Reshape((1, 1, self.z_dims))(z_input)
        res_z = z = BasicConvLayer(128, (1, 1),
                                   bnorm=False,
                                   dropout=0.2,
                                   activation='leaky_relu',
                                   leaky_relu_slope=0.01)(z)
        z = BasicConvLayer(128, (1, 1),
                           bnorm=False,
                           dropout=0.2,
                           activation='leaky_relu',
                           leaky_relu_slope=0.01,
                           residual=res_z)(z)
        res_z = z = BasicConvLayer(128, (1, 1),
                                   bnorm=False,
                                   dropout=0.2,
                                   activation='leaky_relu',
                                   leaky_relu_slope=0.01)(z)
        z = BasicConvLayer(128, (1, 1),
                           bnorm=False,
                           dropout=0.2,
                           activation='leaky_relu',
                           leaky_relu_slope=0.01,
                           residual=res_z)(z)
        z = Flatten()(z)

        xz = Concatenate(axis=-1)([x, z])

        xz = Dense(512)(xz)
        xz = LeakyReLU(0.01)(xz)
        xz = Dropout(0.2)(xz)

        xz = Dense(512)(xz)
        xz = LeakyReLU(0.01)(xz)
        xz = Dropout(0.2)(xz)

        xz = Dense(1)(xz)
        xz = Activation('sigmoid')(xz)

        return Model([x_input, z_input], xz)