def encoder(self): """ The original Faceswap Encoder Network. The encoder for the original model has it's weights shared between both the "A" and "B" side of the model, so only one instance is created :func:`build_model`. However this same instance is then used twice (once for A and once for B) meaning that the weights get shared. Returns ------- :class:`keras.models.Model` The Keras encoder model, for sharing between inputs from both sides. """ input_ = Input(shape=self.input_shape) var_x = input_ var_x = Conv2DBlock(128, activation="leakyrelu")(var_x) var_x = Conv2DBlock(256, activation="leakyrelu")(var_x) var_x = Conv2DBlock(512, activation="leakyrelu")(var_x) if not self.low_mem: var_x = Conv2DBlock(1024, activation="leakyrelu")(var_x) var_x = Dense(self.encoder_dim)(Flatten()(var_x)) var_x = Dense(4 * 4 * 1024)(var_x) var_x = Reshape((4, 4, 1024))(var_x) var_x = UpscaleBlock(512, activation="leakyrelu")(var_x) return KerasModel(input_, var_x, name="encoder")
def encoder(self): """ Unbalanced Encoder """ kwargs = dict(kernel_initializer=self.kernel_initializer) encoder_complexity = 128 if self.low_mem else self.config["complexity_encoder"] dense_dim = 384 if self.low_mem else 512 dense_shape = self.input_shape[0] // 16 input_ = Input(shape=self.input_shape) var_x = input_ var_x = Conv2DBlock(encoder_complexity, normalization="instance", activation="leakyrelu", **kwargs)(var_x) var_x = Conv2DBlock(encoder_complexity * 2, normalization="instance", activation="leakyrelu", **kwargs)(var_x) var_x = Conv2DBlock(encoder_complexity * 4, **kwargs, activation="leakyrelu")(var_x) var_x = Conv2DBlock(encoder_complexity * 6, **kwargs, activation="leakyrelu")(var_x) var_x = Conv2DBlock(encoder_complexity * 8, **kwargs, activation="leakyrelu")(var_x) var_x = Dense(self.encoder_dim, kernel_initializer=self.kernel_initializer)(Flatten()(var_x)) var_x = Dense(dense_shape * dense_shape * dense_dim, kernel_initializer=self.kernel_initializer)(var_x) var_x = Reshape((dense_shape, dense_shape, dense_dim))(var_x) return KerasModel(input_, var_x, name="encoder")
def encoder(self): """ Encoder Network """ kwargs = dict(kernel_initializer=self.kernel_initializer) input_ = Input(shape=self.input_shape) in_conv_filters = self.input_shape[0] if self.input_shape[0] > 128: in_conv_filters = 128 + (self.input_shape[0] - 128) // 4 dense_shape = self.input_shape[0] // 16 var_x = Conv2DBlock(in_conv_filters, activation=None, **kwargs)(input_) tmp_x = var_x var_x = LeakyReLU(alpha=0.2)(var_x) res_cycles = 8 if self.config.get("lowmem", False) else 16 for _ in range(res_cycles): nn_x = ResidualBlock(in_conv_filters, **kwargs)(var_x) var_x = nn_x # consider adding scale before this layer to scale the residual chain tmp_x = LeakyReLU(alpha=0.1)(tmp_x) var_x = add([var_x, tmp_x]) var_x = Conv2DBlock(128, activation="leakyrelu", **kwargs)(var_x) var_x = PixelShuffler()(var_x) var_x = Conv2DBlock(128, activation="leakyrelu", **kwargs)(var_x) var_x = PixelShuffler()(var_x) var_x = Conv2DBlock(128, activation="leakyrelu", **kwargs)(var_x) var_x = SeparableConv2DBlock(256, **kwargs)(var_x) var_x = Conv2DBlock(512, activation="leakyrelu", **kwargs)(var_x) if not self.config.get("lowmem", False): var_x = SeparableConv2DBlock(1024, **kwargs)(var_x) var_x = Dense(self.encoder_dim, **kwargs)(Flatten()(var_x)) var_x = Dense(dense_shape * dense_shape * 1024, **kwargs)(var_x) var_x = Reshape((dense_shape, dense_shape, 1024))(var_x) var_x = UpscaleBlock(512, activation="leakyrelu", **kwargs)(var_x) return KerasModel(input_, var_x, name="encoder")
def encoder_liae(self):
    """ DFL SAE LIAE Encoder Network """
    input_ = Input(shape=self.input_shape)
    dims = self.input_shape[-1] * self.encoder_dim
    var_x = Conv2DBlock(dims, activation="leakyrelu")(input_)
    var_x = Conv2DBlock(dims * 2, activation="leakyrelu")(var_x)
    var_x = Conv2DBlock(dims * 4, activation="leakyrelu")(var_x)
    var_x = Conv2DBlock(dims * 8, activation="leakyrelu")(var_x)
    var_x = Flatten()(var_x)
    return KerasModel(input_, var_x, name="encoder_liae")

def encoder(self): """ Encoder Network """ input_ = Input(shape=self.input_shape) var_x = input_ var_x = Conv2DBlock(128)(var_x) var_x = Conv2DBlock(256)(var_x) var_x = Conv2DBlock(512)(var_x) var_x = Conv2DBlock(1024)(var_x) var_x = Flatten()(var_x) return KerasModel(input_, var_x, name="encoder")
def encoder(self): """ Encoder Network """ input_ = Input(shape=self.input_shape) var_x = input_ var_x = Conv2DBlock(128, activation="leakyrelu")(var_x) var_x = Conv2DBlock(256, activation="leakyrelu")(var_x) var_x = Conv2DBlock(512, activation="leakyrelu")(var_x) var_x = Conv2DBlock(1024, activation="leakyrelu")(var_x) var_x = Flatten()(var_x) return KerasModel(input_, var_x, name="encoder")
def encoder(self): """ DFL H128 Encoder """ input_ = Input(shape=self.input_shape) var_x = Conv2DBlock(128, activation="leakyrelu")(input_) var_x = Conv2DBlock(256, activation="leakyrelu")(var_x) var_x = Conv2DBlock(512, activation="leakyrelu")(var_x) var_x = Conv2DBlock(1024, activation="leakyrelu")(var_x) var_x = Dense(self.encoder_dim)(Flatten()(var_x)) var_x = Dense(8 * 8 * self.encoder_dim)(var_x) var_x = Reshape((8, 8, self.encoder_dim))(var_x) var_x = UpscaleBlock(self.encoder_dim, activation="leakyrelu")(var_x) return KerasModel(input_, var_x, name="encoder")
def encoder(self): """ Encoder Network """ input_ = Input(shape=self.input_shape) var_x = input_ var_x = Conv2DBlock(128)(var_x) var_x = Conv2DBlock(256)(var_x) var_x = Conv2DBlock(512)(var_x) var_x = Dense(self.encoder_dim)(Flatten()(var_x)) var_x = Dense(4 * 4 * 512)(var_x) var_x = Reshape((4, 4, 512))(var_x) var_x = UpscaleBlock(256)(var_x) return KerasModel(input_, var_x, name="encoder")
def encoder(self): """ DFL H128 Encoder """ input_ = Input(shape=self.input_shape) var_x = Conv2DBlock(128)(input_) var_x = Conv2DBlock(256)(var_x) var_x = Conv2DBlock(512)(var_x) var_x = Conv2DBlock(1024)(var_x) var_x = Dense(self.encoder_dim)(Flatten()(var_x)) var_x = Dense(8 * 8 * self.encoder_dim)(var_x) var_x = Reshape((8, 8, self.encoder_dim))(var_x) var_x = UpscaleBlock(self.encoder_dim)(var_x) return KerasModel(input_, var_x)
def encoder_df(self):
    """ DFL SAE DF Encoder Network """
    input_ = Input(shape=self.input_shape)
    dims = self.input_shape[-1] * self.encoder_dim
    lowest_dense_res = self.input_shape[0] // 16
    var_x = Conv2DBlock(dims, activation="leakyrelu")(input_)
    var_x = Conv2DBlock(dims * 2, activation="leakyrelu")(var_x)
    var_x = Conv2DBlock(dims * 4, activation="leakyrelu")(var_x)
    var_x = Conv2DBlock(dims * 8, activation="leakyrelu")(var_x)
    var_x = Dense(self.ae_dims)(Flatten()(var_x))
    var_x = Dense(lowest_dense_res * lowest_dense_res * self.ae_dims)(var_x)
    var_x = Reshape((lowest_dense_res, lowest_dense_res, self.ae_dims))(var_x)
    var_x = UpscaleBlock(self.ae_dims, activation="leakyrelu")(var_x)
    return KerasModel(input_, var_x, name="encoder_df")

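A small worked example of the shape arithmetic above (the input shape and `encoder_dim`
value are illustrative assumptions):

# Illustrative only: base filter count and lowest dense resolution for an
# assumed 128px, 3-channel input with an assumed encoder_dim of 64.
input_shape = (128, 128, 3)
encoder_dim = 64
dims = input_shape[-1] * encoder_dim        # 3 * 64 = 192 base filters
lowest_dense_res = input_shape[0] // 16     # 128 // 16 = 8
print(dims, lowest_dense_res)               # -> 192 8
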
def encoder(self): """ RealFace Encoder Network """ input_ = Input(shape=self.input_shape) var_x = input_ encoder_complexity = self.config["complexity_encoder"] for idx in range(self.downscalers_no - 1): var_x = Conv2DBlock(encoder_complexity * 2**idx, activation=None)(var_x) var_x = LeakyReLU(alpha=0.2)(var_x) var_x = ResidualBlock(encoder_complexity * 2**idx, use_bias=True)(var_x) var_x = ResidualBlock(encoder_complexity * 2**idx, use_bias=True)(var_x) var_x = Conv2DBlock(encoder_complexity * 2**(idx + 1), activation="leakyrelu")(var_x) return KerasModel(input_, var_x, name="encoder")
def encoder(self): """ DeLight Encoder Network """ input_ = Input(shape=self.input_shape) var_x = input_ var_x1 = Conv2DBlock(self.encoder_filters // 2, activation="leakyrelu")(var_x) var_x2 = AveragePooling2D()(var_x) var_x2 = LeakyReLU(0.1)(var_x2) var_x = Concatenate()([var_x1, var_x2]) var_x1 = Conv2DBlock(self.encoder_filters, activation="leakyrelu")(var_x) var_x2 = AveragePooling2D()(var_x) var_x2 = LeakyReLU(0.1)(var_x2) var_x = Concatenate()([var_x1, var_x2]) var_x1 = Conv2DBlock(self.encoder_filters * 2, activation="leakyrelu")(var_x) var_x2 = AveragePooling2D()(var_x) var_x2 = LeakyReLU(0.1)(var_x2) var_x = Concatenate()([var_x1, var_x2]) var_x1 = Conv2DBlock(self.encoder_filters * 4, activation="leakyrelu")(var_x) var_x2 = AveragePooling2D()(var_x) var_x2 = LeakyReLU(0.1)(var_x2) var_x = Concatenate()([var_x1, var_x2]) var_x1 = Conv2DBlock(self.encoder_filters * 8, activation="leakyrelu")(var_x) var_x2 = AveragePooling2D()(var_x) var_x2 = LeakyReLU(0.1)(var_x2) var_x = Concatenate()([var_x1, var_x2]) var_x = Dense(self.encoder_dim)(Flatten()(var_x)) var_x = Dropout(0.05)(var_x) var_x = Dense(4 * 4 * 1024)(var_x) var_x = Dropout(0.05)(var_x) var_x = Reshape((4, 4, 1024))(var_x) return KerasModel(input_, var_x, name="encoder")
def __call__(self, inputs):
    """ Call the original Faceswap Encoder

    Parameters
    ----------
    inputs: tensor
        The input tensor to the Faceswap Encoder

    Returns
    -------
    tensor
        The output tensor from the Faceswap Encoder
    """
    var_x = inputs
    filters = self._config["fs_original_min_filters"]
    for i in range(self._depth):
        var_x = Conv2DBlock(filters,
                            activation="leakyrelu",
                            name=f"fs_enc_convblk_{i}")(var_x)
        filters = min(self._config["fs_original_max_filters"], filters * 2)
    return var_x

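To illustrate the filter progression in the loop above, a minimal standalone sketch (the
minimum/maximum filter counts and depth are assumed example values, not the project's
configured defaults):

# Illustrative only: filters double after each block until capped at the maximum.
min_filters, max_filters, depth = 128, 1024, 5
filters = min_filters
for i in range(depth):
    print(f"fs_enc_convblk_{i}: {filters} filters")
    filters = min(max_filters, filters * 2)
# prints: 128, 256, 512, 1024, 1024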