def build_decoder(latent_dim, timesteps):
    """Build the decoder: maps a latent vector to a 1-D signal of length `timesteps`.

    Args:
        latent_dim: size of the latent input vector.
        timesteps: length of the decoded output sequence.

    Returns:
        A Keras `Model` mapping `(latent_dim,)` inputs to `(timesteps,)`
        outputs bounded in [-1, 1] by the final tanh.
    """
    decoder_inputs = Input((latent_dim, ))
    decoded = decoder_inputs

    # Project the latent code into a short sequence before conv upsampling.
    decoded = Dense(15)(decoded)
    decoded = utils.BatchNormalization()(decoded)
    decoded = LeakyReLU(0.2)(decoded)

    # Add a channel axis so Conv1D can operate: (15,) -> (15, 1).
    decoded = Lambda(lambda x: K.expand_dims(x))(decoded)

    # Three identical conv blocks, each doubling temporal resolution (15 -> 120).
    for _ in range(3):
        decoded = Conv1D(32, 3, padding='same')(decoded)
        decoded = utils.BatchNormalization()(decoded)
        decoded = LeakyReLU(0.2)(decoded)
        decoded = UpSampling1D(2)(decoded)

    # Collapse the feature maps back down to a single channel.
    decoded = Conv1D(1, 3, padding='same')(decoded)
    decoded = utils.BatchNormalization()(decoded)
    decoded = LeakyReLU(0.2)(decoded)

    # Drop the channel axis: (T, 1) -> (T,).
    decoded = Lambda(lambda x: K.squeeze(x, -1))(decoded)

    # Final projection to exactly `timesteps` samples.
    decoded = Dense(timesteps, activation='tanh')(decoded)

    # BUG FIX: pass the model name as a keyword; the third positional
    # argument of Model is not portable across Keras versions.
    decoder = Model(decoder_inputs, decoded, name='decoder')
    return decoder
def build_encoder(latent_dim, timesteps):
    """Build the encoder: maps a length-`timesteps` signal to the mean and
    log-variance of a `latent_dim`-dimensional Gaussian posterior.

    Returns a Keras `Model` whose outputs are `[z_mean, z_log_var]`.
    """
    encoder_inputs = Input((timesteps, ))

    # Give the raw 1-D signal a channel axis: (timesteps,) -> (timesteps, 1).
    x = Lambda(lambda t: K.expand_dims(t, -1))(encoder_inputs)

    # Three conv blocks, each halving the temporal resolution.
    for _ in range(3):
        x = Conv1D(32, 3, padding='same')(x)
        x = utils.BatchNormalization()(x)
        x = LeakyReLU(0.2)(x)
        x = MaxPooling1D(2, padding='same')(x)

    # A final conv block without pooling.
    x = Conv1D(32, 3, padding='same')(x)
    x = utils.BatchNormalization()(x)
    x = LeakyReLU(0.2)(x)

    x = Flatten()(x)

    # Dense bottleneck ahead of the distribution heads.
    x = Dense(128)(x)
    x = utils.BatchNormalization()(x)
    x = Activation('tanh')(x)

    # Two parallel heads parameterize the latent Gaussian.
    z_mean = Dense(latent_dim)(x)
    z_log_var = Dense(latent_dim)(x)

    return Model(encoder_inputs, [z_mean, z_log_var])
# Example No. 3 (score: 0) — separator left over from the scraped source
def build_generator(latent_dim, timesteps):
    """Build the generator: maps a latent vector to a 1-D signal of length `timesteps`.

    Architecturally identical to the decoder (dense stem, three upsampling
    conv blocks, tanh output), but built as an independent model.

    Args:
        latent_dim: size of the latent input vector.
        timesteps: length of the generated output sequence.

    Returns:
        A Keras `Model` mapping `(latent_dim,)` inputs to `(timesteps,)`
        outputs bounded in [-1, 1] by the final tanh.
    """
    generator_inputs = Input((latent_dim, ))
    generated = generator_inputs

    # Project the latent code into a short sequence before conv upsampling.
    generated = Dense(15)(generated)
    generated = utils.BatchNormalization()(generated)
    generated = LeakyReLU(0.2)(generated)

    # Add a channel axis so Conv1D can operate: (15,) -> (15, 1).
    generated = Lambda(lambda x: K.expand_dims(x))(generated)

    # Three identical conv blocks, each doubling temporal resolution (15 -> 120).
    for _ in range(3):
        generated = Conv1D(32, 3, padding='same')(generated)
        generated = utils.BatchNormalization()(generated)
        generated = LeakyReLU(0.2)(generated)
        generated = UpSampling1D(2)(generated)

    # Collapse the feature maps back down to a single channel.
    generated = Conv1D(1, 3, padding='same')(generated)
    generated = utils.BatchNormalization()(generated)
    generated = LeakyReLU(0.2)(generated)

    # Drop the channel axis: (T, 1) -> (T,).
    generated = Lambda(lambda x: K.squeeze(x, -1))(generated)

    # Final projection to exactly `timesteps` samples.
    generated = Dense(timesteps, activation='tanh')(generated)

    # BUG FIX: pass the model name as a keyword; the third positional
    # argument of Model is not portable across Keras versions.
    generator = Model(generator_inputs, generated, name='generator')
    return generator
# Example No. 4 (score: 0) — separator left over from the scraped source
  def __init__(self,
               is_training_bn: bool,
               act_type: Union[Text, None],
               init_zero: bool = False,
               data_format: Text = 'channels_last',
               momentum: float = 0.99,
               epsilon: float = 1e-3,
               use_tpu: bool = False,
               name: Text = None):
    """Batch normalization followed by an activation.

    Args:
      is_training_bn: whether batch norm runs in training mode.
      act_type: activation identifier handed to `ActivationFn`, or None.
      init_zero: if True, initialize BN gamma to zeros (zero-gamma residual
        init); otherwise to ones.
      data_format: 'channels_first' (axis 1) or 'channels_last' (axis 3).
      momentum: BN moving-average momentum.
      epsilon: BN variance epsilon.
      use_tpu: if True (and training), use the cross-replica TPU BN layer.
      name: optional layer name; None lets Keras auto-generate one.
    """
    super(BatchNormAct, self).__init__()

    self.act_type = act_type
    self.training = is_training_bn

    if init_zero:
      self.gamma_initializer = tf.zeros_initializer()
    else:
      self.gamma_initializer = tf.ones_initializer()

    # Channel axis: NCHW -> 1, NHWC -> 3.
    if data_format == 'channels_first':
      self.axis = 1
    else:
      self.axis = 3

    # Cross-replica BN only matters while training on TPU; otherwise use the
    # regular fused BN. The two branches previously duplicated the whole
    # constructor call, differing only in the class.
    if is_training_bn and use_tpu:
      bn_class = utils.TpuBatchNormalization
    else:
      bn_class = utils.BatchNormalization
    self.layer = bn_class(
        axis=self.axis,
        momentum=momentum,
        epsilon=epsilon,
        center=True,
        scale=True,
        gamma_initializer=self.gamma_initializer,
        # BUG FIX: pass the name through unchanged; f'{name}' converted a
        # None name into the literal string 'None', breaking auto-naming.
        name=name)

    self.act = ActivationFn(act_type)