Exemplo n.º 1
0
    def make_generator():
        """Build the generator: maps random noise sequences to latent codes.

        Model input:
        - Z: random variables, shape (seq_len, dim)

        Model output:
        - E: generated embedding, shape (seq_len, hidden_dim)
        """
        # First recurrent layer consumes the raw noise dimensionality.
        stack = [
            rnn_cell(module_name,
                     hidden_dim,
                     return_sequences=True,
                     input_shape=(seq_len, dim))
        ]
        # Remaining recurrent layers operate in the hidden dimension.
        stack.extend(
            rnn_cell(module_name,
                     hidden_dim,
                     return_sequences=True,
                     input_shape=(seq_len, hidden_dim))
            for _ in range(num_layers - 1))
        # Sigmoid projection keeps latent codes in (0, 1).
        stack.append(tf.keras.layers.Dense(hidden_dim, activation='sigmoid'))

        return tf.keras.Sequential(stack, name='generator')
Exemplo n.º 2
0
    def make_embedder():
        """Build the embedder: maps original features into the latent space.

        Model input:
        - X: input time-series features, shape (seq_len, dim)

        Model output:
        - H: embeddings, shape (seq_len, hidden_dim)
        """
        # First recurrent layer consumes the raw feature dimensionality.
        stack = [
            rnn_cell(module_name,
                     hidden_dim,
                     return_sequences=True,
                     input_shape=(seq_len, dim))
        ]
        # Remaining recurrent layers operate in the hidden dimension.
        stack.extend(
            rnn_cell(module_name,
                     hidden_dim,
                     return_sequences=True,
                     input_shape=(seq_len, hidden_dim))
            for _ in range(num_layers - 1))
        # Sigmoid projection keeps embeddings in (0, 1).
        stack.append(tf.keras.layers.Dense(hidden_dim, activation='sigmoid'))

        return tf.keras.Sequential(stack, name='embedder')
    def embedder():
        """Build the embedding network from feature space to latent space.

        Model input:
        - X: input time-series features

        Model output:
        - H: embeddings
        """
        # Stack of GRU cells for the deeper recurrent layers.
        stacked_cells = tf.keras.layers.StackedRNNCells([
            tf.keras.layers.GRUCell(hidden_dim,
                                    activation=tf.nn.tanh,
                                    input_shape=(seq_len, hidden_dim))
            for _ in range(num_layers - 1)
        ])

        model = tf.keras.Sequential()
        # Entry recurrent layer consumes the raw feature dimensionality.
        model.add(
            rnn_cell(module_name,
                     hidden_dim,
                     return_sequences=True,
                     input_shape=(seq_len, dim)))
        model.add(tf.keras.layers.RNN(stacked_cells, return_sequences=True))
        # Sigmoid projection keeps embeddings in (0, 1).
        model.add(tf.keras.layers.Dense(hidden_dim, activation=tf.nn.sigmoid))

        return model
Exemplo n.º 4
0
 def supervisor(H, T):
   """Generate the next latent sequence from the previous one.

   Args:
     - H: latent representation
     - T: input time information (per-sequence lengths)

   Returns:
     - S: generated sequence based on the latent representations generated by the generator
   """
   with tf.variable_scope("supervisor", reuse=tf.AUTO_REUSE):
     # One fewer layer than the other networks, by design.
     cells = [rnn_cell(module_name, hidden_dim) for _ in range(num_layers - 1)]
     multi_cell = tf.nn.rnn_cell.MultiRNNCell(cells)
     outputs, _ = tf.nn.dynamic_rnn(multi_cell, H, dtype=tf.float32,
                                    sequence_length=T)
     # Sigmoid projection keeps the supervised sequence in (0, 1).
     S = tf.contrib.layers.fully_connected(outputs, hidden_dim,
                                           activation_fn=tf.nn.sigmoid)
   return S
    def make_recovery():
        """Build the recovery network from latent space back to data space.

        Model input:
        - H: latent representation

        Model output:
        - X_tilde: recovered data
        """
        # All recurrent layers operate in the hidden dimension.
        stack = [
            rnn_cell(module_name,
                     hidden_dim,
                     return_sequences=True,
                     input_shape=(seq_len, hidden_dim))
            for _ in range(num_layers)
        ]
        # Project back to the original feature dimensionality.
        stack.append(tf.keras.layers.Dense(dim, activation='sigmoid'))

        return tf.keras.Sequential(stack, name='recovery')
    def make_supervisor():
        """Build the supervisor: predicts the next latent step from the previous.

        Model input:
        - H: latent representation

        Model output:
        - S: generated sequence based on the latent representations generated by the generator
        """
        # One fewer recurrent layer than the other networks, by design.
        stack = [
            rnn_cell(module_name,
                     hidden_dim,
                     return_sequences=True,
                     input_shape=(seq_len, hidden_dim))
            for _ in range(num_layers - 1)
        ]
        stack.append(tf.keras.layers.Dense(hidden_dim, activation='sigmoid'))

        return tf.keras.Sequential(stack, name='supervisor')
    def make_discriminator():
        """Build the discriminator: classifies latent sequences as real or synthetic.

        Model input:
        - H: latent representation

        Model output:
        - Y_hat: classification logits between original and synthetic time-series
        """
        # All recurrent layers operate in the hidden dimension.
        stack = [
            rnn_cell(module_name,
                     hidden_dim,
                     return_sequences=True,
                     input_shape=(seq_len, hidden_dim))
            for _ in range(num_layers)
        ]
        # Single raw logit per timestep; no activation here.
        stack.append(tf.keras.layers.Dense(1, activation=None))

        return tf.keras.Sequential(stack, name='discriminator')
Exemplo n.º 8
0
 def embedder(X, T):
   """Embed original time-series features into the latent space.

   Args:
     - X: input time-series features
     - T: input time information (per-sequence lengths)

   Returns:
     - H: embeddings
   """
   with tf.variable_scope("embedder", reuse=tf.AUTO_REUSE):
     cells = [rnn_cell(module_name, hidden_dim) for _ in range(num_layers)]
     multi_cell = tf.nn.rnn_cell.MultiRNNCell(cells)
     outputs, _ = tf.nn.dynamic_rnn(multi_cell, X, dtype=tf.float32,
                                    sequence_length=T)
     # Sigmoid projection keeps embeddings in (0, 1).
     H = tf.contrib.layers.fully_connected(outputs, hidden_dim,
                                           activation_fn=tf.nn.sigmoid)
   return H
Exemplo n.º 9
0
 def recovery(H, T):
   """Recover data in the original space from latent representations.

   Args:
     - H: latent representation
     - T: input time information (per-sequence lengths)

   Returns:
     - X_tilde: recovered data
   """
   with tf.variable_scope("recovery", reuse=tf.AUTO_REUSE):
     cells = [rnn_cell(module_name, hidden_dim) for _ in range(num_layers)]
     multi_cell = tf.nn.rnn_cell.MultiRNNCell(cells)
     outputs, _ = tf.nn.dynamic_rnn(multi_cell, H, dtype=tf.float32,
                                    sequence_length=T)
     # Project back to the original feature dimensionality.
     X_tilde = tf.contrib.layers.fully_connected(outputs, dim,
                                                 activation_fn=tf.nn.sigmoid)
   return X_tilde
Exemplo n.º 10
0
 def discriminator(H, T):
   """Discriminate between original and synthetic time-series data.

   Args:
     - H: latent representation
     - T: input time information (per-sequence lengths)

   Returns:
     - Y_hat: classification results between original and synthetic time-series
   """
   with tf.variable_scope("discriminator", reuse=tf.AUTO_REUSE):
     cells = [rnn_cell(module_name, hidden_dim) for _ in range(num_layers)]
     multi_cell = tf.nn.rnn_cell.MultiRNNCell(cells)
     outputs, _ = tf.nn.dynamic_rnn(multi_cell, H, dtype=tf.float32,
                                    sequence_length=T)
     # Raw logits per timestep; no activation here.
     Y_hat = tf.contrib.layers.fully_connected(outputs, 1, activation_fn=None)
   return Y_hat
Exemplo n.º 11
0
 def generator(Z, T):
   """Generate time-series data in the latent space from random noise.

   Args:
     - Z: random variables
     - T: input time information (per-sequence lengths)

   Returns:
     - E: generated embedding
   """
   with tf.variable_scope("generator", reuse=tf.AUTO_REUSE):
     cells = [rnn_cell(module_name, hidden_dim) for _ in range(num_layers)]
     multi_cell = tf.nn.rnn_cell.MultiRNNCell(cells)
     outputs, _ = tf.nn.dynamic_rnn(multi_cell, Z, dtype=tf.float32,
                                    sequence_length=T)
     # Sigmoid projection keeps generated embeddings in (0, 1).
     E = tf.contrib.layers.fully_connected(outputs, hidden_dim,
                                           activation_fn=tf.nn.sigmoid)
   return E
Exemplo n.º 12
0
    def recovery(H, T):
        """Recover data in the original space from latent representations.

        Args:
          - H: latent representation
          - T: input time information (unused by this Keras RNN path)

        Returns:
          - X_tilde: recovered data
        """
        with tf.compat.v1.variable_scope("recovery",
                                         reuse=tf.compat.v1.AUTO_REUSE):
            cells = [rnn_cell(module_name, hidden_dim)
                     for _ in range(num_layers)]
            stacked = tf.keras.layers.StackedRNNCells(cells)
            outputs = tf.keras.layers.RNN(stacked, return_sequences=True)(H)
            # Project back to the original feature dimensionality.
            X_tilde = tf.keras.layers.Dense(dim,
                                            activation='sigmoid')(outputs)

        return X_tilde
Exemplo n.º 13
0
    def discriminator(H, T):
        """Discriminate between original and synthetic time-series data.

        Args:
          - H: latent representation
          - T: input time information (unused by this Keras RNN path)

        Returns:
          - Y_hat: classification results between original and synthetic time-series
        """
        with tf.compat.v1.variable_scope("discriminator",
                                         reuse=tf.compat.v1.AUTO_REUSE):
            cells = [rnn_cell(module_name, hidden_dim)
                     for _ in range(num_layers)]
            stacked = tf.keras.layers.StackedRNNCells(cells)
            outputs = tf.keras.layers.RNN(stacked, return_sequences=True)(H)
            # Single raw logit per timestep; no activation here.
            Y_hat = tf.keras.layers.Dense(1, activation=None)(outputs)

        return Y_hat
Exemplo n.º 14
0
    def generator(Z, T):
        """Generate time-series data in the latent space from random noise.

        Args:
          - Z: random variables
          - T: input time information (unused by this Keras RNN path)

        Returns:
          - E: generated embedding
        """
        with tf.compat.v1.variable_scope("generator",
                                         reuse=tf.compat.v1.AUTO_REUSE):
            cells = [rnn_cell(module_name, hidden_dim)
                     for _ in range(num_layers)]
            stacked = tf.keras.layers.StackedRNNCells(cells)
            outputs = tf.keras.layers.RNN(stacked, return_sequences=True)(Z)
            # Sigmoid projection keeps generated embeddings in (0, 1).
            E = tf.keras.layers.Dense(hidden_dim,
                                      activation='sigmoid')(outputs)

        return E
    def make_discriminator():
        """Build the discriminator: classifies latent sequences as real or synthetic.

        Model input:
        - H: latent representation

        Model output:
        - Y_hat: classification logits between original and synthetic time-series
        """
        # All recurrent layers operate in the hidden dimension.
        stack = [
            rnn_cell(module_name,
                     hidden_dim,
                     return_sequences=True,
                     input_shape=(seq_len, hidden_dim))
            for _ in range(num_layers)
        ]
        # Single raw logit per timestep; no activation here.
        stack.append(tf.keras.layers.Dense(1, activation=None))

        return tf.keras.Sequential(stack, name='discriminator')
Exemplo n.º 16
0
    def embedder(X, T):
        """Embed original time-series features into the latent space.

        Args:
          - X: input time-series features
          - T: input time information (unused by this Keras RNN path)

        Returns:
          - H: embeddings
        """
        with tf.compat.v1.variable_scope("embedder",
                                         reuse=tf.compat.v1.AUTO_REUSE):
            cells = [rnn_cell(module_name, hidden_dim)
                     for _ in range(num_layers)]
            stacked = tf.keras.layers.StackedRNNCells(cells)
            outputs = tf.keras.layers.RNN(stacked, return_sequences=True)(X)
            # Sigmoid projection keeps embeddings in (0, 1).
            H = tf.keras.layers.Dense(hidden_dim,
                                      activation='sigmoid')(outputs)

        return H
Exemplo n.º 17
0
    def supervisor(H, T):
        """Generate the next latent sequence from the previous one.

        Args:
          - H: latent representation
          - T: input time information (unused by this Keras RNN path)

        Returns:
          - S: generated sequence based on the latent representations generated by the generator
        """
        with tf.compat.v1.variable_scope("supervisor",
                                         reuse=tf.compat.v1.AUTO_REUSE):
            # One fewer layer than the other networks, by design.
            cells = [rnn_cell(module_name, hidden_dim)
                     for _ in range(num_layers - 1)]
            stacked = tf.keras.layers.StackedRNNCells(cells)
            outputs = tf.keras.layers.RNN(stacked, return_sequences=True)(H)
            # Sigmoid projection keeps the supervised sequence in (0, 1).
            S = tf.keras.layers.Dense(hidden_dim,
                                      activation='sigmoid')(outputs)

        return S