Example #1
0
 def _create_encoder(self, n_layers, dropout):
   """Create the encoder layers.

   Builds a stack of Conv1D layers (one per entry in self._filter_sizes,
   each optionally preceded by Dropout), then Flatten -> Dense -> BatchNorm.
   When self._variational is set, adds mean/stddev embedding heads combined
   via CombineMeanStd (sampling applied at training time only).

   NOTE(review): `n_layers` is unused — the depth is driven by
   len(self._filter_sizes). Kept for interface compatibility; confirm with
   callers before removing.

   Parameters
   ----------
   n_layers: int
     unused (see note above)
   dropout: float
     dropout rate; a Dropout layer is inserted before each Conv1D only
     when this is > 0.0

   Returns
   -------
   the final encoder layer (the embedding layer)
   """
   prev_layer = self._features
   # Pair each filter count with its kernel size; assumes the two lists
   # have equal length (the original indexed both by the same i).
   for filter_size, kernel_size in zip(self._filter_sizes,
                                       self._kernel_sizes):
     if dropout > 0.0:
       prev_layer = layers.Dropout(dropout, in_layers=prev_layer)
     prev_layer = layers.Conv1D(
         filters=filter_size,
         kernel_size=kernel_size,
         in_layers=prev_layer,
         activation_fn=tf.nn.relu)
   prev_layer = layers.Flatten(prev_layer)
   prev_layer = layers.Dense(
       self._decoder_dimension, in_layers=prev_layer, activation_fn=tf.nn.relu)
   prev_layer = layers.BatchNorm(prev_layer)
   if self._variational:
     # Variational head: learn mean and stddev, then sample the embedding
     # (training_only=True means the mean is used directly at inference).
     self._embedding_mean = layers.Dense(
         self._embedding_dimension,
         in_layers=prev_layer,
         name='embedding_mean')
     self._embedding_stddev = layers.Dense(
         self._embedding_dimension, in_layers=prev_layer, name='embedding_std')
     prev_layer = layers.CombineMeanStd(
         [self._embedding_mean, self._embedding_stddev], training_only=True)
   return prev_layer
Example #2
0
 def test_batch_norm(self):
     """Test invoking BatchNorm in eager mode.

     Feeds a random (batch_size, n_features) float32 array through a fresh
     BatchNorm layer and checks that the output shape is preserved and that
     the layer exposes exactly two trainable variables (gamma and beta).
     """
     with context.eager_mode():
         batch_size = 10
         n_features = 5
         # Renamed from `input` to avoid shadowing the Python builtin.
         features = np.random.rand(batch_size, n_features).astype(np.float32)
         layer = layers.BatchNorm()
         result = layer(features)
         # BatchNorm must not change the shape of its input.
         assert result.shape == (batch_size, n_features)
         # Scale (gamma) and offset (beta) are the two trainable variables.
         assert len(layer.trainable_variables) == 2