def testUneven(self):
    """Mask construction when the hidden layers have unequal widths."""
    masks = masked_autoregressive._make_dense_autoregressive_masks(
        params=2,
        event_size=3,
        hidden_units=[5, 3],
        input_order="left-to-right",
        hidden_degrees="equal")
    # Expected masks: input->hidden[0], hidden[0]->hidden[1], hidden[1]->output.
    expected_masks = [
        [[1, 1, 1, 1, 1],
         [0, 0, 0, 1, 1],
         [0, 0, 0, 0, 0]],
        [[1, 1, 1],
         [1, 1, 1],
         [1, 1, 1],
         [0, 0, 1],
         [0, 0, 1]],
        [[0, 0, 1, 1, 1, 1],
         [0, 0, 1, 1, 1, 1],
         [0, 0, 0, 0, 1, 1]],
    ]
    self.assertLen(masks, len(expected_masks))
    for expected, actual in zip(expected_masks, masks):
        self.assertAllEqual(expected, actual)
def testRightToLeft(self):
    """Mask construction for an explicit reversed input order."""
    masks = masked_autoregressive._make_dense_autoregressive_masks(
        params=2,
        event_size=3,
        hidden_units=[4, 4],
        input_order=list(reversed(range(1, 4))),
        hidden_degrees="equal")
    # Expected masks: input->hidden[0], hidden[0]->hidden[1], hidden[1]->output.
    expected_masks = [
        [[0, 0, 0, 0],
         [0, 0, 1, 1],
         [1, 1, 1, 1]],
        [[1, 1, 1, 1],
         [1, 1, 1, 1],
         [0, 0, 1, 1],
         [0, 0, 1, 1]],
        [[1, 1, 1, 1, 0, 0],
         [1, 1, 1, 1, 0, 0],
         [1, 1, 0, 0, 0, 0],
         [1, 1, 0, 0, 0, 0]],
    ]
    self.assertLen(masks, len(expected_masks))
    for expected, actual in zip(expected_masks, masks):
        self.assertAllEqual(expected, actual)
def testRandom(self):
    """Mask construction is deterministic for a fixed seed with random degrees."""
    masks = masked_autoregressive._make_dense_autoregressive_masks(
        params=2,
        event_size=3,
        hidden_units=[4, 4],
        input_order="random",
        hidden_degrees="random",
        seed=1)
    # Expected masks: input->hidden[0], hidden[0]->hidden[1], hidden[1]->output.
    expected_masks = [
        [[1, 0, 1, 1],
         [0, 0, 0, 0],
         [1, 1, 1, 1]],
        [[1, 0, 1, 1],
         [1, 1, 1, 1],
         [1, 0, 1, 1],
         [1, 0, 1, 1]],
        [[0, 0, 1, 1, 0, 0],
         [1, 1, 1, 1, 0, 0],
         [0, 0, 1, 1, 0, 0],
         [0, 0, 1, 1, 0, 0]],
    ]
    self.assertLen(masks, len(expected_masks))
    for expected, actual in zip(expected_masks, masks):
        self.assertAllEqual(expected, actual)
def random_made(x):
    """Push `x` through a MADE-style network with fresh random weights.

    Uses the autoregressive masks from `_make_dense_autoregressive_masks`
    to zero out disallowed connections in each randomly-initialized layer.
    Returns a tensor reshaped to `[-1, event_size, params]`.
    """
    masks = masked_autoregressive._make_dense_autoregressive_masks(
        params=params,
        event_size=event_size,
        hidden_units=[hidden_size] * num_hidden)
    # Hidden layers: masked random weights followed by a ReLU. All hidden
    # layers share the same width, so only the fan-in changes per layer.
    fan_in = event_size
    for hidden_mask in masks[:-1]:
        weights = np.random.randn(fan_in, hidden_size).astype(np.float32)
        x = tf.nn.relu(tf.matmul(x, weights * tf.cast(hidden_mask, tf.float32)))
        fan_in = hidden_size
    # Output layer: masked by the final mask (applied directly, no cast),
    # producing `params` values per event dimension.
    out_weights = np.random.randn(fan_in, params * event_size).astype(np.float32)
    x = tf.matmul(x, out_weights * masks[-1])
    return tf.reshape(x, [-1, event_size, params])
def build(self, input_shape):
    """See tfkl.Layer.build.

    Builds the masked Keras network backing this layer: one masked Dense
    layer per autoregressive mask, with optional (unmasked) conditional-input
    projections added to the pre-activations.
    """
    # `assert` is stripped when Python runs with -O, so validate explicitly.
    if self._event_shape is None:
        raise ValueError(
            'Unlike MADE, MAN requires `event_shape` to be specified at '
            '`__init__`.')
    # Per-example input width is taken from the trailing input dimension.
    self._input_size = input_shape[-1]
    # Construct the masks.
    self._input_order = _create_input_order(
        self._input_size,
        self._input_order_param,
    )
    # Build degrees over hidden layers plus one extra layer of width
    # `event_size`, then drop the final mask below.
    units = [] if self._hidden_units is None else list(self._hidden_units)
    units.append(self._event_size)
    masks = _make_dense_autoregressive_masks(
        params=self._params,
        event_size=self._input_size,
        hidden_units=units,
        input_order=self._input_order,
        hidden_degrees=self._hidden_degrees,
    )
    masks = masks[:-1]
    # Tile the (new) last mask `params` times along a trailing axis and
    # flatten, so the output layer emits `params` values per event
    # dimension. (`tf.newaxis` is `None`, so it is valid numpy indexing.)
    masks[-1] = np.reshape(
        np.tile(masks[-1][..., tf.newaxis], [1, 1, self._params]),
        [masks[-1].shape[0], self._event_size * self._params])
    self._masks = masks
    # Create placeholder for output.
    inputs = tf.keras.Input((self._input_size,), dtype=self.dtype)
    outputs = [inputs]
    if self._conditional:
        conditional_input = tf.keras.Input((self._conditional_size,),
                                           dtype=self.dtype)
        inputs = [inputs, conditional_input]
    # Input-to-hidden, hidden-to-hidden, and hidden-to-output layers:
    #  [..., self._event_size] -> [..., self._hidden_units[0]].
    #  [..., self._hidden_units[k-1]] -> [..., self._hidden_units[k]].
    #  [..., self._hidden_units[-1]] -> [..., event_size * self._params].
    layer_output_sizes = list(
        self._hidden_units) + [self._event_size * self._params]
    for k in range(len(self._masks)):
        autoregressive_output = tf.keras.layers.Dense(
            layer_output_sizes[k],
            activation=None,
            use_bias=self._use_bias,
            kernel_initializer=_make_masked_initializer(
                self._masks[k], self._kernel_initializer),
            bias_initializer=self._bias_initializer,
            kernel_regularizer=self._kernel_regularizer,
            bias_regularizer=self._bias_regularizer,
            kernel_constraint=_make_masked_constraint(
                self._masks[k], self._kernel_constraint),
            bias_constraint=self._bias_constraint,
            dtype=self.dtype)(outputs[-1])
        if (self._conditional and
            ((self._conditional_layers == 'all_layers') or
             ((self._conditional_layers == 'first_layer') and (k == 0)))):
            # Unmasked, bias-free projection of the conditional input,
            # summed into the autoregressive pre-activation.
            conditional_output = tf.keras.layers.Dense(
                layer_output_sizes[k],
                activation=None,
                use_bias=False,
                kernel_initializer=self._kernel_initializer,
                bias_initializer=None,
                kernel_regularizer=self._kernel_regularizer,
                bias_regularizer=None,
                kernel_constraint=self._kernel_constraint,
                bias_constraint=None,
                dtype=self.dtype)(conditional_input)
            outputs.append(tf.keras.layers.Add()(
                [autoregressive_output, conditional_output]))
        else:
            outputs.append(autoregressive_output)
        # Apply the activation after every layer except the final (output) one.
        if k + 1 < len(self._masks):
            outputs.append(
                tf.keras.layers.Activation(self._activation)(outputs[-1]))
    self._network = tf.keras.models.Model(inputs=inputs, outputs=outputs[-1])
    # Allow network to be called with inputs of shapes that don't match
    # the specs of the network's input layers.
    self._network.input_spec = None
    # Record that the layer has been built.
    super(AutoregressiveNetwork, self).build(input_shape)