def _validate_embedded_fixed_features(comp):
  """Checks that the embedded fixed features of |comp| are set up properly."""
  for fixed_feature in comp.spec.fixed_feature:
    # An embedded feature must request a strictly positive embedding dim.
    check.Gt(fixed_feature.embedding_dim, 0,
             'Embeddings requested for non-embedded feature: %s' % fixed_feature)
    if not fixed_feature.is_constant:
      continue
    # Constant features cannot be trained, so they must ship a pretrained
    # embedding matrix.
    check.IsTrue(
        fixed_feature.HasField('pretrained_embedding_matrix'),
        'Constant embeddings must be pretrained: %s' % fixed_feature)
def __init__(self, component):
  """Initializes weights and layers.

  Args:
    component: Parent ComponentBuilderBase object.
  """
  super(BiaffineLabelNetwork, self).__init__(component)
  parameters = component.spec.network_unit.parameters

  self._num_labels = int(parameters['num_labels'])
  check.Gt(self._num_labels, 0, 'Expected some labels')

  # This unit consumes exactly two linked features ('sources' and 'targets')
  # and no fixed features.
  check.Eq(len(self._fixed_feature_dims.items()), 0,
           'Expected no fixed features')
  check.Eq(len(self._linked_feature_dims.items()), 2,
           'Expected two linked features')
  check.In('sources', self._linked_feature_dims,
           'Missing required linked feature')
  check.In('targets', self._linked_feature_dims,
           'Missing required linked feature')

  self._source_dim = self._linked_feature_dims['sources']
  self._target_dim = self._linked_feature_dims['targets']

  # TODO(googleuser): Make parameter initialization configurable.
  # The initializer object is a stateless configuration, so one instance can
  # safely be shared by all four variables below.
  initializer = tf.random_normal_initializer(stddev=1e-4, seed=self._seed)
  self._weights = [
      tf.get_variable('weights_pair',
                      [self._num_labels, self._source_dim, self._target_dim],
                      tf.float32, initializer),
      tf.get_variable('weights_source',
                      [self._num_labels, self._source_dim],
                      tf.float32, initializer),
      tf.get_variable('weights_target',
                      [self._num_labels, self._target_dim],
                      tf.float32, initializer),
  ]
  self._biases = [
      tf.get_variable('biases', [self._num_labels], tf.float32, initializer),
  ]

  self._params.extend(self._weights + self._biases)
  self._regularized_weights.extend(self._weights)
  self._layers.append(
      network_units.Layer(self, 'labels', self._num_labels))
def testCheckGt(self):
  """Tests check.Gt on passing, failing, and custom-exception cases."""
  # A strictly-greater comparison passes without raising.
  check.Gt(2, 1, 'foo')
  # Equal operands fail with ValueError (the default exception type),
  # carrying the supplied message.
  with self.assertRaisesRegexp(ValueError, 'bar'):
    check.Gt(1, 1, 'bar')
  # The caller may override the raised exception type.
  with self.assertRaisesRegexp(RuntimeError, 'baz'):
    check.Gt(-1, 1, 'baz', RuntimeError)