Example #1
    def __init__(self, component):
        """Initializes layers.

    Args:
      component: Parent ComponentBuilderBase object.
    """
        # Negative Layer.dim indicates that the dimension is dynamic.
        layers = [
            network_units.Layer(self, 'lengths', -1),
            network_units.Layer(self, 'scores', -1),
            network_units.Layer(self, 'logits', -1),
            network_units.Layer(self, 'arcs', -1),
        ]
        super(MstSolverNetwork, self).__init__(component, init_layers=layers)

        self._attrs = network_units.get_attrs_with_defaults(
            component.spec.network_unit.parameters,
            defaults={
                'forest': False,
                'loss': 'softmax',
                'crf_max_dynamic_range': 20,
            })

        check.Eq(len(self._fixed_feature_dims.items()), 0,
                 'Expected no fixed features')
        check.Eq(len(self._linked_feature_dims.items()), 2,
                 'Expected two linked features')

        check.In('lengths', self._linked_feature_dims,
                 'Missing required linked feature')
        check.In('scores', self._linked_feature_dims,
                 'Missing required linked feature')
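For context, a minimal sketch of what network_units.get_attrs_with_defaults
presumably does, assuming spec parameters arrive as strings and each default
fixes its attribute's type. The coercion rules below are illustrative, not the
actual DRAGNN implementation:

def get_attrs_with_defaults(parameters, defaults):
    """Merges string-valued spec parameters over typed defaults (sketch)."""
    attrs = dict(defaults)
    for key, default in defaults.items():
        if key in parameters:
            value = parameters[key]
            if isinstance(default, bool):
                # Booleans need explicit parsing: bool('False') is True.
                attrs[key] = str(value).lower() == 'true'
            else:
                attrs[key] = type(default)(value)
    return attrs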
Example #2
    def testCheckIn(self):
        check.In('a', ('a', 'b', 'c'), 'foo')
        check.In('b', {'a': 1, 'b': 2}, 'bar')
        with self.assertRaisesRegexp(ValueError, 'bar'):
            check.In('d', ('a', 'b', 'c'), 'bar')
        with self.assertRaisesRegexp(RuntimeError, 'baz'):
            check.In('c', {'a': 1, 'b': 2}, 'baz', RuntimeError)
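The test pins down check.In's contract: membership in a sequence or in a dict's
keys, a caller-supplied failure message, ValueError by default, and an optional
exception class as the last argument. A minimal sketch consistent with that
contract (an assumption, not the actual implementation):

def In(element, container, message, error=ValueError):
    """Raises |error| with |message| if |element| is not in |container|."""
    # The `in` operator covers both sequences and dict keys.
    if element not in container:
        raise error(message)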
Example #3
    def __init__(self, component):
        """Initializes weights and layers.

    Args:
      component: Parent ComponentBuilderBase object.
    """
        super(BiaffineLabelNetwork, self).__init__(component)

        parameters = component.spec.network_unit.parameters
        self._num_labels = int(parameters['num_labels'])

        check.Gt(self._num_labels, 0, 'Expected some labels')
        check.Eq(len(self._fixed_feature_dims.items()), 0,
                 'Expected no fixed features')
        check.Eq(len(self._linked_feature_dims.items()), 2,
                 'Expected two linked features')

        check.In('sources', self._linked_feature_dims,
                 'Missing required linked feature')
        check.In('targets', self._linked_feature_dims,
                 'Missing required linked feature')

        self._source_dim = self._linked_feature_dims['sources']
        self._target_dim = self._linked_feature_dims['targets']

        # TODO(googleuser): Make parameter initialization configurable.
        self._weights = []
        self._weights.append(
            tf.get_variable(
                'weights_pair',
                [self._num_labels, self._source_dim, self._target_dim],
                tf.float32,
                tf.random_normal_initializer(stddev=1e-4, seed=self._seed)))
        self._weights.append(
            tf.get_variable(
                'weights_source', [self._num_labels, self._source_dim],
                tf.float32,
                tf.random_normal_initializer(stddev=1e-4, seed=self._seed)))
        self._weights.append(
            tf.get_variable(
                'weights_target', [self._num_labels, self._target_dim],
                tf.float32,
                tf.random_normal_initializer(stddev=1e-4, seed=self._seed)))

        self._biases = []
        self._biases.append(
            tf.get_variable(
                'biases', [self._num_labels], tf.float32,
                tf.random_normal_initializer(stddev=1e-4, seed=self._seed)))

        self._params.extend(self._weights + self._biases)
        self._regularized_weights.extend(self._weights)

        self._layers.append(
            network_units.Layer(self, 'labels', self._num_labels))
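These three weight tensors plus the bias are the standard pieces of a biaffine
label scorer: a pairwise bilinear term and one linear term per side. A hedged
sketch of how they would presumably be combined downstream, with illustrative
names and TF1-style code to match the examples:

import tensorflow as tf

def biaffine_label_logits(sources, targets, w_pair, w_source, w_target, biases):
    """Returns [num_pairs, num_labels] logits for [num_pairs, dim] inputs."""
    pair = tf.einsum('ni,lij,nj->nl', sources, w_pair, targets)
    src = tf.einsum('ni,li->nl', sources, w_source)
    tgt = tf.einsum('nj,lj->nl', targets, w_target)
    return pair + src + tgt + biases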
Example #4
    def __init__(self, component):
        """Initializes weights and layers.

    Args:
      component: Parent ComponentBuilderBase object.
    """
        super(BiaffineDigraphNetwork, self).__init__(component)

        check.Eq(len(self._fixed_feature_dims.items()), 0,
                 'Expected no fixed features')
        check.Eq(len(self._linked_feature_dims.items()), 2,
                 'Expected two linked features')

        check.In('sources', self._linked_feature_dims,
                 'Missing required linked feature')
        check.In('targets', self._linked_feature_dims,
                 'Missing required linked feature')
        self._source_dim = self._linked_feature_dims['sources']
        self._target_dim = self._linked_feature_dims['targets']

        # TODO(googleuser): Make parameter initialization configurable.
        self._weights = []
        self._weights.append(
            tf.get_variable(
                'weights_arc', [self._source_dim, self._target_dim],
                tf.float32,
                tf.random_normal_initializer(stddev=1e-4, seed=self._seed)))
        self._weights.append(
            tf.get_variable(
                'weights_source', [self._source_dim], tf.float32,
                tf.random_normal_initializer(stddev=1e-4, seed=self._seed)))
        self._weights.append(
            tf.get_variable(
                'root', [self._source_dim], tf.float32,
                tf.random_normal_initializer(stddev=1e-4, seed=self._seed)))

        self._params.extend(self._weights)
        self._regularized_weights.extend(self._weights)

        # Negative Layer.dim indicates that the dimension is dynamic.
        self._layers.append(network_units.Layer(self, 'adjacency', -1))
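A hedged sketch of the dynamic adjacency matrix these weights would presumably
produce: a bilinear source-target term, a per-source linear term broadcast
across targets, and the learned root vector scoring each token as a root
candidate. The shapes and the root convention here are assumptions:

import tensorflow as tf

def arc_adjacency(sources, targets, w_arc, w_source, root):
    """Returns [n, n] pairwise arc scores and [n] root scores (assumed layout)."""
    arcs = tf.matmul(tf.matmul(sources, w_arc), targets, transpose_b=True)
    arcs += tf.expand_dims(tf.tensordot(sources, w_source, axes=1), axis=1)
    root_scores = tf.tensordot(sources, root, axes=1)
    return arcs, root_scores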
Example #5
    def __init__(self, component):
        """Initializes weights and layers.

        Args:
          component: Parent ComponentBuilderBase object.
        """
        super(BiaffineDigraphNetwork, self).__init__(component)

        check.Eq(len(self._fixed_feature_dims.items()), 0,
                 'Expected no fixed features')
        check.Eq(len(self._linked_feature_dims.items()), 2,
                 'Expected two linked features')

        check.In('sources', self._linked_feature_dims,
                 'Missing required linked feature')
        check.In('targets', self._linked_feature_dims,
                 'Missing required linked feature')
        self._source_dim = self._linked_feature_dims['sources']
        self._target_dim = self._linked_feature_dims['targets']

        # Unlike the variant above, the bilinear arc weights use an orthogonal
        # initializer and the vector terms start at zero.
        self._weights = []
        self._weights.append(
            tf.get_variable('weights_arc',
                            [self._source_dim, self._target_dim], tf.float32,
                            tf.orthogonal_initializer()))
        self._weights.append(
            tf.get_variable('weights_source', [self._source_dim], tf.float32,
                            tf.zeros_initializer()))
        self._weights.append(
            tf.get_variable('root', [self._source_dim], tf.float32,
                            tf.zeros_initializer()))

        self._params.extend(self._weights)
        self._regularized_weights.extend(self._weights)

        # Add runtime hooks for pre-computed weights.
        self._derived_params.append(self._get_root_weights)
        self._derived_params.append(self._get_root_bias)

        # Negative Layer.dim indicates that the dimension is dynamic.
        self._layers.append(network_units.Layer(component, 'adjacency', -1))
Example #6
    def __init__(self, component):
        super(BulkBiLSTMNetwork, self).__init__(component)

        check.In('lengths', self._linked_feature_dims,
                 'Missing required linked feature')
        check.Eq(self._linked_feature_dims['lengths'], 1,
                 'Wrong dimension for "lengths" feature')
        self._input_dim = self._concatenated_input_dim - 1  # exclude 'lengths'
        self._output_dim = self.get_layer_size('outputs')
        tf.logging.info('[%s] Bulk bi-LSTM with input_dim=%d output_dim=%d',
                        component.name, self._input_dim, self._output_dim)

        # Create one training and inference cell per layer and direction.
        self._train_cells_forward = self._create_train_cells()
        self._train_cells_backward = self._create_train_cells()
        self._inference_cells_forward = self._create_inference_cells()
        self._inference_cells_backward = self._create_inference_cells()

        def _bilstm_closure(scope):
            """Applies the bi-LSTM to placeholder inputs and lengths."""
            # Use singleton |stride| and |steps| because their values don't affect the
            # weight variables.
            stride, steps = 1, 1
            placeholder_inputs = tf.placeholder(
                dtype=tf.float32, shape=[stride, steps, self._input_dim])
            placeholder_lengths = tf.placeholder(dtype=tf.int64,
                                                 shape=[stride])

            # Omit the initial states and sequence lengths for simplicity; they don't
            # affect the weight variables.
            tf.contrib.rnn.stack_bidirectional_dynamic_rnn(
                self._train_cells_forward,
                self._train_cells_backward,
                placeholder_inputs,
                dtype=tf.float32,
                sequence_length=placeholder_lengths,
                scope=scope)

        self._capture_variables_as_params(_bilstm_closure)

        # Allocate parameters for the initial states.  Note that an LSTM state is a
        # tuple of two substates (c, h), so there are 4 variables per layer.
        for index, num_units in enumerate(self._hidden_layer_sizes):
            for direction in ['forward', 'backward']:
                for substate in ['c', 'h']:
                    self._params.append(
                        tf.get_variable(
                            'initial_state_%s_%s_%d' % (direction, substate, index),
                            [1, num_units],  # leading 1 for later batch-wise tiling
                            dtype=tf.float32,
                            initializer=tf.constant_initializer(0.0)))
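The [1, num_units] shape exists so each initial-state variable can be tiled to
the batch at run time, as the inline comment notes. A minimal sketch of that
tiling, illustrative only; the actual wiring lives elsewhere in the network:

import tensorflow as tf

def tile_initial_state(state_var, stride):
    """Tiles a [1, num_units] state variable to [stride, num_units]."""
    return tf.tile(state_var, [stride, 1])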