Code Example #1
File: wrappers_test.py Project: zqy0/keras
import numpy as np
import pytest
from numpy.testing import assert_allclose

from keras import layers
from keras.layers import Input, wrappers
from keras.models import Model
# _to_list lived in keras.engine.topology in the Keras line these tests
# target; later releases expose the equivalent as
# keras.utils.generic_utils.to_list.
from keras.engine.topology import _to_list


# merge_mode is pytest-parametrized in the original test module; the values
# below are the merge modes Bidirectional accepts.
@pytest.mark.parametrize('merge_mode', ['sum', 'mul', 'ave', 'concat', None])
def test_Bidirectional_dropout(merge_mode):
    rnn = layers.LSTM
    samples = 2
    dim = 5
    timesteps = 3
    units = 3
    X = [np.random.rand(samples, timesteps, dim)]

    inputs = Input((timesteps, dim))
    wrapped = wrappers.Bidirectional(rnn(units,
                                         dropout=0.2,
                                         recurrent_dropout=0.2),
                                     merge_mode=merge_mode)
    outputs = _to_list(wrapped(inputs, training=True))
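    # training=True bakes the dropout behaviour into the graph, so the
    # outputs no longer depend on the backend learning phase.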
    assert all(not getattr(x, '_uses_learning_phase') for x in outputs)

    inputs = Input((timesteps, dim))
    wrapped = wrappers.Bidirectional(rnn(units, dropout=0.2,
                                         return_state=True),
                                     merge_mode=merge_mode)
    outputs = _to_list(wrapped(inputs))
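    # With no explicit training flag, dropout defers to the learning phase.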
    assert all(x._uses_learning_phase for x in outputs)

    model = Model(inputs, outputs)
    assert model.uses_learning_phase
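    # At inference the learning phase is 0, so dropout is disabled and
    # repeated predict() calls must agree.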
    y1 = _to_list(model.predict(X))
    y2 = _to_list(model.predict(X))
    for x1, x2 in zip(y1, y2):
        assert_allclose(x1, x2, atol=1e-5)
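Every snippet on this page funnels layer outputs through _to_list. A minimal sketch of that helper, matching the normalize-to-list behaviour the tests rely on (later Keras releases expose the same thing publicly as keras.utils.generic_utils.to_list):

def _to_list(x):
    """Return x wrapped in a list if it is not one already."""
    if isinstance(x, list):
        return x
    return [x]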
Code Example #2
File: wrappers_test.py Project: zqy0/keras
# Imports as in Code Example #1, plus the backend alias:
# from keras import backend as K
@pytest.mark.parametrize('merge_mode', ['sum', 'mul', 'ave', 'concat', None])
def test_Bidirectional_merged_value(merge_mode):
    rnn = layers.LSTM
    samples = 2
    dim = 5
    timesteps = 3
    units = 3
    X = [np.random.rand(samples, timesteps, dim)]

    if merge_mode == 'sum':
        merge_func = lambda y, y_rev: y + y_rev
    elif merge_mode == 'mul':
        merge_func = lambda y, y_rev: y * y_rev
    elif merge_mode == 'ave':
        merge_func = lambda y, y_rev: (y + y_rev) / 2
    elif merge_mode == 'concat':
        merge_func = lambda y, y_rev: np.concatenate((y, y_rev), axis=-1)
    else:
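        # merge_mode=None: keep the forward and backward outputs separate.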
        merge_func = lambda y, y_rev: [y, y_rev]

    # basic case
    inputs = Input((timesteps, dim))
    layer = wrappers.Bidirectional(rnn(units, return_sequences=True),
                                   merge_mode=merge_mode)
    f_merged = K.function([inputs], _to_list(layer(inputs)))
    f_forward = K.function([inputs], [layer.forward_layer.call(inputs)])
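    # The backward layer runs with go_backwards=True, so its output sequence
    # is time-reversed; flip it back before comparing with the forward pass.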
    f_backward = K.function([inputs],
                            [K.reverse(layer.backward_layer.call(inputs), 1)])

    y_merged = f_merged(X)
    y_expected = _to_list(merge_func(f_forward(X)[0], f_backward(X)[0]))
    assert len(y_merged) == len(y_expected)
    for x1, x2 in zip(y_merged, y_expected):
        assert_allclose(x1, x2, atol=1e-5)

    # test return_state
    inputs = Input((timesteps, dim))
    layer = wrappers.Bidirectional(rnn(units, return_state=True),
                                   merge_mode=merge_mode)
    f_merged = K.function([inputs], layer(inputs))
    f_forward = K.function([inputs], layer.forward_layer.call(inputs))
    f_backward = K.function([inputs], layer.backward_layer.call(inputs))
    n_states = len(layer.layer.states)
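    # An LSTM carries two states (h, c), so the wrapper appends
    # n_states * 2 = 4 state tensors after the merged output(s).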

    y_merged = f_merged(X)
    y_forward = f_forward(X)
    y_backward = f_backward(X)
    y_expected = _to_list(merge_func(y_forward[0], y_backward[0]))
    assert len(y_merged) == len(y_expected) + n_states * 2
    for x1, x2 in zip(y_merged, y_expected):
        assert_allclose(x1, x2, atol=1e-5)

    # Check that the BiRNN's states are the forward layer's states
    # followed by the backward layer's.
    y_merged = y_merged[-n_states * 2:]
    y_forward = y_forward[-n_states:]
    y_backward = y_backward[-n_states:]
    for state_birnn, state_inner in zip(y_merged, y_forward + y_backward):
        assert_allclose(state_birnn, state_inner, atol=1e-5)
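As a complement to the test above, a sketch of how merge_mode shows up in the static output shape (reusing timesteps=3, dim=5, units=3 from the test; 'concat' is the only mode that widens the feature axis, while merge_mode=None returns two separate tensors):

for mode, last_dim in [('sum', 3), ('mul', 3), ('ave', 3), ('concat', 6)]:
    inp = Input((3, 5))
    out = wrappers.Bidirectional(layers.LSTM(3, return_sequences=True),
                                 merge_mode=mode)(inp)
    assert K.int_shape(out) == (None, 3, last_dim)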
Code Example #3
    def __call__(self, inputs, **kwargs):
        if isinstance(inputs, list):
            inputs = inputs[:]

        with K.name_scope(self.name):
            # Raise exceptions in case the input is not compatible
            # with the input_spec specified in the layer constructor.
            self.assert_input_compatibility(inputs)

            # Handle layer building (weight creation, input spec locking).
            if not self.built:
                self.build(inputs)
                self.built = True

            # Handle mask propagation.
            previous_mask = _collect_previous_mask(inputs)
            user_kwargs = copy.copy(kwargs)
            if not _is_all_none(previous_mask):
                # The previous layer generated a mask.
                if has_arg(self.call, 'mask'):
                    if 'mask' not in kwargs:
                        # Default to the previous layer's mask, unless a
                        # mask was explicitly passed to __call__.
                        kwargs['mask'] = previous_mask
            # Handle automatic shape inference (only useful for Theano).
            input_shape = _collect_input_shape(inputs)

            # Actually call the layer, collecting output(s), mask(s), and shape(s).
            output = self.call(inputs, **kwargs)
            output_mask = self.compute_mask(inputs, previous_mask)

            # If the layer returns tensors from its inputs, unmodified,
            # we copy them to avoid loss of tensor metadata.
            output_ls = _to_list(output)
            inputs_ls = _to_list(inputs)
            output_ls_copy = []
            for x in output_ls:
                if x in inputs_ls:
                    x = K.identity(x)
                output_ls_copy.append(x)
            if len(output_ls_copy) == 1:
                output = output_ls_copy[0]
            else:
                output = output_ls_copy

            # Inferring the output shape is only relevant for Theano.
            if all([s is not None for s in _to_list(input_shape)]):
                output_shape = self.compute_output_shape(input_shape)
            else:
                if isinstance(input_shape, list):
                    output_shape = [None for _ in input_shape]
                else:
                    output_shape = None

            if not isinstance(output_mask,
                              (list, tuple)) and len(output_ls) > 1:
                # Augment the mask to match the length of the output.
                output_mask = [output_mask] * len(output_ls)

            # Add an inbound node to the layer, so that it keeps track
            # of the call and of all new variables created during the call.
            # This also updates the layer history of the output tensor(s).
            # If the input tensor(s) had not previous Keras history,
            # this does nothing.
            self._add_inbound_node(input_tensors=inputs,
                                   output_tensors=output,
                                   input_masks=previous_mask,
                                   output_masks=output_mask,
                                   input_shapes=input_shape,
                                   output_shapes=output_shape,
                                   arguments=user_kwargs)

            # Apply activity regularizer if any:
            if hasattr(self, 'activity_regularizer'
                       ) and self.activity_regularizer is not None:
                regularization_losses = [
                    self.activity_regularizer(x) for x in _to_list(output)
                ]
                self.add_loss(regularization_losses, _to_list(inputs))
        return output
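One easy-to-miss step above is the K.identity copy: when a layer's call returns one of its input tensors unchanged, __call__ hands back a copy so that the history and shape metadata recorded for this node do not clobber the input's. A sketch of the observable effect, assuming a pass-through Lambda on this Keras version:

from keras.layers import Input, Lambda

x = Input((4,))
y = Lambda(lambda t: t)(x)  # call() returns its input unchanged
assert y is not x           # __call__ substituted K.identity(x)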
Code Example #4
    def transform(self, x, batch_size=32, learning_phase=0., verbose=0):
        # Forward pass with an explicit learning phase (0. = inference),
        # normalizing the prediction(s) to a list.
        h = super(Model, self).predict(x, batch_size, learning_phase, verbose)
        return _to_list(h)
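A hypothetical call site (model being an instance of whatever Model subclass defines transform; the point is that the result is always a list, even for single-output models):

outputs = model.transform(x_batch, batch_size=64)
for h in outputs:
    print(h.shape)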