Example #1
def test_check_bad_shape():
    a = np.random.random((2, 3, 5))
    with pytest.raises(Exception) as exc:
        check_loss_and_target_compatibility([a], [K.categorical_crossentropy],
                                            [(2, 3, 6)])

    assert "targets to have the same shape" in str(exc)
Example #2
def test_check_last_is_one():
    a = np.random.random((2, 3, 1))
    with pytest.raises(Exception) as exc:
        check_loss_and_target_compatibility([a], [K.categorical_crossentropy],
                                            [a.shape])

    assert "You are passing a target array" in str(exc)
Example #3
def _standardize_user_data(model,
                           x,
                           y,
                           sample_weight=None,
                           class_weight=None,
                           check_batch_dim=True,
                           batch_size=None):
    if not hasattr(model, 'optimizer'):
        raise Exception('You must compile a model before training/testing.'
                        ' Use `model.compile(optimizer, loss)`.')

    output_shapes = []
    for output_shape, loss_fn in zip(model.internal_output_shapes,
                                     model.loss_functions):
        if loss_fn.__name__ == 'sparse_categorical_crossentropy':
            output_shapes.append(output_shape[:-1] + (1, ))
        elif getattr(objectives, loss_fn.__name__, None) is None:
            output_shapes.append(None)
        else:
            output_shapes.append(output_shape)
    x = standardize_input_data(x,
                               model.input_names,
                               model.internal_input_shapes,
                               check_batch_dim=False,
                               exception_prefix='model input')
    y = standardize_input_data(y,
                               model.output_names,
                               output_shapes,
                               check_batch_dim=False,
                               exception_prefix='model target')
    sample_weights = standardize_sample_weights(sample_weight,
                                                model.output_names)
    class_weights = standardize_class_weights(class_weight, model.output_names)
    sample_weights = [
        standardize_weights(ref, sw, cw, mode) for (ref, sw, cw, mode) in zip(
            y, sample_weights, class_weights, model.sample_weight_modes)
    ]
    # We only need to comment out check_array_lengths(x, y, sample_weights) on the
    # next line to let the model compile and train.
    # check_array_lengths(x, y, sample_weights)

    check_loss_and_target_compatibility(y, model.loss_functions,
                                        model.internal_output_shapes)
    if model.stateful and batch_size:
        if x[0].shape[0] % batch_size != 0:
            raise Exception('In a stateful network, '
                            'you should only pass inputs with '
                            'a number of samples that can be '
                            'divided by the batch size. Found: ' +
                            str(x[0].shape[0]) + ' samples')
    return x, y, sample_weights
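The loop at the top of this function rewrites the expected target shape per loss: for sparse_categorical_crossentropy the targets carry class indices, so the last dimension of the expected shape is forced to 1. A standalone sketch of that tuple arithmetic (the shapes are assumed examples, not taken from a real model):

output_shape = (None, 10)                        # e.g. a 10-way softmax output
expected_target_shape = output_shape[:-1] + (1,)
assert expected_target_shape == (None, 1)        # targets hold one class index per sample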
Example #4
    def _standardize_user_data(self, x, y, sample_weight, class_weight,
                               input_names, input_shapes, output_names,
                               output_shapes, check_batch_dim, batch_size):
        """Standardizes given user data."""

        if not hasattr(self, 'optimizer'):
            raise RuntimeError('You must compile a model before training or '
                               'testing. Use `model.compile`.')

        x = self._convert_input_to_list(x, input_names)
        y = self._convert_input_to_list(y, output_names)

        # Calculates the number of training samples.
        nb_train_samples = None
        for arr in x + y:
            if is_numpy_array(arr):
                nb_train_samples = arr.shape[0]
                break
        else:
            raise ValueError('At least one of the fed inputs must be a Numpy '
                             'array (usually the real training data).')

        x = self._standardize_input_data(x,
                                         input_names,
                                         input_shapes,
                                         check_batch_dim=False,
                                         exception_prefix='model input')
        y = self._standardize_input_data(y,
                                         output_names,
                                         output_shapes,
                                         check_batch_dim=False,
                                         exception_prefix='model output')
        y_exp = [
            y_inst(nb_train_samples) if hasattr(y_inst, '__call__') else y_inst
            for y_inst in y
        ]
        sample_weights = keras_training.standardize_sample_weights(
            sample_weight, output_names)
        class_weights = keras_training.standardize_class_weights(
            class_weight, output_names)
        sample_weights = [
            keras_training.standardize_weights(ref, sw, cw, mode)
            for ref, sw, cw, mode in zip(y_exp, sample_weights, class_weights,
                                         self.sample_weight_modes)
        ]

        keras_training.check_loss_and_target_compatibility(
            y_exp, self.loss_functions, output_shapes)

        return x, y, sample_weights, nb_train_samples
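The sample-count loop in this variant relies on Python's for/else clause: the else branch runs only when the loop finishes without hitting break. A self-contained sketch of the same pattern (count_samples and the sample data are illustrative, not part of the example above):

import numpy as np

def count_samples(arrays):
    # for/else: the else clause runs only if no break occurred,
    # i.e. no numpy array was found among the inputs.
    for arr in arrays:
        if isinstance(arr, np.ndarray):
            nb_samples = arr.shape[0]
            break
    else:
        raise ValueError('At least one input must be a Numpy array.')
    return nb_samples

print(count_samples([None, np.zeros((32, 8))]))  # -> 32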
Example #5
def _standardize_user_data(model, x, y,
                           sample_weight=None, class_weight=None,
                           check_batch_dim=True, batch_size=None):
    if not hasattr(model, 'optimizer'):
        raise Exception('You must compile a model before training/testing.'
                        ' Use `model.compile(optimizer, loss)`.')

    output_shapes = []
    for output_shape, loss_fn in zip(model.internal_output_shapes, model.loss_functions):
        if loss_fn.__name__ == 'sparse_categorical_crossentropy':
            output_shapes.append(output_shape[:-1] + (1,))
        elif getattr(objectives, loss_fn.__name__, None) is None:
            output_shapes.append(None)
        else:
            output_shapes.append(output_shape)
    x = standardize_input_data(x, model.input_names,
                               model.internal_input_shapes,
                               check_batch_dim=False,
                               exception_prefix='model input')
    y = standardize_input_data(y, model.output_names,
                               output_shapes,
                               check_batch_dim=False,
                               exception_prefix='model target')
    sample_weights = standardize_sample_weights(sample_weight,
                                                model.output_names)
    class_weights = standardize_class_weights(class_weight,
                                              model.output_names)
    sample_weights = [standardize_weights(ref, sw, cw, mode)
                      for (ref, sw, cw, mode)
                      in zip(y, sample_weights, class_weights, model.sample_weight_modes)]

    # We only need to comment out check_array_lengths(x, y, sample_weights) on the
    # next line to let the model compile and train.
    # check_array_lengths(x, y, sample_weights)

    check_loss_and_target_compatibility(y, model.loss_functions, model.internal_output_shapes)
    if model.stateful and batch_size:
        if x[0].shape[0] % batch_size != 0:
            raise Exception('In a stateful network, '
                            'you should only pass inputs with '
                            'a number of samples that can be '
                            'divided by the batch size. Found: ' +
                            str(x[0].shape[0]) + ' samples')
    return x, y, sample_weights
Example #6
def test_check_bad_shape():
    a = np.random.random((2, 3, 5))
    with pytest.raises(Exception) as exc:
        check_loss_and_target_compatibility([a], [K.categorical_crossentropy], [(2, 3, 6)])

    assert "targets to have the same shape" in str(exc)
Example #7
def test_check_last_is_one():
    a = np.random.random((2, 3, 1))
    with pytest.raises(Exception) as exc:
        check_loss_and_target_compatibility([a], [K.categorical_crossentropy], [a.shape])

    assert "You are passing a target array" in str(exc)
Example #8
def test_check_not_failing():
    a = np.random.random((2, 1, 3))
    check_loss_and_target_compatibility([a], [K.categorical_crossentropy], [a.shape])
    check_loss_and_target_compatibility([a], [K.categorical_crossentropy], [(2, None, 3)])
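The second call passes (2, None, 3) as the expected shape: a None entry acts as a wildcard, so only the concrete dimensions are compared against the target array. A minimal re-implementation of that comparison (illustrative only, not the library code):

def shapes_compatible(actual, expected):
    # None in the expected shape matches any size in that position.
    return len(actual) == len(expected) and all(
        e is None or a == e for a, e in zip(actual, expected))

assert shapes_compatible((2, 1, 3), (2, None, 3))
assert not shapes_compatible((2, 3, 5), (2, 3, 6))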
Example #9
def test_check_not_failing():
    a = np.random.random((2, 1, 3))
    check_loss_and_target_compatibility([a], [K.categorical_crossentropy],
                                        [a.shape])
    check_loss_and_target_compatibility([a], [K.categorical_crossentropy],
                                        [(2, None, 3)])
Example #10
def test_check_not_last_is_one():
    a = np.random.random((2, 1, 3))
    check_loss_and_target_compatibility([a], [K.categorical_crossentropy],
                                        [a.shape])
Example #11
def test_check_not_last_is_one():
    a = np.random.random((2, 1, 3))
    check_loss_and_target_compatibility([a], [K.categorical_crossentropy], [a.shape])