Example #1
0
def validation(
        nn: NeuralNetwork,
        training_set: Sequence[Pattern],
        validation_set: Sequence[Pattern],
        error_calculator: ErrorCalculator = ErrorCalculator.MSE
) -> ValidationResult:
    """
    Fit the neural network and score it on its validation curve.

    :param nn: the neural network to train; its ``error_calculator`` is
        temporarily swapped for *error_calculator* and restored afterwards.
    :param training_set: patterns the network is fitted on.
    :param validation_set: patterns used to build the validation curve.
    :param error_calculator: metric applied to the validation curve
        (defaults to MSE).
    :return: the best score and the respective (1-based) epoch of that score.
    """
    old_error = nn.error_calculator
    nn.error_calculator = error_calculator
    try:
        # training_curve=False: only the validation curve is needed here.
        nn.fit(training_set, validation_set, training_curve=False)
        learning_curve_validation = nn.validation_curve

        # choose() picks the best point of the curve for this metric.
        idx, score = error_calculator.choose(learning_curve_validation)
    finally:
        # Restore the caller's metric even if fitting or scoring raises,
        # so a failed validation never leaves `nn` in a modified state.
        nn.error_calculator = old_error

    return ValidationResult(
        epoch=idx + 1,  # epochs are reported 1-based
        score_validation=score,
    )
Example #2
0
    def test_monk1(self):
        """Train on MONK-1 and expect perfect classification on train and test."""
        nn = NN(seed=4,
                epochs_limit=400,
                learning_algorithm=batch,
                error_calculator=ErrorCalculator.MSE,
                architecture=MultilayerPerceptron(
                    4,
                    activation=sigmoid,
                    activation_hidden=relu,
                    eta=0.5,
                    alambd=0,
                    alpha=0.8,
                ))

        train_data, test_data = read_monk(1)

        nn.fit(train_data)

        # Misclassification curves (kept: computing them mirrors the
        # original test even though the values are not asserted on).
        train_errs = nn.compute_learning_curve(train_data, ErrorCalculator.MIS)
        test_errs = nn.compute_learning_curve(test_data, ErrorCalculator.MIS)

        def count_misses(dataset):
            # (round(output) - target)^2 contributes 1 per misclassified
            # pattern, 0 otherwise, so the sum is the miss count.
            return sum((round(nn(x)[0][-1]) - d[0]) ** 2 for x, d in dataset)

        error_train = count_misses(train_data)
        error_test = count_misses(test_data)

        print(
            'train:',
            str(((len(train_data) - error_train) / len(train_data)) * 100) +
            '%')
        print(
            'test: ',
            str(((len(test_data) - error_test) / len(test_data)) * 100) + '%')

        # Zero misses expected on both splits.
        self.assertEqual(error_train, 0)
        self.assertEqual(error_test, 0)

        # Cross-check via the network's own MIS error computation.
        nn.error_calculator = ErrorCalculator.MIS
        self.assertEqual(nn.compute_error(train_data), 0)
        self.assertEqual(nn.compute_error(test_data), 0)
Example #3
0
        learning_algorithm=batch,
        n_init=1,
        error_calculator=ErrorCalculator.MSE,
        architecture=MultilayerPerceptron(
            size_hidden_layers=(2, 2),
            eta=eta,
            alpha=alpha,
            alambd=alambd,
            activation=tanh_classification,
            activation_hidden=relu,
        ),
    )

    nn.fit(train_set)

    nn.error_calculator = ErrorCalculator.MSE
    print('mse', nn.compute_error(train_set), nn.compute_error(validation_set),
          nn.compute_error(test_data))

    nn.error_calculator = ErrorCalculator.MEE
    print('mee', nn.compute_error(train_set), nn.compute_error(validation_set),
          nn.compute_error(test_data))

    nn.error_calculator = ErrorCalculator.ACC
    print('acc', nn.compute_error(train_set), nn.compute_error(validation_set),
          nn.compute_error(test_data))

    # MSE
    nn.error_calculator = ErrorCalculator.MSE
    training_curve = nn.compute_learning_curve(train_set)
    validation_curve = nn.compute_learning_curve(validation_set)