Example #1
# Assumed import paths for the helpers used below (adjust to the actual package layout)
from NumPyNet.utils import from_categorical
from NumPyNet.metrics import mean_accuracy_score


def accuracy(y_true, y_pred):
    '''
    Temporary metric to work around the missing "from_categorical" in the standard metrics
    '''
    # Convert one-hot encoded arrays back to integer labels before scoring
    truth = from_categorical(y_true)
    predicted = from_categorical(y_pred)
    return mean_accuracy_score(truth, predicted)
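The test in Example #2 below checks from_categorical against tf.math.argmax, so the helper presumably inverts a one-hot encoding by taking the argmax over the last axis. A minimal sketch of such a helper (an illustration, not the library's actual implementation) could look like this:

import numpy as np

def from_categorical_sketch(categorical):
    '''
    Invert a one-hot (categorical) encoding back to integer labels
    by taking the index of the maximum along the last axis.
    '''
    return np.argmax(categorical, axis=-1)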
Example #2
    # Assumes module-level imports: numpy as np, tensorflow as tf,
    # and the NumPy implementations of to_categorical / from_categorical under test
    def test_from_categorical(self, size, num_labels):

        # Random float labels in [0, num_labels); both encoders are expected
        # to cast them to integer classes
        label = np.random.uniform(low=0, high=num_labels, size=(size, ))

        # One-hot encode with tf.keras and with the NumPy implementation
        categorical_tf = tf.keras.utils.to_categorical(label, num_classes=None)
        categorical_np = to_categorical(label)

        np.testing.assert_allclose(categorical_tf, categorical_np)

        # Invert the encoding: argmax in TensorFlow vs from_categorical
        fromlabel_tf = tf.math.argmax(categorical_tf, axis=-1)
        fromlabel_np = from_categorical(categorical_np)

        np.testing.assert_allclose(fromlabel_tf, fromlabel_np)
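For reference, a one-hot encoder equivalent to what this test exercises can be sketched in plain NumPy; the library's actual to_categorical may differ in details such as dtype handling or multi-dimensional input support:

import numpy as np

def to_categorical_sketch(label, num_classes=None):
    '''
    One-hot encode integer (or float, truncated) labels, mirroring the
    behaviour of tf.keras.utils.to_categorical on a 1D label array.
    '''
    label = np.asarray(label, dtype=int).ravel()
    if num_classes is None:
        num_classes = label.max() + 1
    categorical = np.zeros(shape=(label.size, num_classes), dtype=float)
    categorical[np.arange(label.size), label] = 1.
    return categorical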
Example #3
    # model.add(Cost_layer(cost_type=cost_type.mse))

    # model.compile(optimizer=SGD(lr=0.01, decay=0., lr_min=0., lr_max=np.inf))
    model.compile(optimizer=Adam(), metrics=[accuracy])

    print('*************************************')
    print('\n Total input dimension: {}'.format(X_train.shape), '\n')
    print('**************MODEL SUMMARY***********')

    model.summary()

    print('\n***********START TRAINING***********\n')

    # Fit the model on the training set
    model.fit(X=X_train, y=y_train, max_iter=10, verbose=True)

    print('\n***********START TESTING**************\n')

    # Test the prediction with timing
    loss, out = model.evaluate(X=X_test, truth=y_test, verbose=True)

    truth = from_categorical(y_test)
    predicted = from_categorical(out)
    accuracy = mean_accuracy_score(truth, predicted)

    print('\nLoss Score: {:.3f}'.format(loss))
    print('Accuracy Score: {:.3f}'.format(accuracy))
    # SGD : best score I could obtain was 94% with 10 epochs, lr = 0.01
    # Momentum : best score I could obtain was 93% with 10 epochs
    # Adam : best score I could obtain was 95% with 10 epochs
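The scoring helper used here and in Example #1, mean_accuracy_score, is not shown in these snippets. A minimal sketch, assuming it reduces to the plain fraction of matching labels (the library's metric may instead average per-class accuracies), would be:

import numpy as np

def mean_accuracy_score_sketch(y_true, y_pred):
    '''
    Fraction of predictions that match the ground-truth labels.
    '''
    y_true = np.asarray(y_true).ravel()
    y_pred = np.asarray(y_pred).ravel()
    return np.mean(y_true == y_pred)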