Example #1
    def test_scalar_weighted(self):
        cce_obj = losses.SparseCategoricalCrossentropy()
        y_true = K.constant([[0], [1], [2]])
        y_pred = K.constant([[.9, .05, .05], [.5, .89, .6], [.05, .01, .94]])
        loss = cce_obj(y_true, y_pred, sample_weight=2.3)
        assert np.isclose(K.eval(loss), .7449, atol=1e-3)

        # Test with logits.
        logits = K.constant([[8., 1., 1.], [0., 9., 1.], [2., 3., 5.]])
        cce_obj = losses.SparseCategoricalCrossentropy(from_logits=True)
        loss = cce_obj(y_true, logits, sample_weight=2.3)
        assert np.isclose(K.eval(loss), .1317, atol=1e-3)
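
A scalar sample_weight simply scales the reduced loss, so the 0.7449 here is 2.3 times the unweighted mean of 0.3239 asserted in Example #2 below. A minimal standalone check of that relationship in plain NumPy (not part of the test suite), assuming Keras renormalizes each probability row before taking -log of the true-class entry:

    import numpy as np

    y_true = [0, 1, 2]
    y_pred = np.array([[.9, .05, .05], [.5, .89, .6], [.05, .01, .94]])

    # Rows are renormalized to probabilities; per-sample loss is -log(p[true]).
    probs = y_pred / y_pred.sum(axis=1, keepdims=True)
    per_sample = -np.log(probs[np.arange(3), y_true])

    print(per_sample.mean())        # ~0.3239 (unweighted mean)
    print(per_sample.mean() * 2.3)  # ~0.7449 (scalar weight just scales it)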
Example #2
    def test_unweighted(self):
        cce_obj = losses.SparseCategoricalCrossentropy()
        y_true = K.constant([0, 1, 2])
        y_pred = K.constant([[.9, .05, .05], [.5, .89, .6], [.05, .01, .94]])
        loss = cce_obj(y_true, y_pred)
        assert np.isclose(K.eval(loss), .3239, atol=1e-3)

        # Test with logits.
        logits = K.constant([[8., 1., 1.], [0., 9., 1.], [2., 3., 5.]])
        cce_obj = losses.SparseCategoricalCrossentropy(from_logits=True)
        loss = cce_obj(y_true, logits)
        assert np.isclose(K.eval(loss), .0573, atol=1e-3)
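
With from_logits=True the loss applies softmax internally, so the 0.0573 can be reproduced by hand: softmax the logits, then average -log of the true-class probabilities. A minimal NumPy sketch:

    import numpy as np

    y_true = np.array([0, 1, 2])
    logits = np.array([[8., 1., 1.], [0., 9., 1.], [2., 3., 5.]])

    # from_logits=True: softmax first, then the mean of -log(p[true_class]).
    soft = np.exp(logits) / np.exp(logits).sum(axis=1, keepdims=True)
    print(np.mean(-np.log(soft[np.arange(3), y_true])))  # ~0.0573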
Example #3
    def test_all_correct_unweighted(self):
        y_true = K.constant([[0], [1], [2]])
        y_pred = K.constant([[1., 0., 0.], [0., 1., 0.], [0., 0., 1.]])
        cce_obj = losses.SparseCategoricalCrossentropy()
        loss = cce_obj(y_true, y_pred)
        assert np.isclose(K.eval(loss), 0.0, atol=1e-3)

        # Test with logits.
        logits = K.constant([[10., 0., 0.], [0., 10., 0.], [0., 0., 10.]])
        cce_obj = losses.SparseCategoricalCrossentropy(from_logits=True)
        loss = cce_obj(y_true, logits)
        assert np.isclose(K.eval(loss), 0.0, atol=1e-3)
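
The atol=1e-3 matters in the logits case: finite logits can never drive the loss exactly to zero, only close to it. A quick check on the first row:

    import numpy as np

    logits = np.array([10., 0., 0.])
    p = np.exp(logits[0]) / np.exp(logits).sum()
    print(-np.log(p))  # ~9.1e-5: tiny but nonzero, hence the tolerance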
Example #4
    def test_sample_weighted(self):
        cce_obj = losses.SparseCategoricalCrossentropy()
        y_true = K.constant([[0], [1], [2]])
        y_pred = K.constant([[.9, .05, .05], [.5, .89, .6], [.05, .01, .94]])
        sample_weight = K.constant([[1.2], [3.4], [5.6]], shape=(3, 1))
        loss = cce_obj(y_true, y_pred, sample_weight=sample_weight)
        assert np.isclose(K.eval(loss), 1.0696, atol=1e-3)

        # Test with logits.
        logits = K.constant([[8., 1., 1.], [0., 9., 1.], [2., 3., 5.]])
        cce_obj = losses.SparseCategoricalCrossentropy(from_logits=True)
        loss = cce_obj(y_true, logits, sample_weight=sample_weight)
        assert np.isclose(K.eval(loss), 0.31829, atol=1e-3)
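
With the default SUM_OVER_BATCH_SIZE reduction, per-sample weights multiply the individual losses before the division by batch size. Reusing the per-sample values derived in Example #1's sketch:

    import numpy as np

    per_sample = np.array([0.10536, 0.80463, 0.06188])  # -log(p[true]) per row
    weights = np.array([1.2, 3.4, 5.6])
    print((per_sample * weights).sum() / 3)  # ~1.0696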
Example #5
    def test_no_reduction(self):
        y_true = K.constant([[0], [1], [2]])
        logits = K.constant([[8., 1., 1.], [0., 9., 1.], [2., 3., 5.]])
        cce_obj = losses.SparseCategoricalCrossentropy(
            from_logits=True, reduction=Reduction.NONE)
        loss = cce_obj(y_true, logits)
        assert np.allclose(K.eval(loss), (0.001822, 0.000459, 0.169846), atol=1e-3)
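
Reduction.NONE skips the final reduction entirely and returns one loss per sample; averaging the three values reproduces the 0.0573 asserted in Example #2:

    import numpy as np

    per_sample = np.array([0.001822, 0.000459, 0.169846])
    print(per_sample.mean())  # ~0.0573, the default-reduction value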
Example #6
import tensorflow as tf
from tensorflow.keras import layers, losses
import matplotlib.pyplot as plt

# `model`, `c_list`, `train`, and `test` are defined earlier in the script.

# Flatten the convolutional feature maps into a 1-D vector.
model.add(layers.Flatten())

# Fully connected classifier head.
model.add(layers.Dense(4096, activation='tanh'))
# model.add(layers.Dropout(0.5))
model.add(layers.Dense(4096))
model.add(layers.LeakyReLU())
# The loss below is built with from_logits=True, so the final layer must
# emit raw logits; adding a softmax activation here would squash them twice.
model.add(layers.Dense(len(c_list)))

model.summary()

with tf.device('/device:GPU:0'):
    model.compile(optimizer='SGD',
                  loss=losses.SparseCategoricalCrossentropy(from_logits=True),
                  metrics=['sparse_categorical_accuracy'])

    history = model.fit(train, epochs=30, validation_data=test)

plt.figure(figsize=(20, 5))

plt.subplot(121)
# plt.plot(history.history['sparse_categorical_accuracy'], label = 'accuracy')
plt.plot(history.history['val_sparse_categorical_accuracy'],
         label='val_accuracy')
plt.xlabel('Epoch')
plt.ylabel('Accuracy')
# plt.ylim([0.5, 1])
plt.legend(loc='lower right')
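
The output layer and the loss must agree on what the network emits. The head above ends in raw logits to match from_logits=True; the equivalent probability-output variant, if you prefer a softmax head, is:

    model.add(layers.Dense(len(c_list), activation='softmax'))

    model.compile(optimizer='SGD',
                  loss=losses.SparseCategoricalCrossentropy(from_logits=False),
                  metrics=['sparse_categorical_accuracy'])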
Example #7
    def test_config(self):
        cce_obj = losses.SparseCategoricalCrossentropy(
            reduction=losses_utils.Reduction.SUM, name='scc')
        assert cce_obj.name == 'scc'
        assert cce_obj.reduction == losses_utils.Reduction.SUM
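
Reduction.SUM adds the per-sample losses instead of averaging them, so on Example #2's data it would return roughly three times the default value:

    import numpy as np

    per_sample = np.array([0.10536, 0.80463, 0.06188])  # from Example #2's y_pred
    print(per_sample.sum())   # ~0.9719 with Reduction.SUM
    print(per_sample.mean())  # ~0.3240 with the default SUM_OVER_BATCH_SIZE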