Example No. 1
    def test_softmax_cross_entropy_loss(self):
        # Uniform logits over 10 classes: the softmax is uniform, so the expected
        # cross-entropy loss is ln(10) regardless of which label is correct.
        logits = np.ones((1, 10))
        labels = np.array([5])
        loss, dlogits = ops.softmax_cross_entropy_loss(logits, labels)
        self.assertTrue(np.allclose(loss, np.log(10)))

        # Check the analytic gradient against a numerical estimate.
        numeric_dlogits = get_numerical_gradient(
            lambda x: ops.softmax_cross_entropy_loss(x, labels)[0], logits)
        self.assertTrue(np.allclose(numeric_dlogits, dlogits))
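The test above relies on `ops.softmax_cross_entropy_loss` returning both the mean loss and its gradient with respect to the logits, but that op is not shown on this page. A minimal sketch consistent with the assertions (uniform logits over 10 classes giving a loss of ln(10), gradient equal to (softmax - one_hot) / N) might look like the following; the name and signature are taken from the tests, and the real implementation in `ops` may differ.

import numpy as np

def softmax_cross_entropy_loss(logits, labels):
    # Sketch only: mean softmax cross-entropy over the batch and its gradient.
    n = logits.shape[0]
    # Shift by the row-wise max for numerical stability before exponentiating.
    shifted = logits - np.max(logits, axis=1, keepdims=True)
    probs = np.exp(shifted)
    probs /= np.sum(probs, axis=1, keepdims=True)
    # Average negative log-likelihood of the correct classes.
    loss = -np.mean(np.log(probs[np.arange(n), labels]))
    # Gradient of the mean loss w.r.t. the logits: (softmax - one_hot) / N.
    dlogits = probs.copy()
    dlogits[np.arange(n), labels] -= 1.0
    dlogits /= n
    return loss, dlogits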
Example No. 2
    def _get_loss(self, mat_x, labels):
        """Calculates the loss of the current model for the given data, using a
    cross-entropy loss function.

    Args:
      mat_x (:obj:`numpy.ndarray`): The input data (image embeddings) to test, as a matrix of shape
        ``NxD``, where ``N`` is number of inputs to test and ``D`` is the dimension of the
        input feature (length of the feature vector).
      labels (:obj:`numpy.ndarray`): An array of the correct label indices that correspond to the
        test data passed in ``mat_x`` (class label index in one-hot vector). For example, if
        ``mat_x`` is just one image embedding, this array has one number for that image's correct
        label index.

    Returns:
      A 2-tuple with the cross-entropy loss (float) and gradients (a dictionary with ``'mat_w'``
      and ``'vec_b'``, for weight and bias, respectively).
    """
        mat_w = self.params['mat_w']
        vec_b = self.params['vec_b']
        scores, fc_cached = ops.fc_forward(mat_x, mat_w, vec_b)
        # Track the minimum and maximum scores seen so far.
        self.min_score = np.min([self.min_score, np.min(scores)])
        self.max_score = np.max([self.max_score, np.max(scores)])
        loss, dscores = ops.softmax_cross_entropy_loss(scores, labels)
        # Add the L2 regularization term on the weights to the data loss.
        loss += 0.5 * self.reg * np.sum(mat_w * mat_w)

        grads = {}
        _, grads['mat_w'], grads['vec_b'] = ops.fc_backward(dscores, fc_cached)
        # Add the gradient of the L2 regularization term.
        grads['mat_w'] += self.reg * mat_w

        return loss, grads
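`_get_loss` (and the test below) also depends on `ops.fc_forward` and `ops.fc_backward`, which are not shown here. A minimal sketch of a fully connected layer with that interface, assuming `mat_x` has shape ``NxD``, `mat_w` has shape ``DxC`` and `vec_b` has length ``C`` as in the tests on this page, could be:

import numpy as np

def fc_forward(mat_x, mat_w, vec_b):
    # Sketch only: affine scores = X @ W + b, plus a cache for the backward pass.
    mat_y = mat_x @ mat_w + vec_b
    cached = (mat_x, mat_w, vec_b)
    return mat_y, cached

def fc_backward(dmat_y, cached):
    # Sketch only: backprop the upstream gradient through the affine layer.
    mat_x, mat_w, _ = cached
    dmat_x = dmat_y @ mat_w.T        # gradient w.r.t. the inputs
    dmat_w = mat_x.T @ dmat_y        # gradient w.r.t. the weights
    dvec_b = np.sum(dmat_y, axis=0)  # gradient w.r.t. the bias
    return dmat_x, dmat_w, dvec_b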
Example No. 3
    def test_fc_backward(self):
        np.random.seed(12345)
        mat_x = np.random.randn(5, 3)
        mat_w = np.random.randn(3, 10)
        vec_b = np.random.randn(10)
        mat_y, cached = ops.fc_forward(mat_x, mat_w, vec_b)
        labels = np.random.randint(10, size=5)
        _, dlogits = ops.softmax_cross_entropy_loss(mat_y, labels)
        dmat_x, dmat_w, dvec_b = ops.fc_backward(dlogits, cached)

        # Chain the FC layer and the softmax loss together.
        # The `i_` prefix marks the inner function's own ("internal") arguments.
        def chained_loss(i_mat_x, i_mat_w, i_vec_b, i_labels):
            i_mat_y, _ = ops.fc_forward(i_mat_x, i_mat_w, i_vec_b)
            loss, _ = ops.softmax_cross_entropy_loss(i_mat_y, i_labels)
            return loss

        numeric_dmat_x = get_numerical_gradient(
            lambda var_mat_x: chained_loss(var_mat_x, mat_w, vec_b, labels),
            mat_x)
        self.assertTrue(np.allclose(numeric_dmat_x, dmat_x))

        numeric_dmat_w = get_numerical_gradient(
            lambda var_mat_w: chained_loss(mat_x, var_mat_w, vec_b, labels),
            mat_w)
        self.assertTrue(np.allclose(numeric_dmat_w, dmat_w))

        numeric_dvec_b = get_numerical_gradient(
            lambda var_vec_b: chained_loss(mat_x, mat_w, var_vec_b, labels),
            vec_b)
        self.assertTrue(np.allclose(numeric_dvec_b, dvec_b))
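All of the gradient checks on this page call `get_numerical_gradient`, which is also not shown. A minimal sketch, assuming it takes a scalar-valued function and the array to perturb and returns a finite-difference estimate of the gradient, might be:

import numpy as np

def get_numerical_gradient(f, x, eps=1e-6):
    # Sketch only: central-difference estimate of df/dx for a scalar-valued f.
    grad = np.zeros_like(x, dtype=np.float64)
    it = np.nditer(x, flags=['multi_index'])
    while not it.finished:
        idx = it.multi_index
        orig = x[idx]
        x[idx] = orig + eps   # evaluate f at x + eps along this coordinate
        f_plus = f(x)
        x[idx] = orig - eps   # evaluate f at x - eps along this coordinate
        f_minus = f(x)
        x[idx] = orig         # restore the perturbed entry
        grad[idx] = (f_plus - f_minus) / (2 * eps)
        it.iternext()
    return grad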