Example no. 1
0
    def __call__(self, weights, xs, dlogits):
        """Finite-difference check of the backward pass' weight gradient.

        For each weight dimension, perturbs that weight by ``self.delta``,
        recomputes the forward pass, and compares the numeric directional
        derivative against the analytic gradient from the backward pass.
        """
        fwd = solution.LinearLayerForward()
        bwd = solution.LinearLayerBackward()

        context = dict()
        base_logits = fwd(weights, xs, ctx=context)
        grad_w = bwd(context, dlogits)

        for dim in range(self.num_features):
            # One-hot perturbation of a single weight dimension.
            perturbation = np.zeros_like(weights)
            perturbation[dim] = self.delta

            shifted_logits = fwd(weights + perturbation, xs)
            numeric = (shifted_logits - base_logits).dot(dlogits) / self.delta
            assert np.all(np.absolute(numeric - grad_w[dim]) < 1e-5), \
                'Value mismatch in dimension {}.'.format(dim)
Example no. 2
0
    def __call__(self, weights, xs):
        """Check that the forward pass is linear in both its arguments.

        Draws random data/weight pairs and random coefficients, then verifies
        f(k1*a + k2*b) == k1*f(a) + k2*f(b) separately for the data argument
        and for the weights argument.
        """
        fwd = solution.LinearLayerForward()

        # Linearity with respect to the input data.
        xs_a = np.random.normal(size=(self.batch_size, self.num_features))
        xs_b = np.random.normal(size=(self.batch_size, self.num_features))
        ka = np.random.normal() * 50
        kb = np.random.normal() * 50
        combined = fwd(weights, ka * xs_a + kb * xs_b)
        separate = ka * fwd(weights, xs_a) + kb * fwd(weights, xs_b)
        assert np.all(np.absolute(combined - separate) < 1e-10), \
            'The linear layer should be linear with the input data.'

        # Linearity with respect to the weights.
        w_a = np.random.normal(size=(self.num_features, ))
        w_b = np.random.normal(size=(self.num_features, ))
        ka = np.random.normal() * 50
        kb = np.random.normal() * 50
        combined = fwd(ka * w_a + kb * w_b, xs)
        separate = ka * fwd(w_a, xs) + kb * fwd(w_b, xs)
        assert np.all(np.absolute(combined - separate) < 1e-10), \
            'The linear layer should be linear with the weights.'
Example no. 3
0
 def __init__(self, num_features, init_weight_scale=1.0):
     """Initialize a linear layer with randomly drawn weights.

     Args:
         num_features: Number of input features (length of the weight vector).
         init_weight_scale: Standard deviation of the zero-mean normal
             distribution the initial weights are drawn from.
     """
     # BUG FIX: np.random.normal's first positional parameter is `loc`
     # (the mean), so init_weight_scale was being used as the MEAN of the
     # distribution, not its scale. Pass it explicitly as the std dev.
     self.weights = np.random.normal(loc=0.0, scale=init_weight_scale,
                                     size=(num_features, ))
     self._forward = solution.LinearLayerForward()
     self._backward = solution.LinearLayerBackward()
     self._update = solution.LinearLayerUpdate()