# Example #1
# 0
def update_step(x_batch, y_batch, model, learning_rate):
    """Performs one single update step (i.e. forward then backward).

    Args:
        x_batch(numpy.ndarray): input data of dimension (N, ndims).
        y_batch(numpy.ndarray): label data of dimension (N, 1).
        model(LinearModel): Initialized linear model.
        learning_rate(float): step size applied to the gradient update.
    """
    # Forward pass: predictions for the current batch.
    f = LinearRegression.forward(model, x_batch)
    # Backward pass: raw gradient of the loss w.r.t. model.w.
    # BUG FIX: the original multiplied by learning_rate here AND again in
    # the update below, so the effective step was learning_rate**2 * grad.
    grad = LinearRegression.backward(model, f, y_batch)
    # Standard gradient-descent update.
    model.w = model.w - learning_rate * grad
    def test_input_output(self):
        """Check forward/backward/loss output shapes for a 10-dim model."""
        model = LinearRegression(10)

        # Batch of 4 zero examples, each with 10 features.
        inputs = np.zeros([4, 10])
        labels = np.zeros([4, ])

        # Forward pass yields one prediction per example.
        predictions = model.forward(inputs)
        self.assertEqual(predictions.shape, (4,))

        # Gradient spans 10 weights plus one bias term.
        grad = model.backward(predictions, labels)
        self.assertEqual(grad.shape, (11,))

        # Loss reduces the batch to a scalar.
        batch_loss = model.loss(predictions, labels)
        self.assertEqual(batch_loss.shape, ())