def step(self, net: NeuralNet) -> None:
    """Apply one stochastic-gradient-descent update to *net*.

    Every parameter is updated in place: ``param -= self.lr * grad``.
    The learning rate ``self.lr`` is constant per call — it does not
    decay here (the original inline comment claiming a "declining
    learning rate" was incorrect and has been removed).

    Args:
        net: Network exposing ``params_and_grads()``, an iterable of
            ``(parameter, gradient)`` pairs of matching shape.
    """
    for param, grad in net.params_and_grads():
        # In-place subtraction so the net's own parameter arrays are
        # mutated directly (no reassignment, callers keep their references).
        param -= self.lr * grad