Example #1
0
# NOTE(review): `lossfunc`, `model`, `X`, and `y` are defined earlier in the
# file (not visible in this chunk); `extend` registers the loss module with
# BackPACK so extra quantities can be computed during backward.
lossfunc = extend(lossfunc)

# %%
# We can now evaluate the loss and do a backward pass with Backpack
# -----------------------------------------------------------------

# Forward pass through the extended model and loss function.
loss = lossfunc(model(X), y)

# Activate several BackPACK extensions at once; each one stores an extra
# attribute (named after the extension) on every parameter during backward.
with backpack(
    extensions.BatchGrad(),
    extensions.Variance(),
    extensions.SumGradSquared(),
    extensions.BatchL2Grad(),
    extensions.DiagGGNMC(mc_samples=1),
    extensions.DiagGGNExact(),
    extensions.DiagHessian(),
    extensions.KFAC(mc_samples=1),
    extensions.KFLR(),
    extensions.KFRA(),
):
    loss.backward()

# %%
# And here are the results
# -----------------------------------------------------------------

# The extensions above attached e.g. `grad_batch` (from BatchGrad) and
# `variance` (from Variance) to each parameter; print their shapes.
for name, param in model.named_parameters():
    print(name)
    print(".grad.shape:             ", param.grad.shape)
    print(".grad_batch.shape:       ", param.grad_batch.shape)
    print(".variance.shape:         ", param.variance.shape)
Example #2
0
def test_interface_diag_h():
    """Run the extension-interface check against BackPACK's DiagHessian."""
    extension = new_ext.DiagHessian()
    interface_test(extension)
 def diag_h(self):
     """Return the Hessian diagonal of every model parameter via BackPACK."""
     with backpack(new_ext.DiagHessian()):
         # forward_pass() yields a 3-tuple; only the loss (last item) is needed.
         loss = self.problem.forward_pass()[2]
         loss.backward()
         diagonals = [param.diag_h for param in self.problem.model.parameters()]
     return diagonals
 def diag_h(self):
     """Backward pass under DiagHessian; gather `diag_h` from each parameter."""
     with backpack(new_ext.DiagHessian()):
         self.loss().backward()
         diagonals = []
         for param in self.model.parameters():
             diagonals.append(param.diag_h)
     return diagonals
Example #5
0
"""
Compute the gradient with PyTorch and the Hessian diagonal with BackPACK.
"""

from torch.nn import CrossEntropyLoss, Flatten, Linear, Sequential

from backpack import backpack, extend, extensions
from backpack.utils.examples import load_mnist_data

B = 4
X, y = load_mnist_data(B)

print("# Gradient with PyTorch, Hessian diagonal with BackPACK | B =", B)

model = Sequential(Flatten(), Linear(784, 10),)
lossfunc = CrossEntropyLoss()

model = extend(model)
lossfunc = extend(lossfunc)

loss = lossfunc(model(X), y)

with backpack(extensions.DiagHessian()):
    loss.backward()

for name, param in model.named_parameters():
    print(name)
    print(".grad.shape:             ", param.grad.shape)
    print(".diag_h.shape:           ", param.diag_h.shape)
Example #6
0
 def diag_h(self) -> List[Tensor]:  # noqa:D102
     # Backward pass with BackPACK's DiagHessian active, then collect the
     # per-parameter `diag_h` attributes through the problem helper.
     with backpack(new_ext.DiagHessian()):
         # forward_pass() yields a 3-tuple; only the loss (last item) is used.
         loss = self.problem.forward_pass()[2]
         loss.backward()
     return self.problem.collect_data("diag_h")
Example #7
0
# NOTE(review): `CockpitPlotter`, `fmnist_data`, `model`, `loss_fn`,
# `individual_loss_fn`, `opt`, `cockpit`, and `extensions` are set up earlier
# in the file (not visible in this chunk).
plotter = CockpitPlotter()

# Main training loop
# NOTE(review): `max_steps` is never checked within this visible span —
# presumably a stopping condition follows further below; confirm the loop
# actually terminates after 50 steps.
max_steps, global_step = 50, 0
for inputs, labels in iter(fmnist_data):
    opt.zero_grad()

    # forward pass
    outputs = model(inputs)
    loss = loss_fn(outputs, labels)
    # Per-sample losses, passed to Cockpit so it can compute its quantities.
    losses = individual_loss_fn(outputs, labels)

    # backward pass
    # The cockpit context wraps backward() so BackPACK extensions run and
    # Cockpit can record its instruments for this step.
    with cockpit(
            global_step,
            extensions.DiagHessian(
            ),  # Other BackPACK quantities can be computed as well
            info={
                "batch_size": inputs.shape[0],
                "individual_losses": losses,
                "loss": loss,
                "optimizer": opt,
            },
    ):
        # create_graph is requested only on steps where Cockpit needs
        # second-order information.
        loss.backward(create_graph=cockpit.create_graph(global_step))

    # optimizer step
    opt.step()
    global_step += 1

    print(f"Step: {global_step:5d} | Loss: {loss.item():.4f}")