Example #1
             merge_mode="concat",
             batchnorm=True)
plot_net_rescue(model, shape=(1, 1, 64, 64, 64), outfileroot=None)

############################################################################
# Inspect a network
# -----------------
#
# The module provides utilities to easily inspect some layers of the network.

from pynet.utils import test_model
from pprint import pprint
import numpy as np
import torch
from pynet.utils import get_named_layers
from pynet.utils import layer_at
from pynet.plotting import plot_data

# Run a dummy forward pass to check that the network produces an output of
# the expected shape.
out = test_model(model, shape=(1, 1, 64, 64, 64))
# List the named layers of the network.
layers = get_named_layers(model)
pprint(layers)
# Retrieve the activation and the weights of one specific convolution layer.
hook_x, weight = layer_at(model=model,
                          layer_name="down.1.doubleconv.conv1-8.16",
                          x=torch.FloatTensor(
                              np.random.random((1, 1, 64, 64, 64))))
print(hook_x.shape)
print(weight.shape)
# Display the first channel of the captured activation.
plot_data(hook_x[:, :1])

# import matplotlib.pyplot as plt
# plt.show()
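
############################################################################
# For reference, the sketch below shows how the same kind of activation can
# be captured with a plain PyTorch forward hook. It is only an illustration:
# it hooks the first 3D convolution found in the network, which is an
# assumption, not the exact layer targeted above.

activations = {}

def save_activation(name):
    def hook(module, inputs, output):
        activations[name] = output.detach()
    return hook

for name, module in model.named_modules():
    if isinstance(module, torch.nn.Conv3d):
        handle = module.register_forward_hook(save_activation(name))
        break

with torch.no_grad():
    model(torch.FloatTensor(np.random.random((1, 1, 64, 64, 64))))
handle.remove()
print({key: val.shape for key, val in activations.items()})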
Example #2
#############################################################################
# Load the model
# --------------
#
# Load the pretrained model and freeze all of its weights, then replace the
# last linear layer.

import torch.nn as nn
from pynet.utils import get_named_layers
# 'classifier' and 'freeze_layers' are assumed to be importable from pynet
# (e.g. from pynet import classifier; from pynet.utils import freeze_layers).

cl = classifier.ResNet18(
    num_classes=1000,
    pretrained="/neurospin/nsap/torch/models/resnet18-5c106cde.pth",
    batch_size=50,
    optimizer_name="Adam",
    learning_rate=1e-4,
    loss_name="NLLLoss",
    metrics=["accuracy"])
print(cl.model)
# Summarize the top-level layers of the loaded network.
layers = get_named_layers(cl.model, allowed_layers=[nn.Module], resume=True)
print(layers.keys())
# Freeze every block except the final fully-connected layer.
to_freeze_layers = [
    "conv1", "bn1", "relu", "maxpool", "layer1", "layer2", "layer3", "layer4"
]
freeze_layers(cl.model, to_freeze_layers)
# Replace the final linear layer with a new 9-class output head.
nb_features = cl.model.fc.in_features
cl.model.fc = nn.Linear(nb_features, 9)
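
#############################################################################
# As a quick sanity check (an illustrative addition, assuming freeze_layers
# simply turns off requires_grad on the selected blocks), verify that only
# the new head is trainable.

n_trainable = sum(p.numel() for p in cl.model.parameters() if p.requires_grad)
n_total = sum(p.numel() for p in cl.model.parameters())
print("trainable parameters: {0} / {1}".format(n_trainable, n_total))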

#############################################################################
# Retrain the model
# -----------------
#
# Train the model: only the new final layer has trainable parameters.
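#
# The snippet below is a minimal fine-tuning sketch written with plain
# PyTorch rather than the pynet training interface: the data are random
# dummy tensors and the optimizer settings are illustrative assumptions.
# CrossEntropyLoss is used here instead of the NLLLoss configured above,
# since the new head outputs raw logits.

import torch
import torch.nn as nn
import torch.optim as optim

device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
model = cl.model.to(device)
criterion = nn.CrossEntropyLoss()
# Only the parameters of the new final layer require gradients.
optimizer = optim.Adam(
    filter(lambda p: p.requires_grad, model.parameters()), lr=1e-4)

# Dummy batch: 50 RGB images of size 224x224 with labels in [0, 9).
inputs = torch.randn(50, 3, 224, 224, device=device)
targets = torch.randint(0, 9, (50,), device=device)

model.train()
for epoch in range(5):
    optimizer.zero_grad()
    logits = model(inputs)
    loss = criterion(logits, targets)
    loss.backward()
    optimizer.step()
    print("epoch {0}: loss = {1:.4f}".format(epoch, loss.item()))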