示例#1
0
 def setUp(self):
     """ Prepare shared fixtures: the pynet tool registries plus random
     input tensors for 1D, 2D and 3D cases.
     """
     # Fetch both tool registries with a single loop.
     for tool in ("networks", "losses"):
         setattr(self, tool, pynet.get_tools(tool_name=tool))
     # Random inputs -- presumably (batch, channel, *spatial); confirm
     # against the networks under test.  Creation order is kept so the
     # global RNG state is consumed exactly as before.
     self.x1 = torch.randn(3, 1, 64)
     self.x2 = torch.randn(1, 1, 127, 128)
     self.x3 = torch.randn(1, 1, 64, 64, 64)
示例#2
0
 def setUp(self):
     """ Prepare shared fixtures for the loss tests: the pynet loss
     registry plus synthetic scores, integer targets, class weights
     and a mask.
     """
     self.losses = pynet.get_tools(tool_name="losses")
     n_classes = 3
     self.n_classes = n_classes
     # Scores with gradient tracking enabled.
     self.x = torch.randn(2, n_classes, 3, 5, 5, requires_grad=True)
     # Integer labels drawn uniformly from [0, n_classes).
     labels = torch.empty(2, 3, 5, 5, dtype=torch.long)
     self.target = labels.random_(n_classes)
     # Per-class weights and an all-ones mask.
     self.weights = torch.tensor([1.0, 2.0, 3.0])
     self.mask = torch.ones(2, 3, 5, 5)
示例#3
0
import logging

import matplotlib.pyplot as plt
from torch.optim import lr_scheduler

import pynet
from pynet.utils import setup_logging
from pynet.interfaces import (VoxelMorphNetRegister, ADDNetRegister,
                              VTNetRegister, RCNetRegister)
from pynet.models.voxelmorphnet import FlowRegularizer
from pynet.models.vtnet import ADDNetRegularizer
from pynet.plotting import plot_history
from pynet.history import History
from pynet.losses import MSELoss, NCCLoss, RCNetLoss, PCCLoss
from pynet.plotting import Board, update_board

# Verbose logging and the registered loss factories.
setup_logging(level="debug")
logger = logging.getLogger("pynet")
losses = pynet.get_tools(tool_name="losses")

# Download/locate the registration dataset in a scratch directory and wrap
# it in a DataManager: 2 CV folds, batches of 8, random sampling stratified
# on the "studies" label and restricted to the "abide" study, with a 10%
# test split.  sample_size=0.1 presumably keeps only 10% of the data to
# speed the run up -- TODO confirm against DataManager's documentation.
# NOTE(review): fetch_registration and DataManager are pynet helpers whose
# imports are not visible in this chunk.
outdir = "/neurospin/nsap/tmp/registration"
data = fetch_registration(datasetdir=outdir)
manager = DataManager(input_path=data.input_path,
                      metadata_path=data.metadata_path,
                      number_of_folds=2,
                      batch_size=8,
                      sampler="random",
                      stratify_label="studies",
                      projection_labels={"studies": ["abide"]},
                      test_size=0.1,
                      add_input=True,
                      sample_size=0.1)

#############################################################################
示例#4
0
                                 test_inputs=x_test,
                                 test_labels=y_test,
                                 batch_size=128,
                                 continuous_labels=True)
# Graph-network interfaces registered in pynet.
interfaces = pynet.get_interfaces()["graph"]
# Network hyper-parameters for 90x90 single-channel inputs with two output
# classes.  The nb_e2e / nb_e2n / nb_n2g widths presumably map to the
# BrainNetCNN edge-to-edge, edge-to-node and node-to-graph layers -- TODO
# confirm against the model definition.
net_params = pynet.NetParameters(input_shape=(90, 90),
                                 in_channels=1,
                                 num_classes=2,
                                 nb_e2e=32,
                                 nb_e2n=64,
                                 nb_n2g=30,
                                 dropout=0.5,
                                 leaky_alpha=0.1,
                                 twice_e2e=False,
                                 dense_sml=True)
# Loss instance from the tools registry; note the model itself is wired by
# name (loss_name="MSELoss") below, not by this object.
my_loss = pynet.get_tools()["losses"]["MSELoss"]()
model = interfaces["BrainNetCNNGraph"](net_params,
                                       optimizer_name="Adam",
                                       learning_rate=0.01,
                                       weight_decay=0.0005,
                                       loss_name="MSELoss")
# Live monitoring board refreshed after every epoch (port 8097 is the
# visdom default -- TODO confirm Board's backend).
model.board = Board(port=8097, host="http://localhost", env="main")
model.add_observer("after_epoch", update_board)
# Shrink the learning rate 10x when the monitored quantity stops
# decreasing for 5 consecutive epochs.
scheduler = lr_scheduler.ReduceLROnPlateau(optimizer=model.optimizer,
                                           mode="min",
                                           factor=0.1,
                                           patience=5,
                                           verbose=True,
                                           eps=1e-8)
test_history, train_history = model.training(manager=manager,
                                             nb_epochs=15,
示例#5
0
    logit_function="sigmoid",
    predict=False)
# Tabulate thresholded predictions, ground truth and raw scores side by
# side for inspection.
result = pd.DataFrame.from_dict(collections.OrderedDict([
    ("pred", (y_pred.squeeze() > 0.5).astype(int)),
    ("truth", y_true.squeeze()),
    ("prob", y_pred.squeeze())]))
print(result)

# Confusion matrix heatmap.
fig, ax = plt.subplots()
cmap = plt.get_cmap('Blues')
cm = SKMetrics("confusion_matrix", with_logit=False)(y_pred, y_true)
sns.heatmap(cm, cmap=cmap, annot=True, fmt="g", ax=ax)
ax.set_xlabel("predicted values")
ax.set_ylabel("actual values")

# Evaluate every scikit-learn backed metric registered in pynet (keys
# prefixed with "sk_").  A dict comprehension replaces the original
# dict(<generator>) construct -- same result, clearer (flake8 C402).
sk_metrics = {
    key: val for key, val in pynet.get_tools()["metrics"].items()
    if key.startswith("sk_")}
metrics = {}
for name, metric in sk_metrics.items():
    metric.with_logit = False  # match the SKMetrics call above
    value = metric(y_pred, y_true)
    metrics.setdefault(name, []).append(value)
metrics = pd.DataFrame.from_dict(metrics)
print(classification_report(y_true, y_pred >= 0.4))
print(metrics)
# plot_metric_rank_correlations(metrics)

# ROC curve and its area under the curve.
fpr, tpr, _ = roc_curve(y_true, y_pred)
roc_auc = auc(fpr, tpr)
plt.figure()
plt.plot(fpr, tpr, color="darkorange", lw=2,
         label="ROC curve (area = %0.2f)" % roc_auc)
plt.plot([0, 1], [0, 1], color="navy", lw=2, linestyle="--")
示例#6
0
 def setUp(self):
     """ Prepare shared fixtures: the pynet network registry plus random
     2D and 3D input tensors.
     """
     tools = pynet.get_tools()
     self.networks = tools["networks"]
     # Random inputs -- presumably (batch, channel, *spatial); confirm
     # against the networks under test.
     self.x2 = torch.randn(1, 1, 127, 128)
     self.x3 = torch.randn(1, 1, 64, 64, 64)
# ----------
#
# Define some global parameters that will be used to create and train the
# model:

n_samples = 100       # synthetic observations to generate
n_classes = 3         # classes in the synthetic dataset
n_feats = 4           # features per observation
true_lat_dims = 2     # latent dims used to *generate* the data
fit_lat_dims = 5      # latent dims used by the fitted model
snr = 10              # signal-to-noise ratio of the synthetic data
batch_size = 10
adam_lr = 2e-3        # Adam learning rate
epochs = 100
# Run on GPU when one is available, otherwise fall back to CPU.
device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
# Registered loss and metric factories from pynet.
losses = pynet.get_tools(tool_name="losses")
metrics = pynet.get_tools(tool_name="metrics")
setup_logging(level="info")

#############################################################################
# Synthetic dataset
# -----------------
#
# A Gaussian linear multi-class synthetic dataset is generated as
# follows. The number of latent dimensions used to generate the data
# can be controlled.


class GeneratorUniform(nn.Module):
    """ Generate multiple sources (channels) of data through a linear
    generative model:
示例#8
0
                                 test_inputs=x_test,
                                 test_labels=y_test,
                                 batch_size=128,
                                 continuous_labels=True)
# Graph-network interfaces registered in pynet.
interfaces = pynet.get_interfaces()["graph"]
# Network hyper-parameters for 90x90 single-channel inputs with two output
# classes.  The nb_e2e / nb_e2n / nb_n2g widths presumably map to the
# BrainNetCNN edge-to-edge, edge-to-node and node-to-graph layers -- TODO
# confirm against the model definition.
net_params = pynet.NetParameters(input_shape=(90, 90),
                                 in_channels=1,
                                 num_classes=2,
                                 nb_e2e=32,
                                 nb_e2n=64,
                                 nb_n2g=30,
                                 dropout=0.5,
                                 leaky_alpha=0.1,
                                 twice_e2e=False,
                                 dense_sml=True)
# Loss instance from the tools registry; note the model itself is wired by
# name (loss_name="MSELoss") below, not by this object.
my_loss = pynet.get_tools(tool_name="losses")["MSELoss"]()
model = interfaces["BrainNetCNNGraph"](net_params,
                                       optimizer_name="Adam",
                                       learning_rate=0.01,
                                       weight_decay=0.0005,
                                       loss_name="MSELoss")
# Live monitoring board refreshed after every epoch (port 8097 is the
# visdom default -- TODO confirm Board's backend).
model.board = Board(port=8097, host="http://localhost", env="main")
model.add_observer("after_epoch", update_board)
# Shrink the learning rate 10x when the monitored quantity stops
# decreasing for 5 consecutive epochs.
scheduler = lr_scheduler.ReduceLROnPlateau(optimizer=model.optimizer,
                                           mode="min",
                                           factor=0.1,
                                           patience=5,
                                           verbose=True,
                                           eps=1e-8)
test_history, train_history = model.training(manager=manager,
                                             nb_epochs=15,