Esempio n. 1
0
                    # Fixed MLP hyperparameters for this configuration.
                    batch_norm = True
                    dropout = 0.
                    output_bias = True

                    optimizer = tt.optim.Adam(lr=lr)
                    # MLP with n_layers hidden layers of n_nodes each.
                    # NOTE(review): arguments presumably are (in_features,
                    # hidden_sizes, out_features, ...) — input and output
                    # widths both equal the feature count; confirm against
                    # tt.practical.MLPVanilla's signature.
                    mlp = tt.practical.MLPVanilla(
                        fold_X_train_std.shape[1],
                        [n_nodes for layer_idx in range(n_layers)],
                        fold_X_train_std.shape[1],
                        batch_norm,
                        dropout,
                        output_bias=output_bias)
                    # Wrap the MLP in a residual connection, then apply a
                    # per-feature diagonal scaling (both project-local modules).
                    net = nn.Sequential(
                        ResidualBlock(fold_X_train_std.shape[1], mlp),
                        DiagonalScaler(fold_X_train_std.shape[1]))

                    # num_durations > 0 selects the discrete-time model:
                    # durations are binned into `num_durations` intervals via
                    # the label transform, whose cut points also become the
                    # model's duration index.  Otherwise use the
                    # continuous-time model.
                    if num_durations > 0:
                        labtrans = NKSDiscrete.label_transform(num_durations)
                        # fold_y_train is assumed to be (n_samples, 2) with
                        # duration and event columns — TODO confirm.
                        fold_y_train_discrete \
                            = labtrans.fit_transform(*fold_y_train.T)
                        surv_model = NKSDiscrete(net,
                                                 optimizer,
                                                 duration_index=labtrans.cuts)
                    else:
                        surv_model = NKS(net, optimizer)

                    # Checkpoint path encoding the hyperparameter settings.
                    # NOTE(review): this snippet is truncated mid-expression
                    # in the source — the format string and argument list
                    # continue beyond what is visible here.
                    model_filename = \
                        os.path.join(output_dir, 'models',
                                     '%s_%s_exp%d_bs%d_nep%d_nla%d_nno%d_'
                                     % (survival_estimator_name, dataset,
Esempio n. 2
0
                    # Standardize the validation fold with the transformer
                    # fitted elsewhere (on the training fold — TODO confirm),
                    # then cast both splits to float32 for torch.
                    fold_X_val_std = transform_features(
                        fold_X_val, transformer)
                    fold_X_train_std = fold_X_train_std.astype('float32')
                    fold_X_val_std = fold_X_val_std.astype('float32')

                    # Start timing and seed both RNGs for reproducibility.
                    tic = time.time()
                    torch.manual_seed(method_random_seed)
                    np.random.seed(method_random_seed)

                    # Hyperparameters are set but unused by this net variant —
                    # no MLP is built here (contrast with the residual-MLP
                    # configuration elsewhere in this file).
                    batch_norm = True
                    dropout = 0.
                    output_bias = False

                    optimizer = tt.optim.Adam(lr=lr)
                    # Minimal net: a single per-feature diagonal scaling
                    # layer (project-local module).
                    net = nn.Sequential(
                        DiagonalScaler(fold_X_train_std.shape[1]))

                    # num_durations > 0 selects the discrete-time model:
                    # durations are binned into `num_durations` intervals via
                    # the label transform, whose cut points also become the
                    # model's duration index.  Otherwise use the
                    # continuous-time model.
                    if num_durations > 0:
                        labtrans = NKSDiscrete.label_transform(num_durations)
                        # fold_y_train is assumed to be (n_samples, 2) with
                        # duration and event columns — TODO confirm.
                        fold_y_train_discrete \
                            = labtrans.fit_transform(*fold_y_train.T)
                        surv_model = NKSDiscrete(net,
                                                 optimizer,
                                                 duration_index=labtrans.cuts)
                    else:
                        surv_model = NKS(net, optimizer)

                    # Checkpoint path encoding the hyperparameter settings.
                    # NOTE(review): this snippet is truncated mid-expression
                    # in the source — the argument list continues beyond what
                    # is visible here.
                    model_filename = \
                        os.path.join(output_dir, 'models',
                                     '%s_%s_exp%d_bs%d_nep%d_lr%f_nd%d_cv%d.pt'
                                     % (survival_estimator_name, dataset,
Esempio n. 3
0
# Seed both the torch and numpy RNGs so the final fit is reproducible.
torch.manual_seed(method_random_seed)
np.random.seed(method_random_seed)

# Fixed MLP hyperparameters for the final model.
batch_norm = True
dropout = 0.0
output_bias = True

# The MLP maps the feature space back onto itself: input and output widths
# both equal the number of columns, with n_layers hidden layers of n_nodes.
n_features = X_train_std.shape[1]
hidden_sizes = [n_nodes] * n_layers
mlp = tt.practical.MLPVanilla(n_features,
                              hidden_sizes,
                              n_features,
                              batch_norm,
                              dropout,
                              output_bias=output_bias)
# Residual connection around the MLP, followed by a per-feature diagonal
# scaling (both project-local modules).
net = nn.Sequential(ResidualBlock(n_features, mlp),
                    DiagonalScaler(n_features))

optimizer = tt.optim.Adam(lr=lr)

# num_durations > 0 selects the discrete-time model: durations are binned
# into num_durations intervals and the resulting cut points become the
# model's duration index.  Otherwise fall back to the continuous-time model.
if num_durations > 0:
    labtrans = NKSDiscrete.label_transform(num_durations)
    y_train_discrete = labtrans.fit_transform(*y_train.T)
    surv_model = NKSDiscrete(net, optimizer, duration_index=labtrans.cuts)
else:
    surv_model = NKS(net, optimizer)

# Checkpoint path encoding every hyperparameter of this run.
model_filename = os.path.join(
    output_dir, 'models',
    '%s_%s_exp%d_bs%d_nep%d_nla%d_nno%d_lr%f_nd%d_test.pt'
    % (survival_estimator_name, dataset, experiment_idx,
       batch_size, n_epochs, n_layers, n_nodes, lr, num_durations))