Example #1
output_bias = True

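# NOTE: ResidualBlock and Scaler are custom modules whose definitions are not
# included in this snippet. Below is a minimal sketch of what such wrappers
# could look like (an assumption for illustration, not the original code):
import torch
import torch.nn as nn

class ResidualBlock(nn.Module):
    # Skip connection around a wrapped module: forward(x) = x + module(x).
    # n_features is accepted to mirror the call signature used below but is
    # not otherwise needed in this sketch.
    def __init__(self, n_features, module):
        super().__init__()
        self.module = module

    def forward(self, x):
        return x + self.module(x)

class Scaler(nn.Module):
    # Learned per-feature affine rescaling applied to the encoder output.
    def __init__(self, n_features):
        super().__init__()
        self.weight = nn.Parameter(torch.ones(n_features))
        self.bias = nn.Parameter(torch.zeros(n_features))

    def forward(self, x):
        return x * self.weight + self.bias

# Encoder: an MLP (n_layers hidden layers of n_nodes units each) mapping the
# features back to the same dimension, wrapped in a residual connection and
# followed by the learned scaling layer.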
mlp = tt.practical.MLPVanilla(X_train_std.shape[1],
                              [n_nodes for layer_idx
                               in range(n_layers)],
                              X_train_std.shape[1],
                              batch_norm,
                              dropout,
                              output_bias=output_bias)
net = nn.Sequential(ResidualBlock(X_train_std.shape[1], mlp),
                    Scaler(X_train_std.shape[1]))

optimizer = tt.optim.Adam(lr=lr)

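# num_durations > 0 selects the discrete-time model: event times are binned
# into num_durations intervals (fit_transform takes the duration and event
# columns of y_train and returns their discretized versions; labtrans.cuts
# holds the bin edges). Otherwise the continuous-time model is used.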
if num_durations > 0:
    labtrans = NKSDiscrete.label_transform(num_durations)
    y_train_discrete = labtrans.fit_transform(*y_train.T)
    surv_model = NKSDiscrete(net, optimizer,
                             duration_index=labtrans.cuts)
else:
    surv_model = NKS(net, optimizer)

model_filename = \
    os.path.join(output_dir, 'models',
                 '%s_%s_exp%d_bs%d_nep%d_nla%d_nno%d_lr%f_nd%d_test.pt'
                 % (survival_estimator_name, dataset, experiment_idx,
                    batch_size, n_epochs, n_layers, n_nodes, lr, num_durations))
if not os.path.isfile(model_filename):
    print('*** Fitting with hyperparam:', hyperparam, flush=True)
    if num_durations > 0:
Example #2
                 'lr%f_test.pt' % lr)
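# Pretrain the embedding network to reproduce the MDS embedding, but only if
# no cached model file exists; otherwise load the saved weights.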
if not os.path.isfile(emb_model_filename):
    emb_model.fit(X_train_std, mds_embedding,
                  batch_size=batch_size, epochs=100,
                  verbose=False)
    emb_model.save_net(emb_model_filename)
else:
    emb_model.load_net(emb_model_filename)
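# Put the (possibly just-loaded) embedding network into training mode before
# fine-tuning.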
emb_model.net.train()

print('*** Fine-tuning with DKSA...')
torch.manual_seed(fine_tune_random_seed + 1)
np.random.seed(fine_tune_random_seed + 1)
optimizer = tt.optim.Adam(lr=lr)
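# Wrap the pretrained embedding network as the backbone of the kernel survival
# model (discrete- or continuous-time, as in the setup above) for fine-tuning.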
if num_durations > 0:
    labtrans = NKSDiscrete.label_transform(num_durations)
    y_train_discrete = labtrans.fit_transform(*y_train.T)
    surv_model = NKSDiscrete(emb_model.net, optimizer,
                             duration_index=labtrans.cuts)
else:
    surv_model = NKS(emb_model.net, optimizer)

model_filename = \
    os.path.join(output_dir, 'models',
                 '%s_%s_exp%d_mf%d_msl%d_km%d_'
                 % (survival_estimator_name, dataset,
                    experiment_idx, max_features,
                    min_samples_leaf, use_km)
                 +
                 'bs%d_nep%d_nla%d_nno%d_'
                 % (batch_size, n_epochs, n_layers, n_nodes)
Example #3
                    output_bias = True

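                    # Per-fold model for cross-validation: the same
                    # residual-MLP encoder as above, sized to this fold's
                    # training features.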
                    optimizer = tt.optim.Adam(lr=lr)
                    mlp = tt.practical.MLPVanilla(fold_X_train_std.shape[1],
                                                  [n_nodes for layer_idx
                                                   in range(n_layers)],
                                                  fold_X_train_std.shape[1],
                                                  batch_norm,
                                                  dropout,
                                                  output_bias=output_bias)
                    net = nn.Sequential(ResidualBlock(fold_X_train_std.shape[1],
                                                      mlp),
                                        Scaler(fold_X_train_std.shape[1]))

                    if num_durations > 0:
                        labtrans = NKSDiscrete.label_transform(num_durations)
                        fold_y_train_discrete \
                            = labtrans.fit_transform(*fold_y_train.T)
                        surv_model = NKSDiscrete(net, optimizer,
                                                 duration_index=labtrans.cuts)
                    else:
                        surv_model = NKS(net, optimizer)

                    model_filename = \
                        os.path.join(output_dir, 'models',
                                     '%s_%s_exp%d_bs%d_nep%d_nla%d_nno%d_'
                                     % (survival_estimator_name, dataset,
                                        experiment_idx, batch_size, n_epochs,
                                        n_layers, n_nodes)
                                     +
                                     'lr%f_nd%d_cv%d.pt'
Example #4
y_train = y_train.astype('float32')
y_test = y_test.astype('float32')

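# Seed PyTorch (CPU and all GPUs) and NumPy so this run is reproducible.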
torch.manual_seed(method_random_seed)
torch.cuda.manual_seed_all(method_random_seed)
np.random.seed(method_random_seed)

batch_norm = True
dropout = 0.0
output_bias = False

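# The model here is just a learned per-feature scaling of the inputs;
# DiagonalScaler is another custom module not defined in this snippet
# (presumably a diagonal, i.e. per-feature, scaling layer). No MLP encoder
# is used.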
net = nn.Sequential(DiagonalScaler(X_train_std.shape[1]))
optimizer = tt.optim.Adam(lr=lr)

if num_durations > 0:
    labtrans = NKSDiscrete.label_transform(num_durations)
    y_train_discrete = labtrans.fit_transform(*y_train.T)
    surv_model = NKSDiscrete(net, optimizer, duration_index=labtrans.cuts)
else:
    surv_model = NKS(net, optimizer)

model_filename = \
    os.path.join(output_dir, 'models',
                 '%s_%s_exp%d_%s_bs%d_nep%d_lr%f_nd%d_test.pt'
                 % (survival_estimator_name, dataset, experiment_idx,
                    val_string, batch_size, n_epochs, lr, num_durations))
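# Test-time evaluation: the model must already have been trained and saved
# under this filename, so just load the saved weights.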
assert os.path.isfile(model_filename)
print('*** Loading ***', flush=True)
surv_model.load_net(model_filename)

if num_durations > 0: