X_train_std.shape[1], batch_norm, dropout, output_bias=output_bias) net = nn.Sequential(ResidualBlock(X_train_std.shape[1], mlp), Scaler(X_train_std.shape[1])) optimizer = tt.optim.Adam(lr=lr) if num_durations > 0: labtrans = NKSDiscrete.label_transform(num_durations) y_train_discrete = labtrans.fit_transform(*y_train.T) surv_model = NKSDiscrete(net, optimizer, duration_index=labtrans.cuts) else: surv_model = NKS(net, optimizer) model_filename = \ os.path.join(output_dir, 'models', '%s_%s_exp%d_bs%d_nep%d_nla%d_nno%d_lr%f_nd%d_test.pt' % (survival_estimator_name, dataset, experiment_idx, batch_size, n_epochs, n_layers, n_nodes, lr, num_durations)) assert os.path.isfile(model_filename) if not os.path.isfile(model_filename): print('*** Fitting with hyperparam:', hyperparam, flush=True) if num_durations > 0: surv_model.fit(X_train_std, y_train_discrete, batch_size, n_epochs, verbose=True) else: surv_model.fit(X_train_std, (y_train[:, 0], y_train[:, 1]), batch_size, n_epochs, verbose=True)
# --- Fine-tune the pretrained embedding network with DKSA -----------------
# NOTE(review): this chunk starts mid-`if`; the `save_net` call below is the
# tail of a branch (presumably `if not os.path.isfile(emb_model_filename):`)
# that begins in the preceding chunk -- confirm upstream.
    emb_model.save_net(emb_model_filename)
else:
    # A checkpoint already exists: reuse the previously-trained embedding net.
    emb_model.load_net(emb_model_filename)

# Switch the embedding network back into training mode for fine-tuning.
emb_model.net.train()
print('*** Fine-tuning with DKSA...')

# Re-seed torch and numpy so the fine-tuning stage is reproducible; the +1
# offset avoids replaying the exact random stream used by the earlier stage.
torch.manual_seed(fine_tune_random_seed + 1)
np.random.seed(fine_tune_random_seed + 1)

optimizer = tt.optim.Adam(lr=lr)
if num_durations > 0:
    # Discrete-time variant: bin the observed (time, event) labels into
    # num_durations intervals; labtrans.cuts holds the resulting time grid.
    labtrans = NKSDiscrete.label_transform(num_durations)
    y_train_discrete = labtrans.fit_transform(*y_train.T)
    surv_model = NKSDiscrete(emb_model.net, optimizer,
                             duration_index=labtrans.cuts)
else:
    # Continuous-time variant, built on top of the fine-tuned embedding net.
    surv_model = NKS(emb_model.net, optimizer)

# Checkpoint path encodes every hyperparameter (including the tree-based
# pretraining settings max_features / min_samples_leaf / use_km) so distinct
# configs never collide on disk.
model_filename = \
    os.path.join(output_dir, 'models',
                 '%s_%s_exp%d_mf%d_msl%d_km%d_'
                 % (survival_estimator_name, dataset, experiment_idx,
                    max_features, min_samples_leaf, use_km)
                 + 'bs%d_nep%d_nla%d_nno%d_'
                 % (batch_size, n_epochs, n_layers, n_nodes)
                 + 'lr%f_nd%d_test.pt' % (lr, num_durations))

# Only train when no saved checkpoint exists (the rest of this branch
# continues past the visible end of this chunk).
if not os.path.isfile(model_filename):
    print('*** Fitting with hyperparam:', hyperparam, flush=True)