def train_deepsurv(x_train, y_train, x_test, y_test, **kwargs):
    """Train one DeepSurv model for a hyperparameter-search trial.

    Standardizes the covariates using the *training* split's statistics
    (to avoid leaking test-set information), trains a DeepSurv network
    with the hyperparameters decoded from ``kwargs``, and returns the
    concordance index on the held-out split.

    Parameters
    ----------
    x_train, x_test : array-like, shape (n_samples, n_features)
        Covariate matrices. Presumably numpy arrays — TODO confirm.
    y_train, y_test : array-like
        Survival outcomes in whatever layout ``format_to_deepsurv`` expects.
    **kwargs
        Raw hyperparameter-search values; translated by ``get_hyperparams``.

    Returns
    -------
    float
        Concordance index of the trained network on the validation data.
    """
    # Standardize the datasets with train-split statistics only.
    train_mean = x_train.mean(axis=0)
    train_std = x_train.std(axis=0)
    # Guard against constant columns: a zero std would turn the whole
    # column into NaN/inf after division. Dividing by 1 leaves the
    # (zero-centered) column unchanged instead.
    train_std[train_std == 0] = 1.0
    x_train = (x_train - train_mean) / train_std
    x_test = (x_test - train_mean) / train_std

    train_data = format_to_deepsurv(x_train, y_train)
    valid_data = format_to_deepsurv(x_test, y_test)

    hyperparams = get_hyperparams(kwargs)

    # Set up Tensorboard loggers.
    # TODO improve the model_id for Tensorboard to better partition runs
    # (note: hash() of a str is salted per interpreter run unless
    # PYTHONHASHSEED is fixed, so model_id is not stable across runs).
    model_id = str(hash(str(hyperparams)))
    run_id = model_id + '_' + str(uuid.uuid4())
    logger = TensorboardLogger(
        'hyperparam_search',
        os.path.join(logdir, "tensor_logs", model_id, run_id))

    # n_in is derived from the data; the remaining architecture /
    # optimization settings come from the search.
    network = deep_surv.DeepSurv(n_in=x_train.shape[1], **hyperparams)
    metrics = network.train(train_data, n_epochs=num_epochs, logger=logger,
                            update_fn=update_fn, verbose=False)

    result = network.get_concordance_index(**valid_data)
    # Lazy %-args: the message is only formatted if the level is enabled.
    main_logger.info(
        'Run id: %s | %s | C-Index: %f | Train Loss %f',
        run_id, str(hyperparams), result, metrics['loss'][-1][1])
    return result
'dropout': 0.4,
    'hidden_layers_sizes': [25, 25],
    'learning_rate': 1e-05,
    'lr_decay': 0.001,
    'momentum': 0.9,
    # Input dimensionality is taken from the training covariate matrix.
    'n_in': train_data['x'].shape[1],
    'standardize': True
}

# Enable TensorBoard logging for this experiment.
experiment_name = 'test_experiment_sebastian'
logdir = 'logs/tensorboard/'
logger = TensorboardLogger(experiment_name, logdir = logdir)

# Create an instance of DeepSurv using the hyperparams defined above.
model = deep_surv.DeepSurv(**hyperparams)

# The type of optimizer to use.
# Check out http://lasagne.readthedocs.io/en/latest/modules/updates.html
# for other optimizers to use.
update_fn = lasagne.updates.nesterov_momentum

n_epochs = 10001

# Train the model, logging metrics for both splits to TensorBoard.
metrics = model.train(train_data, test_data, n_epochs = n_epochs, logger = logger, update_fn = update_fn)

# Print the final metrics (last entry of each per-epoch series).
print('Train C-Index:', metrics['c-index'][-1])
print('Test C-Index:', metrics['valid_c-index'][-1])