def save_to_csv(PyTorchExperimentLogger, data, path, header=None):
    """Save a numpy array as a CSV file inside the experiment save dir.

    Args:
        data: The array to be stored as a save file.
        path: Sub path in the save folder (or simply a filename); a
            '.csv' suffix is appended if it is missing.
        header: Optional header string written (without comment marks,
            since ``comments=''``) as the first line of the file.
    """
    folder_path = create_folder(PyTorchExperimentLogger.save_dir,
                                os.path.dirname(path))
    file_path = os.path.join(folder_path, os.path.basename(path))
    if not file_path.endswith('.csv'):
        file_path += '.csv'
    # np.savetxt requires a string header: with the previous default of
    # None it raised TypeError (len(None)) whenever the caller omitted
    # `header`, so coerce None to the empty string here.
    np.savetxt(file_path, data, delimiter=',',
               header='' if header is None else header, comments='')
    return
from eICU_preprocessing.split_train_test import create_folder
from models.run_tpc import TPC
from models.initialise_arguments import initialise_tpc_arguments


if __name__ == '__main__':
    # Configure the temporal-only TPC variant with weight sharing enabled.
    config = initialise_tpc_arguments()
    config['mode'] = 'test'
    config['exp_name'] = 'TempWeightShare'
    config['model_type'] = 'temp_only'
    config['share_weights'] = True
    # Same temporal width (32 channels) at every layer.
    config['temp_kernels'] = [32] * config['n_layers']

    experiment_dir = create_folder('models/experiments/final', config.exp_name)
    experiment = TPC(
        config=config,
        n_epochs=config.n_epochs,
        name=config.exp_name,
        base_dir=experiment_dir,
        explogger_kwargs={'folder_format': '%Y-%m-%d_%H%M%S{run_number}'})
    experiment.run()
from eICU_preprocessing.split_train_test import create_folder
from models.run_lstm import BaselineLSTM
from models.initialise_arguments import initialise_lstm_arguments
from models.final_experiment_scripts.best_hyperparameters import best_lstm


if __name__ == '__main__':
    # Standard LSTM baseline on the MIMIC mortality task, using the
    # best hyperparameters found during tuning.
    config = initialise_lstm_arguments()
    config['exp_name'] = 'StandardLSTM'
    config['dataset'] = 'MIMIC'
    config['task'] = 'mortality'
    config = best_lstm(config)

    experiment_dir = create_folder('models/experiments/final/MIMIC/mortality',
                                   config.exp_name)
    experiment = BaselineLSTM(
        config=config,
        n_epochs=config.n_epochs,
        name=config.exp_name,
        base_dir=experiment_dir,
        explogger_kwargs={'folder_format': '%Y-%m-%d_%H%M%S{run_number}'})
    experiment.run()
# NOTE(review): this chunk is the tail of a hyperparameter-sampling function
# (its `def` line is outside this view); `c` and `param_grid` are presumably
# bound earlier in that function — confirm against the full file.
    # Sample one random configuration from the search grid.
    c['n_layers'] = random.choice(param_grid['n_layers'])
    # The admissible kernel sizes depend on the sampled network depth.
    c['kernel_size'] = random.choice(param_grid['kernel_size'][c['n_layers']])
    # One width is sampled and repeated for every layer (uniform-width nets).
    c['temp_kernels'] = [random.choice(param_grid['temp_kernels'])] * c['n_layers']
    c['point_sizes'] = [random.choice(param_grid['point_sizes'])] * c['n_layers']
    c['learning_rate'] = round(random.choice(param_grid['learning_rate']), 5)
    c['batch_size'] = random.choice(param_grid['batch_size'])
    c['temp_dropout_rate'] = random.choice(param_grid['temp_dropout_rate'])
    return c


if __name__ == '__main__':
    # Random search: train 50 independently sampled configurations on eICU.
    for i in range(50):
        try:
            c = get_hyperparam_config('eICU')
            log_folder_path = create_folder(
                'models/experiments/hyperparameters/eICU', c.exp_name)
            tpc = TPC(config=c, n_epochs=c.n_epochs, name=c.exp_name,
                      base_dir=log_folder_path,
                      explogger_kwargs={
                          'folder_format': '%Y-%m-%d_%H%M%S{run_number}'
                      })
            tpc.run()
        except RuntimeError:
            # Deliberate best-effort sweep: a configuration that fails at
            # runtime (e.g. presumably GPU out-of-memory — confirm) is
            # skipped rather than aborting the whole search.
            continue
from eICU_preprocessing.split_train_test import create_folder
from models.run_lstm import BaselineLSTM
from models.initialise_arguments import initialise_lstm_arguments
from models.final_experiment_scripts.best_hyperparameters import best_cw_lstm


if __name__ == '__main__':
    # Channel-wise LSTM baseline on the MIMIC multitask setting, using
    # the best hyperparameters found during tuning.
    config = initialise_lstm_arguments()
    config['exp_name'] = 'ChannelwiseLSTM'
    config['dataset'] = 'MIMIC'
    config['task'] = 'multitask'
    config = best_cw_lstm(config)

    experiment_dir = create_folder('models/experiments/final/MIMIC/multitask',
                                   config.exp_name)
    experiment = BaselineLSTM(
        config=config,
        n_epochs=config.n_epochs,
        name=config.exp_name,
        base_dir=experiment_dir,
        explogger_kwargs={'folder_format': '%Y-%m-%d_%H%M%S{run_number}'})
    experiment.run()
def setup(self):
    """Build the TPC model and its Adam optimiser after the shared template setup."""
    self.setup_template()
    # Dataset dimensions come from the training data reader.
    network = TempPointConv(
        config=self.config,
        F=self.train_datareader.F,
        D=self.train_datareader.D,
        no_flat_features=self.train_datareader.no_flat_features)
    self.model = network.to(device=self.device)
    self.elog.print(self.model)
    self.optimiser = Adam(
        self.model.parameters(),
        lr=self.config.learning_rate,
        weight_decay=self.config.L2_regularisation)
    return


if __name__ == '__main__':
    # Run the TPC experiment for whatever dataset/task the arguments select.
    config = initialise_tpc_arguments()
    config['exp_name'] = 'TPC'
    experiment_dir = create_folder(
        'models/experiments/{}/{}'.format(config.dataset, config.task),
        config.exp_name)
    experiment = TPC(
        config=config,
        n_epochs=config.n_epochs,
        name=config.exp_name,
        base_dir=experiment_dir,
        explogger_kwargs={'folder_format': '%Y-%m-%d_%H%M%S{run_number}'})
    experiment.run()