def run_trainer():
    """Train a CNN model for every subject on FFT-preprocessed EEG data.

    Reads paths and hyperparameters from SETTINGS.json, regenerates the FFT
    features if they are missing, snapshots the settings next to the model
    directory, then calls train() once per subject.
    """
    with open('SETTINGS.json') as f:
        settings_dict = json.load(f)

    # path
    data_path = settings_dict['path']['processed_data_path'] + '/' + create_fft_data_name(settings_dict)
    model_path = settings_dict['path']['model_path'] + '/' + create_cnn_model_name(settings_dict)
    # single-arg parenthesized prints behave identically under Python 2 and 3
    print(data_path)
    print(model_path)

    if not os.path.exists(data_path):
        fft.run_fft_preprocessor()

    if not os.path.exists(model_path):
        os.makedirs(model_path)
    # keep an exact copy of the settings used for this training run
    shutil.copy2('SETTINGS.json', model_path + '/SETTINGS.json')

    # params
    model_params = settings_dict['model']
    validation_params = settings_dict['validation']

    subjects = ['Dog_1', 'Dog_2', 'Dog_3', 'Dog_4', 'Dog_5', 'Patient_1', 'Patient_2']
    for subject in subjects:
        print('*********************** %s ***************************' % subject)
        train(subject, data_path, model_path, model_params, validation_params)
def run_trainer():
    """Train a CNN model for every subject on FFT-preprocessed EEG data.

    Reads paths and hyperparameters from SETTINGS.json, regenerates the FFT
    features if missing, snapshots the settings alongside the model, then
    runs train() for each subject in turn.
    """
    with open('SETTINGS.json') as f:
        settings_dict = json.load(f)

    # path
    data_path = settings_dict['path']['processed_data_path'] + '/' + create_fft_data_name(settings_dict)
    model_path = settings_dict['path']['model_path'] + '/' + create_cnn_model_name(settings_dict)
    # single-arg parenthesized prints work identically on Python 2 and 3
    print(data_path)
    print(model_path)

    if not os.path.exists(data_path):
        fft.run_fft_preprocessor()

    if not os.path.exists(model_path):
        os.makedirs(model_path)
    # snapshot the exact settings used for this model
    shutil.copy2('SETTINGS.json', model_path + '/SETTINGS.json')

    # params
    model_params = settings_dict['model']
    validation_params = settings_dict['validation']

    subjects = ['Dog_1', 'Dog_2', 'Dog_3', 'Dog_4', 'Dog_5', 'Patient_1', 'Patient_2']
    for subject in subjects:
        print('*********************** %s ***************************' % subject)
        train(subject, data_path, model_path, model_params, validation_params)
def run_trainer():
    """Sweep logistic-regression regularization strengths end to end.

    For each C in a fixed grid: train a per-subject model on the FFT
    features, write per-subject predictions into a C-specific submission
    directory, then merge the per-subject CSVs into combined submissions
    under several probability-scaling schemes.
    """
    with open('SETTINGS.json') as f:
        settings_dict = json.load(f)

    reg_list = [10000000, 100, 10, 1.0, 0.1, 0.01]
    for reg_C in reg_list:
        # single-arg parenthesized prints behave identically on Python 2 and 3
        print(reg_C)
        data_path = settings_dict['path']['processed_data_path'] + '/' + create_fft_data_name(settings_dict)
        # submission folder name encodes the regularization strength
        submission_path = settings_dict['path']['submission_path'] + '/logreg_' + str(reg_C) + '_' + create_fft_data_name(settings_dict)

        if not os.path.exists(data_path):
            fft.run_fft_preprocessor()
        if not os.path.exists(submission_path):
            os.makedirs(submission_path)

        subjects = ['Dog_1', 'Dog_2', 'Dog_3', 'Dog_4', 'Dog_5', 'Patient_1', 'Patient_2']
        for subject in subjects:
            print(subject)
            model, data_scaler = train(subject, data_path, reg_C)
            predict(subject, model, data_scaler, data_path, submission_path)

        # combine the per-subject CSVs under each probability-scaling scheme
        merge_csv_files(submission_path, subjects, 'submission')
        merge_csv_files(submission_path, subjects, 'submission_softmax')
        merge_csv_files(submission_path, subjects, 'submission_minmax')
        merge_csv_files(submission_path, subjects, 'submission_median')
def run_trainer():
    """Train an LDA model per subject, predict on labelled test data, and merge submissions.

    Reads paths from SETTINGS.json, regenerates FFT features if missing,
    loads per-subject test labels, trains/predicts for each subject while
    collecting model coefficients, then merges per-subject CSVs into
    combined submissions under several probability-scaling schemes.
    """
    with open('SETTINGS.json') as f:
        settings_dict = json.load(f)

    data_path = settings_dict['path']['processed_data_path'] + '/' + create_fft_data_name(settings_dict)
    submission_path = settings_dict['path']['submission_path'] + '/LDA_' + create_fft_data_name(settings_dict)
    # single-arg parenthesized print behaves identically on Python 2 and 3
    print(data_path)

    if not os.path.exists(data_path):
        fft.run_fft_preprocessor()
    if not os.path.exists(submission_path):
        os.makedirs(submission_path)

    # NOTE(review): machine-specific absolute path — consider moving into SETTINGS.json
    test_labels_path = '/mnt/sda4/CODING/python/kaggle_data/test_labels.csv'
    test_labels = load_test_labels(test_labels_path)

    subjects = ['Dog_1', 'Dog_2', 'Dog_3', 'Dog_4', 'Dog_5', 'Patient_1', 'Patient_2']
    coef_list = []
    for subject in subjects:
        print('*********************** %s ***************************' % subject)
        model, data_scaler, coefs = train(subject, data_path)
        predict(subject, model, data_scaler, data_path, submission_path, test_labels[subject]['preictal'])
        coef_list.append(coefs)

    # combine the per-subject CSVs under each probability-scaling scheme
    merge_csv_files(submission_path, subjects, 'submission')
    merge_csv_files(submission_path, subjects, 'submission_softmax')
    merge_csv_files(submission_path, subjects, 'submission_minmax')
    merge_csv_files(submission_path, subjects, 'submission_median')
def run_trainer():
    """Train an LDA model per subject, predict on labelled test data, and merge submissions.

    Same pipeline as the other LDA driver: read paths from SETTINGS.json,
    rebuild FFT features if absent, load test labels, train and predict per
    subject while collecting coefficients, and merge the per-subject CSVs.
    """
    with open('SETTINGS.json') as f:
        settings_dict = json.load(f)

    data_path = settings_dict['path']['processed_data_path'] + '/' + create_fft_data_name(settings_dict)
    submission_path = settings_dict['path']['submission_path'] + '/LDA_' + create_fft_data_name(settings_dict)
    # single-arg parenthesized print behaves identically on Python 2 and 3
    print(data_path)

    if not os.path.exists(data_path):
        fft.run_fft_preprocessor()
    if not os.path.exists(submission_path):
        os.makedirs(submission_path)

    # NOTE(review): machine-specific absolute path — consider moving into SETTINGS.json
    test_labels_path = '/mnt/sda4/CODING/python/kaggle_data/test_labels.csv'
    test_labels = load_test_labels(test_labels_path)

    subjects = ['Dog_1', 'Dog_2', 'Dog_3', 'Dog_4', 'Dog_5', 'Patient_1', 'Patient_2']
    coef_list = []
    for subject in subjects:
        print('*********************** %s ***************************' % subject)
        model, data_scaler, coefs = train(subject, data_path)
        predict(subject, model, data_scaler, data_path, submission_path, test_labels[subject]['preictal'])
        coef_list.append(coefs)

    # combine the per-subject CSVs under each probability-scaling scheme
    merge_csv_files(submission_path, subjects, 'submission')
    merge_csv_files(submission_path, subjects, 'submission_softmax')
    merge_csv_files(submission_path, subjects, 'submission_minmax')
    merge_csv_files(submission_path, subjects, 'submission_median')
def run_trainer():
    """Cross-validate logistic regression over a grid of C values.

    Pools validation probabilities and labels across all subjects for each
    regularization strength, then reports the pooled ROC AUC and log loss.
    """
    with open('SETTINGS.json') as f:
        settings_dict = json.load(f)

    # path
    data_path = settings_dict['path']['processed_data_path'] + '/' + create_fft_data_name(settings_dict)
    # single-arg parenthesized prints behave identically on Python 2 and 3
    print(data_path)

    if not os.path.exists(data_path):
        fft.run_fft_preprocessor()

    subjects = ['Dog_1', 'Dog_2', 'Dog_3', 'Dog_4', 'Dog_5', 'Patient_1', 'Patient_2']
    for reg_C in [10000000, 100, 10, 1.0, 0.1, 0.01]:
        print(reg_C)
        # pool validation predictions across subjects for one overall metric
        all_valid_probs = []
        all_valid_y = []
        for subject in subjects:
            p, y = cross_validate(subject, data_path, reg_C=reg_C)
            all_valid_probs.extend(p)
            all_valid_y.extend(y)

        fpr, tpr, _ = roc_curve(all_valid_y, all_valid_probs, pos_label=1)
        print(auc(fpr, tpr))
        print(log_loss(all_valid_y, all_valid_probs))
def run_predictor():
    """Generate per-subject CNN predictions and merge them into submission CSVs.

    Derives the model and data locations from SETTINGS.json, regenerates the
    FFT features if missing, snapshots the settings next to the submission,
    predicts for each subject, then merges the per-subject CSVs under
    several probability-scaling schemes (plus an unscaled merge).
    """
    with open('SETTINGS.json') as f:
        settings_dict = json.load(f)

    model_path = settings_dict['path']['model_path'] + '/' + create_cnn_model_name(settings_dict)
    data_path = settings_dict['path']['processed_data_path'] + '/' + create_fft_data_name(settings_dict)
    submission_path = model_path + '/submission'
    # single-arg parenthesized print behaves identically on Python 2 and 3
    print(submission_path)

    if not os.path.exists(data_path):
        fft.run_fft_preprocessor()
    if not os.path.exists(submission_path):
        os.makedirs(submission_path)
    # snapshot the settings used to produce this submission
    shutil.copy2('SETTINGS.json', submission_path + '/SETTINGS.json')

    subjects = ['Dog_1', 'Dog_2', 'Dog_3', 'Dog_4', 'Dog_5', 'Patient_1', 'Patient_2']
    for subject in subjects:
        print('*********************** %s ***************************' % subject)
        predict(subject, data_path, model_path, submission_path)

    merge_csv_data(submission_path, subjects, submission_name='submission', scale='minmax')
    merge_csv_data(submission_path, subjects, submission_name='submission', scale='softmax')
    merge_csv_data(submission_path, subjects, submission_name='submission', scale='median')
    merge_csv_data(submission_path, subjects, submission_name='submission')
def run_predictor():
    """Generate per-subject CNN predictions and merge them into submission CSVs.

    Same flow as the other predictor driver: resolve paths from
    SETTINGS.json, rebuild FFT features if absent, snapshot the settings,
    predict per subject, and merge the per-subject CSVs.
    """
    with open('SETTINGS.json') as f:
        settings_dict = json.load(f)

    model_path = settings_dict['path']['model_path'] + '/' + create_cnn_model_name(settings_dict)
    data_path = settings_dict['path']['processed_data_path'] + '/' + create_fft_data_name(settings_dict)
    submission_path = model_path + '/submission'
    print(submission_path)

    if not os.path.exists(data_path):
        fft.run_fft_preprocessor()
    if not os.path.exists(submission_path):
        os.makedirs(submission_path)
    # snapshot the settings used to produce this submission
    shutil.copy2('SETTINGS.json', submission_path + '/SETTINGS.json')

    subjects = ['Dog_1', 'Dog_2', 'Dog_3', 'Dog_4', 'Dog_5', 'Patient_1', 'Patient_2']
    for subject in subjects:
        # formatted single-arg print: identical output on Python 3, and avoids
        # the tuple rendering that print('***', subject, '***') gives on Python 2
        print('*********************** %s ***************************' % subject)
        predict(subject, data_path, model_path, submission_path)

    merge_csv_data(submission_path, subjects, submission_name='submission', scale='minmax')
    merge_csv_data(submission_path, subjects, submission_name='submission', scale='softmax')
    merge_csv_data(submission_path, subjects, submission_name='submission', scale='median')
    merge_csv_data(submission_path, subjects, submission_name='submission')
def run_trainer():
    """Cross-validate logistic regression over a grid of C values.

    For each regularization strength, pools validation probabilities and
    labels across all subjects and reports the pooled ROC AUC and log loss.
    """
    with open('SETTINGS.json') as f:
        settings_dict = json.load(f)

    # path
    data_path = settings_dict['path']['processed_data_path'] + '/' + create_fft_data_name(settings_dict)
    # single-arg parenthesized prints behave identically on Python 2 and 3
    print(data_path)

    if not os.path.exists(data_path):
        fft.run_fft_preprocessor()

    subjects = ['Dog_1', 'Dog_2', 'Dog_3', 'Dog_4', 'Dog_5', 'Patient_1', 'Patient_2']
    for reg_C in [10000000, 100, 10, 1.0, 0.1, 0.01]:
        print(reg_C)
        # pool validation predictions across subjects for one overall metric
        all_valid_probs = []
        all_valid_y = []
        for subject in subjects:
            p, y = cross_validate(subject, data_path, reg_C=reg_C)
            all_valid_probs.extend(p)
            all_valid_y.extend(y)

        fpr, tpr, _ = roc_curve(all_valid_y, all_valid_probs, pos_label=1)
        print(auc(fpr, tpr))
        print(log_loss(all_valid_y, all_valid_probs))
sn.append(sn_t) sp.append(sp_t) return t_list, sn, sp if __name__ == '__main__': with open('SETTINGS.json') as f: settings_dict = json.load(f) data_path = settings_dict['path']['processed_data_path'] + '/' + create_fft_data_name(settings_dict) submission_path = settings_dict['path']['submission_path'] + '/LDA_' + create_fft_data_name(settings_dict) print(data_path) if not os.path.exists(data_path): fft.run_fft_preprocessor() if not os.path.exists(submission_path): os.makedirs(submission_path) test_labels_path = '/mnt/sda4/CODING/python/kaggle_data/test_labels.csv' test_labels = load_test_labels(test_labels_path) subjects = ['Dog_1', 'Dog_2', 'Dog_3', 'Dog_4', 'Dog_5', 'Patient_1', 'Patient_2'] t, sn, sp = [], [], [] for subject in subjects: print('***********************', subject, '***************************') t_i, sn_i, sp_i = curve_per_subject(subject, data_path, test_labels[subject]['preictal']) t.append(t_i) sn.append(sn_i) sp.append(sp_i)
sn.append(sn_t) sp.append(sp_t) return t_list, sn, sp if __name__ == '__main__': with open('SETTINGS.json') as f: settings_dict = json.load(f) data_path = settings_dict['path']['processed_data_path'] + '/' + create_fft_data_name(settings_dict) submission_path = settings_dict['path']['submission_path'] + '/LDA_' + create_fft_data_name(settings_dict) print data_path if not os.path.exists(data_path): fft.run_fft_preprocessor() if not os.path.exists(submission_path): os.makedirs(submission_path) test_labels_path = '/mnt/sda4/CODING/python/kaggle_data/test_labels.csv' test_labels = load_test_labels(test_labels_path) subjects = ['Dog_1', 'Dog_2', 'Dog_3', 'Dog_4', 'Dog_5', 'Patient_1', 'Patient_2'] t, sn, sp = [], [], [] for subject in subjects: print '***********************', subject, '***************************' t_i, sn_i, sp_i = curve_per_subject(subject, data_path, test_labels[subject]['preictal']) t.append(t_i) sn.append(sn_i) sp.append(sp_i)