def run_BD(i):
    """Train an LSTM behavioural model on BD data for config index *i*.

    Reads hyper-parameters (cells, lr, group, iters, model path, seed) from
    the module-level ``configs`` list and writes logs/checkpoints under a
    run-specific output directory.

    NOTE(review): a second ``run_BD`` defined later in this file shadows this
    one at import time — rename one of the two if both are needed.
    """
    tf.reset_default_graph()
    data = DataReader.read_BD()
    ncells = configs[i]['cells']
    learning_rate = configs[i]['lr']
    group = configs[i]['g']
    iters = configs[i]['iters']
    model_path = configs[i]['model_path']
    output_path = (Paths.local_path + 'BD/rnn-opt-rand-init/'
                   + 'run_' + str(configs[i]['s']) + '/'
                   + str(ncells) + 'cells/' + group + '/')
    with LogFile(output_path, 'run.log'):
        DLogger.logger().debug("group: " + str(group))
        gdata = data.loc[data.diag == group]
        ids = gdata['id'].unique().tolist()
        # Every subject is labelled both 'train' and 'test'; only the train
        # half is consumed below, so the test half is discarded.
        dftr = pd.DataFrame({'id': ids, 'train': 'train'})
        tdftr = pd.DataFrame({'id': ids, 'train': 'test'})
        train, _ = DataProcess.train_test_between_subject(
            gdata, pd.concat((dftr, tdftr)),
            [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12])
        train = DataProcess.merge_data(train)
        # 'pionts' typo comes from the helper's name defined elsewhere — keep.
        DLogger.logger().debug("total points: " + str(get_total_pionts(train)))
        worker = LSTMBeh(2, 0, n_cells=ncells)
        OptBEH.optimise(worker, output_path, train, None,
                        learning_rate=learning_rate, global_iters=iters,
                        load_model_path=model_path)
def run_BD(i):
    """Fit a GQL model by ML optimisation to BD data for config index *i*.

    Hyper-parameters come from the module-level ``configs`` list; results and
    logs are written under a group-specific output directory.
    """
    tf.reset_default_graph()
    bd_data = DataReader.read_BD()
    learning_rate = configs[i]['lr']
    group = configs[i]['g']
    output_path = Paths.local_path + 'BD/gql-ml-opt/' + group + '/'
    with LogFile(output_path, 'run.log'):
        DLogger.logger().debug("group: " + str(group))
        group_data = bd_data.loc[bd_data.diag == group]
        subject_ids = group_data['id'].unique().tolist()
        # Each subject is assigned to both splits via two label frames.
        train_labels = pd.DataFrame({'id': subject_ids, 'train': 'train'})
        test_labels = pd.DataFrame({'id': subject_ids, 'train': 'test'})
        train, test = DataProcess.train_test_between_subject(
            group_data, pd.concat((train_labels, test_labels)),
            [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12])
        DLogger.logger().debug("total points: " + str(get_total_pionts(train)))
        worker = GQL.get_instance(2, 2, {})
        train = DataProcess.merge_data(train)
        OptML.optimise(worker, output_path, train, test,
                       global_iters=1000, learning_rate=learning_rate)
def run_BD_RNN(i):
    """Train an LSTM on one cross-validation fold of BD data (config *i*).

    Uses the module-level ``data`` frame and the precomputed
    ``cv_lists_group`` fold assignments; warm-starts from a shared
    RNN-init checkpoint.

    NOTE(review): another ``run_BD_RNN`` defined later in this file shadows
    this one at import time — rename one of the two if both are needed.
    """
    tf.reset_default_graph()
    ncells = configs[i]['cells']
    learning_rate = configs[i]['lr']
    group = configs[i]['g']
    cv_index = configs[i]['cv_index']
    output_path = (Paths.local_path + 'BD/rnn-cv/' + str(ncells) + 'cells/'
                   + group + '/' + 'fold' + str(cv_index) + '/')
    with LogFile(output_path, 'run.log'):
        indx_data = cv_lists_group[group][cv_index]
        gdata = data.loc[data.diag == group]
        # Only the training split is used here; the test split is discarded.
        train, _ = DataProcess.train_test_between_subject(
            gdata, indx_data, [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12])
        indx_data.to_csv(output_path + 'train_test.csv')
        train_merged = DataProcess.merge_data(train)
        DLogger.logger().debug("total points: "
                               + str(get_total_pionts(train_merged)))
        del train  # release the unmerged copy before optimisation
        worker = LSTMBeh(2, 0, n_cells=ncells)
        OptBEH.optimise(worker, output_path, train_merged, None,
                        learning_rate=learning_rate, global_iters=3000,
                        load_model_path='../inits/rnn-init/'
                                        + str(ncells) + 'cells/model-final/')
def run_BD_GQL(i):
    """Fit a GQL model on one cross-validation fold of BD data (config *i*).

    Uses the module-level ``data`` frame and ``cv_lists_group`` fold
    assignments; writes the fold's train/test index table next to the logs.
    """
    tf.reset_default_graph()
    learning_rate = configs[i]['lr']
    group = configs[i]['g']
    cv_index = configs[i]['cv_index']
    iters = configs[i]['iters']
    output_path = (Paths.local_path + 'BD/gql-ml-rand-opt/' + group + '/'
                   + 'fold' + str(cv_index) + '/')
    with LogFile(output_path, 'run.log'):
        indx_data = cv_lists_group[group][cv_index]
        gdata = data.loc[data.diag == group]
        # Only the training split is consumed; the test split is discarded
        # (optimise below receives None for the test argument).
        train, _ = DataProcess.train_test_between_subject(
            gdata, indx_data, [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12])
        indx_data.to_csv(output_path + 'train_test.csv')
        DLogger.logger().debug("total points: " + str(get_total_pionts(train)))
        worker = GQL.get_instance(2, 2, {})
        train = DataProcess.merge_data(train)
        OptML.optimise(worker, output_path, train, None,
                       learning_rate=learning_rate, global_iters=iters)
def run_BD_RNN(i):
    """Initialise (global_iters=0) an LSTM on the full BD data set.

    All subjects are labelled 'train'; with zero optimisation iterations this
    effectively saves a randomly-initialised model under the rnn-init path.

    NOTE(review): this definition shadows the earlier ``run_BD_RNN`` in this
    file — rename one of the two if both are needed.
    """
    tf.reset_default_graph()
    ncells = configs[i]['cells']
    lr = configs[i]['lr']
    output_path = Paths.local_path + 'BD/rnn-init/' + str(ncells) + 'cells/'
    with LogFile(output_path, 'run.log'):
        ids = data['id'].unique().tolist()
        dftr = pd.DataFrame({'id': ids, 'train': 'train'})
        # No subject is labelled 'test', so the test split is discarded.
        train, _ = DataProcess.train_test_between_subject(
            data, dftr, [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12])
        train = DataProcess.merge_data(train)
        DLogger.logger().debug("total points: " + str(get_total_pionts(train)))
        worker = LSTMBeh(2, 0, n_cells=ncells)
        lrh.OptBEH.optimise(worker, output_path, train, None,
                            learning_rate=lr, global_iters=0)