def run_experiment(config, hyper_params, random_seeds): experiment_root = hyper_params['experiment_root'] best_acc = -1 best_results = [np.NAN, np.NAN, np.NAN] for seed in random_seeds: hyper_params['random_seed'] = seed hyper_params['experiment_root'] = experiment_root + '.' + str(seed) params = merge_params(config, hyper_params) if os.path.exists(os.path.join(params.experiment_root, 'mlp.pkl')): print 'found existing mlp.pkl: {}'.format(params.experiment_root) else: print 'no mlp.pkl found at: {}'.format(params.experiment_root) if not config.get('only_extract_results', False): train_convnet(params) try: values = extract_results(params.experiment_root, mode='misclass') results = np.multiply( 100, [ # 1 - values['test_y_misclass'], # 1 - values['test_wseq_misclass_rate'], # 1 - values['test_wtrial_misclass_rate']]); 1 - values['frame_misclass'], 1 - values['sequence_misclass'], 1 - values['trial_misclass'] ]) # save the best results if np.max(results[2]) > best_acc: best_results = results best_acc = np.max(results[2]) except: print traceback.format_exc() results = [np.NAN, np.NAN, np.NAN] print 'results for seed {}: {}'.format(seed, results) if params.save_output: output = extract_output(params, values['best_epoch']) save(os.path.join(params.experiment_root, 'best_output.pklz'), output) print 'best results: {}'.format(best_results) return best_results
def run_experiment(config, hyper_params, random_seeds): experiment_root = hyper_params['experiment_root']; best_acc = -1; best_results = [np.NAN, np.NAN, np.NAN]; for seed in random_seeds: hyper_params['random_seed'] = seed; hyper_params['experiment_root'] = experiment_root + '.' + str(seed); params = merge_params(config, hyper_params); if os.path.exists(os.path.join(params.experiment_root, 'mlp.pkl')): print 'found existing mlp.pkl: {}'.format(params.experiment_root); else: print 'no mlp.pkl found at: {}'.format(params.experiment_root); if not config.get('only_extract_results', False): train_convnet(params); try: values = extract_results(params.experiment_root, mode='misclass'); results = np.multiply(100, [ # 1 - values['test_y_misclass'], # 1 - values['test_wseq_misclass_rate'], # 1 - values['test_wtrial_misclass_rate']]); 1 - values['frame_misclass'], 1 - values['sequence_misclass'], 1 - values['trial_misclass']]); # save the best results if np.max(results[2]) > best_acc: best_results = results; best_acc = np.max(results[2]); except: print traceback.format_exc(); results = [np.NAN, np.NAN, np.NAN]; print 'results for seed {}: {}'.format(seed, results); if params.save_output: output = extract_output(params, values['best_epoch']); save(os.path.join(params.experiment_root, 'best_output.pklz'), output); print 'best results: {}'.format(best_results); return best_results;
# NOTE(review): this collapsed line begins mid-definition — `params=config, )`
# closes a call whose opening lies outside this chunk, so the fragment cannot
# be safely reformatted or restructured from here. It appears to contain the
# tail of a training routine (save settings.yaml, run train.main_loop), a
# get_default_config_path() helper, and a __main__ entry point that optionally
# trains and then prints frame/sequence/trial accuracies. Left byte-identical;
# recover the original multi-line layout from version control before editing.
params=config, ); save_yaml_file(yaml_str, os.path.join(config.experiment_root, 'settings.yaml')); with log_timing(log, 'training network'): train.main_loop(); def get_default_config_path(): return os.path.join(os.path.dirname(__file__),'train_convnet.cfg'); if __name__ == '__main__': config = load_config(default_config=get_default_config_path(), reset_logging=False); if not config.get('only_extract_results', False): train_convnet(config); scan_for_best_performance(config.experiment_root, 'valid_y_misclass'); scan_for_best_performance(config.experiment_root, 'valid_ptrial_misclass_rate') values = extract_results(config.experiment_root, mode='misclass'); print np.multiply(100, [ # 1 - values['test_y_misclass'], # 1 - values['test_wseq_misclass_rate'], # 1 - values['test_wtrial_misclass_rate']]); 1 - values['frame_misclass'], 1 - values['sequence_misclass'], 1 - values['trial_misclass']]);
def pair_cross_trial_test(config, pairs=None): if pairs is None: pairs = [ [18, 19], [20, 21], [22, 23], [ 0, 1], [15, 9], [16, 17], [11, 5], [12, 6], [ 2, 3], [10, 4], [13, 7], [14, 8], ]; # config.experiment_root = os.path.join(config.experiment_root, 'cross-trial') ; accuracy = np.zeros(len(pairs)) results = np.zeros([len(pairs),3]); for i in xrange(len(pairs)): test_stimulus_ids = pairs[i]; train_stimulus_ids = set(); for j in xrange(48): if not j in test_stimulus_ids: train_stimulus_ids.add(j); train_stimulus_ids = list(train_stimulus_ids); log.info('training stimuli: {} \t test stimuli: {}'.format(train_stimulus_ids, test_stimulus_ids)); hyper_params = { 'experiment_root' : os.path.join(config.experiment_root, 'pair'+str(pairs[i])), 'remove_train_stimulus_ids' : test_stimulus_ids, 'remove_test_stimulus_ids' : train_stimulus_ids, }; params = merge_params(config, hyper_params); # dummy values for testing # results[i] = [i, i*10, i*100.]; # accuracy[i] = 0; # continue; if os.path.exists(os.path.join(params.experiment_root, 'mlp.pkl')): print 'found existing mlp.pkl: {}'.format(params.experiment_root); else: print 'no mlp.pkl found at: {}'.format(params.experiment_root); if not config.get('only_extract_results', False): train_mlp(params); values = extract_results(params.experiment_root, mode='misclass'); results[i] = np.multiply(100, [ # 1 - values['test_y_misclass'], # 1 - values['test_wseq_misclass_rate'], # 1 - values['test_wtrial_misclass_rate']]); 1 - values['frame_misclass'], 1 - values['sequence_misclass'], 1 - values['trial_misclass']]); accuracy[i] = 100 * (1 - extract_best_result(params.experiment_root, mode='misclass', check_dataset='test')[0]); print results; print results.mean(axis=0); print results.max(axis=1); print accuracy; print accuracy.mean(); return results, accuracy;
def pair_cross_trial_test(config, pairs=None): if pairs is None: pairs = [ [18, 19], [20, 21], [22, 23], [0, 1], [15, 9], [16, 17], [11, 5], [12, 6], [2, 3], [10, 4], [13, 7], [14, 8], ] # config.experiment_root = os.path.join(config.experiment_root, 'cross-trial') ; accuracy = np.zeros(len(pairs)) results = np.zeros([len(pairs), 3]) for i in xrange(len(pairs)): test_stimulus_ids = pairs[i] train_stimulus_ids = set() for j in xrange(48): if not j in test_stimulus_ids: train_stimulus_ids.add(j) train_stimulus_ids = list(train_stimulus_ids) log.info('training stimuli: {} \t test stimuli: {}'.format( train_stimulus_ids, test_stimulus_ids)) hyper_params = { 'experiment_root': os.path.join(config.experiment_root, 'pair' + str(pairs[i])), 'remove_train_stimulus_ids': test_stimulus_ids, 'remove_test_stimulus_ids': train_stimulus_ids, } params = merge_params(config, hyper_params) # dummy values for testing # results[i] = [i, i*10, i*100.]; # accuracy[i] = 0; # continue; if os.path.exists(os.path.join(params.experiment_root, 'mlp.pkl')): print 'found existing mlp.pkl: {}'.format(params.experiment_root) else: print 'no mlp.pkl found at: {}'.format(params.experiment_root) if not config.get('only_extract_results', False): train_mlp(params) values = extract_results(params.experiment_root, mode='misclass') results[i] = np.multiply( 100, [ # 1 - values['test_y_misclass'], # 1 - values['test_wseq_misclass_rate'], # 1 - values['test_wtrial_misclass_rate']]); 1 - values['frame_misclass'], 1 - values['sequence_misclass'], 1 - values['trial_misclass'] ]) accuracy[i] = 100 * (1 - extract_best_result( params.experiment_root, mode='misclass', check_dataset='test')[0]) print results print results.mean(axis=0) print results.max(axis=1) print accuracy print accuracy.mean() return results, accuracy
# NOTE(review): this collapsed line begins mid-definition — the leading
# `log.info('done')` is the tail of a function whose start lies outside this
# chunk — so it cannot be safely reformatted or restructured from here. It
# appears to contain a get_default_config_path() helper and a __main__ entry
# point that optionally trains an SDA-MLP and prints frame/sequence/trial
# accuracies. Left byte-identical; recover the original multi-line layout
# from version control before editing.
log.info('done'); def get_default_config_path(): return os.path.join(os.path.dirname(__file__),'train_sda_mlp.cfg'); if __name__ == '__main__': # config = load_config(default_config='../../train_sda.cfg', reset_logging=False); config = load_config(default_config=get_default_config_path(), reset_logging=False); hyper_params = { }; params = merge_params(config, hyper_params); if not config.get('only_extract_results', False): train_mlp(params); scan_for_best_performance(params.experiment_root, 'valid_y_misclass'); scan_for_best_performance(params.experiment_root, 'valid_ptrial_misclass_rate') values = extract_results(config.experiment_root, mode='misclass'); print np.multiply(100, [ # 1 - values['test_y_misclass'], # 1 - values['test_wseq_misclass_rate'], # 1 - values['test_wtrial_misclass_rate']]); 1 - values['frame_misclass'], 1 - values['sequence_misclass'], 1 - values['trial_misclass']]);