示例#1
0
                   params=config,
                   );
    
    save_yaml_file(yaml_str, os.path.join(config.experiment_root, 'settings.yaml'));
        
    with log_timing(log, 'training network'):    
        train.main_loop();
        
def get_default_config_path():
    """Return the path of the default config file located next to this module."""
    module_dir = os.path.dirname(__file__)
    return os.path.join(module_dir, 'train_convnet.cfg')

if __name__ == '__main__':
    # Load the experiment configuration, falling back to the .cfg file
    # shipped next to this module; keep existing logging handlers.
    config = load_config(default_config=get_default_config_path(),
                         reset_logging=False)

    # Training can be skipped when only result extraction is requested.
    if not config.get('only_extract_results', False):
        train_convnet(config)

    # Report the best values observed for both validation metrics.
    scan_for_best_performance(config.experiment_root, 'valid_y_misclass')
    scan_for_best_performance(config.experiment_root,
                              'valid_ptrial_misclass_rate')

    values = extract_results(config.experiment_root, mode='misclass')

    # Convert misclassification rates to accuracy percentages.
    # print() with a single argument behaves identically under Python 2
    # and is required under Python 3.
    print(np.multiply(100, [
        1 - values['frame_misclass'],
        1 - values['sequence_misclass'],
        1 - values['trial_misclass']]))
示例#2
0
    with log_timing(log, 'training network'):
        train.main_loop()


def get_default_config_path():
    """Path of the default ``train_convnet.cfg`` that sits beside this module."""
    here = os.path.dirname(__file__)
    return os.path.join(here, 'train_convnet.cfg')


if __name__ == '__main__':
    # Load the experiment configuration (default .cfg next to this module)
    # without resetting logging handlers.
    config = load_config(default_config=get_default_config_path(),
                         reset_logging=False)

    # Skip training when only result extraction is requested.
    if not config.get('only_extract_results', False):
        train_convnet(config)

    # Report the best values observed for both validation metrics.
    scan_for_best_performance(config.experiment_root, 'valid_y_misclass')
    scan_for_best_performance(config.experiment_root,
                              'valid_ptrial_misclass_rate')

    values = extract_results(config.experiment_root, mode='misclass')

    # Misclassification rates -> accuracy percentages.
    # print() with a single argument is valid Python 2 and Python 3 alike.
    print(np.multiply(
        100,
        [
            1 - values['frame_misclass'],
            1 - values['sequence_misclass'],
            1 - values['trial_misclass'],
        ]))
示例#3
0
        
    log.info('done');
    
def get_default_config_path():
    """Return the path of the default config file located next to this module."""
    module_dir = os.path.dirname(__file__)
    return os.path.join(module_dir, 'train_sda_mlp.cfg')

if __name__ == '__main__':
    # Load the experiment configuration (default .cfg next to this module)
    # without resetting logging handlers.
    config = load_config(default_config=get_default_config_path(),
                         reset_logging=False)

    # No per-run overrides here; merge_params still produces the params
    # object that train_mlp expects.
    hyper_params = {}

    params = merge_params(config, hyper_params)

    # Skip training when only result extraction is requested.
    if not config.get('only_extract_results', False):
        train_mlp(params)

    # Report the best values observed for both validation metrics.
    scan_for_best_performance(params.experiment_root, 'valid_y_misclass')
    scan_for_best_performance(params.experiment_root,
                              'valid_ptrial_misclass_rate')

    values = extract_results(config.experiment_root, mode='misclass')

    # Misclassification rates -> accuracy percentages
    # (frame / sequence / trial level).
    # print() with a single argument is valid Python 2 and Python 3 alike.
    print(np.multiply(100, [
        1 - values['frame_misclass'],
        1 - values['sequence_misclass'],
        1 - values['trial_misclass']]))
示例#4
0
#     config.experiment_root = '/Users/stober/git/deepbeat/deepbeat/output/gpu/sda/exp2.14all/';


    # Train one MLP per subject, each in its own subjX sub-directory of the
    # experiment root. NOTE(review): the subject count 13 is hard-coded here
    # and in the reporting loop below -- confirm against the dataset.
    for i in xrange(13):
        hyper_params = { 
                    'experiment_root' : os.path.join(config.experiment_root, 'subj'+str(i+1)),
                    'subjects' : [i]  
                    # NOTE: layerX_content should still  point to global sda/ folder
                    };

        # Without a globally pre-trained SDA, point each layer at the
        # subject-specific pre-trained SDA layer files instead.
        if config.global_sda == False: 
            hyper_params['layer0_content'] = os.path.join(hyper_params['experiment_root'], 'sda', 'sda_layer0_tied.pkl');
            hyper_params['layer1_content'] = os.path.join(hyper_params['experiment_root'], 'sda', 'sda_layer1_tied.pkl');
            hyper_params['layer2_content'] = os.path.join(hyper_params['experiment_root'], 'sda', 'sda_layer2_tied.pkl');
            hyper_params['layer3_content'] = os.path.join(hyper_params['experiment_root'], 'sda', 'sda_layer3_tied.pkl');

        params = merge_params(config, hyper_params);

        # An existing 'epochs' directory marks an already-trained subject;
        # skip it so the batch run is resumable.
        if os.path.exists(os.path.join(params.experiment_root, 'epochs')):
            print 'skipping existing path: {}'.format(params.experiment_root);
            continue;

        train_mlp(params);

    # generate plot.pdfs
#     plot_batch(config.experiment_root);

    # print best performance values for every subject
    for i in xrange(13):
        scan_for_best_performance(os.path.join(config.experiment_root, 'subj'+str(i+1)));