Ejemplo n.º 1
0
        
        job_config.experiment_root = os.path.join(
                                                  config.output_root,
                                                  job_config.type,
                                                  job.name
                                                  );
        log.debug('experiment root: {}'.format(job_config.experiment_root));
        
        print job_config;
        
#         try:
        if job_config.type == 'cnn':
            train_convnet(job_config);                
        elif job_config.type == 'fftcnn':
            train_convnet(job_config);
        elif job_config.type == 'sda':
            train_mlp(job_config);
        else:
            log.error('unsupported job type {}'.format(job_config.type));
 
#         except:
#             log.fatal("Unexpected error:", sys.exc_info());

if __name__ == '__main__':
    # Default batch configuration shipped next to this script.
    base_cfg_path = os.path.join(os.path.dirname(__file__), 'batch.cfg')

    # Load settings (command line / environment on top of the default file).
    config = load_config(default_config=base_cfg_path, reset_logging=False)

    # Fold the raw file contents back in; merge_params order decides which
    # side wins on conflicts — TODO confirm against merge_params docs.
    config = merge_params(load_config_file(base_cfg_path), config)

    process_jobs(config)
Ejemplo n.º 2
0
    
    train, yaml_str = load_yaml_file(
                   os.path.join(os.path.dirname(__file__), 'train_convnet_template.yaml'),
                   params=config,
                   );
    
    save_yaml_file(yaml_str, os.path.join(config.experiment_root, 'settings.yaml'));
        
    with log_timing(log, 'training network'):    
        train.main_loop();
        
def get_default_config_path():
    """Return the path of 'train_convnet.cfg' located beside this module."""
    module_dir = os.path.dirname(__file__)
    return os.path.join(module_dir, 'train_convnet.cfg')

if __name__ == '__main__':
    config = load_config(default_config=get_default_config_path(), reset_logging=False);

    if not config.get('only_extract_results', False):
        train_convnet(config);
    
    scan_for_best_performance(config.experiment_root, 'valid_y_misclass');
    scan_for_best_performance(config.experiment_root, 'valid_ptrial_misclass_rate')

    values = extract_results(config.experiment_root, mode='misclass');        
            
    print np.multiply(100, [
#                         1 - values['test_y_misclass'],
#                         1 - values['test_wseq_misclass_rate'],
#                         1 - values['test_wtrial_misclass_rate']]);     
               
                1 - values['frame_misclass'],
Ejemplo n.º 3
0
    report = analyze_reconstruction_error(datasets, minibatched_output_fn);
    
    for line in report:
        print '{:>6} : mean = {:.3f} std = {:.3f} max = {:.3f}'.format(*line);
    
#     analyze_frames(dataset, output_fn);    

    if worst_frames:
        analyze_worst_frames(datasets['train'], minibatched_output_fn, output_path=output_path);
     
#     analyze_reconstruction(dataset, minibatched_output_fn, hop_size);
    
    analyze_complex(datasets['test'], minibatched_output_fn, config.hop_size, output_path=output_path);

    return report;
    

if __name__ == '__main__':
    # NOTE(review): hard-coded local paths below; move them into a config
    # file instead of editing this script per experiment.
    cfg_path = ('/Users/sstober/git/deepbeat/deepbeat/output/gpu/sda/'
                'batch_100Hz_100-50-25-10/sda_100Hz_100-50-25-10.cfg')
    config = load_config(default_config=cfg_path)

    config.dataset_root = '/Users/sstober/work/datasets/Dan/eeg'
    # Successive assignments: only the final experiment_root takes effect.
    config.experiment_root = '/Users/sstober/git/deepbeat/deepbeat/output/gpu/sda/batch_400Hz_80-50-25-10/'
    config.experiment_root = '/Users/sstober/git/deepbeat/deepbeat/output/gpu/sda/batch_400Hz_100-50-25-10/'
    config.experiment_root = '/Users/sstober/git/deepbeat/deepbeat/output/gpu/sda/batch_100Hz_100-50-25-10/'

    # Analyze only subject 1 and skip the worst-frame pass.
    analyze(config, subjects=[1], worst_frames=False)
Ejemplo n.º 4
0
        log.debug('job overrides: {}'.format(job.overrides))
        job_config = merge_params(job_config, job.overrides)

        job_config.experiment_root = os.path.join(config.output_root,
                                                  job_config.type, job.name)
        log.debug('experiment root: {}'.format(job_config.experiment_root))

        print job_config

        #         try:
        if job_config.type == 'cnn':
            train_convnet(job_config)
        elif job_config.type == 'fftcnn':
            train_convnet(job_config)
        elif job_config.type == 'sda':
            train_mlp(job_config)
        else:
            log.error('unsupported job type {}'.format(job_config.type))


#         except:
#             log.fatal("Unexpected error:", sys.exc_info());

if __name__ == '__main__':
    # Batch configuration file living in the same directory as this script.
    cfg_file = os.path.join(os.path.dirname(__file__), 'batch.cfg')

    config = load_config(default_config=cfg_file, reset_logging=False)

    # Merge the raw config-file values with the already-loaded settings.
    config = merge_params(load_config_file(cfg_file), config)

    process_jobs(config)
Ejemplo n.º 5
0
                    1 - values['sequence_misclass'],
                    1 - values['trial_misclass']]);
        
        accuracy[i] = 100 * (1 - extract_best_result(params.experiment_root, mode='misclass', check_dataset='test')[0]);
    
    print results;
    print results.mean(axis=0);
    print results.max(axis=1);
    
    print accuracy;
    print accuracy.mean();
    
    return results, accuracy;

if __name__ == '__main__':
    config = load_config(default_config=
                 os.path.join(os.path.dirname(__file__),'train_convnet.cfg'), reset_logging=True);                 
    
    # override settings
#     config.experiment_root = os.path.join(config.experiment_root, 'cross-trial');
#     config.subjects = None;
#     config.subjects = 8; # subj9 0-indexed
#     config.max_epochs = 5;

    # FIXME: remove manual override
#     config.only_extract_results = True;
#     config.experiment_root = '/Users/sstober/git/deepbeat/deepbeat/output/gpu/fftcnn/cross-test2/'
#     config.experiment_root = '/Users/sstober/git/deepbeat/deepbeat/output/gpu/fftcnn/exp08.a-crosstrial.subj9/';
    
    if config.get('full_cross_trial', False):
        full_cross_trial_test(config);
    else:
Ejemplo n.º 6
0
from deepthought.util.config_util import merge_params

from deepthought.experiments.ismir2014.train_convnet import train_convnet
# from deepthought.experiments.ismir2014.cross_trial_test import full_cross_trial_test;


def run(params):
    """Train a convnet with *params*, logging (not propagating) any failure.

    Best-effort wrapper for batch runs: one failing configuration must not
    abort the remaining jobs, so the error is reported and swallowed.

    :param params: configuration object forwarded to ``train_convnet``
    :returns: ``None``
    """
    try:
        train_convnet(params)
    except Exception:
        # Fix: was a bare `except:`, which also swallowed SystemExit and
        # KeyboardInterrupt, making batch runs impossible to interrupt.
        # Single-argument print() behaves identically on Python 2 and 3.
        print("Unexpected error: {}".format(sys.exc_info()[0]))


if __name__ == '__main__':
    config = load_config(default_config=os.path.join(os.path.dirname(__file__),
                                                     'train_convnet.cfg'),
                         reset_logging=False)

    batch_subjects = config.get('batch_subjects', xrange(13))

    # per person
    for i in batch_subjects:
        hyper_params = {
            'experiment_root':
            os.path.join(config.experiment_root, 'individual',
                         'subj' + str(i + 1)),
            'subjects': [i]
        }

        params = merge_params(config, hyper_params)
Ejemplo n.º 7
0
    print confusion_matrix(labels, y_pred);
    
    labels = np.argmax(testset.y, axis=1)
    print classification_report(labels, y_pred);
    print confusion_matrix(labels, y_pred);

    misclass = (labels != y_pred).mean()
    print misclass
    
#     # alternative version from KeepBestParams
#     minibatch = T.matrix('minibatch')
#     output_fn = theano.function(inputs=[minibatch],outputs=T.argmax( model.fprop(minibatch), axis = 1 ));
#     it = testset.iterator('sequential', batch_size=batch_size, targets=False);
#     y_pred = [output_fn(mbatch) for mbatch in it];

#             y_hat = T.argmax(state, axis=1)
#             y = T.argmax(target, axis=1)
#             misclass = T.neq(y, y_hat).mean()
#             misclass = T.cast(misclass, config.floatX)
#             rval['misclass'] = misclass
#             rval['nll'] = self.cost(Y_hat=state, Y=target)
        
    

    log.debug('done');
    


if __name__ == '__main__':
    # Relative default path: expects to be launched from the package dir.
    settings = load_config(default_config='../train_sda.cfg', reset_logging=False)
    analyze(settings)
Ejemplo n.º 8
0
                   params=params,
                   );
                   
    save_yaml_file(train_yaml_str, os.path.join(params.experiment_root, 'mlp_train.yaml'));
    
    with log_timing(log, 'training MLP'):    
        train.main_loop();
        
    log.info('done');
    
def get_default_config_path():
    """Return the path of 'train_sda_mlp.cfg' that sits next to this module."""
    here = os.path.dirname(__file__)
    return os.path.join(here, 'train_sda_mlp.cfg')

if __name__ == '__main__':
#     config = load_config(default_config='../../train_sda.cfg', reset_logging=False);
    config = load_config(default_config=get_default_config_path(), reset_logging=False);
                         
    hyper_params = {   
    };
    
    params = merge_params(config, hyper_params);

    if not config.get('only_extract_results', False):
        train_mlp(params);
        
    scan_for_best_performance(params.experiment_root, 'valid_y_misclass');
    scan_for_best_performance(params.experiment_root, 'valid_ptrial_misclass_rate')
    
    values = extract_results(config.experiment_root, mode='misclass');        
            
    print np.multiply(100, [
Ejemplo n.º 9
0
    for col, group, subjects in groups:
        hyper_params = {
            'experiment_root': os.path.join(config.experiment_root,
                                            str(group)),
            'subjects': subjects
            # NOTE: layerX_content should still  point to global sda/ folder
        }
        table_row[col:col + 3] = run_experiment(config, hyper_params,
                                                random_seeds)

    print table_row
    return table_row


if __name__ == '__main__':
    config = load_config(default_config=get_default_config_path())

    # NOTE(review): manual overrides (save_output, only_extract_results,
    # alternative experiment roots) belong in the config file, not here.
    table_row = run_experiments_for_table_row(config)

    # Single-argument print() is output-identical on Python 2 and 3.
    print(config.experiment_root)
Ejemplo n.º 10
0
              [11, 'all', 'all']
    ];
    
    for col, group, subjects in groups:
        hyper_params = { 
                    'experiment_root' : os.path.join(config.experiment_root, str(group)),
                    'subjects' : subjects
                    # NOTE: layerX_content should still  point to global sda/ folder
                    };
        table_row[col:col+3] = run_experiment(config, hyper_params, random_seeds);
        
    print table_row;
    return table_row;

if __name__ == '__main__':
    config = load_config(default_config=get_default_config_path());
    
    # FIXME: remove manual override
#     config.only_extract_results = True;
#     config.experiment_root = '/Users/stober/git/deepbeat/deepbeat/output/gpu.old/sda/exp6.14all'
#     config.experiment_root = '/Users/sstober/git/deepbeat/deepbeat/output/gpu/sda/batch1/batch_50-25-10.a lr 0.00005 + Momentum 0.5-0.7/'
#     config.experiment_root = '/Users/sstober/git/deepbeat/deepbeat/output/gpu/sda/batch2/batch_50-25-10.f lr 0.00005 + AdaDelta/'
#     config.experiment_root = '/Users/sstober/git/deepbeat/deepbeat/output/gpu/sda/batch2/batch_50-25-10.d lr0.3 + 0.5 Momentum';
    
#     config.experiment_root = '/Users/sstober/git/deepbeat/deepbeat/output/gpu/sda/batch3/batch_50-25-10.k    ';
    
    row = run_experiments_for_table_row(config);
    
    print config.experiment_root;
    s = '';
    for i,f in enumerate(row): 
Ejemplo n.º 11
0
    if worst_frames:
        analyze_worst_frames(datasets['train'],
                             minibatched_output_fn,
                             output_path=output_path)


#     analyze_reconstruction(dataset, minibatched_output_fn, hop_size);

    analyze_complex(datasets['test'],
                    minibatched_output_fn,
                    config.hop_size,
                    output_path=output_path)

    return report

if __name__ == '__main__':
    # Hard-coded 100Hz SDA batch configuration (developer-local path).
    sda_cfg = ('/Users/sstober/git/deepbeat/deepbeat/output/gpu/sda/'
               'batch_100Hz_100-50-25-10/sda_100Hz_100-50-25-10.cfg')
    config = load_config(default_config=sda_cfg)

    config.dataset_root = '/Users/sstober/work/datasets/Dan/eeg'
    # Successive assignments: only the last experiment_root takes effect.
    config.experiment_root = '/Users/sstober/git/deepbeat/deepbeat/output/gpu/sda/batch_400Hz_80-50-25-10/'
    config.experiment_root = '/Users/sstober/git/deepbeat/deepbeat/output/gpu/sda/batch_400Hz_100-50-25-10/'
    config.experiment_root = '/Users/sstober/git/deepbeat/deepbeat/output/gpu/sda/batch_100Hz_100-50-25-10/'

    # Subject 1 only; worst-frame analysis disabled.
    analyze(config, subjects=[1], worst_frames=False)
Ejemplo n.º 12
0
def plot2(root):
    """Render the full-channel and reduced-channel plots for one result folder.

    Assumes *root* does not end with '/' — TODO confirm at call sites.
    """
    model_pkl = root + '/mlp.pkl'
    plot(model_pkl, channels, root + '/plot.pdf')
    plot(model_pkl, less_channels, root + '/plot-less.pdf', get_color_variants(2))
    
def plot_all(root_path):
    """Walk *root_path* and call plot2() on every folder containing 'mlp.pkl'."""
    import fnmatch

    for folder, _subdirs, files in os.walk(root_path):
        # One plot2() call per matching name, as fnmatch.filter yields them
        # (in practice at most one 'mlp.pkl' per directory).
        for _match in fnmatch.filter(files, 'mlp.pkl'):
            plot2(folder)

if __name__ == '__main__':
    config = load_config(default_config=
#                 os.path.join(os.path.dirname(__file__), '..', 'run', 'train_convnet.cfg'), reset_logging=True);
                os.path.join(os.path.dirname(__file__), '..', 'run', 'train_fftconvnet.cfg'), reset_logging=True);
#                 os.path.join(os.path.dirname(__file__), '..', 'run', 'train_sda_mlp.cfg'), reset_logging=True);
    
    # FIXME:
#     root = '/Users/stober/git/deepbeat/deepbeat/output/gpu/sda/exp6.14all/';
#     root = '/Users/stober/git/deepbeat/deepbeat/output/gpu/cnn/bigbatch/individual/';
#     plot_batch(root);
    
#     for i in xrange(12):
#         root = '/Users/stober/git/deepbeat/deepbeat/output/gpu/cnn/bigbatch/cross-trial/pair'+str(i);
#         plot(root+'/mlp.pkl', channels, root+'/plot.pdf');   
#         plot(root+'/mlp.pkl', less_channels, root+'/plot-less.pdf', get_color_variants(2));
    
    # FIXME
    root = '/Users/sstober/git/deepbeat/deepbeat/output/gpu/sda/subj2_50-25-10.f';