Example #1
0
        params=config,
    )

    save_yaml_file(yaml_str,
                   os.path.join(config.experiment_root, 'settings.yaml'))

    with log_timing(log, 'training network'):
        train.main_loop()


def get_default_config_path():
    """Return the path of the default convnet config file.

    ``train_convnet.cfg`` is expected to live in the same directory
    as this module.
    """
    module_dir = os.path.dirname(__file__)
    return os.path.join(module_dir, 'train_convnet.cfg')


# Script entry point: train a convnet experiment, then summarize its results.
if __name__ == '__main__':
    # Load settings, falling back to the packaged default config file.
    config = load_config(default_config=get_default_config_path(),
                         reset_logging=False)

    # 'only_extract_results' lets the analysis re-run without retraining.
    if not config.get('only_extract_results', False):
        train_convnet(config)

    # Report the best checkpoints for the two validation metrics.
    scan_for_best_performance(config.experiment_root, 'valid_y_misclass')
    scan_for_best_performance(config.experiment_root,
                              'valid_ptrial_misclass_rate')

    # Collect misclassification results for the summary printed below.
    values = extract_results(config.experiment_root, mode='misclass')

    print np.multiply(
        100,
        [
            #                         1 - values['test_y_misclass'],
            #                         1 - values['test_wseq_misclass_rate'],
        train_mlp(trainset, testset, config);
    
    # load model    
    with log_timing(log, 'loading MLP model from {}'.format(mlp_file)):
        mlp = serial.load(mlp_file);
    
    
    
    
    
    y_true = trainset.labels;
#     y_pred = mlp.fprop(trainset.X);
    
    
    X = mlp.get_input_space().make_theano_batch()
    Y = mlp.fprop( X )
    Y = T.argmax( Y, axis = 1 )
    f = theano.function( [X], Y )
    y_pred = f( trainset.X );
    
    # Compute confusion matrix
    print classification_report(y_true, y_pred);
    print confusion_matrix(y_true, y_pred);

    return mlp;

# Script entry point: load the SDA training config and run training.
if __name__ == '__main__':
    # NOTE(review): relative default path — assumes the script is launched
    # from a directory where '../train_sda.cfg' resolves; confirm with caller.
    config = load_config(default_config='../train_sda.cfg', reset_logging=False);

    train(config);
        # NOTE(review): fragment — body of a per-job loop inside a function
        # whose definition is outside this view (presumably process_jobs).
        # Apply per-job overrides on top of the shared job config.
        log.debug('job overrides: {}'.format(job.overrides))
        job_config = merge_params(job_config, job.overrides)

        # Each job writes into output_root/<type>/<name>.
        job_config.experiment_root = os.path.join(config.output_root,
                                                  job_config.type, job.name)
        log.debug('experiment root: {}'.format(job_config.experiment_root))

        print job_config

        # Dispatch on job type; 'cnn' and 'fftcnn' share the same trainer.
        #         try:
        if job_config.type == 'cnn':
            train_convnet(job_config)
        elif job_config.type == 'fftcnn':
            train_convnet(job_config)
        elif job_config.type == 'sda':
            train_mlp(job_config)
        else:
            # Unknown types are logged and skipped rather than aborting the batch.
            log.error('unsupported job type {}'.format(job_config.type))


#         except:
#             log.fatal("Unexpected error:", sys.exc_info());

# Script entry point: run a batch of jobs described by batch.cfg.
if __name__ == '__main__':
    # The default batch configuration lives next to this module.
    default_config = os.path.join(os.path.dirname(__file__), 'batch.cfg')
    config = load_config(default_config=default_config, reset_logging=False)

    # Overlay the loaded values on a fresh read of the raw defaults so that
    # keys missing from the user config are filled in from batch.cfg.
    # FIX: the deprecated file() builtin leaked an open handle; open() in a
    # with-block guarantees the config file is closed (Config reads it here).
    with open(default_config) as config_file:
        config = merge_params(Config(config_file), config)

    process_jobs(config)
                   params=params,
                   );
                   
    save_yaml_file(train_yaml_str, os.path.join(params.experiment_root, 'mlp_train.yaml'));
    
    with log_timing(log, 'training MLP'):    
        train.main_loop();
        
    log.info('done');
    
def get_default_config_path():
    """Return the path of the default SDA-MLP config file.

    ``train_sda_mlp.cfg`` is expected to live next to this module.
    """
    here = os.path.dirname(__file__)
    return os.path.join(here, 'train_sda_mlp.cfg')

# Script entry point: train an SDA-MLP experiment and summarize its results.
if __name__ == '__main__':
#     config = load_config(default_config='../../train_sda.cfg', reset_logging=False);
    config = load_config(default_config=get_default_config_path(), reset_logging=False);

    # Placeholder for manual hyper-parameter overrides (currently none).
    hyper_params = {
    };

    params = merge_params(config, hyper_params);

    # 'only_extract_results' lets the analysis re-run without retraining.
    if not config.get('only_extract_results', False):
        train_mlp(params);

    # Report the best checkpoints for the two validation metrics.
    scan_for_best_performance(params.experiment_root, 'valid_y_misclass');
    scan_for_best_performance(params.experiment_root, 'valid_ptrial_misclass_rate')

    # Collect misclassification results for the summary printed below.
    values = extract_results(config.experiment_root, mode='misclass');
            
    print np.multiply(100, [
        
        job_config.experiment_root = os.path.join(
                                                  config.output_root,
                                                  job_config.type,
                                                  job.name
                                                  );
        log.debug('experiment root: {}'.format(job_config.experiment_root));
        
        print job_config;
        
# NOTE(review): fragment — job-type dispatch inside a per-job loop whose
# enclosing function is outside this view. 'cnn' and 'fftcnn' share a trainer.
#         try:
        if job_config.type == 'cnn':
            train_convnet(job_config);
        elif job_config.type == 'fftcnn':
            train_convnet(job_config);
        elif job_config.type == 'sda':
            train_mlp(job_config);
        else:
            # Unknown types are logged and skipped rather than aborting the batch.
            log.error('unsupported job type {}'.format(job_config.type));

#         except:
#             log.fatal("Unexpected error:", sys.exc_info());

# Script entry point: run a batch of jobs described by batch.cfg.
if __name__ == '__main__':
    # The default batch configuration lives next to this module.
    default_config = os.path.join(os.path.dirname(__file__), 'batch.cfg')
    config = load_config(default_config=default_config, reset_logging=False)

    # Overlay the loaded values on a fresh read of the raw defaults so that
    # keys missing from the user config are filled in from batch.cfg.
    # FIX: the deprecated file() builtin leaked an open handle; open() in a
    # with-block guarantees the config file is closed (Config reads it here).
    with open(default_config) as config_file:
        config = merge_params(Config(config_file), config)

    process_jobs(config)
        x**x;
    log.info('done');
    
def run(params):
    """Train one convnet experiment; log (but do not re-raise) failures.

    Parameters
    ----------
    params : config object
        Must provide at least an ``experiment_root`` attribute and whatever
        ``train_convnet`` expects.
    """
    try:
        log.debug('running {}'.format(params.experiment_root))
#         dummy();
        train_convnet(params)
#         plot2(config.experiment_root);
    except Exception:
        # FIX: the bare ``except:`` also swallowed KeyboardInterrupt and
        # SystemExit; and ``log.fatal("Unexpected error:", sys.exc_info())``
        # passed exc_info as a stray %-format argument, producing a broken
        # log record.  log.exception records the full traceback properly.
        log.exception('Unexpected error while running {}'.format(
            params.experiment_root))


# Script entry point: grid search over fft-convnet hyper-parameters.
if __name__ == '__main__':
    config = load_config(default_config=
                 os.path.join(os.path.dirname(__file__),'train_fftconvnet.cfg'), reset_logging=True);

    # Grid axes; each can be overridden via the config file, otherwise the
    # listed defaults are used.  lr = learning rate; *_patterns = number of
    # convolution kernels; *_pools = pooling sizes (per config naming).
    lr_values = config.get('lr_values', [0.001, 0.0033, 0.01, 0.00033, 0.033, 0.1]);
    beat_patterns = config.get('beat_patterns', [10,20,30]);
    bar_patterns = config.get('bar_patterns', [10,20,30]);
    beat_pools = config.get('beat_pools', [1,3,5]);
    bar_pools = config.get('bar_pools', [1,3,5]);
    
#     with ThreadPoolExecutor(max_workers=config.num_processes) as executor:
    for lr in lr_values:
        for h1pat in bar_patterns:
            for h1pool in bar_pools:
                for h0pat in beat_patterns:
                    for h0pool in beat_pools:
                    
                        # collect params 
              [11, 'all', 'all']
    ];
    
    for col, group, subjects in groups:
        hyper_params = { 
                    'experiment_root' : os.path.join(config.experiment_root, str(group)),
                    'subjects' : subjects
                    # NOTE: layerX_content should still  point to global sda/ folder
                    };
        table_row[col:col+3] = run_experiment(config, hyper_params, random_seeds);
        
    print table_row;
    return table_row;

# Script entry point: run the group experiments for one results-table row.
if __name__ == '__main__':
    config = load_config(default_config=get_default_config_path());

    config.save_output = True;

    # FIXME: remove manual override
#     config.only_extract_results = True;
#
#     config.experiment_root = '/Users/sstober/git/deepbeat/deepbeat/output/gpu.old/cnn/bigbatch/';
#
#     config.experiment_root = '/Users/sstober/git/deepbeat/deepbeat/output/gpu/fftcnn/exp05.2/';  ## <= BEST so far
    # 72.2 & 72.8 & 73.7

    row = run_experiments_for_table_row(config);

    print config.experiment_root;
    s = '';
    report = analyze_reconstruction_error(datasets, minibatched_output_fn);
    
    for line in report:
        print '{:>6} : mean = {:.3f} std = {:.3f} max = {:.3f}'.format(*line);
    
#     analyze_frames(dataset, output_fn);    

    if worst_frames:
        analyze_worst_frames(datasets['train'], minibatched_output_fn, output_path=output_path);
     
#     analyze_reconstruction(dataset, minibatched_output_fn, hop_size);
    
    analyze_complex(datasets['test'], minibatched_output_fn, config.hop_size, output_path=output_path);

    return report;
    

# Script entry point: analyze a trained SDA model on the EEG dataset.
if __name__ == '__main__':
    config = load_config(default_config=
#                          '/Users/sstober/git/deepbeat/deepbeat/output/gpu/sda/batch_400Hz_80-50-25-10/sda_400Hz_80-50-25-10.cfg'
#                         '/Users/sstober/git/deepbeat/deepbeat/output/gpu/sda/batch_400Hz_100-50-25-10/sda_400Hz_100-50-25-10.cfg'
                        '/Users/sstober/git/deepbeat/deepbeat/output/gpu/sda/batch_100Hz_100-50-25-10/sda_100Hz_100-50-25-10.cfg'
                         );
#                          os.path.join(os.path.dirname(__file__), 'run', 'train_sda_mlp.cfg'), reset_logging=False);
#     config.dataset_suffix = '_channels';
    config.dataset_root = '/Users/sstober/work/datasets/Dan/eeg';
    # NOTE(review): experiment_root is assigned three times as a manual
    # toggle between experiments — only the last (100Hz) assignment is used.
    config.experiment_root = '/Users/sstober/git/deepbeat/deepbeat/output/gpu/sda/batch_400Hz_80-50-25-10/';
    config.experiment_root = '/Users/sstober/git/deepbeat/deepbeat/output/gpu/sda/batch_400Hz_100-50-25-10/';
    config.experiment_root = '/Users/sstober/git/deepbeat/deepbeat/output/gpu/sda/batch_100Hz_100-50-25-10/';
    # Analyze only subject 1 without the (slow) worst-frame analysis.
#     analyze(config, subjects='all', worst_frames=True);
    analyze(config, subjects=[1], worst_frames=False);
Example #9
0
    if worst_frames:
        analyze_worst_frames(datasets['train'],
                             minibatched_output_fn,
                             output_path=output_path)


#     analyze_reconstruction(dataset, minibatched_output_fn, hop_size);

    analyze_complex(datasets['test'],
                    minibatched_output_fn,
                    config.hop_size,
                    output_path=output_path)

    return report

# Script entry point: analyze a trained SDA model on the EEG dataset.
if __name__ == '__main__':
    config = load_config(
        default_config=
        #                          '/Users/sstober/git/deepbeat/deepbeat/output/gpu/sda/batch_400Hz_80-50-25-10/sda_400Hz_80-50-25-10.cfg'
        #                         '/Users/sstober/git/deepbeat/deepbeat/output/gpu/sda/batch_400Hz_100-50-25-10/sda_400Hz_100-50-25-10.cfg'
        '/Users/sstober/git/deepbeat/deepbeat/output/gpu/sda/batch_100Hz_100-50-25-10/sda_100Hz_100-50-25-10.cfg'
    )
    #                          os.path.join(os.path.dirname(__file__), 'run', 'train_sda_mlp.cfg'), reset_logging=False);
    #     config.dataset_suffix = '_channels';
    config.dataset_root = '/Users/sstober/work/datasets/Dan/eeg'
    # NOTE(review): experiment_root is assigned three times as a manual
    # toggle between experiments — only the last (100Hz) assignment is used.
    config.experiment_root = '/Users/sstober/git/deepbeat/deepbeat/output/gpu/sda/batch_400Hz_80-50-25-10/'
    config.experiment_root = '/Users/sstober/git/deepbeat/deepbeat/output/gpu/sda/batch_400Hz_100-50-25-10/'
    config.experiment_root = '/Users/sstober/git/deepbeat/deepbeat/output/gpu/sda/batch_100Hz_100-50-25-10/'
    # Analyze only subject 1 without the (slow) worst-frame analysis.
    #     analyze(config, subjects='all', worst_frames=True);
    analyze(config, subjects=[1], worst_frames=False)
Example #10
0
        accuracy[i] = 100 * (1 - extract_best_result(
            params.experiment_root, mode='misclass', check_dataset='test')[0])

    print results
    print results.mean(axis=0)
    print results.max(axis=1)

    print accuracy
    print accuracy.mean()

    return results, accuracy

# Script entry point: optionally run the full cross-trial convnet test.
if __name__ == '__main__':
    config = load_config(default_config=os.path.join(os.path.dirname(__file__),
                                                     'train_convnet.cfg'),
                         reset_logging=True)

    # override settings
    #     config.experiment_root = os.path.join(config.experiment_root, 'cross-trial');
    #     config.subjects = None;
    #     config.subjects = 8; # subj9 0-indexed
    #     config.max_epochs = 5;

    # FIXME: remove manual override
    #     config.only_extract_results = True;
    #     config.experiment_root = '/Users/sstober/git/deepbeat/deepbeat/output/gpu/fftcnn/cross-test2/'
    #     config.experiment_root = '/Users/sstober/git/deepbeat/deepbeat/output/gpu/fftcnn/exp08.a-crosstrial.subj9/';

    # Only run when explicitly enabled in the config.
    if config.get('full_cross_trial', False):
        full_cross_trial_test(config)
Example #11
0

def plot_all(root_path):
    """Recursively plot every experiment directory below *root_path*.

    A directory is considered an experiment directory when it contains a
    file named ``mlp.pkl``; ``plot2`` is invoked once per match, with the
    directory path.
    """
    import fnmatch

    for dirpath, _dirnames, files in os.walk(root_path):
        for _pickle_name in fnmatch.filter(files, 'mlp.pkl'):
            plot2(dirpath)


if __name__ == '__main__':
    config = load_config(
        default_config=
        #                 os.path.join(os.path.dirname(__file__), '..', 'run', 'train_convnet.cfg'), reset_logging=True);
        os.path.join(os.path.dirname(__file__), '..', 'run',
                     'train_fftconvnet.cfg'),
        reset_logging=True)
    #                 os.path.join(os.path.dirname(__file__), '..', 'run', 'train_sda_mlp.cfg'), reset_logging=True);

    # FIXME:
    #     root = '/Users/stober/git/deepbeat/deepbeat/output/gpu/sda/exp6.14all/';
    #     root = '/Users/stober/git/deepbeat/deepbeat/output/gpu/cnn/bigbatch/individual/';
    #     plot_batch(root);

    #     for i in xrange(12):
    #         root = '/Users/stober/git/deepbeat/deepbeat/output/gpu/cnn/bigbatch/cross-trial/pair'+str(i);
    #         plot(root+'/mlp.pkl', channels, root+'/plot.pdf');
    #         plot(root+'/mlp.pkl', less_channels, root+'/plot-less.pdf', get_color_variants(2));

    # FIXME
    mlp_file = config.get('mlp_file')

    if not os.path.isfile(mlp_file):
        train_mlp(trainset, testset, config)

    # load model
    with log_timing(log, 'loading MLP model from {}'.format(mlp_file)):
        mlp = serial.load(mlp_file)

    y_true = trainset.labels
    #     y_pred = mlp.fprop(trainset.X);

    X = mlp.get_input_space().make_theano_batch()
    Y = mlp.fprop(X)
    Y = T.argmax(Y, axis=1)
    f = theano.function([X], Y)
    y_pred = f(trainset.X)

    # Compute confusion matrix
    print classification_report(y_true, y_pred)
    print confusion_matrix(y_true, y_pred)

    return mlp


# Script entry point: load the SDA training config and run training.
if __name__ == '__main__':
    # NOTE(review): relative default path — assumes the script is launched
    # from a directory where '../train_sda.cfg' resolves; confirm with caller.
    config = load_config(default_config='../train_sda.cfg',
                         reset_logging=False)

    train(config)
Example #13
0
#         trial_id += 1;

    data = np.vstack(data);
    labels = np.vstack(labels);
#     trial_meta = np.vstack(trial_meta);
    channel_meta = np.vstack(channel_meta);
#     subject_meta = np.vstack(subject_meta);

    log.debug('generated {} data points and {} labels '.format(data.shape, labels.shape));

#     return data, labels, trial_meta, channel_meta;
    return data, labels, channel_meta;

# Script entry point: set up dataset constants for EEG trial extraction.
if __name__ == '__main__':

    config = load_config(default_config='../train_sda.cfg');

    # Dataset location and recording parameters.
    DATA_ROOT = config.eeg.get('dataset_root', './');
    SAMPLE_RATE = 400; # in Hz
    TRIAL_LENGTH = 32; # in sec

    TRIAL_LENGTH += 2; # add 2s after end of presentation

    # Number of samples per trial (rate * duration).
    TRIAL_SAMPLE_LENGTH = SAMPLE_RATE * TRIAL_LENGTH;

    log.info('using dataset at {}'.format(DATA_ROOT));
    
    '''
    Note from Dan:
    All subjects should have channels 15, 16, 17 and 18 removed [...]
    If you want to make them truly identical, you could remove channel 19 from