Example 1
import os

# merge_params, train_convnet, train_mlp and log are defined elsewhere in the project.


def process_jobs(config):
    common_config = config.common
    for job in config.jobs:
        log.info('Processing job {} with base {}'.format(job.name, job.base))

        # merge order: shared defaults, then the job's base section, then per-job overrides
        job_config = merge_params(common_config, config[job.base])
        log.debug('job overrides: {}'.format(job.overrides))
        job_config = merge_params(job_config, job.overrides)

        job_config.experiment_root = os.path.join(config.output_root,
                                                  job_config.type, job.name)
        log.debug('experiment root: {}'.format(job_config.experiment_root))

        print(job_config)

        # dispatch to the trainer matching the job type
        if job_config.type == 'cnn':
            train_convnet(job_config)
        elif job_config.type == 'fftcnn':
            train_convnet(job_config)
        elif job_config.type == 'sda':
            train_mlp(job_config)
        else:
            log.error('unsupported job type {}'.format(job_config.type))
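
The snippet above leans on a project-specific merge_params helper whose source is not shown here. As a rough, hypothetical sketch of the semantics the chained calls appear to rely on (a shallow merge where later values win, returning an object with attribute access), one might write:

from types import SimpleNamespace

def merge_params_sketch(base, overrides):
    """Hypothetical stand-in for the project's merge_params: a shallow merge
    where keys in `overrides` replace keys in `base`. The real helper may
    differ, e.g. by deep-merging nested sections."""
    def as_dict(cfg):
        return dict(cfg) if isinstance(cfg, dict) else dict(vars(cfg))
    merged = as_dict(base)
    merged.update(as_dict(overrides))
    return SimpleNamespace(**merged)

# shared defaults -> base section -> per-job overrides; later values win
common = {'learning_rate': 0.01, 'type': 'cnn', 'batch_size': 32}
base = {'type': 'fftcnn'}
overrides = {'batch_size': 128}
job_config = merge_params_sketch(merge_params_sketch(common, base), overrides)
print(job_config.type, job_config.batch_size)  # fftcnn 128

Under these assumed semantics, process_jobs reads as: shared defaults, overridden by the job's base section, overridden by the job's own settings.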
Example 2
import os
import traceback

import numpy as np

# merge_params, fix_local_sda_config, train_mlp and extract_results are defined
# elsewhere in the project.


def run_experiment(config, hyper_params, random_seeds):
    if not config.global_sda:
        hyper_params = fix_local_sda_config(hyper_params)

    experiment_root = hyper_params['experiment_root']

    best_acc = -1
    best_results = [np.nan, np.nan, np.nan]
    for seed in random_seeds:
        # give every seed its own experiment directory
        hyper_params['random_seed'] = seed
        hyper_params['experiment_root'] = experiment_root + '.' + str(seed)

        params = merge_params(config, hyper_params)

        if os.path.exists(os.path.join(params.experiment_root, 'mlp.pkl')):
            print('found existing mlp.pkl: {}'.format(params.experiment_root))
        else:
            print('no mlp.pkl found at: {}'.format(params.experiment_root))
            if not config.get('only_extract_results', False):
                train_mlp(params)

        try:
            values = extract_results(params.experiment_root, mode='misclass')

            # accuracy in percent: frame level, sequence level, trial level
            results = np.multiply(100, [1 - values['frame_misclass'],
                                        1 - values['sequence_misclass'],
                                        1 - values['trial_misclass']])

            # keep the run with the best trial-level accuracy
            if results[2] > best_acc:
                best_results = results
                best_acc = results[2]
        except Exception:
            print(traceback.format_exc())
            results = [np.nan, np.nan, np.nan]

        print('results for seed {}: {}'.format(seed, results))

    print('best results: {}'.format(best_results))
    return best_results
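
extract_results and the training code belong to the surrounding project and are not shown, but the core pattern is self-contained and can be illustrated in isolation: for each seed, convert the reported misclassification rates into accuracies (in percent) and keep the run with the highest trial-level accuracy. The rates below are made-up placeholders standing in for extract_results output:

import numpy as np

# made-up misclassification rates for two seeds
results_per_seed = {
    42: {'frame_misclass': 0.35, 'sequence_misclass': 0.28, 'trial_misclass': 0.20},
    43: {'frame_misclass': 0.33, 'sequence_misclass': 0.27, 'trial_misclass': 0.22},
}

best_acc, best_results = -1.0, [np.nan, np.nan, np.nan]
for seed, values in results_per_seed.items():
    # accuracy in percent: frame level, sequence level, trial level
    results = 100 * np.array([1 - values['frame_misclass'],
                              1 - values['sequence_misclass'],
                              1 - values['trial_misclass']])
    if results[2] > best_acc:  # index 2 = trial-level accuracy
        best_acc, best_results = results[2], results

print(best_results)  # seed 42 wins on trial-level accuracy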
Example 3
import os
import traceback

import numpy as np

# merge_params, fix_local_sda_config, train_mlp and extract_results are defined
# elsewhere in the project.


def run_experiment(config, hyper_params, random_seeds):
    if not config.global_sda:
        hyper_params = fix_local_sda_config(hyper_params)

    experiment_root = hyper_params['experiment_root']

    best_acc = -1
    best_results = [np.nan, np.nan, np.nan]
    for seed in random_seeds:
        # give every seed its own experiment directory
        hyper_params['random_seed'] = seed
        hyper_params['experiment_root'] = experiment_root + '.' + str(seed)

        params = merge_params(config, hyper_params)

        if os.path.exists(os.path.join(params.experiment_root, 'mlp.pkl')):
            print('found existing mlp.pkl: {}'.format(params.experiment_root))
        else:
            print('no mlp.pkl found at: {}'.format(params.experiment_root))
            if not config.get('only_extract_results', False):
                train_mlp(params)

        try:
            values = extract_results(params.experiment_root, mode='misclass')

            # accuracy in percent: frame level, sequence level, trial level
            results = np.multiply(100, [1 - values['frame_misclass'],
                                        1 - values['sequence_misclass'],
                                        1 - values['trial_misclass']])

            # keep the run with the best trial-level accuracy
            if results[2] > best_acc:
                best_results = results
                best_acc = results[2]
        except Exception:
            print(traceback.format_exc())
            results = [np.nan, np.nan, np.nan]

        print('results for seed {}: {}'.format(seed, results))

    print('best results: {}'.format(best_results))
    return best_results
Example 4
import os

# merge_params, train_convnet, train_mlp and log are defined elsewhere in the project.


def process_jobs(config):
    common_config = config.common
    for job in config.jobs:
        log.info('Processing job {} with base {}'.format(job.name, job.base))

        # merge order: shared defaults, then the job's base section, then per-job overrides
        job_config = merge_params(common_config, config[job.base])
        log.debug('job overrides: {}'.format(job.overrides))
        job_config = merge_params(job_config, job.overrides)

        job_config.experiment_root = os.path.join(config.output_root,
                                                  job_config.type, job.name)
        log.debug('experiment root: {}'.format(job_config.experiment_root))

        print(job_config)

        # dispatch to the trainer matching the job type
        if job_config.type == 'cnn':
            train_convnet(job_config)
        elif job_config.type == 'fftcnn':
            train_convnet(job_config)
        elif job_config.type == 'sda':
            train_mlp(job_config)
        else:
            log.error('unsupported job type {}'.format(job_config.type))