def main():
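    # set_namespace() keeps this script's compmake jobs in their own storage namespace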
    set_namespace('env_stats')
    
    parser = OptionParser()

    parser.add_option("--db", default='flydra_db', help="FlydraDB directory")

    (options, args) = parser.parse_args() #@UnusedVariable


    db = FlydraDB(options.db, False)
    outdir = os.path.join(options.db, 'out/environment_stats')

    images = ["luminance", "contrast", "luminance_w", "contrast_w",
              "hluminance_w", "hcontrast_w"]
              
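    # For each image table: select the samples that contain it, then schedule
    # the computation and the report generation as compmake jobs.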
    for image in images:
        samples = [x for x in db.list_samples() 
                      if db.get_attr(x, 'stimulus', None) != 'nopost' and
                      db.has_table(x, image)]
        
        if not samples:
            print "No samples for %s" % samples
            continue
            
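        # comp_prefix() prefixes the job ids of the comp() calls that follow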
        comp_prefix(image)        
        data = comp(compute_environment_autocorrelation, options.db, samples, image)
        
        comp(create_report, data, image, outdir)
    

    db.close()
    
    compmake_console()
Example #2
def stage_execution(tcid, algid):
    stage_test_case_report(tcid)

    key = (tcid, algid)
    if key not in executions:
        test_case = test_cases[tcid]
        algo_class, algo_params = available_algorithms[algid]
        job_id = 'solve-%s-%s-run' % (tcid, algid)
        results = comp(run_combination, tcid,
                       test_case, algo_class, algo_params,
                       job_id=job_id)
        executions[key] = results

        exc_id = '%s-%s' % (tcid, algid)
        # Create iterations report
        job_id = 'solve-%s-report' % exc_id
        report = comp(create_report_execution, exc_id,
                      tcid,
                      test_case, algo_class, algo_params,
                      results, job_id=job_id)

        job_id += '-write'
        filename = os.path.join(options.outdir, 'executions',
                                '%s-%s.html' % (tcid, algid))
        comp(write_report, report, filename, job_id=job_id)

    return executions[key]
Example #3
def create_predstats_jobs(config, distances, streams, id_discdds, rm, maxd):
    # Compmake storage for results
    store = StoreResults()

    # Try to instance it
    # dds = config.discdds.instance(id_discdds)

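    # Schedule one compute_predstats job per (delta, stream, dds) combination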
    for delta in range(0, maxd):
        for i, (id_stream,
                id_dds) in enumerate(itertools.product(streams, id_discdds)):
            key = dict(delta=delta, id_stream=id_stream, id_discdds=id_dds)
            job_id = 'pred-%s-log%s-delta%s' % (id_dds, i, delta)

            store[key] = comp(compute_predstats,
                              config,
                              id_dds,
                              id_stream,
                              delta,
                              distances,
                              job_id=job_id)

    comp(discdds_report, id_discdds, store, rm)

    for id_dds in id_discdds:
        #        pdb.set_trace()
        subsets = create_subsets(distances)
        job_report_one(subsets, id_dds, store, rm)
Example #4
    def stage_execution(tcid, algid):
        stage_test_case_report(tcid)

        key = (tcid, algid)
        if key not in executions:
            test_case = test_cases[tcid]
            algo_class, algo_params = available_algorithms[algid]
            job_id = 'solve-%s-%s-run' % (tcid, algid)
            results = comp(run_combination,
                           tcid,
                           test_case,
                           algo_class,
                           algo_params,
                           job_id=job_id)
            executions[key] = results

            exc_id = '%s-%s' % (tcid, algid)
            # Create iterations report
            job_id = 'solve-%s-report' % exc_id
            report = comp(create_report_execution,
                          exc_id,
                          tcid,
                          test_case,
                          algo_class,
                          algo_params,
                          results,
                          job_id=job_id)

            job_id += '-write'
            filename = os.path.join(options.outdir, 'executions',
                                    '%s-%s.html' % (tcid, algid))
            comp(write_report, report, filename, job_id=job_id)

        return executions[key]
Example #5
def jobs_rlearn_refine_level(config, rm, learners, streams, outdir, nthreads, i, ref, areas, parent):
    id_learner, id_stream = learners[0], streams[0]
#    for id_learner, id_stream in itertools.product(learners, streams):
    # try instancing them
    config.streams.instance(id_stream)
    
    # Learn the first refining iteration
    job_id = 'learn-%s-%s-%sof%s-refined%s' % (id_stream, id_learner, i + 1, nthreads, ref)

    learner_i = comp(rlearn_partial, config, id_learner, id_stream,
                     i, nthreads, job_id=job_id, search_areas=areas, parent=parent)

    
    dds = comp(summarize, learner_i,
               job_id='learn-%s-%s-%s-refined%s-summarize' % (id_stream, id_learner, i, ref))

    learner_report = comp(report_learner,
                          'learner-%s-%s-%s-refined%s' % (id_stream, id_learner, i, ref),
                          learner_i,
                          job_id='learn-%s-%s-%s-refined%s-report' % (id_stream, id_learner, i, ref)) 
#        pdb.set_trace()
    diffeo_report = comp(report_dds,
                         'dds-%s-%s-%s-refined%s' % (id_stream, id_learner, i, ref),
                         dds,
                         job_id='learn-%s-%s-%s-refined%s-summarize-report' % (id_stream, id_learner, i, ref))
    
    rm.add(learner_report, 'learner-%s-refined%s' % (i, ref), id_learner=id_learner, id_stream=id_stream)
    rm.add(diffeo_report, 'dds-%s-refined%s' % (i, ref), id_learner=id_learner, id_stream=id_stream)

#    comp(save_results, id_learner, id_stream, outdir, dds,
#         job_id='learn-%s-%s-%s-refined%s-summarize-save' % (id_stream, id_learner, i, ref))
    
    return learner_i, comp(calculate_areas, learner_i, dds, ref + 1)
Example #6
def job_report_one(subsets, id_discdds, store, rm):
    records = comp(make_records, store)
    for id_subset, distances in subsets.items():
        job_id = 'report_predstats-%s-%s' % (id_discdds, id_subset)    
        report = comp(report_predstats, id_discdds, id_subset, distances, records,
                      job_id=job_id)
        rm.add(report, 'main', id_discdds=id_discdds, subset=id_subset)
Example #7
def create_diststats_jobs(config, distances, streams, rm, maxd):
    # Compmake storage for results
    store = StoreResults()

    for id_distance in distances:    
        for delta in range(1, maxd):
            for i, id_stream in enumerate(streams):
                key = dict(id_distance=id_distance,
                           delta=delta,
                           stream=id_stream)
                job_id = '%s-log%s-delta%s' % (id_distance, i, delta)
                
                store[key] = comp(compute_dist_stats, config, id_distance,
                                  id_stream, delta,
                                  job_id=job_id)
    
    
    for id_distance in distances:
        subset = store.select(id_distance=id_distance)
        stats = comp(compute_statistics, subset)
        report = comp(report_statistics, id_distance, stats)
        rm.add(report, 'bydistance', id_distance=id_distance)

    subsets = create_subsets(distances)
    
    job_report(subsets, store, rm)
Example #8
def create_diststats_jobs(config, distances, streams, rm, maxd):
    # Compmake storage for results
    store = StoreResults()

    for id_distance in distances:
        for delta in range(1, maxd):
            for i, id_stream in enumerate(streams):
                key = dict(id_distance=id_distance,
                           delta=delta,
                           stream=id_stream)
                job_id = '%s-log%s-delta%s' % (id_distance, i, delta)

                store[key] = comp(compute_dist_stats,
                                  config,
                                  id_distance,
                                  id_stream,
                                  delta,
                                  job_id=job_id)

    for id_distance in distances:
        subset = store.select(id_distance=id_distance)
        stats = comp(compute_statistics, subset)
        report = comp(report_statistics, id_distance, stats)
        rm.add(report, 'bydistance', id_distance=id_distance)

    subsets = create_subsets(distances)

    job_report(subsets, store, rm)
Example #9
def testDep2(self):
    ''' Testing advanced dependencies discovery (double) '''
    cf1 = comp(f1)
    cf2 = comp(f2, cf1, cf1)
    self.assertTrue(cf1.job_id in direct_children(cf2.job_id))
    self.assertEqual(1, len(direct_children(cf2.job_id)))
    self.assertEqual(1, len(direct_parents(cf1.job_id)))
Example #10
def job_report(subsets, store, rm):
    for id_subset, which in subsets.items():
        logger.info('%s = %s' % (id_subset, which))
        subset = store.select(lambda x: x['id_distance'] in which)
        logger.info('selected %s' % len(subset))
        substats = comp(compute_statistics, subset, job_id='%s-s' % id_subset)
        report = comp(report_statistics_all, id_subset, substats)
        rm.add(report, 'main', subset=id_subset)
Example #12
def main():
    from compmake import comp, compmake_console, use_filesystem
    use_filesystem(os.path.join(Const.signals_dir, 'compmake'))
    for id_video, id_filter in itertools.product(Const.videos, Const.filters):
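        # should_process() selects which (video, filter) pairs to extract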
        if should_process(id_video, id_filter):
            comp(extract_signals, id_video, id_filter,
                 job_id='extract-%s-%s' % (id_video, id_filter))

    compmake_console()
Example #13
def test_order():
    from compmake import comp, batch_command
    # make A -> B(fail) -> C
    A = comp(job_success, job_id='A')
    B = comp(job_failure, A, job_id='B')
    comp(job_success, B, job_id='C')
    batch_command('make')

    check_job_states(A=Cache.DONE, B=Cache.FAILED, C=Cache.BLOCKED)
Example #14
def main():
    for log in logs:
        for script in scripts:
            job_id = '%s-%s' % (script, log)
            #config = {'logdir': "${PBENV_DATA}/rawseeds/%s" % log}
            config = {'logdir':  log}
            comp(pg, script, config=config, job_id=job_id)

    compmake_console()
Example #15
def rlearn(config, parser): #@UnusedVariable
    """ Displays the learned DDS """
    t0 = time.time()
    parser.add_option("-n", "--nthreads", help="Number of threads",
                      type='int', default='4')
    parser.add_option("-r", "--nrefine", help="Number of time to refine learning",
                      type='int', default='2')
    parser.add_option("-s", "--streams", help="Which streams to use.",
                      default="*")
    parser.add_option("-i", "--comb", default="default")
    parser.add_option("-l", "--learners", help="Learner config.", default="*")
    parser.add_option("-o", "--output", help="Output directory",
                      default='out/dp-rlearn/')
    parser.add_option("--sensels", default=None,
                      help="Sensel indices to use for debuging refining module")
    parser.add_option("-c", "--command",
                      help="Command to pass to compmake for batch mode")
    parser.add_option("--show", default=None, help="Name of learners to report")


    options = parser.parse_options()
    if options.show is not None:
        diffeomorphism2d_continuous.make_report(options.show.split(','))
        sys.exit()
    nthreads = options.nthreads
    nrefine = options.nrefine
    
    learners = config.learners.expand_names(options.learners) 
    streams = config.streams.expand_names(options.streams)
    
    if len(learners) > 1:
        logger.warn('Multiple learners are not supported for now')
    if len(streams) > 1:
        logger.warn('Multiple streams are not supported for now')
    
    id_comb = ",".join(streams) + "-" + ",".join(learners)
    outdir = os.path.join(options.output, id_comb) 
    storage = os.path.join(outdir, 'compmake')
    use_filesystem(storage)
    read_rc_files()
    
    rm = ReportManager(os.path.join(outdir, 'reports'))
    
    jobs_rlearn(config, rm, learners, streams, outdir, nthreads, nrefine, options.sensels)
    
    # Time and report the learning
    comp(learning_times_rlearn, outdir, learners, streams, nthreads, nrefine)
    
    
    rm.create_index_job()
    
    try:
        if options.command:
            return batch_command(options.command)
        else:
            compmake_console()
            return 0
    finally:
        logger.info("Done after time: " + str(time.time() - t0) + ' seconds')
Example #16
def main():
    parser = OptionParser()

    parser.add_option("--db", default='flydra_db', help="Data directory")

    parser.add_option("--image", default="luminance",
                      help="Rendered image to use -- "
            " corresponding to image 'saccades_view_{start,stop}_X'")
    
    parser.add_option("--interactive",
                      help="Start an interactive compmake session."
                      " Otherwise run in batch mode. ",
                      default=False, action="store_true")


    (options, args) = parser.parse_args() #@UnusedVariable
    
    if options.db is None:
        logger.error('Please specify a directory using --db.')
        sys.exit(-1)

    view_start = 'saccades_view_start_%s' % options.image
    view_stop = 'saccades_view_stop_%s' % options.image
    view_rstop = 'saccades_view_rstop_%s' % options.image    

    db = FlydraDB(options.db, False) 
    
    # all samples with enough data
    all_available = lambda x: db.has_saccades(x) and \
        db.has_table(x, view_start) and \
        db.has_table(x, view_stop) and \
        db.has_table(x, view_rstop)
        
    samples = filter(all_available, db.list_samples())
    
    set_namespace('saccade_view_show_%s' % options.image)
    
    for sample in samples: 
        comp_prefix(sample)
        
        comp(create_and_write_report, options.db, sample, options.image) 
        
    
    if options.interactive:
        # start interactive session
        compmake_console()
    else:
        # batch mode
        # try to do everything
        batch_command('make all')
        # start the console if we are not done
        # (that is, make all failed for some reason)
        todo = list(parse_job_list('todo')) 
        if todo:
            logger.info('Still %d jobs to do.' % len(todo))
            sys.exit(-2)
Example #17
def job_report_one(subsets, id_discdds, store, rm):
    records = comp(make_records, store)
    for id_subset, distances in subsets.items():
        job_id = 'report_predstats-%s-%s' % (id_discdds, id_subset)
        report = comp(report_predstats,
                      id_discdds,
                      id_subset,
                      distances,
                      records,
                      job_id=job_id)
        rm.add(report, 'main', id_discdds=id_discdds, subset=id_subset)
Example #18
def main():
    from compmake import comp, compmake_console, use_filesystem
    use_filesystem(os.path.join(Const.signals_dir, 'compmake'))
    for id_video, id_filter in itertools.product(Const.videos, Const.filters):
        if should_process(id_video, id_filter):
            comp(extract_signals,
                 id_video,
                 id_filter,
                 job_id='extract-%s-%s' % (id_video, id_filter))

    compmake_console()
Example #19
def main():
    use_filesystem(os.path.join(Const.signals_dir, 'compmake_stats'))

#    signals = list_signals() # only do the compound ones
#    signals = Const.osets.keys()
    for id_oset, id_filter, id_stat in itertools.product(
                                    Const.osets, Const.filters, Const.stats):
        signal = '%s-%s' % (id_oset, id_filter)
        comp(compute_and_write_stats, signal, id_stat,
             job_id='stats-%s-%s' % (signal, id_stat))

    compmake_console()
Example #20
def jobs_rlearn_refine_level(config, rm, learners, streams, outdir, nthreads,
                             i, ref, areas, parent):
    id_learner, id_stream = learners[0], streams[0]
    #    for id_learner, id_stream in itertools.product(learners, streams):
    # try instancing them
    config.streams.instance(id_stream)

    # Learn the first refining iteration
    job_id = 'learn-%s-%s-%sof%s-refined%s' % (id_stream, id_learner, i + 1,
                                               nthreads, ref)

    learner_i = comp(rlearn_partial,
                     config,
                     id_learner,
                     id_stream,
                     i,
                     nthreads,
                     job_id=job_id,
                     search_areas=areas,
                     parent=parent)

    dds = comp(summarize,
               learner_i,
               job_id='learn-%s-%s-%s-refined%s-summarize' %
               (id_stream, id_learner, i, ref))

    learner_report = comp(report_learner,
                          'learner-%s-%s-%s-refined%s' %
                          (id_stream, id_learner, i, ref),
                          learner_i,
                          job_id='learn-%s-%s-%s-refined%s-report' %
                          (id_stream, id_learner, i, ref))
    #        pdb.set_trace()
    diffeo_report = comp(report_dds,
                         'dds-%s-%s-%s-refined%s' %
                         (id_stream, id_learner, i, ref),
                         dds,
                         job_id='learn-%s-%s-%s-refined%s-summarize-report' %
                         (id_stream, id_learner, i, ref))

    rm.add(learner_report,
           'learner-%s-refined%s' % (i, ref),
           id_learner=id_learner,
           id_stream=id_stream)
    rm.add(diffeo_report,
           'dds-%s-refined%s' % (i, ref),
           id_learner=id_learner,
           id_stream=id_stream)

    #    comp(save_results, id_learner, id_stream, outdir, dds,
    #         job_id='learn-%s-%s-%s-refined%s-summarize-save' % (id_stream, id_learner, i, ref))

    return learner_i, comp(calculate_areas, learner_i, dds, ref + 1)
Example #21
def main():
    set_namespace('hdf2bpi_all')
    
    parser = OptionParser()

    parser.add_option("--model", default=None,
                      help="Only do this model")
    
    (options, args) = parser.parse_args() #@UnusedVariable
    
    if args:
        raise Exception('Extra arguments')
    
    
    Script = namedtuple('Script', 'job_prefix model input file_pattern params')
    
    scripts = [
        Script('3cams', 'rawseeds2bpi_3cams', '{logid}.h5', '{logid}.camera.bpi', {}),
        Script('4lasers', 'rawseeds2bpi_4lasers', '{logid}.h5', '{logid}.4lasers.bpi', {}),
        Script('frontal', 'rawseeds2bpi_frontal', '{logid}.h5', '{logid}.frontal.bpi', {}),
        Script('sick_extract', 'bpi_extract',
               '{logid}.4lasers.bpi', '{logid}.sick.bpi', {}),
        Script('sickpc', 'hdf_wrap_bpi_filter',
                '{logid}.sick.bpi', '{logid}.sickpc.bpi',
                {'bpi_filter': 'bpi_popcode',
                 'bpi_filter.edges': 'edges_sick.pickle' }),
        Script('sickpc_all', 'hdf_wrap_bpi_filter',
                '{logid}.sick.bpi', '{logid}.sickpca.bpi',
                {'bpi_filter': 'bpi_popcode',
                 'bpi_filter.edges': 'edges_sick-all.pickle' }),
    ]
    

    if not os.path.exists(hdf_dir):
        raise Exception('Input dir %r does not exist.' % hdf_dir)
    
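    # One conversion job per (log, script) pair; later scripts consume the
    # .bpi files written by earlier ones.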
    for log in logs:
        for script in scripts:
            hdf = os.path.join(hdf_dir, script.input.format(logid=log))
            bpi = os.path.join(hdf_dir, script.file_pattern.format(logid=log))
            job_id = '%s-%s' % (script.job_prefix, log)

            # if os.path.exists(bpi):
            #     print('File %r already exists; skipping creation of job %r.' %
            #             (bpi, job_id))
            #     continue
                
            comp(convert_hdf2bpi, script.model, hdf, bpi, script.params, job_id=job_id)
        
    compmake_console()
Example #22
def main():
    use_filesystem(os.path.join(Const.signals_dir, 'compmake_join'))

    sets = {}
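    # For each filter, map every master oset name to its component signal names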
    for fname in Const.filters.keys():
        mkname = lambda x: '%s-%s' % (x, fname)
        for master, pieces in Const.osets.items():
            sets[mkname(master)] = [mkname(x) for x in pieces]

    for master, pieces in sets.items():
        comp(join_signals, master, pieces, job_id='join-%s' % master)

    compmake_console()
Example #23
def main():
    use_filesystem(os.path.join(Const.signals_dir, 'compmake_join'))

    sets = {}
    for fname in Const.filters.keys():
        mkname = lambda x: '%s-%s' % (x, fname)
        for master, pieces in Const.osets.items():
            sets[mkname(master)] = [mkname(x) for x in pieces]

    for master, pieces in sets.items():
        comp(join_signals, master, pieces,
             job_id='join-%s' % master)

    compmake_console()
Example #24
def main():
    use_filesystem(os.path.join(Const.signals_dir, 'compmake_stats'))

    #    signals = list_signals() # only do the compound ones
    #    signals = Const.osets.keys()
    for id_oset, id_filter, id_stat in itertools.product(
            Const.osets, Const.filters, Const.stats):
        signal = '%s-%s' % (id_oset, id_filter)
        comp(compute_and_write_stats,
             signal,
             id_stat,
             job_id='stats-%s-%s' % (signal, id_stat))

    compmake_console()
Example #25
def main():
    usage = ''
    parser = OptionParser(usage=usage)
    
    parser.add_option("--bigdata", default="~/BIGDATA", help="Base directory")
    
    parser.add_option("--outdir", help="Destination base directory")
    
    (options, args) = parser.parse_args()
    if args:
        raise Exception('Unexpected trailing arguments: %r' % args)

    if options.outdir is None:
        raise Exception('Specify outdir')
    
    bigdata = os.path.expanduser(options.bigdata)
    bigdata = os.path.expandvars(bigdata)
    
    conf2logs = {}
    conf2logs['conf1b'] = list_logdirs(os.path.join(bigdata, "er1-logs_compact_better", "conf1"))
    conf2logs['conf2b'] = list_logdirs(os.path.join(bigdata, "er1-logs_compact_better", "conf2"))
    # what to run
    # interface is   {logdir, logname, outdir}
    conf2pg = {}  
    conf2pg['conf1b'] = ['er1b_video01',
                         'er1b_video0_alone',
                         'er1b_video1_alone',
                         'er1conv_video0_bw_full',
                         'er1conv_video0_bw_small',
                         'er1conv_video01_bw_full',
                         'er1conv_video01_bw_small']
    conf2pg['conf2b'] = conf2pg['conf1b']
    
    for conf in conf2pg:
        logs = conf2logs[conf]
        pgs = conf2pg[conf]
        
        for logdir, pg in itertools.product(logs, pgs): #@UndefinedVariable
            logname = os.path.basename(logdir)
            outdir = os.path.join(options.outdir, conf) 
            
            job_id = '%s-%s-%s' % (conf, pg, logname)
            done_file = os.path.join(options.outdir, 'done', '%s-finished.txt' % job_id)
            make_sure_dir_exists(done_file)
            
            config = dict(logdir=logdir, outdir=outdir, logname=logname)
            comp(run_pg_script, pg, config, done_file, job_id=job_id)

    compmake_console()
Example #26
def plearn(config, parser):
    """ Learn the diffeomorphisms in parallel. """
    # parser.add_option("-i", "--id_image", help="ID image.", default='lena')
    ncpus = multiprocessing.cpu_count()
    parser.add_option("-n",
                      "--nthreads",
                      help="Number of threads",
                      type='int',
                      default=ncpus)
    parser.add_option("-s",
                      "--streams",
                      help="Which streams to use.",
                      default="*")
    parser.add_option("-i", "--comb", default="default")
    parser.add_option("-l", "--learners", help="Learner config.", default="*")
    parser.add_option("-o",
                      "--output",
                      help="Output directory",
                      default='out/dp-plearn/')
    parser.add_option("-c",
                      "--command",
                      help="Command to pass to compmake for batch mode")
    options = parser.parse_options()

    nthreads = options.nthreads

    learners = config.learners.expand_names(options.learners)
    streams = config.streams.expand_names(options.streams)
    id_comb = ",".join(streams) + "-" + ",".join(learners)
    outdir = os.path.join(options.output, id_comb)
    storage = os.path.join(outdir, 'compmake')
    use_filesystem(storage)
    read_rc_files()

    rm = ReportManager(os.path.join(outdir, 'reports'))

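    # Schedule the parallel learning jobs; their reports are collected by rm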
    jobs_plearn(config, rm, learners, streams, outdir, nthreads)

    rm.create_index_job()

    # Time and report the learning
    comp(learning_times_plearn, outdir, learners, streams, nthreads)

    if options.command:
        return batch_command(options.command)
    else:
        compmake_console()
        return 0
Example #27
def compmake_job(self, *args, **kwargs):
    """ Calls compmake's comp() function. """
    try:
        from compmake import comp
    except ImportError:
        logger.error('Compmake not installed')
        raise
    return comp(*args, **kwargs)
Example #28
def testID(self):
    ''' Check that the job id is correctly parsed '''
    job_id = 'terminus'
    c = comp(f1, job_id=job_id)
    self.assertEqual(c.job_id, job_id)
    make(job_id)
    self.assertTrue(True)
Example #29
def jobs_tables_by_algo_rows_sample_groups(samples_groups, rm, tables):
    source_descs = comp_store(Stats.all_descriptions())
    
    # Create a new store, add the key "group"
    allstats = StoreResults()
    for id_group, samples in samples_groups.items():
        for key, value in samples.items():
            nkey = dict(id_group=id_group, **key)
            allstats[nkey] = value
    
    for id_statstable, stats in tables.items():
        for id_algo, samples in allstats.groups_by_field_value('id_algo'):
            job_id = 'byalgo-%s-%s' % (id_algo, id_statstable)

            r = comp(table_by_rows,
                     "byalgo-rows-sample-groups-%s-%s" % (id_algo, id_statstable),
                     samples=samples,
                     rows_field='id_group',  # rows = sample group
                     cols_fields=stats,  # which statistics for each col
                     source_descs=source_descs,
                     job_id=job_id) 
            
            report_attrs = dict(id_statstable=id_statstable)
            report_attrs.update(samples.fields_with_unique_values())
            assert report_attrs['id_algo'] == id_algo
           
            rm.add(r, 'byalgo-rows-sample-groups', **report_attrs)
Example #30
def test_execution_stats(self):
    from compmake import comp, batch_command
    # schedule some commands
    res = comp(f, comp(f), comp(f, comp(f)))

    result = compmake_execution_stats(res)
    batch_command('make')

    res = get_job_userobject(result.job_id)

    assert isinstance(res, dict)
    res['cpu_time']
    res['wall_time']
    res['jobs']

    print(res)
Example #31
def main():
    set_namespace('rawseeds2hdf')

    if not os.path.exists(rawseeds_dir):
        raise Exception('Input dir %r does not exist.' % rawseeds_dir)
    
    if not os.path.exists(hdf_dir):
        os.makedirs(hdf_dir)
    
    for log in logs:
        logdir = os.path.join(rawseeds_dir, log)
        hdf = os.path.join(hdf_dir, '%s.h5' % log)
        job_id = 'rawseeds2hdf-%s' % log
        comp(convert_rawseeds2hdf, logdir, hdf, job_id=job_id)
        
    compmake_console()
Example #32
def main():
    outdir = "test/repman"

    storage = os.path.join(outdir, "compmake")
    use_filesystem(storage)

    rm = ReportManager(outdir)
    report = comp(make_rep1, "TestReport3")
    report2 = comp(make_rep1, "TestReport4")

    rm.add(report, "rep3")
    rm.add(report2, "rep4")

    rm.create_index_job()
    read_rc_files()

    compmake_console()
Example #33
def jobs_plearn_comb(config, rm, outdir, id_learner, id_stream, nthreads,
                     intermediate_reports=True):
    partial = []
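    # One partial-learning job per thread, each covering shard i of nthreads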
    for i in range(nthreads):
        job_id = 'learn-%s-%s-%sof%s' % (id_stream, id_learner, i + 1, nthreads)
        learner_i = comp(plearn_partial, config, id_learner, id_stream, i, nthreads,
                         job_id=job_id)
        partial.append(learner_i)
        
        if intermediate_reports:
            diffeo_i = comp(summarize, learner_i, job_id=job_id + '-summarize')
            learner_i_report = comp(report_learner,
                                    'learner-%s-%s-%sof%s' % (id_stream, id_learner, i + 1, nthreads),
                                    learner_i, job_id=job_id + '-report')
            diffeo_i_report = comp(report_dds,
                                   'dds-%s-%s-%sof%s' % (id_stream, id_learner, i + 1, nthreads),
                                   diffeo_i, job_id=job_id + '-summarize-report')

            rm.add(learner_i_report, 'learner-partial', id_learner=id_learner,
                   i=i, id_stream=id_stream)
            rm.add(diffeo_i_report, 'dds-partial', id_learner=id_learner,
                   i=i, id_stream=id_stream)

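    # Merge the partial learners one at a time into a single learner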
    current = partial[0]
    for i in range(1, nthreads):
        job_id = 'learn-%s-%s-join-%sof%s' % (id_stream, id_learner, i, nthreads - 1)
        current = comp(plearn_join, current, partial[i], job_id=job_id)
    learner = current
    dds = comp(summarize, current,
               job_id='learn-%s-%s-summarize' % (id_stream, id_learner))

    learner_report = comp(report_learner,
                          'learner-%s-%s' % (id_stream, id_learner),
                          learner,
                          job_id='learn-%s-%s-report' % (id_stream, id_learner)) 
    
    diffeo_report = comp(report_dds,
                         'dds-%s-%s' % (id_stream, id_learner),
                         dds,
                         job_id='learn-%s-%s-summarize-report' % (id_stream, id_learner))
    
    rm.add(learner_report, 'learner', id_learner=id_learner, id_stream=id_stream) 
    rm.add(diffeo_report, 'dds', id_learner=id_learner, id_stream=id_stream)

    comp(save_results, id_learner, id_stream, outdir, dds,
         job_id='learn-%s-%s-summarize-save' % (id_stream, id_learner))
Example #34
def main():
    outdir = 'test/repman'

    storage = os.path.join(outdir, 'compmake')
    use_filesystem(storage)

    rm = ReportManager(outdir)
    report = comp(make_rep1, 'TestReport3')
    report2 = comp(make_rep1, 'TestReport4')

    rm.add(report, 'rep3')
    rm.add(report2, 'rep4')

    rm.create_index_job()
    read_rc_files()

    compmake_console()
Example #35
def jobs_report_tc(config, rm, testcases, alltc):
    for id_tc in testcases:
        tc = config.testcases.instance(id_tc)
        report = comp(report_tc, comp_store(config), id_tc, alltc[id_tc],
                      job_id='report_tc-%s' % id_tc)
        report_attrs = dict(true_plan_length=len(tc.true_plan),
                            id_tc=id_tc, id_discdds=tc.id_discdds)
        rm.add(report, 'tc', **report_attrs)
Example #36
def uncert(config, parser):
    parser.add_option("-S", "--dds", help="DDS sytem .")
    parser.add_option("-c", "--command", \
                      help="Command to pass to compmake for batch mode")
    parser.add_option("-o", "--output", help="Output directory", \
                      default='out/dp-precision/')
    parser.add_option("-s", "--streams", help="Which streams to use.", \
                      default="*")
    parser.add_option("-d", "--distances", default='L2,L2w', \
                      help="Distances id to use for comparing the diffeo systems")
    parser.add_option("-l", "--length", default=9, type='int', \
                      help="Max length of test cases")
    options = parser.parse_options()

    dds = config.discdds.expand_names(options.dds)
    streams = config.streams.expand_names(options.streams)

    id_comb = ",".join(dds)

    outdir = os.path.join(options.output, id_comb)
    storage = os.path.join(outdir, 'compmake')
    use_filesystem(storage)
    read_rc_files()

    rm = ReportManager(os.path.join(outdir, 'reports'))
    max_delta = options.length
    distances = config.distances.expand_names(options.distances)

    store = create_stats_jobs(config, dds, streams, distances, max_delta,
                              outdir)

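    # make_records aggregates the per-job statistics in 'store' into one table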
    records = comp(make_records, store)

    report = comp(report_stats, records, dds, streams, distances)
    r0 = comp(empty_report)
    rm.add(report, 'main', id_dds='dds')
    rm.add(r0, 'empty')

    rm.create_index_job()

    if options.command:
        return batch_command(options.command)
    else:
        compmake_console()
        return 0
Example #37
def uncert(config, parser):
    parser.add_option("-S", "--dds", help="DDS sytem .")
    parser.add_option("-c", "--command", \
                      help="Command to pass to compmake for batch mode")
    parser.add_option("-o", "--output", help="Output directory", \
                      default='out/dp-precision/')
    parser.add_option("-s", "--streams", help="Which streams to use.", \
                      default="*")
    parser.add_option("-d", "--distances", default='L2,L2w', \
                      help="Distances id to use for comparing the diffeo systems")
    parser.add_option("-l", "--length", default=9, type='int', \
                      help="Max length of test cases")
    options = parser.parse_options()
     
    dds = config.discdds.expand_names(options.dds) 
    streams = config.streams.expand_names(options.streams)
    
    id_comb = ",".join(dds)
    
    outdir = os.path.join(options.output, id_comb) 
    storage = os.path.join(outdir, 'compmake')
    use_filesystem(storage)
    read_rc_files()
    
    
    rm = ReportManager(os.path.join(outdir, 'reports'))
    max_delta = options.length
    distances = config.distances.expand_names(options.distances)
    
    store = create_stats_jobs(config, dds, streams, distances, max_delta, outdir)
    
    records = comp(make_records, store)
    
    report = comp(report_stats, records, dds, streams, distances)
    r0 = comp(empty_report)
    rm.add(report, 'main', id_dds='dds')
    rm.add(r0, 'empty')
    
    rm.create_index_job()
    
    if options.command:
        return batch_command(options.command)
    else:
        compmake_console()
        return 0
Example #38
def main():
#    parser.add_option("--res", help="", default='[40, 80, 160, 320]')
#    options = parser.parse_options()
    
    rm = ReportManager('timeuse_reports')
    rm.add(comp(timeuse_report), 'timeuse', systems='all')
    rm.create_index_job()
    
    return batch_command('clean; make')
Example #39
def create_stats_jobs(config, id_ddss, id_streams, id_distances, max_delta, outdir):
    store = StoreResults()
    
    for id_dds in id_ddss:
        for id_stream in id_streams:
            for delta in range(1, max_delta):
                key = dict(id_discdds=id_dds, id_stream=id_stream, delta=delta)
                store[key] = comp(compute_predstats, config, id_dds, id_stream, delta, id_distances)
    return store
Example #40
def create_uncert_stats_jobs(config, id_ddss, id_streams, max_delta, outdir):
    store = StoreResults()
    id_distances = ['L2', 'L2w']
    for id_dds in id_ddss:
        for id_stream in id_streams:
            for delta in range(1, max_delta):
                key = dict(id_discdds=id_dds, id_stream=id_stream, delta=delta)
                store[key] = comp(compute_predstats, config, id_dds, id_stream, delta, id_distances)
    return store
Example #41
    def test_order_3(self):
        from compmake import comp, batch_command
        # choose wisely here
        comp(top, comp(bottom2))
        comp(bottom)
        comp(top, comp(bottom2))
        
        batch_command('clean')
        batch_command('make')

        self.assertEqual(['bottom', 'bottom2', 'top', 'bottom2', 'top'], TestOrder.order)
Example #42
def uncert(config, parser):  # @UnusedVariable
    parser.add_option("-s",
                      "--streams",
                      help="Which streams to use.",
                      default="*")
    parser.add_option("-S", "--dds", help="DDS sytem .")
    parser.add_option("-o",
                      "--output",
                      help="Output directory",
                      default='out/dp-uncert/')
    parser.add_option("-c",
                      "--command",
                      help="Command to pass to compmake for batch mode")
    options = parser.parse_options()

    dds = config.discdds.expand_names(options.dds)
    streams = config.streams.expand_names(options.streams)

    id_comb = ",".join(dds) + "-" + ",".join(streams)

    outdir = os.path.join(options.output, id_comb)
    storage = os.path.join(outdir, 'compmake')
    use_filesystem(storage)
    read_rc_files()

    rm = ReportManager(os.path.join(outdir, 'reports'))
    max_delta = 9

    store = create_uncert_stats_jobs(config, dds, streams, max_delta, outdir)

    records = comp(make_records, store)

    report = comp(report_uncert_stats, records, dds)
    r0 = comp(empty_report)
    rm.add(report, 'main', id_dds='dds')
    rm.add(r0, 'empty')

    rm.create_index_job()

    if options.command:
        return batch_command(options.command)
    else:
        compmake_console()
        return 0
Example #43
def stage_test_case_report(tcid):
    if tcid not in available_test_cases:
        msg = ('Could not find test case %r \n %s' %
               (tcid, available_test_cases.keys()))
        raise Exception(msg)
    if tcid not in test_cases:
        command, args = available_test_cases[tcid]
        job_id = 'test_case_data-%s' % tcid
        test_cases[tcid] = comp(command, job_id=job_id, **args)

    if tcid not in test_case_reports:
        job_id = 'test_case-%s-report' % tcid
        report = comp(create_report_tc,
                      tcid, test_cases[tcid], job_id=job_id)
        job_id += '-write'
        filename = os.path.join(options.outdir, 'test_cases', '%s.html' % tcid)
        comp(write_report, report, filename, job_id=job_id)
        test_case_reports[tcid] = report
    return test_case_reports[tcid]
Example #44
def jobs_report_algo_init(config, rm, algoinit): #@UnusedVariable
    """ add the initialization report for each algorithm """
    for k, algo in algoinit.items():
        id_algo = k['id_algo'] 
        id_discdds = k['id_discdds']
        job_id = 'init-%s-%s-report' % (id_algo, id_discdds)
        report = comp(report_init_algorithm, id_algo, id_discdds, algo,
                      job_id=job_id)
        report_attrs = dict(id_algo=id_algo, id_discdds=id_discdds)
        rm.add(report, 'init', **report_attrs)
Example #45
def jobs_visualization(config, allruns, rm):
    
    for run in allruns:
        id_tc = run['id_tc']
        id_algo = run['id_algo']
        result = allruns[run]
        job_id = 'plan-%s-%s-visualize' % (id_algo, id_tc)
        report = comp(visualize_result, comp_store(config), id_tc, id_algo,
                      result, job_id=job_id)
        report_attrs = run
        rm.add(report, 'visualization', **report_attrs)
Example #46
    def stage_test_case_report(tcid):
        if tcid not in available_test_cases:
            msg = ('Could not find test case %r \n %s' %
                   (tcid, available_test_cases.keys()))
            raise Exception(msg)
        if tcid not in test_cases:
            command, args = available_test_cases[tcid]
            job_id = 'test_case_data-%s' % tcid
            test_cases[tcid] = comp(command, job_id=job_id, **args)

        if tcid not in test_case_reports:
            job_id = 'test_case-%s-report' % tcid
            report = comp(create_report_tc,
                          tcid,
                          test_cases[tcid],
                          job_id=job_id)
            job_id += '-write'
            filename = os.path.join(options.outdir, 'test_cases',
                                    '%s.html' % tcid)
            comp(write_report, report, filename, job_id=job_id)
            test_case_reports[tcid] = report
        return test_case_reports[tcid]
Example #47
def comp_comb(function, *args, **kwargs):
    sr = StoreResults()
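    # One job per argument combination; 'chosen' records the combination as the key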
    for a, b, chosen in all_args_combinations(*args, **kwargs):

        def s(k):
            return ""

        pid = '-'.join(['%s%s' % (s(k), v) for k, v in chosen.items()])
        job_id = '%s-%s' % (function.__name__, pid)
        b['job_id'] = job_id
        job = comp(function, *a, **b)
        sr[chosen] = job
    return sr
Example #48
def jobs_tables_by_sample_rows_algo(allstats, rm, tables):
    source_descs = comp_store(Stats.all_descriptions())
    
    for id_statstable, stats in tables.items():
        for id_tc, tcruns in allstats.groups_by_field_value('id_tc'):
            job_id = 'bysample-%s-%s' % (id_tc, id_statstable)

            r = comp(table_by_rows,
                     "bysample-%s-%s" % (id_tc, id_statstable),
                     samples=tcruns,
                     rows_field='id_algo',  # group by algorithm
                     cols_fields=stats,  # which statistics for each col
                     source_descs=source_descs,
                     job_id=job_id)
            
            report_attrs = dict(id_statstable=id_statstable)  # id_tc=id_tc, 
            report_attrs.update(tcruns.fields_with_unique_values())

            rm.add(r, 'bysample', **report_attrs)
Example #49
def jobs_tables_by_algo_rows_samples(allstats, rm, tables):
    """ One table for each algo, where rows are test cases. """
    source_descs = comp_store(Stats.all_descriptions())
    for id_statstable, stats in tables.items():
        for id_algo, samples in allstats.groups_by_field_value('id_algo'):
            job_id = 'byalgo-%s-%s' % (id_algo, id_statstable)

            r = comp(table_by_rows,
                     "byalgo-rows-sample-%s-%s" % (id_algo, id_statstable),
                     samples=samples,
                     rows_field='id_tc',  # rows = tc
                     cols_fields=stats,  # which statistics for each col
                     source_descs=source_descs,
                     job_id=job_id)

            report_attrs = dict(id_statstable=id_statstable)
            report_attrs.update(samples.fields_with_unique_values())
            assert report_attrs['id_algo'] == id_algo
            
            rm.add(r, 'byalgo-rows-sample', **report_attrs)
Example #50
def jobs_tables_by_sample_groups(samples_groups, rm, tables):
    source_descs = comp_store(Stats.all_descriptions())
    # Tables grouping by algorithm
    for g, s in product(samples_groups.items(), tables.items()):
        id_sample_group, samples = g
        id_statstable, stats = s
        
        r = comp(table_by_rows,
                 "bysamplegroups-%s-%s" % (sanitize(id_sample_group), id_statstable),
                 samples=samples,
                 rows_field='id_algo',  # group by algorithm
                 cols_fields=stats,  # which statistics for each col
                 source_descs=source_descs)

        report_attrs = dict(id_sample_group=id_sample_group,
                            id_stats_table=id_statstable)
        
        report_attrs.update(samples.fields_with_unique_values())
        
        rm.add(r, 'bysamplegroups', **report_attrs)
Example #51
def pixlearn(config, parser):
    parser.add_option("-n", "--nthreads", help="Number of threads",
                      type='int', default='4')
    parser.add_option("--distribute", type='str', default='random',
                      help="Strategy to distribute sensles to threads")
    parser.add_option("-s", "--id_stream", help="Which streams to use.",
                      default="*")    
    parser.add_option("-o", "--output", help="Output directory",
                      default='out/dp-pixlearn/')
    parser.add_option("-l", "--id_learner", help="Learner config.", default="*")
    parser.add_option("-c", "--command",
                      help="Command to pass to compmake for batch mode")
    options = parser.parse_options()
    
    nthreads = options.nthreads
    id_learner = config.learners.expand_names(options.id_learner) 
    id_stream = config.streams.expand_names(options.id_stream)
    
    id_comb = ",".join(id_stream) + "-" + ",".join(id_learner)
    outdir = os.path.join(options.output, id_comb) 
    storage = os.path.join(outdir, 'compmake')
    use_filesystem(storage)
    read_rc_files()
    rm = ReportManager(os.path.join(outdir, 'reports'))
    # pdb.set_trace()
    if options.distribute == 'random':
        max_nsensels = 40 * 30
        # Generate a list with range of all indexes and assign them to threads
        all_indicies = np.array(range(max_nsensels))
        dist = np.random.randint(nthreads, size=max_nsensels)
        sensel_indexes = []
        for i in range(nthreads):
            sensel_indexes.append(all_indicies[dist == i])
    if options.distribute == 'demo4':
        sensel_indexes = [[0, 1, 2, 3,
                           40, 41, 42, 43,
                           80, 81, 82, 83,
                           120, 121, 122, 123],
                          [340, 341, 342, 343,
                           380, 381, 382, 383,
                           420, 421, 422, 423,
                           460, 461, 462, 463],
                          [650, 651, 652, 653,
                           690, 691, 692, 693,
                           730, 731, 732, 733,
                           770, 771, 772, 773],
                          [1076, 1077, 1078, 1079,
                           1116, 1117, 1118, 1119,
                           1156, 1157, 1158, 1159,
                           1196, 1197, 1198, 1199]] 

    if len(id_learner) > 1 or len(id_stream) > 1:
        logger.warning('learners and streams after index 0 will be ignored')
        
    id_learner = id_learner[0]
    id_stream = id_stream[0]
    
    commands = [[256, 0, 0], [-256, 0, 0]]
    states = [[100]]


    # # Parallel part of code
    # Initiate parallel learning
    group = []
    for i in range(nthreads):        
        estimator_i = comp(sensel_group_learn, config, id_learner, id_stream,
                           sensel_indexes[i])
        group.append(estimator_i)
        
        diffeo_system_i = comp(estimator_summarize, estimator_i,
                               commands, states, 'diffeo' + str(i))
        
        estimator_report = comp(report_estimator, 'estimator' + str(i), estimator_i)
        rm.add(estimator_report, 'estimator' + str(i), id_learner=id_learner,
               id_stream=id_stream)
        
        diffeo_report = comp(report_dds, 'diffeo' + str(i), diffeo_system_i)
        rm.add(diffeo_report, 'diffeo' + str(i), id_learner=id_learner,
               id_stream=id_stream)

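    # Join the per-thread estimators and summarize the combined system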
    estimator_main = comp(join_estimators, group)
    main_system = comp(estimator_summarize, estimator_main,
                       commands, states, 'diffeo' + str(i))
    
    diffeo_report = comp(report_dds, 'dds-%s-%s' % (id_stream, id_learner),
                         main_system,
                         job_id='learn-%s-%s-summarize-report' % (id_stream, id_learner))
    
    rm.add(comp(empty_report), 'empty')
    rm.add(diffeo_report, 'dds', id_learner=id_learner, id_stream=id_stream)
    
    
    rm.create_index_job()
    
    if options.command:
#        return batch_command(options.command)
        batch_command(options.command)
    else:
        compmake_console()
Example #52
def rlearn(config, parser):  #@UnusedVariable
    """ Displays the learned DDS """
    t0 = time.time()
    parser.add_option("-n",
                      "--nthreads",
                      help="Number of threads",
                      type='int',
                      default='4')
    parser.add_option("-r",
                      "--nrefine",
                      help="Number of time to refine learning",
                      type='int',
                      default='2')
    parser.add_option("-s",
                      "--streams",
                      help="Which streams to use.",
                      default="*")
    parser.add_option("-i", "--comb", default="default")
    parser.add_option("-l", "--learners", help="Learner config.", default="*")
    parser.add_option("-o",
                      "--output",
                      help="Output directory",
                      default='out/dp-rlearn/')
    parser.add_option(
        "--sensels",
        default=None,
        help="Sensel indices to use for debuging refining module")
    parser.add_option("-c",
                      "--command",
                      help="Command to pass to compmake for batch mode")
    parser.add_option("--show",
                      default=None,
                      help="Name of learners to report")

    options = parser.parse_options()
    if options.show is not None:
        diffeomorphism2d_continuous.make_report(options.show.split(','))
        sys.exit()
    nthreads = options.nthreads
    nrefine = options.nrefine

    learners = config.learners.expand_names(options.learners)
    streams = config.streams.expand_names(options.streams)

    if len(learners) > 1:
        logger.warn('Multiple learners are not supported for now')
    if len(streams) > 1:
        logger.warn('Multiple streams are not supported for now')

    id_comb = ",".join(streams) + "-" + ",".join(learners)
    outdir = os.path.join(options.output, id_comb)
    storage = os.path.join(outdir, 'compmake')
    use_filesystem(storage)
    read_rc_files()

    rm = ReportManager(os.path.join(outdir, 'reports'))

    jobs_rlearn(config, rm, learners, streams, outdir, nthreads, nrefine,
                options.sensels)

    # Time and report the learning
    comp(learning_times_rlearn, outdir, learners, streams, nthreads, nrefine)

    rm.create_index_job()

    try:
        if options.command:
            return batch_command(options.command)
        else:
            compmake_console()
            return 0
    finally:
        logger.info("Done after time: " + str(time.time() - t0) + ' seconds')
Example #53
    def constr1(command, state=None):
        return command == 1

    def constr2(command, state=None):
        return command == 2

    def constr3(command, state=None):
        return command == 3
        
    hub.register_estimator(constr0)
    hub.register_estimator(constr1)
    hub.register_estimator(constr2)
    hub.register_estimator(constr3)
    
    # pdb.set_trace()
    
    comp(demo_topic, '0', hub.read_filtered(0))
    comp(demo_topic, '1', hub.read_filtered(1))
    comp(demo_topic, '2', hub.read_filtered(2))
    comp(demo_topic, '3', hub.read_filtered(3))
    compmake_console()
Example #54
from compmake import comp, draw


def func(x):
    return 2 * x


res1 = comp(func, 2)
res2 = comp(func, 4)
draw(res1)
Example #55
def idealize_uncert(config, parser):
    '''
    Assumes a constant displacement over the whole sensor domain
    '''
    parser.add_option("-S", "--dds", help="DDS sytem .")
    parser.add_option("-c", "--command", \
                      help="Command to pass to compmake for batch mode")
    parser.add_option("-o", "--output", help="Output directory", \
                      default='out/idealized-dds/')
    options = parser.parse_options()

    id_discdds = options.dds
    dds = config.discdds.instance(id_discdds)

    outdir = os.path.join(options.output, id_discdds)

    storage = os.path.join(outdir, 'compmake')
    use_filesystem(storage)
    read_rc_files()

    rm = ReportManager(os.path.join(outdir, 'reports'))

    # Ideal
    id_iu_discdds = 'iu-' + id_discdds
    iu_dds = comp(_idealize_uncert, dds, job_id='idealize_uncert')
    comp(save_results, id_iu_discdds, outdir, iu_dds)
    diffeo_report = comp(report_dds, 'iu_dds-%s' % id_discdds, iu_dds)
    rm.add(diffeo_report, 'iu-dds', id_learner='idealized-uncertainty')

    # Relative
    id_uur_discdds = 'uur-' + id_discdds
    dds_copyr = copy.copy(dds)
    uur_dds = comp(_update_uncert,
                   dds_copyr,
                   length_score_norm_relative,
                   job_id='update_uncert_relative')
    comp(save_results,
         id_uur_discdds,
         outdir,
         uur_dds,
         job_id='update_uncert_relative_save')
    diffeo_report = comp(report_dds,
                         'uur-dds-%s' % id_discdds,
                         uur_dds,
                         job_id='update_uncert_relative_report')
    rm.add(diffeo_report, 'uur-dds', id_learner='updated-uncertainty-uur')

    # Absolute
    dds_copya = copy.copy(dds)
    id_uua_discdds = 'uua-' + id_discdds
    uua_dds = comp(_update_uncert,
                   dds_copya,
                   length_score_norm,
                   job_id='update_uncert_absolute')
    comp(save_results,
         id_uua_discdds,
         outdir,
         uua_dds,
         job_id='update_uncert_absolute_save')
    diffeo_report = comp(report_dds,
                         'uua-dds-%s' % id_discdds,
                         uua_dds,
                         job_id='update_uncert_absolute_report')
    rm.add(diffeo_report, 'uua-dds', id_learner='updated-uncertainty-uua')

    rm.create_index_job()

    if options.command:
        return batch_command(options.command)
    else:
        #        pdb.set_trace()
        compmake_console()
        return 0
Example #56
def main():
    parser = OptionParser()

    group = OptionGroup(parser, "Files and directories")

    group.add_option("--outdir",
                     help='Directory with variables.pickle and where '
                     'the output will be placed.')

    parser.add_option_group(group)

    group = OptionGroup(parser, "Experiments options")

    group.add_option("--fast",
                     default=False,
                     action='store_true',
                     help='Disables sanity checks.')

    group.add_option("--set",
                     default='*',
                     help='[= %default] Which combinations to run.')

    group.add_option("--seed",
                     default=None,
                     type='int',
                     help='[= %default] Seed for random number generator.')

    parser.add_option_group(group)

    group = OptionGroup(parser, "Compmake options")

    group.add_option("--remake",
                     default=False,
                     action='store_true',
                     help='Remakes all (non interactive).')

    group.add_option("--report",
                     default=False,
                     action='store_true',
                     help='Cleans and redoes all reports (non interactive).')

    group.add_option(
        "--report_stats",
        default=False,
        action='store_true',
        help='Cleans and redoes the reports for the stats. (non interactive)')

    parser.add_option_group(group)

    (options, args) = parser.parse_args()  #@UnusedVariable

    np.random.seed(options.seed)

    if options.fast:
        disable_all()

    assert not args
    assert options.outdir is not None

    available_algorithms, available_test_cases, available_sets = get_everything()

    which = expand_string(options.set, list(available_sets.keys()))

    if len(which) == 1:
        compmake_storage = os.path.join(options.outdir, 'compmake', which[0])
    else:
        compmake_storage = os.path.join(options.outdir, 'compmake',
                                        'common_storage')

    use_filesystem(compmake_storage)

    print('Staging creation of test cases reports')
    test_cases = {}
    test_case_reports = {}

    def stage_test_case_report(tcid):
        if tcid not in available_test_cases:
            msg = ('Could not find test case %r \n %s' %
                   (tcid, available_test_cases.keys()))
            raise Exception(msg)
        if tcid not in test_cases:
            command, args = available_test_cases[tcid]
            job_id = 'test_case_data-%s' % tcid
            test_cases[tcid] = comp(command, job_id=job_id, **args)

        if tcid not in test_case_reports:
            job_id = 'test_case-%s-report' % tcid
            report = comp(create_report_tc,
                          tcid,
                          test_cases[tcid],
                          job_id=job_id)
            job_id += '-write'
            filename = os.path.join(options.outdir, 'test_cases',
                                    '%s.html' % tcid)
            comp(write_report, report, filename, job_id=job_id)
            test_case_reports[tcid] = report
        return test_case_reports[tcid]

    # set of tuple (algo, test_case)
    executions = {}

    def stage_execution(tcid, algid):
        stage_test_case_report(tcid)

        key = (tcid, algid)
        if key not in executions:
            test_case = test_cases[tcid]
            algo_class, algo_params = available_algorithms[algid]
            job_id = 'solve-%s-%s-run' % (tcid, algid)
            results = comp(run_combination,
                           tcid,
                           test_case,
                           algo_class,
                           algo_params,
                           job_id=job_id)
            executions[key] = results

            exc_id = '%s-%s' % (tcid, algid)
            # Create iterations report
            job_id = 'solve-%s-report' % exc_id
            report = comp(create_report_execution,
                          exc_id,
                          tcid,
                          test_case,
                          algo_class,
                          algo_params,
                          results,
                          job_id=job_id)

            job_id += '-write'
            filename = os.path.join(options.outdir, 'executions',
                                    '%s-%s.html' % (tcid, algid))
            comp(write_report, report, filename, job_id=job_id)

        return executions[key]

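    # For each requested set, run every (test case, algorithm) pair, then
    # build the paper tables and the summary report.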
    for comb_id in which:
        comb = available_sets[comb_id]
        alg_ids = expand_string(comb.algorithms, available_algorithms.keys())
        tc_ids = expand_string(comb.test_cases, available_test_cases.keys())

        print(
            'Set %r has %d test cases and %d algorithms (~%d jobs in total).' %
            (comb_id, len(alg_ids), len(tc_ids),
             len(alg_ids) * len(tc_ids) * 2))

        deps = {}
        for t, a in itertools.product(tc_ids, alg_ids):
            deps[(t, a)] = stage_execution(t, a)

        job_id = 'tex-%s' % comb_id
        comp(create_tables_for_paper,
             comb_id,
             tc_ids,
             alg_ids,
             deps,
             job_id=job_id)

        job_id = 'set-%s-report' % comb_id
        report = comp(create_report_comb_stats,
                      comb_id,
                      tc_ids,
                      alg_ids,
                      deps,
                      job_id=job_id)

        job_id += '-write'
        filename = os.path.join(options.outdir, 'stats', '%s.html' % comb_id)
        comp(write_report, report, filename, job_id=job_id)

    if options.report or options.report_stats:
        if options.report:
            batch_command('clean *-report*')
        elif options.report_stats:
            batch_command('clean set-*  tex*')
        batch_command('parmake')
    elif options.remake:
        batch_command('clean *')
        batch_command('make set-* tex-*')
    else:
        compmake_console()