def main():
    """Define compmake jobs computing environment statistics.

    For each rendered image type, gathers the samples that have that
    table (excluding 'nopost' stimuli), schedules an autocorrelation
    computation plus a report job, then opens a compmake console.
    """
    set_namespace('env_stats')

    parser = OptionParser()

    parser.add_option("--db", default='flydra_db', help="FlydraDB directory")

    (options, args) = parser.parse_args() #@UnusedVariable

    db = FlydraDB(options.db, False)
    outdir = os.path.join(options.db, 'out/environment_stats')

    images = ["luminance", "contrast", "luminance_w", "contrast_w",
              "hluminance_w", "hcontrast_w"]

    for image in images:
        # Keep only samples that are not 'nopost' and have this table.
        samples = [x for x in db.list_samples()
                      if db.get_attr(x, 'stimulus', None) != 'nopost' and
                      db.has_table(x, image)]

        if not samples:
            # BUG FIX: the original formatted the (empty) samples list
            # here; the message is meant to name the missing image.
            print("No samples for %s" % image)
            continue

        comp_prefix(image)
        data = comp(compute_environment_autocorrelation, options.db, samples, image)

        comp(create_report, data, image, outdir)

    db.close()

    compmake_console()
def main():
    """Create per-sample saccade-view reports as compmake jobs.

    With --interactive, opens a compmake console; otherwise runs
    'make all' in batch mode and exits non-zero if jobs remain.
    """
    parser = OptionParser()

    parser.add_option("--db", default='flydra_db', help="Data directory")

    parser.add_option("--image", default="luminance",
                      help="Rendered image to use -- "
            " corresponding to image 'saccades_view_{start,stop}_X'")

    parser.add_option("--interactive",
                      help="Start an interactive compmake session."
                      " Otherwise run in batch mode. ",
                      default=False, action="store_true")

    (options, args) = parser.parse_args() #@UnusedVariable

    if options.db is None:
        logger.error('Please specify a directory using --db.')
        sys.exit(-1)

    # Table names derived from the chosen image.
    view_start = 'saccades_view_start_%s' % options.image
    view_stop = 'saccades_view_stop_%s' % options.image
    view_rstop = 'saccades_view_rstop_%s' % options.image

    db = FlydraDB(options.db, False)

    required_tables = (view_start, view_stop, view_rstop)

    def has_enough_data(sample):
        # A sample is usable only if it has saccades plus all three views.
        if not db.has_saccades(sample):
            return False
        return all(db.has_table(sample, t) for t in required_tables)

    samples = [s for s in db.list_samples() if has_enough_data(s)]

    set_namespace('saccade_view_show_%s' % options.image)

    for sample in samples:
        comp_prefix(sample)

        comp(create_and_write_report, options.db, sample, options.image)

    if options.interactive:
        # Hand control to the user.
        compmake_console()
    else:
        # Batch mode: try to do everything, then check what is left.
        batch_command('make all')
        # If 'make all' failed, some jobs remain in the 'todo' list.
        todo = list(parse_job_list('todo'))
        if todo:
            logger.info('Still %d jobs to do.' % len(todo))
            sys.exit(-2)
# Example #3 (scrape-artifact separator; "0" was a vote count)
 def comp(self, f, *args, **kwargs):
     """Schedule f through Compmake, registering the job on this context.

     Use this instead of calling "comp" directly.
     """
     # Count invocations so callers can detect "no jobs were defined".
     self.count_comp_invocations()
     comp_prefix(self._job_prefix)
     # Merge this context's standing dependencies with caller-supplied ones.
     merged_deps = list(self._extra_dep) + kwargs.get('extra_dep', [])
     kwargs['extra_dep'] = merged_deps
     job = comp(f, *args, **kwargs)
     self._jobs[job.job_id] = job
     return job
# Example #4 (scrape-artifact separator; "0" was a vote count)
def dp_batch_main(config, parser):
    """ Runs batch planning experiments from batch configuration files. """
    parser.add_option("-o",
                      "--output",
                      default='out/dp-batch',
                      help="Output directory")

    parser.add_option("-c",
                      "--command",
                      help="Command to pass to compmake for batch mode")

    options, which = parser.parse()

    # Decide which batch sets to run: all of them, or only those named.
    if not which:
        todo = list(config.sets.keys())
        id_comb = 'all'
    else:
        todo = config.sets.expand_names(which)
        id_comb = "+".join(sorted(todo))

    logger.info('Batch sets to do: %s' % todo)

    outdir = os.path.join(options.output, 'set-%s' % id_comb)

    # Compmake storage for results
    storage = os.path.join(outdir, 'compmake')
    use_filesystem(storage)
    read_rc_files()

    for id_set in todo:
        logger.info('Instantiating batch set  %s' % id_set)
        spec = config.sets[id_set]

        try:
            algos = config.algos.expand_names(spec['algorithms'])
            testcases = config.testcases.expand_names(spec['testcases'])
            comp_prefix('%s' % id_set)
            b_outdir = os.path.join(outdir, id_set)
            create_bench_jobs(config=config,
                              algos=algos,
                              testcases=testcases,
                              outdir=b_outdir)
        except Exception:
            # Narrowed from a bare 'except:' so Ctrl-C / SystemExit are not
            # intercepted; the error is logged and re-raised unchanged.
            logger.error('Error while instantiating batch\n%s' % pformat(spec))
            raise

    if options.command:
        return batch_command(options.command)
    else:
        compmake_console()
        return 0
# Example #5 (scrape-artifact separator; "0" was a vote count)
def dp_batch_main(config, parser):
    """ Runs batch planning experiments from batch configuration files. """
    parser.add_option("-o", "--output", default='out/dp-batch',
                      help="Output directory")
    parser.add_option("-c", "--command",
                      help="Command to pass to compmake for batch mode")

    options, which = parser.parse()

    # Either run every configured set, or only those explicitly named.
    if which:
        todo = config.sets.expand_names(which)
        id_comb = "+".join(sorted(todo))
    else:
        todo = config.sets.keys()
        id_comb = 'all'

    logger.info('Batch sets to do: %s' % todo)

    outdir = os.path.join(options.output, 'set-%s' % id_comb)

    # Results are persisted through compmake's filesystem backend.
    use_filesystem(os.path.join(outdir, 'compmake'))
    read_rc_files()

    for id_set in todo:
        logger.info('Instantiating batch set  %s' % id_set)
        spec = config.sets[id_set]

        try:
            algos = config.algos.expand_names(spec['algorithms'])
            testcases = config.testcases.expand_names(spec['testcases'])
            comp_prefix('%s' % id_set)
            create_bench_jobs(config=config, algos=algos,
                              testcases=testcases,
                              outdir=os.path.join(outdir, id_set))
        except:
            logger.error('Error while instantiating batch\n%s' % pformat(spec))
            raise

    if not options.command:
        compmake_console()
        return 0
    return batch_command(options.command)
# Example #6 (scrape-artifact separator; "0" was a vote count)
#    val = (random.rand(1) - 0.5) * 2
#    i = int(floor(random.rand(1) * 2.99))
#    u = zeros(3)
#    u[i] = val
#    return u

# Command generator used for all simulations below.
random_commands_gen = my_random_commands_gen

# Simulation parameters shared by every vehicle.
num_iterations = 500
dt = 0.1
 
vehicle_list = vehicles_list_A()
all_vehicle_report = []
for vname, vehicle in vehicle_list:
    # TeX-safe version of the vehicle name (underscores break LaTeX).
    vname_tex = vname.replace('_', '-')
    # Group this vehicle's jobs under its name.
    comp_prefix(vname)

    # Schedule the random-motion simulation for this vehicle; the affine
    # model is fit to the resulting data.
    result = comp(random_motion_simulation,
        world_gen=my_world_gen, vehicle=vehicle,
        random_pose_gen=random_pose_gen,
        num_iterations=num_iterations,
        random_commands_gen=random_commands_gen, dt=dt,
        processing_class=AffineModel,
        job_id='affine')

    vehicle_report = comp(create_report_affine, state=result, report_id=vname)
    all_vehicle_report.append(vehicle_report)
# Reset the prefix so later jobs are not nested under the last vehicle.
comp_prefix()


def create_report(id, children):
# Example #7 (scrape-artifact separator; "0" was a vote count)
    def go(self):
        """Entry point: define the jobs, then run them.

        If a quickapp parent exists, delegates job definition to the
        parent's child context and returns.  Otherwise sets up compmake
        storage, defines jobs, and either executes the batch command or
        opens an interactive console.

        Returns 0 on success or QUICKAPP_COMPUTATION_ERROR if the batch
        command failed; raises ValueError if no jobs were defined.
        """
        # check that if we have a parent who is a quickapp,
        # then use its context
        qapp_parent = self.get_qapp_parent()
        if qapp_parent is not None:
            # self.info('Found parent: %s' % qapp_parent)
            context = qapp_parent.child_context
            self.define_jobs_context(context)
            return
        else:
            # self.info('Parent not found')
            pass

        if False:
            # Optional hard memory cap (currently disabled).
            import resource
            gbs = 5
            # NOTE: was long(gbs * 1000 * 1048576L); the 'L' suffix is a
            # syntax error on Python 3 and plain ints give the same value.
            max_mem = gbs * 1000 * 1048576
            resource.setrlimit(resource.RLIMIT_AS, (max_mem, -1))
            resource.setrlimit(resource.RLIMIT_DATA, (max_mem, -1))

        options = self.get_options()

        if self.get_qapp_parent() is None:
            # only do this if somebody didn't do it before
            if not options.contracts:
                msg = 'PyContracts disabled for speed. Use --contracts to activate.'
                self.logger.warning(msg)
                contracts.disable_all()

        warnings.warn('removed configuration below')  # (start)

        output_dir = options.output

        # Compmake storage for results
        storage = os.path.join(output_dir, 'compmake')
        sf = StorageFilesystem(storage, compress=True)
        # sf = StorageFilesystem2(directory)
        # sf = MemoryCache(sf)
        set_compmake_db(sf)

        # use_filesystem(storage)
        read_rc_files()

        context = CompmakeContext(parent=None, qapp=self, job_prefix=None,
                                  output_dir=output_dir)
        self.context = context
        # Preserve the global comp_prefix across user job definitions.
        original = get_comp_prefix()
        self.define_jobs_context(context)
        comp_prefix(original)

        context.finalize_jobs()

        if context.n_comp_invocations == 0:
            # self.comp was never called
            msg = 'No jobs defined.'
            raise ValueError(msg)
        else:
            if not options.console:
                batch_result = batch_command(options.command)
                if isinstance(batch_result, str):
                    # compmake reports errors as strings
                    ret = QUICKAPP_COMPUTATION_ERROR
                elif isinstance(batch_result, int):
                    if batch_result == 0:
                        ret = 0
                    else:
                        # xxx: discarded information
                        ret = QUICKAPP_COMPUTATION_ERROR
                else:
                    assert False
                return ret
            else:
                compmake_console()
                return 0
# Example #8 (scrape-artifact separator; "0" was a vote count)
def generate_saccade(model, num):
    """Sample num saccades from the model and pack them into an ndarray."""
    sampled = model.sample_saccade_sequence(num)
    return saccades_to_ndarray(sampled)

def create_report(id, children):
    """Bundle the child reports under a single ReportNode with this id."""
    node = ReportNode(id=id, children=children)
    return node

def write_report(report, basename):
    """Write the report next to basename as both LaTeX and HTML."""
    # LaTeX first, then HTML, matching the original order.
    for ext, writer in (('.tex', report.to_latex_document),
                        ('.html', report.to_html_document)):
        writer(basename + ext)


from compmake import comp, comp_prefix #@UnresolvedImport

models = create_models()

# One analysis report per model, plus a combined summary at the end.
reports = []
for model_name, model in models:
    # Group this model's jobs under its name.
    comp_prefix(model_name)
    saccades = comp(generate_saccade, model, num=10000)
    report = comp(analyze_saccades, report_id=model_name, saccades=saccades)
    reports.append(report)
    
# Reset the prefix before defining the cross-model summary jobs.
comp_prefix()
report = comp(create_report, id='all_models', children=reports)
comp(write_report, report, 'all_models')

    
    
def batch_process_manager(data_central, which_sets, command=None):
    """Run the batch sets named by which_sets through compmake.

    Creates a dedicated 'sets/<combid>' root with symlinked config and
    logs, defines the jobs for each set, then either executes `command`
    in batch mode or opens an interactive compmake console.

    Raises UserError if the requested sets do not exist; re-raises
    ConfToolsException (after logging) on bad set configuration.
    """
    try:
        import compmake  # @UnusedImport
    except:
        logger.error('Compmake not installed; multiprocessor '
                     'processes not available.')
        raise

    from compmake import (comp_prefix, use_filesystem,
                          compmake_console, batch_command)

    batch_config = BatchConfigMaster()
    configs = data_central.get_dir_structure().get_config_directories()
    for config in configs:
        batch_config.load(config)

    sets_available = batch_config.sets.keys()

    # logger.info('Available: %r' % sets_available)
    # logger.info('Sets:      %r' % which_sets)
    which_sets_int = expand_string(which_sets, options=sets_available)

    if not which_sets_int:
        msg = 'Specified sets %r not found.' % which_sets
        msg += ' Available: %s' % sets_available
        raise UserError(msg)

    # logger.info('Expanded:  %r' % which_sets)

    for x in which_sets_int:
        if x not in sets_available:
            msg = 'Set %r not available.' % x
            raise UserError(msg)

    if len(which_sets_int) == 1:
        combid = which_sets[0]
    else:
        combid = '-'.join(which_sets)

    # Create the new root
    root = data_central.root
    root_set = os.path.join(data_central.root, 'sets', combid)
    safe_makedirs(root_set)
    data_central_set = DataCentral(root_set)

    # add symbolic links to logs and config
    main_config = os.path.realpath(os.path.join(root, 'config'))
    set_config = os.path.join(root_set, 'config')
    safe_symlink(main_config, set_config)

    safe_makedirs(os.path.join(root_set, 'logs'))
    safe_symlink(os.path.join(root, 'logs'),
                 os.path.join(root_set, 'logs', 'original'))

    storage = data_central_set.get_dir_structure().get_storage_dir()
    compmake_storage = os.path.join(storage, 'compmake')
    logger.debug('Using storage directory %r.' % friendly_path(compmake_storage))
    use_filesystem(compmake_storage)

    # NOTE(review): this iterates which_sets (the raw names), while the
    # validation above used which_sets_int (expanded) — confirm whether
    # expanded names were intended here.
    for id_set in which_sets:
        if len(which_sets) > 1:
            comp_prefix(id_set)

        try:
            # BUG FIX: the original read batch_config.sets[x], where x was
            # the leftover variable from the validation loop above; look up
            # the current set id instead.
            spec = batch_config.sets[id_set]
            batch_set(data_central_set, id_set, spec)
        except ConfToolsException:
            msg = ('Bad configuration for the set %r with spec\n %s' % 
                   (id_set, pformat(spec)))
            logger.error(msg)
            raise

    if command:
        return batch_command(command)
    else:
        compmake_console()
        return 0
# Example #10 (scrape-artifact separator; "0" was a vote count)
from mycomputations import func1, func2, draw
from compmake import comp, comp_prefix

for param1 in [1, 2, 3]:
    for param2 in [10, 11, 12]:
        # Namespace the jobs for this parameter combination.
        comp_prefix("p1=%s-p2=%s" % (param1, param2))

        # use job_id to override default naming
        prepared = comp(func1, param1, job_id="preparing")
        computed = comp(func2, prepared, param2, job_id="computing")
        comp(draw, computed, job_id="drawing")
# Example #11 (scrape-artifact separator; "0" was a vote count)
from mycomputations import func1, func2, draw
from compmake import comp, comp_prefix

for param1 in [1, 2, 3]:
    for param2 in [10, 11, 12]:
        # Prefix the job ids with the parameter combination
        # for easy reference.
        comp_prefix('p1=%s-p2=%s' % (param1, param2))

        intermediate = comp(func1, param1)
        final = comp(func2, intermediate, param2)
        comp(draw, final)