    def test_contract_disabled(self):
        validKwargs = {
            'memberString': "Be free! Kill bureaucracy!!!",
            'propertyString': "Be free! Kill bureaucracy!!!",
            'memberStringList': ["a", "b"],
            'propertyStringList': ["a", "b"]
        }

        # Disabling contracts.
        contracts.disable_all()

        # No exception is raised.
        TestContract()

        # Invalid memberString but no exception is raised.
        kwargs = validKwargs.copy()
        kwargs['memberString'] = 1234
        TestContract(**kwargs)

        # Invalid propertyString but no exception is raised.
        kwargs = validKwargs.copy()
        kwargs['propertyString'] = 1234
        TestContract(**kwargs)

        # Invalid memberStringList but no exception is raised.
        kwargs = validKwargs.copy()
        kwargs['memberStringList'] = ["a", 2]
        TestContract(**kwargs)

        # Invalid propertyStringList but no exception is raised.
        kwargs = validKwargs.copy()
        kwargs['propertyStringList'] = ["a", 2]
        TestContract(**kwargs)
Example #2
        def test_disabled_contracts(self):
            contracts.disable_all()

            @contract
            def disabled(phrase):
                """
                :type phrase: str
                """
                return phrase

            # this should not throw w/ contracts disabled
            disabled(int(8))
            contracts.enable_all()
            # this will still not throw because the disabled value is checked at decoration time only
            disabled(int(8))

            @contract
            def enabled(phrase):
                """
                :type phrase: str
                """
                return phrase

            # a newly decorated function will throw
            with pytest.raises(exceptions.ContractNotRespected):
                enabled(int(8))
Example #3
def main():
    parser = OptionParser(usage=usage)

    parser.add_option("--outdir", default='.')
    
    parser.add_option("--slow", default=False, action='store_true',
                      help='Enables sanity checks.')
    
    parser.add_option("--stats", default=False, action='store_true',
                      help='Computes statistics.')
    
    (options, args) = parser.parse_args() #@UnusedVariable
    
    if not options.slow:
        disable_all()
    
    if len(args) > 1:
        raise Exception('Too many arguments')
    
    filename = args[0] if args else 'stdin'
    G = smart_load(filename, raise_if_unknown=True, progress=True)

    print('Creating report...')
    r = create_report(G, options.stats)
    
    rd = os.path.join(options.outdir, 'images')
    out = os.path.join(options.outdir, '%s.html' % G.graph['name'])
    print('Writing to %r' % out)
    r.to_html(out, resources_dir=rd)
Example #4
def main():
    parser = OptionParser(usage=usage)

    parser.add_option("--outdir", default='.')

    parser.add_option("--slow",
                      default=False,
                      action='store_true',
                      help='Enables sanity checks.')

    parser.add_option("--stats",
                      default=False,
                      action='store_true',
                      help='Computes statistics.')

    (options, args) = parser.parse_args()  #@UnusedVariable

    if not options.slow:
        disable_all()

    if len(args) > 1:
        raise Exception('Too many arguments')

    filename = args[0] if args else 'stdin'
    G = smart_load(filename, raise_if_unknown=True, progress=True)

    print('Creating report...')
    r = create_report(G, options.stats)

    rd = os.path.join(options.outdir, 'images')
    out = os.path.join(options.outdir, '%s.html' % G.graph['name'])
    print('Writing to %r' % out)
    r.to_html(out, resources_dir=rd)
Example #5
    def main(self, argumentList):
        # Disabling contracts solves some performance issues.
        contracts.disable_all()

        argumentParser = argparse.ArgumentParser(
            description=u"Make ModSecurity exceptions.")
        argumentParser.add_argument(
            u"-i",
            u"--input",
            metavar=u"MODSEC_AUDIT_LOG_FILE",
            dest='modsecurityAuditLogPath',
            type=unicode,
            default=None,
            help=
            u"Modsecurity audit log file path or '-' to read from standard input."
        )
        argumentParser.add_argument(
            u"-d",
            u"--data-url",
            dest='dataURL',
            type=unicode,
            required=True,
            default=None,
            help=u"Example: 'sqlite:////tmp/modsecurity-exception-factory.db'")
        argumentParser.add_argument(u"-c",
                                    u"--config-file",
                                    dest='configFilePath',
                                    type=unicode,
                                    default=None)

        argumentObject = argumentParser.parse_args(argumentList)

        # Try to parse config.
        config = Config(argumentObject.configFilePath)
        variableNameList = config.variableNameList()
        ignoredVariableDict = config.ignoredVariableDict()
        minimumOccurrenceCountThreshold = config.minimumOccurrenceCountThreshold(
        )
        maximumValueCountThreshold = config.maximumValueCountThreshold()

        # Initialize data source object.
        dataSource = ModsecurityAuditDataSourceSQL(argumentObject.dataURL)

        # Parse log if given.
        if argumentObject.modsecurityAuditLogPath is not None:
            self._parseFile(argumentObject.modsecurityAuditLogPath, dataSource)

        # Preparing correlation engine.
        correlationEngine = CorrelationEngine(variableNameList,
                                              ignoredVariableDict,
                                              minimumOccurrenceCountThreshold,
                                              maximumValueCountThreshold)
        correlationEngine.addProgressListener(
            CorrelationProgressListenerConsole(sys.stderr))

        # Correlating and writing exceptions progressively using the power of Python generators.
        ModsecurityExceptionWriter(stream=sys.stdout).write(
            correlationEngine.correlate(dataSource))

        return 0
Example #6
    def test_contract_disabled(self):
        validKwargs = {'memberString': "Be free! Kill bureaucracy!!!",
                       'propertyString': "Be free! Kill bureaucracy!!!",
                       'memberStringList': ["a", "b"],
                       'propertyStringList': ["a", "b"]}

        # Disabling contracts.
        contracts.disable_all()

        # No exception is raised.
        TestContract()

        # Invalid memberString but no exception is raised.
        kwargs = validKwargs.copy()
        kwargs['memberString'] = 1234
        TestContract(**kwargs)

        # Invalid propertyString but no exception is raised.
        kwargs = validKwargs.copy()
        kwargs['propertyString'] = 1234
        TestContract(**kwargs)

        # Invalid memberStringList but no exception is raised.
        kwargs = validKwargs.copy()
        kwargs['memberStringList'] = ["a", 2]
        TestContract(**kwargs)

        # Invalid propertyStringList but no exception is raised.
        kwargs = validKwargs.copy()
        kwargs['propertyStringList'] = ["a", 2]
        TestContract(**kwargs)
Example #7
    def main(self):
        rospy.init_node('servo_demo', disable_signals=True)

        self.info('Started.')
        contracts.disable_all()

        boot_root = rospy.get_param('~boot_root')
        boot_root = expand_environment(boot_root)

        config_dir = rospy.get_param('~config_dir')
        id_robot_learned = rospy.get_param('~id_robot_learn')

        self.info('loading %r' % config_dir)
        GlobalConfig.global_load_dir(config_dir)

        id_agent = rospy.get_param('~id_agent')
        self.id_robot = rospy.get_param('~id_robot')
        self.sleep = rospy.get_param('~sleep', 0.005)
        self.info('sleep: %s' % self.sleep)
        self.error_threshold = float(rospy.get_param('~error_threshold'))

        raise_if_no_state = rospy.get_param('~raise_if_no_state', True)

        data_central = DataCentral(boot_root)

        ag_st = load_agent_state(data_central,
                                 id_agent,
                                 id_robot_learned,
                                 reset_state=False,
                                 raise_if_no_state=raise_if_no_state)
        self.agent, state = ag_st

        self.info('Loaded state: %s' % state)

        self.servo_agent = self.agent.get_servo()

        bo_config = get_boot_config()
        self.robot = bo_config.robots.instance(self.id_robot)
        self.boot_spec = self.robot.get_spec()

        self.publish_info_init()

        self.y = None
        self.y_goal = None
        self.started_now = False
        self.stopped_now = False
        self.e0 = 1
        self.e = 1
        self.last_boot_data = None
        self.state = STATE_WAIT

        self.info('Defining services')
        rospy.Service('set_goal', Empty, self.srv_set_goal)
        rospy.Service('start_servo', Empty, self.srv_start_servo)
        rospy.Service('stop_servo', Empty, self.srv_stop_servo)

        self.info('Finished initialization')
        self.count = 0
        self.go()
Example #8
def disable_all():
    """
    Wraps PyContracts `disable_all()
    <http://andreacensi.github.io/contracts/api/contracts.html#
    module-contracts.enabling>`_ function. From the PyContracts documentation:
    "Disables all contract checks"
    """
    contracts.disable_all()
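
A minimal usage sketch (not from any of the projects on this page) of the global switch that the wrapper above forwards to. It uses only the public PyContracts API (contract, disable_all, enable_all) and illustrates the decoration-time behaviour shown in Example #2: a function decorated while checks are disabled stays unchecked even after checks are re-enabled.

import contracts
from contracts import contract

contracts.disable_all()

@contract(x='int,>0')
def decorated_while_disabled(x):
    # Decorated while checks were off: the contract spec is never enforced.
    return 2 * x

decorated_while_disabled(-1)  # no ContractNotRespected is raised

contracts.enable_all()

@contract(x='int,>0')
def decorated_while_enabled(x):
    # Decorated after re-enabling: the spec is enforced again.
    return 3 * x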
Example #9
    def testContractDisabled(self):
        instance = TestContract()

        contracts.disable_all()

        # No exception is raised
        instance.propertyString = 10
        instance.propertyStringList = ["a", 2]
Example #10
    def testContractDisabled(self):
        instance = TestContract()

        contracts.disable_all()

        # No exception is raised
        instance.setMemberString(10)
        instance.setMemberStringList(["a", 2])
Example #11
    def testContractDisabled(self):
        instance = TestContract()

        contracts.disable_all()

        # No exception is raised
        instance.setMemberString(10)
        instance.setMemberStringList(["a", 2])
Example #12
    def testContractDisabled(self):
        instance = TestContract()

        contracts.disable_all()

        # No exception is raised
        instance.propertyString = 10
        instance.propertyStringList = ["a", 2]
Example #13
def manager(args):
    configs_dir = resource_filename("fly_behaviors", "configs")
        
    usage = """"""
    parser = OptionParser(usage=usage)
    parser.disable_interspersed_args()

    parser.add_option("-c", dest='configs_dir', default=configs_dir,
                      help="Directory containing configuration [%default].")

    parser.add_option("-l", dest='log_directory', default='behaviors-sims',
                      help="Where to save logs [%default].")

    parser.add_option("--dt",
                      dest='dt', default=0.05, type='float',
                      help="Simulation interval (s) [%default].")

    parser.add_option("-T", default=100, type='float',
                      help="Maximum simulation time (s) [%default].")

    parser.add_option("--fast", default=False, action='store_true',
                      help="Disables contracts checking [%default].")

    parser.add_option("--num_episodes", default=1, type='int',
                      help="Number of episodes to run for each combination.")

    parser.add_option("--video", default=False, action='store_true',
                      help="Creates videos [%default].")

    (options, args) = parser.parse_args()

    if options.fast:
        contracts.disable_all()
    
    print(options)
        
    logger.info('Loading standard PyVehicles configuration.')
    load_vehicles_config()
    logger.info('Loading our additional PyVehicles configuration.')
    load_vehicles_config(options.configs_dir)
    logger.info('Loading our configuration (tasks, controllers, combinations).')
    manager_config = ManagerConfig(options.configs_dir)
    
    all_combinations = manager_config.combinations.keys()
    
    if args:
        use = args
    else:
        use = all_combinations
  
    logger.info('Using combinations %s' % use)

    for x in use:
        if not x in all_combinations:
            raise ValueError('No known combination %r.' % x)
        run_combinations(manager_config=manager_config,
                         comb_id=x,
                         other_options=options)
Example #14
File: solve.py Project: rusi/mcdp
    def go(self):

        logger.setLevel(logging.DEBUG)

        options = self.get_options()

        if not options.contracts:
            logger.debug('Disabling PyContracts. Use --contracts to enable.')
            disable_all()

        if options.expect_nimp is not None:
            options.imp = True

        params = options.get_extra()

        if len(params) < 1:
            raise ValueError('Please specify model name.')

        model_name = params[0]

        # drop the extension
        if '.mcdp' in model_name or '/' in model_name:
            msg = 'The model name should not contain extension or /.'
            raise UserError(msg)

        max_steps = options.max_steps

        _exp_advanced = options.advanced
        expect_nres = options.expect_nres
        lower = options.lower
        upper = options.upper
        out_dir = options.out
        query_strings = params[1:]

        intervals = options.intervals
        imp = options.imp
        expect_nimp = options.expect_nimp
        make = options.make
        if make: imp = True

        plot = options.plot
        do_movie = options.movie

        expect_res = None

        config_dirs = options.config_dirs.split(":")
        maindir = options.maindir
        if options.cache:
            if out_dir is None:
                out_dir = 'out-mcdp_solve'
            cache_dir = os.path.join(out_dir, '_cached', 'solve')
        else:
            cache_dir = None

        solve_main(logger, config_dirs, maindir, cache_dir, model_name, lower,
                   upper, out_dir, max_steps, query_strings, intervals,
                   _exp_advanced, expect_nres, imp, expect_nimp, plot,
                   do_movie, expect_res, make)
Example #15
    def main(self):
        rospy.init_node('servo_demo', disable_signals=True)
        
        self.info('Started.')
        contracts.disable_all()

        boot_root = rospy.get_param('~boot_root')
        boot_root = expand_environment(boot_root)
        
        config_dir = rospy.get_param('~config_dir')
        id_robot_learned = rospy.get_param('~id_robot_learn')
        
        self.info('loading %r' % config_dir)
        GlobalConfig.global_load_dir(config_dir)
        
        id_agent = rospy.get_param('~id_agent')
        self.id_robot = rospy.get_param('~id_robot')
        self.sleep = rospy.get_param('~sleep', 0.005)
        self.info('sleep: %s' % self.sleep)
        self.error_threshold = float(rospy.get_param('~error_threshold'))
        
        raise_if_no_state = rospy.get_param('~raise_if_no_state', True)
        
        data_central = DataCentral(boot_root)
        
        ag_st = load_agent_state(data_central, id_agent, id_robot_learned,
                                 reset_state=False,
                                 raise_if_no_state=raise_if_no_state)
        self.agent, state = ag_st
        
        self.info('Loaded state: %s' % state)
        
        self.servo_agent = self.agent.get_servo()
        
        bo_config = get_boot_config()
        self.robot = bo_config.robots.instance(self.id_robot)
        self.boot_spec = self.robot.get_spec()
            
        self.publish_info_init()     
        
        self.y = None
        self.y_goal = None
        self.started_now = False
        self.stopped_now = False
        self.e0 = 1
        self.e = 1         
        self.last_boot_data = None
        self.state = STATE_WAIT

        self.info('Defining services')
        rospy.Service('set_goal', Empty, self.srv_set_goal)
        rospy.Service('start_servo', Empty, self.srv_start_servo)
        rospy.Service('stop_servo', Empty, self.srv_stop_servo)
                
        self.info('Finished initialization') 
        self.count = 0
        self.go()
Example #16
def main():
    parser = OptionParser(usage=usage)

    parser.add_option("--slow",
                      default=False,
                      action='store_true',
                      help='Enables sanity checks.')

    parser.add_option(
        "--max_dist",
        default=15,
        type='float',
        help='[= %default] Maximum distance for graph simplification.')
    parser.add_option(
        "--min_nodes",
        default=250,
        type='float',
        help='[= %default] Minimum number of nodes to simplify to.')
    parser.add_option(
        "--scale",
        default=10000,
        type='float',
        help='[= %default] Controls the weight of angular vs linear .')

    parser.add_option("--seed",
                      default=42,
                      type='int',
                      help='[= %default] Seed for random number generator.')

    (options, args) = parser.parse_args()  #@UnusedVariable

    np.random.seed(options.seed)

    if not options.slow:
        disable_all()
    # TODO: warn

    if len(args) > 1:
        raise Exception('Too many arguments')

    filename = args[0] if args else 'stdin'
    G = smart_load(filename, raise_if_unknown=True, progress=True)

    algorithm = EFPNO_S
    params = dict(max_dist=options.max_dist,
                  min_nodes=options.min_nodes,
                  scale=options.scale)

    instance = algorithm(params)
    results = instance.solve(G)

    G2 = results['solution']
    # G2 = results['G_landmarks']

    G2.graph['name'] = '%s-solved%dm' % (G.graph['name'], options.max_dist)
    graph_write(G2, sys.stdout)
Example #17
 def init_diffeo(self, shape):
     id_symdiffeo = self.config.id_symdiffeo
     symdiffeo = get_dp_config().symdiffeos.instance(id_symdiffeo)
     label = 'tmp'
     original_cmd = [np.zeros(1)]
     self.info('creating diffeo_action')
     contracts.disable_all()
     self.diffeo_action = \
         diffeo_action_from_symdiffeo(symdiffeo, shape,
                                      label, original_cmd)
     self.info('..done')
Example #18
 def init_diffeo(self, shape):
     id_symdiffeo = self.config.id_symdiffeo
     symdiffeo = get_dp_config().symdiffeos.instance(id_symdiffeo)
     label = 'tmp'
     original_cmd = [np.zeros(1)]
     self.info('creating diffeo_action')
     contracts.disable_all()
     self.diffeo_action = \
         diffeo_action_from_symdiffeo(symdiffeo, shape,
                                      label, original_cmd)
     self.info('..done')
Example #19
    def main(self, argumentList):
        # Disabling contracts solves some performance issues.
        contracts.disable_all()

        argumentParser = argparse.ArgumentParser(description = u"Make ModSecurity exceptions.")
        argumentParser.add_argument(u"-i",
                                    u"--input",
                                    metavar = u"MODSEC_AUDIT_LOG_FILE",
                                    dest = 'modsecurityAuditLogPath',
                                    type = unicode,
                                    default = None,
                                    help = u"Modsecurity audit log file path or '-' to read from standard input.")
        argumentParser.add_argument(u"-d",
                                    u"--data-url",
                                    dest = 'dataURL',
                                    type = unicode,
                                    required = True,
                                    default = None,
                                    help = u"Example: 'sqlite:////tmp/modsecurity-exception-factory.db'")
        argumentParser.add_argument(u"-c",
                                    u"--config-file",
                                    dest = 'configFilePath',
                                    type = unicode,
                                    default = None)
    
        argumentObject = argumentParser.parse_args(argumentList)
        
        # Try to parse config.
        config = Config(argumentObject.configFilePath)
        variableNameList = config.variableNameList()
        ignoredVariableDict = config.ignoredVariableDict()
        minimumOccurrenceCountThreshold = config.minimumOccurrenceCountThreshold()
        maximumValueCountThreshold = config.maximumValueCountThreshold()
        

        # Initialize data source object.
        dataSource = ModsecurityAuditDataSourceSQL(argumentObject.dataURL)
        
        # Parse log if given.
        if argumentObject.modsecurityAuditLogPath is not None:
            self._parseFile(argumentObject.modsecurityAuditLogPath, dataSource)

        # Preparing correlation engine.
        correlationEngine = CorrelationEngine(variableNameList,
                                              ignoredVariableDict,
                                              minimumOccurrenceCountThreshold,
                                              maximumValueCountThreshold)
        correlationEngine.addProgressListener(CorrelationProgressListenerConsole(sys.stderr))
        
        # Correlating and writing exceptions progressively using the power of Python generators.
        ModsecurityExceptionWriter(stream = sys.stdout).write(correlationEngine.correlate(dataSource))
    
        return 0
Example #20
def pytest_configure(config):
    """
    Do core setup operations from manage.py before collecting tests.
    """
    if config.getoption('help'):
        return
    enable_contracts = os.environ.get('ENABLE_CONTRACTS', False)
    if not enable_contracts:
        contracts.disable_all()
    settings_module = os.environ.get('DJANGO_SETTINGS_MODULE')
    startup_module = 'cms.startup' if settings_module.startswith('cms') else 'lms.startup'
    startup = importlib.import_module(startup_module)
    startup.run()
Example #21
def pytest_configure(config):
    """
    Do core setup operations from manage.py before collecting tests.
    """
    if config.getoption('help'):
        return
    enable_contracts = os.environ.get('ENABLE_CONTRACTS', False)
    if not enable_contracts:
        contracts.disable_all()
    settings_module = os.environ.get('DJANGO_SETTINGS_MODULE')
    startup_module = 'cms.startup' if settings_module.startswith(
        'cms') else 'lms.startup'
    startup = importlib.import_module(startup_module)
    startup.run()
Example #22
def graph_simplification_test():
    contracts.disable_all()
    print('Generating random network')
#    G1 = random_connected_pose_network(n=100, max_t=100,
#                                       max_connection_dist=10,
#                                        connect_self=False)
    G1 = grid_graph(nrows=10, ncols=10, side=5)
    print('Checking random network')
    assert_exact(G1)
    # todo: remove some edges
    G2, reattach = simplify_graph_aggressive(G1, max_dist=20)
    
    assert_exact(G2)
    print('before: ', G1.nodes())
    print('after: ', G2.nodes())
Example #23
def graph_simplification_test():
    contracts.disable_all()
    print('Generating random network')
    #    G1 = random_connected_pose_network(n=100, max_t=100,
    #                                       max_connection_dist=10,
    #                                        connect_self=False)
    G1 = grid_graph(nrows=10, ncols=10, side=5)
    print('Checking random network')
    assert_exact(G1)
    # todo: remove some edges
    G2, reattach = simplify_graph_aggressive(G1, max_dist=20)

    assert_exact(G2)
    print('before: ', G1.nodes())
    print('after: ', G2.nodes())
Example #24
def test_consistency_uncertainty():
    print('here')
    pass
    contracts.disable_all()
    symdds = 'sym-dpchain1-120'
    print('instancing dds %s' % symdds)
    dds = get_conftools_discdds().instance(symdds)
    shape = dds.get_shape()
    d1f = dds.actions[0].get_diffeo2d_forward()
    d1b = dds.actions[0].get_diffeo2d_backward()
    
    fb = Diffeomorphism2D.compose(d1f, d1b)
    bf = Diffeomorphism2D.compose(d1b, d1f)
    identity = Diffeomorphism2D.identity(shape)
    print(Diffeomorphism2D.distance_L2_infow(d1f, identity))
    print(Diffeomorphism2D.distance_L2_infow(d1b, identity))
    print(Diffeomorphism2D.distance_L2_infow(fb, identity))
    print(Diffeomorphism2D.distance_L2_infow(bf, identity))

    action = dds.actions[0]
    action2 = consistency_based_uncertainty(action, None)

    r = Report(symdds)
    r.text('symdds', symdds)
    with r.subsection('action') as sub:
        action.display(sub)
    with r.subsection('action2') as sub:
        action2.display(sub)
#         
#     with r.subsection('misc') as sub:
#         d = d1f.get_discretized_diffeo()
#         f = sub.figure()
#         f.array_as_image('d0', d[:, :, 0])
#         f.array_as_image('d1', d[:, :, 1])
#         
        
#     with r.subsection('d1f') as sub:
#         d1f.display(sub)
#     with r.subsection('d1b') as sub:
#         d1b.display(sub)
# 
#     with r.subsection('fb') as sub:
#         fb.display(sub)
#     with r.subsection('bf') as sub:
#         bf.display(sub)
    
    r.to_html('test_consistency_uncertainty.html')
Example #25
    def init(self):
        contracts.disable_all()
        id_discdds = self.config.id_discdds

        dp_config = get_dp_config()

        dp_config.load(self.config.config_dir)
        self.discdds = dp_config.discdds.instance(id_discdds)

        N = self.config.nsteps
        m = make_matrix(nsteps=N, mult=self.config.mult)

        cmd_to_action = {'a': 3, 'b': 2, 'c': 1, 'd': 0}
        cmd_inv = {'a': 'c', 'b': 'd'}

        mult = self.config.mult
        sequence = {0: 0, 1: 1 * mult, 2: 2 * mult, 3: 3 * mult}

        @contract(ins='tuple(int,str)', returns='list[int]')
        def normalize_instruction(ins):
            num, cmd = ins
            if num < 0:
                cmd = cmd_inv[cmd]
                num = -num
            assert num >= 0
            k = cmd_to_action[cmd]
            n = sequence[num]
            return [k] * n

        @contract(splan='list[P](tuple(int,str))', returns='list(int)')
        def normalize_splan(splan):
            plan = []
            for ins in splan:
                plan.extend(normalize_instruction(ins))
            return plan

        M = 2 * N + 1
        self.actions = []
        for i, j in itertools.product(range(M), range(M)):
            m[i][j] = normalize_splan(m[i][j])
            action = self.discdds.plan2action(m[i][j])
            self.actions.append(action)

        self.info('shape: %s' % str(self.discdds.get_shape()))
        self.M = M
Example #26
 def init(self):
     contracts.disable_all()
     id_discdds = self.config.id_discdds
     
     dp_config = get_dp_config()
 
     dp_config.load(self.config.config_dir)
     self.discdds = dp_config.discdds.instance(id_discdds)
     
     N = self.config.nsteps
     m = make_matrix(nsteps=N, mult=self.config.mult)
     
     cmd_to_action = {'a': 3, 'b': 2, 'c': 1, 'd': 0}
     cmd_inv = {'a': 'c', 'b': 'd'}
     
     mult = self.config.mult
     sequence = {0: 0, 1: 1 * mult, 2: 2 * mult, 3: 3 * mult}
     
     @contract(ins='tuple(int,str)', returns='list[int]')
     def normalize_instruction(ins):
         num, cmd = ins
         if num < 0:
             cmd = cmd_inv[cmd]
             num = -num 
         assert num >= 0
         k = cmd_to_action[cmd]
         n = sequence[num]
         return [k] * n
     
     @contract(splan='list[P](tuple(int,str))', returns='list(int)')
     def normalize_splan(splan):
         plan = []
         for ins in splan:
             plan.extend(normalize_instruction(ins))
         return plan
     
     M = 2 * N + 1
     self.actions = []
     for i, j in itertools.product(range(M), range(M)):
         m[i][j] = normalize_splan(m[i][j])
         action = self.discdds.plan2action(m[i][j])
         self.actions.append(action)
     
     self.info('shape: %s' % str(self.discdds.get_shape()))
     self.M = M
Example #27
def main():
    np.seterr(all='raise')
    
    parser = OptionParser()

    parser.add_option("--plots", default=None)

    parser.add_option("--slow", default=False, action='store_true',
                      help='Enables sanity checks.')
    
    parser.add_option("--seed", default=None, type='int',
                      help='[= %default] Seed for random number generator.')
    
    parser.add_option("--min_nodes", default=250, type='int',
                      help='[= %default] Minimum number of nodes to simplify to.')

    parser.add_option("--max_dist", default=10, type='float',
                      help='[= %default] Maximum distance for graph simplification.')

    (options, args) = parser.parse_args() #@UnusedVariable
    
    np.random.seed(options.seed)    
    
    if not options.slow:
        disable_all()
    
    assert len(args) <= 1
    
    filename = args[0] if args else 'stdin'
    G = smart_load(filename, raise_if_unknown=True, progress=True)
        
    def eprint(x): sys.stderr.write('%s\n' % x)
     
    eprint('Loaded graph with %d nodes, %d edges.' % (G.number_of_nodes(),
                                                     G.number_of_edges()))

    G2, how_to_reattach = simplify_graph_aggressive(G, #@UnusedVariable
                                max_dist=options.max_dist,
                                min_nodes=options.min_nodes,
                                eprint=eprint)
    
    eprint('Reduced graph with %d nodes, %d edges.' % (G2.number_of_nodes(),
                                                     G2.number_of_edges()))
    G2.graph['name'] = '%s-sim%dm' % (G.graph['name'], options.max_dist)
    graph_write(G2, sys.stdout)
Example #28
    def __init__(self, **params):
        import rospy
        contracts.disable_all()  # XXX

        self.viz_level = params.get('viz_level', VizLevel.Everything)

        if self.viz_level > VizLevel.Nothing:
            from . import Marker, Image
            self.publisher = rospy.Publisher('~markers', Marker)
            self.pub_sensels_image = rospy.Publisher('~sensels_image', Image)
            self.pub_commands_image = rospy.Publisher('~commands_image', Image)
            self.first_time = True

        if self.viz_level >= VizLevel.State:
            from . import  String
            self.pub_state = rospy.Publisher('~state', String)

        BOVehicleSimulation.__init__(self, **params)
Example #29
def main():
    parser = OptionParser(usage=usage)

    parser.add_option("--slow", default=False, action='store_true',
                      help='Enables sanity checks.')

    parser.add_option("--max_dist", default=15, type='float',
                      help='[= %default] Maximum distance for graph simplification.')
    parser.add_option("--min_nodes", default=250, type='float',
                      help='[= %default] Minimum number of nodes to simplify to.')
    parser.add_option("--scale", default=10000, type='float',
                      help='[= %default] Controls the weight of angular vs linear .')
    
    parser.add_option("--seed", default=42, type='int',
                      help='[= %default] Seed for random number generator.')
    
    (options, args) = parser.parse_args() #@UnusedVariable
    
    np.random.seed(options.seed)    
    
    if not options.slow:
        disable_all()
    # TODO: warn
    
    if len(args) > 1:
        raise Exception('Too many arguments')
    
    filename = args[0] if args else 'stdin'
    G = smart_load(filename, raise_if_unknown=True, progress=True)

    algorithm = EFPNO_S
    params = dict(max_dist=options.max_dist,
                  min_nodes=options.min_nodes,
                  scale=options.scale)
    
    instance = algorithm(params)
    results = instance.solve(G)

    G2 = results['solution']
    # G2 = results['G_landmarks']

    G2.graph['name'] = '%s-solved%dm' % (G.graph['name'], options.max_dist)
    graph_write(G2, sys.stdout)
Example #30
def pytest_configure(config):
    """
    Do core setup operations from manage.py before collecting tests.
    """
    if config.pluginmanager.hasplugin(
            "pytest_jsonreport") or config.pluginmanager.hasplugin(
                "json-report"):
        config.pluginmanager.register(DeferPlugin())
    else:
        logging.info("pytest did not register json_report correctly")

    if config.getoption('help'):
        return
    enable_contracts = os.environ.get('ENABLE_CONTRACTS', False)
    if not enable_contracts:
        contracts.disable_all()
    settings_module = os.environ.get('DJANGO_SETTINGS_MODULE')
    startup_module = 'cms.startup' if settings_module.startswith(
        'cms') else 'lms.startup'
    startup = importlib.import_module(startup_module)
    startup.run()
Example #31
    def __init__(self, collector_uri, 
                 namespace=None, app_id=None, context_vendor=None, encode_base64=DEFAULT_ENCODE_BASE64, contracts=True):
        """
        Constructor
        """
        if not contracts:
            disable_all()

        self.collector_uri = self.as_collector_uri(collector_uri)

        self.config = {
            "encode_base64":    encode_base64,
            "context_vendor": context_vendor
        }

        self.standard_nv_pairs = {
            "p": DEFAULT_PLATFORM,
            "tv": VERSION,
            "tna": namespace,
            "aid": app_id
        }
Example #32
        def test_disabled_contracts(self):
            contracts.disable_all()

            @contract
            def disabled(phrase):
                """
                :type phrase: str
                """
                return phrase
            # this should not throw w/ contracts disabled
            disabled(int(8))
            contracts.enable_all()
            # this will still not throw because the disabled value is checked at decoration time only
            disabled(int(8))

            @contract
            def enabled(phrase):
                """
                :type phrase: str
                """
                return phrase
            # a newly decorated function will throw
            with pytest.raises(exceptions.ContractNotRespected):
                enabled(int(8))
Example #33
def main():
    parser = OptionParser()

    group = OptionGroup(parser, "Files and directories")

    group.add_option("--outdir",
                     help='Directory with variables.pickle and where '
                     'the output will be placed.')

    parser.add_option_group(group)

    group = OptionGroup(parser, "Experiments options")

    group.add_option("--fast",
                     default=False,
                     action='store_true',
                     help='Disables sanity checks.')

    group.add_option("--set",
                     default='*',
                     help='[= %default] Which combinations to run.')

    group.add_option("--seed",
                     default=None,
                     type='int',
                     help='[= %default] Seed for random number generator.')

    parser.add_option_group(group)

    group = OptionGroup(parser, "Compmake options")

    group.add_option("--remake",
                     default=False,
                     action='store_true',
                     help='Remakes all (non interactive).')

    group.add_option("--report",
                     default=False,
                     action='store_true',
                     help='Cleans and redoes all reports (non interactive).')

    group.add_option(
        "--report_stats",
        default=False,
        action='store_true',
        help='Cleans and redoes the reports for the stats. (non interactive)')

    parser.add_option_group(group)

    (options, args) = parser.parse_args()  #@UnusedVariable

    np.random.seed(options.seed)

    if options.fast:
        disable_all()

    assert not args
    assert options.outdir is not None

    available_algorithms, available_test_cases, available_sets = get_everything(
    )

    which = expand_string(options.set, list(available_sets.keys()))

    if len(which) == 1:
        compmake_storage = os.path.join(options.outdir, 'compmake', which[0])
    else:
        compmake_storage = os.path.join(options.outdir, 'compmake',
                                        'common_storage')

    use_filesystem(compmake_storage)

    print('Staging creation of test cases reports')
    test_cases = {}
    test_case_reports = {}

    def stage_test_case_report(tcid):
        if not tcid in available_test_cases:
            msg = ('Could not find test case %r \n %s' %
                   (tcid, available_test_cases.keys()))
            raise Exception(msg)
        if not tcid in test_cases:
            command, args = available_test_cases[tcid]
            job_id = 'test_case_data-%s' % tcid
            test_cases[tcid] = comp(command, job_id=job_id, **args)

        if not tcid in test_case_reports:
            job_id = 'test_case-%s-report' % tcid
            report = comp(create_report_tc,
                          tcid,
                          test_cases[tcid],
                          job_id=job_id)
            job_id += '-write'
            filename = os.path.join(options.outdir, 'test_cases',
                                    '%s.html' % tcid)
            comp(write_report, report, filename, job_id=job_id)
            test_case_reports[tcid] = report
        return test_case_reports[tcid]

    # set of tuple (algo, test_case)
    executions = {}

    def stage_execution(tcid, algid):
        stage_test_case_report(tcid)

        key = (tcid, algid)
        if not key in executions:
            test_case = test_cases[tcid]
            algo_class, algo_params = available_algorithms[algid]
            job_id = 'solve-%s-%s-run' % (tcid, algid)
            results = comp(run_combination,
                           tcid,
                           test_case,
                           algo_class,
                           algo_params,
                           job_id=job_id)
            executions[key] = results

            exc_id = '%s-%s' % (tcid, algid)
            # Create iterations report
            job_id = 'solve-%s-report' % exc_id
            report = comp(create_report_execution,
                          exc_id,
                          tcid,
                          test_case,
                          algo_class,
                          algo_params,
                          results,
                          job_id=job_id)

            job_id += '-write'
            filename = os.path.join(options.outdir, 'executions',
                                    '%s-%s.html' % (tcid, algid))
            comp(write_report, report, filename, job_id=job_id)

        return executions[key]

    for comb_id in which:
        comb = available_sets[comb_id]
        alg_ids = expand_string(comb.algorithms, available_algorithms.keys())
        tc_ids = expand_string(comb.test_cases, available_test_cases.keys())

        print(
            'Set %r has %d test cases and %d algorithms (~%d jobs in total).' %
            (comb_id, len(alg_ids), len(tc_ids),
             len(alg_ids) * len(tc_ids) * 2))

        deps = {}
        for t, a in itertools.product(tc_ids, alg_ids):
            deps[(t, a)] = stage_execution(t, a)

        job_id = 'tex-%s' % comb_id
        comp(create_tables_for_paper,
             comb_id,
             tc_ids,
             alg_ids,
             deps,
             job_id=job_id)

        job_id = 'set-%s-report' % comb_id
        report = comp(create_report_comb_stats,
                      comb_id,
                      tc_ids,
                      alg_ids,
                      deps,
                      job_id=job_id)

        job_id += '-write'
        filename = os.path.join(options.outdir, 'stats', '%s.html' % comb_id)
        comp(write_report, report, filename, job_id=job_id)

    if options.report or options.report_stats:
        if options.report:
            batch_command('clean *-report*')
        elif options.report_stats:
            batch_command('clean set-*  tex*')
        batch_command('parmake')
    elif options.remake:
        batch_command('clean *')
        batch_command('make set-* tex-*')
    else:
        compmake_console()
Example #34
#import tensorflow as tf
#from tensorflow.contrib import learn
from json_tricks.np import dump, dumps, load, loads, strip_comments

#
from toolz import valmap, map, partial, compose, first, pipe, thread_first
from toolz.itertoolz import accumulate, sliding_window
#Custom modules
from dfs_portal.utils.htools import order_dict, timing, d2l
#from dfs_portal.utils.custom_contracts import *
from dfs_portal.core.transforms import df2xy

from celery.contrib import rdb

#disable all contracts
disable_all()
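# Because checks are disabled before the class below is decorated, its
# @contract decorators are effectively no-ops: the enabled/disabled state
# is captured at decoration time (see Example #2 above).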


class LassoPredictor(Persistent):
    @contract(hypers='dict')
    def __init__(self, hypers):
        modelHypers = self.extract_model_hypers(hypers)
        self.model = LassoLarsCV(**modelHypers)

    @timing
    def fit(self, df, features, targetCol, validationSplit=0.2):

        print("Running fit function:")
        print(df)
        XTrain, yTrain = df2xy(df, features, targetCol)
        if XTrain.shape[0] < 3:
Example #35
def dp(arguments):
    usage = substitute(usage_pattern, commands_list=commands_list,
                       cmd=MAIN_CMD_NAME)

    parser = MyOptionParser(prog=MAIN_CMD_NAME, usage=usage)
    parser.disable_interspersed_args()

    parser.add_option("--contracts", default=False, action='store_true',
                      help="Activate PyContracts (disabled by default)")

    parser.add_option("--profile", default=False, action='store_true',
                      help="Use Python profiler")

    parser.add_option("-d", "--directory", default="default:.",
                      help="Configuration directory")

    (options, args) = parser.parse_args(arguments)

    if not options.contracts:
        logger.warning('PyContracts disabled for speed. Use --contracts to activate.')
        contracts.disable_all()

    if not args:
        msg = ('Please supply a command.\nAvailable commands:\n%s' % commands_list)
        raise UserError(msg)

    cmd = args[0]
    cmd_args = args[1:]

    if not cmd in Storage.commands:
        msg = ('Unknown command %r. Available: %s.' % 
               (cmd, ", ".join(Storage.commands.keys())))
        raise UserError(msg)

    confdir = options.directory
    config = DiffeoplanConfigMaster()
    config.load()
    if confdir is not None:
        config.load(confdir)
    
    set_current_config(config)

    function = Storage.commands[cmd]
    usage = function.short_usage 
    parser = CmdOptionParser(prog='%s %s' % (MAIN_CMD_NAME, cmd), usage=usage,
                             args=cmd_args)
    parser.enable_interspersed_args()
    
    def go():
        return function(config, parser)

    if not options.profile:
        go()
    else:
        logger.warning('Note: the profiler does not work when using '
                       'parallel execution. (use "make" instead of "parmake").')
        import cProfile
        cProfile.runctx('go()', globals(), locals(), 'dp_prof')
        import pstats
        p = pstats.Stats('dp_prof')
        n = 30
        p.sort_stats('cumulative').print_stats(n)
        p.sort_stats('time').print_stats(n)
Example #36
    def go(self):  
         
        # check that if we have a parent who is a quickapp,
        # then use its context      
        qapp_parent = self.get_qapp_parent()
        if qapp_parent is not None:
            # self.info('Found parent: %s' % qapp_parent)
            context = qapp_parent.child_context  
            self.define_jobs_context(context)
            return
        else:
            # self.info('Parent not found')
            pass
            

        if False:            
            import resource
            gbs = 5
            max_mem = long(gbs * 1000 * 1048576L)
            resource.setrlimit(resource.RLIMIT_AS, (max_mem, -1))
            resource.setrlimit(resource.RLIMIT_DATA, (max_mem, -1))

        options = self.get_options()
        
        if self.get_qapp_parent() is None:
            # only do this if somebody didn't do it before
            if not options.contracts:
                msg = 'PyContracts disabled for speed. Use --contracts to activate.'
                self.logger.warning(msg)
                contracts.disable_all()

        warnings.warn('removed configuration below')  # (start)

        output_dir = options.output
        
        # Compmake storage for results        
        storage = os.path.join(output_dir, 'compmake')
        sf = StorageFilesystem(storage, compress=True)
#     sf = StorageFilesystem2(directory)
#     sf = MemoryCache(sf)
        set_compmake_db(sf)

        # use_filesystem(storage)
        read_rc_files()
        
        context = CompmakeContext(parent=None, qapp=self, job_prefix=None,
                                  output_dir=output_dir)
        self.context = context
        original = get_comp_prefix()
        self.define_jobs_context(context)
        comp_prefix(original) 
        
        context.finalize_jobs()
        
        if context.n_comp_invocations == 0:
            # self.comp was never called
            msg = 'No jobs defined.'
            raise ValueError(msg)
        else: 
            if not options.console:
                batch_result = batch_command(options.command)
                if isinstance(batch_result, str):
                    ret = QUICKAPP_COMPUTATION_ERROR
                elif isinstance(batch_result, int):
                    if batch_result == 0:
                        ret = 0
                    else:
                        # xxx: discarded information
                        ret = QUICKAPP_COMPUTATION_ERROR
                else:
                    assert False 
                return ret
            else:
                compmake_console()
                return 0
Example #37
def dp(arguments):
    usage = substitute(usage_pattern,
                       commands_list=commands_list,
                       cmd=MAIN_CMD_NAME)

    parser = MyOptionParser(prog=MAIN_CMD_NAME, usage=usage)
    parser.disable_interspersed_args()

    parser.add_option("--contracts",
                      default=False,
                      action='store_true',
                      help="Activate PyContracts (disabled by default)")

    parser.add_option("--profile",
                      default=False,
                      action='store_true',
                      help="Use Python profiler")

    parser.add_option("-d",
                      "--directory",
                      default="default:.",
                      help="Configuration directory")

    (options, args) = parser.parse_args(arguments)

    if not options.contracts:
        logger.warning(
            'PyContracts disabled for speed. Use --contracts to activate.')
        contracts.disable_all()

    if not args:
        msg = ('Please supply a command.\nAvailable commands:\n%s' %
               commands_list)
        raise UserError(msg)

    cmd = args[0]
    cmd_args = args[1:]

    if not cmd in Storage.commands:
        msg = ('Unknown command %r. Available: %s.' %
               (cmd, ", ".join(Storage.commands.keys())))
        raise UserError(msg)

    confdir = options.directory
    config = DiffeoplanConfigMaster()
    config.load()
    if confdir is not None:
        config.load(confdir)

    set_current_config(config)

    function = Storage.commands[cmd]
    usage = function.short_usage
    parser = CmdOptionParser(prog='%s %s' % (MAIN_CMD_NAME, cmd),
                             usage=usage,
                             args=cmd_args)
    parser.enable_interspersed_args()

    def go():
        return function(config, parser)

    if not options.profile:
        go()
    else:
        logger.warning(
            'Note: the profiler does not work when using '
            'parallel execution. (use "make" instead of "parmake").')
        import cProfile
        cProfile.runctx('go()', globals(), locals(), 'dp_prof')
        import pstats
        p = pstats.Stats('dp_prof')
        n = 30
        p.sort_stats('cumulative').print_stats(n)
        p.sort_stats('time').print_stats(n)
Example #38
def main():
    init_matplotlib()
    reprep.RepRepDefaults.default_image_format = MIME_PDF

    parser = OptionParser()

    group = OptionGroup(parser, "Files and directories")

    group.add_option("--outdir",
                     help='Directory with variables.pickle and where '
                     'the output will be placed.')

    #    group.add_option("--testdir", default=None)

    group.add_option("--data_sick",
                     default=None,
                     help='.pickle file containing Sick data.')
    group.add_option("--data_mino",
                     default=None,
                     help='directory containing Mino data.')
    group.add_option("--data_fly",
                     default=None,
                     help='.pickle file containing fly simulation data.')

    group.add_option("--test_cases",
                     default=None,
                     help='Base directory for test cases.')

    parser.add_option_group(group)

    group = OptionGroup(parser, "Experiments options")

    group.add_option("--contracts",
                     default=False,
                     action='store_true',
                     help='Enables PyContracts sanity checks.')

    group.add_option("--set",
                     default='*',
                     help='[= %default] Which combinations to run.')

    group.add_option("--seed",
                     default=None,
                     type='int',
                     help='[= %default] Seed for random number generator.')

    parser.add_option_group(group)

    group = OptionGroup(parser, "Compmake options")

    group.add_option("--remake",
                     default=False,
                     action='store_true',
                     help='Remakes all (non interactive).')

    group.add_option("--report",
                     default=False,
                     action='store_true',
                     help='Cleans and redoes all reports (non interactive).')

    group.add_option("--report_stats",
                     default=False,
                     action='store_true',
                     help='Cleans and redoes the reports for the stats. '
                     '(non interactive)')

    parser.add_option_group(group)

    (options, args) = parser.parse_args()  #@UnusedVariable

    np.random.seed(options.seed)

    if not options.contracts:
        disable_all()

    assert options.outdir is not None

    available_test_cases = {}

    if options.test_cases is not None:
        TCConfig.load(options.test_cases)

        for tc_id in TCConfig.test_cases:
            available_test_cases[tc_id] = \
                (tc_load_spec, {'spec': TCConfig.test_cases[tc_id]})

    print('Generating synthetic test cases...')
    synthetic = get_syntethic_test_cases()
    available_test_cases.update(synthetic)

    euclidean = get_euclidean_test_cases()
    available_test_cases.update(euclidean)

    if options.data_sick is not None:
        print('Preparing Sick data...')
        real = get_real_test_cases(options.data_sick)
        available_test_cases.update(real)

    if options.data_fly is not None:
        print('Preparing fly data...')
        available_test_cases.update(get_fly_testcase(options.data_fly))


#    if options.data_mino is not None:
#        print('Preparing Mino data...')
#        available_test_cases.update(get_mino_testcases(options.data_mino))

    check('dict(str: tuple(Callable, dict))', available_test_cases)

    print('Creating list of algorithms..')
    algorithms = get_list_of_algorithms()
    check('dict(str: tuple(Callable, dict))', algorithms)

    print('Creating list of combinations..')
    combinations = get_list_of_combinations()

    which = expand_string(options.set, list(combinations.keys()))
    print('I will use the sets: %s' % which)
    if len(which) == 1:
        compmake_storage = join(options.outdir, 'compmake', which[0])
    else:
        compmake_storage = join(options.outdir, 'compmake', 'common_storage')

    use_filesystem(compmake_storage)

    print('Available %d test cases and %d algorithms' %
          (len(available_test_cases), len(algorithms)))

    print('Staging creation of test cases reports')
    test_cases = {}
    test_case_reports = {}

    def stage_test_case_report(tcid):
        if not tcid in available_test_cases:
            msg = ('Could not find test case %r \n %s' %
                   (tcid, available_test_cases.keys()))
            raise Exception(msg)
        if not tcid in test_cases:
            f, args = available_test_cases[tcid]

            job_id = 'test_case_data-%s' % tcid
            test_cases[tcid] = comp(test_case_generate,
                                    f=f,
                                    args=args,
                                    job_id=job_id)

        if not tcid in test_case_reports:
            job_id = 'test_case-%s-report' % tcid
            report = comp(create_report_test_case,
                          tcid,
                          test_cases[tcid],
                          job_id=job_id)
            job_id += '-write'
            filename = join(options.outdir, 'test_cases', '%s.html' % tcid)
            comp(write_report, report, filename, job_id=job_id)
            test_case_reports[tcid] = report
        return test_case_reports[tcid]

    # set of tuple (algo, test_case)
    executions = {}

    def stage_execution(tcid, algid):
        exc_id = '%s-%s' % (tcid, algid)
        stage_test_case_report(tcid)

        key = (tcid, algid)
        if not key in executions:
            test_case = test_cases[tcid]
            if not algid in algorithms:
                raise Exception('No %r known in %s' %
                                (algid, algorithms.keys()))
            algo_class, algo_params = algorithms[algid]

            executions[key] = comp(run_combination,
                                   test_case,
                                   algo_class,
                                   algo_params,
                                   job_id='calib-%s-run' % exc_id)

            basename = join(options.outdir, 'results', '%s-%s' % (tcid, algid))
            comp(save_results,
                 basename=basename,
                 results=executions[key],
                 job_id='calib-%s-save' % exc_id)

            # Create iterations report
            report = comp(create_report_iterations,
                          exc_id,
                          executions[key],
                          job_id='calib-%s-report' % exc_id)

            filename = join(options.outdir, 'executions',
                            '%s-%s.html' % (tcid, algid))
            comp(write_report,
                 report,
                 filename,
                 job_id='calib-%s-report-write' % exc_id)

        return executions[key]

    for comb_id in which:
        comb = combinations[comb_id]
        alg_ids = expand_string(comb.algorithms, algorithms.keys())
        tc_ids = expand_string(comb.test_cases, available_test_cases.keys())

        print(
            'Set %r has %d test cases and %d algorithms (~%d jobs in total).' %
            (comb_id, len(alg_ids), len(tc_ids),
             len(alg_ids) * len(tc_ids) * 2))

        deps = {}
        for t, a in itertools.product(tc_ids, alg_ids):
            deps[(t, a)] = stage_execution(t, a)

        job_id = 'tex-%s' % comb_id
        tables_dir = join(options.outdir, 'tables')
        comp(create_tables_for_paper,
             tables_dir,
             comb_id,
             tc_ids,
             alg_ids,
             deps,
             job_id=job_id)

        job_id = 'set-%s-report' % comb_id
        report = comp(create_report_comb_stats,
                      comb_id,
                      tc_ids,
                      alg_ids,
                      deps,
                      job_id=job_id)

        job_id += '-write'
        filename = join(options.outdir, 'stats', '%s.html' % comb_id)
        comp(write_report, report, filename, job_id=job_id)

    if options.report or options.report_stats:
        if options.report:
            batch_command('clean *-report*')
        elif options.report_stats:
            batch_command('clean set-*  tex*')
        batch_command('parmake')
    elif options.remake:
        batch_command('clean *')
        batch_command('make set-* tex-*')
    else:
        compmake_console()
Example #39
# Patch the xml libs before anything else.
from safe_lxml import defuse_xml_libs
defuse_xml_libs()

# Disable PyContract contract checking when running as a webserver
import contracts
contracts.disable_all()

import os

os.environ.setdefault("DJANGO_SETTINGS_MODULE", "cms.envs.aws")

import cms.startup as startup
startup.run()

# This application object is used by the development server
# as well as any WSGI server configured to use this file.
from django.core.wsgi import get_wsgi_application
application = get_wsgi_application()

Example #40
def main():
    init_matplotlib()
    reprep.RepRepDefaults.default_image_format = MIME_PDF

    parser = OptionParser()

    group = OptionGroup(parser, "Files and directories")

    group.add_option("--outdir",
                      help='Directory with variables.pickle and where '
                           'the output will be placed.')

#    group.add_option("--testdir", default=None)

    group.add_option("--data_sick", default=None,
                     help='.pickle file containing Sick data.')
    group.add_option("--data_mino", default=None,
                     help='directory containing Mino data.')
    group.add_option("--data_fly", default=None,
                     help='.pickle file containing fly simulation data.')

    group.add_option("--test_cases", default=None,
                    help='Base directory for test cases.')

    parser.add_option_group(group)

    group = OptionGroup(parser, "Experiments options")

    group.add_option("--contracts", default=False, action='store_true',
                      help='Enables PyContracts sanity checks.')

    group.add_option("--set", default='*',
                      help='[= %default] Which combinations to run.')

    group.add_option("--seed", default=None, type='int',
                      help='[= %default] Seed for random number generator.')

    parser.add_option_group(group)

    group = OptionGroup(parser, "Compmake options")

    group.add_option("--remake", default=False, action='store_true',
                      help='Remakes all (non interactive).')

    group.add_option("--report", default=False, action='store_true',
                      help='Cleans and redoes all reports (non interactive).')

    group.add_option("--report_stats", default=False, action='store_true',
                      help='Cleans and redoes the reports for the stats. '
                      '(non interactive)')

    parser.add_option_group(group)

    (options, args) = parser.parse_args() #@UnusedVariable

    np.random.seed(options.seed)

    if not options.contracts:
        disable_all()

    assert options.outdir is not None

    available_test_cases = {}

    if options.test_cases is not None:
        TCConfig.load(options.test_cases)

        for tc_id in TCConfig.test_cases:
            available_test_cases[tc_id] = \
                (tc_load_spec, {'spec': TCConfig.test_cases[tc_id]})

    print('Generating synthetic test cases...')
    synthetic = get_syntethic_test_cases()
    available_test_cases.update(synthetic)

    euclidean = get_euclidean_test_cases()
    available_test_cases.update(euclidean)

    if options.data_sick is not None:
        print('Preparing Sick data...')
        real = get_real_test_cases(options.data_sick)
        available_test_cases.update(real)

    if options.data_fly is not None:
        print('Preparing fly data...')
        available_test_cases.update(get_fly_testcase(options.data_fly))

#    if options.data_mino is not None:
#        print('Preparing Mino data...')
#        available_test_cases.update(get_mino_testcases(options.data_mino))

    check('dict(str: tuple(Callable, dict))', available_test_cases)

    print('Creating list of algorithms..')
    algorithms = get_list_of_algorithms()
    check('dict(str: tuple(Callable, dict))', algorithms)

    print('Creating list of combinations..')
    combinations = get_list_of_combinations()

    which = expand_string(options.set, list(combinations.keys()))
    print('I will use the sets: %s' % which)
    if len(which) == 1:
        compmake_storage = join(options.outdir, 'compmake', which[0])
    else:
        compmake_storage = join(options.outdir, 'compmake', 'common_storage')

    use_filesystem(compmake_storage)

    print('Available %d test cases and %d algorithms' %
          (len(available_test_cases), len(algorithms)))

    print('Staging creation of test cases reports')
    test_cases = {}
    test_case_reports = {}

    def stage_test_case_report(tcid):
        if not tcid in available_test_cases:
            msg = ('Could not find test case %r \n %s' %
                   (tcid, available_test_cases.keys()))
            raise Exception(msg)
        if not tcid in test_cases:
            f, args = available_test_cases[tcid]

            job_id = 'test_case_data-%s' % tcid
            test_cases[tcid] = comp(test_case_generate, f=f,
                                    args=args, job_id=job_id)

        if not tcid in  test_case_reports:
            job_id = 'test_case-%s-report' % tcid
            report = comp(create_report_test_case,
                          tcid, test_cases[tcid], job_id=job_id)
            job_id += '-write'
            filename = join(options.outdir, 'test_cases', '%s.html' % tcid)
            comp(write_report, report, filename, job_id=job_id)
            test_case_reports[tcid] = report
        return test_case_reports[tcid]

    # set of tuple (algo, test_case)
    executions = {}

    def stage_execution(tcid, algid):
        exc_id = '%s-%s' % (tcid, algid)
        stage_test_case_report(tcid)

        key = (tcid, algid)
        if not key in executions:
            test_case = test_cases[tcid]
            if not algid in algorithms:
                raise Exception('No %r known in %s' %
                                (algid, algorithms.keys()))
            algo_class, algo_params = algorithms[algid]

            executions[key] = comp(run_combination, test_case, algo_class,
                                   algo_params,
                                   job_id='calib-%s-run' % exc_id)

            basename = join(options.outdir, 'results', '%s-%s' % (tcid, algid))
            comp(save_results, basename=basename,
                 results=executions[key],
                    job_id='calib-%s-save' % exc_id)

            # Create iterations report
            report = comp(create_report_iterations, exc_id, executions[key],
                          job_id='calib-%s-report' % exc_id)

            filename = join(options.outdir, 'executions',
                            '%s-%s.html' % (tcid, algid))
            comp(write_report, report, filename,
                 job_id='calib-%s-report-write' % exc_id)

        return executions[key]

    for comb_id in which:
        comb = combinations[comb_id]
        alg_ids = expand_string(comb.algorithms, algorithms.keys())
        tc_ids = expand_string(comb.test_cases, available_test_cases.keys())

        print('Set %r has %d test cases and %d algorithms (~%d jobs in total).'
              % (comb_id, len(tc_ids), len(alg_ids),
                 len(alg_ids) * len(tc_ids) * 2))

        deps = {}
        for t, a in itertools.product(tc_ids, alg_ids):
            deps[(t, a)] = stage_execution(t, a)

        job_id = 'tex-%s' % comb_id
        tables_dir = join(options.outdir, 'tables')
        comp(create_tables_for_paper, tables_dir, comb_id, tc_ids, alg_ids,
             deps, job_id=job_id)

        job_id = 'set-%s-report' % comb_id
        report = comp(create_report_comb_stats,
                      comb_id, tc_ids, alg_ids, deps, job_id=job_id)

        job_id += '-write'
        filename = join(options.outdir, 'stats', '%s.html' % comb_id)
        comp(write_report, report, filename, job_id=job_id)

    if options.report or options.report_stats:
        if options.report:
            batch_command('clean *-report*')
        elif options.report_stats:
            batch_command('clean set-*  tex*')
        batch_command('parmake')
    elif options.remake:
        batch_command('clean *')
        batch_command('make set-* tex-*')
    else:
        compmake_console()
Example #41
    def __enter__(self):
        disable_all()
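The fragment above shows only __enter__. Below is a minimal sketch of the full context manager it suggests, assuming __exit__ is meant to restore checking with contracts.enable_all(); the class name and usage are illustrative, not taken from the original source.

from contracts import disable_all, enable_all

class contracts_disabled(object):
    """Temporarily disable PyContracts checking inside a 'with' block."""

    def __enter__(self):
        disable_all()
        return self

    def __exit__(self, exc_type, exc_value, exc_tb):
        # Simplification: unconditionally re-enable checking on exit,
        # without remembering whether contracts were already disabled.
        enable_all()
        return False  # do not swallow exceptions

# Usage sketch:
# with contracts_disabled():
#     run_expensive_code_without_checks()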
Example #42
def main():
    def spearman(a, b):
        ao = scale_score(a)
        bo = scale_score(b)
        return correlation_coefficient(ao, bo)

    disable_all()

    def seq():
        N = 180
        iterations = 10
        nradii = 100
        radii = np.linspace(5, 180, nradii)

        K = 1
        for radius_deg, i in itertools.product(radii, range(K)):
            print(radius_deg, i)
            # Generate a random symmetric matrix
            # x = np.random.rand(N, N)
            S = random_directions_bounded(3, np.radians(radius_deg), N)
            C = np.dot(S.T, S)
            alpha = 1
            f = lambda x: np.exp(-alpha * (1 - x))
            # f = lambda x : x
            R = f(C)
            # Normalize in [0,1]
            R1 = (R - R.min()) / (R.max() - R.min())
            # Normalize in [-1,1]
            R2 = (R1 - 0.5) * 2

            S1 = simplified_algo(R1, iterations)
            S1w = simplified_algo(R1, iterations, warp=50)
            S2 = simplified_algo(R2, iterations)

            s1 = spearman(cosines_from_directions(S1), R1)
            s1w = spearman(cosines_from_directions(S1w), R1)
            s2 = spearman(cosines_from_directions(S2), R2)

            e1 = np.degrees(overlap_error_after_orthogonal_transform(S, S1))
            e1w = np.degrees(overlap_error_after_orthogonal_transform(S, S1w))
            e2 = np.degrees(overlap_error_after_orthogonal_transform(S, S2))
            r0 = np.degrees(distribution_radius(S))
            r1 = np.degrees(distribution_radius(S1))
            r1w = np.degrees(distribution_radius(S1w))
            r2 = np.degrees(distribution_radius(S2))
            yield dict(R0=r0,
                       R1=r1,
                       R1w=r1w,
                       R2=r2,
                       e1=e1,
                       e2=e2,
                       s1=s1,
                       s2=s2,
                       s1w=s1w,
                       e1w=e1w)

    results = list(seq())
    data = dict((k, np.array([d[k] for d in results])) for k in results[0])

    r = Report('demo-convergence')

    api1 = 'pi1'
    api1w = 'pi1w'
    api2 = 'pi2'

    sets = [(data['R0'] < 90, 'r.'), (data['R0'] >= 90, 'g.')]

    f = r.figure('radius', cols=3, caption='radius of solution')
    with r.data_pylab('r0r1') as pylab:
        for sel, col in sets:
            x = data['R0'][sel]
            y = data['R1'][sel]
            pylab.plot(x, x, 'k--')
            pylab.plot(x, y, col)

        pylab.xlabel('real radius')
        pylab.ylabel('radius (pi1)')
        pylab.axis('equal')
    r.last().add_to(f, caption=api1)

    with r.data_pylab('r0r1w') as pylab:
        for sel, col in sets:
            x = data['R0'][sel]
            y = data['R1w'][sel]
            pylab.plot(x, x, 'k--')
            pylab.plot(x, y, col)

        pylab.xlabel('real radius')
        pylab.ylabel('radius (pi1 + warp)')
        pylab.axis('equal')
    r.last().add_to(f, caption=api1w)

    with r.data_pylab('r0r2') as pylab:
        for sel, col in sets:
            x = data['R0'][sel]
            y = data['R2'][sel]
            pylab.plot(x, x, 'k--')
            pylab.plot(x, y, col)
        pylab.xlabel('real radius')
        pylab.ylabel('radius (pi2)')
        pylab.axis('equal')
    r.last().add_to(f, caption=api2)

    with r.data_pylab('r1r2') as pylab:
        for sel, col in sets:
            pylab.plot(data['R1'][sel], data['R2'][sel], col)

        pylab.xlabel('radius (pi1)')
        pylab.ylabel('radius (pi2)')
        pylab.axis('equal')
    r.last().add_to(f, 'Comparison %s - %s' % (api1, api2))

    with r.data_pylab('r1r1w') as pylab:
        for sel, col in sets:
            pylab.plot(data['R1'][sel], data['R1w'][sel], col)

        pylab.xlabel('radius (pi1)')
        pylab.ylabel('radius (pi1+warp)')
        pylab.axis('equal')
    r.last().add_to(f, 'Comparison %s - %s' % (api1, api1w))

    f = r.figure('spearman', cols=3, caption='Spearman score')
    with r.data_pylab('r0s1') as pylab:
        for sel, col in sets:
            x = data['R0'][sel]
            y = data['s1'][sel]
            pylab.plot(x, y, col)
        pylab.xlabel('real radius')
        pylab.ylabel('spearman (pi1)')
    r.last().add_to(f, caption=api1)

    with r.data_pylab('r0s1w') as pylab:
        for sel, col in sets:
            x = data['R0'][sel]
            y = data['s1w'][sel]
            pylab.plot(x, y, col)
        pylab.xlabel('real radius')
        pylab.ylabel('spearman (pi1+warp)')
    r.last().add_to(f, caption=api1w)

    with r.data_pylab('r0s2') as pylab:
        for sel, col in sets:
            x = data['R0'][sel]
            y = data['s2'][sel]
            pylab.plot(x, y, col)
        pylab.xlabel('real radius')
        pylab.ylabel('spearman (pi2)')
    r.last().add_to(f, caption=api2)

    f = r.figure('final_error', cols=3, caption='Average absolute error')
    with r.data_pylab('r0e') as pylab:
        x = data['R0']
        y = data['e1']
        pylab.plot(x, y, 'm-', label=api1)
        x = data['R0']
        y = data['e1w']
        pylab.plot(x, y, 'g-', label=api1w)
        x = data['R0']
        y = data['e2']
        pylab.plot(x, y, 'b-', label=api2)
        pylab.xlabel('real radius')
        pylab.ylabel('average error (deg)')
        pylab.legend()
    r.last().add_to(f)

    filename = 'cbc_demos/convergence.html'
    print("Writing to %r." % filename)
    r.to_html(filename)
Example #43
def boot_olympics_manager(arguments):
    usage = substitute(usage_pattern, commands_list=commands_list,
                       cmd='boot_olympics_manager')

    parser = OptionParser(prog='boot_olympics_manager', usage=usage)
    parser.disable_interspersed_args()
    parser.add_option("-d", dest='boot_root', default=None,
                      help='Root directory with logs, config, etc. [%default]')

    parser.add_option("-c", dest='extra_conf_dirs', action="append", default=[],
                      help='Adds an extra config dir.')

    parser.add_option("-l", dest='extra_log_dirs', action="append", default=[],
                      help='Adds an extra directory storing logs.')

    parser.add_option("--contracts", default=False, action='store_true',
                      help="Activate PyContracts (disabled by default)")

    parser.add_option("--seterr", dest='seterr', default="warn",
                      help="Sets np.seterr. "
                      "Possible values: ignore, warn, raise, print, log")

    parser.add_option("--profile", default=False, action='store_true',
                      help="Use Python profiler")

    available = LogsFormat.formats.keys()
    parser.add_option("--logformat", dest='log_format',
                      default=BootOlympicsConstants.DEFAULT_LOG_FORMAT,
                      help="Choose format for writing logs in %s. [%%default]"
                        % str(available))

    (options, args) = parser.parse_args(arguments)

    if not args:
        msg = ('Please supply command. Available: %s'
               % ", ".join(Storage.commands.keys()))
        raise UserError(msg)

    cmd = args[0]
    cmd_options = args[1:]

    if not cmd in Storage.commands:
        msg = ('Unknown command %r. Available: %s.' % 
               (cmd, ", ".join(Storage.commands.keys())))
        raise UserError(msg)

    np.seterr(all=options.seterr)
    # underflow is very common in all libraries (e.g. matplotlib)
    np.seterr(under='warn')

    if not options.contracts:
        contracts.disable_all()

    if options.boot_root is None:
        options.boot_root = DirectoryStructure.DEFAULT_ROOT
        logger.info('Using %r as default root directory '
                    '(use -d <dir> to change)' % options.boot_root)


    data_central = DataCentral(options.boot_root)
    
    GlobalConfig.global_load_dir('default')  # need skins
    for dirname in options.extra_conf_dirs:
        GlobalConfig.global_load_dir(dirname)
    
    dir_structure = data_central.get_dir_structure() 
    dir_structure.set_log_format(options.log_format)
    for dirname in options.extra_log_dirs:
        dir_structure.add_log_directory(dirname)

    def go():
        return Storage.commands[cmd](data_central, cmd_options)

    if not options.profile:
        go()
    else:
        logger.warning('Note: the profiler does not work when using '
                       'parallel execution. (use "make" instead of "parmake").')
        import cProfile
        cProfile.runctx('go()', globals(), locals(), 'bom_prof')
        import pstats
        p = pstats.Stats('bom_prof')
        p.sort_stats('cumulative').print_stats(30)
        p.sort_stats('time').print_stats(30)
Example #44
def plan_analysis(global_options, data, args):  # @UnusedVariable
    from vehicles import VehicleSimulation, VehiclesConfig  # XXX
 
    np.random.seed(12345226)
    from matplotlib import rc
    rc('font', **{'family': 'serif',
                  'serif': ['Times', 'Times New Roman', 'Palatino'],
                   'size': 9.0})

    contracts.disable_all()
    usage = ""
    parser = OptionParser(usage=usage)
    parser.disable_interspersed_args()
    parser.add_option("--world", dest='id_world', default="stochastic_box_10",
                       help="Vehicles world to use")
    (options, args) = parser.parse_args(args)
    if args:
        raise Exception('Extra args')

    id_robot = data['id_robot']
#    id_agent = data['id_agent']
    pub = data['publisher']

    vehicle = VehiclesConfig.vehicles.instance(id_robot)  # @UndefinedVariable
    world = VehiclesConfig.worlds.instance(options.id_world)  # @UndefinedVariable

    sim = VehicleSimulation(vehicle, world)

    FORWARD = [1, 1]
    BACKWARD = [-1, -1]
    LEFT = [-1, +1]
    RIGHT = [+1, -1]

    FORWARD = np.array([0, +1])
    BACKWARD = np.array([0, -1])
    LEFT = np.array([+0.3, 0])
    RIGHT = np.array([-0.3, 0])
    FWD_L = np.array([1, +1])
    FWD_R = np.array([1, -1])
    BWD_R = np.array([-1, -1])
    BWD_L = np.array([+1, -1])

    T1 = 1
    T2 = 2
    T1 = 1
    dt = 0.02
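    # Note on the helper below: applying cmd1 for time T, then cmd2, then the
    # reversed commands traces a "commutator" trajectory; for small T the net
    # displacement approximates the Lie bracket of the two motions, presumably
    # to probe sideways displacements that no single command produces (see the
    # commented-out 'sidel*' scenarios further down).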
    def commutator(cmd1, cmd2, T):
        return [(cmd1, T), (cmd2, T), (-cmd1, T), (-cmd2, T)]

    examples = {
                'forwd1': {'trajectory': [(FORWARD, T1)]},
# #                'forwd2': {'trajectory': [(FORWARD, T2)]},
#                'left1': {'trajectory': [(LEFT, T1)]},
#                'right1': {'trajectory': [(RIGHT, T1)]},
#                'back1': {'trajectory': [(BACKWARD, T1)]},
#                'turnl2': {'trajectory': [(LEFT, T2)]},

#                'l3': {'trajectory': [([0.3, 0], T1)]},
#                'l4': {'trajectory': [([0.4, 0], T1)]},
#                'l5': {'trajectory': [([0.5, 0], T1)]},
#                'l6': {'trajectory': [([0.6, 0], T1)]},
#                'l7': {'trajectory': [([0.7, 0], T1)]},
#                'l8': {'trajectory': [([0.8, 0], T1)]},
#                'l9': {'trajectory': [([0.9, 0], T1)]},
#                'l10': {'trajectory': [([1.0, 0], T1)]},
#                'l1': {'trajectory': [([1, 0], T1)]},
#                          
#                'sidel1': {'trajectory': [(LEFT, T1),
#                                          (FORWARD, T1),
#                                          (RIGHT, T1 * 2),
#                                          (BACKWARD, T1),
#                                          (LEFT, T1) ]},
#                'sidel2': {'trajectory': commutator(FWD_L, LEFT, T1) } 
#                'sidel2': {'trajectory': commutator(FORWARD, LEFT, T1) },
#                'sidel3': {'trajectory': commutator(LEFT, FORWARD, T1) }
#                
    }


#    examples = {}

    for name, scenario in examples.items():
        while True:
            try:
                scenario_compute_inputs(scenario, sim, dt=dt)
                break
            except ValueError as e:
                print(e)

    actions = data['actions']
    for name, scenario in examples.items():
        scenario_solve(scenario, actions)


    for name, scenario in examples.items():
        S = pub.section(name)
        scenario_display(scenario, S, sim)
Example #45
def main():

    def spearman(a, b):
        ao = scale_score(a)
        bo = scale_score(b)
        return correlation_coefficient(ao, bo)

    disable_all()

    def seq():
        N = 180
        iterations = 10
        nradii = 100
        radii = np.linspace(5, 180, nradii)

        K = 1
        for radius_deg, i in itertools.product(radii, range(K)):
            print(radius_deg, i)
            # Generate a random symmetric matrix
            # x = np.random.rand(N, N)
            S = random_directions_bounded(3, np.radians(radius_deg), N)
            C = np.dot(S.T, S)
            alpha = 1
            f = lambda x: np.exp(-alpha * (1 - x))
            # f = lambda x : x
            R = f(C)
            # Normalize in [0,1]
            R1 = (R - R.min()) / (R.max() - R.min())
            # Normalize in [-1,1]
            R2 = (R1 - 0.5) * 2

            S1 = simplified_algo(R1, iterations)
            S1w = simplified_algo(R1, iterations, warp=50)
            S2 = simplified_algo(R2, iterations)

            s1 = spearman(cosines_from_directions(S1), R1)
            s1w = spearman(cosines_from_directions(S1w), R1)
            s2 = spearman(cosines_from_directions(S2), R2)

            e1 = np.degrees(overlap_error_after_orthogonal_transform(S, S1))
            e1w = np.degrees(overlap_error_after_orthogonal_transform(S, S1w))
            e2 = np.degrees(overlap_error_after_orthogonal_transform(S, S2))
            r0 = np.degrees(distribution_radius(S))
            r1 = np.degrees(distribution_radius(S1))
            r1w = np.degrees(distribution_radius(S1w))
            r2 = np.degrees(distribution_radius(S2))
            yield dict(R0=r0, R1=r1, R1w=r1w, R2=r2, e1=e1, e2=e2,
                       s1=s1, s2=s2,
                       s1w=s1w, e1w=e1w)

    results = list(seq())
    data = dict((k, np.array([d[k] for d in results])) for k in results[0])

    r = Report('demo-convergence')

    api1 = 'pi1'
    api1w = 'pi1w'
    api2 = 'pi2'

    sets = [(data['R0'] < 90, 'r.'), (data['R0'] >= 90, 'g.')]

    f = r.figure('radius', cols=3, caption='radius of solution')
    with r.data_pylab('r0r1') as pylab:
        for sel, col in sets:
            x = data['R0'][sel]
            y = data['R1'][sel]
            pylab.plot(x, x, 'k--')
            pylab.plot(x, y, col)

        pylab.xlabel('real radius')
        pylab.ylabel('radius (pi1)')
        pylab.axis('equal')
    r.last().add_to(f, caption=api1)

    with r.data_pylab('r0r1w') as pylab:
        for sel, col in sets:
            x = data['R0'][sel]
            y = data['R1w'][sel]
            pylab.plot(x, x, 'k--')
            pylab.plot(x, y, col)

        pylab.xlabel('real radius')
        pylab.ylabel('radius (pi1 + warp)')
        pylab.axis('equal')
    r.last().add_to(f, caption=api1w)

    with r.data_pylab('r0r2') as pylab:
        for sel, col in sets:
            x = data['R0'][sel]
            y = data['R2'][sel]
            pylab.plot(x, x, 'k--')
            pylab.plot(x, y, col)
        pylab.xlabel('real radius')
        pylab.ylabel('radius (pi2)')
        pylab.axis('equal')
    r.last().add_to(f, caption=api2)

    with r.data_pylab('r1r2') as pylab:
        for sel, col in sets:
            pylab.plot(data['R1'][sel], data['R2'][sel], col)

        pylab.xlabel('radius (pi1)')
        pylab.ylabel('radius (pi2)')
        pylab.axis('equal')
    r.last().add_to(f, 'Comparison %s - %s' % (api1, api2))

    with r.data_pylab('r1r1w') as pylab:
        for sel, col in sets:
            pylab.plot(data['R1'][sel], data['R1w'][sel], col)

        pylab.xlabel('radius (pi1)')
        pylab.ylabel('radius (pi1+warp)')
        pylab.axis('equal')
    r.last().add_to(f, 'Comparison %s - %s' % (api1, api1w))

    f = r.figure('spearman', cols=3, caption='Spearman score')
    with r.data_pylab('r0s1') as pylab:
        for sel, col in sets:
            x = data['R0'][sel]
            y = data['s1'][sel]
            pylab.plot(x, y, col)
        pylab.xlabel('real radius')
        pylab.ylabel('spearman (pi1)')
    r.last().add_to(f, caption=api1)

    with r.data_pylab('r0s1w') as pylab:
        for sel, col in sets:
            x = data['R0'][sel]
            y = data['s1w'][sel]
            pylab.plot(x, y, col)
        pylab.xlabel('real radius')
        pylab.ylabel('spearman (pi1+warp)')
    r.last().add_to(f, caption=api1w)

    with r.data_pylab('r0s2') as pylab:
        for sel, col in sets:
            x = data['R0'][sel]
            y = data['s2'][sel]
            pylab.plot(x, y, col)
        pylab.xlabel('real radius')
        pylab.ylabel('spearman (pi2)')
    r.last().add_to(f, caption=api2)

    f = r.figure('final_error', cols=3, caption='Average absolute error')
    with r.data_pylab('r0e') as pylab:
        x = data['R0']
        y = data['e1']
        pylab.plot(x, y, 'm-', label=api1)
        x = data['R0']
        y = data['e1w']
        pylab.plot(x, y, 'g-', label=api1w)
        x = data['R0']
        y = data['e2']
        pylab.plot(x, y, 'b-', label=api2)
        pylab.xlabel('real radius')
        pylab.ylabel('average error (deg)')
        pylab.legend()
    r.last().add_to(f)

    filename = 'cbc_demos/convergence.html'
    print("Writing to %r." % filename)
    r.to_html(filename)
Example #46
def main():
    parser = OptionParser()

    group = OptionGroup(parser, "Files and directories")

    group.add_option("--outdir",
                      help='Directory with variables.pickle and where '
                           'the output will be placed.')
    
    parser.add_option_group(group)

    group = OptionGroup(parser, "Experiments options")

    group.add_option("--fast", default=False, action='store_true',
                      help='Disables sanity checks.')
    
    group.add_option("--set", default='*',
                      help='[= %default] Which combinations to run.')

    group.add_option("--seed", default=None, type='int',
                      help='[= %default] Seed for random number generator.')
    
    parser.add_option_group(group)

    group = OptionGroup(parser, "Compmake options")

    group.add_option("--remake", default=False, action='store_true',
                      help='Remakes all (non interactive).')

    group.add_option("--report", default=False, action='store_true',
                      help='Cleans and redoes all reports (non interactive).')

    group.add_option("--report_stats", default=False, action='store_true',
                      help='Cleans and redoes the reports for the stats. (non interactive)')

    parser.add_option_group(group)

    (options, args) = parser.parse_args() #@UnusedVariable
    
    
    np.random.seed(options.seed)    
    
    if options.fast:
        disable_all()

    assert not args 
    assert options.outdir is not None 
    
    available_algorithms, available_test_cases, available_sets = get_everything()    
    
    which = expand_string(options.set, list(available_sets.keys()))

    if len(which) == 1:    
        compmake_storage = os.path.join(options.outdir, 'compmake', which[0])
    else:
        compmake_storage = os.path.join(options.outdir, 'compmake', 'common_storage')
    
    use_filesystem(compmake_storage)


    print('Staging creation of test cases reports')
    test_cases = {}
    test_case_reports = {} 
    def stage_test_case_report(tcid):
        if not tcid in available_test_cases:
            msg = ('Could not find test case %r \n %s' % 
                   (tcid, available_test_cases.keys()))
            raise Exception(msg)
        if not tcid in test_cases:
            command, args = available_test_cases[tcid]
            job_id = 'test_case_data-%s' % tcid
            test_cases[tcid] = comp(command, job_id=job_id, **args)
        
        if not tcid in  test_case_reports:
            job_id = 'test_case-%s-report' % tcid
            report = comp(create_report_tc,
                          tcid, test_cases[tcid], job_id=job_id)
            job_id += '-write'
            filename = os.path.join(options.outdir, 'test_cases', '%s.html' % tcid)
            comp(write_report, report, filename, job_id=job_id)
            test_case_reports[tcid] = report
        return test_case_reports[tcid]
    
    # set of tuple (algo, test_case)
    executions = {}
    def stage_execution(tcid, algid):
        stage_test_case_report(tcid)
        
        key = (tcid, algid)
        if not key in executions:
            test_case = test_cases[tcid]
            algo_class, algo_params = available_algorithms[algid]
            job_id = 'solve-%s-%s-run' % (tcid, algid)
            results = comp(run_combination, tcid,
                           test_case, algo_class, algo_params,
                            job_id=job_id)
            executions[key] = results
            
            exc_id = '%s-%s' % (tcid, algid)
            # Create iterations report
            job_id = 'solve-%s-report' % exc_id
            report = comp(create_report_execution, exc_id,
                           tcid,
                           test_case, algo_class, algo_params,
                          results, job_id=job_id)
            
            job_id += '-write'
            filename = os.path.join(options.outdir, 'executions',
                                    '%s-%s.html' % (tcid, algid))
            comp(write_report, report, filename, job_id=job_id)
            
        return executions[key]
     
    
    for comb_id in which:
        comb = available_sets[comb_id]
        alg_ids = expand_string(comb.algorithms, available_algorithms.keys())
        tc_ids = expand_string(comb.test_cases, available_test_cases.keys())
        
        print('Set %r has %d test cases and %d algorithms (~%d jobs in total).' %
          (comb_id, len(tc_ids), len(alg_ids), len(alg_ids) * len(tc_ids) * 2))

        deps = {}
        for t, a in itertools.product(tc_ids, alg_ids):
            deps[(t, a)] = stage_execution(t, a)

        job_id = 'tex-%s' % comb_id
        comp(create_tables_for_paper, comb_id, tc_ids, alg_ids, deps,
             job_id=job_id)
        
        job_id = 'set-%s-report' % comb_id
        report = comp(create_report_comb_stats,
                      comb_id, tc_ids, alg_ids, deps, job_id=job_id)
        
        job_id += '-write'
        filename = os.path.join(options.outdir, 'stats', '%s.html' % comb_id)
        comp(write_report, report, filename, job_id=job_id)

    if options.report or options.report_stats:
        if options.report:
            batch_command('clean *-report*')
        elif options.report_stats:
            batch_command('clean set-*  tex*')
        batch_command('parmake')
    elif options.remake:
        batch_command('clean *')
        batch_command('make set-* tex-*')
    else:
        compmake_console()
Example #47
def main():
    usage = """Implements the interface of the SLAM evaluation utilities"""
    parser = OptionParser(usage=usage)

    parser.add_option("--slow", default=False, action='store_true',
                      help='Enables sanity checks.')

    parser.add_option("--max_dist", default=15, type='float',
                      help='[= %default] Maximum distance for graph simplification.')
    parser.add_option("--min_nodes", default=250, type='float',
                      help='[= %default] Minimum number of nodes to simplify to.')
    parser.add_option("--scale", default=10000, type='float',
                      help='[= %default] Controls the weight of angular vs. linear.')
    
    parser.add_option("--seed", default=42, type='int',
                      help='[= %default] Seed for random number generator.')
    
    (options, args) = parser.parse_args() #@UnusedVariable
    np.random.seed(options.seed)
    
    if not options.slow: contracts.disable_all()
    
    fin = sys.stdin
    fout = sys.stdout
    G = DiGraph()
    
    progress = False
    count = 0
    def status():
        return ('Read %5d commands, built graph with %5d nodes and %5d edges. \r' % 
                (count, G.number_of_nodes(), G.number_of_edges()))

    for command in parse_command_stream(fin, raise_if_unknown=False):
        if isinstance(command, (AddVertex2D, Equiv, AddEdge2D)):
            graph_apply_operation(G, command)
        elif isinstance(command, SolveState):
            eprint(status())
            algorithm = EFPNO_S
            params = dict(max_dist=options.max_dist,
                          min_nodes=options.min_nodes,
                          scale=options.scale)
            instance = algorithm(params)
            
            results = instance.solve(G)
            G = results['solution']
            
        elif isinstance(command, QueryState):
            nodes = command.ids if command.ids else G.nodes()
            nodes = sorted(nodes)
            
            fout.write('BEGIN\n')
            for n in nodes:
                t, theta = translation_angle_from_SE2(G.node[n]['pose'])
                fout.write('VERTEX_XYT %d %g %g %g\n' % 
                           (n, t[0], t[1], theta))
            fout.write('END\n')
            fout.flush()
        
        if progress and count % 250 == 0:
            eprint(status())
        count += 1
Example #48
    @new_contract
    def array_view(value):
        # Valid if s_[value] does not raise (s_ is defined elsewhere in the module).
        try:
            s_[value]
        except Exception:
            return False

    @new_contract
    def callable(value):
        return hasattr(value, '__call__')

try:
    from contracts import contract, ContractsMeta

    if not enable_contracts():
        from contracts import disable_all
        disable_all()

    _build_custom_contracts()

except ImportError:
    # no-op interface if PyContracts isn't installed

    def contract(*args, **kwargs):
        if args:  # called as @contract
            return args[0]
        else:   # called as @contract(x='int', ...)
            return lambda func: func

    ContractsMeta = type
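With the ImportError fallback above, the contract decorator degrades to a pass-through in both of its call forms and ContractsMeta degrades to plain type. A small illustrative check, assuming only the fallback definitions (the decorated functions below are hypothetical):

@contract
def identity(x):           # bare form: contract(identity) returns identity
    return x

@contract(x='int,>0')      # parametrized form: returns a no-op wrapper
def positive(x):
    return x

assert identity("anything") == "anything"  # no type checking is performed
assert positive(-5) == -5                  # the contract spec string is ignored
assert ContractsMeta is type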
Example #49
    def go(self): 
        # check that if we have a parent who is a quickapp,
        # then use its context      
        qapp_parent = self.get_qapp_parent()
        if qapp_parent is not None:
            # self.info('Found parent: %s' % qapp_parent)
            qc = qapp_parent.child_context  
            self.define_jobs_context(qc)
            return
        else:
            # self.info('Parent not found')
            pass
            

        if False:            
            import resource
            gbs = 5
            max_mem = long(gbs * 1000 * 1048576L)
            resource.setrlimit(resource.RLIMIT_AS, (max_mem, -1))
            resource.setrlimit(resource.RLIMIT_DATA, (max_mem, -1))

        options = self.get_options()
        
        
        if self.get_qapp_parent() is None:
            # only do this if somebody didn't do it before
            if not options.contracts:
                msg = ('PyContracts disabled for speed. '
                       'Use --contracts to activate.')
                self.logger.warning(msg)
                contracts.disable_all()

        output_dir = options.output
        
        if options.reset:
            if os.path.exists(output_dir):
                self.logger.info('Removing output dir %r.' % output_dir)
                shutil.rmtree(output_dir)
        
        # Compmake storage for results        
        storage = os.path.join(output_dir, 'compmake')
        db = StorageFilesystem(storage, compress=True)
        currently_executing = ['root']
        # The original Compmake context
        oc = Context(db=db, currently_executing=currently_executing)
        # Our wrapper
        qc = CompmakeContext(cc=oc,
                                  parent=None, qapp=self, job_prefix=None,
                                  output_dir=output_dir)
        read_rc_files(oc)
        
        original = oc.get_comp_prefix()
        self.define_jobs_context(qc)
        oc.comp_prefix(original)
        
        merged  = context_get_merge_data(qc)
    
        # Only create the index job if we have reports defined
        # or some branched context (which might create reports)
        has_reports = len(qc.get_report_manager().allreports) > 0
        has_branched = qc.has_branched()
        if has_reports or has_branched:
            self.info('Creating reports')
            oc.comp_dynamic(_dynreports_create_index, merged)
        else:
            self.info('Not creating reports.')
        
        ndefined = len(oc.get_jobs_defined_in_this_session())
        if ndefined == 0:
            # self.comp was never called
            msg = 'No jobs defined.'
            raise ValueError(msg)
        else: 
            if not options.console:
                try: 
                    oc.batch_command(options.command)
                except CommandFailed:
                    ret = QUICKAPP_COMPUTATION_ERROR
                else:
                    ret = 0
                     
                return ret
            else:
                oc.compmake_console()
                return 0
Example #50
def main():
    usage = """Implements the interface of the SLAM evaluation utilities"""
    parser = OptionParser(usage=usage)

    parser.add_option("--slow",
                      default=False,
                      action='store_true',
                      help='Enables sanity checks.')

    parser.add_option(
        "--max_dist",
        default=15,
        type='float',
        help='[= %default] Maximum distance for graph simplification.')
    parser.add_option(
        "--min_nodes",
        default=250,
        type='float',
        help='[= %default] Minimum number of nodes to simplify to.')
    parser.add_option(
        "--scale",
        default=10000,
        type='float',
        help='[= %default] Controls the weight of angular vs. linear.')

    parser.add_option("--seed",
                      default=42,
                      type='int',
                      help='[= %default] Seed for random number generator.')

    (options, args) = parser.parse_args()  #@UnusedVariable
    np.random.seed(options.seed)

    if not options.slow: contracts.disable_all()

    fin = sys.stdin
    fout = sys.stdout
    G = DiGraph()

    progress = False
    count = 0

    def status():
        return (
            'Read %5d commands, built graph with %5d nodes and %5d edges. \r' %
            (count, G.number_of_nodes(), G.number_of_edges()))

    for command in parse_command_stream(fin, raise_if_unknown=False):
        if isinstance(command, (AddVertex2D, Equiv, AddEdge2D)):
            graph_apply_operation(G, command)
        elif isinstance(command, SolveState):
            eprint(status())
            algorithm = EFPNO_S
            params = dict(max_dist=options.max_dist,
                          min_nodes=options.min_nodes,
                          scale=options.scale)
            instance = algorithm(params)

            results = instance.solve(G)
            G = results['solution']

        elif isinstance(command, QueryState):
            nodes = command.ids if command.ids else G.nodes()
            nodes = sorted(nodes)

            fout.write('BEGIN\n')
            for n in nodes:
                t, theta = translation_angle_from_SE2(G.node[n]['pose'])
                fout.write('VERTEX_XYT %d %g %g %g\n' % (n, t[0], t[1], theta))
            fout.write('END\n')
            fout.flush()

        if progress and count % 250 == 0:
            eprint(status())
        count += 1
Example #51
def compmake_main(args):
    if not '' in sys.path:
        sys.path.append('')

    setproctitle('compmake')

    parser = OptionParser(version=version, usage=usage)

    parser.add_option("--profile", default=False, action='store_true',
                      help="Use Python profiler")

    parser.add_option("--contracts", default=False, action='store_true',
                      help="Activate PyContracts")

    parser.add_option("-c", "--command",
                      default=None,
                      help="Run the given command")

    parser.add_option('-n', '--namespace',
                      default='default')

    parser.add_option('--retcodefile',
                      help='If given, the return value is written in this '
                           'file. Useful to check when compmake finished in '
                           'a grid environment. ',
                      default=None)

    parser.add_option('--nosysexit', default=False, action='store_true',
                      help='Does not sys.exit(ret); useful for debugging.')

    config_populate_optparser(parser)

    (options, args) = parser.parse_args(args)

    if not options.contracts:
        # info('Disabling PyContracts; use --contracts to activate.')
        contracts.disable_all()

    # We load plugins after we parsed the configuration
    from compmake import plugins  # @UnusedImport

    # XXX make sure this is the default
    if not args:
        msg = ('I expect at least one argument (db path).'
               ' Use "compmake -h" for usage information.')
        raise UserError(msg)

    if len(args) >= 2:
        msg = 'I only expect one argument. Use "compmake -h" for usage ' \
              'information.'
        msg += '\n args: %s' % args
        raise UserError(msg)

    # if the argument looks like a dirname
    one_arg = args[0]
    if os.path.exists(one_arg) and os.path.isdir(one_arg):
        # If there is a compmake/ folder inside, take it as the root
        child = os.path.join(one_arg, 'compmake')
        if os.path.exists(child):
            one_arg = child

        context = load_existing_db(one_arg)
        # If the context was custom we load it
        if 'context' in context.compmake_db:
            context = context.compmake_db['context']

            # TODO: check number of jobs is nonzero
    else:
        msg = 'Directory not found: %s' % one_arg
        raise UserError(msg)

    args = args[1:]

    def go(context2):
        assert context2 is not None

        if options.command:
            set_compmake_status(CompmakeConstants.compmake_status_slave)
        else:
            set_compmake_status(CompmakeConstants.compmake_status_interactive)

        read_rc_files(context2)

        try:
            if options.command:
                context2.batch_command(options.command)
            else:
                context2.compmake_console()
        except MakeFailed:
            retcode = CompmakeConstants.RET_CODE_JOB_FAILED
        except CommandFailed:
            retcode = CompmakeConstants.RET_CODE_COMMAND_FAILED
        except CompmakeBug as e:
            sys.stderr.write('unexpected exception: %s' % my_format_exc(e))
            retcode = CompmakeConstants.RET_CODE_COMPMAKE_BUG
        except BaseException as e:
            sys.stderr.write('unexpected exception: %s' % my_format_exc(e))
            retcode = CompmakeConstants.RET_CODE_COMPMAKE_BUG
        except:
            retcode = CompmakeConstants.RET_CODE_COMPMAKE_BUG
        else:
            retcode = 0

        if options.retcodefile is not None:
            write_atomic(options.retcodefile, str(retcode))

        if options.nosysexit:
            return retcode
        else:
            sys.exit(retcode)

    if not options.profile:
        return go(context2=context)
    else:
        # XXX: change variables
        import cProfile

        cProfile.runctx('go(context)', globals(), locals(),
                        'out/compmake.profile')
        import pstats

        p = pstats.Stats('out/compmake.profile')
        n = 30
        p.sort_stats('cumulative').print_stats(n)
        p.sort_stats('time').print_stats(n)
Example #52
def fps_main():
    if True:
        import contracts
        contracts.disable_all()

    # import contracts
    # contracts.disable_all()

    usage = """
    #    vehicles = ['d_SE2_rb_v-rf180', 'd_SE2_rb_v-cam180']
#    vehicles = ['d_SE2_rb_v-rf180']
#    vehicles += ['d_SE2_rb_v-cam180']
#    vehicles += ['d_SE2_rb_v-fs_05_12x12']
    id_world = 'box10'
    id_world = 'StocSources_w10_n20_s1'
    
    d_SE2_rb_v-cam_f360_n180_s
     """
    parser = OptionParser(usage=usage)

    parser.add_option("-w", "--world",
                      default='StocSources_w10_n20_s1',
                      help="World")

    parser.add_option("-v", "--vehicle",
                      default=['d_SE2_rb_v-fs_05_12x12'],
                      action='append',
                      help="Vehicles to simulate")
 
    (options, _) = parser.parse_args()
    
    get_vehicles_config().load()

    id_world = options.world
    world = VehiclesConfig.worlds.instance(id_world)  # @UndefinedVariable
    stats = []
    Stat = namedtuple('Stat', 'id_vehicle id_world fps')

    def stat2str(s):
        return "v: %-25s w: %-25s %5dfps" % (s.id_vehicle, s.id_world, s.fps)

#    vehicles = list(VehiclesConfig.vehicles.keys())
#    print vehicles
    
    vehicles = options.vehicle

    T = 200
#    T = 100000
    dt = 0.05
    for id_vehicle in vehicles:
        instance = VehiclesConfig.vehicles.instance  # @UndefinedVariable
        vehicle = instance(id_vehicle)
        print('vehicle: %s' % id_vehicle)
        sim = VehicleSimulation(vehicle, world)
        fps = check_simulation(sim, num_instants=T, dt=dt)
        stats.append(Stat(id_vehicle=id_vehicle, id_world=id_world, fps=fps))
        print(stat2str(stats[-1]))

    print('---- Sorted:')
    stats.sort(key=lambda x: (-x.fps))
    for s in stats:
        print(stat2str(s))
Example #53
    def go(self):
        # check that if we have a parent who is a quickapp,
        # then use its context
        qapp_parent = self.get_qapp_parent()
        if qapp_parent is not None:
            # self.info('Found parent: %s' % qapp_parent)
            qc = qapp_parent.child_context
            self.define_jobs_context(qc)
            return
        else:
            # self.info('Parent not found')
            pass

        # if False:
        #     import resource
        #     gbs = 5
        #     max_mem = long(gbs * 1000 * 1048576)
        #     resource.setrlimit(resource.RLIMIT_AS, (max_mem, -1))
        #     resource.setrlimit(resource.RLIMIT_DATA, (max_mem, -1))

        options = self.get_options()

        # if self.get_qapp_parent() is None:
        # only do this if somebody didn't do it before
        if not options.contracts:
            msg = ('PyContracts disabled for speed. '
                   'Use --contracts to activate.')
            self.logger.warning(msg)
            contracts.disable_all()

        output_dir = options.output

        if options.reset:
            if os.path.exists(output_dir):
                self.logger.info('Removing output dir %r.' % output_dir)
                try:
                    shutil.rmtree(output_dir)
                except OSError as e:
                    # Directory not empty -- common enough on NFS filesystems
                    # print('errno: %r' % e.errno)
                    if e.errno == 39:
                        pass
                    else:
                        raise

        # Compmake storage for results
        storage = os.path.join(output_dir, 'compmake')
        logger.debug('Creating storage in %s  (compress = %s)' %
                     (storage, options.compress))
        db = StorageFilesystem(storage, compress=options.compress)
        currently_executing = ['root']
        # The original Compmake context
        oc = Context(db=db, currently_executing=currently_executing)
        # Our wrapper
        qc = CompmakeContext(cc=oc,
                             parent=None,
                             qapp=self,
                             job_prefix=None,
                             output_dir=output_dir)
        read_rc_files(oc)

        original = oc.get_comp_prefix()
        self.define_jobs_context(qc)
        oc.comp_prefix(original)

        merged = context_get_merge_data(qc)

        # Only create the index job if we have reports defined
        # or some branched context (which might create reports)
        has_reports = len(qc.get_report_manager().allreports) > 0
        has_branched = qc.has_branched()
        if has_reports or has_branched:
            # self.info('Creating reports')
            oc.comp_dynamic(_dynreports_create_index, merged)
        else:
            pass
            # self.info('Not creating reports.')

        ndefined = len(oc.get_jobs_defined_in_this_session())
        if ndefined == 0:
            # self.comp was never called
            msg = 'No jobs defined.'
            raise ValueError(msg)
        else:
            if options.console:
                oc.compmake_console()
                return 0
            else:
                cq = CacheQueryDB(oc.get_compmake_db())
                targets = cq.all_jobs()
                todo, done, ready = cq.list_todo_targets(targets)

                if not todo and options.command is None:
                    msg = "Note: there is nothing for me to do. "
                    msg += '\n(Jobs todo: %s done: %s ready: %s)' % (
                        len(todo), len(done), len(ready))
                    msg += """\
This application uses a cache system for the results.
This means that if you call it second time with the same arguments,
 and if you do not change any input, it will not do anything."""
                    self.warn(msg)
                    return 0

                if options.command is None:
                    command = 'make recurse=1'
                else:
                    command = options.command

                try:
                    _ = oc.batch_command(command)
                    # print('qapp: ret0 = %s'  % ret0)
                except CommandFailed:
                    # print('qapp: CommandFailed')
                    ret = QUICKAPP_COMPUTATION_ERROR
                except ShellExitRequested:
                    # print('qapp: ShellExitRequested')
                    ret = 0
                else:
                    # print('qapp: else ret = 0')
                    ret = 0

                return ret
Example #54
def compmake_main(args):
    if not '' in sys.path:
        sys.path.append('')

    setproctitle('compmake')

    parser = OptionParser(version=version, usage=usage)

    parser.add_option("--profile",
                      default=False,
                      action='store_true',
                      help="Use Python profiler")

    parser.add_option("--contracts",
                      default=False,
                      action='store_true',
                      help="Activate PyContracts")

    parser.add_option("-c",
                      "--command",
                      default=None,
                      help="Run the given command")

    parser.add_option('-n', '--namespace', default='default')

    parser.add_option('--retcodefile',
                      help='If given, the return value is written in this '
                      'file. Useful to check when compmake finished in '
                      'a grid environment. ',
                      default=None)

    parser.add_option('--nosysexit',
                      default=False,
                      action='store_true',
                      help='Does not sys.exit(ret); useful for debugging.')

    config_populate_optparser(parser)

    (options, args) = parser.parse_args(args)

    if not options.contracts:
        # info('Disabling PyContracts; use --contracts to activate.')
        contracts.disable_all()

    # We load plugins after we parsed the configuration
    from compmake import plugins  # @UnusedImport

    # XXX make sure this is the default
    if not args:
        msg = ('I expect at least one argument (db path).'
               ' Use "compmake -h" for usage information.')
        raise UserError(msg)

    if len(args) >= 2:
        msg = 'I only expect one argument. Use "compmake -h" for usage ' \
              'information.'
        msg += '\n args: %s' % args
        raise UserError(msg)

    # if the argument looks like a dirname
    one_arg = args[0]
    if os.path.exists(one_arg) and os.path.isdir(one_arg):
        # If there is a compmake/ folder inside, take it as the root
        child = os.path.join(one_arg, 'compmake')
        if os.path.exists(child):
            one_arg = child

        context = load_existing_db(one_arg)
        # If the context was custom we load it
        if 'context' in context.compmake_db:
            context = context.compmake_db['context']

            # TODO: check number of jobs is nonzero
    else:
        msg = 'Directory not found: %s' % one_arg
        raise UserError(msg)

    args = args[1:]

    def go(context2):
        assert context2 is not None

        if options.command:
            set_compmake_status(CompmakeConstants.compmake_status_slave)
        else:
            set_compmake_status(CompmakeConstants.compmake_status_interactive)

        read_rc_files(context2)

        try:
            if options.command:
                context2.batch_command(options.command)
            else:
                context2.compmake_console()
        except MakeFailed:
            retcode = CompmakeConstants.RET_CODE_JOB_FAILED
        except CommandFailed:
            retcode = CompmakeConstants.RET_CODE_COMMAND_FAILED
        except CompmakeBug as e:
            sys.stderr.write('unexpected exception: %s' %
                             traceback.format_exc())
            retcode = CompmakeConstants.RET_CODE_COMPMAKE_BUG
        except BaseException as e:
            sys.stderr.write('unexpected exception: %s' %
                             traceback.format_exc())
            retcode = CompmakeConstants.RET_CODE_COMPMAKE_BUG
        except:
            retcode = CompmakeConstants.RET_CODE_COMPMAKE_BUG
        else:
            retcode = 0

        if options.retcodefile is not None:
            write_atomic(options.retcodefile, str(retcode))

        if options.nosysexit:
            return retcode
        else:
            sys.exit(retcode)

    if not options.profile:
        return go(context2=context)
    else:
        # XXX: change variables
        import cProfile

        cProfile.runctx('go(context)', globals(), locals(),
                        'out/compmake.profile')
        import pstats

        p = pstats.Stats('out/compmake.profile')
        n = 50
        p.sort_stats('cumulative').print_stats(n)
        p.sort_stats('time').print_stats(n)