Example #1
def run_classical_experiments(
        path_generators=None,
        render=False,
        default_config=None,
        controllers=None,
):
    # Avoid a shared mutable default argument for the base configuration.
    if default_config is None:
        default_config = {}
    if path_generators is None:
        path_generators = ['curriculum_angled_path_factory', 'sine_path_generator', 'circle_path_factory',
                           'figure_eight_generator', 'carla_json_generator', 'straight_variable_speed_generator',
                           'left_lane_change_generator', 'right_lane_change_generator', 'snider_2009_track_generator',
                           'double_lane_change_generator', 'straight_variable_speed_pulse_generator',
                           'hairpin_turn_generator', 'right_turn_generator']
    if controllers is None:
        controllers = ['stanley', 'pure_pursuit', 'pid', 'mpc_ltv']

    for control in controllers:
        for path in path_generators:
            default_config = merge_configs(default_config, {
                'num_runs': 1,
                'render': render,
                'controller': control,
                'name': control + "_" + path,
                'environment': {
                    'max_distance_error': 5.0,
                    'process': {
                        # use the path generator selected by the loop, not a fixed one
                        'path_generator': path,
                    }
                }
            })

            config, run_prefix = setup_run(default_config)
            evaluations = [run(config, run_prefix, i) for i in range(config['num_runs'])]
    print("All classical controllers ran!")
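Every example here leans on merge_configs to overlay per-run settings on a base configuration. Its implementation is not shown; the sketch below is only an assumption that it performs a recursive (deep) dictionary merge, so nested keys can be overridden without discarding their siblings.

def merge_configs(base, overrides):
    # Hypothetical sketch of a deep merge: nested keys such as
    # environment.process.path_generator replace the base value while
    # leaving the other keys of the nested dicts intact.
    merged = dict(base)
    for key, value in overrides.items():
        if isinstance(value, dict) and isinstance(merged.get(key), dict):
            merged[key] = merge_configs(merged[key], value)
        else:
            merged[key] = value
    return merged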
Example #2
def run_checkpoint_experiment(default_config, info, path_generator, render):
    config = info['config']
    original_name = config['name']
    name = original_name + "_" + path_generator
    config['name'] = name
    config['checkpoint'] = info['checkpoint']
    config['environment']['process']['path_generator'] = path_generator
    config['render'] = render
    config['environment']['rewarder']['steering_control_effort_tradeoff'] = 0.0
    config['environment']['rewarder']['acceleration_control_effort_tradeoff'] = 0.0
    default_config = merge_configs(default_config, config)
    config, run_prefix = setup_run(default_config, use_command_line=False)
    try:
        print('Running experiment: {} | {}'.format(name, config['name']))
        evaluations = [run(config, run_prefix, i) for i in range(config['num_runs'])]
        print("Experiment " + name + " finished!")
    except Exception as e:
        print("Experiment " + name + " didn't run, exception '{}'".format(repr(e)))
    # restore the original name so the caller's info dict is left unchanged
    info['config']['name'] = original_name
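A driver for run_checkpoint_experiment might look like the sketch below; the checkpoint path, the contents of info['config'], and the path list are illustrative assumptions rather than part of the original code.

# Hypothetical usage: re-evaluate one trained checkpoint on several path generators.
info = {
    'checkpoint': '/tmp/checkpoints/agent/checkpoint-100',  # hypothetical path
    'config': {
        'name': 'agent',
        'num_runs': 1,
        'environment': {'process': {}, 'rewarder': {}},
    },
}
for path_generator in ['figure_eight_generator', 'hairpin_turn_generator']:
    run_checkpoint_experiment({}, info, path_generator, render=False)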
Example #3
def run_one(path_generator, controller):
    print('path generator: {}, controller: {}.'.format(path_generator, controller))
    
    config = merge_configs(common_default_config, {
        'num_runs':    1,
        'render':      True,
        'controller':  controller,
        'name':        controller + "_" + path_generator,
        'environment': {
            'max_distance_error': 25.0,
            'process':            {
                'path_generator': path_generator,
                }
            }
        })
    print('merged:')
    pprint(config)
    print('running:')
    config, run_prefix = setup_run(config, use_command_line=False)
    run(config, run_prefix, 0)
    print('complete.')
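A single call is enough to exercise run_one; the generator and controller names below are taken from the first example.

# One rendered run of the Stanley controller on a figure-eight path.
run_one('figure_eight_generator', 'stanley')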
Example #4
        'num_runs': 1,
        'render': False,
        'controller': 'scheduled',
        'name': 'scheduled',
        'environment': {
            'terminator': {},
            'controller': {
                # 'input_csv': '/home/ctripp/project/cavs/src/cavs-environments/cavs_environments/vehicle/k_road/controller'
                #             '/scheduled_controller_inputs.csv'
            },
            'max_distance_error': 5e16,
            'process': {
                'path_generator': 'straight_variable_speed_pulse_generator',
            }
        }
    })

    config, run_prefix = setup_run(default_config)
    evaluations = [run(config, run_prefix, i) for i in range(config['num_runs'])]

    ## To run multiple runs in parallel
    # pool = multiprocessing.Pool(
    #    processes=multiprocessing.cpu_count() if config['num_cpus'] is None else config['num_cpus'])
    #
    # with pool:
    #    evaluations = pool.map(functools.partial(run, config, run_prefix), [i for i in range(config['num_runs'])])


    if ray.is_initialized():
        ray.shutdown()
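The commented-out block above hints at a parallel variant. A minimal sketch, assuming run keeps the (config, run_prefix, index) signature used throughout these examples:

import functools
import multiprocessing

# Dispatch one worker per run, bounded by config['num_cpus'] when it is set.
num_cpus = multiprocessing.cpu_count() if config['num_cpus'] is None else config['num_cpus']
with multiprocessing.Pool(processes=num_cpus) as pool:
    evaluations = pool.map(functools.partial(run, config, run_prefix),
                           range(config['num_runs']))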