Example #1
def world0():
    dyn = dynamics.CarDynamics(0.1)
    world = World()
    clane = lane.StraightLane([0., -1.], [0., 1.], 0.13)
    world.lanes += [clane, clane.shifted(1), clane.shifted(-1)]
    world.roads += [clane]
    world.fences += [clane.shifted(2), clane.shifted(-2)]
    world.cars.append(
        car.UserControlledCar(dyn, [-0.13, 0., math.pi / 2., 0.3],
                              color='red'))
    world.cars.append(
        car.NestedOptimizerCar(dyn, [0.0, 0.5, math.pi / 2., 0.3],
                               color='yellow'))
    world.cars[1].human = world.cars[0]
    r_h = world.simple_reward([
        world.cars[1].traj
    ]) + 100. * feature.bounded_control(world.cars[0].bounds)

    @feature.feature
    def human_speed(t, x, u):
        return -world.cars[1].traj_h.x[t][3]**2

    r_r = world.simple_reward(world.cars[1], speed=0.5)
    world.cars[1].rewards = (r_h, r_r)
    return world
Example #2
def world8(flag=False):
    dyn = dynamics.CarDynamics(0.1)
    world = World()
    vlane = lane.StraightLane([0., -1.], [0., 1.], 0.15)
    hlane = lane.StraightLane([-1., 0.], [1., 0.], 0.15)

    world.lanes += [vlane.shifted(0.5), vlane.shifted(-0.5),
                    hlane.shifted(0.5), hlane.shifted(-0.5)]
    world.fences += [hlane.shifted(-0.5), hlane.shifted(0.5)]

    world.cars.append(car.UserControlledCar(dyn, [0., -.3, math.pi/2., 0.0], color='red'))
    world.cars.append(car.NestedOptimizerCar(dyn, [-0.3, 0., 0., 0.], color='blue'))
    world.cars[1].human = world.cars[0]
    world.cars[0].bounds = [(-3., 3.), (-2., 2.)]
    if flag:
        world.cars[0].follow = world.cars[1].traj_h
    world.cars[1].bounds = [(-3., 3.), (-2., 2.)]
    @feature.feature
    def horizontal(t, x, u):
        return -x[2]**2
    r_h = (world.simple_reward([world.cars[1].traj],
                               lanes=[vlane],
                               fences=[vlane.shifted(-1), vlane.shifted(1)] * 2)
           + 100. * feature.bounded_control(world.cars[0].bounds))
    @feature.feature
    def human(t, x, u):
        return -tt.exp(-10*(world.cars[1].traj_h.x[t][1]-0.13)/0.1)
    r_r = (human * 10. + horizontal * 30.
           + world.simple_reward(world.cars[1],
                                 lanes=[hlane] * 3,
                                 fences=([hlane.shifted(-1), hlane.shifted(1)] * 3
                                         + [hlane.shifted(-1.5), hlane.shifted(1.5)] * 2),
                                 speed=0.9))
    world.cars[1].rewards = (r_h, r_r)
    return world
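
Note on the reward composition above: expressions such as human*10. + horizontal*30. + world.simple_reward(...) imply that feature.feature wraps a plain (t, x, u) callable in an object that supports scalar weighting and addition. The repo's actual implementation is not shown here; the following is only a minimal sketch of such a wrapper under that assumption, with Feature as a stand-in name rather than the library's confirmed API.

class Feature(object):
    # Wraps a scalar function f(t, x, u) so features can be weighted and summed,
    # mirroring expressions like human*10. + horizontal*30. in the example above.
    def __init__(self, f):
        self.f = f
    def __call__(self, t, x, u):
        return self.f(t, x, u)
    def __add__(self, other):
        return Feature(lambda t, x, u: self(t, x, u) + other(t, x, u))
    def __mul__(self, k):
        return Feature(lambda t, x, u: k * self(t, x, u))
    __rmul__ = __mul__

def feature(f):
    # Decorator form matching the @feature.feature usage in the snippets.
    return Feature(f)

@feature
def speed_penalty(t, x, u):
    return -x[3]**2

@feature
def keep_center(t, x, u):
    return -x[0]**2

r = 30. * speed_penalty + 10. * keep_center   # r is itself callable: r(t, x, u)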
Example #3
def world3(flag=False):
    dyn = dynamics.CarDynamics(0.1)
    world = World()
    clane = lane.StraightLane([0., -1.], [0., 1.], 0.13)
    world.lanes += [clane, clane.shifted(1), clane.shifted(-1)]
    world.roads += [clane]
    world.fences += [
        clane.shifted(2),
        clane.shifted(-2),
        clane.shifted(2.5),
        clane.shifted(-2.5)
    ]
    world.cars.append(
        car.UserControlledCar(dyn, [0., 0., math.pi / 2., 0.3], color='red'))
    world.cars.append(
        car.NestedOptimizerCar(dyn, [0., 0.3, math.pi / 2., 0.3],
                               color='yellow'))
    world.cars[1].human = world.cars[0]
    world.cars[0].bounds = [(-3., 3.), (-1., 1.)]
    if flag:
        world.cars[0].follow = world.cars[1].traj_h
    r_h = world.simple_reward([
        world.cars[1].traj
    ]) + 100. * feature.bounded_control(world.cars[0].bounds)

    @feature.feature
    def human(t, x, u):
        return (world.cars[1].traj_h.x[t][0]) * 10

    r_r = 300. * human + world.simple_reward(world.cars[1], speed=0.5)
    world.cars[1].rewards = (r_h, r_r)
    #world.objects.append(Object('firetruck', [0., 0.7]))
    return world
Example #4
def world1(flag=False):
    dyn = dynamics.CarDynamics(0.1)
    world = World()
    clane = lane.StraightLane([0., -1.], [0., 1.], 0.13)
    world.lanes += [clane, clane.shifted(1), clane.shifted(-1)]
    world.roads += [clane]
    world.fences += [clane.shifted(2), clane.shifted(-2)]
    world.cars.append(
        car.UserControlledCar(dyn, [-0.13, 0., math.pi / 2., 0.3],
                              color='red'))
    world.cars.append(
        car.NestedOptimizerCar(dyn, [0.0, 0.5, math.pi / 2., 0.3],
                               color='yellow'))
    world.cars[1].human = world.cars[0]
    if flag:
        world.cars[0].follow = world.cars[1].traj_h
    r_h = world.simple_reward(
        [world.cars[1].traj],
        speed_import=.2 if flag else 1.,
        speed=0.8 if flag else 1.
    ) + 100. * feature.bounded_control(world.cars[0].bounds)

    @feature.feature
    def human_speed(t, x, u):
        return -world.cars[1].traj_h.x[t][3]**2

    r_r = 300. * human_speed + world.simple_reward(world.cars[1], speed=0.5)
    world.cars[1].rewards = (r_h, r_r)
    #world.objects.append(Object('cone', [0., 1.8]))
    return world
Example #5
def playground():
    dyn = dynamics.CarDynamics(0.1)
    world = World()
    clane = lane.StraightLane([0., -1.], [0., 1.], 0.17)
    world.lanes += [clane, clane.shifted(1), clane.shifted(-1)]
    world.roads += [clane]
    world.fences += [clane.shifted(2), clane.shifted(-2)]
    #world.cars.append(car.UserControlledCar(dyn, [0., 0., math.pi/2., 0.], color='orange'))
    world.cars.append(car.UserControlledCar(dyn, [-0.17, -0.17, math.pi/2., 0.], color='white'))
    return world
Example #6
def two_merge_demo():
    dyn = dynamics.CarDynamics(0.1)
    world = highway()
    world.cars.append(
        car.UserControlledCar(dyn, [0, 0.1, math.pi / 2., .0], color='red'))
    world.cars.append(
        car.UserControlledCar(dyn, [0.13, 0.2, math.pi / 2., 0.],
                              color='white'))
    world.cars.append(
        car.UserControlledCar(dyn, [-.13, 0.2, math.pi / 2., 0.],
                              color='white'))
    with open('data/two_merge_traj/two_merge_traj-1490744314.pickle', 'rb') as f:
        feed_u, feed_x = pickle.load(f)
        world.cars[1].fix_control(feed_u[0])
    with open('data/two_merge_traj/two_merge_traj-1490744613.pickle', 'rb') as f:
        feed_u, feed_x = pickle.load(f)
        world.cars[2].fix_control(feed_u[1])
    #world.cars[0].reward = world.simple_reward(world.cars[0], speed=.7)
    return world
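
The replay files loaded above are expected to unpickle into a (feed_u, feed_x) pair, indexed per car. The recording side is not part of the snippet; a minimal sketch of a writer consistent with that layout (the filename scheme and the per-car list structure are assumptions inferred from the load calls, not confirmed by the source) could be:

import pickle
import time

def save_trajectory(history_u, history_x,
                    prefix='data/two_merge_traj/two_merge_traj'):
    # history_u / history_x: per-car lists of control and state sequences,
    # matching the (feed_u, feed_x) tuple unpacked by the loader above.
    fname = '%s-%d.pickle' % (prefix, int(time.time()))
    with open(fname, 'wb') as f:
        pickle.dump((history_u, history_x), f)
    return fname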
Example #7
def world_test():
    dyn = dynamics.CarDynamics(0.1)
    world = World()
    clane = lane.StraightLane([0., -1.], [0., 1.], 0.13)
    world.lanes += [clane, clane.shifted(1), clane.shifted(-1)]
    world.roads += [clane]
    world.fences += [clane.shifted(2), clane.shifted(-2)]
    world.cars.append(car.UserControlledCar(dyn, [-0.13, 0., math.pi/2., 0.3], color='red'))
    world.cars.append(car.SimpleOptimizerCar(dyn, [0.0, 0.5, math.pi/2., 0.3], color='yellow'))
    world.cars[1].reward = world.simple_reward(world.cars[1], speed=0.5)
    return world
Example #8
def world_features(num=0):
    dyn = dynamics.CarDynamics(0.1)
    world = World()
    clane = lane.StraightLane([0., -1.], [0., 1.], 0.13)
    world.lanes += [clane, clane.shifted(1), clane.shifted(-1)]
    world.roads += [clane]
    world.fences += [clane.shifted(2), clane.shifted(-2)]
    world.cars.append(car.UserControlledCar(dyn, [-0.13, 0., math.pi/2., 0.3], color='red'))
    world.cars.append(car.Car(dyn, [0., 0.1, math.pi/2.+math.pi/5, 0.], color='yellow'))
    world.cars.append(car.Car(dyn, [-0.13, 0.2, math.pi/2.-math.pi/5, 0.], color='yellow'))
    world.cars.append(car.Car(dyn, [0.13, -0.2, math.pi/2., 0.], color='yellow'))
    #world.cars.append(car.NestedOptimizerCar(dyn, [0.0, 0.5, math.pi/2., 0.3], color='yellow'))
    return world
Example #9
def irl_ground():
    dyn = dynamics.CarDynamics(0.1)
    world = World()
    clane = lane.StraightLane([0., -1.], [0., 1.], 0.13)
    world.lanes += [clane, clane.shifted(1), clane.shifted(-1)]
    world.roads += [clane]
    world.fences += [clane.shifted(2), clane.shifted(-2)]
    d = shelve.open('cache', writeback=True)
    cars = [
        (-0.13, .1, .9, -0.13),
        (0, .4, .8, 0.0),
        (.13, 0, .6, .13),
        (0, .8, .5, 0.),
        (0., 1., 0.5, 0.),
        (-.13, -0.5, 0.9, -0.13),
        (.13, -.8, 1., 0.13),
        (-.13, 1.0, 0.6, -0.13),
        (.13, 1.9, .5, 0.13),
        (0, 1.5, 0.5, 0),
    ]

    def goal(g):
        @feature.feature
        def r(t, x, u):
            return -(x[0] - g)**2

        return r

    for i, (x, y, s, gx) in enumerate(cars):
        if str(i) not in d:
            d[str(i)] = []
        world.cars.append(
            car.SimpleOptimizerCar(dyn, [x, y, math.pi / 2., s],
                                   color='yellow'))
        world.cars[-1].cache = d[str(i)]

        def f(j):
            def sync(cache):
                d[str(j)] = cache
                d.sync()

            return sync

        world.cars[-1].sync = f(i)
    for c, (x, y, s, gx) in zip(world.cars, cars):
        c.reward = world.simple_reward(c, speed=s) + 10. * goal(gx)
    world.cars.append(
        car.UserControlledCar(dyn, [0., -0.5, math.pi / 2., 0.7], color='red'))
    world.cars = world.cars[-1:] + world.cars[:-1]
    return world
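
The f(j) factory above exists to bind the loop index by value: if sync captured i directly, every car's sync callback would write to the cache key of the last car, because Python closures resolve the loop variable when the callback runs, not when it is defined. A self-contained illustration of that difference, independent of the repo:

# Late binding: every callback sees the final value of i.
callbacks = [lambda: i for i in range(3)]
print([cb() for cb in callbacks])          # [2, 2, 2]

# Binding the current value through a factory, as f(j) does above:
def make_callback(j):
    return lambda: j

callbacks = [make_callback(i) for i in range(3)]
print([cb() for cb in callbacks])          # [0, 1, 2]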
Example #10
def world_test(initial_states='far_overtaking',
               interaction_data=None,
               init_planner=True):
    # lanes
    center_lane = lane.StraightLane([0., -1.], [0., 1.],
                                    constants.LANE_WIDTH_VIS)
    left_lane = center_lane.shifted(1)
    right_lane = center_lane.shifted(-1)
    lanes = [center_lane, left_lane, right_lane]
    roads = []
    # fences
    fences = [center_lane.shifted(2), center_lane.shifted(-2)]
    # dynamics
    dyn = dynamics.CarDynamics
    # cars
    x0_h = np.array([-constants.LANE_WIDTH_VIS, 0., np.pi / 2., 0.3])
    human_car = car.UserControlledCar(x0_h,
                                      constants.DT,
                                      dyn,
                                      constants.CAR_CONTROL_BOUNDS,
                                      horizon=config.HORIZON,
                                      color=constants.COLOR_H,
                                      name=constants.NAME_H)
    x0_r = np.array([0.0, 0.5, np.pi / 2., 0.3])
    robot_car = car.SimpleOptimizerCar(x0_r,
                                       constants.DT,
                                       dyn,
                                       constants.CAR_CONTROL_BOUNDS,
                                       horizon=config.HORIZON,
                                       color=constants.COLOR_R,
                                       name=constants.NAME_R)
    cars = [robot_car, human_car]

    name = 'world_test'
    world = World(name, constants.DT, cars, robot_car, human_car, lanes, roads,
                  fences)

    # rewards
    robot_car.reward = reward.simple_reward(world, [human_car.traj_linear],
                                            speed=0.5)

    # initialize planners
    print("number of robot cars", len(world.robot_cars))
    for c in world.robot_cars:
        if hasattr(c, 'init_planner'):
            print('Initializing planner for ' + c.name)
            c.init_planner()

    return world
Example #11
def fast_merge_right_demo():
    dyn = dynamics.CarDynamics(0.1)
    world = World()
    clane = lane.StraightLane([0., -1.], [0., 1.], 0.13)
    world.lanes = [clane, clane.shifted(1), clane.shifted(-1)]
    world.roads = [clane]
    world.fences = [clane.shifted(2), clane.shifted(-2)]
    d = shelve.open('cache', writeback=True)
    world.cars.append(
        car.UserControlledCar(dyn, [0.0, -0.2, math.pi / 2., 0.],
                              color='white'))
    world.cars.append(
        car.SimpleOptimizerCar(dyn, [0, 0, math.pi / 2., .0], color='red'))
    world.cars[1].reward = world.simple_reward(world.cars[1], speed=0.5)
    return world
Example #12
def world_test_human():
    dyn = dynamics.CarDynamics(0.1)
    world = highway()
    world.cars.append(
        car.UserControlledCar(dyn, [-0.13, 0., math.pi / 2., 0.3],
                              color='red'))
    world.cars.append(
        car.SimpleOptimizerCar(dyn, [0.0, 0.3, math.pi / 2., 0.8],
                               color='yellow'))
    world.cars[1].reward = world.simple_reward(world.cars[1], speed=0.5)

    with open('data/run1/world_test_human-1486145445.pickle', 'rb') as f:
        feed_u, feed_x = pickle.load(f)
        #traj_h = pickle.load(f)
        print(feed_u.__class__.__name__)
        print(feed_u[0].__class__.__name__)
        #world.cars[0].follow= traj_h
        world.cars[0].fix_control(feed_u[0])

    return world
Example #13
        self.reset()
        self.feed_x = history_x
        self.feed_u = history_u
        pyglet.clock.schedule_interval(self.animation_loop, 0.02)
        pyglet.clock.schedule_interval(self.control_loop, self.dt)
        self.event_loop.run()


if __name__ == '__main__' and False:
    import lane
    dyn = dynamics.CarDynamics(0.1)
    vis = Visualizer(dyn.dt)
    vis.lanes.append(lane.StraightLane([0., -1.], [0., 1.], 0.13))
    vis.lanes.append(vis.lanes[0].shifted(1))
    vis.lanes.append(vis.lanes[0].shifted(-1))
    vis.cars.append(car.UserControlledCar(dyn, [0., 0., math.pi / 2., .1]))
    vis.cars.append(
        car.SimpleOptimizerCar(dyn, [0., 0.5, math.pi / 2., 0.], color='red'))
    r = -60. * vis.cars[0].linear.gaussian()
    r = r + vis.lanes[0].gaussian()
    r = r + vis.lanes[1].gaussian()
    r = r + vis.lanes[2].gaussian()
    r = r - 30. * vis.lanes[1].shifted(1).gaussian()
    r = r - 30. * vis.lanes[2].shifted(-1).gaussian()
    r = r + 30. * feature.speed(0.5)
    r = r + 10. * vis.lanes[0].gaussian(10.)
    r = r + .1 * feature.control()
    vis.cars[1].reward = r
    vis.main_car = vis.cars[0]
    vis.paused = True
    vis.set_heat(r)
Example #14
def car_from(dyn, definition):
    if "kind" not in definition:
        raise Exception("car definition must include 'kind'")

    if "x0" not in definition:
        raise Exception("car definition must include 'x0'")

    if "color" not in definition:
        raise Exception("car definition must include 'color'")

    if "T" not in definition:
        raise Exception("car definition must include 'T'")

    if definition["kind"] == CAR_SIMPLE:
        return car.SimpleOptimizerCar(dyn,
                                      definition["x0"],
                                      color=definition["color"],
                                      T=definition["T"])

    if definition["kind"] == CAR_USER:
        return car.UserControlledCar(dyn,
                                     definition["x0"],
                                     color=definition["color"],
                                     T=definition["T"])

    if definition["kind"] == CAR_NESTED:
        return car.NestedOptimizerCar(dyn,
                                      definition["x0"],
                                      color=definition["color"],
                                      T=definition["T"])

    if definition["kind"] == CAR_BELIEF:
        return car.BeliefOptimizerCar(dyn,
                                      definition["x0"],
                                      color=definition["color"],
                                      T=definition["T"])

    if definition["kind"] == CAR_CANNED:
        c = car.CannedCar(dyn,
                          definition["x0"],
                          color=definition["color"],
                          T=definition["T"])

        if "controls" not in definition:
            raise Exception("definition doesn't contain 'controls' key")

        c.follow(definition["controls"])

        return c

    if definition["kind"] == CAR_NEURAL:
        c = car.NeuralCar(dyn,
                          definition["x0"],
                          color=definition["color"],
                          T=definition["T"])

        if "model" not in definition:
            raise Exception("definition doesn't contain 'model' key")

        mu = None
        if "mu" in definition:
            mu = definition["mu"]

        c.use(neural.load(definition["model"]), mu=mu)

        return c

    if definition["kind"] == CAR_COPY:
        c = car.CopyCar(dyn,
                        definition["x0"],
                        color=definition["color"],
                        T=definition["T"])
        return c

    raise Exception("car kind not recognized " + definition["kind"])