Example #1
def svg1():
    outdir = get_comptests_output_dir()
    control_points = [
        SE2Transform.from_SE2(geo.SE2_from_translation_angle([0, 0], 0)),
        SE2Transform.from_SE2(geo.SE2_from_translation_angle([1, 0], 0)),
        SE2Transform.from_SE2(geo.SE2_from_translation_angle([2, -1], np.deg2rad(-90))),
    ]

    width = 0.3

    ls = LaneSegment(width, control_points)

    area = RectangularArea((-1, -2), (3, 2))
    draw_static(ls, outdir, area=area)
Example #2
def kinematics2d_test():
    q0 = geo.SE2_from_translation_angle([0, 0], 0)
    v0 = geo.se2.zero()
    c0 = q0, v0
    s0 = GenericKinematicsSE2.initialize(c0)

    k = 0.4
    radius = 1.0 / k
    print("radius: %s" % radius)
    v = 3.1
    perimeter = 2 * np.pi * radius
    dt_loop = perimeter / v

    w = 2 * np.pi / dt_loop

    dt = dt_loop * 0.25
    commands = geo.se2_from_linear_angular([v, 0], w)

    s1 = s0.integrate(dt, commands)
    s2 = s1.integrate(dt, commands)
    s3 = s2.integrate(dt, commands)
    s4 = s3.integrate(dt, commands)
    seq = [s0, s1, s2, s3, s4]
    for s in seq:
        q1, v1 = s.TSE2_from_state()
        print("%s" % geo.SE2.friendly(q1))

    assert_almost_equal(geo.translation_from_SE2(s1.TSE2_from_state()[0]),
                        [radius, radius])
    assert_almost_equal(geo.translation_from_SE2(s2.TSE2_from_state()[0]),
                        [0, radius * 2])
    assert_almost_equal(geo.translation_from_SE2(s3.TSE2_from_state()[0]),
                        [-radius, radius])
    assert_almost_equal(geo.translation_from_SE2(s4.TSE2_from_state()[0]),
                        [0, 0])
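The four waypoints asserted above follow from the closed-form integration of a constant twist: forward speed v and yaw rate w = v / radius trace a circle of that radius. A minimal numpy-only sketch, independent of the GenericKinematicsSE2 API, that reproduces the same quarter-circle positions:

import numpy as np

# Closed-form unicycle integration: starting at the origin with heading 0,
# constant forward speed v and yaw rate w give
#   x(t) = (v/w) * sin(w*t),   y(t) = (v/w) * (1 - cos(w*t))
k = 0.4
radius = 1.0 / k
v = 3.1
w = v / radius                       # one full loop takes 2*pi/w seconds
dt = (2 * np.pi / w) * 0.25          # a quarter of the loop

def position_after(t):
    return np.array([radius * np.sin(w * t), radius * (1 - np.cos(w * t))])

assert np.allclose(position_after(1 * dt), [radius, radius])
assert np.allclose(position_after(2 * dt), [0, 2 * radius])
assert np.allclose(position_after(3 * dt), [-radius, radius])
assert np.allclose(position_after(4 * dt), [0, 0])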
Example #3
def sample_duckies(
    nduckies: int,
    robot_positions: List[SE2value],
    min_dist_from_robot: float,
    min_dist_from_other_duckie: float,
    bounds: dw.RectangularArea,
    timeout: float = 10,
) -> List[SE2value]:
    poses: List[SE2value] = []

    def far_enough(pose_: SE2value) -> bool:
        for p in poses:
            if distance_poses(p, pose_) < min_dist_from_other_duckie:
                return False
        for p in robot_positions:
            if distance_poses(p, pose_) < min_dist_from_robot:
                return False
        return True

    t0 = time.time()
    while len(poses) < nduckies:

        theta = np.random.uniform(-np.pi, +np.pi)
        t = sample_in_rect(bounds)
        pose = g.SE2_from_translation_angle(t, theta)

        if far_enough(pose):
            poses.append(pose)

        dt = time.time() - t0
        if dt > timeout:
            msg = "Cannot sample in time."
            raise ZException(msg)
    return poses
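sample_duckies above is plain rejection sampling under a wall-clock timeout: draw a pose uniformly in the area and keep it only if it clears every accepted duckie and every robot. A self-contained sketch of the same pattern on bare 2D points (the sample_points helper is hypothetical; no duckietown-world types are used):

import time
import numpy as np

def sample_points(n, min_dist, bounds, timeout=10.0):
    # rejection-sample n 2D points in bounds = ((xmin, ymin), (xmax, ymax)),
    # each at least min_dist away from the ones already accepted
    (xmin, ymin), (xmax, ymax) = bounds
    points = []
    t0 = time.time()
    while len(points) < n:
        candidate = np.random.uniform([xmin, ymin], [xmax, ymax])
        if all(np.linalg.norm(candidate - p) >= min_dist for p in points):
            points.append(candidate)
        if time.time() - t0 > timeout:
            raise TimeoutError("Cannot sample in time.")
    return points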
Example #4
def dd_test():
    radius = 0.1
    radius_left = radius
    radius_right = radius
    wheel_distance = 0.5
    dddp = DifferentialDriveDynamicsParameters(radius_left=radius_left, radius_right=radius_right,
                                               wheel_distance=wheel_distance)
    q0 = geo.SE2_from_translation_angle([0, 0], 0)
    v0 = geo.se2.zero()
    c0 = q0, v0
    s0 = dddp.initialize(c0)

    omega = 0.3
    commands = WheelVelocityCommands(omega, omega)
    dt = 4.0
    s1 = s0.integrate(dt, commands)

    q1, v1 = s1.TSE2_from_state()
    print(geo.se2.friendly(v1))
    print(geo.SE2.friendly(q1))

    p1, theta = geo.translation_angle_from_SE2(q1)
    assert_almost_equal(p1, [dt * radius * omega, 0])

    omega_left = 0.3
    omega_right = 0
    commands = WheelVelocityCommands(omega_left, omega_right)
    dt = 4.0
    s1 = s0.integrate(dt, commands)

    q1, v1 = s1.TSE2_from_state()
    p1, theta = geo.translation_angle_from_SE2(q1)
    print(geo.se2.friendly(v1))
    print(geo.SE2.friendly(q1))
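The first assertion follows from the standard differential-drive model: with equal wheel rates the body moves straight at v = radius * omega, so the displacement after dt seconds is dt * radius * omega = 0.12 m. A small plain-Python sketch of the wheel-to-body-twist map (not the DifferentialDriveDynamicsParameters API; the sign convention for the yaw rate is an assumption):

def wheel_to_body_twist(omega_left, omega_right, radius_left, radius_right, wheel_distance):
    # standard differential-drive kinematics: forward speed is the mean wheel
    # rim speed, yaw rate is the rim-speed difference over the wheel distance
    # (which wheel drives positive yaw is a convention)
    v_left = radius_left * omega_left
    v_right = radius_right * omega_right
    v = (v_left + v_right) / 2.0
    w = (v_right - v_left) / wheel_distance
    return v, w

# both wheels at omega = 0.3 with radius 0.1: straight motion at v = 0.03 m/s,
# so after dt = 4.0 s the robot has advanced dt * radius * omega = 0.12 m
v, w = wheel_to_body_twist(0.3, 0.3, 0.1, 0.1, 0.5)
assert abs(v - 0.1 * 0.3) < 1e-12 and abs(w) < 1e-12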
Example #5
def sample_many_good_starting_poses(
    po: dw.PlacedObject,
    nrobots: int,
    only_straight: bool,
    min_dist: float,
    delta_theta_rad: float,
    delta_y_m: float,
    timeout: float = 10,
) -> List[np.ndarray]:
    poses = []

    def far_enough(pose_):
        for p in poses:
            if distance_poses(p, pose_) < min_dist:
                return False
        return True

    t0 = time.time()
    while len(poses) < nrobots:
        pose = sample_good_starting_pose(po,
                                         only_straight=only_straight,
                                         along_lane=0.2)
        if far_enough(pose):
            theta = np.random.uniform(-delta_theta_rad, +delta_theta_rad)
            y = np.random.uniform(-delta_y_m, +delta_y_m)
            t = [0, y]
            q = g.SE2_from_translation_angle(t, theta)
            pose = g.SE2.multiply(pose, q)
            poses.append(pose)

        dt = time.time() - t0
        if dt > timeout:
            msg = "Cannot sample the poses"
            raise ZException(msg)
    return poses
Example #6
def sample_duckies_poses(
    po: dw.PlacedObject,
    nduckies: int,
    robot_positions: List[SE2value],
    min_dist_from_robot: float,
    min_dist_from_other_duckie: float,
    from_side_bounds: Tuple[float, float],
    delta_theta_rad: float,
) -> List[np.ndarray]:
    poses: List[SE2value] = []

    def far_enough(pose_: SE2value) -> bool:
        for p in poses:
            if distance_poses(p, pose_) < min_dist_from_other_duckie:
                return False
        for p in robot_positions:
            if distance_poses(p, pose_) < min_dist_from_robot:
                return False
        return True

    while len(poses) < nduckies:
        pose = sample_good_starting_pose(po, only_straight=False, along_lane=0.2)
        if not far_enough(pose):
            continue

        theta = np.random.uniform(-delta_theta_rad, +delta_theta_rad)
        y = np.random.uniform(from_side_bounds[0], from_side_bounds[1])
        t = [0, y]
        q = g.SE2_from_translation_angle(t, theta)
        pose = g.SE2.multiply(pose, q)
        poses.append(pose)
    return poses
Example #7
def lane_pose_test1():
    outdir = get_comptests_output_dir()

    # load one of the maps (see the list using dt-world-draw-maps)
    dw = load_map("udem1")

    v = 5

    # define a SampledSequence with timestamp, command
    commands_sequence = SampledSequence.from_iterator(
        [
            # we set these velocities at 1.0
            (1.0, WheelVelocityCommands(0.1 * v, 0.1 * v)),
            # at 2.0 we switch and so on
            (2.0, WheelVelocityCommands(0.1 * v, 0.4 * v)),
            (4.0, WheelVelocityCommands(0.1 * v, 0.4 * v)),
            (5.0, WheelVelocityCommands(0.1 * v, 0.2 * v)),
            (6.0, WheelVelocityCommands(0.1 * v, 0.1 * v)),
        ]
    )

    # we upsample the sequence by 5
    commands_sequence = commands_sequence.upsample(5)

    ## Simulate the dynamics of the vehicle
    # start from q0
    q0 = geo.SE2_from_translation_angle([1.8, 0.7], 0)

    # instantiate the class that represents the dynamics
    dynamics = reasonable_duckiebot()
    # this function integrates the dynamics
    poses_sequence = get_robot_trajectory(dynamics, q0, commands_sequence)

    #################
    # Visualization and rule evaluation

    # Creates an object 'duckiebot'
    ego_name = "duckiebot"
    db = DB18()  # class that gives the appearance

    # convert from SE2 to SE2Transform representation
    transforms_sequence = poses_sequence.transform_values(SE2Transform.from_SE2)
    # puts the object in the world with a certain "ground_truth" constraint
    dw.set_object(ego_name, db, ground_truth=transforms_sequence)

    # Rule evaluation (do not touch)
    interval = SampledSequence.from_iterator(enumerate(commands_sequence.timestamps))
    evaluated = evaluate_rules(
        poses_sequence=transforms_sequence,
        interval=interval,
        world=dw,
        ego_name=ego_name,
    )
    timeseries = make_timeseries(evaluated)
    # Drawing
    area = RectangularArea((0, 0), (3, 3))
    draw_static(dw, outdir, area=area, timeseries=timeseries)
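Conceptually, the pipeline above holds each command constant until the next timestamp, refines the timeline (upsample), and integrates the dynamics between samples. A rough, self-contained sketch of that idea on a plain (x, y, theta) state, in the spirit of upsample + get_robot_trajectory only, not their actual implementation:

import numpy as np

def integrate_commands(q0, commands, f_twist, n_sub=5):
    # q0: initial (x, y, theta); commands: list of (timestamp, command);
    # f_twist(command) -> (v, w).  Each command is held until the next
    # timestamp (zero-order hold); each interval is split into n_sub Euler steps.
    x, y, theta = q0
    poses = [(commands[0][0], (x, y, theta))]
    for (t0, cmd), (t1, _) in zip(commands, commands[1:]):
        v, w = f_twist(cmd)
        h = (t1 - t0) / n_sub
        for k in range(n_sub):
            x += v * np.cos(theta) * h
            y += v * np.sin(theta) * h
            theta += w * h
            poses.append((t0 + (k + 1) * h, (x, y, theta)))
    return poses

# usage with commands already given as (v, w) twists:
trajectory = integrate_commands(
    (1.8, 0.7, 0.0),
    [(1.0, (0.5, 0.0)), (2.0, (0.5, 0.4)), (4.0, (0.5, 0.0)), (6.0, (0.5, 0.0))],
    lambda c: c,
)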
Example #8
    def asmatrix2d(self):
        angle0 = {"N": 0, "E": -np.pi / 2, "S": np.pi, "W": +np.pi / 2}[self.orientation] + np.pi / 2

        angle = {"N": np.pi / 2, "E": 0, "S": np.pi + np.pi / 2, "W": np.pi}[self.orientation]

        assert np.allclose(angle, angle0)

        p = [self.i + 0.5, self.j + 0.5]
        M = geometry.SE2_from_translation_angle(p, angle)
        return Matrix2D(M)
Example #9
    def SE2Transform_from_lane_pose(self, lane_pose):
        beta = self.beta_from_along_lane(lane_pose.along_lane)
        # logger.info('beta: %s' % beta)
        center_point = self.center_point(beta)
        delta = np.array([0, lane_pose.lateral])

        rel = geo.SE2_from_translation_angle(delta, lane_pose.relative_heading)
        # logger.info('rel: %s' % geo.SE2.friendly(rel))
        res = geo.SE2.multiply(center_point, rel)

        return SE2Transform.from_SE2(res)
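The last two lines are ordinary composition of planar rigid transforms: the lane-center pose at beta composed with a lateral offset and the relative heading. The same step written with bare 3x3 homogeneous matrices (the numbers are hypothetical):

import numpy as np

def se2(x, y, theta):
    # homogeneous 3x3 matrix for a planar pose
    c, s = np.cos(theta), np.sin(theta)
    return np.array([[c, -s, x],
                     [s,  c, y],
                     [0,  0, 1]])

center_point = se2(2.0, 1.0, np.pi / 2)   # hypothetical center-line pose
rel = se2(0.0, 0.15, 0.1)                 # lateral offset + relative heading
res = center_point @ rel                  # same as geo.SE2.multiply(center_point, rel)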
Example #10
def integrator2D_test1():
    q0 = geo.SE2_from_translation_angle([0, 0], 0)
    v0 = geo.se2.zero()
    c0 = q0, v0
    s0 = Integrator2D.initialize(c0)

    commands = [1, 2]
    dt = 2
    s1 = s0.integrate(dt, commands)

    q1, v1 = s1.TSE2_from_state()
Example #11
    def center_point(self, beta):
        n = len(self.control_points)
        i = int(np.floor(beta))

        if i < 0:
            q0 = self.control_points[0].asmatrix2d().m
            q1 = geo.SE2.multiply(q0,
                                  geo.SE2_from_translation_angle([0.1, 0], 0))
            alpha = beta

        elif i >= n - 1:
            # q0 = self.control_points[-2].asmatrix2d().m
            q0 = self.control_points[-1].asmatrix2d().m
            q1 = geo.SE2.multiply(q0,
                                  geo.SE2_from_translation_angle([0.1, 0], 0))
            alpha = beta - (n - 1)
        else:
            alpha = beta - i
            q0 = self.control_points[i].asmatrix2d().m
            q1 = self.control_points[i + 1].asmatrix2d().m
        q = SE2_interpolate(q0, q1, alpha)
        return q
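SE2_interpolate blends the two control-point poses. A sketch of geodesic interpolation on SE(2) via the matrix log/exp, assuming that is what the geometry helper does; note that alpha outside [0, 1] extrapolates, which is how the i < 0 and i >= n - 1 branches extend the lane beyond its endpoints:

import numpy as np
from scipy.linalg import expm, logm

def se2_interpolate(q0, q1, alpha):
    # q(alpha) = q0 * exp(alpha * log(q0^{-1} q1)) for 3x3 homogeneous matrices
    return q0 @ expm(alpha * logm(np.linalg.solve(q0, q1)))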
Example #12
    def asmatrix2d(self):
        # orientation2deg = dict(N=0, E=90, S=180, W=270, )
        # angle = ['S', 'E', 'N', 'W'].index(self.orientation)
        # angle = +angle * np.pi/2 + np.pi

        angle = {
            'N': 0,
            'E': -np.pi/2,
            'S': np.pi,
            'W': +np.pi/2,

        }[self.orientation] + np.pi/2
        # angle = 0
        # orientation2deg = dict(N=0, E=-90, S=-180, W=-270, )
        # angle = np.deg2rad(orientation2deg[self.orientation])
        p = [self.i + 0.5, self.j + 0.5]
        M = geometry.SE2_from_translation_angle(p, angle)
        return Matrix2D(M)
Example #13
    def asmatrix2d(self):

        angle0 = {
            'N': 0,
            'E': -np.pi / 2,
            'S': np.pi,
            'W': +np.pi / 2,
        }[self.orientation] + np.pi / 2

        angle = {
            'N': np.pi / 2,
            'E': 0,
            'S': np.pi + np.pi / 2,
            'W': np.pi,
        }[self.orientation]

        assert np.allclose(angle, angle0)

        p = [self.i + 0.5, self.j + 0.5]
        M = geometry.SE2_from_translation_angle(p, angle)
        return Matrix2D(M)
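The assert in this variant checks that the two lookup tables describe the same mapping: adding pi/2 to the first table gives N -> pi/2, E -> 0, S -> 3*pi/2, W -> pi, which is exactly the second table. Spelled out as a standalone check:

import numpy as np

first = {"N": 0, "E": -np.pi / 2, "S": np.pi, "W": +np.pi / 2}
second = {"N": np.pi / 2, "E": 0, "S": np.pi + np.pi / 2, "W": np.pi}
assert all(np.allclose(first[o] + np.pi / 2, second[o]) for o in "NESW")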
Example #14
def sample_many_good_starting_poses(po: dw.PlacedObject, nrobots: int, only_straight: bool, min_dist: float,
                                    delta_theta_rad: float, delta_y_m: float) -> List[np.ndarray]:
    poses = []

    def far_enough(pose):
        for p in poses:
            if distance_poses(p, pose) < min_dist:
                return False
        return True

    while len(poses) < nrobots:
        pose = sample_good_starting_pose(po, only_straight=only_straight)
        if far_enough(pose):
            theta = np.random.uniform(-delta_theta_rad, +delta_theta_rad)
            y = np.random.uniform(-delta_y_m, + delta_y_m)
            t = [0, y]
            q = g.SE2_from_translation_angle(t, theta)
            pose = g.SE2.multiply(pose, q)
            poses.append(pose)
    return poses
Example #15
def sample_good_starting_pose(m: PlacedObject, only_straight: bool, along_lane: float) -> geo.SE2value:
    """ Samples a good starting pose on a straight lane """
    choices = list(iterate_by_class(m, LaneSegment))

    if only_straight:
        choices = [_ for _ in choices if is_straight(_)]

    choice = random.choice(choices)
    ls: LaneSegment = choice.object

    lp: LanePose = ls.lane_pose(along_lane, 0.0, 0.0)
    rel: SE2Transform = ls.SE2Transform_from_lane_pose(lp)

    m1 = choice.transform_sequence.asmatrix2d().m
    m2 = rel.asmatrix2d().m
    g = np.dot(m1, m2)

    t, a, s = geo.translation_angle_scale_from_E2(g)
    g = geo.SE2_from_translation_angle(t, a)

    return g
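The last two lines of sample_good_starting_pose re-normalize the matrix product: translation_angle_scale_from_E2 reads off translation and angle (presumably dropping any scale component present in the composed transform), and SE2_from_translation_angle rebuilds a clean rigid transform. A numpy equivalent of that projection:

import numpy as np

def project_to_se2(M):
    # keep only translation and rotation of a 3x3 planar transform
    t = M[:2, 2]
    angle = np.arctan2(M[1, 0], M[0, 0])
    c, s = np.cos(angle), np.sin(angle)
    return np.array([[c, -s, t[0]],
                     [s,  c, t[1]],
                     [0,  0, 1]])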
Example #16
    def as_SE2(self) -> SE2value:
        import geometry
        M = geometry.SE2_from_translation_angle(self.p, self.theta)
        return M
Example #17
def get_lane_at_point_test():
    m: dw.DuckietownMap = dw.load_map("robotarium2")

    p = geo.SE2_from_translation_angle([1.3, 0.3], 0.2)
    r = get_tile_at_point(m, p)
    assert (r.i, r.j) == (2, 0)
Example #18
    def asmatrix2d(self):
        M = geometry.SE2_from_translation_angle(self.p, self.theta)
        return Matrix2D(M)
Example #19
                           np.sqrt(R[0, 0, idx]**2 + R[0, 1, idx]**2)),
                np.arctan2(-R[0, 1, idx], R[0, 0, idx])
            ])

            realTimestamps.append(float(time) + float(timestart))
            timeStart.append(timestart)
            z = phi[2, idx]
            points = np.array([x[idx], y[idx]])
            final_trajectory.append([points, z])

    for entry in range(0, len(final_trajectory)):

        x = (final_trajectory[entry][0][0])
        y = final_trajectory[entry][0][1]
        alpha = final_trajectory[entry][1]
        q5 = geo.SE2_from_translation_angle([x, y], alpha)
        seqs2.append(q5)

    # Path to file with watchtower image timesteps
    path = 'trajectoryFiles/' + str(
        folderNamesSingle) + '/image_timestamps.csv'
    imagesDFcolumns = ['ImageName', 'Unused', 'Seconds', 'Nanoseconds']
    imagesDF = pd.read_csv(path)
    timeStampImagesSecondsArray = imagesDF.iloc[:, 2]
    timeStampImagesNanoSecArray = imagesDF.iloc[:, 3]
    imageNumber = imagesDF.iloc[:, 0]
    entryNumberTrajectory = 0

    # For each watchtower image timesteps, calculate the pose with the trajectory data
    for entry in range(0, len(timeStampImagesSecondsArray) - 1):
        if entryNumberTrajectory + 2 >= len(realTimestamps):
Example #20
def check_car_dynamics_correct(klass, CarCommands, CarParams):
    """

    :param klass: the implementation
    :return:
    """

    # load one of the maps (see the list using dt-world-draw-maps)
    outdir = get_comptests_output_dir()
    dw = load_map('udem1')

    v = 5

    # define a SampledSequence with timestamp, command
    commands_sequence = SampledSequence.from_iterator([
        # we set these velocities at 1.0
        (1.0, CarCommands(0.1 * v, 0.1 * v)),
        # at 2.0 we switch and so on
        (2.0, CarCommands(0.1 * v, 0.4 * v)),
        (4.0, CarCommands(0.1 * v, 0.4 * v)),
        (5.0, CarCommands(0.1 * v, 0.2 * v)),
        (6.0, CarCommands(0.1 * v, 0.1 * v)),
    ])

    # we upsample the sequence by 5
    commands_sequence = commands_sequence.upsample(5)

    ## Simulate the dynamics of the vehicle
    # start from q0 and v0
    q0 = geo.SE2_from_translation_angle([1.8, 0.7], 0)
    v0 = geo.se2.zero()
    c0 = q0, v0
    # instantiate the class that represents the dynamics
    dynamics = CarParams(10.0)
    # this function integrates the dynamics
    poses_sequence = get_robot_trajectory(dynamics, q0, commands_sequence)

    #################
    # Visualization and rule evaluation

    # Creates an object 'duckiebot'
    ego_name = 'duckiebot'
    db = DB18()  # class that gives the appearance

    # convert from SE2 to SE2Transform representation
    transforms_sequence = poses_sequence.transform_values(
        SE2Transform.from_SE2)
    # puts the object in the world with a certain "ground_truth" constraint
    dw.set_object(ego_name, db, ground_truth=transforms_sequence)

    # Rule evaluation (do not touch)
    interval = SampledSequence.from_iterator(
        enumerate(commands_sequence.timestamps))
    evaluated = evaluate_rules(poses_sequence=transforms_sequence,
                               interval=interval,
                               world=dw,
                               ego_name=ego_name)
    timeseries = make_timeseries(evaluated)
    # Drawing
    area = RectangularArea((0, 0), (3, 3))
    draw_static(dw, outdir, area=area, timeseries=timeseries)

    expected = {
        1.0: geo.SE2_from_translation_angle([1.8, 0.7], 0.0),
        1.6: geo.SE2_from_translation_angle([2.09998657, 0.70245831], 0.016389074695313716),
        2.0: geo.SE2_from_translation_angle([2.29993783, 0.70682836], 0.027315124492189525),
        2.4: geo.SE2_from_translation_angle([2.49991893, 0.70792121], -0.016385672773040847),
        2.8: geo.SE2_from_translation_angle([2.69975684, 0.70027647], -0.060086470038271216),
        3.2: geo.SE2_from_translation_angle([2.89906999, 0.68390873], -0.10378726730350164),
        3.6: geo.SE2_from_translation_angle([3.09747779, 0.65884924], -0.14748806456873198),
        4.0: geo.SE2_from_translation_angle([3.2946014, 0.62514586], -0.19118886183396236),
        4.6: geo.SE2_from_translation_angle([3.58705642, 0.55852875], -0.25674005773180786),
        5.0: geo.SE2_from_translation_angle([3.77932992, 0.50353298], -0.3004408549970382),
        6.0: geo.SE2_from_translation_angle([4.2622088, 0.37429798], -0.22257046876429318),
    }
    for t, expected_pose in expected.items():
        assert np.allclose(poses_sequence.at(t=t), expected_pose), t

    print('All tests passed successfully!')
Example #21
    def evaluate(self, context: RuleEvaluationContext,
                 result: RuleEvaluationResult):
        interval = context.get_interval()
        lane_pose_seq = context.get_lane_pose_seq()
        ego_pose_sequence = context.get_ego_pose_global()

        timestamps = []
        driven_any = []
        driven_lanedir = []

        tile_fqn2lane_fqn = {}
        for idt in iterate_with_dt(interval):
            t0, t1 = idt.v0, idt.v1  # not v
            try:
                name2lpr = lane_pose_seq.at(t0)

                p0 = ego_pose_sequence.at(t0).as_SE2()
                p1 = ego_pose_sequence.at(t1).as_SE2()
            except UndefinedAtTime:
                dr_any = dr_lanedir = 0.0

            else:
                prel = relative_pose(p0, p1)
                translation, _ = geo.translation_angle_from_SE2(prel)
                dr_any = np.linalg.norm(translation)

                if name2lpr:

                    ds = []
                    for k, lpr in name2lpr.items():
                        if lpr.tile_fqn in tile_fqn2lane_fqn:
                            if lpr.lane_segment_fqn != tile_fqn2lane_fqn[
                                    lpr.tile_fqn]:
                                # msg = 'Backwards detected'
                                # print(msg)
                                continue

                        tile_fqn2lane_fqn[lpr.tile_fqn] = lpr.lane_segment_fqn

                        assert isinstance(lpr, GetLanePoseResult)
                        c0 = lpr.center_point
                        ctas = geo.translation_angle_scale_from_E2(
                            c0.asmatrix2d().m)
                        c0_ = geo.SE2_from_translation_angle(
                            ctas.translation, ctas.angle)
                        prelc0 = relative_pose(c0_, p1)
                        tas = geo.translation_angle_scale_from_E2(prelc0)

                        # otherwise this lane should not be reported
                        # assert tas.translation[0] >= 0, tas
                        ds.append(tas.translation[0])

                    dr_lanedir = max(ds) if ds else 0.0
                else:
                    # no lp
                    dr_lanedir = 0.0

            driven_any.append(dr_any)
            driven_lanedir.append(dr_lanedir)
            timestamps.append(t0)

        title = "Consecutive lane distance"

        driven_lanedir_incremental = SampledSequence[float](timestamps,
                                                            driven_lanedir)
        driven_lanedir_cumulative = accumulate(driven_lanedir_incremental)

        if len(driven_lanedir_incremental) <= 1:
            total = 0
        else:
            total = driven_lanedir_cumulative.values[-1]
        description = textwrap.dedent("""\
            This metric computes how far the robot drove **in the direction of the correct lane**,
            discounting whenever it was driven in the wrong direction with respect to the start.
        """)
        result.set_metric(
            name=("driven_lanedir_consec", ),
            total=total,
            incremental=driven_lanedir_incremental,
            title=title,
            description=description,
            cumulative=driven_lanedir_cumulative,
        )
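The key step in this metric is expressing the new ego pose p1 in the frame of the lane center point and taking its x (longitudinal) component, which is what tas.translation[0] measures above. A compact numpy sketch of that projection, assuming relative_pose(a, b) is a^{-1} b as its use here suggests:

import numpy as np

def longitudinal_progress(center_pose, ego_pose):
    # x component of center_pose^{-1} * ego_pose for 3x3 homogeneous matrices,
    # i.e. how far the ego pose lies along the lane direction
    rel = np.linalg.solve(center_pose, ego_pose)
    return rel[0, 2]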
Example #22
    def evaluate(self, context: RuleEvaluationContext,
                 result: RuleEvaluationResult):
        interval = context.get_interval()
        lane_pose_seq = context.get_lane_pose_seq()
        ego_pose_sequence = context.get_ego_pose_global()

        driven_any_builder = SampledSequenceBuilder[float]()
        driven_lanedir_builder = SampledSequenceBuilder[float]()

        for idt in iterate_with_dt(interval):
            t0, t1 = idt.v0, idt.v1  # not v
            try:
                name2lpr = lane_pose_seq.at(t0)

                p0 = ego_pose_sequence.at(t0).as_SE2()
                p1 = ego_pose_sequence.at(t1).as_SE2()
            except UndefinedAtTime:
                dr_any = dr_lanedir = 0.0

            else:
                prel = relative_pose(p0, p1)
                translation, _ = geo.translation_angle_from_SE2(prel)
                dr_any = np.linalg.norm(translation)

                if name2lpr:

                    ds = []
                    for k, lpr in name2lpr.items():
                        assert isinstance(lpr, GetLanePoseResult)
                        c0 = lpr.center_point
                        ctas = geo.translation_angle_scale_from_E2(
                            c0.asmatrix2d().m)
                        c0_ = geo.SE2_from_translation_angle(
                            ctas.translation, ctas.angle)
                        prelc0 = relative_pose(c0_, p1)
                        tas = geo.translation_angle_scale_from_E2(prelc0)

                        # otherwise this lane should not be reported
                        # assert tas.translation[0] >= 0, tas
                        ds.append(tas.translation[0])

                    dr_lanedir = max(ds)
                else:
                    # no lp
                    dr_lanedir = 0.0

            driven_any_builder.add(t0, dr_any)
            driven_lanedir_builder.add(t0, dr_lanedir)

        driven_any_incremental = driven_any_builder.as_sequence()

        driven_any_cumulative = accumulate(driven_any_incremental)

        if len(driven_any_incremental) <= 1:
            total = 0
        else:
            total = driven_any_cumulative.values[-1]

        title = "Distance"
        description = textwrap.dedent("""\
            This metric computes how far the robot drove.
        """)

        result.set_metric(
            name=("driven_any", ),
            total=total,
            incremental=driven_any_incremental,
            title=title,
            description=description,
            cumulative=driven_any_cumulative,
        )
        title = "Lane distance"

        driven_lanedir_incremental = driven_lanedir_builder.as_sequence()
        driven_lanedir_cumulative = accumulate(driven_lanedir_incremental)

        if len(driven_lanedir_incremental) <= 1:
            total = 0
        else:
            total = driven_lanedir_cumulative.values[-1]

        description = textwrap.dedent("""\
            This metric computes how far the robot drove
            **in the direction of the lane**.
        """)
        result.set_metric(
            name=("driven_lanedir", ),
            total=total,
            incremental=driven_lanedir_incremental,
            title=title,
            description=description,
            cumulative=driven_lanedir_cumulative,
        )