    def update(self, delta, elapsed, state_controller, environment):
        # fetch roomba odometry
        target_roombas, _ = state_controller.query('RoombaState', environment)

        # fetch drone odometry
        drone_state = state_controller.query('DroneState', environment)

        if self.land_time is not None:
            if elapsed - self.land_time > cfg.PITTRAS_BLOCK_FLOOR_TIME * 1000:
                self.complete(TaskState.SUCCESS)
            return

        if drone_state['z_pos'] == 0:
            self.land_time = elapsed

        if self.target_roomba not in target_roombas:
            self.complete(TaskState.FAILURE, "Roomba not found")
            return

        roomba = target_roombas[self.target_roomba]

        # calculate the target yaw so that we turn less than 45 degrees
        # in either direction and land with a bumper side perpendicular
        # to the direction of roomba motion
        self.target_yaw = BlockRoombaTask._calculate_target_yaw(
            drone_state['yaw'], roomba['heading'])

        # calculate the target landing position
        block_vector = geometry.rotate_vector(self.block_vector,
                                              roomba['heading'])
        self.target_xy = np.array(roomba['pos']) + block_vector

        # PID calculations
        control_xy = self.pid_xy.get_control(
            self.target_xy - drone_state['xy_pos'], -drone_state['xy_vel'],
            delta)

        control_yaw = self.pid_yaw.get_control(
            self.target_yaw - drone_state['yaw'], -drone_state['yaw'], delta)

        # rotate the control xy vector to account for the drone's current yaw
        adjusted_xy = geometry.rotate_vector(control_xy, -drone_state['yaw'])

        # perform control action
        environment.agent.control(adjusted_xy, control_yaw,
                                  cfg.PITTRAS_BLOCK_DESCENT_VEL)
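
    # `_calculate_target_yaw` is not shown in this listing. Below is a minimal
    # sketch of what it could look like, based only on the comment above (turn
    # less than 45 degrees in either direction and land with a bumper side
    # perpendicular to the roomba's direction of motion); the name and
    # signature come from the call site, the body is an assumption.
    @staticmethod
    def _calculate_target_yaw(current_yaw, roomba_heading):
        # bumper sides repeat every 90 degrees, so the candidate yaws are the
        # roomba heading plus multiples of pi/2; picking the candidate closest
        # to the current yaw bounds the turn to at most 45 degrees
        best_yaw = None
        best_diff = None
        for quarter_turn in range(4):
            candidate = (roomba_heading + quarter_turn * np.pi / 2) % (2 * np.pi)
            # signed angular difference wrapped into [-pi, pi)
            diff = (candidate - current_yaw + np.pi) % (2 * np.pi) - np.pi
            if best_diff is None or abs(diff) < abs(best_diff):
                best_yaw = candidate
                best_diff = diff
        return best_yaw
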
    def update(self, delta, elapsed, state_controller, environment):
        # fetch roomba odometry
        target_roombas, _ = state_controller.query('RoombaState', environment)

        # fetch drone odometry
        drone_state = state_controller.query('DroneState', environment)

        if self.target_roomba not in target_roombas:
            self.complete(TaskState.FAILURE, "Roomba not found")
            return

        roomba = target_roombas[self.target_roomba]

        target_xy = np.array(roomba['pos'])

        # check if the roomba is too far away
        if (np.linalg.norm(target_xy - drone_state['xy_pos']) >
                cfg.PITTRAS_HIT_ROOMBA_MAX_START_DIST):
            self.complete(TaskState.FAILURE, "Roomba is too far away")
            return

        if self.last_target_xy is None:
            self.last_target_xy = target_xy

        target_vel_xy = (target_xy - self.last_target_xy) / delta

        # PID calculations
        control_xy = self.pid_xy.get_control(
            target_xy - drone_state['xy_pos'],
            target_vel_xy - drone_state['xy_vel'], delta)

        # rotate the control xy vector to account for the drone's current yaw
        adjusted_xy = geometry.rotate_vector(control_xy, -drone_state['yaw'])

        # perform control action
        environment.agent.control(adjusted_xy, 0,
                                  cfg.PITTRAS_HIT_ROOMBA_DESCENT_VELOCITY)

        # check if we have hit the roomba
        if drone_state['z_pos'] < cfg.PITTRAS_DRONE_PAD_ACTIVIATION_HEIGHT:
            if self.land_time is None:
                self.land_time = elapsed
            if elapsed - self.land_time > cfg.PITTRAS_HIT_ROOMBA_FLOOR_TIME * 1000:
                self.complete(TaskState.SUCCESS)
            return

        # update delayed trackers
        self.last_target_xy = target_xy
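
    # The PID objects used in these tasks (`pid_xy`, `pid_z`, `pid_yaw`) are
    # constructed elsewhere; only their `get_control(error, derivative, dt)`
    # call pattern is visible here. The class below is a minimal sketch of a
    # controller with that interface, assuming gains are passed in at
    # construction time; it is an illustration, not the project's PID
    # implementation.
    class SimplePID(object):
        def __init__(self, kp, ki, kd):
            self.kp = kp
            self.ki = ki
            self.kd = kd
            self.integral = 0.0

        def get_control(self, error, derivative, dt):
            # accumulate the integral term and combine the proportional,
            # integral, and derivative contributions; `error` and `derivative`
            # may be scalars or numpy arrays, matching how the tasks pass xy
            # vectors as well as scalar z and yaw errors
            self.integral = self.integral + np.asarray(error) * dt
            return (self.kp * np.asarray(error)
                    + self.ki * self.integral
                    + self.kd * np.asarray(derivative))
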
    def update(self, delta, elapsed, state_controller, environment):
        if (self.state == HoldPositionTaskStates.done
                or self.state == HoldPositionTaskStates.failed):
            return

        # Fetch current odometry
        drone_state = state_controller.query('DroneState', environment)

        if self.state == HoldPositionTaskStates.init:
            # TODO(zacyu): Remove this when `elapsed` is changed to represent
            #              the elapsed time since the start of the task.
            self.start_time = elapsed
            self.state = HoldPositionTaskStates.holding
            self.hold_xy = drone_state['xy_pos']
            self.hold_z = drone_state['z_pos']
            return

        if (self.hold_duration > 0) and (elapsed - self.start_time >
                                         self.hold_duration * 1000):
            if (np.linalg.norm(drone_state['xy_pos'] - self.hold_xy) <
                    cfg.PITTRAS_HOLD_POSITION_TOLERANCE
                    and abs(drone_state['z_pos'] - self.hold_z) <
                    cfg.PITTRAS_HOLD_POSITION_TOLERANCE):
                self.complete(TaskState.SUCCESS)
                self.state = HoldPositionTaskStates.done
            else:
                self.complete(TaskState.FAILURE)
                self.state = HoldPositionTaskStates.failed

        # PID calculations
        control_xy = self.pid_xy.get_control(
            self.hold_xy - drone_state['xy_pos'], -drone_state['xy_vel'],
            delta)

        control_z = self.pid_z.get_control(self.hold_z - drone_state['z_pos'],
                                           -drone_state['z_vel'], delta)

        # rotate the control xy vector to account for the drone's current yaw
        adjusted_xy = geometry.rotate_vector(control_xy, -drone_state['yaw'])

        # Perform control action
        environment.agent.control(adjusted_xy, 0, control_z)
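
    # `geometry.rotate_vector` is used in every task above to rotate the PID
    # output to account for the drone's yaw before commanding the agent. The
    # helper below is a minimal sketch of such a 2D rotation, assuming the
    # conventional counterclockwise rotation by `angle`; it is shown inline
    # for illustration only and is not the project's geometry module.
    @staticmethod
    def _rotate_vector_sketch(vector, angle):
        # rotate a 2D vector counterclockwise about the origin by `angle`
        rotation = np.array([[np.cos(angle), -np.sin(angle)],
                             [np.sin(angle), np.cos(angle)]])
        return rotation.dot(np.asarray(vector))
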
    def update(self, delta, elapsed, state_controller, environment):
        # Fetch current odometry
        drone_state = state_controller.query('DroneState', environment)
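        # note that `target_xy` is treated as a target planar velocity in this
        # task: it is compared against the measured xy velocity below rather
        # than against the drone's position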

        if (np.linalg.norm(self.target_xy - drone_state['xy_vel']) <
                cfg.PITTRAS_VELOCITY_TOLERANCE):
            self.complete(TaskState.SUCCESS)
            return

        # Calculate control acceleration vector
        control_xy = self.pid_xy.get_control(
            self.target_xy - drone_state['xy_vel'],
            [0, 0],
            delta
        )

        # rotate the control xy vector to account for the drone's current yaw
        adjusted_xy = geometry.rotate_vector(control_xy, -drone_state['yaw'])

        # Perform control action
        environment.agent.control(adjusted_xy, 0, self.target_z)
    def update(self, delta, elapsed, state_controller, environment):
        # fetch current odometry
        drone_state = state_controller.query('DroneState', environment)

        # check if we have reached the target
        dist = np.linalg.norm(self.target_xy - drone_state['xy_pos'])
        if dist < cfg.PITTRAS_XYZ_TRANSLATION_ACCURACY:
            self.complete(TaskState.SUCCESS)
            return

        # PID calculations
        control_xy = self.pid_xy.get_control(
            self.target_xy - drone_state['xy_pos'], -drone_state['xy_vel'],
            delta)

        control_z = self.pid_z.get_control(
            self.target_z - drone_state['z_pos'], -drone_state['z_vel'], delta)

        # rotate the control xy vector counterclockwise about
        # the origin by the current yaw
        adjusted_xy = geometry.rotate_vector(control_xy, -drone_state['yaw'])

        # perform control action
        environment.agent.control(adjusted_xy, 0, control_z)
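

# A rough sketch of how one of these tasks might be driven. The
# `update(delta, elapsed, state_controller, environment)` signature and the
# `complete(...)` callback are taken from the methods above; the 50 Hz rate,
# the millisecond `elapsed` value (inferred from the `* 1000` comparisons),
# and `task.is_done()` are illustrative assumptions, not the project's actual
# task-runner API.
def run_task(task, state_controller, environment, rate_hz=50):
    import time

    period = 1.0 / rate_hz
    start = time.time()
    last = start
    while not task.is_done():  # assumed completion check, not a known API
        now = time.time()
        delta = now - last  # seconds since the previous update
        elapsed = (now - start) * 1000  # milliseconds since the task started
        task.update(delta, elapsed, state_controller, environment)
        last = now
        time.sleep(period)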