Пример #1
0
 def moveTowardsDynamicPoint(self, distance, theta):
     """Take one control step toward a moving target.

     *distance* is the remaining range to the target and *theta* the
     bearing to it.  The longitudinal error is the projection of the
     range onto the current heading (distance * cos(theta)); the
     angular error is the wrapped bearing itself.  Both errors are fed
     through the dynamic controllers and the result is actuated, with
     the linear command clamped so the agent never drives backwards.
     """
     # Pose query kept for its side effects; the unpacked values are
     # only consumed by the (disabled) logger line in the original.
     agent_id, x_actual, y_actual, z, theta_actual, pitch, roll = self.locate(self.agent_id)
     err_forward = distance * cos(theta)
     err_heading = wrapAnglePi(theta)
     v_cmd = self.dynamic_long_control.controllerOutput(err_forward)
     w_cmd = self.dynamic_angular_control.controllerOutput(err_heading)
     # No reverse translation: clamp the linear command at zero.
     v_cmd = max(0, v_cmd)
     self.actuate(v_cmd, w_cmd)
Пример #2
0
 def faceDirection(self, theta_desired):
     """Rotate in place until the heading matches *theta_desired*.

     Runs the angular controller (detuned to a quarter of the ultimate
     gain) in a closed loop until the wrapped heading error falls inside
     a fixed deadband, logging every iteration, then commands a full
     stop and takes one final pose sample.
     """
     theta_desired = wrapAnglePi(theta_desired)
     deadband = .02
     # Detuned gain for a stable turn-in-place manoeuvre.
     self.angular_control.setGain(self.angular_ultimate_gain / 4)
     # Index 4 of the pose tuple is the yaw angle.
     theta_error = wrapAnglePi(theta_desired - self.locate(self.agent_id)[4])
     while abs(theta_error) > deadband:
         # Pure rotation: zero linear command, controller output on the
         # angular channel.
         self.actuate(0, self.angular_control.controllerOutput(theta_error))
         agent_id, x_actual, y_actual, z, theta_actual, pitch, roll = self.locate(self.agent_id)
         theta_error = wrapAnglePi(theta_desired - theta_actual)
         self.logger = append(self.logger,
                              [[0, 0, theta_error,
                                0, 0, theta_desired,
                                x_actual, y_actual, theta_actual, time()]], axis=0)
     # Stop the platform, then refresh the pose and residual error.
     self.actuate(0, 0)
     agent_id, x_actual, y_actual, z, theta_actual, pitch, roll = self.locate(self.agent_id)
     theta_error = wrapAnglePi(theta_desired - theta_actual)
Пример #3
0
 def planPath(self, distance, angle):
     """Plan an obstacle-aware travel command.

     Parameters: *distance* is the desired travel distance and *angle*
     the desired heading (wrapped to [-pi, pi) before use).

     Returns a (travel_distance, travel_angle) pair depending on the
     environment state reported by isObstacleInTheWay:
       - 'safe': the requested (distance, angle) unchanged;
       - 'not_safe': the best subgoal's possible travel and its polar
         bearing from the latest gap/subgoal analysis;
       - 'close_to_obstacle': the clipped max_travel at the requested
         angle, after printing a console warning.

     Raises PathPlanningError.ImplementationError for any other state.
     """
     self.findGaps()
     angle = wrapAnglePi(angle)
     environment_state, max_travel = self.isObstacleInTheWay(distance, angle)
     # BUG FIX: compare strings with '==', not 'is'.  'is' tests object
     # identity and only matched here by CPython's interning of short
     # literals; a state string built at runtime would silently fall
     # through to the ImplementationError branch.
     if environment_state == 'safe':
         return distance, angle
     elif environment_state == 'not_safe':
         self.findSubgoals()
         best_subgoal = self.selectSubgoal(distance, angle)
         return self.possible_travel[best_subgoal], self.readings_polar[best_subgoal][1]
     elif environment_state == 'close_to_obstacle':
         # Single concatenated argument keeps this print valid and
         # byte-identical in output on both Python 2 and Python 3.
         print('\033[93m' + '\033[1m' + 'WARNING: Target is too close to an obstacle!!!' + '\033[0m')
         return max_travel, angle
     else:
         raise PathPlanningError.ImplementationError('Something wrong in planPath method of' + __name__)