示例#1
0
def getObjectsInSight():
  """Return human-readable names of the objects currently in the robot's view.

  Pipeline: read the robot pose, ask Prolog for every object in the current
  room, convert each object's cell coordinates into robot-relative polar
  coordinates, filter by distance/angle thresholds, de-duplicate, then map
  the surviving object ids to their type names (underscores become spaces).

  Returns:
    list of str: object type names in sight; [] on any failure (best effort).
  """
  rp = [0, 0, 0]
  robotpose.getRobotPose(rp)

  try:
    log_to_terminal("Getting objects in sight for robot in position [%s, %s, %s]"%(float(rp[0]),float(rp[1]),normalize(degrees(rp[2]))))
    answer = conversion_cells_from_coord_handle(float(rp[0]), float(rp[1]))
    result = prolog_handle('allObjectsInRoom', ["%s"%answer.cell_x, "%s"%answer.cell_y, "L"])

    # The Prolog answer is a quoted nested-list string, e.g. "[['a',1,2],['b',3,4]]";
    # strip quotes/brackets and split into per-object "name,x,y" chunks.
    itemList = result.ris[0].atoms[2]
    itemList = itemList.replace("\'","").replace("[[","").replace("]]","").split("],[")

    objectsList = []
    objectsName = []

    for raw_item in itemList:
      fields = raw_item.split(",")  # [object_id, cell_x, cell_y]
      euclid_coords = conversion_coord_from_cells_handle(int(fields[1]), int(fields[2]))
      polar_coords = convert_from_map_euclidian_to_robot_angular(euclid_coords.real_x, euclid_coords.real_y, rp)
      objectsList.append([fields[0], polar_coords[0], polar_coords[1]])

    # Keep objects inside the distance band and the angular cone; the cone
    # wraps around 0/360 degrees, hence the two-sided angle test.
    objectsList = [item for item in objectsList
                   if SIGHT_CLOSE_THRESHOLD < item[1] < SIGHT_FAR_THRESHOLD
                   and (abs(item[2]) < ANGULAR_THRESHOLD or abs(item[2]) > 360 - ANGULAR_THRESHOLD)]
    objectsList = removeElementsWithSameName(objectsList)

    for obj in objectsList:
      result = prolog_handle('objectType', [obj[0], "X"])
      objectsName.append(result.ris[0].atoms[1].replace("_"," "))

    return objectsName

  except Exception:
    # Best effort: any failure (service down, malformed Prolog answer, ...)
    # is reported as "nothing in sight". Narrowed from a bare 'except:' so
    # KeyboardInterrupt/SystemExit still propagate.
    return []
示例#2
0
  def do_getcloser(self):
    """Drive toward the tracked person until close enough or preempted.

    Publishes Twist commands on self.pub_cmd at ~10 Hz. Heading is held at
    the pose yaw snapped by roundPI2(); a small PID on the angular error
    scales the turn rate. Reads self.person_dist each cycle (the <0 branch
    below treats negative values as "keep going straight" — presumably "no
    person measured"; confirm with the perception publisher).
    """
    # Block until a valid robot pose [x, y, yaw] is available.
    rp=[0,0,0]
    while (not robotpose.getRobotPose(rp)):
      rospy.sleep(0.25)

    I_err       = 0
    prev_err    = 0
    yaw         = rp[2]
    GTh         = roundPI2(yaw)  # goal heading: yaw snapped by roundPI2()
    cmd_vel     = Twist()
    status      = 1 # go

    # Main control loop; exits on action preemption or when status leaves 1.
    while((not self._as.is_preempt_requested()) and status==1):
      if (not robotpose.getRobotPose(rp)):
        # NOTE(review): no sleep before retry — this busy-waits if the pose
        # lookup keeps failing.
        continue

      X         = rp[0]
      Y         = rp[1]
      yaw       = rp[2]
      adist     = norm180(degrees(GTh-yaw))  # heading error in degrees, [-180, 180]
      trans_vel = 0
      midang=35  # below this heading error (deg) forward motion is allowed

      if (status==1):
          if (self.person_dist>MIN_PERSON_DIST):
              # Speed ramps with remaining distance, clamped to [MIN, MAX].
              trans_vel = max(MIN_TRANS_VEL,MAX_TRANS_VEL*min(1.0,(self.person_dist-MIN_PERSON_DIST)/0.6))
          elif (self.person_dist<0):
              pass
          else:
              status = 2 # Finish

      # Rotate in place while the heading error is large; otherwise scale
      # forward speed down linearly as the error grows toward midang.
      if (abs(adist)>midang):
        cmd_vel.linear.x = 0.0
        cmd_vel.angular.z = MAX_ANG_VEL
      else:
        cmd_vel.linear.x = trans_vel * (midang-abs(adist))/midang
        cmd_vel.angular.z = MAX_ANG_VEL

      if (cmd_vel.linear.x < ZERO_TRANS_VEL):
        cmd_vel.linear.x = 0

      # Anti-windup: drop the integral term in this error band.
      if (abs(adist)>40 and abs(adist)<50):
        I_err = 0

      # PID controller
      err = radians(adist)
      P_err = err
      I_err += err
      D_err = err - prev_err
      prev_err = err
      Kp = 0.50
      Ki = 0.000
      Kd = 0.001
      v = Kp * P_err + Ki * I_err + Kd * D_err
      cmd_vel.angular.z *= v  # signed PID output scales the fixed MAX_ANG_VEL

      self.pub_cmd.publish(cmd_vel)
      rospy.sleep(0.10)
    # end while
    self.stop_getcloser()
示例#3
0
def handle_evaluate_condition_command(req):
  """Evaluate a symbolic condition and report the result.

  Supported forms are in{Room} (is the robot in that room?) and
  isPerceived{Object} (is that object currently in sight?). Publishes
  "ConditionVerified_True"/"ConditionVerified_False" on the event topic
  and returns the string "True" or "False". Unknown conditions are logged
  and treated as False.
  """
  cond = req.condition
  outcome = "False"

  if cond.startswith("in{"):
    room = cond.replace("in{", "")[:-1]
    pose = [0, 0, 0]
    robotpose.getRobotPose(pose)
    cell = conversion_cells_from_coord_handle(pose[0], pose[1])
    current_room = prolog_handle('cellCoordIsPartOf', [str(int(cell.cell_x)), str(int(cell.cell_y)), 'X']).ris[0].atoms[2]
    log_to_terminal("robot_current_room:%s ?== argument:%s"%(current_room, room))
    if current_room == room:
      outcome = "True"

  elif cond.startswith("isPerceived{"):
    wanted = cond.replace("isPerceived{", "")[:-1]
    seen = getObjectsInSight()
    log_to_terminal("argument:%s ?in list:%s"%(wanted, seen))
    if wanted in seen:
      outcome = "True"

  else:
    rospy.logwarn("Could not recognize the condition %s. Considering it as False"%cond)

  event_pub.publish("ConditionVerified_%s"%outcome)
  return outcome
def get_nearest_cell_in_room(atom):
  """Return [cell_x, cell_y, 0] of the cell nearest the robot for room `atom`.

  The robot pose is converted to grid-cell indices and Prolog is asked for
  the cell near the destination room; 'X'/'Y' are the Prolog output
  variables and only the first answer is used. The trailing 0 is a fixed
  orientation placeholder.
  """
  pose = [0, 0, 0]
  robotpose.getRobotPose(pose)
  cell = conversion_cells_from_coord_handle(pose[0], pose[1])
  answer = prolog_handle('get_cell_near_destination_room', [str(cell.cell_x), str(cell.cell_y), atom, 'X', 'Y']).ris[0]
  return [answer.atoms[3], answer.atoms[4], 0]
示例#5
0
def get_nearest_cell_in_room(atom):
    """Return [cell_x, cell_y, 0] of the semantic-map cell nearest the robot
    that leads to the destination room `atom`.

    The two coordinates are taken verbatim from the Prolog answer (their
    type is whatever the service returns — presumably strings; confirm with
    callers). The trailing 0 is a fixed orientation placeholder.
    """
    robot_pose = [0, 0, 0]
    robotpose.getRobotPose(robot_pose)
    # Convert the metric robot pose to grid-cell indices.
    converted_robot_pose = conversion_cells_from_coord_handle(
        robot_pose[0], robot_pose[1])
    # 'X' and 'Y' are the Prolog output variables; only the first answer is used.
    query_results = prolog_handle('get_cell_near_destination_room', [
        str(converted_robot_pose.cell_x),
        str(converted_robot_pose.cell_y), atom, 'X', 'Y'
    ]).ris[0]
    coords = [query_results.atoms[3], query_results.atoms[4]]
    return [coords[0], coords[1], 0]
示例#6
0
def ground_request(frame, target):
    prolog_handle = rospy.ServiceProxy('/prolog_interface/prolog_query',
                                       prologSrv)
    conversion_cells_from_coord_handle = rospy.ServiceProxy(
        '/semantic_map_extraction/get_cell_by_coords', GetCellByCoords)
    conversion_coord_from_cells_handle = rospy.ServiceProxy(
        '/semantic_map_extraction/get_coords_by_cell', GetCoordsByCell)

    robot_pose = [0, 0, 0]
    robotpose.getRobotPose(robot_pose)
    #converted_robot_pose = conversion_cells_from_coord_handle(robot_pose[0], robot_pose[1])

    # grounding the commands with frame CLOSEST and target: OBJECT (e.g. bed)
    if (frame == "CLOSEST"):
        targetList = str(target).replace(" ", "").replace("\'", "")
        print targetList
        try:
            target_atoms_list = prolog_handle(
                'nearest_atom_in_list_from',
                [targetList,
                 str(robot_pose[0]),
                 str(robot_pose[1]), "X"])
            target_atoms_list = [target_atoms_list.ris[0].atoms[3]]

        except:
            target_atoms_list = []

    # grounding other commands
    else:
        target_list = target.split(" ")

        try:
            listProlog = str(target_list).replace("\"", "").replace(
                "\'", "").replace(" ", "")
            print listProlog
            query_results = prolog_handle('getAtom', [
                listProlog, "V",
                str("%.2f" % robot_pose[0]),
                str("%.2f" % robot_pose[1])
            ]).ris

            target_atoms_list = []

            for result in query_results:
                target_atoms_list.append(result.atoms[1])

        except:
            target_atoms_list = []

    return list(set(target_atoms_list))
def is_robot_already_there(target_pose, target_category, target_id):
  """Return True when the robot already fulfils the navigation target.

  "object" targets are checked by Euclidean distance to target_pose
  (< 0.5 m); "room" targets by matching target_id against the robot's
  current room as reported by the Prolog knowledge base.
  """
  pose = [0,0,0]
  robotpose.getRobotPose(pose)
  cell = conversion_cells_from_coord_handle(pose[0], pose[1])

  # Metric closeness to the target position.
  distance = math.sqrt(math.pow(float(pose[0]) - float(target_pose[0]), 2) + math.pow(float(pose[1]) - float(target_pose[1]), 2))
  close_enough = distance < 0.5

  # Room membership according to the knowledge base.
  current_room = prolog_handle('cellCoordIsPartOf', [str(int(cell.cell_x)), str(int(cell.cell_y)), 'X']).ris[0].atoms[2]

  return (target_category == "object" and close_enough) or \
         (target_category == "room" and target_id == current_room)
def publish_dot():
    """Publish an Obs marker 1.2 m in front of the robot, then beep.

    The marker is a 0.50 x 0.50 x 0.03 box projected along the robot's
    current heading, with its theta offset by a hard-coded 3.14 (~pi)
    relative to the robot's yaw.
    """
    rp=[0,0,0]
    robotpose.getRobotPose(rp)
    print "Robot pose: ",rp
    obs = Obs()
    # Project 1.2 m ahead of the robot along its yaw rp[2].
    obs.posx=rp[0] + 1.2*cos(rp[2])
    obs.posy=rp[1] + 1.2*sin(rp[2])
    obs.theta=rp[2]+3.14
    print "Tagged object: ",obs.posx,obs.posy
    obs.dimx=0.50
    obs.dimy=0.50
    obs.dimz=0.03
    #obs.properties="objImagedata~rgb-20130731174135.png^raw-20130731174135.png^depth-20130731174135.png^laser-20130731174135.png"
    pub_dot.publish(obs)
    # Audible confirmation that the tag was published.
    say('[BEEP]500|400\n')
示例#9
0
def publish_dot():
    """Publish an Obs marker 1.2 m ahead of the robot and emit a beep.

    The marker is a 0.50 x 0.50 x 0.03 box placed along the current heading;
    its theta is the robot yaw plus a hard-coded 3.14 (~pi).
    """
    rp = [0, 0, 0]
    robotpose.getRobotPose(rp)
    print "Robot pose: ", rp
    obs = Obs()
    # Project 1.2 m ahead of the robot along its yaw rp[2].
    obs.posx = rp[0] + 1.2 * cos(rp[2])
    obs.posy = rp[1] + 1.2 * sin(rp[2])
    obs.theta = rp[2] + 3.14
    print "Tagged object: ", obs.posx, obs.posy
    obs.dimx = 0.50
    obs.dimy = 0.50
    obs.dimz = 0.03
    #obs.properties="objImagedata~rgb-20130731174135.png^raw-20130731174135.png^depth-20130731174135.png^laser-20130731174135.png"
    pub_dot.publish(obs)
    # Audible confirmation that the tag was published.
    say('[BEEP]500|400\n')
示例#10
0
def ground_request(frame, target):
    prolog_handle = rospy.ServiceProxy("/prolog_interface/prolog_query", prologSrv)
    conversion_cells_from_coord_handle = rospy.ServiceProxy(
        "/semantic_map_extraction/get_cell_by_coords", GetCellByCoords
    )
    conversion_coord_from_cells_handle = rospy.ServiceProxy(
        "/semantic_map_extraction/get_coords_by_cell", GetCoordsByCell
    )

    robot_pose = [0, 0, 0]
    robotpose.getRobotPose(robot_pose)
    # converted_robot_pose = conversion_cells_from_coord_handle(robot_pose[0], robot_pose[1])

    # grounding the commands with frame CLOSEST and target: OBJECT (e.g. bed)
    if frame == "CLOSEST":
        targetList = str(target).replace(" ", "").replace("'", "")
        print targetList
        try:
            target_atoms_list = prolog_handle(
                "nearest_atom_in_list_from", [targetList, str(robot_pose[0]), str(robot_pose[1]), "X"]
            )
            target_atoms_list = [target_atoms_list.ris[0].atoms[3]]

        except:
            target_atoms_list = []

    # grounding other commands
    else:
        target_list = target.split(" ")

        try:
            listProlog = str(target_list).replace('"', "").replace("'", "").replace(" ", "")
            print listProlog
            query_results = prolog_handle(
                "getAtom", [listProlog, "V", str("%.2f" % robot_pose[0]), str("%.2f" % robot_pose[1])]
            ).ris

            target_atoms_list = []

            for result in query_results:
                target_atoms_list.append(result.atoms[1])

        except:
            target_atoms_list = []

    return list(set(target_atoms_list))
示例#11
0
def is_robot_already_there(target_pose, target_category, target_id):
    """Tell whether the robot already satisfies the navigation target.

    For an "object" target the test is metric proximity to target_pose
    (< 0.5 m); for a "room" target the test is whether the robot's current
    room, looked up in the Prolog knowledge base, equals target_id. Any
    other category yields False.
    """
    pose = [0, 0, 0]
    robotpose.getRobotPose(pose)
    cell = conversion_cells_from_coord_handle(pose[0], pose[1])

    # Euclidean distance between the robot and the target position.
    distance = math.sqrt(
        math.pow(float(pose[0]) - float(target_pose[0]), 2) +
        math.pow(float(pose[1]) - float(target_pose[1]), 2))
    near_enough = distance < 0.5

    # Current room according to the knowledge base.
    current_room = prolog_handle('cellCoordIsPartOf', [
        str(int(cell.cell_x)),
        str(int(cell.cell_y)), 'X'
    ]).ris[0].atoms[2]

    if target_category == "object":
        return near_enough
    if target_category == "room":
        return target_id == current_room
    return False
示例#12
0
            # (tf.LookupException, tf.ConnectivityException, tf.ExtrapolationException):
            rospy.sleep(0.2)
            print '*** exception here: ', sys.exc_info()[0]
            continue


def initNode(NODE):
    """Create the ROS node named '<NODE>_' and the module-level tf listener.

    Assigns the global tfl; pub_cmd/sub_odom are declared global but their
    setup is currently disabled (see the commented lines below).
    """
    global pub_cmd, sub_odom, tfl

    rospy.init_node(NODE + '_')
    tfl = tf.TransformListener()

    #rospy.sleep(1.0)
    #pub_cmd = rospy.Publisher('/'+robotname+'/ps3joy_cmd_vel', Twist)
    #sub_odom = rospy.Subscriber('/'+robotname+'/odom', Odometry, odom_callback)
    # Give ROS a moment to complete node registration before callers proceed.
    rospy.sleep(1.0)


#  MAIN

if __name__ == '__main__':
    # NOTE(review): 'global' at module scope is a no-op, and the parameter is
    # stored into 'robot_name' while the declared global is 'robotname' — the
    # global is never actually set here; confirm which name consumers read.
    global robotname
    initNode(NODE)
    robot_name = rospy.get_param('/robotname', "robot_0")
    # Smoke test: fetch and print the robot pose via the pose service.
    V = [0, 0, 0]
    #getRobotPose_tf(V)
    #print "Direct tf get robot pose : ",V
    robotpose.getRobotPose(V)
    print "Service get robot pose : ", V
示例#13
0
    def do_followPerson(self):
        """Follow a tracked person until the action server is preempted.

        State machine: 0 = waiting for a person, 1 = following, 2 = turning
        toward a laterally displaced person. Publishes Twist commands on
        self.pub_cmd at ~10 Hz; heading is held at GTh (the initial yaw
        snapped by roundPI2(), later stepped by pi/2 when turning) with a
        small PID on the angular error. Reads self.person_dist and
        self.person_dist_y — negative person_dist is treated as "person
        lost" (presumably the tracker's no-detection value; confirm).
        """
        # getting robot pose
        rp = [0, 0, 0]
        while (not robotpose.getRobotPose(rp)):
            rospy.sleep(0.25)

        I_err = 0
        prev_err = 0
        yaw = rp[2]
        GTh = roundPI2(yaw)
        cmd_vel = Twist()
        status = 0  # waiting

        while (not self._as.is_preempt_requested()):
            # NOTE(review): no sleep before retry — busy-waits if the pose
            # lookup keeps failing.
            if (not robotpose.getRobotPose(rp)):
                continue

            X = rp[0]
            Y = rp[1]
            yaw = rp[2]
            adist = norm180(degrees(GTh - yaw))  # heading error, degrees in [-180, 180]
            trans_vel = 0
            midang = 35  # below this error (deg) forward motion is allowed

            # waiting -> following: a person appeared within 2x the minimum distance.
            if (status == 0 and self.person_dist > 0
                    and self.person_dist < MIN_PERSON_DIST * 2.0):
                print 'Person detected. Start Following...'
                status = 1  # person detected

            # following -> waiting: the tracked person disappeared.
            if (status == 1 and self.person_dist < 0):
                print 'Person lost. Stop following...'
                status = 0  # waiting

            if (status == 1):
                if (self.person_dist > MIN_PERSON_DIST):
                    # Speed ramps with remaining distance, clamped to [MIN, MAX].
                    trans_vel = max(
                        MIN_TRANS_VEL,
                        MAX_TRANS_VEL *
                        min(1.0, (self.person_dist - MIN_PERSON_DIST) / 0.6))

                elif (abs(self.person_dist_y) > MIN_PERSON_DIST / 2):
                    # Person is close but off to the side: rotate the goal
                    # heading by a quarter turn toward them.
                    status = 2  # Turning...
                    adist = norm180(degrees(GTh - yaw))

                    if (self.person_dist_y > 0):
                        GTh = GTh + pi / 2
                    else:
                        GTh = GTh - pi / 2

            # turning -> following once the heading error is small enough.
            elif (status == 2 and abs(adist) < 23):
                status = 1

            # Rotate in place while the heading error is large; otherwise
            # scale forward speed down linearly toward zero at midang.
            if (abs(adist) > midang):
                cmd_vel.linear.x = 0.0
                cmd_vel.angular.z = MAX_ANG_VEL
            else:
                cmd_vel.linear.x = trans_vel * (midang - abs(adist)) / midang
                cmd_vel.angular.z = MAX_ANG_VEL

            if (cmd_vel.linear.x < ZERO_TRANS_VEL):
                cmd_vel.linear.x = 0

            # Anti-windup: drop the integral term in this error band.
            if (abs(adist) > 40 and abs(adist) < 50):
                I_err = 0
            # PID controller
            err = radians(adist)
            P_err = err
            I_err += err
            D_err = err - prev_err
            prev_err = err
            Kp = 0.50
            Ki = 0.000
            Kd = 0.001
            v = Kp * P_err + Ki * I_err + Kd * D_err
            #print 'PID err = ',err,' -> PID value = ',v
            cmd_vel.angular.z *= v  # signed PID output scales MAX_ANG_VEL

            self.pub_cmd.publish(cmd_vel)
            rospy.sleep(0.10)
        # end while

        self.stop_followperson()
    def do_followPerson(self):
        """Follow a tracked person until the action server is preempted.

        State machine: 0 = waiting, 1 = following, 2 = turning toward a
        laterally displaced person. Publishes Twist commands on self.pub_cmd
        at ~10 Hz; heading is held at GTh (initial yaw snapped by roundPI2(),
        stepped by pi/2 when turning) with a small PID on the angular error.
        Negative self.person_dist is treated as "person lost" (presumably
        the tracker's no-detection value; confirm).
        """
        # getting robot pose
        rp = [0, 0, 0]
        while not robotpose.getRobotPose(rp):
            rospy.sleep(0.25)

        I_err = 0
        prev_err = 0
        yaw = rp[2]
        GTh = roundPI2(yaw)
        cmd_vel = Twist()
        status = 0  # waiting

        while not self._as.is_preempt_requested():
            # NOTE(review): no sleep before retry — busy-waits if the pose
            # lookup keeps failing.
            if not robotpose.getRobotPose(rp):
                continue

            X = rp[0]
            Y = rp[1]
            yaw = rp[2]
            adist = norm180(degrees(GTh - yaw))  # heading error, degrees in [-180, 180]
            trans_vel = 0
            midang = 35  # below this error (deg) forward motion is allowed

            # waiting -> following: a person appeared within 2x the minimum distance.
            if status == 0 and self.person_dist > 0 and self.person_dist < MIN_PERSON_DIST * 2.0:
                print "Person detected. Start Following..."
                status = 1  # person detected

            # following -> waiting: the tracked person disappeared.
            if status == 1 and self.person_dist < 0:
                print "Person lost. Stop following..."
                status = 0  # waiting

            if status == 1:
                if self.person_dist > MIN_PERSON_DIST:
                    # Speed ramps with remaining distance, clamped to [MIN, MAX].
                    trans_vel = max(MIN_TRANS_VEL, MAX_TRANS_VEL * min(1.0, (self.person_dist - MIN_PERSON_DIST) / 0.6))

                elif abs(self.person_dist_y) > MIN_PERSON_DIST / 2:
                    # Person is close but off to the side: rotate the goal
                    # heading by a quarter turn toward them.
                    status = 2  # Turning...
                    adist = norm180(degrees(GTh - yaw))

                    if self.person_dist_y > 0:
                        GTh = GTh + pi / 2
                    else:
                        GTh = GTh - pi / 2

            # turning -> following once the heading error is small enough.
            elif status == 2 and abs(adist) < 23:
                status = 1

            # Rotate in place while the heading error is large; otherwise
            # scale forward speed down linearly toward zero at midang.
            if abs(adist) > midang:
                cmd_vel.linear.x = 0.0
                cmd_vel.angular.z = MAX_ANG_VEL
            else:
                cmd_vel.linear.x = trans_vel * (midang - abs(adist)) / midang
                cmd_vel.angular.z = MAX_ANG_VEL

            if cmd_vel.linear.x < ZERO_TRANS_VEL:
                cmd_vel.linear.x = 0

            # Anti-windup: drop the integral term in this error band.
            if abs(adist) > 40 and abs(adist) < 50:
                I_err = 0
            # PID controller
            err = radians(adist)
            P_err = err
            I_err += err
            D_err = err - prev_err
            prev_err = err
            Kp = 0.50
            Ki = 0.000
            Kd = 0.001
            v = Kp * P_err + Ki * I_err + Kd * D_err
            # print 'PID err = ',err,' -> PID value = ',v
            cmd_vel.angular.z *= v  # signed PID output scales MAX_ANG_VEL

            self.pub_cmd.publish(cmd_vel)
            rospy.sleep(0.10)
        # end while

        self.stop_followperson()
示例#15
0
            # (tf.LookupException, tf.ConnectivityException, tf.ExtrapolationException):
            rospy.sleep(0.2)
            print "*** exception here: ", sys.exc_info()[0]
            continue


def initNode(NODE):
    """Create the ROS node named '<NODE>_' and the global tf listener.

    Only tfl is assigned here; the pub_cmd/sub_odom setup is currently
    disabled (see the commented lines below).
    """
    global pub_cmd
    global sub_odom
    global tfl
    rospy.init_node(NODE + "_")
    tfl = tf.TransformListener()

    # rospy.sleep(1.0)
    # pub_cmd = rospy.Publisher('/'+robotname+'/ps3joy_cmd_vel', Twist)
    # sub_odom = rospy.Subscriber('/'+robotname+'/odom', Odometry, odom_callback)
    # Give ROS a moment to complete node registration before callers proceed.
    rospy.sleep(1.0)


#  MAIN

if __name__ == "__main__":
    # NOTE(review): 'global' at module scope is a no-op, and the parameter is
    # stored into 'robot_name' while the declared global is 'robotname' — the
    # global is never actually set here; confirm which name consumers read.
    global robotname
    initNode(NODE)
    robot_name = rospy.get_param("/robotname", "robot_0")
    # Smoke test: fetch and print the robot pose via the pose service.
    V = [0, 0, 0]
    # getRobotPose_tf(V)
    # print "Direct tf get robot pose : ",V
    robotpose.getRobotPose(V)
    print "Service get robot pose : ", V