Example #1
    def __init__(self, platformColor, hoverAltitude):

        self.platformColor = platformColor 
        self.hoverAltitude = hoverAltitude
        self.processVideo = ProcessVideo()
        self.moveTime = 0
        self.waitTime = 0.10
Example #2
    def __init__(self, poseTracker, target, yaw, platformNumber, waitDist=0.1):

        #self.Kp,self.Ki,self.Kd = 0.1,20.0,0.0005 #best
        self.Kp, self.Ki, self.Kd = 0.2, 0.0, 0.0005
        self.moveTime = 0.2
        self.waitTime = 0.0
        self.tracker = poseTracker
        self.target = target
        self.waitDist = waitDist
        self.worldTarget = self.tracker.body2World(target)[:, 0]
        self.processVideo = ProcessVideo()
        self.platformNumber = platformNumber
        self.centery = 360 / 2.0
        self.centerx = 640 / 2.0
        self.pub = rospy.Publisher('ardrone/tracker', tracker)
        self.track = tracker()
        self.platform = [0, 0, 0]
        self.filterSize = 50
        self.buff = np.repeat(np.asarray([self.worldTarget]).T,
                              self.filterSize,
                              axis=1)
        self.KpYaw, self.KiYaw, self.KdYaw = (1 / 180.0) * 0.8, 0, 0
        self.targetYaw = (yaw + 360) % 360

        self.worldPoint = np.asarray([[0, 0, 0]]).T
        #the amount of weight we would like to put towards correcting the drone's drift by recognizing landmarks
        self.correctionRatio = 0.9999
Example #3
    def __init__(self, platformColor, hoverAltitude):

        self.platformColor = platformColor
        self.hoverAltitude = hoverAltitude
        self.processVideo = ProcessVideo()
        self.moveTime = 0.2
        self.waitTime = 0
        rospy.logwarn("test4")
Example #4
    def __init__(self, platformColor, speedModifier=0.3, radiusThresh=155):

        self.platformColor = platformColor
        self.processVideo = ProcessVideo()
        self.speedModifier = speedModifier
        self.radiusThresh = radiusThresh
        self.moveTime = 0.20
        self.waitTime = 0.10
Example #5
    def __init__(self, lineColor, speedModifier=0.5, radiusThresh=255):

        self.lineColor = lineColor
        self.processVideo = ProcessVideo()
        self.speedModifier = speedModifier
        self.radiusThresh = radiusThresh
        self.moveTime = 0.25
        self.waitTime = 0.10
Example #6
    def __init__(self, platformColor, lineColor, speedModifier = 0.6, radiusThresh = 170):

        self.platformColor = platformColor
        self.processVideo = ProcessVideo()
        self.speedModifier = speedModifier
        self.radiusThresh = radiusThresh
        self.lineColor = lineColor
        self.moveTime = 0.35
        self.waitTime = 0.10
        self.bestPlatformFound = None
Example #7
    def __init__(self, lineColor, speed=0.4):

        self.lineColor = lineColor
        self.speed = speed
        self.processVideo = ProcessVideo()
        self.moveTime = 0.45
        self.waitTime = 0.1
        self.prevAngle = None
        self.prevAngleCount = 0
        self.prevAngleCountMax = 185
Example #8
class HoverColorDirective(AbstractDroneDirective):
    
    # sets up this directive
    # platformColor: color to hover over
    # hoverAltitude: how high to hover over the color, in mm
    def __init__(self, platformColor, hoverAltitude):

        self.platformColor = platformColor 
        self.hoverAltitude = hoverAltitude
        self.processVideo = ProcessVideo()
        self.moveTime = 0
        self.waitTime = 0.10
    

    # given the image and navdata of the drone, returns the following in order:
    #
    # A directive status int:
    #   0 if algorithm is still running and drone isn't on orange yet
    #   1 if algorithm is finished and drone is now on orange
    #  -1 if an error has occured
    #
    # A tuple of (xspeed, yspeed, yawspeed, zspeed):
    #   indicating the next instructions to fly the drone
    #
    # An image reflecting what is being done as part of the algorithm
    #
    # A coordinate pair that represents where things are being tracked
    # If nothing is being tracked, (None,None) is returned

    def RetrieveNextInstruction(self, image, navdata):
        
        # color segmented image
        segImage = self.processVideo.DetectColor(image, self.platformColor)
        cx, cy = self.processVideo.CenterOfMass(segImage)
        xspeed, yspeed, zspeed = self.processVideo.ApproximateSpeed(
            segImage, cx, cy, navdata["altitude"][1], self.hoverAltitude,
            xtolerance=55, ytolerance=55)

        # if the platform color is visible and the drone is centered over it, the directive is finished
        if cx != None and cy != None and xspeed == 0 and yspeed == 0 and zspeed == 0:

            rospy.logwarn("Done Hovering on " + self.platformColor)
            directiveStatus = 1

        elif cx == None or cy == None:
            
            rospy.logwarn("Can't Hover on " + self.platformColor + "; lost platform")
            directiveStatus = -1

        else:

            rospy.logwarn("Trying to Hover on " + self.platformColor)
            directiveStatus = 0

        return directiveStatus, (xspeed, yspeed, 0, zspeed), segImage, (cx,cy), self.moveTime, self.waitTime, None
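To make the return contract described above concrete, here is a minimal caller sketch showing how a state machine might consume the 7-tuple that RetrieveNextInstruction returns. The function run_directive_step and the send_command callback are illustrative assumptions, not part of the original code.

# Hypothetical caller sketch (names are assumptions, not from the original code).
def run_directive_step(directive, image, navdata, send_command):
    (status, (xspeed, yspeed, yawspeed, zspeed), debugImage,
     tracked, moveTime, waitTime, extra) = directive.RetrieveNextInstruction(image, navdata)
    if status == 1:       # directive finished (e.g. hovering over the platform)
        return True
    if status == -1:      # error (e.g. platform lost)
        return False
    # status == 0: keep flying with the suggested velocities for moveTime, then wait waitTime
    send_command(xspeed, yspeed, yawspeed, zspeed)
    return None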
Example #9
    def __init__(self, orientation, lineColor, platformColor, settingsPath):

        if orientation != "PARALLEL" and orientation != "PERPENDICULAR":
            raise Exception("Orientation not recognized.")
        else:
            self.orientation = orientation
        self.lineColor = lineColor
        self.platformColor = platformColor

        self.processVideo = ProcessVideo()
        P, I, D = self.GetSettings(settingsPath)
        self.pid = PIDController(360, 640, P, I, D)
        self.moveTime = 0.2
        self.waitTime = 0.1
Example #10
class MultiCenterTestDirective(AbstractDroneDirective):

    # sets up this directive
    def __init__(self, color):
        
        self.color = color
        self.processVideo = ProcessVideo()

        
    # given the image and navdata of the drone, returns the following in order:
    #
    # A directive status int:
    #   1 as long as the drone is idle
    #
    # A tuple of (xspeed, yspeed, yawspeed, zspeed):
    #   indicating the next instructions to fly the drone
    #
    # An image reflecting what is being done as part of the algorithm
    def RetrieveNextInstruction(self, image, navdata):

        image = self.processVideo.RemoveNoise(navdata["segImage"])
        alt = int(navdata["SVCLAltitude"][1])
        rospy.logwarn("Curr Altitude = " + str(alt))
        if alt == -1:
            rospy.logwarn("**************************************************")
            rospy.logwarn("asdfasdf")

        
        cx, cy = navdata["center"][1][0], navdata["center"][1][1]

        if cx != None and cy != None:
            cv2.circle(image, (cx,cy), 6, (255,255,255), -1)

            
        return 1, (0, 0, 0, 0), image, (None, None), 0, 0, None
Example #11
class FaceDirective(AbstractDroneDirective):

    # sets up this directive; face detection needs no parameters
    def __init__(self):

        #self.Kp,self.Ki,self.Kd = 0.11,0.0,0.0004
        #self.Kp,self.Ki,self.Kd = 0.1,20.0,0.0005 #best
        self.processVideo = ProcessVideo()

    # given the image and navdata of the drone, returns the following in order:
    #
    # A directive status int:
    #   0 if algorithm is still running and drone isn't on orange yet
    #   1 if algorithm is finished and drone is now on orange
    #
    # A tuple of (xspeed, yspeed, yawspeed, zspeed):
    #   indicating the next instructions to fly the drone
    #
    # An image reflecting what is being done as part of the algorithm
    def RetrieveNextInstruction(self, image, navdata):

        distance, center = self.processVideo.DetectFaces(image)
        if distance != None:
            rospy.logwarn("Distance to face: " + str(distance / 1000.0))
        directiveStatus = 0
        return directiveStatus, (0, 0, 0, 0), image, None, 0, 0, None

    # This method is called by the state machine when it considers this directive finished
    def Finished(self):
        self.Reset()

    def Reset(self):
        pass
Example #12
    def __init__(self, platformColor, settingsPath):

        self.platformColor = platformColor
        self.processVideo = ProcessVideo()
        P, I, D = self.GetSettings(settingsPath)
        self.pid = PIDController(360, 640, Kp=P, Ki=I, Kd=D)

        self.moveTime = 0.2
        self.waitTime = 0.1
Example #13
    def __init__(self, image):

        self.pid = PIDController()
        self.process = ProcessVideo()

    #take input as bgr image, given orange is visible, will command to hover on top of it
    def HoverOnCheckpoint(self, image):

        # calculating cx,cy,xspeed,yspeed
        orange_image = self.process.DetectColor(image, 'orange')
        self.cx, self.cy = self.process.CenterofMass(orange_image)
        self.pid.UpdateDeltaTime()
        self.pid.SetPoint(orange_image)
        self.pid.UpdateError(self.cx, self.cy)
        self.pid.SetPIDTerm()
        xspeed, yspeed = self.pid.GetPIDValues()
        self.pid.DrawArrow(orange_image, xspeed, yspeed)

        return xspeed, yspeed

    def OrienttoLine(self, image):

        orange_image = self.process.DetectColor(image, 'orange')
        self.cx, self.cy = self.process.CenterofMass(orange_image)
        blue_image = self.process.DetectColor(image, 'blue')
        angle = self.process.ShowLine(blue_image)
        yawspeed = self.process.LineOrientation(blue_image, angle, 5)

        return yawspeed

    def OrienttoObject(self, image):

        orange_image = self.process.DetectColor(image, 'orange')
        self.cx, self.cy = self.process.CenterofMass(orange_image)
        green_image = self.process.DetectColor(image, 'green')
        angle = self.process.ShowLine(green_image)
        yawspeed = self.process.ObjectOrientation(green_image, angle, 5)

        return yawspeed
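A minimal sketch of how the helpers above might be combined per frame, assuming an instance of the class (flight), a BGR camera frame, and a send_command velocity callback; all three names are assumptions for illustration.

# Hypothetical per-frame usage (flight is an instance of the class above; frame is a BGR
# image from the drone camera; send_command is an assumed velocity sink).
def step(flight, frame, send_command):
    xspeed, yspeed = flight.HoverOnCheckpoint(frame)   # center over the orange checkpoint
    yawspeed = flight.OrienttoLine(frame)              # align with the blue line
    send_command(xspeed, yspeed, yawspeed, 0)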
Example #14
    def __init__(self, platformColor):

        self.platformColor = platformColor
        self.processVideo = ProcessVideo()
        P, I, D = 0.0054, 0.006352, 0.0011475
        #P,I,D = 0.00405, 0.00285, 0
        self.pid = PIDController(Kp=P, Ki=I, Kd=D)
        self.moveTime = 0.2
        self.waitTime = 0
        self.pub_pid_xspeed = rospy.Publisher('pid_xspeed', Float32, queue_size=10)
        self.pub_pid_yspeed = rospy.Publisher('pid_yspeed', Float32, queue_size=10)
        self.pub_pid_in_alt = rospy.Publisher('pid_in_alt', Int32, queue_size=10)
        rate = rospy.Rate(5)
Example #15
    def __init__(self,
                 poseTracker,
                 target,
                 yaw,
                 platformNumber,
                 waitDist=0.13):

        #self.Kp,self.Ki,self.Kd = 0.1,20.0,0.0005 #best
        #self.Kp,self.Ki,self.Kd = 0.2,0.0,0.0005
        #self.Kp,self.Ki,self.Kd = 0.21,0.0,0.0006
        self.Kp, self.Ki, self.Kd = 0.17, 10, 0.0005
        self.Kpz = 0.1
        self.KpYaw, self.KiYaw, self.KdYaw = (2 / 90.), 0, 0
        self.targetYaw = -yaw
        self.moveTime = 0.2
        self.waitTime = 0.0
        self.tracker = poseTracker
        self.target = target
        self.target[2] = target[2] - self.tracker.translation[2]
        self.waitDist = waitDist
        self.worldTarget = self.tracker.body2World(target)[:, 0]
        self.processVideo = ProcessVideo()
        self.platformNumber = platformNumber
        self.centery = 360 / 2.0
        self.centerx = 640 / 2.0
        self.pub = rospy.Publisher('ardrone/tracker', tracker)
        self.track = tracker()
        self.platform = [0, 0, 0]
        self.filterSize = 30
        self.platformBuff = np.repeat(np.asarray([self.worldTarget]).T,
                                      self.filterSize,
                                      axis=1)
        self.heightBuff = np.zeros(4)
        self.yawBuff = np.zeros(4)
        self.worldPoint = np.asarray([[0, 0, 0]]).T
        self.lastLocation = self.tracker.translation
        #the amount of weight we would like to put towards correcting the drone's drift by recognizing landmarks
        self.correctionRatio = 0.9999
Example #16
    def __init__(self,
                 orientation,
                 lineColor,
                 platformColor,
                 hoverAltitude,
                 heightTolerance=50,
                 yOffset=0,
                 ySizeOffset=0):

        if orientation != "PARALLEL" and orientation != "PERPENDICULAR":
            raise Exception("Orientation not recognized.")
        else:
            self.orientation = orientation

        self.lineColor = lineColor
        self.platformColor = platformColor
        self.hoverAltitude = hoverAltitude
        self.processVideo = ProcessVideo()
        self.prevCenter = None
        self.forceCenter = None
        self.prevAngle = None
        self.heightTolerance = heightTolerance
        self.yOffset = yOffset
        self.ySizeOffset = ySizeOffset
Example #17
    def __init__(self, platformColor, tolerance):

        self.platformColor = platformColor
        self.tolerance = tolerance
        self.processVideo = ProcessVideo()
Example #18
    def __init__(self):

        #self.Kp,self.Ki,self.Kd = 0.11,0.0,0.0004
        #self.Kp,self.Ki,self.Kd = 0.1,20.0,0.0005 #best
        self.processVideo = ProcessVideo()
Example #19
    def __init__(self, color):

        self.color = color
        self.processVideo = ProcessVideo()
Example #20
class FollowLineDirective(AbstractDroneDirective):

    # sets up this directive
    # lineColor: color of the line to follow
    def __init__(self, lineColor, speed=0.4):

        self.lineColor = lineColor
        self.speed = speed
        self.processVideo = ProcessVideo()
        self.moveTime = 0.45
        self.waitTime = 0.1
        self.prevAngle = None
        self.prevAngleCount = 0
        self.prevAngleCountMax = 185

    # Given the image and navdata of the drone, returns the following in order:
    #
    # A directive status int:
    #   0 if algorithm is still running and drone is still following line
    #   1 if algorithm is finished and has finished following line
    #
    # A tuple of (xspeed, yspeed, yawspeed, zspeed):
    #   indicating the next instructions to fly the drone
    #
    # An image reflecting what is being done as part of the algorithm
    def RetrieveNextInstruction(self, image, navdata):

        segLineImage = self.processVideo.DetectColor(image, self.lineColor)
        platforms = navdata["allCenters"][1]
        lines, image = self.processVideo.MultiShowLine(segLineImage)

        if lines[0] != None:
            cv2.circle(image, lines[0][1], 15, (0, 0, 255), -1)
        if lines[1] != None:
            cv2.circle(image, lines[1][1], 15, (0, 255, 0), -1)
        if lines[2] != None:
            cv2.circle(image, lines[2][1], 15, (255, 0, 0), -1)

        linesVisible = (lines[0] != None) + (lines[1] != None) + (lines[2] !=
                                                                  None)

        if linesVisible == 0:
            rospy.logwarn(" *** ERROR: Lost " + self.lineColor + " line *** ")
            return -1, (0, 0, 0, 0), segLineImage, (None, None), 0, 0, None

        cx = lines[1][1][0]
        cy = lines[1][1][1]
        _, yspeed, _ = self.processVideo.ApproximateSpeed(
            segLineImage,
            cx,
            cy,
            None,
            None,
            navdata["SVCLAltitude"][1],
            0,
            xtolerance=80,
            ytolerance=95)

        newCenter = None
        thresh = 15

        # Checking if angle is consistent with before
        if (self.prevAngle == None
                or abs(self.prevAngle - lines[1][0]) < thresh
                or abs(self.prevAngle - lines[1][0]) > (180 - thresh)):

            self.prevAngle = lines[1][0]
            self.prevAngleCount = 0
            directiveStatus = 0

            # checking finish conditions
            # alternate way to finish
            xWindowSize = 200
            yWindowSize = 120
            xLower = 320 - xWindowSize
            yLower = 180 - yWindowSize
            xUpper = 320 + xWindowSize
            yUpper = 180 + yWindowSize

            foundRightPlatform = False
            tolerance = 15
            for platform in platforms:
                if (linesVisible > 1 and lines[1] != None
                        and platform[0] > lines[1][1][0]
                        and min(abs(lines[1][0] - 180),
                                180 - abs(lines[1][0] - 180)) < 16
                        and platform[0] < xUpper and platform[0] > xLower
                        and platform[1] < yUpper and platform[1] > yLower):
                    cv2.rectangle(image, (xLower, yLower), (xUpper, yUpper),
                                  (255, 255, 255), 4)
                    foundRightPlatform = True
                    newCenter = platform

            # in order to be considered "finished", there must be 2 lines,
            # one which is horizontal and one that is less than 90 degrees.
            # The horizontal line must be far enough left.
            if (foundRightPlatform or
                (len(platforms) == 1 and yspeed == 0 and lines[1] != None
                 and lines[2] != None and
                 ((lines[1][0] < (0 + tolerance)) or (lines[1][0]) >
                  (180 - tolerance)) and lines[2][1][0] < int(640 * 0.9))):

                xspeed = 0
                yspeed = 0
                yawspeed = 0
                directiveStatus = 1
                rospy.logwarn("Finished following line")
                return directiveStatus, (xspeed, yspeed, yawspeed, 0), image, (
                    (cx, cy),
                    self.prevAngle), self.moveTime, self.waitTime, newCenter

        else:

            # only uses the angle if it is similar to the last one. If it is too different, algorithm
            # uses the previous angle for a time until the timer is up, as a buffer against a random large angle change

            rospy.logwarn(
                "Large sudden change in angle -- using old angle of " +
                str(self.prevAngle) + " instead of " + str(lines[1][0]))
            directiveStatus = -1

        xspeed = -self.speed

        # converting
        line1Angle = self.prevAngle
        if line1Angle == 90:
            line1Angle = 0
        elif line1Angle < 90:
            line1Angle = line1Angle + 90
        else:
            line1Angle = line1Angle - 90
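        # Worked example of the conversion above: a detected angle of 80 degrees maps to 170,
        # 100 maps to 10, and an exactly vertical 90-degree line maps to 0 before being passed
        # to LineOrientation below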

        yawspeed = self.processVideo.LineOrientation(segLineImage,
                                                     line1Angle,
                                                     8,
                                                     yawspeed=0.45)

        # If the drone is still trying to follow the line, it adapts to one of three algorithms:

        # Drone will just go back near the center if the drone is not "near" the
        # center as defined by a bounding box.
        # No turning or horizontal movement is applied.
        if yspeed != 0:
            rospy.logwarn("Moving blue line back to center")
            self.moveTime = 0.2
            self.waitTime = 0.1
            xspeed = 0
            yawspeed = 0

        # If drone is near the center but angle is off, it fixes the angle.
        # Drone does not move forward.
        elif yawspeed != 0:

            yawspeed = -yawspeed

            direction = "LEFT"
            if yawspeed < 0:
                direction = "RIGHT"

            rospy.logwarn("Turning the drone horizontal " + direction +
                          ",  yaw = " + str(yawspeed))
            self.moveTime = 1
            self.waitTime = 0.1
            xspeed = 0

        else:
            rospy.logwarn("Drone just going forward")
            self.moveTime = 0.9
            self.waitTime = 0.1

        return directiveStatus, (xspeed, yspeed, yawspeed, 0), image, ((
            cx, cy), self.prevAngle), self.moveTime, self.waitTime, newCenter

    def Finished(self):
        self.prevAngle = None
        self.prevAngleCount = 0
        return None
Example #21
class OrientLineDirective(AbstractDroneDirective):

    # orientation:
    #   > either "PARALLEL" or "PERPENDICULAR";
    #     algorithm will orient drone parallel or perpendicular to the line respectively
    # lineColor:
    #   > color of the line to orient to
    # platformColor:
    #   > color of the platform to orient to
    # hoverAltitude:
    #   > how high to hover over the platform
    def __init__(self,
                 orientation,
                 lineColor,
                 platformColor,
                 hoverAltitude,
                 heightTolerance=50,
                 yOffset=0,
                 ySizeOffset=0):

        if orientation != "PARALLEL" and orientation != "PERPENDICULAR":
            raise Exception("Orientation not recognized.")
        else:
            self.orientation = orientation

        self.lineColor = lineColor
        self.platformColor = platformColor
        self.hoverAltitude = hoverAltitude
        self.processVideo = ProcessVideo()
        self.prevCenter = None
        self.forceCenter = None
        self.prevAngle = None
        self.heightTolerance = heightTolerance
        self.yOffset = yOffset
        self.ySizeOffset = ySizeOffset

    # Given the image and navdata of the drone, returns the following in order:
    #
    # A directive status int:
    #   0 if algorithm is still running and drone isn't oriented yet
    #   1 if algorithm is finished and drone is now oriented
    #
    # A tuple of (xspeed, yspeed, yawspeed, zspeed):
    #   indicating the next instructions to fly the drone
    #
    # An image reflecting what is being done as part of the algorithm
    def RetrieveNextInstruction(self, image, navdata):

        self.moveTime = 0.17
        #self.moveTime=0.23
        self.waitTime = 0.1

        segLineImage = self.processVideo.DetectColor(image, self.lineColor)

        cx, cy = navdata["center"][1][0], navdata["center"][1][1]

        if cx != None and cy != None:
            cv2.circle(segLineImage, (cx, cy), 6, (255, 255, 255), -1)

        centers = navdata["allCenters"][1]

        if self.forceCenter != None:
            self.forceCenter = None

        # when directive first starts, it latches onto the first correct orange platform it sees
        if self.prevCenter == None:

            rospy.logwarn("FINDING INITAL CENTER---")

            if cx != None and cy != None:
                self.prevCenter = (cx, cy)

            # pick the rightmost center
            rightmostCenter = centers[0]
            if self.orientation == "PARALLEL":
                for i in range(len(centers)):
                    if centers[i][0] > rightmostCenter[0]:
                        rightmostCenter = centers[i]
                self.forceCenter = rightmostCenter
            else:
                # pick the center that is closest to the most vertical pink line
                # finding most vertical line
                objectLineImg = self.processVideo.DetectColor(image, "pink")
                objectLines, objectLineImg = self.processVideo.MultiShowLine(
                    objectLineImg, sort=False)
                mostVertical = None
                rospy.logwarn(" All pink lines: ")
                for line in objectLines:
                    if line != None:
                        rospy.logwarn("@: " + str(line[1]) + ", " +
                                      str(line[0]) + " degrees")
                        if mostVertical == None or (
                            (abs(90 - line[0]) < abs(90 - mostVertical[0]))
                                and line[4] > 30):
                            #if ( mostHorizontal == None or
                            #( min(mostHorizontal[0], 180 - mostHorizontal[0] ) > (min(line[0], 180 - line[0] ) )
                            #and line[4] > 30 ) ):
                            #mostHorizontal = line
                            mostVertical = line

                rospy.logwarn("Found most vertical pink line @: " +
                              str(mostVertical[1]) + ", with angle " +
                              str(mostVertical[0]))
                """
                # finding center closest to the left endpoint of that vertical line
                if mostHorizontal[2][0] < mostHorizontal[3][0]:
                    leftEndpoint = mostHorizontal[2]
                else:
                    leftEndpoint = mostHorizontal[3]
                """

                correctCenter = centers[0]

                rospy.logwarn("All centers: ")
                for i in range(len(centers)):
                    """
                    correctEndpointDist = math.sqrt( math.pow((correctCenter[1] - leftEndpoint[1]),2) 
                    + math.pow((correctCenter[0] - leftEndpoint[0]),2 ) ) 

                    currEndpointDist = math.sqrt( math.pow((centers[i][1] - leftEndpoint[1]),2) 
                    + math.pow((centers[i][0] - leftEndpoint[0]),2 ) ) 
                    
                    if currEndpointDist < correctEndpointDist:
                        correctCenter = centers[i]
                    """
                    rospy.logwarn("@: " + str(centers[i]))
                    if abs(mostVertical[1][0] -
                           centers[i][0]) < abs(mostVertical[1][0] -
                                                correctCenter[0]):
                        correctCenter = centers[i]

                self.forceCenter = correctCenter
                rospy.logwarn("Closest center to vertical pink line is @: " +
                              str(correctCenter))

        elif cx != None and cy != None:

            # checking if curr center is consistent with previous one
            centerDist = math.sqrt(
                math.pow((self.prevCenter[1] - cy), 2) +
                math.pow((self.prevCenter[0] - cx), 2))
            if centerDist > 225:
                rospy.logwarn("ERROR: ORIGINAL CENTER LOST, showing all " +
                              str(len(centers)))
                for i in range(len(centers)):
                    cv2.circle(segLineImage, centers[i], 6, (255, 0, 0), -1)
                if cx != None and cy != None:
                    cv2.circle(segLineImage, (cx, cy), 10, (255, 255, 255), -1)

                cx = self.prevCenter[0]
                cy = self.prevCenter[1]
                cv2.circle(segLineImage, (cx, cy), 10, (0, 0, 255), 4)
                directiveStatus = -1
                return directiveStatus, (0, 0, 0,
                                         0), segLineImage, (cx, cy), 0, 0, None
            else:
                self.prevCenter = (cx, cy)

        if self.orientation == "PARALLEL":
            lines, segLineImage = self.processVideo.MultiShowLine(segLineImage,
                                                                  sort=False)

            # pick the pink line closest to the hover platform
            angle = None
            closest = None
            closestDist = None
            for line in lines:
                if cx != None:

                    dist = math.sqrt(
                        math.pow((line[1][1] - cy), 2) +
                        math.pow((line[1][0] - cx), 2))

                    if (line != None and line[4] > 30
                            and (closest == None or (dist < closestDist))):

                        closest = line
                        angle = closest[0]
                        closestDist = dist

            if closest != None:
                cv2.circle(segLineImage, closest[1], 15, (0, 255, 0), -1)
                for line in lines:
                    if line != None and line[1] != closest[1]:
                        cv2.circle(segLineImage, line[1], 15, (0, 0, 255), -1)

            #converting angle
            if angle != None:

                # checking if previous angle is consistent with current one
                if self.prevAngle == None or min(
                        abs(self.prevAngle - angle),
                        180 - abs(self.prevAngle - angle)) < 17:
                    self.prevAngle = angle
                else:
                    rospy.logwarn(
                        "ERROR: ORIGINAL CENTER LOST; angle mismatch. Before: "
                        + str(self.prevAngle) + " Now: " + str(angle))
                    directiveStatus = -1
                    return directiveStatus, (0, 0, 0, 0), segLineImage, ((
                        cx, cy), self.prevAngle), 0, 0, None

                if angle == 90:
                    angle = 0
                elif angle < 90:
                    angle = angle + 90
                else:
                    angle = angle - 90

            yawspeed = self.processVideo.ObjectOrientation(segLineImage,
                                                           angle,
                                                           4,
                                                           yawspeed=0.50)
            #.42
            if yawspeed != None:
                yawspeed = -1 * yawspeed
            xWindowSize = 60
            yWindowSize = 105 + self.ySizeOffset
            xWindowOffset = 0
            yWindowOffset = self.yOffset
            altLowerTolerance = self.heightTolerance
            altUpperTolerance = self.heightTolerance - 15
            # defines window to make the drone focus on moving away from the edges and back into
            # the center; yaw will be turned off
            xReturnSize = xWindowSize + 210
            yReturnSize = yWindowSize + 110

        elif self.orientation == "PERPENDICULAR":

            kernel = cv2.getStructuringElement(cv2.MORPH_RECT, (2, 2))
            segLineImage = cv2.morphologyEx(segLineImage, cv2.MORPH_OPEN,
                                            kernel)
            lines, segLineImage = self.processVideo.MultiShowLine(segLineImage,
                                                                  sort=False)

            # pick the blue line closest to the hover platform, AND is right of the hover platform
            angle = None
            closest = None
            for line in lines:
                if (line != None and cx != None and line[1][0] >= cx and
                    (closest == None
                     or abs(cx - line[1][0]) < abs(cx - closest[1][0]))):
                    closest = line
                    angle = closest[0]

            if closest != None:
                cv2.circle(segLineImage, closest[1], 15, (0, 255, 0), -1)
                for line in lines:
                    if line != None and line[1] != closest[1]:
                        cv2.circle(segLineImage, line[1], 15, (0, 0, 255), -1)

            #converting angle
            if angle != None:
                # checking if previous angle is consistent with current one
                if self.prevAngle == None or min(
                        abs(self.prevAngle - angle),
                        180 - abs(self.prevAngle - angle)) < 27:
                    self.prevAngle = angle
                else:
                    rospy.logwarn(
                        "ERROR: ORIGINAL CENTER LOST; angle mismatch. Before: "
                        + str(self.prevAngle) + " Now: " + str(angle))
                    directiveStatus = -1
                    return directiveStatus, (0, 0, 0, 0), segLineImage, ((
                        cx, cy), self.prevAngle), 0, 0, None

                if angle == 90:
                    angle = 0
                elif angle < 90:
                    angle = angle + 90
                else:
                    angle = angle - 90

            yawspeed = self.processVideo.LineOrientation(segLineImage,
                                                         angle,
                                                         9,
                                                         yawspeed=0.50)
            if yawspeed != None:
                yawspeed = -1 * yawspeed
            xWindowSize = 295
            yWindowSize = 70
            xWindowOffset = 0
            yWindowOffset = 0
            altLowerTolerance = 200
            altUpperTolerance = 250
            # defines window to make the drone focus on moving away from the edges and back into
            # the center; yaw will be turned off
            xReturnSize = xWindowSize
            yReturnSize = yWindowSize

        numRows, numCols, _ = image.shape
        centerx = numCols / 2 - xWindowOffset
        centery = numRows / 2 - yWindowOffset

        xspeed, yspeed, zspeed = self.processVideo.ApproximateSpeed(
            segLineImage,
            cx,
            cy,
            centerx,
            centery,
            navdata["SVCLAltitude"][1],
            self.hoverAltitude,
            xtolerance=xWindowSize,
            ytolerance=yWindowSize,
            ztolerance=(altLowerTolerance, altUpperTolerance),
            xOffset=xWindowOffset,
            yOffset=yWindowOffset)

        # box defines when the directive is finished
        xLower = (numCols / 2) - xReturnSize
        yLower = (numRows / 2) - yReturnSize
        xUpper = (numCols / 2) + xReturnSize
        yUpper = (numRows / 2) + yReturnSize

        # perpendicular can disregard height
        #if self.orientation == "PERPENDICULAR":
        #    zspeed = 0

        if (yawspeed == 0 and xspeed == 0 and yspeed == 0 and zspeed == 0
                and cx != None and cy != None):

            rospy.logwarn("Oriented " + self.orientation + " to " +
                          self.lineColor + " line")
            directiveStatus = 1

        elif cx == None or cy == None:

            rospy.logwarn("*** ERROR: Lost " + self.platformColor +
                          " platform ***")
            directiveStatus = -1

        else:

            # If drone is still trying to align, it adapts to one of three algorithms:

            # Drone will just go back near the center if: 1) no line is detected, or 2)
            # the drone is not "near" the center as defined by a bounding box
            # No turning or altitude change applied
            if yawspeed == None or (cx > xUpper or cx < xLower or cy > yUpper
                                    or cy < yLower):
                cv2.rectangle(segLineImage, (xLower, yLower), (xUpper, yUpper),
                              (0, 0, 255), 2)
                rospy.logwarn("Too far out; only MOVING drone back to center")
                yawspeed = 0
                zspeed = zspeed * 0.2

            # if drone isn't perpendicular yet and is "near" the center (defined by a box),
            # just turn the drone; no need to move the drone
            elif yawspeed != 0:
                rospy.logwarn("Only TURNING drone. Yaw speed = " +
                              str(yawspeed))
                self.moveTime = 3.5
                xspeed = 0
                yspeed = 0
                zspeed = zspeed * 0.45

            # if the drone is aligned to the line and is near the center,
            # keep moving it to the center and adjusting the height until the
            # directive is finished
            else:
                rospy.logwarn("Curr Altitude = " +
                              str(int(navdata["SVCLAltitude"][1])) +
                              " mm; Goal = [ " +
                              str(self.hoverAltitude - altLowerTolerance) +
                              " mm, " +
                              str(self.hoverAltitude + altUpperTolerance) +
                              " mm ].")

            directiveStatus = 0

        return directiveStatus, (
            xspeed, yspeed, yawspeed, zspeed), segLineImage, (
                (cx, cy),
                self.prevAngle), self.moveTime, self.waitTime, self.forceCenter

    def Finished(self):
        center = self.prevCenter
        self.prevAngle = None
        self.prevCenter = None
        self.forceCenter = None
        return center

    def OnErrorReturn(self, returnData):
        # set previous center to what was found in the error algorithm
        rospy.logwarn("ORIENT LINE ON ERROR RETURN***")
        self.prevCenter = returnData
        self.prevAngle = None
Example #22
class PIDYawDirective(AbstractDroneDirective):

    # sets up this directive; the drone PID-controls itself toward the given target pose and yaw
    def __init__(self, poseTracker, target, yaw, platformNumber, waitDist=0.1):

        #self.Kp,self.Ki,self.Kd = 0.1,20.0,0.0005 #best
        self.Kp, self.Ki, self.Kd = 0.2, 0.0, 0.0005
        self.moveTime = 0.2
        self.waitTime = 0.0
        self.tracker = poseTracker
        self.target = target
        self.waitDist = waitDist
        self.worldTarget = self.tracker.body2World(target)[:, 0]
        self.processVideo = ProcessVideo()
        self.platformNumber = platformNumber
        self.centery = 360 / 2.0
        self.centerx = 640 / 2.0
        self.pub = rospy.Publisher('ardrone/tracker', tracker)
        self.track = tracker()
        self.platform = [0, 0, 0]
        self.filterSize = 50
        self.buff = np.repeat(np.asarray([self.worldTarget]).T,
                              self.filterSize,
                              axis=1)
        self.KpYaw, self.KiYaw, self.KdYaw = (1 / 180.0) * 0.8, 0, 0
        self.targetYaw = (yaw + 360) % 360

        self.worldPoint = np.asarray([[0, 0, 0]]).T
        #the amount of weight we would like to put towards correcting the drone's drift by recognizing landmarks
        self.correctionRatio = 0.9999

    def distance(self, x, y):
        dist = (x[0] - y[0])**2 + (x[1] - y[1])**2
        dist = dist**(0.5)
        return dist

    def weightedUpdate(self, prediction, updateTerm):
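        # With correctionRatio = 0.9999, x and y below keep ~99.99% of the landmark-based
        # updateTerm and only ~0.01% of the prediction; z comes straight from updateTerm,
        # and the final element is a constant 1.0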
        return (self.correctionRatio * updateTerm[0, 0] +
                (1 - self.correctionRatio) * prediction[0, 0],
                self.correctionRatio * updateTerm[1, 0] +
                (1 - self.correctionRatio) * prediction[1, 0],
                updateTerm[2, 0], 1.0)

    # given the image and navdata of the drone, returns the following in order:
    #
    # A directive status int:
    #   0 if algorithm is still running and drone isn't on orange yet
    #   1 if algorithm is finished and drone is now on orange
    #
    # A tuple of (xspeed, yspeed, yawspeed, zspeed):
    #   indicating the next instructions to fly the drone
    #
    # An image reflecting what is being done as part of the algorithm
    def RetrieveNextInstruction(self, image, navdata):

        segImage, radius, center = self.processVideo.RecognizeShape(
            image, 'orange', (None, None))
        blue = self.processVideo.detectColor(image, 'blue', 'segmented')
        lines, blueImg = self.processVideo.MultiShowLine(blue)
        bestTheta = None
        minDist = -1
        for line in lines:
            theta = line[0]
            theta = -theta
            tapeCenter = line[1]
            dist = self.distance(center, tapeCenter)
            if minDist == -1 or dist < minDist:
                minDist = dist
                bestTheta = theta
        if bestTheta != None:
            self.currentYaw = bestTheta
        else:
            self.currentYaw = 0

        #Calculate closest rotation to get to target angle
        theta = ((0 - self.currentYaw) % 360 + 360) % 360
        theta = (theta - 360) if (theta > 180) else theta
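        # Worked example of the shortest-rotation step above: currentYaw = 350 gives
        # theta = ((0 - 350) % 360 + 360) % 360 = 10 (turn +10 degrees), while
        # currentYaw = 10 gives theta = 350, which is then wrapped to -10, the shorter turn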
        loc = (0, 0, 0, 0)
        #circle detection
        #rospy.logwarn("x: "+str(self.tracker.translation[0])+" y: "+str(self.tracker.translation[1]))
        if radius != None:
            predictedZ = self.processVideo.CalcDistanceNew(88.0,
                                                           radius * 2) / 1000.0
            scale = (88.0 / (radius * 2)) / 1000.0  #meters/pixel
            x = (center[0] - self.centerx) * scale
            y = (self.centery - center[1]) * scale
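            # Presumably 88.0 is the known real-world diameter of the circle in mm: dividing
            # it by the pixel diameter gives a mm/pixel scale, /1000 converts to meters, so
            # (x, y) is the circle's offset from the image center in meters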

            tape = self.tracker.camera2Body([x, y, -predictedZ])
            worldPoint = self.tracker.camera2World([x, y, -predictedZ])
            self.worldPoint = worldPoint
            if (self.distance(worldPoint, self.worldTarget) < 0.35):

                for i in range(self.filterSize - 1):
                    self.buff[:, i] = self.buff[:, i + 1]
                self.buff[:, self.filterSize - 1] = np.asarray(
                    [worldPoint[0, 0], worldPoint[1, 0], worldPoint[2, 0]])
                self.worldTarget = np.mean(self.buff, 1)
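                # the loop above shifts the buffer one slot left and appends the newest world
                # point, so worldTarget is a running mean over the last filterSize samples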
            '''
            if self.tapeLocation != None:
                dist = self.distance(worldPoint,self.tapeLocation)
                if dist < 0.35 and dist > 0.15:
                    loc = self.tracker.tape2World([x,y,-predictedZ],self.yaw,[self.tapeLocation[0],self.tapeLocation[1],0])
                    loc = self.weightedUpdate(worldPoint,loc)
                    rospy.logwarn("Fixing location to ..."+str(loc))
            '''
            self.track.landMark = (self.worldTarget[0], self.worldTarget[1],
                                   0.0, 1.0)
        else:
            self.track.landMark = (0, 0, 0, 0.0)

        #rospy.logwarn("world target: " + str(self.worldTarget))
        self.track.landMark = (self.worldTarget[0], self.worldTarget[1], 0.0,
                               1.0)
        self.track.loc = loc
        self.pub.publish(self.track)
        self.currentTarget = self.tracker.world2Body(self.worldTarget)
        self.currentTime = time.time()

        if self.lastTime == 0:
            self.rollError = 0
            self.pitchError = 0
            self.yawError = 0
        else:
            self.rollError = self.currentTarget[0]
            self.pitchError = self.currentTarget[1]
            self.yawError = theta
        self.dt = (self.currentTime - self.lastTime) / 1000.

        self.totalError = [
            self.totalError[0] + self.rollError * self.dt,
            self.totalError[1] + self.pitchError * self.dt,
            self.totalError[2] + self.yawError * self.dt, 0
        ]

        pRoll = -self.Kp * (self.rollError)
        iRoll = -self.Ki * (self.totalError[0])
        dRoll = -self.Kd * ((self.rollError - self.lastError[0]) / self.dt)

        pPitch = self.Kp * (self.pitchError)
        iPitch = self.Ki * (self.totalError[1])
        dPitch = self.Kd * ((self.pitchError - self.lastError[1]) / self.dt)

        pYaw = self.KpYaw * (self.yawError)
        iYaw = self.KiYaw * (self.totalError[2])
        dYaw = self.KdYaw * ((self.yawError - self.lastYawError) / self.dt)
        yaw = pYaw + iYaw + dYaw

        self.lastError = self.currentTarget
        self.lastYawError = self.yawError

        self.lastTime = self.currentTime

        roll = pRoll + iRoll + dRoll
        pitch = pPitch + iPitch + dPitch

        if (abs(self.rollError) <= self.waitDist
                and abs(self.pitchError) <= self.waitDist
                and abs(self.yawError) < 2):
            directiveStatus = 1
            rospy.logwarn(self.yawError)
        else:
            directiveStatus = 0
        #Trim commands over the drone's command limit
        roll = 1 if roll > 1 else roll
        roll = -1 if roll < -1 else roll
        pitch = 1 if pitch > 1 else pitch
        pitch = -1 if pitch < -1 else pitch

        #rospy.logwarn("roll: "+str(self.tracker.roll))
        #rospy.logwarn("pitch: "+str(self.tracker.pitch))
        rospy.logwarn(directiveStatus)
        return directiveStatus, (
            roll, pitch, 0,
            0), segImage, None, self.moveTime, self.waitTime, None

    # This method is called by the state machine when it considers this directive finished
    def Finished(self):
        self.Reset()
        #tapeLocation = self.tracker.body2World(self.target)[:,0]
        #loc = self.tracker.tape2World([x,y,-predictedZ],self.yaw,[tapeLocation[0],tapeLocation[1],0])
        if (self.platformNumber % 3 == 0):
            loc = np.asarray([self.target]).T
            loc[2] = 1.0
            rospy.logwarn("Reseting location to" + str(loc))
            loc = self.weightedUpdate(self.worldPoint, loc)
            self.track.loc = loc
            self.pub.publish(self.track)

    def Reset(self):
        self.dt = 0
        self.currentTime = time.time()
        self.lastTime = 0
        self.rollError = 0
        self.pitchError = 0
        self.lastError = [0, 0, 0]
        self.lastYawError = 0
        self.totalError = [0, 0, 0, 0]
Example #23
class PIDOrientLineDirective(AbstractDroneDirective):

    # orientation:
    #   > either "PARALLEL" or "PERPENDICULAR";
    #     algorithm will orient drone parallel or perpendicular to the line respectively
    # lineColor:
    #   > color of the line to orient to
    # platformColor:
    #   > color of the platform to orient to
    # settingsPath:
    #   > Path for getting PID settings
    # hoverAltitude:
    #   > how high to hover over the platform
    def __init__(self, orientation, lineColor, platformColor, settingsPath):

        if orientation != "PARALLEL" and orientation != "PERPENDICULAR":
            raise Exception("Orientation not recognized.")
        else:
            self.orientation = orientation
        self.lineColor = lineColor
        self.platformColor = platformColor

        self.processVideo = ProcessVideo()
        P, I, D = self.GetSettings(settingsPath)
        self.pid = PIDController(360, 640, P, I, D)
        self.moveTime = 0.2
        self.waitTime = 0.1

    def GetSettings(self, settingsPath):
        # read a text file as a list of lines
        # find the last line, change to a file you have
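        # e.g. if the last line of the settings file is "0.1 20.0 0.0005", GetSettings
        # returns P = 0.1, I = 20.0, D = 0.0005 (three whitespace-separated floats)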
        fileHandle = open(settingsPath, 'r')
        last = fileHandle.readlines()
        fileHandle.close()

        last = str(last[len(last) - 1]).split()
        p, i, d = [float(x) for x in (last)]

        return p, i, d

    # Given the image and navdata of the drone, returns the following in order:
    #
    # A directive status int:
    #   0 if algorithm is still running and drone isn't oriented yet
    #   1 if algorithm is finished and drone is now oriented
    #
    # A tuple of (xspeed, yspeed, yawspeed, zspeed):
    #   indicating the next instructions to fly the drone
    #
    # An image reflecting what is being done as part of the algorithm
    def RetrieveNextInstruction(self, image, navdata):

        segLineImage = self.processVideo.DetectColor(image, self.lineColor)

        cx, cy = navdata["center"][1][0], navdata["center"][1][1]
        altitude = navdata["SVCLAltitude"][1]

        #draws center of circle on image
        self.processVideo.DrawCircle(segLineImage, (cx, cy))

        self.pid.UpdateDeltaTime()
        self.pid.UpdateError(cx, cy, altitude)
        self.pid.SetPIDTerms()
        xspeed, yspeed = self.pid.GetPIDValues()

        if self.orientation == "PARALLEL":

            angle = self.processVideo.ShowLine(segLineImage,
                                               lowerAngleBound=0,
                                               upperAngleBound=70,
                                               secondBounds=(110, 180),
                                               thresh=35)
            yawspeed = self.processVideo.ObjectOrientation(segLineImage,
                                                           angle,
                                                           5,
                                                           yawspeed=0.5)
            xWindowSize = 50
            yWindowSize = 50

        elif self.orientation == "PERPENDICULAR":

            angle = self.processVideo.ShowLine(segLineImage,
                                               lowerAngleBound=30,
                                               upperAngleBound=125,
                                               thresh=15)
            yawspeed = self.processVideo.LineOrientation(segLineImage,
                                                         angle,
                                                         5,
                                                         yawspeed=0.5)
            xWindowSize = 180
            yWindowSize = 70

        numRows, numCols, _ = image.shape
        centerx = numCols / 2
        centery = numRows / 2

        # box defines when the directive is finished
        xLower = centerx - xWindowSize
        yLower = centery - yWindowSize
        xUpper = centerx + xWindowSize
        yUpper = centery + yWindowSize
        cv2.rectangle(segLineImage, (xLower, yLower), (xUpper, yUpper),
                      (255, 255, 255), 3)

        if (yawspeed == 0 and cx != None and cy != None and cx < xUpper
                and cx > xLower and cy < yUpper and cy > yLower):

            rospy.logwarn("Oriented " + self.orientation + " to " +
                          self.lineColor + " line")
            directiveStatus = 1

        elif cx == None or cy == None:

            rospy.logwarn("*** ERROR: Lost " + self.platformColor +
                          " platform ***")
            directiveStatus = -1

        else:

            # If drone is still trying to align, it adapts to one of three algorithms:

            # if this frame failed to detect a line, go towards platform
            # without turning in hopes that the next frames will detect one again
            if yawspeed == None:
                #rospy.logwarn("No line detected")
                yawspeed = 0

            # if drone is not "near" the center defined by a box, just focus on moving drone back;
            # no turning
            elif self.orientation == "PERPENDICULAR" and (
                    cx > xUpper or cx < xLower or cy > yUpper or cy < yLower):
                cv2.rectangle(segLineImage, (xLower, yLower), (xUpper, yUpper),
                              (0, 0, 255), 3)
                #rospy.logwarn("Only MOVING drone. x speed = " + str(xspeed) + "; y speed = " + str(yspeed))
                rospy.logwarn("Only MOVING drone")
                self.moveTime = 0.1
                self.waitTime = 0.01
                yawspeed = 0

            # if drone isn't perpendicular yet and is "near" the center (defined by a box),
            # just turn the drone; no need to move the drone
            elif yawspeed != 0:
                #rospy.logwarn("Only TURNING drone. yaw speed = " + str(yawspeed))
                rospy.logwarn("Only TURNING drone")
                self.moveTime = 0.2
                self.waitTime = 0.1
                xspeed = 0
                yspeed = 0

            #else:

            #   rospy.logwarn("Only MOVING drone")
            #rospy.logwarn("Only MOVING drone. x speed = " + str(xspeed) + "; y speed = " + str(yspeed))

            directiveStatus = 0

        return directiveStatus, (xspeed, yspeed, yawspeed, 0), segLineImage, (
            cx, cy), self.moveTime, self.waitTime, None

    # This method is called by the state machine when it considers this directive finished
    def Finished(self):
        rospy.logwarn("***** Resetting PID Values *****")
        self.pid.ResetPID()
Example #24
class PIDHoverDirective(AbstractDroneDirective):

    # sets up this directive; the drone PID-controls itself to hover at the given target pose and yaw
    def __init__(self,
                 poseTracker,
                 target,
                 yaw,
                 platformNumber,
                 waitDist=0.13):

        #self.Kp,self.Ki,self.Kd = 0.1,20.0,0.0005 #best
        #self.Kp,self.Ki,self.Kd = 0.2,0.0,0.0005
        #self.Kp,self.Ki,self.Kd = 0.21,0.0,0.0006
        self.Kp, self.Ki, self.Kd = 0.17, 10, 0.0005
        self.Kpz = 0.1
        self.KpYaw, self.KiYaw, self.KdYaw = (2 / 90.), 0, 0
        self.targetYaw = -yaw
        self.moveTime = 0.2
        self.waitTime = 0.0
        self.tracker = poseTracker
        self.target = target
        self.target[2] = target[2] - self.tracker.translation[2]
        self.waitDist = waitDist
        self.worldTarget = self.tracker.body2World(target)[:, 0]
        self.processVideo = ProcessVideo()
        self.platformNumber = platformNumber
        self.centery = 360 / 2.0
        self.centerx = 640 / 2.0
        self.pub = rospy.Publisher('ardrone/tracker', tracker)
        self.track = tracker()
        self.platform = [0, 0, 0]
        self.filterSize = 30
        self.platformBuff = np.repeat(np.asarray([self.worldTarget]).T,
                                      self.filterSize,
                                      axis=1)
        self.heightBuff = np.zeros(4)
        self.yawBuff = np.zeros(4)
        self.worldPoint = np.asarray([[0, 0, 0]]).T
        self.lastLocation = self.tracker.translation
        #the amount of weight we would like to put towards correcting the drone's drift by recognizing landmarks
        self.correctionRatio = 0.9999

    def distance(self, x, y):
        dist = (x[0] - y[0])**2 + (x[1] - y[1])**2
        dist = dist**(0.5)
        return dist

    def weightedUpdate(self, prediction, updateTerm):
        return (self.correctionRatio * updateTerm[0, 0] +
                (1 - self.correctionRatio) * prediction[0, 0],
                self.correctionRatio * updateTerm[1, 0] +
                (1 - self.correctionRatio) * prediction[1, 0],
                updateTerm[2, 0], 1.0)

    # given the image and navdata of the drone, returns the following in order:
    #
    # A directive status int:
    #   0 if algorithm is still running and drone isn't on orange yet
    #   1 if algorithm is finished and drone is now on orange
    #
    # A tuple of (xspeed, yspeed, yawspeed, zspeed):
    #   indicating the next instructions to fly the drone
    #
    # An image reflecting what is being done as part of the algorithm
    def RetrieveNextInstruction(self, image, navdata):
        #algTime = time.time()
        image = cv2.inRange(image, np.array([200, 200, 200]),
                            np.array([255, 255, 255]))
        row, col = image.shape
        img, circles = self.processVideo.detectCircles(image)
        if len(circles) != 0:
            #just use a circle width to filter out bad lines
            circle = circles[0]
            radius = circle[1]
            lineWidth = (2 * radius) * (40.0 / 88.0)
            image, lines = self.processVideo.detectLines(image, int(lineWidth))
            image = cv2.bitwise_or(image, img)

        bestTheta = None
        self.pixCoord = self.tracker.world2Camera(self.worldTarget)
        if len(circles) != 0:
            minDist = -1
            for circle in circles:
                radius = circle[1]
                center = circle[0]
                predictedZ = self.processVideo.CalcDistanceNew(
                    88.0, radius * 2) / 1000.0
                scale = (88.0 / (radius * 2)) / 1000.0  #meters/pixel
                x = (center[0] - self.centerx) * scale
                y = (self.centery - center[1]) * scale
                ex = self.pixCoord[0] / scale + self.centerx
                why = self.centery - self.pixCoord[1] / scale
                rospy.logwarn("circle loc:" + str(ex) + "," + str(why))
                point = self.tracker.camera2World([x, y, -predictedZ])
                #dist = self.distance(point,self.worldTarget)
                dist = self.distance([center[0], center[1]], [ex, why])
                if dist < minDist or minDist == -1:
                    worldPoint = point
                    z = predictedZ
                    Center = center
                    X = center[0]
                    Y = center[1]
                    minDist = dist
            rospy.logwarn("nearest circle choosen: (" + str(X) + "," + str(Y) +
                          ") out of:")
            rospy.logwarn(circles)
            minDist = -1
            for line in lines:
                line = line[0]
                rho = line[0]
                theta = line[1]
                dist = self.processVideo.line2PointDist(rho, theta, Center)
                if minDist == -1 or dist < minDist:
                    minDist = dist
                    bestTheta = theta
        if bestTheta != None:
            self.currentYaw = bestTheta
        else:
            self.currentYaw = 0
        theta = self.currentYaw
        for i in range(len(self.yawBuff) - 1):
            self.yawBuff[i] = self.yawBuff[i + 1]
        self.yawBuff[len(self.yawBuff) - 1] = theta
        theta = np.mean(self.yawBuff)
        #Calculate closest rotation to get to target angle
        #theta = (self.currentYaw+360)%360
        #theta = (theta-360) if (theta >180) else theta

        zError = 0
        loc = (0, 0, 0, 0)

        #circle detection
        #rospy.logwarn("x: "+str(self.tracker.translation[0])+" y: "+str(self.tracker.translation[1]))
        if len(circles) != 0:
            self.worldPoint = worldPoint
            rospy.logwarn(self.worldPoint)
            if (self.distance(worldPoint, self.worldTarget) < 0.35):
                for i in range(self.heightBuff.shape[0] - 1):
                    self.heightBuff[i] = self.heightBuff[i + 1]
                self.heightBuff[self.heightBuff.shape[0] - 1] = z
                height = np.mean(self.heightBuff)
                zError = (self.worldTarget[2] - height)
                for i in range(self.filterSize - 1):
                    self.platformBuff[:, i] = self.platformBuff[:, i + 1]
                self.platformBuff[:, self.filterSize - 1] = np.asarray(
                    [worldPoint[0, 0], worldPoint[1, 0], self.worldTarget[2]])
                self.worldTarget = np.mean(self.platformBuff, 1)
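                # a detection near the current estimate refines worldTarget by averaging the
                # last filterSize platform positions, damping single-frame noise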
            self.track.landMark = (self.worldTarget[0], self.worldTarget[1],
                                   0.0, 1.0)
        else:
            self.track.landMark = (0, 0, 0, 0.0)
            rospy.logwarn("not seen")

        #rospy.logwarn("world target: " + str(self.worldTarget))
        self.track.landMark = (self.worldTarget[0], self.worldTarget[1], 0.0,
                               1.0)
        self.track.loc = loc
        self.track.yaw = (self.targetYaw, 0.0)
        self.pub.publish(self.track)
        self.currentTarget = self.tracker.world2Body(self.worldTarget)
        self.currentTime = time.time()

        if self.lastTime == 0:
            self.rollError = 0
            self.pitchError = 0
            self.zError = 0
            self.yawError = 0
        else:
            self.rollError = self.currentTarget[0]
            self.pitchError = self.currentTarget[1]
            self.zError = zError
            self.yawError = theta

        if (self.tracker.translation[0] == self.lastLocation[0]
                and self.tracker.translation[1] == self.lastLocation[1]
                and self.lastLocation[2] == self.tracker.translation[2]):
            #self.rollError = 0
            #self.pitchError = 0
            rospy.logwarn("Error: State estimation may be crashed")
        self.dt = (self.currentTime - self.lastTime) / 1000.

        self.totalError = [
            self.totalError[0] + self.rollError * self.dt,
            self.totalError[1] + self.pitchError * self.dt,
            self.totalError[2] + self.yawError * self.dt, 0
        ]
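        # totalError holds the integral of roll, pitch, and yaw error (weighted by dt);
        # it feeds the I terms below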

        pRoll = -self.Kp * (self.rollError)
        iRoll = -self.Ki * (self.totalError[0])
        dRoll = -self.Kd * ((self.rollError - self.lastError[0]) / self.dt)

        pPitch = self.Kp * (self.pitchError)
        iPitch = self.Ki * (self.totalError[1])
        dPitch = self.Kd * ((self.pitchError - self.lastError[1]) / self.dt)

        pYaw = self.KpYaw * (self.yawError)
        iYaw = self.KiYaw * (self.totalError[2])
        dYaw = self.KdYaw * ((self.yawError - self.lastYawError) / self.dt)

        self.lastError = self.currentTarget
        self.lastYawError = self.yawError

        pHeight = self.Kpz * (self.zError)

        self.lastTime = self.currentTime

        roll = pRoll + iRoll + dRoll
        pitch = pPitch + iPitch + dPitch
        yaw = pYaw + iYaw + dYaw

        zVel = pHeight
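        # altitude is controlled with a proportional-only term; roll, pitch, and yaw use full PID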

        if (abs(self.rollError) <= self.waitDist
                and abs(self.pitchError) <= self.waitDist
                and abs(self.zError) <= self.waitDist) and abs(
                    self.yawError) <= 6:  # and len(circles) != 0):
            directiveStatus = 1
        else:
            directiveStatus = 0
        # Clamp commands to the drone's [-1, 1] command limits
        roll = max(-1.0, min(1.0, roll))
        pitch = max(-1.0, min(1.0, pitch))
        yaw = max(-1.0, min(1.0, yaw))
        zVel = max(-1.0, min(1.0, zVel))

        #rospy.logwarn("roll: "+str(self.tracker.roll))
        #rospy.logwarn("pitch: "+str(self.tracker.pitch))
        #rospy.logwarn(directiveStatus)
        #rospy.logwarn(self.zError)
        #rospy.logwarn("zError: "+str(self.zError))
        #rospy.logwarn("yaw: " +str(self.currentYaw))
        rospy.logwarn("yawErr: " + str(self.yawError))
        #rospy.logwarn("algTime: "+str(time.time()-algTime))
        if abs(self.rollError) > self.waitDist or abs(
                self.pitchError) > self.waitDist:
            yaw = 0
            #rospy.logwarn("not close enough to adjust yaw")
        return directiveStatus, (
            roll, pitch, yaw,
            zVel), image, None, self.moveTime, self.waitTime, None

    # This method is called by the state machine when it considers this directive finished
    def Finished(self):
        self.Reset()
        #tapeLocation = self.tracker.body2World(self.target)[:,0]
        #loc = self.tracker.tape2World([x,y,-predictedZ],self.yaw,[tapeLocation[0],tapeLocation[1],0])
        #if (self.platformNumber%2==0):
        loc = np.asarray([self.target]).T
        loc[2] = 1.2
        loc = (loc[0], loc[1], loc[2], 1.0)
        rospy.logwarn("Reseting location to" + str(loc))
        #loc = self.weightedUpdate(self.worldPoint,loc)
        self.track.loc = loc
        self.track.yaw = (self.targetYaw, 1.0)
        self.pub.publish(self.track)

    def Reset(self):
        self.dt = 0
        self.currentTime = time.time()
        self.lastTime = 0
        self.rollError = 0
        self.pitchError = 0
        self.lastError = [0, 0, 0]
        self.lastYawError = 0
        self.totalError = [0, 0, 0, 0]
class FlightstatsReceiver(object):
    

    def __init__(self):

        super(FlightstatsReceiver, self).__init__()

        # Subscribe to the navdata topic, and call self.UpdateNavdata whenever
        # updated navdata messages are received
        self.navdataSub = rospy.Subscriber('/ardrone/navdata', Navdata, self.UpdateNavdata)
        self.altitudeSub = rospy.Subscriber('/ardrone/navdata_altitude', navdata_altitude, self.UpdateAltitude)
        #self.video=rospy.Subscriber('/ardrone/image_raw', Image, self.VideoUpdate )

        # dictionary will hold a useful subset of available flight info
        # Key is the variable name; value is (description, value, units, (optional) direction string following units)
        self.defaultValue = "?"
        self.flightInfo = collections.OrderedDict()
        self.flightInfo["batteryPercent"]=["Battery Left: ", self.defaultValue, "%", ""]
        self.flightInfo["state"]=["Status: ", self.defaultValue, "", ""]
        self.flightInfo["altitude"]=["Drone Altitude: ", self.defaultValue, "mm", ""]
        self.flightInfo["altitude_raw"]=["Drone Raw Altitude: ", self.defaultValue, "mm", ""]

        self.flightInfo["SVCLAltitude"] = ["SVCL Altitude: ", -1, "mm", ""]
        self.flightInfo["center"] = ["Inferred Center: ", self.defaultValue, "", ""]
        self.flightInfo["lastCenter"] = ["Previous Center: ", (None,None), "", ""]
        self.flightInfo["lastRecordedCenter"] = ["Last Recorded Algorithm Center: ", (None,None), "", ""]
        self.flightInfo["allCenters"] = ["All Centers: ", self.defaultValue, "", ""]

        self.flightInfo["rotX"]=["Left/Right Tilt: ", self.defaultValue, u'\N{DEGREE SIGN}', ""]
        self.flightInfo["rotY"]=["Front/Back Tilt: ", self.defaultValue, u'\N{DEGREE SIGN}', ""]
        self.flightInfo["rotZ"]=["Rotation Amount: ", self.defaultValue, u'\N{DEGREE SIGN}', ""]

        self.flightInfo["velY"]=["Left/Right Velocity: ", self.defaultValue, "mm/s", ""]
        self.flightInfo["velX"]=["Forwards/Backwards Velocity: ", self.defaultValue, "mm/s", ""]
        self.flightInfo["velZ"]=["Up/Down Velocity: ", self.defaultValue, "mm/s", ""]

        self.flightInfo["accelZ"]=["Up/Down Acceleration: ", self.defaultValue, "mm/s", ""]

        self.flightInfo["dispLR"]=["Left/Right Displacement: ", self.defaultValue, "mm", ""]
        self.flightInfo["dispFB"]=["Forwards/Backwards Displacement: ", self.defaultValue, "mm", ""]
        self.flightInfo["dispUD"]=["Up/Down Displacement: ", self.defaultValue, "mm", ""]

        self.flightInfo["segImage"] = [None]

        self.LRDisplacement = 0.0
        self.FBDisplacement = 0.0
        self.UDDisplacement = 0.0
        self.oldTime = rospy.Time.now()
        self.bridge = CvBridge()
        self.processVideo = ProcessVideo()

        # sometimes the altitude doesn't start at 0; "zero balances" the drone such that where it started is considered 0 altitude
        self.zeroBalanced = False
        self.zeroAltitude = 0

        # counter throttles the lag caused by running VideoUpdate too often
        # computeMax/restMax describe a compute:rest ratio; a higher restMax makes everything
        # run faster, but height/center updates arrive more slowly
        self.computeMax = 1
        self.restMax = 0
        self.counter = 0
        self.lastLocation = (None,None)
        self.lastLoc = (None,None)
        self.lastCenter = (None,None)
        self.lastCenterCount = 0
        self.lastCenterMax = 8

    '''
    def VideoUpdate(self, image):
        
        if (self.counter < self.computeMax) or self.restMax == 0:
            
            self.counter +=1

            # converting to hsv
            image = self.bridge.imgmsg_to_cv2(image, "bgr8")
            segImage, radius, center = self.processVideo.RecognizeShape(image, 'orange',self.lastLocation)

            (self.flightInfo["center"][1]) = self.InferCenter(self.processVideo.RemoveNoise(segImage))

            if radius == None:
                if center == (None,None):
                    distance = -1
                    (self.flightInfo["SVCLAltitude"])[1] = distance
            else:
                distance = self.processVideo.CalcDistanceNew(88, radius* 2)
                (self.flightInfo["SVCLAltitude"])[1] = distance

            #(self.flightInfo["center"])[1] = center 
            self.lastLocation = center
            (self.flightInfo["segImage"]) = segImage

        else:

            if self.counter < self.computeMax + self.restMax:
                self.counter +=1

            if self.counter >= self.computeMax + self.restMax :
                self.counter = 0
    '''
    

    # Infers which platform to use. With only one platform visible this is trivial,
    # but the logic matters when more than one platform is visible.
    def InferCenter(self, image):
        
        centers, _ = self.processVideo.MultiCenterOfMass(image)
        (self.flightInfo["allCenters"][1]) = centers

        if len(centers) == 0:

            #rospy.logwarn("No Platform")
            center = (None,None)

        elif len(centers) == 1:

            center = centers[0]
            #rospy.logwarn("Found 1")

        elif len(centers) == 2:

            if self.lastLoc != (None,None):
                #rospy.logwarn("Found 2 -- picking closer to last")

                # just pick whichever is closer to the last center
                c1Dist = math.sqrt( math.pow((self.lastLoc[1] - centers[0][1]),2) 
                + math.pow((self.lastLoc[0] - centers[0][0]),2 ) ) 

                c2Dist = math.sqrt( math.pow((self.lastLoc[1] - centers[1][1]),2) 
                + math.pow((self.lastLoc[0] - centers[1][0]),2 ) ) 

                if c1Dist < c2Dist:
                    center = centers[0]
                else:
                    center = centers[1]

            else:

                #rospy.logwarn("Found 2 -- picking closer to center")
                # just pick whichever's x-coord is nearer to center
                xCenter = 320
                c1XDist = abs(xCenter - centers[0][0])
                c2XDist = abs(xCenter - centers[1][0])

                if c1XDist < c2XDist:
                    center = centers[0]
                else:
                    center = centers[1]
        
        # if there are 3 or more platforms
        else:

            centers = sorted(centers, key=self.getX)

            if len(centers) % 2 == 0:
                midX = int( (centers[len(centers)/2][0] + centers[(len(centers)/2)+1][0])/2 )
                midY = int( (centers[len(centers)/2][1] + centers[(len(centers)/2)+1][1])/2 )
            else:
                midX = centers[int( (len(centers)/2.0) + 0.5 )-1][0] 
                midY = centers[int( (len(centers)/2.0) + 0.5 )-1][1] 

            center = (midX, midY)
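            # with three or more platforms visible, aim near the middle of the x-sorted centers
            # (averaging two adjacent centers when the count is even)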

            #rospy.logwarn("Found " + str(len(centers)) + ": " + str(centers))
            #rospy.logwarn("Using " + str(center))

        if len(centers) > 1 and self.lastCenter != (None,None):
            
            closest = None
            smallestLen = None
            # loop to pick whichever is closer to the last center
            for i in range(len(centers)):

                cXDist = abs(self.lastCenter[0] - centers[i][0])
                cYDist = abs(self.lastCenter[1] - centers[i][1])
                length = math.sqrt(math.pow(cXDist,2) + math.pow(cYDist,2) )
                if smallestLen == None or length < smallestLen:
                    closest = centers[i]
                    smallestLen = length

            self.lastCenter = closest
            (self.flightInfo["lastCenter"][1]) = self.lastCenter 
            

        # if the detected location changes drastically, the algorithm waits a few frames
        # to confirm the change is real before committing to the new center
        if self.lastLoc != (None,None) and center != (None,None):
            dist = math.sqrt( math.pow((self.lastLoc[1] - center[1]),2) 
            + math.pow((self.lastLoc[0] - center[0]),2 ) ) 
            if dist > 140:
                self.lastCenterCount +=1
                rospy.logwarn("visible: " + str(len(centers)) + " change Center Count: " 
                + str(self.lastCenterCount) + " / " + str(self.lastCenterMax))
                if self.lastCenterCount >= self.lastCenterMax:
                    rospy.logwarn("Changing center")
                    self.lastCenterCount = 0
                    # saving last center in case
                    self.lastCenter = self.lastLoc
                    self.lastLoc = center
            else:
                self.lastCenterCount = 0
                self.lastLoc = center
        else:
            self.lastLoc = center


        #return center
        return self.lastLoc

    def getX(self,coordinates):
        return coordinates[0]
    
    # force set what the center is
    def SetCenter(self, center):
        self.lastLoc = center


    def UpdateRecordedCenter(self, center):
        (self.flightInfo["lastRecordedCenter"][1]) = center 


    def UpdateAltitude(self, altitude):
        if self.zeroBalanced:
            (self.flightInfo["altitude_raw"])[1] = altitude.altitude_raw - self.zeroAltitude
        else:
            self.zeroBalanced = True
            self.zeroAltitude = altitude.altitude_raw

        
    # update the dictionary as new info arrives from the drone
    def UpdateNavdata(self, navdata):

        # first, update instance variables
        (self.flightInfo["batteryPercent"])[1] = navdata.batteryPercent
        (self.flightInfo["state"])[1] = navdata.state
        #(self.flightInfo["altitude"])[1] = navdata.altd
        (self.flightInfo["rotX"])[1] = navdata.rotX
        (self.flightInfo["rotY"])[1] = navdata.rotY
        (self.flightInfo["rotZ"])[1] = navdata.rotZ
        (self.flightInfo["velX"])[1] = navdata.vx
        (self.flightInfo["velY"])[1] = navdata.vy
        (self.flightInfo["velZ"])[1] = navdata.vz
        (self.flightInfo["altitude"])[1] = navdata.altd
        (self.flightInfo["accelZ"])[1] = navdata.az

        
        dt = rospy.Time.now() - self.oldTime
        # Calculating horizontal displacement
        currLRVelocity = self.flightInfo["velY"][1]
        self.LRDisplacement += float(currLRVelocity) * dt.to_sec()
        (self.flightInfo["dispLR"])[1]=self.LRDisplacement

        # Calculating vertical displacement
        currFBVelocity = self.flightInfo["velX"][1]
        self.FBDisplacement += float(currFBVelocity) * dt.to_sec()
        (self.flightInfo["dispFB"])[1]=self.FBDisplacement

        # Calculating Z displacement
        currUDAcceleration = self.flightInfo["accelZ"][1]
        self.UDDisplacement += float(currUDAcceleration * dt.to_sec() * dt.to_sec())
        (self.flightInfo["dispUD"])[1]=self.UDDisplacement

        self.oldTime = rospy.Time.now()
Beispiel #27
0
class ReturnToLineDirective(AbstractDroneDirective):

    # sets up this directive
    # lineColor: color of the line to return to
    def __init__(self, lineColor, speedModifier=0.5, radiusThresh=255):

        self.lineColor = lineColor
        self.processVideo = ProcessVideo()
        self.speedModifier = speedModifier
        self.radiusThresh = radiusThresh
        self.moveTime = 0.25
        self.waitTime = 0.10

    def InsideCircle(self, point, circleCenter, circleRadius):
        x = point[0]
        y = point[1]
        center_x = circleCenter[0]
        center_y = circleCenter[1]
        radius = circleRadius

        return (math.pow((x - center_x), 2) + math.pow(
            (y - center_y), 2)) < math.pow(radius, 2)

    # Given the image and navdata of the drone, returns the following in order:
    #
    # A directive status int:
    #   0 if algorithm is still running and drone hasn't returned to the line yet
    #   1 if algorithm is finished and drone is now over the line
    #
    # A tuple of (xspeed, yspeed, yawspeed, zspeed):
    #   indicating the next instructions to fly the drone
    #
    # An image reflecting what is being done as part of the algorithm
    def RetrieveNextInstruction(self, image, navdata):

        segLineImage = self.processVideo.DetectColor(image, self.lineColor)
        lines, image = self.processVideo.MultiShowLine(segLineImage)

        #navdata stores the last location and angle in the case of an error
        cx = navdata[1][0][0]
        cy = navdata[1][0][1]
        angle = navdata[1][1]

        #cv2.circle(image, (cx,cy), self.radiusThresh, (0,255,0), 1)

        hasPlatform = False
        # thresh in degrees
        thresh = 18
        for line in lines:
            if line != None:
                # the original line is considered found if its angle matches the last
                # recorded angle within thresh degrees (allowing for the 180 degree wrap)
                if ((abs(angle - line[0]) < thresh or abs(angle - line[0]) >
                     (180 - thresh))):
                    hasPlatform = True
                    cv2.circle(image, line[1], 15, (0, 255, 0), -1)
                    cx = line[1][0]
                    cy = line[1][1]
                else:
                    cv2.circle(image, line[1], 15, (0, 0, 255), 5)

        if hasPlatform:
            rospy.logwarn("Returned to " + self.lineColor + " line")
            directiveStatus = 1
            zspeed = 0

        else:
            rospy.logwarn("Returning to " + self.lineColor + " line")
            directiveStatus = 0
            zspeed = 0.1

        if cx == None or cy == None:
            rospy.logwarn("Returning -- no " + self.lineColor +
                          " detected @ this altitude, increasing altitude")
            return 0, (0, 0, 0, 0.5), image, (cx, cy), 0, 0, None

        xspeed, yspeed, _ = self.processVideo.ApproximateSpeed(image,
                                                               cx,
                                                               cy,
                                                               ytolerance=50,
                                                               xtolerance=50)

        yspeed = min(yspeed * self.speedModifier, 1)
        xspeed = min(xspeed * self.speedModifier, 1)
        rospy.logwarn("X Speed: " + str(xspeed) + " Y Speed: " + str(yspeed))

        self.processVideo.DrawCircle(image, (cx, cy))

        return directiveStatus, (xspeed, yspeed, 0, zspeed), image, ((
            cx, cy), angle), self.moveTime, self.waitTime, None
Beispiel #28
0
class ReturnToOriginDirective(AbstractDroneDirective):


    # sets up this directive
    # tolerance is in mm; how close it needs to be from where it took off
    def __init__(self, platformColor, tolerance):

        self.platformColor = platformColor
        self.tolerance = tolerance
        self.processVideo = ProcessVideo()


    # given the image and navdata of the drone, returns the following in order:
    #
    # A directive status int:
    #   1 if algorithm is finished and the drone has reached where it took off
    #   0 if the algorithm is still running and the drone hasn't reached where it took off
    #
    # A tuple of (xspeed, yspeed, yawspeed, zspeed):
    #   indicating the next instructions to fly the drone
    #
    # An image reflecting what is being done as part of the algorithm
    def RetrieveNextInstruction(self, image, navdata):

        dispH = navdata["dispLR"][1]
        
        speed = 0.2
    
        # calculating horizontal
        if dispH < self.tolerance * -1:
            
            roll = speed
            rospy.logwarn("Drone is returning to origin; going left")

        elif dispH > self.tolerance:

            roll = speed * -1
            rospy.logwarn("Drone is returning to origin; going right")
        
        else:
            roll = 0

        # calculating vertical 
        dispV = navdata["dispFB"][1]
        if dispV < self.tolerance * -1:
            rospy.logwarn("Drone is returning to origin; going forward")
            pitch = speed

        elif dispV > self.tolerance:
            rospy.logwarn("Drone is returning to origin; going backwards")
            pitch = speed * -1

        else:
            pitch = 0
        
        # check if the platform is visible
        platform_image = self.processVideo.DetectColor(image, self.platformColor)
        hasPlatform = self.processVideo.IsHueDominant(platform_image, 0, 360, 0.2)   

        if (pitch == 0 and roll == 0) or hasPlatform:
            directiveStatus = 1
            rospy.logwarn("Drone has returned to origin")
        else:
            directiveStatus = 0


        return directiveStatus, (roll, pitch, 0, 0), image, (None,None), 0, 0, None
Beispiel #29
0
class ReturnToColorDirective(AbstractDroneDirective):


    # sets up this directive
    # platformColor: color of the platform to return to
    # lineColor: color of the line used to locate the platform
    def __init__(self, platformColor, lineColor, speedModifier = 0.6, radiusThresh = 170):

        self.platformColor = platformColor
        self.processVideo = ProcessVideo()
        self.speedModifier = speedModifier
        self.radiusThresh = radiusThresh
        self.lineColor = lineColor
        self.moveTime = 0.35
        self.waitTime = 0.10
        self.bestPlatformFound = None
    
    def InsideCircle(self, point, circleCenter, circleRadius):
        x = point[0]
        y = point[1]
        center_x = circleCenter[0]
        center_y = circleCenter[1]
        radius = circleRadius
        
        return (math.pow((x-center_x),2) + math.pow((y-center_y),2)) < math.pow(radius,2)

    def PointAlongLine(self, linePoint, lineAngle, point, thresh):
        # edge case: both lines are vertical
        if point[0] == linePoint[0]:
            if lineAngle == 90:
                return True
            else:
                return False
        else:
            # slope compensates for the image's upper-left-origin coordinate system
            slope = (float(linePoint[1])-point[1]) / (point[0]-float(linePoint[0]))

            slopeAngle = math.degrees(math.atan(slope))
            if slopeAngle < 0:
                #rospy.logwarn("seen added 180")
                slopeAngle += 180
            #rospy.logwarn(str(point) + str(linePoint) +" >> slope: " + str(slope))
            #rospy.logwarn("Original: " + str(lineAngle) + " seen: " + str(slopeAngle))
            if( min( abs( lineAngle - slopeAngle), 180 - abs( lineAngle - slopeAngle) ) < thresh):
                #rospy.logwarn("GOOD")
                return True
            else:
                #rospy.logwarn("BAD")
                return False

    # Given the image and navdata of the drone, returns the following in order:
    #
    # A directive status int:
    #   0 if algorithm is still running and drone hasn't returned to the color yet
    #   1 if algorithm is finished and drone is now over the color
    #
    # A tuple of (xspeed, yspeed, yawspeed, zspeed):
    #   indicating the next instructions to fly the drone
    #
    # An image reflecting what is being done as part of the algorithm
    def RetrieveNextInstruction(self, image, navdata):
        
        lineSeg = self.processVideo.DetectColor(image, self.lineColor)
        #platformSeg = self.processVideo.DetectColor(image, self.platformColor)
        #platformSeg = self.processVideo.RemoveNoise(platformSeg)
        #platformSeg = navdata[0]["segImage"]
        #centers, _ = self.processVideo.MultiCenterOfMass(platformSeg)
        centers = navdata[0]["allCenters"][1]

        #navdata stores the last location in the case of an error
        cx = navdata[1][0][0]
        cy = navdata[1][0][1] 
        lastAngle = navdata[1][1]
        
        hasPlatform = False

        # find last platform based on last seen line angle
        if lastAngle != None:
            lines, image = self.processVideo.MultiShowLine(lineSeg, sort = False)
            for center in centers:
                cv2.circle(image, center, 4, (255,255,255), -1)

            thresh = 21
            validLine = None
            # picks the line closest to last angle, and within thresh (degrees)
            for line in lines:
                #rospy.logwarn("last: " + str(lastAngle) + " this: " + str(line[0]))
                angleDifference = min( abs(lastAngle -line[0]), 180 -abs(lastAngle -line[0]) ) 
                if( line != None and angleDifference < thresh and (validLine == None or validLine[1] > angleDifference) ):
                    #rospy.logwarn("valid")
                    validLine = (line, angleDifference)

            if validLine != None:
                line = validLine[0]
                # find a center that lies along the best-matching line
                for c in centers:
                    alongLine = self.PointAlongLine(line[1], line[0], c, 25)
                    # blue line => orient perpendicular => valid point must be to the left of line
                    if self.lineColor == "blue" and c[0] > line[1][0]:
                        alongLine = False
                    if alongLine:
                        lastAngle = line[0]
                        cv2.line(image, line[1], c, (0,255,255),3)
                        cx, cy = c[0], c[1]
                        cv2.circle(image, (cx,cy), 12, (0,255,0), -1)
                        cv2.circle(image, (cx,cy), 12, (255,255,255), 7)
                        cv2.circle(image, line[1], 7, (0,255,0), -1)
                        cv2.circle(image, line[1], 7, (255,255,255), 4)
                        hasPlatform = True

        # if no angle was found, just use location
        else:

            image = navdata[0]["segImage"]

            cv2.circle(image, (cx,cy), self.radiusThresh, (0,255,0), 1)
            cv2.circle(image, (cx,cy), 7, (0,255,0), -1)

            for c in centers:

                cv2.circle(image, c, 10, (0,255,255), -1)
                if self.InsideCircle( c , (cx,cy), self.radiusThresh):
                    hasPlatform = True
                    cx, cy = c[0], c[1]
        
        if hasPlatform:
            rospy.logwarn("Successfully returned to platform -- last angle seen was "+ str(lastAngle))
            directiveStatus = 1
            zspeed = 0

        else:
            rospy.logwarn("Returning to platform -- last angle seen was "+ str(lastAngle))
            directiveStatus = 0
            zspeed = 0.2

        xspeed, yspeed, _ = self.processVideo.ApproximateSpeed(image.copy(), cx, cy,
        ytolerance = 50, xtolerance = 50)
        
        xspeed = min( xspeed * self.speedModifier, 1 )
        yspeed = min( yspeed * self.speedModifier, 1 )
        rospy.logwarn("X Speed: " + str(xspeed) + " Y Speed: " + str(yspeed))
        
        # draw rectangles so it's easy to tell that it's in return mode
        border = 15
        offset = 2
        cv2.rectangle(image, (border, border), (640-border,360-border), (0,0, 255), 1)
        cv2.rectangle(image, (border-1*offset, border-1*offset), (640-border+1*offset,360-border+1*offset), (0,229, 255), 1)
        cv2.rectangle(image, (border-2*offset, border-2*offset), (640-border+2*offset,360-border+2*offset), (0,0, 255), 1)

        return directiveStatus, (xspeed, yspeed, 0, zspeed), image, ((cx,cy), lastAngle), self.moveTime, self.waitTime, None

    def Finished(self):
        rospy.logwarn("RETURN TO COLOR FINISHED *******************")
        rospy.logwarn("RETURN TO COLOR FINISHED *******************")
        self.bestPlatformFound = None
        return None
Beispiel #30
0
class ResumePlatformDirective(AbstractDroneDirective):

    # sets up this directive
    # platformColor: color of the platform to return to
    def __init__(self, platformColor, speedModifier=0.3, radiusThresh=155):

        self.platformColor = platformColor
        self.processVideo = ProcessVideo()
        self.speedModifier = speedModifier
        self.radiusThresh = radiusThresh
        self.moveTime = 0.20
        self.waitTime = 0.10

    def InsideCircle(self, point, circleCenter, circleRadius):
        x = point[0]
        y = point[1]
        center_x = circleCenter[0]
        center_y = circleCenter[1]
        radius = circleRadius

        return (math.pow((x - center_x), 2) + math.pow(
            (y - center_y), 2)) < math.pow(radius, 2)

    # Given the image and navdata of the drone, returns the following in order:
    #
    # A directive status int:
    #   0 if algorithm is still running and drone hasn't identified the previous platform
    #   1 if algorithm is finished and drone is now over the previous platform
    #
    # A tuple of (xspeed, yspeed, yawspeed, zspeed):
    #   indicating the next instructions to fly the drone
    #
    # An image reflecting what is being done as part of the algorithm
    def RetrieveNextInstruction(self, image, navdata):

        image = navdata["segImage"]
        centers = navdata["allCenters"][1]

        # contains last platform location
        cx = navdata["lastRecordedCenter"][1][0]
        cy = navdata["lastRecordedCenter"][1][1]

        if cx == None or cy == None:
            rospy.logwarn("ERROR")
            return -1, (0, 0, 0, 0.5), image, (cx, cy), 0, 0

        cv2.circle(image, (cx, cy), self.radiusThresh, (0, 255, 0), 1)
        cv2.circle(image, (cx, cy), 7, (0, 255, 0), -1)

        resumed = False

        for c in centers:

            cv2.circle(image, c, 10, (0, 255, 255), -1)
            if self.InsideCircle(c, (cx, cy), self.radiusThresh):
                resumed = True
                cx, cy = c[0], c[1]

        if resumed:
            rospy.logwarn("Over last platform")
            directiveStatus = 1
            zspeed = 0

        else:
            rospy.logwarn("Returning to last platform -- increasing altitude")
            directiveStatus = 0
            zspeed = 0.5

        # TODO: incorrect calculation; should use the last recorded center rather than the current center
        xspeed, yspeed, _ = self.processVideo.ApproximateSpeed(image.copy(),
                                                               cx,
                                                               cy,
                                                               ytolerance=0,
                                                               xtolerance=0)

        xspeed = xspeed * self.speedModifier
        yspeed = yspeed * self.speedModifier

        rospy.logwarn("X Speed: " + str(xspeed) + " Y Speed: " + str(yspeed))

        return directiveStatus, (xspeed, yspeed, 0, zspeed), image, (
            cx, cy), self.moveTime, self.waitTime, None
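
# --- Usage sketch (not part of the original examples) ---
# A minimal, hypothetical illustration of how a state machine might drive one of these
# directives: call RetrieveNextInstruction each frame, fly the returned (roll, pitch, yaw, z)
# tuple for moveTime, hover for waitTime, and advance once directiveStatus becomes 1.
# The names run_directive, drone_controller, SetCommand, get_frame, and get_navdata are
# assumptions standing in for whatever supplies commands, camera frames, and navdata in the
# real system; the 7-element return signature follows the convention used above.
import rospy


def run_directive(directive, drone_controller, get_frame, get_navdata):
    while not rospy.is_shutdown():
        status, (roll, pitch, yaw, z), debugImage, extra, moveTime, waitTime, _ = \
            directive.RetrieveNextInstruction(get_frame(), get_navdata())
        drone_controller.SetCommand(roll, pitch, yaw, z)  # hypothetical command API
        rospy.sleep(moveTime)                             # fly the command for moveTime seconds
        drone_controller.SetCommand(0, 0, 0, 0)           # then hover
        rospy.sleep(waitTime)
        if status == 1:                                   # directive reports completion
            directive.Finished()
            break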