Example #1
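# The classes below appear to rely on a shared set of modules; the standard-library
# and third-party imports are listed here, while the project-specific imports
# (AbstractDroneDirective, ProcessVideo, and the tracker message) are shown only as
# assumptions, since their exact module paths are not visible in these excerpts.
import math
import time

import cv2
import numpy as np
import rospy

# Assumed project-local imports (paths are guesses; adjust to the actual package):
# from processing_functions.process_video import ProcessVideo
# from drone_directives.AbstractDroneDirective import AbstractDroneDirective
# from svcl_ardrone.msg import tracker
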
class OrientLineDirective(AbstractDroneDirective):

    # orientation:
    #   > either "PARALLEL" or "PERPENDICULAR";
    #     algorithm will orient the drone parallel or perpendicular to the line respectively
    # lineColor:
    #   > color of the line to orient to
    # platformColor:
    #   > color of the platform to orient to
    # hoverAltitude:
    #   > how high to hover over the platform
    # heightTolerance:
    #   > tolerance around hoverAltitude used for the altitude window
    # yOffset, ySizeOffset:
    #   > vertical offset and vertical size adjustment of the centering window
    def __init__(self,
                 orientation,
                 lineColor,
                 platformColor,
                 hoverAltitude,
                 heightTolerance=50,
                 yOffset=0,
                 ySizeOffset=0):

        if orientation != "PARALLEL" and orientation != "PERPENDICULAR":
            raise Exception("Orientation not recognized.")
        else:
            self.orientation = orientation

        self.lineColor = lineColor
        self.platformColor = platformColor
        self.hoverAltitude = hoverAltitude
        self.processVideo = ProcessVideo()
        self.prevCenter = None
        self.forceCenter = None
        self.prevAngle = None
        self.heightTolerance = heightTolerance
        self.yOffset = yOffset
        self.ySizeOffset = ySizeOffset

    # Given the image and navdata of the drone, returns the following in order:
    #
    # A directive status int:
    #  -1 if the tracked platform or line was lost (error)
    #   0 if algorithm is still running and drone isn't oriented yet
    #   1 if algorithm is finished and drone is now oriented
    #
    # A tuple of (xspeed, yspeed, yawspeed, zspeed):
    #   indicating the next instructions to fly the drone
    #
    # An image reflecting what is being done as part of the algorithm
    #
    # It also returns the tracked (center, angle) pair, the move and wait times,
    # and an optional forced center for the calling state machine.
    def RetrieveNextInstruction(self, image, navdata):

        self.moveTime = 0.17
        #self.moveTime=0.23
        self.waitTime = 0.1

        segLineImage = self.processVideo.DetectColor(image, self.lineColor)

        cx, cy = navdata["center"][1][0], navdata["center"][1][1]

        if cx != None and cy != None:
            cv2.circle(segLineImage, (cx, cy), 6, (255, 255, 255), -1)

        centers = navdata["allCenters"][1]

        if self.forceCenter != None:
            self.forceCenter = None

        # when directive first starts, it latches onto the first correct orange platform it sees
        if self.prevCenter == None:

            rospy.logwarn("FINDING INITAL CENTER---")

            if cx != None and cy != None:
                self.prevCenter = (cx, cy)

            # pick the rightmost center
            rightmostCenter = centers[0]
            if self.orientation == "PARALLEL":
                for i in range(len(centers)):
                    if centers[i][0] > rightmostCenter[0]:
                        rightmostCenter = centers[i]
                self.forceCenter = rightmostCenter
            else:
                # pick the center that is closest to the most vertical pink line
                # finding most vertical line
                objectLineImg = self.processVideo.DetectColor(image, "pink")
                objectLines, objectLineImg = self.processVideo.MultiShowLine(
                    objectLineImg, sort=False)
                mostVertical = None
                rospy.logwarn(" All pink lines: ")
                for line in objectLines:
                    if line != None:
                        rospy.logwarn("@: " + str(line[1]) + ", " +
                                      str(line[0]) + " degrees")
                        if mostVertical == None or (
                            (abs(90 - line[0]) < abs(90 - mostVertical[0]))
                                and line[4] > 30):
                            #if ( mostHorizontal == None or
                            #( min(mostHorizontal[0], 180 - mostHorizontal[0] ) > (min(line[0], 180 - line[0] ) )
                            #and line[4] > 30 ) ):
                            #mostHorizontal = line
                            mostVertical = line

                rospy.logwarn("Found most vertical pink line @: " +
                              str(mostVertical[1]) + ", with angle " +
                              str(mostVertical[0]))
                """
                # finding center closest to the left endpoint of that vertical line
                if mostHorizontal[2][0] < mostHorizontal[3][0]:
                    leftEndpoint = mostHorizontal[2]
                else:
                    leftEndpoint = mostHorizontal[3]
                """

                correctCenter = centers[0]

                rospy.logwarn("All centers: ")
                for i in range(len(centers)):
                    """
                    correctEndpointDist = math.sqrt( math.pow((correctCenter[1] - leftEndpoint[1]),2) 
                    + math.pow((correctCenter[0] - leftEndpoint[0]),2 ) ) 

                    currEndpointDist = math.sqrt( math.pow((centers[i][1] - leftEndpoint[1]),2) 
                    + math.pow((centers[i][0] - leftEndpoint[0]),2 ) ) 
                    
                    if currEndpointDist < correctEndpointDist:
                        correctCenter = centers[i]
                    """
                    rospy.logwarn("@: " + str(centers[i]))
                    if abs(mostVertical[1][0] -
                           centers[i][0]) < abs(mostVertical[1][0] -
                                                correctCenter[0]):
                        correctCenter = centers[i]

                self.forceCenter = correctCenter
                rospy.logwarn("Closest center to vertical pink line is @: " +
                              str(correctCenter))

        elif cx != None and cy != None:

            # checking if curr center is consistent with previous one
            centerDist = math.sqrt(
                math.pow((self.prevCenter[1] - cy), 2) +
                math.pow((self.prevCenter[0] - cx), 2))
            if centerDist > 225:
                rospy.logwarn("ERROR: ORIGINAL CENTER LOST, showing all " +
                              str(len(centers)))
                for i in range(len(centers)):
                    cv2.circle(segLineImage, centers[i], 6, (255, 0, 0), -1)
                if cx != None and cy != None:
                    cv2.circle(segLineImage, (cx, cy), 10, (255, 255, 255), -1)

                cx = self.prevCenter[0]
                cy = self.prevCenter[1]
                cv2.circle(segLineImage, (cx, cy), 10, (0, 0, 255), 4)
                directiveStatus = -1
                return directiveStatus, (0, 0, 0,
                                         0), segLineImage, (cx, cy), 0, 0, None
            else:
                self.prevCenter = (cx, cy)

        if self.orientation == "PARALLEL":
            lines, segLineImage = self.processVideo.MultiShowLine(segLineImage,
                                                                  sort=False)

            # pick the pink line closest to the hover platform
            angle = None
            closest = None
            closestDist = None
            for line in lines:
                # the None check must happen before line[1] is dereferenced
                if cx != None and line != None:

                    dist = math.sqrt(
                        math.pow((line[1][1] - cy), 2) +
                        math.pow((line[1][0] - cx), 2))

                    if (line[4] > 30
                            and (closest == None or dist < closestDist)):

                        closest = line
                        angle = closest[0]
                        closestDist = dist

            if closest != None:
                cv2.circle(segLineImage, closest[1], 15, (0, 255, 0), -1)
                for line in lines:
                    if line != None and line[1] != closest[1]:
                        cv2.circle(segLineImage, line[1], 15, (0, 0, 255), -1)

            #converting angle
            if angle != None:

                # checking if previous angle is consistent with current one
                if self.prevAngle == None or min(
                        abs(self.prevAngle - angle),
                        180 - abs(self.prevAngle - angle)) < 17:
                    self.prevAngle = angle
                else:
                    rospy.logwarn(
                        "ERROR: ORIGINAL CENTER LOST; angle mismatch. Before: "
                        + str(self.prevAngle) + " Now: " + str(angle))
                    directiveStatus = -1
                    return directiveStatus, (0, 0, 0, 0), segLineImage, ((
                        cx, cy), self.prevAngle), 0, 0, None

                if angle == 90:
                    angle = 0
                elif angle < 90:
                    angle = angle + 90
                else:
                    angle = angle - 90

            yawspeed = self.processVideo.ObjectOrientation(segLineImage,
                                                           angle,
                                                           4,
                                                           yawspeed=0.50)
            #.42
            if yawspeed != None:
                yawspeed = -1 * yawspeed
            xWindowSize = 60
            yWindowSize = 105 + self.ySizeOffset
            xWindowOffset = 0
            yWindowOffset = self.yOffset
            altLowerTolerance = self.heightTolerance
            altUpperTolerance = self.heightTolerance - 15
            # defines window to make the drone focus on moving away from the edges and back into
            # the center; yaw will be turned off
            xReturnSize = xWindowSize + 210
            yReturnSize = yWindowSize + 110

        elif self.orientation == "PERPENDICULAR":

            kernel = cv2.getStructuringElement(cv2.MORPH_RECT, (2, 2))
            segLineImage = cv2.morphologyEx(segLineImage, cv2.MORPH_OPEN,
                                            kernel)
            lines, segLineImage = self.processVideo.MultiShowLine(segLineImage,
                                                                  sort=False)

            # pick the blue line that is closest to the hover platform AND to the right of it
            angle = None
            closest = None
            for line in lines:
                if (line != None and cx != None and line[1][0] >= cx and
                    (closest == None
                     or abs(cx - line[1][0]) < abs(cx - closest[1][0]))):
                    closest = line
                    angle = closest[0]

            if closest != None:
                cv2.circle(segLineImage, closest[1], 15, (0, 255, 0), -1)
                for line in lines:
                    if line != None and line[1] != closest[1]:
                        cv2.circle(segLineImage, line[1], 15, (0, 0, 255), -1)

            #converting angle
            if angle != None:
                # checking if previous angle is consistent with current one
                if self.prevAngle == None or min(
                        abs(self.prevAngle - angle),
                        180 - abs(self.prevAngle - angle)) < 27:
                    self.prevAngle = angle
                else:
                    rospy.logwarn(
                        "ERROR: ORIGINAL CENTER LOST; angle mismatch. Before: "
                        + str(self.prevAngle) + " Now: " + str(angle))
                    directiveStatus = -1
                    return directiveStatus, (0, 0, 0, 0), segLineImage, ((
                        cx, cy), self.prevAngle), 0, 0, None

                if angle == 90:
                    angle = 0
                elif angle < 90:
                    angle = angle + 90
                else:
                    angle = angle - 90

            yawspeed = self.processVideo.LineOrientation(segLineImage,
                                                         angle,
                                                         9,
                                                         yawspeed=0.50)
            if yawspeed != None:
                yawspeed = -1 * yawspeed
            xWindowSize = 295
            yWindowSize = 70
            xWindowOffset = 0
            yWindowOffset = 0
            altLowerTolerance = 200
            altUpperTolerance = 250
            # defines window to make the drone focus on moving away from the edges and back into
            # the center; yaw will be turned off
            xReturnSize = xWindowSize
            yReturnSize = yWindowSize

        numRows, numCols, _ = image.shape
        centerx = numCols / 2 - xWindowOffset
        centery = numRows / 2 - yWindowOffset

        xspeed, yspeed, zspeed = self.processVideo.ApproximateSpeed(
            segLineImage,
            cx,
            cy,
            centerx,
            centery,
            navdata["SVCLAltitude"][1],
            self.hoverAltitude,
            xtolerance=xWindowSize,
            ytolerance=yWindowSize,
            ztolerance=(altLowerTolerance, altUpperTolerance),
            xOffset=xWindowOffset,
            yOffset=yWindowOffset)

        # box defines when the directive is finished
        xLower = (numCols / 2) - xReturnSize
        yLower = (numRows / 2) - yReturnSize
        xUpper = (numCols / 2) + xReturnSize
        yUpper = (numRows / 2) + yReturnSize

        # perpendicular can disregard height
        #if self.orientation == "PERPENDICULAR":
        #    zspeed = 0

        if (yawspeed == 0 and xspeed == 0 and yspeed == 0 and zspeed == 0
                and cx != None and cy != None):

            rospy.logwarn("Oriented " + self.orientation + " to " +
                          self.lineColor + " line")
            directiveStatus = 1

        elif cx == None or cy == None:

            rospy.logwarn("*** ERROR: Lost " + self.platformColor +
                          " platform ***")
            directiveStatus = -1

        else:

            # If the drone is still trying to align, it adapts to one of three behaviors:

            # The drone will just go back near the center if: 1) no line is detected, or 2)
            # the drone is not "near" the center as defined by a bounding box.
            # Turning is disabled and the altitude change is damped.
            if yawspeed == None or (cx > xUpper or cx < xLower or cy > yUpper
                                    or cy < yLower):
                cv2.rectangle(segLineImage, (xLower, yLower), (xUpper, yUpper),
                              (0, 0, 255), 2)
                rospy.logwarn("Too far out; only MOVING drone back to center")
                yawspeed = 0
                zspeed = zspeed * 0.2

            # if the drone isn't aligned yet but is "near" the center (defined by a box),
            # just turn the drone; there is no need to translate it
            elif yawspeed != 0:
                rospy.logwarn("Only TURNING drone. Yaw speed = " +
                              str(yawspeed))
                self.moveTime = 3.5
                xspeed = 0
                yspeed = 0
                zspeed = zspeed * 0.45

            # if the drone is aligned to the line and is near the center,
            # keep moving it to the center and adjusting the height until the
            # directive is finished
            else:
                rospy.logwarn("Curr Altitude = " +
                              str(int(navdata["SVCLAltitude"][1])) +
                              " mm; Goal = [ " +
                              str(self.hoverAltitude - altLowerTolerance) +
                              " mm, " +
                              str(self.hoverAltitude + altUpperTolerance) +
                              " mm ].")

            directiveStatus = 0

        return directiveStatus, (
            xspeed, yspeed, yawspeed, zspeed), segLineImage, (
                (cx, cy),
                self.prevAngle), self.moveTime, self.waitTime, self.forceCenter

    def Finished(self):
        center = self.prevCenter
        self.prevAngle = None
        self.prevCenter = None
        self.forceCenter = None
        return center

    def OnErrorReturn(self, returnData):
        # set previous center to what was found in the error algorithm
        rospy.logwarn("ORIENT LINE ON ERROR RETURN***")
        self.prevCenter = returnData
        self.prevAngle = None
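
# Minimal usage sketch (hypothetical driver loop, not the original state machine):
# shows how the 7-tuple returned by RetrieveNextInstruction might be consumed.
# get_frame_and_navdata and send_velocity are stand-in callables.
def run_orient_directive_once(directive, get_frame_and_navdata, send_velocity):
    image, navdata = get_frame_and_navdata()
    status, (xs, ys, yaws, zs), debugImg, state, moveTime, waitTime, forced = \
        directive.RetrieveNextInstruction(image, navdata)
    if status == 1:
        # oriented: let the directive clean up its tracked state
        directive.Finished()
    elif status == -1:
        # error (platform or line lost): the original state machine would hand
        # control to a recovery directive such as ReturnToColorDirective here
        pass
    else:
        # still aligning: apply the commanded velocities for moveTime, then hover
        send_velocity(xs, ys, yaws, zs, moveTime, waitTime)
    return status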
Example #2
class ReturnToColorDirective(AbstractDroneDirective):


    # sets up this directive
    # platformColor: color of the platform to return to
    # lineColor: color of the line used to re-identify the platform
    # speedModifier: scales the computed x/y speeds
    # radiusThresh: search radius (in pixels) around the last known location
    def __init__(self, platformColor, lineColor, speedModifier = 0.6, radiusThresh = 170):

        self.platformColor = platformColor
        self.processVideo = ProcessVideo()
        self.speedModifier = speedModifier
        self.radiusThresh = radiusThresh
        self.lineColor = lineColor
        self.moveTime = 0.35
        self.waitTime = 0.10
        self.bestPlatformFound = None
    
    def InsideCircle(self, point, circleCenter, circleRadius):
        x = point[0]
        y = point[1]
        center_x = circleCenter[0]
        center_y = circleCenter[1]
        radius = circleRadius
        
        return (math.pow((x-center_x),2) + math.pow((y-center_y),2)) < math.pow(radius,2)

    def PointAlongLine(self, linePoint, lineAngle, point, thresh):
        # edge case: the segment from linePoint to point is vertical
        if point[0] == linePoint[0]:
            return lineAngle == 90
        else:
            # the y difference is negated to compensate for the upper-left origin coord system
            slope = (float(linePoint[1])-point[1]) / (point[0]-float(linePoint[0]))

            slopeAngle = math.degrees(math.atan(slope))
            if slopeAngle < 0:
                #rospy.logwarn("seen added 180")
                slopeAngle += 180
            #rospy.logwarn(str(point) + str(linePoint) +" >> slope: " + str(slope))
            #rospy.logwarn("Original: " + str(lineAngle) + " seen: " + str(slopeAngle))
            if( min( abs( lineAngle - slopeAngle), 180 - abs( lineAngle - slopeAngle) ) < thresh):
                #rospy.logwarn("GOOD")
                return True
            else:
                #rospy.logwarn("BAD")
                return False

    # Given the image and navdata of the drone, returns the following in order:
    #
    # A directive status int:
    #   0 if algorithm is still running and drone hasn't returned to the color yet
    #   1 if algorithm is finished and drone is now over the color
    #
    # A tuple of (xspeed, yspeed, yawspeed, zspeed):
    #   indicating the next instructions to fly the drone
    #
    # An image reflecting what is being done as part of the algorithm
    def RetrieveNextInstruction(self, image, navdata):
        
        lineSeg = self.processVideo.DetectColor(image, self.lineColor)
        #platformSeg = self.processVideo.DetectColor(image, self.platformColor)
        #platformSeg = self.processVideo.RemoveNoise(platformSeg)
        #platformSeg = navdata[0]["segImage"]
        #centers, _ = self.processVideo.MultiCenterOfMass(platformSeg)
        centers = navdata[0]["allCenters"][1]

        #navdata stores the last location in the case of an error
        cx = navdata[1][0][0]
        cy = navdata[1][0][1] 
        lastAngle = navdata[1][1]
        
        hasPlatform = False

        # find last platform based on last seen line angle
        if lastAngle != None:
            lines, image = self.processVideo.MultiShowLine(lineSeg, sort = False)
            for center in centers:
                cv2.circle(image, center, 4, (255,255,255), -1)

            thresh = 21
            validLine = None
            # picks the line closest to the last angle, and within thresh (degrees)
            for line in lines:
                # the None check must come before line[0] is read
                if line == None:
                    continue
                #rospy.logwarn("last: " + str(lastAngle) + " this: " + str(line[0]))
                angleDifference = min( abs(lastAngle -line[0]), 180 -abs(lastAngle -line[0]) )
                if( angleDifference < thresh and (validLine == None or validLine[1] > angleDifference) ):
                    #rospy.logwarn("valid")
                    validLine = (line, angleDifference)

            if validLine != None:
                line = validLine[0]
                # finding center closest to the most valid line
                for c in centers:
                    alongLine = self.PointAlongLine(line[1], line[0], c, 25)
                    # blue line => orient perpendicular => valid point must be to the left of line
                    if self.lineColor == "blue" and c[0] > line[1][0]:
                        alongLine = False
                    if alongLine:
                        lastAngle = line[0]
                        cv2.line(image, line[1], c, (0,255,255),3)
                        cx, cy = c[0], c[1]
                        cv2.circle(image, (cx,cy), 12, (0,255,0), -1)
                        cv2.circle(image, (cx,cy), 12, (255,255,255), 7)
                        cv2.circle(image, line[1], 7, (0,255,0), -1)
                        cv2.circle(image, line[1], 7, (255,255,255), 4)
                        hasPlatform = True

        # if no angle was found, just use location
        else:

            image = navdata[0]["segImage"]

            cv2.circle(image, (cx,cy), self.radiusThresh, (0,255,0), 1)
            cv2.circle(image, (cx,cy), 7, (0,255,0), -1)

            for c in centers:

                cv2.circle(image, c, 10, (0,255,255), -1)
                if self.InsideCircle( c , (cx,cy), self.radiusThresh):
                    hasPlatform = True
                    cx, cy = c[0], c[1]
        
        if hasPlatform:
            rospy.logwarn("Successfully returned to platform -- last angle seen was "+ str(lastAngle))
            directiveStatus = 1
            zspeed = 0

        else:
            rospy.logwarn("Returning to platform -- last angle seen was "+ str(lastAngle))
            directiveStatus = 0
            zspeed = 0.2

        xspeed, yspeed, _ = self.processVideo.ApproximateSpeed(image.copy(), cx, cy,
        ytolerance = 50, xtolerance = 50)
        
        xspeed = min( xspeed * self.speedModifier, 1 )
        yspeed = min( yspeed * self.speedModifier, 1 )
        rospy.logwarn("X Speed: " + str(xspeed) + " Y Speed: " + str(yspeed))
        
        # draw rectangles so it's easy to tell that it's in return mode
        border = 15
        offset = 2
        cv2.rectangle(image, (border, border), (640-border,360-border), (0,0, 255), 1)
        cv2.rectangle(image, (border-1*offset, border-1*offset), (640-border+1*offset,360-border+1*offset), (0,229, 255), 1)
        cv2.rectangle(image, (border-2*offset, border-2*offset), (640-border+2*offset,360-border+2*offset), (0,0, 255), 1)

        return directiveStatus, (xspeed, yspeed, 0, zspeed), image, ((cx,cy), lastAngle), self.moveTime, self.waitTime, None

    def Finished(self):
        rospy.logwarn("RETURN TO COLOR FINISHED *******************")
        rospy.logwarn("RETURN TO COLOR FINISHED *******************")
        self.bestPlatformFound = None
        return None
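
# Worked sketch of the PointAlongLine angle test above (standalone re-derivation,
# not the original class; uses the math import listed at the top): with an
# upper-left image origin, y grows downward, so the y difference is negated
# before the slope angle is compared against the line angle.
def point_along_line(line_point, line_angle, point, thresh):
    if point[0] == line_point[0]:
        return line_angle == 90
    slope = (float(line_point[1]) - point[1]) / (point[0] - float(line_point[0]))
    slope_angle = math.degrees(math.atan(slope))
    if slope_angle < 0:
        slope_angle += 180
    return min(abs(line_angle - slope_angle),
               180 - abs(line_angle - slope_angle)) < thresh

# Example: a 45-degree line centered at (100, 100) passes through (150, 50) in
# image coordinates, but not through (150, 150).
assert point_along_line((100, 100), 45, (150, 50), 10)
assert not point_along_line((100, 100), 45, (150, 150), 10)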
Example #3
class PIDYawDirective(AbstractDroneDirective):

    # sets up this directive
    # poseTracker: pose tracker used for body/world coordinate transforms
    # target: target point (body frame) that the drone should hover over
    # yaw: target yaw angle, in degrees
    # platformNumber: platform counter; used in Finished() to decide when to reset the location estimate
    # waitDist: position error tolerance (m) for considering the target reached
    def __init__(self, poseTracker, target, yaw, platformNumber, waitDist=0.1):

        #self.Kp,self.Ki,self.Kd = 0.1,20.0,0.0005 #best
        self.Kp, self.Ki, self.Kd = 0.2, 0.0, 0.0005
        self.moveTime = 0.2
        self.waitTime = 0.0
        self.tracker = poseTracker
        self.target = target
        self.waitDist = waitDist
        self.worldTarget = self.tracker.body2World(target)[:, 0]
        self.processVideo = ProcessVideo()
        self.platformNumber = platformNumber
        self.centery = 360 / 2.0
        self.centerx = 640 / 2.0
        self.pub = rospy.Publisher('ardrone/tracker', tracker)
        self.track = tracker()
        self.platform = [0, 0, 0]
        self.filterSize = 50
        self.buff = np.repeat(np.asarray([self.worldTarget]).T,
                              self.filterSize,
                              axis=1)
        self.KpYaw, self.KiYaw, self.KdYaw = (1 / 180.0) * 0.8, 0, 0
        self.targetYaw = (yaw + 360) % 360

        self.worldPoint = np.asarray([[0, 0, 0]]).T
        # the amount of weight to put towards correcting the drone's drift by recognizing landmarks
        self.correctionRatio = 0.9999

        # PID bookkeeping (dt, lastTime, lastError, totalError, ...) is initialized in Reset()
        self.Reset()

    def distance(self, x, y):
        dist = (x[0] - y[0])**2 + (x[1] - y[1])**2
        dist = dist**(0.5)
        return dist

    def weightedUpdate(self, prediction, updateTerm):
        return (self.correctionRatio * updateTerm[0, 0] +
                (1 - self.correctionRatio) * prediction[0, 0],
                self.correctionRatio * updateTerm[1, 0] +
                (1 - self.correctionRatio) * prediction[1, 0],
                updateTerm[2, 0], 1.0)

    # given the image and navdata of the drone, returns the following in order:
    #
    # A directive status int:
    #   0 if algorithm is still running and drone isn't on orange yet
    #   1 if algorithm is finished and drone is now on orange
    #
    # A tuple of (xspeed, yspeed, yawspeed, zspeed):
    #   indicating the next instructions to fly the drone
    #
    # An image reflecting what is being done as part of the algorithm
    def RetrieveNextInstruction(self, image, navdata):

        segImage, radius, center = self.processVideo.RecognizeShape(
            image, 'orange', (None, None))
        blue = self.processVideo.detectColor(image, 'blue', 'segmented')
        lines, blueImg = self.processVideo.MultiShowLine(blue)
        bestTheta = None
        minDist = -1
        for line in lines:
            # skip missing lines, and skip ranking if no platform center was found
            if line == None or center == None or center[0] == None:
                continue
            theta = -line[0]
            tapeCenter = line[1]
            dist = self.distance(center, tapeCenter)
            if minDist == -1 or dist < minDist:
                minDist = dist
                bestTheta = theta
        if bestTheta != None:
            self.currentYaw = bestTheta
        else:
            self.currentYaw = 0

        #Calculate closest rotation to get to target angle
        theta = ((0 - self.currentYaw) % 360 + 360) % 360
        theta = (theta - 360) if (theta > 180) else theta
        loc = (0, 0, 0, 0)
        #circle detection
        #rospy.logwarn("x: "+str(self.tracker.translation[0])+" y: "+str(self.tracker.translation[1]))
        if radius != None:
            predictedZ = self.processVideo.CalcDistanceNew(88.0,
                                                           radius * 2) / 1000.0
            scale = (88.0 / (radius * 2)) / 1000.0  #meters/pixel
            x = (center[0] - self.centerx) * scale
            y = (self.centery - center[1]) * scale

            tape = self.tracker.camera2Body([x, y, -predictedZ])
            worldPoint = self.tracker.camera2World([x, y, -predictedZ])
            self.worldPoint = worldPoint
            if (self.distance(worldPoint, self.worldTarget) < 0.35):

                for i in range(self.filterSize - 1):
                    self.buff[:, i] = self.buff[:, i + 1]
                self.buff[:, self.filterSize - 1] = np.asarray(
                    [worldPoint[0, 0], worldPoint[1, 0], worldPoint[2, 0]])
                self.worldTarget = np.mean(self.buff, 1)
            '''
            if self.tapeLocation != None:
                dist = self.distance(worldPoint,self.tapeLocation)
                if dist < 0.35 and dist > 0.15:
                    loc = self.tracker.tape2World([x,y,-predictedZ],self.yaw,[self.tapeLocation[0],self.tapeLocation[1],0])
                    loc = self.weightedUpdate(worldPoint,loc)
                    rospy.logwarn("Fixing location to ..."+str(loc))
            '''
            self.track.landMark = (self.worldTarget[0], self.worldTarget[1],
                                   0.0, 1.0)
        else:
            self.track.landMark = (0, 0, 0, 0.0)

        #rospy.logwarn("world target: " + str(self.worldTarget))
        self.track.landMark = (self.worldTarget[0], self.worldTarget[1], 0.0,
                               1.0)
        self.track.loc = loc
        self.pub.publish(self.track)
        self.currentTarget = self.tracker.world2Body(self.worldTarget)
        self.currentTime = time.time()

        if self.lastTime == 0:
            self.rollError = 0
            self.pitchError = 0
            self.yawError = 0
        else:
            self.rollError = self.currentTarget[0]
            self.pitchError = self.currentTarget[1]
            self.yawError = theta
        self.dt = (self.currentTime - self.lastTime) / 1000.

        self.totalError = [
            self.totalError[0] + self.rollError * self.dt,
            self.totalError[1] + self.pitchError * self.dt,
            self.totalError[2] + self.yawError * self.dt, 0
        ]

        pRoll = -self.Kp * (self.rollError)
        iRoll = -self.Ki * (self.totalError[0])
        dRoll = -self.Kd * ((self.rollError - self.lastError[0]) / self.dt)

        pPitch = self.Kp * (self.pitchError)
        iPitch = self.Ki * (self.totalError[1])
        dPitch = self.Kd * ((self.pitchError - self.lastError[1]) / self.dt)

        pYaw = self.KpYaw * (self.yawError)
        iYaw = self.KiYaw * (self.totalError[2])
        dYaw = self.KdYaw * ((self.yawError - self.lastYawError) / self.dt)
        yaw = pYaw + iYaw + dYaw

        self.lastError = self.currentTarget
        self.lastYawError = self.yawError

        self.lastTime = self.currentTime

        roll = pRoll + iRoll + dRoll
        pitch = pPitch + iPitch + dPitch

        if (abs(self.rollError) <= self.waitDist
                and abs(self.pitchError) <= self.waitDist
                and abs(self.yawError) < 2):
            directiveStatus = 1
            rospy.logwarn(self.yawError)
        else:
            directiveStatus = 0
        # Trim commands that exceed the drone's command limits
        roll = 1 if roll > 1 else roll
        roll = -1 if roll < -1 else roll
        pitch = 1 if pitch > 1 else pitch
        pitch = -1 if pitch < -1 else pitch

        #rospy.logwarn("roll: "+str(self.tracker.roll))
        #rospy.logwarn("pitch: "+str(self.tracker.pitch))
        rospy.logwarn(directiveStatus)
        return directiveStatus, (
            roll, pitch, 0,
            0), segImage, None, self.moveTime, self.waitTime, None

    # This method is called by the state machine when it considers this directive finished
    def Finished(self):
        self.Reset()
        #tapeLocation = self.tracker.body2World(self.target)[:,0]
        #loc = self.tracker.tape2World([x,y,-predictedZ],self.yaw,[tapeLocation[0],tapeLocation[1],0])
        if (self.platformNumber % 3 == 0):
            loc = np.asarray([self.target]).T
            loc[2] = 1.0
            rospy.logwarn("Reseting location to" + str(loc))
            loc = self.weightedUpdate(self.worldPoint, loc)
            self.track.loc = loc
            self.pub.publish(self.track)

    def Reset(self):
        self.dt = 0
        self.currentTime = time.time()
        self.lastTime = 0
        self.rollError = 0
        self.pitchError = 0
        self.lastError = [0, 0, 0]
        self.lastYawError = 0
        self.totalError = [0, 0, 0, 0]
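
# Generic sketch of the discrete PID step used above (illustrative gains only;
# the directive applies the same form per axis, with the roll terms negated):
#   command = Kp*e + Ki*sum(e*dt) + Kd*(e - e_prev)/dt
def pid_step(error, prev_error, integral, dt, kp=0.2, ki=0.0, kd=0.0005):
    integral += error * dt                        # accumulate the integral term
    p = kp * error
    i = ki * integral
    d = kd * (error - prev_error) / dt if dt > 0 else 0.0
    return p + i + d, integral

# e.g. a 0.5 m error with the default gains is dominated by the proportional term:
# pid_step(0.5, 0.4, 0.0, 0.02) -> (~0.1025, 0.01)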
Example #4
class ReturnToLineDirective(AbstractDroneDirective):

    # sets up this directive
    # lineColor: color of the line to return to
    # speedModifier: scales the computed x/y speeds
    # radiusThresh: search radius (in pixels) around the last known location
    def __init__(self, lineColor, speedModifier=0.5, radiusThresh=255):

        self.lineColor = lineColor
        self.processVideo = ProcessVideo()
        self.speedModifier = speedModifier
        self.radiusThresh = radiusThresh
        self.moveTime = 0.25
        self.waitTime = 0.10

    def InsideCircle(self, point, circleCenter, circleRadius):
        x = point[0]
        y = point[1]
        center_x = circleCenter[0]
        center_y = circleCenter[1]
        radius = circleRadius

        return (math.pow((x - center_x), 2) + math.pow(
            (y - center_y), 2)) < math.pow(radius, 2)

    # Given the image and navdata of the drone, returns the following in order:
    #
    # A directive status int:
    #   0 if algorithm is still running and drone hasn't returned to the line yet
    #   1 if algorithm is finished and drone is now over the line
    #
    # A tuple of (xspeed, yspeed, yawspeed, zspeed):
    #   indicating the next instructions to fly the drone
    #
    # An image reflecting what is being done as part of the algorithm
    def RetrieveNextInstruction(self, image, navdata):

        segLineImage = self.processVideo.DetectColor(image, self.lineColor)
        lines, image = self.processVideo.MultiShowLine(segLineImage)

        #navdata stores the last location and angle in the case of an error
        cx = navdata[1][0][0]
        cy = navdata[1][0][1]
        angle = navdata[1][1]

        #cv2.circle(image, (cx,cy), self.radiusThresh, (0,255,0), 1)

        hasPlatform = False
        # thresh in degrees
        thresh = 18
        for line in lines:
            if line != None and angle != None:
                # the original line was found if its angle matches the last seen angle,
                # to some threshold (angles are undirected, hence the 180 - thresh check)
                if ((abs(angle - line[0]) < thresh or abs(angle - line[0]) >
                     (180 - thresh))):
                    hasPlatform = True
                    cv2.circle(image, line[1], 15, (0, 255, 0), -1)
                    cx = line[1][0]
                    cy = line[1][1]
                else:
                    cv2.circle(image, line[1], 15, (0, 0, 255), 5)

        if hasPlatform:
            rospy.logwarn("Returned to " + self.lineColor + " line")
            directiveStatus = 1
            zspeed = 0

        else:
            rospy.logwarn("Returning to " + self.lineColor + " line")
            directiveStatus = 0
            zspeed = 0.1

        if cx == None or cy == None:
            rospy.logwarn("Returning -- no " + self.lineColor +
                          " detected @ this altitude, increasing altitude")
            # return the same 7-element tuple as every other code path
            return 0, (0, 0, 0, 0.5), image, ((cx, cy), angle), 0, 0, None

        xspeed, yspeed, _ = self.processVideo.ApproximateSpeed(image,
                                                               cx,
                                                               cy,
                                                               ytolerance=50,
                                                               xtolerance=50)

        yspeed = min(yspeed * self.speedModifier, 1)
        xspeed = min(xspeed * self.speedModifier, 1)
        rospy.logwarn("X Speed: " + str(xspeed) + " Y Speed: " + str(yspeed))

        self.processVideo.DrawCircle(image, (cx, cy))

        return directiveStatus, (xspeed, yspeed, 0, zspeed), image, ((
            cx, cy), angle), self.moveTime, self.waitTime, None
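
# Sketch of the angle-match test above: detected line angles are undirected
# (defined mod 180), so 5 and 175 degrees describe nearly the same line. A match
# is accepted when the raw difference is either small or close to 180.
def same_line_angle(a, b, thresh=18):
    diff = abs(a - b)
    return diff < thresh or diff > (180 - thresh)

# same_line_angle(5, 175) -> True  (difference of 170 wraps around)
# same_line_angle(5, 40)  -> False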
Example #5
class FollowLineDirective(AbstractDroneDirective):

    # sets up this directive
    # lineColor: color of the line to follow
    # speed: forward speed magnitude used while following the line
    def __init__(self, lineColor, speed=0.4):

        self.lineColor = lineColor
        self.speed = speed
        self.processVideo = ProcessVideo()
        self.moveTime = 0.45
        self.waitTime = 0.1
        self.prevAngle = None
        self.prevAngleCount = 0
        self.prevAngleCountMax = 185

    # Given the image and navdata of the drone, returns the following in order:
    #
    # A directive status int:
    #  -1 if the line was lost or the angle changed too abruptly (error)
    #   0 if algorithm is still running and drone is still following line
    #   1 if algorithm is finished and has finished following line
    #
    # A tuple of (xspeed, yspeed, yawspeed, zspeed):
    #   indicating the next instructions to fly the drone
    #
    # An image reflecting what is being done as part of the algorithm
    def RetrieveNextInstruction(self, image, navdata):

        segLineImage = self.processVideo.DetectColor(image, self.lineColor)
        platforms = navdata["allCenters"][1]
        lines, image = self.processVideo.MultiShowLine(segLineImage)

        if lines[0] != None:
            cv2.circle(image, lines[0][1], 15, (0, 0, 255), -1)
        if lines[1] != None:
            cv2.circle(image, lines[1][1], 15, (0, 255, 0), -1)
        if lines[2] != None:
            cv2.circle(image, lines[2][1], 15, (255, 0, 0), -1)

        linesVisible = (lines[0] != None) + (lines[1] != None) + (lines[2] !=
                                                                  None)

        if linesVisible == 0:
            rospy.logwarn(" *** ERROR: Lost " + self.lineColor + " line *** ")
            return -1, (0, 0, 0, 0), segLineImage, (None, None), 0, 0, None

        cx = lines[1][1][0]
        cy = lines[1][1][1]
        _, yspeed, _ = self.processVideo.ApproximateSpeed(
            segLineImage,
            cx,
            cy,
            None,
            None,
            navdata["SVCLAltitude"][1],
            0,
            xtolerance=80,
            ytolerance=95)

        newCenter = None
        thresh = 15

        # Checking if angle is consistent with before
        if (self.prevAngle == None
                or abs(self.prevAngle - lines[1][0]) < thresh
                or abs(self.prevAngle - lines[1][0]) > (180 - thresh)):

            self.prevAngle = lines[1][0]
            self.prevAngleCount = 0
            directiveStatus = 0

            # checking finish conditions
            # alternate way to finish
            xWindowSize = 200
            yWindowSize = 120
            xLower = 320 - xWindowSize
            yLower = 180 - yWindowSize
            xUpper = 320 + xWindowSize
            yUpper = 180 + yWindowSize

            foundRightPlatform = False
            tolerance = 15
            for platform in platforms:
                if (linesVisible > 1 and lines[1] != None
                        and platform[0] > lines[1][1][0]
                        and min(abs(lines[1][0] - 180),
                                180 - abs(lines[1][0] - 180)) < 16
                        and platform[0] < xUpper and platform[0] > xLower
                        and platform[1] < yUpper and platform[1] > yLower):
                    cv2.rectangle(image, (xLower, yLower), (xUpper, yUpper),
                                  (255, 255, 255), 4)
                    foundRightPlatform = True
                    newCenter = platform

            # alternatively, the directive is "finished" when only one platform remains,
            # the drone is vertically centered, two lines are visible, the followed line
            # is roughly horizontal, and the second line is far enough from the right edge
            if (foundRightPlatform or
                (len(platforms) == 1 and yspeed == 0 and lines[1] != None
                 and lines[2] != None and
                 ((lines[1][0] < (0 + tolerance)) or (lines[1][0]) >
                  (180 - tolerance)) and lines[2][1][0] < int(640 * 0.9))):

                xspeed = 0
                yspeed = 0
                yawspeed = 0
                directiveStatus = 1
                rospy.logwarn("Finished following line")
                return directiveStatus, (xspeed, yspeed, yawspeed, 0), image, (
                    (cx, cy),
                    self.prevAngle), self.moveTime, self.waitTime, newCenter

        else:

            # only uses the angle if it is similar to the last one. If it is too
            # different, the algorithm falls back to the previous angle as a buffer
            # against a sudden, spurious angle change

            rospy.logwarn(
                "Large sudden change in angle -- using old angle of " +
                str(self.prevAngle) + " instead of " + str(lines[1][0]))
            directiveStatus = -1

        xspeed = -self.speed

        # converting
        line1Angle = self.prevAngle
        if line1Angle == 90:
            line1Angle = 0
        elif line1Angle < 90:
            line1Angle = line1Angle + 90
        else:
            line1Angle = line1Angle - 90

        yawspeed = self.processVideo.LineOrientation(segLineImage,
                                                     line1Angle,
                                                     8,
                                                     yawspeed=0.45)

        # If the drone is still trying to follow the line, it adapts to one of three behaviors:

        # The drone will just move back toward the center if it is not "near" the
        # center as defined by a bounding box.
        # No turning or forward movement is applied.
        if yspeed != 0:
            rospy.logwarn("Moving blue line back to center")
            self.moveTime = 0.2
            self.waitTime = 0.1
            xspeed = 0
            yawspeed = 0

        # If drone is near the center but angle is off, it fixes the angle.
        # Drone does not move forward.
        elif yawspeed != 0:

            yawspeed = -yawspeed

            direction = "LEFT"
            if yawspeed < 0:
                direction = "RIGHT"

            rospy.logwarn("Turning the drone horizontal " + direction +
                          ",  yaw = " + str(yawspeed))
            self.moveTime = 1
            self.waitTime = 0.1
            xspeed = 0

        else:
            rospy.logwarn("Drone just going forward")
            self.moveTime = 0.9
            self.waitTime = 0.1

        return directiveStatus, (xspeed, yspeed, yawspeed, 0), image, ((
            cx, cy), self.prevAngle), self.moveTime, self.waitTime, newCenter

    def Finished(self):
        self.prevAngle = None
        self.prevAngleCount = 0
        return None
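
# Sketch of the angle remapping repeated in Examples 1 and 5 before calling
# LineOrientation/ObjectOrientation. The exact convention depends on those
# ProcessVideo helpers, which are not shown here; this just restates the mapping:
# 90 maps to 0, and every other angle is shifted by 90 while staying in [0, 180).
def remap_line_angle(angle):
    if angle == 90:
        return 0
    elif angle < 90:
        return angle + 90
    else:
        return angle - 90

# remap_line_angle(90) -> 0, remap_line_angle(30) -> 120, remap_line_angle(150) -> 60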