Example #1
def writeDATA(s):
    try:
        temp = sensor.read_temperature()
        pascals = sensor.read_pressure()
        press = pascals / 100  # convert Pa to hPa
        hum = sensor.read_humidity()
    except Exception:
        print('Sensor Error. Please try again.')
        return  # readings are undefined after a failure, so bail out instead of crashing below
    # hard-coded test values from the original, kept for reference:
    #hum = 50
    #press = 1000
    #temp = 85.3
    print(temp)
    print(press)
    print(hum)

    t = time.strftime("%Y%m%d%H%M%S", time.localtime())
    sendData(s, t, humidity=hum, pressure=press, temperature=temp)
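This snippet relies on a sensor object, a sendData helper, the time module, and an open socket s that are defined elsewhere in the project. A minimal, hypothetical stand-in that makes the function runnable for testing (all names below are assumptions, not part of the original example):

import time

class FakeSensor(object):
    """Dummy sensor exposing the read_* interface used above (e.g. a BME280 driver)."""
    def read_temperature(self): return 21.5    # degrees C
    def read_pressure(self): return 101325.0   # Pa
    def read_humidity(self): return 40.0       # percent relative humidity

sensor = FakeSensor()

def sendData(s, t, humidity, pressure, temperature):
    # Placeholder transport: format the readings and push them over the socket s.
    s.sendall("{},{},{},{}".format(t, temperature, pressure, humidity).encode())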
Example #2
    gray = cv2.cvtColor(img, cv2.COLOR_BGR2GRAY)
    faces = face_cascade.detectMultiScale(gray, 1.3, 5)

    #finds dimensions for each face found
    for (x, y, w, h) in faces:
        count = 0
        cv2.rectangle(img, (x, y), (x + w, y + h), (255, 0, 0))
        state = 2
        centerX = x + (w / 2)
        centerY = y + (h / 2)
        size = w * h
        if not follow:
            if 280 < centerX < 360:
                while tf:
                    time.sleep(1)
                    client.sendData("Howdy")
                    #client.killSocket()
                    tf = False
                xx, yy = keys.getHead()
                if 5700 < xx < 6300:
                    print("moves")
                    move2you(w)
                    if 89 <= w <= 120:
                        follow = True
                        break
                else:
                    print("turns")
                    turn2center(xx)
    faceLR = motion(keys, state, centerX, centerY, size, faceLR)
    if len(faces) == 0:
        count += 1
Example #3
def getBuckets():
    camera = PiCamera()
    camera.resolution = (640, 480)
    camera.framerate = 32
    rawCapture = PiRGBArray(camera, size=(640, 480))

    time.sleep(1)

    win = "Frame"
    keys = KeyControl(win)

    keys.arrow('f')
    time.sleep(.1)

    keys.head('d')
    keys.arm('sh3')
    found = False

    client.sendData("Entering Score Zone")

    for frame in camera.capture_continuous(rawCapture,
                                           format="bgr",
                                           use_video_port=True):
        img = frame.array

        hsv = cv2.cvtColor(img, cv2.COLOR_BGR2HSV)

        hsv = hsv[0:480, 160:480]

        lowerOrange = np.array([150, 0, 200])
        upperOrange = np.array([165, 135, 255])

        orangeMask = cv2.inRange(hsv, lowerOrange, upperOrange)
        orangePath = np.argwhere(orangeMask != 0)

        print(len(orangePath))
        if len(orangePath) >= 500:
            found = True

        mean = np.mean(orangePath, axis=0)
        floor = mean.astype(int)
        y = floor[0]
        x = floor[1]

        rawCapture.truncate(0)

        if not found:  #if not found, search room for bucket
            keys.arrow('l')
            time.sleep(.5)
            keys.arrow('s')
        else:
            if 150 <= x <= 170:  #if bucket is centered then go forward
                if y >= 300:  #if close enough, then make bucket
                    keys.arrow('f')
                    time.sleep(1.75)
                    keys.arrow('s')
                    keys.arrow('l')
                    time.sleep(.75)
                    keys.arrow('s')
                    keys.arrow('h2')
                    print("letgo")
                    time.sleep(2)
                    cv2.destroyAllWindows()
                    break
                else:  #else go forward
                    keys.arrow('f')
                    time.sleep(1)
                    keys.arrow('s')
            if x < 150:  #center bucket
                keys.arrow('r')
                time.sleep(.25)
                keys.arrow('s')
            if x > 170:
                keys.arrow('l')
                time.sleep(.25)
                keys.arrow('s')

        cv2.imshow("HSV", hsv)
        cv2.imshow('Image', orangeMask)

        if cv2.waitKey(1) & 0xFF == ord('q'):
            break
Example #4
def getIce():
    camera = PiCamera()
    camera.resolution = (640, 480)
    camera.framerate = 32
    rawCapture = PiRGBArray(camera, size=(640, 480))

    time.sleep(1)

    win = "Frame"
    keys = KeyControl(win)

    keys.head('l')
    keys.arm('sh1')
    keys.arm('h2')
    frameCount = 0
    frameCountOther = 0

    for frame in camera.capture_continuous(rawCapture,
                                           format="bgr",
                                           use_video_port=True):
        found = False
        img = frame.array

        hsv = cv2.cvtColor(img, cv2.COLOR_BGR2HSV)

        #hsv = hsv[300:480,160:480] #make screen smaller

        #pink values
        lowerPink = np.array([150, 0, 200])
        upperPink = np.array([165, 135, 255])

        pinkMask = cv2.inRange(hsv, lowerPink, upperPink)
        pinkPath = np.argwhere(pinkMask != 0)

        found = len(pinkPath) >= 500

        if found:
            frameCount += 1
            frameCountOther = 0
        else:
            frameCount = 0
            frameCountOther += 1

        #closes hand and moves arm down after grabbing the ice
        if frameCount >= 60:
            client.sendData("Thank you for the pink ice")
            keys.arm('h1')
            time.sleep(1.5)
            keys.arm('sh2')
            keys.arm('u')
            time.sleep(.5)
            break

        #asks for pink ice if it doesn't see the pink ice
        if frameCountOther >= 30:
            client.sendData("I Want the pink ice")
            frameCountOther = 0

        key = cv2.waitKey(1) & 0xFF
        if key == ord('q'):
            break

        cv2.imshow('Image', pinkMask)
        cv2.imshow("HSV", hsv)

        rawCapture.truncate(0)
    camera.close()
Example #5
def findHuman():
    camera = PiCamera()
    camera.resolution = (640, 480)
    camera.framerate = 32
    rawCapture = PiRGBArray(camera, size=(640, 480))

    face_cascade = cv2.CascadeClassifier('haarcascade_frontalface_default.xml')

    time.sleep(1)

    win = "Frame"
    keys = KeyControl(win)

    keys.arrow('f')
    time.sleep(.25)
    client.sendData("Entering Mining Zone")

    keys.head('u')
    found = False

    for frame in camera.capture_continuous(rawCapture,
                                           format="bgr",
                                           use_video_port=True):
        img = frame.array
        gray = cv2.cvtColor(img, cv2.COLOR_BGR2GRAY)
        faces = face_cascade.detectMultiScale(gray, 1.3, 5)

        width = 0
        #draws the contours around face
        for (x, y, w, h) in faces:
            found = True
            cv2.rectangle(img, (x, y), (x + w, y + h), (255, 0, 0))
            centerX = x + (w / 2)
            centerY = y + (h / 2)
            width = w
            size = w * h

        rawCapture.truncate(0)

        print(found)
        if not found:  #if not found, search room for face
            keys.arrow('r')
            time.sleep(.5)
            keys.arrow('s')
        else:
            print(centerX)
            if 280 <= centerX <= 360:  #if person is centered then go forward
                print("Width:", width)
                if width >= 100:  #if close enough, then break out of function
                    keys.arrow('s')
                    client.sendData("Give me ice")
                    cv2.destroyAllWindows()
                    break
                else:  #else go forward
                    print(w)
                    keys.arrow('f')
                    time.sleep(1)
                    keys.arrow('s')
            if centerX < 280:  #center face
                keys.arrow('r')
                time.sleep(.5)
                keys.arrow('s')
            if centerX > 360:  #center face
                keys.arrow('l')
                time.sleep(.5)
                keys.arrow('s')

        cv2.imshow('Image', img)
        if cv2.waitKey(1) & 0xFF == ord('q'):
            break
        centerX = 160
        time.sleep(.5)

    camera.close()
Example #6
def rockField():
    camera = PiCamera()
    camera.resolution = (640, 480)
    camera.framerate = 32
    rawCapture = PiRGBArray(camera, size=(640, 480))

    kernel = np.ones((5, 5), np.uint8)

    # allow the camera to warmup
    time.sleep(1)

    win = "Frame"
    keys = KeyControl(win)

    #keys.head('u')
    keys.head('d')
    pastFirstLine = False
    inRocks = False
    frameTime = 0

    client.sendData("Entering Rock Field")

    # capture frames from the camera
    for frame in camera.capture_continuous(rawCapture,
                                           format="bgr",
                                           use_video_port=True):
        # grab the raw NumPy array representing the image, then initialize the timestamp
        # and occupied/unoccupied text
        pic = frame.array
        hsv = cv2.cvtColor(pic, cv2.COLOR_BGR2HSV)

        #hsv = cv2.blur(hsv, (5,5))
        hsv = hsv[300:480, 160:480]

        lowerOrange = np.array([10, 50, 50])
        upperOrange = np.array([35, 255, 255])

        orangeMask = cv2.inRange(hsv, lowerOrange, upperOrange)
        orangePath = np.argwhere(orangeMask != 0)

        #values of white
        lowerWhite = np.array([100, 0, 200])
        upperWhite = np.array([180, 135, 255])
        xLowWhite = np.array([0, 0, 200])
        xUpperWhite = np.array([10, 135, 255])

        #masks image to get more of the white from the folders
        whiteMask1 = cv2.inRange(hsv, lowerWhite, upperWhite)
        whiteMask2 = cv2.inRange(hsv, xLowWhite, xUpperWhite)
        whiteMask = whiteMask1 + whiteMask2
        whitePath = np.argwhere(whiteMask != 255)  #coordinates of pixels outside the white range (mask value 0)

        cv2.imshow("HSV", hsv)
        cv2.imshow("Frame", whiteMask)

        #keys.arrow("f")
        #time.sleep(1.5)
        #print(mask)
        #cv2.imshow("HSV", mask1)
        #cv2.imshow("HSV1", hsv)

        #pic = brighten(pic)
        # pic = cv2.cvtColor(pic, cv2.COLOR_BGR2GRAY)
        # ret,thresh = cv2.threshold(pic,127,255,0)
        # pic = cv2.blur(pic, (3,3))
        # pic = pic[420:480,160:480]
        # pic = cv2.Canny(pic, 100, 170)
        # pic = cv2.dilate(pic, kernel, iterations=5)
        # pixels = np.argwhere(pic == 0)
        orangeMean = np.mean(orangePath, axis=0)
        orangeFloor = orangeMean.astype(int)
        orangeY = orangeFloor[0]
        orangeX = orangeFloor[1]

        whiteMean = np.mean(whitePath, axis=0)
        whiteFloor = whiteMean.astype(int)
        whiteY = whiteFloor[0]
        whiteX = whiteFloor[1]
        mid = 160
        #print(whiteX)

        #cv2.namedWindow("Frame", cv2.WINDOW_NORMAL)

        key = cv2.waitKey(1) & 0xFF

        while sys.stdin in select.select([sys.stdin], [], [], 0)[0]:
            line = sys.stdin.readline()
            if line.startswith("q"):
                keys.arrow('s')
                exit()
        # clear the stream in preparation for the next frame
        rawCapture.truncate(0)

        #checks for orange at bottom of camera for first line
        #print(pastFirstLine, frameTime)
        #print(inRocks)
        pastFirstLine = True

        #Checks if first line is crossed, if so starts short timer
        if pastFirstLine:
            frameTime += 1
            if frameTime >= 60:  #after 60 frames, declare that the robot is in the rock field
                inRocks = True
            if inRocks:  #if in rock field
                #print("True:", orangeY)
                if orangeY >= 120:  #if the next line is about to be crossed, break out of the function
                    cv2.destroyAllWindows()
                    break

        #print(whiteX)
        #if white is centered, go forward
        if 155 <= whiteX <= 165:
            keys.arrow('f')
        #if white is on the left side of screen, go right
        if whiteX < 155:
            keys.arrow('r')
        #if white is on the right side of screen, go left
        if whiteX > 165:
            keys.arrow('l')
    camera.close()
Example #7
def sendData(self, data):
    #forward the payload to the host and port configured in self.params
    server = self.params["server"]
    return client.sendData(server["host"], server["port"], data)
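For context, a hypothetical sketch of the class this method might belong to; the layout of self.params and the client.sendData(host, port, data) call are taken from the snippet itself, while everything else below is an assumption:

# Hypothetical surrounding class for Example #7; only sendData comes from the snippet.
# `client` is assumed to be the project's networking module, as in the other examples.
class StatusReporter(object):
    def __init__(self, host, port):
        self.params = {"server": {"host": host, "port": port}}

    def sendData(self, data):
        server = self.params["server"]
        return client.sendData(server["host"], server["port"], data)

#reporter = StatusReporter("192.168.1.10", 9000)
#reporter.sendData("Entering Score Zone")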