def tweet(a):
    pc.unlock()
    pc.move(xy.app(3, 2))
    pc.tap()  # open twitter
    pc.movexy(2850, 3000)
    pc.lightTap()  # hit the compose button
    sleep(4)  # wait for the keyboard to come up
    pc.type(a)
    pc.movexy(2800, 6350)  # hit the tweet button
    pc.tap()
    pc.getOutTheWay()
    sleep(5)
    pc.homeButton()
    pc.zero()
    sleep(1)
    pc.onOff()

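# Optional refactoring sketch (not from the original project): the coordinate
# pairs below are the ones tweet() taps, but the names are mine. Naming the
# plotter targets makes the magic numbers above easier to audit and reuse.
TWITTER_TARGETS = {
    'compose_button': (2850, 3000),  # pc.lightTap() target in tweet()
    'tweet_button': (2800, 6350),    # pc.tap() target in tweet()
}


def tapTarget(name):
    # Hypothetical helper: move the plotter to a named target and tap it.
    x, y = TWITTER_TARGETS[name]
    pc.movexy(x, y)
    pc.tap()
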
def lookForHearts():
    # Create an in-memory stream so the photo doesn't need to be saved to a file
    stream = io.BytesIO()
    camW = 1280
    camH = 960
    # Take the picture (low resolution, so it should be quite fast)
    # Other parameters (e.g. rotating the image) can also be set here
    with picamera.PiCamera() as camera:
        camera.resolution = (camW, camH)  # 4:3; 1280x960 also works
        camera.capture(stream, format='jpeg')
    # Convert the picture into a numpy array
    buff = numpy.fromstring(stream.getvalue(), dtype=numpy.uint8)
    # Now create an OpenCV image
    image = cv2.imdecode(buff, 1)
    # Crop the image so we only search for hearts in the left part of the phone
    roiX = 640
    roiY = 300
    roiYMax = 800
    roi = image[roiY:roiYMax, 640:750]  # [height range, width range]
    # Load a cascade file for detecting hearts
    heart_cascade = cv2.CascadeClassifier('camera/heart_cascade_level_16.xml')
    # Convert to grayscale
    gray = cv2.cvtColor(roi, cv2.COLOR_BGR2GRAY)
    # Look for hearts in the ROI using the loaded cascade file
    hearts = heart_cascade.detectMultiScale(roi)  # previously looked at gray instead of the cropped roi
    numHearts = len(hearts)
    print "Found " + str(numHearts) + " heart(s)"

    if numHearts == 1:
        print "one heart"
        # Draw a rectangle around every found heart
        for (x, y, w, h) in hearts:
            heartX = roiX + x + w / 2  # heart center plus the offset from the image edge to the roi
            heartY = roiY + y + h / 2
            cv2.rectangle(image, (x + roiX, y + roiY),
                          (x + w + roiX, y + h + roiY), (255, 0, 0), 2)
            # draw a point at the center of the heart
            cv2.line(image, (heartX, heartY), (heartX, heartY), (0, 0, 255), 5)
            cv2.imwrite('result.jpg', image)  # save image
            print "heartX, heartY = (%d, %d)" % (heartX, heartY)
            toY = mapValues.myMap(heartY)
            print "toY = %d" % toY
            pc.movexy(900, toY)
            pc.lightTap()
            pc.checkStatus()
            # Tapped the heart; now decide whether to comment as well
            b = random.randint(1, 100)
            print "random number = %d" % b
            if b <= 20:
                pc.x(1100)  # move over to the comment button
                pc.tap()
                pc.type("nice")
                pc.movexy(2800, 4300)  # post comment
                pc.tap()
                sleep(2)
                pc.movexy(900, 6300)  # back to stream
                pc.tap()
            if b > 20 and b <= 40:
                pc.x(1100)  # move over to the comment button
                pc.tap()
                pc.type('great pic')
                pc.movexy(2800, 4300)  # post comment
                pc.tap()
                sleep(2)
                pc.movexy(900, 6300)  # back to stream
                pc.tap()
        return (toY)
    else:
        print "no hearts found"

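# lookForHearts() hands the detected heart's camera Y pixel to
# mapValues.myMap() to get a plotter Y coordinate. The real mapping lives in
# the project's mapValues module; the linear remap below is only a sketch of
# what such a function typically does, and every endpoint value here is a
# placeholder, not a measured calibration.
def myMapSketch(camY, camMin=300, camMax=800, plotMin=2000, plotMax=6000):
    # Linearly interpolate camY from the camera pixel range into the plotter range.
    scale = float(plotMax - plotMin) / (camMax - camMin)
    return int(plotMin + (camY - camMin) * scale)
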
def lookForLikes():
    # Create an in-memory stream so the photo doesn't need to be saved to a file
    stream = io.BytesIO()
    camW = 1280
    camH = 960
    # Take the picture (low resolution, so it should be quite fast)
    # Other parameters (e.g. rotating the image) can also be set here
    with picamera.PiCamera() as camera:
        camera.resolution = (camW, camH)  # 4:3; 1280x960 also works
        camera.capture(stream, format='jpeg')
    # Convert the picture into a numpy array
    buff = numpy.fromstring(stream.getvalue(), dtype=numpy.uint8)
    # Now create an OpenCV image
    image = cv2.imdecode(buff, 1)
    # Crop the image so we only search for like buttons in the left part of the phone
    roiX = 670
    roiXMax = 830
    roiY = 300
    roiYMax = 1000
    minsize = 60
    maxsize = 69
    roi = image[roiY:roiYMax, roiX:roiXMax]  # [height range, width range]
    # Load a cascade file for detecting the Facebook like button
    the_cascade = cv2.CascadeClassifier('camera/cascades/fb_like5.xml')
    # Convert to grayscale and blur slightly before detection
    gray = cv2.cvtColor(roi, cv2.COLOR_BGR2GRAY)
    bluramt = 2
    blur = cv2.blur(gray, (bluramt, bluramt))
    # Look for like buttons in the ROI using the loaded cascade file
    hearts = the_cascade.detectMultiScale(blur, 1.05, 20,
                                          minSize=(minsize, minsize),
                                          maxSize=(maxsize, maxsize))  # previously looked at gray instead of the cropped roi
    numHearts = len(hearts)
    print "Found " + str(numHearts) + " heart(s)"

    if numHearts == 1:
        print "one heart"
        # Draw a rectangle around every found button
        for (x, y, w, h) in hearts:
            heartX = roiX + x + w / 2  # button center plus the offset from the image edge to the roi
            heartY = roiY + y + h / 2
            cv2.rectangle(image, (x + roiX, y + roiY),
                          (x + w + roiX, y + h + roiY), (255, 0, 0), 2)
            # draw a point at the center of the button
            cv2.line(image, (heartX, heartY), (heartX, heartY), (0, 0, 255), 5)
            cv2.imwrite('result.jpg', image)  # save image
            print "heartX, heartY = (%d, %d)" % (heartX, heartY)
            toY = mapValues.myMap(heartY)
            print "toY = %d" % toY
            pc.movexy(1200, toY)
            pc.lightTap()
            pc.checkStatus()
            b = random.randint(1, 100)
        return (toY)
    else:
        print "no hearts found"

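# Tuning sketch: lookForLikes() calls detectMultiScale with scaleFactor=1.05,
# minNeighbors=20 and a tight size window. The standalone helper below shows
# how those parameters could be tried offline against a saved frame such as
# result.jpg; the loop values are illustrative, not settings from the project.
def tuneLikeCascade(imgPath='result.jpg'):
    cascade = cv2.CascadeClassifier('camera/cascades/fb_like5.xml')
    img = cv2.imread(imgPath)
    if img is None:
        print "could not read %s" % imgPath
        return
    gray = cv2.cvtColor(img, cv2.COLOR_BGR2GRAY)
    for neighbors in (5, 10, 20):  # higher values reject weaker detections
        found = cascade.detectMultiScale(gray, 1.05, neighbors,
                                         minSize=(60, 60), maxSize=(69, 69))
        print "minNeighbors=%d -> %d detection(s)" % (neighbors, len(found))
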
def followBack():
    # Create an in-memory stream so the photo doesn't need to be saved to a file
    stream = io.BytesIO()
    camW = 1280
    camH = 960
    # Take the picture (low resolution, so it should be quite fast)
    # Other parameters (e.g. rotating the image) can also be set here
    with picamera.PiCamera() as camera:
        camera.resolution = (camW, camH)  # 4:3; 1280x960 also works
        camera.capture(stream, format='jpeg')
    # Convert the picture into a numpy array
    buff = numpy.fromstring(stream.getvalue(), dtype=numpy.uint8)
    # Now create an OpenCV image
    image = cv2.imdecode(buff, 1)
    # Crop the image so we only search for follow buttons in this part of the phone
    roiX = 955
    roiXMax = 1255
    roiY = 50
    roiYMax = 850
    roi = image[roiY:roiYMax, roiX:roiXMax]  # [height range, width range]
    cv2.rectangle(image, (roiX, roiY), (roiXMax, roiYMax), (255, 255, 255), 2)  # rect around roi
    # Load a cascade file for detecting the follow button
    heart_cascade = cv2.CascadeClassifier('camera/follow_button_cascade_level_9.xml')
    # Convert to grayscale and blur slightly before detection
    gray = cv2.cvtColor(roi, cv2.COLOR_BGR2GRAY)
    bluramt = 4
    blur = cv2.blur(gray, (bluramt, bluramt))
    # Look for follow buttons in the ROI using the loaded cascade file
    hearts = heart_cascade.detectMultiScale(blur, 1.3, 4, minSize=(120, 120), maxSize=(130, 130))
    # hearts = heart_cascade.detectMultiScale(roi, 1.6, 10, minSize=(100, 100), maxSize=(160, 160))  # previously looked at gray instead of the cropped roi
    numHearts = len(hearts)
    print "Found " + str(numHearts) + " follow(s)"

    if numHearts > 0:
        # Draw a rectangle around every found button
        for (x, y, w, h) in hearts:
            heartX = roiX + x + w / 2  # button center plus the offset from the image edge to the roi
            heartY = roiY + y + h / 2
            cv2.rectangle(image, (x + roiX, y + roiY),
                          (x + w + roiX, y + h + roiY), (0, 0, 0), 2)
            # draw a point at the center of the button
            cv2.line(image, (heartX, heartY), (heartX, heartY), (255, 255, 255), 5)
            '''
            For whatever reason this Haar cascade finds both the blue "Follow"
            and the white "Following" buttons. The following code determines the
            average color and keeps only detected buttons that are primarily blue.
            '''
            thisHeart = roi[y:y + h, x:x + w]  # was x:x + h; the buttons are roughly square, but slice by width
            # cv2.imwrite('thisHeart.jpg', thisHeart)
            avg_color_per_row = numpy.average(thisHeart, axis=0)
            avg_color = numpy.average(avg_color_per_row, axis=0)
            print(avg_color)  # average color of the button in (B, G, R)
            avg_r_g = (avg_color[1] + avg_color[2]) / 2  # average of the red and green channels
            print "avg_r_g = %r" % avg_r_g
            blue_ratio = avg_color[0] / avg_r_g
            print "blue_ratio = %r" % blue_ratio
            if blue_ratio > 1:  # previously set to 1.15
                print "gotta follow this guy"
                cv2.line(image, (heartX, heartY), (heartX, heartY), (0, 0, 255), 5)
                print "heartX, heartY = (%d, %d)" % (heartX, heartY)
                print "heartW, heartH = (%d, %d)" % (w, h)
                toY = mapValues.myMap(heartY)
                print "toY = %d" % toY
                pc.movexy(2650, toY)
                sleep(1)
                pc.lightTap()
                pc.checkStatus()

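# followBack() decides between the blue "Follow" and white "Following" buttons
# by comparing the blue channel to the average of red and green. The helper
# below is that same check pulled out into a reusable sketch; the default
# threshold mirrors the 1.0 used above (1.15 was tried previously), and the
# function name is mine, not the project's.
def isMostlyBlue(patch, threshold=1.0):
    # patch is a BGR image region; returns True if blue dominates red/green.
    avg_color = numpy.average(numpy.average(patch, axis=0), axis=0)  # (B, G, R)
    avg_r_g = (avg_color[1] + avg_color[2]) / 2
    return (avg_color[0] / avg_r_g) > threshold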