def Centrize(center):
    """Steer the robot so the target's x-coordinate `center` moves toward
    the frame midline (frames elsewhere in this file are 400 px wide, so
    the deadband is 190-210 px).

    Turn speed is graded: the farther `center` is from the deadband, the
    stronger the correction. Values strictly inside 190 < center < 210
    produce no motion.

    NOTE(review): the original used independent `if` statements, so e.g.
    center <= 50 satisfied every left-turn threshold and all five motor
    calls fired back-to-back — the last one (left(.02)) effectively won
    and the graded speeds were dead code. The elif ladder below applies
    exactly one correction per call, which is the evident intent.
    """
    # Target is left of center: thresholds ascend toward the deadband,
    # speeds taper off as we approach it.
    if center <= 50:
        dcmotor.left(.75)
        time.sleep(.00001)
    elif center <= 100:
        dcmotor.left(.5)
        time.sleep(.00001)
    elif center <= 150:
        dcmotor.left(.25)
        time.sleep(.00001)
    elif center <= 185:
        dcmotor.left(.1)
        time.sleep(.00001)
    elif center <= 190:
        dcmotor.left(.02)
        time.sleep(.001)
    # Target is right of center: thresholds descend toward the deadband.
    elif center >= 350:
        dcmotor.right(.75)
        time.sleep(.00001)
    elif center >= 300:
        dcmotor.right(.5)
        time.sleep(.00001)
    elif center >= 250:
        dcmotor.right(.25)
        time.sleep(.00001)
    elif center >= 215:
        dcmotor.right(.1)
        time.sleep(.00001)
    elif center >= 210:
        dcmotor.right(.02)
        time.sleep(.001)
def Centrize(centroidX):
    """Steer the robot so the detected object's x-coordinate `centroidX`
    moves toward the frame midline (frames are 400 px wide; callers in this
    project treat 190-210 px as "centered").

    Turn speed is graded: the farther `centroidX` is from the 190-210
    deadband, the stronger the correction. Values strictly inside
    190 < centroidX < 210 produce no motion.

    NOTE(review): the original used independent `if` statements, so a far
    value satisfied several thresholds and every matching motor call fired
    in sequence — the last one won and the graded speeds were dead code.
    The original's innermost thresholds were also inconsistent (left: 185
    then 180; right: 215 then 200); they are aligned here to the 190/210
    deadband that the calling code checks — confirm against the intended
    tuning.
    """
    # Target is left of center: thresholds ascend toward the deadband,
    # speeds taper off as we approach it.
    if centroidX <= 50:
        dcmotor.left(.5)
        time.sleep(.00001)
    elif centroidX <= 100:
        dcmotor.left(.35)
        time.sleep(.00001)
    elif centroidX <= 150:
        dcmotor.left(.25)
        time.sleep(.00001)
    elif centroidX <= 185:
        dcmotor.left(.15)
        time.sleep(.00001)
    elif centroidX <= 190:
        dcmotor.left(.025)
        time.sleep(.001)
    # Target is right of center: thresholds descend toward the deadband.
    elif centroidX >= 350:
        dcmotor.right(.5)
        time.sleep(.00001)
    elif centroidX >= 300:
        dcmotor.right(.35)
        time.sleep(.00001)
    elif centroidX >= 250:
        dcmotor.right(.25)
        time.sleep(.00001)
    elif centroidX >= 215:
        dcmotor.right(.15)
        time.sleep(.00001)
    elif centroidX >= 210:
        dcmotor.right(.025)
        time.sleep(.001)
#pi.set_servo_pulsewidth(servo1, 1490) #time.sleep(.05) #fro #pi.set_servo_pulsewidth(servo1, 1540) #time.sleep(.4) #pi.set_servo_pulsewidth(servo1, 1490) #time.sleep(.05) f = 1 #od.Object() print(f) if f == 1: #pandp.pick() b = 1 #color.Blue() dcmotor.backward(y) dcmotor.left(2.2) dcmotor.forward(3 - x) x += 1 if x == 3: x = 0 y += 1 if y == 3: y = 0 #placenter.Center() print(b) #if b == 1: #pandp.place() #dcmotor.backward(1)
def Blue(): print("[INFO] starting video stream...") #vs = VideoStream(src=0).start() vs = VideoStream(usePiCamera=True).start() time.sleep(0.5) fps = FPS().start() b = 0 while True: frame = vs.read() frame = imutils.rotate_bound(frame, 180) frame = imutils.resize(frame, width=400) #for i in range(0,300): #print(frame[i][200]) #fsh lower = np.array([15, 70, 0]) upper = np.array([115, 155, 20]) #hsvframe = cv2.cvtColor(frame, cv2.COLOR_BGR2HSV) mask = cv2.inRange(frame, lower, upper) mask = cv2.erode(mask, None, iterations=1) mask = cv2.dilate(mask, None, iterations=3) color = cv2.bitwise_and(frame, frame, mask=mask) cnts = cv2.findContours(mask.copy(), cv2.RETR_EXTERNAL, cv2.CHAIN_APPROX_SIMPLE)[-2] center = None if len(cnts) == 0: dcmotor.left(2) time.sleep(.1) if len(cnts) > 0: c = max(cnts, key=cv2.contourArea) rect = cv2.minAreaRect(c) M = cv2.moments(c) center = (int(M["m10"] / M["m00"]), int(M["m01"] / M["m00"])) if rect[1][0] > 10: box = cv2.boxPoints(rect) box = np.int0(box) frame = cv2.drawContours(frame, [box], 0, (0, 0, 255), 2) center = center[0] if (center <= 190 or center >= 210): color_centrize.Centrize(center) elif (rect[1][0] <= 200): if rect[1][0] <= 100: dcmotor.forward(3) elif rect[1][0] <= 220: dcmotor.forward(1) if (center >= 190 and center <= 210 and rect[1][0] >= 220): b = 1 if b == 1: dcmotor.forward(1) break cv2.imshow("Frame", frame) cv2.imshow("Color", color) key = cv2.waitKey(1) & 0xFF if key == ord("q"): break fps.update() fps.stop() print("[INFO] elapsed time: {:.2f}".format(fps.elapsed())) print("[INFO] approx. FPS: {:.2f}".format(fps.fps())) cv2.destroyAllWindows() vs.stop() return b
def Object():
    """Rotate until the MobileNet-SSD detector sees class index 5
    ("bottle"), then center on the largest such detection and drive
    toward it.

    Command-line arguments (parsed here): --prototxt and --model
    (required Caffe files), --confidence (default 0.2).

    Returns:
        int: 1 once the largest bottle is horizontally centered
        (x in 190-210) and close (bbox width >= 138 px); 0 if the loop
        was quit via the 'q' key first.
    """
    from imutils.video import VideoStream
    from imutils.video import FPS
    import numpy as np
    import argparse
    import imutils
    import time
    import cv2
    import dcmotor
    import centrize
    import object_found
    import pandp
    import color
    ap = argparse.ArgumentParser()
    ap.add_argument("-p", "--prototxt", required=True,
        help="path to Caffe 'deploy' prototxt file")
    ap.add_argument("-m", "--model", required=True,
        help="path to Caffe pre-trained model")
    ap.add_argument("-c", "--confidence", type=float, default=0.2,
        help="minimum probability to filter weak detections")
    args = vars(ap.parse_args())
    # The 20 PASCAL-VOC classes (plus background) that the Caffe
    # MobileNet-SSD model predicts; index 5 is "bottle".
    CLASSES = ["background", "aeroplane", "bicycle", "bird", "boat",
        "bottle", "bus", "car", "cat", "chair", "cow", "diningtable",
        "dog", "horse", "motorbike", "person", "pottedplant", "sheep",
        "sofa", "train", "tvmonitor"]
    COLORS = np.random.uniform(0, 255, size=(len(CLASSES), 3))
    print("[INFO] loading model...")
    net = cv2.dnn.readNetFromCaffe(args["prototxt"], args["model"])
    print("[INFO] starting video stream...")
    #vs = VideoStream(src=0).start()
    vs = VideoStream(usePiCamera=True).start()
    time.sleep(0.5)  # camera warm-up
    fps = FPS().start()
    while True:
        frame = vs.read()
        # Camera appears to be mounted upside-down: rotate before use.
        frame = imutils.rotate_bound(frame, 180)
        frame = imutils.resize(frame, width=400)
        (h, w) = frame.shape[:2]
        blob = cv2.dnn.blobFromImage(cv2.resize(frame, (300, 300)),
            0.007843, (300, 300), 127.5)
        net.setInput(blob)
        detections = net.forward()
        # Collect the indices of detections whose class id is 5 ("bottle").
        reqimg = []
        for i in np.arange(0, detections.shape[2]):
            idx = detections[0, 0, i, 1]
            if idx == 5:
                reqimg.append(i)
        if reqimg == []:
            # No bottle in view: rotate in place and try the next frame.
            dcmotor.left(2)
            time.sleep(.1)
        # Bounding-box areas of all bottle detections; sorted descending so
        # arealist[0] is the largest bottle's area.
        arealist = []
        for i in reqimg:
            box = detections[0, 0, i, 3:7] * np.array([w, h, w, h])
            (startX, startY, endX, endY) = box.astype("int")
            a = endX-startX
            b = endY-startY
            area = a*b
            arealist.append(area)
        arealist.sort(reverse = True)
        flag = 0
        for i in reqimg:
            confidence = detections[0, 0, i, 2]
            if confidence > args["confidence"]:
                idx = int(detections[0, 0, i, 1])
                box = detections[0, 0, i, 3:7] * np.array([w, h, w, h])
                (startX, startY, endX, endY) = box.astype("int")
                a = endX-startX  # bbox width in pixels
                b = endY-startY  # bbox height in pixels
                area = a*b
                if area == arealist[0]:  # only act on the largest bottle
                    label = "{}: {:.2f}%".format(CLASSES[idx],
                        confidence * 100)
                    cv2.rectangle(frame, (startX, startY), (endX, endY),
                        COLORS[idx], 2)
                    # Keep the label inside the frame when the box touches
                    # the top edge.
                    y = startY - 15 if startY - 15 > 15 else startY + 15
                    cv2.putText(frame, label, (startX, y),
                        cv2.FONT_HERSHEY_SIMPLEX, 0.5, COLORS[idx], 2)
                    centroidX =startX + a/2
                    #41 known width(mm),100 pixel width(px),180 distance(mm)
                    # Pinhole-camera range estimate from the calibration
                    # constants in the comment above.
                    focal = float((100*180)/41)
                    distance = float((focal*41)/a)
                    print("Distance using camera: {}mm".format(distance))
                    cv2.putText(frame,
                        "Distance using camera: %.2fmm" % distance,
                        (frame.shape[1] - 390, frame.shape[0] - 15),
                        cv2.FONT_HERSHEY_SIMPLEX, 0.6, (0, 0, 255), 2)
                    # Steer to center the bottle, then creep toward it.
                    centrize.Centrize(centroidX)
                    object_found.towards_object(a)
                    flag = 0
                    # Centered and close enough: declare success.
                    if (centroidX>= 190 and centroidX <= 210) and (a>=138):
                        flag=1
        if flag==1:
            break
        cv2.imshow("Frame", frame)
        key = cv2.waitKey(1) & 0xFF
        if key == ord("q"):
            break
        fps.update()
    fps.stop()
    print("[INFO] elapsed time: {:.2f}".format(fps.elapsed()))
    print("[INFO] approx. FPS: {:.2f}".format(fps.fps()))
    cv2.destroyAllWindows()
    vs.stop()
    return flag