def run(self):
    while self.__running.isSet():
        self.__flag.wait()                      # Block here whenever the thread is paused
        # servo_command is a module-level variable set by the command handler
        if servo_command == 'lookleft':
            move.look_left()
        elif servo_command == 'lookright':
            move.look_right()
        elif servo_command == 'up':
            move.look_up()
        elif servo_command == 'down':
            move.look_down()
        print('servo_move')
        time.sleep(0.03)
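# ---------------------------------------------------------------------------
# Minimal sketch, assuming the usual pause/resume thread pattern: the run()
# method above lives inside a thread class that owns two threading.Event
# objects, __running (keeps the loop alive) and __flag (pauses/resumes it),
# while servo_command is set elsewhere by the command handler. The class and
# method names below are hypothetical, not the original source.
# ---------------------------------------------------------------------------
import threading

servo_command = 'no'                            # assumed module-level command variable

class ServoThread(threading.Thread):            # hypothetical name
    def __init__(self):
        super().__init__()
        self.__flag = threading.Event()         # pause/resume switch
        self.__flag.set()                       # start in the running state
        self.__running = threading.Event()      # loop-alive switch
        self.__running.set()

    def pause(self):
        self.__flag.clear()                     # run() blocks at self.__flag.wait()

    def resume(self):
        self.__flag.set()                       # let run() continue

    def stop(self):
        self.__flag.set()                       # unblock wait() so the loop can exit
        self.__running.clear()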
def run():
    global direction_command, turn_command, SmoothMode, steadyMode

    moving_threading = threading.Thread(target=move_thread)     # Define a thread for moving
    moving_threading.setDaemon(True)                             # Daemon thread: it terminates when the main program exits
    moving_threading.start()                                     # Thread starts

    info_threading = threading.Thread(target=info_send_client)  # Define a thread for communication
    info_threading.setDaemon(True)                               # Daemon thread: it terminates when the main program exits
    info_threading.start()                                       # Thread starts

    # info_threading = threading.Thread(target=FPV_thread)      # Define a thread for FPV and OpenCV
    # info_threading.setDaemon(True)
    # info_threading.start()

    ws_R = 0
    ws_G = 0
    ws_B = 0

    Y_pitch = 300
    Y_pitch_MAX = 600
    Y_pitch_MIN = 100

    while True:
        data = ''
        data = str(tcpCliSock.recv(BUFSIZ).decode())
        if not data:
            continue

        # Body movement commands
        elif 'forward' == data:
            direction_command = 'forward'
        elif 'backward' == data:
            direction_command = 'backward'
        elif 'DS' in data:
            direction_command = 'stand'

        elif 'left' == data:
            turn_command = 'left'
        elif 'right' == data:
            turn_command = 'right'
        elif 'leftside' == data:
            turn_command = 'left'
        elif 'rightside' == data:
            turn_command = 'right'
        elif 'TS' in data:
            turn_command = 'no'

        # Head (camera) servo commands
        elif 'headup' == data:
            move.look_up()
        elif 'headdown' == data:
            move.look_down()
        elif 'headhome' == data:
            move.look_home()
        elif 'headleft' == data:
            move.look_left()
        elif 'headright' == data:
            move.look_right()

        # WS2812 LED color commands, e.g. 'wsR 255'
        elif 'wsR' in data:
            try:
                set_R = data.split()
                ws_R = int(set_R[1])
                LED.colorWipe(Color(ws_R, ws_G, ws_B))
            except:
                pass
        elif 'wsG' in data:
            try:
                set_G = data.split()
                ws_G = int(set_G[1])
                LED.colorWipe(Color(ws_R, ws_G, ws_B))
            except:
                pass
        elif 'wsB' in data:
            try:
                set_B = data.split()
                ws_B = int(set_B[1])
                LED.colorWipe(Color(ws_R, ws_G, ws_B))
            except:
                pass

        # Function modes
        elif 'FindColor' in data:
            LED.breath_status_set(1)
            fpv.FindColor(1)
            tcpCliSock.send(('FindColor').encode())

        elif 'WatchDog' in data:
            LED.breath_status_set(1)
            fpv.WatchDog(1)
            tcpCliSock.send(('WatchDog').encode())

        elif 'steady' in data:
            LED.breath_status_set(1)
            LED.breath_color_set('blue')
            steadyMode = 1
            tcpCliSock.send(('steady').encode())

        elif 'funEnd' in data:
            LED.breath_status_set(0)
            fpv.FindColor(0)
            fpv.WatchDog(0)
            steadyMode = 0
            tcpCliSock.send(('FunEnd').encode())

        elif 'Smooth_on' in data:
            SmoothMode = 1
            tcpCliSock.send(('Smooth_on').encode())
        elif 'Smooth_off' in data:
            SmoothMode = 0
            tcpCliSock.send(('Smooth_off').encode())

        # GPIO switch ports
        elif 'Switch_1_on' in data:
            switch.switch(1, 1)
            tcpCliSock.send(('Switch_1_on').encode())
        elif 'Switch_1_off' in data:
            switch.switch(1, 0)
            tcpCliSock.send(('Switch_1_off').encode())
        elif 'Switch_2_on' in data:
            switch.switch(2, 1)
            tcpCliSock.send(('Switch_2_on').encode())
        elif 'Switch_2_off' in data:
            switch.switch(2, 0)
            tcpCliSock.send(('Switch_2_off').encode())
        elif 'Switch_3_on' in data:
            switch.switch(3, 1)
            tcpCliSock.send(('Switch_3_on').encode())
        elif 'Switch_3_off' in data:
            switch.switch(3, 0)
            tcpCliSock.send(('Switch_3_off').encode())

        # OpenCV line-following settings
        elif 'CVFL' in data:
            if not FPV.FindLineMode:
                FPV.FindLineMode = 1
                tcpCliSock.send(('CVFL_on').encode())
            else:
                # move.motorStop()
                # FPV.cvFindLineOff()
                FPV.FindLineMode = 0
                tcpCliSock.send(('CVFL_off').encode())

        elif 'Render' in data:
            if FPV.frameRender:
                FPV.frameRender = 0
            else:
                FPV.frameRender = 1

        elif 'WBswitch' in data:
            if FPV.lineColorSet == 255:
                FPV.lineColorSet = 0
            else:
                FPV.lineColorSet = 255

        elif 'lip1' in data:
            try:
                set_lip1 = data.split()
                lip1_set = int(set_lip1[1])
                FPV.linePos_1 = lip1_set
            except:
                pass
        elif 'lip2' in data:
            try:
                set_lip2 = data.split()
                lip2_set = int(set_lip2[1])
                FPV.linePos_2 = lip2_set
            except:
                pass
        elif 'err' in data:
            try:
                set_err = data.split()
                err_set = int(set_err[1])
                FPV.findLineError = err_set
            except:
                pass

        # Camera exposure compensation
        elif 'setEC' in data:
            ECset = data.split()
            try:
                fpv.setExpCom(int(ECset[1]))
            except:
                pass
        elif 'defEC' in data:
            fpv.defaultExpCom()

        else:
            pass
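# ---------------------------------------------------------------------------
# Hypothetical usage sketch, not part of the original source: run() above
# parses plain-text commands received on tcpCliSock, so a minimal client on
# the control PC could drive it as below. The IP address, port number, and
# the example_client() helper name are assumptions; substitute the values
# the server actually binds to.
# ---------------------------------------------------------------------------
import socket
import time

def example_client(host='192.168.1.10', port=10223):    # assumed robot IP and port
    cli = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    cli.connect((host, port))
    cli.send('forward'.encode())        # start walking forward
    time.sleep(2)
    cli.send('DS'.encode())             # 'DS' -> direction_command = 'stand'
    cli.send('wsR 255'.encode())        # set the red channel of the LED strip
    cli.send('steady'.encode())         # enable steady mode ...
    print(cli.recv(1024).decode())      # ... and read the echoed confirmation
    cli.close()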
def capture_thread(self, IPinver):
    global frame_image, camera

    # OpenCV / argument initialization
    ap = argparse.ArgumentParser()
    ap.add_argument("-b", "--buffer", type=int, default=64,
                    help="max buffer size")
    args = vars(ap.parse_args())
    pts = deque(maxlen=args["buffer"])

    font = cv2.FONT_HERSHEY_SIMPLEX

    # Publish the video stream to the client over ZeroMQ
    context = zmq.Context()
    footage_socket = context.socket(zmq.PUB)
    print(IPinver)
    footage_socket.connect('tcp://%s:5555' % IPinver)

    avg = None
    motionCounter = 0
    lastMovtionCaptured = datetime.datetime.now()

    for frame in camera.capture_continuous(rawCapture, format="bgr", use_video_port=True):
        frame_image = frame.array
        # Draw a crosshair at the center of the frame
        cv2.line(frame_image, (300, 240), (340, 240), (128, 255, 128), 1)
        cv2.line(frame_image, (320, 220), (320, 260), (128, 255, 128), 1)
        timestamp = datetime.datetime.now()

        if FindLineMode:
            cvFindLine()
            camera.exposure_mode = 'off'
            run_thread.switch(True)
        else:
            camera.exposure_mode = 'auto'
            run_thread.switch(False)

        if FindColorMode:
            ####>>>OpenCV Start<<<####
            hsv = cv2.cvtColor(frame_image, cv2.COLOR_BGR2HSV)
            mask = cv2.inRange(hsv, self.colorLower, self.colorUpper)
            mask = cv2.erode(mask, None, iterations=2)
            mask = cv2.dilate(mask, None, iterations=2)
            cnts = cv2.findContours(mask.copy(), cv2.RETR_EXTERNAL,
                                    cv2.CHAIN_APPROX_SIMPLE)[-2]
            center = None
            if len(cnts) > 0:
                cv2.putText(frame_image, 'Target Detected', (40, 60),
                            font, 0.5, (255, 255, 255), 1, cv2.LINE_AA)
                c = max(cnts, key=cv2.contourArea)
                ((x, y), radius) = cv2.minEnclosingCircle(c)
                M = cv2.moments(c)
                center = (int(M["m10"] / M["m00"]), int(M["m01"] / M["m00"]))
                X = int(x)
                Y = int(y)
                if radius > 10:
                    cv2.rectangle(frame_image,
                                  (int(x - radius), int(y + radius)),
                                  (int(x + radius), int(y - radius)),
                                  (255, 255, 255), 1)

                # Track the target vertically until Y is within the tolerance band
                if Y < (240 - tor):
                    error = (240 - Y) / 5
                    outv = int(error)
                    move.look_up(outv)
                    Y_lock = 0
                elif Y > (240 + tor):
                    error = (Y - 240) / 5
                    outv = int(error)
                    move.look_down(outv)
                    Y_lock = 0
                else:
                    Y_lock = 1

                # Track the target horizontally
                if X < (320 - tor):
                    error_X = (320 - X) / 5
                    outv_X = int(error_X)
                    move.look_left(outv_X)
                    X_lock = 0
                elif X > (320 + tor):
                    error_X = (X - 320) / 5
                    outv_X = int(error_X)
                    move.look_right(outv_X)
                    X_lock = 0
                else:
                    X_lock = 1

                if X_lock == 1 and Y_lock == 1:
                    LED.breath_color_set('red')
            else:
                cv2.putText(frame_image, 'Target Detecting', (40, 60),
                            font, 0.5, (255, 255, 255), 1, cv2.LINE_AA)
                LED.breath_color_set('yellow')
            ####>>>OpenCV Ends<<<####

        if not WatchDogMode:
            pass
        else:
            gray = cv2.cvtColor(frame_image, cv2.COLOR_BGR2GRAY)
            gray = cv2.GaussianBlur(gray, (21, 21), 0)

            if avg is None:
                print("[INFO] starting background model...")
                avg = gray.copy().astype("float")
                rawCapture.truncate(0)
                continue

            cv2.accumulateWeighted(gray, avg, 0.5)
            frameDelta = cv2.absdiff(gray, cv2.convertScaleAbs(avg))

            # threshold the delta image, dilate the thresholded image to fill
            # in holes, then find contours on thresholded image
            thresh = cv2.threshold(frameDelta, 5, 255,
                                   cv2.THRESH_BINARY)[1]
            thresh = cv2.dilate(thresh, None, iterations=2)
            cnts = cv2.findContours(thresh.copy(), cv2.RETR_EXTERNAL,
                                    cv2.CHAIN_APPROX_SIMPLE)
            cnts = imutils.grab_contours(cnts)

            # loop over the contours
            for c in cnts:
                # if the contour is too small, ignore it
                if cv2.contourArea(c) < 5000:
                    continue

                # compute the bounding box for the contour, draw it on the frame,
                # and update the text
                (x, y, w, h) = cv2.boundingRect(c)
                cv2.rectangle(frame_image, (x, y), (x + w, y + h), (128, 255, 0), 1)
                text = "Occupied"
                motionCounter += 1
                LED.breath_color_set('red')
                lastMovtionCaptured = timestamp

            if (timestamp - lastMovtionCaptured).seconds >= 0.5:
                LED.breath_color_set('blue')

        # Encode the selected frame as JPEG and stream it as base64 text
        if FindLineMode and not frameRender:
            encoded, buffer = cv2.imencode('.jpg', frame_findline)
        else:
            encoded, buffer = cv2.imencode('.jpg', frame_image)
        jpg_as_text = base64.b64encode(buffer)
        footage_socket.send(jpg_as_text)

        rawCapture.truncate(0)
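# ---------------------------------------------------------------------------
# Hypothetical receiving-side sketch, not part of the original source:
# capture_thread() above sends base64-encoded JPEG frames from a zmq PUB
# socket that connects to port 5555 on the viewer, so a minimal viewer could
# bind a SUB socket there and display the stream. The example_viewer() helper
# name is an assumption.
# ---------------------------------------------------------------------------
import base64
import cv2
import numpy as np
import zmq

def example_viewer(port=5555):                       # assumed viewer-side port
    context = zmq.Context()
    footage_socket = context.socket(zmq.SUB)
    footage_socket.bind('tcp://*:%d' % port)         # the robot's PUB socket connects here
    footage_socket.setsockopt_string(zmq.SUBSCRIBE, '')
    while True:
        frame = footage_socket.recv()                # base64 text produced by footage_socket.send()
        img = cv2.imdecode(np.frombuffer(base64.b64decode(frame), dtype=np.uint8),
                           cv2.IMREAD_COLOR)
        cv2.imshow('Robot FPV', img)
        if cv2.waitKey(1) & 0xFF == ord('q'):        # press 'q' to quit
            break
    cv2.destroyAllWindows()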