def rec_control(key): global rec_flag, vname # start,pause if key == 'q': key = None if rec_flag == 'stop': rec_flag = 'start' # set name vname = strftime("%Y-%m-%d-%H.%M.%S", localtime()) Vilib.rec_video_set["name"] = vname # start record Vilib.rec_video_run() Vilib.rec_video_start() print('rec start ...') elif rec_flag == 'start': rec_flag = 'pause' Vilib.rec_video_pause() print('pause') elif rec_flag == 'pause': rec_flag = 'start' Vilib.rec_video_start() print('continue') # stop elif key == 'e' and rec_flag != 'stop': key = None rec_flag = 'stop' Vilib.rec_video_stop() print("The video saved as %s%s.avi" % (Vilib.rec_video_set["path"], vname), end='\n')
def color_detect(color):
    """Enable vilib detection of *color*; the literal 'close' disables it."""
    if color == 'close':
        print("Color detect off!")
        Vilib.color_detect_switch(False)
        return
    print("detecting color :" + color)
    Vilib.color_detect(color)
def main():
    """Console loop for the colour-detection demo.

    Keys '0'-'6' select an entry from `color_list`, 's' prints detection
    info, 'q' saves a timestamped photo, 'g' (or Ctrl-C) closes the
    camera and exits.
    """
    global color_flag
    path = "/home/pi/Pictures/vilib/color_detection/"
    Vilib.camera_start(vflip=True, hflip=True)
    Vilib.display(local=True, web=True)
    time.sleep(2)
    print(manual)
    while True:
        try:
            cmd = input().lower()
            if cmd in ('0', '1', '2', '3', '4', '5', '6'):
                color_flag = color_list[int(cmd)]
                color_detect(color_flag)
            elif cmd == "s":
                show_info()
            elif cmd == 'q':
                _time = time.strftime("%y-%m-%d_%H-%M-%S", time.localtime())
                Vilib.take_photo(photo_name=str(_time), path=path)
                print("The photo save as %s%s.jpg" % (path, _time))
            elif cmd == "g":
                Vilib.camera_close()
                break
        except KeyboardInterrupt:
            # Ctrl-C anywhere in the loop shuts the camera down cleanly
            Vilib.camera_close()
            break
def main():
    """Start the camera stream and enable TFLite image classification."""
    Vilib.camera_start(vflip=True, hflip=True)
    Vilib.display(local=True, web=True)
    # quantised MobileNet v1 model plus its label file
    Vilib.image_classify_set_model(
        path='/home/pi/pan-tilt-hat/models/mobilenet_v1_0.25_224_quant.tflite')
    Vilib.image_classify_set_labels(
        path='/home/pi/pan-tilt-hat/models/labels_mobilenet_quant_v1_224.txt')
    Vilib.image_classify_switch(True)
def continuous_shooting(path, interval_ms: int = 50, number=10):
    """Take *number* photos back-to-back into a timestamped sub-folder.

    Shots are named 000, 001, ... and spaced *interval_ms* milliseconds
    apart; the folder name is derived from the current local time.
    """
    print("continuous_shooting .. ")
    folder = '%s/%s' % (path, strftime("%Y-%m-%d-%H.%M.%S", localtime()))
    for idx in range(number):
        Vilib.take_photo(photo_name='%03d' % idx, path=folder)
        print("take_photo: %s" % idx)
        sleep(interval_ms / 1000)
    print("continuous_shooting done,the pictures save as %s" % folder)
    sleep(0.2)
def frames():
    """Generator: grab 320x240 frames from the V4L2 camera, rotate them
    180 degrees, run vilib human/colour detection, and yield JPEG bytes.
    """
    camera = cv2.VideoCapture(Camera.video_source)
    # property ids 3 / 4 are frame width / height
    camera.set(3, 320)
    camera.set(4, 240)
    width = int(camera.get(3))
    height = int(camera.get(4))
    # rotation matrix for a 180-degree turn about the frame centre
    M = cv2.getRotationMatrix2D((width / 2, height / 2), 180, 1)
    # keep only the newest frame in the driver buffer
    camera.set(cv2.CAP_PROP_BUFFERSIZE, 1)
    # camera.rotation = 180
    # rawCapture = PiRGBArray(camera, size=(320, 240))
    # if not camera.isOpened():
    #     raise RuntimeError('Could not start camera.')
    start_time = time.time()
    fpscount = 0
    # print(cv2.useOptimized())
    cv2.setUseOptimized(True)
    # print(cv2.useOptimized())
    while True:
        # read current frame
        # img = cv2.warpAffine(img, M, (640, 480))
        # print(time.time()- start_time)
        _, img = camera.read()
        img = cv2.warpAffine(img, M, (320, 240))
        img = Vilib.human_detect_func(img)
        img = Vilib.color_detect_func(img)
        ## FPS
        # fpscount += 1
        # if (time.time()- start_time) >= 1:
        #     print(fpscount)
        #     start_time = time.time()
        #     fpscount = 0
        ## FPS
        # start_time = time.time()
        # t1 = cv2.getTickCount()
        # img = Vilib.human_detect_func(img)
        # t2 = cv2.getTickCount()
        # print(int(1/round((t2-t1)/cv2.getTickFrequency(),2)))
        # img = cv2.resize(img, (320,240), interpolation=cv2.INTER_LINEAR)
        # img = Vilib.color_detect_func(img)
        # encode as a jpeg image and return it
        yield cv2.imencode('.jpg', img)[1].tobytes()
def panorama_shooting(path):
    """Sweep the pan servo from -80 to +80 degrees, photograph every 20
    degrees, then stitch the shots into one panorama saved under *path*.

    Uses module globals `panAngle`/`tiltAngle`, a temp folder that is
    removed afterwards, and `Status_info` to name stitcher status codes.
    """
    global panAngle, tiltAngle
    temp_path = "/home/pi/Pictures/vilib/panorama/.temp/"
    imgs = []
    # check path
    check_dir(path)
    check_dir(temp_path)
    # take photo
    # first slew the pan servo back to -80 in 5-degree steps
    for a in range(panAngle, -81, -5):
        panAngle = a
        pan.set_angle(panAngle)
        sleep(0.1)
    num = 0
    for angle in range(-80, 81, 20):
        # creep one degree at a time to the next shooting angle
        for a in range(panAngle, angle, 1):
            panAngle = a
            pan.set_angle(a)
            sleep(0.1)
        # settle before the shot
        sleep(0.5)
        # sleep(0.5)
        print(num, angle)
        Vilib.take_photo(photo_name='%s' % num, path=temp_path)
        sleep(0.2)
        num += 1
    # stitch image
    stitcher = cv2.Stitcher_create(cv2.Stitcher_SCANS)
    for index in range(num):
        imgs.append(cv2.imread('%s/%s.jpg' % (temp_path, index)))
    print('imgs num: %s, ' % len(imgs))
    status, pano = stitcher.stitch(imgs)
    # imwrite and imshow
    # status 0 is cv2.Stitcher_OK
    print('status: %s , %s' % (status, Status_info[status]))
    if status == 0:
        cv2.imwrite(
            '%s/%s.jpg' % (path, strftime("%Y-%m-%d-%H.%M.%S", localtime())),
            pano)
        cv2.imshow('panorama', pano)
    # remove cache
    os.system('sudo rm -r %s' % temp_path)
def continuous_shooting(path, interval_s=3, duration_s=3600):
    """Time-lapse loop: save a timestamped photo to *path* every
    *interval_s* seconds until *duration_s* seconds elapse or the user
    presses 'e'.

    NOTE(review): `key` is read from module scope (set by a keyboard
    thread elsewhere in the file) — confirm it is defined before this
    function runs.
    """
    print('\nStart time-lapse photography, press the "e" key to stop')
    start_time = time()
    node_time = start_time
    while True:
        if time() - node_time > interval_s:
            node_time = time()
            Vilib.take_photo(photo_name=strftime("%Y-%m-%d-%H-%M-%S", localtime()), path=path)
        if key == 'e' or time() - start_time > duration_s:
            break
        sleep(0.01)  # second
def human_follow():
    """Nudge the pan/tilt servos one degree toward the detected human.

    Vilib reports -1/1 for the off-centre direction on each axis; pan is
    clamped to +/-90 and tilt to +/-45 via the module-level angle state.
    """
    global pan_angle_human, tilt_angle_human
    Vilib.human_detect_switch(True)
    x_state = Vilib.human_detect_object('x')
    y_state = Vilib.human_detect_object('y')
    time.sleep(0.005)
    # on left -1 on right 1
    if x_state == -1:
        pan_angle_human += 1
        set_camera_servo1_angle(min(90, pan_angle_human))
    elif x_state == 1:
        pan_angle_human -= 1
        set_camera_servo1_angle(max(-90, pan_angle_human))
    if y_state == -1:
        tilt_angle_human += 1
        set_camera_servo2_angle(min(45, tilt_angle_human))
    elif y_state == 1:
        tilt_angle_human -= 1
        set_camera_servo2_angle(max(-45, tilt_angle_human))
def forever():
    """Wave the pan servo (+30 then -30), recentre tilt, and greet via
    TTS whenever at least one person is detected."""
    if Vilib.human_detect_object('number') >= 1:
        set_camera_servo1_angle(30)
        delay(150)
        set_camera_servo1_angle(-30)
        delay(150)
        set_camera_servo2_angle(0)
        delay(150)
        __tts__.say('Hello,nice to meet projesh!')
def main():
    """Photo demo: 'q' saves a timestamped photo, 'g' or Ctrl-C quits."""
    path = "/home/pi/Pictures/vilib/"
    Vilib.camera_start(vflip=True, hflip=True)
    Vilib.display(local=True, web=True)
    time.sleep(2)
    print(manual)
    while True:
        try:
            cmd = input().lower()
            if cmd == "q":
                stamp = time.strftime("%y-%m-%d_%H-%M-%S", time.localtime())
                Vilib.take_photo(photo_name=str(stamp), path=path)
                print("The photo save as %s%s.jpg" % (path, stamp))
            elif cmd == "g":
                Vilib.camera_close()
                break
        except KeyboardInterrupt:
            Vilib.camera_close()
            break
def forever():
    """Steer the camera and front wheels toward the detected colour blob
    and drive forward while the blob is wider than 50 px."""
    global x_axis, width, pan_angle
    x_axis = Vilib.color_detect_object('x')
    width = Vilib.color_detect_object('width')
    delay(5)
    # x_axis is -1 (blob left of centre) or 1 (right of centre)
    if x_axis in (-1, 1):
        pan_angle = constrain(pan_angle + x_axis, -90, 90)
        set_camera_servo1_angle(pan_angle)
        # re-clamped for steering; this narrower value also carries into
        # the next call, exactly as in the original per-branch code
        pan_angle = constrain(pan_angle, -45, 45)
        set_dir_servo_angle(pan_angle)
    if width > 50:
        forward(50)
    else:
        stop()
async def recv_server_func(self, websocket):
    """Receive JSON control packets over *websocket* forever and apply
    them to the car.

    Each packet is merged into self.recv_dict, then dispatched; judging
    by the calls: JA/JB drive the remote/camera joysticks, MS sets motor
    speeds, DO/PO/TO calibrate dir/pan/tilt servos, CC picks the colour
    to detect. (Packet schema inferred from usage — confirm with sender.)
    """
    while 1:
        tmp = await websocket.recv()
        print(tmp)
        tmp = json.loads(tmp)
        # merge the new packet into the persistent state dict
        for key in tmp:
            self.recv_dict[key] = tmp[key]
        print("recv_dict: %s" % self.recv_dict)
        self.remote_control(self.recv_dict['JA'])
        self.camera_contrl(self.recv_dict['JB'])
        # print(recv_dict)
        if self.recv_dict['MS'][0] == 'on':
            car.set_motor_speed(int(self.recv_dict['MS'][1]), int(self.recv_dict['MS'][2]))
        if self.recv_dict['DO'][0] == 'on':
            car.dir_servo_angle_calibration(int(self.recv_dict['DO'][1]))
        if self.recv_dict['PO'][0] == 'on':
            # pan/tilt calibration values are sign-inverted here
            car.camera_servo1_angle_calibration(-int(self.recv_dict['PO'][1]))
        if self.recv_dict['TO'][0] == 'on':
            car.camera_servo2_angle_calibration(-int(self.recv_dict['TO'][1]))
        Vilib.detect_color_name(str(self.recv_dict['CC']))
def color_follow():
    """Track the detected colour blob with the pan/tilt servos, steer the
    chassis with the pan correction, and drive while the blob is small.

    NOTE(review): the final `elif 0 < size...` is chained to the TILT
    if/elif, so forward()/stop() only run on cycles with no vertical
    correction — confirm this is intended and not a mis-nested branch.
    """
    global pan_angle_color, tilt_angle_color
    Vilib.color_detect_switch(True)
    status = [Vilib.color_detect_object('x'), Vilib.color_detect_object('y')]
    size = [
        Vilib.color_detect_object('width'),
        Vilib.color_detect_object('height')
    ]
    time.sleep(0.005)
    # on left -1 on right 1
    if status[0] == -1:
        pan_angle_color = pan_angle_color + 1
        set_camera_servo1_angle(min(90, pan_angle_color))
        # steering mirrors the pan correction, clamped to +/-45
        set_dir_servo_angle(max(-45, -pan_angle_color))
    elif status[0] == 1:
        pan_angle_color = pan_angle_color - 1
        set_camera_servo1_angle(max(-90, pan_angle_color))
        set_dir_servo_angle(min(45, -pan_angle_color))
    if status[1] == -1:
        tilt_angle_color = tilt_angle_color + 1
        set_camera_servo2_angle(min(45, tilt_angle_color))
    elif status[1] == 1:
        tilt_angle_color = tilt_angle_color - 1
        set_camera_servo2_angle(max(-45, tilt_angle_color))
    elif 0 < size[0] < 100 or 0 < size[1] < 100:
        forward(50)
    else:
        stop()
def main():
    """Enable human detection and print detection info five times a second."""
    Vilib.camera_start(vflip=True, hflip=True)
    Vilib.display(local=True, web=True)
    Vilib.human_detect_switch(True)
    while True:
        object_show()
        sleep(0.2)
def frames():
    """Generator: yield JPEG frames from the Pi camera with vilib colour
    and human detection applied, printing the per-frame processing time.
    """
    with picamera.PiCamera() as camera:
        # let camera warm up
        # camera = PiCamera()
        camera.resolution = (320, 240)
        camera.framerate = 32
        camera.rotation = 180
        # rawCapture = PiRGBArray(camera, size=(320, 240))
        # time.sleep(2)
        # stream = io.BytesIO()
        rawCapture = PiRGBArray(camera, size=(320, 240))
        start_time = time.time()
        fpscount = 0
        # enable vilib's colour- and human-detection passes
        Vilib.cdf_flag = True
        Vilib.hdf_flag = True
        Vilib.color_change('blue')
        print(cv2.useOptimized())
        cv2.setUseOptimized(True)
        print(cv2.useOptimized())
        for _ in camera.capture_continuous(rawCapture, format="bgr", use_video_port=True):
            # return current frame
            # stream.seek(0)
            img = _.array
            # img = frame.array
            t1 = cv2.getTickCount()
            img = Vilib.color_detect_func(img)
            img = Vilib.human_detect_func(img)
            t2 = cv2.getTickCount()
            # wall time of the two detection passes, in seconds
            print(round((t2 - t1) / cv2.getTickFrequency(), 3))
            # yield stream.read()
            yield cv2.imencode('.jpg', img)[1].tobytes()
            # reuse the capture buffer for the next frame
            rawCapture.truncate(0)
def main():
    """Run hand detection and print the index-fingertip landmark (8)
    once per second when a full 21-joint hand is reported."""
    Vilib.camera_start(vflip=True, hflip=True)
    Vilib.display(local=True, web=True)
    Vilib.hands_detect_switch(True)
    joints = []
    while True:
        joints = Vilib.detect_obj_parameter['hands_joints']
        if isinstance(joints, list) and len(joints) == 21:
            print(joints[8])
        sleep(1)
def main():
    """Burst-photo demo: every key is passed to servo_control(); 'q'
    fires a 10-shot burst, 'g' closes the camera and exits."""
    Vilib.camera_start(vflip=True, hflip=True)
    Vilib.display(local=True, web=True)
    path = "/home/pi/Pictures/vilib/continuous_shooting"
    print(manual)
    while True:
        cmd = readchar().lower()
        servo_control(cmd)
        if cmd == 'q':
            continuous_shooting(path, interval_ms=50, number=10)
        elif cmd == 'g':
            Vilib.camera_close()
            break
        sleep(0.01)
def main():
    """Video-recording demo: keys route through rec_control() ('q'/'e')
    and servo_control(); 'g' closes the camera and exits."""
    Vilib.camera_start(vflip=True, hflip=True)
    Vilib.display(local=True, web=True)
    sleep(2)
    print(manual)
    while True:
        cmd = readchar().lower()
        # rec control
        rec_control(cmd)
        # servo control
        servo_control(cmd)
        # esc
        if cmd == 'g':
            Vilib.camera_close()
            break
        sleep(0.1)
def main():
    """Time-lapse demo driven by a background keyboard-scan thread.

    The thread updates the module-level `key`; 'q' runs a one-hour
    time-lapse (one frame every 3 s) and renders it to a 30 fps video,
    'g' raises `breakout_flag` and exits.
    """
    global key
    Vilib.camera_start(vflip=True, hflip=True)
    Vilib.display(local=True, web=True)
    sleep(2)
    print(manual)
    sleep(0.2)
    # keyboard_scan writes the module-level `key` from its own thread
    t = threading.Thread(target=keyboard_scan)
    t.setDaemon(True)
    t.start()
    path = "/home/pi/Videos/vilib/time_lapse"
    check_dir(path)
    while True:
        servo_control(key)
        # time-lapse photography
        if key == 'q':
            # check path
            output = path + '/' + strftime("%Y-%m-%d-%H-%M-%S", localtime())
            check_dir(output)
            # take a picture every 3 seconds for 3600 seconds
            continuous_shooting(output, interval_s=3, duration_s=3600)
            # video_synthesis
            name = strftime("%Y-%m-%d-%H-%M-%S", localtime())
            video_synthesis(name=name, output=output, path=path, fps=30, format='.jpg', datetime=True)
        # esc
        if key == 'g':
            Vilib.camera_close()
            global breakout_flag
            breakout_flag = True
            sleep(0.1)
            print('The program ends, please press CTRL+C to exit.')
            break
        sleep(0.01)
def main():
    """Panorama demo: 'q' runs panorama_shooting(), 'g' quits.

    Note: readchar() is used without .lower() here, matching the
    original — only lowercase keys trigger actions.
    """
    path = "/home/pi/Pictures/vilib/panorama"
    Vilib.camera_start(vflip=True, hflip=True)
    Vilib.display(local=True, web=True)
    sleep(2)
    print(manual)
    while True:
        cmd = readchar()
        # take photo
        if cmd == 'q':
            print("panorama shooting ...")
            panorama_shooting(path)
        # esc
        if cmd == 'g':
            print('Quit')
            Vilib.camera_close()
            break
        sleep(0.01)
def main():
    """Start the camera stream and enable TFLite object detection using
    the models shipped under /opt/vilib."""
    Vilib.camera_start(vflip=True, hflip=True)
    Vilib.display(local=True, web=True)
    Vilib.object_detect_set_model(path='/opt/vilib/detect.tflite')
    Vilib.object_detect_set_labels(path='/opt/vilib/coco_labels.txt')
    Vilib.object_detect_switch(True)
def __init__(self):
    """Start the vilib camera and open V4L2 capture device 0.

    NOTE(review): `sensitivity` is read from an enclosing scope (module
    global or class attribute not visible in this chunk) — confirm it is
    defined before instances are created.
    """
    Vilib.camera_start(True)
    self.sensitivity = sensitivity
    self.capture = cv2.VideoCapture(0)
from ezblock import __reset_mcu__
import time

__reset_mcu__()
time.sleep(0.01)

from vilib import Vilib
from picarmini import dir_servo_angle_calibration
# fix: set_camera_servo1_angle is called in forever() but was never
# imported (only set_camera_servo2_angle was), causing a NameError
from picarmini import set_camera_servo1_angle
from picarmini import set_camera_servo2_angle
from ezblock import delay
from ezblock import TTS
from picarmini import camera_servo1_angle_calibration
from picarmini import camera_servo2_angle_calibration
from ezblock import WiFi

__tts__ = TTS()

# camera + human detection on, servos zeroed, Wi-Fi credentials pushed
Vilib.camera_start(True)
Vilib.human_detect_switch(True)
dir_servo_angle_calibration(0)
camera_servo1_angle_calibration(0)
camera_servo2_angle_calibration(0)
WiFi().write('DE', 'Projesh', 'Syepto17')


def forever():
    """Wave the pan servo (+30/-30) and recentre tilt whenever at least
    one person is detected (150 ms pause between moves)."""
    if Vilib.human_detect_object('number') >= 1:
        set_camera_servo1_angle(30)
        delay(150)
        set_camera_servo1_angle(-30)
        delay(150)
        set_camera_servo2_angle(0)
        delay(150)
# fix: the original had a bare `except ImportError:` with no matching
# `try:` — a syntax error. Restored the standard ezblock/sim_ezblock
# hardware-fallback pattern.
try:
    from ezblock import *
    from ezblock import __reset_mcu__
    __reset_mcu__()
    time.sleep(0.01)
except ImportError:
    print(
        "This computer does not appear to be a PiCar -X system(/opt/ezblock is not present). Shadowing hardware calls with substitute functions "
    )
    from sim_ezblock import *

from picarx_w3 import picar_thing
from picar_opencv import *
import sys

sys.path.append(r'/opt/ezblock')
from vilib import Vilib

Vilib.camera_start(True)
# Vilib.color_detect_switch(True)
# Vilib.detect_color_name('red')
cap = cv2.VideoCapture(0)


class sensor():
    """Grayscale/line sensors wired to ADC pins A0-A2."""

    def __init__(self):
        self.S0 = ADC('A0')
        self.S1 = ADC('A1')
        self.S2 = ADC('A2')

    def get_adc_value(self):
        # finds the values going to the ADC pins
        # NOTE(review): S2 is initialised but not sampled here and the
        # method has no return — it appears truncated in this chunk;
        # confirm the remaining lines elsewhere in the file.
        adc_value_list = []
        adc_value_list.append(self.S0.read())
        adc_value_list.append(self.S1.read())
def forever():
    """Print how many people vilib currently detects.

    Produces exactly the same text as the original join-based version:
    'There are <n> people'.
    """
    count = Vilib.human_detect_object('number')
    print('There are %s people' % count)
import time
from vilib import Vilib
# from ezblock import Servo,PWM
# from robothat import *
import numpy as np
# from th_test import task_start

# Manual test script: start the camera web stream, enable colour
# detection, and alternate the target colour between blue and red
# every 10 seconds.
Vilib.camera_start(web_func=True)
# Vilib.human_detect_switch(True)
Vilib.color_detect_switch(True)
Vilib.detect_color_name('blue')
while True:
    # print(Vilib.human_detect_object('number'))
    # print(Vilib.human_detect_object('y'))
    start_time = time.time()
    time.sleep(10)
    Vilib.detect_color_name('blue')
    time.sleep(10)
    Vilib.detect_color_name('red')
    # print(" ")
    # count+=1
    # for i in range(1000):
    #     pass
    # # print("end")
    # # pass
    # this_time = time.time() - start_time
    # print(round(this_time,3))
    # all_time += this_time
    # avge_time = round(all_time/float(count),3)
from flask_camera import web_camera_start
from camera import Camera
import time
import threading
from vilib import Vilib

# Timing test: run the flask camera stream and forever() side by side
# via Vilib's threading helper, with red colour detection enabled.
Vilib.cdf_flag = True
# Vilib.hdf_flag = True
Vilib.color_change('red')

# def forever_threading():
#     while True:
#         forever()

def forever():
    # measure how long a nominal 1 s sleep actually takes
    last_time = time.time()
    time.sleep(1)
    sub_time = time.time() - last_time
    print(sub_time)

if __name__ == '__main__':
    Vilib.threading_start_with(web_camera_start,forever)
    while True:
        print('end')
    # t1 = threading.Thread(target=web_camera_start)  # Thread is a class; instantiating it creates thread object t1
    # t1.start()  # start the thread
    # t2 = threading.Thread(target=forever_threading)  # this creates thread object t2
    # t2.start()
from picarmini import dir_servo_angle_calibration from picarmini import camera_servo1_angle_calibration from picarmini import camera_servo2_angle_calibration from ezblock import delay from ezblock import constrain from picarmini import set_camera_servo1_angle from picarmini import set_dir_servo_angle from picarmini import forward from picarmini import stop from ezblock import WiFi x_axis = None width = None pan_angle = None Vilib.detect_color_name('red') Vilib.color_detect_switch(True) Vilib.camera_start(True) pan_angle = 0 dir_servo_angle_calibration(0) camera_servo1_angle_calibration(0) camera_servo2_angle_calibration(0) WiFi().write('CN', 'MakerStarsHall', 'sunfounder') def forever(): global x_axis, width, pan_angle x_axis = Vilib.color_detect_object('x') width = Vilib.color_detect_object('width') delay(5) if x_axis == -1:
from vilib import Vilib # initialize the camera and grab a reference to the raw camera capture camera = PiCamera() camera.resolution = (320, 240) camera.framerate = 32 camera.rotation = 180 rawCapture = PiRGBArray(camera, size=(320, 240)) # allow the camera to warmup time.sleep(0.1) start_time = time.time() fpscount = 0 # capture frames from the camera Vilib.cdf_flag = True Vilib.hdf_flag = True Vilib.color_change('blue') print(cv2.useOptimized()) cv2.setUseOptimized(True) print(cv2.useOptimized()) for frame in camera.capture_continuous(rawCapture, format="bgr", use_video_port=True): # grab the raw NumPy array representing the image, then initialize the timestamp # and occupied/unoccupied text img = frame.array t1 = cv2.getTickCount() img = Vilib.color_detect_func(img) img = Vilib.human_detect_func(img) t2 = cv2.getTickCount() print(round((t2 - t1) / cv2.getTickFrequency(), 3)) # cv2.imshow("Frame", image)