def init(is_debug, pixformat, delay_time):
    """Configure the camera sensor and both serial ports.

    is_debug   -- True selects the debug scene (CompetitionScene = 0),
                  anything else selects the competition scene (1).
    pixformat  -- "GRAY" for grayscale capture; any other value uses RGB565.
    delay_time -- milliseconds of frames to skip while settings settle.
    """
    # Close the serial ports first to avoid overflow during initialization.
    uart.deinit()
    uart2.deinit()
    sensor.reset()
    # FIX: honor the requested pixel format — previously this argument was
    # accepted but ignored and RGB565 was always used (the sibling init()
    # variant in this file already honors it).
    if pixformat == "GRAY":
        sensor.set_pixformat(sensor.GRAYSCALE)
    else:
        sensor.set_pixformat(sensor.RGB565)  # RGB565
    sensor.set_framesize(sensor.QVGA)        # 320*240
    sensor.set_gainceiling(128)              # gain ceiling: 2,4,8,16,32,64,128
    sensor.set_contrast(3)                   # contrast, -3 to +3
    sensor.set_brightness(0)                 # brightness, -3 to +3
    sensor.set_saturation(3)                 # saturation, -3 to +3
    sensor.set_auto_exposure(True)           # auto exposure
    sensor.skip_frames(time=delay_time)
    sensor.set_auto_gain(False)              # must be off for color tracking
    sensor.set_auto_whitebal(False)          # must be off for color tracking
    # Reopen the serial ports.
    uart.init(115200, timeout_char=1000)
    uart2.init(115200, timeout_char=1000)
    # Record whether we are running in debug mode.
    global CompetitionScene
    if is_debug == True:
        CompetitionScene = 0
    else:
        CompetitionScene = 1
def test_color_bars():
    """Self-test: enable the sensor's color-bar pattern and verify that each
    of the 8 bars averages out to its expected RGB color.

    Raises:
        Exception: if any bar fails its threshold check, naming the bar
        number and the averaged RGB values.
    """
    sensor.reset()
    # Set sensor settings
    sensor.set_brightness(0)
    sensor.set_saturation(3)
    sensor.set_gainceiling(8)
    sensor.set_contrast(2)
    # Set sensor pixel format
    sensor.set_framesize(sensor.QVGA)
    sensor.set_pixformat(sensor.RGB565)
    # Enable colorbar test mode
    sensor.set_colorbar(True)
    # Skip a few frames to allow the sensor to settle down
    for i in range(0, 100):
        image = sensor.snapshot()
    # Color-bar thresholds: one predicate per bar, left to right.
    t = [
        lambda r, g, b: r < 70 and g < 70 and b < 70,     # Black
        lambda r, g, b: r < 70 and g < 70 and b > 200,    # Blue
        lambda r, g, b: r > 200 and g < 70 and b < 70,    # Red
        lambda r, g, b: r > 200 and g < 70 and b > 200,   # Purple
        lambda r, g, b: r < 70 and g > 200 and b < 70,    # Green
        lambda r, g, b: r < 70 and g > 200 and b > 200,   # Aqua
        lambda r, g, b: r > 200 and g > 200 and b < 70,   # Yellow
        lambda r, g, b: r > 200 and g > 200 and b > 200,  # White
    ]
    # Color bars are inverted for OV7725.
    if (sensor.get_id() == sensor.OV7725):
        t = t[::-1]
    # 320x240 image with 8 color bars, each approx 40 pixels wide.
    # We start from the center of the frame buffer and average the
    # values of 10 sample pixels from the center of each color bar.
    for i in range(0, 8):
        avg = (0, 0, 0)
        idx = 40 * i + 20  # center of colorbars
        for off in range(0, 10):  # avg 10 pixels
            rgb = image.get_pixel(idx + off, 120)
            avg = tuple(map(sum, zip(avg, rgb)))
        if not t[i](avg[0] / 10, avg[1] / 10, avg[2] / 10):
            # FIX: trailing space added — the two adjacent literals are
            # implicitly concatenated, and the original produced the fused
            # message "...FAILED.BAR#(...)".
            raise Exception('COLOR BARS TEST FAILED. '
                            'BAR#(%d): RGB(%d,%d,%d)' %
                            (i + 1, avg[0] / 10, avg[1] / 10, avg[2] / 10))
    print('COLOR BARS TEST PASSED...')
def init(self, robot_):
    """Configure blob-tracking thresholds, the crop window, and the camera
    for the given robot.

    Args:
        robot_: one of self.ROBOT_O or self.ROBOT_P2.

    Raises:
        ValueError: for any other robot id. (FIX: previously an unknown id
        left self.thresholds/self.window unset, and the code crashed later
        with a confusing AttributeError at sensor.set_windowing.)
    """
    self.robot = robot_
    if self.robot == self.ROBOT_O:  # O_bot
        self.thresholds = [
            (35, 100, 26, 78, 22, 72),   # Ball
            (68, 100, -19, 27, 41, 81),  # Yellow Goal
            (20, 41, -6, 15, -55, -15),  # Blue Goal
        ]
        self.window = (59, 18, 184, 181)
    elif self.robot == self.ROBOT_P2:  # P2_bot
        self.thresholds = [
            (39, 100, 26, 78, 22, 72),   # Ball
            (68, 100, -19, 27, 41, 81),  # Yellow Goal
            (20, 41, -6, 25, -55, -15),  # Blue Goal
        ]
        self.window = (71, 4, 193, 191)
    else:
        raise ValueError("unknown robot id: %r" % (robot_,))
    # sensor setup
    sensor.reset()
    sensor.set_pixformat(sensor.RGB565)
    sensor.set_framesize(sensor.QVGA)  # Resolution
    sensor.set_windowing(self.window)
    sensor.skip_frames(time=1000)
    sensor.set_auto_exposure(False)
    sensor.set_auto_whitebal(False)
    # Need to let the above settings get in...
    sensor.skip_frames(time=250)
    # === GAIN ===
    # Freeze the gain at whatever the auto algorithm converged to.
    curr_gain = sensor.get_gain_db()
    sensor.set_auto_gain(False, gain_db=curr_gain)
    # === EXPOSURE ===
    # Lock exposure at half the auto-converged value.
    curr_exposure = sensor.get_exposure_us()
    sensor.set_auto_exposure(False, exposure_us=int(curr_exposure * 0.5))
    # === WHITE BAL ===
    # Must remain false for blob tracking.
    sensor.set_auto_whitebal(
        False, rgb_gain_db=(-6.02073, -3.762909, 3.33901))
    sensor.set_brightness(2)
    sensor.set_contrast(2)
    sensor.set_saturation(2)
    sensor.skip_frames(time=500)
def test_color_bars():
    """Drive the sensor's built-in color-bar test pattern and verify that
    every bar's sampled average matches its expected color.

    Raises Exception naming the offending bar on failure.
    """
    sensor.reset()
    # Camera tuning for the test pattern.
    sensor.set_brightness(0)
    sensor.set_saturation(0)
    sensor.set_gainceiling(8)
    sensor.set_contrast(2)
    # QVGA, RGB565 capture.
    sensor.set_framesize(sensor.QVGA)
    sensor.set_pixformat(sensor.RGB565)
    # Switch the sensor into color-bar output mode.
    sensor.set_colorbar(True)
    # Discard frames so the sensor can settle.
    # Note: this takes more time when executed from the IDE.
    for _ in range(100):
        image = sensor.snapshot()
    # One threshold predicate per bar, left to right.
    checks = [lambda r, g, b: r < 50 and g < 50 and b < 50,     # Black
              lambda r, g, b: r < 50 and g < 50 and b > 200,    # Blue
              lambda r, g, b: r > 200 and g < 50 and b < 50,    # Red
              lambda r, g, b: r > 200 and g < 50 and b > 200,   # Purple
              lambda r, g, b: r < 50 and g > 200 and b < 50,    # Green
              lambda r, g, b: r < 50 and g > 200 and b > 200,   # Aqua
              lambda r, g, b: r > 200 and g > 200 and b < 50,   # Yellow
              lambda r, g, b: r > 200 and g > 200 and b > 200]  # White
    # The 320x240 frame holds 8 bars of roughly 40 px each. Sample 10
    # pixels from the middle row of each bar and average them.
    for bar, check in enumerate(checks):
        center = 40 * bar + 20  # x coordinate of this bar's center
        total = (0, 0, 0)
        for off in range(10):  # accumulate 10 sample pixels
            px = image.get_pixel(center + off, 120)
            total = tuple(c + s for c, s in zip(total, px))
        r, g, b = (channel / 10 for channel in total)
        if not check(r, g, b):
            raise Exception("COLOR BARS TEST FAILED. "
                            "BAR#(%d): RGB(%d,%d,%d)" % (bar + 1, r, g, b))
    print("COLOR BARS TEST PASSED...")
def exposure_compensation():
    """
    use P control to set the exposure time according to the clamps brightness
    """
    # Proportional gain for the exposure controller.
    P = 2000
    pid = P_control(P)
    # Target L (lightness) value for the sampled regions.
    pid.SetPoint = 40
    # Maximum number of adjustment iterations.
    END = 50
    expo = sensor.get_exposure_us()
    for i in range(1, END):
        img = sensor.snapshot()
        llist = []
        # Collect the L channel of the median LAB value of every clamp ROI.
        for r in clamp_roi(roi):
            lab = lab_median(img, r)
            img = img.draw_rectangle(r)
            llist.append(lab[0])
        l_mean = int(sum(llist) / len(llist))
        print("l_mean = ", l_mean)
        img = img.draw_rectangle(roi)
        # Within 1 of the target: restore saturation and stop adjusting.
        if abs(l_mean - pid.SetPoint) <= 1:
            sensor.set_saturation(2)
            return
        pid.update(l_mean)
        output = pid.output
        expo += int(output)
        if expo < 10000:
            # Exposure dropped very low: re-read the sensor's actual value
            # and retry from there.
            sensor.set_auto_gain(0)
            expo = sensor.get_exposure_us()
            sensor.set_auto_exposure(0, expo)
            # NOTE(review): halving the local P has no effect on the already
            # constructed `pid` object — this looks like it was meant to
            # reduce the controller gain; confirm against P_control's API.
            P = P / 2
            continue
        if sensor.get_exposure_us() >= 120190:
            sensor.set_saturation(2)
            return  # max exposure time
        sensor.set_auto_exposure(0, expo)
        sensor.skip_frames(n=60)
        print("exp time", sensor.get_exposure_us())
def init(is_debug, pixformat, delay_time):
    """Bring up the camera sensor and the serial port.

    is_debug   -- True selects the debug scene (CompetitionScene = 0),
                  anything else the competition scene (1).
    pixformat  -- "GRAY" selects grayscale, "RGB" selects RGB565.
    delay_time -- milliseconds of frames to skip while settings settle.
    """
    global CompetitionScene
    # Keep the serial port closed while the sensor is reconfigured.
    uart.deinit()
    sensor.reset()
    if pixformat == "GRAY":
        sensor.set_pixformat(sensor.GRAYSCALE)
    elif pixformat == "RGB":
        sensor.set_pixformat(sensor.RGB565)
    sensor.set_framesize(sensor.QQVGA)
    sensor.set_gainceiling(128)      # gain ceiling: 2,4,8,16,32,64,128
    sensor.set_contrast(3)           # -3 to +3
    sensor.set_brightness(0)         # -3 to +3
    sensor.set_saturation(3)         # -3 to +3
    sensor.set_auto_exposure(True)
    sensor.skip_frames(time=delay_time)
    sensor.set_auto_gain(False)      # off for color tracking
    sensor.set_auto_whitebal(False)  # off for color tracking
    # Serial port back up.
    uart.init(115200, timeout_char=1000)
    CompetitionScene = 0 if is_debug == True else 1
def setConfig():
    """Block until a configuration line arrives on the UART, then parse
    and apply it.

    Expected payload: ten '_'-separated integers —
    frame index, color-mode index, filter1, filter2, postproc, contrast*10,
    brightness*10, saturation, gain-ceiling exponent, flash level.
    """
    # Busy-wait until at least one byte is available.
    while not uart.uart.any() > 0:
        pass
    uart.checking = True
    raw = uart.uart.readline().decode('ascii')
    fields = [int(part) for part in raw.split('_')]
    Camera.frame = RES[fields[0]]
    Camera.mode = CLR[fields[1]]
    Camera.filter1 = fields[2]
    Camera.filter2 = fields[3]
    Camera.postproc = fields[4]
    # Contrast and brightness arrive scaled by 10.
    Camera.contrast = fields[5] / 10
    Camera.brightness = fields[6] / 10
    Camera.saturation = fields[7]
    # Gain ceiling is 2**n for n in 1..7; out-of-range falls back to 2**3.
    exponent = fields[8] if 0 < fields[8] < 8 else 3
    sensor.set_gainceiling(2 ** exponent)
    # Flash level must be 0..255; anything else turns the flash off.
    Camera.flash = fields[9] if 0 <= fields[9] <= 255 else 0
    LED.set_led(0, (Camera.flash, Camera.flash, Camera.flash))
    LED.display()
    sensor.set_pixformat(Camera.mode)
    sensor.set_framesize(Camera.frame)
    sensor.set_saturation(Camera.saturation)
    uart.checking = False
# Filters are image functions that process a single line at a time. # Since filters process lines on the fly, they run at sensor speed. # Note: Only one filter can be enabled at a time. import time, sensor # Reset sensor sensor.reset() # Set sensor settings sensor.set_contrast(1) sensor.set_brightness(3) sensor.set_saturation(3) sensor.set_gainceiling(16) sensor.set_framesize(sensor.QVGA) sensor.set_pixformat(sensor.GRAYSCALE) # Enable BW filter sensor.set_image_filter(sensor.FILTER_BW, lower=200, upper=255) # Enable SKIN filter (Note doesn't work very well on RGB) #sensor.set_image_filter(sensor.FILTER_SKIN) # FPS clock clock = time.clock() while (True): clock.tick() img = sensor.snapshot() # Draw FPS # Note: Actual FPS is higher, the IDE slows down streaming. img.draw_string(0, 0, "FPS:%.2f"%(clock.fps()))
# Thermopile Shield Demo # # Note: To run this example you will need a Thermopile Shield for your OpenMV # Cam. Also, please disable JPEG mode in the IDE. # # The Thermopile Shield allows your OpenMV Cam to see heat! import sensor, image, time, fir # Reset sensor sensor.reset() # Set sensor settings sensor.set_contrast(1) sensor.set_brightness(0) sensor.set_saturation(2) sensor.set_pixformat(sensor.RGB565) sensor.set_framesize(sensor.QQVGA) # The following registers fine-tune the image # sensor window to align it with the FIR sensor. if (sensor.get_id() == sensor.OV2640): sensor.__write_reg(0xFF, 0x01) # switch to reg bank sensor.__write_reg(0x17, 0x19) # set HSTART sensor.__write_reg(0x18, 0x43) # set HSTOP # Initialize the thermal sensor fir.init() # FPS clock clock = time.clock()
maxnum = a else: maxnum = b # 再比较maxnum和c if c > maxnum: maxnum = c return maxnum find_initpoint() #自定义函数 确定电机回到原点 sensor.set_auto_exposure(False, 600) #设置曝光 sensor.skip_frames(time=2000) sensor.set_auto_gain(False) # must be turned off for color tracking sensor.set_auto_whitebal(False) # must be turned off for color tracking sensor.set_saturation(1) #设置饱和度 clock = time.clock() while (True): clock.tick() img = sensor.snapshot() color_value = p_in_7.value( ) # get value, 0 or 1#读入p_in_7引脚的值 抽签确定的球的颜色 0为黑 1为白 judge_value = p_in_8.value() # get value, 0 or 1#读入p_in_8引脚的值 判断是否开始分球 Ready_value = p_in_9.value() # get value, 0 or 1#读入p_in_9引脚的值 射球是否就绪 if color_value == 1: color_flag = 1 #白球 else: color_flag = 0 #黑球
# Chrome, Firefox and MJpegViewer App on Android have been tested. # Connect to OPENMV_AP and use this URL: http://192.168.1.1:8080 to view the stream. import sensor, image, time, network, usocket, sys SSID ='OPENMV_AP' # Network SSID KEY ='1234567890' # Network key (must be 10 chars) HOST = '' # Use first available interface PORT = 8080 # Arbitrary non-privileged port # Reset sensor sensor.reset() # Set sensor settings sensor.set_contrast(1) sensor.set_brightness(1) sensor.set_saturation(1) sensor.set_gainceiling(16) sensor.set_framesize(sensor.QQVGA) sensor.set_pixformat(sensor.GRAYSCALE) # Init wlan module in AP mode. wlan = network.WINC(mode=network.WINC.MODE_AP) wlan.start_ap(SSID, key=KEY, security=wlan.WEP, channel=2) # You can block waiting for client to connect #print(wlan.wait_for_sta(10000)) def start_streaming(s): print ('Waiting for connections..') client, addr = s.accept() # set client socket timeout to 2s
i = 0 z = 0 for i in range(datalen): z = z + data[i]*data[i] return z #设置摄像头相关参数 sensor.reset() sensor.set_pixformat(sensor.RGB565) #设置图片格式(彩色/灰度图) sensor.set_framesize(sensor.QVGA) #设置图像大小 sensor.skip_frames(10) #等待设置生效 sensor.set_auto_whitebal(False) #关闭白平衡 sensor.set_auto_gain(False) #关闭自动亮度调节 sensor.set_contrast(0) #对比度 -3~3 7个调节 sensor.set_brightness(0) #亮度 sensor.set_saturation(0) #饱和度 uart = UART(3,115200) #设置串口 led = pyb.LED(3) #提示灯 clock = time.clock() #初始化时钟 #颜色识别区域的中心坐标 x_distance = y_distance = 0 #路径识别参数 ROIS = [ # [ROI, weight] #(0, 110, 160, 10), #(0, 100, 160, 10), (0, 090, 160, 10), (0, 080, 160, 10), (0, 070, 160, 10),
# MicroPython v0.5.0-29-g97fad3a on 2020-03-13; Sipeed_M1 with kendryte-k210 # Importe de librerias import sensor, image, lcd, time, utime import KPU as kpu # Configuración inicial de la pantalla LCD y la camara OV2640 lcd.init() # Inicializa la pantalla sensor.reset() # Inicializa la camara sensor.set_pixformat(sensor.RGB565) # Define el formato de color de la imagen sensor.set_framesize( sensor.QVGA) # Establece la captura de imagen como QVGA (320x240) sensor.set_windowing( (224, 224)) # Establece el tamaño de imagen con el que se entreno la red sensor.set_vflip(1) # Rotación vertical de la imagen sensor.set_saturation(-3) # Saturacion sensor.set_brightness(-3) # brightness sensor.set_contrast(-3) # contrast lcd.clear() # Limpia la pantalla y la deja en negro # Descripción y carga del modelo labels = ['Acaro', 'Bueno', 'Manchado'] # Etiquetas de la ultima capa de la red task = kpu.load( '/sd/3clases.kmodel') # Acá va al ubicación del archivo .kmodel (CARGA) kpu.set_outputs(task, 0, 1, 1, 3) # Aqúi van las dimensiones de la ultima capa de la red while (True): tick1 = utime.ticks_ms()
watchdog_led = False elif (cmd == b'ctrt+'): contrast = max(contrast + 1, 3) sensor.set_contrast(constrast) elif (cmd == b'ctrt-'): contrast = min(contrast - 1, -3) sensor.set_contrast(constrast) elif (cmd == b'brgt+'): brightness = max(brightness + 1, 3) sensor.set_brightness(brightness) elif (cmd == b'brgt-'): brightness = min(brightness - 1, -3) sensor.set_brightness(brightness) elif (cmd == b'satr+'): saturation = max(saturation + 1, 3) sensor.set_saturation(saturation) elif (cmd == b'satr-'): saturation = min(saturation - 1, -3) sensor.set_saturation(saturation) elif (cmd == b'width'): usb.send(sensor.width()) elif (cmd == b'heigh'): usb.send(sensor.height()) elif (cmd == b'q4vga'): sensor.set_framesize(sensor.QQQQVGA) #40x30 elif (cmd == b'q3vga'): sensor.set_framesize(sensor.QQQVGA) #80x60 elif (cmd == b'q2vga'): sensor.set_framesize(sensor.QQVGA) #160x120 elif (cmd == b'q1vga'): sensor.set_framesize(sensor.QQVGA) #320x240
import sensor

sensor.reset()  # Reset and initialize the sensor.
sensor.set_pixformat(sensor.RGB565)  # Set pixel format to RGB565 (or GRAYSCALE)
sensor.set_framesize(sensor.QVGA)    # Set frame size to QVGA (320x240)
sensor.skip_frames(time=2000)        # Wait for settings take effect.
# Sets the resolution of the camera to a sub resolution inside of the
# current resolution.
# NOTE(review): OpenMV's set_windowing usually takes a tuple, e.g.
# set_windowing((30, 30)) — confirm the two-argument form against the
# target firmware version.
sensor.set_windowing(30, 30)
sensor.set_contrast(0)    # Set the camera image contrast. -3 to +3.
sensor.set_brightness(0)  # Set the camera image brightness. -3 to +3.
sensor.set_saturation(0)  # Set the camera image saturation. -3 to +3.
# You need to turn off auto gain (and white balance, below) if you want to
# track colors.
sensor.set_auto_gain(False)
# You need to turn off white balance too if you want to track colors.
sensor.set_auto_whitebal(False)
# radi: integer radius of pixels to correct (int).
# coef: power of correction (int).
sensor.set_lens_correction(True)

while (True):
    img = sensor.snapshot()  # Take a picture and return the image.
import sensor, mlx, time # Initialize the MLX module mlx.init(mlx.IR_REFRESH_64HZ) # Reset sensor sensor.reset() # Set sensor settings sensor.set_contrast(1) sensor.set_brightness(0) sensor.set_saturation(2) sensor.set_pixformat(sensor.RGB565) sensor.set_framesize(sensor.QQVGA) # The following registers fine-tune the image # sensor window to align it with the FIR sensor. if (sensor.get_id() == sensor.OV2640): sensor.__write_reg(0xFF, 0x01) # switch to reg bank sensor.__write_reg(0x17, 0x19) # set HSTART sensor.__write_reg(0x18, 0x43) # set HSTOP # FPS clock clock = time.clock() # Ambient temperature ta = 0.0 # Minimum object temperature to_min = 0.0 # Maximum object temperature to_max = 0.0
# Read back the sensor's current (auto-set) gain, exposure and white-balance
# values.
gain_db = sensor.get_gain_db()
exposure_us = sensor.get_exposure_us()
rgb_gain_db = sensor.get_rgb_gain_db()
print("gain_db is " + str(gain_db))
print("exposure is " + str(exposure_us))
print("rgb_gain_db is " + str(rgb_gain_db))

# Set the gain and exposure as fixed (not concerned about the values)
sensor.set_auto_gain(False, gain_db)
sensor.set_auto_exposure(False, exposure_us)
sensor.set_auto_whitebal(False, rgb_gain_db)

# Setup contrast, brightness and saturation
sensor.set_contrast(0)    # range -3 to +3
sensor.set_brightness(0)  # range -3 to +3
sensor.set_saturation(0)  # range -3 to +3

# Disable night mode (auto frame rate) and black level calibration (BLC)
sensor.__write_reg(0x0E, 0b00000000)  # Disable night mode
sensor.__write_reg(0x3E, 0b00000000)  # Disable BLC
sensor.__write_reg(0x13, 0b00000000)  # disable automated gain

# Read the values back after fixing the settings.
gain_db = sensor.get_gain_db()
exposure_us = sensor.get_exposure_us()
rgb_gain_db = sensor.get_rgb_gain_db()
print("gain_db is " + str(gain_db))
print("exposure is " + str(exposure_us))
print("rgb_gain_db is " + str(rgb_gain_db))

# NOTE(review): this list continues beyond the visible chunk.
reg_list = [
import sensor, image, time, math
from pyb import LED, UART, Pin

sensor.reset()
sensor.set_pixformat(sensor.RGB565)
sensor.set_framesize(sensor.QQSIF)  # 88x60 (matches ImageX/ImageY below)
sensor.skip_frames(time=2000)
sensor.set_auto_gain(True)
sensor.set_auto_whitebal(False)
sensor.set_auto_exposure(True)
sensor.set_contrast(3)
sensor.set_saturation(3)

# Frame dimensions for the QQSIF capture.
ImageX = 88
ImageY = 60

# (min, max) threshold pair — presumably grayscale, for letter detection;
# confirm against the rest of the file.
letter_thresholds = (0, 50)
# LAB color thresholds: (L_min, L_max, A_min, A_max, B_min, B_max).
red_thresholds = (30, 70, 30, 100, 30, 100)
yellow_thresholds = (70, 100, -10, 30, 60, 100)
green_thresholds = (30, 100, -100, -40, 30, 100)

uart = UART(3, 9600, timeout_char=10)
uart.init(9600)
clock = time.clock()

# Blob geometry state. The 'H' and 'S' prefixes presumably distinguish two
# tracked blobs — confirm against the code that updates these.
blobHcenter = 0
blobHtop = 255
blobHbottom = 255
blobHleft = 0
blobHright = 0
blobHtopleft = 0
blobHtopright = 0
blobHbottomleft = 0
blobHbottomright = 0
blobScenter = 0
blobStop = 0
user_exposure_time = 500 #camera exposure time, feel free to change. 900 is default value for tested setup sensor.reset() # Initialize the camera sensor sensor.set_pixformat( sensor.GRAYSCALE ) # set camera format to grayscale (color not important in this example) sensor.set_framesize(sensor.QVGA) # set camera resolution to QVGA 320 x 240 sensor.set_auto_whitebal(False) # Turn off white balance sensor.set_auto_exposure( False, exposure_us=user_exposure_time ) # set exposure time, user changable (user_exposure_time variable) #these setting are manually set in order to prevent camera to change it during use (no automatic setting in machine vision!) sensor.set_brightness(0) #set camera brightness sensor.set_contrast(2) #set camera contrast sensor.set_saturation(0) #set camera saturation # Print out the initial gain for comparison. print("Initial gain == %f db" % sensor.get_gain_db()) sensor.skip_frames( time=2000) #small 2 seconds pause, so camera can update its settings # calculating sensor gain # user can change GAIN_SCALE factor in order to obtain more bright image current_gain_in_decibels = sensor.get_gain_db() #current sensor gain print("Current Gain == %f db" % current_gain_in_decibels) #reporting current gain sensor.set_auto_gain(False, gain_db=current_gain_in_decibels * GAIN_SCALE) #new gain print("New gain == %f db" % sensor.get_gain_db()) #reporting new sensor gain sensor.skip_frames(
import sensor, time

sensor.reset()
# Set sensor settings
sensor.set_brightness(0)
sensor.set_saturation(0)
sensor.set_gainceiling(8)
sensor.set_contrast(2)
# Set sensor pixel format
sensor.set_framesize(sensor.QVGA)
sensor.set_pixformat(sensor.RGB565)
# Enable colorbar test mode
sensor.set_colorbar(True)
# Skip a few frames to allow the sensor settle down
for i in range(0, 30):
    image = sensor.snapshot()
# Color-bar thresholds: one predicate per bar, left to right.
t = [lambda r, g, b: r < 50 and g < 50 and b < 50,     # Black
     lambda r, g, b: r < 50 and g < 50 and b > 200,    # Blue
     lambda r, g, b: r > 200 and g < 50 and b < 50,    # Red
     lambda r, g, b: r > 200 and g < 50 and b > 200,   # Purple
     lambda r, g, b: r < 50 and g > 200 and b < 50,    # Green
     lambda r, g, b: r < 50 and g > 200 and b > 200,   # Aqua
     lambda r, g, b: r > 200 and g > 200 and b < 50,   # Yellow
     lambda r, g, b: r > 200 and g > 200 and b > 200]  # White
# 320x240 image with 8 color bars each one is approx 40 pixels.
# with open("img.jpg", "w") as f: # f.write(img) import sensor, image, pin, time, ustruct, pyb from pyb import USB_VCP sensor.reset() # Reset and initialize the sensor. sensor.set_pixformat( sensor.RGB565) # Set pixel format to RGB565 (or GRAYSCALE) sensor.set_framesize(sensor.QVGA) # Set frame size to QVGA (320x240) sensor.set_auto_exposure(False, 5000) sensor.set_auto_gain(False) sensor.set_auto_whitebal(False) #sensor.set_contrast( -3 ) #-3 +3 #sensor.set_brightness( +3 ) #-3 +3 sensor.set_saturation(+3) #-3 +3 sensor.skip_frames(time=1000) DBG = True pin = Pin('P0', Pin.OUT_OD) usb = USB_VCP() imgMs = 0 usbMs = 0 stream = True while (True): ticks = time.ticks() img = sensor.snapshot() cnt = 0