Example #1
def camera_init():
    sensor.reset()
    sensor.set_pixformat(sensor.RGB565)
    sensor.set_framesize(sensor.QVGA)
    # OV2640 id: 9794, OV5642 id: 22082
    if sensor.get_id() == 9794:
        sensor.set_hmirror(1)
        sensor.set_vflip(1)
    else:
        sensor.set_hmirror(0)
        sensor.set_vflip(1)
    lcd.rotation(1)
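
# A minimal usage sketch (not part of the original snippet): it assumes the
# MaixPy sensor/lcd/time modules and simply streams frames from camera_init()
# to the display.
import sensor, lcd, time

lcd.init()             # bring the display up before camera_init() rotates it
camera_init()          # reset, configure, and orient the sensor as defined above
clock = time.clock()

while True:
    clock.tick()
    img = sensor.snapshot()   # grab a frame
    lcd.display(img)          # push it to the LCD
    print(clock.fps())        # report the frame rate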
Example #2
def test_color_bars():
    sensor.reset()
    # Set sensor settings
    sensor.set_brightness(0)
    sensor.set_saturation(3)
    sensor.set_gainceiling(8)
    sensor.set_contrast(2)

    # Set sensor pixel format
    sensor.set_framesize(sensor.QVGA)
    sensor.set_pixformat(sensor.RGB565)

    # Enable colorbar test mode
    sensor.set_colorbar(True)

    # Skip a few frames to let the sensor settle down
    for i in range(0, 100):
        image = sensor.snapshot()

    # Color bar thresholds
    t = [
        lambda r, g, b: r < 70 and g < 70 and b < 70,  # Black
        lambda r, g, b: r < 70 and g < 70 and b > 200,  # Blue
        lambda r, g, b: r > 200 and g < 70 and b < 70,  # Red
        lambda r, g, b: r > 200 and g < 70 and b > 200,  # Purple
        lambda r, g, b: r < 70 and g > 200 and b < 70,  # Green
        lambda r, g, b: r < 70 and g > 200 and b > 200,  # Aqua
        lambda r, g, b: r > 200 and g > 200 and b < 70,  # Yellow
        lambda r, g, b: r > 200 and g > 200 and b > 200,  # White
    ]

    # color bars are inverted for OV7725
    if (sensor.get_id() == sensor.OV7725):
        t = t[::-1]

    # 320x240 image with 8 color bars; each one is approx. 40 pixels wide.
    # We start from the center of the frame buffer and average the
    # values of 10 sample pixels from the center of each color bar.
    for i in range(0, 8):
        avg = (0, 0, 0)
        idx = 40 * i + 20  # center of each color bar
        for off in range(0, 10):  # average 10 pixels
            rgb = image.get_pixel(idx + off, 120)
            avg = tuple(map(sum, zip(avg, rgb)))

        if not t[i](avg[0] / 10, avg[1] / 10, avg[2] / 10):
            raise Exception('COLOR BARS TEST FAILED. '
                            'BAR#(%d): RGB(%d,%d,%d)' %
                            (i + 1, avg[0] / 10, avg[1] / 10, avg[2] / 10))

    print('COLOR BARS TEST PASSED...')
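
# A small, hypothetical driver for the self-test above (sensor is assumed to be
# imported already); test_color_bars() raises on the first bar whose averaged
# RGB values miss the expected thresholds.
try:
    test_color_bars()
except Exception as e:
    print(e)   # reports which bar failed and its averaged RGB values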
Example #3
else:
    sensor.reset()  # OV2640: reset and initialize the sensor. It will
    # run automatically; call sensor.run(0) to stop.
#sensor.shutdown(enable)
gc.collect()
sensor.set_pixformat(sensor.RGB565)  # Set pixel format to RGB565 (or GRAYSCALE)
sensor.set_framesize(sensor.VGA)
#sensor.set_framesize(sensor.QVGA)   # Set frame size to QVGA (320x240)
#sensor.set_auto_whitebal(True)  #OV2640
sensor.set_hmirror(1)  #for unit V
sensor.set_vflip(1)  #for unit V

img_w = sensor.width()
img_h = sensor.height()
sensor_ID = sensor.get_id()
print("image sensor is " + str(sensor_ID) + ", with size " + str(img_w) +
      " x " + str(img_h))
sensor.run(1)
sensor.skip_frames(time=1000)  # Wait for settings to take effect.
#sensor.skip_frames(30)             # Wait for settings to take effect.
clock = time.clock()  # Create a clock object to track the FPS.
Zeit_end = 600 * 1000
condition = True
Zeit_Anfang = time.ticks_ms()
run_cnt = 0
loop_time = 1
fps = 0
# BtnA to save img to "train"; BtnB to toggle QR scan, may run out of memory @ VGA!
while (condition):
    clock.tick()  # Update the FPS clock.
Example #4
kernel_size = 1  # 3x3 kernel, i.e. (kernel_size*2)+1 per side
kernel = [-1, -1, -1,\
          -1, +8, -1,\
          -1, -1, -1]
# This is a high pass filter kernel. see here for more kernels:
# http://www.fmwconcepts.com/imagemagick/digital_image_filtering.pdf
thresholds = [(100, 255)] # grayscale thresholds

sensor.reset() # Initialize the camera sensor.
sensor.set_pixformat(sensor.GRAYSCALE) # or sensor.RGB565
sensor.set_framesize(sensor.QQVGA) # or sensor.QVGA (or others)
sensor.skip_frames(time = 2000) # Let new settings take effect.
clock = time.clock() # Tracks FPS.

# On the OV7725 sensor, edge detection can be enhanced
# significantly by setting the sharpness/edge registers.
# Note: This will be implemented as a function later.
if (sensor.get_id() == sensor.OV7725):
    sensor.__write_reg(0xAC, 0xDF)
    sensor.__write_reg(0x8F, 0xFF)
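
# A possible shape for the helper mentioned above (hypothetical name, not part
# of the original example): it wraps the same two register writes and is a
# no-op on sensors other than the OV7725.
def ov7725_enable_edge_enhancement():
    if sensor.get_id() == sensor.OV7725:
        sensor.__write_reg(0xAC, 0xDF)   # sharpness/edge enhancement registers
        sensor.__write_reg(0x8F, 0xFF)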

while(True):
    clock.tick() # Track elapsed milliseconds between snapshots().
    img = sensor.snapshot() # Take a picture and return the image.

    img.morph(kernel_size, kernel)
    img.binary(thresholds)

    # Erode pixels with less than 2 neighbors using a 3x3 image kernel
    img.erode(1, threshold = 2)

    print(clock.fps()) # Note: Your OpenMV Cam runs about half as fast while
    # connected to your computer. The FPS should increase once disconnected.
Example #5
        if result != 0x7F:
            break
        sleep(0.5)


# Reset sensor
sensor.reset()

# Sensor settings
sensor.set_contrast(3)
sensor.set_gainceiling(16)
# HQVGA and GRAYSCALE are the best for face tracking.
sensor.set_framesize(sensor.HQVGA)
sensor.set_pixformat(sensor.GRAYSCALE)

if sensor.get_id() != sensor.OV5640:
    print('Only run for ov5640')
    sys.exit(0)

OV5640AF_Init()

blue_led = LED(1)
KEY = Pin('C13', Pin.IN, Pin.PULL_DOWN)

# Load Haar Cascade
# By default this will use all stages; fewer stages is faster but less accurate.
face_cascade = image.HaarCascade("frontalface", stages=25)
print(face_cascade)

# FPS clock
clock = time.clock()
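
# The snippet is cut off before its main loop; a minimal detection-loop sketch
# based on OpenMV's find_features() (the threshold/scale_factor values here are
# illustrative assumptions, not taken from the original):
while True:
    clock.tick()
    img = sensor.snapshot()
    # find_features() returns a list of bounding boxes for detected faces.
    faces = img.find_features(face_cascade, threshold=0.75, scale_factor=1.25)
    for r in faces:
        img.draw_rectangle(r)
    print(clock.fps())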
Example #6
        while (True):
            result = sensor.__read_reg(0x3029)
            print('FW_STATUS: %X' % result)
            if result != 0x7F:
                break
            sleep(500)


blue_led = LED(1)
KEY = Pin('C13', Pin.IN, Pin.PULL_DOWN)

sensor.reset()
sensor.set_pixformat(sensor.RGB565)
sensor.set_framesize(sensor.QQVGA)

if sensor.get_id() != sensor.OV5640:
    if sensor.get_id() == sensor.OV7725:
        sensor.set_hmirror(True)
        sensor.set_vflip(True)
    # sys.exit(0)
else:
    print('ov5640 AF Firmware Init ...')
    OV5640AF_Init()

lcd.init(type=2)  # Initialize the lcd screen.
clock = time.clock()

keycount = 0
while (True):
    clock.tick()
    img = sensor.snapshot()
Example #7
    image = sensor.snapshot()

# Color bars thresholds
t = [
    lambda r, g, b: r < 70 and g < 70 and b < 70,  # Black
    lambda r, g, b: r < 70 and g < 70 and b > 200,  # Blue
    lambda r, g, b: r > 200 and g < 70 and b < 70,  # Red
    lambda r, g, b: r > 200 and g < 70 and b > 200,  # Purple
    lambda r, g, b: r < 70 and g > 200 and b < 70,  # Green
    lambda r, g, b: r < 70 and g > 200 and b > 200,  # Aqua
    lambda r, g, b: r > 200 and g > 200 and b < 70,  # Yellow
    lambda r, g, b: r > 200 and g > 200 and b > 200,  # White
]

# color bars are inverted for OV7725
if (sensor.get_id() == sensor.OV7725):
    t = t[::-1]

# 320x240 image with 8 color bars each one is approx 40 pixels.
# we start from the center of the frame buffer, and average the
# values of 10 sample pixels from the center of each color bar.
for i in range(0, 8):
    avg = (0, 0, 0)
    idx = 40 * i + 20  # center of colorbars
    for off in range(0, 10):  # avg 10 pixels
        rgb = image.get_pixel(idx + off, 120)
        avg = tuple(map(sum, zip(avg, rgb)))

    if not t[i](avg[0] / 10, avg[1] / 10, avg[2] / 10):
        raise Exception("COLOR BARS TEST FAILED. "
                        "BAR#(%d): RGB(%d,%d,%d)" %
Example #8
# Initialize the MLX module
mlx.init(mlx.IR_REFRESH_64HZ)

# Reset sensor
sensor.reset()

# Set sensor settings
sensor.set_contrast(1)
sensor.set_brightness(0)
sensor.set_saturation(2)
sensor.set_pixformat(sensor.RGB565)
sensor.set_framesize(sensor.QQVGA)

# The following registers fine-tune the image
# sensor window to align it with the FIR sensor.
if (sensor.get_id() == sensor.OV2640):
    sensor.__write_reg(0xFF, 0x01)  # switch to reg bank
    sensor.__write_reg(0x17, 0x19)  # set HSTART
    sensor.__write_reg(0x18, 0x43)  # set HSTOP

# FPS clock
clock = time.clock()

# Ambient temperature
ta = 0.0
# Minimum object temperature
to_min = 0.0
# Maximum object temperature
to_max = 0.0

while (True):
Example #9
#import cpufreq

#utime.mktime((2020, 2, 13, 16, 40, 0, 4, 44))
img_quality = 100
fm.register(34, fm.fpioa.UART1_TX)
fm.register(35, fm.fpioa.UART1_RX)
uart_out = UART(UART.UART1,
                115200,
                8,
                None,
                1,
                timeout=1000,
                read_buf_len=4096)
#https://docs.singtown.com/micropython/zh/latest/openmvcam/library/omv.sensor.html#module-sensor
sensor.reset()
Cam_ID = sensor.get_id()
sensor.set_pixformat(sensor.RGB565)
sensor.set_hmirror(1)  #for unit V
sensor.set_vflip(1)  #for unit V
sensor.set_framesize(sensor.QVGA)
#sensor.set_quality(img_quality)
sensor.set_auto_whitebal(False)  # Turn off white balance. It is on by default and must be disabled for color recognition.
img_w = sensor.width()
img_h = sensor.height()
sensor.skip_frames(30)

clock = time.clock()
Target_ROI = (10, 10, img_w - 10, img_h - 10)

print("Image sensor chip ID is " + str(Cam_ID) + " with size:" + str(img_w) +
      "*" + str(img_h))
Example #10
def runIt():
    #print("send_ccd_id")
    l = list()
    l.append(sensor.get_id())
    cam5procs.send_packet(l, len(l), CCDID)
Example #11
            )  #OV7740  Loop Time :91ms, run fps:10.98901, fps 22 at serial terminal
            break
        except:
            OV_err_counter = OV_err_counter + 1
            print("image sensor reset failed:")
            if (OV_err_counter == 10):
                print("image sensor reset failed:" + str(err_counter))
                time.sleep(5)
                machine.reset()
            time.sleep(0.1)
            continue

else:
    sensor.reset()  # OV2640: reset and initialize the sensor. It will
    # run automatically; call sensor.run(0) to stop.
sensor_ID = sensor.get_id()  # 0x7742 for OV7740, 0x77 for OV7725, 0x2642 for OV2640
if (sensor_ID == 30530):
    sensor_ID_str = 'OV7740'
else:
    sensor_ID_str = str(sensor_ID)  # fallback so the print below still works for other sensors
#sensor.shutdown(enable)
sensor.set_pixformat(sensor.RGB565)  # Set pixel format to RGB565 (or GRAYSCALE)
#sensor.set_pixformat(sensor.YUV422) # Set pixel format YUV422, test ok, slower than RGB565
sensor.set_framesize(sensor.VGA)
sensor.set_hmirror(1)  #for unit V  #bugs??
sensor.set_vflip(1)  #for unit V
img_w = sensor.width()
img_h = sensor.height()

print("image sensor is " + str(sensor_ID_str) + ", with size " + str(img_w) +
      " x " + str(img_h))
sensor.skip_frames(time=1000)  # Wait for settings to take effect.
Example #12
                resultList.append(l)
                return resultList
    return resultList


led = pyb.LED(2)
sensor.reset()
sensor.set_pixformat(sensor.GRAYSCALE) # grayscale is faster
sensor.set_framesize(sensor.QQVGA)
sensor.skip_frames(time = 2000)
clock = time.clock()

min_degree = 0
max_degree = 50

if (sensor.get_id() == sensor.OV7725):
    sensor.__write_reg(0xAC, 0xDF)
    sensor.__write_reg(0x8F, 0xFF)

print(sensor.get_id())
print(sensor.OV7725)
iCount = 0
haveCount = 0
resultList = []

contral_Switch = False

led.on()
time.sleep(150)
led.off()
while(True):
Example #13
sensor.set_pixformat(sensor.RGB565)
sensor.set_framesize(sensor.QQVGA)
sensor.skip_frames(time=20)

blue_led = LED(1)
KEY = Pin('C13', Pin.IN, Pin.PULL_DOWN)
lcd.init(type=2, refresh=120)

clock = time.clock()
keycount = 0
while (True):
    clock.tick()
    img = sensor.snapshot()
    print(clock.fps())
    lcd.display(img)
    if sensor.get_id() == sensor.OV5640:
        if KEY.value() == 1:
            while KEY.value() == 1:
                blue_led.on()
                sleep(0.05)
                blue_led.off()
                sleep(0.05)
                keycount += 1
                if keycount > 3:
                    sensor.ioctl(sensor.IOCTL_RESET_AUTO_FOCUS)
                    while KEY.value() == 1:
                        blue_led.on()
                        sleep(0.1)
                        blue_led.off()
                        sleep(0.1)
            if keycount <= 3:
Example #14
# In Memory Shadow Removal w/ Frame Differencing Example
#
# This example demonstrates using frame differencing with your OpenMV Cam using
# shadow removal to help reduce the effects of cast shadows in your scene.

import sensor, image, pyb, os, time

TRIGGER_THRESHOLD = 5

sensor.reset() # Initialize the camera sensor.
sensor.set_pixformat(sensor.RGB565) # or sensor.GRAYSCALE
sensor.set_framesize(sensor.QQVGA) # or sensor.QVGA (or others)
if sensor.get_id() == sensor.OV7725: # Reduce sensor PLL from 6x to 4x.
    sensor.__write_reg(0x0D, (sensor.__read_reg(0x0D) & 0x3F) | 0x40)
sensor.skip_frames(time = 2000) # Let new settings take effect.
sensor.set_auto_whitebal(False) # Turn off white balance.
sensor.set_auto_gain(False) # Turn this off too.
clock = time.clock() # Tracks FPS.

# Take from the main frame buffer's RAM to allocate a second frame buffer.
# There's a lot more RAM in the frame buffer than in the MicroPython heap.
# However, after doing this you have a lot less RAM for some algorithms...
# So, be aware that it's a lot easier to run into RAM issues now. However,
# frame differencing doesn't use a lot of the extra space in the frame buffer.
# But, things like AprilTags do and won't work if you do this...
extra_fb = sensor.alloc_extra_fb(sensor.width(), sensor.height(), sensor.RGB565)

print("About to save background image...")
sensor.skip_frames(time = 2000) # Give the user time to get ready.
extra_fb.replace(sensor.snapshot())
print("Saved background image - Now frame differencing!")
Example #15
# Untitled - By: Weact - 7 8 2020
red_threshold = (53, 31, 44, 82, 18, 78)
area = (0, 0, 320, 240)
import sensor, image, time, lcd

clock = time.clock()

sensor.reset()
if sensor.get_id() == sensor.OV7725:
    sensor.set_hmirror(True)
    sensor.set_vflip(True)
sensor.set_pixformat(sensor.RGB565)
sensor.set_framesize(sensor.QVGA)
sensor.skip_frames(20)
sensor.set_auto_whitebal(False)
sensor.set_auto_gain(False)
# Turn off white balance. It is on by default and must be disabled for color recognition.
clock = time.clock()


# search max blob
def search_max(blobs):
    max_blob = 0
    max_size = 0
    for blob in blobs:
        blob_area = blob[2] * blob[3]
        if blob_area > max_size:
            max_blob = blob
            max_size = blob_area
    return max_blob
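
# A hypothetical usage sketch for search_max(), tying it to the red_threshold
# and ROI defined above; find_blobs() is the standard call, the keyword values
# are illustrative:
while True:
    clock.tick()
    img = sensor.snapshot()
    blobs = img.find_blobs([red_threshold], roi=area, merge=True)
    if blobs:
        b = search_max(blobs)                       # largest blob by bounding-box area
        img.draw_rectangle(b[0], b[1], b[2], b[3])  # x, y, w, h
    print(clock.fps())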
Example #16
# On Disk Shadow Removal w/ Frame Differencing Example
#
# Note: You will need an SD card to run this example.
#
# This example demonstrates using frame differencing with your OpenMV Cam using
# shadow removal to help reduce the effects of cast shadows in your scene.

import sensor, image, pyb, os, time

TRIGGER_THRESHOLD = 5

sensor.reset()  # Initialize the camera sensor.
sensor.set_pixformat(sensor.RGB565)  # or sensor.GRAYSCALE
sensor.set_framesize(sensor.QQVGA)  # or sensor.QVGA (or others)
if sensor.get_id() == sensor.OV7725:  # Reduce sensor PLL from 6x to 4x.
    sensor.__write_reg(0x0D, (sensor.__read_reg(0x0D) & 0x3F) | 0x40)
sensor.skip_frames(time=2000)  # Let new settings take effect.
sensor.set_auto_whitebal(False)  # Turn off white balance.
sensor.set_auto_gain(False)  # Turn this off too.
clock = time.clock()  # Tracks FPS.

if not "temp" in os.listdir(): os.mkdir("temp")  # Make a temp directory

print("About to save background image...")
sensor.skip_frames(time=2000)  # Give the user time to get ready.
sensor.snapshot().save("temp/bg.bmp")
print("Saved background image - Now frame differencing!")

while (True):
    clock.tick()  # Track elapsed milliseconds between snapshots().
    img = sensor.snapshot()  # Take a picture and return the image.
Example #17
import sensor, image, time, fir

# Reset sensor
sensor.reset()

# Set sensor settings
sensor.set_contrast(1)
sensor.set_brightness(0)
sensor.set_saturation(2)
sensor.set_pixformat(sensor.RGB565)
sensor.set_framesize(sensor.QQVGA)

# The following registers fine-tune the image
# sensor window to align it with the FIR sensor.
if (sensor.get_id() == sensor.OV2640):
    sensor.__write_reg(0xFF, 0x01) # switch to reg bank
    sensor.__write_reg(0x17, 0x19) # set HSTART
    sensor.__write_reg(0x18, 0x43) # set HSTOP

# Initialize the thermal sensor
fir.init()

# FPS clock
clock = time.clock()

while (True):
    clock.tick()

    # Capture an image
    image = sensor.snapshot()
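
    # Hedged continuation (not in the original snippet): read one FIR frame and
    # overlay it on the camera image using the fir.read_ir()/draw_ir() calls
    # from the standard OpenMV fir examples.
    ta, ir, to_min, to_max = fir.read_ir()   # ambient temp, IR array, min/max object temps
    fir.draw_ir(image, ir)                   # blend the thermal data over the RGB image
    image.draw_string(0, 0, "Ta: %0.2f C" % ta)
    print(clock.fps())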