コード例 #1
0
def test_http_download():
    """Repeatedly download a fixed ZIP over HTTP.

    Each pass saves the file under a fresh name on the SD card, then
    reports the elapsed time for the transfer in seconds.  Runs forever.
    """
    attempt = 0
    while True:
        time.sleep(1)
        attempt += 1

        target = '/sd/test_' + str(attempt) + '.zip'
        url = 'http://120.78.165.108/juwan/K-Flash.zip'

        # Free as much heap as possible before the large transfer.
        gc.collect()

        started = time.ticks()
        try:
            ctype = MicroWebCli.FileRequest(url, target, progressCallback)
            print('File of content type "%s" was saved to "%s"' % (ctype, target))
        except Exception as err:
            print(err)
        print('total time', (time.ticks() - started) / 1000, 's')
コード例 #2
0
ファイル: button.py プロジェクト: Donghyun-34/KUCIS
def btn_function(pin_num):
    """Button press handler with a 500-tick debounce window.

    Ignores presses that arrive within 500 ticks of the last accepted one.
    """
    global btn_time
    now = time.ticks()
    if now - btn_time >= 500:
        print("버튼이 눌렸습니다.")
        btn_time = time.ticks()
コード例 #3
0
def test_http_get():
    """Poll a fixed HTTP(S) endpoint once per second and dump the response.

    Reads the body in up to 2048-byte chunks into a reusable buffer,
    printing each chunk, then reports content type / status and the
    per-request elapsed time.  Runs forever.
    """

    #wCli = MicroWebCli('http://api.nnzhp.cn/api/user/stu_info')
    wCli = MicroWebCli('https://tcc.taobao.com/cc/json/mobile_tel_segment.htm?tel=13631786501')

    while True:
        time.sleep(1)
        bak = time.ticks()  # start-of-request tick, for timing
        try:
            print('GET %s' % wCli.URL)
            wCli.OpenRequest()
            buf  = memoryview(bytearray(2048))
            resp = wCli.GetResponse()
            if resp.IsSuccess() :
              while not resp.IsClosed() :
                x = resp.ReadContentInto(buf)
                if x < len(buf) :
                  # NOTE(review): this shrinks buf permanently after the first
                  # short read; later full chunks are then capped at x bytes.
                  buf = buf[:x]
                print(bytes(buf))
              print('GET success with "%s" content type' % resp.GetContentType())
            else :
              print('GET return %d code (%s)' % (resp.GetStatusCode(), resp.GetStatusMessage()))
        except Exception as E:
            print(E)
        print('total time ', (time.ticks() - bak) / 1000, ' s')
コード例 #4
0
def pi_test(n=5000):
    """Benchmark pi(n): print the elapsed seconds and return them as a
    '%.2f'-formatted string."""
    started = time.ticks()
    pi(n)
    finished = time.ticks()
    elapsed = (finished * 1.0 - started) / 1000
    print('  Pi', n, 'digit calculation: ', elapsed, 's')
    return '%.2f' % elapsed
コード例 #5
0
def BlobTest(thresholds, loopCnt = 390, barLen = 120):
    """Color-blob detection demo.

    Configures the sensor for RGB565/CIF, then for roughly *loopCnt* ticks
    grabs frames, finds blobs matching *thresholds*, marks each one, and
    prints a smoothed per-frame detection time.  *barLen* is the progress
    bar width handed to DrawPgsBar.
    """
    sensor.reset()
    sensor.set_pixformat(sensor.RGB565)
    sensor.set_framesize(sensor.CIF)
    #sensor.set_windowing((320,240))
    sensor.set_auto_gain(True)
    #sensor.set_auto_whitebal(True) # must be turned off for color tracking
    clock = time.clock()
    avg = 0.0  # exponential moving average of find_blobs() cost
    startTick = time.ticks()
    while(True):
        if time.ticks() - startTick > loopCnt:  # time budget exhausted
            break
        clock.tick()
        img = sensor.snapshot()
        img.draw_string(4, 8, 'red,green,blue blob detect', color=(0,0,0))
        t0 = time.ticks()
        blobSet = img.find_blobs(thresholds, pixels_threshold=200, area_threshold=200)
        t1 = time.ticks() - t0
        avg = avg * 0.95 + t1 * 0.05  # EMA: 5% weight on the newest sample
        lnLen = (barLen * (loopCnt - (time.ticks() - startTick))) // loopCnt  # NOTE(review): unused; DrawPgsBar recomputes it
        DrawPgsBar(img, barLen, loopCnt, startTick)
        for blob in blobSet:
            img.draw_rectangle(blob.rect())
            img.draw_cross(blob.cx(), blob.cy())
        print('algo time cost : %.2f ms' % (avg))
コード例 #6
0
def CorrTest(loopCnt = 220, barLen=120):
    """Lens-correction demo.

    For roughly *loopCnt* ticks, draws concentric rectangles on each frame
    and applies a lens-correction strength that sweeps from 0.3 to 4.0 in
    0.05 steps, wrapping around.
    """
    sensor.reset()

    # Sensor settings
    sensor.set_contrast(1)
    sensor.set_gainceiling(16)

    sensor.set_framesize(sensor.QVGA)
    sensor.set_pixformat(sensor.RGB565)
    #sensor.set_windowing((480,272))
    clock = time.clock()
    avg = 0.0  # NOTE(review): never updated in this demo
    startTick = time.ticks()
    corr = 0.3  # current lens-correction strength
    while (True):
        if time.ticks() - startTick > loopCnt:  # time budget exhausted
            break
        clock.tick()
        img = sensor.snapshot()
        # Nested rectangles make the warping visually obvious.
        for i in range(7):
            img.draw_rectangle(160-i*15, 120-i*15, i*15*2, i*15*2)
        corr += 0.05
        if corr >= 4.0:  # wrap the sweep back to the minimum strength
            corr = 0.3
        img.lens_corr(corr)

        lnLen = (barLen * (loopCnt - (time.ticks() - startTick))) // loopCnt  # NOTE(review): unused; DrawPgsBar recomputes it
        DrawPgsBar(img, barLen, loopCnt, startTick)
        img.draw_string(4,4,'Lens correction %.2f' % (corr), color=(0,0,0))
コード例 #7
0
def float_div_test(n=1000000, a=12345.678, b=56.789):
    """Time *n* floating-point divisions of a by b; print and return the
    elapsed seconds ('%.2f' string)."""
    started = time.ticks()
    result = 0
    for _ in range(n):
        result = a / b
    finished = time.ticks()
    elapsed = (finished * 1.0 - started) / 1000
    print('  Float Div test', n, 'times: ', elapsed, 's')
    return '%.2f' % elapsed
コード例 #8
0
def int_div_test(n=1000000, a=123456, b=567):
    """Time *n* integer floor-divisions of a by b; print and return the
    elapsed seconds ('%.2f' string)."""
    started = time.ticks()
    result = 0
    for _ in range(n):
        result = a // b
    finished = time.ticks()
    elapsed = (finished * 1.0 - started) / 1000
    print('  Integer Div test', n, 'times: ', elapsed, 's')
    return '%.2f' % elapsed
コード例 #9
0
def float_mul_test(n=1000000, a=1234.5678, b=5678.1234):
    """Time *n* floating-point multiplications of a by b; print and return
    the elapsed seconds ('%.2f' string)."""
    started = time.ticks()
    result = 0
    for _ in range(n):
        result = a * b
    finished = time.ticks()
    elapsed = (finished * 1.0 - started) / 1000
    print('  Float Mul test', n, 'times: ', elapsed, 's')
    return '%.2f' % elapsed
コード例 #10
0
def int_mul_test(n=1000000, a=12345, b=56789):
    """Time *n* integer multiplications of a by b; print and return the
    elapsed seconds ('%.2f' string)."""
    started = time.ticks()
    result = 0
    for _ in range(n):
        result = a * b
    finished = time.ticks()
    elapsed = (finished * 1.0 - started) / 1000
    print('  Integer Mul test', n, 'times: ', elapsed, 's')
    return '%.2f' % elapsed
コード例 #11
0
ファイル: esp32spi.py プロジェクト: nezra/ESP32SPI
 def _wait_spi_char(self, spi, desired):
     """Poll bytes from *spi* until *desired* arrives or ~100 ticks pass.

     Returns True on a match.  Raises RuntimeError if an _ERR_CMD byte is
     received or the window expires without seeing *desired*.
     """
     started = time.ticks()
     while time.ticks() - started < 100:
         received = self._read_byte(spi)
         if received == _ERR_CMD:
             raise RuntimeError("Error response to command")
         if received == desired:
             return True
     raise RuntimeError("Timed out waiting for SPI char")
コード例 #12
0
def CIFAR10Test(loopCnt=600, isFull=False, barLen=105):
    """CIFAR-10 classification demo.

    Loads the full or fast CIFAR-10 network (per *isFull*) and, for roughly
    *loopCnt* ticks, classifies a 192x192 window of each frame, labelling
    detections on screen and printing a smoothed search time.
    """
    pyb.LED(1).off()
    sensor.reset()  # Reset and initialize the sensor.
    sensor.set_contrast(3)
    sensor.set_pixformat(
        sensor.RGB565)  # Set pixel format to RGB565 (or GRAYSCALE)
    sensor.set_framesize(sensor.VGA)  # Set frame size to QVGA (320x240)
    sensor.set_windowing((192, 192))  # Set window
    sensor.skip_frames(time=300)  # Wait for settings take effect.
    sensor.set_auto_gain(False)
    #sensor.set_framerate(0<<9|1<<12)
    # The full network is slower but presumably more accurate than the
    # fast variant — TODO confirm.
    if isFull:
        net = nn.load('/cifar10.network')
    else:
        net = nn.load('/cifar10_fast.network')
    labels = [
        'plane', 'auto', 'bird', 'cat', 'deer', 'dog', 'frog', 'horse', 'ship',
        'truck'
    ]
    clock = time.clock()
    tAvg = 0.0  # exponential moving average of net.search() cost
    startTick = time.ticks()
    while (True):
        if time.ticks() - startTick > loopCnt:  # time budget exhausted
            break
        clock.tick()
        img = sensor.snapshot()
        t0 = time.ticks()
        lst = net.search(img, threshold=0.640, min_scale=1, scale_mul=0.8, \
        x_overlap=-1, y_overlap=-1, contrast_threshold=0.5)
        t1 = time.ticks() - t0
        tAvg = tAvg * 0.9 + t1 * 0.1  # EMA: 10% weight on the newest sample
        img.draw_string(
            4,
            8,
            'CIFAR-10: classify:\nplane,auto,cat,dog,\ndeer,horse,frog,ship,\ntruck,horse',
            color=(0, 0, 0))
        lnLen = (barLen * (loopCnt - (time.ticks() - startTick))) // loopCnt  # NOTE(review): unused; DrawPgsBar recomputes it
        DrawPgsBar(img, barLen, loopCnt, startTick)
        for obj in lst:
            print(' %s  - Confidence %f%%' %
                  (labels[obj.index()], obj.value()))
            rc = obj.rect()
            #img.draw_rectangle(rc, color=(255,255,255))
            # Black backing box so the label text stays readable.
            img.draw_rectangle(barLen + 10,
                               1,
                               50,
                               8,
                               fill=True,
                               color=(0, 0, 0))
            img.draw_string(barLen + 10, 0, labels[obj.index()])
        print('algo time cost : %.2f ms' % (tAvg))
コード例 #13
0
ファイル: esp32spi.py プロジェクト: nezra/ESP32SPI
    def socket_connect(self, socket_num, dest, port, conn_mode=TCP_MODE):
        """Open and verify we connected a socket to a destination IP address or hostname
        using the ESP32's internal reference number. By default we use
        'conn_mode' TCP_MODE but can also use UDP_MODE or TLS_MODE (dest must
        be hostname for TLS_MODE!)"""
        if self._debug:
            print("*** Socket connect mode", conn_mode)

        self.socket_open(socket_num, dest, port, conn_mode=conn_mode)
        times = time.ticks()
        # NOTE(review): the comment says 3 seconds, but if time.ticks()
        # returns milliseconds this window is only 3 ms — confirm units.
        while (time.ticks() - times) < 3:  # wait 3 seconds
            if self.socket_connected(socket_num):
                return True
            time.sleep(0.01)  # brief back-off between connection polls
        raise RuntimeError("Failed to establish connection")
コード例 #14
0
ファイル: esp32spi.py プロジェクト: nezra/ESP32SPI
 def _wait_for_ready(self):
     """Block until the ESP32 ready pin goes low, for up to ~10000 ticks.

     Raises RuntimeError if the pin never goes low within the window.
     """
     if self._debug:
         print("Wait for ESP32 ready", end='')
     times = time.ticks()
     while (time.ticks() - times) < 10000:  # wait up to 10 seconds
         if not self._ready.value():  # we're ready!
             break
         if self._debug:
             print('.', end='')
         # BUG FIX: this back-off sleep was previously nested inside the
         # debug branch, so with debug disabled the loop busy-waited at
         # full speed instead of yielding between polls.
         utime.sleep(0.05)
     else:
         raise RuntimeError("ESP32 not responding")
     if self._debug:
         print()
コード例 #15
0
 def readline(self):
     """Attempt to return as many bytes as we can up to but not including '\r\n'"""
     #print("Socket readline")
     stamp = time.ticks()  # start-of-call tick; basis for the timeout check
     while b'\r\n' not in self._buffer:
         # there's no line already in there, read some more
         avail = min(_the_interface.socket_available(self._socknum), MAX_PACKET)
         if avail:
             self._buffer += _the_interface.socket_read(self._socknum, avail)
         elif self._timeout > 0 and time.ticks() - stamp > self._timeout:
             self.close()  # Make sure to close socket so that we don't exhaust sockets.
             raise RuntimeError("Didn't receive full response, failing out")
     # Split off the first complete line; keep the remainder buffered.
     firstline, self._buffer = self._buffer.split(b'\r\n', 1)
     gc.collect()  # reclaim the intermediate buffer concatenations
     return firstline
コード例 #16
0
 def _timer_permission_check(self):
     """Block until the motion timer has expired and the controller reports
     no motion playing; always returns True when it finally exits.

     On unix, times come from time.time() (seconds); on-board they come
     from time.ticks().
     """
     if self.unix:
         timer_check = self._motion_start_time + self._motion_duration <= time.time(
         )
     else:
         timer_check = self._motion_start_time + self._motion_duration <= time.ticks(
         )
     motion_finished = False
     # NOTE(review): timer_check is computed once before the loop and never
     # refreshed, so if the timer has not yet expired this loop cannot
     # terminate — confirm that is intended.
     # NOTE(review): the non-unix branch sleeps 100 — if that port's
     # time.sleep takes seconds this is 100 s, not 100 ms; confirm units.
     while not (motion_finished and timer_check):
         try:
             if self.unix:
                 time.sleep(0.1)
             else:
                 time.sleep(100)
             if self.kondo is not None:
                 current_motion = self.kondo.getMotionPlayNum()
         except OSError:
             # Channel busy/unavailable: back off, then retry the query.
             if self.unix:
                 time.sleep(CHANNEL_WAIT_TIME / 1000)
             else:
                 time.sleep(CHANNEL_WAIT_TIME)
             if self.kondo is not None:
                 current_motion = self.kondo.getMotionPlayNum()
         if self.kondo is not None:
             # Motion numbers 0/1/2 are treated as "finished" — presumably
             # idle/neutral states; TODO confirm against the controller docs.
             motion_finished = current_motion == 0 or current_motion == 1 or current_motion == 2
         else:
             motion_finished = True
     return True
コード例 #17
0
ファイル: esp32spi.py プロジェクト: nezra/ESP32SPI
    def _send_command(self, cmd, params=None, *, param_len_16=False):
        """Send over a command with a list of parameters.

        Packet layout: _START_CMD, command byte (reply flag cleared),
        parameter count, then each parameter as a 1- or 2-byte big-endian
        length (per *param_len_16*) followed by its bytes, then _END_CMD,
        with the total padded to a multiple of 4 bytes.
        """
        if not params:
            params = ()

        packet_len = 4  # header + end byte
        for i, param in enumerate(params):
            packet_len += len(param)  # parameter
            packet_len += 1  # size byte
            if param_len_16:
                packet_len += 1  # 2 of em here!
        while packet_len % 4 != 0:  # pad to a 32-bit boundary
            packet_len += 1
        # we may need more space
        if packet_len > len(self._sendbuf):
            self._sendbuf = bytearray(packet_len)

        self._sendbuf[0] = _START_CMD
        self._sendbuf[1] = cmd & ~_REPLY_FLAG
        self._sendbuf[2] = len(params)

        # handle parameters here
        ptr = 3  # write cursor into the send buffer
        for i, param in enumerate(params):
            if self._debug >= 2:
                print("\tSending param #%d is %d bytes long" % (i, len(param)))
            if param_len_16:
                self._sendbuf[ptr] = (len(param) >> 8) & 0xFF
                ptr += 1
            self._sendbuf[ptr] = len(param) & 0xFF
            ptr += 1
            for j, par in enumerate(param):
                self._sendbuf[ptr + j] = par
            ptr += len(param)
        self._sendbuf[ptr] = _END_CMD

        self._wait_for_ready()
        with self._spi_device as spi:
            times = time.ticks()
            while (time.ticks() - times) < 1000:  # wait up to 1000ms
                if self._ready.value():  # ok ready to send!
                    break
            else:
                raise RuntimeError("ESP32 timed out on SPI select")
            spi.write(self._sendbuf)  # pylint: disable=no-member
            if self._debug:
                print("Wrote: ", [hex(b) for b in self._sendbuf[0:packet_len]])
コード例 #18
0
def sendimage(spi, pin):
    """Capture a frame, stamp it with the ticks since the last send,
    JPEG-compress it, and ship a packed header plus the frame over SPI
    via sendblob()."""
    global last_time
    frame = sensor.snapshot()
    frame.draw_string(0,
                      0,
                      str(time.ticks() - last_time),
                      color=(255, 0, 0),
                      scale=2)
    last_time = time.ticks()
    #frame = frame.compressed(quality=45)  # using this raises a memory error when the image is too large
    frame = frame.compress(45)  # in-place JPEG compression, quality 45
    print("size=", str(frame.size()), "time=", time.ticks() - last_time)

    # Pack only the header here to reduce the memory needed to build data.
    #data = ustruct.pack("<bi%ds" % frame.size(), 85, frame.size(), frame) # 85 is a sync char.
    data = ustruct.pack("<bi", 85, frame.size())  # 85 is a sync char.
    sendblob(spi, pin, data, frame)
コード例 #19
0
 def _set_timer(self, duration):
     """Record the motion start time and sleep for *duration*.

     NOTE(review): the unix branch treats *duration* as milliseconds
     (sleeps duration/1000 s) while the on-board branch sleeps
     int(duration) — if that port's time.sleep also takes seconds this is
     1000x longer; confirm the port's sleep units.
     """
     self._motion_duration = duration
     if self.unix:
         self._motion_start_time = time.time()
         time.sleep(int(duration / 1000))
     else:
         self._motion_start_time = time.ticks()
         time.sleep(int(duration))
コード例 #20
0
def QRCodeTest(loopCnt=120, barLen=120):
    """QR-code scanning demo.

    For roughly *loopCnt* ticks, finds QR codes in each frame, outlines
    them, overlays their payload text, and prints a smoothed scan time.
    """
    sensor.reset()
    sensor.set_framerate(1 << 11)
    sensor.set_pixformat(sensor.RGB565)
    sensor.set_framesize(sensor.VGA)
    sensor.set_windowing((400, 272))
    sensor.skip_frames(time=300)  # wait for settings to take effect
    sensor.set_auto_gain(False)
    clock = time.clock()
    avg = 0.0  # exponential moving average of find_qrcodes() cost
    startTick = time.ticks()
    while (True):
        if time.ticks() - startTick > loopCnt:  # time budget exhausted
            break
        clock.tick()
        img = sensor.snapshot()
        #img.lens_corr(1.5) # strength of 1.8 is good for the 2.8mm lens.
        t1 = time.ticks()
        codeSet = img.find_qrcodes()
        t2 = time.ticks() - t1
        lnLen = (barLen * (loopCnt - (time.ticks() - startTick))) // loopCnt  # NOTE(review): unused; DrawPgsBar recomputes it
        # NOTE(review): the 5th argument here is a text label, but the
        # DrawPgsBar in this file takes a numeric width as its 5th
        # parameter — confirm which DrawPgsBar variant is in scope.
        DrawPgsBar(img, barLen, loopCnt, startTick, 'QR code scan')
        avg = avg * 0.92 + t2 * 0.08  # EMA: 8% weight on the newest sample

        for code in codeSet:
            rc = code.rect()
            img.draw_rectangle(rc, thickness=2, color=(0, 191, 255))
            #print(type(code))
            #print(code.payload())

            sPayload = code.payload()
            #print(len(sPayload))
            lnLen = len(sPayload) * 8  # ~8 px per character of payload text
            # Clamp the label so it stays inside the 400-px-wide window.
            if rc[0] + lnLen >= 400:
                x = 400 - lnLen
            else:
                x = rc[0]
            # Black backing box behind the payload text for readability.
            img.draw_rectangle(x - 1,
                               rc[1] + 1,
                               lnLen + 2,
                               8,
                               color=(0, 0, 0),
                               fill=True)
            img.draw_string(x, rc[1], sPayload)
        print('algo time cost : %.2f ms' % (avg))
コード例 #21
0
ファイル: core.py プロジェクト: zeilenschubser/micropython
def run_until_complete(main_task=None, run_once=False):
    """Run the task scheduler until *main_task* completes (forever if None).

    If *run_once* is true, perform at most one scheduling pass.  Returns
    the StopIteration value of *main_task* when it finishes; re-raises any
    other exception it ends with.  Exceptions from detached tasks are
    routed to the loop's exception handler.
    """
    global cur_task
    excs_all = (CancelledError, Exception)  # To prevent heap allocation in loop
    excs_stop = (CancelledError, StopIteration)  # To prevent heap allocation in loop
    runs = 0  # scheduling passes completed, for the run_once early exit
    while True:
        runs += 1
        if run_once and runs > 1:break
        # Wait until the head of _task_queue is ready to run
        dt = 1
        while dt > 0:
            dt = -1
            t = _task_queue.peek()
            if t:
                # A task waiting on _task_queue; "ph_key" is time to schedule task at
                dt = max(0, ticks_diff(t.ph_key, ticks()))
            elif not _io_queue.map:
                # No tasks can be woken so finished running
                return
            # print('(poll {})'.format(dt), len(_io_queue.map))
            _io_queue.wait_io_event(dt)

        # Get next task to run and continue it
        t = _task_queue.pop_head()
        cur_task = t
        try:
            # Continue running the coroutine, it's responsible for rescheduling itself
            exc = t.data
            if not exc:
                t.coro.send(None)
            else:
                t.data = None
                t.coro.throw(exc)
        except excs_all as er:
            # Check the task is not on any event queue
            assert t.data is None
            # This task is done, check if it's the main task and then loop should stop
            if t is main_task:
                if isinstance(er, StopIteration):
                    return er.value
                raise er
            # Save return value of coro to pass up to caller
            t.data = er
            # Schedule any other tasks waiting on the completion of this task
            waiting = False
            if hasattr(t, "waiting"):
                while t.waiting.peek():
                    _task_queue.push_head(t.waiting.pop_head())
                    waiting = True
                t.waiting = None  # Free waiting queue head
            # Print out exception for detached tasks
            if not waiting and not isinstance(er, excs_stop):
                _exc_context["exception"] = er
                _exc_context["future"] = t
                Loop.call_exception_handler(_exc_context)
            # Indicate task is done
            t.coro = None
コード例 #22
0
def FaceTest(loopCnt=220, barLen=120):
    """Haar-cascade face detection demo.

    For roughly *loopCnt* ticks, runs the frontal-face cascade on each
    grayscale frame, boxes and numbers each detection, and prints a
    smoothed detection time.
    """
    sensor.reset()

    # Sensor settings
    sensor.set_contrast(1)
    #sensor.set_gainceiling(16)
    # HQVGA and GRAYSCALE are the best for face tracking.

    #sensor.set_framesize(sensor.VGA)
    #sensor.set_windowing((320,240))
    sensor.set_framesize(sensor.VGA)
    sensor.set_windowing((320, 240))
    sensor.set_pixformat(sensor.GRAYSCALE)
    #sensor.set_auto_gain(False)
    #sensor.set_auto_whitebal(True) # must be turned off for color tracking
    # Load Haar Cascade
    # By default this will use all stages, lower stages is faster but less accurate.
    face_cascade = image.HaarCascade("frontalface", stages=25)
    print(face_cascade)
    clock = time.clock()
    avg = 0.0  # exponential moving average of find_features() cost
    startTick = time.ticks()
    while (True):
        if time.ticks() - startTick > loopCnt:  # time budget exhausted
            break
        clock.tick()
        img = sensor.snapshot()
        img.draw_string(4, 4, 'Face Detect', color=(0, 0, 0))
        t0 = time.ticks()
        objects = img.find_features(face_cascade,
                                    threshold=0.75,
                                    scale_factor=1.25)
        t1 = time.ticks() - t0
        avg = avg * 0.90 + t1 * 0.10  # EMA: 10% weight on the newest sample
        fID = 0  # running face index for the on-screen labels
        lnLen = (barLen * (loopCnt - (time.ticks() - startTick))) // loopCnt  # NOTE(review): unused; DrawPgsBar recomputes it
        DrawPgsBar(img, barLen, loopCnt, startTick)
        for r in objects:
            img.draw_rectangle(r, thickness=3)
            img.draw_rectangle(r[0], r[1], 48, 10, fill=True)  # label backing box
            fID += 1
            s = 'face %d' % (fID)
            img.draw_string(r[0], r[1], s, color=(0, 0, 0))
        print('algo time cost : %.2f ms' % (avg))
コード例 #23
0
def LENETTest(loopCnt=1200, barLen=60):
    """LeNet digit-recognition demo.

    For roughly *loopCnt* ticks, binarizes each grayscale frame, runs the
    LeNet network over it to detect digits 0-9, draws the results, and
    prints a smoothed search time.
    """
    sensor.reset()  # Reset and initialize the sensor.
    sensor.set_contrast(3)
    sensor.set_pixformat(
        sensor.GRAYSCALE)  # Set pixel format to RGB565 (or GRAYSCALE)
    sensor.set_framesize(sensor.VGA)  # Set frame size to QVGA (320x240)
    sensor.set_windowing((84, 84))  # Set 128x128 window.
    sensor.skip_frames(time=1400)  # Wait for settings take effect.
    sensor.set_auto_gain(False)
    sensor.set_framerate(2 << 2)
    #sensor.set_auto_whitebal(False)
    #sensor.set_auto_exposure(False)

    net = nn.load('/lenet.network')
    labels = ['0', '1', '2', '3', '4', '5', '6', '7', '8', '9']
    clock = time.clock()
    avg = 0.0  # exponential moving average of net.search() cost
    pyb.LED(1).on()
    startTick = time.ticks()
    while (True):
        if time.ticks() - startTick > loopCnt:  # time budget exhausted
            break
        clock.tick()
        img = sensor.snapshot()
        img.draw_string(3, 8, 'recg 0-9', color=(0, 0, 0))
        t1 = time.ticks()
        # Binarize a copy so dark strokes become foreground for the net.
        tmp_img = img.copy().binary([(120, 255)], invert=True)
        lst = net.search(tmp_img, threshold=0.8, min_scale=1, scale_mul=0.8, \
        x_overlap=-1, y_overlap=-1, contrast_threshold=0.5, softmax=False)
        t2 = time.ticks() - t1
        avg = avg * 0.95 + t2 * 0.05  # EMA: 5% weight on the newest sample
        lnLen = (barLen * (loopCnt - (time.ticks() - startTick))) // loopCnt
        # Inline progress bar (outline + fill) drawn along the top edge.
        img.draw_rectangle(0, 2, barLen + 1, 3)
        img.draw_rectangle(0, 3, lnLen, 1, fill=True)
        for obj in lst:
            print('Detected %s - Confidence %f%%' %
                  (labels[obj.index()], obj.value()))
            img.draw_rectangle(obj.rect())
            img.draw_string(barLen + 8,
                            2,
                            labels[obj.index()],
                            color=(0, 0, 0))
        # print(clock.fps())
        print('algo time cost : %.2f ms' % (avg))
コード例 #24
0
def run_until_complete(main_task=None):
    """Run the task scheduler until *main_task* completes (forever if None).

    Returns the StopIteration value of *main_task* when it finishes;
    re-raises any other exception it ends with.  A detached task that dies
    with an unexpected exception is re-queued so Task.throw can surface it.
    """
    global cur_task
    excs_all = (CancelledError, Exception
                )  # To prevent heap allocation in loop
    excs_stop = (CancelledError, StopIteration
                 )  # To prevent heap allocation in loop
    while True:
        # Wait until the head of _task_queue is ready to run
        dt = 1
        while dt > 0:
            dt = -1
            t = _task_queue.peek()
            if t:
                # A task waiting on _task_queue; "ph_key" is time to schedule task at
                dt = max(0, ticks_diff(t.ph_key, ticks()))
            elif not _io_queue.map:
                # No tasks can be woken so finished running
                return
            # print('(poll {})'.format(dt), len(_io_queue.map))
            _io_queue.wait_io_event(dt)

        # Get next task to run and continue it
        t = _task_queue.pop_head()
        cur_task = t
        try:
            # Continue running the coroutine, it's responsible for rescheduling itself
            exc = t.data
            if not exc:
                t.coro.send(None)
            else:
                t.data = None
                t.coro.throw(exc)
        except excs_all as er:
            # Check the task is not on any event queue
            assert t.data is None
            # This task is done, check if it's the main task and then loop should stop
            if t is main_task:
                if isinstance(er, StopIteration):
                    return er.value
                raise er
            # Schedule any other tasks waiting on the completion of this task
            waiting = False
            if hasattr(t, "waiting"):
                while t.waiting.peek():
                    _task_queue.push_head(t.waiting.pop_head())
                    waiting = True
                t.waiting = None  # Free waiting queue head
            if not waiting and not isinstance(er, excs_stop):
                # An exception ended this detached task, so queue it for later
                # execution to handle the uncaught exception if no other task retrieves
                # the exception in the meantime (this is handled by Task.throw).
                _task_queue.push_head(t)
            # Indicate task is done by setting coro to the task object itself
            t.coro = t
            # Save return value of coro to pass up to caller
            t.data = er
コード例 #25
0
def DrawPgsBar(img, barLen, loopCnt, startTick, width=5):
    """Draw a countdown progress bar along the top edge of *img*.

    The filled portion shrinks as the elapsed ticks since *startTick*
    approach *loopCnt*; the fill shade pulses via the global barCol.
    *width* is the bar height in pixels.
    """
    global barCol
    # BUG FIX: one caller in this file passes a text label as the 5th
    # positional argument; previously that reached `width - 2` below and
    # raised a TypeError.  Fall back to the default height for any
    # non-integer width.
    if not isinstance(width, int):
        width = 5
    # Remaining fraction of the time budget, scaled to the bar length.
    lnLen = (barLen * (loopCnt - (time.ticks() - startTick))) // loopCnt
    # Triangle-wave shade: ramp 0..127 up, then back down, as barCol advances.
    if (barCol & 0x80) == 0:
        c = barCol & 0x7F
    else:
        c = 128 - (barCol & 0x7F)
    img.draw_rectangle(2, 2, barLen + 2, width, color=(0,0,0))
    img.draw_rectangle(2, 3, lnLen, width-2, fill=True, color=(c,c,c))
    barCol += 16
コード例 #26
0
    def read(self, size=0):
        """Read up to 'size' bytes from the socket, this may be buffered internally!
        If 'size' isn't specified, return everything in the buffer."""
        #print("Socket read", size)
        if size == 0:   # read as much as we can at the moment
            while True:
                avail = min(_the_interface.socket_available(self._socknum), MAX_PACKET)
                if avail:
                    self._buffer += _the_interface.socket_read(self._socknum, avail)
                else:
                    break
            gc.collect()
            ret = self._buffer
            self._buffer = b''
            gc.collect()
            return ret
        stamp = time.ticks()  # tick of the last progress, for the timeout check

        to_read = size - len(self._buffer)
        received = []
        while to_read > 0:
            #print("Bytes to read:", to_read)
            avail = min(_the_interface.socket_available(self._socknum), MAX_PACKET)
            if avail:
                stamp = time.ticks()  # data arrived: reset the timeout window
                recv = _the_interface.socket_read(self._socknum, min(to_read, avail))
                received.append(recv)
                to_read -= len(recv)
                gc.collect()
            if self._timeout > 0 and time.ticks() - stamp > self._timeout:
                break
        #print(received)
        self._buffer += b''.join(received)

        ret = None
        if len(self._buffer) == size:
            ret = self._buffer
            self._buffer = b''
        else:
            # Timed out short, or buffer holds extra bytes: hand back up to
            # 'size' bytes and keep the remainder buffered.
            ret = self._buffer[:size]
            self._buffer = self._buffer[size:]
        gc.collect()
        return ret
コード例 #27
0
    def __init__(self, P=0.2, I=0.0, D=0.0):
        """Initialize a PID controller with the given gains and reset state."""
        # Controller gains.
        self.Kp = P
        self.Ki = I
        self.Kd = D

        # Timing state: no minimum sample interval, both timestamps "now".
        self.sample_time = 0.00
        now = time.ticks()
        self.current_time = now
        self.last_time = now

        self.clear()
コード例 #28
0
def BlobTest(thresholds):
    """Continuous color-blob tracking demo (fixed 50000-frame loop).

    Finds blobs matching *thresholds* in each QVGA frame, marks them, and
    prints a smoothed find_blobs() time per frame.
    """
    sensor.reset()
    pyb.LED(1).on()
    sensor.set_pixformat(sensor.RGB565)
    #sensor.set_framesize(sensor.VGA)
    #sensor.set_windowing((432,432))
    sensor.set_framesize(sensor.QVGA)
    #sensor.set_framerate(1<<9|7<<11)
    #sensor.set_framerate(2<<9|2<<11) #120/3=40M
    sensor.set_auto_gain(True)
    #sensor.set_windowing((50, 50, 50, 90))
    sensor.skip_frames(time=500)
    #sensor.__write_reg(0xac, 0xF7)  #needed for CIF RGB565
    #sensor.set_auto_gain(True) # must be turned off for color tracking
    #sensor.set_auto_whitebal(False) # must be turned off for color tracking
    clock = time.clock()
    print('main loop')
    avg = 0  # exponential moving average of find_blobs() cost
    lpCnt = 0  # frames processed so far
    #sensor.__write_reg(0xa6, c)
    for i in range(50000):
        clock.tick()
        img = sensor.snapshot()
        n = 0  # blobs found in this frame
        t1 = time.ticks()
        #pyb.LED(2).toggle()
        blbs = img.find_blobs(thresholds,
                              pixels_threshold=120,
                              area_threshold=120)
        t2 = time.ticks() - t1
        avg = avg * 0.95 + t2 * 0.05  # EMA: 5% weight on the newest sample
        for blob in blbs:
            img.draw_rectangle(blob.rect(), thickness=2, color=(0, 255, 255))
            img.draw_cross(blob.cx(), blob.cy())
            n += 1
            #print('blob %d, cx=%d,cy=%d' % (n,blob.cx(), blob.cy()))
        print('time=', avg, lpCnt)
        #if lpCnt % 10 == 0:
        #print('fps=', clock.fps())
        lpCnt += 1
コード例 #29
0
def run_until_complete(main_task=None):
    """Run the task scheduler until *main_task* completes (forever if None).

    Older uasyncio-core variant: the run queue is `_queue` and a task's
    `data` field holds either its scheduled time (int) or an exception to
    throw into it.  Returns *main_task*'s StopIteration value; re-raises
    any other exception it ends with.
    """
    global cur_task
    excs_all = (CancelledError, Exception) # To prevent heap allocation in loop
    excs_stop = (CancelledError, StopIteration) # To prevent heap allocation in loop
    while True:
        # Wait until the head of _queue is ready to run
        dt = 1
        while dt > 0:
            dt = -1
            if _queue.next:
                # A task waiting on _queue
                if isinstance(_queue.next.data, int):
                    # "data" is time to schedule task at
                    dt = max(0, ticks_diff(_queue.next.data, ticks()))
                else:
                    # "data" is an exception to throw into the task
                    dt = 0
            elif not _io_queue.map:
                # No tasks can be woken so finished running
                return
            #print('(poll {})'.format(dt), len(_io_queue.map))
            _io_queue.wait_io_event(dt)

        # Get next task to run and continue it
        t = _queue.pop_head()
        cur_task = t
        try:
            # Continue running the coroutine, it's responsible for rescheduling itself
            if isinstance(t.data, int):
                t.coro.send(None)
            else:
                t.coro.throw(t.data)
        except excs_all as er:
            # This task is done, schedule any tasks waiting on it
            if t is main_task:
                if isinstance(er, StopIteration):
                    return er.value
                raise er
            t.data = er # save return value of coro to pass up to caller
            waiting = False
            if hasattr(t, 'waiting'):
                while t.waiting.next:
                    _queue.push_head(t.waiting.pop_head())
                    waiting = True
                t.waiting = None # Free waiting queue head
            _io_queue.remove(t) # Remove task from the IO queue (if it's on it)
            t.coro = None # Indicate task is done
            # Print out exception for detached tasks
            if not waiting and not isinstance(er, excs_stop):
                print('task raised exception:', t.coro)
                sys.print_exception(er)
コード例 #30
0
ファイル: esp32spi.py プロジェクト: nezra/ESP32SPI
    def _wait_response_cmd(self,
                           cmd,
                           num_responses=None,
                           *,
                           param_len_16=False):
        """Wait for ready, then parse the response.

        Returns a list of bytearray parameters.  If *num_responses* is
        given it is verified against the reply; otherwise the count is
        read from the stream.  *param_len_16* selects 2-byte big-endian
        parameter lengths.
        """
        self._wait_for_ready()

        responses = []
        with self._spi_device as spi:
            times = time.ticks()
            while (time.ticks() - times) < 1000:  # wait up to 1000ms
                if self._ready.value():  # ok ready to send!
                    break
            else:
                raise RuntimeError("ESP32 timed out on SPI select")

            # Reply frame: _START_CMD, cmd|_REPLY_FLAG, count, then each
            # parameter as length byte(s) + payload, terminated by _END_CMD.
            self._wait_spi_char(spi, _START_CMD)
            self._check_data(spi, cmd | _REPLY_FLAG)
            if num_responses is not None:
                self._check_data(spi, num_responses)
            else:
                num_responses = self._read_byte(spi)
            for num in range(num_responses):
                param_len = self._read_byte(spi)
                if param_len_16:
                    param_len <<= 8
                    param_len |= self._read_byte(spi)
                if self._debug >= 2:
                    print("\tParameter #%d length is %d" % (num, param_len))
                response = bytearray(param_len)
                self._read_bytes(spi, response)
                responses.append(response)
            self._check_data(spi, _END_CMD)

        if self._debug:
            print("Read %d: " % len(responses[0]), responses)
        return responses
コード例 #31
0
ファイル: video.py プロジェクト: PierreBizouard/openmv
# Record a 15-second JPEG/QVGA video to an AVI file on the SD card.
import sensor, avi, time

REC_LENGTH = 15 # recording length in seconds

# Set sensor parameters
sensor.reset()
sensor.set_brightness(-2)
sensor.set_contrast(1)

sensor.set_framesize(sensor.QVGA)
sensor.set_pixformat(sensor.JPEG)

# Open the AVI container on the SD card: 320x240 at 15 fps.
vid = avi.AVI("1:/test.avi", 320, 240, 15)

start = time.ticks()
clock = time.clock()

# Record until REC_LENGTH seconds' worth of ticks (x1000) have elapsed.
while ((time.ticks()-start) < (REC_LENGTH*1000)):
    clock.tick()
    # capture and store frame
    image = sensor.snapshot()
    vid.add_frame(image)
    print(clock.fps())

# Finalize the AVI header/index so the file is playable.
vid.flush()