Example #1
def replay(fname, f):
    udmap1, udmap2, udmapsd = init_remap()

    while True:
        ok, record = read_frame(f)
        if not ok:
            break
        (tstamp, throttle, steering, accel, gyro, servo, wheels, periods,
         frame) = record

        bgr = cv2.cvtColor(frame, cv2.COLOR_YUV2BGR_I420)
        bgr[vpy:vpy + bandheight] = 255 - bgr[vpy:vpy + bandheight]

        annotate.draw_throttle(bgr, throttle)
        annotate.draw_steering(bgr, steering, servo)
        annotate.draw_speed(bgr, tstamp, wheels, periods)

        cv2.imshow("frame", bgr)

        k = cv2.waitKey()
        if k == ord('q'):
            break
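These examples feed COLOR_YUV2BGR_I420 a planar I420 buffer: a 640x480 frame is stored as a (480 * 3 // 2, 640) = (720, 640) uint8 array, the full-resolution Y plane followed by the quarter-resolution U and V planes. A minimal self-contained illustration of that layout (a synthetic gray frame, not data from the recordings):

import numpy as np
import cv2

w, h = 640, 480
yuv = np.zeros((h * 3 // 2, w), np.uint8)   # (720, 640): Y plane + U/V planes
yuv[:h] = 128                               # Y plane (mid gray)
yuv[h:] = 128                               # U and V planes (neutral chroma)
bgr = cv2.cvtColor(yuv, cv2.COLOR_YUV2BGR_I420)
assert bgr.shape == (h, w, 3)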
Example #2
            anncenter = (bgr.shape[1] // 2, 600)
        if t0 is None:
            t0 = tstamp
            tstamp_last = tstamp
            dt = 0
        else:
            n += 1
            dt = tstamp - tstamp_last
            tstamp_last = tstamp

        ang += carstate[3][2] * dt
        variance += dt**2
        max_dt = max(dt, max_dt)

        annotate.draw_steering(bgr, carstate[1], carstate[4], anncenter)
        annotate.draw_speed(bgr, tstamp, carstate[5], carstate[6],
                            (anncenter[0] - 100, anncenter[1]))
        annotate.draw_throttle(bgr, carstate[0],
                               (anncenter[0], anncenter[1] + 50))
        if args.interactive:
            cv2.imshow("camera", bgr)
        if vidout is not None:
            vidout.write(bgr)
        print(carstate)

        if 'activations' in data:
            a = data['activations'][1:] - data['activations'][:-1]
            a += 128 * 14
            a = np.clip(a / 14, 0, 255).astype(np.uint8)
            N = a.shape[0]
            a = cv2.resize(a[None, :], (N, 30))
            if 'c0' in data:
    n = 0
    while True:
        ok, data = read_frame(f)
        if not ok:
            break

        tstamp, carstate, particles, controldata, frame = data
        bgr = cv2.cvtColor(
            frame.reshape((-1, imgsiz[0])), cv2.COLOR_YUV2BGR_I420)
        if t0 is None:
            t0 = tstamp
        else:
            n += 1

        annotate.draw_steering(bgr, carstate[1], carstate[4])
        annotate.draw_speed(bgr, tstamp, carstate[5], carstate[6])
        annotate.draw_throttle(bgr, carstate[0])
        cv2.imshow("camera", bgr)
        if vidout is not None:
            vidout.write(bgr)

        # draw the particle filter state
        partview = np.zeros((240, 450), np.uint8)
        partxy = (25 * particles[:, :2]).astype(int)
        inview = ((partxy[:, 0] >= 0) & (partxy[:, 0] < 450) &
                  (partxy[:, 1] <= 0) & (partxy[:, 1] > -240))
        partview[(-partxy[inview, 1], partxy[inview, 0])] = 255
        cv2.imshow("particles", partview)

        (x, y, theta, vf, vr, w, ierr_v, ierr_w, delta,
         target_k, target_v, target_w, ye, psie, k, bw_w, bw_v) = controldata
Example #4
def main(f):
    np.random.seed(1)
    # bg = cv2.imread("trackmap.jpg")
    # bg = cv2.imread("drtrack-2cm.png")
    # bg = cv2.imread("bball-2cm.png")
    bg = cv2.imread("cl.png")
    # bg = cv2.imread("voyage-top.png")

    Np = 300
    X = np.zeros((4, Np))
    # X[:2] = 100 * np.random.rand(2, Np)
    # X[1] -= 400
    X[0] = 0.125*pseudorandn(Np)
    X[1] = 0.125*pseudorandn(Np)
    X[2] = pseudorandn(Np) * 0.1
    print('Lhome', Lhome)
    X.T[:, :3] += Lhome
    X[3] = X[2]
    tstamp = None
    last_wheels = None

    done = False
    i = 0
    if VIDEO:
        # vidout = cv2.VideoWriter("particlefilter.h264", cv2.VideoWriter_fourcc(
        #    'X', '2', '6', '4'), 30, (bg.shape[1], bg.shape[0]), True)
        vidout = cv2.VideoWriter("particlefilter.h264", cv2.VideoWriter_fourcc(
            'X', '2', '6', '4'), 32.4, (640, 480), True)
        if not vidout.isOpened():
            return 'video out fail?'

    Am, Ae = 0, 0
    Vlat = 0
    totalL = 0
    vest2 = 0
    while not done:
        ok, framedata = recordreader.read_frame(f)
        if not ok:
            break

        throttle, steering, accel, gyro, servo, wheels, periods = framedata[1]
        savedparticles = framedata[2]
        yuv = framedata[-1].reshape((-1, 640))
        if tstamp is None:
            tstamp = framedata[0] - 1.0 / 30
        if last_wheels is None:
            last_wheels = wheels
        ts = framedata[0]
        dt = framedata[0] - tstamp
        if dt > 0.1:
            print('WARNING: frame', i, 'has a', dt, 'second gap')
        tstamp = ts
        tsfrac = tstamp - int(tstamp)
        tstring = time.strftime("%H:%M:%S.", time.localtime(tstamp)) + "%02d" % (tsfrac*100)
        gyroz = gyro[2]  # we only need the yaw rate from the gyro
        dw = wheels - last_wheels
        last_wheels = wheels
        print('wheels', dw, 'periods', periods, 'gyro', gyroz, 'dt', dt)
        # distance traveled this frame, averaged over the wheel encoders
        ds = np.sum(dw) * params.WHEEL_TICK_LENGTH / params.NUM_ENCODERS
        # instantaneous speed from the mean encoder tick period (microseconds)
        vest1 = np.mean(periods[:params.NUM_ENCODERS])
        if vest1 != 0:
            vest1 = params.WHEEL_TICK_LENGTH * 1e6 / vest1
        # low-pass filtered speed estimate from per-frame distance
        vest2 += 0.2*(ds / dt - vest2)
        # ds = 0.5*np.sum(dw[2:])
        # w = v k
        # a = v^2 k = v w
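        # worked example: at v = 2 m/s and w = 1 rad/s the lateral acceleration
        # is v*w = 2 m/s^2, roughly 0.2 g, which is what the 'expected' term
        # gyroz * vest1 / 9.8 in the print below reports for comparison with
        # the accelerometer reading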
        print('accel', accel, ' expected ', gyroz * vest1 / 9.8, 'v', vest1, vest2, accel[0]*dt * 9.8)
        step(X, dt, ds, gyroz, vest2, -accel[1]*9.8)

        if False:
            Am = 0.8*Am + 0.2*accel[1]*9.8
            Ae = 0.8*Ae + 0.2*ds*gyroz/dt*0.02
            Vlat = 0.8*Vlat + dt*accel[1]*9.8 - ds*gyroz
            print('alat', accel[1]*9.8, 'estimated', ds*gyroz/dt*0.02)
            print('Vlat estimate', Vlat)
            print('measured alat', Am, 'estimated alat', Ae)

        yuv = yuv.copy()
        yuv[480+120:] = np.maximum(yuv[480+120:], 128)  # filter out blue
        bgr = cv2.cvtColor(yuv, cv2.COLOR_YUV2BGR_I420)
        mapview = bg.copy()
        # mapview = 200*np.ones(bg.shape, np.uint8)

        # draw all particles
        xi = np.uint32(XOFF + X[0]/a)
        xj = np.uint32(YOFF - X[1]/a)
        xin = (xi >= 0) & (xi < mapview.shape[1]) & (xj >= 0) & (xj < mapview.shape[0])
        mapview[xj[xin], xi[xin], :] = 0
        mapview[xj[xin], xi[xin], 1] = 255
        mapview[xj[xin], xi[xin], 2] = 255

        xi = np.uint32(XOFF + savedparticles[:, 0]/.02)
        xj = np.uint32(YOFF - savedparticles[:, 1]/.02)
        xin = (xi >= 0) & (xi < mapview.shape[1]) & (xj >= 0) & (xj < mapview.shape[0])
        mapview[xj[xin], xi[xin], :] = 0
        mapview[xj[xin], xi[xin], 1] = 255
        mapview[xj[xin], xi[xin], 2] = 0

        # draw mean/covariance also
        x = np.mean(X, axis=1)
        P = np.cov(X)
        # print "%d: %f %f %f" % (i, x[0], x[1], x[2])

        x0, y0 = x[:2] / a
        dx, dy = 20*np.cos(x[2]), 20*np.sin(x[2])
        U, V, _ = np.linalg.svd(P[:2, :2])
        axes = np.sqrt(V)
        angle = -np.arctan2(U[0, 1], U[0, 0]) * 180 / np.pi
        cv2.ellipse(mapview, (XOFF+int(x0), YOFF-int(y0)), (int(axes[0]), int(axes[1])),
                    angle, 0, 360, (0, 0, 220), 1)
        cv2.circle(mapview, (XOFF+int(x0), YOFF-int(y0)), 3, (0, 0, 220), 2)
        cv2.line(mapview, (XOFF+int(x0), YOFF-int(y0)), (XOFF+int(x0+dx), YOFF-int(y0+dy)), (0, 0, 220), 2)
        for l in range(len(L)):
            lxy = (XOFF+int(L[l, 0]/a), YOFF-int(L[l, 1]/a))
            cv2.circle(mapview, lxy, 3, (0, 128, 255), 3)
            cv2.putText(mapview, "%d" % l, (lxy[0] + 3, lxy[1] + 3), cv2.FONT_HERSHEY_SIMPLEX, 0.6, (0, 0, 0), 1, cv2.LINE_AA)

        conecenters, origcenters, cone_acts = coneclassify.classify(yuv, gyroz)
        LLs = np.zeros(Np)
        for n, z in enumerate(conecenters):
            # mark the cone on the original view
            p = origcenters[n]
            cv2.circle(bgr, (int(p[0]), int(p[1])), 8, (255, 200, 0), cv2.FILLED)

            zz = np.arctan(z[0])
            j, LL = likeliest_lm(X, L, zz)
            LLs += LL
            totalL += np.sum(LL)
            # print 'frame', i, 'cone', n, 'LL', np.min(LL), np.mean(LL), '+-', np.std(LL), np.max(LL)

            # we could also update the landmarks at this point

            # TODO: visualize the various landmarks being hit by the various particles
            # we could draw thicker lines for more hits and thinner for fewer

            # cv2.line(mapview, (XOFF+int(x0), YOFF-int(y0)), (XOFF+int(ll[0]), YOFF-int(ll[1])), (255,128,0), 1)
            # ll = L[j]
            # x, P, LL = ekf.update_lm_bearing(x, P, -zz, ll[0], ll[1], R)

        bgr[params.vpy, ::2][cone_acts] = 255
        bgr[params.vpy, 1::2][cone_acts] = 255
        bgr[params.vpy+1, ::2][cone_acts] = 255
        bgr[params.vpy+1, 1::2][cone_acts] = 255

        if ds > 0 and len(conecenters) > 0:
            # resample the particles based on their accumulated landmark likelihood
            X = resample_particles(X, LLs)

        cv2.putText(mapview, "%d" % i, (20, 30), cv2.FONT_HERSHEY_SIMPLEX, 1, (255, 255, 255), 2, cv2.LINE_AA)
        cv2.putText(bgr, tstring, (20, 30), cv2.FONT_HERSHEY_SIMPLEX, 0.5, (255, 255, 255), 1, cv2.LINE_AA)

        if VIDEO:
            #s = mapview[::2, ::2].shape
            #bgr[-s[0]:, -s[1]:] = mapview[::2, ::2]
            pass

        # draw particle map on front view too
        xi = np.uint32(400 + savedparticles[:, 0]/.08)
        xj = np.uint32(-savedparticles[:, 1]/.08)
        xin = (xi >= 0) & (xi < bgr.shape[1]) & (xj >= 0) & (xj < bgr.shape[0])
        bgr[xj[xin], xi[xin], :] = 0
        bgr[xj[xin], xi[xin], 1] = 255
        for l in range(len(L)):
            lxy = (400+int(L[l, 0]/.08), 0-int(L[l, 1]/.08))
            cv2.circle(bgr, lxy, 2, (0, 128, 255), 2)
        for t in range(0, len(Track), 2):
            txy1 = (400+int(Track[t, 0]/.08), 0-int(Track[t, 1]/.08))
            txy2 = (400+int(Track[t+1, 0]/.08), 0-int(Track[t+1, 1]/.08))
            cv2.line(bgr, txy1, txy2, (190, 190, 190), 1)

        annotate.draw_throttle(bgr, throttle)
        annotate.draw_speed(bgr, tstamp, wheels, periods)
        annotate.draw_steering(bgr, steering, servo, center=(200, 420))
        annotate.draw_accelerometer(bgr, accel, gyro, center=(320, 420))
        # TODO: also annotate gyroz and lateral accel

        # get steering angle, front and rear wheel velocities
        delta = caldata.wheel_angle(servo)
        vf = 0.5*np.sum(dw[:2])
        vr = 0.5*np.sum(dw[2:])
        # print 'vf', vf, 'vr', vr, 'vr-vf', vr-vf, 'vr/vf', vr/(vf + 0.001)
        if np.abs(delta) > 0.08:
            # estimate lateral velocity
            vy = (vr*np.cos(delta) + gyroz*a*np.sin(delta) - vf)
            # print 'lateral velocity *', np.sin(delta), '=', vy

        if VIDEO:
            vidout.write(bgr)
        else:
            cv2.imshow("map", mapview)
            cv2.imshow("raw", bgr)
            k = cv2.waitKey()
            if k == ord('q'):
                break
        print('frame', i, 'L', totalL)

        i += 1
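resample_particles is called above with the particle array and a vector of per-particle log-likelihoods, but its implementation is not part of the snippet. A plausible sketch using systematic resampling on normalized weights (an assumption about how it might work, not the author's code):

import numpy as np

def resample_particles(X, LL):
    # X: (state_dim, Np) particle states; LL: (Np,) log-likelihoods
    w = np.exp(LL - np.max(LL))   # subtract the max before exponentiating for stability
    w /= np.sum(w)
    Np = X.shape[1]
    positions = (np.arange(Np) + np.random.rand()) / Np   # systematic resampling grid
    idx = np.searchsorted(np.cumsum(w), positions)
    return X[:, idx].copy()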
Example #5
def main(f, VIDEO, interactive):
    np.random.seed(1)
    # bg = cv2.imread("trackmap.jpg")
    # bg = cv2.imread("drtrack-2cm.png")
    # bg = cv2.imread("bball-2cm.png")
    if interactive or VIDEO is not None:
        bg = cv2.imread("cl.png")
    # bg = cv2.imread("voyage-top.png")

    m1 = camcal(320, 240, 10, -5, -np.pi / 4, 2 * np.pi - np.pi / 4)
    ym1, ym2 = cv2.convertMaps(m1 * 2, None, cv2.CV_16SC2)
    uvm1, uvm2 = cv2.convertMaps(m1, None, cv2.CV_16SC2)

    def yuvremap(yuv):
        # assumes 640x480 YUV420 image
        y = cv2.remap(yuv[:480], ym1, ym2, cv2.INTER_LINEAR)
        u = cv2.remap(yuv[480:480 + 120].reshape((240, 320)), uvm1, uvm2,
                      cv2.INTER_LINEAR)
        v = cv2.remap(yuv[480 + 120:].reshape((240, 320)), uvm1, uvm2,
                      cv2.INTER_LINEAR)
        return np.stack([y, u, v]).transpose(1, 2, 0)
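    # note: for a 640x480 I420 buffer, rows 0..479 hold the Y plane and rows
    # 480..599 / 600..719 hold the 320x240 U and V planes, which is why the
    # chroma slices above are reshaped to (240, 320) before remapping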

    Np = 300
    X = np.zeros((4, Np))
    # X[:2] = 100 * np.random.rand(2, Np)
    # X[1] -= 400
    X[0] = 0.125 * pseudorandn(Np)
    X[1] = 0.125 * pseudorandn(Np)
    X[2] = pseudorandn(Np) * 0.1
    X.T[:, :3] += Lhome
    tstamp = None
    last_wheels = None

    done = False
    i = 0
    if VIDEO is not None:
        #vidout = cv2.VideoWriter(VIDEO, cv2.VideoWriter_fourcc(
        #   'X', '2', '6', '4'), 30, (bg.shape[1], bg.shape[0]), True)
        vidout = cv2.VideoWriter(VIDEO,
                                 cv2.VideoWriter_fourcc('X', '2', '6', '4'),
                                 32.4, (640, 480), True)
        if not vidout.isOpened():
            return 'video out fail?'

    Am, Ae = 0, 0
    Vlat = 0
    totalL = 0
    vest2 = 0
    for framedata in f:
        throttle, steering, accel, gyro, servo, wheels, periods = framedata[
            'carstate']
        savedparticles = framedata['particles']
        ts = framedata['tstamp']
        if tstamp is None:
            tstamp = ts - 1.0 / 30
        if last_wheels is None:
            last_wheels = wheels
        dt = ts - tstamp
        if dt > 0.1:
            print('WARNING: frame', i, 'has a', dt, 'second gap')
        tstamp = ts
        tsfrac = tstamp - int(tstamp)
        tstring = time.strftime(
            "%H:%M:%S.", time.localtime(tstamp)) + "%02d" % (tsfrac * 100)
        gyroz = gyro[2]  # we only need the yaw rate from the gyro
        dw = wheels - last_wheels
        last_wheels = wheels
        # print('wheels', dw, 'periods', periods, 'gyro', gyroz, 'dt', dt)
        ds = np.sum(dw) * params.WHEEL_TICK_LENGTH / params.NUM_ENCODERS
        vest1 = np.mean(periods[:params.NUM_ENCODERS])
        if vest1 != 0:
            vest1 = params.WHEEL_TICK_LENGTH * 1e6 / vest1
        vest2 += 0.2 * (ds / dt - vest2)
        # ds = 0.5*np.sum(dw[2:])
        # w = v k
        # a = v^2 k = v w
        # print('accel', accel, ' expected ', gyroz * vest1 / 9.8, 'v', vest1, vest2, accel[0]*dt * 9.8)
        step(X, dt * 0.3, ds * 0.3, gyroz, vest2, -accel[1] * 9.8)
        step(X, dt * 0.3, ds * 0.3, gyroz, vest2, -accel[1] * 9.8)
        step(X, dt * 0.3, ds * 0.3, gyroz, vest2, -accel[1] * 9.8)
        step(X, dt * 0.1, ds * 0.1, gyroz, vest2, -accel[1] * 9.8)

        if False:
            Am = 0.8 * Am + 0.2 * accel[1] * 9.8
            Ae = 0.8 * Ae + 0.2 * ds * gyroz / dt * 0.02
            Vlat = 0.8 * Vlat + dt * accel[1] * 9.8 - ds * gyroz
            # print('alat', accel[1]*9.8, 'estimated', ds*gyroz/dt*0.02)
            # print('Vlat estimate', Vlat)
            # print('measured alat', Am, 'estimated alat', Ae)

        if interactive or VIDEO is not None:
            yuv = framedata['yuv420']
            bgr = cv2.cvtColor(yuv, cv2.COLOR_YUV2BGR_I420)
            mapview = bg.copy()
            # mapview = 200*np.ones(bg.shape, np.uint8)

            # draw all particles
            xi = np.uint32(XOFF + X[0] / a)
            xj = np.uint32(YOFF - X[1] / a)
            xin = (xi >= 0) & (xi < mapview.shape[1]) & (xj >= 0) & (
                xj < mapview.shape[0])
            mapview[xj[xin], xi[xin], :] = 0
            mapview[xj[xin], xi[xin], 1] = 255
            mapview[xj[xin], xi[xin], 2] = 255

            xi = np.uint32(XOFF + savedparticles[:, 0] / .02)
            xj = np.uint32(YOFF - savedparticles[:, 1] / .02)
            xin = (xi >= 0) & (xi < mapview.shape[1]) & (xj >= 0) & (
                xj < mapview.shape[0])
            mapview[xj[xin], xi[xin], :] = 0
            mapview[xj[xin], xi[xin], 1] = 255
            mapview[xj[xin], xi[xin], 2] = 0

            # draw mean/covariance also
            x = np.mean(X, axis=1)
            P = np.cov(X)
            # print("%d: %f %f %f" % (i, x[0], x[1], x[2]))

            x0, y0 = x[:2] / a
            dx, dy = 20 * np.cos(x[2]), 20 * np.sin(x[2])
            U, V, _ = np.linalg.svd(P[:2, :2])
            axes = np.sqrt(V) / a
            angle = -np.arctan2(U[0, 1], U[0, 0]) * 180 / np.pi
            cv2.ellipse(mapview, (XOFF + int(x0), YOFF - int(y0)),
                        (int(axes[0]), int(axes[1])), angle, 0, 360,
                        (0, 0, 220), 1)
            cv2.circle(mapview, (XOFF + int(x0), YOFF - int(y0)), 3,
                       (0, 0, 220), 2)
            cv2.line(mapview, (XOFF + int(x0), YOFF - int(y0)),
                     (XOFF + int(x0 + dx), YOFF - int(y0 + dy)), (0, 0, 220),
                     2)
            for l in range(len(L)):
                lxy = (XOFF + int(L[l, 0] / a), YOFF - int(L[l, 1] / a))
                cv2.circle(mapview, lxy, 3, (0, 128, 255), 3)
                cv2.putText(mapview, "%d" % l, (lxy[0] + 3, lxy[1] + 3),
                            cv2.FONT_HERSHEY_SIMPLEX, 0.6, (0, 0, 0), 1,
                            cv2.LINE_AA)

        # stored activations
        activation = framedata['activations'].copy()
        activation[1:] = activation[1:] - activation[:-1]

        # recompute activations from video
        if False:
            yuv1 = yuvremap(yuv)
            uv = yuv1[:, :, 1:3].astype(np.int32) - 128
            myactivation = uv[:, :, 1]
            myactivation[yuv1[:, :, 1] == 0] = 0
            myactivation = np.sum(myactivation, axis=0)

            if interactive or VIDEO is not None:
                #ai = np.clip(uvm1[10, activation > params.V_THRESHOLD, 0]*2, 0, 639)
                #aj = np.clip(uvm1[10, activation > params.V_THRESHOLD, 1]*2, 0, 479)
                #bgr[aj, ai, :] = 255
                #bgr[aj, ai, 2] = 0

                ai = np.clip(uvm1[0, myactivation > params.V_THRESHOLD, 0] * 2,
                             0, 639)
                aj = np.clip(uvm1[0, myactivation > params.V_THRESHOLD, 1] * 2,
                             0, 479)
                bgr[aj, ai, :] = 255
                bgr[aj, ai, 0] = 0

                #print('act', activation[300:350])
                #print('myact', myactivation[300:350])

        LL, c0, c1 = likelihood(X, activation)
        totalL += np.sum(LL)
        LL -= np.max(LL)
        if ds > 0:
            X = resample_particles(X, LL)
        if interactive:
            ml = np.argmax(LL)
            Na = uvm1.shape[1]
            for s, e in zip(c0[:, ml] % Na, c1[:, ml] % Na):
                x0 = uvm1[5, s] * 2
                x1 = uvm1[5, e] * 2
                cv2.line(bgr, (x0[0], np.clip(x0[1], 2, 477)),
                         (x1[0], np.clip(x1[1], 2, 477)), (255, 170, 0), 2,
                         cv2.LINE_AA)

        if interactive or VIDEO is not None:
            cv2.putText(mapview, "%d" % i, (20, 30), cv2.FONT_HERSHEY_SIMPLEX,
                        1, (255, 255, 255), 2, cv2.LINE_AA)
            cv2.putText(bgr, tstring, (20, 30), cv2.FONT_HERSHEY_SIMPLEX, 0.5,
                        (255, 255, 255), 1, cv2.LINE_AA)

            if VIDEO is not None:
                #s = mapview[::2, ::2].shape
                #bgr[-s[0]:, -s[1]:] = mapview[::2, ::2]
                pass

            # draw particle map on front view too
            xi = np.uint32(400 + savedparticles[:, 0] / .08)
            xj = np.uint32(-savedparticles[:, 1] / .08)
            xin = (xi >= 0) & (xi < bgr.shape[1]) & (xj >= 0) & (xj <
                                                                 bgr.shape[0])
            bgr[xj[xin], xi[xin], :] = 0
            bgr[xj[xin], xi[xin], 1] = 255
            xi = np.uint32(400 + X[0] / .08)
            xj = np.uint32(-X[1] / .08)
            xin = (xi >= 0) & (xi < bgr.shape[1]) & (xj >= 0) & (xj <
                                                                 bgr.shape[0])
            bgr[xj[xin], xi[xin], :] = 255
            bgr[xj[xin], xi[xin], 0] = 0
            for l in range(len(L)):
                lxy = (400 + int(L[l, 0] / .08), 0 - int(L[l, 1] / .08))
                cv2.circle(bgr, lxy, 2, (0, 128, 255), 2)

            for t in range(0, len(Track), 2):
                txy1 = (400 + int(Track[t, 0] / .08),
                        0 - int(Track[t, 1] / .08))
                txy2 = (400 + int(Track[t + 1, 0] / .08),
                        0 - int(Track[t + 1, 1] / .08))
                cv2.line(bgr, txy1, txy2, (190, 190, 190), 1)

            annotate.draw_throttle(bgr, throttle)
            annotate.draw_speed(bgr, tstamp, wheels, periods)
            annotate.draw_steering(bgr, steering, servo, center=(200, 420))
            annotate.draw_accelerometer(bgr, accel, gyro, center=(320, 420))
            # TODO: also annotate gyroz and lateral accel

        # get steering angle, front and rear wheel velocities
        #delta = caldata.wheel_angle(servo)
        #vf = 0.5*np.sum(dw[:2])
        #vr = 0.5*np.sum(dw[2:])
        # print('vf', vf, 'vr', vr, 'vr-vf', vr-vf, 'vr/vf', vr/(vf + 0.001))
        #if np.abs(delta) > 0.08:
        #    # estimate lateral velocity
        #    vy = (vr*np.cos(delta) + gyroz*a*np.sin(delta) - vf)
        #    # print('lateral velocity *', np.sin(delta), '=', vy)

        if VIDEO is not None:
            vidout.write(bgr)
        if interactive:
            cv2.imshow("map", mapview)
            cv2.imshow("raw", bgr)
            k = cv2.waitKey()
            if k == ord('q'):
                break
        # print('frame', i, 'L', totalL)

        i += 1

    return totalL
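step() propagates the particles each frame from the wheel odometry and the gyro yaw rate; the repository's implementation, including what it does with the extra velocity and lateral-acceleration arguments passed in Examples #4 and #5, is not shown in these snippets. A minimal kinematic sketch with assumed process-noise levels, accepting but ignoring the extra arguments:

import numpy as np

def step(X, dt, ds, gyroz, v=None, alat=None):
    # X: particles with rows (x, y, theta, ...); dt: frame time in seconds;
    # ds: odometry distance in meters; gyroz: yaw rate in rad/s.
    # v and alat are accepted for call compatibility but unused in this sketch.
    Np = X.shape[1]
    theta = X[2] + gyroz * dt + 0.02 * np.random.randn(Np)  # heading update + noise (assumed scale)
    d = ds + 0.01 * np.random.randn(Np)                     # distance update + noise (assumed scale)
    X[0] += d * np.cos(theta)
    X[1] += d * np.sin(theta)
    X[2] = theta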
Example #6
def main(f):
    np.random.seed(1)
    # bg = cv2.imread("satview.png")
    # bg = cv2.imread("trackmap.jpg")
    bg = cv2.imread("drtrack-2cm.png")

    Np = 300
    X = np.zeros((3, Np))
    # X[:2] = 100 * np.random.rand(2, Np)
    # X[1] -= 400
    X[:2] = 30 * np.random.randn(2, Np)
    X[2] = np.random.randn(Np) * 0.2
    tstamp = None
    last_wheels = None

    done = False
    i = 0
    if VIDEO:
        # vidout = cv2.VideoWriter("particlefilter.h264", cv2.VideoWriter_fourcc(
        #    'X', '2', '6', '4'), 30, (bg.shape[1], bg.shape[0]), True)
        vidout = cv2.VideoWriter("particlefilter.h264",
                                 cv2.VideoWriter_fourcc('X', '2', '6', '4'),
                                 30, (640, 480), True)

    Am, Ae = 0, 0
    Vlat = 0
    while not done:
        ok, frame = recordreader.read_frame(f)
        if not ok:
            break

        _, throttle, steering, accel, gyro, servo, wheels, periods, yuv = frame
        if tstamp is None:
            tstamp = frame[0] - 1.0 / 30
        if last_wheels is None:
            last_wheels = wheels
        ts = frame[0]
        dt = frame[0] - tstamp
        if dt > 0.1:
            print('WARNING: frame', i, 'has a', dt, 'second gap')
        tstamp = ts
        gyroz = gyro[2]  # we only need the yaw rate from the gyro
        dw = wheels - last_wheels
        last_wheels = wheels
        # print 'wheels', dw, 'gyro', gyroz, 'dt', dt
        ds = 0.25 * np.sum(dw)
        step(X, dt, ds, gyroz)

        if True:
            Am = 0.8 * Am + 0.2 * accel[1] * 9.8
            Ae = 0.8 * Ae + 0.2 * ds * gyroz / dt * 0.02
            Vlat = 0.8 * Vlat + dt * accel[1] * 9.8 - ds * gyroz * 0.02
            print('alat', accel[1] * 9.8, 'estimated', ds * gyroz / dt * 0.02)
            print('Vlat estimate', Vlat)
            # print 'measured alat', Am, 'estimated alat', Ae

        bgr = cv2.cvtColor(yuv, cv2.COLOR_YUV2BGR_I420)
        mapview = bg.copy()
        # mapview = 200*np.ones(bg.shape, np.uint8)

        # draw all particles
        xi = np.uint32(XOFF + X[0] / a)
        xj = np.uint32(YOFF - X[1] / a)
        xin = (xi >= 0) & (xi < mapview.shape[1]) & (xj >= 0) & (
            xj < mapview.shape[0])
        mapview[xj[xin], xi[xin], :] = 0
        mapview[xj[xin], xi[xin], 1] = 255
        mapview[xj[xin], xi[xin], 2] = 255

        # draw mean/covariance also
        x = np.mean(X, axis=1)
        P = np.cov(X)
        # print "%d: %f %f %f" % (i, x[0], x[1], x[2])

        x0, y0 = x[:2] / a
        dx, dy = 20 * np.cos(x[2]), 20 * np.sin(x[2])
        U, V, _ = np.linalg.svd(P[:2, :2])
        axes = np.sqrt(V)
        angle = -np.arctan2(U[0, 1], U[0, 0]) * 180 / np.pi
        cv2.ellipse(mapview, (XOFF + int(x0), YOFF - int(y0)),
                    (int(axes[0]), int(axes[1])), angle, 0, 360, (0, 0, 220),
                    1)
        cv2.circle(mapview, (XOFF + int(x0), YOFF - int(y0)), 3, (0, 0, 220),
                   2)
        cv2.line(mapview, (XOFF + int(x0), YOFF - int(y0)),
                 (XOFF + int(x0 + dx), YOFF - int(y0 + dy)), (0, 0, 220), 2)
        for l in range(len(L)):
            lxy = (XOFF + int(L[l, 0] / a), YOFF - int(L[l, 1] / a))
            cv2.circle(mapview, lxy, 3, (0, 128, 255), 3)
            cv2.putText(mapview, "%d" % l, (lxy[0] + 3, lxy[1] + 3),
                        cv2.FONT_HERSHEY_SIMPLEX, 0.6, (0, 0, 0), 1,
                        cv2.LINE_AA)

        conecenters, origcenters, cone_acts = coneclassify.classify(yuv, gyroz)
        LLs = np.zeros(Np)
        for n, z in enumerate(conecenters):
            # mark the cone on the original view
            p = origcenters[n]
            cv2.circle(bgr, (int(p[0]), int(p[1])), 8, (255, 200, 0),
                       cv2.FILLED)

            zz = np.arctan(z[0])
            j, LL = likeliest_lm(X, L, zz)
            LLs += LL
            print('frame', i, 'cone', n, 'LL', np.min(LL), np.mean(LL),
                  '+-', np.std(LL), np.max(LL))

            # we could also update the landmarks at this point

            # TODO: visualize the various landmarks being hit by the various particles
            # we could draw thicker lines for more hits and thinner for fewer

            # cv2.line(mapview, (XOFF+int(x0), YOFF-int(y0)), (XOFF+int(ll[0]), YOFF-int(ll[1])), (255,128,0), 1)
            # ll = L[j]
            # x, P, LL = ekf.update_lm_bearing(x, P, -zz, ll[0], ll[1], R)

        bgr[params.vpy, ::2][cone_acts] = 255
        bgr[params.vpy, 1::2][cone_acts] = 255
        bgr[params.vpy + 1, ::2][cone_acts] = 255
        bgr[params.vpy + 1, 1::2][cone_acts] = 255

        if len(conecenters) > 0:
            # resample the particles based on their accumulated landmark likelihood
            X = resample_particles(X, LLs)

        cv2.putText(mapview, "%d" % i, (20, 30), cv2.FONT_HERSHEY_SIMPLEX, 1,
                    (255, 255, 255), 2, cv2.LINE_AA)

        if VIDEO:
            s = mapview[::2, ::2].shape
            bgr[-s[0]:, -s[1]:] = mapview[::2, ::2]

        annotate.draw_throttle(bgr, throttle)
        annotate.draw_speed(bgr, tstamp, wheels, periods)
        annotate.draw_steering(bgr, steering, servo, center=(200, 420))
        # TODO: also annotate gyroz and lateral accel

        # get steering angle, front and rear wheel velocities
        delta = caldata.wheel_angle(servo)
        vf = 0.5 * np.sum(dw[:2])
        vr = 0.5 * np.sum(dw[2:])
        print('vf', vf, 'vr', vr, 'vr-vf', vr - vf, 'vr/vf', vr / (vf + 0.001))
        if np.abs(delta) > 0.08:
            # estimate lateral velocity
            vy = (vr * np.cos(delta) + gyroz * a * np.sin(delta) - vf)
            print('lateral velocity *', np.sin(delta), '=', vy)

        if VIDEO:
            s = (bg.shape[1] - 320) // 2
            vidout.write(bgr)
        else:
            cv2.imshow("map", mapview)
            cv2.imshow("raw", bgr)
            k = cv2.waitKey()
            if k == ord('q'):
                break

        i += 1
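likeliest_lm is called above with the particles, the landmark table L, and a measured cone bearing zz = arctan(z[0]); it returns a landmark index and a per-particle log-likelihood, but its body is not included in the snippets. A hedged sketch of one way such a bearing-only data association could look, under a Gaussian bearing-noise model (SIGMA_BEARING is an assumed constant, not from the source):

import numpy as np

SIGMA_BEARING = 0.2  # rad, assumed measurement noise

def likeliest_lm(X, L, zz):
    # X: particles with rows (x, y, theta, ...); L: (Nl, 2) landmark positions
    dx = L[:, 0][:, None] - X[0][None, :]                 # (Nl, Np)
    dy = L[:, 1][:, None] - X[1][None, :]
    bearing = np.arctan2(dy, dx) - X[2][None, :]          # landmark bearing in the car frame
    err = (bearing - zz + np.pi) % (2 * np.pi) - np.pi    # wrap to [-pi, pi)
    LL_all = -0.5 * (err / SIGMA_BEARING) ** 2            # log-likelihood up to a constant
    j = int(np.argmax(np.sum(LL_all, axis=1)))            # landmark best supported by all particles
    return j, LL_all[j]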