def update_measurement_improved(self, poleparams, resample=True):
        n = poleparams.shape[0]
        polepos_r = np.hstack(
            [poleparams[:, :2],
             np.zeros([n, 1]),
             np.ones([n, 1])]).T
        for i in range(self.count):
            polepos_w = self.particles[i].dot(polepos_r)
            d, _ = self.kdtree.query(polepos_w[:2].T,
                                     k=1,
                                     distance_upper_bound=self.d_max)
            # Measurement likelihood: product over detected poles of the
            # distance pdf, with a 0.1 floor so one bad match cannot zero
            # the particle weight.
            lik = np.prod(self.poledist.pdf(np.clip(d, 0.0, self.d_max)) + 0.1)
            # Motion prior: density of this particle's pose change under its
            # UKF covariance.
            prior = scipy.stats.multivariate_normal(
                mean=[0, 0, 0], cov=self.Pukf[i, :, :]).pdf(
                    util.ht2xyp(self.particles[i]) -
                    util.ht2xyp(self.particles_pre[i]))
            # Proposal density the particle was drawn from (centred at the
            # UKF output xukf).
            proposal = scipy.stats.multivariate_normal(
                mean=[0, 0, 0], cov=self.Poutt).pdf(
                    util.ht2xyp(self.particles[i]) - self.xukf)
            # Importance-weight update: likelihood times prior over proposal.
            self.weights[i] *= lik * prior / proposal

            self.particles_pre[i] = self.particles[i].copy()
            self.Pukf[i, :, :] = self.Poutt[i].copy()
        self.weights /= np.sum(self.weights)
        if resample and self.neff < self.minneff:
            self.resample()
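A short, self-contained sketch of the measurement-likelihood term used above. It assumes the map poles are stored in a scipy cKDTree and that self.poledist is a 1-D scipy distribution over pole-to-map distances; both are assumptions, since the class internals are not shown in this example.

import numpy as np
import scipy.stats
from scipy.spatial import cKDTree

mappoles = np.array([[0.0, 0.0], [5.0, 1.0], [9.0, -2.0]])  # hypothetical map poles (x, y)
kdtree = cKDTree(mappoles)
poledist = scipy.stats.norm(loc=0.0, scale=0.5)  # hypothetical distance model
d_max = 2.0

detections_w = np.array([[0.2, 0.1], [5.3, 0.8], [20.0, 20.0]])  # detections in world frame
d, _ = kdtree.query(detections_w, k=1, distance_upper_bound=d_max)
# Unmatched detections come back with d = inf; clipping to d_max caps their
# penalty, and the +0.1 floor keeps one bad match from zeroing the weight.
lik = np.prod(poledist.pdf(np.clip(d, 0.0, d_max)) + 0.1)
print(lik)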
Example #2
def estimate_pose(self):
    if self.estimatetype == 'mean':
        xyp = util.ht2xyp(np.matmul(self.T_o_w, self.particles))
        mean = np.hstack([
            np.average(xyp[:, :2], axis=0, weights=self.weights),
            util.average_angles(xyp[:, 2], weights=self.weights)
        ])
        return self.T_w_o.dot(util.xyp2ht(mean))
    if self.estimatetype == 'max':
        return self.particles[np.argmax(self.weights)]
    if self.estimatetype == 'best':
        i = np.argsort(self.weights)[-int(0.1 * self.count):]
        xyp = util.ht2xyp(np.matmul(self.T_o_w, self.particles[i]))
        mean = np.hstack([
            np.average(xyp[:, :2], axis=0, weights=self.weights[i]),
            util.average_angles(xyp[:, 2], weights=self.weights[i])
        ])
        return self.T_w_o.dot(util.xyp2ht(mean))
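estimate_pose relies on three helpers that are not listed here: util.ht2xyp, util.xyp2ht and util.average_angles. The sketches below show one plausible planar implementation of each (assumptions, not the project's own code); note that the project's ht2xyp evidently also accepts stacks of transforms, while this sketch handles a single one for brevity.

import numpy as np

def ht2xyp(T):
    # (x, y) translation and heading phi of a planar 4x4 homogeneous transform.
    return np.array([T[0, 3], T[1, 3], np.arctan2(T[1, 0], T[0, 0])])

def xyp2ht(xyp):
    # Build the 4x4 homogeneous transform back from (x, y, phi).
    x, y, phi = xyp
    T = np.identity(4)
    T[:2, :2] = [[np.cos(phi), -np.sin(phi)], [np.sin(phi), np.cos(phi)]]
    T[:2, 3] = [x, y]
    return T

def average_angles(angles, weights=None):
    # Weighted average on the unit circle so that e.g. -pi and +pi do not cancel out.
    s = np.average(np.sin(angles), weights=weights)
    c = np.average(np.cos(angles), weights=weights)
    return np.arctan2(s, c)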
Example #3
def evaluate():
    stats = []
    for sessionname in pynclt.sessions:
        files = [file for file \
            in os.listdir(os.path.join(pynclt.resultdir, sessionname)) \
                if file.startswith(localization_name_start)]
                # if file.startswith(get_locfileprefix())]
        files.sort()
        session = pynclt.session(sessionname)
        cumdist = np.hstack([0.0, np.cumsum(np.linalg.norm(np.diff(
            session.T_w_r_gt[:, :3, 3], axis=0), axis=1))])
        t_eval = scipy.interpolate.interp1d(
            cumdist, session.t_gt)(np.arange(0.0, cumdist[-1], 1.0))
        T_w_r_gt = np.stack([util.project_xy(
                session.get_T_w_r_gt(t).dot(T_r_mc)).dot(T_mc_r) \
                    for t in t_eval])
        T_gt_est = []
        for file in files:
            T_w_r_est = np.load(os.path.join(
                pynclt.resultdir, sessionname, file))['T_w_r_est']
            T_w_r_est_interp = np.empty([len(t_eval), 4, 4])
            iodo = 1
            for ieval in range(len(t_eval)):
                while session.t_relodo[iodo] < t_eval[ieval]:
                    iodo += 1
                T_w_r_est_interp[ieval] = util.interpolate_ht(
                    T_w_r_est[iodo-1:iodo+1], 
                    session.t_relodo[iodo-1:iodo+1], t_eval[ieval])
            T_gt_est.append(
                np.matmul(util.invert_ht(T_w_r_gt), T_w_r_est_interp))
        T_gt_est = np.stack(T_gt_est)
        lonerror = np.mean(np.mean(np.abs(T_gt_est[..., 0, 3]), axis=-1))
        laterror = np.mean(np.mean(np.abs(T_gt_est[..., 1, 3]), axis=-1))
        poserrors = np.linalg.norm(T_gt_est[..., :2, 3], axis=-1)
        poserror = np.mean(np.mean(poserrors, axis=-1))
        posrmse = np.mean(np.sqrt(np.mean(poserrors**2, axis=-1)))
        angerrors = np.degrees(np.abs(
            np.array([util.ht2xyp(T)[:, 2] for T in T_gt_est])))
        angerror = np.mean(np.mean(angerrors, axis=-1))
        angrmse = np.mean(np.sqrt(np.mean(angerrors**2, axis=-1)))
        stats.append({'session': sessionname, 'lonerror': lonerror, 
            'laterror': laterror, 'poserror': poserror, 'posrmse': posrmse,
            'angerror': angerror, 'angrmse': angrmse, 'T_gt_est': T_gt_est})
    np.savez(os.path.join(pynclt.resultdir, get_evalfile()), stats=stats)
    
    mapdata = np.load(os.path.join(pynclt.resultdir, get_globalmapname() + '.npz'))
    print('session \tf[%] \te_pos \trmse_pos \te_ang \trmse_ang')
    row = '{session} \t{f} \t{poserror} \t{posrmse} \t{angerror} \t{angrmse}'
    for i, stat in enumerate(stats):
        print(row.format(
            session=stat['session'],
            f=mapdata['mapfactors'][i] * 100.0,
            poserror=stat['poserror'],
            posrmse=stat['posrmse'],
            angerror=stat['angerror'],
            angrmse=stat['angrmse']))
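util.interpolate_ht, used above to interpolate the estimated poses to the 1 m evaluation timestamps, is not listed either. A minimal planar sketch of what such a helper can look like (an assumption; the project's version may interpolate the full rotation differently):

import numpy as np

def interpolate_ht_sketch(T, t, t_query):
    # T: (2, 4, 4) poses at timestamps t[0] <= t_query <= t[1].
    a = (t_query - t[0]) / (t[1] - t[0])
    xy = (1.0 - a) * T[0, :2, 3] + a * T[1, :2, 3]
    phi0 = np.arctan2(T[0, 1, 0], T[0, 0, 0])
    phi1 = np.arctan2(T[1, 1, 0], T[1, 0, 0])
    dphi = (phi1 - phi0 + np.pi) % (2.0 * np.pi) - np.pi  # shortest rotation
    phi = phi0 + a * dphi
    Tq = np.identity(4)
    Tq[:2, :2] = [[np.cos(phi), -np.sin(phi)], [np.sin(phi), np.cos(phi)]]
    Tq[:2, 3] = xy
    return Tq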
Example #4
def estimate_pose(self):
    if self.estimatetype == 'mean':
        xyp = util.ht2xyp(np.matmul(self.T_o_w, self.particles))
        mean = np.hstack([
            np.average(xyp[:, :2], axis=0, weights=self.weights),
            util.average_angles(xyp[:, 2], weights=self.weights)
        ])
        return self.T_w_o.dot(util.xyp2ht(mean))
    if self.estimatetype == 'max':
        return self.particles[np.argmax(self.weights)]
    if self.estimatetype == 'best':
        i = np.argsort(self.weights)[-int(0.1 * self.count):]
        xyp = util.ht2xyp(np.matmul(self.T_o_w, self.particles[i]))
        mean = np.hstack([
            np.average(xyp[:, :2], axis=0, weights=self.weights[i]),
            util.average_angles(xyp[:, 2], weights=self.weights[i])
        ])
        zero_mean_particles = xyp - mean
        # Wrap the heading residuals to [-pi, pi) so that two nearly equal
        # headings on either side of +/-pi do not produce a huge residual.
        zero_mean_particles[:, 2] = (zero_mean_particles[:, 2]
                                     + np.pi) % (2 * np.pi) - np.pi
        self.Sigma = (
            1 / xyp.shape[0]) * zero_mean_particles.T @ zero_mean_particles
        return self.T_w_o.dot(util.xyp2ht(mean))
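The heading residuals above must be wrapped to [-pi, pi) before forming Sigma; wrapping to [0, 2*pi) blows up the variance for headings near +/-pi. A tiny numeric demonstration with made-up values:

import numpy as np

phi = np.array([3.1, -3.1])  # two headings belonging to almost identical poses
mean_phi = np.arctan2(np.mean(np.sin(phi)), np.mean(np.cos(phi)))  # approx. pi
res = phi - mean_phi
res_wrapped = (res + np.pi) % (2.0 * np.pi) - np.pi
print(np.var(res), np.var(res_wrapped))  # huge vs. tiny variance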
Example #5
def estimate_pose(self):
    if self.estimatetype == 'mean':
        xyp = util.ht2xyp(np.matmul(self.T_o_w, self.particles))
        mean = np.hstack([
            np.average(xyp[:, :2], axis=0, weights=self.weights),
            util.average_angles(xyp[:, 2], weights=self.weights)
        ])
        return self.T_w_o.dot(util.xyp2ht(mean))
    if self.estimatetype == 'max':
        return self.particles[np.argmax(self.weights)]
    if self.estimatetype == 'best':
         """
             Skips the worst 10% of indices (the ones with least weight)
             Guessing xyp is x, y and phi for rotation angle, same as paper
             ht is transformation matrix of particle at time t??
             mean is the weighted average x, y and phi multiplied with T_w_o
         """
         i = np.argsort(self.weights)[-int(0.1 * self.count):]
         xyp = util.ht2xyp(np.matmul(self.T_o_w, self.particles[i]))
         mean = np.hstack([
             np.average(xyp[:, :2], axis=0, weights=self.weights[i]),
             util.average_angles(xyp[:, 2], weights=self.weights[i])
         ])
         return self.T_w_o.dot(util.xyp2ht(mean))
def evaluate(seq):
    sequence = dataset.sequence(seq)
    T_w_velo_gt = np.matmul(sequence.poses, sequence.calib.T_cam0_velo)
    T_w_velo_gt = np.array([util.project_xy(ht) for ht in T_w_velo_gt])
    seqdir = os.path.join(result_dir, '{:03d}'.format(seq))
    trajectory_dir = os.path.join(seqdir, 'trajectory')
    util.makedirs(trajectory_dir)
    mapdata = np.load(os.path.join(seqdir, globalmapfile), allow_pickle=True)
    polemap = mapdata['polemeans']
    # plt.scatter(polemap[:, 0], polemap[:, 1], s=1, c='b')
    # plt.plot(T_w_velo_gt[:, 0, 3], T_w_velo_gt[:, 1, 3], color=(0, 1, 0), label='Ground Truth', linewidth=3.0)
    cumdist = np.hstack([
        0.0,
        np.cumsum(
            np.linalg.norm(np.diff(T_w_velo_gt[:, :2, 3], axis=0), axis=1))
    ])
    timestamps = np.array([arrow.get(timestamp).float_timestamp \
        for timestamp in sequence.timestamps])
    t_eval = scipy.interpolate.interp1d(cumdist, timestamps)(np.arange(
        0.0, cumdist[-1], 1.0))
    n = t_eval.size
    T_w_velo_gt_interp = np.empty([n, 4, 4])
    iodo = 1
    for ieval in range(n):
        while timestamps[iodo] < t_eval[ieval]:
            iodo += 1
        T_w_velo_gt_interp[ieval] = util.interpolate_ht(
            T_w_velo_gt[iodo - 1:iodo + 1], timestamps[iodo - 1:iodo + 1],
            t_eval[ieval])
    files = [file for file in os.listdir(seqdir) \
        if os.path.basename(file).startswith(locfileprefix)]
    poserror = np.full([n, len(files)], np.nan)
    laterror = np.full([n, len(files)], np.nan)
    lonerror = np.full([n, len(files)], np.nan)
    angerror = np.full([n, len(files)], np.nan)
    T_gt_est = np.full([n, 4, 4], np.nan)
    for ifile in range(len(files)):
        T_w_velo_est = np.load(os.path.join(seqdir, files[ifile]),
                               allow_pickle=True)['T_w_velo_est']
        plt.clf()
        plt.plot(T_w_velo_gt[:, 0, 3],
                 T_w_velo_gt[:, 1, 3],
                 color=(0, 1, 0),
                 label='Ground Truth',
                 linewidth=3.0)
        landmarks = plt.scatter(polemap[:, 0],
                                polemap[:, 1],
                                s=1,
                                c='m',
                                marker='*',
                                label='Landmarks')
        iodo = 1
        for ieval in range(n):
            while timestamps[iodo] < t_eval[ieval]:
                iodo += 1
            T_w_velo_est_interp = util.interpolate_ht(
                T_w_velo_est[iodo - 1:iodo + 1], timestamps[iodo - 1:iodo + 1],
                t_eval[ieval])
            T_gt_est[ieval] = util.invert_ht(
                T_w_velo_gt_interp[ieval]).dot(T_w_velo_est_interp)
        lonerror[:, ifile] = T_gt_est[:, 0, 3]
        laterror[:, ifile] = T_gt_est[:, 1, 3]
        poserror[:, ifile] = np.linalg.norm(T_gt_est[:, :2, 3], axis=1)
        angerror[:, ifile] = util.ht2xyp(T_gt_est)[:, 2]
        plt.plot(T_w_velo_est[:, 0, 3],
                 T_w_velo_est[:, 1, 3],
                 'r',
                 label='Estimated trajectory')
        plt.ylabel('North (Unit:m)')
        plt.xlabel('East (Unit:m)')
        plt.legend()
        plt.gcf().subplots_adjust(bottom=0.13,
                                  top=0.98,
                                  left=0.145,
                                  right=0.98)
        plt.grid(color=(0.5, 0.5, 0.5), linestyle='-', linewidth=1)
    angerror = np.degrees(angerror)
    lonstd = np.std(lonerror, axis=0)
    latstd = np.std(laterror, axis=0)
    angstd = np.std(angerror, axis=0)
    angerror = np.abs(angerror)
    laterror = np.mean(np.abs(laterror), axis=0)
    lonerror = np.mean(np.abs(lonerror), axis=0)
    posrmse = np.sqrt(np.mean(poserror**2, axis=0))
    angrmse = np.sqrt(np.mean(angerror**2, axis=0))
    poserror = np.mean(poserror, axis=0)
    angerror = np.mean(angerror, axis=0)
    plt.savefig(os.path.join(trajectory_dir, 'trajectory_est.svg'))
    plt.savefig(os.path.join(trajectory_dir, 'trajectory_est.png'))
    np.savez(os.path.join(seqdir, evalfile),
             poserror=poserror,
             angerror=angerror,
             posrmse=posrmse,
             angrmse=angrmse,
             laterror=laterror,
             latstd=latstd,
             lonerror=lonerror,
             lonstd=lonstd)
    print('poserror: {}\nposrmse: {}\n'
          'laterror: {}\nlatstd: {}\n'
          'lonerror: {}\nlonstd: {}\n'
          'angerror: {}\nangstd: {}\nangrmse: {}'.format(
              np.mean(poserror), np.mean(posrmse), np.mean(laterror),
              np.mean(latstd), np.mean(lonerror), np.mean(lonstd),
              np.mean(angerror), np.mean(angstd), np.mean(angrmse)))
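Both evaluate functions sample the trajectory at a fixed 1 m spacing along the driven path rather than at fixed time intervals: the cumulative distance serves as the interpolation axis for the timestamps. A self-contained sketch of that resampling step with a synthetic straight-line trajectory:

import numpy as np
import scipy.interpolate

xy = np.column_stack([np.linspace(0.0, 10.0, 50), np.zeros(50)])  # synthetic path
t = np.linspace(100.0, 105.0, 50)                                 # synthetic timestamps
cumdist = np.hstack([0.0, np.cumsum(np.linalg.norm(np.diff(xy, axis=0), axis=1))])
t_eval = scipy.interpolate.interp1d(cumdist, t)(np.arange(0.0, cumdist[-1], 1.0))
print(t_eval)  # timestamps at which the vehicle has travelled 0 m, 1 m, 2 m, ...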
def localize(seq, visualize=False):
    print(seq)
    sequence = dataset.sequence(seq)
    seqdir = os.path.join(result_dir, '{:03d}'.format(seq))
    mapdata = np.load(os.path.join(seqdir, globalmapfile), allow_pickle=True)
    polemap = mapdata['polemeans']
    polemap = np.hstack([polemap[:, :2], np.diff(polemap[:, 2:4], axis=1)])
    figuresdir = os.path.join(seqdir, 'Figures')
    util.makedirs(figuresdir)
    locdata = np.load(os.path.join(seqdir, localmapfile),
                      allow_pickle=True)['maps']
    T_velo_cam0 = util.invert_ht(sequence.calib.T_cam0_velo)
    T_w_velo_gt = np.matmul(sequence.poses, sequence.calib.T_cam0_velo)
    i = 0
    polecov = 1.0
    filter = particlefilter.particlefilter(2000, T_w_velo_gt[i], 3.0,
                                           np.radians(5.0), polemap, polecov)
    filter.minneff = 0.5
    filter.estimatetype = 'best'

    if visualize:
        plt.ion()
        figure = plt.figure()
        nplots = 2
        mapaxes = figure.add_subplot(nplots, 1, 1)
        mapaxes.set_aspect('equal')
        mapaxes.scatter(polemap[:, 0], polemap[:, 1], s=10, c='b', marker='s')
        mapaxes.plot(T_w_velo_gt[:, 0, 3], T_w_velo_gt[:, 1, 3], 'g')
        particles = mapaxes.scatter([], [], s=1, c='r')
        arrow = mapaxes.arrow(0.0,
                              0.0,
                              4.0,
                              0.0,
                              length_includes_head=True,
                              head_width=1.2,
                              head_length=1.5,
                              color='k')
        arrowdata = np.hstack(
            [arrow.get_xy(), np.zeros([8, 1]),
             np.ones([8, 1])]).T
        locpoles = mapaxes.scatter([], [], s=30, c='y', marker='^')
        viewoffset = 25.0

        # weightaxes = figure.add_subplot(nplots, 1, 2)
        # gridsize = 50
        # offset = 10.0
        # visfilter = particlefilter.particlefilter(gridsize**2,
        #     np.identity(4), 0.0, 0.0, polemap, polecov)
        # gridcoord = np.linspace(-offset, offset, gridsize)
        # x, y = np.meshgrid(gridcoord, gridcoord)
        # dxy = np.hstack([x.reshape([-1, 1]), y.reshape([-1, 1])])
        # weightimage = weightaxes.matshow(np.zeros([gridsize, gridsize]),
        #     extent=(-offset, offset, -offset, offset))

        # histaxes = figure.add_subplot(nplots, 1, 3)

    imap = 0
    while locdata[imap]['imid'] < i:
        imap += 1
    T_w_velo_est = np.full(T_w_velo_gt.shape, np.nan)
    T_w_velo_est[i] = filter.estimate_pose()
    i += 1
    with progressbar.ProgressBar(max_value=T_w_velo_est.shape[0] - i) as bar:
        while i < T_w_velo_est.shape[0]:
            # Relative motion between consecutive ground-truth poses, used as
            # simulated odometry: each component is perturbed with zero-mean
            # Gaussian noise whose standard deviation is 2% of the motion.
            relodo = util.ht2xyp(
                util.invert_ht(T_w_velo_gt[i - 1]).dot(T_w_velo_gt[i]))
            relodocov = np.diag((0.02 * relodo)**2)
            relodo = np.random.multivariate_normal(relodo, relodocov)
            filter.update_motion(relodo, relodocov)
            T_w_velo_est[i] = filter.estimate_pose()

            if imap < locdata.size and i >= locdata[imap]['iend']:
                T_w_cam0_mid = sequence.poses[locdata[imap]['imid']]
                T_w_cam0_now = sequence.poses[i]
                T_cam0_now_cam0_mid \
                    = util.invert_ht(T_w_cam0_now).dot(T_w_cam0_mid)

                # Poles detected in the local map (built around its middle
                # scan): lift their x/y positions to homogeneous coordinates
                # and transform them into the current velodyne frame before
                # passing them to the measurement update.
                poleparams = locdata[imap]['poleparams']
                npoles = poleparams.shape[0]
                h = np.diff(poleparams[:, 2:4], axis=1)
                polepos_m_mid = np.hstack([
                    poleparams[:, :2],
                    np.zeros([npoles, 1]),
                    np.ones([npoles, 1])
                ]).T
                polepos_velo_now = T_velo_cam0.dot(T_cam0_now_cam0_mid).dot(
                    T_cam0_m).dot(polepos_m_mid)
                poleparams = np.hstack([polepos_velo_now[:2].T, h])
                filter.update_measurement(poleparams[:, :2])
                T_w_velo_est[i] = filter.estimate_pose()
                if visualize:
                    polepos_w = T_w_velo_est[i].dot(polepos_velo_now)
                    locpoles.set_offsets(polepos_w[:2].T)

                    # particleposes = np.tile(T_w_velo_gt[i], [gridsize**2, 1, 1])
                    # particleposes[:, :2, 3] += dxy
                    # visfilter.particles = particleposes
                    # visfilter.weights[:] = 1.0 / visfilter.count
                    # visfilter.update_measurement(poleparams[:, :2], resample=False)
                    # weightimage.set_array(np.flipud(
                    #     visfilter.weights.reshape([gridsize, gridsize])))
                    # weightimage.autoscale()
                imap += 1

            if visualize:
                particles.set_offsets(filter.particles[:, :2, 3])
                arrow.set_xy(T_w_velo_est[i].dot(arrowdata)[:2].T)
                x, y = T_w_velo_est[i, :2, 3]
                mapaxes.set_xlim(left=x - viewoffset, right=x + viewoffset)
                mapaxes.set_ylim(bottom=y - viewoffset, top=y + viewoffset)
                # histaxes.cla()
                # histaxes.hist(filter.weights,
                #     bins=50, range=(0.0, np.max(filter.weights)))
                if i % 15 == 0:
                    filename = str(seq) + '_' + str(i) + '_'
                    figure.savefig(os.path.join(figuresdir, filename + '.png'))
                figure.canvas.draw_idle()
                figure.canvas.flush_events()
            bar.update(i)
            i += 1
    filename = os.path.join(seqdir, locfileprefix \
        + datetime.datetime.now().strftime('_%Y-%m-%d_%H-%M-%S.npz'))
    np.savez(filename, T_w_velo_est=T_w_velo_est)
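localize sets filter.minneff = 0.5, and Example #1 resamples when self.neff drops below that threshold. Neither neff nor resample is listed in these excerpts, but a standard formulation is the normalized effective sample size together with low-variance (systematic) resampling; a sketch under that assumption:

import numpy as np

def neff(weights):
    # Normalized effective sample size in (0, 1]; small values mean the
    # weight mass is concentrated on few particles.
    return 1.0 / (weights.size * np.sum(weights ** 2))

def systematic_resample(weights):
    # Returns indices of the particles to keep (with repetition).
    n = weights.size
    positions = (np.arange(n) + np.random.uniform()) / n
    return np.searchsorted(np.cumsum(weights), positions)

# Usage sketch: duplicate likely particles, drop unlikely ones, reset weights.
weights = np.array([0.7, 0.1, 0.1, 0.05, 0.05])
if neff(weights) < 0.5:
    idx = systematic_resample(weights)
    weights = np.full(weights.size, 1.0 / weights.size)
    print(idx)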
Example #8
def evaluate(seq):
    sequence = dataset.sequence(seq)
    T_w_velo_gt = np.matmul(sequence.poses, sequence.calib.T_cam0_velo)
    T_w_velo_gt = np.array([util.project_xy(ht) for ht in T_w_velo_gt])
    seqdir = os.path.join('kitti', '{:03d}'.format(seq))
    mapdata = np.load(os.path.join(seqdir, globalmapfile))
    polemap = mapdata['polemeans']
    plt.scatter(polemap[:, 0], polemap[:, 1], s=1, c='b')
    plt.plot(T_w_velo_gt[:, 0, 3], T_w_velo_gt[:, 1, 3], color=(0.5, 0.5, 0.5))
    cumdist = np.hstack([
        0.0,
        np.cumsum(
            np.linalg.norm(np.diff(T_w_velo_gt[:, :2, 3], axis=0), axis=1))
    ])
    timestamps = np.array([arrow.get(timestamp).float_timestamp \
        for timestamp in sequence.timestamps])
    t_eval = scipy.interpolate.interp1d(cumdist, timestamps)(np.arange(
        0.0, cumdist[-1], 1.0))
    n = t_eval.size
    T_w_velo_gt_interp = np.empty([n, 4, 4])
    iodo = 1
    for ieval in range(n):
        while timestamps[iodo] < t_eval[ieval]:
            iodo += 1
        T_w_velo_gt_interp[ieval] = util.interpolate_ht(
            T_w_velo_gt[iodo - 1:iodo + 1], timestamps[iodo - 1:iodo + 1],
            t_eval[ieval])
    files = [file for file in os.listdir(seqdir) \
        if os.path.basename(file).startswith(locfileprefix)]
    poserror = np.full([n, len(files)], np.nan)
    laterror = np.full([n, len(files)], np.nan)
    lonerror = np.full([n, len(files)], np.nan)
    angerror = np.full([n, len(files)], np.nan)
    T_gt_est = np.full([n, 4, 4], np.nan)
    for ifile in range(len(files)):
        T_w_velo_est = np.load(os.path.join(seqdir,
                                            files[ifile]))['T_w_velo_est']
        iodo = 1
        for ieval in range(n):
            while timestamps[iodo] < t_eval[ieval]:
                iodo += 1
            T_w_velo_est_interp = util.interpolate_ht(
                T_w_velo_est[iodo - 1:iodo + 1], timestamps[iodo - 1:iodo + 1],
                t_eval[ieval])
            T_gt_est[ieval] = util.invert_ht(
                T_w_velo_gt_interp[ieval]).dot(T_w_velo_est_interp)
        lonerror[:, ifile] = T_gt_est[:, 0, 3]
        laterror[:, ifile] = T_gt_est[:, 1, 3]
        poserror[:, ifile] = np.linalg.norm(T_gt_est[:, :2, 3], axis=1)
        angerror[:, ifile] = util.ht2xyp(T_gt_est)[:, 2]
        plt.plot(T_w_velo_est[:, 0, 3], T_w_velo_est[:, 1, 3], 'r')
    angerror = np.degrees(angerror)
    lonstd = np.std(lonerror, axis=0)
    latstd = np.std(laterror, axis=0)
    angstd = np.std(angerror, axis=0)
    angerror = np.abs(angerror)
    laterror = np.mean(np.abs(laterror), axis=0)
    lonerror = np.mean(np.abs(lonerror), axis=0)
    posrmse = np.sqrt(np.mean(poserror**2, axis=0))
    angrmse = np.sqrt(np.mean(angerror**2, axis=0))
    poserror = np.mean(poserror, axis=0)
    angerror = np.mean(angerror, axis=0)
    plt.savefig(os.path.join(seqdir, 'trajectory_est.svg'))
    np.savez(os.path.join(seqdir, evalfile),
             poserror=poserror,
             angerror=angerror,
             posrmse=posrmse,
             angrmse=angrmse,
             laterror=laterror,
             latstd=latstd,
             lonerror=lonerror,
             lonstd=lonstd)
    print('poserror: {}\nposrmse: {}\n'
          'laterror: {}\nlatstd: {}\n'
          'lonerror: {}\nlonstd: {}\n'
          'angerror: {}\nangstd: {}\nangrmse: {}'.format(
              np.mean(poserror), np.mean(posrmse), np.mean(laterror),
              np.mean(latstd), np.mean(lonerror), np.mean(lonstd),
              np.mean(angerror), np.mean(angstd), np.mean(angrmse)))
Example #9
    #     imap += 1

    T_w_velo_est = np.full(T_w_velo_gt.shape, np.nan)

    """
        Gets the weighted average of the particles poses (in world coordinates)
        This is the likeliest position of the car at the current time
    """
    T_w_velo_est[i] = filter.estimate_pose()  # i = 0
    i += 1
    # with progressbar.ProgressBar(max_value=T_w_velo_est.shape[0] - i) as bar:
    while i < T_w_velo_est.shape[0]:
        """
        Relative motion from the previous pose to the current pose,
        expressed as (x, y, phi).
        """
        relodo = util.ht2xyp(
            util.invert_ht(T_w_velo_gt[i-1]).dot(T_w_velo_gt[i]))
        relodocov = np.diag((0.02 * relodo)**2)

        """
        Draw a noisy relative motion from a multivariate normal
        distribution centred on the true motion, then let the filter
        move every particle by a similarly perturbed motion using the
        same covariance relodocov.

        In other words, the particle poses are propagated according to
        the vehicle's motion plus some randomness, after which the
        likeliest pose is estimated again.
        """
        relodo = np.random.multivariate_normal(relodo, relodocov)
        filter.update_motion(relodo, relodocov)
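filter.update_motion itself is not part of this excerpt. A common implementation propagates every particle by the relative motion plus per-particle noise drawn from relodocov; a sketch under that assumption, with particle poses stored as 4x4 transforms:

import numpy as np

def update_motion_sketch(particles, relodo, relodocov):
    # particles: (N, 4, 4) poses; relodo: (3,) mean relative motion (dx, dy, dphi).
    n = particles.shape[0]
    noisy = np.random.multivariate_normal(relodo, relodocov, size=n)
    out = np.empty_like(particles)
    for k in range(n):
        dx, dy, dphi = noisy[k]
        T = np.identity(4)
        T[:2, :2] = [[np.cos(dphi), -np.sin(dphi)], [np.sin(dphi), np.cos(dphi)]]
        T[:2, 3] = [dx, dy]
        out[k] = particles[k].dot(T)  # apply the motion in each particle's own frame
    return out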
    def ukf(self, Xin, Pin, mean, cov, poleparams):
        # Xin: particle pose as a 4x4 homogeneous transform; Pin: its 3x3
        # covariance over (x, y, phi).
        Z, z_pred = self.measurement(Xin, poleparams)
        x_in = util.ht2xyp(Xin)  #(3,)
        Qukf = 0.5 * np.eye(x_in.shape[0])
        Rukf = 0.01 * np.eye(z_pred.shape[0])
        states = x_in.shape[0]  # 3
        observations = z_pred.shape[0]  # 2*Nout
        vNoise = Qukf.shape[1]
        wNoise = Rukf.shape[1]
        noises = vNoise + wNoise
        if noises:
            N = scipy.linalg.block_diag(Qukf, Rukf)
            P_q = scipy.linalg.block_diag(Pin, N)
            x_q = np.vstack([x_in.reshape(-1, 1), np.zeros((noises, 1))])
        else:
            P_q = Pin
            x_q = x_in.reshape(-1, 1)
        xSigmaPts, wSigmaPts, nsp = self.scaledSymmetricSigmaPoints(x_q, P_q)
        '''
            xSigmaPts (3+num_noise,nsp)
            wSigmaPts (nsp+1,)
        '''
        wSigmaPts_xmat = np.tile(wSigmaPts[1:nsp],
                                 (states, 1))  # (states, nsp-1)
        wSigmaPts_zmat = np.tile(wSigmaPts[1:nsp], (observations, 1))
        xPredSigmaPts = np.zeros((states, nsp))
        zPredSigmaPts = np.zeros((observations, nsp))
        for i in range(nsp):
            x_pred = self.motion(
                util.xyp2ht(xSigmaPts[:states, i].reshape(-1)), mean,
                cov) + xSigmaPts[states:states + vNoise, i]
            # xPredSigmaPts.append(util.ht2xyp(x_pred).reshape(3,1))
            xPredSigmaPts[:, i] = x_pred.reshape(-1)
            z_pred = self.measurement(
                util.xyp2ht(x_pred),
                poleparams).reshape(-1) + xSigmaPts[states + vNoise:states +
                                                    noises, i]  #(2*Nout,1)
            # zPredSigmaPts.append(z_pred)
            zPredSigmaPts[:, i] = z_pred
        xPredSigmaPts = np.array(xPredSigmaPts)  # (states, nsp)
        zPredSigmaPts = np.array(zPredSigmaPts)  # (observations, nsp)

        xPred = np.sum(wSigmaPts_xmat *
                       (xPredSigmaPts[:, 1:nsp] -
                        np.tile(xPredSigmaPts[:, 0].reshape(-1, 1),
                                (1, nsp - 1))),
                       axis=1).reshape(-1, 1)  # (3,1)
        zPred = np.sum(wSigmaPts_zmat *
                       (zPredSigmaPts[:, 1:nsp] -
                        np.tile(zPredSigmaPts[:, 0].reshape(-1, 1),
                                (1, nsp - 1))),
                       axis=1).reshape(-1, 1)  # (2*Nout,1)
        xPred = xPred + xPredSigmaPts[:, 0].reshape(-1, 1)
        zPred = zPred + zPredSigmaPts[:, 0].reshape(-1, 1)

        exSigmaPt = xPredSigmaPts[:, 0].reshape(-1, 1) - xPred
        ezSigmaPt = zPredSigmaPts[:, 0].reshape(-1, 1) - zPred

        PPred = wSigmaPts[nsp] * exSigmaPt.dot(exSigmaPt.T)
        PxzPred = wSigmaPts[nsp] * exSigmaPt.dot(ezSigmaPt.T)
        S = wSigmaPts[nsp] * ezSigmaPt.dot(ezSigmaPt.T)

        exSigmaPt = xPredSigmaPts[:, 1:nsp] - np.tile(
            xPred, (1, nsp - 1))  # (3,nsp-1)
        ezSigmaPt = zPredSigmaPts[:, 1:nsp] - np.tile(
            zPred, (1, nsp - 1))  # (2*Nout,nsp-1)
        PPred = PPred + (wSigmaPts_xmat * exSigmaPt).dot(exSigmaPt.T)
        S = S + (wSigmaPts_zmat * ezSigmaPt).dot(ezSigmaPt.T)
        PxzPred = PxzPred + exSigmaPt.dot((wSigmaPts_zmat * ezSigmaPt).T)

        K = PxzPred.dot(np.linalg.inv(S))
        innovation = Z - zPred
        Xout = xPred + K.dot(innovation)
        Pout = PPred - K @ S @ K.T
        return Xout, Pout
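self.scaledSymmetricSigmaPoints is called above but not listed. The indexing in ukf (wSigmaPts has nsp + 1 entries, with the last one used as the covariance weight of the centre point) matches the standard scaled unscented transform; a sketch under that assumption, with guessed parameter defaults:

import numpy as np

def scaled_symmetric_sigma_points(x, P, alpha=1.0, beta=0.0, kappa=2.0):
    # x: (n, 1) mean, P: (n, n) covariance.
    # Returns sigma points (n, 2n+1), weights (2n+2,), and nsp = 2n+1.
    # weights[:nsp] are the mean weights; weights[nsp] is the covariance
    # weight of the centre point, matching how ukf above indexes them.
    n = x.shape[0]
    nsp = 2 * n + 1
    lam = alpha ** 2 * (n + kappa) - n
    S = np.linalg.cholesky((n + lam) * P)   # columns are the sigma directions
    X = np.tile(x, (1, nsp))
    X[:, 1:n + 1] += S
    X[:, n + 1:] -= S
    w = np.full(nsp + 1, 1.0 / (2.0 * (n + lam)))
    w[0] = lam / (n + lam)
    w[nsp] = w[0] + (1.0 - alpha ** 2 + beta)
    return X, w, nsp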