Example No. 1
    def init_normalize_factors(self, train_seqs):
        if os.path.exists(self.path_normalize_factors):
            mondict = pload(self.path_normalize_factors)
            return mondict['mean_u'], mondict['std_u']

        path = os.path.join(self.predata_dir, train_seqs[0] + '.p')
        if not os.path.exists(path):
            print("init_normalize_factors not computed")
            return 0, 0

        print('Start computing normalizing factors ...')
        cprint("Do it only on training sequences, it is vital!", 'yellow')
        # first pass: compute the mean of the inputs and the positive/negative label counts
        num_data = 0

        for i, sequence in enumerate(train_seqs):
            pickle_dict = pload(self.predata_dir, sequence + '.p')
            us = pickle_dict['us']
            sms = pickle_dict['xs']
            if i == 0:
                mean_u = us.sum(dim=0)
                num_positive = sms.sum(dim=0)
                num_negative = sms.shape[0] - sms.sum(dim=0)
            else:
                mean_u += us.sum(dim=0)
                num_positive += sms.sum(dim=0)
                num_negative += sms.shape[0] - sms.sum(dim=0)
            num_data += us.shape[0]
        mean_u = mean_u / num_data
        pos_weight = num_negative / num_positive

        # second pass: compute the standard deviation
        for i, sequence in enumerate(train_seqs):
            pickle_dict = pload(self.predata_dir, sequence + '.p')
            us = pickle_dict['us']
            if i == 0:
                std_u = ((us - mean_u)**2).sum(dim=0)
            else:
                std_u += ((us - mean_u)**2).sum(dim=0)
        std_u = (std_u / num_data).sqrt()
        normalize_factors = {
            'mean_u': mean_u,
            'std_u': std_u,
        }
        print('... ended computing normalizing factors')
        print('pos_weight:', pos_weight)
        print('These values must be used as training parameters!')
        print('mean_u    :', mean_u)
        print('std_u     :', std_u)
        print('num_data  :', num_data)
        pdump(normalize_factors, self.path_normalize_factors)
        return mean_u, std_u
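The returned factors are meant to standardize the raw IMU inputs before they are fed to the network. A minimal usage sketch, assuming a per-channel elementwise normalization; `dataset` and `us` are placeholder names, not part of the excerpt above:

    # hypothetical usage: standardize each IMU channel with the training statistics
    mean_u, std_u = dataset.init_normalize_factors(train_seqs)
    us_normalized = (us - mean_u) / std_u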
Example No. 2
    def display_test(self, dataset, mode):
        self.roes = {
            'Rots': [],
            'yaws': [],
        }
        self.to_open_vins(dataset)
        for i, seq in enumerate(dataset.sequences):
            print('\nResults for sequence ' + seq)
            self.seq = seq
            # get ground truth
            self.gt = dataset.load_gt(i)
            Rots = SO3.from_quaternion(self.gt['qs'].cuda())
            self.gt['Rots'] = Rots.cpu()
            self.gt['rpys'] = SO3.to_rpy(Rots).cpu()
            # get data and estimate
            self.net_us = pload(self.address, seq, 'results.p')['hat_xs']
            self.raw_us, _ = dataset[i]
            N = self.net_us.shape[0]
            self.gyro_corrections = self.raw_us[:N, :3] - self.net_us[:N, :3]
            self.ts = torch.linspace(0, N*self.dt, N)

            self.convert()
            self.plot_gyro()
            self.plot_gyro_correction()
            plt.show()
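A hedged driver sketch for the method above, assuming an object `learner` that exposes display_test and a dataset with a `sequences` attribute (both names are placeholders):

    # hypothetical call: export Open-VINS files, then plot per-sequence results
    learner.display_test(dataset, mode='test')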
Example No. 3
    def __init__(self, res_dir, tb_dir, net_class, net_params, address, dt):
        self.res_dir = res_dir
        self.tb_dir = tb_dir
        self.net_class = net_class
        self.net_params = net_params
        self._ready = False
        self.train_params = {}
        self.figsize = (20, 12)
        self.dt = dt  # (s)
        self.address, self.tb_address = self.find_address(address)
        if address is None:  # create a new address and save the parameters
            pdump(self.net_params, self.address, 'net_params.p')
            ydump(self.net_params, self.address, 'net_params.yaml')
        else:  # reload the network parameters from the existing address
            self.net_params = pload(self.address, 'net_params.p')
            self.train_params = pload(self.address, 'train_params.p')
            self._ready = True
        self.path_weights = os.path.join(self.address, 'weights.pt')
        self.net = self.net_class(**self.net_params)
        if self._ready:  # fill the network with the saved weights
            self.load_weights()
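The constructor covers two cases: with address=None it creates a fresh result directory and dumps the network parameters, otherwise it reloads parameters and weights from the given directory. A minimal sketch of both cases, assuming the enclosing class is called `Processing` and that `GyroNet` and `net_params` exist (all placeholder names):

    # hypothetical: start a new run (parameters are dumped to a fresh address)
    proc = Processing('results/', 'runs/', GyroNet, net_params, address=None, dt=0.005)
    # hypothetical: reload an existing run (parameters and weights are read back)
    proc = Processing('results/', 'runs/', GyroNet, net_params=None, address='results/my_run', dt=0.005)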
Example No. 4
    def to_open_vins(self, dataset):
        """
        Export results to the Open-VINS format, so they can be evaluated with
        the toolbox available at https://github.com/rpng/open_vins/
        """

        for i, seq in enumerate(dataset.sequences):
            self.seq = seq
            # get ground truth
            self.gt = dataset.load_gt(i)
            raw_us, _ = dataset[i]
            net_us = pload(self.address, seq, 'results.p')['hat_xs']
            N = net_us.shape[0]
            net_qs, imu_Rots, net_Rots = self.integrate_with_quaternions_superfast(N, raw_us, net_us)
            path = os.path.join(self.address, seq + '.txt')
            header = "timestamp(s) tx ty tz qx qy qz qw"
            x = np.zeros((net_qs.shape[0], 8))
            # only orientation is estimated, so the translation columns stay at zero
            x[:, 0] = self.gt['ts'][:net_qs.shape[0]]
            # net_qs is ordered (qw, qx, qy, qz); reorder to match the qx qy qz qw header
            x[:, [7, 4, 5, 6]] = net_qs
            np.savetxt(path, x[::10], header=header, delimiter=" ", fmt='%1.9f')
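The exported file is whitespace-separated, subsampled by a factor of 10, and carries zero translation since only orientation is estimated. A quick way to read it back for inspection, with a placeholder file name:

    # hypothetical check: reload the exported trajectory and split the columns
    traj = np.loadtxt('some_sequence.txt')
    ts, q_xyzw = traj[:, 0], traj[:, 4:8]  # columns follow the header written above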
Example No. 5
    def load_gt(self, i):
        return pload(self.predata_dir, self.sequences[i] + '_gt.p')
Example No. 6
    def load_seq(self, i):
        return pload(self.predata_dir, self.sequences[i] + '.p')
Example No. 7
    def get_iekf_results(self, seq):
        return pload(self.address, seq, 'iekf.p')
Example No. 8
    def get_results(self, seq):
        return pload(self.address, seq, 'results.p')['hat_xs']