Example #1
class LocalizationNode(object):
    '''
    Class to hold all ROS related transactions for the EKF SLAM localization node.
    '''

    #===========================================================================
    def __init__(self,
                 x0=0,
                 y0=0,
                 theta0=0,
                 odom_lin_sigma=0.025,
                 odom_ang_sigma=np.deg2rad(2),
                 meas_rng_noise=0.2,
                 meas_ang_noise=np.deg2rad(10),
                 xs=0,
                 ys=0,
                 thetas=0,
                 rob2sensor=[-0.087, 0.013, np.deg2rad(0)]):
        '''
        Initializes publishers, subscribers and the EKF SLAM filter.
        '''

        # Transformation from robot to sensor
        self.robotToSensor = np.array([xs, ys, thetas])

        # Publishers
        self.pub_lines = rospy.Publisher("ekf_lines", Marker, queue_size=0)
        self.pub_map = rospy.Publisher("map", Marker, queue_size=0)
        self.pub_map_gt = rospy.Publisher("map_gt", Marker, queue_size=0)
        self.pub_odom = rospy.Publisher("predicted_odom",
                                        Odometry,
                                        queue_size=0)
        self.pub_uncertainity = rospy.Publisher("uncertainity",
                                                Marker,
                                                queue_size=0)
        self.pub_laser = rospy.Publisher("ekf_laser", LaserScan, queue_size=0)
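        # Note: in rospy a queue_size of 0 means an unbounded publish queue;
        # a small positive value is usually preferable to bound memory use.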

        # Subscribers
        self.sub_scan = rospy.Subscriber("scan", LaserScan,
                                         self.laser_callback)
        self.sub_odom = rospy.Subscriber("odom", Odometry, self.odom_callback)
        self.sub_scan = rospy.Subscriber("lines", Marker, self.lines_callback)

        # TF
        self.tfBroad = tf.TransformBroadcaster()

        # Incremental odometry
        self.last_odom = None
        self.odom = None

        # Flags
        self.new_odom = False
        self.new_laser = False
        self.pub = False

        # Ground truth map
        dataset = rospy.get_param("~dataset", None)
        if dataset == 1:
            self.map = get_map()
            x0 = 0.8 - 0.1908
            y0 = 0.3 + 0.08481
            theta0 = -0.034128
        elif dataset == 2:
            self.map = np.array([])
        elif dataset == 3:
            self.map = get_dataset3_map()
        else:
            self.map = np.array([])

        # Initial state
        self.x0 = np.array([x0, y0, theta0])

        # EKF SLAM filter
        self.ekf = EKF_SLAM(x0, y0, theta0, odom_lin_sigma, odom_ang_sigma,
                            meas_rng_noise, meas_ang_noise)

        # Transformation from robot to sensor
        self.robot2sensor = np.array(rob2sensor)

    #===========================================================================
    def odom_callback(self, msg):
        '''
        Publishes a tf based on the odometry of the robot and calculates the incremental odometry as seen from the vehicle frame.
        '''
        # Save time
        self.time = msg.header.stamp

        # Translation
        trans = (msg.pose.pose.position.x, msg.pose.pose.position.y,
                 msg.pose.pose.position.z)

        # Rotation
        rot = (msg.pose.pose.orientation.x, msg.pose.pose.orientation.y,
               msg.pose.pose.orientation.z, msg.pose.pose.orientation.w)

        # Publish transform
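        # The transforms below build the frame tree used by the node:
        #   /world -> /odom -> /base_footprint for the raw odometry, plus the
        #   static offsets /robot -> /sensor and /base_footprint -> /camera_depth_frame.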
        self.tfBroad.sendTransform(translation=self.robot2sensor,
                                   rotation=(0, 0, 0, 1),
                                   time=msg.header.stamp,
                                   child='/sensor',
                                   parent='/robot')
        self.tfBroad.sendTransform(translation=self.robot2sensor,
                                   rotation=(0, 0, 0, 1),
                                   time=msg.header.stamp,
                                   child='/camera_depth_frame',
                                   parent='/base_footprint')
        self.tfBroad.sendTransform(translation=trans,
                                   rotation=rot,
                                   time=msg.header.stamp,
                                   child='/base_footprint',
                                   parent='/odom')
        self.tfBroad.sendTransform(
            translation=(self.x0[0], self.x0[1], 0),
            rotation=tf.transformations.quaternion_from_euler(
                0, 0, self.x0[2]),
            time=msg.header.stamp,
            child='/odom',
            parent='/world')

        # Incremental odometry
        if self.last_odom is not None and not self.new_odom:

            # Increment computation
            delta_x = msg.pose.pose.position.x - self.last_odom.pose.pose.position.x
            delta_y = msg.pose.pose.position.y - self.last_odom.pose.pose.position.y
            yaw = yaw_from_quaternion(msg.pose.pose.orientation)
            lyaw = yaw_from_quaternion(self.last_odom.pose.pose.orientation)

            # Odometry seen from vehicle frame
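            # i.e. the world-frame displacement rotated by -lyaw into the previous
            # robot frame, together with the wrapped heading increment:
            #   uk = [R(-lyaw) * (p_k - p_{k-1}); angle_wrap(yaw - lyaw)]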
            self.uk = np.array([
                delta_x * np.cos(lyaw) + delta_y * np.sin(lyaw),
                -delta_x * np.sin(lyaw) + delta_y * np.cos(lyaw),
                angle_wrap(yaw - lyaw)
            ])

            # Flag
            self.new_odom = True

            # For next loop
            self.last_odom = msg
        if self.last_odom is None:
            self.last_odom = msg

    #===========================================================================
    def laser_callback(self, msg):
        '''
        Republishes laser scan in the EKF solution frame.
        '''
        msg.header.frame_id = '/sensor'
        self.pub_laser.publish(msg)

    #===========================================================================
    def lines_callback(self, msg):
        '''
        Function called each time a Marker message with topic "lines" arrives.
        '''
        # Save time
        self.linestime = msg.header.stamp

        # Get the lines
        if len(msg.points) > 0:

            # Structure for the lines
            self.lines = np.zeros((len(msg.points) // 2, 4))

            for i in range(0, len(msg.points) // 2):
                # Get start and end points
                pt1 = msg.points[2 * i]
                pt2 = msg.points[2 * i + 1]

                # Transform to robot frame
                pt1R = comp(self.robot2sensor, [pt1.x, pt1.y, 0.0])
                pt2R = comp(self.robot2sensor, [pt2.x, pt2.y, 0.0])
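                # comp() is assumed to be the usual 2D pose composition: it chains
                # the robot-to-sensor pose with a point given in the sensor frame,
                # yielding that point expressed in the robot frame.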

                # Append to line list
                self.lines[i, :2] = pt1R[:2]
                self.lines[i, 2:] = pt2R[:2]

            # Flag
            self.new_laser = True

            # Publish
            publish_lines(self.lines,
                          self.pub_lines,
                          frame='/robot',
                          time=msg.header.stamp,
                          ns='lines_robot',
                          color=(0, 0, 1))

    #===========================================================================
    def iterate(self):
        '''
        Main loop of the filter.
        '''
        # Prediction
        if self.new_odom:

            self.ekf.predict(self.uk.copy())
            self.new_odom = False
            self.pub = True

        # Update: data association, correction and state augmentation
        if self.new_laser:

            Innovk_List, H_k_List, Rk_List, idx_not_associated = self.ekf.data_association(
                self.lines.copy())
            self.ekf.update_position(Innovk_List, H_k_List, Rk_List)
            self.ekf.state_augmentation(self.lines.copy(), idx_not_associated)
            self.new_laser = False
            self.pub = True

        # Publish results
        if self.pub:
            self.publish_results()
            self.pub = False

    #===========================================================================
    def publish_results(self):
        '''
        Publishes all results from the filter.
        '''
        # Ground truth of the map of the room
        publish_lines(self.map,
                      self.pub_map_gt,
                      frame='/world',
                      ns='gt_map',
                      color=(0.3, 0.3, 0.3))

        odom, ellipse, trans, rot, room_map = get_ekf_msgs(self.ekf)

        publish_lines(room_map,
                      self.pub_map,
                      frame='/world',
                      ns='map',
                      color=(0, 1, 0))

        self.pub_odom.publish(odom)
        self.pub_uncertainity.publish(ellipse)
        self.tfBroad.sendTransform(translation=trans,
                                   rotation=rot,
                                   time=self.time,
                                   child='/robot',
                                   parent='/world')
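
# A minimal entry point is not shown in the example above; the sketch below is one
# plausible way to run the node. It assumes the example's imports (rospy and the
# LocalizationNode class) are available; the node name and the 10 Hz rate are
# illustrative choices, not taken from the original code.
if __name__ == '__main__':
    rospy.init_node('ekf_slam_localization')
    node = LocalizationNode()
    rate = rospy.Rate(10)
    while not rospy.is_shutdown():
        node.iterate()  # prediction / update / publishing as implemented above
        rate.sleep()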
Example #2
def main():
    args = get_cli_args()
    validate_cli_args(args)
    alphas = np.array(args.alphas)
    beta = np.array(args.beta)
    mean_prior = np.array([180., 50., 0.])
    Sigma_prior = 1e-12 * np.eye(3, 3)
    initial_state = Gaussian(mean_prior, Sigma_prior)
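    # The near-zero prior covariance (1e-12 * I) encodes an essentially exactly
    # known initial pose at (180, 50, 0).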

    if args.input_data_file:
        data = load_data(args.input_data_file)
    elif args.num_steps:
        # Generate data, assuming `--num-steps` was present in the CL args.
        data = generate_input_data(initial_state.mu.T, args.num_steps,
                                   args.num_landmarks_per_side,
                                   args.max_obs_per_time_step, alphas, beta,
                                   args.dt)
    else:
        raise RuntimeError('Expected either --input-data-file or --num-steps to be provided.')

    store_sim_data = bool(args.output_dir)
    should_show_plots = bool(args.animate)
    should_write_movie = bool(args.movie_file)
    should_update_plots = should_show_plots or should_write_movie

    field_map = FieldMap(args.num_landmarks_per_side)

    fig = get_plots_figure(should_show_plots, should_write_movie)
    movie_writer = get_movie_writer(should_write_movie, 'Simulation SLAM',
                                    args.movie_fps, args.plot_pause_len)
    progress_bar = FillingCirclesBar('Simulation Progress', max=data.num_steps)

    if store_sim_data:
        if not os.path.exists(args.output_dir):
            os.makedirs(args.output_dir)
        save_input_data(data, os.path.join(args.output_dir, 'input_data.npy'))

    # slam object initialization
    slam = EKF_SLAM('ekf', 'known', 'batch', args, initial_state)
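    # The positional string arguments presumably select the filter type, the
    # data-association mode ('known' correspondences) and the update scheme
    # ('batch'); their exact meaning depends on the EKF_SLAM implementation.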
    mu_traj = mean_prior
    sigma_traj = []
    theta = []

    with movie_writer.saving(
            fig, args.movie_file,
            data.num_steps) if should_write_movie else get_dummy_context_mgr():
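        # The conditional context manager above records movie frames only when
        # requested and otherwise falls back to a no-op manager.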
        for t in range(data.num_steps):
            # Used to include the t-th time step when plotting.
            tp1 = t + 1

            # Control at the current step.
            u = data.filter.motion_commands[t]
            # Observation at the current step.
            z = data.filter.observations[t]

            # SLAM prediction step with the control u.
            mu, Sigma = slam.predict(u)

            # SLAM update step with the observations z.
            mu, Sigma = slam.update(z)
            mu_traj = np.vstack((mu_traj, mu[:3]))
            sigma_traj.append(Sigma[:3, :3])
            theta.append(mu[2])

            progress_bar.next()
            if not should_update_plots:
                continue

            plt.cla()
            plot_field(field_map, z)
            plot_robot(data.debug.real_robot_path[t])
            plot_observations(data.debug.real_robot_path[t],
                              data.debug.noise_free_observations[t],
                              data.filter.observations[t])

            plt.plot(data.debug.real_robot_path[1:tp1, 0],
                     data.debug.real_robot_path[1:tp1, 1], 'm')
            plt.plot(data.debug.noise_free_robot_path[1:tp1, 0],
                     data.debug.noise_free_robot_path[1:tp1, 1], 'g')

            plt.plot([data.debug.real_robot_path[t, 0]],
                     [data.debug.real_robot_path[t, 1]], '*r')
            plt.plot([data.debug.noise_free_robot_path[t, 0]],
                     [data.debug.noise_free_robot_path[t, 1]], '*g')

            # Plot the SLAM solution:
            # robot filtered trajectory and covariance
            plt.plot(mu_traj[:, 0], mu_traj[:, 1], 'blue')
            plot2dcov(mu[:2], Sigma[:2, :2], color='b', nSigma=3, legend=None)

            # landmarks covariances and expected poses
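            # slam.iR and slam.iM are assumed to hold the robot-state dimension and
            # the number of landmark-state entries, so Sm is the landmark block of
            # the full covariance and mu_M the stacked landmark means.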
            Sm = slam.Sigma[slam.iR:slam.iR + slam.iM,
                            slam.iR:slam.iR + slam.iM]
            mu_M = slam.mu[slam.iR:]
            for c in range(0, slam.iM, 2):
                Sigma_lm = Sm[c:c + 2, c:c + 2]
                mu_lm = mu_M[c:c + 2]
                plt.plot(mu_lm[0], mu_lm[1], 'ro')
                plot2dcov(mu_lm, Sigma_lm, color='k', nSigma=3, legend=None)

            if should_show_plots:
                # Draw all the plots and pause to create an animation effect.
                plt.draw()
                plt.pause(args.plot_pause_len)

            if should_write_movie:
                movie_writer.grab_frame()

    progress_bar.finish()

    # plt.figure(2)
    # plt.plot(theta)
    plt.show(block=True)

    if store_sim_data:
        file_path = os.path.join(args.output_dir, 'output_data.npy')
        with open(file_path, 'wb') as data_file:
            np.savez(data_file,
                     mean_trajectory=mu_traj,
                     covariance_trajectory=np.array(sigma_traj))
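
# For reference, a sketch (not part of the original example) of reading the stored
# arrays back: np.savez writes an NPZ archive even though the file is named
# output_data.npy, so np.load returns a dict-like object keyed by the array names
# used above. The output_dir value here is a placeholder.
import os
import numpy as np

output_dir = 'out'
with np.load(os.path.join(output_dir, 'output_data.npy')) as data:
    mean_trajectory = data['mean_trajectory']               # (num_steps + 1, 3) poses
    covariance_trajectory = data['covariance_trajectory']   # (num_steps, 3, 3) covariances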
Example #3
class LocalizationNode(object):
    '''
    Class to hold all ROS related transactions to use split and merge algorithm.
    '''
    
    #===========================================================================
    def __init__(self, x0=0, y0=0, theta0=0, odom_lin_sigma=0.025, odom_ang_sigma=np.deg2rad(2),
                 meas_rng_noise=0.2, meas_ang_noise=np.deg2rad(10), xs=0, ys=0, thetas=0):
        '''
        Initializes publishers, subscribers and the EKF SLAM filter.
        '''

        # Transformation from robot to sensor
        self.robotToSensor = np.array([xs,ys,thetas])        
        
        # Publishers
        self.pub_lines = rospy.Publisher("lines", Marker, queue_size=0)
        self.pub_odom = rospy.Publisher("predicted_odom", Odometry, queue_size=0)
        self.pub_uncertainity = rospy.Publisher("uncertainity", Marker, queue_size=0)
        
        # Subscribers
        self.sub_scan = rospy.Subscriber("scan", LaserScan, self.laser_callback)
        self.sub_odom = rospy.Subscriber("odom", Odometry, self.odom_callback)
        
        # TF
        self.tfBroad = tf.TransformBroadcaster()
        
        # Incremental odometry
        self.last_odom = None
        self.odom = None
        
        # Flags
        self.new_odom = False
        self.new_laser = False
        self.pub = False
        
        # Ground truth map
#        self.map = get_map(0.8,0.3,-0.03)    
        self.map = get_map()
        
        # Initial state
        self.x0 = np.array([x0,y0,theta0])        
        
        # EKF SLAM filter
        self.ekf = EKF_SLAM(x0, y0, theta0, odom_lin_sigma, odom_ang_sigma, meas_rng_noise, meas_ang_noise)
                
            
    #===========================================================================
    def odom_callback(self, msg):
        '''
        Publishes a tf based on the odometry of the robot and calculates the incremental odometry as seen from the vehicle frame.
        '''
        # Save time
        self.time = msg.header.stamp
        
        # Translation
        trans = (msg.pose.pose.position.x, 
                 msg.pose.pose.position.y, 
                 msg.pose.pose.position.z)
        
        # Rotation
        rot = (msg.pose.pose.orientation.x,
               msg.pose.pose.orientation.y,
               msg.pose.pose.orientation.z,
               msg.pose.pose.orientation.w)
        
        # Publish transform
        self.tfBroad.sendTransform(translation = (self.robotToSensor[0],self.robotToSensor[1],0),
                                   rotation = tf.transformations.quaternion_from_euler(0,0,self.robotToSensor[2]),
                                   time = msg.header.stamp,
                                   child = '/openni_depth_frame',
                                   parent = '/robot')
        
        self.tfBroad.sendTransform(translation = trans,
                                   rotation = rot, 
                                   time = msg.header.stamp,
                                   child = '/robot_original',
                                   parent = '/odom')
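        # The hard-coded offsets below (-0.1908, +0.08481, yaw -0.034128) match the
        # dataset-1 ground-truth alignment used in Example #1 above.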
        self.tfBroad.sendTransform(translation = (self.x0[0]-0.1908, self.x0[1]+0.08481, 0),
                                   rotation = tf.transformations.quaternion_from_euler(0,0,-0.034128),
#                                   rotation = (0,0,0,1), 
                                   time = msg.header.stamp,
                                   child = '/odom',
                                   parent = '/world')
                                   
        # Incremental odometry
        if self.last_odom is not None and not self.new_odom:
            
            # Increment computation
            delta_x = msg.pose.pose.position.x - self.last_odom.pose.pose.position.x
            delta_y = msg.pose.pose.position.y - self.last_odom.pose.pose.position.y
            yaw = yaw_from_quaternion(msg.pose.pose.orientation)
            lyaw = yaw_from_quaternion(self.last_odom.pose.pose.orientation)
            
            # Odometry seen from vehicle frame
            self.uk = np.array([delta_x * np.cos(lyaw) + delta_y * np.sin(lyaw),
                              - delta_x * np.sin(lyaw) + delta_y * np.cos(lyaw),
                                angle_wrap(yaw - lyaw)])
            
            # Flag
            self.new_odom = True
        
            # For next loop
            self.last_odom = msg
        if self.last_odom is None:
            self.last_odom = msg
        
    
    #===========================================================================
    def laser_callback(self, msg):
        '''
        Function called each time a LaserScan message with topic "scan" arrives. 
        '''
        # Save time
        self.time = msg.header.stamp
        
        # Project LaserScan to points in space
        rng = np.array(msg.ranges)
        ang = np.linspace(msg.angle_min, msg.angle_max, len(msg.ranges))
        points = np.vstack((rng * np.cos(ang),
                            rng * np.sin(ang)))
                            
        # Filter long ranges
        points = points[:, rng < msg.range_max]
        
        # Use split and merge to obtain lines and publish
        self.lines = splitandmerge(points, split_thres=0.1,
                                      inter_thres=0.3,
                                      min_points=6,
                                      dist_thres=0.12,
                                      ang_thres=np.deg2rad(10))

        # Have valid points
        if self.lines is not None:            
            # Transform line to robot frame
            for i in range(0,self.lines.shape[0]):
                point1S = np.append(self.lines[i,0:2], 0)
                point2S = np.append(self.lines[i,2:4], 0)
                point1R = comp(self.robotToSensor, point1S)
                point2R = comp(self.robotToSensor, point2S)
                self.lines[i,:] = np.append(point1R[0:2],point2R[0:2])
            
            # Publish results
            publish_lines(self.lines, self.pub_lines, frame='/robot',
                          time=msg.header.stamp, ns='scan_lines_robot', color=(0,0,1))
            
            # Flag
            self.new_laser = True
    
    #===========================================================================
    def iterate(self):
        '''
        Main loop of the filter.
        '''
        # Prediction
        if self.new_odom:
            
            self.ekf.predict(self.uk.copy())
            self.new_odom = False
            self.pub = True
            
        # Update: data association, correction and state augmentation
        if self.new_laser:
            
            Innovk_List, H_k_List, S_f_List, Rk_List,idx_not_associated = self.ekf.data_association(self.lines.copy())
            self.ekf.update_position(Innovk_List, H_k_List, S_f_List, Rk_List)
            self.ekf.state_augmentation(self.lines.copy(),idx_not_associated)
            self.new_laser = False
            self.pub = True
            
        # Publish results
        if self.pub:
            self.publish_results()
            self.pub = False
    
    #===========================================================================
    def publish_results(self):
        '''
        Publishes all results from the filter.
        '''
        # Ground truth of the map of the room
        publish_lines(self.map, self.pub_lines, frame='/world', ns='gt_map', color=(0.3,0.3,0.3))
        
        msg_odom, msg_ellipse, trans, rot, room_map = get_ekf_msgs(self.ekf)
        
        publish_lines(room_map, self.pub_lines, frame='/world', ns='map', color=(0,1,0))
        

        self.pub_odom.publish(msg_odom)
        self.pub_uncertainity.publish(msg_ellipse)
        self.tfBroad.sendTransform(translation = trans,
                                   rotation = rot, 
                                   time = self.time,
                                   child = '/robot',
                                   parent = '/world')