def __init__(self, **kwargs):
    """Assemble the RatSLAM pipeline from keyword options.

    Configuration is consumed via ``kwargs.pop``; the keys
    'im_trans_y_range', 'im_rot_y_range' and 'im_odo_x_range' are
    required (a missing one raises KeyError), everything else has a
    default.
    """
    pc_shape = kwargs.pop("pose_cell_dimensions", (100, 100, 60))
    odo_shift = kwargs.pop("odo_shift_match", 20)

    # Visual odometer: estimates (speed, rotation) from raw frames.
    self.odometer = SimpleVisualOdometer(
        fov_deg=kwargs.pop("fov_deg", 50),
        im_trans_y_range=kwargs.pop("im_trans_y_range"),
        im_rot_y_range=kwargs.pop("im_rot_y_range"),
        im_odo_x_range=kwargs.pop("im_odo_x_range"),
        shift_match=odo_shift,
        trans_scale=kwargs.pop("odo_trans_scale", 100.))

    # Template store used to recognise previously seen views.
    self.view_templates = VisualTemplateCollection(
        template_decay=kwargs.pop('template_decay', 1.0),
        match_y_range=kwargs.pop('im_vt_y_range', None),
        match_x_range=kwargs.pop('im_vt_x_range', None),
        shift_match=odo_shift,
        match_threshold=kwargs.pop('vt_match_threshold', None))

    self.pose_cell_network = PoseCellNetwork(pc_shape)

    # Experience map ties pose cell activity and templates together.
    self.experience_map = ExperienceMap(
        self.pose_cell_network,
        self.view_templates,
        delta_pc_threshold=kwargs.pop("exp_delta_pc_threshold", 1.0),
        correction_factor=kwargs.pop("exp_correction_factor", 0.5),
        exp_loops=kwargs.pop("exp_loops", 100))
def __init__(self):
    """Construct the RatSLAM components and wire up ROS topics."""
    self.im_count = 0

    # Pose cell network, seeded with one spike of activity at its center.
    self.pcn = PoseCellNetwork(shape=POSE_SIZE)
    self.pc_count = 0
    self.twist_data = deque()
    self.odom_freq = ODOM_FREQ
    center = (math.floor(POSE_SIZE[0] / 2),
              math.floor(POSE_SIZE[1] / 2),
              math.floor(POSE_SIZE[2] / 2))
    self.pcn.inject(1, center)

    # Layout metadata describing the flattened 3-D pose cell array
    # carried in the Float64MultiArray message.
    dims = [
        MultiArrayDimension(label="x", size=POSE_SIZE[0],
                            stride=POSE_SIZE[0] * POSE_SIZE[1] * POSE_SIZE[2]),
        MultiArrayDimension(label="y", size=POSE_SIZE[1],
                            stride=POSE_SIZE[1] * POSE_SIZE[2]),
        MultiArrayDimension(label="th", size=POSE_SIZE[2],
                            stride=POSE_SIZE[2]),
    ]
    self.pc_layout = MultiArrayLayout(dim=dims)

    # View template matcher for loop-closure candidates.
    # TODO: put reasonable values here
    self.vts = ViewTemplates(x_range=X_RANGE, y_range=Y_RANGE,
                             x_step=X_STEP, y_step=Y_STEP,
                             im_x=IM_SIZE[0], im_y=IM_SIZE[1],
                             match_threshold=MATCH_THRESHOLD)
    self.vt_count = 0
    self.bridge = CvBridge()

    # Experience map plus the queue of visual-odometry deltas.
    self.em = ExperienceMap()
    self.em_count = 0
    self.vis_odom_data = deque()

    # ROS node, subscriptions and publications.
    rospy.init_node('posecells', anonymous=True)
    if not USE_VISUAL_ODOMETRY:
        self.sub_odom = rospy.Subscriber('navbot/odom', Odometry, self.odom_callback)
    self.sub_vis = rospy.Subscriber('navbot/camera/image', Image, self.vis_callback)
    self.pub_pc = rospy.Publisher('navbot/posecells', numpy_msg(Float64MultiArray))
    self.pub_em = rospy.Publisher('navbot/experiencemap', Pose2D)
    # Publisher for template matches (sends just the index value of a match).
    self.pub_tm = rospy.Publisher('navbot/templatematches', Int32)

    # Visual odometer fed by vis_callback when USE_VISUAL_ODOMETRY is set.
    self.vis_odom = SimpleVisualOdometer()

    if RECORD_ROSBAG:
        now = datetime.datetime.now()
        self.bag = rosbag.Bag(
            '../testdata/Output-{0}-{1}-{2}-{3}.bag'.format(
                now.month, now.day, now.hour, now.minute), 'w')
class RatslamRos():
    """ROS front end for RatSLAM.

    Subscribes to camera images (and optionally wheel odometry), feeds
    them through the pose cell network, view templates and experience
    map, and republishes the resulting state.  Can optionally record
    all traffic to a rosbag.
    """

    def __init__(self):
        self.im_count = 0

        # Pose cell network, seeded with one spike at the grid midpoint.
        self.pcn = PoseCellNetwork(shape=POSE_SIZE)
        self.pc_count = 0
        self.twist_data = deque()
        self.odom_freq = ODOM_FREQ
        midpoint = (math.floor(POSE_SIZE[0] / 2),
                    math.floor(POSE_SIZE[1] / 2),
                    math.floor(POSE_SIZE[2] / 2))
        self.pcn.inject(1, midpoint)

        # Layout for the flattened pose cell array in the ROS message.
        dim = [
            MultiArrayDimension(label="x", size=POSE_SIZE[0],
                                stride=POSE_SIZE[0] * POSE_SIZE[1] * POSE_SIZE[2]),
            MultiArrayDimension(label="y", size=POSE_SIZE[1],
                                stride=POSE_SIZE[1] * POSE_SIZE[2]),
            MultiArrayDimension(label="th", size=POSE_SIZE[2],
                                stride=POSE_SIZE[2]),
        ]
        self.pc_layout = MultiArrayLayout(dim=dim)

        # Initialize the View Templates
        # TODO: put reasonable values here
        self.vts = ViewTemplates(x_range=X_RANGE, y_range=Y_RANGE,
                                 x_step=X_STEP, y_step=Y_STEP,
                                 im_x=IM_SIZE[0], im_y=IM_SIZE[1],
                                 match_threshold=MATCH_THRESHOLD)
        self.vt_count = 0
        self.bridge = CvBridge()

        # Initialize the Experience Map
        self.em = ExperienceMap()
        self.em_count = 0
        self.vis_odom_data = deque()

        rospy.init_node('posecells', anonymous=True)
        if not USE_VISUAL_ODOMETRY:
            self.sub_odom = rospy.Subscriber('navbot/odom', Odometry, self.odom_callback)
        self.sub_vis = rospy.Subscriber('navbot/camera/image', Image, self.vis_callback)
        self.pub_pc = rospy.Publisher('navbot/posecells', numpy_msg(Float64MultiArray))
        self.pub_em = rospy.Publisher('navbot/experiencemap', Pose2D)
        # Publisher for template matches (sends just the index value of a match).
        self.pub_tm = rospy.Publisher('navbot/templatematches', Int32)

        # Set up a visual odometer
        self.vis_odom = SimpleVisualOdometer()

        if RECORD_ROSBAG:
            date = datetime.datetime.now()
            self.bag = rosbag.Bag(
                '../testdata/Output-{0}-{1}-{2}-{3}.bag'.format(
                    date.month, date.day, date.hour, date.minute), 'w')

    def vis_callback(self, data):
        """Handle a new camera frame.

        Matches the frame against the view templates, publishes the
        matched template index, and (when visual odometry is enabled)
        feeds the frame to the visual odometer.
        """
        cv_im = self.bridge.imgmsg_to_cv(data, "mono8")
        im = asarray(cv_im)
        pc_max = self.pcn.get_pc_max()
        template_match = self.vts.match(input=im, pc_x=pc_max[0],
                                        pc_y=pc_max[1], pc_th=pc_max[2])
        index = template_match.get_index()
        # TEMP - just inject with this template for now
        #self.pcn.inject( .02, template_match.location() )
        #self.pcn.inject( .2, template_match.location() )

        # Send the template match index to the viewer
        index_msg = Int32()
        index_msg.data = index
        self.pub_tm.publish(index_msg)

        if RECORD_ROSBAG:
            self.bag.write('navbot/camera/image', data)
            self.bag.write('navbot/templatematches', index_msg)

        # If using visual odometry, update the odometry information
        # using this new image.
        if USE_VISUAL_ODOMETRY:
            delta = self.vis_odom.update(im)
            self.vis_odom_data.append(delta)

    def odom_callback(self, data):
        """Queue incoming wheel odometry for the main loop."""
        twist = data.twist.twist
        # If there is barely any motion, don't bother flooding the queue with it
        if abs(twist.linear.x) > 0.001 or abs(twist.angular.z) > 0.001:
            self.twist_data.append(twist)
        if RECORD_ROSBAG:
            self.bag.write('navbot/odom', data)

    def update_posecells(self, vtrans, vrot):
        """Advance pose cells and experience map by one odometry step
        (vtrans, vrot) and publish the resulting state."""
        self.pcn.update((vtrans, vrot))
        pc_max = self.pcn.get_pc_max()
        self.em.update(vtrans, vrot, pc_max)
        #pc = sparse.csr_matrix( self.pcn.posecells ) # Create scipy sparse matrix
        pc = self.pcn.posecells

        em_msg = Pose2D()
        em_msg.x, em_msg.y = self.em.get_current_point()  # TODO: add theta as well
        self.pub_pc.publish(self.pc_layout, pc.ravel())
        self.pub_em.publish(em_msg)

        if RECORD_ROSBAG:
            self.bag.write('navbot/experiencemap', em_msg)
            # BUG FIX: numpy_msg() takes a message *class* and returns a
            # wrapped class -- calling it with (layout, data) raised a
            # TypeError whenever bag recording was enabled.  Build a real
            # message instance instead.
            pc_msg = numpy_msg(Float64MultiArray)(layout=self.pc_layout,
                                                  data=pc.ravel())
            self.bag.write('navbot/posecells', pc_msg)

    def run(self):
        """Main loop: drain queued wheel and visual odometry into pose
        cell updates until ROS shuts down."""
        count = 0
        start_time = time.time()
        while not rospy.is_shutdown():
            if self.twist_data:
                twist = self.twist_data.popleft()
                # Twist carries velocities; divide by the odometry rate
                # to get per-step displacement.
                vtrans = twist.linear.x / self.odom_freq
                vrot = twist.angular.z / self.odom_freq
                self.update_posecells(vtrans, vrot)
                count += 1
            if self.vis_odom_data:
                vtrans, vrot = self.vis_odom_data.popleft()
                self.update_posecells(vtrans, vrot)
                count += 1
class RatSLAM:
    """ A complete rat slam instance (Matlab version: equivalent to rs_main) """

    def __init__(self, **kwargs):
        """Build the pipeline from keyword options (consumed via pop).

        'im_trans_y_range', 'im_rot_y_range' and 'im_odo_x_range' are
        required keys; everything else has a default.
        """
        pose_cell_shape = kwargs.pop("pose_cell_dimensions", (100, 100, 60))
        shift_match = kwargs.pop("odo_shift_match", 20)

        # Visual odometer: estimates (speed, rotation) from raw frames.
        self.odometer = SimpleVisualOdometer(
            fov_deg=kwargs.pop("fov_deg", 50),
            im_trans_y_range=kwargs.pop("im_trans_y_range"),
            im_rot_y_range=kwargs.pop("im_rot_y_range"),
            im_odo_x_range=kwargs.pop("im_odo_x_range"),
            shift_match=shift_match,
            trans_scale=kwargs.pop("odo_trans_scale", 100.))

        # Template store used to recognise previously seen views.
        self.view_templates = VisualTemplateCollection(
            template_decay=kwargs.pop('template_decay', 1.0),
            match_y_range=kwargs.pop('im_vt_y_range', None),
            match_x_range=kwargs.pop('im_vt_x_range', None),
            shift_match=shift_match,
            match_threshold=kwargs.pop('vt_match_threshold', None))

        self.pose_cell_network = PoseCellNetwork(pose_cell_shape)

        self.experience_map = ExperienceMap(
            self.pose_cell_network,
            self.view_templates,
            delta_pc_threshold=kwargs.pop("exp_delta_pc_threshold", 1.0),
            correction_factor=kwargs.pop("exp_correction_factor", 0.5),
            exp_loops=kwargs.pop("exp_loops", 100))

    def update(self, video_frame):
        """Process one video frame (already converted to grayscale by
        the caller) through odometry, templates, pose cells and the
        experience map.
        """
        # TODO save experience map

        # get odometry from video
        speed, rotation, odo = self.odometer.estimate_odometry(video_frame)

        # get most active visual template (based on current video_frame)
        vt_id = self.view_templates.match(video_frame, odo[0], odo[1], odo[2])

        # update pose cells
        self.pose_cell_network.update(self.view_templates.templates[vt_id],
                                      speed, rotation)

        # get 'best' center of pose cell activity
        # NOTE(review): 'activty' looks misspelled but matches the method
        # name PoseCellNetwork actually defines -- do not "fix" here alone.
        px, py, pr = self.pose_cell_network.activty_packet_center()

        # run an iteration of the experience map
        # BUG FIX: was bare `experience_map.update(...)`, a NameError at
        # runtime -- the attribute is self.experience_map.
        self.experience_map.update(vt_id, speed, rotation, (px, py, pr))

        # Parenthesized so this works under both Python 2 and 3.
        print(self.experience_map.get_current_experience())