def pickle_to_carmen(filename, outfilename):
    # Convert a pickled log of timestamped poses into a CARMEN log file.
    myfile = open(filename, 'r')
    messages = []

    # Read pickled records until the end of the file is reached.
    while True:
        try:
            data = cPickle.load(myfile)
        except EOFError:
            break

        x = data['x']
        y = data['y']
        theta = data['theta']
        ts = data['time_stamp']

        # Wrap the pose in a laser message with an empty range reading.
        message = robot_laser_message(ts, [x, y, tklib_normalize_theta(theta)], [])
        messages.append(message)
    myfile.close()

    # Write one CARMEN-formatted line per message.
    outfile = open(outfilename, 'w')
    for m in messages:
        outfile.write(m.carmen_str() + "\n")
    outfile.close()
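# Example usage (a hedged sketch, not from the original source: the file names
# below are hypothetical, and cPickle plus the CARMEN message helpers are
# assumed to be imported at module level):
#
#   pickle_to_carmen("poses.pck", "poses.clf")
#
# Each pickled record is expected to be a dict with 'x', 'y', 'theta', and
# 'time_stamp' keys; the output file gets one CARMEN log line per record.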
def get_initial_configurations(self, pose_st):
    ret_configurations = []

    # Propose all the destinations and the first alignments for each of them.
    # Start by finding a nearby start pose whose heading roughly matches.
    s_I = kNN_index(pose_st[0:2], transpose(self.start_poses)[0:2], 10)
    s_i = None
    for i in s_I:
        if fabs(tklib_normalize_theta(self.start_poses[int(i)][2] - pose_st[2])) < pi / 4.0:
            s_i = int(i)
            break

    if s_i is None:
        print "couldn't find a candidate start location"
        return []

    # Collect the initial alignments for every candidate destination,
    # skipping the degenerate case where the destination is the start.
    for d_i, dst in enumerate(self.dest_poses):
        if tklib_euclidean_distance(dst[0:2], pose_st[0:2]) < 10e-5:
            continue
        start_confs = self.partitions[s_i][d_i].init_alignments(s_i, d_i)
        ret_configurations.extend(start_confs)

    return ret_configurations
def asPosesRadians(self, interval=100, endTime=None):
    if endTime is None:
        endTime = self.endTime
    path = []
    # Sample the trajectory every `interval` time units, converting the
    # stored heading from degrees to a normalized angle in radians.
    for t in range(self.startTime, endTime + 1, interval):
        fx, fy, ftheta = self.location(t)
        path.append((fx, fy, tklib_normalize_theta(radians(ftheta))))
    return path
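# Example usage (a hedged sketch; `track` stands for an instance of the
# containing class, and the 100-unit interval matches the default above):
#
#   poses = track.asPosesRadians(interval=100)
#   for px, py, ptheta in poses:
#       print px, py, ptheta   # ptheta is already normalized and in radians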
def callback(self, the_type, msg):
    if not self.reading_seen and the_type == "front_laser":
        # First laser reading: initialize the pose with a scan match.
        laser_pose = msg["laser_pose"]
        laser_range = msg["range"]
        self.prev_pose = laser_pose
        self.reading_seen = True

        curr_pose = carmen_util_vasco_scan_match(
            laser_pose, laser_range,
            arange(-pi / 2.0, pi / 2.0 + 0.0001, pi / 180.0), 1)
        self.mapper.update(array(curr_pose) + self.init_pose, laser_range)

    elif the_type == "front_laser":
        laser_pose = msg["laser_pose"]
        laser_range = msg["range"]

        # Predict the new pose from the odometry difference; the scan match
        # below recomputes the estimate.
        diff = array(laser_pose) - self.prev_pose
        diff[2] = tklib_normalize_theta(diff[2])
        curr_pose = self.mapper.get_pose() + diff
        curr_pose[2] = tklib_normalize_theta(self.prev_pose[2])

        curr_pose = carmen_util_vasco_scan_match(
            laser_pose, laser_range,
            arange(-pi / 2.0, pi / 2.0 + 0.0001, pi / 180.0), 0)
        self.current_pose = array(curr_pose) + self.init_pose

        # Fold the reading into the gridmap and publish the updated map.
        self.mapper.update(self.current_pose, laser_range)
        self.mapper.map.publish()

        # Update the pose and publish it.
        self.prev_pose = laser_pose
        x, y, theta = self.current_pose
        pyTklib.carmen_publish_gridmapping_pose_message(x, y, theta)

        # Show the map.
        if self.show_map:
            self.plot_map(self.mapper, self.i)
        self.i += 1
def directionsToWaypoints(self, directions, x, y, z, yaw, pitch, roll):
    start_xyz = na.array((x, y, z))

    # Find the topological-map node closest to the starting location.
    loc_index, = kNN_index(
        (x, y, z),
        na.transpose([self.m4du.tmap_locs_3d[k] for k in self.m4du.tmap_keys]),
        1)
    topo_key = self.m4du.tmap_keys[int(loc_index)]

    # Normalize the yaw into [0, 2*pi) and snap it to the nearest
    # discretized orientation.
    yaw = tklib_normalize_theta(yaw)
    if yaw < 0:
        yaw += 2 * math.pi
    orient = yaw
    print "yaw", math.degrees(yaw)

    orients = [na.append(self.m4du.orients, 360.0)]
    print "orients", orients
    i_tmp, = kNN_index([math.degrees(orient)], orients, 1)
    print "i", i_tmp
    i_tmp = int(i_tmp % len(self.m4du.orients))

    # Build the viewpoint key ("<topo node>_<orientation>") and look up its index.
    vp = str(topo_key) + "_" + str(self.m4du.orients[i_tmp])
    print "topo_key", topo_key, topo_key.__class__
    print "vp", vp
    vp_i = self.m4du.vpt_to_num[vp]
    print "vp", self.m4du.vpt_to_num
    print "vp", vp_i

    # Run the direction-following model from that viewpoint.
    path = self.mbWnd.runSentence(directions, vp_i)

    # Convert the returned viewpoint sequence back into (xyz, heading) pairs.
    path_as_locs = [(start_xyz, orient)]
    for p in path:
        topo, orient = p.split("_")
        xyz = self.m4du.tmap_locs_3d[float(topo)].tolist()
        theta = radians(float(orient))
        path_as_locs.append((xyz, theta))

    return path_as_locs
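# Example usage (a hedged sketch; `browser` stands for an instance of the
# containing class, and the sentence and pose values are made up). Arguments
# follow the (x, y, z, yaw, pitch, roll) convention of the method above, with
# angles in radians:
#
#   path = browser.directionsToWaypoints(
#       "go down the hallway and stop at the elevators",
#       1.0, 2.0, 0.0, radians(90.0), 0.0, 0.0)
#   # path is a list of ((x, y, z), theta) pairs, starting at the query pose.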
def followDirections(self):
    txt = str(self.commandText.toPlainText())
    print "txt", txt

    if self.lcmApp.cur_pose is not None:
        print "loc", self.lcmApp.cur_pose[0:3]

        # Find the topological-map node closest to the robot's current location.
        loc_index, = kNN_index(
            self.lcmApp.cur_pose[0:3],
            transpose([self.m4du.tmap_locs_3d[k] for k in self.m4du.tmap_keys]),
            1)
        topo_key = self.m4du.tmap_keys[int(loc_index)]

        if isChecked(self.useRobotYawBox):
            # Use the robot's yaw to pick a single starting viewpoint.
            yaw = self.lcmApp.cur_pose[3]
            print "yaw", yaw
            yaw = tklib_normalize_theta(yaw)
            if yaw < 0:
                yaw += 2 * math.pi
            orient = yaw
            print "yaw", math.degrees(yaw)

            orients = [na.append(self.m4du.orients, 360.0)]
            i_tmp, = kNN_index([math.degrees(orient)], orients, 1)
            i_tmp = int(i_tmp % len(self.m4du.orients))

            vp = str(topo_key) + "_" + str(self.m4du.orients[i_tmp])
            vp_i = self.m4du.vpt_to_num[vp]
            vps = [vp_i]
        else:
            # Otherwise allow any discretized orientation at the starting node.
            vps = []
            for orient in self.m4du.orients:
                vp = str(topo_key) + "_" + str(orient)
                vp_i = self.m4du.vpt_to_num[vp]
                vps.append(vp_i)
    else:
        vps = None

    self.path = self.modelBrowser.runSentence(txt, vps)