def __init__(self, world_frame='world'):
    """Republish the message-store database and serve reload requests.

    Blocks in rospy.spin() until shutdown.
    """
    rospy.init_node("db_republisher")
    self.msg_store = MessageStoreProxy()
    service = rospy.Service('apps/database/reload', Empty, self._cb_db_reload)
    # Identity transform linking the fixed 'world' frame to the given frame.
    self.world = TransformStamped()
    self.world.header.frame_id = 'world'
    self.world.child_frame_id = world_frame
    self.world.transform.rotation.w = 1.0
    rospy.loginfo("Started helping hand ...")
    # Publish the database once at start-up; abort the node if that fails.
    if not self.reload_db():
        raise Exception('Failed to reload the database')
    # Make the program stay awake
    rospy.spin()
def __init__(self, name):
    """Bring up the move-to-waypoint action server and its action clients."""
    rospy.loginfo("Starting %s ..." % name)
    self._as = PNPSimplePluginServer(
        name,
        MoveToWaypointAction,
        execute_cb=self.execute_cb,
        auto_start=False
    )
    # Waypoints live in a configurable mongodb database/collection.
    db_name = rospy.get_param("~db_name", "semantic_map")
    collection_name = rospy.get_param("~collection_name", "waypoints")
    self.msg_store = MessageStoreProxy(database=db_name,
                                       collection=collection_name)
    rospy.loginfo("Creating tracker client")
    self.stop_client = SimpleActionClient("/stop_tracking_person", TrackPersonAction)
    self.stop_client.wait_for_server()
    rospy.loginfo("Tracker client connected")
    self.client = SimpleActionClient("move_base", MoveBaseAction)
    rospy.loginfo("Waiting for move_base client.")
    self.client.wait_for_server()
    self._as.start()
    rospy.loginfo("... done")
def export_data(tmap, data_window_start, data_window_end, data_window_size, data_windows):
    """Dump navigation statistics for a series of sliding time windows.

    Both window bounds advance by ``data_window_size`` per iteration, so each
    window keeps the fixed width ``data_window_end - data_window_start``.
    """
    msg_store = MessageStoreProxy(collection='nav_stats')
    for window in range(data_windows):
        offset = data_window_size * window
        window_start = data_window_start + offset
        window_end = data_window_end + offset
        # Select stats whose 'epoch' meta field falls strictly inside the window.
        meta_query = {"epoch": {"$gt": to_epoch(window_start),
                                "$lt": to_epoch(window_end)}}
        nav_stats = msg_store.query(NavStatistics._type,
                                    {'topological_map': tmap},
                                    meta_query)
        write_stats(window_start, window_end, nav_stats)
def remove_edge(self, edge_name): #print 'removing edge: '+edge_name rospy.loginfo('Removing Edge: ' + edge_name) msg_store = MessageStoreProxy(collection='topological_maps') query = {"edges.edge_id": edge_name, "pointset": self.nodes.name} query_meta = {} query_meta["pointset"] = self.nodes.name query_meta["map"] = self.nodes.map available = msg_store.query(TopologicalNode._type, query, query_meta) if len(available) >= 1: for i in available: print i[0] for j in i[0].edges: if j.edge_id == edge_name: i[0].edges.remove(j) msg_store.update(i[0], query_meta, query, upsert=True) else: rospy.logerr("Impossible to store in DB " + str(len(available)) + " waypoints found after query") rospy.logerr("Available data: " + str(available))
def remove_node(self, node_name): rospy.loginfo('Removing Node: ' + node_name) msg_store = MessageStoreProxy(collection='topological_maps') query = {"name": node_name, "pointset": self.nodes.name} query_meta = {} query_meta["pointset"] = self.nodes.name query_meta["map"] = self.nodes.map available = msg_store.query(TopologicalNode._type, query, query_meta) node_found = False if len(available) == 1: node_found = True rm_id = str(available[0][1]['_id']) print rm_id else: rospy.logerr("Node not found " + str(len(available)) + " waypoints found after query") #rospy.logerr("Available data: "+str(available)) if node_found: query_meta = {} query_meta["pointset"] = self.nodes.name edges_to_rm = [] message_list = msg_store.query(TopologicalNode._type, {}, query_meta) for i in message_list: for j in i[0].edges: if j.node == node_name: edge_rm = j.edge_id edges_to_rm.append(edge_rm) for k in edges_to_rm: print 'remove: ' + k self.remove_edge(k) msg_store.delete(rm_id) return True else: return False
def __init__(self, start_time=(8, 0), end_time=(18, 0),
             observe_interval=rospy.Duration(1800)):
    """Set up budget bookkeeping and connect to the counter services."""
    rospy.loginfo("Initiating budgetting control...")
    # Hand-allocated budget is only spent between start_time and end_time
    # (by default 8am to 6pm).
    self.observe_interval = observe_interval
    self._start_time = datetime.time(*start_time)
    self._end_time = datetime.time(*end_time)
    self.budget_alloc = list()
    self.available_budget = 0
    today = datetime.datetime.fromtimestamp(rospy.Time.now().secs)
    # Budget updates are tracked per calendar day (midnight of 'today').
    self._update_budget_date = datetime.datetime(today.year, today.month, today.day, 0, 0)
    self.bidder = ExplorationBidder()
    self.soma_config = rospy.get_param("~soma_config", "activity_exploration")
    self._db = MessageStoreProxy(collection="activity_exploration_log")
    # Optional service proxies to the people / activity / scene counters; an
    # empty parameter value disables the corresponding connection.
    people_srv_name = rospy.get_param(
        "~people_srv", "/people_counter/people_best_time_estimate")
    if people_srv_name != "":
        rospy.loginfo("Connecting to %s service..." % people_srv_name)
        self._people_srv = rospy.ServiceProxy(people_srv_name, PeopleBestTimeEstimateSrv)
        self._people_srv.wait_for_service()
    act_srv_name = rospy.get_param(
        "~activity_srv", "/activity_counter/activity_best_time_estimate")
    if act_srv_name != "":
        rospy.loginfo("Connecting to %s service..." % act_srv_name)
        self._act_srv = rospy.ServiceProxy(act_srv_name, ActivityBestTimeEstimateSrv)
        self._act_srv.wait_for_service()
    scene_srv_name = rospy.get_param(
        "~scene_srv", "/scene_counter/scene_best_time_estimate")
    if scene_srv_name != "":
        rospy.loginfo("Connecting to %s service..." % scene_srv_name)
        self._scene_srv = rospy.ServiceProxy(scene_srv_name, SceneBestTimeEstimateSrv)
        self._scene_srv.wait_for_service()
def get_scene_and_store_in_db():
    """Call /get_scene, transform each block into the 'table' frame and log it.

    On success the scene db and the ROSPlan knowledge base are cleared before
    the freshly detected blocks are inserted.  Returns the service's success
    flag.
    """
    get_scene_service = rospy.ServiceProxy('/get_scene', GetScene)
    result = get_scene_service()
    rospy.loginfo("Response from get_scene service: \n{}".format(result))
    if result.success:
        # Start from a clean slate: previous scene entries and KB facts go.
        clear_blocks_from_scene_db()
        clear_locations_from_scene_db()
        clear_service = rospy.ServiceProxy('/rosplan_knowledge_base/clear', Empty)
        result_clear = clear_service()
        msg_store = MessageStoreProxy()
        listener = tf.TransformListener()
        for block in result.blocks:
            # The first wait makes sure the transform exists at all; the
            # second pins it down at the current time before transforming.
            listener.waitForTransform("table", block.pose.header.frame_id,
                                      rospy.Time(), rospy.Duration(4.0))
            now = rospy.Time.now()
            listener.waitForTransform("table", block.pose.header.frame_id,
                                      now, rospy.Duration(4.0))
            block.pose = listener.transformPose("table", block.pose)
            rospy.loginfo(
                "Storing following block in scene db: \n{}".format(block))
            msg_store.insert_named('block{}'.format(block.id), block)
    return result.success
def __init__(self):
    """Start the configuration-services node: tf listener, message store and
    the tf/joint/dmp capture services.  Blocks in rospy.spin()."""
    rospy.init_node('configuration_services')
    # TF
    self.tf2_buffer = tf2_ros.Buffer()
    self.tf2_listener = tf2_ros.TransformListener(self.tf2_buffer)
    # Create database proxy
    self._msg_store = MessageStoreProxy()
    # Create database reload service
    try:
        rospy.wait_for_service('/apps/database/reload', 0.1)
        self._database_service = rospy.ServiceProxy(
            '/apps/database/reload', Empty)
    except Exception as e:
        rospy.logerr('Could not initialize {}'.format(rospy.get_name()))
        rospy.loginfo('Exceptions: {}'.format(e))
        # BUGFIX: __init__ must return None; 'return 0' raised a TypeError.
        return
    # Define the service proxies
    try:
        save_tf_srvs = rospy.Service('tf_capture', CaptureTF,
                                     self._handle_tf_save)
        save_joint_srvs = rospy.Service('joint_capture', CaptureJoint,
                                        self._handle_joint_save)
        save_dmp_srvs = rospy.Service('dmp_capture', CaptureDMP,
                                      self._handle_dmp_save)
    except Exception as e:
        rospy.logerr('Could not initialize {}'.format(rospy.get_name()))
        rospy.loginfo('Exceptions: {}'.format(e))
        return
    # Notify the user the node is running (typo 'Hepling' fixed)
    rospy.loginfo("Helping hand is running!")
    rospy.spin()
def init_kb(prefix=None):
    """Populate the module-level ``services`` dict with ROSPlan service
    proxies under ``prefix`` (default '/kcl_rosplan') and create the global
    message-store handle ``db``."""
    if prefix is None:
        prefix = "/kcl_rosplan"
    global db
    global services
    # Service key -> (relative service name, service type).
    proxy_spec = {
        'get_current_instances': ("/get_current_instances", GetInstanceService),
        'update_knowledge_base': ("/update_knowledge_base", KnowledgeUpdateService),
        'get_domain_predicates': ("/get_domain_predicates", GetDomainAttributeService),
        'get_current_goals': ("/get_current_goals", GetAttributeService),
        'get_current_knowledge': ("/get_current_knowledge", GetAttributeService),
        'get_domain_operators': ("/get_domain_operators", GetDomainOperatorService),
        'get_domain_types': ("/get_domain_types", GetDomainTypeService),
        'planning': ("/planning_server", Empty),
        'query': ("/query_knowledge_base", KnowledgeQueryService),
        'clear_knowledge': ("/clear_knowledge_base", Empty),
    }
    for key, (suffix, srv_type) in proxy_spec.items():
        services[key] = rospy.ServiceProxy(prefix + suffix, srv_type)
    db = MessageStoreProxy()
def get_tagged_nodes_from_mongo(self, tag):
    """Return the names of every node carrying ``tag`` in its meta information.

    Note the return shape kept for caller compatibility: a list containing a
    single list of node names.
    """
    msg_store = MessageStoreProxy(collection='topological_maps')
    query = {"_meta.tag": tag, "pointset": self.nodes.name}
    query_meta = {"pointset": self.nodes.name, "map": self.nodes.map}
    available = msg_store.query(
        strands_navigation_msgs.msg.TopologicalNode._type, query, query_meta)
    # Node names are read from the meta document (second tuple element),
    # not from the message itself.
    names = [entry[1]['node'] for entry in available]
    return [names]
def __init__(self, soma_map, soma_config, minute_interval=1,
             window_interval=10, periodic_type="weekly"):
    """
    Initialize a set of trajectory occurrence frequency classified by
    regions, days, hours, minute intervals respectively. Hours will be set
    by default to [0-23]. Days are set as days of a week [0-6] where Monday
    is 0. Argument periodic_type can be set either 'weekly' or 'monthly'.
    """
    self.soma = soma_map
    self.soma_config = soma_config
    self.periodic_type = periodic_type
    # 'weekly' cycles over 7 week days, anything else over 31 month days.
    num_days = 7 if self.periodic_type == "weekly" else 31
    self.periodic_days = list(range(num_days))
    self.minute_interval = minute_interval
    self.window_interval = window_interval
    self.ms = MessageStoreProxy(collection="occurrence_rates")
    self.reinit()
def __init__(self, wait_time=60):
    """Synchronise change-detection, UBD and image streams and log scenes."""
    self._counter = 0
    self.img = Image()
    self.ubd = list()
    self.change = list()
    self._db = MessageStoreProxy(collection="ubd_scene_log")
    # ptu: keep a rolling window of wait_time flags recording recent motion.
    self._max_dist = 0.1
    self._wait_time = wait_time
    rospy.loginfo("Subcribe to /ptu/state...")
    self._ptu = JointState()
    self._ptu.position = [0, 0]
    self._ptu_counter = 0
    self._is_ptu_changing = [True] * wait_time
    rospy.Subscriber("/ptu/state", JointState, self._ptu_cb, None, 1)
    # robot pose: same style of rolling movement flags.
    rospy.loginfo("Subcribe to /robot_pose...")
    self._robot_pose = Pose()
    self._robot_pose_counter = 0
    self._is_robot_moving = [True] * wait_time
    rospy.Subscriber("/robot_pose", Pose, self._robot_cb, None, 1)
    # logging stuff: approximate-time sync of the three input topics.
    scene_topic = rospy.get_param("~scene_topic", "/change_detection/detections")
    ubd_topic = rospy.get_param("~ubd_topic", "/vision_logging_service/log")
    image_topic = rospy.get_param("~image_topic", "/head_xtion/rgb/image_rect_color")
    subs = [
        message_filters.Subscriber(scene_topic, ChangeDetectionMsg),
        message_filters.Subscriber(ubd_topic, LoggingUBD),
        message_filters.Subscriber(image_topic, Image),
    ]
    ts = message_filters.ApproximateTimeSynchronizer(subs, queue_size=1, slop=0.5)
    ts.registerCallback(self.cb)
    rospy.Timer(rospy.Duration(60), self.to_log)
def test_replication_with_query(self):
    """Replicate only Pose entries (selected via --query) and verify that
    exactly those five entries reach the destination datacentre."""
    replication_db = "replication_test_with_query"
    replication_col = "replication_test_with_query"
    # connect to destination for replication
    try:
        self.assertTrue(wait_for_mongo(ns="/datacentre2"), "wait for mongodb server")
        dst_client = import_MongoClient()("localhost", 49163)
        count = dst_client[replication_db][replication_col].count()
        self.assertEqual(count, 0, "No entry in destination")
    except pymongo.errors.ConnectionFailure:
        self.fail("Failed to connect to destination for replication")
    # insert entries to move: five Wrench and five Pose messages
    self.assertTrue(wait_for_mongo(), "wait for mongodb server")
    msg_store = MessageStoreProxy(
        database=replication_db, collection=replication_col)
    for i in range(5):
        wrench = Wrench()
        wrench.force.x = i
        msg_store.insert(wrench)
        pose = Pose()
        pose.position.x = i
        msg_store.insert(pose)
    # move entries with query; only the Pose entries should be replicated
    rospy.sleep(3)
    query = {'_meta.stored_type': Pose._type}
    retcode = subprocess.check_call([
        get_script_path(), '--move-before', '0',
        '--query', json_util.dumps(query),
        replication_db, replication_col])
    self.assertEqual(retcode, 0, "replicator_client returns code 0")
    # check if replication succeeded
    rospy.sleep(3)
    count = dst_client[replication_db][replication_col].count()
    self.assertEqual(count, 5, "replicated entry exists in destination")
def publish_stats(self):
    """Publish the accumulated navigation statistics and archive them in the
    'nav_stats' mongodb collection with queryable meta information."""
    date_at_node_str = self.stat.date_at_node.strftime('%A, %B %d %Y, at %H:%M:%S hours')
    pubst = NavStatistics()
    pubst.edge_id = self.stat.edge_id
    pubst.status = self.stat.status
    pubst.origin = self.stat.origin
    pubst.target = self.stat.target
    pubst.topological_map = self.stat.topological_map
    pubst.final_node = self.stat.final_node
    pubst.time_to_waypoint = self.stat.time_to_wp
    pubst.operation_time = self.stat.operation_time
    pubst.date_started = self.stat.get_start_time_str()
    pubst.date_at_node = date_at_node_str
    pubst.date_finished = self.stat.get_finish_time_str()
    self.stats_pub.publish(pubst)
    # Meta fields allow the stats to be queried by epoch and pointset later.
    meta = {
        "type": "Topological Navigation Stat",
        "epoch": calendar.timegm(self.stat.date_at_node.timetuple()),
        "date": date_at_node_str,
        "pointset": self.stat.topological_map,
    }
    msg_store = MessageStoreProxy(collection='nav_stats')
    msg_store.insert(pubst, meta)
def remove_edge(self, edge_name):
    """Remove the edge ``edge_name`` from its origin node and store the node.

    Edge ids are formatted '<origin>_<destination>'.
    NOTE(review): origin names containing '_' would be split incorrectly
    here — confirm node naming conventions.
    """
    rospy.loginfo('Removing Edge: ' + edge_name)
    edged = edge_name.split('_')
    node_name = edged[0]
    msg_store = MessageStoreProxy(collection='topological_maps')
    query = {"name": node_name, "pointset": self.name}
    query_meta = {}
    query_meta["pointset"] = self.name
    query_meta["map"] = self.map
    available = msg_store.query(TopologicalNode._type, query, query_meta)
    if len(available) == 1:
        node = available[0][0]
        # Rebuild the edge list instead of calling remove() while iterating
        # it: mutating a list during iteration skips the element that
        # follows each removal.
        node.edges = [e for e in node.edges if e.node != edged[1]]
        msg_store.update(node, query_meta, query, upsert=True)
    else:
        rospy.logerr("Impossible to store in DB " + str(len(available)) + " waypoints found after query")
        rospy.logerr("Available data: " + str(available))
def add_edge(self, or_waypoint, de_waypoint, action, top_vel=0.55):
    """Add an edge from ``or_waypoint`` to ``de_waypoint`` using ``action``.

    The origin node must match exactly one document in the database and an
    edge to the same destination must not already exist.  ``top_vel`` is a
    new optional parameter generalising the previously hard-coded maximum
    velocity of 0.55; existing callers are unaffected.
    """
    rospy.loginfo('Adding Edge from ' + or_waypoint + ' to ' + de_waypoint + ' using ' + action)
    node_name = or_waypoint
    msg_store = MessageStoreProxy(collection='topological_maps')
    query = {"name": node_name, "pointset": self.name}
    query_meta = {}
    query_meta["pointset"] = self.name
    query_meta["map"] = self.map
    available = msg_store.query(TopologicalNode._type, query, query_meta)
    if len(available) == 1:
        # Refuse to add a duplicate edge to the same destination.
        found = False
        for i in available[0][0].edges:
            if i.node == de_waypoint:
                found = True
                break
        if not found:
            edge = Edge()
            edge.edge_id = "{0}_{1}".format(or_waypoint, de_waypoint)
            edge.node = de_waypoint
            edge.action = action
            edge.top_vel = top_vel
            edge.map_2d = available[0][0].map
            available[0][0].edges.append(edge)
            msg_store.update(available[0][0], query_meta, query, upsert=True)
        else:
            rospy.logerr("Edge already exist: Try deleting it first")
    else:
        rospy.logerr("Impossible to store in DB " + str(len(available)) + " waypoints found after query")
        rospy.logerr("Available data: " + str(available))
def __init__(self, soma_map, soma_conf, config_file=None):
    """Set up the SOMA ROI manager: stores, interactive marker server and
    menus, then block in rospy.spin()."""
    self.soma_map = soma_map
    self.soma_conf = soma_conf
    if config_file:
        self._config_file = config_file
    else:
        # Fall back to the package-provided default configuration file.
        rp = RosPack()
        path = rp.get_path('soma_roi_manager') + '/config/'
        self._config_file = path + 'default.json'
    # Per-object bookkeeping, keyed by object id / type.
    self._soma_obj_ids = dict()
    self._soma_obj_msg = dict()
    self._soma_obj_roi_ids = dict()
    self._soma_obj_roi = dict()
    self._soma_obj_type = dict()
    self._soma_obj_pose = dict()
    self._interactive = True
    self._msg_store = MessageStoreProxy(collection="soma_roi")
    self._gs_store = GeoSpatialStoreProxy(db="geospatial_store", collection="soma")
    self._server = InteractiveMarkerServer("soma_roi")
    self._init_types()
    self._init_menu()
    self.load_objects()
    self.update_objects()
    rospy.spin()
def get_node_tags_from_mongo(self, req):
    """Service callback: return (success, tags) for node ``req.node_name``.

    success is False (with empty tags) unless exactly one node matches.
    """
    msg_store = MessageStoreProxy(collection='topological_maps')
    query = {"name": req.node_name, "pointset": self.nodes.name}
    query_meta = {"pointset": self.nodes.name, "map": self.nodes.map}
    available = msg_store.query(
        strands_navigation_msgs.msg.TopologicalNode._type, query, query_meta)
    if len(available) != 1:
        return False, []
    # The meta information for a node is in the second part of the tuple
    # returned by the message store query; 'tag' may be absent entirely.
    meta = available[0][1]
    return True, meta.get('tag', [])
def init():
    """Connect to the task-event collections and run the summary analysis."""
    rospy.init_node("task_summary")
    db_name = 'message_store'
    # Source and destination live on the same local mongodb instance.
    event_col_name = 'task_events_unique'
    event_msg_store = MessageStoreProxy(database=db_name, collection=event_col_name)
    task_event_mongo_client = Connection('localhost', 62345)
    event_collection = task_event_mongo_client[db_name][event_col_name]
    summary_col_name = 'task_summaries'
    task_summary_mongo_client = Connection('localhost', 62345)
    summary_collection = task_summary_mongo_client[db_name][summary_col_name]
    # Analysis window, UK local time.
    tz = pytz.timezone(pytz.country_timezones['gb'][0])
    analysis_start = datetime(2016, 5, 23, 5, 00, tzinfo=tz)
    analysis_end = datetime(2016, 8, 10, 23, 00, tzinfo=tz)
    create_task_summary_docs(event_msg_store, event_collection,
                             summary_collection, reprocess=False)
    summarise_actions(summary_collection,
                      start_date=analysis_start, end_date=analysis_end)
def update_edge(self, edge_id, action, top_vel):
    """Update the action and/or top_vel of the edge ``edge_id``.

    Falsy ``action`` / ``top_vel`` arguments leave the respective field
    unchanged.  Returns (True, "") on success, (False, reason) otherwise.
    """
    msg_store = MessageStoreProxy(collection='topological_maps')
    # Edge ids are '<origin>_<destination>'; query the origin node by name
    # within the current pointset.
    query = {"name": edge_id.split('_')[0], "pointset": self.name}
    # NOTE(review): pointset comes from self.name but map from
    # self.nodes.map — confirm both refer to the same map.
    query_meta = {}
    query_meta["pointset"] = self.name
    query_meta["map"] = self.nodes.map
    # query() returns (message, meta) tuples for each matching document.
    available = msg_store.query(TopologicalNode._type, query, query_meta)
    if len(available) != 1:
        rospy.logerr("Impossible to store in DB " + str(len(available)) + " waypoints found after query")
        rospy.logerr("Available data: " + str(available))
        return False, "no edge found or multiple edges found"
    for edge in available[0][0].edges:
        if edge.edge_id == edge_id:
            edge.action = action or edge.action
            edge.top_vel = top_vel or edge.top_vel
    msg_store.update(available[0][0], query_meta, query, upsert=True)
    return True, ""
def publish(self): rospy.sleep(self.delay) for i in range(self.count): self.publisher.publish(i) self.rate.sleep() if __name__ == '__main__': rospy.init_node("mongodb_log_test_publisher") # topic, delay, rate, count to_publish = [ ('test_0', rospy.Duration(10), rospy.Rate(1), 20), ('test_1', rospy.Duration(20), rospy.Rate(1), 20) ] publishers = map(lambda tup: TestPublisher(*tup), to_publish) map(lambda pub: pub.thread.start(), publishers) map(lambda pub: pub.thread.join(), publishers) # now I should be able to get these back from the datacentre for target in to_publish: msg_store = MessageStoreProxy(database='roslog', collection=target[0]) print len(msg_store.query(Int64._type)) == target[3]
def __init__(self):
    """Set up the skeleton logger: tf listener, subscribers, publishers and,
    when requested, the mongodb message store."""
    self.baseFrame = '/tracker_depth_frame'
    # The 15 tracked joints: head/torso first, then left and right limbs.
    self.joints = [
        'head', 'neck', 'torso',
        'left_shoulder', 'left_elbow', 'left_hand',
        'left_hip', 'left_knee', 'left_foot',
        'right_shoulder', 'right_elbow', 'right_hand',
        'right_hip', 'right_knee', 'right_foot',
    ]
    self.data = {}               # current tf frame data for 15 joints
    self.accumulate_data = {}    # accumulates multiple tf frames
    self.users = {}              # tracker state message, timepoint and UUID
    self.map_info = "don't know"      # topological map name
    self.current_node = "don't care"  # topological node waypoint
    self.robot_pose = Pose()          # pose of the robot
    # logging to mongo:
    self._with_logging = rospy.get_param("~log_skeleton", "false")
    self._message_store = rospy.get_param("~message_store", "people_skeleton")
    # listeners:
    self.tf_listener = tf.TransformListener()
    rospy.Subscriber("skeleton_data/state", skeleton_tracker_state,
                     self.tracker_state_callback)
    rospy.Subscriber("/current_node", String,
                     callback=self.node_callback, queue_size=10)
    rospy.Subscriber("/robot_pose", Pose,
                     callback=self.robot_callback, queue_size=10)
    self.topo_listerner = rospy.Subscriber("/topological_map", TopologicalMap,
                                           self.map_callback, queue_size=10)
    # publishers:
    self.publish_incr = rospy.Publisher('skeleton_data/incremental',
                                        skeleton_message, queue_size=10)
    self.publish_comp = rospy.Publisher('skeleton_data/complete',
                                        skeleton_complete, queue_size=10)
    self.rate = rospy.Rate(15.0)
    # initialise data to recieve tf data
    self._initialise_data()
    # connect to mongodb only when logging was requested via the parameter
    if self._with_logging:
        rospy.loginfo("Connecting to mongodb...%s" % self._message_store)
        self._store_client = MessageStoreProxy(
            collection=self._message_store)
import rospy
from sensor_msgs.msg import PointCloud2, PointField
from mongodb_store.message_store import MessageStoreProxy
from sensor_msgs.msg import PointCloud2, Image
from cv_bridge import CvBridge, CvBridgeError
from soma_llsd_msgs.msg import *
import cv
import cv2
import os
import pickle

if __name__ == '__main__':
    # Dump every llsd Scene stored in mongodb into per-episode folders on disk.
    rospy.init_node('om_test', anonymous=False)
    bridge = CvBridge()
    store = MessageStoreProxy(database="somadata", collection="llsd_scene_store")
    scenes = store.query(Scene._type)
    targ = "scene_dump/"
    print("got some scenes." + str(len(scenes)) + " in fact! writing them to " + targ)
    if not os.path.exists(targ):
        os.makedirs(targ)
    scene_count = 0
    processed_episodes = []
    for sc in scenes:
        # query() yields (message, meta) tuples; the message is element 0.
        cur_scene = sc[0]
        # NOTE(review): 'sc.meta' looks wrong for a tuple — the meta dict
        # should be sc[1]; confirm against the running code.
        if "surface" in sc.meta:
            print("processing: " + cur_scene.id)
            if not os.path.exists(targ + cur_scene.episode_id + "/"):
                os.makedirs(targ + cur_scene.episode_id + "/")
#! /usr/bin/env python import rospy import roslib import sys, os from std_msgs.msg import String, Header, Int32 from mongodb_store.message_store import MessageStoreProxy # from soma2_msgs.msg import SOMA2Object if __name__ == "__main__": rospy.init_node('query_soma') # connecting soma_store = MessageStoreProxy(database="message_store", collection="soma_activity_ids_list") if len(sys.argv) > 1: for cnt, arg in enumerate(sys.argv): if cnt == 0: continue # print arg print 'Add an object ID to a msg store list: %s ' % arg new_obj_id = String(arg) # putting something in soma_store.insert_named("object id %s" % arg, new_obj_id) # # getting it back out # id,meta = soma_store.query_named("object id %s" %arg, Int32._type) # print scene, meta else:
def __init__(self, context):
    """Construct the Helping Hand rqt plugin: load the Qt UI, populate the
    configuration/data tables, wire up signals and connect to the
    tf/joint/dmp capture services."""
    super(HelpingHandGUI, self).__init__(context)
    # Give QObjects reasonable names
    self.setObjectName('HelpingHandGUI')
    # Process standalone plugin command-line arguments
    from argparse import ArgumentParser
    parser = ArgumentParser()
    # Add argument(s) to the parser.
    parser.add_argument("-q", "--quiet", action="store_true",
                        dest="quiet",
                        help="Put plugin in silent mode")
    # args, unknowns = parser.parse_known_args(context.argv())
    # if not args.quiet:
    #     rospy.logdebug 'arguments: ', args
    #     rospy.logdebug 'unknowns: ', unknowns
    # Create QWidget
    self._widget = QWidget()
    # Get path to UI file which should be in the "resource" folder of this package
    ui_file = os.path.join(rospkg.RosPack().get_path('helping_hand_gui'), 'qt', 'helpinghand.ui')
    # Extend the widget with all attributes and children from UI file
    loadUi(ui_file, self._widget)
    # Give QObjects reasonable names
    # self._widget.setObjectName('DMPRecordToolGUIUi')
    # Show _widget.windowTitle on left-top of each plugin (when
    # it's set in _widget). This is useful when you open multiple
    # plugins at once. Also if you open multiple instances of your
    # plugin at once, these lines add number to make it easy to
    # tell from pane to pane.
    if context.serial_number() > 1:
        self._widget.setWindowTitle(self._widget.windowTitle() + (' (%d)' % context.serial_number()))
    # Add widget to the user interface
    context.add_widget(self._widget)
    # Populate configuration table's headers
    self._conf_table = self._widget.findChild(QTableWidget, 'configTable')
    self._conf_table.setHorizontalHeaderLabels((
        'Trigger Name', 'Robot Namespace', 'Trigger Topic',
        'Trigger Type', 'Trigger Value', 'Trigger Callback'))
    # Populate data table's headers
    self._data_table = self._widget.findChild(QTableWidget, 'data_table')
    self._data_table.setHorizontalHeaderLabels((
        'Timestamp', 'Trigger Conf', 'Trigger Reason', 'Data Type'))
    header = self._data_table.horizontalHeader()
    header.setSectionResizeMode(QHeaderView.ResizeToContents)
    # Hide the 5th column that contains data identifiers
    self._data_table.setColumnCount(self._data_table.columnCount()+1)
    self._data_table.setColumnHidden(self._data_table.columnCount()-1, True)
    # Connect the refresh signal with the update function
    self._refresh_data_table_contents_sig.connect(self._update_data_table)
    # Connect the item selected signal to the display details
    self._data_table.itemSelectionChanged.connect(self._display_item_details)
    # Parse config
    self._parse_yaml_config()
    # Show conf in table
    self._write_conf_to_table()
    # Display what we are listening to
    self._display_addresses()
    # Print initial status report
    self.status_report = StatusReport(self._init_status_report, self)
    self._print_status()
    # DMP Encoding service
    self._dmp_ecoder = rospy.ServiceProxy('/encode_traj_to_dmp', EncodeTrajectoryToDMP)
    # Save button widget
    save_button = self._widget.findChild(QPushButton, 'save_button')
    save_button.clicked.connect(partial(self._save_button_pressed, save_button))
    # Create database proxy
    self._msg_store = MessageStoreProxy()
    # Create the service proxies for into saving databse
    self._tf_capture_srv = rospy.ServiceProxy('tf_capture', CaptureTF)
    self._joint_capture_srv = rospy.ServiceProxy('joint_capture',
                                                 CaptureJoint)
    self._dmp_capture_srv = rospy.ServiceProxy('dmp_capture', CaptureDMP)
    # Make sure the services are running !!
    try:
        self._tf_capture_srv.wait_for_service(0.5)
        self._joint_capture_srv.wait_for_service(0.5)
        self._dmp_capture_srv.wait_for_service(0.5)
    except Exception as e:
        rospy.logerr("Could not establish a connection to services. See exception:\n{}".format(e))
        exit()
def collect_predictions(predictor_name, now, tmap, training_window_start,
                        training_window_end, training_window_size,
                        training_windows, testing_window_start,
                        testing_window_end):
    # Evaluate navigation predictions: for each training window (fixed start,
    # end grown by training_window_size per step) build a model, then score it
    # against every nav stat in the fixed testing window.  Result documents
    # are written to the 'nav_results' mongo collection, one per epoch.
    builder = model_client()
    predictor = prediction_client()
    msg_store = MessageStoreProxy(collection='nav_stats')
    host = rospy.get_param('mongodb_host', 'localhost')
    port = rospy.get_param('mongodb_port', '62345')
    client = pymongo.MongoClient(host, port)
    results_collection = client['nav_results'][tmap]
    meta_query = {
        "epoch": {
            "$gt": to_epoch(testing_window_start),
            "$lt": to_epoch(testing_window_end)
        }
    }
    test_stats = msg_store.query(NavStatistics._type, {'topological_map': tmap}, meta_query)
    test_epochs = {}
    test_stat_count = 0
    # group nav stats by epoch for faster processing later
    for nav_stat, nav_stat_meta in test_stats:
        if nav_stat_meta['epoch'] not in test_epochs:
            # print nav_stat_meta['epoch']
            test_epochs[nav_stat_meta['epoch']] = []
        test_epochs[nav_stat_meta['epoch']].append(nav_stat)
        test_stat_count += 1
    print 'Testing with %s stats at %s different epochs' % (test_stat_count, len(test_epochs))
    processed = 0
    increment = 100
    increment_count = 100
    for window in range(training_windows):
        # Training windows share a fixed start; only the end grows.
        window_start = training_window_start
        window_end = training_window_end + training_window_size * window
        # get nav stats for window
        meta_query = {
            "epoch": {
                "$gt": to_epoch(window_start),
                "$lt": to_epoch(window_end)
            }
        }
        nav_stats = msg_store.query(NavStatistics._type, {'topological_map': tmap}, meta_query)
        print 'Building model for window %s to %s, total stats %s' % (
            training_window_start,
            training_window_end + training_window_size * window,
            len(nav_stats))
        build_goal = BuildTopPredictionGoal(python_to_rostime(window_start), python_to_rostime(window_end))
        builder.send_goal(build_goal)
        builder.wait_for_result()
        # now test each nav stat from the windows
        result_set = '%s___%s' % (window_start, window_end)
        for epoch, stats in test_epochs.iteritems():
            results = evaluate_prediction(predictor, epoch, stats)
            result_doc = {
                'result_time': now,
                'training_window_start': window_start,
                'training_window_end': window_end,
                'predictor': predictor_name,
                'epoch': epoch,
                'results': results
            }
            results_collection.insert(result_doc)
            processed += len(results)
            # Progress report every 'increment' processed results.
            if processed > increment_count:
                print 'Processed %s/%s' % (processed, test_stat_count)
                increment_count += increment
    print 'Processed %s/%s' % (processed, test_stat_count)
    return now
#!/usr/bin/env python # -*- coding: utf-8 -*- import rospy import mongodb_store_msgs.srv as dc_srv import mongodb_store.util as dc_util from mongodb_store.message_store import MessageStoreProxy from geometry_msgs.msg import Pose, Point, Quaternion import StringIO if __name__ == '__main__': rospy.init_node("example_message_store_client") msg_store = MessageStoreProxy() p = Pose(Point(0, 1, 2), Quaternion(3, 4, 5, 6)) try: # insert a pose object with a name, store the id from db p_id = msg_store.insert_named("my favourite pose", p) # you don't need a name (note that this p_id is different than one above) p_id = msg_store.insert(p) p_id = msg_store.insert(['test1', 'test2']) # get it back with a name print msg_store.query_named("my favourite pose", Pose._type) p.position.x = 666
#!/usr/bin/env python

import rospy
from mongodb_store.message_store import MessageStoreProxy
from ubd_scene_image_comparator.msg import UbdSceneImgLog

if __name__ == '__main__':
    rospy.init_node("ubd_scene_reset_annotator")
    rospy.loginfo("Resetting all annotated logs...")
    db = MessageStoreProxy(collection="ubd_scene_log")
    # Clear the 'annotated' flag in batches of 10 until none remain.
    logs = db.query(UbdSceneImgLog._type, {"annotated": True}, limit=10)
    while not rospy.is_shutdown() and len(logs) > 0:
        rospy.loginfo("Resetting %d entries" % len(logs))
        for entry, _meta in logs:
            entry.annotated = False
            # Entries are matched back by their header timestamp (seconds).
            db.update(
                entry,
                message_query={"header.stamp.secs": entry.header.stamp.secs})
        logs = db.query(UbdSceneImgLog._type, {"annotated": True}, limit=10)
    rospy.loginfo("Done...")
    rospy.loginfo(
        "Please remove entries on ubd_scene_accuracy if you want to have fresh learnt accuracy"
    )
    rospy.spin()
#!/usr/bin/env python import json import yaml import sys from strands_navigation_msgs.msg import TopologicalNode import mongodb_store.util as dc_util from mongodb_store.message_store import MessageStoreProxy if __name__ == '__main__': if len(sys.argv) < 2: print "usage: insert_map input_file.txt" sys.exit(2) filename = str(sys.argv[1]) #dataset_name=str(sys.argv[2]) #map_name=str(sys.argv[3]) msg_store = MessageStoreProxy(collection='topological_maps') json_data = open(filename, 'rb').read() data = json.loads(json_data) for i in data: meta = i['meta'] msgv = dc_util.dictionary_to_message(i['node'], TopologicalNode) msg_store.insert(msgv, meta) #mongodb_store.util.store_message(points_db,p,val)
from mongodb_store.message_store import MessageStoreProxy import yaml import json import pprint import argparse def loadDialogue(inputfile, dataset_name): print "openning %s" % inputfile with open(inputfile) as f: content = f.readlines() print "Done" return content if __name__ == '__main__': parser = argparse.ArgumentParser() parser.add_argument("dataset_name", help="The name of the dataset. Saved in meta information using 'meta_name'", type=str) parser.add_argument("-i", "--input", help="Input yaml file", type=str, required=True) parser.add_argument("--collection_name", help="The collection name. Default: aaf_walking_group", type=str, default="aaf_walking_group") parser.add_argument("--meta_name", help="The name of the meta filed to store 'dataset_name' in. Default: waypoint_set", type=str, default="waypoint_set") args = parser.parse_args() msg_store = MessageStoreProxy(collection=args.collection_name) data = yaml.load(open(args.input)) meta = {} meta[args.meta_name] = args.dataset_name pprint.pprint(meta) pprint.pprint(data) msg_store.insert(std_msgs.msg.String(json.dumps(data)), meta)