def __init__(self, tf_frequency):
    """Set up the publisher node: connect to json_prolog and create ROS handles.

    :param tf_frequency: rate (Hz) at which TF data is (re)published
    """
    # Block until the json_prolog service exists; the node is useless without it.
    rospy.wait_for_service('/json_prolog/query')
    self.tf_frequency = tf_frequency
    self.prolog = json_prolog.Prolog()
    self.tf_broadcaster = tf.TransformBroadcaster()
    self.marker_publisher = rospy.Publisher('/visualization_marker', Marker, queue_size=10)
    # Private service ('~' resolves under this node's namespace).
    self.dirty_object_srv = rospy.Service('~mark_dirty_object', DirtyObject, self.dirty_cb)
    # Lazily creates a ThorinObject entry the first time an object key is accessed.
    self.objects = defaultdict(lambda: ThorinObject())
def __init__(self):
    """Initialize the Unreal bridge: prolog client, model publisher and spawn services."""
    # Block until the json_prolog service is available before creating the client.
    rospy.wait_for_service('/json_prolog/query')
    self.prolog = json_prolog.Prolog()
    # Lazily creates an UnrealObject entry the first time a key is accessed.
    self.objects = defaultdict(lambda: UnrealObject())
    self.unreal_publisher = rospy.Publisher("/unreal", ModelDescription, queue_size=100)
    # Service proxies for spawning models in the Unreal simulation (batch and single).
    self.unreal_service = rospy.ServiceProxy(
        'unreal/spawn_multiple_models', SpawnMultipleModels)
    self.unreal_service_single = rospy.ServiceProxy(
        'unreal/spawn_model', SpawnModel)
    rospy.loginfo('unreal publisher is running')
def __init__(self):
    """Initialize knowledge-base wrapper state and wait for the prolog service."""
    # TODO implement all the things [high]
    # TODO use paramserver [low]
    self.floors = {}
    self.shelf_ids = []
    self.separators = {}
    self.perceived_frame_id_map = {}
    self.action_graph = None
    self.tf = TfWrapper()
    self.prolog = json_prolog.Prolog()
    # Block until json_prolog is up so later queries cannot race node startup.
    self.prolog.wait_for_service()
    # Serializes prolog queries issued from multiple threads.
    self.query_lock = Lock()
def __init__(self, tf_frequency, object_types):
    """Set up the object state publisher node.

    :param tf_frequency: rate (Hz) at which TF data is published
    :param object_types: object types handled by this publisher
    """
    # Block until the json_prolog service exists before creating the client.
    rospy.wait_for_service('/json_prolog/query')
    self.tf_frequency = tf_frequency
    self.object_types = object_types
    # Dirty-object requests are queued here and drained by dirty_timer_cb.
    self.dirty_object_requests = Queue()
    self.prolog = json_prolog.Prolog()
    # Lazily creates a PerceivedObject entry the first time a key is accessed.
    self.objects = defaultdict(lambda: PerceivedObject())
    self.tf_broadcaster = rospy.Publisher("/tf", TFMessage, queue_size=100)
    self.marker_publisher = rospy.Publisher('/visualization_marker', Marker, queue_size=100)
    self.marker_array_publisher = rospy.Publisher('/visualization_marker_array', MarkerArray, queue_size=100)
    self.object_state_subscriber = rospy.Subscriber('/object_state', ObjectStateArray, self.update_state_cb)
    # Private service ('~' resolves under this node's namespace).
    self.update_positions_srv = rospy.Service('~update_object_positions', Trigger, self.update_object_positions_cb)
    # 100 Hz timer that processes the queued dirty-object requests.
    self.dirty_timer = rospy.Timer(rospy.Duration(.01), self.dirty_timer_cb)
    rospy.loginfo('object state publisher is running')
    # Give publishers/subscribers a moment to register before work starts.
    rospy.sleep(1)
def __init__(self):
    """Start the dialog manager node and wire up its publisher and subscriber."""
    # constants to mark begin and end of discussions
    rospy.init_node('dialog_manager')
    # Ensure cleanup runs when the node is shut down.
    rospy.on_shutdown(self.cleanup)
    rospy.loginfo("Starting core server manager node...")
    # Publisher ('~' resolves under this node's namespace).
    self.respub = rospy.Publisher('~dialog_responses', DialogResponses, queue_size=1000)
    # Subscriber feeding recognized utterances into process().
    rospy.Subscriber('/rpc_server/recognition', String, self.process)
    # chartscript server
    rospy.loginfo("starting core server Manager ...")
    self.prolog = json_prolog.Prolog()
def __init__(self, tf_frequency):
    """Set up the object state publisher node.

    :param tf_frequency: rate (Hz) at which TF data is (re)published
    """
    # Block until the json_prolog service exists before creating the client.
    rospy.wait_for_service('/json_prolog/query')
    self.tf_frequency = tf_frequency
    self.prolog = json_prolog.Prolog()
    self.tf_broadcaster = tf.TransformBroadcaster()
    self.marker_publisher = rospy.Publisher('/visualization_marker', Marker, queue_size=10)
    # Private services ('~' resolves under this node's namespace).
    self.dirty_object_srv = rospy.Service('~mark_dirty_object', DirtyObject, self.dirty_cb)
    # Protects dirty-object bookkeeping against concurrent service calls.
    self.dirty_lock = Lock()
    self.update_positions_srv = rospy.Service(
        '~update_object_positions', Trigger, self.update_object_positions_cb)
    # Lazily creates a PerceivedObject entry the first time a key is accessed.
    self.objects = defaultdict(lambda: PerceivedObject())
    rospy.loginfo('object state publisher is running')
def ros_setup(self):
    """Connect to the scanning action server and the json_prolog service.

    Both connections first try a short (0.5 s) wait so a warning can be
    logged if the dependency is missing, then block indefinitely until it
    appears.
    """
    self.client = actionlib.SimpleActionClient(
        '/scanning_action', refills_msgs.msg.ScanningAction)
    # Short probe first; warn, then wait without timeout.
    if not self.client.wait_for_server(rospy.Duration(0.5)):
        rospy.logwarn(
            "Waiting for action server at '/scanning_action' to appear. Please make sure it is up."
        )
        self.client.wait_for_server()
    self.prolog = json_prolog.Prolog()
    # Same probe-then-block pattern for the prolog service; the short wait
    # signals absence by raising ROSException rather than returning False.
    try:
        self.prolog.wait_for_service(0.5)
    except rospy.ROSException:
        rospy.logwarn(
            "Waiting for json_prolog server at default namespace to appear. Please make sure it is up."
        )
        self.prolog.wait_for_service()
def start_querying():
    """Continuously query the knowledge base for TF infos and cache the results.

    Refreshes the module-level ``solutions`` list roughly once per second
    until the node is shut down. Each new result set is collected completely
    before being swapped in, so concurrent readers never observe a
    half-filled list.
    """
    global solutions
    # Client for the json_prolog query interface.
    prolog = json_prolog.Prolog()
    # Do until killed (Ctrl-C).
    while not rospy.is_shutdown():
        query = prolog.query("get_tf_infos(Name,FrameID,Position,Orientation)")
        try:
            # Gather every binding of (Name, FrameID, Position, Orientation).
            fresh = list(query.solutions())
        finally:
            # Release the query even if iterating the solutions raises,
            # so the prolog engine is never left with a dangling query.
            query.finish()
        solutions = fresh
        rospy.sleep(1)
#!/usr/bin/env python import roslib roslib.load_manifest('json_prolog') import rospy from json_prolog import json_prolog from suturo_perception_msgs.msg import ObjectDetection import time object_map = {} last_update_map = {} prolog = json_prolog.Prolog() index_of_is_near = -1 # Timeout of prolog update in nanosekunden (50.000.000 sind also 0.05 sekunden, 66.666.666 ungefaehr 15hz) TIMEOUT = 65666666 # Distance of the position and orientation. POSE_DISTANCE = 0.03 QUAT_DISTANCE = 0.03 def callback(currObj): if ((not currObj.name in object_map) or (is_near_list(currObj, object_map[currObj.name]) == -1) or (last_update_map[currObj.name].to_nsec() < rospy.Time.now().to_nsec() - TIMEOUT)): before = time.time() query = prolog.query("create_object_state_with_close('" + currObj.name + "', [ [" + str(currObj.pose.pose.position.x) + "," + str(currObj.pose.pose.position.y) + "," +
def __init__(self):
    """Create the json_prolog client used for all subsequent queries."""
    self.prolog = json_prolog.Prolog()
objects2type = {} objects = [] for inst, issue in zip(frames[0]['instance_ids'], frames[0]['issue_ids']): for inst_number in inst: inst_number -= 1 for issue_number in issue: issue_number -= 1 obj_name = '%s_%d' % (classes[issue_number], inst_number) objects2type[obj_name] = classes[issue_number] objects.append(obj_name) print 'objects:' print objects # start json-prolog rospy.init_node('tracking2knowrob', anonymous=True) knowrob = json_prolog.Prolog() # ask knowrob for object features object_store = [] for o in objects: o_type = objects2type[o] obj = Object(o) object_store.append(obj) if not o_type in concept_map: continue print 'Object: %s' % obj.name query = knowrob.query("object_feature(%s, ITasCFeatureT, Pos, Dir)" % concept_map[o_type]) counters = [1] * 3 for s in query.solutions(): featureT = s['ITasCFeatureT'] pos = numpy.matrix(s['Pos']).transpose()
def __init__(self):
    """Wait for the json_prolog service and create a client for it."""
    # Block until the service exists so later queries cannot race node startup.
    rospy.wait_for_service('/json_prolog/query')
    self.prolog = json_prolog.Prolog()
def load_data():
    """Return a list of Experiments with loaded data.

    Queries the knowledge base for all RobotExperiment ids, then fetches the
    learning (action/state) sequence for each one and derives an overall
    success flag from the final state string.
    """
    experimentNames = []
    experimentObjects = []
    prolog = json_prolog.Prolog()
    # query = prolog.query("suturo_learning:get_robot_experiment_task_success(foo)")
    # query.finish()
    # Load unique RobotExperiment IDs
    query = prolog.query("suturo_learning:l_get_robot_experiment(Experiment)")  # , )
    for solution in query.solutions():
        print 'Found Experiments. Experiment= %s' % (solution['Experiment'])
        experimentNames.append(solution['Experiment'])
    query.finish()
    # Fetch data for RobotExperiments
    for experiment in experimentNames:
        extracted_data = Experiment()
        # query = prolog.query("suturo_learning:get_robot_experiment_name('"+experiment+"',ExperimentName), suturo_learning:get_learningaction_sequence_in_experiment('"+experiment+"', LS), suturo_learning:get_state_sequence_for_action_sequence(LS, SSeq)")
        query = prolog.query("suturo_learning:get_robot_experiment_name('"+experiment+"',ExperimentName), suturo_learning:get_learning_sequence_w_exp('"+experiment+"', LS)")
        # TODO: Only first result
        # print "Found " + len(query.solutions()) + " solutions"
        for solution in query.solutions():
            print 'Found Data for Experiment. ExperimentName= %s' % (solution['ExperimentName'])
            extracted_data.name = solution['ExperimentName']
            # extracted_data.actions = solution['LS']
            # Extract the Data from the computed LearningSequence.
            # NOTE(review): assumes each LS entry is (states, action, ...) —
            # states at index 0, action at index 1; confirm against the
            # suturo_learning predicate definition.
            extracted_data.actions = []
            extracted_data.states = []
            for entry in solution['LS']:
                states = entry[0]
                # Get the first digit out of the state.
                # This gets rid of the dot and everything that follows
                clean_states = map(lambda s: str(s[0]), states)
                print "New states"
                print clean_states
                # print states[0]
                extracted_data.actions.append( entry[1] )
                extracted_data.states.append( str(clean_states).replace("'","") )
            # Add the last state, that is valid
            # after executing the last action
            clean_state = map(lambda s: str(s[0]), (solution['LS'][-1])[2])
            # extracted_data.states.append( str( (solution['LS'][-1])[2]).replace("'","") )
            extracted_data.states.append( str( clean_state ).replace("'",""))
            # extracted_data.states = solution['SSeq']
            extracted_data.overall_success = True
        query.finish()
        time.sleep(1)
        print "sleep done"
        last_state_in_experiment = extracted_data.states[-1]
        # Task is successful, if:
        # the last action was successful,
        # the red cube is placed
        # and the blue handle is placed
        # NOTE(review): the indices 4/7/10 address fixed character positions
        # inside the stringified state list — presumably the three flags
        # above; verify against the state encoding.
        if (last_state_in_experiment[4] == "1" and last_state_in_experiment[7] == "1" and last_state_in_experiment[10] == "1"):
            extracted_data.overall_success = True
            print "XXXX - True"
        else:
            extracted_data.overall_success = False
            print "XXXX - False"
        # # Get the result for the given experiment
        # query = prolog.query("suturo_learning:get_robot_experiment_task_success('"+experiment+"')")
        # # for solution in query.solutions():
        # #     print solution
        # if query.next():
        #     extracted_data.overall_success = True
        #     print "XXXX - True"
        # else:
        #     extracted_data.overall_success = False
        #     print "XXXX - False"
        # NOTE(review): this query was already finished above — the second
        # finish() on the same query looks redundant; confirm it is harmless.
        query.finish()
        # if isinstance(prolog.once("suturo_learning:get_robot_experiment_task_success('"+experiment+"')"),dict):
        #     print "True"
        # else:
        #     print "False"
        # get_learningaction_sequence_in_experiment(Experiment, LS),
        # get_state_sequence_for_action_sequence(LS, SSeq),
        experimentObjects.append(extracted_data)
    return experimentObjects