Example #1
    def __init__(self):

        # ................................................................
        # read ros parameters
        self.loadROSParams()

        # ................................................................
        # Other config params and attributes
        self.closest_human_pose = None
        self.prev_closest_human_pose = None

        # qsrlib data...
        self.qsrlib = QSRlib()

        # TODO: add as parameter
        self.which_qsr = 'qtccs'

        # TODO: add as parameter
        self.dynammic_args = {
            "qtccs": {
                "no_collapse": True,
                "quantisation_factor": 0.01,
                "validate": False,
                "qsrs_for": [("human_os", "robot_os")]
            }
        }

        # sensible default values ...
        self.closest_human_pose = None

        # ................................................................
        # start ros subs/pubs/servs....
        self.initROS()

        rospy.loginfo("Node [" + rospy.get_name() + "] entering spin...")
    def get_object_frame_qsrs(self, world_trace, objects):
        joint_types = {'left_hand': 'hand', 'right_hand': 'hand',  'head-torso': 'tpcc-plane'}

        joint_types_plus_objects = joint_types.copy()
        for object in objects:
            generic_object = "_".join(object.split("_")[:-1])
            joint_types_plus_objects[object] = generic_object
        #print joint_types_plus_objects

        """create QSRs between the person's joints and the soma objects in map frame"""
        qsrs_for=[]
        for ob in objects:
            qsrs_for.append((str(ob), 'left_hand'))
            qsrs_for.append((str(ob), 'right_hand'))
            #qsrs_for.append((str(ob), 'torso'))

        dynamic_args = {}
        # dynamic_args['argd'] = {"qsrs_for": qsrs_for, "qsr_relations_and_values": {'Touch': 0.25, 'Near': 0.5,  'Ignore': 10}}
        # dynamic_args['qtcbs'] = {"qsrs_for": qsrs_for, "quantisation_factor": 0.05, "validate": False, "no_collapse": True} # Quant factor is effected by filters to frame rate
        # dynamic_args["qstag"] = {"object_types": joint_types_plus_objects, "params": {"min_rows": 1, "max_rows": 1, "max_eps": 2}}

        dynamic_args['argd'] = {"qsrs_for": qsrs_for, "qsr_relations_and_values": {'Touch': 0.5, 'Near': 0.75,  'Medium': 1.5, 'Ignore': 10}}
        # dynamic_args['argd'] = {"qsrs_for": qsrs_for, "qsr_relations_and_values": {'Touch': 0.2, 'Ignore': 10}}
        dynamic_args['qtcbs'] = {"qsrs_for": qsrs_for, "quantisation_factor": 0.01, "validate": False, "no_collapse": True} # Quant factor is affected by filters to frame rate
        dynamic_args["qstag"] = {"object_types": joint_types_plus_objects, "params": {"min_rows": 1, "max_rows": 1, "max_eps": 2}}

        qsrlib = QSRlib()
        req = QSRlib_Request_Message(which_qsr=["argd", "qtcbs"], input_data=world_trace, dynamic_args=dynamic_args)
        #req = QSRlib_Request_Message(which_qsr="argd", input_data=world_trace, dynamic_args=dynamic_args)
        ret = qsrlib.request_qsrs(req_msg=req)

        # for ep in ret.qstag.episodes:
        #     print ep
        #
        return ret
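
For reference, a minimal, self-contained sketch of the 'argd' request that the function above configures (hypothetical object names and coordinates, assuming the standard QSRlib import paths):

from qsrlib.qsrlib import QSRlib, QSRlib_Request_Message
from qsrlib_io.world_trace import Object_State, World_Trace

world = World_Trace()
world.add_object_state_series([
    Object_State(name="printer_1", timestamp=0, x=0.0, y=0.0),
    Object_State(name="left_hand", timestamp=0, x=0.6, y=0.0),
])

# same distance bands as above: < 0.5 'Touch', < 0.75 'Near', < 1.5 'Medium', < 10 'Ignore'
dynamic_args = {"argd": {"qsrs_for": [("printer_1", "left_hand")],
                         "qsr_relations_and_values": {"Touch": 0.5, "Near": 0.75,
                                                      "Medium": 1.5, "Ignore": 10}}}

req = QSRlib_Request_Message(which_qsr="argd", input_data=world, dynamic_args=dynamic_args)
ret = QSRlib().request_qsrs(req_msg=req)

for t in ret.qsrs.get_sorted_timestamps():
    for k, v in ret.qsrs.trace[t].qsrs.items():
        print(str(k) + ": " + str(v.qsr))   # the 0.6 m separation should fall in the 'Near' band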
Example #3
    def get_object_frame_qsrs(self, world_trace, objects):
        joint_types = {
            'left_hand': 'hand',
            'right_hand': 'hand',
            'head-torso': 'tpcc-plane'
        }

        joint_types_plus_objects = joint_types.copy()
        for object in objects:
            generic_object = "_".join(object.split("_")[:-1])
            joint_types_plus_objects[object] = generic_object
        #print joint_types_plus_objects
        """create QSRs between the person's joints and the soma objects in map frame"""
        qsrs_for = []
        for ob in objects:
            qsrs_for.append((str(ob), 'left_hand'))
            qsrs_for.append((str(ob), 'right_hand'))
            #qsrs_for.append((str(ob), 'torso'))

        dynamic_args = {}
        # dynamic_args['argd'] = {"qsrs_for": qsrs_for, "qsr_relations_and_values": {'Touch': 0.25, 'Near': 0.5,  'Ignore': 10}}
        # dynamic_args['qtcbs'] = {"qsrs_for": qsrs_for, "quantisation_factor": 0.05, "validate": False, "no_collapse": True} # Quant factor is effected by filters to frame rate
        # dynamic_args["qstag"] = {"object_types": joint_types_plus_objects, "params": {"min_rows": 1, "max_rows": 1, "max_eps": 2}}

        dynamic_args['argd'] = {
            "qsrs_for": qsrs_for,
            "qsr_relations_and_values": {
                'Touch': 0.5,
                'Near': 0.75,
                'Medium': 1.5,
                'Ignore': 10
            }
        }
        # dynamic_args['argd'] = {"qsrs_for": qsrs_for, "qsr_relations_and_values": {'Touch': 0.2, 'Ignore': 10}}
        dynamic_args['qtcbs'] = {
            "qsrs_for": qsrs_for,
            "quantisation_factor": 0.01,
            "validate": False,
            "no_collapse": True
        }  # Quant factor is affected by filters to frame rate
        dynamic_args["qstag"] = {
            "object_types": joint_types_plus_objects,
            "params": {
                "min_rows": 1,
                "max_rows": 1,
                "max_eps": 2
            }
        }

        qsrlib = QSRlib()
        req = QSRlib_Request_Message(which_qsr=["argd", "qtcbs"],
                                     input_data=world_trace,
                                     dynamic_args=dynamic_args)
        #req = QSRlib_Request_Message(which_qsr="argd", input_data=world_trace, dynamic_args=dynamic_args)
        ret = qsrlib.request_qsrs(req_msg=req)

        # for ep in ret.qstag.episodes:
        #     print ep
        #
        return ret
def get_object_frame_qsrs(file, world_trace, objects, joint_types, dynamic_args):
    """create QSRs between the person's joints and the soma objects in map frame"""
    qsrs_for=[]
    for ob in objects:
        qsrs_for.append((str(ob), 'left_hand'))
        #qsrs_for.append((str(ob), 'right_hand'))
        #qsrs_for.append((str(ob), 'torso'))

    dynamic_args['argd'] = {"qsrs_for": qsrs_for, "qsr_relations_and_values": {'Touch': 0.5, 'Near': 0.75,  'Medium': 1.5, 'Ignore': 10}}
    # dynamic_args['argd'] = {"qsrs_for": qsrs_for, "qsr_relations_and_values": {'Touch': 0.2, 'Ignore': 10}}
    dynamic_args['qtcbs'] = {"qsrs_for": qsrs_for, "quantisation_factor": 0.05, "validate": False, "no_collapse": True} # Quant factor is affected by filters to frame rate
    dynamic_args["qstag"] = {"object_types": joint_types, "params": {"min_rows": 1, "max_rows": 1, "max_eps": 2}}

    qsrlib = QSRlib()

    req = QSRlib_Request_Message(which_qsr=["argd", "qtcbs"], input_data=world_trace, dynamic_args=dynamic_args)
    # req = QSRlib_Request_Message(which_qsr="argd", input_data=world_trace, dynamic_args=dynamic_args)
    qsr_object_frame = qsrlib.request_qsrs(req_msg=req)

    for ep in qsr_object_frame.qstag.episodes:
        print ep

    for cnt, h in  zip(qsr_object_frame.qstag.graphlets.histogram, qsr_object_frame.qstag.graphlets.code_book):
        print "\n", cnt, h, qsr_object_frame.qstag.graphlets.graphlets[h]

    return qsr_object_frame
    def __compute_qsr(self, bb1, bb2):
        if not self.qsr:
            return ""
        ax, ay, bx, by = self.bb1
        cx, cy, dx, dy = self.bb2

        qsrlib = QSRlib()
        world = World_Trace()
        world.add_object_state_series([
            Object_State(name="red",
                         timestamp=0,
                         x=((ax + bx) / 2.0),
                         y=((ay + by) / 2.0),
                         xsize=abs(bx - ax),
                         ysize=abs(by - ay)),
            Object_State(name="yellow",
                         timestamp=0,
                         x=((cx + dx) / 2.0),
                         y=((cy + dy) / 2.0),
                         xsize=abs(dx - cx),
                         ysize=abs(dy - cy))
        ])
        dynamic_args = {"argd": {"qsr_relations_and_values": self.distance}}
        qsrlib_request_message = QSRlib_Request_Message(
            which_qsr=self.qsr, input_data=world, dynamic_args=dynamic_args)
        qsrlib_response_message = qsrlib.request_qsrs(
            req_msg=qsrlib_request_message)

        for t in qsrlib_response_message.qsrs.get_sorted_timestamps():
            foo = ""
            for k, v in zip(
                    qsrlib_response_message.qsrs.trace[t].qsrs.keys(),
                    qsrlib_response_message.qsrs.trace[t].qsrs.values()):
                foo += str(k) + ":" + str(v.qsr) + "; \n"
        return foo
def get_map_frame_qsrs(file, world_trace, dynamic_args):
    """create QSRs between the person and the robot in map frame"""
    qsrs_for = [('robot', 'torso')]
    dynamic_args['qtcbs'] = {
        "qsrs_for": qsrs_for,
        "quantisation_factor": 0.0,
        "validate": False,
        "no_collapse": True
    }
    dynamic_args["qstag"] = {
        "params": {
            "min_rows": 1,
            "max_rows": 1,
            "max_eps": 3
        }
    }

    qsrlib = QSRlib()
    req = QSRlib_Request_Message(which_qsr="qtcbs",
                                 input_data=world_trace,
                                 dynamic_args=dynamic_args)
    qsr_map_frame = qsrlib.request_qsrs(req_msg=req)
    print "    ", file, "episodes = "
    for i in qsr_map_frame.qstag.episodes:
        print i
    return qsr_map_frame
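
As a hedged sketch of the input get_map_frame_qsrs expects (made-up coordinates; the trace just needs 'robot' and 'torso' tracks over a few shared timestamps):

from qsrlib_io.world_trace import Object_State, World_Trace

world_trace = World_Trace()
# stationary robot, person's torso walking towards it
world_trace.add_object_state_series(
    [Object_State(name="robot", timestamp=t, x=0.0, y=0.0) for t in range(3)])
world_trace.add_object_state_series(
    [Object_State(name="torso", timestamp=t, x=2.0 - 0.5 * t, y=0.0) for t in range(3)])

qsr_map_frame = get_map_frame_qsrs("demo_file", world_trace, dynamic_args={})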
def worker_qsrs(chunk):
    (file_, path, soma_objects, config) = chunk

    e = utils.load_e(path, file_)

    dynamic_args = {}
    try:
        dynamic_args['argd'] = config['argd_args']
        dynamic_args['qtcbs'] = config['qtcbs_args']
        dynamic_args["qstag"] = {"params" : config['qstag_args']}
        dynamic_args['filters'] = {"median_filter": {"window": config['qsr_mean_window']}}  # This has been updated since ECAI paper.
    except KeyError:
        print "check argd, qtcbs, qstag parameters in config file"


    joint_types = {'head': 'head', 'torso': 'torso', 'left_hand': 'hand', 'right_hand': 'hand', 'left_knee': 'knee', 'right_knee': 'knee',
                   'left_shoulder': 'shoulder', 'right_shoulder': 'shoulder', 'head-torso': 'tpcc-plane'}
    object_types = joint_types.copy()
    add_objects = []
    for region, objects in ce.get_soma_objects().items():
        for o in objects:
            add_objects.append(o)
            try:
                generic_object = "_".join(o.split("_")[:-1])
                object_types[o] = generic_object
            except:
                print "didnt add:", object
    dynamic_args["qstag"]["object_types"] = object_types

    """1. CREATE QSRs FOR joints & Object """
    qsrlib = QSRlib()

    qsrs_for=[]
    for ob in objects:
        qsrs_for.append((str(ob), 'left_hand'))
        qsrs_for.append((str(ob), 'right_hand'))
        qsrs_for.append((str(ob), 'torso'))
    dynamic_args['argd']["qsrs_for"] = qsrs_for
    dynamic_args['qtcbs']["qsrs_for"] = qsrs_for

    req = QSRlib_Request_Message(config['which_qsr'], input_data=e.map_world, dynamic_args=dynamic_args)
    e.qsr_object_frame = qsrlib.request_qsrs(req_msg=req)

    # print ">", e.qsr_object_frame.qstag.graphlets.histogram
    # for i in e.qsr_object_frame.qstag.episodes:
    #     print i
    # sys.exit(1)
    """2. CREATE QSRs for joints - TPCC"""
    # print "TPCC: ",
    # # e.qsr_joint_frame = get_joint_frame_qsrs(file, e.camera_world, joint_types, dynamic_args)
    # qsrs_for = [('head', 'torso', ob) if ob not in ['head', 'torso'] and ob != 'head-torso' else () for ob in joint_types.keys()]
    # dynamic_args['tpcc'] = {"qsrs_for": qsrs_for}
    # dynamic_args["qstag"]["params"] = {"min_rows": 1, "max_rows": 2, "max_eps": 4}
    # qsrlib = QSRlib()
    # req = QSRlib_Request_Message(which_qsr="tpcc", input_data=e.camera_world, dynamic_args=dynamic_args)
    # e.qsr_joints_frame = qsrlib.request_qsrs(req_msg=req)
    # # pretty_print_world_qsr_trace("tpcc", e.qsr_joints_frame)
    # # print e.qsr_joints_frame.qstag.graphlets.histogram
    utils.save_event(e, "Learning/QSR_Worlds")
def worker_qsrs(chunk):
    (file_, path, soma_objects, qsr_mean_window) = chunk
    e = utils.load_e(path, file_)

    dynamic_args = {}
    dynamic_args["qstag"] = {}
    dynamic_args['filters'] = {"median_filter": {"window": qsr_mean_window}}  # This has been updated since ECAI paper.

    """ADD OBJECT TYPES into DYNAMIC ARGS """
    joint_types = {'head': 'head', 'torso': 'torso', 'left_hand': 'hand', 'right_hand': 'hand', 'left_knee': 'knee', 'right_knee': 'knee',
                   'left_shoulder': 'shoulder', 'right_shoulder': 'shoulder', 'head-torso': 'tpcc-plane'}
    all_object_types = joint_types.copy()

    add_objects = []
    for region, objects in ce.get_soma_objects().items():
        for o in objects:
            add_objects.append(o)
            try:
                generic_object = "_".join(o.split("_")[:-1])
                all_object_types[o] = generic_object
            except:
                print "didnt add:", object

    dynamic_args["qstag"]["object_types"] = all_object_types

    """1. CREATE QSRs FOR Key joints & Object """
    print " QTC,QDC: "
    qsrlib = QSRlib()
    # print ">>", e.map_world.get_sorted_timestamps()

    # for t in e.map_world.trace:
    #     print "\nt", t
    #     for o, state in e.map_world.trace[t].objects.items():
    #         print o, state.x,state.y,state.z
    # sys.exit(1)

    req = QSRLib_param_mapping(e.map_world, dynamic_args, soma_objects)
    e.qsr_object_frame = qsrlib.request_qsrs(req_msg=req)

    # print ">", e.qsr_object_frame.qstag.graphlets.histogram
    # for i in e.qsr_object_frame.qstag.episodes:
    #     print i
    # sys.exit(1)
    """2. CREATE QSRs for joints - TPCC"""
    # print "TPCC: ",
    # # e.qsr_joint_frame = get_joint_frame_qsrs(file, e.camera_world, joint_types, dynamic_args)
    # qsrs_for = [('head', 'torso', ob) if ob not in ['head', 'torso'] and ob != 'head-torso' else () for ob in joint_types.keys()]
    # dynamic_args['tpcc'] = {"qsrs_for": qsrs_for}
    # dynamic_args["qstag"]["params"] = {"min_rows": 1, "max_rows": 2, "max_eps": 4}
    # qsrlib = QSRlib()
    # req = QSRlib_Request_Message(which_qsr="tpcc", input_data=e.camera_world, dynamic_args=dynamic_args)
    # e.qsr_joints_frame = qsrlib.request_qsrs(req_msg=req)
    # # pretty_print_world_qsr_trace("tpcc", e.qsr_joints_frame)
    # # print e.qsr_joints_frame.qstag.graphlets.histogram
    utils.save_event(e, "Learning/QSR_Worlds")
Example #9
 def __init__(self, *args):
     super(AbstractClass_UnitTest, self).__init__(*args)
     self._unique_id = ""
     self._worlds = {"data1": load_input_data1(),
                     "data2": load_input_data2(),
                     "data3": load_input_data3(),
                     "data4": load_input_data4(),
                     "data2_first100": load_input_data2_first100(),
                     "data3_first100": load_input_data3_first100(),
                     "data4_first100": load_input_data4_first100()}
     self._qsrlib = QSRlib()
def get_joint_frame_qsrs(file, world_trace, joint_types, dynamic_args):

    qsrs_for = [('head', 'torso', ob) if ob not in ['head', 'torso'] and ob != 'head-torso' else () for ob in joint_types.keys()]
    dynamic_args['tpcc'] = {"qsrs_for": qsrs_for}
    dynamic_args["qstag"] = {"object_types": joint_types, "params": {"min_rows": 1, "max_rows": 1, "max_eps": 3}}

    qsrlib = QSRlib()
    req = QSRlib_Request_Message(which_qsr="tpcc", input_data=world_trace, dynamic_args=dynamic_args)
    qsr_joints_frame = qsrlib.request_qsrs(req_msg=req)

    # for i in qsr_joints_frame.qstag.episodes:
    #     print i
    return qsr_joints_frame
def get_map_frame_qsrs(file, world_trace, dynamic_args):
    """create QSRs between the person and the robot in map frame"""
    qsrs_for = [('robot', 'torso')]
    dynamic_args['qtcbs'] = {"qsrs_for": qsrs_for, "quantisation_factor": 0.0, "validate": False, "no_collapse": True}
    dynamic_args["qstag"] = {"params": {"min_rows": 1, "max_rows": 1, "max_eps": 3}}

    qsrlib = QSRlib()
    req = QSRlib_Request_Message(which_qsr="qtcbs", input_data=world_trace, dynamic_args=dynamic_args)
    qsr_map_frame = qsrlib.request_qsrs(req_msg=req)
    print "    ", file, "episodes = "
    for i in qsr_map_frame.qstag.episodes:
        print i
    return qsr_map_frame
def qsr_relation_between(obj1_name, obj2_name, obj1, obj2):
    global frame

    qsrlib = QSRlib()
    options = sorted(qsrlib.qsrs_registry.keys())
    if init_qsr.qsr not in options:
        raise ValueError("qsr not found, keywords: %s" % options)

    world = World_Trace()

    object_types = {obj1_name: obj1_name, obj2_name: obj2_name}

    dynamic_args = {
        "qstag": {
            "object_types": object_types,
            "params": {
                "min_rows": 1,
                "max_rows": 1,
                "max_eps": 3
            }
        },
        "tpcc": {
            "qsrs_for": [(obj1_name, obj2_name)]
        },
        "rcc2": {
            "qsrs_for": [(obj1_name, obj2_name)]
        },
        "rcc4": {
            "qsrs_for": [(obj1_name, obj2_name)]
        },
        "rcc8": {
            "qsrs_for": [(obj1_name, obj2_name)]
        }
    }

    o1 = [Object_State(name=obj1_name, timestamp=frame, x=obj1[0], y=obj1[1], \
      xsize=obj1[2], ysize=obj1[3])]
    o2 = [Object_State(name=obj2_name, timestamp=frame, x=obj2[0], y=obj2[1], \
      xsize=obj2[2], ysize=obj2[3])]
    world.add_object_state_series(o1)
    world.add_object_state_series(o2)

    qsrlib_request_message = QSRlib_Request_Message(which_qsr=init_qsr.qsr, \
      input_data=world, dynamic_args=dynamic_args)
    qsrlib_response_message = qsrlib.request_qsrs(
        req_msg=qsrlib_request_message)
    pretty_print_world_qsr_trace(init_qsr.qsr,
                                 qsrlib_response_message)  #, vis = True)
    qsr_value = find_qsr_value(init_qsr.qsr, qsrlib_response_message)
    return qsr_value
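
A minimal sketch of the same request pattern with the 'rcc8' calculus and two hand-made bounding boxes (hypothetical names, centres and sizes); the overlapping boxes should come back as 'po':

from qsrlib.qsrlib import QSRlib, QSRlib_Request_Message
from qsrlib_io.world_trace import Object_State, World_Trace

world = World_Trace()
world.add_object_state_series([
    Object_State(name="box_a", timestamp=0, x=1.0, y=1.0, xsize=2.0, ysize=2.0),
    Object_State(name="box_b", timestamp=0, x=1.5, y=1.0, xsize=2.0, ysize=2.0),
])

req = QSRlib_Request_Message(which_qsr="rcc8", input_data=world,
                             dynamic_args={"rcc8": {"qsrs_for": [("box_a", "box_b")]}})
res = QSRlib().request_qsrs(req_msg=req)

t = res.qsrs.get_sorted_timestamps()[0]
for k, v in res.qsrs.trace[t].qsrs.items():
    print(str(k) + ": " + str(v.qsr))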
Example #13
class AbstractClass_UnitTest(unittest.TestCase):
    __metaclass__ = ABCMeta

    def __init__(self, *args):
        super(AbstractClass_UnitTest, self).__init__(*args)
        self._unique_id = ""
        self._worlds = {"data1": load_input_data1(),
                        "data2": load_input_data2(),
                        "data3": load_input_data3(),
                        "data4": load_input_data4(),
                        "data2_first100": load_input_data2_first100(),
                        "data3_first100": load_input_data3_first100(),
                        "data4_first100": load_input_data4_first100()}
        self._qsrlib = QSRlib()

    @abstractmethod
    def qsrs_for_global_namespace(self, world_name, gt_filename):
        return

    @abstractmethod
    def qsrs_for_qsr_namespace(self, world_name, gt_filename):
        return

    @abstractmethod
    def qsrs_for_qsr_namespace_over_global_namespace(self, world_name, gt_filename):
        return

    def defaults(self, world_name, gt_filename):
        expected = unittest_read_qsrs_as_one_long_list(find_resource(PKG, gt_filename)[0])
        req_msg = QSRlib_Request_Message(self._unique_id, self._worlds[world_name])
        actual = unittest_get_qsrs_as_one_long_list(self._qsrlib.request_qsrs(req_msg).qsrs)
        return expected, actual

    def q_factor(self, world_name, gt_filename, q_factor):
        expected = unittest_read_qsrs_as_one_long_list(find_resource(PKG, gt_filename)[0])
        req_msg = QSRlib_Request_Message(self._unique_id, self._worlds[world_name],
                                         {self._unique_id: {"quantisation_factor": q_factor}})
        actual = unittest_get_qsrs_as_one_long_list(self._qsrlib.request_qsrs(req_msg).qsrs)
        return expected, actual

    def q_factor_data_notequal_defaults(self, q_factor_filename, defaults_filename):
        q_factor_results = unittest_read_qsrs_as_one_long_list(find_resource(PKG, q_factor_filename)[0])
        defaults_results = unittest_read_qsrs_as_one_long_list(find_resource(PKG, defaults_filename)[0])
        return q_factor_results, defaults_results

    def custom(self, world_name, gt_filename, dynamic_args):
        expected = unittest_read_qsrs_as_one_long_list(find_resource(PKG, gt_filename)[0])
        req_msg = QSRlib_Request_Message(self._unique_id, self._worlds[world_name], dynamic_args)
        actual = unittest_get_qsrs_as_one_long_list(self._qsrlib.request_qsrs(req_msg).qsrs)
        return expected, actual
def get_object_frame_qsrs(file, world_trace, objects, joint_types,
                          dynamic_args):
    """create QSRs between the person's joints and the soma objects in map frame"""
    qsrs_for = []
    for ob in objects:
        qsrs_for.append((str(ob), 'left_hand'))
        #qsrs_for.append((str(ob), 'right_hand'))
        #qsrs_for.append((str(ob), 'torso'))

    dynamic_args['argd'] = {
        "qsrs_for": qsrs_for,
        "qsr_relations_and_values": {
            'Touch': 0.5,
            'Near': 0.75,
            'Medium': 1.5,
            'Ignore': 10
        }
    }
    # dynamic_args['argd'] = {"qsrs_for": qsrs_for, "qsr_relations_and_values": {'Touch': 0.2, 'Ignore': 10}}
    dynamic_args['qtcbs'] = {
        "qsrs_for": qsrs_for,
        "quantisation_factor": 0.05,
        "validate": False,
        "no_collapse": True
    }  # Quant factor is affected by filters to frame rate
    dynamic_args["qstag"] = {
        "object_types": joint_types,
        "params": {
            "min_rows": 1,
            "max_rows": 1,
            "max_eps": 2
        }
    }

    qsrlib = QSRlib()

    req = QSRlib_Request_Message(which_qsr=["argd", "qtcbs"],
                                 input_data=world_trace,
                                 dynamic_args=dynamic_args)
    # req = QSRlib_Request_Message(which_qsr="argd", input_data=world_trace, dynamic_args=dynamic_args)
    qsr_object_frame = qsrlib.request_qsrs(req_msg=req)

    for ep in qsr_object_frame.qstag.episodes:
        print ep

    for cnt, h in zip(qsr_object_frame.qstag.graphlets.histogram,
                      qsr_object_frame.qstag.graphlets.code_book):
        print "\n", cnt, h, qsr_object_frame.qstag.graphlets.graphlets[h]

    return qsr_object_frame
    def __init__(self):

        self.world = World_Trace()
        self.options = sorted(QSRlib().qsrs_registry.keys())
        self.calculi = self.options[0]
        # if a value lies between the thresholds set for 'Near' and 'Far', the relation is reported as 'Far':
        # each threshold acts as a 'less than' (<) upper bound on the distance
        self.distance_args = {"Near": 3.0, "Medium": 3.5, "Far": 4.0}
        self.dynamic_args = {
            "argd": {
                "qsr_relations_and_values": self.distance_args
            },
            "for_all_qsrs": {
                'qsrs_for': [('object_1', 'robot'), ('object_2', 'robot')]
            }
        }

        self.relationship_pub = rospy.Publisher("argd_realtionship",
                                                QsrMsg,
                                                queue_size=10)
        self.pose_sub1 = message_filters.Subscriber("extracted_pose1", PoseMsg)
        self.pose_sub2 = message_filters.Subscriber("extracted_pose2", PoseMsg)
        self.cln = QSRlib_ROS_Client()
        # the update rate for the ApproximateTimeSynchronizer is 0.1 and the queue_size is 10
        self.ts = message_filters.ApproximateTimeSynchronizer(
            [self.pose_sub1, self.pose_sub2], 3, 0.1)
        self.ts.registerCallback(self.relations_callback)
 def __init__(self, node_name="qsr_lib", active_qsrs=None):
     self.qsrlib = QSRlib(active_qsrs)
     self.node_name = node_name
     self.node = rospy.init_node(self.node_name)
     self.service_topic_names = {"request": self.node_name+"/request"}
     self.srv_qsrs_request = rospy.Service(self.service_topic_names["request"], RequestQSRs, self.handle_request_qsrs)
     rospy.loginfo("QSRlib_ROS_Server up and running, listening to: %s" % self.service_topic_names["request"])
 def __init__(self, *args):
     super(AbstractClass_UnitTest, self).__init__(*args)
     self._unique_id = ""
     self._worlds = {"data1": load_input_data1(),
                     "data2": load_input_data2(),
                     "data3": load_input_data3(),
                     "data4": load_input_data4()}
     self._qsrlib = QSRlib()
    def __compute_qsr(self, bb1, bb2):
        if not self.qsr:
            return ""
        ax, ay, bx, by = self.bb1
        cx, cy, dx, dy = self.bb2
        
        qsrlib = QSRlib()
        world = World_Trace()
        world.add_object_state_series([Object_State(name="red", timestamp=0, x=((ax+bx)/2.0), y=((ay+by)/2.0), xsize=abs(bx-ax), ysize=abs(by-ay)),
            Object_State(name="yellow", timestamp=0, x=((cx+dx)/2.0), y=((cy+dy)/2.0), xsize=abs(dx-cx), ysize=abs(dy-cy))])
        dynamic_args = {"argd": {"qsr_relations_and_values": self.distance}}
        qsrlib_request_message = QSRlib_Request_Message(which_qsr=self.qsr, input_data=world, dynamic_args=dynamic_args)
        qsrlib_response_message = qsrlib.request_qsrs(req_msg=qsrlib_request_message)

        for t in qsrlib_response_message.qsrs.get_sorted_timestamps():
            foo = ""
            for k, v in zip(qsrlib_response_message.qsrs.trace[t].qsrs.keys(),
                            qsrlib_response_message.qsrs.trace[t].qsrs.values()):
                foo += str(k) + ":" + str(v.qsr) + "; \n"
        return foo
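
The only non-obvious step above is turning corner coordinates (ax, ay, bx, by) into the centre/size form Object_State expects; a small worked example with made-up corners:

from qsrlib_io.world_trace import Object_State

ax, ay, bx, by = 2.0, 1.0, 6.0, 3.0              # box corners (made up)
centre_x = (ax + bx) / 2.0                        # -> 4.0
centre_y = (ay + by) / 2.0                        # -> 2.0
xsize, ysize = abs(bx - ax), abs(by - ay)         # -> 4.0, 2.0

red = Object_State(name="red", timestamp=0, x=centre_x, y=centre_y,
                   xsize=xsize, ysize=ysize)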
Example #19
def qsr_message(state_series, objects):
    qsrlib = QSRlib()
    options = sorted(qsrlib.qsrs_registry.keys())
    if init_qsr.qsr not in options:
        raise ValueError("qsr not found, keywords: %s" % options)

    world = World_Trace()

    # Create dynamic arguments for every qsr type
    dynamic_args = dynamic_qsr_arguments(objects)
    
    # Add all object states in the World.
    for o in state_series:
        world.add_object_state_series(o)
    # Create request message
    qsrlib_request_message = QSRlib_Request_Message(which_qsr=init_qsr.qsr, input_data=world, dynamic_args=dynamic_args)

    qsrlib_response_message = qsrlib.request_qsrs(req_msg=qsrlib_request_message)

    pretty_print_world_qsr_trace(init_qsr.qsr, qsrlib_response_message, vis = True)
def get_joint_frame_qsrs(file, world_trace, joint_types, dynamic_args):

    qsrs_for = [('head', 'torso',
                 ob) if ob not in ['head', 'torso'] and ob != 'head-torso' else
                () for ob in joint_types.keys()]
    dynamic_args['tpcc'] = {"qsrs_for": qsrs_for}
    dynamic_args["qstag"] = {
        "object_types": joint_types,
        "params": {
            "min_rows": 1,
            "max_rows": 1,
            "max_eps": 3
        }
    }

    qsrlib = QSRlib()
    req = QSRlib_Request_Message(which_qsr="tpcc",
                                 input_data=world_trace,
                                 dynamic_args=dynamic_args)
    qsr_joints_frame = qsrlib.request_qsrs(req_msg=req)

    # for i in qsr_joints_frame.qstag.episodes:
    #     print i
    return qsr_joints_frame
    def __init__(self, node_name="qsr_lib"):
        """Constructor.

        :param node_name: The QSRlib ROS server node name.
        :type node_name: str
        """
        self.qsrlib = QSRlib()
        """:class:`QSRlib <qsrlib.qsrlib.QSRlib>`: QSRlib main object."""

        self.node_name = node_name
        """str: QSRlib ROS server node name."""

        rospy.init_node(self.node_name)

        self.service_topic_names = {"request": self.node_name + "/request"}
        """dict: Holds the service topic names."""

        self.srv_qsrs_request = rospy.Service(
            self.service_topic_names["request"], RequestQSRs,
            self.handle_request_qsrs)
        """rospy.impl.tcpros_service.Service: QSRlib ROS service."""

        rospy.loginfo("QSRlib_ROS_Server up and running, listening to: %s" %
                      self.service_topic_names["request"])
class QSRlib_ROS_Server(object):
    def __init__(self, node_name="qsr_lib", active_qsrs=None):
        self.qsrlib = QSRlib(active_qsrs)
        self.node_name = node_name
        self.node = rospy.init_node(self.node_name)
        self.service_topic_names = {"request": self.node_name+"/request"}
        self.srv_qsrs_request = rospy.Service(self.service_topic_names["request"], RequestQSRs, self.handle_request_qsrs)
        rospy.loginfo("QSRlib_ROS_Server up and running, listening to: %s" % self.service_topic_names["request"])

    def handle_request_qsrs(self, req):
        rospy.logdebug("Handling QSRs request made at %i.%i" % (req.header.stamp.secs, req.header.stamp.nsecs))
        request_message = pickle.loads(req.data)
        qsrs_response_message = self.qsrlib.request_qsrs(request_message=request_message)
        res = RequestQSRsResponse()
        res.header.stamp = rospy.get_rostime()
        res.data = pickle.dumps(qsrs_response_message)
        print()
        return res
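
A hedged client-side counterpart for this server (assuming the qsr_lib node above is running; object names, coordinates and distance bands are made up, and the calls mirror the QSRlib_ROS_Client usage shown later in this listing):

import pickle

import rospy
from qsrlib.qsrlib import QSRlib_Request_Message
from qsrlib_io.world_trace import Object_State, World_Trace
from qsrlib_ros.qsrlib_ros_client import QSRlib_ROS_Client

rospy.init_node("qsr_lib_ros_client_example")

world = World_Trace()
world.add_object_state_series([
    Object_State(name="o1", timestamp=0, x=0.0, y=0.0),
    Object_State(name="o2", timestamp=0, x=1.0, y=0.0),
])
dynamic_args = {"argd": {"qsr_relations_and_values": {"Near": 1.5, "Far": 10.0}}}
req_msg = QSRlib_Request_Message(which_qsr="argd", input_data=world, dynamic_args=dynamic_args)

cln = QSRlib_ROS_Client()
ros_req = cln.make_ros_request_message(req_msg)
ros_res = cln.request_qsrs(ros_req)
qsrlib_response_message = pickle.loads(ros_res.data)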
class QSRlib_ROS_Server(object):
    """QSRlib ROS server."""
    def __init__(self, node_name="qsr_lib"):
        """Constructor.

        :param node_name: The QSRlib ROS server node name.
        :type node_name: str
        """
        self.qsrlib = QSRlib()
        """:class:`QSRlib <qsrlib.qsrlib.QSRlib>`: QSRlib main object."""

        self.node_name = node_name
        """str: QSRlib ROS server node name."""

        rospy.init_node(self.node_name)

        self.service_topic_names = {"request": self.node_name + "/request"}
        """dict: Holds the service topic names."""

        self.srv_qsrs_request = rospy.Service(
            self.service_topic_names["request"], RequestQSRs,
            self.handle_request_qsrs)
        """rospy.impl.tcpros_service.Service: QSRlib ROS service."""

        rospy.loginfo("QSRlib_ROS_Server up and running, listening to: %s" %
                      self.service_topic_names["request"])

    def handle_request_qsrs(self, req):
        """Service handler.

        :param req: QSRlib ROS request.
        :type req: qsr_lib.srv.RequestQSRsRequest
        :return: SRlib ROS response message.
        :rtype: qsr_lib.srv.RequestQSRsResponse
        """
        rospy.logdebug("Handling QSRs request made at %i.%i" %
                       (req.header.stamp.secs, req.header.stamp.nsecs))
        req_msg = pickle.loads(req.data)
        qsrlib_res_msg = self.qsrlib.request_qsrs(req_msg)
        ros_res_msg = RequestQSRsResponse()
        ros_res_msg.header.stamp = rospy.get_rostime()
        ros_res_msg.data = pickle.dumps(qsrlib_res_msg)
        return ros_res_msg
class QSRlib_ROS_Server(object):
    """QSRlib ROS server.

    """
    def __init__(self, node_name="qsr_lib"):
        """Constructor.

        :param node_name: The QSRlib ROS server node name.
        :type node_name: str
        :return:
        """
        self.qsrlib = QSRlib()
        """qsrlib.qsrlib.QSRlib: QSRlib main object."""

        self.node_name = node_name
        """str: The QSRlib ROS server node name."""

        # todo no need for self.node as rospy.init_node returns None
        self.node = rospy.init_node(self.node_name)

        self.service_topic_names = {"request": self.node_name+"/request"}
        """dict: Holds the service topic names."""

        self.srv_qsrs_request = rospy.Service(self.service_topic_names["request"], RequestQSRs, self.handle_request_qsrs)
        """rospy.impl.tcpros_service.Service: The QSRlib ROS service."""

        rospy.loginfo("QSRlib_ROS_Server up and running, listening to: %s" % self.service_topic_names["request"])

    def handle_request_qsrs(self, req):
        """Service handler.

        :param req: The QSRlib ROS request.
        :type req: qsr_lib.srv.RequestQSRsRequest
        :return: The QSRlib ROS response message.
        :rtype: qsr_lib.srv.RequestQSRsResponse
        """
        rospy.logdebug("Handling QSRs request made at %i.%i" % (req.header.stamp.secs, req.header.stamp.nsecs))
        req_msg = pickle.loads(req.data)
        qsrlib_res_msg = self.qsrlib.request_qsrs(req_msg)
        ros_res_msg = RequestQSRsResponse()
        ros_res_msg.header.stamp = rospy.get_rostime()
        ros_res_msg.data = pickle.dumps(qsrlib_res_msg)
        return ros_res_msg
Example #25
    def __init__(self):

        self.world = World_Trace()
        self.options = sorted(QSRlib().qsrs_registry.keys())
        self.calculi = self.options[6]
        # quantisation_factor represents the minimum change that should be considered to assume that the object has moved
        self.dynamic_args = {"qtcbs": {"no_collapse":True,"quantisation_factor": 0.01}, "qtcbs": {"qsrs_for": [("object_1","robot"), ("object_2","robot")]}}
        self.prev_timestamp = 0
        self.prev_pose_x1 = self.prev_pose_y1 = 0
        self.prev_pose_x2 = self.prev_pose_y2 = 0
        self.prev_robot_pose_x = self.prev_robot_pose_y = 0

        self.relationship_pub = rospy.Publisher("qtcb_realtionship", QsrMsg, queue_size = 10)
        self.pose_sub1 = message_filters.Subscriber("extracted_pose1",PoseMsg)
        self.pose_sub2 = message_filters.Subscriber("extracted_pose2",PoseMsg)
        self.cln = QSRlib_ROS_Client()
        # the update rate for the ApproximateTimeSynchronizer is 0.1 and the queue_size is 10
        self.ts = message_filters.ApproximateTimeSynchronizer([self.pose_sub1, self.pose_sub2], 10, 0.1)
        self.ts.registerCallback(self.relations_callback)
    def __init__(self, node_name="qsr_lib"):
        """Constructor.

        :param node_name: The QSRlib ROS server node name.
        :type node_name: str
        """
        self.qsrlib = QSRlib()
        """:class:`QSRlib <qsrlib.qsrlib.QSRlib>`: QSRlib main object."""

        self.node_name = node_name
        """str: QSRlib ROS server node name."""

        rospy.init_node(self.node_name)

        self.service_topic_names = {"request": self.node_name+"/request"}
        """dict: Holds the service topic names."""

        self.srv_qsrs_request = rospy.Service(self.service_topic_names["request"], RequestQSRs, self.handle_request_qsrs)
        """rospy.impl.tcpros_service.Service: QSRlib ROS service."""

        rospy.loginfo("QSRlib_ROS_Server up and running, listening to: %s" % self.service_topic_names["request"])
Example #27
    def __init__(self):

        self.world = World_Trace()
        self.options = sorted(QSRlib().qsrs_registry.keys())
        self.calculi = self.options[0]
        # any value in between goes to the upper bound
        self.dynamic_args = {
            "argd": {
                "qsr_relations_and_values": {
                    "Near": 0.36,
                    "Medium": .40,
                    "Far": .45
                }
            }
        }

        self.relationship_pub = rospy.Publisher("argd_realtionship",
                                                QsrMsg,
                                                queue_size=2)
        self.pose_sub1 = rospy.Subscriber("extracted_pose1", PoseMsg,
                                          self.relations_callback)
        self.cln = QSRlib_ROS_Client()
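
To make the threshold comment above concrete, a tiny stand-alone illustration of how a distance maps onto these bands (this mimics the mapping described in the comments, it is not the library code itself):

bands = {"Near": 0.36, "Medium": 0.40, "Far": 0.45}

def argd_relation(distance, bands):
    # the smallest band whose upper bound the distance stays below wins
    for name, bound in sorted(bands.items(), key=lambda kv: kv[1]):
        if distance < bound:
            return name
    return "beyond 'Far'"   # past the largest bound; actual library behaviour not shown here

print(argd_relation(0.30, bands))   # Near
print(argd_relation(0.38, bands))   # Medium: between 0.36 and 0.40, so the upper bound applies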
Example #28
# arguments
which_qsr = 'qtcbcs'
state_space = QSR_QTC_BC_Simplified().all_possible_relations
states_len = [len(si.replace(',', '')) for si in state_space]

dynammic_args = {
    "qtcbcs": {
        "no_collapse": True,
        "quantisation_factor": 0.01,
        "validate": False,
        "qsrs_for": [("human_os", "robot_os")]
    }
}

# create a QSRlib object if there isn't one already
qsrlib = QSRlib()
qsr_types = qsrlib.qsrs_registry.keys()

# Print available qsr models
# qsrlib._QSRlib__print_qsrs_available()

# Print available states
# qsrlib._QSRlib__print_qsrs_available()

## Inputs.
# time increment we are studying
td = 0.5

#- Human state vector: position (xh,yh,hh), and speed (vh,wh)
xh0 = 0.
yh0 = 1.
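
The snippet above stops after the first human coordinates; as a hedged continuation, this is how the prepared which_qsr / dynammic_args could be fed into a request (the robot start pose and the human step are made up; the structure mirrors getQSRState further down this page):

from qsrlib.qsrlib import QSRlib_Request_Message
from qsrlib_io.world_trace import Object_State, World_Trace

xr0, yr0 = 2.0, 0.0                                  # hypothetical robot pose
human_os = [Object_State(name="human_os", timestamp=0, x=xh0, y=yh0),
            Object_State(name="human_os", timestamp=td, x=xh0 + 0.2, y=yh0)]
robot_os = [Object_State(name="robot_os", timestamp=0, x=xr0, y=yr0),
            Object_State(name="robot_os", timestamp=td, x=xr0, y=yr0)]

world = World_Trace()
world.add_object_state_series(human_os)
world.add_object_state_series(robot_os)

req = QSRlib_Request_Message(which_qsr, world, dynammic_args)
res = qsrlib.request_qsrs(req_msg=req)
for t in res.qsrs.get_sorted_timestamps():
    for k, v in res.qsrs.trace[t].qsrs.items():
        print(str(k) + ": " + str(v.qsr))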
Example #29
class QTCStatePublisherNode():

    # Must have __init__(self) function for a class, similar to a C++ class constructor.
    def __init__(self):

        # ................................................................
        # read ros parameters
        self.loadROSParams()

        # ................................................................
        # Other config params and attributes
        self.closest_human_pose = None
        self.prev_closest_human_pose = None

        # qsrlib data...
        self.qsrlib = QSRlib()

        # TODO: add as parameter
        self.which_qsr = 'qtccs'

        # TODO: add as parameter
        self.dynammic_args = {
            "qtccs": {
                "no_collapse": True,
                "quantisation_factor": 0.01,
                "validate": False,
                "qsrs_for": [("human_os", "robot_os")]
            }
        }

        # sensible default values ...
        self.closest_human_pose = None

        # ................................................................
        # start ros subs/pubs/servs....
        self.initROS()

        rospy.loginfo("Node [" + rospy.get_name() + "] entering spin...")

    def loadROSParams(self):
        self.robot_id = rospy.get_param('~robot_id', 4)

        # human detection method: peopletracker(bayes, from STRANDS) OR trackedpersons (SPENCER)
        self.human_detection_method = rospy.get_param(
            '~human_detection_method', 'trackedpersons')

        # human detections using bayes peopletracker
        self.peopletracker_topic = rospy.get_param(
            '~peopletracker_topic',
            '/robot' + str(self.robot_id) + '/people_tracker/positions')

        # human tracking trackedpersons topic
        self.human_tracking_topic = rospy.get_param(
            '~human_tracking_topic',
            '/robot' + str(self.robot_id) + '/perception/tracked_persons')

        # publisher for our detection
        self.qtc_state_topic = rospy.get_param(
            '~qtc_state_topic',
            '/robot' + str(self.robot_id) + '/qsr/qtc_state')

        # points used in qtc is also published
        self.qtc_points_topic_name = rospy.get_param(
            '~qtc_points_topic_name',
            '/robot' + str(self.robot_id) + '/qsr/qtc_points')

        # current robot frame id from tf tree
        self.robot_base_frame_id = rospy.get_param(
            '~robot_base_frame_id',
            '/robot' + str(self.robot_id) + '/base_link')
        # in tf2, frames do not have the initial slash
        if (self.robot_base_frame_id[0] == '/'):
            self.robot_base_frame_id = self.robot_base_frame_id[1:]

        # How far in the future we consider next state
        self.sampling_time = rospy.get_param('~sampling_time', 0.6)

        # global frame id: both robot and human pose will be referred to this one
        self.global_frame_id = rospy.get_param('~global_frame', 'map_laser2d')
        # in tf2, frames do not have the initial slash
        if (self.global_frame_id[0] == '/'):
            self.global_frame_id = self.global_frame_id[1:]

        # transform tf_timeout
        timeout = rospy.get_param('~tf_timeout', 2)
        self.tf_timeout = rospy.Duration(timeout)

    def initROS(self):
        # publishers
        self.qtc_state_pub = rospy.Publisher(self.qtc_state_topic,
                                             String,
                                             queue_size=1)
        self.qtc_points_pub = rospy.Publisher(self.qtc_points_topic_name,
                                              Float64MultiArray,
                                              queue_size=1)

        # service clients
        # none here

        # tf buffers
        self.tfBuffer = tf2_ros.Buffer()
        self.listener = tf2_ros.TransformListener(self.tfBuffer)

        # subscribers and listeners

        # we either use spencer or bayes ...
        if self.human_detection_method == 'peopletracker':
            rospy.Subscriber(self.peopletracker_topic,
                             PeopleTracker,
                             self.peopletracker_callback,
                             queue_size=1)
        elif self.human_detection_method == 'trackedpersons':
            rospy.Subscriber(self.human_tracking_topic,
                             TrackedPersons,
                             self.spencer_human_tracking_callback,
                             queue_size=1)
        else:
            rospy.logerr(
                "Node [" + rospy.get_name() +
                "] Provided detection method  (" +
                self.human_detection_method +
                ") is not peopletracker/trackedpersons. Defaulting to trackedpersons"
            )
            rospy.Subscriber(self.human_tracking_topic,
                             TrackedPersons,
                             self.spencer_human_tracking_callback,
                             queue_size=1)

        # service servers
        # none here

    # .............................................................................................................

    def spencer_human_tracking_callback(self, msg):
        min_dist = np.inf
        min_i = -1
        closest_human_pose = None
        # TODO: Maybe it's better if we low pass filter these tracks to keep
        #       just humans that have been detected for a minimum period of time
        #rospy.loginfo("...........................................................")
        for i, trk_person in enumerate(msg.tracks):
            # Create the stamped object
            human_pose = PoseStamped()
            # msg contains a header with the frame id for all poses
            human_pose.header = msg.header
            human_pose.pose = trk_person.pose.pose

            # from the list of tracked persons, find the closest to robot
            (dist, human_pose_glob) = self.getDistToHuman(human_pose)
            #rospy.loginfo("ID: "+str(i)+" Dist: "+str(dist) +" Pose:\n"+str(human_pose))
            if dist < min_dist:
                min_i = i
                min_dist = dist
                closest_human_pose = human_pose_glob

        #rospy.loginfo("...........................................................")
        if (min_i > -1):
            # get first
            if (self.prev_closest_human_pose == None):
                self.prev_closest_human_pose = closest_human_pose
            # get second and clear
            elif (self.closest_human_pose == None):
                if (human_pose_glob.header.stamp -
                        self.prev_closest_human_pose.header.stamp
                    ).to_sec() > self.sampling_time:
                    self.closest_human_pose = closest_human_pose
                    self.publishQTCdata()
                    self.prev_closest_human_pose = None
                    self.closest_human_pose = None

    def peopletracker_callback(self, msg):
        spencer_msg = self.bayes2spencer(msg)
        self.spencer_human_tracking_callback(spencer_msg)

    # There is some info here that may not be accurate
    def bayes2spencer(self, bayes_msg):
        spencer_msg = TrackedPersons()
        spencer_msg.header = bayes_msg.header

        for i, pose in enumerate(bayes_msg.poses):
            track = TrackedPerson()
            track.track_id = self.string2uint64(bayes_msg.uuids[i])
            # PoseWithCovariance
            track.pose.pose.position = pose.position
            track.pose.pose.orientation = pose.orientation
            # TwistWithCovariance
            track.twist.twist.linear = bayes_msg.velocities[i]
            # Data not in bayes. Not sure about these ...
            track.pose.covariance = (np.random.normal(0.3, 0.1) *
                                     np.identity(6)).flatten().tolist()
            track.twist.covariance = (np.random.normal(0.3, 0.1) *
                                      np.identity(6)).flatten().tolist()
            # we assume 0 here
            # track.twist.twist.angular
            track.is_occluded = False
            track.is_matched = False
            track.detection_id = self.string2uint64(bayes_msg.uuids[i])
            track.age = 0

            spencer_msg.tracks.append(track)

        return spencer_msg

    def string2uint64(self, in_string):
        ans = UInt64(uuid.UUID(in_string).int)

        return ans

    def transformPoseSt(self, poseSt_in, frame_id_out, when):
        """Transform pose into provided frame id."""
        poseSt_out = None

        # in tf2, frames do not have the initial slash
        if (frame_id_out[0] == '/'):
            frame_id_out = frame_id_out[1:]
        # in tf2, frames do not have the initial slash
        if (poseSt_in.header.frame_id[0] == '/'):
            poseSt_in.header.frame_id = poseSt_in.header.frame_id[1:]

        try:
            transform = self.tfBuffer.lookup_transform(
                frame_id_out, poseSt_in.header.frame_id, when, self.tf_timeout)
            poseSt_out = tf2_geometry_msgs.do_transform_pose(
                poseSt_in, transform)

        except (tf2_ros.LookupException, tf2_ros.ConnectivityException,
                tf2_ros.ExtrapolationException) as e:
            rospy.logwarn("[%s] transform from (%s) to (%s) failed: (%s).",
                          rospy.get_name(), poseSt_in.header.frame_id,
                          frame_id_out, e)

        return poseSt_out

    def getDistToHuman(self, human_pose_in):
        """Returns distance between human track and robot."""
        dist = np.inf
        hpose_out = None

        now = rospy.Time.now()
        human_pose_rob = self.transformPoseSt(human_pose_in,
                                              self.robot_base_frame_id, now)
        human_pose_glob = self.transformPoseSt(human_pose_in,
                                               self.global_frame_id, now)

        if (human_pose_rob) and (human_pose_glob):
            dist = np.sqrt((human_pose_rob.pose.position.x**2) +
                           (human_pose_rob.pose.position.y**2))

            hpose_out = human_pose_glob

        return (dist, hpose_out)

    def getRobotState(self, timestamp):
        x = y = h = None

        robotPoseSt = PoseStamped()
        robotPoseSt.header.stamp = timestamp
        robotPoseSt.header.frame_id = self.robot_base_frame_id

        robotPoseGlob = self.transformPoseSt(robotPoseSt, self.global_frame_id,
                                             timestamp)
        if (robotPoseGlob):
            x = robotPoseGlob.pose.position.x
            y = robotPoseGlob.pose.position.y
            h = self.getRotation(robotPoseGlob.pose.orientation)
        return (x, y, h)

    def publishQTCdata(self):
        # this method is called by one of the other callbacks, so it's already under the lock
        #with self.data_lock:
        validData = self.closest_human_pose and self.prev_closest_human_pose
        if validData:
            # all should be already in global frame ...
            (xh0, yh0, hh0) = self.getState(self.prev_closest_human_pose)
            (xh1, yh1, hh1) = self.getState(self.closest_human_pose)

            (xr0, yr0, hr0) = self.getRobotState(
                self.prev_closest_human_pose.header.stamp)
            (xr1, yr1,
             hr1) = self.getRobotState(self.closest_human_pose.header.stamp)

            # get state only if transforms were successful
            if ((xh0 != None) and (xh1 != None) and (xr0 != None)
                    and (xr1 != None)):
                (isValid, state) = self.getQSRState(
                    xh0, yh0, xh1, yh1, xr0, yr0, xr1, yr1,
                    (self.closest_human_pose.header.stamp -
                     self.prev_closest_human_pose.header.stamp).to_sec())
                self.qtc_state = state
                self.is_valid = isValid

                # Finally publish .......................
                if self.is_valid:
                    self.qtc_state_pub.publish(self.qtc_state)
                    qtc_data = Float64MultiArray()
                    qtc_data.data = [
                        xh0, yh0, hh0, xh1, yh1, hh1, xr0, yr0, hr0, xr1, yr1,
                        hr1
                    ]
                    self.qtc_points_pub.publish(qtc_data)

        rospy.logdebug_throttle(
            1, "Node [" + rospy.get_name() + "] " + "Updated visuals ... ")

    def getQSRState(self, xh0, yh0, xh1, yh1, xr0, yr0, xr1, yr1, td):

        isValid = False
        state = None
        human_os = [
            Object_State(name="human_os", timestamp=0, x=xh0, y=yh0),
            Object_State(name="human_os", timestamp=td, x=xh1, y=yh1)
        ]

        robot_os = [
            Object_State(name="robot_os", timestamp=0, x=xr0, y=yr0),
            Object_State(name="robot_os", timestamp=td, x=xr1, y=yr1)
        ]

        # make some input data
        world = World_Trace()

        world.add_object_state_series(human_os)
        world.add_object_state_series(robot_os)

        # make a QSRlib request message
        qsrlib_request_message = QSRlib_Request_Message(
            self.which_qsr, world, self.dynammic_args)

        # request your QSRs
        qsrlib_response_message = self.qsrlib.request_qsrs(
            req_msg=qsrlib_request_message)

        # should have 1 timestamp
        t = qsrlib_response_message.qsrs.get_sorted_timestamps()
        if len(t) != 1:
            rospy.logerr(
                "Node [" + rospy.get_name() +
                "@getQSRState] : response timestamp message lenght is not 1.")
            return (isValid, state)
        t = t[0]

        # should have one value: human to robot
        v = qsrlib_response_message.qsrs.trace[t].qsrs.values()
        if len(v) != 1:
            rospy.logerr(
                "Node [" + rospy.get_name() +
                "@getQSRState] : response values message lenght is not 1.")
            return (isValid, state)
        v = v[0]
        state = v.qsr.values()
        if len(state) != 1:
            rospy.logerr(
                "Node [" + rospy.get_name() +
                "@getQSRState] : response state message lenght is not 1.")
            return (isValid, state)
        state = state[0]

        # Ok, maybe is valid ...
        isValid = True

        return (isValid, state)

    def getState(self, poseSt):
        x = poseSt.pose.position.x
        y = poseSt.pose.position.y
        h = self.getRotation(poseSt.pose.orientation)
        return (x, y, h)

    def getRotation(self, orientation_q):
        orientation_list = [
            orientation_q.x, orientation_q.y, orientation_q.z, orientation_q.w
        ]
        (roll, pitch, yaw) = euler_from_quaternion(orientation_list)
        # quat = quaternion_from_euler(roll, pitch, yaw)
        return yaw
def pretty_print_world_qsr_trace(which_qsr, qsrlib_response_message):
    print(which_qsr + " request was made at " +
          str(qsrlib_response_message.req_made_at) + " and received at " +
          str(qsrlib_response_message.req_received_at) + " and finished at " +
          str(qsrlib_response_message.req_finished_at))
    print("---")
    print("timestamps:", qsrlib_response_message.qsrs.get_sorted_timestamps())
    print("Response is:")
    for t in qsrlib_response_message.qsrs.get_sorted_timestamps():
        foo = str(t) + ": "
        for k, v in zip(qsrlib_response_message.qsrs.trace[t].qsrs.keys(),
                        qsrlib_response_message.qsrs.trace[t].qsrs.values()):
            foo += str(k) + ":" + str(v.qsr) + "; "
        print(foo)


if __name__ == "__main__":

    options = sorted(QSRlib().qsrs_registry.keys()) + ["multiple"]
    multiple = options[:]
    multiple.remove("multiple")
    multiple.remove("argd")
    multiple.remove("argprobd")
    multiple.remove("ra")
    multiple.remove("mwe")

    parser = argparse.ArgumentParser()
    parser.add_argument("qsr",
                        help="choose qsr: %s" % options,
                        type=str,
                        default='qtcbs')
    parser.add_argument("--ros",
                        action="store_true",
                        default=False,
Example #31
            foo += str(k) + ":" + str(v.qsr) + "; "
        print(foo)


if __name__ == "__main__":
    load_by_world_name = {"data1": load_input_data1,
                          "data2": load_input_data2,
                          "data3": load_input_data3,
                          "data4": load_input_data4,
                          "data2_first100": load_input_data2_first100,
                          "data3_first100": load_input_data3_first100,
                          "data4_first100": load_input_data4_first100}

    # ****************************************************************************************************
    # create a QSRlib object if there isn't one already
    qsrlib = QSRlib()

    # ****************************************************************************************************
    # parse command line arguments
    options = sorted(qsrlib.qsrs_registry.keys())
    parser = argparse.ArgumentParser()
    parser.add_argument("qsr", help="choose qsr: %s" % options, type=str)
    parser.add_argument("-i", "--input", required=True, type=str,
                        help="world name %s" % sorted(load_by_world_name.keys()))
    parser.add_argument("-o", "--output", required=True, type=str, help="where to write json file")
    args = parser.parse_args()
    if args.qsr in options:
        which_qsr = args.qsr
    elif args.qsr == "multiple":
        which_qsr = options[:]
    else:
        raise ValueError("qsr not found, keywords: %s" % options)
    #     dynamic_args[which_qsr] = {"qsrs_for": ["o1"]}
    # dynamic_args["for_all_qsrs"] = {"qsrs_for": [("o1", "o2"), "o2"]}
    # try:
    #     print(dynamic_args[which_qsr]["qsrs_for"])
    # except KeyError:
    #     print("qsrs_for not set in which_qsr namespace")
    # print(dynamic_args["for_all_qsrs"]["qsrs_for"])
    # # DBG: eof

    # dynamic_args[which_qsr]["qsrs_for"] = [("o1", "o2", "o3"), ("o1", "o3")]
    dynamic_args = {"tpcc": {"qsrs_for":  [("o1", "o2", "o3"), ("o1", "o3")]}}

    qsrlib_request_message = QSRlib_Request_Message(which_qsr=which_qsr, input_data=world, dynamic_args=dynamic_args)

    if args.ros:
        try:
            import rospy
            from qsrlib_ros.qsrlib_ros_client import QSRlib_ROS_Client
        except ImportError:
            raise ImportError("ROS not found")
        client_node = rospy.init_node("qsr_lib_ros_client_example")
        cln = QSRlib_ROS_Client()
        req = cln.make_ros_request_message(qsrlib_request_message)
        res = cln.request_qsrs(req)
        qsrlib_response_message = pickle.loads(res.data)
    else:
        qsrlib = QSRlib()
        qsrlib_response_message = qsrlib.request_qsrs(req_msg=qsrlib_request_message)

    pretty_print_world_qsr_trace(which_qsr, qsrlib_response_message)
Example #33
    def __compute_qsr(self, bb1, bb2):
        if not self.qsr:
            return ""
        ax, ay, bx, by = bb1
        cx, cy, dx, dy = bb2
        
        qsrlib = QSRlib()
        world = World_Trace()

        if self.args.distribution[0] == "weibull":
            weibull_units = np.load("/home/aswin/Documents/Courses/Udacity/Intel-Edge/Work/EdgeApp/PGCR-Results-Analysis/weibull_units.npy")
            weibull = np.load("/home/aswin/Documents/Courses/Udacity/Intel-Edge/Work/EdgeApp/PGCR-Results-Analysis/weibull.npy")
            scores = np.load("/home/aswin/Documents/Courses/Udacity/Intel-Edge/Work/EdgeApp/PGCR-Results-Analysis/scaled_scores.npy")
            timestamp = np.load("/home/aswin/Documents/Courses/Udacity/Intel-Edge/Work/EdgeApp/PGCR-Results-Analysis/timestamp.npy")

            o1 = []
            o2 = []
            o3 = []

            for ii,unit in tqdm(enumerate(weibull_units)):
                o1.append(Object_State(name="o1", timestamp=timestamp[ii], x=ii, y=int(unit*100), object_type="Unit"))
            for ii,weib in tqdm(enumerate(weibull)):
                o2.append(Object_State(name="o2", timestamp=timestamp[ii], x=ii, y=int(weib*100), object_type="Weibull"))
            for ii,score in tqdm(enumerate(scores)):
                o3.append(Object_State(name="o3", timestamp=timestamp[ii], x=ii, y=int(score*100), object_type="Score"))
        
        elif self.args.distribution[0] == "beta":
            beta_units = np.load("/home/aswin/Documents/Courses/Udacity/Intel-Edge/Work/EdgeApp/PGCR-Results-Analysis/beta_units.npy")
            beta = np.load("/home/aswin/Documents/Courses/Udacity/Intel-Edge/Work/EdgeApp/PGCR-Results-Analysis/beta.npy")
            scores = np.load("/home/aswin/Documents/Courses/Udacity/Intel-Edge/Work/EdgeApp/PGCR-Results-Analysis/scaled_scores.npy")
            timestamp = np.load("/home/aswin/Documents/Courses/Udacity/Intel-Edge/Work/EdgeApp/PGCR-Results-Analysis/timestamp.npy")

            o1 = []
            o2 = []
            o3 = []

            for ii,unit in tqdm(enumerate(beta_units)):
                o1.append(Object_State(name="o1", timestamp=timestamp[ii], x=ii, y=int(unit*100), object_type="Unit"))
            for ii,beta in tqdm(enumerate(beta)):
                o2.append(Object_State(name="o2", timestamp=timestamp[ii], x=ii, y=int(beta*100), object_type="Beta"))
            for ii,score in tqdm(enumerate(scores)):
                o3.append(Object_State(name="o3", timestamp=timestamp[ii], x=ii, y=int(score*100), object_type="Score"))

        elif self.args.distribution[0] == "cauchy":
            cauchy_units = np.load("/home/aswin/Documents/Courses/Udacity/Intel-Edge/Work/EdgeApp/PGCR-Results-Analysis/cauchy_units.npy")
            cauchy = np.load("/home/aswin/Documents/Courses/Udacity/Intel-Edge/Work/EdgeApp/PGCR-Results-Analysis/cauchy.npy")
            scores = np.load("/home/aswin/Documents/Courses/Udacity/Intel-Edge/Work/EdgeApp/PGCR-Results-Analysis/scaled_scores.npy")
            timestamp = np.load("/home/aswin/Documents/Courses/Udacity/Intel-Edge/Work/EdgeApp/PGCR-Results-Analysis/timestamp.npy")

            o1 = []
            o2 = []
            o3 = []

            for ii,unit in tqdm(enumerate(cauchy_units)):
                o1.append(Object_State(name="o1", timestamp=timestamp[ii], x=ii, y=int(unit*100), object_type="Unit"))
            for ii,cauc in tqdm(enumerate(cauchy)):
                o2.append(Object_State(name="o2", timestamp=timestamp[ii], x=ii, y=int(cauc*100), object_type="Cauchy"))
            for ii,score in tqdm(enumerate(scores)):
                o3.append(Object_State(name="o3", timestamp=timestamp[ii], x=ii, y=int(score*100), object_type="Score"))

        elif self.args.distribution[0] == "rayleigh":
            rayleigh_units = np.load("/home/aswin/Documents/Courses/Udacity/Intel-Edge/Work/EdgeApp/PGCR-Results-Analysis/rayleigh_units.npy")
            rayleigh = np.load("/home/aswin/Documents/Courses/Udacity/Intel-Edge/Work/EdgeApp/PGCR-Results-Analysis/rayleigh.npy")  # filename assumed to mirror the other distribution branches
            scores = np.load("/home/aswin/Documents/Courses/Udacity/Intel-Edge/Work/EdgeApp/PGCR-Results-Analysis/scaled_scores.npy")
            timestamp = np.load("/home/aswin/Documents/Courses/Udacity/Intel-Edge/Work/EdgeApp/PGCR-Results-Analysis/timestamp.npy")

            o1 = []
            o2 = []
            o3 = []

            for ii,unit in tqdm(enumerate(rayleigh_units)):
                o1.append(Object_State(name="o1", timestamp=timestamp[ii], x=ii, y=int(unit*100), object_type="Unit"))
            for ii,rayl in tqdm(enumerate(rayleigh)):
                o2.append(Object_State(name="o2", timestamp=timestamp[ii], x=ii, y=int(rayl*100), object_type="Rayleigh"))
            for ii,score in tqdm(enumerate(scores)):
                o3.append(Object_State(name="o3", timestamp=timestamp[ii], x=ii, y=int(score*100), object_type="Score"))

        elif self.args.distribution[0] == "gamma":
            gamma_units = np.load("/home/aswin/Documents/Courses/Udacity/Intel-Edge/Work/EdgeApp/PGCR-Results-Analysis/gamma_units.npy")
            gamma = np.load("/home/aswin/Documents/Courses/Udacity/Intel-Edge/Work/EdgeApp/PGCR-Results-Analysis/gamma.npy")
            scores = np.load("/home/aswin/Documents/Courses/Udacity/Intel-Edge/Work/EdgeApp/PGCR-Results-Analysis/scaled_scores.npy")
            timestamp = np.load("/home/aswin/Documents/Courses/Udacity/Intel-Edge/Work/EdgeApp/PGCR-Results-Analysis/timestamp.npy")

            o1 = []
            o2 = []
            o3 = []

            for ii,unit in tqdm(enumerate(gamma_units)):
                o1.append(Object_State(name="o1", timestamp=timestamp[ii], x=ii, y=int(unit*100), object_type="Unit"))
            for ii,gam in tqdm(enumerate(gamma)):
                o2.append(Object_State(name="o2", timestamp=timestamp[ii], x=ii, y=int(gam*100), object_type="Gamma"))
            for ii,score in tqdm(enumerate(scores)):
                o3.append(Object_State(name="o3", timestamp=timestamp[ii], x=ii, y=int(score*100), object_type="Score"))
        
        world.add_object_state_series(o1)
        world.add_object_state_series(o2)
        world.add_object_state_series(o3)
        
        # world.add_object_state_series([Object_State(name="red", timestamp=0, x=((ax+bx)/2.0), y=((ay+by)/2.0), xsize=abs(bx-ax), ysize=abs(by-ay)),
        #     Object_State(name="yellow", timestamp=0, x=((cx+dx)/2.0), y=((cy+dy)/2.0), xsize=abs(dx-cx), ysize=abs(dy-cy))])
        dynamic_args = {"argd": {"qsr_relations_and_values": self.distance}}
        qsrlib_request_message = QSRlib_Request_Message(which_qsr=self.qsr, input_data=world, dynamic_args=dynamic_args)
        qsrlib_response_message = qsrlib.request_qsrs(req_msg=qsrlib_request_message)

        foo = ""
        for t in qsrlib_response_message.qsrs.get_sorted_timestamps():
            foo = ""  # keep only the relations of the last timestamp
            for k, v in zip(qsrlib_response_message.qsrs.trace[t].qsrs.keys(),
                            qsrlib_response_message.qsrs.trace[t].qsrs.values()):
                foo += str(k) + ":" + str(v.qsr) + "; \n"
        return foo
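
# Standalone sketch (not from the class above) of the "argd" request that
# __compute_qsr() builds: distances between objects are mapped to the labels given in
# qsr_relations_and_values. The threshold values below are made up for illustration.
from qsrlib.qsrlib import QSRlib, QSRlib_Request_Message
from qsrlib_io.world_trace import Object_State, World_Trace

argd_world = World_Trace()
argd_world.add_object_state_series([Object_State(name="o1", timestamp=0, x=0.0, y=0.0),
                                    Object_State(name="o2", timestamp=0, x=0.3, y=0.0)])
# keys are the relation labels, values their upper distance bounds
argd_args = {"argd": {"qsr_relations_and_values": {"Touch": 0.5, "Near": 2.0, "Ignore": 10}}}
argd_req = QSRlib_Request_Message(which_qsr="argd", input_data=argd_world, dynamic_args=argd_args)
argd_res = QSRlib().request_qsrs(req_msg=argd_req)
for t in argd_res.qsrs.get_sorted_timestamps():
    for k, v in argd_res.qsrs.trace[t].qsrs.items():
        print(k, v.qsr)  # the objects are 0.3 apart, so "o1,o2" should come back as "Touch"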
Beispiel #34
0
    def __init__(self):

        # ................................................................
        # read ros parameters
        self.loadROSParams()

        # ................................................................
        # Other config params and attributes
        self.data_lock = Lock()

        # states from mpc node
        self.state = None
        self.prev_state = None
        # timestamp to get speeds...
        self.prev_time = None

        # qsrlib data...
        self.qsrlib = QSRlib()
        # TODO: add as parameter
        self.which_qsr = 'qtccs'
        # TODO: add as parameter
        self.dynammic_args = {
            "qtccs": {
                "no_collapse": True,
                "quantisation_factor": 0.01,
                "validate": False,
                "qsrs_for": [("human_os", "robot_os")]
            }
        }
        # TODO: add as parameter
        # these should be valid which_qsr states ...
        self.state_forbidden = ['-,-,-,-']

        # sensible default values ...
        self.closest_human_pose = None
        self.closest_human_twist = None
        self.constraint_v = 0
        self.constraint_w = 0
        self.robot_x = 0
        self.robot_y = 0
        self.robot_h = 0
        self.robot_v = 0
        self.robot_w = 0
        # TODO: add as parameter
        self.allowed_min_v = 0
        self.allowed_max_v = 0
        self.allowed_min_w = 0
        self.allowed_max_w = 0

        # What possible speeds we can achieve
        self.vr_space_inc = np.linspace(-self.max_vr, self.max_vr,
                                        self.num_speed_points)
        self.wr_space_inc = np.linspace(-self.max_wr, self.max_wr,
                                        self.num_speed_points)

        # ................................................................
        # start ros subs/pubs/servs....
        self.initROS()

        rospy.loginfo("Node [" + rospy.get_name() + "] entering spin...")

        r = rospy.Rate(2)
        while not rospy.is_shutdown():
            self.updateVisuals()
            self.updateConstraints()
            self.sendNewConstraints()
            r.sleep()
Beispiel #35
0
class DynamicConstraintsNode():

    # Must have __init__(self) function for a class, similar to a C++ class constructor.
    def __init__(self):

        # ................................................................
        # read ros parameters
        self.loadROSParams()

        # ................................................................
        # Other config params and attributes
        self.data_lock = Lock()

        # states from mpc node
        self.state = None
        self.prev_state = None
        # timestamp to get speeds...
        self.prev_time = None

        # qsrlib data...
        self.qsrlib = QSRlib()
        # TODO: add as parameter
        self.which_qsr = 'qtccs'
        # TODO: add as parameter
        self.dynammic_args = {
            "qtccs": {
                "no_collapse": True,
                "quantisation_factor": 0.01,
                "validate": False,
                "qsrs_for": [("human_os", "robot_os")]
            }
        }
        # TODO: add as parameter
        # these should be valid which_qsr states ...
        self.state_forbidden = ['-,-,-,-']

        # sensible default values ...
        self.closest_human_pose = None
        self.closest_human_twist = None
        self.constraint_v = 0
        self.constraint_w = 0
        self.robot_x = 0
        self.robot_y = 0
        self.robot_h = 0
        self.robot_v = 0
        self.robot_w = 0
        # TODO: add as parameter
        self.allowed_min_v = 0
        self.allowed_max_v = 0
        self.allowed_min_w = 0
        self.allowed_max_w = 0

        # What possible speeds we can achieve
        self.vr_space_inc = np.linspace(-self.max_vr, self.max_vr,
                                        self.num_speed_points)
        self.wr_space_inc = np.linspace(-self.max_wr, self.max_wr,
                                        self.num_speed_points)

        # ................................................................
        # start ros subs/pubs/servs....
        self.initROS()

        rospy.loginfo("Node [" + rospy.get_name() + "] entering spin...")

        r = rospy.Rate(2)
        while not rospy.is_shutdown():
            self.updateVisuals()
            self.updateConstraints()
            self.sendNewConstraints()
            r.sleep()

    def find_param(self, param_name, rid, default_val):
        ans = default_val
        allParams = rospy.get_param_names()
        for pi in allParams:
            if (pi[1:7] == ('robot' + str(rid))):
                if param_name in pi:
                    ans = rospy.get_param(pi, ans)
                    break

        return ans

    def loadROSParams(self):
        self.robot_id = rospy.get_param('~robot_id', 4)

        # where do we publish speed constraints: v and w
        self.velocity_constraints_topic = rospy.get_param(
            '~velocity_constraints_topic',
            '/robot' + str(self.robot_id) + '/velocity_constraints')

        # human detection method: peopletracker OR trackedpersons
        self.human_detection_method = rospy.get_param(
            '~human_detection_method', 'trackedpersons')

        # human tracking trackedpersons topic
        self.human_tracking_topic = rospy.get_param(
            '~human_tracking_topic',
            '/robot' + str(self.robot_id) + '/perception/tracked_persons')

        # human detections using bayes peopletracker
        self.peopletracker_topic = rospy.get_param(
            '~peopletracker_topic',
            '/robot' + str(self.robot_id) + '/people_tracker/positions')

        # MARKER for visual data
        self.visual_topic = rospy.get_param(
            '~visual_topic',
            '/robot' + str(self.robot_id) + '/velocity_constraints_markers')

        # MPC reports with current robot state
        self.reports_topic = rospy.get_param(
            '~reports_topic',
            '/robot' + str(self.robot_id) + '/control/controller/reports')

        # lookahead time for positions
        self.sampling_time = rospy.get_param('~sampling_time', 0.6)

        # robot frame id
        self.base_frame_id = rospy.get_param(
            '~base_frame', '/robot' + str(self.robot_id) + '/base_link')
        # in tf2, frames do not have the initial slash
        if (self.base_frame_id[0] == '/'):
            self.base_frame_id = self.base_frame_id[1:]

        # global frame id
        self.global_frame_id = rospy.get_param('~global_frame', 'map_laser2d')
        # in tf2, frames do not have the initial slash
        if (self.global_frame_id[0] == '/'):
            self.global_frame_id = self.global_frame_id[1:]

        # transform tf_timeout
        timeout = rospy.get_param('~tf_timeout', 2)
        self.tf_timeout = rospy.Duration(timeout)

        # how many points we want to test
        # TODO: add as parameter
        self.num_speed_points = 20

        # maximum max_tangential_velocity m/s
        self.max_vr = self.find_param('max_tangential_velocity', self.robot_id,
                                      1.0)

        # maximum steering wheel angle in rads
        self.phi_max = self.find_param('max_steer_angle', self.robot_id, 1.45)

        # steering wheel to back wheels distance m
        self.L = self.find_param('car_wheel_base', self.robot_id, 1.19)

        # w_max  = tan(phi_max) V_max / L
        self.max_wr = self.max_vr * np.tan(self.phi_max) / self.L
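
        # With the default values above (max_vr = 1.0 m/s, phi_max = 1.45 rad,
        # L = 1.19 m) this gives max_wr = 1.0 * tan(1.45) / 1.19, roughly 6.9 rad/s;
        # a real robot normally overrides these defaults via its own parameters.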

    def initROS(self):
        # publishers
        self.visual_pub = rospy.Publisher(self.visual_topic,
                                          MarkerArray,
                                          queue_size=1)
        self.velocity_constraints_pub = rospy.Publisher(
            self.velocity_constraints_topic, Float64MultiArray, queue_size=1)

        # service clients
        # none here

        # subscribers and listeners
        rospy.Subscriber(self.reports_topic,
                         ControllerReport,
                         self.reports_callback,
                         queue_size=1)

        self.tfBuffer = tf2_ros.Buffer()
        self.listener = tf2_ros.TransformListener(self.tfBuffer)

        # we either use spencer or bayes ...
        if self.human_detection_method == 'peopletracker':
            rospy.Subscriber(self.peopletracker_topic,
                             PeopleTracker,
                             self.peopletracker_callback,
                             queue_size=1)

        elif self.human_detection_method == 'trackedpersons':
            rospy.Subscriber(self.human_tracking_topic,
                             TrackedPersons,
                             self.spencer_human_tracking_callback,
                             queue_size=1)
        else:
            rospy.logerr(
                "Node [" + rospy.get_name() +
                "] Provided detection method  (" +
                self.human_detection_method +
                ") is not peopletracker/trackedpersons. Defaulting to trackedpersons"
            )
            rospy.Subscriber(self.human_tracking_topic,
                             TrackedPersons,
                             self.spencer_human_tracking_callback,
                             queue_size=1)

        # service servers
        # none here

    # .............................................................................................................

    # we use robot reports to know robot position and speed
    def reports_callback(self, msg):
        with self.data_lock:
            nowTime = rospy.Time.now()
            self.prev_state = self.state
            self.state = msg.state
            self.robot_x = msg.state.position_x
            self.robot_y = msg.state.position_y
            self.robot_h = msg.state.orientation_angle
            self.robot_v = 0
            self.robot_w = 0

            if (self.prev_state and self.prev_time):
                inc_t = (nowTime - self.prev_time).to_sec()
                inc_x = self.state.position_x - self.prev_state.position_x
                inc_y = self.state.position_y - self.prev_state.position_y
                inc_r = np.sqrt((inc_x**2) + (inc_y**2))
                inc_h = self.state.orientation_angle - self.prev_state.orientation_angle
                if (inc_t > 0):
                    self.robot_v = inc_r / inc_t
                    self.robot_w = inc_h / inc_t
                else:
                    self.robot_v = 0
                    self.robot_w = 0

                rospy.logdebug_throttle(
                    5, "Node [" + rospy.get_name() + "] " +
                    "Robot status: Pose ( " + str(self.state.position_x) +
                    ", " + str(self.state.position_y) + ", " +
                    str(self.state.orientation_angle * 180.0 / np.pi) +
                    " deg), " + "Speed ( " + str(self.robot_v) + " m/sec, " +
                    str(self.robot_w * 180.0 / np.pi) + " deg/sec) ")
            self.prev_time = nowTime
            #self.updateVisuals()

    def spencer_human_tracking_callback(self, msg):
        with self.data_lock:
            # after each callback, reset closest-human data

            self.closest_human_pose = None
            self.closest_human_twist = None
            min_dist = np.inf
            min_i = -1

            # TODO: Maybe it's better if we low pass filter these tracks to keep
            #       just humans that have been detected for a minimum period of time
            #rospy.loginfo("...........................................................")
            for i, trk_person in enumerate(msg.tracks):
                # Create the stamped object
                human_pose = PoseWithCovarianceStamped()
                # msg contains a header with the frame id for all poses
                human_pose.header = msg.header
                human_pose.pose = trk_person.pose

                # from the list of tracked persons, find the closest ...
                (dist, human_pose_base) = self.getDistToHuman(human_pose)
                #rospy.loginfo("ID: "+str(i)+" Dist: "+str(dist) +" Pose:\n"+str(human_pose))
                if dist < min_dist:
                    min_i = i
                    min_dist = dist
                    self.closest_human_pose = human_pose_base
                    self.closest_human_twist = self.getTwistInBaseFrame(
                        trk_person.twist, msg.header)

                    (xh0, yh0, hh0, vh0, wh0) = self.getHumanState()

                    rospy.logdebug_throttle(
                        5, "Node [" + rospy.get_name() + "] " +
                        "Closest human at: Pose ( " + str(xh0) + ", " +
                        str(yh0) + ", " + str(hh0 * 180.0 / np.pi) +
                        " deg), " + "Speed ( " + str(vh0) + " m/sec, " +
                        str(wh0 * 180.0 / np.pi) + " deg/sec) ")
            #rospy.loginfo("...........................................................")
            #self.updateVisuals()
            #self.updateConstraints()
            #self.sendNewConstraints()

    def peopletracker_callback(self, msg):
        spencer_msg = self.bayes2spencer(msg)
        self.spencer_human_tracking_callback(spencer_msg)

    #  There is some info here that may not be accurate
    def bayes2spencer(self, bayes_msg):
        spencer_msg = TrackedPersons()
        spencer_msg.header = bayes_msg.header

        for i, pose in enumerate(bayes_msg.poses):
            track = TrackedPerson()
            track.track_id = self.string2uint64(bayes_msg.uuids[i])
            # PoseWithCovariance
            track.pose.pose.position = pose.position
            track.pose.pose.orientation = pose.orientation
            # TwistWithCovariance
            track.twist.twist.linear = bayes_msg.velocities[i]
            # Data not in bayes. Not sure about these ...
            track.pose.covariance = (np.random.normal(0.3, 0.1) *
                                     np.identity(6)).flatten().tolist()
            track.twist.covariance = (np.random.normal(0.3, 0.1) *
                                      np.identity(6)).flatten().tolist()
            # we assume 0 here
            # track.twist.twist.angular
            track.is_occluded = False
            track.is_matched = False
            track.detection_id = self.string2uint64(bayes_msg.uuids[i])
            track.age = 0

            spencer_msg.tracks.append(track)

        return spencer_msg

    def string2uint64(self, in_string):
        ans = UInt64(uuid.UUID(in_string).int)

        return ans

    # ..............................
    # based on http://docs.ros.org/hydro/api/tf/html/c++/transform__listener_8cpp_source.html
    #

    def transformTwist(self, target_frame, msg_in):  # msg_in is a TwistStamped

        # divide twist message into rotational and translational velocities
        v_rot = [
            msg_in.twist.angular.x, msg_in.twist.angular.y,
            msg_in.twist.angular.z
        ]
        v_trans = [
            msg_in.twist.linear.x, msg_in.twist.linear.y, msg_in.twist.linear.z
        ]

        # get the transform between target frame and twist frame
        try:
            frame_tf = self.tfBuffer.lookup_transform(target_frame,
                                                      msg_in.header.frame_id,
                                                      msg_in.header.stamp)
        except (tf2_ros.LookupException, tf2_ros.ConnectivityException,
                tf2_ros.ExtrapolationException) as e:
            rospy.logerr("Node [" + rospy.get_name() +
                         "] Can't find transform. (" + str(e) + ") ")
            return None

        q_ = frame_tf.transform.rotation
        x_ = frame_tf.transform.translation

        # get the translation matrix from translation vector
        trans_vec = [x_.x, x_.y, x_.z]

        # get the rotation matrix from quaternion.
        rot_mat = quaternion_matrix([q_.x, q_.y, q_.z, q_.w])

        # rotate vector using
        out_rot = np.dot(rot_mat[:3, :3], v_rot)
        out_vel = np.dot(rot_mat[:3, :3], v_trans) + np.cross(
            trans_vec, out_rot)

        interframe_twist = TwistStamped()
        # we assume the frames are not moving relative to each other (not fully verified)

        msg_out = TwistStamped()
        msg_out.header.stamp = msg_in.header.stamp
        msg_out.header.frame_id = target_frame

        #
        msg_out.twist.linear.x = out_vel[0] + interframe_twist.twist.linear.x
        msg_out.twist.linear.y = out_vel[1] + interframe_twist.twist.linear.y
        msg_out.twist.linear.z = out_vel[2] + interframe_twist.twist.linear.z

        # final angular speed is input angular speed, rotated plus interframe speed
        msg_out.twist.angular.x = out_rot[0] + interframe_twist.twist.angular.x
        msg_out.twist.angular.y = out_rot[1] + interframe_twist.twist.angular.y
        msg_out.twist.angular.z = out_rot[2] + interframe_twist.twist.angular.z

        return msg_out  # geometry_msgs::TwistStamped
        # ..............................

    def getTwistInBaseFrame(self, twistWC, header):
        """Returns a TwistWithCovarianceStamped in base frame"""
        # Create the stamped object
        twistS = TwistStamped()
        twistS.header = header
        twistS.twist = twistWC.twist

        twistS_base = self.transformTwist(self.base_frame_id, twistS)

        twistWC_out = TwistWithCovarianceStamped()
        twistWC_out.header = twistS_base.header
        twistWC_out.twist.twist = twistS_base.twist
        twistWC_out.twist.covariance = twistWC.covariance

        return twistWC_out

    def transformPoseSt(self, poseSt_in, frame_id_out):
        """Transform pose into provided frame id."""
        poseSt_out = PoseStamped()

        try:
            poseSt_out = self.tfBuffer.transform(poseSt_in, frame_id_out,
                                                 self.tf_timeout)
        except tf2_ros.TransformException as e:
            poseSt_out.header.frame_id = 'NONE'
            rospy.logerr("Node [" + rospy.get_name() + "] Can't transform. (" +
                         str(e) + ") ")

        return (poseSt_out)

    def fromRobot2GlobalFrame(self, x, y, h):
        xg = yg = hg = None
        poseSt = PoseStamped()
        poseSt.header.frame_id = self.base_frame_id
        poseSt.pose.position.x = x
        poseSt.pose.position.y = y
        poseSt.pose.position.z = 0
        poseSt.pose.orientation = Quaternion(*quaternion_from_euler(0, 0, h))

        poseStG = self.transformPoseSt(poseSt, self.global_frame_id)

        if (poseStG.header.frame_id == self.global_frame_id):
            xg = poseStG.pose.position.x
            yg = poseStG.pose.position.y
            hg = self.getRotation(poseStG.pose.orientation)

        return (xg, yg, hg)

    def getDistToHuman(self, human_pose_in):
        """Returns distance between human track and robot."""
        human_pose_base = None
        dist = np.inf
        hpose_out = None
        hpose = PoseStamped()
        hpose.header = human_pose_in.header
        hpose.pose = human_pose_in.pose.pose

        human_pose_base = self.transformPoseSt(hpose, self.base_frame_id)

        if (human_pose_base.header.frame_id == self.base_frame_id):
            dist = np.sqrt((human_pose_base.pose.position.x**2) +
                           (human_pose_base.pose.position.y**2))
            hpose_out = PoseWithCovarianceStamped()
            hpose_out.header = human_pose_base.header
            hpose_out.pose.pose = human_pose_base.pose
            hpose_out.pose.covariance = human_pose_in.pose.covariance

        return (dist, hpose_out)
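
    # Example: a track located at (3, 4) m in the robot's base frame gives
    # dist = sqrt(3**2 + 4**2) = 5 m.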

    def updateVisuals(self):
        with self.data_lock:
            validData = isinstance(self.closest_human_pose,
                                   PoseWithCovarianceStamped) and isinstance(
                                       self.closest_human_twist,
                                       TwistWithCovarianceStamped)

            if validData:

                (xh0, yh0, hh0, vh0, wh0) = self.getHumanState()
                (xh1, yh1,
                 hh1) = self.getNextHumanState(xh0, yh0, hh0, vh0, wh0)
                xr0 = yr0 = hr0 = 0
                (xr1, yr1,
                 hr1) = self.getNextHumanState(xr0, yr0, hr0, self.robot_v,
                                               self.robot_w)

                tx = xh0 / 2.0
                ty = yh0 / 2.0

                # All that data is in robot frame, but we need it in global frame...
                (xh0, yh0, hh0) = self.fromRobot2GlobalFrame(xh0, yh0, hh0)
                (xh1, yh1, hh1) = self.fromRobot2GlobalFrame(xh1, yh1, hh1)
                # I could just use getOrigin with this one ...
                (xr0, yr0, hr0) = self.fromRobot2GlobalFrame(xr0, yr0, hr0)
                (xr1, yr1, hr1) = self.fromRobot2GlobalFrame(xr1, yr1, hr1)

                # plot only if transforms were successful
                if ((xh0 != None) and (xh1 != None) and (xr0 != None)
                        and (xr1 != None)):

                    data = MarkerArray()

                    # 0 line
                    line = Marker()
                    line.id = 0
                    line.type = Marker.LINE_STRIP
                    line.header.frame_id = self.global_frame_id
                    line.header.stamp = rospy.Time.now()
                    line.ns = "connecting_line"
                    line.action = Marker.ADD
                    line.pose.orientation.w = 1.0

                    # LINE_STRIP markers use only the x component of scale, for the line width
                    line.scale.x = 0.05

                    # blue color
                    line.color.b = 1.0
                    line.color.a = 1.0

                    humanP = Point(xh0, yh0, 1)

                    robotP = Point(xr0, yr0, 1)
                    line.points.append(humanP)
                    line.points.append(robotP)

                    data.markers.append(line)

                    # 1 text
                    text = Marker()
                    text.id = 1
                    text.type = Marker.TEXT_VIEW_FACING
                    text.header.frame_id = self.base_frame_id
                    text.header.stamp = rospy.Time.now()
                    text.ns = "descriptor"
                    text.action = Marker.ADD
                    text.pose.orientation.w = 1.0

                    text.pose.position.x = tx
                    text.pose.position.y = ty
                    text.pose.position.z = 1 + 0.2
                    # TEXT_VIEW_FACING markers use only the z component of scale, specifies the height of an uppercase "A".
                    text.scale.z = 0.45

                    # we show next state as text
                    (isValid,
                     state) = self.getQSRState(xh0, yh0, xh1, yh1, xr0, yr0,
                                               xr1, yr1)

                    if isValid:
                        text.text = 'QTC State: ' + state
                        # yellow color
                        text.color.r = text.color.g = 1.0
                        text.color.a = 1.0
                    else:
                        text.text = 'Unknown state'
                        # red color
                        text.color.r = 1.0
                        text.color.a = 1.0

                    data.markers.append(text)

                    # Finally publish .......................
                    self.visual_pub.publish(data)

        rospy.logdebug_throttle(
            1, "Node [" + rospy.get_name() + "] " + "Updated visuals ... ")

    def isBigger(self, rel_amount, amount1, amount0):
        dif = np.abs(amount0 - amount1)
        ans = True
        if amount0 > 0:
            rel_dif = dif / amount0
            ans = rel_dif > rel_amount
        else:
            # if the original amount was 0 we cannot compute a relative change,
            # so treat it as "not bigger" (this also avoids a division by zero)
            ans = False

        return ans

    def getQSRState(self, xh0, yh0, xh1, yh1, xr0, yr0, xr1, yr1):
        td = self.sampling_time

        isValid = False
        state = None
        human_os = [
            Object_State(name="human_os", timestamp=0, x=xh0, y=yh0),
            Object_State(name="human_os", timestamp=td, x=xh1, y=yh1)
        ]

        robot_os = [
            Object_State(name="robot_os", timestamp=0, x=xr0, y=yr0),
            Object_State(name="robot_os", timestamp=td, x=xr1, y=yr1)
        ]

        # make some input data
        world = World_Trace()

        world.add_object_state_series(human_os)
        world.add_object_state_series(robot_os)

        # make a QSRlib request message
        qsrlib_request_message = QSRlib_Request_Message(
            self.which_qsr, world, self.dynammic_args)

        # request your QSRs
        qsrlib_response_message = self.qsrlib.request_qsrs(
            req_msg=qsrlib_request_message)

        # should have 1 timestamp
        t = qsrlib_response_message.qsrs.get_sorted_timestamps()
        if len(t) != 1:
            rospy.logerr(
                "Node [" + rospy.get_name() +
                "@getQSRState] : response timestamp message lenght is not 1.")
            return (isValid, state)
        t = t[0]

        # should have one value: human to robot
        v = qsrlib_response_message.qsrs.trace[t].qsrs.values()
        if len(v) != 1:
            rospy.logerr(
                "Node [" + rospy.get_name() +
                "@getQSRState] : response values message lenght is not 1.")
            return (isValid, state)
        v = v[0]
        state = v.qsr.values()
        if len(state) != 1:
            rospy.logerr(
                "Node [" + rospy.get_name() +
                "@getQSRState] : response state message lenght is not 1.")
            return (isValid, state)
        state = state[0]

        if state in self.state_forbidden:
            isValid = False
        else:
            # OK, maybe it is valid ...
            isValid = True

        return (isValid, state)
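
    # For 'qtccs' the state extracted above is a comma-separated string of four QTC
    # symbols (each one of '-', '0', '+'), e.g. '0,-,0,+'; this is the same format used
    # in self.state_forbidden ('-,-,-,-').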

    def getNextHumanState(self, xh0, yh0, hh0, vh0, wh0):
        """Simplest way to get next human position given current state """
        hh1 = hh0 + wh0 * self.sampling_time
        xh1 = xh0 + vh0 * np.cos(hh1) * self.sampling_time
        yh1 = yh0 + vh0 * np.sin(hh1) * self.sampling_time
        return (xh1, yh1, hh1)
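
    # Worked example (values made up): with xh0 = yh0 = 0, hh0 = 0, vh0 = 1.0 m/s,
    # wh0 = 0.5 rad/s and sampling_time = 0.6 s this predicts hh1 = 0.3 rad,
    # xh1 = 0.6 * cos(0.3) ~= 0.57 m and yh1 = 0.6 * sin(0.3) ~= 0.18 m.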

    def getHumanState(self):
        """Get human position, orientation and speeds from current tracking"""

        xh0 = self.closest_human_pose.pose.pose.position.x
        yh0 = self.closest_human_pose.pose.pose.position.y
        hh0 = self.getRotation(self.closest_human_pose.pose.pose.orientation)

        vh0 = np.sqrt((self.closest_human_twist.twist.twist.linear.x**2) +
                      (self.closest_human_twist.twist.twist.linear.y**2))
        wh0 = self.closest_human_twist.twist.twist.angular.z
        # np.atan2(self.closest_human_twist.linear.y,
        #          self.closest_human_twist.linear.x)
        return (xh0, yh0, hh0, vh0, wh0)

    def getRotation(self, orientation_q):
        orientation_list = [
            orientation_q.x, orientation_q.y, orientation_q.z, orientation_q.w
        ]
        (roll, pitch, yaw) = euler_from_quaternion(orientation_list)
        # quat = quaternion_from_euler(roll, pitch, yaw)
        return yaw

    def updateConstraints(self):
        with self.data_lock:
            # 1. get human pose and most likely speed
            if (self.closest_human_pose) and (self.closest_human_twist):
                # current human position
                (xh0, yh0, hh0, vh0, wh0) = self.getHumanState()

                # next likely human position
                (xh1, yh1,
                 hh1) = self.getNextHumanState(xh0, yh0, hh0, vh0, wh0)
                rospy.loginfo_throttle(
                    5, "Node [" + rospy.get_name() + "] " +
                    "Next most likely human state: Pose ( " + str(xh1) + ", " +
                    str(yh1) + ", " + str(hh1 * 180.0 / np.pi) + " deg), " +
                    "Speed ( " + str(vh0) + " m/sec, " +
                    str(wh0 * 180.0 / np.pi) + " deg/sec) ")

                # What possible speeds robot can achieve from current one
                vr_space = self.vr_space_inc + self.robot_v
                wr_space = self.wr_space_inc + self.robot_w

                # Create 2D speed space grid
                V, W = np.meshgrid(vr_space, wr_space)

                # robot position in base_link is ... 0!
                hr0 = 0
                xr0 = 0
                yr0 = 0
                # And from that, what possible points the robot can reach
                Hr1 = hr0 + W * self.sampling_time
                Xr1 = xr0 + V * np.cos(Hr1) * self.sampling_time
                Yr1 = yr0 + V * np.sin(Hr1) * self.sampling_time

                T = np.zeros_like(Xr1).flatten()
                all_states = []
                for i in range(0, len(Xr1.flatten())):
                    xr1 = Xr1.flatten()[i]
                    yr1 = Yr1.flatten()[i]
                    hr1 = Hr1.flatten()[i]
                    (T[i], state) = self.isValidState(xh0, yh0, xh1, yh1, xr0,
                                                      yr0, xr1, yr1)

                    all_states.append(state)
                    # T[i]=1, allowed, =0 not allowed...

                T = T.reshape(Xr1.shape)
                max_area, min_i, min_j, max_i, max_j = self.maximalRectangle(T)
                self.allowed_min_v = V[min_i, min_j]
                self.allowed_max_v = V[max_i, max_j]
                self.allowed_min_w = W[min_i, min_j]
                self.allowed_max_w = W[max_i, max_j]

    def sendNewConstraints(self):
        with self.data_lock:
            newV = np.min(
                [np.abs(self.allowed_min_v),
                 np.abs(self.allowed_max_v)])
            newW = np.min(
                [np.abs(self.allowed_min_w),
                 np.abs(self.allowed_max_w)])

            if self.isBigger(0.05, newV, self.constraint_v) or self.isBigger(
                    0.05, newW, self.constraint_w):
                self.constraint_v = newV
                self.constraint_w = newW
                msg = Float64MultiArray()
                msg.data.append(self.constraint_v)
                msg.data.append(self.constraint_w)
                self.velocity_constraints_pub.publish(msg)
                rospy.loginfo_throttle(
                    3, "Node [" + rospy.get_name() + "] " +
                    "New speed Constraints sent : V ( " +
                    str(self.constraint_v) + " m/s), " + "W ( " +
                    str(self.constraint_w * 180.0 / np.pi) + " deg/sec) ")
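
    # The message published above is a plain Float64MultiArray whose data field is
    # [constraint_v, constraint_w] (m/s and rad/s). A hypothetical consumer could read
    # it back as, for example:
    #   def constraints_callback(msg):
    #       v_limit, w_limit = msg.data[0], msg.data[1]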

    def maximalRectangle(self, M):
        n, m = M.shape

        # Count number of 1 bits in each column
        cumul_height = np.zeros((n, m))

        # first row
        cumul_height[0, :] = M[0, :]

        # rest of matrix
        for i in range(1, n):
            for j in range(0, m):
                if M[i, j] == 1:
                    cumul_height[i, j] = cumul_height[i - 1, j] + 1
                else:
                    cumul_height[i, j] = 0

        max_area = 0
        inc_i = 0
        inc_j = 0
        max_i = 0
        min_i = 0
        min_j = 0
        max_j = 0

        # for each row we
        for i in range(0, n):
            # We compute all contiguous sublists of each row
            row = cumul_height[i, :]
            for width in range(1, len(row) + 1):
                for offset in range(0, m - width + 1):
                    slice = row[0 + offset:offset + width]
                    slice_area = width * np.min(slice)
                    if slice_area > max_area:
                        max_area = slice_area
                        inc_i = int(np.min(slice))
                        inc_j = width  # len(slice)
                        max_i = i
                        min_i = max_i - inc_i + 1
                        min_j = offset
                        max_j = min_j + inc_j - 1

        return max_area, min_i, min_j, max_i, max_j
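
    # Worked example (made-up matrix): for M = [[1, 1, 0],
    #                                           [1, 1, 1]]
    # the per-column heights become [[1, 1, 0], [2, 2, 1]]; the largest all-ones
    # rectangle is the 2x2 block in the first two columns, so the method returns
    # (4, 0, 0, 1, 1) = (max_area, min_i, min_j, max_i, max_j).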
Beispiel #36
0
    for t in qsrlib_response_message.qsrs.get_sorted_timestamps():
        # start the line with the timestamp followed by "::"
        foo = str(t) + " :: "
        for k, v in zip(qsrlib_response_message.qsrs.trace[t].qsrs.keys(),
                        qsrlib_response_message.qsrs.trace[t].qsrs.values()):
            # append each key (object pair) and its value (relation) to foo
            foo += str(k) + ":" + str(v.qsr) + "; "
        print(foo)


#==============================================================================================================#

#=================This section deals with parsing the user input from the terminal===============================#

#create a Qsrlib() object
qsrlib = QSRlib()

#creates an argparse object, which will be used to read from the terminal as well as to check the arguments
parser = argparse.ArgumentParser(
    description='Get a Qualitative Spatial Representation ')

#we use 'add_argument' to tell the parser object what type of input we expect from the terminal
parser.add_argument('qsr',
                    type=str,
                    nargs='+',
                    help='enter a qsr from the already listed ones')

#this reads the input from the terminal by using the parse_args() function
#this only accepts arguments of the type specified in the add_argument() function and can work with lists as well as individual arguments
args = parser.parse_args()
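
# Usage sketch (the script name is hypothetical): the positional 'qsr' argument accepts
# one or more QSR names, e.g.
#   python qsr_example.py argd qtcbs
# which makes args.qsr == ['argd', 'qtcbs'].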
Beispiel #37
0
class QTCStatePublisherNode():

    # Must have __init__(self) function for a class, similar to a C++ class constructor.
    def __init__(self):

        # ................................................................
        # read ros parameters
        self.loadROSParams()

        # ................................................................
        # Other config params and attributes
        self.data_lock = Lock()

        self.closest_human_pose = None
        self.closest_human_twist = None

        # robot position                
        self.prev_robotPoseSt = None
        self.robotPoseSt = None

        # timestamp to get speeds...
        self.prev_time = rospy.Time.now()
        self.robotPose_time = rospy.Time.now()
        # qsrlib data...
        self.qsrlib = QSRlib()

        # TODO: add as parameter
        self.which_qsr = 'qtccs'
        
        # TODO: add as parameter
        self.dynammic_args = {"qtccs": {"no_collapse": True, "quantisation_factor": 0.01,
                                        "validate": False, "qsrs_for": [("human_os", "robot_os")]}}
        
        # sensible default values ...
        self.closest_human_pose = None
        self.closest_human_twist = None
        

        # ................................................................
        # start ros subs/pubs/servs....
        self.initROS()

        rospy.loginfo("Node [" + rospy.get_name() + "] entering spin...")


    def loadROSParams(self):
        self.robot_id = rospy.get_param('~robot_id', 4)

        # human detection method: peopletracker(bayes, from STRANDS) OR trackedpersons (SPENCER)
        self.human_detection_method = rospy.get_param(
            '~human_detection_method', 'trackedpersons')

        # human detections using bayes peopletracker
        self.peopletracker_topic = rospy.get_param(
            '~peopletracker_topic', '/robot'+str(self.robot_id)+'/people_tracker/positions')

        # human tracking trackedpersons topic
        self.human_tracking_topic = rospy.get_param(
            '~human_tracking_topic', '/robot'+str(self.robot_id)+'/perception/tracked_persons')

        # publisher for our detection
        self.qtc_state_topic = rospy.get_param(
            '~qtc_state_topic', '/robot'+str(self.robot_id)+'/qsr/qtc_state')


        # points used in qtc are also published
        self.qtc_points_topic_name = rospy.get_param('~qtc_points_topic_name', '/robot' + str(self.robot_id) + '/qsr/qtc_points')

        # current robot position from tf tree
        self.robot_pose_topic_name = rospy.get_param('~robot_pose_topic_name', '/robot' + str(self.robot_id) + '/robot_poseST')

        # How far in the future we consider next state 
        self.sampling_time = rospy.get_param(
            '~sampling_time', 0.6)    

        # global frame id: both robot and human pose will be referred to this one
        self.global_frame_id = rospy.get_param('~global_frame', 'map_laser2d')
        # in tf2, frames do not have the initial slash
        if (self.global_frame_id[0] == '/'):
            self.global_frame_id = self.global_frame_id[1:]
        
        # transform tf_timeout
        timeout = rospy.get_param('~tf_timeout', 2)
        self.tf_timeout = rospy.Duration(timeout)

    def initROS(self):
        # publishers
        self.qtc_state_pub = rospy.Publisher(self.qtc_state_topic, String, queue_size=1)
        self.qtc_points_pub = rospy.Publisher(self.qtc_points_topic_name, Float64MultiArray, queue_size=1)

        # service clients
        # none here

        # subscribers and listeners
        rospy.Subscriber(self.robot_pose_topic_name, PoseStamped, self.robot_pose_callback, queue_size=1)

        self.tfBuffer = tf2_ros.Buffer()
        self.listener = tf2_ros.TransformListener(self.tfBuffer)

        # we either use spencer or bayes ...
        if self.human_detection_method=='peopletracker':
            rospy.Subscriber(self.peopletracker_topic, PeopleTracker, self.peopletracker_callback, queue_size=1)
        elif self.human_detection_method=='trackedpersons':
            rospy.Subscriber(self.human_tracking_topic, TrackedPersons, self.spencer_human_tracking_callback, queue_size=1)
        else:
            rospy.logerr("Node [" + rospy.get_name() + "] Provided detection method  ("+self.human_detection_method+") is not peopletracker/trackedpersons. Defaulting to trackedpersons")
            rospy.Subscriber(self.human_tracking_topic, TrackedPersons,
                        self.spencer_human_tracking_callback, queue_size=1)

        # service servers
        # none here

    # .............................................................................................................

    # we use robot pose topic to know robot position and speed
    def robot_pose_callback(self, msg):
        with self.data_lock:   
            self.prev_time = self.robotPose_time
            self.prev_robotPoseSt = self.robotPoseSt
            self.robotPose_time = rospy.Time.now()            
            self.robotPoseSt = self.transformPoseSt( msg, self.global_frame_id)        
            

    def spencer_human_tracking_callback(self, msg):
        with self.data_lock:
            # after each callback, reset closest-human data
            self.closest_human_pose = None
            self.closest_human_twist = None
            min_dist = np.inf
            min_i = -1

            # TODO: Maybe it's better if we low pass filter these tracks to keep
            #       just humans that have been detected for a minimum period of time
            #rospy.loginfo("...........................................................")
            for i, trk_person in enumerate(msg.tracks):
                # Create the stamped object
                human_pose = PoseWithCovarianceStamped()
                # msg contains a header with the frame id for all poses
                human_pose.header = msg.header
                human_pose.pose = trk_person.pose

                # from the list of tracked persons, find the closest ...
                (dist, human_pose_glob) = self.getDistToHuman(human_pose)
                #rospy.loginfo("ID: "+str(i)+" Dist: "+str(dist) +" Pose:\n"+str(human_pose))
                if dist < min_dist:
                    min_i = i
                    min_dist = dist
                    self.closest_human_pose = human_pose_glob

                    self.closest_human_twist = self.transformTwistWC(trk_person.twist, msg.header, self.global_frame_id)
                    (xh0, yh0, hh0, vh0, wh0) = self.getHumanState()

                    rospy.logdebug_throttle(1, "Node [" + rospy.get_name() + "] " +
                                            "Closest human at: Pose ( " +
                                            str(xh0) + ", " + str(yh0) + ", " +
                                            str(hh0*180.0/np.pi) + " deg), " +
                                            "Speed ( " +
                                            str(vh0) + " m/sec, " +
                                            str(wh0*180.0/np.pi) + " deg/sec) "
                                            )
            #rospy.loginfo("...........................................................")
            if (min_dist>0):
                self.publishQTCdata()

    def peopletracker_callback(self, msg):
        spencer_msg = self.bayes2spencer(msg)
        self.spencer_human_tracking_callback(spencer_msg)

    #  There is some info here that may not be accurate
    def bayes2spencer(self,bayes_msg):
        spencer_msg=TrackedPersons()
        spencer_msg.header = bayes_msg.header

        for i, pose in enumerate(bayes_msg.poses):
            track=TrackedPerson()
            track.track_id = self.string2uint64(bayes_msg.uuids[i])
            # PoseWithCovariance
            track.pose.pose.position = pose.position
            track.pose.pose.orientation = pose.orientation
            # TwistWithCovariance
            track.twist.twist.linear = bayes_msg.velocities[i]
            # Data not in bayes. Not sure about these ...
            track.pose.covariance = (np.random.normal(0.3,0.1)* np.identity(6)).flatten().tolist()
            track.twist.covariance = (np.random.normal(0.3,0.1)* np.identity(6)).flatten().tolist()
            # we assume 0 here
            # track.twist.twist.angular
            track.is_occluded = False
            track.is_matched = False
            track.detection_id = self.string2uint64(bayes_msg.uuids[i])
            track.age = 0

            spencer_msg.tracks.append(track)


        return spencer_msg

    def string2uint64(self, in_string):        
        ans=UInt64(uuid.UUID(in_string).int)

        return ans

    # ..............................
    # based on http://docs.ros.org/hydro/api/tf/html/c++/transform__listener_8cpp_source.html
    #

    def transformTwist(self, target_frame, msg_in):  # msg_in is a TwistStamped

        # divide twist message into rotational and translational velocities
        v_rot = [msg_in.twist.angular.x, msg_in.twist.angular.y, msg_in.twist.angular.z]
        v_trans = [msg_in.twist.linear.x, msg_in.twist.linear.y, msg_in.twist.linear.z]

        # get the transform between target frame and twist frame
        try:
            frame_tf = self.tfBuffer.lookup_transform(
                target_frame, msg_in.header.frame_id,  msg_in.header.stamp)
        except (tf2_ros.LookupException, tf2_ros.ConnectivityException, tf2_ros.ExtrapolationException) as e:
            rospy.logerr("Node [" + rospy.get_name() + "] Can't find transform. (" + str(e) + ") ")
            return None

        q_ = frame_tf.transform.rotation
        x_ = frame_tf.transform.translation

        # get the translation matrix from translation vector
        trans_vec = [x_.x, x_.y, x_.z]

        # get the rotation matrix from quaternion.
        rot_mat = quaternion_matrix([q_.x, q_.y, q_.z, q_.w])

        # rotate vector using
        out_rot = np.dot(rot_mat[:3, :3], v_rot)
        out_vel = np.dot(rot_mat[:3, :3], v_trans) + np.cross(trans_vec, out_rot)

        interframe_twist = TwistStamped()
        # we assume the frames are not moving relative to each other (not fully verified)

        msg_out = TwistStamped()
        msg_out.header.stamp = msg_in.header.stamp
        msg_out.header.frame_id = target_frame

        #
        msg_out.twist.linear.x = out_vel[0] + interframe_twist.twist.linear.x
        msg_out.twist.linear.y = out_vel[1] + interframe_twist.twist.linear.y
        msg_out.twist.linear.z = out_vel[2] + interframe_twist.twist.linear.z

        # final angular speed is input angular speed, rotated plus interframe speed
        msg_out.twist.angular.x = out_rot[0] + interframe_twist.twist.angular.x
        msg_out.twist.angular.y = out_rot[1] + interframe_twist.twist.angular.y
        msg_out.twist.angular.z = out_rot[2] + interframe_twist.twist.angular.z

        return msg_out  # geometry_msgs::TwistStamped
        # ..............................

    def transformTwistWC(self, twistWC, header, frame_id_out):
        """Returns a TwistWithCovarianceStamped in base frame"""
        # Create the stamped object
        twistS = TwistStamped()
        twistS.header = header
        twistS.twist = twistWC.twist

        twistS_base = self.transformTwist(frame_id_out, twistS)

        twistWC_out = TwistWithCovarianceStamped()
        twistWC_out.header = twistS_base.header
        twistWC_out.twist.twist = twistS_base.twist
        twistWC_out.twist.covariance = twistWC.covariance

        return twistWC_out

    def transformPoseSt(self, poseSt_in, frame_id_out):
        """Transform pose into provided frame id."""
        poseSt_out = None
        now = rospy.Time.now()

        # in tf2, frames do not have the initial slash
        if (frame_id_out[0] == '/'):
            frame_id_out = frame_id_out[1:]
        # in tf2, frames do not have the initial slash
        if (poseSt_in.header.frame_id[0] == '/'):
            poseSt_in.header.frame_id = poseSt_in.header.frame_id[1:]

        try:
            transform = self.tfBuffer.lookup_transform(frame_id_out, poseSt_in.header.frame_id, now, self.tf_timeout)
            poseSt_out = tf2_geometry_msgs.do_transform_pose(poseSt_in, transform)

        except (tf2_ros.LookupException, tf2_ros.ConnectivityException, tf2_ros.ExtrapolationException) as e:
            rospy.logwarn("[%s] transform from (%s) to (%s) failed: (%s).", rospy.get_name(), poseSt_in.header.frame_id, frame_id_out, e)        
        
        return poseSt_out

    def fromRobot2GlobalFrame(self, x, y, h):
        xg = yg = hg = None
        poseSt = PoseStamped()
        poseSt.header.frame_id = self.robotPoseSt.header.frame_id
        poseSt.pose.position.x = x
        poseSt.pose.position.y = y
        poseSt.pose.position.z = 0
        poseSt.pose.orientation = Quaternion(*quaternion_from_euler(0, 0, h))

        if (self.robotPoseSt.header.frame_id!=self.global_frame_id):
            poseStG = self.transformPoseSt(poseSt, self.global_frame_id)
        else:
            poseStG = poseSt

        if (poseStG):
            xg = poseStG.pose.position.x
            yg = poseStG.pose.position.y
            hg = self.getRotation(poseStG.pose.orientation)
        
        return (xg,yg,hg)

    def getDistToHuman(self, human_pose_in):
        """Returns distance between human track and robot."""
        dist = np.inf        
        hpose_out = None

        if (self.robotPoseSt):
            hpose = PoseStamped()
            hpose.header = human_pose_in.header
            hpose.pose = human_pose_in.pose.pose

            human_pose_rob = self.transformPoseSt(hpose, self.robotPoseSt.header.frame_id)
            human_pose_glob = self.transformPoseSt(hpose, self.global_frame_id)

            if (human_pose_rob) and (human_pose_glob):
                dist = np.sqrt((human_pose_rob.pose.position.x ** 2) +
                            (human_pose_rob.pose.position.y ** 2))

                hpose_out = PoseWithCovarianceStamped()
                hpose_out.header = human_pose_glob.header
                hpose_out.pose.pose = human_pose_glob.pose
                hpose_out.pose.covariance = human_pose_in.pose.covariance

        return (dist, hpose_out)

    def getNextRobotState(self):
        xr1 = yr1 = hr1 = None
        inc_x = inc_y = inc_h = dt = 0
        if (self.prev_robotPoseSt and self.robotPoseSt and (self.robotPose_time > self.prev_time) ):
                t = [1, 2, 3]
                p = [3, 2, 0]
                np.interp(2.5, t, p)


                inc_x = self.robotPoseSt.pose.position.x - self.prev_robotPoseSt.pose.position.x
                inc_y = self.robotPoseSt.pose.position.y - self.prev_robotPoseSt.pose.position.y                
                inc_h = self.getRotation(self.robotPoseSt.pose.orientation) - self.getRotation(self.prev_robotPoseSt.pose.orientation)        
                dt = (self.sampling_time ) / (self.robotPose_time - self.prev_time).to_sec()

        if self.robotPoseSt:
            xr1 = self.robotPoseSt.pose.position.x + inc_x*dt
            yr1 = self.robotPoseSt.pose.position.y + inc_y*dt
            hr1 = self.getRotation(self.robotPoseSt.pose.orientation) + inc_h*dt

            # only log once two poses are available (prev_robotPoseSt may still be None)
            if self.prev_robotPoseSt:
                rospy.logdebug_throttle(5, "Node [" + rospy.get_name() + "] " + "\n " +
                                        "Robot status: Pose 0 ( " +
                                        '{0:.2f}'.format(self.prev_robotPoseSt.pose.position.x) + ", " + '{0:.2f}'.format(self.prev_robotPoseSt.pose.position.y) + ", " +
                                        '{0:.2f}'.format(self.getRotation(self.prev_robotPoseSt.pose.orientation) * 180.0 / np.pi) + " deg) " +
                                        " at " + '{0:.2f}'.format(self.prev_time.to_sec()) + " secs." + "\n" +
                                        "Robot status: Pose 1 ( " +
                                        '{0:.2f}'.format(self.robotPoseSt.pose.position.x) + ", " + '{0:.2f}'.format(self.robotPoseSt.pose.position.y) + ", " +
                                        '{0:.2f}'.format(self.getRotation(self.robotPoseSt.pose.orientation) * 180.0 / np.pi) + " deg) " +
                                        " at " + '{0:.2f}'.format(self.robotPose_time.to_sec()) + " secs." + "\n" +
                                        "Robot status: Pose 2 ( " +
                                        '{0:.2f}'.format(xr1) + ", " + '{0:.2f}'.format(yr1) + ", " +
                                        '{0:.2f}'.format(hr1 * 180.0 / np.pi) + " deg) " +
                                        " at " + '{0:.2f}'.format(self.sampling_time + self.robotPose_time.to_sec()) + " secs." + "\n"
                                        )

        return (xr1, yr1, hr1)


    def publishQTCdata(self):
            # this method is called by one of the other callbacks, so it's under the lock already
            #with self.data_lock:
            validData = self.closest_human_pose and self.closest_human_twist and self.robotPoseSt
            if validData:                
                # all should be already in global frame ...                
                (xh0, yh0, hh0, vh0, wh0) = self.getHumanState()
                (xh1, yh1, hh1) = self.getNextHumanState(xh0, yh0, hh0, vh0, wh0)

                xr0 = self.robotPoseSt.pose.position.x
                yr0 = self.robotPoseSt.pose.position.y
                hr0 = self.getRotation(self.robotPoseSt.pose.orientation)
                (xr1, yr1, hr1) = self.getNextRobotState()

                # get state only if transforms were successful
                if ((xh0!=None) and (xh1!=None) and(xr0!=None) and(xr1!=None)):                                   
                    (isValid, state) = self.getQSRState(xh0, yh0, xh1, yh1, xr0, yr0, xr1, yr1)
                    self.qtc_state = state
                    self.is_valid = isValid

                    # Finally publish .......................                    
                    if self.is_valid:
                        self.qtc_state_pub.publish(self.qtc_state)
                        qtc_data = Float64MultiArray()
                        qtc_data.data = [xh0, yh0, hh0, xh1, yh1, hh1, xr0, yr0, hr0, xr1, yr1, hr1]
                        self.qtc_points_pub.publish(qtc_data)

            rospy.logdebug_throttle(1, "Node [" + rospy.get_name() + "] " +
                                                "Updated visuals ... "
                                                )
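            # Note (assumption, not shown in this snippet): the two publishers used above are
            # presumably created in initROS(), e.g. with hypothetical topic names:
            #   from std_msgs.msg import String, Float64MultiArray
            #   self.qtc_state_pub  = rospy.Publisher('~qtc_state', String, queue_size=1)
            #   self.qtc_points_pub = rospy.Publisher('~qtc_points', Float64MultiArray, queue_size=1)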

    def getQSRState(self, xh0, yh0, xh1, yh1, xr0, yr0, xr1, yr1):
        td = self.sampling_time

        isValid = False
        state = None
        human_os = [Object_State(name="human_os", timestamp=0, x=xh0, y=yh0),
                    Object_State(name="human_os", timestamp=td, x=xh1, y=yh1)
                    ]

        robot_os = [Object_State(name="robot_os", timestamp=0, x=xr0, y=yr0),
                    Object_State(name="robot_os", timestamp=td, x=xr1, y=yr1)
                    ]

        # make some input data
        world = World_Trace()

        world.add_object_state_series(human_os)
        world.add_object_state_series(robot_os)

        # make a QSRlib request message
        qsrlib_request_message = QSRlib_Request_Message(
            self.which_qsr, world, self.dynammic_args)

        # request your QSRs
        qsrlib_response_message = self.qsrlib.request_qsrs(req_msg=qsrlib_request_message)

        # should have 1 timestamp
        t = qsrlib_response_message.qsrs.get_sorted_timestamps()
        if len(t) != 1:
            rospy.logerr("Node [" + rospy.get_name() +
                         "@getQSRState] : response timestamp message lenght is not 1.")
            return (isValid, state)
        t = t[0]

        # should have one value: human to robot
        v = list(qsrlib_response_message.qsrs.trace[t].qsrs.values())
        if len(v) != 1:
            rospy.logerr("Node [" + rospy.get_name() +
                         "@getQSRState] : response values message lenght is not 1.")
            return (isValid, state)
        v = v[0]
        state = list(v.qsr.values())
        if len(state) != 1:
            rospy.logerr("Node [" + rospy.get_name() +
                         "@getQSRState] : response state message lenght is not 1.")
            return (isValid, state)
        state = state[0]

        # OK, state looks valid ...
        isValid = True

        return (isValid, state)

    def getNextHumanState(self, xh0, yh0, hh0, vh0, wh0):
        """Simplest way to get next human position given current state """
        hh1 = hh0 + wh0*self.sampling_time
        xh1 = xh0 + vh0*np.cos(hh1) * self.sampling_time
        yh1 = yh0 + vh0*np.sin(hh1) * self.sampling_time
        return (xh1, yh1, hh1)
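        # Worked example (assumed values): with xh0 = yh0 = hh0 = 0, vh0 = 1.0 m/s,
        # wh0 = 0.5 rad/s and sampling_time = 1.0 s, the predicted state is
        #   hh1 = 0 + 0.5*1.0          = 0.5 rad
        #   xh1 = 0 + 1.0*cos(0.5)*1.0 ≈ 0.878 m
        #   yh1 = 0 + 1.0*sin(0.5)*1.0 ≈ 0.479 m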

    def getHumanState(self):
        """Get human position, orientation and speeds from current tracking"""
        
        xh0 = self.closest_human_pose.pose.pose.position.x
        yh0 = self.closest_human_pose.pose.pose.position.y
        hh0 = self.getRotation(self.closest_human_pose.pose.pose.orientation)

        vh0 = np.sqrt((self.closest_human_twist.twist.twist.linear.x ** 2) +
                      (self.closest_human_twist.twist.twist.linear.y ** 2))
        wh0 = self.closest_human_twist.twist.twist.angular.z
        # np.atan2(self.closest_human_twist.linear.y,
        #          self.closest_human_twist.linear.x)
        return (xh0, yh0, hh0, vh0, wh0)

    def getRotation(self, orientation_q):
        orientation_list = [orientation_q.x, orientation_q.y, orientation_q.z, orientation_q.w]
        (roll, pitch, yaw) = euler_from_quaternion(orientation_list)
        # quat = quaternion_from_euler(roll, pitch, yaw)
        return yaw
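
For reference, the QTC-C request built in getQSRState above can be reproduced outside ROS. A minimal standalone sketch (the coordinates are made up; the exact relation string depends on the geometry and on the quantisation factor):

from qsrlib.qsrlib import QSRlib, QSRlib_Request_Message
from qsrlib_io.world_trace import Object_State, World_Trace

qsrlib = QSRlib()
world = World_Trace()
# two timestamps per agent: human moving towards the robot, robot moving towards the human
world.add_object_state_series([Object_State(name="human_os", timestamp=0, x=0.0, y=0.0),
                               Object_State(name="human_os", timestamp=1, x=0.5, y=0.0)])
world.add_object_state_series([Object_State(name="robot_os", timestamp=0, x=3.0, y=0.0),
                               Object_State(name="robot_os", timestamp=1, x=2.5, y=0.0)])
dynamic_args = {"qtccs": {"no_collapse": True, "quantisation_factor": 0.01,
                          "validate": False, "qsrs_for": [("human_os", "robot_os")]}}
req = QSRlib_Request_Message(which_qsr="qtccs", input_data=world, dynamic_args=dynamic_args)
ret = qsrlib.request_qsrs(req_msg=req)
for t in ret.qsrs.get_sorted_timestamps():
    for v in ret.qsrs.trace[t].qsrs.values():
        print(v.qsr)  # e.g. {'qtccs': '-,-,0,0'}: both agents approaching, no lateral motion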
        foo = str(t) + ": "
        for k, v in zip(qsrlib_response_message.qsrs.trace[t].qsrs.keys(),
                        qsrlib_response_message.qsrs.trace[t].qsrs.values()):
            foo += str(k) + ":" + str(v.qsr) + "; "
        print(foo)


if __name__ == "__main__":
    load_by_world_name = {"data1": load_input_data1,
                          "data2": load_input_data2,
                          "data3": load_input_data3,
                          "data4": load_input_data4}

    # ****************************************************************************************************
    # create a QSRlib object if there isn't one already
    qsrlib = QSRlib()

    # ****************************************************************************************************
    # parse command line arguments
    options = sorted(qsrlib.get_qsrs_registry().keys())
    parser = argparse.ArgumentParser()
    parser.add_argument("qsr", help="choose qsr: %s" % options, type=str)
    parser.add_argument("-i", "--input", required=True, type=str,
                        help="world name %s" % sorted(load_by_world_name.keys()))
    parser.add_argument("-o", "--output", required=True, type=str, help="where to write json file")
    args = parser.parse_args()
    if args.qsr in options:
        which_qsr = args.qsr
    elif args.qsr == "multiple":
        which_qsr = options[:]
        # todo: qtcs is giving a warning message
def worker_qsrs(chunk):
    (file_, path, config) = chunk
    #print "\n", path, file_
    e = utils.load_e(path, file_)

    dynamic_args = {}
    try:
        dynamic_args['argd'] = config['argd_args']
        dynamic_args['qtcbs'] = config['qtcbs_args']
        dynamic_args["qstag"] = {"params": config['qstag_args']}
        dynamic_args['filters'] = {
            "median_filter": {
                "window": config['qsr_mean_window']
            }
        }  # This has been updated since ECAI paper.
    except KeyError:
        print "check argd, qtcbs, qstag parameters in config file"

    joint_types = {
        'head': 'head',
        'torso': 'torso',
        'left_hand': 'hand',
        'right_hand': 'hand',
        'left_knee': 'knee',
        'right_knee': 'knee',
        'left_shoulder': 'shoulder',
        'right_shoulder': 'shoulder',
        'head-torso': 'tpcc-plane'
    }
    object_types = joint_types.copy()

    for ob, pos in e.objects.items():
        try:
            generic_object = "_".join(ob.split("_")[:-1])
            object_types[ob] = generic_object
            # print "object generic", generic_object
        except Exception:
            print("didn't add: %s" % ob)
    dynamic_args["qstag"]["object_types"] = object_types

    # for i,j in object_types.items():
    #     print ">", i,j
    """1. CREATE QSRs FOR joints & Object """
    qsrlib = QSRlib()
    qsrs_for = []
    for ob, pos in e.objects.items():
        qsrs_for.append((str(ob), 'left_hand'))
        qsrs_for.append((str(ob), 'right_hand'))
        qsrs_for.append((str(ob), 'torso'))
    dynamic_args['argd']["qsrs_for"] = qsrs_for
    dynamic_args['qtcbs']["qsrs_for"] = qsrs_for

    # for (i,j) in qsrs_for:
    #     print ">", (i,j)

    req = QSRlib_Request_Message(config['which_qsr'],
                                 input_data=e.map_world,
                                 dynamic_args=dynamic_args)
    e.qsr_object_frame = qsrlib.request_qsrs(req_msg=req)

    # print ">", e.qsr_object_frame.qstag.graphlets.histogram
    # for i in e.qsr_object_frame.qstag.episodes:
    #     print i
    # sys.exit(1)
    """2. CREATE QSRs for joints - TPCC"""
    # print "TPCC: ",
    # # e.qsr_joint_frame = get_joint_frame_qsrs(file, e.camera_world, joint_types, dynamic_args)
    # qsrs_for = [('head', 'torso', ob) if ob not in ['head', 'torso'] and ob != 'head-torso' else () for ob in joint_types.keys()]
    # dynamic_args['tpcc'] = {"qsrs_for": qsrs_for}
    # dynamic_args["qstag"]["params"] = {"min_rows": 1, "max_rows": 2, "max_eps": 4}
    # qsrlib = QSRlib()
    # req = QSRlib_Request_Message(which_qsr="tpcc", input_data=e.camera_world, dynamic_args=dynamic_args)
    # e.qsr_joints_frame = qsrlib.request_qsrs(req_msg=req)
    # # pretty_print_world_qsr_trace("tpcc", e.qsr_joints_frame)
    # # print e.qsr_joints_frame.qstag.graphlets.histogram
    utils.save_event(e, "Learning/QSR_Worlds")
        foo = str(t) + ": "
        for k, v in zip(qsrlib_response_message.qsrs.trace[t].qsrs.keys(),
                        qsrlib_response_message.qsrs.trace[t].qsrs.values()):
            foo += str(k) + ":" + str(v.qsr) + "; "
        print(foo)


if __name__ == "__main__":
    load_by_world_name = {"data1": load_input_data1,
                          "data2": load_input_data2,
                          "data3": load_input_data3,
                          "data4": load_input_data4}

    # ****************************************************************************************************
    # create a QSRlib object if there isn't one already
    qsrlib = QSRlib()

    # ****************************************************************************************************
    # parse command line arguments
    options = sorted(qsrlib.qsrs_registry.keys())
    parser = argparse.ArgumentParser()
    parser.add_argument("qsr", help="choose qsr: %s" % options, type=str)
    parser.add_argument("-i", "--input", required=True, type=str,
                        help="world name %s" % sorted(load_by_world_name.keys()))
    parser.add_argument("-o", "--output", required=True, type=str, help="where to write json file")
    args = parser.parse_args()
    if args.qsr in options:
        which_qsr = args.qsr
    elif args.qsr == "multiple":
        which_qsr = options[:]
    else:
Beispiel #41
0
    def test(self):
        QSRlib(help=False)
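
The fragment above is presumably part of a unit-test class; a minimal standalone sketch of the same construction, listing the registered QSR types via the registry accessor used in the argparse example earlier:

from qsrlib.qsrlib import QSRlib

qsrlib = QSRlib(help=False)
print(sorted(qsrlib.get_qsrs_registry().keys()))  # e.g. ['argd', 'cardir', 'qtcbs', 'qtccs', ...]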