def worker_qsrs(chunk):
    (file_, path, soma_objects, config) = chunk

    e = utils.load_e(path, file_)

    dynamic_args = {}
    try:
        dynamic_args['argd'] = config['argd_args']
        dynamic_args['qtcbs'] = config['qtcbs_args']
        dynamic_args["qstag"] = {"params" : config['qstag_args']}
        dynamic_args['filters'] = {"median_filter": {"window": config['qsr_mean_window']}}  # This has been updated since ECAI paper.
    except KeyError:
        print "check argd, qtcbs, qstag parameters in config file"


    joint_types = {'head': 'head', 'torso': 'torso', 'left_hand': 'hand', 'right_hand': 'hand', 'left_knee': 'knee', 'right_knee': 'knee',
                   'left_shoulder': 'shoulder', 'right_shoulder': 'shoulder', 'head-torso': 'tpcc-plane'}
    object_types = joint_types.copy()
    add_objects = []
    for region, objects in ce.get_soma_objects().items():
        for o in objects:
            add_objects.append(o)
            try:
                generic_object = "_".join(o.split("_")[:-1])
                object_types[o] = generic_object
            except:
                print "didnt add:", o
    dynamic_args["qstag"]["object_types"] = object_types

    """1. CREATE QSRs FOR joints & Object """
    qsrlib = QSRlib()

    qsrs_for = []
    for ob in add_objects:  # use all collected SOMA objects, not just the last region's
        qsrs_for.append((str(ob), 'left_hand'))
        qsrs_for.append((str(ob), 'right_hand'))
        qsrs_for.append((str(ob), 'torso'))
    dynamic_args['argd']["qsrs_for"] = qsrs_for
    dynamic_args['qtcbs']["qsrs_for"] = qsrs_for

    req = QSRlib_Request_Message(config['which_qsr'], input_data=e.map_world, dynamic_args=dynamic_args)
    e.qsr_object_frame = qsrlib.request_qsrs(req_msg=req)

    # print ">", e.qsr_object_frame.qstag.graphlets.histogram
    # for i in e.qsr_object_frame.qstag.episodes:
    #     print i
    # sys.exit(1)
    """2. CREATE QSRs for joints - TPCC"""
    # print "TPCC: ",
    # # e.qsr_joint_frame = get_joint_frame_qsrs(file, e.camera_world, joint_types, dynamic_args)
    # qsrs_for = [('head', 'torso', ob) if ob not in ['head', 'torso'] and ob != 'head-torso' else () for ob in joint_types.keys()]
    # dynamic_args['tpcc'] = {"qsrs_for": qsrs_for}
    # dynamic_args["qstag"]["params"] = {"min_rows": 1, "max_rows": 2, "max_eps": 4}
    # qsrlib = QSRlib()
    # req = QSRlib_Request_Message(which_qsr="tpcc", input_data=e.camera_world, dynamic_args=dynamic_args)
    # e.qsr_joints_frame = qsrlib.request_qsrs(req_msg=req)
    # # pretty_print_world_qsr_trace("tpcc", e.qsr_joints_frame)
    # # print e.qsr_joints_frame.qstag.graphlets.histogram
    utils.save_event(e, "Learning/QSR_Worlds")
def worker_qsrs(chunk):
    (file_, path, soma_objects, qsr_mean_window) = chunk
    e = utils.load_e(path, file_)

    dynamic_args = {}
    dynamic_args["qstag"] = {}
    dynamic_args['filters'] = {"median_filter": {"window": qsr_mean_window}}  # This has been updated since ECAI paper.

    """ADD OBJECT TYPES into DYNAMIC ARGS """
    joint_types = {'head': 'head', 'torso': 'torso', 'left_hand': 'hand', 'right_hand': 'hand', 'left_knee': 'knee', 'right_knee': 'knee',
                   'left_shoulder': 'shoulder', 'right_shoulder': 'shoulder', 'head-torso': 'tpcc-plane'}
    all_object_types = joint_types.copy()

    add_objects = []
    for region, objects in ce.get_soma_objects().items():
        for o in objects:
            add_objects.append(o)
            try:
                generic_object = "_".join(o.split("_")[:-1])
                all_object_types[o] = generic_object
            except:
                print "didnt add:", o

    dynamic_args["qstag"]["object_types"] = all_object_types

    """1. CREATE QSRs FOR Key joints & Object """
    print " QTC,QDC: "
    qsrlib = QSRlib()
    # print ">>", e.map_world.get_sorted_timestamps()

    # for t in e.map_world.trace:
    #     print "\nt", t
    #     for o, state in e.map_world.trace[t].objects.items():
    #         print o, state.x,state.y,state.z
    # sys.exit(1)

    req = QSRLib_param_mapping(e.map_world, dynamic_args, soma_objects)
    e.qsr_object_frame = qsrlib.request_qsrs(req_msg=req)

    # print ">", e.qsr_object_frame.qstag.graphlets.histogram
    # for i in e.qsr_object_frame.qstag.episodes:
    #     print i
    # sys.exit(1)
    """2. CREATE QSRs for joints - TPCC"""
    # print "TPCC: ",
    # # e.qsr_joint_frame = get_joint_frame_qsrs(file, e.camera_world, joint_types, dynamic_args)
    # qsrs_for = [('head', 'torso', ob) if ob not in ['head', 'torso'] and ob != 'head-torso' else () for ob in joint_types.keys()]
    # dynamic_args['tpcc'] = {"qsrs_for": qsrs_for}
    # dynamic_args["qstag"]["params"] = {"min_rows": 1, "max_rows": 2, "max_eps": 4}
    # qsrlib = QSRlib()
    # req = QSRlib_Request_Message(which_qsr="tpcc", input_data=e.camera_world, dynamic_args=dynamic_args)
    # e.qsr_joints_frame = qsrlib.request_qsrs(req_msg=req)
    # # pretty_print_world_qsr_trace("tpcc", e.qsr_joints_frame)
    # # print e.qsr_joints_frame.qstag.graphlets.histogram
    utils.save_event(e, "Learning/QSR_Worlds")
def create_temp_histograms(qsr_path_, global_codebook, all_graphlets, batch):
    """sequentially create a temporary histogram whilst
    generating the codebook from observations"""
    wordids = []
    wordcts = []
    uuids = []

    for e_cnt, event_file in enumerate(batch):
        (date, time, uuid) = event_file.split("_")
        qsr_path = os.path.join(qsr_path_, date)

        #for e_cnt, event_file in sorted(enumerate(os.listdir(qsr_path))):
        #if event_file.replace(".p","") not in batch: continue
        e = utils.load_e(qsr_path, event_file)

        if len(e.qsr_object_frame.qstag.graphlets.histogram) == 0:
            print e_cnt, event_file, "len:0"
            continue

        e.temp_histogram = np.array([0] * (global_codebook.shape[0]))
        if e.label != "NA": e.uuid += "_" + e.label

        print "  ", e_cnt, e.uuid, "len:", len(
            e.qsr_object_frame.qstag.graphlets.histogram
        )  #, len(e.qsr_joints_frame.qstag.graphlets.histogram)
        # feature_spaces = [e.qsr_object_frame.qstag.graphlets, e.qsr_joints_frame.qstag.graphlets]
        feature_spaces = [e.qsr_object_frame.qstag.graphlets
                          ]  #, e.qsr_joints_frame.qstag.graphlets]

        for cnt, f in enumerate(feature_spaces):
            for freq, hash in zip(f.histogram, f.code_book):
                try:
                    ind = np.where(global_codebook == hash)[0][0]
                    e.temp_histogram[ind] += freq
                # If the hash doesn't exist in the global codebook yet - add it
                except IndexError:
                    global_codebook = np.append(global_codebook, hash)
                    e.temp_histogram = np.append(e.temp_histogram, freq)
                    all_graphlets = np.append(all_graphlets, f.graphlets[hash])
                    # print "\n>", hash, f.graphlets[hash]
        # print "> ",len(e.temp_histogram)
        # print global_codebook, e.temp_histogram, all_graphlets
        # print ">", np.nonzero(e.temp_histogram)[0]
        uuids.append(e.uuid)
        ids = np.nonzero(e.temp_histogram)[0]
        wordids.append(ids)
        wordcts.append(e.temp_histogram[ids])
        # print "<", e.temp_histogram[ids]
        utils.save_event(e, "Learning/Histograms")

    ret = (uuids, wordids, wordcts)
    return global_codebook, all_graphlets, ret
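# A standalone sketch (illustrative values only) of the codebook-growing step above:
# each local (hash, frequency) pair is looked up in the global codebook; a hit
# accumulates into the temporary histogram, a miss appends a new entry to both.
import numpy as np

global_codebook = np.array([111, 222])
temp_histogram = np.array([0] * global_codebook.shape[0])
local_code_book = [222, 333]        # graphlet hashes seen in one event
local_histogram = [4, 1]            # their frequencies in that event

for freq, h in zip(local_histogram, local_code_book):
    try:
        ind = np.where(global_codebook == h)[0][0]
        temp_histogram[ind] += freq                        # known hash: accumulate count
    except IndexError:
        global_codebook = np.append(global_codebook, h)    # unseen hash: grow the codebook
        temp_histogram = np.append(temp_histogram, freq)   # ...and the histogram

print temp_histogram    # -> [0 4 1]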
Example 4
def on_message(message):
	if message.embeds and (message.author.name  in os.environ['DISCORD_AUTHOR']):
		for e in message.embeds:
			t = e['title'] + e['description']
			evt = {'pds':e}
			for p, c in pats:
				m = p.search(t)
				if m:
					evt.update(m.groupdict())
				elif c:
					break
			else:
				utils.save_event(evt)
Example 5
def create_temp_histograms(qsr_path, accu_path):
    """sequentially create a temporary histogram whilst
    generating the codebook from observations"""

    global_codebook = np.array([])
    all_graphlets = np.array([])

    for d_cnt, dir_ in enumerate(sorted(os.listdir(qsr_path))):
        directory = os.path.join(qsr_path, dir_)
        print "dir: ", directory, d_cnt, dir_

        for e_cnt, event_file in enumerate(sorted(os.listdir(directory))):
            e = utils.load_e(directory, event_file)

            if len(e.qsr_object_frame.qstag.graphlets.histogram) == 0:
                print "removed:", e_cnt, event_file
                continue
            e.temp_histogram = np.array([0] * (global_codebook.shape[0]))

            print "  ", d_cnt, e_cnt, e.uuid, "len:", len(e.qsr_object_frame.qstag.graphlets.histogram) #, len(e.qsr_joints_frame.qstag.graphlets.histogram)
            # feature_spaces = [e.qsr_object_frame.qstag.graphlets, e.qsr_joints_frame.qstag.graphlets]
            feature_spaces = [e.qsr_object_frame.qstag.graphlets]#, e.qsr_joints_frame.qstag.graphlets]

            for cnt, f in enumerate(feature_spaces):
                for freq, hash in zip(f.histogram, f.code_book):
                    try:
                        ind = np.where(global_codebook == hash)[0][0]
                        e.temp_histogram[ind] += freq
                    # If the hash doesn't exist in the global codebook yet - add it
                    except IndexError:
                        global_codebook = np.append(global_codebook, hash)
                        e.temp_histogram = np.append(e.temp_histogram, freq)
                        all_graphlets = np.append(all_graphlets, f.graphlets[hash])
                        # print "\n>", hash, f.graphlets[hash]

            # print global_codebook, e.temp_histogram, all_graphlets
            utils.save_event(e, "Learning/Histograms")

    print "Code book shape:", global_codebook.shape
    f = open(os.path.join(accu_path, "code_book_all.p"), "wb")
    pickle.dump(global_codebook, f)
    f.close()

    f = open(os.path.join(accu_path, "graphlets_all.p"), "wb")
    pickle.dump(all_graphlets, f)
    f.close()

    return global_codebook.shape[0]
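# Follow-up sketch (hypothetical path): the pickles written above can be read back
# like this before building per-event histograms against the fixed code book.
import os, pickle

accu_path = "/path/to/accumulate_data"            # placeholder
with open(os.path.join(accu_path, "code_book_all.p"), "rb") as f:
    code_book = pickle.load(f)
with open(os.path.join(accu_path, "graphlets_all.p"), "rb") as f:
    graphlets = pickle.load(f)
print "code book length:", code_book.shape[0]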
def get_event(recording, path, soma_objects, reduce_frame_rate=2, mean_window=5):
    """create event class from a recording"""

    """directories containing the data"""
    d1 = os.path.join(path, recording)
    d_sk = os.path.join(d1, "skeleton/")
    d_robot = os.path.join(d1, "robot/")

    """information stored in the filename"""

    try:
        uuid = recording.split("_")[-1]
        date = recording.split("_")[0]
        time = recording.split("_")[1]
    except:
        print "recording not found"
        return
    print "uid: %s. date: %s. time: %s." % (uuid, date, time)

    """initialise event"""
    e = event(uuid, d1)

    """get the skeleton data from each timepoint file"""
    sk_files = [f for f in os.listdir(d_sk) if os.path.isfile(os.path.join(d_sk, f))]

    """reduce the number of frames by a rate. Re-number from 1."""
    frame = 1
    for file in sorted(sk_files):
        original_frame = int(file.split(".")[0].split("_")[1])
        if original_frame % reduce_frame_rate != 0:
            continue

        e.skeleton_data[frame] = {}
        e.sorted_timestamps.append(frame)

        f1 = open(d_sk + file, "r")
        for count, line in enumerate(f1):
            if count == 0:
                t = line.split(":")[1].split("\n")[0]
                e.sorted_ros_timestamps.append(np.float64(t))

            # read the joint name
            elif (count - 1) % 11 == 0:
                j = line.split("\n")[0]
                e.skeleton_data[frame][j] = []
            # read the x value
            elif (count - 1) % 11 == 2:
                a = float(line.split("\n")[0].split(":")[1])
                e.skeleton_data[frame][j].append(a)
            # read the y value
            elif (count - 1) % 11 == 3:
                a = float(line.split("\n")[0].split(":")[1])
                e.skeleton_data[frame][j].append(a)
            # read the z value
            elif (count - 1) % 11 == 4:
                a = float(line.split("\n")[0].split(":")[1])
                e.skeleton_data[frame][j].append(a)

        frame += 1
    # for frame, data in  e.skeleton_data.items():
    # print frame, data['head']
    # sys.exit(1)

    """ apply a skeleton data filter and create a QSRLib.World_Trace object"""
    # e.apply_mean_filter(window_length=mean_window)
    e.apply_median_filter(mean_window)

    """ read robot odom data"""
    r_files = [f for f in os.listdir(d_robot) if os.path.isfile(os.path.join(d_robot, f))]
    for file in sorted(r_files):
        frame = int(file.split(".")[0].split("_")[1])
        e.robot_data[frame] = [[], []]
        f1 = open(d_robot + file, "r")
        for count, line in enumerate(f1):
            if count == 1:  # read the x value
                a = float(line.split("\n")[0].split(":")[1])
                e.robot_data[frame][0].append(a)
            elif count == 2:  # read the y value
                a = float(line.split("\n")[0].split(":")[1])
                e.robot_data[frame][0].append(a)
            elif count == 3:  # read the z value
                a = float(line.split("\n")[0].split(":")[1])
                e.robot_data[frame][0].append(a)
            elif count == 5:  # read roll pitch yaw
                ax = float(line.split("\n")[0].split(":")[1])
            elif count == 6:
                ay = float(line.split("\n")[0].split(":")[1])
            elif count == 7:
                az = float(line.split("\n")[0].split(":")[1])
            elif count == 8:
                aw = float(line.split("\n")[0].split(":")[1])
            elif count == 10:
                pan = float(line.split("\n")[0].split(":")[1])
            elif count == 11:
                tilt = float(line.split("\n")[0].split(":")[1])

                # ax,ay,az,aw
                roll, pitch, yaw = euler_from_quaternion([ax, ay, az, aw])  # odom
                # print ">", roll, pitch, yaw
                yaw += pan     # *math.pi / 180.  # this adds the pan of the ptu state when recording took place.
                pitch += tilt  # *math.pi / 180.  # this adds the tilt of the ptu state when recording took place.
                e.robot_data[frame][1] = [roll, pitch, yaw]

    # add the map frame data for the skeleton detection
    for frame in e.sorted_timestamps:
        """Note frame does not start from 0. It is the actual file frame number"""

        e.map_frame_data[frame] = {}
        xr, yr, zr = e.robot_data[frame][0]
        yawr = e.robot_data[frame][1][2]
        pr = e.robot_data[frame][1][1]

        #  because the Nite tracker has z as depth, height as y and left/right as x
        #  we translate this to the map frame with x, y and z as height.
        for joint, (y, z, x, x2d, y2d) in e.filtered_skeleton_data[frame].items():
            # transformation from camera to map
            rot_y = np.matrix([[np.cos(pr), 0, np.sin(pr)], [0, 1, 0], [-np.sin(pr), 0, np.cos(pr)]])
            rot_z = np.matrix([[np.cos(yawr), -np.sin(yawr), 0], [np.sin(yawr), np.cos(yawr), 0], [0, 0, 1]])
            rot = rot_z * rot_y

            pos_r = np.matrix([[xr], [yr], [zr + 1.66]])  # robot's position in map frame
            pos_p = np.matrix([[x], [-y], [-z]])  # person's position in camera frame

            map_pos = rot * pos_p + pos_r  # person's position in map frame
            x_mf = map_pos[0, 0]
            y_mf = map_pos[1, 0]
            z_mf = map_pos[2, 0]

            j = (x_mf, y_mf, z_mf)
            e.map_frame_data[frame][joint] = j

    # for i in e.sorted_timestamps:
    #     print i, e.map_frame_data[i]['head'], e.map_frame_data[i]['left_hand']#, e.map_frame_data[i]['right_hand'] #e.skeleton_data[i]['right_hand'], e.map_frame_data[i]['right_hand']   , yaw, pitch
    # sys.exit(1)

    e.get_world_frame_trace(soma_objects)
    utils.save_event(e, "Learning/Events")
Example 7
# -*- coding: utf-8 -*-
import code, datetime, time, rocket, utils, os
from random import shuffle

data = []
with open('/scan/.scan') as f:
    for r in f.read().split('\n'):
        try:
            u, *c = r.split(' ')
            if u:
                c = [float(v) for v in c]
                data.append((u, c))
        except:
            pass
while True:
    shuffle(data)
    for u, c in data:
        R = rocket.Rocket(u)
        i = j = 0
        for g in R.gyms(c)['gyms']:
            if g.get('raid_end', False):
                if utils.save_event(g):
                    i += 1
                else:
                    j += 1
        for p in R.pokemons(c)['pokemons']:
            if utils.save_event(p):
                i += 1
            else:
                j += 1
    time.sleep(10)
def get_event(recording, path, soma_objects, config):
    """create event class from a recording"""
    """directories containing the data"""
    d1 = os.path.join(path, recording)
    if config["use_cpm"]:
        d_sk = os.path.join(d1, 'cpm_skeleton/')
    else:
        d_sk = os.path.join(d1, 'skeleton/')
    d_robot = os.path.join(d1, 'robot/')

    # If recording has been restricted with ROIs:
    region = ""
    if os.path.isfile(os.path.join(d1, 'meta.txt')):
        f1 = open(os.path.join(d1, 'meta.txt'), 'r')
        for count, line in enumerate(f1):
            if count == 0:
                region = line.split('\n')[0].split(':')[1].replace(" ", "")

    objects = {}
    try:
        objects = soma_objects[region]
    except KeyError:
        for r, objs in soma_objects.items():
            for ob, position in objs.items():
                objects[ob] = position

    try:
        """information stored in the filename - differently for ecai dataset"""
        date = recording.split('_')[0]
        time = recording.split('_')[1]
        uuid = recording.split('_')[2]
    except IndexError:
        date = ""
        time = recording.split('_')[0]
        uuid = recording.split('_')[1]
    print "date: %s. time: %s. uid: %s. " % (date, time, uuid),
    # print "objs: %s" % use_objects
    labels = get_labels(d1, d_sk)

    for iter, (label, st, end) in labels.items():
        # print iter
        """initialise event"""
        e = event(uuid, d1, st, end, label)
        e.objects = objects
        """get the skeleton data from each timepoint file"""
        sk_files = [
            f for f in os.listdir(d_sk)
            if os.path.isfile(os.path.join(d_sk, f))
        ]
        """reduce the number of frames by a rate. Re-number from 1."""
        # frame = 1
        for file in sorted(sk_files):
            # original_frame = int(file.split('.')[0].split('_')[1])
            # if original_frame % config['reduce_frame_rate'] != 0: continue

            # new file format: "cpm_skl_00540.txt"
            if config["use_cpm"]:
                frame = int(file.split('.')[0].split('_')[-1])
            else:
                frame = int(file.split('.')[0].split('_')[1])

            if frame < int(st) or frame > int(end): continue
            e.sorted_timestamps.append(frame)
            f1 = open(d_sk + file, 'r')
            # e.skeleton_data[frame] = {}
            e.skeleton_data[frame] = get_openni_values(f1)

            # frame+=1
            # if config['rec_inc_confidence_vals']:
            #     for count,line in enumerate(f1):
            #         if count == 0:
            #             t = line.split(':')[1].split('\n')[0]
            #             e.sorted_ros_timestamps.append(np.float64(t))
            #
            #         # read the joint name
            #         elif (count-1)%11 == 0:
            #             j = line.split('\n')[0]
            #             e.skeleton_data[frame][j] = []
            #         # read the x value
            #         elif (count-1)%11 == 2:
            #             a = float(line.split('\n')[0].split(':')[1])
            #             e.skeleton_data[frame][j].append(a)
            #         # read the y value
            #         elif (count-1)%11 == 3:
            #             a = float(line.split('\n')[0].split(':')[1])
            #             e.skeleton_data[frame][j].append(a)
            #         # read the z value
            #         elif (count-1)%11 == 4:
            #             a = float(line.split('\n')[0].split(':')[1])
            #             e.skeleton_data[frame][j].append(a)
            # else:
            #     for count,line in enumerate(f1):
            #         if count == 0:
            #             t = line.split(':')[1].split('\n')[0]
            #             e.sorted_ros_timestamps.append(np.float64(t))
            #
            #         # read the joint name
            #         elif (count-1)%10 == 0:
            #             j = line.split('\n')[0]
            #             e.skeleton_data[frame][j] = []
            #         # read the x value
            #         elif (count-1)%10 == 2:
            #             a = float(line.split('\n')[0].split(':')[1])
            #             e.skeleton_data[frame][j].append(a)
            #         # read the y value
            #         elif (count-1)%10 == 3:
            #             a = float(line.split('\n')[0].split(':')[1])
            #             e.skeleton_data[frame][j].append(a)
            #         # read the z value
            #         elif (count-1)%10 == 4:
            #             a = float(line.split('\n')[0].split(':')[1])
            #             e.skeleton_data[frame][j].append(a)

        # for frame, data in  e.skeleton_data.items():
        # print frame, data['head']
        # sys.exit(1)
        """ apply a skeleton data filter and create a QSRLib.World_Trace object"""
        # e.apply_mean_filter(window_length=config['joints_mean_window'])
        if not e.apply_median_filter(config['joints_mean_window']): return
        """ read robot odom data"""

        r_files = [
            f for f in os.listdir(d_robot)
            if os.path.isfile(os.path.join(d_robot, f))
        ]
        # frame = 1
        for file in sorted(r_files):
            frame = int(file.split('.')[0].split('_')[1])
            if frame not in e.skeleton_data.keys(): continue

            # if original_frame % config['reduce_frame_rate'] != 0: continue
            e.robot_data[frame] = [[], []]
            f1 = open(d_robot + file, 'r')
            for count, line in enumerate(f1):
                # print count, line, config['ptu_vals_stored']

                if count == 1:  # read the x value
                    a = float(line.split('\n')[0].split(':')[1])
                    e.robot_data[frame][0].append(a)
                elif count == 2:  # read the y value
                    a = float(line.split('\n')[0].split(':')[1])
                    e.robot_data[frame][0].append(a)
                elif count == 3:  # read the z value
                    a = float(line.split('\n')[0].split(':')[1])
                    e.robot_data[frame][0].append(a)
                elif count == 5:  # read roll pitch yaw
                    ax = float(line.split('\n')[0].split(':')[1])
                elif count == 6:
                    ay = float(line.split('\n')[0].split(':')[1])
                elif count == 7:
                    az = float(line.split('\n')[0].split(':')[1])
                elif count == 8:
                    aw = float(line.split('\n')[0].split(':')[1])

                    if not config['ptu_vals_stored']:
                        roll, pitch, yaw = euler_from_quaternion(
                            [ax, ay, az, aw])  #odom
                        pitch = 10 * math.pi / 180.  #we pointed the pan tilt 10 degrees
                        e.robot_data[frame][1] = [roll, pitch, yaw]

                elif count > 8 and config['ptu_vals_stored']:
                    if count == 10:
                        pan = float(line.split('\n')[0].split(':')[1])
                    elif count == 11:
                        tilt = float(line.split('\n')[0].split(':')[1])
                        roll, pitch, yaw = euler_from_quaternion(
                            [ax, ay, az, aw])  #odom
                        yaw += pan  #*math.pi / 180.                   # this adds the pan of the ptu state when recording took place.
                        pitch += tilt  #*math.pi / 180.                # this adds the tilt of the ptu state when recording took place.
                        e.robot_data[frame][1] = [roll, pitch, yaw]
            # frame+=1

        # add the map frame data for the skeleton detection
        for frame in e.sorted_timestamps:
            """Note frame does not start from 0. It is the actual file frame number"""
            e.map_frame_data[frame] = {}
            xr, yr, zr = e.robot_data[frame][0]
            yawr = e.robot_data[frame][1][2]
            pr = e.robot_data[frame][1][1]

            #  because the Nite tracker has z as depth, height as y and left/right as x
            #  we translate this to the map frame with x, y and z as height.
            for joint, (y, z, x, x2d, y2d) in e.filtered_skeleton_data[frame].items():
                # transformation from camera to map
                rot_y = np.matrix([[np.cos(pr), 0, np.sin(pr)], [0, 1, 0], [-np.sin(pr), 0, np.cos(pr)]])
                rot_z = np.matrix([[np.cos(yawr), -np.sin(yawr), 0], [np.sin(yawr), np.cos(yawr), 0], [0, 0, 1]])
                rot = rot_z * rot_y

                pos_r = np.matrix([[xr], [yr], [zr + 1.66]])  # robot's position in map frame
                pos_p = np.matrix([[x], [-y], [-z]])  # person's position in camera frame

                map_pos = rot * pos_p + pos_r  # person's position in map frame
                x_mf = map_pos[0, 0]
                y_mf = map_pos[1, 0]
                z_mf = map_pos[2, 0]

                j = (x_mf, y_mf, z_mf)
                e.map_frame_data[frame][joint] = j

        # print ">",e.sorted_timestamps
        # for i in e.sorted_timestamps:
        #     print i, e.map_frame_data[i]['head'], e.map_frame_data[i]['left_hand']#, e.map_frame_data[i]['right_hand'] #e.skeleton_data[i]['right_hand'], e.map_frame_data[i]['right_hand']   , yaw, pitch
        # sys.exit(1)
        if len(e.sorted_timestamps) >= 5:
            e.get_world_frame_trace(e.objects)
            utils.save_event(e, "Learning/Events")
            return True
        else:
            print "  >dont save me - recording too short."
            return False
Example 9
import utils, re, time, code
pats = [
	(re.compile('ate (?P<hour>\d\d):(?P<minute>\d\d):(?P<second>\d\d)'),True),
	(re.compile('http://maps.google.com/maps.q=(?P<latitude>[-.0-9]+),(?P<longitude>[-.0-9]+)'),True),
	(re.compile('^Um (?P<pokemon_name>.+) selvagem'),False),
	(re.compile('^Detectada raid de (?P<raid_pokemon_name>[^!]+)!'),False)]

while True:
	for e in utils.read_raw_events():
		print(e)
		if 'text' in e['evt']:
			t = e['evt']['text']
			evt = {'btg':e['evt']}
			for p, c in pats:
				m = p.search(t)
				if m:
					evt.update(m.groupdict())
				elif c:
					break
			else:
				if ('pokemon_name' in evt) or ('raid_pokemon_name' in evt):
					utils.save_event(evt)
				else:
					print('failed msg'+t)
		utils.close_raw_event(e)
	utils.clean()
	time.sleep(2)
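# Illustrative check (separate snippet, made-up message text) of how the pats list
# above classifies a message: True-flagged patterns are mandatory (a miss breaks out
# of the loop, so the for-else save never runs), False-flagged ones only add fields.
sample = ("Um Pikachu selvagem apareceu! Desaparece ate 12:34:56 "
          "http://maps.google.com/maps?q=-23.5505,-46.6333")
evt = {}
for p, c in pats:
    m = p.search(sample)
    if m:
        evt.update(m.groupdict())
    elif c:
        break
else:
    print(evt)    # hour/minute/second, latitude/longitude and pokemon_name extracted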
Example 10
def worker_qsrs(chunk):
    (file_, path, config) = chunk
    #print "\n", path, file_
    e = utils.load_e(path, file_)

    dynamic_args = {}
    try:
        dynamic_args['argd'] = config['argd_args']
        dynamic_args['qtcbs'] = config['qtcbs_args']
        dynamic_args["qstag"] = {"params": config['qstag_args']}
        dynamic_args['filters'] = {
            "median_filter": {
                "window": config['qsr_mean_window']
            }
        }  # This has been updated since ECAI paper.
    except KeyError:
        print "check argd, qtcbs, qstag parameters in config file"

    joint_types = {
        'head': 'head',
        'torso': 'torso',
        'left_hand': 'hand',
        'right_hand': 'hand',
        'left_knee': 'knee',
        'right_knee': 'knee',
        'left_shoulder': 'shoulder',
        'right_shoulder': 'shoulder',
        'head-torso': 'tpcc-plane'
    }
    object_types = joint_types.copy()

    for ob, pos in e.objects.items():
        try:
            generic_object = "_".join(ob.split("_")[:-1])
            object_types[ob] = generic_object
            # print "object generic", generic_object
        except:
            print "didnt add:", ob
    dynamic_args["qstag"]["object_types"] = object_types

    # for i,j in object_types.items():
    #     print ">", i,j
    """1. CREATE QSRs FOR joints & Object """
    qsrlib = QSRlib()
    qsrs_for = []
    for ob, pos in e.objects.items():
        qsrs_for.append((str(ob), 'left_hand'))
        qsrs_for.append((str(ob), 'right_hand'))
        qsrs_for.append((str(ob), 'torso'))
    dynamic_args['argd']["qsrs_for"] = qsrs_for
    dynamic_args['qtcbs']["qsrs_for"] = qsrs_for

    # for (i,j) in qsrs_for:
    #     print ">", (i,j)

    req = QSRlib_Request_Message(config['which_qsr'],
                                 input_data=e.map_world,
                                 dynamic_args=dynamic_args)
    e.qsr_object_frame = qsrlib.request_qsrs(req_msg=req)

    # print ">", e.qsr_object_frame.qstag.graphlets.histogram
    # for i in e.qsr_object_frame.qstag.episodes:
    #     print i
    # sys.exit(1)
    """2. CREATE QSRs for joints - TPCC"""
    # print "TPCC: ",
    # # e.qsr_joint_frame = get_joint_frame_qsrs(file, e.camera_world, joint_types, dynamic_args)
    # qsrs_for = [('head', 'torso', ob) if ob not in ['head', 'torso'] and ob != 'head-torso' else () for ob in joint_types.keys()]
    # dynamic_args['tpcc'] = {"qsrs_for": qsrs_for}
    # dynamic_args["qstag"]["params"] = {"min_rows": 1, "max_rows": 2, "max_eps": 4}
    # qsrlib = QSRlib()
    # req = QSRlib_Request_Message(which_qsr="tpcc", input_data=e.camera_world, dynamic_args=dynamic_args)
    # e.qsr_joints_frame = qsrlib.request_qsrs(req_msg=req)
    # # pretty_print_world_qsr_trace("tpcc", e.qsr_joints_frame)
    # # print e.qsr_joints_frame.qstag.graphlets.histogram
    utils.save_event(e, "Learning/QSR_Worlds")