Example #1
async def setup():
    """ main function """

    connection = await qtm.connect("127.0.0.1")

    if connection is None:
        return -1

    async with qtm.TakeControl(connection, "password"):

        state = await connection.get_state()
        if state != qtm.QRTEvent.EventConnected:
            await connection.new()
            try:
                await connection.await_event(qtm.QRTEvent.EventConnected, timeout=10)
            except asyncio.TimeoutError:
                LOG.error("Failed to start new measurement")
                return -1

        queue = asyncio.Queue()

        receiver_future = asyncio.ensure_future(package_receiver(queue))

        await connection.stream_frames(components=["2d"], on_packet=queue.put_nowait)

        asyncio.ensure_future(shutdown(30, connection, receiver_future, queue))
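
The package_receiver and shutdown coroutines scheduled above are not part of this snippet; below is a minimal sketch of a matching shutdown helper and a driver for setup(), assuming the default asyncio event loop is used (the exact cleanup steps are assumptions, not the author's code).

async def shutdown(delay, connection, receiver_future, queue):
    """ Stop streaming and the event loop after the given delay """
    await asyncio.sleep(delay)

    # Signal package_receiver to exit, then wait for it to finish
    queue.put_nowait(None)
    await receiver_future

    # Tell QTM to stop streaming and close the connection
    await connection.stream_frames_stop()
    connection.disconnect()

    # Stop the event loop so run_forever() returns
    asyncio.get_event_loop().stop()


if __name__ == "__main__":
    asyncio.ensure_future(setup())
    asyncio.get_event_loop().run_forever()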
Example #2
async def setup():
    """ main function """

    connection = await qtm.connect("127.0.0.1")

    if connection is None:
        return -1

    async with qtm.TakeControl(connection, "password"):

        state = await connection.get_state()
        if state != qtm.QRTEvent.EventConnected:
            await connection.new()
            try:
                await connection.await_event(qtm.QRTEvent.EventConnected, timeout=10)
            except asyncio.TimeoutError:
                LOG.error("Failed to start new measurement")
                return -1

        try:
            cal_response = await connection.calibrate()
        except asyncio.TimeoutError:
            LOG.error("Timeout waiting for calibration result.")
        except Exception as e:
            LOG.error(e)
        else:
            root = etree.fromstring(cal_response)
            print(etree.tostring(root, pretty_print=True).decode())

    # tell qtm to stop streaming
    await connection.stream_frames_stop()

    # stop the event loop, thus exiting the run_forever call
    asyncio.get_event_loop().stop()
Example #3
async def main():

    # Connect to qtm
    connection = await qtm.connect("127.0.0.1")

    # Connection failed?
    if connection is None:
        print("Failed to connect")
        return

    # Take control of qtm, context manager will automatically release control after scope end
    async with qtm.TakeControl(connection, "password"):

        realtime = False

        if realtime:
            # Start new realtime
            await connection.new()
        else:
            # Load qtm file
            await connection.load(QTM_FILE)

            # start rtfromfile
            await connection.start(rtfromfile=True)

    # Get 6dof settings from qtm
    xml_string = await connection.get_parameters(parameters=["6d"])
    body_index = create_body_index(xml_string)

    wanted_body = "L-frame"

    def on_packet(packet):
        info, bodies = packet.get_6d()
        print("Framenumber: {} - Body count: {}".format(
            packet.framenumber, info.body_count))

        if wanted_body is not None and wanted_body in body_index:
            # Extract one specific body
            wanted_index = body_index[wanted_body]
            position, rotation = bodies[wanted_index]
            print("{} - Pos: {} - Rot: {}".format(wanted_body, position,
                                                  rotation))
        else:
            # Print all bodies
            for position, rotation in bodies:
                print("Pos: {} - Rot: {}".format(position, rotation))

    # Start streaming frames
    await connection.stream_frames(components=["6d"], on_packet=on_packet)

    # Wait asynchronously 5 seconds
    await asyncio.sleep(5)

    # Stop streaming
    await connection.stream_frames_stop()
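
This example (and the UDP example later on) calls a create_body_index helper that is not shown; here is a minimal sketch of what such a helper could look like, assuming the 6d parameter XML contains one Body/Name element per rigid body.

import xml.etree.ElementTree as ET


def create_body_index(xml_string):
    """ Build a rigid-body name to packet index mapping from the 6d settings XML """
    xml = ET.fromstring(xml_string)

    body_to_index = {}
    for index, body in enumerate(xml.findall("*/Body/Name")):
        body_to_index[body.text.strip()] = index

    return body_to_index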
Example #4
async def main(interface=None):
    """ Main function """

    qtm_ip = await choose_qtm_instance(interface)
    if qtm_ip is None:
        return

    while True:

        connection = await qtm.connect(qtm_ip, 22223, version="1.18")

        if connection is None:
            return

        await connection.get_state()
        await connection.byte_order()

        async with qtm.TakeControl(connection, "password"):

            result = await connection.close()
            if result == b"Closing connection":
                await connection.await_event(qtm.QRTEvent.EventConnectionClosed)

            await connection.load(QTM_FILE)

            await connection.start(rtfromfile=True)

            (await connection.get_current_frame(components=["3d"])).get_3d_markers()

            queue = asyncio.Queue()

            asyncio.ensure_future(packet_receiver(queue))

            try:
                await connection.stream_frames(components=["incorrect"],
                                               on_packet=queue.put_nowait)
            except qtm.QRTCommandException as exception:
                LOG.info("exception %s", exception)

            await connection.stream_frames(components=["3d"],
                                           on_packet=queue.put_nowait)

            await asyncio.sleep(0.5)
            await connection.byte_order()
            await asyncio.sleep(0.5)
            await connection.stream_frames_stop()
            queue.put_nowait(None)

            await connection.get_parameters(parameters=["3d"])
            await connection.stop()

            await connection.await_event()

            await connection.new()
            await connection.await_event(qtm.QRTEvent.EventConnected)

            await connection.start()
            await connection.await_event(qtm.QRTEvent.EventWaitingForTrigger)

            await connection.trig()
            await connection.await_event(qtm.QRTEvent.EventCaptureStarted)

            await asyncio.sleep(0.5)

            await connection.set_qtm_event()
            await asyncio.sleep(0.001)
            await connection.set_qtm_event("with_label")

            await asyncio.sleep(0.5)

            await connection.stop()
            await connection.await_event(qtm.QRTEvent.EventCaptureStopped)

            await connection.save(r"measurement.qtm")

            await asyncio.sleep(3)

            await connection.close()

        connection.disconnect()
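
The packet_receiver consumer and the choose_qtm_instance discovery helper referenced above are not part of the snippet; below is a minimal sketch of the consumer, which simply drains the queue until the None sentinel (put on the queue right after stream_frames_stop) arrives. LOG is assumed to be a module-level logger.

async def packet_receiver(queue):
    """ Asynchronously consume packets until None is posted in the queue """
    LOG.info("Entering packet_receiver")
    while True:
        packet = await queue.get()
        if packet is None:
            break
        LOG.info("Framenumber %s", packet.framenumber)
    LOG.info("Exiting packet_receiver")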
Example #5
async def main(network_config_file_name):
    # Read the configuration from the json file
    with open(network_config_file_name) as json_file:
        json_file_data = json.load(json_file)

    # 1 for realtime streaming, 0 for loading qtm file
    flag_realtime = int(json_file_data['FLAG_REALTIME'])

    # IP address for the mocap server
    IP_server = json_file_data['IP_SERVER']

    # If you want to stream recorded data in a real-time way, change json file and load it here.
    # There might be a bug about file path. Will test it later. -- Sept. 08, 2020
    file_name_qtm = json_file_data['NAME_FILE_LOADED_QTM']
    QTM_FILE = pkg_resources.resource_filename("qtm", file_name_qtm)

    # Connect to qtm
    connection = await qtm.connect(IP_server)

    # Connection failed?
    if connection is None:
        print("Failed to connect")
        return

    # Take control of qtm, context manager will automatically release control after scope end
    async with qtm.TakeControl(connection, "password"):
        if not flag_realtime:
            # Load qtm file
            await connection.load(QTM_FILE)
            # start rtfromfile
            await connection.start(rtfromfile=True)

    # Get 6-DOF settings from QTM
    xml_string = await connection.get_parameters(parameters=["6d"])

    # IP address of the UDP endpoint that receives the streamed data
    HOST = json_file_data['HOST_UDP']
    # Port of the UDP endpoint that receives the streamed data
    PORT = int(json_file_data['PORT_UDP'])
    server_address_udp = (HOST, PORT)

    # Create a UDP socket for data streaming
    loop = asyncio.get_running_loop()
    transport, protocol = await loop.create_datagram_endpoint(
        UdpProtocol, local_addr=None, remote_addr=server_address_udp)

    # Build a name-to-index map for the mocap rigid bodies
    body_index = create_body_index(xml_string)

    wanted_body = json_file_data['NAME_SINGLE_BODY']

    def on_packet(packet):
        # Get the 6-DOF data
        bodies = packet.get_6d()[1]

        if wanted_body is not None and wanted_body in body_index:
            # Extract one specific body
            wanted_index = body_index[wanted_body]
            position, rotation = bodies[wanted_index]
            # You can use position and rotation here. Notice that the unit for position is mm!
            # print(wanted_body)

            print("Position in numpy [meter]")
            position_np = np.array(
                [[position.x / 1000.0], [position.y / 1000.0],
                 [position.z / 1000.0]],
                dtype=np.float64)
            print(position_np)

            # # rotation.matrix is a tuple with 9 elements.
            # print("Rotation matrix in numpy")
            # rotation_np = np.asarray(rotation.matrix, dtype=np.float64).reshape(3, 3)
            # print(rotation_np)

            # send 6-DOF data via UDP
            # concatenate the position and rotation matrix vertically
            msg = np.asarray(
                (position.x / 1000.0, position.y / 1000.0, position.z / 1000.0)
                + rotation.matrix,
                dtype=np.float64).tobytes()
            transport.sendto(msg, server_address_udp)
            print("6-DOF data sent via UDP!")

        else:
            # Wanted body not found, so print all bodies instead
            print("There is no such rigid body! Printing all bodies.")
            for position, rotation in bodies:
                print("Pos: {} - Rot: {}".format(position, rotation))

    # Start streaming frames
    # Make sure the component matches with the data fetch function, for example: packet.get_6d() with "6d"
    # Reference: https://qualisys.github.io/qualisys_python_sdk/index.html
    # while True:
    #     await connection.stream_frames(components=["6d"], on_packet=on_packet)
    await connection.stream_frames(components=["6d"], on_packet=on_packet)
Example #6
async def main():
    # Delay to get in position for realtime measurement
    await asyncio.sleep(5)

    # Connect to qtm
    connection = await qtm.connect("127.0.0.1")

    # Connection failed?
    if connection is None:
        print("Failed to connect")
        return

    # Take control of qtm, context manager will automatically release control after scope end
    async with qtm.TakeControl(connection, "password"):

        # New measurement
        await connection.new()
        try:
            # Start capture
            await connection.await_event(qtm.QRTEvent.EventConnected,
                                         timeout=10)
            await connection.start()
            await connection.await_event(qtm.QRTEvent.EventCaptureStarted,
                                         timeout=10)
            print("Capture started")
        except Exception:
            print("Failed to start new measurement")

        framesOfPositions = []
        framesOfRotations = []
        flexion_ind1 = []
        flexion_ind2 = []
        flexion_mid1 = []
        flexion_mid2 = []
        flexion_thumb1 = []
        flexion_thumb2 = []
        roll = []
        pitch = []
        yaw = []

        # Labels need to be in the same order as in the AIM model used in QTM
        labels = [
            'wrist1', 'wrist2', 'wrist3', 'wrist4', 'ind1', 'ind2', 'ind3',
            'mid1', 'mid2', 'mid3', 'thumb1', 'thumb2', 'thumb3'
        ]

        ################################################################################################################
        # on_packet defines what happens for every streamed frame
        def on_packet(packet):
            info, bodies = packet.get_6d_euler()
            position, rotation = bodies[0]
            header, markers = packet.get_3d_markers()

            # print("Framenumber: {}".format(packet.framenumber))

            df_pos = pd.DataFrame(markers, index=labels)
            framesOfPositions.append(df_pos)
            df_rot = pd.DataFrame(rotation,
                                  columns=["wrist"],
                                  index=['roll', 'pitch', 'yaw'])
            framesOfRotations.append(df_rot)

            roll.append(rotation[0])
            pitch.append(rotation[1])
            yaw.append(rotation[2])

            flexion_ind1.append(flexion_mcp('ind', df_pos))
            flexion_ind2.append(flexion_pip('ind', df_pos))
            flexion_mid1.append(flexion_mcp('mid', df_pos))
            flexion_mid2.append(flexion_pip('mid', df_pos))
            flexion_thumb1.append(flexion_mcp('thumb', df_pos))
            flexion_thumb2.append(flexion_pip('thumb', df_pos))

        # Start streaming frames
        # Frequency is reduced to 10Hz
        await connection.stream_frames(frames='frequency:10',
                                       components=["6deuler", "3d"],
                                       on_packet=on_packet)

        # Define ideal flexions
        if capture_new_ideal_mov:
            # Time to perform ideal movements
            await asyncio.sleep(8)
            # Initialize ideal movements
            flexions = [
                flexion_ind1, flexion_ind2, flexion_mid1, flexion_mid2,
                flexion_thumb1, flexion_thumb2, roll, pitch, yaw
            ]
            ideal_flexions = init_ideal_mov(flexions)
        else:
            ideal_flexions = old_ideal_flexions
        print(ideal_flexions)

        ################################################################################################################
        # Open serial port
        ser = serial.Serial()
        ser.baudrate = 115200
        ser.port = 'COM4'
        ser.timeout = 1
        ser.open()
        print(ser)

        # Initial commands for FES device
        ser.write(b"iam DESKTOP\r\n")
        ser.write(b"elec 1 *pads_qty 16\r\n")
        ser.write(b"freq 35\r\n")

        ################################################################################################################
        await asyncio.sleep(0.5)

        # Archive for deep searches
        deep_searches = pd.DataFrame(data={'time': [], 'finger': []})
        # Initialize history of selected bandits
        active_bandits = []
        # vertical lines to visualize the stimulation sequences
        vlines = []

        # Start 'normal' search
        for t in range(n):
            deeper_search = False
            print('\nt:', t)
            # aim defines which finger / posterior distribution is used to pick the next action
            aim = aim_options[0]
            print(aim_options)
            print(aim)

            # Choose the action with the highest success probability, drawn as a random sample from each bandit's posterior distribution
            action = pick_action(aim, start_bandits)
            selected_bandit = start_bandits[action]
            # Define velec corresponding to selected bandit
            velec = selected_bandit.define_velec(ser)

            before_stim = len(framesOfPositions)

            # Pause between stimulations
            await asyncio.sleep(0.5)
            # Stimulate predefined velecs and set event markers
            await connection.set_qtm_event(velec.name)
            velec.stim_on()
            # Stimulation time
            await asyncio.sleep(stim_time)
            velec.stim_off()
            await asyncio.sleep(0.5)

            after_stim = len(framesOfPositions)
            vlines.append(after_stim)
            print('Sequence:', before_stim, '-', after_stim)

            # Get flexions for the recent stimulation section
            flexions = [
                flexion_ind1[before_stim:after_stim],
                flexion_ind2[before_stim:after_stim],
                flexion_mid1[before_stim:after_stim],
                flexion_mid2[before_stim:after_stim],
                flexion_thumb1[before_stim:after_stim],
                flexion_thumb2[before_stim:after_stim],
                roll[before_stim:after_stim], pitch[before_stim:after_stim],
                yaw[before_stim:after_stim]
            ]

            # Calculate rewards/accuracys for each finger
            accuracys = []
            undesired_movs = []
            for finger in ['ind', 'mid', 'thumb']:
                accuracy = (calc_reward(finger, flexions, ideal_flexions))

                # Take mean of PIP and MCP joint accuracy
                merged_accuracy = (accuracy[0] + accuracy[1]) / 2
                accuracys.append(merged_accuracy)

                # Undesired movements are the flexions of the other fingers and the wrist
                # Wrist flexion in particular is unwanted, so the deep search is inhibited if the sum of wrist flexion is > 20 degrees
                undesired_mov = calc_undesired_mov(finger, flexions)
                undesired_movs.append(undesired_mov)
                undesired_wrist_mov = undesired_mov[2]

                # Check if observation is good enough for deeper search
                if merged_accuracy >= 0.5 and undesired_wrist_mov < (
                        20 / 3) and finger in aim_options:

                    # Check if deep search for this finger was applied recently without success
                    previous_ds = deep_searches[deep_searches['finger'] ==
                                                finger]
                    if previous_ds.empty:

                        deeper_search = True
                        # Initial aim_accuracy
                        aim = finger
                        aim_accuracy = merged_accuracy

                    else:
                        numb_of_prev_ds = previous_ds.shape[0]
                        time_since_last_ds = t - int(
                            previous_ds.tail(1)['time'])
                        # A too recent deep search for the same finger, or too many unsuccessful deep-search attempts, inhibits a new deep search due to impending fatigue
                        if time_since_last_ds > pause_between_ds and numb_of_prev_ds < max_numb_of_ds:

                            deeper_search = True
                            # Initial aim_accuracy
                            aim = finger
                            aim_accuracy = merged_accuracy

            # Update each distribution for the selected bandit
            selected_bandit.update_observation(accuracys, undesired_movs)
            print(selected_bandit)
            print('Accuracys:', accuracys)
            print('Wrist movement:', undesired_wrist_mov)
            active_bandits.append([
                selected_bandit.electrode, selected_bandit.amplitude,
                accuracys, undesired_movs
            ])
            # save bandits with posterior distribution
            pickle.dump(bandits, open(new_file, 'wb'))

            ############################################################################################################
            if deeper_search:
                print(
                    'Deep search was entered: With the %s an accuracy of %s for the movement of %s was achieved'
                    % (selected_bandit, aim_accuracy, aim))

                # Add deep search to deep search archive
                deep_searches = deep_searches.append({
                    'time': t,
                    'finger': aim
                },
                                                     ignore_index=True)

                # Define new actionspace/bandits for deeper search
                combinations = neighbor_combinations(selected_bandit.electrode)
                new_bandits = [
                    x for x in bandits
                    if x.electrode in combinations and x.amplitude in [8, 10]
                ]

                # Initialize iterator for deeper search
                iter = 0
                time_exceeded = False
                # Stay in deeper search for n_deeper steps
                while aim_accuracy <= 0.75:
                    iter += 1
                    print('iteration', iter)
                    if iter == n_deeper:
                        time_exceeded = True
                        break

                    # Pick action based on random sample of posterior distribution
                    new_action = pick_action(aim, new_bandits)
                    selected_bandit = new_bandits[new_action]
                    # Define velec
                    velec = selected_bandit.define_velec(ser)

                    before_stim = len(framesOfPositions)

                    # Pause between stimulations
                    await asyncio.sleep(0.5)
                    # Stimulate predefined velecs
                    velec.stim_on()
                    # Stimulation time
                    await asyncio.sleep(stim_time)
                    velec.stim_off()
                    await asyncio.sleep(0.5)

                    after_stim = len(framesOfPositions)
                    vlines.append(after_stim)
                    print('Sequence:', before_stim, '-', after_stim)

                    # Get flexions for the recent stimulation section
                    flexions = [
                        flexion_ind1[before_stim:after_stim],
                        flexion_ind2[before_stim:after_stim],
                        flexion_mid1[before_stim:after_stim],
                        flexion_mid2[before_stim:after_stim],
                        flexion_thumb1[before_stim:after_stim],
                        flexion_thumb2[before_stim:after_stim],
                        roll[before_stim:after_stim],
                        pitch[before_stim:after_stim],
                        yaw[before_stim:after_stim]
                    ]

                    # Calculate rewards/accuracys for each finger
                    accuracys = []
                    undesired_movs = []
                    for finger in ['ind', 'mid', 'thumb']:
                        accuracy = (calc_reward(finger, flexions,
                                                ideal_flexions))

                        # Take mean of PIP and MCP joint accuracy
                        merged_accuracy = (accuracy[0] + accuracy[1]) / 2
                        accuracys.append(merged_accuracy)

                        # Undesired movements are the flexions of the other fingers and the wrist
                        undesired_mov = calc_undesired_mov(finger, flexions)
                        undesired_movs.append(undesired_mov)

                        # Check if other finger (than aim) achieved the desired accuracy
                        if merged_accuracy >= 0.75 and finger != aim and finger in aim_options:
                            print(
                                'Success: Good bandit found! With the %s an accuracy of %s for the movement of %s was achieved'
                                % (selected_bandit, merged_accuracy, finger))
                            aim_options.remove(finger)

                        if finger == aim:
                            aim_accuracy = merged_accuracy

                    # Update each distribution for selected bandit
                    selected_bandit.update_observation(accuracys,
                                                       undesired_movs)
                    print(selected_bandit)
                    print('Accuracys:', accuracys)
                    print('Wrist movement:', undesired_mov[2])

                    active_bandits.append([
                        selected_bandit.electrode, selected_bandit.amplitude,
                        accuracys, undesired_movs
                    ])
                    # save bandits with posterior distribution
                    pickle.dump(bandits, open(new_file, 'wb'))

                if time_exceeded:
                    print('Failure: No good bandit found in given time!')

                else:
                    print(
                        'Success: Good bandit found! With the %s an accuracy of %s for the movement of %s was achieved'
                        % (selected_bandit, aim_accuracy, aim))
                    deeper_search = False
                    if len(aim_options) != 0:
                        aim_options.remove(aim)
                    if len(aim_options) == 0:
                        print(
                            'Finished! For each movement a good bandit has been found.'
                        )
                        break

        # Save the list of active bandits so the stimulation order can be traced back later
        pickle.dump(active_bandits, open(('active' + new_file), 'wb'))

        # Close serial port
        ser.close()

        ################################################################################################################
        # Delay needed, otherwise error from connection.stop
        await asyncio.sleep(10)

        # Stop streaming
        await connection.stream_frames_stop()

        await connection.stop()

        # Save flexions to ease a new plot
        with open(('flexions' + new_file), 'wb') as f:
            pickle.dump(flexions, f)

        # Plot the flexions for the whole measurement
        fulltime = len(framesOfPositions)
        fig, (ax1, ax2, ax3, ax4) = plt.subplots(4, 1, sharey=True)
        ax1.plot(range(fulltime), flexion_ind1, label=('mcp flex ind'))
        ax1.plot(range(fulltime), flexion_ind2, label=('pip flex ind'))
        ax1.set(xlabel='', ylabel='ind flexion (°)')
        ax1.legend()
        ax1.grid()
        ax1.vlines(vlines, 0, 75)
        ax2.plot(range(fulltime), flexion_mid1, label=('mcp flex mid'))
        ax2.plot(range(fulltime), flexion_mid2, label=('pip flex mid'))
        ax2.set(xlabel='', ylabel='mid flexion (°)')
        ax2.legend()
        ax2.grid()
        ax2.vlines(vlines, 0, 75)
        ax3.plot(range(fulltime), flexion_thumb1, label=('mcp flex thumb'))
        ax3.plot(range(fulltime), flexion_thumb2, label=('pip flex thumb'))
        ax3.set(xlabel='', ylabel='thumb flexion (°)')
        ax3.legend()
        ax3.grid()
        ax3.vlines(vlines, -20, 60)
        ax4.plot(range(fulltime), roll[:], label=('roll'))
        ax4.plot(range(fulltime), pitch[:], label=('pitch'))
        ax4.plot(range(fulltime), yaw[:], label=('yaw'))
        ax4.set(xlabel='frames', ylabel='rpy angles(°)')
        ax4.legend()
        ax4.grid()
        plt.show()
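
None of the snippets show how the top-level coroutine is launched; a minimal driver for this last example, assuming the script is run directly, could look as follows.

if __name__ == "__main__":
    # Run the measurement coroutine to completion on the default event loop
    asyncio.get_event_loop().run_until_complete(main())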