Example #1
'''
List of possible "plants" that a subject could control either during manual or brain control
'''
import numpy as np
from riglib import plants

pi = np.pi
RED = (1, 0, 0, .5)
## BMI Plants
cursor_14x14 = plants.CursorPlant(endpt_bounds=(-14, 14, 0., 0., -14, 14))

shoulder_anchor = np.array([2., 0., -15])
chain_kwargs = dict(link_radii=.6,
                    joint_radii=0.6,
                    joint_colors=(181 / 256., 116 / 256., 96 / 256., 1),
                    link_colors=(181 / 256., 116 / 256., 96 / 256., 1))

chain_20_20_endpt = plants.EndptControlled2LArm(link_lengths=[20, 20],
                                                base_loc=shoulder_anchor,
                                                **chain_kwargs)
init_pos = np.array([0, 0, 0], np.float64)
chain_20_20_endpt.set_intrinsic_coordinates(init_pos)

chain_20_20 = plants.RobotArmGen2D(link_lengths=[20, 20],
                                   base_loc=shoulder_anchor,
                                   **chain_kwargs)
init_pos = np.array([0.38118002, 2.08145271])
chain_20_20.set_intrinsic_coordinates(init_pos)

plantlist = dict(cursor_14x14=cursor_14x14,
                 chain_20_20=chain_20_20,
                 chain_20_20_endpt=chain_20_20_endpt)
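
The plantlist dictionary maps human-readable names to pre-built plant objects, so a task can pick its plant by string. A minimal lookup sketch (the plant_type value is a hypothetical task parameter, not defined in this module):

# Hypothetical: a task resolves its plant from a configuration string.
plant_type = 'cursor_14x14'        # assumed to come from task settings
plant = plantlist[plant_type]
print('Selected plant:', plant_type, type(plant).__name__)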
Example #2
    def __init__(self,
                 n_iter=None,
                 entry_id=None,
                 hdf_filename=None,
                 decoder_filename=None,
                 params=dict(),
                 *args,
                 **kwargs):
        if entry_id is None and (hdf_filename is None
                                 or decoder_filename is None):
            raise ValueError(
                "Not enough data to reconstruct a BMI! Specify a database entry OR an HDF file + decoder file"
            )
        if entry_id is not None:
            from db import dbfunctions as dbfn
            te = dbfn.TaskEntry(entry_id)
            self.hdf_ref = te.hdf
            self.decoder = te.decoder
            self.params = te.params

            if self.hdf_ref is None:
                raise ValueError("Database is unable to locate HDF file!")
            if self.decoder is None:
                raise ValueError("Database is unable to locate decoder file!")
        elif hdf_filename is not None:
            self.hdf_ref = tables.open_file(hdf_filename)
            # encoding='latin1' lets Python 3 read pickles written by Python 2
            with open(decoder_filename, 'rb') as f:
                self.decoder = pickle.load(f, encoding='latin1')
            self.params = params

        # n_iter=None means "replay the whole session"
        if n_iter is None:
            n_iter = len(self.hdf_ref.root.task)
        self.n_iter = min(n_iter, len(self.hdf_ref.root.task))

        try:
            self.starting_pos = self.hdf_ref.root.task[0]['decoder_state'][0:3, 0]
        except Exception:
            # The 'decoder_state' lookup does not always work; fall back to
            # the recorded cursor position
            self.starting_pos = self.hdf_ref.root.task[0]['cursor']

        # if 'plant_type' in te.params:
        #     self.plant_type = te.params['plant_type']
        # elif 'arm_class' in te.params:
        #     plant_type = te.params['arm_class']
        #     if plant_type == 'CursorPlant':
        #         self.plant_type = 'cursor_14x14'
        #     else:
        #         self.plant_type = plant_type
        # else:
        #     self.plant_type = 'cursor_14x14'

        # TODO overly specific
        self.plant = plants.CursorPlant(endpt_bounds=(-14, 14, 0., 0., -14, 14))

        # Set the target radius because the old assist method changes the
        # assist speed when the cursor is inside the target
        self.target_radius = params['target_radius']
        self.cursor_radius = params['cursor_radius']
        self.assist_level = params['assist_level']

        self.idx = 0
        gen = sim_target_seq_generator_multi(8, 1000)

        super(BMIReconstruction, self).__init__(gen, *args, **kwargs)

        self.hdf = SimHDF()
        self.learn_flag = True

        task_msgs = self.hdf_ref.root.task_msgs[:]
        self.update_bmi_msgs = task_msgs[task_msgs['msg'] == 'update_bmi']
        task_msgs = list(
            filter(lambda x: x['msg'] not in ['update_bmi'], task_msgs))
        # print task_msgs
        self.task_state = np.array([None] * n_iter)
        for msg, next_msg in zip(task_msgs[:-1], task_msgs[1:]):
            self.task_state[msg['time']:next_msg['time']] = msg['msg']

        self.update_bmi_inds = np.zeros(len(self.hdf_ref.root.task))
        self.update_bmi_inds[self.update_bmi_msgs['time']] = 1
        self.recon_update_bmi_inds = np.zeros(len(self.hdf_ref.root.task))

        self.target_hold_msgs = list(
            filter(lambda x: x['msg'] in ['target', 'hold'],
                   self.hdf_ref.root.task_msgs[:]))
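
A minimal instantiation sketch for the constructor above, assuming the usual module-level imports (tables, pickle, numpy as np, riglib.plants, SimHDF, sim_target_seq_generator_multi) and using placeholder file paths and parameter values; the params keys mirror the lookups made in __init__, and the superclass may require additional keyword arguments:

# Hypothetical paths and parameter values -- substitute real recordings.
recon = BMIReconstruction(
    n_iter=10000,                       # replay at most 10000 task iterations
    hdf_filename='session.hdf',         # placeholder path to the recorded HDF file
    decoder_filename='decoder.pkl',     # placeholder path to the pickled decoder
    params=dict(target_radius=2.0,      # keys read directly by __init__
                cursor_radius=0.4,
                assist_level=0.0),
)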
Example #3
'''
List of possible "plants" that a subject could control either during manual or brain control
'''
import numpy as np
from riglib import plants

pi = np.pi
RED = (1,0,0,.5)
## BMI Plants
cursor_14x14 = plants.CursorPlant(endpt_bounds=(-14, 14, 0., 0., -14, 14))
cursor_25x14 = plants.CursorPlant(endpt_bounds=(-25, 25, 0., 0., -14, 14))
big_cursor_25x14 = plants.CursorPlant(endpt_bounds=(-25, 25, 0., 0., -14, 14), cursor_radius=1.0)

cursor_14x14_no_vel_wall = plants.CursorPlant(endpt_bounds=(-14, 14, 0., 0., -14, 14), vel_wall=False)

chain_kwargs = dict(link_radii=.6, joint_radii=0.6, joint_colors=(181/256., 116/256., 96/256., 1), link_colors=(181/256., 116/256., 96/256., 1))
shoulder_anchor = np.array([2., 0., -15])
chain_15_15_5_5 = plants.RobotArmGen2D(link_lengths=[15, 15, 5, 5], base_loc=shoulder_anchor, **chain_kwargs)
init_joint_pos = np.array([ 0.47515737,  1.1369006 ,  1.57079633,  0.29316668])
chain_15_15_5_5.set_intrinsic_coordinates(init_joint_pos)

tentacle2 = plants.RobotArmGen2D(link_lengths=[10, 10, 10, 10], base_loc=shoulder_anchor, **chain_kwargs)
init_joint_pos = np.array([ 0.47515737,  1.1369006 ,  1.57079633,  0.29316668])
tentacle2.set_intrinsic_coordinates(init_joint_pos)

chain_15_15_5_5_on_screen = plants.RobotArmGen2D(link_lengths=[15, 15, 5, 5], base_loc=shoulder_anchor, stay_on_screen=True, **chain_kwargs)
chain_15_15_5_5_on_screen.set_intrinsic_coordinates(init_joint_pos)


chain_20_20_endpt = plants.EndptControlled2LArm(link_lengths=[20, 20], base_loc=shoulder_anchor, **chain_kwargs)
init_pos = np.array([0, 0, 0], np.float64)
chain_20_20_endpt.set_intrinsic_coordinates(init_pos)
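
The init_joint_pos vectors above are joint angles in radians for the planar chains. For intuition, a minimal forward-kinematics sketch, assuming a cumulative-angle convention in the x-z plane anchored at shoulder_anchor (riglib's RobotArmGen2D may use a different convention, so treat this only as an illustration):

def planar_endpoint(link_lengths, joint_angles, base_loc):
    # Endpoint of a planar chain: each joint angle adds to the previous one,
    # and each link contributes (L*cos(theta), L*sin(theta)) in the x-z plane.
    angles = np.cumsum(joint_angles)
    x = base_loc[0] + np.sum(np.asarray(link_lengths) * np.cos(angles))
    z = base_loc[2] + np.sum(np.asarray(link_lengths) * np.sin(angles))
    return np.array([x, 0., z])

# Endpoint of tentacle2's initial pose under the assumed convention
print(planar_endpoint([10., 10., 10., 10.], init_joint_pos, shoulder_anchor))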
Example #4
import time
import os
import math
import traceback
from collections import OrderedDict

import numpy as np  # needed for shoulder_anchor below

from riglib.experiment import traits, Sequence, FSMTable, StateTransitions
from riglib.stereo_opengl import ik
from riglib import plants

from riglib.stereo_opengl.window import Window
from .target_graphics import *

## Plants
# List of possible "plants" that a subject could control either during manual or brain control
cursor = plants.CursorPlant()
shoulder_anchor = np.array([2., 0., -15])
chain_kwargs = dict(link_radii=.6,
                    joint_radii=0.6,
                    joint_colors=(181 / 256., 116 / 256., 96 / 256., 1),
                    link_colors=(181 / 256., 116 / 256., 96 / 256., 1))
chain_20_20_endpt = plants.EndptControlled2LArm(link_lengths=[20, 20],
                                                base_loc=shoulder_anchor,
                                                **chain_kwargs)
chain_20_20 = plants.RobotArmGen2D(link_lengths=[20, 20],
                                   base_loc=shoulder_anchor,
                                   **chain_kwargs)

plantlist = dict(cursor=cursor,
                 chain_20_20=chain_20_20,
                 chain_20_20_endpt=chain_20_20_endpt)
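
Unlike Example #1, this module does not set initial intrinsic coordinates for the two chains, so they start at whatever pose the plant classes default to. If an explicit starting pose is wanted, the values from Examples #1 and #3 could be reused (assumed here, not part of this module):

# Assumed starting poses, copied from Examples #1 and #3
chain_20_20_endpt.set_intrinsic_coordinates(np.array([0., 0., 0.]))
chain_20_20.set_intrinsic_coordinates(np.array([0.38118002, 2.08145271]))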