Example #1
    def setupUi(self, MainWindow):
        super().setupUi(MainWindow)

        self.robot_controller = RobotController()
        self.robot_controller.connect_feedback(self.feedback_changed)
        self.robot_controller.connect_log(self.update_log)

        self.pbUp.clicked.connect(self.pbUp_clicked)
        self.pbDown.clicked.connect(self.pbDown_clicked)
        self.pbResetError.clicked.connect(self.pbResetError_clicked)
        self.pbPauseMotion.clicked.connect(self.pbPauseMotion_clicked)
        self.pbResumeMotion.clicked.connect(self.pbResumeMotion_clicked)
        self.pbClearMotion.clicked.connect(self.pbClearMotion_clicked)
        self.sldAngSpeedValue.valueChanged.connect(self.joint_speed_slided)
        self.sldCartSpeedValue.valueChanged.connect(self.cart_speed_slided)
        self.pbReadCurrAng.clicked.connect(self.pbReadCurrAng_clicked)
        self.pbMoveNewAngVal.clicked.connect(self.pbMoveNewAngVal_clicked)
        self.pbReadCurrCart.clicked.connect(self.pbReadCurrCart_clicked)
        self.pbMoveNewCartVal.clicked.connect(self.pbMoveNewCartVal_clicked)

        for i in range(0, 6):
            pbTheta_inc = getattr(self, f'pbTheta{i+1}_inc')
            pbTheta_dec = getattr(self, f'pbTheta{i+1}_dec')
            pbTheta_inc.clicked.connect(partial(self.pbTheta_inc_clicked, i))
            pbTheta_dec.clicked.connect(partial(self.pbTheta_dec_clicked, i))

        for i in range(0, 6):
            pbCart_inc = getattr(self, f'pbCart{i+1}_inc')
            pbCart_dec = getattr(self, f'pbCart{i+1}_dec')
            pbCart_inc.clicked.connect(partial(self.pbCart_inc_clicked, i))
            pbCart_dec.clicked.connect(partial(self.pbCart_dec_clicked, i))
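
A note on the functools.partial pattern used in the two loops above: partial binds the loop index at connect time, which a bare lambda would not. A minimal standalone sketch (the on_clicked handler is hypothetical, no Qt needed):

from functools import partial

def on_clicked(index):
    print("button {} clicked".format(index))

# partial freezes the current value of i when each handler is created
handlers = [partial(on_clicked, i) for i in range(6)]
handlers[2]()    # -> button 2 clicked

# a bare lambda captures the variable i itself, not its value at creation time
late_bound = [lambda: on_clicked(i) for i in range(6)]
late_bound[2]()  # -> button 5 clicked (i is 5 once the loop has finished)
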
Example #2
def start_training(trace_history, room_dst, person_loc, approach_person,
                   sound_wave_model):
    rc = RobotController()
    mc = ModelController()

    for i, trace in enumerate(trace_history):
        print("-" * 60)
        print("Current Time: {}, Sample: {}".format(rospy.get_rostime().secs,
                                                    i))

        target_room = trace["target"]
        # robot back to origin
        explore_room(rc, room_dst, trace["origin"])
        # move a person to dst and emit sound
        px, py, pidx = set_person_pose(mc, person_loc, target_room)
        mc.spawn_model("sound_wave", sound_wave_model, px, py, 2)
        rospy.sleep(3)
        mc.delete_model("sound_wave")

        # robot actively explore the room according to the ranking result
        for next_room in trace["trace"]:
            if next_room == target_room:
                print("Sample {} find target room: {}".format(i, next_room))
                app_pos = approach_person[str(target_room)][pidx]
                rc.goto(app_pos["x"], app_pos["y"], app_pos["yaw"])
                rospy.sleep(1)
            else:
                print("Sample {} explore room: {}".format(i, next_room))
                explore_room(rc, room_dst, next_room)
Example #3
    def handle(self):
        #client = self.request
        sock = SimpleMessageSocket(self.request)
        address = self.client_address[0]
        print('connected with {}'.format(address))
        ctl = RobotController()
        motion_thread = MotionThread(ctl)
        motion_thread.start()

        msg_handlers = {
            SimpleMessageType.JOINT_TRAJ_PT: self.on_joint_traj_pt,
            SimpleMessageType.EXECUTE_PROGRAM: self.on_execute_program
        }

        try:
            while True:
                recv_msg = sock.recv()

                msg_handler = msg_handlers.get(recv_msg.msg_type,
                                               self.on_unkown_message)

                reply_msg = msg_handler(ctl, recv_msg, motion_thread)
                sock.send(reply_msg)
        finally:
            ctl.close()
Example #4
    def handle(self):
        #client = self.request
        sock = SimpleMessageSocket(self.request)
        address = self.client_address[0]
        print('connected from {}'.format(address))
        ctl = RobotController()

        msg_handlers = {
            SimpleMessageType.SET_IO: self.on_set_io,
            SimpleMessageType.SET_POSTURE: self.on_set_posture,
            SimpleMessageType.GET_POSTURE: self.on_get_posture,
            SimpleMessageType.SYS_STAT: self.on_sys_stat,
            SimpleMessageType.SET_TOOL_OFFSET: self.on_set_tool_offset
        }

        try:
            while True:
                recv_msg = sock.recv()

                msg_handler = msg_handlers.get(recv_msg.msg_type,
                                               self.on_unkown_message)

                reply_msg = msg_handler(ctl, recv_msg)
                sock.send(reply_msg)
        finally:
            ctl.close()
Example #5
def talker():
    # ROS
    rospy.init_node('controller-talker-lorang', anonymous=True)
    rate = rospy.Rate(5)  # 5 Hz

    # Robot Controller
    robot = RobotController()

    # Kinematic solver
    kine_solver = RRRSolver(0.095, 0.15, 0.15, [-180., 180.], [0., 180.],
                            [0., 180.])
    kine_solver.calibration([0., 180., 110.], [1, -1, -1], [-180., 180.],
                            [80., 150.], [80., 135.])

    # Rectangle
    pts_1 = [0.14, 0, 0.2]
    pts_2 = [0.22, 0, 0.25]
    pts_3 = [0.14, 0, 0.25]
    pts_4 = [0.22, 0, 0.2]

    # Compute trajectory
    traj = kine_solver.linear_trajectory([pts_1, pts_3, pts_2, pts_4, pts_1],
                                         20)
    counter = 0

    while not rospy.is_shutdown():
        # Publish the angles
        robot.move_to(traj[counter])

        counter += 1
        if counter >= len(traj):
            counter = len(traj) - 1

        rate.sleep()
Example #6
    def run(self, robot_config_values, room_size, rects, start_pos,
            start_angle, square_size, cartographer_grid):
        self.is_running = True

        self.commands_queue = Queue()

        # Clear map and set sizes for views
        self.map_window.start(room_size)
        self.map_scene.draw_map(room_size, rects, start_pos, start_angle)
        self.scanner_window.start(room_size)
        self.discovered_map_window.start(room_size, square_size)

        # Create some objects for the RobotController to use, as we have all the data here
        hardware = Hardware(start_pos, start_angle, room_size, rects,
                            robot_config_values, self.map_change_pos,
                            self.map_change_rotation)
        cartographer = Cartographer(room_size, square_size,
                                    self.cartographer_grid_values_callback,
                                    cartographer_grid)

        self.controller = RobotController(room_size, hardware, cartographer,
                                          self.commands_queue,
                                          self.scanner_draw_points,
                                          self.scanner_draw_lines,
                                          self.scanner_draw_robot,
                                          self.scanner_clear)

        self.controller.start()
Example #7
 def __init__(self, arm, joint_names, joint_topic):
     RobotController.__init__(self, joint_names, joint_topic)
     self.arm = arm
     # Set smoothness of angle filter
     self.coef = rospy.get_param('~filter_coef', 0.1)
     # Adds API support for controlling Baxter arm
     self.limb = baxter_interface.Limb(self.arm)
     # Adds API support for controlling Baxter gripper
     self.gripper = baxter_interface.Gripper(self.arm)
Example #8
 def __init__(self, video_server):
     threading.Thread.__init__(self)
     self.video_server = video_server
     self.socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
     self.server_address = (IP, PORT)
     print('starting up on {} port {}'.format(*self.server_address))
     self.socket.bind(self.server_address)
     self.socket.listen(1)
     self.is_listening = True
     self.robot_controller = RobotController()
Example #9
    def __init__(self):

        self.ctrl = RobotController(self.CONTROL_RATE)

        self.recorder = RobotRecorder(save_dir='',
                                      use_aux=False,
                                      save_actions=False,
                                      save_images=False)
        self.image_publisher = rospy.Publisher('/robot/head_display',
                                               Image_msg,
                                               queue_size=2)
        self.load_splashes()

        self.control_rate = rospy.Rate(self.PREDICT_RATE)
        self.ui_rate = rospy.Rate(self.UI_RATE)

        self.robot_predictor = setup_predictor(
            self.ROBOT_MODEL, self.ROBOT_BIAS, self.CROP_H_MIN,
            self.CROP_H_MAX, self.CROP_W_MIN, self.CROP_W_MAX)
        self.human_predictor = setup_predictor_human(
            self.HUMAN_MODEL, self.HUMAN_BIAS, self.CROP_H_MIN,
            self.CROP_H_MAX, self.CROP_W_MIN, self.CROP_W_MAX)
        self.is_human = False

        self.predictor = self.robot_predictor

        rospy.Subscriber("/keyboard/keyup", Key, self.keyboard_up_listener)
        self.weiss_pub = rospy.Publisher('/wsg_50_driver/goal_position',
                                         Cmd,
                                         queue_size=10)
        # rospy.Subscriber("/wsg_50_driver/status", Status, self.weiss_status_listener)

        self._navigator = intera_interface.Navigator()
        self.demo_collect = self._navigator.register_callback(
            self.record_demo, 'right_button_ok')
        self.swap_key = self._navigator.register_callback(
            self.swap_sawyer_cuff, 'right_button_back')
        self.start_key = self._navigator.register_callback(
            self.start_traj_cuff, 'right_button_square')
        self.neutral_key = self._navigator.register_callback(
            self.neutral_cuff, 'right_button_show')
        self.x_button = self._navigator.register_callback(
            self.gripper_cuff, 'right_button_triangle')

        self.calc = EE_Calculator()

        print 'ONE SHOT READY!'
        self.running = False
        self.demo_imgs = None

        self.move_netural()

        rospy.spin()
Example #10
    def __init__(self, *args, **kwargs):
        threading.Thread.__init__(self)
        self.kwargs = kwargs
        self.enable_joysticks = True
        if 'enable_joysticks' in self.kwargs:
            self.enable_joysticks = self.kwargs['enable_joysticks']

        logging.getLogger("requests").setLevel(logging.WARNING)

        self.stop_robot = False
        self.logger = logging.getLogger('GUI')
        self.curr_image = None
        self.joint_states = []
        self.num_empty_commands = 0

        self.robot_ctrl = RobotController.getInstance(*args, **kwargs)
        self.logger.info('initializing robot...')
        self.robot_ctrl.init_robot()
        self.logger.info('robot successfully initialized')

        if 'robot_init_pos' in self.kwargs:
            self.logger.info('Sending robot to center.')
            self.robot_ctrl.send_robot_to_center(goal=kwargs['robot_init_pos'])
            self.logger.info('Robot centered.')

        np.set_printoptions(precision=2)

        self.image_size = (640, 360)
        if 'image_size' in self.kwargs:
            self.image_size = self.kwargs['image_size']
Example #11
class ControlServer(threading.Thread):
    def __init__(self, video_server):
        threading.Thread.__init__(self)
        self.video_server = video_server
        self.socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        self.server_address = (IP, PORT)
        print('starting up on {} port {}'.format(*self.server_address))
        self.socket.bind(self.server_address)
        self.socket.listen(1)
        self.is_listening = True
        self.robot_controller = RobotController()

    def run(self):
        try:
            while self.is_listening:
                print('waiting for a connection')
                connection, client_address = self.socket.accept()
                try:
                    print('client connected:', client_address)
                    while True:
                        data = connection.recv(200)
                        packet = pickle.loads(data)
                        message = packet['message']
                        token = packet['token']

                        if token == 'client_token':
                            self.robot_controller.move(message[0])
                            if message:
                                connection.sendall(message)
                            else:
                                break
                        else:
                            self.stop()
                            self.video_server.stop()
                except EOFError:
                    print('client disconnected')
                finally:
                    connection.close()
        except OSError:
            print('Socket server stopped')

    def stop(self):
        self.is_listening = False
        self.socket.close()

    def __del__(self):
        self.stop()
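
The receive loop above assumes every connection.recv(200) call returns exactly one complete pickle, which TCP does not guarantee. A minimal length-prefixed framing sketch using only the standard library (the helper names send_packet, recv_packet and recv_exact are hypothetical, not part of the example):

import pickle
import struct

def send_packet(conn, obj):
    # serialize and prefix with a 4-byte big-endian length
    payload = pickle.dumps(obj)
    conn.sendall(struct.pack('>I', len(payload)) + payload)

def recv_exact(conn, n):
    # accumulate exactly n bytes, or return None if the peer closed the socket
    buf = b''
    while len(buf) < n:
        chunk = conn.recv(n - len(buf))
        if not chunk:
            return None
        buf += chunk
    return buf

def recv_packet(conn):
    # read one length-prefixed pickle, or None on a clean disconnect
    header = recv_exact(conn, 4)
    if header is None:
        return None
    (length,) = struct.unpack('>I', header)
    payload = recv_exact(conn, length)
    return pickle.loads(payload) if payload is not None else None
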
Example #12
def main():
    motor_controller = MotorController()
    sonar = Sonar(18, 17)
    try:
        controller = RobotController(sonar, motor_controller)
        server = Server(controller, "192.168.99.11", 8080)
        server.listen()
    finally:
        # sonar is created before the try block, so this cleanup cannot raise NameError
        sonar.stop()
Example #13
    def __init__(self, control_rate, robot_name):
        self.max_release = 0
        RobotController.__init__(self)
        self.sem_list = [Semaphore(value=0)]
        self._status_mutex = Lock()
        self.robot_name = robot_name

        self._desired_gpos = GRIPPER_OPEN
        self.gripper_speed = 300

        self._force_counter = 0
        self._integrate_gripper_force = 0.
        self.num_timeouts = 0

        self.gripper_pub = rospy.Publisher('/wsg_50_driver/goal_position',
                                           Cmd,
                                           queue_size=10)
        rospy.Subscriber("/wsg_50_driver/status", Status,
                         self._gripper_callback)

        print("waiting for first status")
        self.sem_list[0].acquire()
        print('gripper initialized!')

        self.imp_ctrl_publisher = rospy.Publisher('/desired_joint_pos',
                                                  JointState,
                                                  queue_size=1)
        self.imp_ctrl_release_spring_pub = rospy.Publisher('/release_spring',
                                                           Float32,
                                                           queue_size=10)
        self.imp_ctrl_active = rospy.Publisher('/imp_ctrl_active',
                                               Int64,
                                               queue_size=10)

        self.control_rate = rospy.Rate(control_rate)

        self.imp_ctrl_release_spring(100)
        self.imp_ctrl_active.publish(1)

        self._navigator = intera_interface.Navigator()
        self._navigator.register_callback(self._close_gripper_handler,
                                          'right_button_ok')
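
The Semaphore(value=0) followed by acquire() above is how the constructor blocks until the first /wsg_50_driver/status message arrives. A self-contained sketch of the same pattern without ROS (a Timer stands in for the first incoming status message):

import threading

first_status = threading.Semaphore(value=0)

def status_callback(msg):
    # the real callback would also record gripper state; here it only signals arrival
    first_status.release()

# simulate the driver publishing its first status after half a second
threading.Timer(0.5, status_callback, args=("status",)).start()

print("waiting for first status")
first_status.acquire()   # blocks until status_callback has run once
print("gripper initialized!")
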
Example #14
    def __init__(self, model_path, vgg19_path):
        self.ctrl = RobotController()

        self.recorder = RobotRecorder(save_dir='',
                                      seq_len=60,
                                      use_aux=False,
                                      save_video=True,
                                      save_actions=False,
                                      save_images=False)

        self.action_interval = 20 #Hz
        self.action_sequence_length = 60

        self.traj_duration = self.action_sequence_length * self.action_interval
        self.action_rate = rospy.Rate(self.action_interval)
        self.control_rate = rospy.Rate(20)
        self.predictor = setup_predictor(model_path, vgg19_path)
        self.save_ctr = 0
        self.ctrl.set_neutral()
        self.s = 0
Example #15
    def __init__(self, model_path, vgg19_path):
        self.ctrl = RobotController()

        self.recorder = RobotRecorder(save_dir='',
                                      seq_len=60,
                                      use_aux=False,
                                      save_video=True,
                                      save_actions=False,
                                      save_images=False)

        self.action_interval = 10 #Hz
        self.action_sequence_length = 10

        self.control_rate = rospy.Rate(self.action_interval)
        self.predictor = setup_predictor(model_path, vgg19_path)
        self.save_ctr = 0

        self.s = 0
        self.imp_ctrl_publisher = rospy.Publisher('desired_joint_pos', JointState, queue_size=1)
        self.imp_ctrl_release_spring_pub = rospy.Publisher('release_spring', Float32, queue_size=10)
        self.imp_ctrl_active = rospy.Publisher('imp_ctrl_active', Int64, queue_size=10)
        self.move_neutral()
Example #16
class Supervisor(object):
    """
    evaluator class that holds the components needed for simulation and
    determining fitness
    """
    def __init__(self):

        self.robot = Robot()
        self.robot.pose.position.x = 3.0
        self.robot.pose.position.y = 5.0
        self.robot_controller = RobotController(self.robot)

    def use_genes(self, genes):
        """
        receives this current iteration of the genes
        """

        self.robot_controller.set_genes(genes)

    def reset(self):
        """
        resets the simulation for the next usage
        """

        # Give the robot a random position r meters away from the goal
        self.robot.set_random_position(r=5.0)

        # Give the robot a random direction
        self.robot.set_random_direction()

    def run(self):
        """
        main run function
        """

        return self.robot_controller.run(20)
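
A possible sketch of how a genetic-algorithm driver might call the Supervisor above, using only the three methods it defines (evaluate_population and population are hypothetical names):

def evaluate_population(supervisor, population):
    # score every candidate gene vector from a fresh randomized start pose
    fitnesses = []
    for genes in population:
        supervisor.use_genes(genes)
        supervisor.reset()
        fitnesses.append(supervisor.run())  # run() returns the fitness of this rollout
    return fitnesses
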
Example #17
import traceback
import argparse
import robot_util
import os.path
import networking
import time
import schedule
import sys
import watchdog
import logging
import logging.handlers
import json
import atexit
from robot_controller import RobotController

robot = RobotController()

if (sys.version_info > (3, 0)):
    import importlib
    import _thread as thread
else:
    import thread

    # borrowed unregister function from
    # https://stackoverflow.com/questions/32097982/cannot-unregister-functions-from-atexit-in-python-2-7
    def unregister(func, *targs, **kargs):
        """unregister a function previously registered with atexit.
           use exactly the same aguments used for before register.
        """
        for i in range(0, len(atexit._exithandlers)):
            if (func, targs, kargs) == atexit._exithandlers[i]:
Example #18
 def __init__(self, *args, **kwargs):
     self.robot_controller = RobotController.getInstance(*args, **kwargs)
     self.stop_command = [0, 0, 0, 0, 0, 0]
     self.logger = logging.getLogger("NabotController")
Example #19
app = Flask(__name__)
# Disable Flask's logger
# app.logger.disabled = True
# Disable werkzeug's logger
# werkzeug_logger = logging.getLogger('werkzeug')
# werkzeug_logger.disabled = True

# logger = logging.getLogger()
# logger.addHandler(logging.FileHandler("/dev/null"))

# host = 'dm'
host = 'localhost'
port = 8080
app = Flask(__name__)
rc = RobotController()
topic_history_length = 6


@app.route('/')
def index():
    return render_template('index.html')


@app.route('/stt', methods=['POST'])
def stt():
    date = request.form['date']
    time = request.form['time']
    text = request.form['text']
    user = request.form['user']
    topics, persons = rc.main(date, time, user, text)
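
A minimal client-side sketch for exercising the /stt route above (field values are placeholders; it assumes the app is served on the host and port defined earlier and that the requests package is available):

import requests

resp = requests.post('http://localhost:8080/stt', data={
    'date': '2021-01-01',
    'time': '12:00:00',
    'text': 'hello robot',
    'user': 'alice',
})
print(resp.status_code)
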
Example #20
from robot_controller import RobotController
from graceful_killer import GracefulKiller
from argparse import ArgumentParser
from curtsies import Input
import numpy as np
from time import sleep
from vision.classify import Classifier
from vision.squeeze_classify import SqueezeClassifier
from picamera import PiCamera

parser = ArgumentParser()
parser.add_argument("--debug", help="Use debug mode")
args = parser.parse_args()

thermal = ThermalSensor()
controller = RobotController()

state = State(StateName.INIT, controller, {"a": 0})
killer = GracefulKiller(state)

classifier = SqueezeClassifier('./models/squeezenet_v1.1.prototxt',
                               './models/squeezenet_v1.1.caffemodel',
                               debug=args.debug)
'''
classifier = Classifier('models/MobileNetSSD_deploy.prototxt.txt',
                        'models/MobileNetSSD_deploy.caffemodel',
                        debug=args.debug)
'''


def is_cat(pixels):
Example #21
mymap.add_wall((0,0,0,168));        # a
mymap.add_wall((0,168,84,168));     # b
mymap.add_wall((84,126,84,210));    # c
mymap.add_wall((84,210,168,210));   # d
mymap.add_wall((168,210,168,84));   # e
mymap.add_wall((168,84,210,84));    # f
mymap.add_wall((210,84,210,0));     # g
mymap.add_wall((210,0,0,0));        # h

mymap.add_wall((84,30,180,30));
mymap.add_wall((180,30,180,54));
mymap.add_wall((180,54,138,54));
mymap.add_wall((138,54,138,168));
mymap.add_wall((138,168,114,168));
mymap.add_wall((114,168,114,84));
mymap.add_wall((114,84,84,84));
mymap.add_wall((84,84,84,30));

mymap.draw();

controller = RobotController(particle)

particleSet = controller.initParticles()

#controller.rotate2(-360)
#controller.rotate2(90)
#time.sleep(2)
controller.go2(80)
#controller.rotate2(-360)
#controller.go2(82)
Example #22
mymap.add_wall((168,84,210,84));    # f
mymap.add_wall((210,84,210,0));     # g
mymap.add_wall((210,0,0,0));        # h

mymap.add_wall((84,30,180,30));
mymap.add_wall((180,30,180,54));
mymap.add_wall((180,54,138,54));
mymap.add_wall((138,54,138,168));
mymap.add_wall((138,168, 138, 30));
mymap.add_wall((138, 30,84,30));



mymap.draw();

controller = RobotController(particle)

#time.sleep(0.5)
#controller.rotate2(-360)

#particleSet = controller.initParticles()

#controller.characterize_location_real_time()

#controller.navigate2(84, 30,0, 0)


controller.navigate_challenge()


Example #23
    def __init__(self):

        self.robot = Robot()
        self.robot.pose.position.x = 3.0
        self.robot.pose.position.y = 5.0
        self.robot_controller = RobotController(self.robot)
Example #24
class SawyerOneShot(object):
    EE_STEPS = 2
    ACTION_SEQUENCE_LENGTH = 10
    SPRING_STIFF = 240
    CONTROL_RATE = 10000
    PREDICT_RATE = 20
    UI_RATE = 1

    CROP_H_MIN = 0
    CROP_H_MAX = -150
    CROP_W_MIN = 190
    CROP_W_MAX = -235
    TRAJ_LEN = 40

    HUMAN_MODEL = 'to_test/model_human_maml/place_sawyer_maml_3_layers_200_dim_1d_conv_ee_3_32_act_2_32_20x1_filters_64_128_3x3_filters_human_light_68k.meta'
    HUMAN_BIAS = 'to_test/model_human_maml/scale_and_bias_place_sawyer_kinect_view_human.pkl'

    ROBOT_MODEL = 'to_test/model_maml_kinect2_2/place_sawyer_maml_3_layers_200_dim_ee_2_layers_100_dim_clip_20_kinect_view_47k.meta'
    ROBOT_BIAS = 'to_test/model_maml_kinect2_2/scale_and_bias_place_sawyer_kinect_view_both.pkl'
    Z_SAFETY_THRESH = 0.20

    def __init__(self):

        self.ctrl = RobotController(self.CONTROL_RATE)

        self.recorder = RobotRecorder(save_dir='',
                                      use_aux=False,
                                      save_actions=False,
                                      save_images=False)
        self.image_publisher = rospy.Publisher('/robot/head_display',
                                               Image_msg,
                                               queue_size=2)
        self.load_splashes()

        self.control_rate = rospy.Rate(self.PREDICT_RATE)
        self.ui_rate = rospy.Rate(self.UI_RATE)

        self.robot_predictor = setup_predictor(
            self.ROBOT_MODEL, self.ROBOT_BIAS, self.CROP_H_MIN,
            self.CROP_H_MAX, self.CROP_W_MIN, self.CROP_W_MAX)
        self.human_predictor = setup_predictor_human(
            self.HUMAN_MODEL, self.HUMAN_BIAS, self.CROP_H_MIN,
            self.CROP_H_MAX, self.CROP_W_MIN, self.CROP_W_MAX)
        self.is_human = False

        self.predictor = self.robot_predictor

        rospy.Subscriber("/keyboard/keyup", Key, self.keyboard_up_listener)
        self.weiss_pub = rospy.Publisher('/wsg_50_driver/goal_position',
                                         Cmd,
                                         queue_size=10)
        # rospy.Subscriber("/wsg_50_driver/status", Status, self.weiss_status_listener)

        self._navigator = intera_interface.Navigator()
        self.demo_collect = self._navigator.register_callback(
            self.record_demo, 'right_button_ok')
        self.swap_key = self._navigator.register_callback(
            self.swap_sawyer_cuff, 'right_button_back')
        self.start_key = self._navigator.register_callback(
            self.start_traj_cuff, 'right_button_square')
        self.neutral_key = self._navigator.register_callback(
            self.neutral_cuff, 'right_button_show')
        self.x_button = self._navigator.register_callback(
            self.gripper_cuff, 'right_button_triangle')

        self.calc = EE_Calculator()

        print 'ONE SHOT READY!'
        self.running = False
        self.demo_imgs = None

        self.move_netural()

        rospy.spin()

    def publish_to_head(self, img):
        num_rows, num_cols = img.shape[:2]
        rotation_matrix = np.array([[
            0.,
            1.,
            0.,
        ], [
            -1.,
            0.,
            1024.,
        ]])
        img = cv2.warpAffine(img, rotation_matrix,
                             (num_rows, num_cols))[:, ::-1, :]

        img = np.swapaxes(img, 0, 1)[::-1, ::-1, :]

        #I don't know why the transforms above work either

        img_message = self.recorder.bridge.cv2_to_imgmsg(img)
        self.image_publisher.publish(img_message)

    def gripper_cuff(self, value):
        if self.running or not value:
            return
        print value
        self.set_weiss_griper(10.)

    def neutral_cuff(self, value):
        if self.running or not value:
            return
        self.move_netural()

    def load_splashes(self):
        self.human_splash = cv2.imread('splashes/human_imitation_start.png')
        self.robot_splash = cv2.imread('splashes/robot_imitation_start.png')
        self.demo_splash = cv2.imread('splashes/recording_oneshot.png')
        #make review splash

        self.publish_to_head(self.robot_splash)

    def swap_model(self):
        if self.is_human:
            self.publish_to_head(self.robot_splash)
            self.predictor = self.robot_predictor
            self.is_human = False
        else:
            self.predictor = self.human_predictor
            self.is_human = True
            self.publish_to_head(self.human_splash)
        self.demo_imgs = None

    def swap_sawyer_cuff(self, value):
        if not value or self.running:
            return
        self.swap_model()

    def start_traj_cuff(self, value):
        if not value or self.running:
            return
        if self.demo_imgs is None:
            print "PLEASE COLLECT DEMOS FIRST"
            return
        self.run_trajectory()

    def record_demo(self, value):
        if not value or self.running:
            return
        print "beginning demo!"
        self.running = True
        self.demo_imgs = []

        traj_ee_pos = np.zeros((self.TRAJ_LEN, 7))
        traj_ee_velocity = np.zeros((self.TRAJ_LEN, 6))
        full_duration = float(self.TRAJ_LEN) / self.PREDICT_RATE

        for i in range(4, 0, -1):
            splash = np.copy(self.demo_splash)
            if i > 1:
                cv2.putText(splash, "{}...".format(i - 1), (375, 460),
                            cv2.FONT_HERSHEY_SIMPLEX, 6, (255, 255, 255), 20,
                            cv2.LINE_AA)
                self.publish_to_head(splash)
                self.ui_rate.sleep()
            else:
                cv2.putText(splash, "GO!", (375, 460),
                            cv2.FONT_HERSHEY_SIMPLEX, 6, (255, 255, 255), 20,
                            cv2.LINE_AA)
                self.publish_to_head(splash)
                for i in range(5):
                    self.control_rate.sleep()

        for i in range(self.TRAJ_LEN):
            splash = np.copy(self.demo_splash)
            cv2.putText(
                splash,
                "{:.2f}s".format(full_duration - i * 1. / self.PREDICT_RATE),
                (250, 460), cv2.FONT_HERSHEY_SIMPLEX, 6, (255, 255, 255), 20,
                cv2.LINE_AA)
            self.publish_to_head(splash)

            self.control_rate.sleep()
            self.demo_imgs.append(self.recorder.ltob.img_cv2)
            traj_ee_pos[i, :] = self.recorder.get_endeffector_pos()

            angles = self.recorder.get_joint_angles()
            velocities = self.recorder.get_joint_angles_velocity()

            traj_ee_velocity[i, :] = self.calc.jacobian(
                angles.reshape(-1)).dot(velocities.reshape(
                    (-1, 1))).reshape(-1)

        splash = np.copy(self.demo_splash)
        cv2.putText(splash, "{:.2f}s".format(0.00), (250, 460),
                    cv2.FONT_HERSHEY_SIMPLEX, 6, (255, 255, 255), 20,
                    cv2.LINE_AA)
        self.publish_to_head(splash)

        final_ee = np.tile(traj_ee_pos[-1, :2], (40, 1))
        self.record_state = traj_ee_pos
        self.record_action = np.concatenate(
            (traj_ee_velocity[:, :3], final_ee), axis=1)

        self.demo_imgs = np.stack([
            cv2.resize(img[self.CROP_H_MIN:self.CROP_H_MAX,
                           self.CROP_W_MIN:self.CROP_W_MAX, :], (100, 100),
                       interpolation=cv2.INTER_AREA)[:, :, ::-1]
            for img in self.demo_imgs
        ],
                                  axis=0)
        for i in self.demo_imgs:
            splash = np.copy(self.demo_splash)
            resized_i = cv2.resize(i, (400, 400),
                                   interpolation=cv2.INTER_CUBIC)
            splash[190:590, 316:716, :] = resized_i[:, :, ::-1]
            self.publish_to_head(splash)

            cv2.imshow('img', i[:, :, ::-1])
            cv2.waitKey(200)
        cv2.destroyAllWindows()

        self.running = False
        print "DEMO DONE"

        if self.is_human:
            self.publish_to_head(self.human_splash)
        else:
            self.publish_to_head(self.robot_splash)

    def keyboard_up_listener(self, key_msg):
        if key_msg.code == 99 and not self.running:  #c
            if self.demo_imgs is None:
                print "PLEASE COLLECT A DEMO FIRST"
                return

            self.run_trajectory()

        if key_msg.code == 101 and not self.running:  #e
            rospy.signal_shutdown('User shutdown!')
        if key_msg.code == 110 and not self.running:  #n
            self.move_netural()
        if key_msg.code == 115 and not self.running:  #s
            self.swap_model()
        if key_msg.code == 103 and not self.running:  #g
            self.set_weiss_griper(10.)

    def query_action(self):
        image = cv2.resize(
            self.recorder.ltob.img_cv2[self.CROP_H_MIN:self.CROP_H_MAX,
                                       self.CROP_W_MIN:self.CROP_W_MAX, :],
            (100, 100),
            interpolation=cv2.INTER_AREA)[:, :, ::-1]

        robot_configs = self.recorder.get_endeffector_pos()

        if image is None or robot_configs is None:
            return None
        action, ee = self.predictor(self.demo_imgs, self.record_state,
                                    self.record_action, image, robot_configs)
        self.img_stack.append(image)

        return action, ee

    def move_to(self, des_pos, interp=True):
        desired_pose = inverse_kinematics.get_pose_stamped(
            des_pos[0], des_pos[1], des_pos[2], inverse_kinematics.EXAMPLE_O)
        start_joints = self.ctrl.limb.joint_angles()
        try:
            des_joint_angles = inverse_kinematics.get_joint_angles(
                desired_pose, seed_cmd=start_joints, use_advanced_options=True)
        except ValueError:
            rospy.logerr('no inverse kinematics solution found, '
                         'going to reset robot...')
            current_joints = self.recorder.get_joint_angles()
            des_joint_angles = current_joints

        if interp:
            self.ctrl.set_joint_positions_interp(des_joint_angles)
        else:
            self.ctrl.limb.set_joint_position_speed(0.15)
            self.ctrl.set_joint_positions(des_joint_angles)

    def set_weiss_griper(self, width):
        cmd = Cmd()
        cmd.pos = width
        cmd.speed = 100.
        self.weiss_pub.publish(cmd)

    def move_netural(self, gripper_open=True):
        self.ctrl.set_neutral()
        if gripper_open:
            self.set_weiss_griper(100.)

    def run_trajectory(self):
        self.running = True
        self.start = self.recorder.get_endeffector_pos()
        print 'actual end eep', self.start
        self.move_netural(gripper_open=False)
        self.img_stack = []

        step = 0
        actions = []

        for i in range(self.EE_STEPS):
            current_eep = self.recorder.get_endeffector_pos()
            eep_diff_action, pred_final = self.query_action()
            current_eep[:2] = pred_final

            # current_eep[2] += np.sum(np.abs(current_eep[:2])) * 0.05
            self.move_to(current_eep[:3], i > 0)

        while step < self.ACTION_SEQUENCE_LENGTH:
            current_eep = self.recorder.get_endeffector_pos()

            eep_diff_action, pred_final = self.query_action()
            current_eep[:3] += 0.05 * eep_diff_action

            current_eep[2] = max(current_eep[2], self.Z_SAFETY_THRESH)

            self.move_to(current_eep[:3])

            step += 1
        self.set_weiss_griper(100.)
        print 'end', self.recorder.get_endeffector_pos()
        clip = mpy.ImageSequenceClip([i for i in self.img_stack], fps=20)
        clip.write_gif('test_frames.gif')
        self.running = False
Example #25
can = Canvas()
mymap = Map(can);
particle = Particles(can)

mymap.add_wall((0,0,0,168));        # a
mymap.add_wall((0,168,84,168));     # b
mymap.add_wall((84,126,84,210));    # c
mymap.add_wall((84,210,168,210));   # d
mymap.add_wall((168,210,168,84));   # e
mymap.add_wall((168,84,210,84));    # f
mymap.add_wall((210,84,210,0));     # g
mymap.add_wall((210,0,0,0));        # h

mymap.add_wall((84,30,180,30));
mymap.add_wall((180,30,180,54));
mymap.add_wall((180,54,138,54));
mymap.add_wall((138,54,138,168));
mymap.add_wall((138,168,114,168));
mymap.add_wall((114,168,114,84));
mymap.add_wall((114,84,84,84));
mymap.add_wall((84,84,84,30));

mymap.draw();

controller = RobotController(particle)

particleSet = controller.initParticles()


controller.navigate2()
Example #26
class SawyerImitation(object):
    def __init__(self, model_path, vgg19_path):
        self.ctrl = RobotController()

        self.recorder = RobotRecorder(save_dir='',
                                      seq_len=60,
                                      use_aux=False,
                                      save_video=True,
                                      save_actions=False,
                                      save_images=False)

        self.action_interval = 10 #Hz
        self.action_sequence_length = 10

        self.control_rate = rospy.Rate(self.action_interval)
        self.predictor = setup_predictor(model_path, vgg19_path)
        self.save_ctr = 0

        self.s = 0
        self.imp_ctrl_publisher = rospy.Publisher('desired_joint_pos', JointState, queue_size=1)
        self.imp_ctrl_release_spring_pub = rospy.Publisher('release_spring', Float32, queue_size=10)
        self.imp_ctrl_active = rospy.Publisher('imp_ctrl_active', Int64, queue_size=10)
        self.move_neutral()

    def move_neutral(self):
        # self.imp_ctrl_active.publish(0)
        self.ctrl.set_neutral()
        # self.set_neutral_with_impedance()
        # self.imp_ctrl_active.publish(1)
        # rospy.sleep(.2)

    def query_action(self, robot_configs):
        image = cv2.resize(self.recorder.ltob.img_cv2[:-150,150:-275,:], (224, 224), interpolation = cv2.INTER_AREA)



        if image is None or robot_configs is None:
            return None

        action, predicted_eep, image = self.predictor(image, robot_configs)

        self.img_stack.append(image[0, :, :, :])

        self.s += 1
        # if self.s <=5:
        #     action[np.abs(action) < 0.05] *= 15

        # print 'action vector: ', action
        # print 'predicted end effector pose: ', predicted_eep
        return action, predicted_eep

    def move_to(self, des_pos):
        desired_pose = inverse_kinematics.get_pose_stamped(des_pos[0],
                                                           des_pos[1],
                                                           des_pos[2],
                                                           inverse_kinematics.EXAMPLE_O)
        start_joints = self.ctrl.limb.joint_angles()
        try:
            des_joint_angles = inverse_kinematics.get_joint_angles(desired_pose, seed_cmd=start_joints,
                                                                   use_advanced_options=True)
        except ValueError:
            rospy.logerr('no inverse kinematics solution found, '
                         'going to reset robot...')
            current_joints = self.ctrl.limb.joint_angles()
            self.ctrl.limb.set_joint_positions(current_joints)
            raise Traj_aborted_except('raising Traj_aborted_except')

        # self.move_with_impedance(des_joint_angles)
        self.ctrl.set_joint_positions(des_joint_angles)


    def move_with_impedance(self, des_joint_angles):
        """
        non-blocking
        """
        js = JointState()
        js.name = self.ctrl.limb.joint_names()
        js.position = [des_joint_angles[n] for n in js.name]
        self.imp_ctrl_publisher.publish(js)

    def imp_ctrl_release_spring(self, maxstiff):
        self.imp_ctrl_release_spring_pub.publish(maxstiff)

    def move_with_impedance_sec(self, cmd, tsec = 2.):
        """
        blocking
        """
        tstart = rospy.get_time()
        delta_t = 0
        while delta_t < tsec:
            delta_t = rospy.get_time() - tstart
            self.move_with_impedance(cmd)

    def set_neutral_with_impedance(self):
        neutral_jointangles = [0.412271, -0.434908, -1.198768, 1.795462, 1.160788, 1.107675, 2.068076]
        cmd = dict(zip(self.ctrl.limb.joint_names(), neutral_jointangles))
        self.imp_ctrl_release_spring(20)
        self.move_with_impedance_sec(cmd)

    def run_trajectory(self):
        self.start = self.recorder.get_endeffector_pos()
        print 'actual end eep', self.start

        self.move_neutral()

        self.img_stack = []

        step = 0
        actions = []

        current_eep = self.recorder.get_endeffector_pos()
        eep_diff_action, predicted_eep = self.query_action(current_eep)
        predicted_eep[2] = max(predicted_eep[2], 0.25)
        current_eep[:3] = predicted_eep[:3]

        self.move_to(current_eep)

        while step < self.action_sequence_length:
            print step
            self.control_rate.sleep()
            current_eep = self.recorder.get_endeffector_pos()
            eep_diff_action, predicted_eep = self.query_action(current_eep)


            self.move_to(current_eep[:3] + eep_diff_action[:3])

            step += 1
        print 'end', self.recorder.get_endeffector_pos()
        clip = mpy.ImageSequenceClip([cv2.cvtColor(i, cv2.COLOR_BGR2RGB) for i in self.img_stack], fps=20)
        clip.write_gif('test_frames.gif')

        self.move_neutral()
Example #27
class RobotApp(Ui_MainWindow):



    def setupUi(self, MainWindow):
        super().setupUi(MainWindow)

        self.robot_controller = RobotController()
        self.robot_controller.connect_feedback(self.feedback_changed)
        self.robot_controller.connect_log(self.update_log)

        self.pbUp.clicked.connect(self.pbUp_clicked)
        self.pbDown.clicked.connect(self.pbDown_clicked)
        self.pbResetError.clicked.connect(self.pbResetError_clicked)
        self.pbPauseMotion.clicked.connect(self.pbPauseMotion_clicked)
        self.pbResumeMotion.clicked.connect(self.pbResumeMotion_clicked)
        self.pbClearMotion.clicked.connect(self.pbClearMotion_clicked)
        self.sldAngSpeedValue.valueChanged.connect(self.joint_speed_slided)
        self.sldCartSpeedValue.valueChanged.connect(self.cart_speed_slided)
        self.pbReadCurrAng.clicked.connect(self.pbReadCurrAng_clicked)
        self.pbMoveNewAngVal.clicked.connect(self.pbMoveNewAngVal_clicked)
        self.pbReadCurrCart.clicked.connect(self.pbReadCurrCart_clicked)
        self.pbMoveNewCartVal.clicked.connect(self.pbMoveNewCartVal_clicked)

        for i in range(0, 6):
            pbTheta_inc = getattr(self, f'pbTheta{i+1}_inc')
            pbTheta_dec = getattr(self, f'pbTheta{i+1}_dec')
            pbTheta_inc.clicked.connect(partial(self.pbTheta_inc_clicked, i))
            pbTheta_dec.clicked.connect(partial(self.pbTheta_dec_clicked, i))

        for i in range(0, 6):
            pbCart_inc = getattr(self, f'pbCart{i+1}_inc')
            pbCart_dec = getattr(self, f'pbCart{i+1}_dec')
            pbCart_inc.clicked.connect(partial(self.pbCart_inc_clicked, i))
            pbCart_dec.clicked.connect(partial(self.pbCart_dec_clicked, i))


    #...............................................................................
    #
    #     JOINTS
    #
    # ...............................................................................

    # Used to save joints values before they change
    def load_lEditAngle(self):
        MoveJoint_var[0] = float(self.lblAngle1.text())
        MoveJoint_var[1] = float(self.lblAngle2.text())
        MoveJoint_var[2] = float(self.lblAngle3.text())
        MoveJoint_var[3] = float(self.lblAngle4.text())
        MoveJoint_var[4] = float(self.lblAngle5.text())
        MoveJoint_var[5] = float(self.lblAngle6.text())

    def pbTheta_inc_clicked(self, index):
        self.move_joints(index, 1)

    def pbTheta_dec_clicked(self, index):
        self.move_joints(index, -1)

    # Read the actual position values
    def pbReadCurrAng_clicked(self):
        self.lEditAngle1.setText (self.lblAngle1.text())
        self.lEditAngle2.setText (self.lblAngle2.text())
        self.lEditAngle3.setText (self.lblAngle3.text())
        self.lEditAngle4.setText (self.lblAngle4.text())
        self.lEditAngle5.setText (self.lblAngle5.text())
        self.lEditAngle6.setText (self.lblAngle6.text())

    # Moves to the new values
    def pbMoveNewAngVal_clicked(self):
        MoveJoint_var[0] = float(self.lEditAngle1.text())
        MoveJoint_var[1] = float(self.lEditAngle2.text())
        MoveJoint_var[2] = float(self.lEditAngle3.text())
        MoveJoint_var[3] = float(self.lEditAngle4.text())
        MoveJoint_var[4] = float(self.lEditAngle5.text())
        MoveJoint_var[5] = float(self.lEditAngle6.text())
        self.robot_controller.move_joints(*MoveJoint_var)
        self.lEditAngle1.setText("0")
        self.lEditAngle2.setText("0")
        self.lEditAngle3.setText("0")
        self.lEditAngle4.setText("0")
        self.lEditAngle5.setText("0")
        self.lEditAngle6.setText("0")

    # Set speed value for joint motion
    def joint_speed_slided(self):
        AngSpeed = self.sldAngSpeedValue.value()
        self.robot_controller.set_joint_vel(int(AngSpeed))

    # Execute joint motion
    def move_joints(self, index, direction):
        angles = [0, 0, 0, 0, 0, 0]
        angles[index] = direction * self.sldAngIncDecValue.value()
        self.robot_controller.move_jointsDelta(*angles)

    #...............................................................................
    #
    #     CARTESIANS
    #
    # ...............................................................................


    def pbCart_inc_clicked(self, index):
        self.move_linear(index, 1)

    def pbCart_dec_clicked(self, index):
        self.move_linear(index, -1)


    # Set speed value for linear motions
    def cart_speed_slided(self):
        CartSpeed = self.sldCartSpeedValue.value()
        self.robot_controller.set_cart_lin_vel(CartSpeed)

    # Execute linear motion
    def move_linear(self, index, direction):
        coords = [0, 0, 0, 0, 0, 0]
        coords[index] = direction * self.sldCartIncDecValue.value()
        self.robot_controller.move_linearDelta(*coords)

    # Read the actual position values
    def pbReadCurrCart_clicked(self):
        self.lEditCart1.setText (self.lblCart1.text())
        self.lEditCart2.setText (self.lblCart2.text())
        self.lEditCart3.setText (self.lblCart3.text())
        self.lEditCart4.setText (self.lblCart4.text())
        self.lEditCart5.setText (self.lblCart5.text())
        self.lEditCart6.setText (self.lblCart6.text())

    # Moves to the new values
    def pbMoveNewCartVal_clicked(self):
        MoveLin_var[0] = float(self.lEditCart1.text())
        MoveLin_var[1] = float(self.lEditCart2.text())
        MoveLin_var[2] = float(self.lEditCart3.text())
        MoveLin_var[3] = float(self.lEditCart4.text())
        MoveLin_var[4] = float(self.lEditCart5.text())
        MoveLin_var[5] = float(self.lEditCart6.text())
        self.robot_controller.move_lin(*MoveLin_var)
        self.lEditCart1.setText("0")
        self.lEditCart2.setText("0")
        self.lEditCart3.setText("0")
        self.lEditCart4.setText("0")
        self.lEditCart5.setText("0")
        self.lEditCart6.setText("0")

        self.tableWidget.setColumnWidth(1,50)
        self.tableWidget.setColumnWidth(2,50)
        self.tableWidget.setColumnWidth(3,50)
        self.tableWidget.setColumnWidth(4,50)
        self.tableWidget.setColumnWidth(5,50)
        self.tableWidget.setColumnWidth(6,50)
        item = QTableWidgetItem()
        item.setText("100.000")
        self.tableWidget.setItem(3,1,item)

    #...............................................................................
    #
    #     BUTTONS
    #
    # ...............................................................................

    # Commands the def up
    def pbUp_clicked(self):
        self.robot_controller.up()

    # Commands the def down
    def pbDown_clicked(self):
        self.robot_controller.down()



    #...............................................................................
    #
    #     COMMANDS
    #
    # ...............................................................................

 #   def get_joints(self, t1, t2, t3, t4, t5, t6):
   #     return self.mecarobot.run('GetJoints', [t1, t2, t3, t4, t5, t6])

    def update_log(self, direction, message):
        if direction == 'in':
            self.textedit_log.insertHtml(f'<span>{message}</span><br>')
        else:
            self.textedit_log.insertHtml(f'<span style="color: gray; font-style: italic;">{message}</span><br>')


    def feedback_changed(self, code, values):
        if code == '3007':
            self.lblAngle1.setText(f'{values[0]:.3f}')
            self.lblAngle2.setText(f'{values[1]:.3f}')
            self.lblAngle3.setText(f'{values[2]:.3f}')
            self.lblAngle4.setText(f'{values[3]:.3f}')
            self.lblAngle5.setText(f'{values[4]:.3f}')
            self.lblAngle6.setText(f'{values[5]:.3f}')
            for i in range(0, 6):
                MoveJoint_var[i]=values[i]

        if code == '3010':
            self.lblCart1.setText(f'{values[0]:.3f}')
            self.lblCart2.setText(f'{values[1]:.3f}')
            self.lblCart3.setText(f'{values[2]:.3f}')
            self.lblCart4.setText(f'{values[3]:.3f}')
            self.lblCart5.setText(f'{values[4]:.3f}')
            self.lblCart6.setText(f'{values[5]:.3f}')

    def pbResetError_clicked(self):
        self.robot_controller.reseterror()

    def pbClearMotion_clicked(self):
        self.robot_controller.clear_motion()

    def pbPauseMotion_clicked(self):
        self.robot_controller.pause_motion()

    def pbResumeMotion_clicked(self):
        self.robot_controller.resume_motion()
Example #28
class GameController:
    def __init__(self, robot_port):
        self.game = Game()
        self.robot = RobotController(robot_port)

    def startGame(self):
        msg = Message()
        msg.command = Command.START
        return self.encodeMessage(msg)

    def endGame(self):
        msg = Message()
        msg.command = Command.YOUWIN
        return self.encodeMessage(msg)

    def decodeMessage(self, data):
        return json.loads(data, object_hook=lambda d: SimpleNamespace(**d))

    def encodeMessage(self, msg):
        return str.encode(json.dumps(msg.__dict__))

    def processMessage(self, raw_data):

        msg = self.decodeMessage(raw_data)

        print(msg.command, " - ", self.game.state)

        if self.game.state == State.START and msg.command == Command.START:
            self.game.start()
            return raw_data  # send back start message

        if self.game.state == State.INGAME and msg.command == Command.START:
            msg.command = Command.STEP
            msg.row, msg.column = self.game.getStep()
            return self.encodeMessage(msg)

        if self.game.state == State.INGAME and msg.command == Command.STEP:

            # Create robot feedback if enemy got hit our ship
            isNegative = self.game.stepEnemy(msg.row, msg.column)

            if self.game.isEnded():
                print("I LOST")
                self.robot.endingComment("I LOST")
                self.game.ended()
                return self.endGame()

            if isNegative:
                self.robot.negativeFeedback()
            else:
                self.robot.positiveFeedback()

            msg.row, msg.column = self.game.getStep()
            return self.encodeMessage(msg)

        if self.game.state == State.INGAME and msg.command == Command.YOUWIN:
            print("I WIN")
            self.robot.endingComment("I WIN")
            self.game.ended()

    def isEnded(self):
        return self.game.isEnded()
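
The decodeMessage/encodeMessage pair is a json plus SimpleNamespace round trip; a self-contained sketch of the same pattern (the string command value is a placeholder for whatever the Command enum provides):

import json
from types import SimpleNamespace

def decode_message(data):
    # same idea as GameController.decodeMessage: JSON -> object with attribute access
    return json.loads(data, object_hook=lambda d: SimpleNamespace(**d))

def encode_message(msg):
    # same idea as GameController.encodeMessage: object __dict__ -> JSON bytes
    return str.encode(json.dumps(msg.__dict__))

msg = decode_message(b'{"command": "STEP", "row": 3, "column": 7}')
print(msg.command, msg.row, msg.column)   # STEP 3 7
msg.row = 4
print(encode_message(msg))                # b'{"command": "STEP", "row": 4, "column": 7}'
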
Example #29
class GUIController(QObject):
    """
    Sets up the different parts that populate the windows
    """
    map_change_pos = pyqtSignal(tuple)
    map_change_rotation = pyqtSignal(float)

    scanner_draw_points = pyqtSignal(list, tuple)
    scanner_draw_lines = pyqtSignal(list)
    scanner_draw_robot = pyqtSignal(tuple)
    scanner_clear = pyqtSignal()

    cartographer_grid_values_callback = pyqtSignal(list)

    def __init__(self, map_window, scanner_window, discovered_map_window):
        super(GUIController, self).__init__()
        self.map_window = map_window
        self.scanner_window = scanner_window
        self.discovered_map_window = discovered_map_window
        self.map_scene = map_window.scene
        self.scanner_scene = scanner_window.scene
        self.discovered_map_scene = discovered_map_window.scene

        self.map_change_pos.connect(self.map_scene.move_robot)
        self.map_change_rotation.connect(self.map_scene.rotate_robot)

        self.scanner_draw_points.connect(self.scanner_scene.draw_points)
        self.scanner_draw_lines.connect(self.scanner_scene.draw_lines)
        self.scanner_draw_robot.connect(self.scanner_scene.draw_robot)
        self.scanner_clear.connect(self.scanner_scene.clear)

        self.cartographer_grid_values_callback.connect(
            self.discovered_map_scene.update_grid)

        self.controller = None

        self.commands_queue = Queue()

    def run(self, robot_config_values, room_size, rects, start_pos,
            start_angle, square_size, cartographer_grid):
        self.is_running = True

        self.commands_queue = Queue()

        # Clear map and set sizes for views
        self.map_window.start(room_size)
        self.map_scene.draw_map(room_size, rects, start_pos, start_angle)
        self.scanner_window.start(room_size)
        self.discovered_map_window.start(room_size, square_size)

        # Create some objects for the RobotController to use, as we have all the data here
        hardware = Hardware(start_pos, start_angle, room_size, rects,
                            robot_config_values, self.map_change_pos,
                            self.map_change_rotation)
        cartographer = Cartographer(room_size, square_size,
                                    self.cartographer_grid_values_callback,
                                    cartographer_grid)

        self.controller = RobotController(room_size, hardware, cartographer,
                                          self.commands_queue,
                                          self.scanner_draw_points,
                                          self.scanner_draw_lines,
                                          self.scanner_draw_robot,
                                          self.scanner_clear)

        self.controller.start()

    def send_command_forward(self):
        self.commands_queue.put("forward")

    def send_command_rotate_clockwise(self):
        self.commands_queue.put("turn clockwise")

    def send_command_rotate_anticlockwise(self):
        self.commands_queue.put("turn anticlockwise")
Example #30
 def __init__(self, robot_port):
     self.game = Game()
     self.robot = RobotController(robot_port)
Example #31
                                    rospy.Duration(0.1))
        rospy.sleep(0.1)
        # goal[1] -= .2
        # total_nsecs = time.nsecs + 1e8
        # fractional, integer = math.modf(total_nsecs/1e9)
        # time.secs += int(integer)
        # time.nsecs += fractional*1e9
        # controller.move_to_goal(*(goal + HOME_ORI), time=time)
    elif moving and hit:
        moving = False
        hit = False
        controller.move_to_goal(*HOME)
    # elif moving:
    #     moving = False
    #     controller.move_to_goal(*HOME)


if __name__ == '__main__':
    rospy.init_node('ball_controller')
    controller = RobotController(8)

    # hit back controller
    controller_hit = RobotController(8)

    controller.move_to_goal(*HOME)
    sub = rospy.Subscriber('/ball_detection/predicted_ball_state',
                           PosVelTimed,
                           callback,
                           queue_size=10)
    rospy.spin()
Example #32
class SawyerImitation(object):
    def __init__(self, model_path, vgg19_path):
        self.ctrl = RobotController()

        self.recorder = RobotRecorder(save_dir='',
                                      seq_len=60,
                                      use_aux=False,
                                      save_video=True,
                                      save_actions=False,
                                      save_images=False)

        self.action_interval = 20 #Hz
        self.action_sequence_length = 60

        self.traj_duration = self.action_sequence_length * self.action_interval
        self.action_rate = rospy.Rate(self.action_interval)
        self.control_rate = rospy.Rate(20)
        self.predictor = setup_predictor(model_path, vgg19_path)
        self.save_ctr = 0
        self.ctrl.set_neutral()
        self.s = 0

    def query_action(self):
        image = cv2.resize(self.recorder.ltob.img_cv2[:-150,275:-170,:], (224, 224), interpolation=cv2.INTER_AREA)

        robot_configs = np.concatenate((self.recorder.get_joint_angles(), self.recorder.get_endeffector_pos()[:3]))

        if image is None or robot_configs is None:
            return None

        action, predicted_eep = self.predictor(image, robot_configs)
        self.img_stack.append(image)

        self.s += 1
        # if self.s <=5:
        #     action[np.abs(action) < 0.05] *= 15

        # print 'action vector: ', action
        # print 'predicted end effector pose: ', predicted_eep
        return action, predicted_eep

    def apply_action(self, action):
        try:
            self.ctrl.set_joint_velocities(action)

        except OSError:
            rospy.logerr('collision detected, stopping trajectory, going to reset robot...')
            rospy.sleep(.5)
        if self.ctrl.limb.has_collided():
            rospy.logerr('collision detected!!!')
            rospy.sleep(.5)
    def move_to(self, des_pos):
        desired_pose = inverse_kinematics.get_pose_stamped(des_pos[0],
                                                           des_pos[1],
                                                           des_pos[2],
                                                           inverse_kinematics.EXAMPLE_O)
        start_joints = self.ctrl.limb.joint_angles()
        try:
            des_joint_angles = inverse_kinematics.get_joint_angles(desired_pose, seed_cmd=start_joints,
                                                                   use_advanced_options=True)
        except ValueError:
            rospy.logerr('no inverse kinematics solution found, '
                         'going to reset robot...')
            current_joints = self.ctrl.limb.joint_angles()
            self.ctrl.limb.set_joint_positions(current_joints)
            raise Traj_aborted_except('raising Traj_aborted_except')

        # self.move_with_impedance(des_joint_angles)
        self.ctrl.set_joint_positions(des_joint_angles)
    def run_trajectory(self):
        start = self.recorder.get_endeffector_pos()[:3]
        # print 'actual end eep', self.start
        self.ctrl.set_neutral()
        self.img_stack = []

        step = 0
        actions = []

        action, predicted_eep = self.query_action()
        print 'predicted eep', predicted_eep[:2]
        # print 'diff sqd', np.sum(np.power(predicted_eep - start, 2))
        # print 'diff dim', np.abs(predicted_eep - start)

        start[:2] = predicted_eep[:2]

        self.move_to(start)


        while step < self.action_sequence_length:
            self.control_rate.sleep()
            action, predicted_eep = self.query_action()


            action_dict = dict(zip(self.ctrl.joint_names, action))
            # for i in range(len(self.ctrl.joint_names)):
            #     action_dict[self.ctrl.joint_names[i]] = action[i]
                # print 'key', self.ctrl.joint_names[i], 'value', action_dict[self.ctrl.joint_names[i]]
            actions.append(action)
            self.apply_action(action_dict)

            step += 1
        print 'end', self.recorder.get_endeffector_pos()
        clip = mpy.ImageSequenceClip([cv2.cvtColor(i, cv2.COLOR_BGR2RGB) for i in self.img_stack], fps=20)
        clip.write_gif('test_frames.gif')