def __init__(self,
                 robot_file=(dirname(abspath(__file__)) +
                             "/../../../../json/nico_humanoid_upper.json"),
                 cache_file=(dirname(abspath(__file__)) + '/cube_cache.pkl')):
        self.robot = Motion.Motion(robot_file, vrep=False)

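        # Clean up the motors and exit gracefully when the script is
        # interrupted with Ctrl+C.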
        def signal_handler(sig, frame):
            self.cleanup()
            sys.exit(0)

        signal.signal(signal.SIGINT, signal_handler)

        with open(cache_file, 'rb') as f:
            self.cache = pickle.load(f)

        self.robot.setAngle("head_y", 45, 0.05)
        self.robot.setAngle("head_z", 22.5, 0.05)
        self.initial_position()
        time.sleep(3)
Example #2
    parser.add_argument(
        "--vrep",
        action="store_true",
        default=False,
        help="let it run on vrep than instead of real robot",
    )
    parser.add_argument(
        "--stiffoff",
        action="store_true",
        default=False,
        help="sets the stiffness to off after movement",
    )
    args = parser.parse_args()
    # print args

    robot = Motion.Motion(args.json, vrep=args.vrep)
    mov = Mover(robot, stiff_off=args.stiffoff)

    command = args.command

    if args.filename is not None:
        filename = "../../../../../moves_and_positions/" + args.filename
    else:
        filename = None

    if args.subset is not None:
        subsetfilename = "../../../../../moves_and_positions/" + args.subset
    else:
        subsetfilename = None

    if command == "m":
Example #3
#!/usr/bin/env python

import atexit
import logging
import time
from os.path import abspath, dirname

from nicomotion import Kinematics, Motion

logging.basicConfig(level=logging.WARNING)

vrep = True

if vrep:
    robot = Motion.Motion(
        dirname(abspath(__file__)) +
        "/../../../../json/nico_humanoid_legged_with_hands_mod-vrep.json",
        vrep=True)
    # vrep initial position
    vel = 0.02
    robot.setAngle("l_shoulder_z", -10, vel)
    robot.setAngle("l_shoulder_y", 20, vel)
    robot.setAngle("l_arm_x", -20, vel)
    robot.setAngle("l_elbow_y", 100, vel)
    robot.setAngle("l_wrist_z", 0, vel)
    robot.setAngle("l_wrist_x", 0, vel)
else:
    robot = Motion.Motion(dirname(abspath(__file__)) +
                          "/../../../../json/nico_humanoid_upper.json",
                          vrep=False)

Example #4
    def __init__(self, vrep=False):
        self.area = 0
        self.xPosition = 0
        self.yPosition = 0
        self.SPEED = 0.02  # motors speed
        self.SAMPLE_TIME = 0.3  # time to sample a new image (in seconds)
        self.ANGLE_STEP = 5  # angle to turn the motors
        self.ANGLE_STEP_BIG = 7.5  # angle to turn the motors
        self.MIN_AREA = 40000  # 120000  # minimal area to identify a color; larger values reject more noise but require the object to be closer
        self.MAX_X = 640  # camera resolution in x axis
        self.MAX_Y = 480  # camera resolution in y axis
        self.ZOOM = 200
        self.TOLERANCE = 40  # tolerance for considering the object centered in the image

        if vrep:
            config = Motion.Motion.vrepRemoteConfig()
            config["vrep_scene"] = (
                dirname(abspath(__file__)) + "/../../../../v-rep/NICO-seated.ttt"
            )
            self.nico = Motion.Motion(
                dirname(abspath(__file__))
                + "/../../../../json/nico_humanoid_vrep.json",
                vrep=True,
                vrepConfig=config,
            )

        else:
            self.nico = Motion.Motion(
                dirname(abspath(__file__))
                + "/../../../../json/nico_humanoid_upper_rh7d.json",
            )
            self.face = faceExpression()

        def signal_handler(sig, frame):
            self.cleanup()
            sys.exit(0)

        signal.signal(signal.SIGINT, signal_handler)

        self.running = True
        self.last_detection = 0

        threads = list()

        t = threading.Thread(target=self.color_detection)
        threads.append(t)
        t.start()

        t = threading.Thread(target=self.head_movement)
        threads.append(t)
        t.start()

        t = threading.Thread(target=self.arm_movement)
        threads.append(t)
        t.start()

        t = threading.Thread(target=self.face_expression)
        threads.append(t)
        t.start()

        self.threads = threads
Example #5
#!/usr/bin/env python

# A very simple example using the Mover class
# Toggles the left arm between two positions and opens and closes the hand
# (Leads to a dice throw if you put a die in the robot's hand)


import logging
import time

from nicomotion import Motion, Mover

logging.basicConfig(level=logging.WARNING)

mover_path = "../../../moves_and_positions/"
robot = Motion.Motion("../../../json/nico_humanoid_upper.json", vrep=False)
mov = Mover.Mover(robot, stiff_off=True)

key = "c"
while key != "q":
    # mov.play_movement(mover_path+"mov_from_table_to_get.csv",
    # subsetfname=mover_path+"subset_left_arm_and_head.csv",move_speed=0.01)
    mov.move_file_position(mover_path + "pos_from_table_to_get.csv",
                           subsetfname=(
                               mover_path + "subset_left_arm_and_head.csv"),
                           move_speed=0.05)
    robot.openHand("LHand")
    raw_input()
    robot.closeHand("LHand", 1.0, 0.5)
    time.sleep(2)
    # mov.play_movement(mover_path+"mov_from_get_throw.csv",subsetfname=mover_path+"subset_left_arm.csv",move_speed=0.03)
Example #6
vrep = True
nico_root = dirname(abspath(__file__)) + "/../../.."

# check arguments
if len(sys.argv) != 2:
    print("Usage: {} arg ('yes' or 'no')".format(sys.argv[0]))
    exit(1)

if vrep:
    # get default config for remote api
    # vrepConfig = Motion.Motion.vrepRemoteConfig()
    # set scene (simulation will start automatically if this is set)
    # vrepConfig["vrep_scene"] = nico_root + "/v-rep/NICO-seated.ttt"
    # init simulated robot
    robot = Motion.Motion(
        nico_root + "/json/nico_humanoid_vrep.json",
        vrep=True,  # vrepConfig=vrepConfig
    )
else:
    # init real robot
    robot = Motion.Motion(nico_root + "/json/nico_humanoid_upper.json",
                          vrep=False)

# perform motion based on arg
position = -20
for i in range(6):
    position = position * -1
    if sys.argv[1] == "yes":
        robot.setAngle("head_y", position, 0.05)
    if sys.argv[1] == "no":
        robot.setAngle("head_z", position, 0.05)
    time.sleep(1.5)
Example #7
                 "roll", "pitch", "yaw" in radians
        :rtype: dict(str: float)
        """
        self.logger.info("Loading origin...")
        trans_matrix = np.load(path)
        self.logger.debug("{} loaded from {}".format(trans_matrix, path))
        self.logger.info("Successfully loaded '{}'".format(path))
        return trans_matrix


if __name__ == "__main__":
    logging.basicConfig(level=logging.WARNING)

    robot = Motion.Motion(
        ("../../../../../json/" +
         "nico_humanoid_legged_with_hands_mod-vrep.json"),
        vrep=True,
    )

    kinematics = Kinematics(robot)
    kinematics.logger.setLevel(logging.DEBUG)

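    # Move the left arm joints to zero before inspecting the kinematic chains.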
    vel = 0.02
    kinematics.robot.setAngle("l_shoulder_z", 0, vel)
    kinematics.robot.setAngle("l_shoulder_y", 0, vel)
    kinematics.robot.setAngle("l_arm_x", 0, vel)
    kinematics.robot.setAngle("l_elbow_y", 0, vel)
    kinematics.robot.setAngle("l_wrist_z", 0, vel)
    kinematics.robot.setAngle("l_wrist_x", 0, vel)

    kinematics.logger.info("Chains:")
Example #8
import logging
import time

from nicomotion import Motion

logging.basicConfig(level=logging.WARNING)

vrep = True

if vrep:
    vrepConfig = Motion.Motion.vrepRemoteConfig()
    # vrepConfig = Motion.Motion.pyrepConfig() # requires python 3
    vrepConfig["vrep_scene"] = "../../../v-rep/NICO-seated-with-table.ttt"
    robot = Motion.Motion(
        "../../../json/nico_humanoid_upper_with_hands_vrep.json",
        vrep=True,
        vrepConfig=vrepConfig)
else:
    robot = Motion.Motion("../../../json/nico_humanoid_upper_with_hands.json",
                          vrep=False)

position = 20

for i in range(10):
    robot.setAngle("r_arm_x", -80 + position, 0.05)
    robot.setAngle("r_elbow_y", -40 + position, 0.05)

    robot.setAngle("l_arm_x", 80 + position, 0.05)
    robot.setAngle("l_elbow_y", 40 + position, 0.05)

    if i % 2 == 0:
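The commented-out line in this example mentions Motion.Motion.pyrepConfig() as a
Python-3-only alternative to the remote-API configuration. A minimal sketch of that
variant, assuming the scene is still set through the "vrep_scene" key and that the
rest of the call signature is unchanged (both are assumptions not confirmed by the
excerpt above):

# Assumed PyRep variant (Python 3 only); the "vrep_scene" key is an assumption
# carried over from the vrepRemoteConfig usage above.
pyrepConfig = Motion.Motion.pyrepConfig()
pyrepConfig["vrep_scene"] = "../../../v-rep/NICO-seated-with-table.ttt"
robot = Motion.Motion(
    "../../../json/nico_humanoid_upper_with_hands_vrep.json",
    vrep=True,
    vrepConfig=pyrepConfig)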
Example #9
#!/usr/bin/env python

import logging
import time

from nicomotion import Motion

logging.basicConfig(level=logging.WARNING)

time.sleep(5)
# robot = Motion.Motion(
#     "../../../json/nico_humanoid_upper_with_hands.json", vrep=False)
robot = Motion.Motion("../../../json/nico_humanoid_upper_with_hands_vrep.json",
                      vrep=True,
                      vrepScene="../../../v-rep/NICO-seated-with-table.ttt")
position = 20

for i in xrange(10):
    robot.setAngle("r_arm_x", -80 + position, 0.05)
    robot.setAngle("r_elbow_y", -40 + position, 0.05)

    robot.setAngle("l_arm_x", 80 + position, 0.05)
    robot.setAngle("l_elbow_y", 40 + position, 0.05)

    if i % 2 == 0:
        if i % 4 == 0:
            robot.setAngle("head_z", -position, 0.05)
        else:
            robot.setAngle("head_z", position, 0.05)
    if i % 2 == 1:
        if i % 4 == 1:
Example #10
#!/usr/bin/env python

# USAGE: Use script with yes or no as parameter
# python yesno.py yes
# python yesno.py no

import logging
import sys
import time

from nicomotion import Motion

logging.basicConfig(level=logging.WARNING)

robot = Motion.Motion("../../../json/nico_humanoid_upper_with_hands_vrep.json",
                      vrep=False)

position = -20
for i in xrange(6):
    position = position * -1
    if sys.argv[1] == "yes":
        robot.setAngle("head_y", position, 0.05)
    if sys.argv[1] == "no":
        robot.setAngle("head_z", position, 0.05)
    time.sleep(1)

robot.setAngle("head_z", 0, 0.05)
robot.setAngle("head_y", 0, 0.05)
Example #11
# -*- coding: utf-8 -*-

import time
from os.path import abspath, dirname

from nicoemotionrecognition import EmotionRecognition
from nicoface.FaceExpression import faceExpression
from nicomotion import Motion
from nicovision.VideoDevice import VideoDevice

robot = Motion.Motion(dirname(abspath(__file__)) +
                      "/../../../json/nico_humanoid_upper.json",
                      vrep=False,
                      ignoreMissing=True)
face = faceExpression()
# torso NICO

camera = VideoDevice.autodetect_nicoeyes()[0]  # 0: left_eye, 1: right_eye

emotionRecogniton = EmotionRecognition.EmotionRecognition(device=camera,
                                                          robot=robot,
                                                          face=face,
                                                          voiceEnabled=True,
                                                          german=True)

# emotionRecogniton = EmotionRecognition.EmotionRecognition(
#     device='usb-046d_080a_6C686AA1-video-index0', robot=robot, face=face, voiceEnabled=True, german=True)
# legged NICO

#emotionRecogniton = EmotionRecognition.EmotionRecognition(device='usb-046d_080a_17E79161-video-index0', robot=robot, face=face,faceDetectionDelta=10, voiceEnabled=True)
Example #12
    "--disable-motion",
    dest="motion",
    action="store_false",
    help="Disables head movement",
)

parser.add_argument("--disable-gui",
                    dest="gui",
                    action="store_false",
                    help="Disables the GUI.")

args = parser.parse_args()

robot = None
if args.motion:
    robot = Motion.Motion(args.json_file, ignoreMissing=True)
face = faceExpression()
# torso NICO

camera_path = VideoDevice.autodetect_nicoeyes()[0]  # 0: left_eye, 1: right_eye

logger.info("Using camera %s", camera_path)

camera = VideoDevice.from_device(camera_path)

if "See3CAM" in camera_path:
    camera.zoom(300)

emotionRecogniton = EmotionRecognition.EmotionRecognition(
    device=camera,
    robot=robot,
Example #13
    def __init__(self,
                 sync_sleep_time,
                 interpolation=False,
                 fraction_max_speed=0.01,
                 wait=False,
                 motor_config='config.json',
                 vrep=False,
                 vrep_host='127.0.0.1',
                 vrep_port=19997,
                 vrep_scene=None):
        """
        The constructor of the class. Class properties should be set here,
        the robot handle should be created here, and any other initialization,
        such as setting angles to particular values, should also be taken
        care of here.

        :type sync_sleep_time: float
        :param sync_sleep_time: Time to sleep to allow the joints to reach their targets (in seconds)
        :type interpolation: bool
        :param interpolation: Flag to indicate if intermediate joint angles should be interpolated
        :type fraction_max_speed: float
        :param fraction_max_speed: Fraction of the maximum motor speed to use
        :type wait: bool
        :param wait: Flag to indicate whether the control should wait for each angle to reach its target
        :type motor_config: str
        :param motor_config: json configuration file
        :type vrep: bool
        :param vrep: Flag to indicate if VREP is to be used
        :type vrep_host: str
        :param vrep_host: IP address of VREP server
        :type vrep_port: int
        :param vrep_port: Port of VREP server
        :type vrep_scene: str
        :param vrep_scene: VREP scene to load
        """

        super(Nico, self).__init__()

        # Set the properties
        self.sync_sleep_time = sync_sleep_time
        self.interpolation = interpolation
        self.fraction_max_speed = fraction_max_speed
        self.wait = wait
        self.motor_config = motor_config
        self.vrep = vrep
        self.vrep_host = vrep_host
        self.vrep_port = vrep_port
        self.vrep_scene = vrep_scene

        # Create the robot handle
        self.robot_handle = Motion.Motion(self.motor_config, self.vrep,
                                          self.vrep_host, self.vrep_port,
                                          self.vrep_scene)

        # List of all joint names
        self.all_joint_names = self.robot_handle.getJointNames()

        # Initialize the joints
        # for joint_name in self.all_joint_names:
        #   self.set_angles({joint_name:0.0})

        # Sleep for a few seconds to allow the changes to take effect
        time.sleep(3)
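The docstring above spells out every constructor parameter. A minimal usage sketch
follows, assuming the class can be imported from a module named nico_wrapper (the
module name is an assumption; the class name Nico comes from the super() call in the
excerpt, and the attribute all_joint_names is set at the end of __init__):

# Hypothetical usage of the constructor documented above.
from nico_wrapper import Nico  # assumed module name

nico = Nico(sync_sleep_time=0.5,         # give joints 0.5 s to reach their targets
            fraction_max_speed=0.02,     # move at 2 % of the maximum motor speed
            motor_config="config.json",  # same default as in the signature above
            vrep=True,                   # talk to a simulated robot ...
            vrep_host="127.0.0.1",       # ... via V-REP's remote API
            vrep_port=19997)
print(nico.all_joint_names)              # joint names queried from Motion in __init__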
Example #14
    arm = sys.argv[1][0]

    if len(sys.argv) < 3:
        message = ("Please add at least one pose as argument. Multiple " +
                   "arguments will be executed in sequence.\nKnown poses " +
                   "are: \n{}".format(', '.join(poses)))
        sys.exit(message)

    for pose in sys.argv[2:]:
        if pose not in poses:
            sys.exit("Unknown pose {}, try one of the following: {}".format(
                pose, ', '.join(poses)))

    robot = Motion.Motion(dirname(abspath(__file__)) +
                          "/../../../../json/nico_humanoid_upper_rh7d.json",
                          vrep=False)

    prefix = 1. if arm == "l" else -1.

    robot.setAngle(arm + "_shoulder_z", prefix * 0., .03)
    robot.setAngle(arm + "_shoulder_y", prefix * -20., .03)
    robot.setAngle(arm + "_elbow_y", prefix * 80., .03)
    for pose in sys.argv[2:]:
        robot.setHandPose(hand, pose, .2)
        time.sleep(5.0)

    robot.setAngle(arm + "_shoulder_y", prefix * 0, .03)
    robot.setAngle(arm + "_elbow_y", prefix * 80, .03)
    robot.openHand(hand, .2)
    time.sleep(3)
Example #15
    connection.commit()
    connection.close()


# face interface
fe = faceExpression()
fe.sendFaceExpression("happiness")

# Instructions for the experimenter: bring the robot into its initial position
print("\n Please put the robot in position. Left arm on the table. Right " +
      "arm hanging down. Give RETURN after finished.\n")
raw_input()

# Put the left arm in defined position
robot = Motion.Motion(nico_path + "/json/nico_humanoid_upper.json",
                      vrep=False,
                      ignoreMissing=True)
atexit.register(cleanup, robot)

mover_path = "../../../../moves_and_positions/"
mov = Mover.Mover(robot, stiff_off=False)

# set the robot to be compliant
robot.disableTorqueAll()

robot.openHand('LHand', fractionMaxSpeed=fMS_hand)

robot.enableForceControl("l_wrist_z", 50)
robot.enableForceControl("l_wrist_x", 50)

#robot.enableForceControl("l_indexfingers_x", 50)
Example #16
# Erik Strahl

# GNU GPL License

from nicomotion import Motion

# Put the left arm in a defined position
robot = Motion.Motion("../../../../json/nico_humanoid_legged_with_hands_mod.json",
                      vrep=False)

# set the robot to be compliant
# robot.disableTorqueAll()

robot.openHand('LHand', 0.5)
Example #17
#!/usr/bin/env python

import logging
import time

from nicomotion import Motion

logging.basicConfig(level=logging.WARNING)

print("Waiting for 2 seconds - Do not know why")
time.sleep(2)
virtualRobot = Motion.Motion(
    "../../../json/nico_humanoid_upper_with_hands_vrep_mod.json", vrep=True)
realRobot = Motion.Motion(
    "../../../json/nico_humanoid_legged_with_hands_mod.json", vrep=False)


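# Continuously mirror every joint of the physical robot onto the simulated one.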
while True:

    for jName in realRobot.getJointNames():

        cont = True
        targetPosition = realRobot.getAngle(jName)

        print("Setting Virtual: Joint " +
              str(jName) + " to position " + str(targetPosition))

        virtualRobot.setAngle(jName, targetPosition, 0.05)

        time.sleep(0.1)