Example #1
    def __init__(self, devicename=None):
        """
        NicoRosFaceExpression provides :class:`nicoface.FaceExpression`
        functions over ROS

        :param devicename: name of the device on the serial
         (e.g. '/dev/ttyACM0', autodetected if None)
        :type devicename: str
        """
        # init face
        self.face = faceExpression(devicename)

        # init ROS
        rospy.init_node('faceexpression', anonymous=True)

        # setup subscriber
        rospy.Subscriber('nico/faceExpression/sendMouth',
                         msg.affffa, self._ROSPY_sendMouth)
        rospy.Subscriber('nico/faceExpression/sendEyebrow',
                         msg.sffff, self._ROSPY_sendEyebrow)
        rospy.Subscriber('nico/faceExpression/sendFaceExpression',
                         msg.s, self._ROSPY_sendFaceExpression)

        rospy.spin()
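# A minimal, hedged sketch of a client for the node above: it publishes one of
# the preset names to the sendFaceExpression topic. It assumes the nicomsg
# package provides the single-string message type "s" with a field named
# "param1"; check the actual message definition before relying on that name.
import rospy
import nicomsg.msg as msg

rospy.init_node("faceexpression_client", anonymous=True)
pub = rospy.Publisher("nico/faceExpression/sendFaceExpression", msg.s,
                      queue_size=1)
rospy.sleep(1.0)  # give the publisher time to connect to the subscriber
message = msg.s()
message.param1 = "happiness"  # assumed field name for the preset string
pub.publish(message)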
Example #2
from time import sleep
import argparse

from nicoface.FaceExpression import faceExpression

parser = argparse.ArgumentParser()
parser.add_argument(
    "-s",
    action="store_true",
    help="Enables simulated mode, where faces are shown as image instead",
)
args = parser.parse_args()

if args.s:
    fe = faceExpression(simulation=True)
else:
    fe = faceExpression()
    # handcoded presets
    # 'happiness','sadness','anger','disgust','surprise','fear','neutral','clear'
    for expression in (
            "happiness",
            "sadness",
            "anger",
            "disgust",
            "surprise",
            "fear",
            "neutral",
            "clear",
    ):
        fe.sendFaceExpression(expression)  # only works with a real robot
        sleep(1)
Example #3
            return pos
        except Exception:
            err_count += 1
            try:
                if err_count > 4:
                    print("returned last_finger_current")
                    return last_finger_currents[joint]
                # print("thumb failure")
            except Exception:
                pass
            print("finger current failure")
            sleep(0.2)


# face interface
fe = faceExpression()
fe.sendFaceExpression("happiness")

# Touch Sensor interface
# Adapt this to the serial number of your sensor
optsens = optoforce(ser_number="DSE0A125")

# definition of maximum current - this protects the hands from breaking! Do not change this unless you know what you are doing!

MAX_CUR_FINGER = 120
MAX_CUR_THUMB = 100

finger_force_calibrator = 1.0
thumb_force_calibrator = 1.0

# Data for robot movement in high-level mode
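# A minimal sketch of how the current limits above can be used to protect the
# fingers: a joint should stop closing once its present current exceeds the
# limit. "read_current" is a hypothetical callable standing in for however the
# motor current is actually read; it is not part of the original example.
def current_within_limit(joint, read_current, is_thumb=False):
    """Return True while the joint's present current is below its protective limit."""
    limit = MAX_CUR_THUMB if is_thumb else MAX_CUR_FINGER
    return read_current(joint) < limit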
Example #4
import time
import logging
import argparse

from nicoface.FaceExpression import faceExpression

logging.basicConfig(level=logging.INFO)
logger = logging.getLogger(__file__)

parser = argparse.ArgumentParser()
parser.add_argument(
    "-s",
    action="store_true",
    help="Enables simulated mode, where faces are shown as image instead",
)
args = parser.parse_args()

face = faceExpression(simulation=args.s)

logger.info("Morphing between face presets")

# display morphable face expression to initialize morphable polynomial face
face.send_morphable_face_expression("neutral")
time.sleep(2)

# morph between face expressions modelled after default presets
for preset in (
    "happiness",
    "sadness",
    "anger",
    "disgust",
    "surprise",
    "fear",
Example #5
    def __init__(self, vrep=False):
        self.area = 0
        self.xPosition = 0
        self.yPosition = 0
        self.SPEED = 0.02  # motor speed
        self.SAMPLE_TIME = 0.3  # time to sample a new image (in seconds)
        self.ANGLE_STEP = 5  # angle step to turn the motors
        self.ANGLE_STEP_BIG = 7.5  # larger angle step to turn the motors
        self.MIN_AREA = 40000  # 120000  # minimal area to identify a color; the larger the value, the less noise is considered, but the object has to be closer
        self.MAX_X = 640  # camera resolution on the x axis
        self.MAX_Y = 480  # camera resolution on the y axis
        self.ZOOM = 200
        self.TOLERANCE = 40  # tolerance to be considered in the middle of the image

        if vrep:
            config = Motion.Motion.vrepRemoteConfig()
            config["vrep_scene"] = (
                dirname(abspath(__file__)) + "/../../../../v-rep/NICO-seated.ttt"
            )
            self.nico = Motion.Motion(
                dirname(abspath(__file__))
                + "/../../../../json/nico_humanoid_vrep.json",
                vrep=True,
                vrepConfig=config,
            )

        else:
            self.nico = Motion.Motion(
                dirname(abspath(__file__))
                + "/../../../../json/nico_humanoid_upper_rh7d.json",
            )
            self.face = faceExpression()

        def signal_handler(sig, frame):
            self.cleanup()
            sys.exit(0)

        signal.signal(signal.SIGINT, signal_handler)

        self.running = True
        self.last_detection = 0

        threads = list()

        t = threading.Thread(target=self.color_detection)
        threads.append(t)
        t.start()

        t = threading.Thread(target=self.head_movement)
        threads.append(t)
        t.start()

        t = threading.Thread(target=self.arm_movement)
        threads.append(t)
        t.start()

        t = threading.Thread(target=self.face_expression)
        threads.append(t)
        t.start()

        self.threads = threads
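    # A possible shape for the cleanup() used by the signal handler above; this
    # is a sketch, not the original implementation. It assumes the worker loops
    # check self.running to terminate and that the Motion object provides
    # disableTorqueAll() to relax the motors (method name assumed).
    def cleanup(self):
        self.running = False  # ask the worker loops to stop
        for t in self.threads:
            t.join()  # wait for every worker thread to finish
        self.nico.disableTorqueAll()  # relax the robot before exiting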
Example #6
    def setUp(self):
        self.face = faceExpression(simulation=True)
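    # A hedged sketch of a test method that could follow the setUp above,
    # assuming a unittest.TestCase class and that morphable face expressions
    # work in simulation mode (as in Example #4 above); the real test suite's
    # assertions may differ.
    def test_neutral_preset(self):
        # in simulation mode the face is rendered as an image, so this only
        # checks that sending the preset does not raise
        self.face.send_morphable_face_expression("neutral")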
Example #7
                                                 == "Sugar") or (item
                                                                 == "Sand"):
        return "this is "
    else:
        return "these are "


def include_indefinite_article(item):

    if (item == "Stone"):
        return " a "
    else:
        return ""


fe = faceExpression("/dev/ttyACM6")
fe.sendFaceExpression("happiness")

# definition of the number of samples and shakes per object
number_of_samples_per_object = 2
number_shakes_per_sample = 3

# Sampling frequency for touch and Robotis sensors
move_frequency = 48

# definition of maximum current - this protects the hands from breaking! Do not change this unless you know what you are doing!
MAX_CUR_FINGER = 120
MAX_CUR_THUMB = 100

# path to the classifier file
json_path = "/data/sound_classification/data.json"
Example #8
from time import sleep

from nicoface.FaceExpression import faceExpression

simulation = False
if simulation:
    fe = faceExpression('sim')
else:
    fe = faceExpression()
    # handcoded presets
    # 'happiness','sadness','anger','disgust','surprise','fear','neutral','clear'
    for expression in ('happiness', 'sadness', 'anger', 'disgust', 'surprise',
                       'fear', 'neutral', 'clear'):
        fe.sendFaceExpression(expression)  # only works with a real robot
        sleep(1)

# trained expressions
for expression in ('Angry', 'Happy', 'Neutral', 'Sad', 'Surprise'):
    print(expression)
    fe.sendTrainedFaceExpression(expression)
    sleep(1)
Example #9
import time
import logging
import argparse

from nicoface.FaceExpression import faceExpression

logging.basicConfig(level=logging.INFO)

parser = argparse.ArgumentParser()
parser.add_argument(
    "-s",
    action="store_true",
    help="Enables simulated mode, where faces are shown as image instead",
)
args = parser.parse_args()

# Change your interface here
fe = faceExpression(simulation=args.s)

# Change between two faces

for i in range(2, -1, -1):
    fe.setCommMode(i)
    # Generate the mouth form and eyebrows
    # Using the standard values
    fe.gen_mouth()
    fe.gen_eyebrowse(type="l")
    fe.gen_eyebrowse(type="r")
    fe.send()

    time.sleep(0.05)

    # Using both tuples for two mouth lines