Example #1
    def default_action(self):
        """ Interpret keyboard presses and assign them to movement
            for the robot."""
        keys_sensor = blenderapi.controller().sensors[0]
        #pressed_keys = keys_sensor.getPressedKeys()
        pressed_keys = keys_sensor.events

        # Reset movement variables
        vx, vy, vz = 0.0, 0.0, 0.0
        rx, ry, rz = 0.0, 0.0, 0.0

        for key, status in pressed_keys:
            logger.debug("GOT: {0}, STATUS {1}".format(key, status))
            if key == blenderapi.UPARROWKEY:
                vx = self._speed

            if key == blenderapi.DOWNARROWKEY:
                vx = -self._speed

            if key == blenderapi.LEFTARROWKEY:
                rz = self._speed

            if key == blenderapi.RIGHTARROWKEY:
                rz = -self._speed

        # Give the movement instructions directly to the parent
        # The second parameter specifies a "local" movement
        if self._type == 'Position' or self._type == 'Velocity':
            self.robot_parent.apply_speed(self._type, [vx, vy, vz], [rx, ry, rz / 2.0])
        elif self._type == 'Differential':
            self.robot_parent.apply_vw_wheels(vx, -rz)
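
For context, an actuator like this is normally attached to a robot from a MORSE builder script rather than called directly. A minimal sketch, assuming the stock ATRV robot and the 'indoors-1/indoor-1' environment used in the MORSE tutorials (substitute your own robot and scene):

from morse.builder import *

# Illustrative sketch only: attach a keyboard actuator to a robot.
robot = ATRV()
keyboard = Keyboard()
keyboard.properties(Speed=2.0)   # ends up as self._speed in default_action()
robot.append(keyboard)

env = Environment('indoors-1/indoor-1')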
Example #3
    def default_action(self):
        """ Interpret joystick axis push and assign them to movement
            for the robot."""
        joystick_sensor = blenderapi.controller().sensors[0]
        # Reset movement variables
        vx, vy, vz = 0.0, 0.0, 0.0
        rx, ry, rz = 0.0, 0.0, 0.0

        rz = joystick_sensor.axisValues[0] * self._speed_factor
        vx = joystick_sensor.axisValues[1] * self._speed_factor

        # Send a 'zero motion' only once in a row.
        if self.zero_motion and (vx,vy,vz,rx,ry,rz) == (0,0,0,0,0,0):
            return

        # Give the movement instructions directly to the parent
        # The second parameter specifies a "local" movement
        if self._type == 'Position' or self._type == 'Velocity':
            self.robot_parent.apply_speed(self._type, [vx, vy, vz], [rx, ry, rz / 2.0])
        elif self._type == 'Differential':
            self.robot_parent.apply_vw_wheels(vx, -rz)

        if (vx,vy,vz,rx,ry,rz) == (0,0,0,0,0,0):
            self.zero_motion = True
        else:
            self.zero_motion = False
Example #4
def init():
    """
    Sets the camera on load
    """
    co = blenderapi.controller()
    ow = co.owner

    # get the suffix of the human to reference the right objects
    suffix = ow.name[-4:] if ow.name[-4] == "." else ""

    camAct = co.actuators["Set_Camera"]
    sobList = blenderapi.scene().objects

    human = ow

    # if the Human is external, do not use his camera initially
    if human.get("External_Robot_Tag") or human["disable_keyboard_control"]:
        return

    humCam = sobList["Human_Camera" + suffix]

    try:
        worldCam = sobList["CameraFP"]
        # check if there is a Camera displaying the world in the scene
    except KeyError:
        worldCam = None

    if ow["WorldCamera"] and worldCam:
        camAct.camera = worldCam
    else:
        camAct.camera = humCam
        blenderapi.mousepointer(visible=False)
    # set Camera following the human or displaying the world

    co.activate(camAct)
Example #5
    def default_action(self):
        """ Is currently in collision """
        controller  = blenderapi.controller()
        sensor = controller.sensors[-1]

        # see hitObjectList and hitObject for last collided object(s)
        # http://www.blender.org/api/blender_python_api_2_76_release/bge.types.KX_TouchSensor.html
        self.local_data['collision'] = sensor.positive
        self.local_data['objects'] = ','.join([o.name for o in sensor.hitObjectList])
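
The two local_data fields filled here ('collision' and 'objects') are what a middleware client ends up receiving. As a rough usage sketch, assuming the sensor is exported over the socket middleware on a robot named 'robot' with the sensor named 'collision' (both names are placeholders), it could be polled with pymorse:

import pymorse

# Placeholder robot/sensor names; use the ones from your own builder script.
with pymorse.Morse() as simu:
    data = simu.robot.collision.get()
    if data['collision']:
        print("Colliding with:", data['objects'])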
Example #6
def reset_objects():
    """ Restore all simulation objects to their original position

    Upon receiving the request using sockets, call the
    'reset_objects' function located in morse/blender/main.py
    """
    contr = blenderapi.controller()
    main_reset(contr)
    return "Objects restored to initial position"
Example #7
def change():
    """
    Changes camera position to 1st person while in Manipulation-Mode
    """
    co = blenderapi.controller()
    ow = co.owner

    # get the suffix of the human to reference the right objects
    suffix = ow.name[-4:] if ow.name[-4] == "." else ""
    
    track = co.actuators['TrackCamera']
    sobList = blenderapi.scene().objects

    human = sobList[ow["human_name"] + suffix]

    # if the Human is external, do nothing
    if human.get('External_Robot_Tag') or human['disable_keyboard_control']:
        return
    
    FP = sobList['POS_1P_Cam' + suffix]
    FP_POS = sobList['POS_1P_Cam' + suffix].worldPosition
    TP_POS = sobList['POS_3P_Cam' + suffix].worldPosition
    head_target = sobList['Target_Empty' + suffix]
    hand_target = sobList['IK_Target_Look.R' + suffix]
    look_target = sobList['LOOK_TARGET' + suffix]
    right_hand = sobList['Hand_Grab.R' + suffix]
    mmb = ow.sensors['MMB']


    if ow.getDistanceTo(FP) < 0.08:
        # if the distance is smaller than 0.08,
        # snap the camera to the 'POS_1P_Cam'-empty
        ow['FP'] = True


    if not human['Manipulate']:
        ow['FP'] = False
        if not ow['prop_collision']:
            smooth_move(TP_POS, ow)
    else:
        if not ow['FP']:
            smooth_move(FP_POS, ow)
        else:
            ow.worldPosition = FP_POS


    # camera points to several empties according to the current situation
    if human['Manipulate']:
        track.object = head_target
    else:
        track.object = look_target
    co.activate(track)
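
smooth_move() is defined elsewhere in the same MORSE module and is not shown in this example. Judging from how it is called (a target world position plus the camera object), it is assumed to ease the camera toward the target a little each frame; a minimal sketch, with the blending factor being an arbitrary choice:

from mathutils import Vector

def smooth_move(target_pos, owner, factor=0.1):
    """ Illustrative sketch only; the real MORSE helper may differ. """
    # Move 'owner' a fraction of the way toward 'target_pos' each frame.
    owner.worldPosition = owner.worldPosition.lerp(Vector(target_pos), factor)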
Example #8
def collision():
    """
    Detect camera collision and place the camera accordingly
    """
    co = blenderapi.controller()
    ow = co.owner

    # get the suffix of the human to reference the right objects
    suffix = ow.name[-4:] if ow.name[-4] == "." else ""

    ray = co.sensors["collision"]
    right = co.sensors["RIGHT"]
    left = co.sensors["LEFT"]
    human = blenderapi.scene().objects[ow.parent.parent["human_name"]]

    # if the Human is external, do nothing
    if human.get("External_Robot_Tag") or human["disable_keyboard_control"]:
        return

    Orig_Pos = blenderapi.scene().objects["POS_3P_Cam_Orig" + suffix]
    distance = 0.05  # the distance the camera keeps to Objects

    # if near an object, place the camera slightly in front of it
    if ray.positive and not human["Manipulate"] and not (right.positive or left.positive):
        hitPos = ray.hitPosition
        ow.worldPosition = Vector(hitPos) - Vector(ray.rayDirection).normalized() * distance
        ow["prop_collision"] = True

    elif ray.positive and not human["Manipulate"] and right.positive:
        hitPos = (Vector(ray.hitPosition) + Vector(right.hitPosition)) / 2
        ow.worldPosition = hitPos - (Vector(ray.rayDirection) + Vector(right.rayDirection)).normalized() * distance
        ow["prop_collision"] = True

    elif ray.positive and not human["Manipulate"] and left.positive:
        hitPos = (Vector(ray.hitPosition) + Vector(left.hitPosition)) / 2
        ow.worldPosition = hitPos - (Vector(ray.rayDirection) + Vector(left.rayDirection)).normalized() * distance
        ow["prop_collision"] = True

    elif left.positive and not human["Manipulate"] and not (right.positive or ray.positive):
        hitPos = left.hitPosition
        ow.worldPosition = Vector(hitPos) - Vector(left.rayDirection).normalized() * distance
        ow["prop_collision"] = True

    elif right.positive and not human["Manipulate"] and not (left.positive or ray.positive):
        hitPos = right.hitPosition
        ow.worldPosition = Vector(hitPos) - Vector(right.rayDirection).normalized() * distance
        ow["prop_collision"] = True

    else:
        ow["prop_collision"] = False
        ow.worldPosition = Orig_Pos.worldPosition
Example #9
    def default_action(self):
        """ Apply (v, w) to the parent robot. """
        # get the Blender Logic Controller
        contr = blenderapi.controller()
        # get the Empty object parent of this Controller
        light_act = contr.owner
        # get the light which is a child of the Empty object
        light = light_act.children[0]

        # switch on/off the light
        if self.local_data['emit']:
            light.energy = 1.0
        else:
            light.energy = 0.0
Example #10
def raylength():
    """
    Objects can only be grabbed if they are hit by the Ray-Sensor called 'Ray'.
    Set the ray's length ,
    so that it hits objects in a certain radius around the human's z-axis
    """
    co = blenderapi.controller()
    cam = co.owner
    ray = co.sensors["Ray"]

    dir = Vector(ray.rayDirection)
    xy = Matrix.OrthoProjection("XY", 3) * dir
    # API Change in 2.59 builds - Vectors are now column vectors

    ray.range = 0.8 / xy.length
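
Dividing by the length of the XY projection is what keeps the grabbing radius constant: if the ray is pitched by an angle θ away from the horizontal plane, the projection has length cos(θ), so ray.range = 0.8 / cos(θ) and the horizontal component of the ray stays at 0.8 m. A quick standalone check (plain Python, no Blender needed):

import math

REACH = 0.8  # desired horizontal radius around the human's z-axis

for pitch_deg in (0, 20, 45, 60):
    pitch = math.radians(pitch_deg)
    # unit ray direction pitched downwards by 'pitch' from the horizontal
    direction = (math.cos(pitch), 0.0, -math.sin(pitch))
    xy_length = math.hypot(direction[0], direction[1])
    ray_range = REACH / xy_length
    # the horizontal reach is ray_range * xy_length == REACH for every pitch
    print(pitch_deg, round(ray_range, 3), round(ray_range * xy_length, 3))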
Example #11
    def default_action(self):
        """ Apply ``play`` to this actuator. """
        # get the Blender Logic Controller
        contr = blenderapi.controller()
        # http://www.blender.org/documentation/blender_python_api_2_65_release/bge.types.html#bge.types.KX_SoundActuator
        if self.local_data['mode'] == self._last_mode:
            return
        act = contr.actuators[-1]
        contr.activate(act) # enables 3D effect (!)
        if self.local_data['mode'] == 'play':
            act.startSound()
        elif self.local_data['mode'] == 'pause':
            act.pauseSound()
        elif self.local_data['mode'] == 'stop':
            act.stopSound()
        # new last mode
        self._last_mode = self.local_data['mode']
Example #12
    def default_action(self):
        """ Look for nearby victims, and heal when instructed to """
        # Look for victims in the cone of the sensor
        contr = blenderapi.controller()
        radar = contr.sensors["Radar"]

        # Clear the variables for the victims
        self.local_data["victim_dict"] = {}
        self._nearest_victim = None
        self._nearest_distance = 999999
        self._detect_distance = radar.distance

        if radar.positive:
            logger.debug("radar positive")
            for victim_obj in radar.hitObjectList:
                if victim_obj.name != self.robot_parent.name():
                    victim_position = victim_obj.worldPosition
                    # Fill the data structure for the victim
                    victim_coordinate = OrderedDict(
                        [("x", victim_position[0]), ("y", victim_position[1]), ("z", victim_position[2])]
                    )
                    victim_d = OrderedDict(
                        [
                            ("coordinate", victim_coordinate),
                            ("requirements", victim_obj["Requirements"]),
                            ("severity", victim_obj["Severity"]),
                        ]
                    )
                    self.local_data["victim_dict"][victim_obj.name] = victim_d

                    # Find the closest victim and its distance
                    new_distance = self.bge_object.getDistanceTo(victim_obj)
                    if new_distance < self._nearest_distance:
                        self._nearest_victim = victim_obj
                        self._nearest_distance = new_distance

            # When instructed to do so, help a victim
            if self._healing:
                self._heal_victim()
        else:
            if self._healing:
                self._healing = False
                message = "No victim within range (%.2f m)" % self._detect_distance
                self.completed(status.FAILED, message)
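
For reference, the local_data['victim_dict'] built above maps each victim object's name to its coordinates and status; one entry is expected to look roughly like this (the values shown are placeholders, and the exact types of 'requirements' and 'severity' come from the victim's game properties):

# Illustrative shape of one victim entry (placeholder values):
victim_dict = {
    "Victim_001": {
        "coordinate": {"x": 2.31, "y": -0.75, "z": 0.0},
        "requirements": [1, 2],
        "severity": 10,
    }
}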
Example #13
    def find_object(self):
        """
        Store the object that is within reach of the gripper Uses a
        Blender Radar Sensor to detect objects with the 'Graspable'
        property in front of this component
        """
        # Get reference to the Radar Blender sensor
        contr = blenderapi.controller()
        radar = contr.sensors['Radar']

        self._near_object = None
        if radar.triggered and radar.positive:
            min_distance = 100
            for test_obj in radar.hitObjectList:
                # Find the closest object and its distance
                new_distance = self.bge_object.getDistanceTo(test_obj)
                if new_distance < min_distance:
                    self._near_object = test_obj
                    min_distance = new_distance
Example #14
    def default_action(self):
        """ Interpret keyboard presses and assign them to movement
            for the robot."""
        keys_sensor = blenderapi.controller().sensors['keys_sensor']
        #pressed_keys = keys_sensor.getPressedKeys()
        pressed_keys = keys_sensor.events

        # Reset movement variables
        vx, vy, vz = 0.0, 0.0, 0.0
        rx, ry, rz = 0.0, 0.0, 0.0

        for key, status in pressed_keys:
            logger.debug("GOT: {0}, STATUS {1}".format(key, status))
            if key == blenderapi.UPARROWKEY:
                vx = self._speed

            if key == blenderapi.DOWNARROWKEY:
                vx = -self._speed

            if key == blenderapi.LEFTARROWKEY:
                rz = self._speed / 2.0

            if key == blenderapi.RIGHTARROWKEY:
                rz = -self._speed / 2.0

        # Get the Blender object of the parent robot
        parent = self.robot_parent.blender_obj

        # Give the movement instructions directly to the parent
        # The second parameter specifies a "local" movement
        if self._type == 'Position':
            parent.applyMovement([vx, vy, vz], True)
            parent.applyRotation([rx, ry, rz], True)
        elif self._type == 'Velocity':
            parent.setLinearVelocity([vx, vy, vz], True)
            parent.setAngularVelocity([rx, ry, rz], True)
Example #15
def terminate():
    """ Terminate the simulation (no finalization done!)
    """
    contr = blenderapi.controller()
    main_terminate(contr)
Example #16
def quit():
    """ Cleanly quit the simulation
    """
    contr = blenderapi.controller()
    main_close(contr)
    main_terminate(contr)
Example #19
from morse.core import blenderapi

co = blenderapi.controller()
ow = co.owner

scene = blenderapi.scene()
scriptHolder = scene.objects["TUT_Script_Holder"]


def test():
    if ow.parent and scriptHolder["Level"] == 4:
        if not "init" in ow:
            ow["init"] = True
            scriptHolder["Level"] = 5
Example #20
    def default_action(self):
        """ Apply rotation to the arm segments """
        # Get the reference to the Sound actuator
        if self._sound is None:
            logger.debug("ACTIVATING THE SOUND ACTUATOR")
            contr = blenderapi.controller()
            self._sound = contr.actuators['Sound']
            contr.activate(self._sound)
            self._sound.stopSound()

        # Reset movement variables
        rx, ry, rz = 0.0, 0.0, 0.0

        # Scale the speeds to the time used by Blender
        try:
            rotation = self._speed / self.frequency
        except ZeroDivisionError:
            # The frequency can be zero right at simulation start; fall back
            # to no rotation this frame instead of leaving 'rotation'
            # undefined (which would raise a NameError below)
            rotation = 0.0

        self._moving = False

        for i in range(6):
            key = ('seg%d' % i)
            target_angle = normalise_angle(self.local_data[key])

            # Get the next segment
            segment = self._segments[i]

            # Extract the angles
            rot_matrix = segment.localOrientation
            segment_matrix = mathutils.Matrix((rot_matrix[0], rot_matrix[1], rot_matrix[2]))
            segment_euler = segment_matrix.to_euler()

            # Use the corresponding direction for each rotation
            if self._dofs[i] == 'y':
                ry = rotation_direction(segment_euler[1], target_angle, self._tolerance, rotation)
                #logger.debug("PARAMETERS Y: %.4f, %.4f, %.4f, %.4f = %.4f" % (segment_euler[1], target_angle, _tolerance, rotation, ry))

            elif self._dofs[i] == 'z':
                rz = rotation_direction(segment_euler[2], target_angle, self._tolerance, rotation)
                #logger.debug("PARAMETERS Z: %.4f, %.4f, %.4f, %.4f = %.4f" % (segment_euler[2], target_angle, _tolerance, rotation, rz))

            logger.debug("ry = %.4f, rz = %.4f" % (ry, rz))

            # Give the movement instructions directly to the parent
            # The second parameter specifies a "local" movement
            segment.applyRotation([rx, ry, rz], True)

            if ry != 0.0 or rz != 0.0:
                self._moving = True

            # Reset the rotations for the next segment
            ry = rz = 0

        if self._moving:
            self._sound.startSound()
            logger.debug("STARTING SOUND")
        else:
            self._sound.stopSound()
            logger.debug("STOPPING SOUND")
Example #21
from morse.core import blenderapi

co = blenderapi.controller()
sens = co.sensors['ManipulateState']
act = co.actuators[0]
objects = blenderapi.scene().objects


def add_level():
    if sens.positive and (objects['Human_Camera'].worldPosition
                          == objects['POS_1P_Cam'].worldPosition):
        co.activate(act)
Example #22
def change_light_energy():
    co = blenderapi.controller()
    ow = co.owner
    
    ow.energy = ow['Energy'] if ow['On'] else 0