Example no. 1
 def sound_recognised(self, sounds):
     sounds = sounds.sounds
     all_sounds = [elem.sound for elem in sounds]
     if len(all_sounds) == 0:
         # Use 0 as a sentinel for "no sound detected in this sample"
         all_sounds = 0
     # Keep a fixed-length sliding window of the most recent samples
     self.sounds_history.append(all_sounds)
     self.sounds_history = self.sounds_history[1:]
     # Check whether at least min_diff_sounds_required different sounds occur
     # in at least min_sound_history_required of the stored samples
     count = 0
     nr_different = set()
     for sound_hist in self.sounds_history:
         if sound_hist != 0:
             count += 1
             for sound in sound_hist:
                 nr_different.add(sound)
     if count >= self.min_sound_history_required and len(
             nr_different) >= self.min_diff_sounds_required:
         if not self.published:
             self.location_bot = self.get_bot_location()
             knowledge = pytools_utils.predicate_maker(
                 "hectic-environment", "location", self.location_bot)
             kb.add_predicate(knowledge)
             self.published = True
     else:
         if self.published:
             knowledge = pytools_utils.predicate_maker(
                 "hectic-environment", "location", self.location_bot, True)
             kb.add_predicate(knowledge)
             self.published = False
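The callback above keeps a fixed-length sliding window of recent sound detections and asserts a "hectic-environment" predicate once enough of the stored samples contain sounds and enough distinct sounds have been heard. The following is a minimal, ROS-free sketch of that windowed check; the function name and threshold values are illustrative assumptions, not taken from the example.

    # Sliding-window check in isolation; thresholds are illustrative assumptions.
    def hectic_environment(history, min_samples_required=3, min_diff_sounds_required=2):
        samples_with_sound = [sample for sample in history if sample != 0]
        distinct_sounds = set()
        for sample in samples_with_sound:
            for sound in sample:
                distinct_sounds.add(sound)
        return (len(samples_with_sound) >= min_samples_required
                and len(distinct_sounds) >= min_diff_sounds_required)

    # Three of the four stored samples contain sounds, two of them distinct.
    history = [["dog barking"], 0, ["dog barking", "glass breaking"], ["dog barking"]]
    print(hectic_environment(history))  # True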
Example no. 2
 def _start(self, robot, location):
     """
     Suggest opening the door.
     """
     self.pub.publish(String("Should I open the door?"))
     kb.add_predicate(
         pytools_utils.predicate_maker("opened-door", "robot", robot))
     super(OpenDoor, self)._report_success()
 def _start(self, dog, human, robot, doglocation):
     """
     Tell the dog to be silent
     """
     self.pub.publish(String("Hey " + str(dog) + ", be silent!"))
     kb.add_predicate(
         pytools_utils.predicate_maker("dog-interaction", ["robot", "dog"],
                                       [robot, dog]))
     super(SayDogSilent, self)._report_success()
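Both actions above, and most of the ones that follow, build knowledge with the same calling convention: pytools_utils.predicate_maker takes a predicate name, a parameter name or a list of names, the matching value or values, and an optional is_negative flag, and the result is handed to kb.add_predicate. The stand-in below only mirrors that interface as inferred from the calls in the examples; the dictionary output and the robot name "pepper" are assumptions, since the real helper returns whatever object kb.add_predicate expects.

    # Interface-only stand-in for pytools_utils.predicate_maker, inferred from
    # the calls above; the real helper does not return a plain dictionary.
    def predicate_maker(name, params, values, is_negative=False):
        if not isinstance(params, list):
            params, values = [params], [values]
        return {"name": name,
                "values": dict(zip(params, values)),
                "is_negative": is_negative}

    # "pepper" is a hypothetical robot name used only for illustration.
    print(predicate_maker("opened-door", "robot", "pepper"))
    # {'name': 'opened-door', 'values': {'robot': 'pepper'}, 'is_negative': False}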
Example no. 4
 def _start(self, robot, person, emotion, location, topic):
     """
     This action is intentionally left blank; it demonstrates the possibility of reacting when a certain emotion is present
     """
     # if str(topic) == "general":
     #     self.pub.publish(String("Hey, I will not talk to you"))
     # elif str(topic) == "kitchenhelping":
     #     self.pub.publish(String("Hey kitchen man, I will not talk to help"))
     kb.add_predicate(
         pytools_utils.predicate_maker("asked-all-good",
                                       ["person", "topic"],
                                       [person, topic]))
     super(PassiveAskAllGood, self)._report_success()
 def _start(self, robot, treatlocation, treat):
     """
     Grasp the treat
     """
     rospy.loginfo("Picking up treat")
     # The messages are used for creating the emotional model
     self.pub = rospy.Publisher('robot_command', String, queue_size=10)
     self.pub.publish(String("grasp"))
     rospy.sleep(2)
     kb.add_predicate(
         pytools_utils.predicate_maker("robot-holds",
                                       ["robot", "holdingobject"],
                                       [robot, treat]))
     super(PickupTreat, self)._report_success()
 def publish_as_knowledge(self):
     """
     Publish the quadrant to the knowledge base
     """
     sign = lambda x: -1 if x < 0 else 1
     for emotion in self.numbers_to_kb_emotion:
         if sign(self.valence) == emotion[0] and sign(
                 self.arousal) == emotion[1]:
             pred = pytools_utils.predicate_maker(
                 "current-emotion", ["person", "emotionquadrant"],
                 ["personbert", emotion[2]])
             kb.add_predicate(pred)
         else:
             pred = pytools_utils.predicate_maker(
                 "current-emotion", ["person", "emotionquadrant"],
                 ["personbert", emotion[2]], True)
             kb.add_predicate(pred)
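publish_as_knowledge maps the signs of valence and arousal to one of four emotion quadrants through the numbers_to_kb_emotion table, asserting the matching quadrant for personbert and retracting the other three. The table itself is not shown, so the layout and quadrant labels below are assumptions used only to illustrate the lookup.

    # Hypothetical sign-to-quadrant table in the shape the loop above expects:
    # (sign of valence, sign of arousal, quadrant label); labels are assumptions.
    numbers_to_kb_emotion = [
        (1, 1, "excited"),
        (-1, 1, "stressed"),
        (-1, -1, "sad"),
        (1, -1, "relaxed"),
    ]

    sign = lambda x: -1 if x < 0 else 1
    valence, arousal = 0.4, -0.7
    quadrant = next(label for v, a, label in numbers_to_kb_emotion
                    if sign(valence) == v and sign(arousal) == a)
    print(quadrant)  # relaxed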
Example no. 7
 def sound_volume_detected(self, volume):
     volume = float(volume.data)
     if volume > self.high_volume_limit:
         self.count_high_volume += 1
     else:
         self.count_high_volume = 0
     if self.count_high_volume >= self.launch_helper_limit and self.loud_noise is False:
         self.loud_noise = True
         self.noise_place = self.get_bot_location()
         kb.add_predicate(
             pytools_utils.predicate_maker("loud-volume", "Location",
                                           self.noise_place))
     elif self.count_high_volume < self.launch_helper_limit and self.loud_noise is True:
         self.loud_noise = False
         kb.add_predicate(
             pytools_utils.predicate_maker("loud-volume", "Location",
                                           self.noise_place, True))
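The volume callback above is a simple debounce: the reading has to stay above high_volume_limit for launch_helper_limit consecutive callbacks before the loud-volume predicate is asserted, and the predicate is retracted as soon as the streak breaks. A ROS-free sketch of that hysteresis follows; the class name and the threshold values are illustrative assumptions.

    # Debounce sketch; the limits are illustrative, not taken from the example.
    class LoudNoiseDetector(object):
        def __init__(self, high_volume_limit=0.8, launch_helper_limit=3):
            self.high_volume_limit = high_volume_limit
            self.launch_helper_limit = launch_helper_limit
            self.count_high_volume = 0

        def update(self, volume):
            if volume > self.high_volume_limit:
                self.count_high_volume += 1
            else:
                self.count_high_volume = 0
            return self.count_high_volume >= self.launch_helper_limit

    detector = LoudNoiseDetector()
    print([detector.update(v) for v in [0.9, 0.95, 0.9, 0.2]])
    # [False, False, True, False]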
 def _start(self, dog, doglocation, robot, human, treat):
     """
     Perform the give-treat action.
     The arguments correspond to the parameters of the PDDL action.
     """
     rospy.loginfo("GIVING FOOD TO DOG")
     self.pub.publish(String("grasp off"))
     # Wait until closed
     rospy.sleep(2)
     kb.add_predicate(
         pytools_utils.predicate_maker("dog-interaction", ["robot", "dog"],
                                       [robot, dog]))
     kb.add_predicate(
         pytools_utils.predicate_maker("robot-holds",
                                       ["robot", "holdingobject"],
                                       [robot, treat], True))
     super(TreatGiving, self)._report_success()
Example no. 9
    def _start(self, robot, person, location):
        """
        Suggest playing music, adapting the style to the emotional state
        """
        emotions = rospy.get_param('emotion')
        sign = lambda x: -1 if x < 0 else 1
        suggested_style = 0
        for style in self.music_map:
            if sign(emotions["valence"]) == style[0] and sign(
                    emotions["arousal"]) == style[1]:
                suggested_style = style[2]

        self.pub.publish(
            String("Hey {}. Should I play some {} music?".format(
                person, suggested_style)))
        kb.add_predicate(
            pytools_utils.predicate_maker("asked-music-play", "robot", robot))
        super(MusicPlaying, self)._report_success()
Example no. 10
    def apply_effects(self):

        rospy.loginfo("[RPpt][CAPE] apply effects")

        # Simple start del effects
        for edx, effect in enumerate(self.op.at_start_del_effects):
            effect_name = effect.name
            effect_value = {}
            for pdx in range(len(effect.typed_parameters)):
                key = self.predicates[effect.name].typed_parameters[pdx].key
                value = self.bound_params[effect.typed_parameters[pdx].key]
                effect_value[key] = value
            kb.remove_predicate(effect_name, **effect_value)

        # Simple start add effects
        for edx, effect in enumerate(self.op.at_start_add_effects):
            effect_name = effect.name
            effect_value = {}
            for pdx in range(len(effect.typed_parameters)):
                key = self.predicates[effect.name].typed_parameters[pdx].key
                value = self.bound_params[effect.typed_parameters[pdx].key]
                effect_value[key] = value
            kb.add_predicate(effect_name, **effect_value)

        # Simple end del effects
        for edx, effect in enumerate(self.op.at_end_del_effects):
            effect_name = effect.name
            effect_value = {}
            for pdx in range(len(effect.typed_parameters)):
                key = self.predicates[effect.name].typed_parameters[pdx].key
                value = self.bound_params[effect.typed_parameters[pdx].key]
                effect_value[key] = value
            kb.remove_predicate(effect_name, **effect_value)

        # Simple end add effects
        for edx, effect in enumerate(self.op.at_end_add_effects):
            effect_name = effect.name
            effect_value = {}
            for pdx in range(len(effect.typed_parameters)):
                key = self.predicates[effect.name].typed_parameters[pdx].key
                value = self.bound_params[effect.typed_parameters[pdx].key]
                effect_value[key] = value
            kb.add_predicate(effect_name, **effect_value)
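Each of the four loops in apply_effects performs the same binding: for every effect it looks up the parameter names declared by the predicate and fills them with the values bound to the operator's parameters, then adds or removes that grounded predicate. Since only the effect list and the knowledge-base call differ, an equivalent helper could look like the sketch below; the method name _apply is hypothetical and the rewrite has not been verified against ROSPlan.

    # Hypothetical, behaviour-preserving condensation of the four loops above.
    def _apply(self, effects, kb_call):
        for effect in effects:
            effect_value = {}
            for pdx, eff_param in enumerate(effect.typed_parameters):
                # Key: the parameter name declared by the predicate definition.
                key = self.predicates[effect.name].typed_parameters[pdx].key
                # Value: the grounded value bound to the operator parameter.
                effect_value[key] = self.bound_params[eff_param.key]
            kb_call(effect.name, **effect_value)

    # self._apply(self.op.at_start_del_effects, kb.remove_predicate)
    # self._apply(self.op.at_start_add_effects, kb.add_predicate)
    # self._apply(self.op.at_end_del_effects, kb.remove_predicate)
    # self._apply(self.op.at_end_add_effects, kb.add_predicate)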
    def _start(self, robot, fromlocation, tolocation):
        """
        Move the robot from one location to another
        """
        rospy.loginfo("MOVING ROBOT")
        # The messages are used for creating the emotional model

        movement = " ".join(["move", fromlocation, tolocation])
        # Use the Naoqi interface
        self.pub.publish(String(movement))
        rospy.sleep(2)
        rospy.loginfo("Published :" + movement)
        kb.add_predicate(
            pytools_utils.predicate_maker("robot-at", ["robot", "location"],
                                          [robot, fromlocation], True))
        kb.add_predicate(
            pytools_utils.predicate_maker("robot-at", ["robot", "location"],
                                          [robot, tolocation]))
        rospy.loginfo("Reporting succes")
        super(MoveRobot, self)._report_success()
Example no. 12
    def _start(self, robot, person, emotion, location, topic):
        """
        Ask if everything is ok. What is said depends on the topic
        """
        rospy.loginfo("Asking all good on topic: " + topic)

        if str(topic) == "general":
            self.pub.publish(
                String(
                    "Hey " + person +
                    ". Is everything ok or is there something I can do for you?"
                ))
        elif str(topic) == "kitchenhelping":
            self.pub.publish(
                String("Hey " + person + ". Should I help you with cooking?"))
        kb.add_predicate(
            pytools_utils.predicate_maker("asked-all-good",
                                          ["person", "topic"],
                                          [person, topic]))
        super(AskAllGood, self)._report_success()
Example no. 13
 def sound_recognised(self, sounds):
     """
     When an alarm bell rings, put it into the knowledge base
     """
     sounds = sounds.sounds
     ringing = False
     for elem in sounds:
         if elem.sound == "an Alarm bell ringing":
             ringing = True
             if not self.notified:
                 kb.add_predicate(
                     pytools_utils.predicate_maker("doorbell-ringing",
                                                   "Location",
                                                   "kitchen",
                                                   is_negative=False))
                 self.notified = True
     if not ringing and self.notified:
         kb.add_predicate(
             pytools_utils.predicate_maker("doorbell-ringing",
                                           "Location",
                                           "kitchen",
                                           is_negative=True))
         self.notified = False
#!/usr/bin/env python
import rosplan_pytools.controller.knowledge_base as kb
import rospy
import rosplan_pytools.common.utils as pytools_utils

if __name__ == "__main__":
    rospy.init_node("loud_noise_task_generator")

    kb.initialize(prefix="/rosplan_knowledge_base")
    while not rospy.is_shutdown():
        respond = False
        loud_noise_predicates = kb.list_predicates("loud-volume")
        for loud_noise_pred in loud_noise_predicates:
            if not loud_noise_pred.is_negative:
                respond = True
        if respond and len(kb.list_goals()) == 0:
            goal = pytools_utils.predicate_maker("asked-all-good",
                                                 ["person", "subject"],
                                                 ["personbert", "general"])
            kb.add_goal(goal)
            rospy.loginfo("Goal posted")
            pytools_utils.plan_and_execute()
            print("planning done")
            kb.remove_goal(goal)
            goal.is_negative = True
            kb.add_predicate(goal)
            rospy.sleep(60 * 5)
        rospy.sleep(5.)