Example #1
    def __init__(self):
        # Start knowledge and connect to the NLP pipeline
        self.sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        try:
            self.sock.connect(('localhost', DEFAULT_PORT))
        except socket.error:
            raise IOError("Could not connect to pipelinehost on port %d. "
                          "Make sure that pipelinehost is running." % DEFAULT_PORT)
        self.world_knowledge = Knowledge()

        # Sets of propositions, accessible to all methods
        self.custom_props = set()
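
The snippet fails fast with an actionable message instead of letting a dead socket surface later during parsing. A minimal standalone sketch of the same pattern (the port value here is an assumption; the real DEFAULT_PORT comes from the surrounding module):

import socket

DEFAULT_PORT = 9001  # assumption: the real value comes from the surrounding module

def connect_to_pipelinehost(host='localhost', port=DEFAULT_PORT):
    """Return a TCP socket connected to pipelinehost, or fail fast."""
    sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    try:
        sock.connect((host, port))
    except socket.error:
        raise IOError("Could not connect to pipelinehost on port %d. "
                      "Make sure that pipelinehost is running." % port)
    return sock

Raising IOError with a remediation hint keeps the failure close to its cause rather than deep inside a later parse call.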
Example #2
    def run(self, aut_path=None):
        """Intialize all NL components."""
        # pylint: disable=W0603
        global WORLD_KNOWLEDGE

        # Start the NL pipeline
        if not aut_path:
            print "Starting NL pipeline..."
            init_pipes()
        else:
            print "Skipping loading nlpipeline because an automaton was loaded"

        # Start the ROS node
        print "Initializing ROS node..."
        rospy.init_node('nlproxy')

        # Set up the state mgr and its callbacks
        print "Starting state manager..."
        self.state_mgr = StateManager(self)
        self.state_mgr.set_basedir(LTLGEN_BASE_DIR)

        # Load the automaton if needed
        if aut_path:
            self.state_mgr.load_test_automaton(aut_path, False)

        # Create world knowledge
        print "Starting knowledge..."
        WORLD_KNOWLEDGE = Knowledge(self)
        self.state_mgr.world_knowledge = WORLD_KNOWLEDGE

        # Wait a little for ROS to avoid timing startup issues.
        print "Waiting for ROS node to settle..."
        time.sleep(1)

        # Set up ROS listening
        print "Subscribing to notifications..."
        # Set up state manager's sending and listening
        pub = add_ltl_publisher()
        self.state_mgr.set_publisher(pub)
        add_subscriber(LTL_ENVIRONMENT_TOPIC,
                       self.state_mgr.process_sensor_data)
        add_subscriber(LTL_ENVIRONMENT_TOPIC,
                       WORLD_KNOWLEDGE.process_sensor_data)

        # Create the input comm_proxy and iPad connections
        print "Starting comm_proxy..."
        self.comm_proxy = CallbackSocket(self.text_port)
        self.comm_proxy.register_callback(self.process_text)
        self.ipad_conn = iPadConnection(self.map_port)
        self.ipad_conn.register_rename_callback(rename_entity)
        self.ipad_conn.register_text_callback(self.process_text)
        # TODO Add highlight callback
        self.map_proxy = MapProxy(self.ipad_conn)
        add_subscriber(LTL_ENVIRONMENT_TOPIC, self.ipad_conn.add_icons)
        WORLD_KNOWLEDGE.map_proxy = self.map_proxy

        # Set up odometry forwarding to the ipad
        tf = TransformListener()
        while not tf.frameExists("/map"):
            rospy.logwarn("Not ready for transforms yet")
            rospy.sleep(1.0)
        position_proxy = RobotPositionProxy(self.ipad_conn, tf)
        rospy.Subscriber(ODOM_TOPIC, Odometry, position_proxy.forward_position)

        # Set up getting directions
        direction_proxy = DirectionProxy()
        rospy.Subscriber(LOCATION_TOPIC, String, direction_proxy.set_location)
        WORLD_KNOWLEDGE.direction_proxy = direction_proxy

        print "NLMaster startup complete!"
        print "*" * 80

        # Finally, wait for input before quitting
        try:
            while True:
                text = raw_input("").strip()
                if text == "q":
                    break
        except (KeyboardInterrupt, EOFError):
            pass
        except:
            # Re-raise anything unexpected rather than swallowing it
            raise
        finally:
            # Only shutdown the pipeline if we actually were taking language input
            if not aut_path:
                print "Shutting down NL pipeline..."
                close_pipes()
            self.comm_proxy.shutdown()
            self.ipad_conn.shutdown()

        sys.exit()
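
Note how both the state manager and the world knowledge register bound methods on the same LTL_ENVIRONMENT_TOPIC, so each sensor message fans out to every callback. A minimal sketch of that rospy pattern, assuming illustrative node and topic names and a String message type (the real message type is not shown in the example):

import rospy
from std_msgs.msg import String

class KnowledgeStub(object):
    """Stand-in showing how bound methods serve as rospy callbacks."""

    def __init__(self, name):
        self.name = name

    def process_sensor_data(self, msg):
        rospy.loginfo("%s got sensor data: %s", self.name, msg.data)

if __name__ == '__main__':
    rospy.init_node('fanout_demo')  # must precede any publishers/subscribers
    state_mgr = KnowledgeStub('state_mgr')
    knowledge = KnowledgeStub('knowledge')
    # Two subscriptions on one topic: every message reaches both callbacks
    rospy.Subscriber('ltl_environment', String, state_mgr.process_sensor_data)
    rospy.Subscriber('ltl_environment', String, knowledge.process_sensor_data)
    rospy.spin()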
Example #3
class SpecGenerator(object):
    """Enables specification generation using natural language."""

    def __init__(self):
        # Start knowledge and connect to the NLP pipeline
        self.sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        try:
            self.sock.connect(('localhost', DEFAULT_PORT))
        except socket.error:
            raise IOError("Could not connect to pipelinehost on port %d. "
                          "Make sure that pipelinehost is running." % DEFAULT_PORT)
        self.world_knowledge = Knowledge()
        
        # Sets of propositions, accessible to all methods
        self.custom_props = set()

    def generate(self, text, sensors, regions, props):
        """Generate a logical specification from natural language and propositions."""
        # Clean unicode out of everything
        text = text.encode('ascii', 'ignore')
        sensors = [astr.encode('ascii', 'ignore') for astr in sensors]
        regions = [astr.encode('ascii', 'ignore') for astr in regions]
        props = [astr.encode('ascii', 'ignore') for astr in props]
        
        print "NL->LTL Generation called on:"
        print "Sensors:", sensors
        print "Props:", props
        print "Regions:", regions
        print "Text:", repr(text)
        
        # Make lists for POS conversions, including the metapar keywords
        force_nouns = list(regions) + list(sensors)
        force_verbs = list(props) + METAPARS.keys()
        
        responses = []
        system_lines = []
        env_lines = []
        env_lines_set = set() # Used to track duplicates
        custom_props = set()
        generation_trees = []
        for line in text.split('\n'):
            if not line:
                # Blank lines are counted as being processed correctly but are skipped
                responses.append(True)
                continue
            
            # Init the generation tree to the empty result
            generation_tree = [line.strip(), []] 
            
            # Lowercase and strip the text before using it
            line = line.strip().lower()
            
            print "Sending to remote parser:", repr(line)
            parse = socket_parse(asocket=self.sock, text=line, force_nouns=force_nouns,
                                 force_verbs=force_verbs)
            print parse
            user_response, semantics_result, semantics_response, new_commands = \
                self.world_knowledge.process_parse_tree(parse, line)
            
            # Build the metapars
            failure = False
            for command in new_commands:
                try:
                    new_sys_lines, new_env_lines, new_custom_props = _apply_metapar(command)
                except KeyError:
                    failure = True
                    continue
                
                system_lines.extend(new_sys_lines)
                # We need to ensure no duplicates are inserted into env_lines, so we keep
                # a redundant set. If we were requiring 2.7 we would use an OrderedDict.
                for env_line in new_env_lines:
                    if env_line not in env_lines_set:
                        env_lines.append(env_line)
                        env_lines_set.add(env_line)
                custom_props.update(new_custom_props)
                # Add the statements as the children of the generation tree
                generation_tree[1].append([str(command), [new_env_lines, new_sys_lines]])
                
            # Add a true response if there were commands and no failures
            responses.append(bool(new_commands) and not failure)
                
            # Add this line's stuff to the generation tree
            generation_trees.append(generation_tree)

        # Convert custom props into a list
        custom_props = list(custom_props)

        print "Spec generation complete."
        print "Responses:", responses
        print "Environment lines:", env_lines
        print "System lines:", system_lines
        print "Custom props:", custom_props
        print "Generation tree:", generation_trees
        return env_lines, system_lines, custom_props, responses, generation_trees
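
A hedged usage sketch of the class above, assuming a pipelinehost instance is already listening on DEFAULT_PORT; all proposition names are purely illustrative:

if __name__ == '__main__':
    gen = SpecGenerator()  # raises IOError if pipelinehost is not running
    env_lines, sys_lines, custom_props, responses, trees = gen.generate(
        u"go to the kitchen",              # one natural-language command per line
        sensors=[u"person"],               # illustrative sensor propositions
        regions=[u"kitchen", u"hallway"],  # illustrative region names
        props=[u"pick_up"])                # illustrative actuator propositions
    print "Accepted:", responses
    print "System lines:", sys_lines

Each entry in responses lines up with one input line, so a caller can report exactly which commands failed to produce specification fragments.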