def initDialogue(self):
    """Set up the SLURP natural-language dialogue manager for this project.

    Regenerates the LTL spec from the project's spec text so that the
    parse traceback can seed a DialogManager, stored on
    ``self.dialogueManager``. Updates ``self.proj.internal_props`` as a
    side effect of the generate() call.
    """
    # Add SLURP to path for import.  The imports below are deferred on
    # purpose: SLURP only becomes importable after this path tweak.
    p = os.path.dirname(os.path.abspath(__file__))
    sys.path.append(os.path.join(p, "..", "etc", "SLURP"))
    from ltlbroom.specgeneration import SpecGenerator
    # NOTE(review): this is a *local* binding; the identically named
    # module-level singleton used by _writeLTLFile is not reused here, so a
    # fresh SpecGenerator is built on every call — confirm this is intended.
    _SLURP_SPEC_GENERATOR = SpecGenerator()
    # Filter out regions it shouldn't know about
    # (obstacles and the outer boundary are not navigable targets).
    filtered_regions = [
        region.name for region in self.proj.rfi.regions
        if not (region.isObstacle or region.name.lower() == "boundary")
    ]
    # Deep-copied because generate() callers elsewhere may append to the
    # sensor list; avoid mutating the project's own list here.
    sensorList = copy.deepcopy(self.proj.enabled_sensors)
    robotPropList = self.proj.enabled_actuators + self.proj.all_customs
    text = self.proj.specText
    # generate() returns a 7-tuple; only the traceback is consumed below.
    LTLspec_env, LTLspec_sys, self.proj.internal_props, internal_sensors, results, responses, traceback = _SLURP_SPEC_GENERATOR.generate(
        text, sensorList, filtered_regions, robotPropList,
        self.proj.currentConfig.region_tags
    )
    from ltlbroom.dialog import DialogManager
    self.dialogueManager = DialogManager(traceback)
def initDialogue(self):
    """Set up the SLURP natural-language dialogue manager for this project.

    Re-runs spec generation over the project's spec text purely to obtain
    the parse traceback, which seeds a DialogManager stored on
    ``self.dialogueManager``. Updates ``self.proj.internal_props`` as a
    side effect of the generate() call.
    """
    # Add SLURP to path for import.  Imports are deferred because SLURP is
    # only importable after this sys.path adjustment.
    p = os.path.dirname(os.path.abspath(__file__))
    sys.path.append(os.path.join(p, "..", "etc", "SLURP"))
    from ltlbroom.specgeneration import SpecGenerator
    # NOTE(review): local binding shadows the module-level singleton name
    # used by _writeLTLFile; a fresh SpecGenerator is created on every call
    # — confirm this is intended rather than reusing the global.
    _SLURP_SPEC_GENERATOR = SpecGenerator()
    # Filter out regions it shouldn't know about
    # (obstacles and the outer boundary are excluded from the map).
    filtered_regions = [region.name for region in self.proj.rfi.regions
                        if not (region.isObstacle or region.name.lower() == "boundary")]
    # Deep copy so generate() cannot mutate the project's sensor list.
    sensorList = copy.deepcopy(self.proj.enabled_sensors)
    robotPropList = self.proj.enabled_actuators + self.proj.all_customs
    text = self.proj.specText
    # generate() returns a 7-tuple; only the traceback is consumed below.
    # Region tags come from the currently executing handler configuration.
    LTLspec_env, LTLspec_sys, self.proj.internal_props, internal_sensors, results, responses, traceback = \
        _SLURP_SPEC_GENERATOR.generate(text, sensorList, filtered_regions, robotPropList,
                                       self.hsub.executing_config.region_tags)
    from ltlbroom.dialog import DialogManager
    self.dialogueManager = DialogManager(traceback)
def setUp(self):
    """Build the default fixture: one known region, empty prop sets."""
    # Fixture data first, generator last; the assignments are independent.
    self.regions = ['hallway']
    self.sensors = []
    self.props = []
    self.tag_dict = {}
    self.specgen = SpecGenerator()
class TestSpecGenerator(unittest.TestCase): def setUp(self): self.specgen = SpecGenerator() self.sensors = [] self.regions = ['hallway'] self.props = [] self.tag_dict = {} def test_go(self): """Test a basic go command.""" text = "Go to the hallway." enl, syl = self.lines_from_gen(text) self.assertEqual(enl, []) self.assertEqual(syl, [ '!s.mem_visit_hallway', '([]((next(s.mem_visit_hallway) <-> (s.mem_visit_hallway | next(s.hallway)))))', '([]<>(s.mem_visit_hallway))' ]) def test_go_nonexistent(self): """Test going to a nonexistent room.""" text = "Go to the kitchen." results = self.lines_from_gen(text) self.assertFalse(results[0]) def test_actuator_mutex(self): """Test the actuator mutex.""" self.props = ['camera', 'radio'] text = "" enl, syl = self.lines_from_gen(text) self.assertEqual(enl, []) self.assertEqual(syl, [ '([](((s.camera & !s.radio) | (s.radio & !s.camera) ' '| (!s.camera & !s.radio))))' ]) def test_activate(self): """Test a simple activate command.""" self.props = ['camera'] text = "Activate your camera." "" enl, syl = self.lines_from_gen(text) self.assertEqual(enl, []) self.assertEqual(syl, ['([](s.camera))']) def test_activate_conditional(self): """Test a conditional activate command.""" self.props = ['camera'] text = "Activate your camera in the hallway." "" enl, syl = self.lines_from_gen(text) self.assertEqual(enl, []) self.assertEqual(syl, ['([]((s.hallway -> s.camera)))']) def lines_from_gen(self, text): """Return [env_lines, sys_lines] from a default generate call.""" return self.get_from_gen(text, 0, 2) def results_from_gen(self, text): """Return responses from a default generate call.""" return self.get_from_gen(text, 4) def get_from_gen(self, text, start, end=None): """Return the start-th or start:end element(s) of the return from generate.""" result = self.specgen.generate(text, self.sensors, self.regions, self.props, self.tag_dict, verbose=False) if end: return result[start:end] else: return result[start]
def _writeLTLFile(self):
    """Generate and write the LTL specification file for this project.

    Dispatches on ``compile_options["parser"]`` ("slurp", "ltl" or
    "structured") to turn the project spec text into env/sys LTL formula
    lists, rewrites region names (decomposition and optional bit
    encoding), appends topology / region-mutex / initial-region
    fragments, writes the ``.ltl`` file, and returns
    ``(self.spec, traceback, response)`` — or ``(None, None, ...)`` when
    parsing fails.
    """
    self.LTL2SpecLineNumber = None
    #regionList = [r.name for r in self.parser.proj.rfi.regions]
    regionList = [r.name for r in self.proj.rfi.regions]
    # Deep copy: the slurp branch below may append internal sensors.
    sensorList = deepcopy(self.proj.enabled_sensors)
    robotPropList = self.proj.enabled_actuators + self.proj.all_customs
    text = self.proj.specText
    response = None

    # Create LTL using selected parser
    # TODO: rename decomposition object to something other than 'parser'
    if self.proj.compile_options["parser"] == "slurp":
        # default to no region tags if no simconfig is defined,
        # so we can compile without one
        if self.proj.current_config == "":
            region_tags = {}
        else:
            self.hsub = handlerSubsystem.HandlerSubsystem(
                None, self.proj.project_root)
            config, success = self.hsub.loadConfigFile(
                self.proj.current_config)
            if success:
                self.hsub.configs.append(config)
            # NOTE(review): reconstructed placement — assumed outside the
            # `if success` block, matching upstream LTLMoP; confirm.
            self.hsub.setExecutingConfig(self.proj.current_config)
            region_tags = self.hsub.executing_config.region_tags

        # Hack: We need to make sure there's only one of these
        global _SLURP_SPEC_GENERATOR

        # Make a new specgenerator and have it process the text
        if not _SLURP_SPEC_GENERATOR:
            # Add SLURP to path for import
            p = os.path.dirname(os.path.abspath(__file__))
            sys.path.append(os.path.join(p, "..", "etc", "SLURP"))
            from ltlbroom.specgeneration import SpecGenerator
            _SLURP_SPEC_GENERATOR = SpecGenerator()

        # Filter out regions it shouldn't know about
        # (obstacles and the outer boundary).
        filtered_regions = [
            region.name for region in self.proj.rfi.regions
            if not (region.isObstacle or region.name.lower() == "boundary")
        ]
        LTLspec_env, LTLspec_sys, self.proj.internal_props, internal_sensors, results, responses, traceback = \
            _SLURP_SPEC_GENERATOR.generate(text, sensorList,
                                           filtered_regions, robotPropList,
                                           region_tags)
        # Keep the raw per-line formulas for the reverse mapping built at
        # the end of this method.
        oldspec_env = LTLspec_env
        oldspec_sys = LTLspec_sys

        for ln, result in enumerate(results):
            if not result:
                logging.warning(
                    "Could not parse the sentence in line {0}".format(ln))

        # Abort compilation if there were any errors
        if not all(results):
            return None, None, responses

        # Add in the sensors so they go into the SMV and spec files
        for s in internal_sensors:
            if s not in sensorList:
                sensorList.append(s)
                self.proj.all_sensors.append(s)
                self.proj.enabled_sensors.append(s)

        # Conjoin all the spec chunks
        LTLspec_env = '\t\t' + ' & \n\t\t'.join(LTLspec_env)
        LTLspec_sys = '\t\t' + ' & \n\t\t'.join(LTLspec_sys)

        if self.proj.compile_options["decompose"]:
            # substitute decomposed region names (each original region
            # becomes a disjunction over its decomposed sub-regions)
            for r in self.proj.rfi.regions:
                if not (r.isObstacle or r.name.lower() == "boundary"):
                    LTLspec_env = re.sub(
                        '\\bs\.' + r.name + '\\b',
                        "(" + ' | '.join([
                            "s." + x
                            for x in self.parser.proj.regionMapping[r.name]
                        ]) + ")", LTLspec_env)
                    LTLspec_env = re.sub(
                        '\\be\.' + r.name + '\\b',
                        "(" + ' | '.join([
                            "e." + x
                            for x in self.parser.proj.regionMapping[r.name]
                        ]) + ")", LTLspec_env)
                    LTLspec_sys = re.sub(
                        '\\bs\.' + r.name + '\\b',
                        "(" + ' | '.join([
                            "s." + x
                            for x in self.parser.proj.regionMapping[r.name]
                        ]) + ")", LTLspec_sys)
                    LTLspec_sys = re.sub(
                        '\\be\.' + r.name + '\\b',
                        "(" + ' | '.join([
                            "e." + x
                            for x in self.parser.proj.regionMapping[r.name]
                        ]) + ")", LTLspec_sys)
        response = responses

    elif self.proj.compile_options["parser"] == "ltl":
        # delete comments
        text = re.sub(r"#.*$", "", text, flags=re.MULTILINE)
        # split into env and sys parts (by looking for a line of just dashes in between)
        LTLspec_env, LTLspec_sys = re.split(r"^\s*-+\s*$", text,
                                            maxsplit=1, flags=re.MULTILINE)
        # split into subformulas
        LTLspec_env = re.split(r"(?:[ \t]*[\n\r][ \t]*)+", LTLspec_env)
        LTLspec_sys = re.split(r"(?:[ \t]*[\n\r][ \t]*)+", LTLspec_sys)
        # remove any empty initial entries (HACK?)
        while '' in LTLspec_env:
            LTLspec_env.remove('')
        while '' in LTLspec_sys:
            LTLspec_sys.remove('')
        # automatically conjoin all the subformulas
        LTLspec_env = '\t\t' + ' & \n\t\t'.join(LTLspec_env)
        LTLspec_sys = '\t\t' + ' & \n\t\t'.join(LTLspec_sys)
        if self.proj.compile_options["decompose"]:
            # substitute decomposed region
            for r in self.proj.rfi.regions:
                if not (r.isObstacle or r.name.lower() == "boundary"):
                    LTLspec_env = re.sub(
                        '\\b(?:s\.)?' + r.name + '\\b',
                        "(" + ' | '.join([
                            "s." + x
                            for x in self.parser.proj.regionMapping[r.name]
                        ]) + ")", LTLspec_env)
                    LTLspec_sys = re.sub(
                        '\\b(?:s\.)?' + r.name + '\\b',
                        "(" + ' | '.join([
                            "s." + x
                            for x in self.parser.proj.regionMapping[r.name]
                        ]) + ")", LTLspec_sys)
        else:
            # No decomposition: just normalize bare region names to the
            # "s." namespace.
            for r in self.proj.rfi.regions:
                if not (r.isObstacle or r.name.lower() == "boundary"):
                    LTLspec_env = re.sub('\\b(?:s\.)?' + r.name + '\\b',
                                         "s." + r.name, LTLspec_env)
                    LTLspec_sys = re.sub('\\b(?:s\.)?' + r.name + '\\b',
                                         "s." + r.name, LTLspec_sys)
        traceback = []  # HACK: needs to be something other than None

    elif self.proj.compile_options["parser"] == "structured":
        import parseEnglishToLTL
        if self.proj.compile_options["decompose"]:
            # substitute the regions name in specs: "near", "within N of"
            # and "between A and B" phrases become disjunctions over the
            # corresponding decomposed helper regions.
            for m in re.finditer(r'near (?P<rA>\w+)', text):
                text = re.sub(
                    r'near (?P<rA>\w+)',
                    "(" + ' or '.join([
                        "s." + r
                        for r in self.parser.proj.regionMapping[
                            'near$' + m.group('rA') + '$' + str(50)]
                    ]) + ")", text)
            for m in re.finditer(
                    r'within (?P<dist>\d+) (from|of) (?P<rA>\w+)', text):
                text = re.sub(
                    r'within ' + m.group('dist') + ' (from|of) ' +
                    m.group('rA'),
                    "(" + ' or '.join([
                        "s." + r
                        for r in self.parser.proj.regionMapping[
                            'near$' + m.group('rA') + '$' + m.group('dist')]
                    ]) + ")", text)
            for m in re.finditer(r'between (?P<rA>\w+) and (?P<rB>\w+)',
                                 text):
                text = re.sub(
                    r'between ' + m.group('rA') + ' and ' + m.group('rB'),
                    "(" + ' or '.join([
                        "s." + r
                        for r in self.parser.proj.regionMapping[
                            'between$' + m.group('rA') + '$and$' +
                            m.group('rB') + "$"]
                    ]) + ")", text)
            # substitute decomposed region
            for r in self.proj.rfi.regions:
                if not (r.isObstacle or r.name.lower() == "boundary"):
                    text = re.sub(
                        '\\b' + r.name + '\\b',
                        "(" + ' | '.join([
                            "s." + x
                            for x in self.parser.proj.regionMapping[r.name]
                        ]) + ")", text)
            regionList = [
                "s." + x.name for x in self.parser.proj.rfi.regions
            ]
        else:
            for r in self.proj.rfi.regions:
                if not (r.isObstacle or r.name.lower() == "boundary"):
                    text = re.sub('\\b' + r.name + '\\b', "s." + r.name,
                                  text)
            regionList = ["s." + x.name for x in self.proj.rfi.regions]

        spec, traceback, failed, self.LTL2SpecLineNumber, self.proj.internal_props = parseEnglishToLTL.writeSpec(
            text, sensorList, regionList, robotPropList)
        # Abort compilation if there were any errors
        if failed:
            return None, None, None
        LTLspec_env = spec["EnvInit"] + spec["EnvTrans"] + spec["EnvGoals"]
        LTLspec_sys = spec["SysInit"] + spec["SysTrans"] + spec["SysGoals"]

    else:
        logging.error("Parser type '{0}' not currently supported".format(
            self.proj.compile_options["parser"]))
        return None, None, None

    # From here on, operate on the (possibly decomposed) region list.
    if self.proj.compile_options["decompose"]:
        regionList = [x.name for x in self.parser.proj.rfi.regions]
    else:
        regionList = [x.name for x in self.proj.rfi.regions]

    if self.proj.compile_options["use_region_bit_encoding"]:
        # Define the number of bits needed to encode the regions
        numBits = int(math.ceil(math.log(len(regionList), 2)))
        # creating the region bit encoding
        bitEncode = bitEncoding(len(regionList), numBits)
        currBitEnc = bitEncode['current']
        nextBitEnc = bitEncode['next']
        # switch to bit encodings for regions
        LTLspec_env = replaceRegionName(LTLspec_env, bitEncode, regionList)
        LTLspec_sys = replaceRegionName(LTLspec_sys, bitEncode, regionList)
        # Re-key the line-number map so its keys match the bit-encoded
        # formulas.
        if self.LTL2SpecLineNumber is not None:
            for k in self.LTL2SpecLineNumber.keys():
                new_k = replaceRegionName(k, bitEncode, regionList)
                if new_k != k:
                    self.LTL2SpecLineNumber[new_k] = \
                        self.LTL2SpecLineNumber[k]
                    del self.LTL2SpecLineNumber[k]

    if self.proj.compile_options["decompose"]:
        adjData = self.parser.proj.rfi.transitions
    else:
        adjData = self.proj.rfi.transitions

    # Store some data needed for later analysis
    self.spec = {}
    if self.proj.compile_options["decompose"]:
        self.spec['Topo'] = createTopologyFragment(
            adjData, self.parser.proj.rfi.regions,
            use_bits=self.proj.compile_options["use_region_bit_encoding"])
    else:
        self.spec['Topo'] = createTopologyFragment(
            adjData, self.proj.rfi.regions,
            use_bits=self.proj.compile_options["use_region_bit_encoding"])

    # Substitute any macros that the parsers passed us
    LTLspec_env = self.substituteMacros(LTLspec_env)
    LTLspec_sys = self.substituteMacros(LTLspec_sys)

    # If we are not using bit-encoding, we need to
    # explicitly encode a mutex for regions
    if not self.proj.compile_options["use_region_bit_encoding"]:
        # DNF version (extremely slow for core-finding)
        #mutex = "\n\t&\n\t []({})".format(" | ".join(["({})".format(" & ".join(["s."+r2.name if r is r2 else "!s."+r2.name for r2 in self.parser.proj.rfi.regions])) for r in self.parser.proj.rfi.regions]))
        if self.proj.compile_options["decompose"]:
            region_list = self.parser.proj.rfi.regions
        else:
            region_list = self.proj.rfi.regions

        # Almost-CNF version: pairwise "not both" clauses.
        exclusions = []
        for i, r1 in enumerate(region_list):
            for r2 in region_list[i + 1:]:
                exclusions.append("!(s.{} & s.{})".format(
                    r1.name, r2.name))
        mutex = "\n&\n\t []({})".format(" & ".join(exclusions))
        LTLspec_sys += mutex

    self.spec.update(self.splitSpecIntoComponents(LTLspec_env, LTLspec_sys))

    # Add in a fragment to make sure that we start in a valid region
    if self.proj.compile_options["decompose"]:
        self.spec['InitRegionSanityCheck'] = createInitialRegionFragment(
            self.parser.proj.rfi.regions,
            use_bits=self.proj.compile_options["use_region_bit_encoding"])
    else:
        self.spec['InitRegionSanityCheck'] = createInitialRegionFragment(
            self.proj.rfi.regions,
            use_bits=self.proj.compile_options["use_region_bit_encoding"])
    LTLspec_sys += "\n&\n" + self.spec['InitRegionSanityCheck']
    LTLspec_sys += "\n&\n" + self.spec['Topo']

    createLTLfile(self.proj.getFilenamePrefix(), LTLspec_env, LTLspec_sys)

    if self.proj.compile_options["parser"] == "slurp":
        # Map post-processed formulas back to the original SLURP lines so
        # analysis output can reference the user's sentences.
        self.reversemapping = {
            self.postprocessLTL(line, sensorList,
                                robotPropList).strip(): line.strip()
            for line in oldspec_env + oldspec_sys
        }
        self.reversemapping[self.spec['Topo'].replace("\n", "").replace(
            "\t", "").lstrip().rstrip("\n\t &")] = "TOPOLOGY"

    #for k,v in self.reversemapping.iteritems():
    #    print "{!r}:{!r}".format(k,v)

    return self.spec, traceback, response
def _writeLTLFile(self):
    """Generate and write the LTL specification file for this project.

    Dispatches on ``compile_options["parser"]`` ("slurp", "ltl" or
    "structured"), rewrites region names (decomposition and optional bit
    encoding), appends topology / region-mutex / initial-region
    fragments, writes the ``.ltl`` file via ``createLTLfile`` and returns
    ``(self.spec, traceback, response)`` — or ``(None, None, ...)`` when
    parsing fails.
    """
    self.LTL2SpecLineNumber = None
    #regionList = [r.name for r in self.parser.proj.rfi.regions]
    regionList = [r.name for r in self.proj.rfi.regions]
    # Deep copy: the slurp branch below may append internal sensors.
    sensorList = deepcopy(self.proj.enabled_sensors)
    robotPropList = self.proj.enabled_actuators + self.proj.all_customs
    text = self.proj.specText
    response = None

    # Create LTL using selected parser
    # TODO: rename decomposition object to something other than 'parser'
    if self.proj.compile_options["parser"] == "slurp":
        # default to no region tags if no simconfig is defined,
        # so we can compile without one
        if self.proj.currentConfig is None:
            region_tags = {}
        else:
            region_tags = self.proj.currentConfig.region_tags

        # Hack: We need to make sure there's only one of these
        global _SLURP_SPEC_GENERATOR

        # Make a new specgenerator and have it process the text
        if not _SLURP_SPEC_GENERATOR:
            # Add SLURP to path for import
            p = os.path.dirname(os.path.abspath(__file__))
            sys.path.append(os.path.join(p, "..", "etc", "SLURP"))
            from ltlbroom.specgeneration import SpecGenerator
            _SLURP_SPEC_GENERATOR = SpecGenerator()

        # Filter out regions it shouldn't know about
        # (obstacles and the outer boundary).
        filtered_regions = [region.name for region in self.proj.rfi.regions
                            if not (region.isObstacle or region.name.lower() == "boundary")]
        LTLspec_env, LTLspec_sys, self.proj.internal_props, internal_sensors, results, responses, traceback = \
            _SLURP_SPEC_GENERATOR.generate(text, sensorList, filtered_regions, robotPropList, region_tags)

        # Keep the raw per-line formulas for the reverse mapping built at
        # the end of this method.
        oldspec_env = LTLspec_env
        oldspec_sys = LTLspec_sys

        for ln, result in enumerate(results):
            if not result:
                logging.warning("Could not parse the sentence in line {0}".format(ln))

        # Abort compilation if there were any errors
        if not all(results):
            return None, None, responses

        # Add in the sensors so they go into the SMV and spec files
        for s in internal_sensors:
            if s not in sensorList:
                sensorList.append(s)
                self.proj.all_sensors.append(s)
                self.proj.enabled_sensors.append(s)

        # Conjoin all the spec chunks
        LTLspec_env = '\t\t' + ' & \n\t\t'.join(LTLspec_env)
        LTLspec_sys = '\t\t' + ' & \n\t\t'.join(LTLspec_sys)

        if self.proj.compile_options["decompose"]:
            # substitute decomposed region names (each region becomes a
            # disjunction over its decomposed sub-regions)
            for r in self.proj.rfi.regions:
                if not (r.isObstacle or r.name.lower() == "boundary"):
                    LTLspec_env = re.sub('\\bs\.' + r.name + '\\b', "("+' | '.join(["s."+x for x in self.parser.proj.regionMapping[r.name]])+")", LTLspec_env)
                    LTLspec_env = re.sub('\\be\.' + r.name + '\\b', "("+' | '.join(["e."+x for x in self.parser.proj.regionMapping[r.name]])+")", LTLspec_env)
                    LTLspec_sys = re.sub('\\bs\.' + r.name + '\\b', "("+' | '.join(["s."+x for x in self.parser.proj.regionMapping[r.name]])+")", LTLspec_sys)
                    LTLspec_sys = re.sub('\\be\.' + r.name + '\\b', "("+' | '.join(["e."+x for x in self.parser.proj.regionMapping[r.name]])+")", LTLspec_sys)
        response = responses

    elif self.proj.compile_options["parser"] == "ltl":
        # delete comments
        text = re.sub(r"#.*$", "", text, flags=re.MULTILINE)
        # split into env and sys parts (by looking for a line of just dashes in between)
        LTLspec_env, LTLspec_sys = re.split(r"^\s*-+\s*$", text, maxsplit=1, flags=re.MULTILINE)
        # split into subformulas
        LTLspec_env = re.split(r"(?:[ \t]*[\n\r][ \t]*)+", LTLspec_env)
        LTLspec_sys = re.split(r"(?:[ \t]*[\n\r][ \t]*)+", LTLspec_sys)
        # remove any empty initial entries (HACK?)
        while '' in LTLspec_env:
            LTLspec_env.remove('')
        while '' in LTLspec_sys:
            LTLspec_sys.remove('')
        # automatically conjoin all the subformulas
        LTLspec_env = '\t\t' + ' & \n\t\t'.join(LTLspec_env)
        LTLspec_sys = '\t\t' + ' & \n\t\t'.join(LTLspec_sys)
        if self.proj.compile_options["decompose"]:
            # substitute decomposed region
            for r in self.proj.rfi.regions:
                if not (r.isObstacle or r.name.lower() == "boundary"):
                    LTLspec_env = re.sub('\\b(?:s\.)?' + r.name + '\\b', "("+' | '.join(["s."+x for x in self.parser.proj.regionMapping[r.name]])+")", LTLspec_env)
                    LTLspec_sys = re.sub('\\b(?:s\.)?' + r.name + '\\b', "("+' | '.join(["s."+x for x in self.parser.proj.regionMapping[r.name]])+")", LTLspec_sys)
        else:
            # No decomposition: normalize bare region names into the "s."
            # namespace.
            for r in self.proj.rfi.regions:
                if not (r.isObstacle or r.name.lower() == "boundary"):
                    LTLspec_env = re.sub('\\b(?:s\.)?' + r.name + '\\b', "s."+r.name, LTLspec_env)
                    LTLspec_sys = re.sub('\\b(?:s\.)?' + r.name + '\\b', "s."+r.name, LTLspec_sys)
        traceback = []  # HACK: needs to be something other than None

    elif self.proj.compile_options["parser"] == "structured":
        import parseEnglishToLTL
        if self.proj.compile_options["decompose"]:
            # substitute the regions name in specs: "near", "within N of"
            # and "between A and B" phrases become disjunctions over the
            # corresponding decomposed helper regions.
            for m in re.finditer(r'near (?P<rA>\w+)', text):
                text=re.sub(r'near (?P<rA>\w+)', "("+' or '.join(["s."+r for r in self.parser.proj.regionMapping['near$'+m.group('rA')+'$'+str(50)]])+")", text)
            for m in re.finditer(r'within (?P<dist>\d+) (from|of) (?P<rA>\w+)', text):
                text=re.sub(r'within ' + m.group('dist')+' (from|of) '+ m.group('rA'), "("+' or '.join(["s."+r for r in self.parser.proj.regionMapping['near$'+m.group('rA')+'$'+m.group('dist')]])+")", text)
            for m in re.finditer(r'between (?P<rA>\w+) and (?P<rB>\w+)', text):
                text=re.sub(r'between ' + m.group('rA')+' and '+ m.group('rB'),"("+' or '.join(["s."+r for r in self.parser.proj.regionMapping['between$'+m.group('rA')+'$and$'+m.group('rB')+"$"]])+")", text)
            # substitute decomposed region
            for r in self.proj.rfi.regions:
                if not (r.isObstacle or r.name.lower() == "boundary"):
                    text = re.sub('\\b' + r.name + '\\b', "("+' | '.join(["s."+x for x in self.parser.proj.regionMapping[r.name]])+")", text)
            regionList = ["s."+x.name for x in self.parser.proj.rfi.regions]
        else:
            for r in self.proj.rfi.regions:
                if not (r.isObstacle or r.name.lower() == "boundary"):
                    text = re.sub('\\b' + r.name + '\\b', "s."+r.name, text)
            regionList = ["s."+x.name for x in self.proj.rfi.regions]

        spec, traceback, failed, self.LTL2SpecLineNumber, self.proj.internal_props = parseEnglishToLTL.writeSpec(text, sensorList, regionList, robotPropList)
        # Abort compilation if there were any errors
        if failed:
            return None, None, None
        LTLspec_env = spec["EnvInit"] + spec["EnvTrans"] + spec["EnvGoals"]
        LTLspec_sys = spec["SysInit"] + spec["SysTrans"] + spec["SysGoals"]

    else:
        logging.error("Parser type '{0}' not currently supported".format(self.proj.compile_options["parser"]))
        return None, None, None

    # From here on, operate on the (possibly decomposed) region list.
    if self.proj.compile_options["decompose"]:
        regionList = [x.name for x in self.parser.proj.rfi.regions]
    else:
        regionList = [x.name for x in self.proj.rfi.regions]

    if self.proj.compile_options["use_region_bit_encoding"]:
        # Define the number of bits needed to encode the regions
        numBits = int(math.ceil(math.log(len(regionList),2)))
        # creating the region bit encoding
        bitEncode = bitEncoding(len(regionList),numBits)
        currBitEnc = bitEncode['current']
        nextBitEnc = bitEncode['next']
        # switch to bit encodings for regions
        LTLspec_env = replaceRegionName(LTLspec_env, bitEncode, regionList)
        LTLspec_sys = replaceRegionName(LTLspec_sys, bitEncode, regionList)
        # Re-key the line-number map so its keys match the bit-encoded
        # formulas.
        if self.LTL2SpecLineNumber is not None:
            for k in self.LTL2SpecLineNumber.keys():
                new_k = replaceRegionName(k, bitEncode, regionList)
                if new_k != k:
                    self.LTL2SpecLineNumber[new_k] = self.LTL2SpecLineNumber[k]
                    del self.LTL2SpecLineNumber[k]

    if self.proj.compile_options["decompose"]:
        adjData = self.parser.proj.rfi.transitions
    else:
        adjData = self.proj.rfi.transitions

    # Store some data needed for later analysis
    self.spec = {}
    if self.proj.compile_options["decompose"]:
        self.spec['Topo'] = createTopologyFragment(adjData, self.parser.proj.rfi.regions, use_bits=self.proj.compile_options["use_region_bit_encoding"])
    else:
        self.spec['Topo'] = createTopologyFragment(adjData, self.proj.rfi.regions, use_bits=self.proj.compile_options["use_region_bit_encoding"])

    # Substitute any macros that the parsers passed us
    LTLspec_env = self.substituteMacros(LTLspec_env)
    LTLspec_sys = self.substituteMacros(LTLspec_sys)

    # If we are not using bit-encoding, we need to
    # explicitly encode a mutex for regions
    if not self.proj.compile_options["use_region_bit_encoding"]:
        # DNF version (extremely slow for core-finding)
        #mutex = "\n\t&\n\t []({})".format(" | ".join(["({})".format(" & ".join(["s."+r2.name if r is r2 else "!s."+r2.name for r2 in self.parser.proj.rfi.regions])) for r in self.parser.proj.rfi.regions]))
        if self.proj.compile_options["decompose"]:
            region_list = self.parser.proj.rfi.regions
        else:
            region_list = self.proj.rfi.regions

        # Almost-CNF version: pairwise "not both" clauses.
        exclusions = []
        for i, r1 in enumerate(region_list):
            for r2 in region_list[i+1:]:
                exclusions.append("!(s.{} & s.{})".format(r1.name, r2.name))
        mutex = "\n&\n\t []({})".format(" & ".join(exclusions))
        LTLspec_sys += mutex

    self.spec.update(self.splitSpecIntoComponents(LTLspec_env, LTLspec_sys))

    # Add in a fragment to make sure that we start in a valid region
    if self.proj.compile_options["decompose"]:
        self.spec['InitRegionSanityCheck'] = createInitialRegionFragment(self.parser.proj.rfi.regions, use_bits=self.proj.compile_options["use_region_bit_encoding"])
    else:
        self.spec['InitRegionSanityCheck'] = createInitialRegionFragment(self.proj.rfi.regions, use_bits=self.proj.compile_options["use_region_bit_encoding"])
    LTLspec_sys += "\n&\n" + self.spec['InitRegionSanityCheck']
    LTLspec_sys += "\n&\n" + self.spec['Topo']

    createLTLfile(self.proj.getFilenamePrefix(), LTLspec_env, LTLspec_sys)

    if self.proj.compile_options["parser"] == "slurp":
        # Map post-processed formulas back to the original SLURP lines so
        # analysis output can reference the user's sentences.
        self.reversemapping = {self.postprocessLTL(line,sensorList,robotPropList).strip():line.strip() for line in oldspec_env + oldspec_sys}
        self.reversemapping[self.spec['Topo'].replace("\n","").replace("\t","").lstrip().rstrip("\n\t &")] = "TOPOLOGY"

    #for k,v in self.reversemapping.iteritems():
    #    print "{!r}:{!r}".format(k,v)

    return self.spec, traceback, response
class TestSpecGenerator(unittest.TestCase): def setUp(self): self.specgen = SpecGenerator() self.sensors = [] self.regions = ['hallway'] self.props = [] self.tag_dict = {} def test_go(self): """Test a basic go command.""" text = "Go to the hallway." enl, syl = self.lines_from_gen(text) self.assertEqual(enl, []) self.assertEqual(syl, [ '!s.mem_visit_hallway', '([]((next(s.mem_visit_hallway) <-> (s.mem_visit_hallway | next(s.hallway)))))', '([]<>(s.mem_visit_hallway))' ]) def test_go_nonexistent(self): """Test going to a nonexistent room.""" text = "Go to the kitchen." results = self.lines_from_gen(text) self.assertFalse(results[0]) def test_actuator_mutex(self): """Test the actuator mutex.""" self.props = ['camera', 'radio'] text = "" enl, syl = self.lines_from_gen(text) self.assertEqual(enl, []) self.assertEqual(syl, [ '([](((s.camera & !s.radio) | (s.radio & !s.camera) ' '| (!s.camera & !s.radio))))' ]) def test_activate(self): """Test a simple activate command.""" self.props = ['camera'] text = "Activate your camera.""" enl, syl = self.lines_from_gen(text) self.assertEqual(enl, []) self.assertEqual(syl, ['([](s.camera))']) def test_activate_conditional(self): """Test a conditional activate command.""" self.props = ['camera'] text = "Activate your camera in the hallway.""" enl, syl = self.lines_from_gen(text) self.assertEqual(enl, []) self.assertEqual(syl, ['([]((s.hallway -> s.camera)))']) def lines_from_gen(self, text): """Return [env_lines, sys_lines] from a default generate call.""" return self.get_from_gen(text, 0, 2) def results_from_gen(self, text): """Return responses from a default generate call.""" return self.get_from_gen(text, 4) def get_from_gen(self, text, start, end=None): """Return the start-th or start:end element(s) of the return from generate.""" result = self.specgen.generate(text, self.sensors, self.regions, self.props, self.tag_dict, verbose=False) if end: return result[start:end] else: return result[start]
class TestSpecGenerator(unittest.TestCase): """Test spec generation.""" def setUp(self): self.specgen = SpecGenerator(False) self.sensors = [] self.regions = ['hallway'] self.props = [] self.tag_dict = {} def test_go(self): """Test a basic go command.""" text = "Go to the hallway." enl, syl = self.lines_from_gen(text) self.assertEqual(enl, []) self.assertEqual(syl, [ '!s.mem_visit_hallway', '([]((next(s.mem_visit_hallway) <-> (s.mem_visit_hallway | next(s.hallway)))))', '([]<>(s.mem_visit_hallway))' ]) def test_go_nonexistent(self): """Test going to a nonexistent room.""" text = "Go to the kitchen." results = self.lines_from_gen(text) self.assertFalse(results[0]) def test_actuator_mutex(self): """Test the actuator mutex.""" self.props = ['camera', 'radio'] text = "" enl, syl = self.lines_from_gen(text) self.assertEqual(enl, []) # Assumes mutex is the last sys line self.assertEqual( syl[-1], '([](((s.camera & !s.radio) | (s.radio & !s.camera) ' '| (!s.camera & !s.radio))))') def test_activate(self): """Test a simple activate command.""" self.props = ['camera'] text = "Activate your camera." enl, syl = self.lines_from_gen(text) self.assertEqual(enl, []) self.assertEqual(syl, [ '([](((!s.camera & next(s.camera)) -> STAY_THERE)))', '([](next(s.camera)))' ]) def test_activate_location(self): """Test an activate command restricted to a location.""" self.props = ['camera'] text = "Activate your camera in the hallway." enl, syl = self.lines_from_gen(text) self.assertEqual(enl, []) # Assume actual safety is last line self.assertEqual(syl[-1], '([]((next(s.hallway) -> next(s.camera))))') def test_activate_conditional1(self): """Test an activate command restricted by a preceding conditional.""" self.props = ['camera'] self.sensors = ['bomb'] text = "If you see a bomb, activate your camera." 
enl, syl = self.lines_from_gen(text) self.assertEqual(enl, []) # Assume actual safety is last line self.assertEqual(syl[-1], '([]((next(s.react_bomb) -> next(s.camera))))') def test_actuate_all(self): """Test an actuation command on 'all'.""" self.props = ['defuse'] self.sensors = ['bomb'] text = "Defuse all bombs." enl, syl = self.lines_from_gen(text) self.assertEqual(enl, []) # Assume actual safety is last line self.assertEqual(syl[-1], '([]((next(s.react_bomb) -> next(s.defuse))))') def test_actuate_the(self): """Test an actuation command on 'the'.""" self.props = ['defuse'] self.sensors = ['bomb'] text = "Defuse the bomb." enl, syl = self.lines_from_gen(text) self.assertEqual(enl, []) # Assume actual safety is last line self.assertEqual(syl[-1], '([]((next(s.react_bomb) -> next(s.defuse))))') def lines_from_gen(self, text): """Return [env_lines, sys_lines] from a default generate call.""" return self.get_from_gen(text, 0, 2) def results_from_gen(self, text): """Return responses from a default generate call.""" return self.get_from_gen(text, 4) def get_from_gen(self, text, start, end=None): """Return the start-th or start:end element(s) of the return from generate.""" result = self.specgen.generate(text, self.sensors, self.regions, self.props, self.tag_dict) if end: return result[start:end] else: return result[start]