def __init__(self, node, debug_enable: bool = False):
    dbg = Debug(debug_enable)
    assert isinstance(node, CommonTree)
    self.structure = node
    definitions = node.getChildren()
    self.name = definitions[0].getText()
    self.variables = {}
    self.getvarnames(definitions)
    dbg.pdebug("Object: " + node.getText() + " " + self.name +
               " -> varNames: " + str(self.variables))
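# Hedged sketch, not from the original source: getvarnames is defined
# elsewhere in this class. Assuming the CommonTree layout used in __init__
# above (child 0 holds the object's name, the remaining children are
# variable definitions), a minimal version could simply map each
# definition's text to its subtree:
#
#     def getvarnames(self, definitions):
#         # Skip the name token at index 0
#         for definition in definitions[1:]:
#             self.variables[definition.getText()] = definition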
def RunProtoGen(file, filename):
    graphdbgparser = True
    graphdbgalgorithm = True

    path = os.getcwd()
    MakeDir("ProtoGen_Output")

    dbg = Debug(True)

    develop = 0
    if not develop:
        # Frontend
        dbg.pheader("PROTOGEN PARSER")
        Parser = ProtoParser(file, filename, graphdbgparser)

        if not Parser.checkAccessBehaviourDefined():
            print("Exiting.")
            sys.exit(1)
        if not Parser.checkAllStatesReachable():
            print("Exiting.")
            sys.exit(1)

        level = Level(Parser, "L1")

        dbg.pheader(dbg.spacer + "PROTOGEN ALGORITHM")

        # Saving the objects:
        with open('objs.pkl', 'wb') as f:  # Python 3: open(..., 'wb')
            pickle.dump(level, f)
    else:
        # Getting back the objects:
        with open('objs.pkl', 'rb') as f:  # Python 3: open(..., 'rb')
            level = pickle.load(f)

    talgo = time.time()
    Algorithm = ProtoAlgorithm(level, ProtoConfig(), graphdbgalgorithm)
    dbg.pdebug("ProtoGen runtime: " + str(time.time() - talgo) + '\n')

    dbg.pheader(dbg.spacer + "PROTOGEN BACKEND")

    os.chdir(path)
    return level, Algorithm
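# Hedged usage sketch (an assumption, not part of the original driver code):
# whether `file` is an open handle or the file's raw text depends on
# ProtoParser, and "MSI.pcc" is a hypothetical protocol description file.
def _example_run_protogen():
    with open("MSI.pcc") as f:
        level, algorithm = RunProtoGen(f, "MSI.pcc")
    return level, algorithm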
# First run HieraGen
dbg.pheader("HIERAGEN OUTPUT:")
h_gen = HieraGen(lvl1, lvl2, dbg.dbg)

# Run ProtoGen for the hierarchical controller
config = ProtoConfig()
dbg.pheader("HIERAGEN CONCURRENT CONTROLLER OUTPUT:")
ProtoAlgorithm(h_gen.high_level, config, dbg.dbg)
high_level = h_gen.high_level

# Then run ProtoGen for the lower- and higher-level cache and directory controllers
dbg.pheader("LOWER LEVEL CACHE CONCURRENT CONTROLLER OUTPUT:")
ProtoAlgorithm(lvl1, ProtoConfig(), dbg.dbg)
dbg.pheader("HIGHER LEVEL CACHE & DIRECTORY CONCURRENT CONTROLLER OUTPUT:")
ProtoAlgorithm(lvl2, ProtoConfig(), dbg.dbg)

# Combine the clusters by replacing the directory of the lower level with the
# dir/cache and the designated cache of the higher-level cluster
cluster_1.replace_arch(lvl1.directory, high_level.cache)
cluster_2.replace_arch(lvl2.cache, high_level.cache)

# Run the Murphi model checker verification
RunMurphiModular([cluster_1, cluster_2], test_case[0], False, 8000)

dbg.pdebug("HIR_" + test_case[0].split(".")[0] + "_&_" +
           test_case[1].split(".")[0] + " Runtime: " +
           str(time.time() - start_time))

# Reset the path
os.chdir(path)
def RunMurphi(level: Level, filename: str, dbg_enabled: bool = True):
    dbg = Debug(dbg_enabled)
    path = os.getcwd()
    MakeDir("Murphi")

    # Generate the system tuple for this level
    archs = [level.cache, level.directory]
    cache_machine = Machine(level.cache)
    directory_machine = Machine(level.directory)
    sys_description = Cluster(
        (cache_machine, cache_machine, cache_machine, directory_machine),
        'C1', [level])
    clusters = [sys_description]

    SSP_MurphiDesc = MurphiModular(clusters, True)
    MurphiDesc = MurphiModular(clusters, False)

    print("Murphi files were generated in: " + os.getcwd())

    dbg.pheader(dbg.spacer + "Murphi make and run")
    talgo = time.time()

    dbg.pheader(dbg.spacer + "Starting SSP verification" + '\n')
    # The delay is only related to the output queues of the model checker
    # and the Python tool
    time.sleep(0.005)

    # The SSP run is currently disabled here; the results file of a previous
    # run is inspected instead, and ssp_success defaults to True.
    #SSP_MurphiDesc.runMurphi(True, filename)
    #ssp_success = False
    ssp_success = True
    try:
        resultsfile = open("SSP_" + filename.split(".")[0] + "_results.txt")
    except FileNotFoundError:
        dbg.pwarning(
            "SSP results file does not exist - did it compile correctly?" +
            "\nPlease check SSP_" + filename.split(".")[0] + "_compile.txt" +
            " for details, and make sure your input is correctly specified.")
    else:
        if "No error found" in resultsfile.read():
            time.sleep(0.005)
            dbg.psuccess("SSP verified without error")
            ssp_success = True
        else:
            dbg.pwarning("SSP did not verify correctly; please see SSP_" +
                         filename.split(".")[0] +
                         "_results.txt for the Murphi output.")
        resultsfile.close()

    if ssp_success:
        dbg.pheader(dbg.spacer + "Starting full protocol verification" + '\n')
        time.sleep(0.005)
        MurphiDesc.runMurphi(False, filename)
        try:
            resultsfile = open(filename.split(".")[0] + "_results.txt")
        except FileNotFoundError:
            dbg.pwarning(
                "Results file does not exist - did it compile correctly?" +
                "\nPlease check " + filename.split(".")[0] + "_compile.txt " +
                "for details, and make sure your input is correctly specified."
            )
        else:
            if "No error found" in resultsfile.read():
                time.sleep(0.005)
                dbg.psuccess("Full protocol verified without error")
            else:
                dbg.pwarning(
                    "Full protocol did not verify correctly; please see " +
                    filename.split(".")[0] +
                    "_results.txt for the Murphi output.")
            resultsfile.close()
    else:
        dbg.pwarning(
            "Aborting full protocol verification as the SSP was deemed incorrect.")

    dbg.pdebug(dbg.spacer + "Murphi runtime: " + str(time.time() - talgo) + '\n')

    os.chdir(path)
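# Illustrative refactoring sketch, not used by RunMurphi above or
# RunMurphiModular below: both functions repeat the same
# try/open/"No error found" check on Murphi results files. Under that
# convention, the check could be factored out as:
def _murphi_results_ok(results_name: str, compile_name: str, dbg: Debug) -> bool:
    try:
        resultsfile = open(results_name)
    except FileNotFoundError:
        dbg.pwarning("Results file " + results_name + " does not exist - did "
                     "it compile correctly?\nPlease check " + compile_name +
                     " for details, and make sure your input is correctly "
                     "specified.")
        return False
    with resultsfile:
        # Murphi prints "No error found" on a successful verification run
        return "No error found" in resultsfile.read()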
def RunMurphiModular(clusters: List[Cluster],
                     filename: str,
                     run_SSP: bool = True,
                     memory: int = 0,
                     dbg_enabled: bool = True):
    dbg = Debug(dbg_enabled)
    path = os.getcwd()

    if not memory:
        # Calculate the free memory in megabytes, leaving about 8 GB of
        # additional headroom (virtual_memory comes from psutil)
        memory = int(virtual_memory().free / 2**20) - 8000

    MakeDir("Murphi")

    SSP_MurphiDesc = MurphiModular(clusters, True)
    MurphiDesc = MurphiModular(clusters, False)

    print("Murphi files were generated in: " + os.getcwd())

    dbg.pheader(dbg.spacer + "Murphi make and run")
    talgo = time.time()

    dbg.pheader(dbg.spacer + "Starting SSP verification" + '\n')
    # The delay is only related to the output queues of the model checker
    # and the Python tool
    time.sleep(0.005)

    ssp_success = False
    if run_SSP:
        SSP_MurphiDesc.runMurphi(True, filename)
        try:
            resultsfile = open("SSP_" + filename.split(".")[0] + "_results.txt")
        except FileNotFoundError:
            dbg.pwarning(
                "SSP results file does not exist - did it compile correctly?" +
                "\nPlease check SSP_" + filename.split(".")[0] + "_compile.txt" +
                " for details, and make sure your input is correctly specified."
            )
        else:
            if "No error found" in resultsfile.read():
                time.sleep(0.005)
                dbg.psuccess("SSP verified without error")
                ssp_success = True
            else:
                dbg.pwarning("SSP did not verify correctly; please see SSP_" +
                             filename.split(".")[0] +
                             "_results.txt for the Murphi output.")
            resultsfile.close()

    if ssp_success or not run_SSP:
        dbg.pheader(dbg.spacer + "Starting full protocol verification" + '\n')
        time.sleep(0.005)
        MurphiDesc.runMurphi(False, filename, memory)
        try:
            resultsfile = open(filename.split(".")[0] + "_results.txt")
        except FileNotFoundError:
            dbg.pwarning(
                "Results file does not exist - did it compile correctly?" +
                "\nPlease check " + filename.split(".")[0] + "_compile.txt " +
                "for details, and make sure your input is correctly specified."
            )
        else:
            result_str = resultsfile.read()
            if result_str.rfind("No error found") != -1:
                time.sleep(0.005)
                dbg.psuccess("Full protocol verified without error")
            else:
                if result_str.rfind("Closed hash table full.") != -1 or \
                        result_str.rfind("Internal Error: Too many active states.") != -1:
                    dbg.pwarning(
                        "Murphi memory full, please allocate more memory for "
                        "the verification thread.\nSee " +
                        filename.split(".")[0] +
                        "_results.txt for the Murphi output.\n")
                else:
                    dbg.pwarning(
                        "Full protocol did not verify correctly; please see " +
                        filename.split(".")[0] +
                        "_results.txt for the Murphi output.")
            resultsfile.close()
    else:
        dbg.pwarning(
            "Aborting full protocol verification as the SSP was deemed incorrect.")

    dbg.pdebug(dbg.spacer + "Murphi runtime: " + str(time.time() - talgo) + '\n')

    # Reset the path
    os.chdir(path)
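# Hedged usage sketch (illustrative, not part of the original flow): verify a
# single-cluster system with an explicit memory budget. The Machine/Cluster
# construction mirrors RunMurphi above, and the 8000 MB budget matches the
# HieraGen driver; `filename` is whatever protocol file the level was
# parsed from.
def _example_run_murphi_modular(level: Level, filename: str):
    cache = Machine(level.cache)
    directory = Machine(level.directory)
    cluster = Cluster((cache, cache, cache, directory), 'C1', [level])
    RunMurphiModular([cluster], filename, run_SSP=False, memory=8000)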
    Parser2 = ProtoParser(file2, filename2, graphdbgparser)
    if not Parser2.checkAccessBehaviourDefined():
        print("Exiting.")
        sys.exit(1)
    if not Parser2.checkAllStatesReachable():
        print("Exiting.")
        sys.exit(1)

    dbg.pheader("Parsing Complete")

    # Generate cache and directory controllers
    lvl1 = Level(Parser1, "L1", filename1)
    lvl2 = Level(Parser2, "L2", filename2)

    dbg.pdebug("Runtime: " + str(time.time() - start_time))
    start_time = time.time()

    # Saving the objects:
    with open('objs.pkl', 'wb') as f:  # Python 3: open(..., 'wb')
        pickle.dump([lvl1, lvl2], f)
else:
    MakeDir("HIR_" + filename1.split(".")[0] + "_&_" + filename2.split(".")[0])

    if dev_hgen or dev_pgen:
        # Getting back the objects:
        with open('objs.pkl', 'rb') as f:  # Python 3: open(..., 'rb')
            lvl1, lvl2 = pickle.load(f)

dbg.pheader("Level Generation Complete")