Code Example #1
File: RunProtoGen.py  Project: icsa-caps/HieraGen
import os
import sys
import time
import pickle

# Debug, MakeDir, ProtoParser, Level, ProtoAlgorithm and ProtoConfig are
# project-level helpers from the HieraGen code base.


def RunProtoGen(file, filename):
    graphdbgparser = True
    graphdbgalgorithm = True

    path = os.getcwd()
    MakeDir("ProtoGen_Output")

    dbg = Debug(True)

    develop = False  # set to True to reload the pickled objects instead of re-parsing
    if not develop:
        # Frontend
        dbg.pheader("PROTOGEN PARSER")
        Parser = ProtoParser(file, filename, graphdbgparser)
        if not Parser.checkAccessBehaviourDefined():
            print("Exiting.")
            sys.exit(1)
        if not Parser.checkAllStatesReachable():
            print("Exiting.")
            sys.exit(1)

        level = Level(Parser, "L1")

        dbg.pheader(dbg.spacer + "PROTOGEN ALGORITHM")

        # Saving the objects:
        with open('objs.pkl', 'wb') as f:  # Python 3: open(..., 'wb')
            pickle.dump(level, f)
    else:
        # Getting back the objects:
        with open('objs.pkl', 'rb') as f:  # Python 3: open(..., 'rb')
            level = pickle.load(f)

    talgo = time.time()

    Algorithm = ProtoAlgorithm(level, ProtoConfig(), graphdbgalgorithm)
    dbg.pdebug("ProtoGen runtime: " + str(time.time() - talgo) + '\n')
    dbg.pheader(dbg.spacer + "PROTOGEN BACKEND")

    os.chdir(path)

    return level, Algorithm
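For orientation, a minimal driver sketch showing how RunProtoGen might be invoked. The command-line handling and the input file name are assumptions, and whether ProtoParser expects an open handle or a path is only inferred from this snippet:

if __name__ == "__main__":
    # Hypothetical entry point; the input file name is an assumption.
    fname = sys.argv[1] if len(sys.argv) > 1 else "MSI.pcc"
    with open(fname) as f:  # passing an open handle is an assumption
        level, algorithm = RunProtoGen(f, fname)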
Code Example #2
                        [lvl1])

    # Generate Level 2
    # The cache part of the combined cache/directory controller
    cache_machine = Machine(lvl2.cache)
    cache_machine_hl = Machine(lvl2.cache)
    cache_tuple_hl = tuple(cache_machine_hl for _ in range(L2_cache_count))
    directory_machine = Machine(lvl2.directory)
    # Single remote directory
    cluster_2 = Cluster(cache_tuple_hl + (cache_machine, directory_machine),
                        'C2', [lvl2])

    # First, run HieraGen
    dbg.pheader("HIERAGEN OUTPUT:")
    h_gen = HieraGen(lvl1, lvl2, dbg.dbg)

    # Run ProtoGen for the hierarchical controller
    config = ProtoConfig()
    dbg.pheader("HIERAGEN CONCURRENT CONTROLLER OUTPUT:")
    ProtoAlgorithm(h_gen.high_level, config, dbg.dbg)
    high_level = h_gen.high_level

    # Then run ProtoGen for the lower- and higher-level cache and directory controllers
    dbg.pheader("LOWER LEVEL CACHE CONCURRENT CONTROLLER OUTPUT:")
    ProtoAlgorithm(lvl1, ProtoConfig(), dbg.dbg)
    dbg.pheader("HIGHER LEVEL CACHE & DIRECTORY CONCURRENT CONTROLLER OUTPUT:")
    ProtoAlgorithm(lvl2, ProtoConfig(), dbg.dbg)

    # Combine clusters
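The snippet above begins mid-expression at `[lvl1])`. As a hedged reconstruction, the Level 1 cluster was presumably assembled the same way as the Level 2 one; the names `cluster_1` and `L1_cache_count` and the exact machine count are assumptions inferred from this fragment and Example #3:

    # Hypothetical Level 1 setup, mirroring the Level 2 code above.
    cache_machine_l1 = Machine(lvl1.cache)
    directory_machine_l1 = Machine(lvl1.directory)
    cache_tuple_l1 = tuple(cache_machine_l1 for _ in range(L1_cache_count))
    cluster_1 = Cluster(cache_tuple_l1 + (directory_machine_l1,), 'C1',
                        [lvl1])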
Code Example #3
import os
import time

# Debug, MakeDir, Machine, Cluster, Level and MurphiModular are project-level
# helpers from the HieraGen code base.


def RunMurphi(level: Level, filename: str, dbg_enabled: bool = True):
    dbg = Debug(dbg_enabled)
    path = os.getcwd()

    MakeDir("Murphi")

    # Generate System Tuple level
    archs = [level.cache, level.directory]
    cache_machine = Machine(level.cache)
    directory_machine = Machine(level.directory)
    sys_description = Cluster(
        (cache_machine, cache_machine, cache_machine, directory_machine), 'C1',
        [level])
    clusters = [sys_description]

    SSP_MurphiDesc = MurphiModular(clusters, True)
    MurphiDesc = MurphiModular(clusters, False)

    print("Murphi files were generated in: " + os.getcwd())

    dbg.pheader(dbg.spacer + "Murphi make and run")
    talgo = time.time()

    dbg.pheader(dbg.spacer + "Starting SSP verification" + '\n')
    # The short delay only orders the output queues of the model checker and
    # the Python tool.
    time.sleep(0.005)
    SSP_MurphiDesc.runMurphi(True, filename)
    ssp_success = False

    try:
        resultsfile = open("SSP_" + filename.split(".")[0] + "_results.txt")
    except FileNotFoundError:
        dbg.pwarning(
            "SSP results file does not exist - did it compile correctly?" +
            "\nPlease check SSP_" + filename.split(".")[0] + "_compile.txt" +
            " for details, and make sure your input is correctly specified.")
    else:
        if "No error found" in resultsfile.read():
            time.sleep(0.005)
            dbg.psuccess("SSP verified without error")
            ssp_success = True
        else:
            dbg.pwarning("SSP did not verify correctly; please see SSP_" +
                         filename.split(".")[0] +
                         "_results.txt for the Murphi output.")
        resultsfile.close()
    if ssp_success:
        dbg.pheader(dbg.spacer + "Starting full protocol verification" + '\n')
        time.sleep(0.005)
        MurphiDesc.runMurphi(False, filename)
        try:
            resultsfile = open(filename.split(".")[0] + "_results.txt")
        except FileNotFoundError:
            dbg.pwarning(
                "Results file does not exist - did it compile correctly?" +
                "\nPlease check " + filename.split(".")[0] + "_compile.txt " +
                "for details, and make sure your input is correctly specified."
            )
        else:
            if "No error found" in resultsfile.read():
                time.sleep(0.005)
                dbg.psuccess("Full protocol verified without error")
            else:
                dbg.pwarning(
                    "Full protocol did not verify correctly; please see " +
                    filename.split(".")[0] +
                    "_results.txt for the Murphi output.")
            resultsfile.close()
    else:
        dbg.pwarning(
            "Aborting full protocol verification as SSP deemed incorrect.")
    dbg.pdebug(dbg.spacer + "Murphi runtime: " + str(time.time() - talgo) +
               '\n')

    os.chdir(path)
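Combined with Example #1, a plausible end-to-end sequence is to generate the concurrent protocol first and then hand the same Level object to RunMurphi; the file name is an assumption:

# Hypothetical driver chaining generation and verification.
fname = "MSI.pcc"  # input protocol description; the name is an assumption
with open(fname) as f:
    level, algorithm = RunProtoGen(f, fname)
RunMurphi(level, fname)  # verify the generated protocol with Murphi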
Code Example #4
import os
import time
from typing import List

from psutil import virtual_memory

# Debug, MakeDir, Cluster and MurphiModular are project-level helpers from the
# HieraGen code base.


def RunMurphiModular(clusters: List[Cluster],
                     filename: str,
                     run_SSP: bool = True,
                     memory: int = 0,
                     dbg_enabled: bool = True):

    dbg = Debug(dbg_enabled)
    path = os.getcwd()

    if not memory:
        # Default to the free memory in megabytes, leaving roughly 8 GB of headroom
        memory = int(virtual_memory().free / 2**20) - 8000

    MakeDir("Murphi")

    SSP_MurphiDesc = MurphiModular(clusters, True)
    MurphiDesc = MurphiModular(clusters, False)

    print("Murphi files were generated in: " + os.getcwd())

    dbg.pheader(dbg.spacer + "Murphi make and run")
    talgo = time.time()

    dbg.pheader(dbg.spacer + "Starting SSP verification" + '\n')
    # The short delay only orders the output queues of the model checker and
    # the Python tool.
    time.sleep(0.005)
    ssp_success = False

    if run_SSP:
        SSP_MurphiDesc.runMurphi(True, filename)

        try:
            resultsfile = open("SSP_" + filename.split(".")[0] +
                               "_results.txt")
        except FileNotFoundError:
            dbg.pwarning(
                "SSP results file does not exist - did it compile correctly?" +
                "\nPlease check SSP_" + filename.split(".")[0] +
                "_compile.txt" +
                " for details, and make sure your input is correctly specified."
            )
        else:
            if "No error found" in resultsfile.read():
                time.sleep(0.005)
                dbg.psuccess("SSP verified without error")
                ssp_success = True
            else:
                dbg.pwarning("SSP did not verify correctly; please see SSP_" +
                             filename.split(".")[0] +
                             "_results.txt for the Murphi output.")
            resultsfile.close()

    if ssp_success or not run_SSP:
        dbg.pheader(dbg.spacer + "Starting full protocol verification" + '\n')
        time.sleep(0.005)
        MurphiDesc.runMurphi(False, filename, memory)
        try:
            resultsfile = open(filename.split(".")[0] + "_results.txt")
        except FileNotFoundError:
            dbg.pwarning(
                "Results file does not exist - did it compile correctly?" +
                "\nPlease check " + filename.split(".")[0] + "_compile.txt " +
                "for details, and make sure your input is correctly specified."
            )
        else:
            result_str = resultsfile.read()
            if result_str.rfind("No error found") != -1:
                time.sleep(0.005)
                dbg.psuccess("Full protocol verified without error")
            else:
                if ("Closed hash table full." in result_str or
                        "Internal Error: Too many active states." in result_str):
                    dbg.pwarning(
                        "Murphi ran out of memory; please allocate more memory "
                        "for the verification thread.\nSee " +
                        filename.split(".")[0] +
                        "_results.txt for the Murphi output.\n")
                else:
                    dbg.pwarning(
                        "Full protocol did not verify correctly; please see " +
                        filename.split(".")[0] +
                        "_results.txt for the Murphi output.")
            resultsfile.close()
    else:
        dbg.pwarning(
            "Aborting full protocol verification as SSP deemed incorrect.")
    dbg.pdebug(dbg.spacer + "Murphi runtime: " + str(time.time() - talgo) +
               '\n')

    # Reset the path
    os.chdir(path)
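A hedged usage sketch of RunMurphiModular, reusing the cluster construction from Example #2; the cluster list, file name, and memory budget are assumptions:

# Hypothetical invocation with explicit clusters and a 16 GB memory cap.
clusters = [cluster_1, cluster_2]  # assembled as in Example #2
RunMurphiModular(clusters, "MSI.pcc", run_SSP=True, memory=16000)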
Code Example #5
    if not Parser1.checkAccessBehaviourDefined():
        print("Exiting.")
        sys.exit(1)
    if not Parser1.checkAllStatesReachable():
        print("Exiting.")
        sys.exit(1)

    Parser2 = ProtoParser(file2, filename2, graphdbgparser)
    if not Parser2.checkAccessBehaviourDefined():
        print("Exiting.")
        sys.exit(1)
    if not Parser2.checkAllStatesReachable():
        print("Exiting.")
        sys.exit(1)

    dbg.pheader("Parsing Complete")

    # Generate cache and directory controllers
    lvl1 = Level(Parser1, "L1", filename1)
    lvl2 = Level(Parser2, "L2", filename2)

    dbg.pdebug("Runtime: " + str(time.time() - start_time))
    start_time = time.time()

    # Saving the objects:
    with open('objs.pkl', 'wb') as f:  # Python 3: open(..., 'wb')
        pickle.dump([lvl1, lvl2], f)

else:
    MakeDir("HIR_" + filename1.split(".")[0] + "_&_" + filename2.split(".")[0])