Example #1
    def __init__(self, template_dir, debug_enable: bool = False):
        GlobalVariables.__init__(self)
        Debug.__init__(self, debug_enable)

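        # Initialize every code-generator mixin with the shared template directory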
        GenConst.__init__(self, template_dir)
        GenEnums.__init__(self, template_dir)
        GenObjectSets.__init__(self, template_dir)
        GenNetworkObj.__init__(self, template_dir)
        GenAccessType.__init__(self, template_dir)
        GenMachObj.__init__(self, template_dir)
        GenLockType.__init__(self, template_dir)
        GenVars.__init__(self, template_dir)
        GenLockFunc.__init__(self, template_dir)
        GenNetworkFunc.__init__(self, template_dir)
        GenAccessFunc.__init__(self, template_dir)
        GenFSMFuncObj.__init__(self, template_dir)

        GenAccessRuleset.__init__(self, template_dir)
        GenAccessSendFunc.__init__(self, template_dir)

        GenModStateFunc.__init__(self, template_dir)

        GenNetworkRules.__init__(self, template_dir)
        GenStartStates.__init__(self, template_dir)
        GenInvar.__init__(self, template_dir)
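
A minimal sketch of the explicit mixin-initialization pattern used above. The Gen* classes and template names below are hypothetical placeholders; because every mixin takes the same template_dir argument, each base __init__ is called directly rather than through super().

class GenConst:
    def __init__(self, template_dir):
        self.const_template = template_dir + "/const.tmpl"

class GenEnums:
    def __init__(self, template_dir):
        self.enum_template = template_dir + "/enums.tmpl"

class Backend(GenConst, GenEnums):
    def __init__(self, template_dir):
        # Each mixin receives the shared template directory explicitly
        GenConst.__init__(self, template_dir)
        GenEnums.__init__(self, template_dir)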
Example #2
    def __init__(self, file="", filename="", graphexport=False, dbg_enabled: bool = False):
        Debug.__init__(self, dbg_enabled)

        # PROTO DATA OBJECTS
        self.constNode: Dict[str, CommonTree] = {}
        self.networkNode: List[CommonTree] = []

        # Architecture nodes (filled in during parsing; None until then)
        self.cache_node: Optional[PCCObject] = None
        self.dir_node: Optional[PCCObject] = None
        self.mem_node: Optional[PCCObject] = None

        self.msgNode: List[PCCObject] = []
        self.msgTypes: List[str] = []
        self.dataMsgTypes: List[str] = []  # Data message type names; these should be included in the message definition

        self.archNode: Dict[str, List[Transaction]] = {}
        self.stableStates: Dict[str, List[str]] = {}        # {arch_name: [stable_state_names]}
        self.initStateNodes: Dict[str, str] = {}            # {arch_name: initial_state_name}

        if file and filename:
            self.filename = filename
            lexer = ProtoCCLexer(antlr3.StringStream(file))
            parser = ProtoCCParser(antlr3.CommonTokenStream(lexer))
            tree = parser.document().getTree()
            new_tree_base = copy_tree(tree)
            self.pdebug(new_tree_base.toStringTree())
            self._ParseNodes(new_tree_base)

            self.perror("Accesses for SSP not defined", self.checkAccessBehaviourDefined())
            self.perror("Terminal states detected in SSP", self.checkAllStatesReachable())

            if graphexport:
                self._dArch()
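
A hedged usage sketch for this constructor, mirroring the call in Example #5: the first argument is the file's contents, the second its name. "MOSI.pcc" is a placeholder filename borrowed from Example #7.

file_contents = open("MOSI.pcc").read()
parser = ProtoParser(file_contents, "MOSI.pcc", graphexport=False)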
Example #3
    def __init__(self,
                 level: Level,
                 config,
                 dbg_term: bool = False,
                 dbg_graph: bool = False):

        ProtoStalling.__init__(self)
        ProtoNonStalling.__init__(self)
        ProtoDir.__init__(self)
        ProtoAccessAssign.__init__(self)
        Debug.__init__(self, dbg_term)
        self.dbg_graph = dbg_graph

        self.debug_all_generated_states = []

        self.level = level

        self.parser = level.parser

        self.datamsgs = level.parser.getDataMsgTypes()

        self.access = level.parser.getAccess()
        self.evict = level.parser.getEvict()

        self.archProtoGen = {}
        self.renamedMessages = level.renamedMessages
        self.hiddenChangeStates = level.hiddenChangeStates

        self.cacheStateSets = []

        self.progressMessages = level.progressMessages
        """ PROTOGEN OPTIONS %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
        """
        # Options Cache
        self.CCconservativeInv = config.CCconservativeInv
        self.nonstalling = config.nonstalling

        self.maxNestingDepthCC = config.maxNestingDepthCC

        # Options Directory
        self.DCconservativeInv = config.DCconservativeInv
        self.maxNestingDepthDC = config.maxNestingDepthDC

        # Options Access Assignment
        self.stableStatesOnly = config.stableStatesOnly

        self.conservativeAccess = config.conservativeAccess
        self.ignoreDeferedStates = config.ignoreDeferedStates

        self.maxagressiveAccess = config.maxagressiveAccess

        # Options Merging
        self.enableStateMerging = config.enableStateMerging
        self.maxMergingIter = config.maxMergingIter
        self.MergeStates = MergeStates(self.maxMergingIter, self.access,
                                       self.evict)

        self._ProcessArch()
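
The constructor above reads a fixed set of option attributes from config. Below is a sketch of a matching ProtoConfig (the class name appears in Example #5); the attribute names are exactly those accessed above, but the default values are illustrative assumptions, not the tool's real defaults.

class ProtoConfig:
    def __init__(self):
        # Options Cache
        self.CCconservativeInv = False
        self.nonstalling = True
        self.maxNestingDepthCC = 1
        # Options Directory
        self.DCconservativeInv = False
        self.maxNestingDepthDC = 1
        # Options Access Assignment
        self.stableStatesOnly = True
        self.conservativeAccess = False
        self.ignoreDeferedStates = False
        self.maxagressiveAccess = False
        # Options Merging
        self.enableStateMerging = True
        self.maxMergingIter = 4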
Example #4
    def __init__(self, node, debug_enable: bool = False):

        dbg = Debug(debug_enable)
        assert isinstance(node, CommonTree)

        self.structure = node

        definitions = node.getChildren()
        self.name = definitions[0].getText()
        self.variables = {}
        self.getvarnames(definitions)
        dbg.pdebug("Object: " + node.getText() + " " + self.name + " -> varNames: " + str(self.variables))
Example #5
def RunProtoGen(file, filename):
    graphdbgparser = True
    graphdbgalgorithm = True

    path = os.getcwd()
    MakeDir("ProtoGen_Output")

    dbg = Debug(True)

    develop = False
    if not develop:
        # Frontend
        dbg.pheader("PROTOGEN PARSER")
        Parser = ProtoParser(file, filename, graphdbgparser)
        if not Parser.checkAccessBehaviourDefined():
            print("Exiting.")
            sys.exit(1)
        if not Parser.checkAllStatesReachable():
            print("Exiting.")
            sys.exit(1)

        level = Level(Parser, "L1")

        dbg.pheader(dbg.spacer + "PROTOGEN ALGORITHM")

        # Save the level object so later development runs can skip the frontend:
        with open('objs.pkl', 'wb') as f:
            pickle.dump(level, f)
    else:
        # Load the level object saved by a previous run:
        with open('objs.pkl', 'rb') as f:
            level = pickle.load(f)

    talgo = time.time()

    Algorithm = ProtoAlgorithm(level, ProtoConfig(), graphdbgalgorithm)
    dbg.pdebug("ProtoGen runtime: " + str(time.time() - talgo) + '\n')
    dbg.pheader(dbg.spacer + "PROTOGEN BACKEND")

    os.chdir(path)

    return level, Algorithm
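
A hedged usage sketch: RunProtoGen expects the protocol file's contents plus its name, matching how Example #7 reads its inputs. "MOSI.pcc" is a placeholder.

contents = open("MOSI.pcc").read()
level, algorithm = RunProtoGen(contents, "MOSI.pcc")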
Example #6
    def __init__(self, debug_enable: bool = True):
        Debug.__init__(self, debug_enable)
Example #7
test_cases = [
    ["MOSI.pcc", "MOSI.pcc"],
    ["MOESI.pcc", "MOESI.pcc"],
]

# Configure the test run cache counts
L1_cache_count = 2
L2_cache_count = 2

start_time = time.time()

# Change to the protocol directory
os.chdir(os.getcwd() + "/Protocols/")
# Save default path
path = os.getcwd()

dbg = Debug(True)

for test_case in test_cases:
    dbg.psection("\n NEXT TEST CASE \n")

    with open(test_case[0]) as f:
        file1 = f.read()
    with open(test_case[1]) as f:
        file2 = f.read()

    print(test_case[0].split(".")[0] + "_&_" + test_case[1].split(".")[0])

    MakeDir("HIR_" + test_case[0].split(".")[0] + "_&_" +
            test_case[1].split(".")[0])

    # Parse the input files
    ParserLL = ProtoParser(file1, test_case[0])
    ParserHL = ProtoParser(file2, test_case[1])
Example #8
def RunMurphi(level: Level, filename: str, dbg_enabled: bool = True):
    dbg = Debug(dbg_enabled)
    path = os.getcwd()

    MakeDir("Murphi")

    # Build the system description for this level
    cache_machine = Machine(level.cache)
    directory_machine = Machine(level.directory)
    sys_description = Cluster(
        (cache_machine, cache_machine, cache_machine, directory_machine), 'C1',
        [level])
    clusters = [sys_description]

    SSP_MurphiDesc = MurphiModular(clusters, True)
    MurphiDesc = MurphiModular(clusters, False)

    print("Murphi files were generated in: " + os.getcwd())

    dbg.pheader(dbg.spacer + "Murphi make and run")
    talgo = time.time()

    dbg.pheader(dbg.spacer + "Starting SSP verification" + '\n')
    # Short delay so the model checker's output queues can flush before the
    # Python tool continues printing
    time.sleep(0.005)
    SSP_MurphiDesc.runMurphi(True, filename)
    ssp_success = False

    try:
        resultsfile = open("SSP_" + filename.split(".")[0] + "_results.txt")
    except FileNotFoundError:
        dbg.pwarning(
            "SSP results file does not exist - did it compile correctly?" +
            "\nPlease check SSP_" + filename.split(".")[0] + "_compile.txt" +
            " for details, and make sure your input is correctly specified.")
    else:
        if "No error found" in resultsfile.read():
            time.sleep(0.005)
            dbg.psuccess("SSP verified without error")
            ssp_success = True
        else:
            dbg.pwarning("SSP did not verify correctly; please see SSP_" +
                         filename.split(".")[0] +
                         "_results.txt for the Murphi output.")
        resultsfile.close()
    if ssp_success:
        dbg.pheader(dbg.spacer + "Starting full protocol verification" + '\n')
        time.sleep(0.005)
        MurphiDesc.runMurphi(False, filename)
        try:
            resultsfile = open(filename.split(".")[0] + "_results.txt")
        except FileNotFoundError:
            dbg.pwarning(
                "Results file does not exist - did it compile correctly?" +
                "\nPlease check " + filename.split(".")[0] + "_compile.txt " +
                "for details, and make sure your input is correctly specified."
            )
        else:
            if "No error found" in resultsfile.read():
                time.sleep(0.005)
                dbg.psuccess("Full protocol verified without error")
            else:
                dbg.pwarning(
                    "Full protocol did not verify correctly; please see " +
                    filename.split(".")[0] +
                    "_results.txt for the Murphi output.")
            resultsfile.close()
    else:
        dbg.pwarning(
            "Aborting full protocol verification as SSP deemed incorrect.")
    dbg.pdebug(dbg.spacer + "Murphi runtime: " + str(time.time() - talgo) +
               '\n')

    os.chdir(path)
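
A hedged usage sketch, assuming a level produced by RunProtoGen (Example #5); the filename placeholder follows Example #7.

level, algorithm = RunProtoGen(open("MOSI.pcc").read(), "MOSI.pcc")
RunMurphi(level, "MOSI.pcc", dbg_enabled=True)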
Example #9
    def __init__(self,
                 parser,
                 level_id: str,
                 proto_type=None,
                 run_model_checker: bool = True,
                 debug_enabled: bool = False):

        Debug.__init__(self, debug_enabled)

        self.pheader("LEVEL: " + level_id)

        self.parser = parser
        self.level_id: str = level_id
        self.proto_type: str = proto_type
        self.state_tuple_list: List[SystemTuple] = []
        self.dir_access_classification_map = {}

        # Preprocessing: make messages unique
        self.renamedMessages = {}
        self.progressMessages = []
        self.hiddenChangeStates = []

        self.model_checker = None

        # Classify communication
        CommunicationClassification().classify_parser(parser)

        self.message_objects: List[PCCObject] = parser.getMessageNodes()

        # Cache
        caches = list(parser.getCacheIdentifiers())
        assert len(caches) <= 1, \
            "Maximum number of architectures per parser supported is 1"
        self.cache = Architecture(parser, caches[0])

        # Cache preprocessing
        self.find_progress_messages(self.cache.state_sets)
        self.find_hidden_progess_messages(self.cache.state_sets)

        # Decide between memory and directory
        snoop = False
        directory = list(parser.getDirIdentifiers())
        if not directory:
            snoop = True
            directory = list(parser.getMemIdentifiers())

        assert len(directory) <= 1, \
            "Maximum number of architectures per parser supported is 1"
        self.directory = Architecture(parser, directory[0])

        if not snoop:
            # Detect directory message type conflicts
            self.renamedMessages.update(
                self.process_remote_requests(self.cache.state_sets,
                                             self.cache.raw_traces))
            self.cache.update_traces()

            # Resolve directory message type conflicts
            self.process_request_messages(self.renamedMessages,
                                          self.directory.state_sets)
            # Only handle silent upgrades and different request messages
            self.complete_transitions(self.cache.state_sets,
                                      self.directory.state_sets, False)

        # Update the traces
        self.cache.update_traces()
        self.directory.update_traces()

        # INITIAL MODEL CHECKING
        self.init_tuple = StateTuple(self.cache.init_state,
                                     self.directory.init_state,
                                     self.cache.init_state)
        self.level_name = "Level: " + str(level_id) + " | " + str(
            self.cache) + " && " + str(self.directory)

        if run_model_checker:
            self.initial_model_checking()

        # Complete all eviction transitions at the directory level to account for concurrency
        self.complete_transitions(self.cache.state_sets,
                                  self.directory.state_sets)
        # Update the traces
        self.cache.update_traces()
        self.directory.update_traces()

        self.cache.renamed_messages = self.renamedMessages
        self.directory.renamed_messages = self.renamedMessages

        # Classify cache and directory transitions

        # Murphi
        self.unique_id = []

        # Update machine names in operations
        self.update_mach_name_operation_append(level_id)

        self.network_class = ClassNetworkClassification(
            parser, self.cache.transitions)
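
A hedged usage sketch mirroring Example #5's Level(Parser, "L1"); passing run_model_checker=False is an assumed way to skip the initial model checking step, based on the signature above.

parser = ProtoParser(open("MOSI.pcc").read(), "MOSI.pcc")
level = Level(parser, "L1", run_model_checker=False)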
Example #10
def RunMurphiModular(clusters: List[Cluster],
                     filename: str,
                     run_SSP: bool = True,
                     memory: int = 0,
                     dbg_enabled: bool = True):

    dbg = Debug(dbg_enabled)
    path = os.getcwd()

    if not memory:
        # Calculate the free memory in megabytes, leaving about 8 GB of
        # headroom for the rest of the system
        memory = int(virtual_memory().free / 2**20) - 8000

    MakeDir("Murphi")

    SSP_MurphiDesc = MurphiModular(clusters, True)
    MurphiDesc = MurphiModular(clusters, False)

    print("Murphi files were generated in: " + os.getcwd())

    dbg.pheader(dbg.spacer + "Murphi make and run")
    talgo = time.time()

    dbg.pheader(dbg.spacer + "Starting SSP verification" + '\n')
    # Short delay so the model checker's output queues can flush before the
    # Python tool continues printing
    time.sleep(0.005)
    ssp_success = False

    if run_SSP:
        SSP_MurphiDesc.runMurphi(True, filename)

        try:
            resultsfile = open("SSP_" + filename.split(".")[0] +
                               "_results.txt")
        except FileNotFoundError:
            dbg.pwarning(
                "SSP results file does not exist - did it compile correctly?" +
                "\nPlease check SSP_" + filename.split(".")[0] +
                "_compile.txt" +
                " for details, and make sure your input is correctly specified."
            )
        else:
            if "No error found" in resultsfile.read():
                time.sleep(0.005)
                dbg.psuccess("SSP verified without error")
                ssp_success = True
            else:
                dbg.pwarning("SSP did not verify correctly; please see SSP_" +
                             filename.split(".")[0] +
                             "_results.txt for the Murphi output.")
            resultsfile.close()

    if ssp_success or not run_SSP:
        dbg.pheader(dbg.spacer + "Starting full protocol verification" + '\n')
        time.sleep(0.005)
        MurphiDesc.runMurphi(False, filename, memory)
        try:
            resultsfile = open(filename.split(".")[0] + "_results.txt")
        except FileNotFoundError:
            dbg.pwarning(
                "Results file does not exist - did it compile correctly?" +
                "\nPlease check " + filename.split(".")[0] + "_compile.txt " +
                "for details, and make sure your input is correctly specified."
            )
        else:
            result_str = resultsfile.read()
            if result_str.rfind("No error found") != -1:
                time.sleep(0.005)
                dbg.psuccess("Full protocol verified without error")
            else:
                if result_str.rfind("Closed hash table full.") != -1 or \
                        result_str.rfind("Internal Error: Too many active states.") != -1:
                    dbg.pwarning(
                        "Murphi memory full, please allocate more memory for the verification thread: \n See"
                        + filename.split(".")[0] +
                        "_results.txt for the Murphi output. \n")
                else:
                    dbg.pwarning(
                        "Full protocol did not verify correctly; please see " +
                        filename.split(".")[0] +
                        "_results.txt for the Murphi output.")
            resultsfile.close()
    else:
        dbg.pwarning(
            "Aborting full protocol verification as SSP deemed incorrect.")
    dbg.pdebug(dbg.spacer + "Murphi runtime: " + str(time.time() - talgo) +
               '\n')

    # Reset the path
    os.chdir(path)
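
A hedged usage sketch: the cluster is built from Machine and Cluster exactly as in Example #8, though the machine counts here are illustrative.

cache_machine = Machine(level.cache)
directory_machine = Machine(level.directory)
cluster = Cluster((cache_machine, cache_machine, directory_machine), 'C1', [level])
RunMurphiModular([cluster], "MOSI.pcc", run_SSP=True)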
Example #11
    def __init__(self, low_level: Level, high_level: Level, dbg_term: bool = False, dbg_graph: bool = False):

        Debug.__init__(self, dbg_term)
        self.dbg_graph = dbg_graph

        # for each ll state, add corresponding dir state and possible higher level cache states
        # higher level cache permissions are greater or equal to lower level state permissions

        # Lower level cache optimization
        # Optimization flag: accesses that hit in the higher level cache are
        # treated as immediate hits and do not generate additional transient states
        self.ll_access_immed_hit = True

        # Higher level cache optimization
        # Optimization flag: remote accesses are not conveyed to the lower level
        # cache if the final state of the high-level trace has lower access
        # permissions than the current lower level cache state
        self.hl_remote_immed_hit = True

        self.pessimistic_access = True

        self.complete_defer = False

        self.conservative_access_ll_request = True
        self.conservative_access_hl_request = True

        self.unique_id_str = "HIERA"
        self.unique_id = [self.unique_id_str]

        # Update messages to avoid conflicts for non-stalling implementations
        self.detect_message_dependencies(low_level, high_level)

        self.low_level: Level = copy.deepcopy(low_level)
        self.high_level: Level = copy.deepcopy(high_level)

        self.low_level.update_mach_name_operation(self.low_level.directory.get_unique_id_str(), self.unique_id_str)
        self.high_level.update_mach_name_operation(self.high_level.cache.get_unique_id_str(), self.unique_id_str)

        self.merge_data_objects()

        self.low_level.update_traces()
        self.high_level.update_traces()

        self.init_tuple: HieraStateTuple = HieraStateTuple(self.low_level.init_tuple, self.high_level.cache.init_state)

        # Get access mappings from the lower level cache controller
        self.ll_access_map = access_request_mapping(self.low_level.cache.state_sets)

        self.cc_dir_to_cc_state_map: Dict[State, List[State]] = {}
        self.cc_dir_to_dir_state_map: Dict[State, List[State]] = {}

        # Do the child init here
        HieraStateSpaceGen.__init__(self, self.low_level, self.high_level)
        """ Generate State Transitions """
        HieraTransGen.__init__(self, self.low_level, self.high_level)
        HieraGraph.__init__(self, self.init_tuple, self.low_level, self.high_level)

        # Make new states based on new_state_tuples
        self.cc_dir_transitions = self.cc_dir_fsm_states()

        # Generate new stable state nodes
        self.stable_states = self.create_cc_dir_states(self.access_state_tuples + self.remote_state_tuples +
                                                       self.ll_evict_state_tuples + self.hl_evict_state_tuples)
        if self.dbg:
            self.print_debug_info()

        # Name of the new initial state
        new_state_name = self.new_state_name(self.init_tuple.ll_dir_start_state.state,
                                             self.init_tuple.hl_cc_start_state.state)

        # Make a new architecture for the lower level directory controller
        self.low_level.directory = Architecture(self.low_level.directory.parser,
                                                self.low_level.directory.arch_name,
                                                self.cc_dir_transitions,
                                                list(self.stable_states.keys()),
                                                new_state_name,
                                                self.unique_id,
                                                self.low_level.directory.data_constant,
                                                self.low_level.directory.data_object)

        # Make a new architecture for the higher level cache controller
        self.high_level.cache = Architecture(self.high_level.cache.parser,
                                             self.high_level.cache.arch_name,
                                             self.cc_dir_transitions,
                                             list(self.stable_states.keys()),
                                             new_state_name,
                                             self.unique_id,
                                             self.high_level.cache.data_constant,
                                             self.high_level.cache.data_object)

        self.replace_archs = [low_level.directory, high_level.cache]
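
The class above is shown without its name or base classes; HieraComposer below is a placeholder label for it. A hedged sketch composing two levels, reusing the parsers built in Example #7.

low_level = Level(ParserLL, "L1")
high_level = Level(ParserHL, "L2")
hierarchy = HieraComposer(low_level, high_level, dbg_term=False, dbg_graph=False)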
Example #12
start_time = time.time()

spacer = "\n\n\n"
graphdbgParser = False
graphdbgAlgorithm = True

dev_parser = True
dev_make_clusters = True
dev_hgen = True
dev_pgen = True
dev_override_backend = True

dbg = Debug(True)

if len(sys.argv[1:]) == 0:
    os.chdir('../..')
    os.chdir(os.getcwd() + "/Protocols/")
else:
    assert len(sys.argv[1:]) == 1, "Too many arguments"
    filename = sys.argv[1]

filename1 = "MOESI.pcc"
filename2 = "MOESI.pcc"

# Save default path
path = os.getcwd()

if dev_parser: