def get_constraint_path(self, constraint_fname):
    """get_constraint_path

    Given a constraint file name, determine where that constraint file is.
    The current working directory is searched first, then the board
    directory under $SAPLIB_BASE/hdl/boards/<board>.

    Args:
        constraint_fname: the name of the constraint file to search for

    Return:
        path of the constraint

    Raises:
        IOError: if the constraint file cannot be found
    """
    sap_abs_base = os.getenv("SAPLIB_BASE")
    board_name = self.project_tags["board"]
    sap_abs_base = saputils.resolve_linux_path(sap_abs_base)

    # Prefer a constraint file sitting in the current working directory.
    # Compute each candidate path once instead of rebuilding the same
    # concatenation for the exists() test and the return.
    local_path = os.getcwd() + "/" + constraint_fname
    if exists(local_path):
        return local_path

    # Search through the board directory.
    board_path = (sap_abs_base + "/hdl/boards/" + board_name +
                  "/" + constraint_fname)
    if exists(board_path):
        return board_path

    raise IOError("Path for the constraint file %s not found" %
                  constraint_fname)
def get_constraint_path(self, constraint_fname):
    """Find the location of a named constraint file.

    The current working directory is checked first, then the board
    directory under $SAPLIB_BASE.

    Args:
        constraint_fname: the name of the constraint file to search for
    Return:
        path of the constraint
    Raises:
        IOError
    """
    board_name = self.project_tags["board"]
    base = saputils.resolve_linux_path(os.getenv("SAPLIB_BASE"))

    # Candidate locations, in priority order: current directory first,
    # then the board directory.
    search_paths = (
        os.getcwd() + "/" + constraint_fname,
        base + "/hdl/boards/" + board_name + "/" + constraint_fname,
    )
    for path in search_paths:
        if exists(path):
            return path

    raise IOError("Path for the constraint file %s not found" %
                  constraint_fname)
def read_file(self, filename):
    """Open the file with the specified name and location and read it
    into self.buf.

    Args:
        filename: path of the file to read (a leading ~ is expanded)

    Return:
        True on success, False if the file could not be opened.
    """
    try:
        # Use a context manager so the file handle is always closed;
        # the original leaked the handle on every call.
        with open(saputils.resolve_linux_path(filename)) as filein:
            self.buf = filein.read()
    except IOError:
        return False
    return True
def test_resolve_linux_path(self):
    """given a filename with or without the ~ return a filename with the ~ expanded"""
    # An absolute path must come back unchanged.  Assert on the values
    # directly instead of assertEqual(x == y, True) so a failure message
    # shows both paths.
    filename1 = "/filename1"
    filename = saputils.resolve_linux_path(filename1)
    self.assertEqual(filename, "/filename1")

    # A leading ~ must be expanded to the user's home directory.
    filename2 = "~/filename2"
    filename = saputils.resolve_linux_path(filename2)
    correct_result = os.path.expanduser("~") + "/filename2"
    self.assertEqual(correct_result, filename)
def test_resolve_linux_path(self):
    """given a filename with or without the ~ return a filename with the ~ expanded"""
    import saputils

    # An absolute path must come back unchanged.  Compare values
    # directly (not assertEqual(x == y, True)) so failures are readable.
    filename1 = "/filename1"
    filename = saputils.resolve_linux_path(filename1)
    self.assertEqual(filename, "/filename1")

    # A leading ~ must be expanded to the user's home directory.
    filename2 = "~/filename2"
    filename = saputils.resolve_linux_path(filename2)
    correct_result = os.path.expanduser("~") + "/filename2"
    self.assertEqual(correct_result, filename)
def get_constraint_path(self, constraint_fname):
    """Return the path of *constraint_fname*, or "" when it is not found.

    Searches the current working directory first, then the board
    directory under $SAPLIB_BASE.
    """
    board_name = self.project_tags["CONSTRAINTS"]["board"]
    base = saputils.resolve_linux_path(os.getenv("SAPLIB_BASE"))

    # Prefer a file sitting in the current working directory.
    local_path = os.getcwd() + "/" + constraint_fname
    if exists(local_path):
        return local_path

    # Otherwise search through the board directory.
    board_path = base + "/hdl/boards/" + board_name + "/" + constraint_fname
    if exists(board_path):
        return board_path

    # Not found anywhere -- callers treat "" as "missing".
    return ""
def test_generate_project(self):
    """Load the example GPIO config, save it to $HOME, and generate."""
    config = os.getenv("SAPLIB_BASE") + "/example_project/gpio_example.json"
    self.sc.load_config_file(config)

    # Write the config back out to the home directory and point the
    # controller at the saved copy before generating.
    home_dir = saputils.resolve_linux_path("~")
    out_file = home_dir + "/test_out.json"
    self.sc.save_config_file(out_file)
    self.sc.set_config_file_location(out_file)
    self.sc.generate_project()

    # FIXME: How do I actually test out the project generation?
    self.assertEqual(True, True)
def test_generate_project(self):
    """Generate a project from the example GPIO configuration."""
    json_path = (os.getenv("SAPLIB_BASE") +
                 "/example_project/gpio_example.json")
    self.sc.load_config_file(json_path)

    # Save the loaded config under the user's home directory, then
    # regenerate from that saved copy.
    home_dir = saputils.resolve_linux_path("~")
    self.sc.save_config_file(home_dir + "/test_out.json")
    self.sc.set_config_file_location(home_dir + "/test_out.json")
    self.sc.generate_project()

    # FIXME: How do I actually test out the project generation?
    self.assertEqual(True, True)
def test_save_config_file(self):
    """Load the example config, save it, and verify the saved file is
    readable."""
    file_name = os.getenv("SAPLIB_BASE") + "/example_project/gpio_example.json"
    self.sc.load_config_file(file_name)
    home_dir = saputils.resolve_linux_path("~")
    self.sc.save_config_file(home_dir + "/test_out.json")

    # The saved file must exist and be readable.  Use self.fail() with
    # the underlying error instead of the opaque assertEqual(True, False),
    # and a context manager so the handle is closed on any exit path.
    try:
        with open(home_dir + "/test_out.json") as filein:
            filein.read()
    except IOError as err:
        self.fail("File Error: " + str(err))
def test_save_config_file(self):
    """Round-trip the example config: load it, save it, read it back."""
    src = (os.getenv("SAPLIB_BASE") +
           "/example_project/gpio_example.json")
    self.sc.load_config_file(src)

    home_dir = saputils.resolve_linux_path("~")
    self.sc.save_config_file(home_dir + "/test_out.json")

    # Reading the saved file back proves the save actually happened.
    try:
        handle = open(home_dir + "/test_out.json")
        json_string = handle.read()
        handle.close()
    except IOError as err:
        print("File Error: " + str(err))
        self.assertEqual(True, False)
    self.assertEqual(True, True)
def test_apply_stave_tags_to_project(self):
    """Attach a second arbitrator and a binding, push the slave tags
    back into the project, and check everything landed."""
    cfg = os.getenv("SAPLIB_BASE") + "/example_project/arb_example.json"
    self.sc.load_config_file(cfg)
    self.sc.initialize_graph()

    # The example only attaches one of the two arbitrators; attach the
    # second one here.
    tft_rtl = saputils.find_rtl_file_location("tft.v")
    slave_name = self.sc.add_slave("tft1", tft_rtl, SlaveType.PERIPHERAL)
    host_name = self.sc.sgm.get_slave_name_at(SlaveType.PERIPHERAL, 1)
    self.sc.add_arbitrator_by_name(host_name, "lcd", slave_name)

    # Add a binding for the tft screen.
    self.sc.set_binding(slave_name, "data_en", "lcd_e")

    # Now the in-memory model differs significantly from what was
    # loaded in; write it back into the project tags.
    self.sc.set_project_name("arbitrator_project")
    self.sc.apply_slave_tags_to_project()
    pt = self.sc.project_tags

    # The new slave took.
    self.assertIn("tft1", pt["SLAVES"].keys())
    # The arbitrator was set up...
    self.assertIn("lcd", pt["SLAVES"]["console"]["BUS"].keys())
    # ...and is attached to the slave.
    self.assertEqual("tft1", pt["SLAVES"]["console"]["BUS"]["lcd"])
    # The binding was written.
    self.assertIn("data_en", pt["SLAVES"]["tft1"]["bind"].keys())

    home_dir = saputils.resolve_linux_path("~")
    self.sc.save_config_file(home_dir + "/arb_test_out.json")
def test_apply_stave_tags_to_project(self):
    """Wire up a second arbitrator plus a binding, then verify the
    project tags after apply_slave_tags_to_project()."""
    file_name = (os.getenv("SAPLIB_BASE") +
                 "/example_project/arb_example.json")
    self.sc.load_config_file(file_name)
    self.sc.initialize_graph()

    # This example only attaches one of the two arbitrators.
    # Attach the second arbitrator.
    filename = saputils.find_rtl_file_location("tft.v")
    slave_name = self.sc.add_slave("tft1", filename, SlaveType.PERIPHERAL)
    host_name = self.sc.sgm.get_slave_name_at(SlaveType.PERIPHERAL, 1)
    arb_master = "lcd"
    self.sc.add_arbitrator_by_name(host_name, arb_master, slave_name)

    # Add a binding for the tft screen.
    self.sc.set_binding(slave_name, "data_en", "lcd_e")

    # Now we have something significantly different than what was
    # loaded in.
    self.sc.set_project_name("arbitrator_project")
    self.sc.apply_slave_tags_to_project()
    pt = self.sc.project_tags

    # Check: the new slave took; the arbitrator was set up; the
    # arbitrator is attached to the slave; the binding was written.
    self.assertIn("tft1", pt["SLAVES"].keys())
    self.assertIn("lcd", pt["SLAVES"]["console"]["BUS"].keys())
    self.assertEqual("tft1", pt["SLAVES"]["console"]["BUS"]["lcd"])
    self.assertIn("data_en", pt["SLAVES"]["tft1"]["bind"].keys())

    self.sc.save_config_file(saputils.resolve_linux_path("~") +
                             "/arb_test_out.json")
def process_file(self, filename="", file_dict={}, directory="", debug=False): """process_file read in a file, modify it (if necessary), then write it to the location specified by the directory variable Args: filename: the name of the file to process file_dict: dictionary associated with this file directory: output directory Return: Raises: ModuleFactoryError IOError """ if (len(filename) == 0): raise ModuleFactoryError("No filename specified") if (len(directory) == 0): raise ModuleFactoryError("No output directory specified") if (filename.endswith(".v")): self.verilog_file_list.append(filename) if debug: print "in process file" print "\t%s" % filename #maybe load a tags?? #using the location value in the file_dict find the file and #pull it into a buf self.buf = "" file_location = "" #There are two types of files #ones that are copied over from a location #ones that are generated by scripts #The file is specified by a location and basically needs to be copied over if file_dict.has_key("location"): file_location = os.getenv( "SAPLIB_BASE") + "/" + file_dict["location"] if (debug): print("getting file: " + filename + " from location: " + file_location) found_file = False try: filein = open( saputils.resolve_linux_path(file_location + "/" + filename)) self.buf = filein.read() filein.close() found_file = True except IOError as err: pass if not found_file: if debug: print "searching for file...", try: absfilename = saputils.find_rtl_file_location(filename) filein = open(absfilename) self.buf = filein.read() filein.close() except: if debug: print "Failed to find file" raise ModuleFactoryError("File %s not found searched %s and in the HDL dir \n \ (%s)" % (filename, \ file_location, \ os.getenv("SAPLIB_BASE") + "/hdl/rtl")) if debug: print "found file!" print "file content: " + self.buf #File is generated by a script elif (not file_dict.has_key("gen_script")): raise ModuleFactoryError( "File %s does not declare a location or a script! 
Check the template file" % filename) if (debug): print "Project name: " + self.tags["PROJECT_NAME"] #if the generation flag is set in the dictionary if (file_dict.has_key("gen_script")): if (debug): print "found the generation script" print "run generation script: " + file_dict["gen_script"] #open up the new gen module cl = __import__("gen") if debug: print "cl: " + str(cl) Gen = getattr(cl, "Gen") if debug: print "Gen: " + str(Gen) self.gen_module = __import__(file_dict["gen_script"]) gen_success_flag = False #find the script and dynamically add it for name in dir(self.gen_module): obj = getattr(self.gen_module, name) # print "object type: " + str(obj) #XXX: debug section start if debug: print "name: " + name if isclass(obj): if debug: print "\tobject type: " + str(obj) print "\tis class" if issubclass(obj, cl.Gen): if debug: print "\t\tis subclass" #XXX: debug section end if isclass(obj) and issubclass(obj, Gen) and obj is not Gen: self.gen = obj() if debug: print "obj = " + str(self.gen) self.buf = self.gen.gen_script(tags=self.tags, buf=self.buf) gen_success_flag = True if not gen_success_flag: raise ModuleFactoryError( "Failed to execute the generation script %s" % file_dict["gen_script"]) else: #no script to execute, just tags self.apply_tags() if debug: print self.buf #write the file to the specified directory if (len(self.buf) > 0): result = self.write_file(directory, filename) if (self.has_dependencies(filename)): deps = self.get_list_of_dependencies(filename) for d in deps: try: result = saputils.find_module_filename(d) if (len(result) == 0): print "Error: couldn't find dependency filename" continue f = saputils.find_module_filename(d) if (f not in self.verilog_dependency_list and f not in self.verilog_file_list): if debug: print "found dependency: " + f self.verilog_dependency_list.append(f) except ModuleNotFound as err: continue
def generate_project(self, config_file_name, debug=False): """Generate the folders and files for the project Using the project tags and template tags this function generates all the directories and files of the project. It will go through the template structure and determine what files need to be added and call either a generation script (in the case of \"top.v\") or simply copy the file over (in the case of a peripheral or memory module. Args: config_file_name: name of the JSON configuration file Return: True: Success False: Failure Raises: TypeError IOError SapError """ #reading the project config data into the the project tags #XXX: This should be changed to an exception begin raised and not a True False statement self.read_config_file(config_file_name) board_dict = saputils.get_board_config(self.project_tags["board"]) cfiles = [] pt = self.project_tags if "constraint_files" in pt.keys(): cfiles = pt["constraint_files"] #if the user didn't specify any constraint files #load the default if len(cfiles) == 0: cfiles = board_dict["default_constraint_files"] #extrapolate the bus template #XXX: Need to check all the constraint files self.project_tags["CLOCK_RATE"] = saputils.read_clock_rate(cfiles[0]) self.read_template(self.project_tags["TEMPLATE"]) #set all the tags within the filegen structure if debug: print "set all tags wihin filegen structure" self.filegen.set_tags(self.project_tags) #generate the project directories and files saputils.create_dir(self.project_tags["BASE_DIR"]) if debug: print "generated the first dir" #generate the arbitrator tags, this is important because the top #needs the arbitrator tags arb_tags = saparbitrator.generate_arbitrator_tags( self.project_tags, False) self.project_tags["ARBITRATORS"] = arb_tags #print "Parent dir: " + self.project_tags["BASE_DIR"] for key in self.template_tags["PROJECT_TEMPLATE"]["files"]: self.recursive_structure_generator( self.template_tags["PROJECT_TEMPLATE"]["files"], key, self.project_tags["BASE_DIR"]) if 
debug: print "generating project directories finished" if debug: print "generate the arbitrators" self.generate_arbitrators() #Generate all the slaves for slave in self.project_tags["SLAVES"]: fdict = {"location": ""} file_dest = self.project_tags["BASE_DIR"] + "/rtl/bus/slave" fn = self.project_tags["SLAVES"][slave]["filename"] try: self.filegen.process_file(filename=fn, file_dict=fdict, directory=file_dest, debug=debug) except ModuleFactoryError as err: print "ModuleFactoryError while generating a slave: %s" % str( err) #each slave if ("MEMORY" in self.project_tags): for mem in self.project_tags["MEMORY"]: fdict = {"location": ""} file_dest = self.project_tags["BASE_DIR"] + "/rtl/bus/slave" fn = self.project_tags["MEMORY"][mem]["filename"] try: self.filegen.process_file(filename=fn, file_dict=fdict, directory=file_dest) except ModuleFactoryError as err: print "ModuleFactoryError while generating a memory slave: %s" % str( err) #Copy the user specified constraint files to the constraints directory for constraint_fname in cfiles: sap_abs_base = os.getenv("SAPLIB_BASE") abs_proj_base = saputils.resolve_linux_path( self.project_tags["BASE_DIR"]) constraint_path = self.get_constraint_path(constraint_fname) if (len(constraint_path) == 0): print "Couldn't find constraint: " + constraint_fname + ", searched in current directory and " + sap_abs_base + " /hdl/" + self.project_tags[ "board"] continue shutil.copy(constraint_path, abs_proj_base + "/constraints/" + constraint_fname) #Generate the IO handler interface_filename = self.project_tags["INTERFACE"]["filename"] fdict = {"location": ""} file_dest = self.project_tags["BASE_DIR"] + "/rtl/bus/interface" result = self.filegen.process_file(filename=interface_filename, file_dict=fdict, directory=file_dest) if debug: print "copy over the dependencies..." 
print "verilog files: " for f in self.filegen.verilog_file_list: print f print "dependent files: " for d in self.filegen.verilog_dependency_list: fdict = {"location": ""} file_dest = self.project_tags["BASE_DIR"] + "/dependencies" result = self.filegen.process_file(filename=d, file_dict=fdict, directory=file_dest) if debug: print d return True
def generate_project(self, config_file_name, debug=False): """Recursively go through template structure and generate the folders and files""" #reading the project config data into the the project tags result = self.read_config_file(config_file_name) if (not result): if (debug): print "failed to read in project config file" return False #extrapolate the bus template result = self.read_template(self.project_tags["TEMPLATE"]) if (not result): if (debug): print "failed to read in template file" return False #set all the tags within the filegen structure if debug: print "set all tags wihin filegen structure" self.filegen.set_tags(self.project_tags) #generate the project directories and files saputils.create_dir(self.project_tags["BASE_DIR"]) if debug: print "generated the first dir" #print "Parent dir: " + self.project_tags["BASE_DIR"] for key in self.template_tags["PROJECT_TEMPLATE"]["files"]: self.recursive_structure_generator( self.template_tags["PROJECT_TEMPLATE"]["files"], key, self.project_tags["BASE_DIR"]) if debug: print "generating project directories finished" #Generate all the slaves for slave in self.project_tags["SLAVES"]: fdict = {"location":""} file_dest = self.project_tags["BASE_DIR"] + "/rtl/bus/slave" result = self.filegen.process_file(filename = slave, file_dict = fdict, directory=file_dest) #each slave #Copy the user specified constraint files to the constraints directory for constraint_fname in self.project_tags["CONSTRAINTS"]["constraint_files"]: sap_abs_base = os.getenv("SAPLIB_BASE") abs_proj_base = saputils.resolve_linux_path(self.project_tags["BASE_DIR"]) constraint_path = self.get_constraint_path(constraint_fname) if (len(constraint_path) == 0): print "Couldn't find constraint: " + constraint_fname + ", searched in current directory and " + sap_abs_base + " /hdl/" + self.project_tags["CONSTRAINTS"]["board"] continue shutil.copy (constraint_path, abs_proj_base + "/constraints/" + constraint_fname) #Generate the IO handler interface_filename = 
self.project_tags["INTERFACE"] fdict = {"location":""} file_dest = self.project_tags["BASE_DIR"] + "/rtl/bus/interface" result = self.filegen.process_file(filename = interface_filename, file_dict=fdict , directory=file_dest) if debug: print "copy over the dependencies..." print "verilog files: " for f in self.filegen.verilog_file_list: print f print "dependent files: " for d in self.filegen.verilog_dependency_list: fdict = {"location":""} file_dest = self.project_tags["BASE_DIR"] + "/dependencies" result = self.filegen.process_file(filename = d, file_dict = fdict, directory = file_dest) print d return True
def generate_project(self, config_file_name, debug=False): """Generate the folders and files for the project Using the project tags and template tags this function generates all the directories and files of the project. It will go through the template structure and determine what files need to be added and call either a generation script (in the case of \"top.v\") or simply copy the file over (in the case of a peripheral or memory module. Args: config_file_name: name of the JSON configuration file Return: True: Success False: Failure Raises: TypeError IOError SapError """ #reading the project config data into the the project tags #XXX: This should be changed to an exception begin raised and not a True False statement self.read_config_file(config_file_name) board_dict = saputils.get_board_config(self.project_tags["board"]) cfiles = [] pt = self.project_tags if "constraint_files" in pt.keys(): cfiles = pt["constraint_files"] #if the user didn't specify any constraint files #load the default if len(cfiles) == 0: cfiles = board_dict["default_constraint_files"] #extrapolate the bus template #XXX: Need to check all the constraint files self.project_tags["CLOCK_RATE"] = saputils.read_clock_rate(cfiles[0]) self.read_template(self.project_tags["TEMPLATE"]) #set all the tags within the filegen structure if debug: print "set all tags wihin filegen structure" self.filegen.set_tags(self.project_tags) #generate the project directories and files saputils.create_dir(self.project_tags["BASE_DIR"]) if debug: print "generated the first dir" #generate the arbitrator tags, this is important because the top #needs the arbitrator tags arb_tags = saparbitrator.generate_arbitrator_tags(self.project_tags, False) self.project_tags["ARBITRATORS"] = arb_tags #print "Parent dir: " + self.project_tags["BASE_DIR"] for key in self.template_tags["PROJECT_TEMPLATE"]["files"]: self.recursive_structure_generator( self.template_tags["PROJECT_TEMPLATE"]["files"], key, self.project_tags["BASE_DIR"]) if 
debug: print "generating project directories finished" if debug: print "generate the arbitrators" self.generate_arbitrators() #Generate all the slaves for slave in self.project_tags["SLAVES"]: fdict = {"location":""} file_dest = self.project_tags["BASE_DIR"] + "/rtl/bus/slave" fn = self.project_tags["SLAVES"][slave]["filename"] try: self.filegen.process_file(filename = fn, file_dict = fdict, directory=file_dest, debug=debug) except ModuleFactoryError as err: print "ModuleFactoryError while generating a slave: %s" % str(err) #each slave if ("MEMORY" in self.project_tags): for mem in self.project_tags["MEMORY"]: fdict = {"location":""} file_dest = self.project_tags["BASE_DIR"] + "/rtl/bus/slave" fn = self.project_tags["MEMORY"][mem]["filename"] try: self.filegen.process_file(filename = fn, file_dict = fdict, directory = file_dest) except ModuleFactoryError as err: print "ModuleFactoryError while generating a memory slave: %s" % str(err) #Copy the user specified constraint files to the constraints directory for constraint_fname in cfiles: sap_abs_base = os.getenv("SAPLIB_BASE") abs_proj_base = saputils.resolve_linux_path(self.project_tags["BASE_DIR"]) constraint_path = self.get_constraint_path(constraint_fname) if (len(constraint_path) == 0): print "Couldn't find constraint: " + constraint_fname + ", searched in current directory and " + sap_abs_base + " /hdl/" + self.project_tags["board"] continue shutil.copy (constraint_path, abs_proj_base + "/constraints/" + constraint_fname) #Generate the IO handler interface_filename = self.project_tags["INTERFACE"]["filename"] fdict = {"location":""} file_dest = self.project_tags["BASE_DIR"] + "/rtl/bus/interface" result = self.filegen.process_file(filename = interface_filename, file_dict=fdict , directory=file_dest) if debug: print "copy over the dependencies..." 
print "verilog files: " for f in self.filegen.verilog_file_list: print f print "dependent files: " for d in self.filegen.verilog_dependency_list: fdict = {"location":""} file_dest = self.project_tags["BASE_DIR"] + "/dependencies" result = self.filegen.process_file(filename = d, file_dict = fdict, directory = file_dest) if debug: print d return True