def gen_script(self, tags={}, buf="", user_paths=[], debug=False):
    """
    Rewrite the 'set projName' and 'set device' lines of a TCL build
    script.  The script text contains literal {} characters, so a
    string.Template substitution is impractical; the two lines are
    located with str.partition and rebuilt by hand instead.
    """
    part_number = utils.get_board_config(tags["board"])["fpga_part_number"]
    out_buf = ""

    # Rewrite the value on the 'set projName' line with the project name.
    head, marker, tail = buf.partition("set projName")
    if tail:
        out_buf = "%s%s %s\n%s" % (head, marker, tags["PROJECT_NAME"],
                                   tail.partition("\n")[2])

    # Rewrite the value on the 'set device' line with the FPGA part number.
    # NOTE(review): this branch re-partitions the ORIGINAL buffer, so when
    # both markers are present only the device rewrite survives — behavior
    # preserved from the original implementation; confirm it is intended.
    head, marker, tail = buf.partition("set device")
    if tail:
        out_buf = "%s%s %s\n%s" % (head, marker, part_number,
                                   tail.partition("\n")[2])

    return out_buf
def _get_default_board_config(self, board):
    """Load the default project configuration for *board*.

    Reads ``<board dir>/<board>/board/default.json`` (or the file named by
    the board's "default_project" key) and, for each declared parent
    board, folds the parent's default configuration in underneath: list
    values are merged with duplicates removed, and keys the board does not
    define are inherited from the parent.  Values the board itself
    defines always win.

    Args:
        board (str): name of the board

    Returns:
        OrderedDict: merged board configuration

    Raises:
        IOError: if a default configuration file cannot be opened
        ValueError: if a configuration file is not valid JSON
    """
    bd = utils.get_board_config(board)
    board_dict = self._load_default_project_dict(board, bd)
    for parent in bd.get("parent_board", []):
        if self.s:
            self.s.Verbose("Start with default board configuration for board: %s" % parent)
        pd = utils.get_board_config(parent)
        parent_dict = self._load_default_project_dict(parent, pd)
        for key in parent_dict:
            if key in board_dict:
                if isinstance(board_dict[key], list):
                    # Merge parent and board lists, dropping duplicates.
                    # NOTE: list(set(...)) does not preserve order.
                    merged = parent_dict[key] + board_dict[key]
                    board_dict[key] = list(set(merged))
            else:
                # Inherit keys the board itself does not define.
                board_dict[key] = parent_dict[key]
    return board_dict

def _load_default_project_dict(self, board, config):
    """Open and parse a board's default project JSON file (helper).

    *config* is the board's configuration dict; its "default_project"
    key, when present, overrides the conventional default.json filename.
    """
    name = config.get("default_project", "default.json")
    path = os.path.join(utils.get_board_directory(board), board,
                        "board", name)
    # 'with' guarantees the handle is closed (the original leaked it).
    with open(path, "r") as f:
        return json.load(f, object_pairs_hook=OrderedDict)
def get_parent_board_paths(board_dict):
    """Collect verilog search paths contributed by a board's parent boards.

    Recursively walks the "parent_board" chain, gathering each parent's
    declared "paths" plus the parent's board directory.

    Args:
        board_dict (dict): a board configuration dictionary

    Returns:
        list of str: unique parent paths (order not guaranteed).  An
        empty list when the board declares no "parent_board" — the
        original returned None here, which would break callers that
        .extend() the result.
    """
    paths = []
    if "parent_board" not in board_dict:
        # Return an empty list (not None) so callers can always .extend().
        return paths
    for parent in board_dict["parent_board"]:
        bd = utils.get_board_config(parent)
        if "paths" in bd:
            paths.extend(bd["paths"])
        paths.append(utils.get_board_directory(parent))
        # Recurse into grandparents, if any.
        if "parent_board" in bd:
            paths.extend(get_parent_board_paths(bd))
    return list(set(paths))
def _get_default_board_config(self, board):
    """Load the default project configuration for ``board``.

    Reads ``<board dir>/<board>/board/default.json`` (or the file named
    by the board's "default_project" key) and, for each declared parent
    board, folds the parent's default configuration in: list values are
    merged with duplicates removed, and keys missing from the board's
    dictionary are inherited from the parent.  Values the board itself
    defines win.
    """
    path = os.path.join(utils.get_board_directory(board), board)
    bd = utils.get_board_config(board)
    # "default_project" overrides the conventional default.json filename.
    name = "default.json"
    if "default_project" in bd:
        name = bd["default_project"]
    default_path = os.path.join(path, "board", name)
    #print "Path: %s" % default_path
    # object_pairs_hook preserves the key order of the JSON file.
    board_dict = json.load(open(default_path, "r"), object_pairs_hook=OrderedDict)
    if "parent_board" in bd:
        for parent in bd["parent_board"]:
            if self.s: self.s.Verbose("Start with default board configuration for board: %s" % parent)
            pd = utils.get_board_config(parent)
            name = "default.json"
            if "default_project" in pd:
                name = pd["default_project"]
            path = os.path.join(utils.get_board_directory(parent), parent)
            default_path = os.path.join(path, "board", name)
            parent_dict = json.load(open(default_path, "r"), object_pairs_hook=OrderedDict)
            for key in parent_dict:
                #if self.s: self.s.Verbose("Working on %s key: %s" % (parent, key))
                if key in board_dict:
                    if isinstance(board_dict[key], list):
                        #print "board_dict [%s]: %s" % (key, str(board_dict[key]))
                        #print "parent_dict [%s]: %s" % (key, str(parent_dict[key]))
                        # Merge the board's and the parent's lists.
                        l = parent_dict[key] + board_dict[key]
                        #print "L: %s" % l
                        #Remove duplicates
                        # NOTE: list(set(...)) does not preserve order.
                        board_dict[key] = list(set(l))
                else:
                    # Inherit keys the board itself does not define.
                    board_dict[key] = parent_dict[key]
    #print "board dict: %s" % str(board_dict)
    return board_dict
def gen_script(self, tags={}, buf="", user_paths=[], debug=False):
    """Generate the project_defines.v

    Substitutes the project name, device count, vendor identifier and
    clock rate into the project_defines.v template.

    Args:
        tags (dict): project configuration; must contain "board",
            "PROJECT_NAME" and "CLOCK_RATE"
        buf (str): the template text
        user_paths (list): extra verilog search paths for get_rom_length
        debug (bool): verbose debugging flag

    Returns:
        str: the populated template text

    Raises:
        KeyError: if the template references a placeholder that is not
            supplied to substitute()
    """
    template = Template(buf)
    # Default vendor define; specialized per build tool below.
    vendor_string = "VENDOR_FPGA"
    board_dict = utils.get_board_config(tags["board"])
    #if (tags["BUILD_TOOL"] == "xilinx"):
    if board_dict["build_tool"] == "xilinx":
        # The original also assigned buf = template.safe_substitute(...)
        # here, but that result was unconditionally overwritten by the
        # substitute() call below — dead store removed.
        vendor_string = "VENDOR_XILINX"
    num_of_entities = self.get_rom_length(tags, user_paths, debug)
    buf = template.substitute(PROJECT_NAME=tags["PROJECT_NAME"],
                              NUMBER_OF_DEVICES=num_of_entities,
                              VENDOR_FPGA=vendor_string,
                              CLOCK_RATE=tags["CLOCK_RATE"])
    return buf
def gen_script (self, tags={}, buf="", user_paths = [], debug = False):
    """Generate the project_defines.v

    Fills the template with the project name, device count, vendor
    define and clock rate.
    """
    defines_template = Template(buf)
    board_dict = utils.get_board_config(tags["board"])
    #if (tags["BUILD_TOOL"] == "xilinx"):
    if board_dict["build_tool"] == "xilinx":
        buf = defines_template.safe_substitute(VENDOR_FPGA = "VENDOR_XILINX")
        vendor = "VENDOR_XILINX"
    else:
        vendor = "VENDOR_FPGA"
    entity_count = self.get_rom_length(tags, user_paths, debug)
    return defines_template.substitute(
        PROJECT_NAME = tags["PROJECT_NAME"],
        NUMBER_OF_DEVICES = entity_count,
        VENDOR_FPGA = vendor,
        CLOCK_RATE = tags["CLOCK_RATE"])
def gen_script(self, tags = {}, buf = "", user_paths = [], debug = False):
    """Generate the JSON build-configuration script.

    Parses *buf* as JSON, stamps in the board's part number, board name
    and top module, then layers build flags on top: first the board's
    own "build_flags", then any "build_flags" from the project *tags*
    (applied per tool, with the nested "flags" dictionary merged key by
    key rather than replaced).

    Args:
        tags (dict): project configuration; must contain "board"
        buf (str): JSON text of the base configuration
        user_paths (list): unused here; kept for interface parity
        debug (bool): unused here; kept for interface parity

    Returns:
        str: the updated configuration serialized as JSON
    """
    #print "tags: %s" % str(tags)
    #Get the configuration in a dictionary form
    config = json.loads(buf)
    board_config = utils.get_board_config(tags["board"])
    #Specify the part
    config["device"] = board_config["fpga_part_number"]
    config["board"] = board_config["board_name"]
    #if "image" in board_config.keys():
    #config["board_image"] = board_config["image"]
    #Specify the top module
    config["top_module"] = "top"
    #Board-level build flags replace whatever the base config declared
    if "build_flags" in board_config:
        for key in board_config["build_flags"]:
            # (The original assigned config[key] = {} here and then
            # immediately overwrote it — dead store removed.)
            config[key] = board_config["build_flags"][key]
    if "build_flags" in tags:
        #Go through each of the tool keys and add all the tools in there ('ie xst)
        for tool_key in tags["build_flags"]:
            tool_flags = tags["build_flags"][tool_key]
            #Add the tool if it doesn't already exist
            config.setdefault(tool_key, {})
            for key in tool_flags:
                if key == "flags":
                    continue
                config[tool_key][key] = tool_flags[key]
            #Merge the nested "flags" dictionary rather than replacing it
            if "flags" in tool_flags:
                config[tool_key].setdefault("flags", {})
                for key in tool_flags["flags"]:
                    config[tool_key]["flags"][key] = tool_flags["flags"][key]
    return json.dumps(config, sort_keys = True, indent = 4,
                      separators = (',', ':'))
def gen_script(self, tags = {}, buf = "", user_paths = [], debug = False):
    """
    Rewrite the 'set projName' and 'set device' lines of a TCL script.

    Need to do a replace, but due to the {} in the script file it doesn't
    make sense to use the template, so str.partition is used to locate
    and rebuild the two lines by hand.
    """
    board_dict = utils.get_board_config(tags["board"])
    fpga_pn = board_dict["fpga_part_number"]
    out_buf = ""
    # Rewrite the value on the 'set projName' line with the project name.
    if (len(buf.partition("set projName")[2]) > 0):
        temp_pre = buf.partition("set projName")[0] + "set projName"
        temp_buf = buf.partition("set projName")[2]
        out_buf = temp_pre + " " + tags["PROJECT_NAME"] + "\n" + temp_buf.partition("\n")[2]
    #add the device
    # NOTE(review): this branch re-partitions the ORIGINAL buf, so when
    # both markers are present only the device rewrite survives — confirm
    # that is intentional.
    if (len(buf.partition("set device")[2]) > 0):
        temp_pre = buf.partition("set device")[0] + "set device"
        temp_buf = buf.partition("set device")[2]
        out_buf = temp_pre + " " + fpga_pn + "\n" + temp_buf.partition("\n")[2]
    return out_buf
def generate_project(self, config_filename, output_directory = None):
    """Generate the folders and files for the project

    Using the project tags and template tags this function generates all
    the directories and files of the project. It will go through the
    template structure and determine what files need to be added and call
    either a generation script (in the case of \"top.v\") or simply copy
    the file over (in the case of a peripheral or memory module.

    Args:
        config_filename: name of the JSON configuration file
        output_directory: Path to override default output directory

    Return:
        True: Success
        False: Failure

    Raises:
        TypeError
        IOError
        SapError
    """
    status = self.s
    if status: status.Debug("Openning site manager")
    sm = site_manager.SiteManager()
    # NOTE(review): neither of these lookups appears to be used below —
    # confirm whether they exist for a SiteManager side effect.
    path_dicts = sm.get_paths_dict()
    self.read_config_file(config_filename)
    path_dict = sm.get_paths_dict()
    # An explicit output directory overrides the configured BASE_DIR.
    if output_directory is not None:
        self.project_tags["BASE_DIR"] = output_directory
    board_dict = utils.get_board_config(self.project_tags["board"], debug = False)
    cfiles = []
    cpaths = []
    # Build the verilog search path: local paths, then board paths, then
    # paths contributed by parent boards (de-duplicated each time).
    self.user_paths.extend(utils.get_local_verilog_paths())
    self.user_paths = list(set(self.user_paths))
    if "paths" in board_dict:
        self.user_paths.extend(board_dict["paths"])
        self.user_paths = list(set(self.user_paths))
    if "parent_board" in board_dict:
        self.user_paths.extend(get_parent_board_paths(board_dict))
        self.user_paths = list(set(self.user_paths))
    if self.s: self.s.Verbose("Paths:")
    for p in self.user_paths:
        if self.s: self.s.Verbose("\t%s" %p)

    # Go through the board dict and see if there is anything that needs to be
    # incorporated into the project tags.  Missing keys are copied over;
    # dict values are merged key-by-key and lists are extended.
    for key in board_dict:
        if key not in self.project_tags:
            self.project_tags[key] = board_dict[key]
        elif isinstance(self.project_tags[key], OrderedDict):
            for k in board_dict[key]:
                self.project_tags[key][k] = board_dict[key][k]
        elif isinstance(self.project_tags[key], list):
            self.project_tags[key].extend(board_dict[key])
        elif isinstance(self.project_tags[key], dict):
            for k in board_dict[key]:
                self.project_tags[key][k] = board_dict[key][k]

    self.filegen = ModuleProcessor(user_paths = self.user_paths)
    pt = self.project_tags
    if "constraint_files" not in pt.keys():
        pt["constraint_files"] = []
    cfiles = pt["constraint_files"]
    # Resolve each user-specified constraint file, falling back to the
    # parent boards when the default board does not provide it.
    for c in cfiles:
        board = self.project_tags["board"]
        try:
            cpaths.append(utils.get_constraint_file_path(board, c))
        except IBuilderError as e:
            if self.s: self.s.Verbose("Could not find constraint: %s in default board searching parent board..." % c)
            #path = os.path.join(utils.get_board_directory(board), board)
            board_dict = utils.get_board_config(board)
            mname = "default.json"
            if "default_project" in board_dict:
                mname = board_dict["default_project"]
            if "parent_board" in board_dict:
                for parent in board_dict["parent_board"]:
                    if self.s: self.s.Verbose("\tsearching: %s @ %s..." % (parent, utils.get_board_directory(parent)))
                    filepath = utils.get_constraint_file_path(parent, c)
                    if filepath is None:
                        if self.s: self.s.Verbose("Did not file file: %s in parent board" % (c))
                    else:
                        if self.s: self.s.Verbose("Found file, located at: %s" % filepath)
                        cpaths.append(utils.get_constraint_file_path(parent, c, debug = True))

    #if the user didn't specify any constraint files
    #load the default
    if len(cfiles) == 0:
        if status: status.Debug("loading default constraints for: %s" % board_dict["board_name"])
        cfiles = board_dict["default_constraint_files"]
        for c in cfiles:
            cpaths.append(utils.get_constraint_file_path(self.project_tags["board"], c))
            #cpaths.append(utils.get_constraint_file_path(c))

    #extrapolate the bus template
    # The clock rate comes either from the board config ("clockrate") or
    # from the first constraint file that declares one.
    clock_rate = ""
    if "clockrate" in board_dict:
        if self.s: self.s.Info("User Specified a clockrate of: %d" % board_dict["clockrate"])
        clock_rate = str(board_dict["clockrate"])
    if len(clock_rate) == 0:
        for c in cpaths:
            clock_rate = utils.read_clock_rate(c)
            if len(clock_rate) > 0:
                #First place I can find the clock rate drop out
                break
    if len (clock_rate) == 0:
        if self.s:
            self.s.Fatal("Unable to find the clock rate in any of the constraint"
                         "files: %s" % str(cpaths))
        raise PGE("Unable to find the clock rate in any of the constraint"
                  "files: %s" % str(cpaths))
    #self.project_tags["CLOCK_RATE"] = utils.read_clock_rate(cpaths[0])
    self.project_tags["CLOCK_RATE"] = clock_rate
    self.read_template_file(self.project_tags["TEMPLATE"])
    #set all the tags within the filegen structure
    if status: status.Verbose("set all tags wihin filegen structure")
    self.filegen.set_tags(self.project_tags)
    #generate the project directories and files
    utils.create_dir(self.project_tags["BASE_DIR"])
    if status:
        status.Verbose("generated project base direcotry: %s" %
                       utils.resolve_path(self.project_tags["BASE_DIR"]))
    #generate the arbiter tags, this is important because the top
    #needs the arbiter tags
    arb_tags = arbiter.generate_arbiter_tags(self.project_tags, False)
    self.project_tags["ARBITERS"] = arb_tags
    #print "Parent dir: " + self.project_tags["BASE_DIR"]
    # Walk the template's file tree, creating directories / files.
    for key in self.template_tags["PROJECT_TEMPLATE"]["files"]:
        self.recursive_structure_generator(
            self.template_tags["PROJECT_TEMPLATE"]["files"],
            key,
            self.project_tags["BASE_DIR"])
    if status: status.Verbose("finished generating project directories")
    if arbiter.is_arbiter_required(self.project_tags):
        if status: status.Verbose("generate the arbiters")
        self.generate_arbiters()

    #Generate all the slaves
    for slave in self.project_tags["SLAVES"]:
        fdict = {"location":""}
        file_dest = os.path.join(self.project_tags["BASE_DIR"], "rtl", "bus", "slave")
        #file_dest = self.project_tags["BASE_DIR"] + "/rtl/bus/slave"
        fn = self.project_tags["SLAVES"][slave]["filename"]
        try:
            #self.filegen.process_file(filename = fn, file_dict = fdict, directory=file_dest, debug = True)
            self.filegen.process_file(filename = fn, file_dict = fdict, directory=file_dest, debug = False)
        except ModuleFactoryError as err:
            if status: status.Error("ModuleFactoryError while generating slave: %s" % str(err))
            raise ModuleFactoryError(err)
        slave_dir = os.path.split(utils.find_rtl_file_location(fn, self.user_paths))[0]
        # Copy any constraint files the slave itself declares.
        if "constraint_files" in self.project_tags["SLAVES"][slave]:
            # NOTE(review): this aliases (does not copy) self.user_paths,
            # so the append below also grows self.user_paths — confirm
            # that is intended.
            temp_paths = self.user_paths
            temp_paths.append(slave_dir)
            for c in self.project_tags["SLAVES"][slave]["constraint_files"]:
                file_location = utils.get_constraint_file_path(self.project_tags["board"], c, temp_paths)
                dest_path = utils.resolve_path(self.project_tags["BASE_DIR"])
                shutil.copy (file_location, os.path.join(dest_path, "constraints", c))
        # Copy any cores the slave declares (expected in ../cores next to
        # the slave's RTL directory).
        if "cores" in self.project_tags["SLAVES"][slave]:
            if status: status.Verbose("User Specified an core(s) for a slave")
            for c in self.project_tags["SLAVES"][slave]["cores"]:
                file_location = os.path.join(slave_dir, os.pardir, "cores", c)
                if not os.path.exists(file_location):
                    raise PGE("Core: %s does not exist" % file_location)
                dest_path = utils.resolve_path(self.project_tags["BASE_DIR"])
                shutil.copy (file_location, os.path.join(dest_path, "cores", c))
    #each slave

    # Memory devices are processed the same way as slaves.
    if "MEMORY" in self.project_tags:
        for mem in self.project_tags["MEMORY"]:
            fdict = {"location":""}
            file_dest = os.path.join(self.project_tags["BASE_DIR"], "rtl", "bus", "mem")
            #file_dest = self.project_tags["BASE_DIR"] + "/rtl/bus/mem"
            fn = self.project_tags["MEMORY"][mem]["filename"]
            try:
                self.filegen.process_file(filename = fn, file_dict = fdict, directory = file_dest)
            except ModuleFactoryError as err:
                if status: status.Error("ModuleFactoryError while generating memory: %s" % str(err))
                raise ModuleFactoryError(err)
            mem_dir = os.path.split(utils.find_rtl_file_location(fn, self.user_paths))[0]
            if "constraint_files" in self.project_tags["MEMORY"][mem]:
                # NOTE(review): same self.user_paths aliasing as above.
                temp_paths = self.user_paths
                temp_paths.append(mem_dir)
                for c in self.project_tags["MEMORY"][mem]["constraint_files"]:
                    file_location = utils.get_constraint_file_path(self.project_tags["board"], c, temp_paths)
                    dest_path = utils.resolve_path(self.project_tags["BASE_DIR"])
                    shutil.copy (file_location, os.path.join(dest_path, "constraints", c))
            if "cores" in self.project_tags["MEMORY"][mem]:
                if status: status.Verbose("User Specified an core(s) for a mem")
                for c in self.project_tags["MEMORY"][mem]["cores"]:
                    file_location = os.path.join(mem_dir, os.pardir, "cores", c)
                    if not os.path.exists(file_location):
                        raise PGE("Core: %s does not exist" % file_location)
                    dest_path = utils.resolve_path(self.project_tags["BASE_DIR"])
                    shutil.copy (file_location, os.path.join(dest_path, "cores", c))

    '''
    if 'infrastructure' in self.project_tags:
        if status: status.Verbose("User Specified an infrastructure(s)")
        for entry in self.project_tags["infrastructure"]:
            name = entry.keys()[0]
            im = entry[name]
            path = utils.get_board_directory(name)
            path = os.path.join(path, name, "infrastructure", im["filename"])
            ftdict = {"location":path}
            file_dest = os.path.join(self.project_tags["BASE_DIR"], "rtl", "bus", "infrastructure")
            fn = im["filename"]
            self.filegen.process_file(filename = fn, file_dict = fdict, directory=file_dest)
    '''

    # Project-level cores: search each named board's cores directory.
    if "cores" in self.project_tags:
        if status: status.Verbose("User Specified an core(s)")
        for entry in self.project_tags["cores"]:
            # NOTE: entry.keys()[0] is a Python 2 idiom (dict views are
            # not indexable on Python 3).
            name = entry.keys()[0]
            for core in entry[name]:
                file_location = None
                path = utils.get_board_directory(name)
                path = os.path.join(path, name, "cores")
                for root, dirs, files in os.walk(path):
                    if core in files:
                        file_location = os.path.join(root, core)
                        break
                if not os.path.exists(file_location):
                    raise PGE("Core: %s does not exist" % file_location)
                dest_path = utils.resolve_path(self.project_tags["BASE_DIR"])
                shutil.copy (file_location, os.path.join(dest_path, "cores", core))

    #Copy the user specified constraint files to the constraints directory
    for constraint_fname in cfiles:
        abs_proj_base = utils.resolve_path(self.project_tags["BASE_DIR"])
        constraint_path = utils.get_constraint_file_path(self.project_tags["board"], constraint_fname)
        # When the name is an existing path, keep just the basename.
        if os.path.exists(constraint_fname):
            constraint_fname = os.path.split(constraint_fname)[-1]
            #constraint_path = constraint_fname
        if len(constraint_path) == 0:
            print ("Couldn't find constraint: %s, searched in the current directory and %s/hdl/%s" % (constraint_fname, abs_proj_base, self.project_tags["board"]))
            continue
        shutil.copy (constraint_path, os.path.join(abs_proj_base, "constraints", constraint_fname))
        #shutil.copy (constraint_path, abs_proj_base + "/constraints/" + constraint_fname)

    #Generate the IO handler
    interface_filename = self.project_tags["INTERFACE"]["filename"]
    fdict = {"location":""}
    #file_dest = self.project_tags["BASE_DIR"] + "/rtl/bus/interface"
    file_dest = os.path.join(self.project_tags["BASE_DIR"], "rtl", "bus", "interface")
    result = self.filegen.process_file(filename = interface_filename, file_dict=fdict , directory=file_dest)
    if status:
        status.Verbose("verilog files: ")
        for f in self.filegen.verilog_file_list:
            status.Verbose("\t%s" % f)
    #if len(self.filegen.verilog_dependency_list) > 0:
    #    status.Verbose("\t\tdependent files: ")
    if status: status.Verbose("copy over the dependencies...")
    # Dependencies discovered while processing the verilog files.
    for d in self.filegen.verilog_dependency_list:
        fdict = {"location":""}
        file_dest = os.path.join(self.project_tags["BASE_DIR"], "rtl", "dependencies")
        result = self.filegen.process_file(filename = d, file_dict = fdict, directory = file_dest, debug = True)
        if status: status.Verbose("\tDependent File: %s" % d)

    # Dependencies the user listed explicitly in the configuration.
    if "dependencies" in self.project_tags:
        if status: status.Verbose("User Specified dependencies")
        for name in self.project_tags["dependencies"]:
            if status: status.Verbose("\tUser Dependent File: %s" % name)
            fdict = {"location":""}
            file_dest = os.path.join(self.project_tags["BASE_DIR"], "rtl", "dependencies")
            result = self.filegen.process_file(filename = name, file_dict = fdict, directory = file_dest, debug = True)
    return True
def generate_project(self, config_filename, output_directory=None):
    """Generate the folders and files for the project

    Using the project tags and template tags this function generates all
    the directories and files of the project. It will go through the
    template structure and determine what files need to be added and call
    either a generation script (in the case of \"top.v\") or simply copy
    the file over (in the case of a peripheral or memory module.

    Args:
        config_filename: name of the JSON configuration file
        output_directory: Path to override default output directory

    Return:
        True: Success
        False: Failure

    Raises:
        TypeError
        IOError
        SapError
    """
    # NOTE(review): this definition duplicates the other generate_project
    # in this file (reformatted) — confirm which one should survive.
    status = self.s
    if status: status.Debug("Openning site manager")
    sm = site_manager.SiteManager()
    # NOTE(review): neither of these lookups appears to be used below —
    # confirm whether they exist for a SiteManager side effect.
    path_dicts = sm.get_paths_dict()
    self.read_config_file(config_filename)
    path_dict = sm.get_paths_dict()
    # An explicit output directory overrides the configured BASE_DIR.
    if output_directory is not None:
        self.project_tags["BASE_DIR"] = output_directory
    board_dict = utils.get_board_config(self.project_tags["board"],
                                        debug=False)
    cfiles = []
    cpaths = []
    # Build the verilog search path: local paths, then board paths, then
    # paths contributed by parent boards (de-duplicated each time).
    self.user_paths.extend(utils.get_local_verilog_paths())
    self.user_paths = list(set(self.user_paths))
    if "paths" in board_dict:
        self.user_paths.extend(board_dict["paths"])
        self.user_paths = list(set(self.user_paths))
    if "parent_board" in board_dict:
        self.user_paths.extend(get_parent_board_paths(board_dict))
        self.user_paths = list(set(self.user_paths))
    if self.s: self.s.Verbose("Paths:")
    for p in self.user_paths:
        if self.s: self.s.Verbose("\t%s" % p)

    # Go through the board dict and see if there is anything that needs to be
    # incorporated into the project tags.  Missing keys are copied over;
    # dict values are merged key-by-key and lists are extended.
    for key in board_dict:
        if key not in self.project_tags:
            self.project_tags[key] = board_dict[key]
        elif isinstance(self.project_tags[key], OrderedDict):
            for k in board_dict[key]:
                self.project_tags[key][k] = board_dict[key][k]
        elif isinstance(self.project_tags[key], list):
            self.project_tags[key].extend(board_dict[key])
        elif isinstance(self.project_tags[key], dict):
            for k in board_dict[key]:
                self.project_tags[key][k] = board_dict[key][k]

    self.filegen = ModuleProcessor(user_paths=self.user_paths)
    pt = self.project_tags
    if "constraint_files" not in pt.keys():
        pt["constraint_files"] = []
    cfiles = pt["constraint_files"]
    # Resolve each user-specified constraint file, falling back to the
    # parent boards when the default board does not provide it.
    for c in cfiles:
        board = self.project_tags["board"]
        try:
            cpaths.append(utils.get_constraint_file_path(board, c))
        except IBuilderError as e:
            if self.s:
                self.s.Verbose(
                    "Could not find constraint: %s in default board searching parent board..."
                    % c)
            #path = os.path.join(utils.get_board_directory(board), board)
            board_dict = utils.get_board_config(board)
            mname = "default.json"
            if "default_project" in board_dict:
                mname = board_dict["default_project"]
            if "parent_board" in board_dict:
                for parent in board_dict["parent_board"]:
                    if self.s:
                        self.s.Verbose(
                            "\tsearching: %s @ %s..." %
                            (parent, utils.get_board_directory(parent)))
                    filepath = utils.get_constraint_file_path(parent, c)
                    if filepath is None:
                        if self.s:
                            self.s.Verbose(
                                "Did not file file: %s in parent board" % (c))
                    else:
                        if self.s:
                            self.s.Verbose("Found file, located at: %s" %
                                           filepath)
                        cpaths.append(
                            utils.get_constraint_file_path(parent, c,
                                                           debug=True))

    #if the user didn't specify any constraint files
    #load the default
    if len(cfiles) == 0:
        if status:
            status.Debug("loading default constraints for: %s" %
                         board_dict["board_name"])
        cfiles = board_dict["default_constraint_files"]
        for c in cfiles:
            cpaths.append(
                utils.get_constraint_file_path(self.project_tags["board"], c))
            #cpaths.append(utils.get_constraint_file_path(c))

    #extrapolate the bus template
    # The clock rate comes either from the board config ("clockrate") or
    # from the first constraint file that declares one.
    clock_rate = ""
    if "clockrate" in board_dict:
        if self.s:
            self.s.Info("User Specified a clockrate of: %d" %
                        board_dict["clockrate"])
        clock_rate = str(board_dict["clockrate"])
    if len(clock_rate) == 0:
        for c in cpaths:
            clock_rate = utils.read_clock_rate(c)
            if len(clock_rate) > 0:
                #First place I can find the clock rate drop out
                break
    if len(clock_rate) == 0:
        if self.s:
            self.s.Fatal(
                "Unable to find the clock rate in any of the constraint"
                "files: %s" % str(cpaths))
        raise PGE("Unable to find the clock rate in any of the constraint"
                  "files: %s" % str(cpaths))
    #self.project_tags["CLOCK_RATE"] = utils.read_clock_rate(cpaths[0])
    self.project_tags["CLOCK_RATE"] = clock_rate
    self.read_template_file(self.project_tags["TEMPLATE"])
    #set all the tags within the filegen structure
    if status: status.Verbose("set all tags wihin filegen structure")
    self.filegen.set_tags(self.project_tags)
    #generate the project directories and files
    utils.create_dir(self.project_tags["BASE_DIR"])
    if status:
        status.Verbose("generated project base direcotry: %s" %
                       utils.resolve_path(self.project_tags["BASE_DIR"]))
    #generate the arbiter tags, this is important because the top
    #needs the arbiter tags
    arb_tags = arbiter.generate_arbiter_tags(self.project_tags, False)
    self.project_tags["ARBITERS"] = arb_tags
    #print "Parent dir: " + self.project_tags["BASE_DIR"]
    # Walk the template's file tree, creating directories / files.
    for key in self.template_tags["PROJECT_TEMPLATE"]["files"]:
        self.recursive_structure_generator(
            self.template_tags["PROJECT_TEMPLATE"]["files"], key,
            self.project_tags["BASE_DIR"])
    if status: status.Verbose("finished generating project directories")
    if arbiter.is_arbiter_required(self.project_tags):
        if status: status.Verbose("generate the arbiters")
        self.generate_arbiters()

    #Generate all the slaves
    for slave in self.project_tags["SLAVES"]:
        fdict = {"location": ""}
        file_dest = os.path.join(self.project_tags["BASE_DIR"], "rtl", "bus",
                                 "slave")
        #file_dest = self.project_tags["BASE_DIR"] + "/rtl/bus/slave"
        fn = self.project_tags["SLAVES"][slave]["filename"]
        try:
            #self.filegen.process_file(filename = fn, file_dict = fdict, directory=file_dest, debug = True)
            self.filegen.process_file(filename=fn,
                                      file_dict=fdict,
                                      directory=file_dest,
                                      debug=False)
        except ModuleFactoryError as err:
            if status:
                status.Error(
                    "ModuleFactoryError while generating slave: %s" %
                    str(err))
            raise ModuleFactoryError(err)
        slave_dir = os.path.split(
            utils.find_rtl_file_location(fn, self.user_paths))[0]
        # Copy any constraint files the slave itself declares.
        if "constraint_files" in self.project_tags["SLAVES"][slave]:
            # NOTE(review): this aliases (does not copy) self.user_paths,
            # so the append below also grows self.user_paths — confirm
            # that is intended.
            temp_paths = self.user_paths
            temp_paths.append(slave_dir)
            for c in self.project_tags["SLAVES"][slave][
                    "constraint_files"]:
                file_location = utils.get_constraint_file_path(
                    self.project_tags["board"], c, temp_paths)
                dest_path = utils.resolve_path(
                    self.project_tags["BASE_DIR"])
                shutil.copy(file_location,
                            os.path.join(dest_path, "constraints", c))
        # Copy any cores the slave declares (expected in ../cores next to
        # the slave's RTL directory).
        if "cores" in self.project_tags["SLAVES"][slave]:
            if status:
                status.Verbose("User Specified an core(s) for a slave")
            for c in self.project_tags["SLAVES"][slave]["cores"]:
                file_location = os.path.join(slave_dir, os.pardir, "cores",
                                             c)
                if not os.path.exists(file_location):
                    raise PGE("Core: %s does not exist" % file_location)
                dest_path = utils.resolve_path(
                    self.project_tags["BASE_DIR"])
                shutil.copy(file_location,
                            os.path.join(dest_path, "cores", c))
    #each slave

    # Memory devices are processed the same way as slaves.
    if "MEMORY" in self.project_tags:
        for mem in self.project_tags["MEMORY"]:
            fdict = {"location": ""}
            file_dest = os.path.join(self.project_tags["BASE_DIR"], "rtl",
                                     "bus", "mem")
            #file_dest = self.project_tags["BASE_DIR"] + "/rtl/bus/mem"
            fn = self.project_tags["MEMORY"][mem]["filename"]
            try:
                self.filegen.process_file(filename=fn,
                                          file_dict=fdict,
                                          directory=file_dest)
            except ModuleFactoryError as err:
                if status:
                    status.Error(
                        "ModuleFactoryError while generating memory: %s" %
                        str(err))
                raise ModuleFactoryError(err)
            mem_dir = os.path.split(
                utils.find_rtl_file_location(fn, self.user_paths))[0]
            if "constraint_files" in self.project_tags["MEMORY"][mem]:
                # NOTE(review): same self.user_paths aliasing as above.
                temp_paths = self.user_paths
                temp_paths.append(mem_dir)
                for c in self.project_tags["MEMORY"][mem]["constraint_files"]:
                    file_location = utils.get_constraint_file_path(
                        self.project_tags["board"], c, temp_paths)
                    dest_path = utils.resolve_path(
                        self.project_tags["BASE_DIR"])
                    shutil.copy(file_location,
                                os.path.join(dest_path, "constraints", c))
            if "cores" in self.project_tags["MEMORY"][mem]:
                if status:
                    status.Verbose("User Specified an core(s) for a mem")
                for c in self.project_tags["MEMORY"][mem]["cores"]:
                    file_location = os.path.join(mem_dir, os.pardir, "cores",
                                                 c)
                    if not os.path.exists(file_location):
                        raise PGE("Core: %s does not exist" % file_location)
                    dest_path = utils.resolve_path(
                        self.project_tags["BASE_DIR"])
                    shutil.copy(file_location,
                                os.path.join(dest_path, "cores", c))

    '''
    if 'infrastructure' in self.project_tags:
        if status: status.Verbose("User Specified an infrastructure(s)")
        for entry in self.project_tags["infrastructure"]:
            name = entry.keys()[0]
            im = entry[name]
            path = utils.get_board_directory(name)
            path = os.path.join(path, name, "infrastructure", im["filename"])
            ftdict = {"location":path}
            file_dest = os.path.join(self.project_tags["BASE_DIR"], "rtl", "bus", "infrastructure")
            fn = im["filename"]
            self.filegen.process_file(filename = fn, file_dict = fdict, directory=file_dest)
    '''

    # Project-level cores: search each named board's cores directory.
    if "cores" in self.project_tags:
        if status: status.Verbose("User Specified an core(s)")
        for entry in self.project_tags["cores"]:
            # NOTE: entry.keys()[0] is a Python 2 idiom (dict views are
            # not indexable on Python 3).
            name = entry.keys()[0]
            for core in entry[name]:
                file_location = None
                path = utils.get_board_directory(name)
                path = os.path.join(path, name, "cores")
                for root, dirs, files in os.walk(path):
                    if core in files:
                        file_location = os.path.join(root, core)
                        break
                if not os.path.exists(file_location):
                    raise PGE("Core: %s does not exist" % file_location)
                dest_path = utils.resolve_path(
                    self.project_tags["BASE_DIR"])
                shutil.copy(file_location,
                            os.path.join(dest_path, "cores", core))

    #Copy the user specified constraint files to the constraints directory
    for constraint_fname in cfiles:
        abs_proj_base = utils.resolve_path(self.project_tags["BASE_DIR"])
        constraint_path = utils.get_constraint_file_path(
            self.project_tags["board"], constraint_fname)
        # When the name is an existing path, keep just the basename.
        if os.path.exists(constraint_fname):
            constraint_fname = os.path.split(constraint_fname)[-1]
            #constraint_path = constraint_fname
        if len(constraint_path) == 0:
            print(
                "Couldn't find constraint: %s, searched in the current directory and %s/hdl/%s"
                % (constraint_fname, abs_proj_base,
                   self.project_tags["board"]))
            continue
        shutil.copy(
            constraint_path,
            os.path.join(abs_proj_base, "constraints", constraint_fname))
        #shutil.copy (constraint_path, abs_proj_base + "/constraints/" + constraint_fname)

    #Generate the IO handler
    interface_filename = self.project_tags["INTERFACE"]["filename"]
    fdict = {"location": ""}
    #file_dest = self.project_tags["BASE_DIR"] + "/rtl/bus/interface"
    file_dest = os.path.join(self.project_tags["BASE_DIR"], "rtl", "bus",
                             "interface")
    result = self.filegen.process_file(filename=interface_filename,
                                       file_dict=fdict,
                                       directory=file_dest)
    if status:
        status.Verbose("verilog files: ")
        for f in self.filegen.verilog_file_list:
            status.Verbose("\t%s" % f)
    #if len(self.filegen.verilog_dependency_list) > 0:
    #    status.Verbose("\t\tdependent files: ")
    if status: status.Verbose("copy over the dependencies...")
    # Dependencies discovered while processing the verilog files.
    for d in self.filegen.verilog_dependency_list:
        fdict = {"location": ""}
        file_dest = os.path.join(self.project_tags["BASE_DIR"], "rtl",
                                 "dependencies")
        result = self.filegen.process_file(filename=d,
                                           file_dict=fdict,
                                           directory=file_dest,
                                           debug=True)
        if status: status.Verbose("\tDependent File: %s" % d)

    # Dependencies the user listed explicitly in the configuration.
    if "dependencies" in self.project_tags:
        if status: status.Verbose("User Specified dependencies")
        for name in self.project_tags["dependencies"]:
            if status: status.Verbose("\tUser Dependent File: %s" % name)
            fdict = {"location": ""}
            file_dest = os.path.join(self.project_tags["BASE_DIR"], "rtl",
                                     "dependencies")
            result = self.filegen.process_file(filename=name,
                                               file_dict=fdict,
                                               directory=file_dest,
                                               debug=True)
    return True
def generate_tb_module(tags, top_buffer, user_paths = [], debug=False):
    """
    Generate the test bench

    Args:
        tags (dictionary): Dictionary defining a project
        top_buffer (string): A buffer of the top module
        user_paths (list of strings): a list of paths pointing to user
            directories

    Returns:
        (string): buffer of the test module

    Raises:
        Nothing
    """
    top_module_tags = vutils.get_module_buffer_tags(top_buffer,
                                                    bus = "wishbone",
                                                    user_paths = [])
    # Static bindings between the testbench's simulation ports and the
    # top module's simulation interface.
    top_module = {
        "bind":{
            "sim_in_reset":{
                "loc":"i_sim_in_reset",
                "direction":"input",
                "reg":True
            },
            "sim_in_ready":{
                "loc":"i_sim_in_ready",
                "direction":"input",
                "reg":True
            },
            "sim_in_command":{
                "loc":"i_sim_in_command",
                "direction":"input",
                "reg":True
            },
            "sim_in_address":{
                "loc":"i_sim_in_address",
                "direction":"input",
                "reg":True
            },
            "sim_in_data":{
                "loc":"i_sim_in_data",
                "direction":"input",
                "reg":True
            },
            "sim_in_data_count":{
                "loc":"i_sim_in_data_count",
                "direction":"input",
                "reg":True
            },
            "sim_master_ready":{
                "loc":"o_sim_master_ready",
                "direction":"output"
            },
            "sim_out_en":{
                "loc":"o_sim_out_en",
                "direction":"output"
            },
            "sim_out_status":{
                "loc":"o_sim_out_status",
                "direction":"output"
            },
            "sim_out_address":{
                "loc":"o_sim_out_address",
                "direction":"output"
            },
            "sim_out_data":{
                "loc":"o_sim_out_data",
                "direction":"output"
            },
            "sim_out_data_count":{
                "loc":"o_sim_out_data_count",
                "direction":"output"
            }
        }
    }
    top_module["ports"] = {}
    top_module["ports"] = top_module_tags["ports"]
    top_module["module"] = top_module_tags["module"]
    #print "top module tags:\n"
    #utils.pretty_print_dict(top_module)
    # NOTE(review): despite its name this holds the ENTIRE board config
    # dictionary and is passed straight through to the module builder —
    # confirm whether a specific key (e.g. an invert-reset flag) was
    # intended instead.
    invert_reset = utils.get_board_config(tags["board"])
    # Collect a simulation stand-in for every slave/memory that requests
    # one with a "sim" entry.
    sim_modules = {}
    if "SLAVES" in tags:
        for slave in tags["SLAVES"]:
            if "sim" in tags["SLAVES"][slave]:
                # os.path.splitext drops the ".v" extension correctly; the
                # original used str.strip(".v"), which strips *characters*
                # and mangles names such as "vga.v" -> "ga".
                module_name = os.path.splitext(
                    tags["SLAVES"][slave]["filename"])[0]
                sim_dict = get_sim_module_dict(module_name, user_paths)
                add_sim_modules_to_project(tags, sim_dict, user_paths)
                sim_modules[slave] = generate_sub_slave_dict(sim_dict)
    if "MEMORY" in tags:
        for mem in tags["MEMORY"]:
            if "sim" in tags["MEMORY"][mem]:
                module_name = os.path.splitext(
                    tags["MEMORY"][mem]["filename"])[0]
                sim_dict = get_sim_module_dict(module_name, user_paths)
                add_sim_modules_to_project(tags, sim_dict, user_paths)
                sim_modules[mem] = generate_sub_slave_dict(sim_dict)

    # Port declarations of the generated 'tb' module.
    tb_tags = {}
    tb_tags["module"] = "tb"
    ports = {}
    ports["input"] = {}
    ports["output"] = {}
    ports["inout"] = {}
    ports["input"]["clk"] = {
        "direction":"input",
        "size":1
    }
    ports["input"]["rst"] = {
        "direction":"input",
        "size":1
    }
    ports["input"]["i_sim_in_reset"]= {
        "direction":"input",
        "size":1
    }
    ports["input"]["i_sim_in_ready"] = {
        "direction":"input",
        "size":1
    }
    ports["input"]["i_sim_in_command"] = {
        "direction":"input",
        "size":32,
        "min_val":0,
        "max_val":31
    }
    ports["input"]["i_sim_in_address"] = {
        "direction":"input",
        "size":32,
        "min_val":0,
        "max_val":31
    }
    ports["input"]["i_sim_in_data"] = {
        "direction":"input",
        "size":32,
        "min_val":0,
        "max_val":31
    }
    ports["input"]["i_sim_in_data_count"] = {
        "direction":"input",
        "size":32,
        "min_val":0,
        "max_val":31
    }
    ports["output"]["o_sim_master_ready"] = {
        "direction":"output",
        "size":1
    }
    ports["output"]["o_sim_out_en"] = {
        "direction":"output",
        "size":1
    }
    ports["output"]["o_sim_out_status"] = {
        "direction":"output",
        "size":32,
        "min_val":0,
        "max_val":31
    }
    ports["output"]["o_sim_out_address"] = {
        "direction":"output",
        "size":32,
        "min_val":0,
        "max_val":31
    }
    ports["output"]["o_sim_out_data"] = {
        "direction":"output",
        "size":32,
        "min_val":0,
        "max_val":31
    }
    ports["output"]["o_sim_out_data_count"] = {
        "direction":"output",
        "size":32,
        "min_val":0,
        "max_val":31
    }
    tb_tags["ports"] = ports
    #if debug:
    #    utils.pretty_print_dict(tb_tags)
    MB = TBModuleBuilder(tb_tags)
    #Generate 'slave_tags' or tags we will use to bind ports to simulation
    #Add the ports to the wires
    MB.add_ports_to_wires()
    sub_buffers = []
    sub_buffers.append(MB.generate_sub_module(
                            invert_reset,
                            "top",
                            top_module,
                            top_module,
                            enable_unique_ports = False))
    for sim in sim_modules:
        sub_buffers.append(MB.generate_sub_module(
                                invert_reset,
                                sim,
                                sim_modules[sim],
                                sim_modules[sim]))
    assign_buf = generate_assigns_buffer(invert_reset,
                                         MB.bindings,
                                         internal_bindings = {},
                                         debug = False)
    # Assemble the final testbench buffer.
    # (Fixed: these two calls referenced an undefined lowercase 'mb';
    # 'MB' is the builder created above.)
    buf = MB.generate_timespec_buf()
    buf += MB.generate_module_ports("tb", MB.tags["ports"], param_dict = {}, debug = False)
    buf += "\n"
    buf += MB.generate_module_wires(invert_reset)
    buf += generate_top_inout_wires(top_module)
    for sub in sub_buffers:
        buf += sub
        buf += "\n"
    buf += "\n"
    buf += assign_buf
    buf += "\n"
    # Dump a VCD waveform of the whole testbench.
    buf += ("\n"
            "initial begin\n"
            " $dumpfile (\"waveform.vcd\");\n"
            " $dumpvars (0, tb);\n"
            "end\n\n")
    buf += "endmodule"
    # str.expandtabs works on both Python 2 and 3 (string.expandtabs was
    # removed from the string module in Python 3).
    return buf.expandtabs(2)