def write_dict_file(self, dicts, filePathNames, keySeparator=None):
    """Write each dictionary in dicts to the corresponding file in filePathNames.

    Each key/value pair is written on its own line as '<key><separator><value>'.

    Arguments:
    - dicts         : one or more dictionaries to write
    - filePathNames : one or more output file path names, paired with dicts
    - keySeparator  : separator between key and value; defaults to
                      self.CDF_SEPARATOR
    """
    # Idiom fix: compare to None with 'is', not '=='.
    if keySeparator is None:
        keySeparator = self.CDF_SEPARATOR
    for fpn, the_dict in zip(cm.listify(filePathNames), cm.listify(dicts)):
        with open(fpn, 'w') as fp:
            # Iterate items() instead of key lookups per iteration.
            for key, value in the_dict.items():
                fp.write('%s%s%s\n' % (key, keySeparator, value))
def create_vivado_project_file(self, lib_names=None, design=None):
    """Create the Vivado project tcl file for all HDL libraries that have a
    top level entity key synth_top_level_entity.

    The project revision has the same name as the lib_name and will result in
    a <lib_name>.bit FPGA image file.

    Arguments:
    - lib_names : one or more HDL libraries.
    - design    : optional specific synth_top_level_entity value to select.
    """
    if lib_names is None:
        lib_names = self.lib_names
    lib_dicts = self.libs.get_dicts(key='hdl_lib_name', values=lib_names)
    if design is None:
        syn_dicts = self.libs.get_dicts(key='synth_top_level_entity', values=None, dicts=lib_dicts)
    else:
        syn_dicts = self.libs.get_dicts(key='synth_top_level_entity', values=[design], dicts=lib_dicts)
    for syn_dict in cm.listify(syn_dicts):
        # Open xpr for each HDL library that has a synth_top_level_entity
        lib_name = syn_dict['hdl_lib_name']
        xpr_name = lib_name + '_project.tcl'
        self.create_project_build_script(lib_name)
        xpr_path = self.get_lib_build_dirs('synth', lib_dicts=syn_dict)
        print("xpr_path = {}".format(xpr_path))
        cm.mkdir(xpr_path)
        xprPathName = cm.expand_file_path_name_posix(xpr_name, xpr_path)
        print(xprPathName)
        with open(xprPathName, 'w') as fp:
            # call board configuration tcl files
            fp.write('\n# vivado_tcl_files (toplevel)\n')
            fp.write('set DEVICE \"{}\"\n'.format(self.tool_dict['device']))
            fp.write('set BOARD \"{}\"\n'.format(self.tool_dict['board']))
            board_dicts = cm.listify(self.libs.get_dicts('hdl_lib_name', values=syn_dict['hdl_lib_uses_synth'].split()))
            board_dicts.append(syn_dict)
            for board_dict in board_dicts:
                if 'vivado_tcl_files' in board_dict:
                    vivado_tcl_files = board_dict['vivado_tcl_files'].split()
                    for fn in vivado_tcl_files:
                        if '_board.tcl' in fn:
                            filePathName = cm.expand_file_path_name_posix(fn, self.libs.get_filePath(board_dict))
                            fp.write('source %s\n' % filePathName)
            fp.write('# This script sets the project variables\n')
            # NOTE(review): the original had a line break inside this string
            # literal (paste damage); reconstructed as a single-line message —
            # confirm against the generated tcl output.
            fp.write('puts "Creating new project: %s"\n' % lib_name)
            fp.write('cd $proj_dir\n')
            fp.write('create_project %s -part $DEVICE -force\n' % lib_name)
            fp.write('set_property board_part $BOARD [current_project]\n')
            fp.write('set_property target_language VHDL [current_project]\n')
            fp.write('set_property target_simulator XSim [current_project]\n')
            fp.write('cd ../..\n')
            # Redundant fp.close() removed: the 'with' statement closes fp.
        self.create_vivado_ip_lib_file(lib_name)
def write_dict_file(self, dicts, filePathNames, keySeparator=None):
    """Write the dictionary information to the filePathName file."""
    # Fall back to the class default separator when none is given.
    separator = self.CDF_SEPARATOR if keySeparator is None else keySeparator
    for file_path_name, dictionary in zip(cm.listify(filePathNames), cm.listify(dicts)):
        with open(file_path_name, 'w') as out:
            out.writelines('%s%s%s\n' % (k, separator, dictionary[k]) for k in dictionary)
def create_quartus_settings_file(self, lib_names=None):
    """Create the Quartus settings file (QSF) for all HDL libraries that have
    a toplevel entity key synth_top_level_entity.

    Note:
    . No support for revisions, so only one qsf per qpf

    Arguments:
    - lib_names : one or more HDL libraries
    """
    if lib_names is None:
        lib_names = self.lib_names
    lib_dicts = self.libs.get_dicts(key='hdl_lib_name', values=lib_names)
    syn_dicts = self.libs.get_dicts(key='synth_top_level_entity', values=None, dicts=lib_dicts)
    for syn_dict in cm.listify(syn_dicts):
        # Open qsf for each HDL library that has a synth_top_level_entity
        lib_name = syn_dict['hdl_lib_name']
        lib_path = self.libs.get_filePath(syn_dict)
        top_level_entity = syn_dict['synth_top_level_entity']
        if top_level_entity == '':
            # An empty value defaults the top level entity to the library name.
            top_level_entity = lib_name
        qsf_path = self.get_lib_build_dirs('synth', lib_dicts=syn_dict)
        cm.mkdir(qsf_path)
        # One qsf per lib_name
        qsf_name = lib_name + '.qsf'
        qsfPathName = cm.expand_file_path_name(qsf_name, qsf_path)
        with open(qsfPathName, 'w') as fp:
            fp.write('# synth_top_level_entity\n')
            fp.write('set_global_assignment -name TOP_LEVEL_ENTITY %s\n' % top_level_entity)
            fp.write('\n')
            fp.write('# quartus_qsf_files\n')
            quartus_qsf_files = syn_dict['quartus_qsf_files'].split()
            for fn in quartus_qsf_files:
                filePathName = cm.expand_file_path_name(fn, lib_path)
                fp.write('set_global_assignment -name SOURCE_TCL_SCRIPT_FILE %s\n' % filePathName)
            fp.write('\n')
            fp.write('# All used HDL library *_lib.qip files in order with top level last\n')
            use_lib_order = self.derive_lib_order('synth', lib_name)
            # Must preserve use_lib_order order to ensure that the top level
            # design qip with the sdc file is included last in the qsf
            # (self.libs.get_dicts would use the original libs.dicts order).
            use_lib_dicts = self.get_lib_dicts_from_lib_names(lib_names=use_lib_order)
            for lib_dict in cm.listify(use_lib_dicts):
                qip_path = self.get_lib_build_dirs('synth', lib_dicts=lib_dict)
                qip_name = lib_dict['hdl_lib_name'] + '_lib.qip'
                qipPathName = cm.expand_file_path_name(qip_name, qip_path)
                fp.write('set_global_assignment -name QIP_FILE %s\n' % qipPathName)
def append_key_to_dict_file(self, filePathName, key, values):
    """Append the 'key = value' pair to the filePathName file.

    A single value is written on one line; multiple values are written one
    per line below a 'key = ' header line.
    """
    value_list = cm.listify(values)
    with open(filePathName, 'a') as fp:
        if len(value_list) == 1:
            # Bug fix: terminate the line with '\n' so a subsequent append
            # does not run on into this line, and write the single value
            # itself rather than a possible one-element list repr.
            fp.write('%s = %s\n' % (key, value_list[0]))
        else:
            fp.write('%s = \n' % key)
            for v in value_list:
                fp.write('%s\n' % v)
def append_key_to_dict_file(self, filePathName, key, values):
    """Write append the key = value pair to the filePathName file."""
    value_list = cm.listify(values)
    with open(filePathName, 'a') as fp:
        if len(value_list) != 1:
            # Multiple values: header line, then one value per line.
            fp.write('%s = \n' % key)
            fp.writelines('%s\n' % item for item in value_list)
        else:
            # Single value: one 'key = value' line (no trailing newline,
            # exactly as before).
            fp.write('%s = %s' % (key, values))
def validate_data(data):
    """Check that the pin groups in data together define exactly
    data['num_pins'] distinct pins.

    'num_pins' and 'nc' (not-connected pins) are special keys; every other
    key is a pin group whose value is either a pin list or a mapping of
    sub-group name to pin list.

    Raises AssertionError when pins are missing, extra, or duplicated.
    """
    num_pins = data['num_pins']
    # Start with the not-connected pins, then collect every grouped pin.
    pins = listify(data.get('nc', []))
    for group, group_data in data.items():
        if group in ('num_pins', 'nc'):
            continue
        if isinstance(group_data, dict):
            # Only the pin lists matter here, not the sub-group names.
            for sub_pins in group_data.values():
                pins.extend(listify(sub_pins))
        else:
            pins.extend(listify(group_data))
    assert len(pins) == num_pins, 'expected %s pins, found %s' % (num_pins, len(pins))
    assert len(set(pins)) == num_pins, 'duplicate pins found'
def get_dicts(self, key, values=None, dicts=None):
    """Get all dictionaries in dicts that contain the key with a value
    specified in values.

    If values is None then get all dictionaries in dicts that contain the key.
    If dicts is None then default to self.dicts.
    """
    if dicts is None:
        dicts = self.dicts
    the_dicts = []
    for fd in cm.listify(dicts):
        # Skip duplicates and dicts without the key.
        if fd not in the_dicts and key in fd:
            if values is None or fd[key] in cm.listify(values):
                the_dicts.append(fd)
    return cm.unlistify(the_dicts)
def get_dicts(self, key, values=None, dicts=None):
    """Return the dictionaries in dicts that have key, optionally filtered to
    those whose key value occurs in values (values None means no filter).
    Defaults to self.dicts when dicts is None.
    """
    search_dicts = self.dicts if dicts is None else dicts
    accepted = None if values is None else cm.listify(values)
    matches = []
    for candidate in cm.listify(search_dicts):
        if candidate in matches:
            continue  # already collected
        if key not in candidate:
            continue
        if accepted is None or candidate[key] in accepted:
            matches.append(candidate)
    return cm.unlistify(matches)
def create_modelsim_project_files_file(self, lib_names=None):
    """Create file with the list of Modelsim project files for all HDL libraries.

    Writes one '<lib_name> = <mpf_path>' line per library.

    Arguments:
    - lib_names : one or more HDL libraries
    """
    fileName = 'modelsim_project_files.txt'  # use fixed file name
    build_maindir, build_toolsetdir, build_tooldir = self.get_tool_build_dir('sim')
    # ... and use the tool build dir for the file path
    fileNamePath = os.path.join(build_maindir, build_toolsetdir, build_tooldir, fileName)
    if lib_names is None:
        lib_names = self.lib_names
    with open(fileNamePath, 'w') as fp:
        lib_dicts = self.libs.get_dicts('hdl_lib_name', lib_names)
        mpf_paths = self.get_lib_build_dirs('sim', lib_dicts=lib_dicts)
        for lib_name, mpf_path in zip(cm.listify(lib_names), cm.listify(mpf_paths)):
            fp.write('%s = %s\n' % (lib_name, mpf_path))
def create_quartus_project_file(self, lib_names=None):
    """Create the Quartus project file (QPF) for all HDL libraries that have a
    toplevel entity key synth_top_level_entity.

    Note:
    . Default if the synth_top_level_entity key is defined but left empty then
      the top level entity has the same name as the lib_name in hdl_lib_name.
      Otherwise synth_top_level_entity can specify another top level entity
      name in the library. Each HDL library can only have one Quartus project
      file.
    . The project revision has the same name as the lib_name and will result
      in a <lib_name>.sof FPGA image file.
    . For each additional revision a subdirectory can be used. This
      subdirectory can be named 'revisions/' and lists a number of revisions
      as subdirectories. Each revision will have a separate hdllib.cfg file
      and a .vhd file with the toplevel entity. The toplevel .vhd file
      specifies the <g_design_name> for the revision in the generics.

    Arguments:
    - lib_names : one or more HDL libraries
    """
    if lib_names is None:
        lib_names = self.lib_names
    lib_dicts = self.libs.get_dicts(key='hdl_lib_name', values=lib_names)
    syn_dicts = self.libs.get_dicts(key='synth_top_level_entity', values=None, dicts=lib_dicts)
    for syn_dict in cm.listify(syn_dicts):
        # Open qpf for each HDL library that has a synth_top_level_entity
        lib_name = syn_dict['hdl_lib_name']
        qpf_name = lib_name + '.qpf'
        qpf_path = self.get_lib_build_dirs('synth', lib_dicts=syn_dict)
        cm.mkdir(qpf_path)
        qpfPathName = cm.expand_file_path_name(qpf_name, qpf_path)
        with open(qpfPathName, 'w') as fp:
            fp.write('PROJECT_REVISION = "%s"\n' % lib_name)
def derive_lib_order(self, build_type, lib_names=None):
    """Derive the dependency order for all HDL libraries in the fully
    specified list of lib_names.

    Note:
    . Only the generic HDL libraries and the technology specific libraries
      that match self.technologyNames are used.
    . The lib_names list must include all used libs, so if necessary first
      call derive_all_use_libs().
    """
    if lib_names is None:
        lib_names = self.lib_names
    lib_dicts = self.libs.get_dicts('hdl_lib_name', values=lib_names)
    # use list() to take a local copy to avoid modifying the list order of
    # self.lib_names which matches the self.libs.dicts list order
    lib_order = list(lib_names)
    for lib_dict in cm.listify(lib_dicts):
        lib_name = lib_dict['hdl_lib_name']
        use_libs, exclude_libs = self.get_used_libs(build_type, lib_dict, [])
        try:
            for use_lib in use_libs:
                if use_lib in lib_names:
                    if lib_order.index(use_lib) > lib_order.index(lib_name):
                        # move used lib to just before this lib
                        lib_order.remove(use_lib)
                        lib_order.insert(lib_order.index(lib_name), use_lib)
        except NameError:
            # NOTE(review): this handler looks unreachable — list.index()
            # raises ValueError, not NameError; confirm the original intent
            # before removing or retargeting it. Preserved as-is.
            pass
    # use recursion to keep on reordering the lib_order until it is stable
    if lib_names != lib_order:
        lib_order = self.derive_lib_order(build_type, lib_order)
    return lib_order
def create_quartus_project_file(self, lib_names=None):
    """Create the Quartus project file (QPF) for every HDL library that has a
    toplevel entity key synth_top_level_entity.

    Note:
    . If the synth_top_level_entity key is defined but left empty, the top
      level entity defaults to the lib_name from hdl_lib_name; otherwise it
      names another top level entity in the library. Each HDL library can
      have only one Quartus project file.
    . The project revision has the same name as the lib_name and yields a
      <lib_name>.sof FPGA image file.
    . Additional revisions can live in a 'revisions/' subdirectory, one
      subdirectory per revision, each with its own hdllib.cfg and a toplevel
      .vhd file that sets <g_design_name> in the generics.

    Arguments:
    - lib_names : one or more HDL libraries
    """
    selected = self.lib_names if lib_names is None else lib_names
    lib_dicts = self.libs.get_dicts(key='hdl_lib_name', values=selected)
    syn_dicts = self.libs.get_dicts(key='synth_top_level_entity', values=None, dicts=lib_dicts)
    for syn_dict in cm.listify(syn_dicts):
        # One qpf per library that defines a synth_top_level_entity.
        lib_name = syn_dict['hdl_lib_name']
        build_dir = self.get_lib_build_dirs('synth', lib_dicts=syn_dict)
        cm.mkdir(build_dir)
        qpf_path_name = cm.expand_file_path_name(lib_name + '.qpf', build_dir)
        with open(qpf_path_name, 'w') as qpf:
            qpf.write('PROJECT_REVISION = "%s"\n' % lib_name)
def copy_files(self, build_type, lib_names=None):
    """Copy all source directories and source files listed at the
    <tool_name>_copy_files key.

    The build_type selects the <tool_name>_copy_files key using the
    tool_name_<build_type> key value from the hdltool_<toolset>.cfg.

    The <tool_name>_copy_files key expects a source and a destination pair
    per listed directory or file:
    - The sources need to be specified with absolute path or relative to the
      HDL library source directory where the hdllib.cfg is stored
    - The destinations need to be specified with absolute path or relative to
      the HDL library build directory where the project file (e.g. mpf, qpf)
      gets stored

    Arguments:
    - lib_names : one or more HDL libraries
    """
    if lib_names is None:
        lib_names = self.lib_names
    lib_dicts = self.libs.get_dicts(key='hdl_lib_name', values=lib_names)
    tool_name_key = 'tool_name_' + build_type
    tool_name_value = self.tool_dict[tool_name_key]
    tool_name_copy_key = tool_name_value + '_copy_files'
    for lib_dict in cm.listify(lib_dicts):
        if tool_name_copy_key in lib_dict:
            lib_path = self.libs.get_filePath(lib_dict)
            build_dir_path = self.get_lib_build_dirs(build_type, lib_dicts=lib_dict)
            cm.mkdir(build_dir_path)
            # The key value is a flat list: src dst src dst ...
            key_values = lib_dict[tool_name_copy_key].split()
            sources = key_values[0::2]
            destinations = key_values[1::2]
            for src, dst in zip(sources, destinations):
                sourcePathName = cm.expand_file_path_name(src, lib_path)
                destinationPath = cm.expand_file_path_name(dst, build_dir_path)
                if os.path.isfile(sourcePathName):
                    shutil.copy(sourcePathName, destinationPath)  # copy file
                else:
                    # copy directory tree (will create new destinationPath directory)
                    copy_tree(sourcePathName, destinationPath)
def create_quartus_settings_file(self, lib_names=None):
    """Create the Quartus settings file (QSF) for all HDL libraries that have
    a toplevel entity key synth_top_level_entity.

    Note:
    . No support for revisions, so only one qsf per qpf

    Arguments:
    - lib_names : one or more HDL libraries
    """
    if lib_names is None:
        lib_names = self.lib_names
    lib_dicts = self.libs.get_dicts(key='hdl_lib_name', values=lib_names)
    syn_dicts = self.libs.get_dicts(key='synth_top_level_entity', values=None, dicts=lib_dicts)
    for syn_dict in cm.listify(syn_dicts):
        # Open qsf for each HDL library that has a synth_top_level_entity
        lib_name = syn_dict['hdl_lib_name']
        lib_path = self.libs.get_filePath(syn_dict)
        top_level_entity = syn_dict['synth_top_level_entity']
        if top_level_entity == '':
            # An empty value defaults the top level entity to the library name.
            top_level_entity = lib_name
        qsf_path = self.get_lib_build_dirs('synth', lib_dicts=syn_dict)
        cm.mkdir(qsf_path)
        # One qsf per lib_name
        qsf_name = lib_name + '.qsf'
        qsfPathName = cm.expand_file_path_name(qsf_name, qsf_path)
        with open(qsfPathName, 'w') as fp:
            fp.write('# synth_top_level_entity\n')
            fp.write('set_global_assignment -name TOP_LEVEL_ENTITY %s\n' % top_level_entity)
            fp.write('\n')
            fp.write('# quartus_qsf_files\n')
            quartus_qsf_files = syn_dict['quartus_qsf_files'].split()
            for fn in quartus_qsf_files:
                filePathName = cm.expand_file_path_name(fn, lib_path)
                fp.write('set_global_assignment -name SOURCE_TCL_SCRIPT_FILE %s\n' % filePathName)
            fp.write('\n')
            fp.write('# All used HDL library *_lib.qip files in order with top level last\n')
            use_lib_names = self.derive_all_use_libs('synth', lib_name)
            use_lib_order = self.derive_lib_order('synth', use_lib_names)
            # Must preserve use_lib_order order to ensure that the top level
            # design qip with the sdc file is included last in the qsf
            # (self.libs.get_dicts would use the original libs.dicts order).
            use_lib_dicts = self.get_lib_dicts_from_lib_names(lib_names=use_lib_order)
            for lib_dict in cm.listify(use_lib_dicts):
                qip_path = self.get_lib_build_dirs('synth', lib_dicts=lib_dict)
                qip_name = lib_dict['hdl_lib_name'] + '_lib.qip'
                qipPathName = cm.expand_file_path_name(qip_name, qip_path)
                fp.write('set_global_assignment -name QIP_FILE %s\n' % qipPathName)
def get_key_values(self, key, dicts=None):
    """Get the value of a key in the dicts or None in case the key does not
    exist.

    If no dicts are specified then default to self.dicts.
    """
    if dicts is None:
        dicts = self.dicts
    # dict.get(key) is equivalent to the original 'fd[key] if key in fd
    # else None' append loop.
    key_values = [fd.get(key) for fd in cm.listify(dicts)]
    return cm.unlistify(key_values)
def create_modelsim_project_files_file(self, lib_names=None):
    """Create a file listing the Modelsim project file path per HDL library.

    One '<lib_name> = <mpf_path>' line is written per library.

    Arguments:
    - lib_names : one or more HDL libraries
    """
    main_dir, toolset_dir, tool_dir = self.get_tool_build_dir('sim')
    # Fixed file name, stored in the tool build directory.
    out_path_name = os.path.join(main_dir, toolset_dir, tool_dir, 'modelsim_project_files.txt')
    if lib_names is None:
        lib_names = self.lib_names
    with open(out_path_name, 'w') as out:
        dicts = self.libs.get_dicts('hdl_lib_name', lib_names)
        paths = self.get_lib_build_dirs('sim', lib_dicts=dicts)
        for name, path in zip(cm.listify(lib_names), cm.listify(paths)):
            out.write('%s = %s\n' % (name, path))
def get_lib_dicts_from_lib_names(self, lib_names=None):
    """Get list of the HDL libraries lib_dicts from the list of HDL library
    lib_names and preserve the library order.
    """
    if lib_names is None:
        lib_names = self.lib_names
    # Cannot use:
    #   lib_dicts = self.libs.get_dicts('hdl_lib_name', values=lib_names)
    # because then the order of self.libs.dicts would be used instead of the
    # requested lib_names order.
    # NOTE(review): this indexing assumes self.lib_names and self.libs.dicts
    # are parallel lists (same index -> same library) — confirm that
    # invariant holds where both are built.
    return [self.libs.dicts[self.lib_names.index(lib_name)]
            for lib_name in cm.listify(lib_names)]
def get_key_values(self, key, dicts=None, must_exist=False):
    """Get the value of a key in the dicts, or None in case the key does not
    exist, or exit if the key must exist.

    If no dicts are specified then default to the self.dicts of the object.
    """
    if dicts is None:
        dicts = self.dicts
    key_values = []
    for fd in cm.listify(dicts):
        if key in fd:
            key_values.append(fd[key])
        elif must_exist:
            # Hard error: a required key is missing from this dictionary.
            sys.exit('Error : Key %s does not exist in the dictionary:\n%s.' % (key, fd))
        else:
            key_values.append(None)
    return cm.unlistify(key_values)
def insert_side(data, group, pin_type, side):
    """
    Run the commands for inserting the pins of one group into the Kicad
    schematic editor.

    :param data: mapping of pin groups to pin lists (or nested mappings)
    :param group: name of the group in data whose pins are inserted
    :param pin_type: Pin type
    :param side: Pin side (up, left, right, down)
    :return:
    """
    reset_pin_dialog(side)
    group_data = data[group]
    # A plain list is treated as one sub-group named after the group itself.
    sub_groups = {group: group_data} if isinstance(group_data, list) else group_data
    for name, pins in sub_groups.items():
        insert_pins(name, listify(pins), pin_type, side)
    reset_pin_dialog(side)
def create_lib_order_files(self, build_type, lib_names=None):
    """Create the compile order file '<lib_name>_lib_order.txt' for all HDL
    libraries in the specified list of lib_names.

    The file is stored in the sim build directory of the HDL library. The
    file is read by commands.do in Modelsim to avoid having to derive the
    library compile order in TCL.
    """
    if lib_names is None:
        lib_names = self.lib_names
    lib_dicts = self.libs.get_dicts('hdl_lib_name', values=lib_names)
    for lib_dict in cm.listify(lib_dicts):
        lib_name = lib_dict['hdl_lib_name']
        use_libs = self.derive_all_use_libs(build_type, lib_name)
        lib_order = self.derive_lib_order(build_type, use_libs)
        file_name = lib_name + '_lib_order.txt'
        file_path = self.get_lib_build_dirs('sim', lib_dicts=lib_dict)
        cm.mkdir(file_path)
        filePathName = os.path.join(file_path, file_name)
        with open(filePathName, 'w') as fp:
            # Space-separated library names, dependency order first.
            for lib in lib_order:
                fp.write('%s ' % lib)
def get_lib_build_dirs(self, build_type, lib_dicts=None):
    """Get the subdirectories within the central tool build directory for all
    HDL libraries in the specified list of lib_dicts.

    The build_type can be:
      'sim'   uses the 'tool_name_sim' key in the self.tool_dict
      'synth' uses the 'tool_name_synth' key in the self.tool_dict

    The build dir key value must be an absolute directory path. The lib build
    dir consists of
    - the absolute path to the central main build directory
    - the tool_name_key value as subdirectory
    - the library name as library subdirectory
    """
    if lib_dicts is None:
        lib_dicts = self.libs.dicts
    build_maindir, build_toolset_dir, build_tooldir = self.get_tool_build_dir(build_type)
    # Central build main directory with a subdirectory per library.
    build_dirs = [os.path.join(build_maindir, build_toolset_dir, build_tooldir, lib_dict['hdl_lib_name'])
                  for lib_dict in cm.listify(lib_dicts)]
    return cm.unlistify(build_dirs)
def create_modelsim_lib_compile_ip_files(self, lib_names=None):
    """Create the '<lib_name>_lib_compile_ip.txt' file for all HDL libraries
    in the specified list of lib_names, plus one generate_ip.tcl script.

    The txt file is stored in the sim build directory of the HDL library and
    is read by commands.do in Modelsim to know which IP needs to be compiled
    before the library is compiled.
    """
    if lib_names is None:
        lib_names = self.lib_names
    lib_dicts = self.libs.get_dicts('hdl_lib_name', lib_names)
    build_maindir, build_toolset_dir, build_tooldir = self.get_tool_build_dir('sim')
    genIPScript = os.path.join(build_maindir, build_toolset_dir, build_tooldir, 'generate_ip.tcl')
    with open(genIPScript, 'w') as ipFile:
        ipFile.write('set_part %s\n' % self.tool_dict['device'])
        for lib_dict in cm.listify(lib_dicts):
            if 'modelsim_compile_ip_files' in lib_dict:
                compile_ip_files = lib_dict['modelsim_compile_ip_files'].split()
                lib_name = lib_dict['hdl_lib_name']
                file_name = lib_name + '_lib_compile_ip.txt'
                file_path = self.get_lib_build_dirs('sim', lib_dicts=lib_dict)
                cm.mkdir(file_path)
                filePathName = os.path.join(file_path, file_name)
                # Append to the ip generation script (TCL always uses UNIX slashes)
                ipFile.write('cd %s\n' % file_path.replace("\\", "/"))
                with open(filePathName, 'w') as fp:
                    for fpn in compile_ip_files:
                        # Write the script base name (no path, no extension) for
                        # commands.do. Removed the unused local
                        # 'efpn = os.path.expandvars(fpn)' — its value was
                        # never written.
                        fp.write('%s ' % os.path.splitext(os.path.split(fpn)[1])[0])
                        ipFile.write('source %s\n' % os.path.join(lib_dict['lib_path'], fpn).replace("\\", "/"))
        # NOTE(review): the collapsed source is ambiguous about where this
        # 'exit' is written; placed once at the end of the script — confirm.
        ipFile.write('exit\n')
def create_modelsim_lib_compile_ip_files(self, lib_names=None):
    """Create the '<lib_name>_lib_compile_ip.txt' file for all HDL libraries
    in the specified list of lib_names.

    The file is stored in the sim build directory of the HDL library. The
    file is read by commands.do in Modelsim to know which IP needs to be
    compiled before the library is compiled.
    """
    if lib_names is None:
        lib_names = self.lib_names
    lib_dicts = self.libs.get_dicts('hdl_lib_name', lib_names)
    for lib_dict in cm.listify(lib_dicts):
        if 'modelsim_compile_ip_files' in lib_dict:
            compile_ip_files = lib_dict['modelsim_compile_ip_files'].split()
            lib_name = lib_dict['hdl_lib_name']
            file_name = lib_name + '_lib_compile_ip.txt'
            file_path = self.get_lib_build_dirs('sim', lib_dicts=lib_dict)
            cm.mkdir(file_path)
            filePathName = os.path.join(file_path, file_name)
            with open(filePathName, 'w') as fp:
                for fpn in compile_ip_files:
                    # Write the expanded file path name so that it can be
                    # executed directly from its location in SVN using the
                    # Modelsim "do" command in commands.do. (Writing only the
                    # basename would require copying that file to the mpf
                    # build directory.)
                    efpn = os.path.expandvars(fpn)
                    fp.write('%s ' % efpn)
def create_quartus_ip_lib_file(self, lib_names=None):
    """Create the Quartus IP file <hdl_lib_name>_lib.qip for all HDL libraries.

    The <hdl_lib_name>_lib.qip file contains the list of files that are given
    by the synth_files key and the quartus_*_file keys.

    Note:
    . Use post fix '_lib' in the QIP file name *_lib.qip to avoid potential
      conflict with *.qip that may come with the IP.
    . The HDL library *_lib.qip files contain all files that are listed by
      the synth_files key. Hence when these qip files are included then the
      Quartus project will analyse all files even if their entity is not
      instantiated in the design. This is fine, it is unnecessary to parse
      the hierarchy of the synth_top_level_entity VHDL file to find and
      include only the source files that are actually used.

    Arguments:
    - lib_names : one or more HDL libraries
    """
    if lib_names is None:
        lib_names = self.lib_names
    lib_dicts = self.libs.get_dicts('hdl_lib_name', values=lib_names)
    for lib_dict in cm.listify(lib_dicts):
        # Open qip
        lib_name = lib_dict['hdl_lib_name']
        lib_path = self.libs.get_filePath(lib_dict)
        qip_name = lib_name + '_lib.qip'
        qip_path = self.get_lib_build_dirs('synth', lib_dicts=lib_dict)
        cm.mkdir(qip_path)
        qipPathName = cm.expand_file_path_name(qip_name, qip_path)
        with open(qipPathName, 'w') as fp:
            if 'synth_files' in lib_dict:
                fp.write('# synth_files\n')
                for fn in lib_dict['synth_files'].split():
                    filePathName = cm.expand_file_path_name(fn, lib_path)
                    file_ext = fn.split('.')[-1]
                    if file_ext in ('vhd', 'vhdl'):
                        file_type = 'VHDL_FILE'
                    elif file_ext == 'v':
                        file_type = 'VERILOG_FILE'
                    else:
                        # Consistency fix: the original used Python 2 print
                        # statements here while the rest of the file uses the
                        # print() function.
                        print('\nERROR - Undefined file extension in synth_files:', fn)
                        sys.exit()
                    fp.write('set_global_assignment -name %s %s -library %s\n' % (file_type, filePathName, lib_name + '_lib'))
            if 'quartus_vhdl_files' in lib_dict:
                fp.write('\n')
                fp.write('# quartus_vhdl_files\n')
                for fn in lib_dict['quartus_vhdl_files'].split():
                    filePathName = cm.expand_file_path_name(fn, lib_path)
                    file_ext = fn.split('.')[-1]
                    if file_ext in ('vhd', 'vhdl'):
                        file_type = 'VHDL_FILE'
                    elif file_ext == 'v':
                        file_type = 'VERILOG_FILE'
                    else:
                        print('\nERROR - Undefined file extension in quartus_vhdl_files:', fn)
                        sys.exit()
                    # NOTE(review): file_type is computed above but the original
                    # always writes VHDL_FILE here, even for .v files —
                    # preserved as-is; confirm whether %s/file_type was intended.
                    fp.write('set_global_assignment -name VHDL_FILE %s -library %s\n' % (filePathName, lib_name + '_lib'))
            if 'quartus_qip_files' in lib_dict:
                fp.write('\n')
                fp.write('# quartus_qip_files\n')
                for fn in lib_dict['quartus_qip_files'].split():
                    filePathName = cm.expand_file_path_name(fn, lib_path)
                    fp.write('set_global_assignment -name QIP_FILE %s\n' % filePathName)
            if 'quartus_tcl_files' in lib_dict:
                fp.write('\n')
                fp.write('# quartus_tcl_files\n')
                for fn in lib_dict['quartus_tcl_files'].split():
                    filePathName = cm.expand_file_path_name(fn, lib_path)
                    fp.write('set_global_assignment -name SOURCE_TCL_SCRIPT_FILE %s\n' % filePathName)
            if 'quartus_sdc_files' in lib_dict:
                fp.write('\n')
                fp.write('# quartus_sdc_files\n')
                for fn in lib_dict['quartus_sdc_files'].split():
                    filePathName = cm.expand_file_path_name(fn, lib_path)
                    fp.write('set_global_assignment -name SDC_FILE %s\n' % filePathName)
def create_modelsim_project_file(self, lib_names=None):
    """Create the Modelsim project file for all technology libraries and RTL HDL libraries.

    Arguments:
    - lib_names : one or more HDL libraries; defaults to all known libraries.

    Library mapping:
    - Technology libraries that are available, but not used are mapped to work.
    - Unavailable libraries are also mapped to work. The default library
      clause name is <lib_name> with postfix '_lib'. This is a best effort
      guess, because it is impossible to know the library clause name for an
      unavailable library. If the best effort guess is not suitable, then the
      workaround is to create a place holder directory with hdllib.cfg that
      defines the actual library clause name as it appears in the VHDL for
      the unavailable HDL library. Unavailable library names occur when e.g.
      a technology IP library is not available in the toolRootDir because it
      is not needed, or it may indicate a spelling error.

    Writes one <lib_name>.mpf per library with [Library], [Project] and
    [vsim] sections in Modelsim's expected key = value format.
    """
    if lib_names == None:
        lib_names = self.lib_names
    lib_dicts = self.libs.get_dicts('hdl_lib_name', lib_names)
    for lib_dict in cm.listify(lib_dicts):
        # Open mpf
        lib_name = lib_dict['hdl_lib_name']
        mpf_name = lib_name + '.mpf'
        mpf_path = self.get_lib_build_dirs('sim', lib_dicts=lib_dict)
        cm.mkdir(mpf_path)
        mpfPathName = os.path.normpath(os.path.join(mpf_path, mpf_name))
        with open(mpfPathName, 'w') as fp:
            # Write [Library] section for all used libraries
            fp.write('[Library]\n')
            # . map used vendor technology libs to their target directory
            for technologyName in self.technologyNames:
                tech_dict = self.read_hdl_libraries_technology_file(
                    technologyName, 'tool_name_sim')
                for lib_clause, lib_work in tech_dict.items():
                    # string values may contain environment variables; expand them
                    if type(lib_work) is str:
                        lib_work = cm.expand_file_path_name(lib_work)
                    fp.write('%s = %s\n' % (lib_clause, lib_work))
            # . not used vendor technology libs are not compiled but are mapped to work to avoid compile error when mentioned in the LIBRARY clause
            # for tech_dict in self.removed_dicts:
            #     fp.write('%s = work\n' % tech_dict['hdl_library_clause_name'])
            # . unavailable used libs are not compiled but are mapped to work to avoid compile error when mentioned in the LIBRARY clause
            for unavailable_use_name in self.unavailable_use_libs:
                # if the unavailable library is not in the dictionary of disclosed
                # unavailable library clause names, then assume that the library
                # clause name has the default postfix '_lib'.
                if unavailable_use_name in self.disclosed_library_clause_names:
                    fp.write('%s = work\n' % self.disclosed_library_clause_names[unavailable_use_name])
                else:
                    fp.write('%s_lib = work\n' % unavailable_use_name)
            # . all used libs for this lib_name
            use_lib_names = self.derive_all_use_libs('sim', lib_name)
            use_lib_dicts = self.libs.get_dicts('hdl_lib_name', use_lib_names)
            use_lib_build_sim_dirs = self.get_lib_build_dirs('sim', lib_dicts=use_lib_dicts)
            use_lib_clause_names = self.libs.get_key_values('hdl_library_clause_name', use_lib_dicts)
            for lib_clause, lib_dir in zip(cm.listify(use_lib_clause_names),
                                           cm.listify(use_lib_build_sim_dirs)):
                lib_work = os.path.normpath(os.path.join(lib_dir, 'work'))
                fp.write('%s = %s\n' % (lib_clause, lib_work))
            # . work
            fp.write('work = work\n')
            # . others modelsim default libs
            model_tech_dir = os.path.expandvars(self.tool_dict['model_tech_dir'])
            fp.write('others = %s\n' % os.path.normpath(os.path.join(model_tech_dir, 'modelsim.ini')))
            # Write [Project] section for all used libraries
            fp.write('[Project]\n')
            fp.write('Project_Version = 6\n')  # must be >= 6 to fit all
            fp.write('Project_DefaultLib = work\n')
            fp.write('Project_SortMethod = unused\n')
            # - project files: synth_files and test_bench_files are optional keys
            try:
                synth_files = lib_dict['synth_files'].split()
            except KeyError:
                synth_files = []
            try:
                test_bench_files = lib_dict['test_bench_files'].split()
            except KeyError:
                test_bench_files = []
            project_files = synth_files + test_bench_files
            if 'modelsim_compile_ip_files' in lib_dict:
                compile_ip_files = lib_dict['modelsim_compile_ip_files'].split()
                project_files += compile_ip_files
            fp.write('Project_Files_Count = %d\n' % len(project_files))
            lib_path = self.libs.get_filePath(lib_dict)
            # Per-file property strings in Modelsim mpf format; the hdl defaults are
            # combined with a file-type-specific suffix per project file.
            project_file_p_defaults_hdl = 'vhdl_novitalcheck 0 group_id 0 cover_nofec 0 vhdl_nodebug 0 vhdl_1164 1 vhdl_noload 0 vhdl_synth 0 vhdl_enable0In 0 vlog_1995compat 0 last_compile 0 vhdl_disableopt 0 cover_excludedefault 0 vhdl_vital 0 vhdl_warn1 1 vhdl_warn2 1 vhdl_explicit 1 vhdl_showsource 0 cover_covercells 0 vhdl_0InOptions {} vhdl_warn3 1 vlog_vopt {} cover_optlevel 3 voptflow 1 vhdl_options {} vhdl_warn4 1 toggle - ood 0 vhdl_warn5 1 cover_noshort 0 compile_to work cover_nosub 0 dont_compile 0 vhdl_use93 2008 cover_stmt 1'
            project_file_p_defaults_vhdl = 'file_type vhdl'
            project_file_p_defaults_verilog = 'file_type verilog'
            project_file_p_defaults_systemverilog = 'file_type systemverilog'
            project_file_p_defaults_tcl = 'last_compile 0 compile_order -1 file_type tcl group_id 0 dont_compile 1 ood 1'
            project_folders = []
            # offset keeps Project_File_%d / compile_order numbering continuous
            # across the synth, test bench and compile-ip sections.
            offset = 0
            nof_synth_files = len(synth_files)
            # for i, fn in enumerate(project_files):
            #     filePathName = cm.expand_file_path_name(fn, lib_path)
            #     fp.write('Project_File_%d = %s\n' % (i, filePathName))
            if nof_synth_files > 0:
                project_folders.append('synth_files')
                for i in range(nof_synth_files):
                    # Add file type specific settings
                    file_ext = synth_files[i].split('.')[-1]
                    if file_ext == 'vhd' or file_ext == 'vhdl':
                        project_file_p_defaults_file_specific = project_file_p_defaults_vhdl
                    elif file_ext == 'v':
                        project_file_p_defaults_file_specific = project_file_p_defaults_verilog
                    elif file_ext == 'vh':
                        project_file_p_defaults_file_specific = project_file_p_defaults_verilog
                    elif file_ext == 'sv':
                        project_file_p_defaults_file_specific = project_file_p_defaults_systemverilog
                    else:
                        print("\nERROR - Undefined file extension in synth_files:", lib_name, synth_files[i])
                        sys.exit()
                    # Prepend the library path if a relative path
                    # (a ':' in the name is taken to mean an absolute/drive path)
                    if synth_files[i].find(":") == -1:
                        filePathName = cm.expand_file_path_name(synth_files[i], lib_path)
                    else:
                        filePathName = synth_files[i]
                    fp.write('Project_File_%d = %s\n' % (i, filePathName))
                    fp.write('Project_File_P_%d = folder %s compile_order %d %s\n' % (offset + i, project_folders[-1], offset + i, project_file_p_defaults_hdl + ' ' + project_file_p_defaults_file_specific))
            offset = nof_synth_files
            nof_test_bench_files = len(test_bench_files)
            if nof_test_bench_files > 0:
                project_folders.append('test_bench_files')
                for i in range(nof_test_bench_files):
                    # Add file type specific settings
                    file_ext = test_bench_files[i].split('.')[-1]
                    if file_ext == 'vhd' or file_ext == 'vho' or file_ext == 'vhdl':
                        project_file_p_defaults_file_specific = project_file_p_defaults_vhdl
                    elif file_ext == 'v':
                        project_file_p_defaults_file_specific = project_file_p_defaults_verilog
                    elif file_ext == 'vh':
                        project_file_p_defaults_file_specific = project_file_p_defaults_verilog
                    elif file_ext == 'sv':
                        project_file_p_defaults_file_specific = project_file_p_defaults_systemverilog
                    else:
                        print("\nERROR - Undefined file extension in test_bench_files:", lib_name, test_bench_files[i])
                        sys.exit()
                    filePathName = cm.expand_file_path_name(test_bench_files[i], lib_path)
                    fp.write('Project_File_%d = %s\n' % (offset + i, filePathName))
                    fp.write('Project_File_P_%d = folder %s compile_order %d %s\n' % (offset + i, project_folders[-1], offset + i, project_file_p_defaults_hdl + ' ' + project_file_p_defaults_file_specific))
            offset += nof_test_bench_files
            if 'modelsim_compile_ip_files' in lib_dict:
                nof_compile_ip_files = len(compile_ip_files)
                if nof_compile_ip_files > 0:
                    project_folders.append('compile_ip_files')
                    for i in range(nof_compile_ip_files):
                        filePathName = cm.expand_file_path_name(compile_ip_files[i], lib_path)
                        fp.write('Project_File_%d = %s\n' % (offset + i, filePathName))
                        fp.write('Project_File_P_%d = folder %s compile_order %d %s\n' % (offset + i, project_folders[-1], offset + i, project_file_p_defaults_tcl))
                offset += nof_compile_ip_files
            # - project folders
            fp.write('Project_Folder_Count = %d\n' % len(project_folders))
            for i, fd in enumerate(project_folders):
                fp.write('Project_Folder_%d = %s\n' % (i, fd))
                fp.write('Project_Folder_P_%d = folder {Top Level}\n' % i)
            # - simulation configurations: one per test bench file
            fp.write('Project_Sim_Count = %d\n' % len(test_bench_files))
            project_sim_p_defaults, project_sim_p_search_libraries, project_sim_p_otherargs, project_sim_p_optimization = self.simulation_configuration()
            for i, fn in enumerate(test_bench_files):
                fName = os.path.basename(fn)
                tbName = os.path.splitext(fName)[0]
                fp.write('Project_Sim_%d = %s\n' % (i, tbName))
            for i, fn in enumerate(test_bench_files):
                fName = os.path.basename(fn)
                tbName = os.path.splitext(fName)[0]
                #if project_sim_p_search_libraries.find("xpm") != -1: tbName += " xpm.glbl"
                fp.write('Project_Sim_P_%d = folder {Top Level} additional_dus { work.%s } %s %s %s %s\n' % (i, tbName, project_sim_p_defaults, project_sim_p_search_libraries, project_sim_p_otherargs, project_sim_p_optimization))
            # Write [vsim] section
            fp.write('[vsim]\n')
            fp.write('RunLength = 0 ps\n')
            fp.write('resolution = 1fs\n')
            # According to 'verror 3601' the default is 5000, typically 100 is
            # enough, but e.g. the ip_stratixiv_phy_xaui_0 requires more.
            fp.write('IterationLimit = 5000\n')
            fp.write('DefaultRadix = hexadecimal\n')
            fp.write('NumericStdNoWarnings = 1\n')
            fp.write('StdArithNoWarnings = 1\n')
def create_quartus_ip_lib_file(self, lib_names=None):
    """Create the Quartus IP file <hdl_lib_name>_lib.qip for all HDL libraries.

    NOTE(review): this appears to be a duplicate definition of
    create_quartus_ip_lib_file (see the earlier, unformatted version in this
    file); in Python the later definition wins — confirm which one is intended.

    The <hdl_lib_name>.qip file contains the list of files that are given by
    the synth_files key and the quartus_*_file keys.

    Note:
    . Use post fix '_lib' in QIP file name *_lib.qip to avoid potential
      conflict with *.qip that may come with the IP.
    . The HDL library *_lib.qip files contain all files that are listed by
      the synth_files key. Hence when these qip files are included then the
      Quartus project will analyse all files even if their entity is not
      instantiated in the design. This is fine, it is unnecessary to parse
      the hierarchy of the synth_top_level_entity VHDL file to find and
      include only the source files that are actually used.

    Arguments:
    - lib_names : one or more HDL libraries; defaults to all known libraries.
    """
    if lib_names == None:
        lib_names = self.lib_names
    lib_dicts = self.libs.get_dicts('hdl_lib_name', values=lib_names)
    for lib_dict in cm.listify(lib_dicts):
        # Open qip: one <lib_name>_lib.qip per HDL library in its synth build dir
        lib_name = lib_dict['hdl_lib_name']
        lib_path = self.libs.get_filePath(lib_dict)
        qip_name = lib_name + '_lib.qip'
        qip_path = self.get_lib_build_dirs('synth', lib_dicts=lib_dict)
        cm.mkdir(qip_path)
        qipPathName = cm.expand_file_path_name(qip_name, qip_path)
        with open(qipPathName, 'w') as fp:
            # synth_files: map the file extension to the Quartus file type
            if 'synth_files' in lib_dict:
                fp.write('# synth_files\n')
                synth_files = lib_dict['synth_files'].split()
                for fn in synth_files:
                    filePathName = cm.expand_file_path_name(fn, lib_path)
                    file_ext = fn.split('.')[-1]
                    if file_ext == 'vhd' or file_ext == 'vhdl':
                        file_type = 'VHDL_FILE'
                    elif file_ext == 'v':
                        file_type = 'VERILOG_FILE'
                    else:
                        print '\nERROR - Undefined file extension in synth_files:', fn
                        sys.exit()
                    fp.write('set_global_assignment -name %s %s -library %s\n' % (file_type, filePathName, lib_name + '_lib'))
            # quartus_vhdl_files: extra VHDL files only needed by Quartus
            if 'quartus_vhdl_files' in lib_dict:
                fp.write('\n')
                fp.write('# quartus_vhdl_files\n')
                quartus_vhdl_files = lib_dict['quartus_vhdl_files'].split()
                for fn in quartus_vhdl_files:
                    filePathName = cm.expand_file_path_name(fn, lib_path)
                    file_ext = fn.split('.')[-1]
                    if file_ext == 'vhd' or file_ext == 'vhdl':
                        file_type = 'VHDL_FILE'
                    elif file_ext == 'v':
                        file_type = 'VERILOG_FILE'
                    else:
                        print '\nERROR - Undefined file extension in quartus_vhdl_files:', fn
                        sys.exit()
                    # NOTE(review): file_type is computed above but the write below
                    # hardcodes VHDL_FILE — confirm whether Verilog entries are
                    # really meant to be emitted as VHDL_FILE.
                    fp.write('set_global_assignment -name VHDL_FILE %s -library %s\n' % (filePathName, lib_name + '_lib'))
            # quartus_qip_files: IP qip files delivered with the IP itself
            if 'quartus_qip_files' in lib_dict:
                fp.write('\n')
                fp.write('# quartus_qip_files\n')
                quartus_qip_files = lib_dict['quartus_qip_files'].split()
                for fn in quartus_qip_files:
                    filePathName = cm.expand_file_path_name(fn, lib_path)
                    fp.write('set_global_assignment -name QIP_FILE %s\n' % filePathName)
            # quartus_tcl_files: Tcl scripts sourced by the Quartus project
            if 'quartus_tcl_files' in lib_dict:
                fp.write('\n')
                fp.write('# quartus_tcl_files\n')
                quartus_tcl_files = lib_dict['quartus_tcl_files'].split()
                for fn in quartus_tcl_files:
                    filePathName = cm.expand_file_path_name(fn, lib_path)
                    fp.write('set_global_assignment -name SOURCE_TCL_SCRIPT_FILE %s\n' % filePathName)
            # quartus_sdc_files: timing constraint files
            if 'quartus_sdc_files' in lib_dict:
                fp.write('\n')
                fp.write('# quartus_sdc_files\n')
                quartus_sdc_files = lib_dict['quartus_sdc_files'].split()
                for fn in quartus_sdc_files:
                    filePathName = cm.expand_file_path_name(fn, lib_path)
                    fp.write('set_global_assignment -name SDC_FILE %s\n' % filePathName)
def create_modelsim_project_file(self, lib_names=None):
    """Create the Modelsim project file for all technology libraries and RTL HDL libraries.

    NOTE(review): this appears to be an older, Python-2 style duplicate of
    create_modelsim_project_file (cf. the other version in this file, which
    uses .items(), KeyError guards and self.simulation_configuration()); in
    Python the later definition wins — confirm which one is intended.

    Arguments:
    - lib_names : one or more HDL libraries; defaults to all known libraries.
    """
    if lib_names==None:
        lib_names=self.lib_names
    lib_dicts = self.libs.get_dicts('hdl_lib_name', lib_names)
    for lib_dict in cm.listify(lib_dicts):
        # Open mpf
        lib_name = lib_dict['hdl_lib_name']
        mpf_name = lib_name + '.mpf'
        mpf_path = self.get_lib_build_dirs('sim', lib_dicts=lib_dict)
        cm.mkdir(mpf_path)
        mpfPathName = os.path.join(mpf_path, mpf_name)
        with open(mpfPathName, 'w') as fp:
            # Write [Library] section for all used libraries
            fp.write('[Library]\n')
            # . map used vendor technology libs to their target directory
            for technologyName in cm.listify(self.technologyNames):
                tech_dict = self.read_hdl_libraries_technology_file(technologyName)
                for lib_clause, lib_work in tech_dict.iteritems():
                    fp.write('%s = %s\n' % (lib_clause, lib_work))
            # . not used vendor technology libs are not compiled but are mapped
            #   to work to avoid compile error when mentioned in the LIBRARY clause
            for tech_dict in self.removed_dicts:
                fp.write('%s = work\n' % tech_dict['hdl_library_clause_name'])
            # . all used libs for this lib_name
            use_lib_names = self.derive_all_use_libs('sim', lib_name)
            use_lib_dicts = self.libs.get_dicts('hdl_lib_name', use_lib_names)
            use_lib_build_sim_dirs = self.get_lib_build_dirs('sim', lib_dicts=use_lib_dicts)
            use_lib_clause_names = self.libs.get_key_values('hdl_library_clause_name', use_lib_dicts)
            for lib_clause, lib_dir in zip(cm.listify(use_lib_clause_names), cm.listify(use_lib_build_sim_dirs)):
                lib_work = os.path.join(lib_dir, 'work')
                fp.write('%s = %s\n' % (lib_clause, lib_work))
            # . work
            fp.write('work = work\n')
            # . others modelsim default libs
            model_tech_dir = os.path.expandvars(self.tool_dict['model_tech_dir'])
            fp.write('others = %s\n' % os.path.join(model_tech_dir, 'modelsim.ini'))
            # Write [Project] section for all used libraries
            fp.write('[Project]\n')
            fp.write('Project_Version = 6\n')  # must be >= 6 to fit all
            fp.write('Project_DefaultLib = work\n')
            fp.write('Project_SortMethod = unused\n')
            # - project files
            # NOTE(review): unlike the other version of this method, missing
            # 'synth_files' / 'test_bench_files' keys raise KeyError here.
            synth_files = lib_dict['synth_files'].split()
            test_bench_files = lib_dict['test_bench_files'].split()
            project_files = synth_files + test_bench_files
            if 'modelsim_compile_ip_files' in lib_dict:
                compile_ip_files = lib_dict['modelsim_compile_ip_files'].split()
                project_files += compile_ip_files
            fp.write('Project_Files_Count = %d\n' % len(project_files))
            lib_path = self.libs.get_filePath(lib_dict)
            for i, fn in enumerate(project_files):
                filePathName = cm.expand_file_path_name(fn, lib_path)
                fp.write('Project_File_%d = %s\n' % (i, filePathName))
            # Per-file property strings in Modelsim mpf format; the hdl defaults
            # are combined with a file-type-specific suffix per project file.
            project_file_p_defaults_hdl = 'vhdl_novitalcheck 0 group_id 0 cover_nofec 0 vhdl_nodebug 0 vhdl_1164 1 vhdl_noload 0 vhdl_synth 0 vhdl_enable0In 0 vlog_1995compat 0 last_compile 0 vhdl_disableopt 0 cover_excludedefault 0 vhdl_vital 0 vhdl_warn1 1 vhdl_warn2 1 vhdl_explicit 1 vhdl_showsource 0 cover_covercells 0 vhdl_0InOptions {} vhdl_warn3 1 vlog_vopt {} cover_optlevel 3 voptflow 1 vhdl_options {} vhdl_warn4 1 toggle - ood 0 vhdl_warn5 1 cover_noshort 0 compile_to work cover_nosub 0 dont_compile 0 vhdl_use93 2002 cover_stmt 1'
            project_file_p_defaults_vhdl = 'file_type vhdl'
            project_file_p_defaults_verilog = 'file_type verilog'
            project_file_p_defaults_tcl = 'last_compile 0 compile_order -1 file_type tcl group_id 0 dont_compile 1 ood 1'
            project_folders = []
            # offset keeps Project_File_P_%d / compile_order numbering continuous
            # across the synth, test bench and compile-ip sections.
            offset = 0
            nof_synth_files = len(synth_files)
            if nof_synth_files>0:
                project_folders.append('synth_files')
                for i in range(nof_synth_files):
                    # Add file type specific settings
                    file_ext = synth_files[i].split('.')[-1]
                    if file_ext=='vhd' or file_ext=='vhdl':
                        project_file_p_defaults_file_specific = project_file_p_defaults_vhdl
                    elif file_ext=='v':
                        project_file_p_defaults_file_specific = project_file_p_defaults_verilog
                    else:
                        print '\nERROR - Undefined file extension in synth_files:', synth_files[i]
                        sys.exit()
                    fp.write('Project_File_P_%d = folder %s compile_order %d %s\n' % (offset+i, project_folders[-1], offset+i, project_file_p_defaults_hdl+' '+project_file_p_defaults_file_specific))
            offset = nof_synth_files
            nof_test_bench_files = len(test_bench_files)
            if nof_test_bench_files>0:
                project_folders.append('test_bench_files')
                for i in range(nof_test_bench_files):
                    # Add file type specific settings
                    file_ext = test_bench_files[i].split('.')[-1]
                    if file_ext=='vhd' or file_ext=='vho' or file_ext=='vhdl':
                        project_file_p_defaults_file_specific = project_file_p_defaults_vhdl
                    elif file_ext=='v':
                        project_file_p_defaults_file_specific = project_file_p_defaults_verilog
                    else:
                        print '\nERROR - Undefined file extension in test_bench_files:', test_bench_files[i]
                        sys.exit()
                    fp.write('Project_File_P_%d = folder %s compile_order %d %s\n' % (offset+i, project_folders[-1], offset+i, project_file_p_defaults_hdl+' '+project_file_p_defaults_file_specific))
            offset += nof_test_bench_files
            if 'modelsim_compile_ip_files' in lib_dict:
                nof_compile_ip_files = len(compile_ip_files)
                if nof_compile_ip_files>0:
                    project_folders.append('compile_ip_files')
                    for i in range(nof_compile_ip_files):
                        fp.write('Project_File_P_%d = folder %s compile_order %d %s\n' % (offset+i, project_folders[-1], offset+i, project_file_p_defaults_tcl))
                offset += nof_compile_ip_files
            # - project folders
            fp.write('Project_Folder_Count = %d\n' % len(project_folders))
            for i, fd in enumerate(project_folders):
                fp.write('Project_Folder_%d = %s\n' % (i, fd))
                fp.write('Project_Folder_P_%d = folder {Top Level}\n' % i)
            # - simulation configurations: one per test bench file
            fp.write('Project_Sim_Count = %d\n' % len(test_bench_files))
            project_sim_p_defaults = 'Generics {} timing default -std_output {} -nopsl 0 +notimingchecks 0 selected_du {} -hazards 0 -sdf {} ok 1 -0in 0 -nosva 0 +pulse_r {} -absentisempty 0 -multisource_delay {} +pulse_e {} vopt_env 1 -coverage 0 -sdfnoerror 0 +plusarg {} -vital2.2b 0 -t default -memprof 0 is_vopt_flow 0 -noglitch 0 -nofileshare 0 -wlf {} -assertdebug 0 +no_pulse_msg 0 -0in_options {} -assertfile {} -sdfnowarn 0 -Lf {} -std_input {}'
            # Build '-L {lib1 lib2 ... }' from the optional modelsim_search_libraries key
            project_sim_p_search_libraries = '-L {}'
            if 'modelsim_search_libraries' in self.tool_dict:
                project_sim_p_search_libraries = '-L {'
                for sl in self.tool_dict['modelsim_search_libraries'].split():
                    project_sim_p_search_libraries += sl
                    project_sim_p_search_libraries += ' '
                project_sim_p_search_libraries += '}'
            # NOTE(review): consecutive reassignments — only the last value of
            # project_sim_p_otherargs / project_sim_p_optimization takes effect;
            # the earlier ones look like kept-around alternatives.
            project_sim_p_otherargs = 'OtherArgs {}'
            project_sim_p_otherargs = 'OtherArgs {+nowarn8684 +nowarn8683 -quiet}'
            project_sim_p_otherargs = 'OtherArgs {+nowarn8684 +nowarn8683}'
            project_sim_p_otherargs = 'OtherArgs {+nowarn8684 +nowarn8683 +nowarnTFMPC +nowarnPCDPC}'  # nowarn on verilog IP connection mismatch warnings
            project_sim_p_optimization = 'is_vopt_opt_used 2'  # = when 'Enable optimization' is not selected in GUI
            project_sim_p_optimization = 'is_vopt_opt_used 1 voptargs {OtherVoptArgs {} timing default VoptOutFile {} -vopt_keep_delta 0 -0in 0 -fvopt {} VoptOptimize:method 1 -vopt_00 2 +vopt_notimingcheck 0 -Lfvopt {} VoptOptimize:list .vopt_opt.nb.canvas.notebook.cs.page1.cs.g.spec.listbox -Lvopt {} +vopt_acc {} VoptOptimize .vopt_opt.nb.canvas.notebook.cs.page1.cs -vopt_hazards 0 VoptOptimize:Buttons .vopt_opt.nb.canvas.notebook.cs.page1.cs.g.spec.bf 0InOptionsWgt .vopt_opt.nb.canvas.notebook.cs.page3.cs.zf.ze -0in_options {}}'  # = when 'Enable optimization' is selected in GUI for full visibility
            for i, fn in enumerate(test_bench_files):
                fName = os.path.basename(fn)
                tbName = os.path.splitext(fName)[0]
                fp.write('Project_Sim_%d = %s\n' % (i, tbName))
            for i, fn in enumerate(test_bench_files):
                fName = os.path.basename(fn)
                tbName = os.path.splitext(fName)[0]
                fp.write('Project_Sim_P_%d = folder {Top Level} additional_dus work.%s %s %s %s %s\n' % (i, tbName, project_sim_p_defaults, project_sim_p_search_libraries, project_sim_p_otherargs, project_sim_p_optimization))
            # Write [vsim] section
            fp.write('[vsim]\n')
            fp.write('RunLength = 0 ps\n')
            fp.write('resolution = 1fs\n')
            # According to 'verror 3601' the default is 5000, typically 100 is
            # enough, but e.g. the ip_stratixiv_phy_xaui_0 requires more.
            fp.write('IterationLimit = 5000\n')
            fp.write('DefaultRadix = decimal\n')
if hdl_args.verbosity>=2: print('#') print('# VivadoConfig:') print('#') print('') print("HDL library paths that are found in {}:".format(vsyn.libRootDir)) for p in vsyn.libs.filePaths: print(' ', p) if hdl_args.verbosity>=1: print('') print("HDL libraries with a top level entity for synthesis that are found in {}:".format(vsyn.libRootDir)) # print' %-40s' % 'HDL library', ': Top level entity' print(" {:40} {}".format('HDL library', ': Top level entity')) syn_dicts = vsyn.libs.get_dicts(key='synth_top_level_entity') for d in cm.listify(syn_dicts): if d['synth_top_level_entity']=='': # print ' %-40s' % d['hdl_lib_name'], ':', d['hdl_lib_name'] print(" {:40} : {}".format(d['hdl_lib_name'], d['hdl_lib_name'])) else: # print ' %-40s' % d['hdl_lib_name'], ':', d['synth_top_level_entity'] print(" {:40} : {}".format(d['hdl_lib_name'], d['synth_top_level_entity'])) print('') print("Copy Vivado directories and files from HDL library source tree to build_dir for all HDL libraries that are found in ${}.".format(vsyn.libRootDir)) if(len(hdl_args.lib_names) == 0): arg_design = None else: arg_design = hdl_args.lib_names
if lib_names == []: lib_names = msim.lib_names # If no lib_names are provided then use all available HDL libraries lib_dicts = msim.libs.get_dicts(key='hdl_lib_name', values=lib_names) # Get HDL libraries dicts test_dicts = msim.libs.get_dicts( key='regression_test_vhdl', values=None, dicts=lib_dicts) # Get HDL libraries dicts with 'regression_test_vhdl' key if hdl_args.verbosity >= 1: print('') print( "List of HDL libraries with 'regression_test_vhdl' key and the specified VHDL test benches:" ) nof_lib = 0 total_nof_tb = 0 for lib_dict in cm.listify(test_dicts): nof_lib += 1 lib_name = lib_dict['hdl_lib_name'] test_bench_files = lib_dict['regression_test_vhdl'].split() if len(test_bench_files) == 0: print('%-20s : -' % lib_name) else: for tbf in test_bench_files: total_nof_tb += 1 print('%-20s : %s' % (lib_name, tbf)) print('') print( 'The regression test contains %d HDL libraries and in total %d test benches.' % (nof_lib, total_nof_tb)) print('')
def create_vivado_ip_lib_file(self, lib_name):  #lib_names=None):
    """Create a <lib_name>_load_files.tcl loader script per used HDL library and add it to the build script.

    For the given top-level lib_name, derive the synthesis library order and
    for every library in that order write a Tcl script that adds its source,
    test bench, IP repository, block design, ELF, constraint (XDC), XCI and
    generation Tcl files to the Vivado project. Each loader script is then
    sourced from self.buildScriptPathName.

    Arguments:
    - lib_name : name of the top level HDL library (one library, not a list).
    """
    lib_names = self.lib_names
    lib_dicts = self.libs.get_dicts('hdl_lib_name', values=lib_names)
    toplevel_lib_name = lib_name
    self.create_project_build_script(lib_name)
    use_lib_names = self.derive_lib_order('synth', lib_name)
    print("USE LIBS",use_lib_names)
    # libraries used for top level design
    lib_dicts = self.libs.get_dicts('hdl_lib_name', values=use_lib_names)
    # Default: the top level entity has the same name as the top level library;
    # may be overridden below by a 'vivado_top_level_entity' key.
    synth_top_level_entity=toplevel_lib_name
    for lib_dict in cm.listify(lib_dicts):
        # Open qip
        lib_name = lib_dict['hdl_lib_name']
        print('get_filepath', lib_name)
        lib_path = self.libs.get_filePath(lib_dict)
        loader_script_name = lib_name + '_load_files.tcl'
        loader_script_path = self.get_lib_build_dirs('synth', lib_dicts=lib_dict)
        cm.mkdir(loader_script_path)
        loaderScriptPathName = cm.expand_file_path_name_posix(loader_script_name, loader_script_path)
        logger.info("loaderscriptPathName = {}".format(loaderScriptPathName))
        for key, value in lib_dict.items():
            if value:
                logger.debug("\t%s :\t%s",key, value)
        with open(loaderScriptPathName, 'w') as fp:
            # synth_files: add sources and set their VHDL library
            if 'synth_files' in lib_dict:
                fp.write('# synth_files\n')
                synth_files = lib_dict['synth_files'].split()
                if any(synth_files):
                    fp.write('add_files -fileset sources_1 [glob \\\n')
                    [fp.write('%s \\\n' % cm.expand_file_path_name_posix(fn, lib_path)) for fn in synth_files]
                    fp.write(']\n')
                    fp.write('set_property library %s [get_files {\\\n' % (lib_name + '_lib'))
                    [fp.write('%s \\\n' % cm.expand_file_path_name_posix(fn, lib_path)) for fn in synth_files]
                    fp.write('}]\n')
            # test_bench_files: add simulation sources to fileset sim_1
            if 'test_bench_files' in lib_dict:
                fp.write('# test_bench_files\n')
                test_bench_files = lib_dict['test_bench_files'].split()
                if any(test_bench_files):
                    fp.write('add_files -fileset sim_1 [glob \\\n')
                    [fp.write('%s \\\n' % cm.expand_file_path_name_posix(fn, lib_path)) for fn in test_bench_files]
                    fp.write(']\n')
                    fp.write('set_property library %s [get_files {\\\n' % (lib_name + '_lib'))
                    [fp.write('%s \\\n' % cm.expand_file_path_name_posix(fn, lib_path)) for fn in test_bench_files]
                    fp.write('}]\n')
                    fp.write('set_property -name {xsim.compile.xvlog.more_options} -value {-d SIM_SPEED_UP} -objects [get_filesets sim_1]\n')
                    fp.write('set_property top_lib xil_defaultlib [get_filesets sim_1]\n')
            # vivado_ip_repo: register user IP repositories with the project
            if 'vivado_ip_repo' in lib_dict:
                fp.write('# vivado_ip_repo\n')
                iprepo_files = lib_dict['vivado_ip_repo'].split()
                for fn in iprepo_files:
                    filePathName = cm.expand_file_path_name_posix(fn, lib_path)
                    fp.write('set repo_dir $proj_dir/ip_repo\n')  # Tcl variable $proj_dir is still available since create_project
                    fp.write('file mkdir $repo_dir\n')
                    fp.write('set_property "ip_repo_paths" $repo_dir [get_filesets sources_1]\n')
                    fp.write('update_ip_catalog -rebuild\n')
                    fp.write('update_ip_catalog -add_ip %s -repo_path $repo_dir\n' % (filePathName))
                    fp.write('update_ip_catalog -rebuild\n')
            # vivado_bd_files: import block designs into sources_1
            if 'vivado_bd_files' in lib_dict:
                fp.write('# vivado_bd_files\n')
                bd_files = lib_dict['vivado_bd_files'].split()
                for fn in bd_files:
                    filePathName = cm.expand_file_path_name_posix(fn, lib_path)
                    fp.write('import_files -force -fileset sources_1 [glob %s]\n' % filePathName)  # paramterize constraints set
                    #fp.write('add_files -fileset sources_1 [ glob %s ]\n' % filePathName )
            # vivado_elf_files: import ELF executables (e.g. for embedded processors)
            if 'vivado_elf_files' in lib_dict:
                fp.write('# vivado_elf_files\n')
                elf_files = lib_dict['vivado_elf_files'].split()
                for fn in elf_files:
                    filePathName = cm.expand_file_path_name_posix(fn, lib_path)
                    fp.write('import_files -force -fileset sources_1 [glob %s]\n' % filePathName)  # paramterize constraints set
                    #fp.write('add_files -fileset sources_1 [ glob %s ]\n' % filePathName )
            # vivado_xdc_files: add constraint files to constrs_1
            if 'vivado_xdc_files' in lib_dict:
                fp.write('# vivado_xdc_files\n')
                xdc_files = lib_dict['vivado_xdc_files'].split()
                for fn in xdc_files:
                    # filePathName = filePathName.replace('/cygdrive/C','c:')
                    filePathName = cm.expand_file_path_name_posix(fn, lib_path)
                    #fp.write('import_files -force -fileset [get_filesets constrs_1] %s\n' % filePathName)  # paramterize constraints set
                    fp.write('add_files -fileset constrs_1 [ glob %s ]\n' % filePathName)  # paramterize constraints set
                    # For the top level xdc files set processing order of xdc to LATE
                    if lib_name == toplevel_lib_name:
                        fp.write('set_property PROCESSING_ORDER LATE [get_files %s]\n' % filePathName)
            # vivado_xci_files: import packaged IP and create an IP synthesis run
            if 'vivado_xci_files' in lib_dict:
                fp.write('# vivado_xci_files: Importing IP to the project\n')
                xci_files = lib_dict['vivado_xci_files'].split()
                for fn in xci_files:
                    filePathName = cm.expand_file_path_name_posix(fn, lib_path)
                    # IP name is the path component before the '.xci' extension
                    ipName = re.split('[./]',fn)[-2]
                    print("ipName = {}".format(ipName))
                    fp.write('import_ip -files %s -name %s\n' % (filePathName, ipName))  # paramterize constraints set
                    fp.write('create_ip_run -force [get_ips %s]\n' % (ipName))
                    #create_ip_run -force [get_files my_core_1.xci]
            # vivado_top_level_entity: override the default top level entity name
            if 'vivado_top_level_entity' in lib_dict:
                synth_top_level_entity = lib_dict['vivado_top_level_entity'].split()[0]
                print ("set toplevel entity to ", synth_top_level_entity);
            # vivado_tcl_files: non-board Tcl scripts for IP generation
            if 'vivado_tcl_files' in lib_dict:
                # can make this more flexible by searching recursively by extension
                fp.write('# tcl scripts for ip generation\n')
                tcl_files = lib_dict['vivado_tcl_files'].split()
                for fn in tcl_files:
                    # board-level Tcl files are handled by the project file instead
                    if '_board.tcl' not in fn:
                        filePathName = cm.expand_file_path_name_posix(fn, lib_path)
                        fp.write('source %s\n' % filePathName)
                        logger.debug('lib %s source %s\n', lib_name, filePathName)
        # Append this library's loader script to the overall build script.
        # NOTE(review): the 'set_property top' lines are re-written for every
        # library in the loop; only the last occurrence is effective in Tcl —
        # confirm whether this append belongs inside the loop.
        f = open(self.buildScriptPathName, 'a+')
        # if lib appears in
        print("source [pwd]/{}/{}\n".format(lib_name, loader_script_name))
        f.write('source [pwd]/%s/%s\n' % (lib_name, loader_script_name))
        f.write('set_property top %s [current_fileset]\n' %synth_top_level_entity)
        f.write('set_property top %s [get_filesets sim_1]\n' % ('tb_' + toplevel_lib_name))
        # self.implement_project(f);
        f.close()
if arg_verbosity>=2: print '#' print '# QuartusConfig:' print '#' print '' print 'HDL library paths that are found in $%s:' % qsyn.libRootDir for p in qsyn.libs.filePaths: print ' ', p if arg_verbosity>=1: print '' print 'HDL libraries with a top level entity for synthesis that are found in $%s:' % qsyn.libRootDir print ' %-40s' % 'HDL library', ': Top level entity' syn_dicts = qsyn.libs.get_dicts(key='synth_top_level_entity') for d in cm.listify(syn_dicts): if d['synth_top_level_entity']=='': print ' %-40s' % d['hdl_lib_name'], ':', d['hdl_lib_name'] else: print ' %-40s' % d['hdl_lib_name'], ':', d['synth_top_level_entity'] print '' print 'Create Quartus IP library qip files for all HDL libraries in $%s.' % qsyn.libRootDir qsyn.create_quartus_ip_lib_file() print '' print 'Copy Quartus directories and files from HDL library source tree to build_dir for all HDL libraries that are found in $%s.' % qsyn.libRootDir qsyn.copy_files('synth') print '' print 'Create Quartus project files (QPF) for technology %s and all HDL libraries with a top level entity for synthesis that are found in $%s.' % (qsyn.technologyNames, qsyn.libRootDir)