def _find_provider_file(self, req, v_file, fset):
    import os
    assert isinstance(v_file, VerilogFile)
    assert isinstance(fset, list)
    sff = SourceFileFactory()
    # TODO: Can this be done elsewhere?
    if global_mod.top_module.sim_tool == "iverilog":
        for f in fset:
            if f.rel_path() == os.path.relpath(req):
                return f
        return sff.new(req, module=None)
    vf_dirname = v_file.dirname
    h_file = os.path.join(vf_dirname, req)
    if os.path.exists(h_file) and not os.path.isdir(h_file):
        return sff.new(h_file, v_file.module)
    inc_dirs = self._parse_vlog_opt(v_file.vlog_opt)
    for dir_ in inc_dirs:
        dir_ = os.path.join(os.getcwd(), dir_)
        if not os.path.exists(dir_) or not os.path.isdir(dir_):
            logging.warning("Include path " + dir_ + " doesn't exist")
            continue
        h_file = os.path.join(dir_, req)
        if os.path.exists(h_file) and not os.path.isdir(h_file):
            return sff.new(h_file, module=v_file.module)
    return None
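# The lookup order implemented above (the including file's own directory first,
# then each include directory taken relative to the current working directory)
# is shown below as a minimal standalone sketch. resolve_include() is a
# hypothetical helper written for illustration only; it is not part of HdlMake.
import os


def resolve_include(req, including_dir, include_dirs):
    """Return the first existing file path that provides `req`, or None."""
    candidates = [os.path.join(including_dir, req)]
    candidates += [os.path.join(os.getcwd(), inc, req) for inc in include_dirs]
    for candidate in candidates:
        if os.path.exists(candidate) and not os.path.isdir(candidate):
            return candidate
    return None


# Example: resolve_include("timescale.vh", "rtl/core", ["rtl/include", "sim"])
# returns "rtl/core/timescale.vh" if it exists, otherwise tries the include dirs.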
def generate_remote_synthesis_makefile(self):
    if self.connection.ssh_user is None or self.connection.ssh_server is None:
        p.warning("Connection data is not given. "
                  "Accessing environment variables in the makefile")
    p.info("Generating makefile for remote synthesis.")
    top_mod = self.modules_pool.get_top_module()
    if not os.path.exists(top_mod.fetchto):
        p.warning("There are no modules fetched. Are you sure it's correct?")
    ise_path = self.__figure_out_ise_path()
    tcl = self.__search_tcl_file()
    if tcl is None:
        self.__generate_tcl()
        tcl = "run.tcl"
    files = self.modules_pool.build_very_global_file_list()
    sff = SourceFileFactory()
    files.add(sff.new(tcl))
    files.add(sff.new(top_mod.syn_project))
    self.make_writer.generate_remote_synthesis_makefile(
        files=files, name=top_mod.syn_name, cwd=os.getcwd(),
        user=self.connection.ssh_user, server=self.connection.ssh_server,
        ise_path=ise_path)
def _handle_ise_project(self, update=False):
    top_mod = self.modules_pool.get_top_module()
    fileset = self.modules_pool.build_file_set()
    flist = dep_solver.make_dependency_sorted_list(fileset)
    assert isinstance(flist, list)
    prj = ISEProject(ise=self.env["ise_version"],
                     top_mod=self.modules_pool.get_top_module())
    prj.add_files(flist)
    sff = SourceFileFactory()
    logging.debug(top_mod.vlog_opt)
    if self.options.generate_project_vhd:
        self._write_project_vhd()
        prj.add_files([sff.new(path=path.rel2abs("project.vhd"),
                               module=self.modules_pool.get_module_by_path("."))])
    prj.add_libs(fileset.get_libs())
    if update is True:
        try:
            prj.load_xml(top_mod.syn_project)
        except Exception:
            logging.error("Error while reading the project file.\n"
                          "Are you sure that syn_project indicates a correct ISE project file?")
            raise
    else:
        prj.add_initial_properties()
    logging.info("Writing down .xise project file")
    prj.emit_xml(top_mod.syn_project)
def build_very_global_file_list(self):
    from srcfile import SourceFileFactory, VerilogFile
    sff = SourceFileFactory()
    files = self.build_global_file_list()
    extra_verilog_files = set()
    manifest_verilog_files = files.filter(VerilogFile)
    queue = manifest_verilog_files
    while len(queue) > 0:
        verilog_file = queue.pop()
        for f_required in verilog_file.dep_requires:
            new_vl = sff.new(os.path.join(verilog_file.dirname, f_required))
            queue.append(new_vl)
            if f_required not in extra_verilog_files and \
                    f_required not in manifest_verilog_files:
                extra_verilog_files.add(new_vl)
    p.vprint("Extra verilog files, not listed in manifests:")
    for extra_vl in extra_verilog_files:
        p.vprint(str(extra_vl))
    for extra_vl in extra_verilog_files:
        files.add(extra_vl)
    return files
def _generate_synthesis_project(self, tool_object):
    tool_info = tool_object.get_keys()
    if sys.platform == 'cygwin':
        bin_name = tool_info['windows_bin']
    else:
        bin_name = tool_info['linux_bin']
    path_key = tool_info['id'] + '_path'
    version_key = tool_info['id'] + '_version'
    name = tool_info['name']
    id_value = tool_info['id']
    ext_value = tool_info['project_ext']
    env = self.env
    if not self.options.force:
        if self.env[path_key] is None:
            logging.error("Can't generate the " + name + " project. " + name + " not found.")
            quit()
        if not env[version_key]:
            logging.error(name + " version cannot be deduced. Cannot generate " + name +
                          " project file properly. Please use syn_" + id_value +
                          "_version in the manifest or set")
            sys.exit("Exiting")
    logging.info("Generating project for " + name + " v. %s" % env[version_key])
    if os.path.exists(self.top_module.syn_project) or \
            os.path.exists(self.top_module.syn_project + "." + ext_value):
        logging.info("Existing project detected: updating...")
        update = True
    else:
        logging.info("No previous project: creating a new one...")
        update = False
    top_mod = self.modules_pool.get_top_module()
    fileset = self.modules_pool.build_file_set()
    non_dependable = fileset.inversed_filter(DependableFile)
    fileset.add(non_dependable)
    sff = SourceFileFactory()
    if self.options.generate_project_vhd:
        self._write_project_vhd(id_value, env[version_key])
        fileset.add([sff.new(path=path.rel2abs("project.vhd"),
                             module=self.modules_pool.get_module_by_path("."))])
    tool_object.generate_synthesis_project(update=update,
                                           tool_version=self.env[version_key],
                                           top_mod=self.modules_pool.get_top_module(),
                                           fileset=fileset)
    logging.info(name + " project file generated.")
def __create_file_list_from_paths(self, paths):
    sff = SourceFileFactory()
    srcs = SourceFileSet()
    for p in paths:
        if os.path.isdir(p):
            dir_ = os.listdir(p)
            for f_dir in dir_:
                f_dir = os.path.join(self.path, p, f_dir)
                if not os.path.isdir(f_dir):
                    srcs.add(sff.new(f_dir, self.library, self.vcom_opt,
                                     self.vlog_opt, self.include_dirs))
        else:
            srcs.add(sff.new(p, self.library, self.vcom_opt,
                             self.vlog_opt, self.include_dirs))
    return srcs
def parse(self, dep_file):
    if dep_file.is_parsed:
        return
    logging.info("Parsing %s" % dep_file.path)
    # assert isinstance(dep_file, DepFile), print("unexpected type: " + str(type(dep_file)))
    buf = self.preprocessor.preprocess(dep_file)
    self.preprocessed = buf[:]

    # add includes as dependencies
    try:
        includes = self.preprocessor.vpp_filedeps[dep_file.path + dep_file.library]
        for f in includes:
            dep_file.depends_on.add(SourceFileFactory().new(path=f, module=dep_file.module))
        logging.debug("%s has %d includes." % (str(dep_file), len(includes)))
    except KeyError:
        logging.debug(str(dep_file) + " has no includes.")

    # Look for packages used inside the file.
    # This may generate false dependencies, as a package in SystemVerilog can be used by:
    #   import my_package::*;
    # or directly:
    #   logic var = my_package::MY_CONST;
    # In the same way, constants and others can be imported directly from other modules:
    #   logic var = my_other_module::MY_CONST;
    # and HdlMake will create a dependency anyway, marking my_other_module as a requested package.
    import_pattern = re.compile(r"(\w+) *::(\w+|\*)")

    def do_imports(s):
        logging.debug("file %s imports/uses %s.%s package" %
                      (dep_file.path, dep_file.library, s.group(1)))
        dep_file.add_relation(DepRelation("%s.%s" % (dep_file.library, s.group(1)),
                                          DepRelation.USE, DepRelation.PACKAGE))
    re.subn(import_pattern, do_imports, buf)

    # packages
    m_inside_package = re.compile(r"package\s+(\w+)\s*(?:\(.*?\))?\s*(.+?)endpackage",
                                  re.DOTALL | re.MULTILINE)

    def do_package(s):
        logging.debug("found package %s.%s" % (dep_file.library, s.group(1)))
        dep_file.add_relation(DepRelation("%s.%s" % (dep_file.library, s.group(1)),
                                          DepRelation.PROVIDE, DepRelation.PACKAGE))
    re.subn(m_inside_package, do_package, buf)

    # modules and instantiations
    m_inside_module = re.compile(r"(?:module|interface)\s+(\w+)\s*(?:\(.*?\))?\s*(.+?)(?:endmodule|endinterface)",
                                 re.DOTALL | re.MULTILINE)
    m_instantiation = re.compile(r"(?:\A|\s*)\s*(\w+)\s+(?:#\s*\(.*?\)\s*)?(\w+)\s*\(.*?\)\s*",
                                 re.DOTALL | re.MULTILINE)

    def do_module(s):
        logging.debug("found module %s.%s" % (dep_file.library, s.group(1)))
        dep_file.add_relation(DepRelation("%s.%s" % (dep_file.library, s.group(1)),
                                          DepRelation.PROVIDE, DepRelation.ENTITY))

        def do_inst(s):
            mod_name = s.group(1)
            if mod_name in self.reserved_words:
                return
            logging.debug("-> instantiates %s.%s as %s" %
                          (dep_file.library, s.group(1), s.group(2)))
            dep_file.add_relation(DepRelation("%s.%s" % (dep_file.library, s.group(1)),
                                              DepRelation.USE, DepRelation.ENTITY))
        re.subn(m_instantiation, do_inst, s.group(2))
    re.subn(m_inside_module, do_module, buf)

    dep_file.add_relation(DepRelation(dep_file.path, DepRelation.PROVIDE, DepRelation.INCLUDE))
    dep_file.is_parsed = True
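# A minimal, standalone sketch (not part of HdlMake) showing how the regular
# expressions used in parse() behave on a small SystemVerilog fragment.
# The package/module names below are made up for illustration only.
import re

_sample_sv = """
package my_pkg;
  localparam int WIDTH = 8;
endpackage

module top (input logic clk);
  import my_pkg::*;
  sub_block #(.W(my_pkg::WIDTH)) u_sub (.clk(clk));
endmodule
"""

_import_pattern = re.compile(r"(\w+) *::(\w+|\*)")
_package_pattern = re.compile(r"package\s+(\w+)\s*(?:\(.*?\))?\s*(.+?)endpackage",
                              re.DOTALL | re.MULTILINE)

# Both spellings of a package use are caught: [('my_pkg', '*'), ('my_pkg', 'WIDTH')]
print(_import_pattern.findall(_sample_sv))
# The package declaration itself is found: 'my_pkg'
print(_package_pattern.findall(_sample_sv)[0][0])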
def _generate_remote_synthesis_makefile(self, tool_object):
    logging.info("Generating makefile for remote synthesis.")
    top_mod = self.modules_pool.get_top_module()
    # tcl = self._search_tcl_file()
    # if tcl is None:
    #     self._generate_tcl()
    #     tcl = "run.tcl"
    files = self.modules_pool.build_global_file_set()
    sff = SourceFileFactory()
    files.add(sff.new("run.tcl", module=None))
    files.add(sff.new(top_mod.syn_project, module=None))
    tool_object.generate_remote_synthesis_makefile(files=files,
                                                   name=top_mod.syn_name,
                                                   cwd=os.getcwd(),
                                                   user=self.env["rsynth_user"],
                                                   server=self.env["rsynth_server"])
    logging.info("Remote synthesis makefile generated.")
def __find_provider_verilog_file(self, req, v_file):
    from srcfile import SourceFileFactory
    import os
    vf_dirname = v_file.dirname
    sff = SourceFileFactory()
    h_file = os.path.join(vf_dirname, req)
    if os.path.exists(h_file) and not os.path.isdir(h_file):
        return sff.new(h_file)
    inc_dirs = self.__parse_vlog_opt(v_file.vlog_opt)
    for dir_ in inc_dirs:
        dir_ = os.path.join(os.getcwd(), dir_)
        if not os.path.exists(dir_) or not os.path.isdir(dir_):
            p.warning("Include path " + dir_ + " doesn't exist")
            continue
        h_file = os.path.join(dir_, req)
        if os.path.exists(h_file) and not os.path.isdir(h_file):
            return sff.new(h_file)
    return None
def run(self):
    self._check_all_fetched_or_quit()
    logging.info("Generating makefile for remote synthesis.")
    top_mod = self.modules_pool.get_top_module()
    tcl = self._search_tcl_file()
    if tcl is None:
        self._generate_tcl()
        tcl = "run.tcl"
    files = self.modules_pool.build_global_file_set()
    sff = SourceFileFactory()
    files.add(sff.new(tcl, module=None))
    files.add(sff.new(top_mod.syn_project, module=None))
    global_mod.makefile_writer.generate_remote_synthesis_makefile(
        files=files, name=top_mod.syn_name, cwd=os.getcwd(),
        user=self.env["rsynth_user"], server=self.env["rsynth_server"])
    logging.info("Remote synthesis makefile generated.")
def run_remote_synthesis(self):
    ssh = self.connection
    cwd = os.getcwd()
    p.vprint("The program will be using ssh connection: " + str(ssh))
    if not ssh.is_good():
        p.error("SSH connection failure. Remote host doesn't respond.")
        quit()
    if not os.path.exists(self.top_module.fetchto):
        p.warning("There are no modules fetched. Are you sure it's correct?")
    files = self.modules_pool.build_very_global_file_list()
    # tcl = self.__search_tcl_file()
    # if tcl == None:
    self.__generate_tcl()
    tcl = "run.tcl"
    sff = SourceFileFactory()
    files.add(sff.new(tcl))
    files.add(sff.new(self.top_module.syn_project))
    dest_folder = ssh.transfer_files_forth(files,
                                           dest_folder=self.top_module.syn_name)
    syn_cmd = "cd " + dest_folder + cwd + " && xtclsh run.tcl"
    p.vprint("Launching synthesis on " + str(ssh) + ": " + syn_cmd)
    ret = ssh.system(syn_cmd)
    if ret == 1:
        p.error("Synthesis failed. Nothing will be transferred back")
        quit()
    cur_dir = os.path.basename(cwd)
    os.chdir("..")
    ssh.transfer_files_back(what=dest_folder + cwd, where=".")
    os.chdir(cur_dir)
def _create_file_list_from_paths(self, paths):
    from srcfile import SourceFileFactory, SourceFileSet
    sff = SourceFileFactory()
    srcs = SourceFileSet()
    for p in paths:
        if os.path.isdir(p):
            dir_ = os.listdir(p)
            for f_dir in dir_:
                f_dir = os.path.join(self.path, p, f_dir)
                if not os.path.isdir(f_dir):
                    srcs.add(sff.new(path=f_dir,
                                     module=self,
                                     library=self.library,
                                     vcom_opt=self.vcom_opt,
                                     vlog_opt=self.vlog_opt,
                                     include_dirs=self.include_dirs))
        else:
            srcs.add(sff.new(path=p,
                             module=self,
                             library=self.library,
                             vcom_opt=self.vcom_opt,
                             vlog_opt=self.vlog_opt,
                             include_dirs=self.include_dirs))
    return srcs