def __parse_vlog_opt(self, vlog_opt):
    """Extract include directories from a vlog option string.

    First tries the ModelSim form (``+incdir+<path>``); only when that
    yields nothing, falls back to the ISim form (``-i <path>``).
    Returns the list of include paths found.
    """
    import re

    def consume(pattern, text):
        # Repeatedly match `pattern` at the head of `text`, collecting
        # group(1) of every hit and advancing past it, until the
        # pattern no longer matches.
        found = []
        rest = text
        hit = pattern.match(rest)
        while hit:
            found.append(hit.group(1))
            rest = rest[hit.end():]
            hit = pattern.match(rest)
        return found

    # ModelSim include format (+incdir+<path>)
    modelsim_re = re.compile(".*?\+incdir\+([^ ]+)")
    # Either a normal (non-special) character or an escaped special
    # character repeated >= 1 times
    #unix_path = re.compile(r"([^\0 \!\$\`\&\*\(\)\+]|\\(:? |\!|\$|\`|\&|\*|\(|\)|\+))+")
    # -i <unix_path> one or more times
    isim_re = re.compile(r"\s*\-i\s*((\w|/|\\ |\.|\.\.)+)\s*")

    ret = consume(modelsim_re, vlog_opt)
    if not ret:
        # Try ISim include format (-i <path>)
        ret = consume(isim_re, vlog_opt)
    p.vprint("Include paths are: " + ' '.join(ret))
    return ret
def transfer_files_forth(self, files, dest_folder):
    """ Takes list of files and sends them to remote machine.
    Name of a directory, where files are put is returned """
    import msg as p
    import random
    import string
    import subprocess
    from pipes import quote
    self.__check()
    # create a new catalogue on remote machine
    if dest_folder is None:
        # random 8-character directory name
        dest_folder = "".join(random.choice(string.ascii_letters + string.digits)
                              for x in range(8))
    mkdir_cmd = "mkdir -p " + dest_folder
    p.vprint("Connecting to " + str(self) + " and creating directory "
             + dest_folder + ": " + mkdir_cmd)
    self.system(mkdir_cmd)
    # create a shell-quoted string with all local file names
    local_files_str = " ".join(quote(f.path) for f in files)
    rsync_cmd = ("rsync -Rav " + local_files_str + " " + self.ssh_user
                 + "@" + self.ssh_server + ":" + dest_folder)
    # rsync_cmd += " > /dev/null"
    p.vprint("Coping files to remote machine: " + rsync_cmd)
    # NOTE: the process handle gets its own name; the previous revision
    # rebound `p` here, shadowing the `msg` module alias imported above.
    rsync_proc = subprocess.Popen(rsync_cmd, shell=True)
    os.waitpid(rsync_proc.pid, 0)
    return dest_folder
def __check_ise_version(self):
    """Detect the installed ISE version.

    First tries to extract a version number (e.g. "13.1") from the
    path printed by `which xst`; when the path carries no version,
    falls back to parsing the banner line of `xst -h`.

    Returns the version string, or None when the xst banner cannot be
    parsed. Aborts the program when xst is not in PATH at all.
    """
    import subprocess
    import re
    xst = subprocess.Popen('which xst', shell=True,
                           stdin=subprocess.PIPE, stdout=subprocess.PIPE,
                           close_fds=True)
    lines = xst.stdout.readlines()
    if not lines:
        p.error("Xilinx binaries are not in the PATH variable\n"
                "Can't determine ISE version")
        quit()
    xst = str(lines[0].strip())
    # First check if we have the version in the path (e.g. .../13.1/...)
    # -- raw strings used for the regexes so \d is not an escape.
    version_pattern = re.compile(r".*?(\d\d\.\d).*")
    match = re.match(version_pattern, xst)
    if match:
        ise_version = match.group(1)
    else:
        # If it is not the case, call "xst -h" and parse the banner.
        xst_output = subprocess.Popen('xst -h', shell=True,
                                      stdin=subprocess.PIPE,
                                      stdout=subprocess.PIPE,
                                      close_fds=True)
        xst_output = xst_output.stdout.readlines()[0]
        xst_output = xst_output.strip()
        version_pattern = re.compile(
            r'Release\s(?P<major>\d|\d\d)[^\d](?P<minor>\d|\d\d)\s.*')
        match = re.match(version_pattern, xst_output)
        if match:
            ise_version = match.group('major') + '.' + match.group('minor')
        else:
            p.error("xst output is not in expected format: " + xst_output + "\n"
                    "Can't determine ISE version")
            return None
    p.vprint("ISE version: " + ise_version)
    return ise_version
def build_very_global_file_list(self):
    """Build the global file list extended with Verilog files that are
    referenced through `dep_requires` but not listed in any manifest.

    Walks depth-first from the manifest-listed Verilog files, following
    `dep_requires` of every file it encounters.
    """
    from srcfile import SourceFileFactory, VerilogFile
    sff = SourceFileFactory()
    files = self.build_global_file_list()
    manifest_verilog_files = files.filter(VerilogFile)
    extra_verilog_files = set()
    # Work on a copy -- the previous revision popped from the filtered
    # collection itself and then tested membership on the (by then
    # drained) collection. Track visited paths so a file is neither
    # processed twice nor re-queued forever on cyclic requires.
    queue = list(manifest_verilog_files)
    visited_paths = set(f.path for f in queue)
    while len(queue) > 0:
        verilog_file = queue.pop()
        for f_required in verilog_file.dep_requires:
            required_path = os.path.join(verilog_file.dirname, f_required)
            if required_path in visited_paths:
                continue
            visited_paths.add(required_path)
            new_vl = sff.new(required_path)
            queue.append(new_vl)
            extra_verilog_files.add(new_vl)
    p.vprint("Extra verilog files, not listed in manifests:")
    for extra_vl in extra_verilog_files:
        p.vprint(str(extra_vl))
    for extra_vl in extra_verilog_files:
        files.add(extra_vl)
    return files
def __output_files(self, node):
    """Append a <file> element for every project file to `node`.

    Each source-file class is mapped to the corresponding ISE file
    type; files of unknown type are skipped.
    """
    import os
    from srcfile import UCFFile, VHDLFile, VerilogFile, CDCFile, NGCFile
    # ordered (class, ISE type) pairs -- first match wins, preserving
    # the original isinstance-chain order
    type_table = [(VHDLFile, "FILE_VHDL"),
                  (VerilogFile, "FILE_VERILOG"),
                  (UCFFile, "FILE_UCF"),
                  (CDCFile, "FILE_CDC"),
                  (NGCFile, "FILE_NGC")]
    # log once, not once per file as before
    p.vprint("Writing .xise file for version " + str(self.ise))
    for seq, f in enumerate(self.files):
        file_type = None
        for cls, type_name in type_table:
            if isinstance(f, cls):
                file_type = type_name
                break
        if file_type is None:
            # unknown file kind -- not part of the ISE project
            continue
        fp = self.xml_doc.createElement("file")
        fp.setAttribute("xil_pn:name", os.path.relpath(f.path))
        fp.setAttribute("xil_pn:type", file_type)
        assoc = self.xml_doc.createElement("association")
        assoc.setAttribute("xil_pn:name", "Implementation")
        # seqID is 1-based; enumerate avoids the O(n^2) list.index scan
        assoc.setAttribute("xil_pn:seqID", str(seq + 1))
        try:
            if f.library != "work":
                lib = self.xml_doc.createElement("library")
                lib.setAttribute("xil_pn:name", f.library)
                fp.appendChild(lib)
        except AttributeError:
            # not every file type carries a library attribute
            pass
        fp.appendChild(assoc)
        node.appendChild(fp)
def transfer_files_forth(self, files, dest_folder):
    """ Takes list of files and sends them to remote machine.
    Name of a directory, where files are put is returned """
    import msg as p
    import random
    import string
    import subprocess
    from pipes import quote
    self.__check()
    #create a new catalogue on remote machine
    if dest_folder is None:
        dest_folder = ''.join(
            random.choice(string.ascii_letters + string.digits)
            for _ in range(8))
    mkdir_cmd = 'mkdir -p ' + dest_folder
    p.vprint("Connecting to " + str(self) + " and creating directory "
             + dest_folder + ": " + mkdir_cmd)
    self.system(mkdir_cmd)
    #create a shell-quoted string with filenames
    local_files_str = ' '.join(quote(src.path) for src in files)
    rsync_cmd = ("rsync -Rav " + local_files_str + " " + self.ssh_user
                 + "@" + self.ssh_server + ":" + dest_folder)
    #rsync_cmd += " > /dev/null"
    p.vprint("Coping files to remote machine: " + rsync_cmd)
    # keep the Popen handle under its own name so it does not shadow
    # the msg module alias `p` (a bug in the previous revision)
    proc = subprocess.Popen(rsync_cmd, shell=True)
    os.waitpid(proc.pid, 0)
    return dest_folder
def __output_files(self, node):
    """Emit one <file> element per project file into `node`."""
    import os
    from srcfile import UCFFile, VHDLFile, VerilogFile, CDCFile, NGCFile
    # ordered mapping: the first matching class decides the ISE type
    kind_map = [(VHDLFile, "FILE_VHDL"),
                (VerilogFile, "FILE_VERILOG"),
                (UCFFile, "FILE_UCF"),
                (CDCFile, "FILE_CDC"),
                (NGCFile, "FILE_NGC")]
    for src in self.files:
        p.vprint("Writing .xise file for version " + str(self.ise))
        chosen = None
        for klass, kind in kind_map:
            if isinstance(src, klass):
                chosen = kind
                break
        if chosen is None:
            # unrecognized file kind -- skipped
            continue
        fp = self.xml_doc.createElement("file")
        fp.setAttribute("xil_pn:name", os.path.relpath(src.path))
        fp.setAttribute("xil_pn:type", chosen)
        assoc = self.xml_doc.createElement("association")
        assoc.setAttribute("xil_pn:name", "Implementation")
        assoc.setAttribute("xil_pn:seqID", str(self.files.index(src) + 1))
        try:
            if (src.library != "work"):
                lib = self.xml_doc.createElement("library")
                lib.setAttribute("xil_pn:name", src.library)
                fp.appendChild(lib)
        except:
            pass
        fp.appendChild(assoc)
        node.appendChild(fp)
def __create_deps(self):
    """Fill in the dependency information for this source file.

    Encrypted files get a fixed dependency index; plain files are
    scanned for use clauses (requires) and package declarations
    (provides).
    """
    encrypted = self.__check_encryption()
    if encrypted:
        self.dep_index = SourceFile.gen_index(self)
        self._dep_fixed = True
    else:
        self.dep_requires = list(self.__search_use_clauses())
        self.dep_provides = list(self.__search_packages())
    p.vprint(self.path + " provides " + str(self.dep_provides))
    p.vprint(self.path + " requires " + str(self.dep_requires))
def search_for_manifest(search_path):
    """ Look for manifest in the given folder """
    p.vprint("Looking for manifest in " + search_path)
    for filename in os.listdir(search_path):
        if filename == "manifest.py":
            candidate = os.path.join(search_path, filename)
            # test the joined path -- the previous revision called
            # isdir(filename), which resolves against the current
            # working directory instead of search_path
            if not os.path.isdir(candidate):
                return os.path.abspath(candidate)
    # no manifest file found
    return None
def __search_for_manifest(self):
    """ Look for manifest in the given folder """
    p.vprint("Looking for manifest in " + self.path)
    for filename in os.listdir(self.path):
        if filename == "manifest.py" or filename == "Manifest.py":
            candidate = os.path.join(self.path, filename)
            # check the full path; isdir(filename) alone would be
            # resolved against the current working directory, not
            # against self.path
            if not os.path.isdir(candidate):
                p.vprint("*** found manifest for module " + self.path)
                return Manifest(path=os.path.abspath(candidate))
    return None
def __search_for_manifest(self):
    """ Look for manifest in the given folder """
    p.vprint("Looking for manifest in " + self.path)
    for filename in os.listdir(self.path):
        if filename == "manifest.py" or filename == "Manifest.py":
            full_path = os.path.join(self.path, filename)
            # isdir must be given the joined path -- the bare filename
            # would be checked relative to the CWD (previous bug)
            if not os.path.isdir(full_path):
                p.vprint("*** found manifest for module " + self.path)
                manifest = Manifest(path=os.path.abspath(full_path))
                return manifest
    return None
def add(self, files):
    """Add a file or an iterable of files, skipping duplicates.

    Raises RuntimeError when given a string (callers must pass file
    objects, not paths). None is ignored with a log message.
    """
    if isinstance(files, str):
        raise RuntimeError("Expected object, not a string")
    elif files is None:
        p.vprint("Got None as a file.\n Ommiting")
    else:
        # Probe for iterability explicitly instead of a bare `except:`
        # that would also swallow errors raised during the loop body.
        try:
            iterator = iter(files)
        except TypeError:
            #single file, not a list
            if files not in self:
                self.append(files)
        else:
            for f in iterator:
                if f not in self:
                    self.append(f)
def __fetch_from_git(self, module):
    """Fetch (clone or update) a module from its git repository.

    Returns True when the git commands succeeded. Side effects:
    creates the fetchto directory when missing, and sets
    module.isfetched and module.path.
    """
    if not os.path.exists(module.fetchto):
        os.mkdir(module.fetchto)
    cur_dir = os.getcwd()
    if module.branch is None:
        module.branch = "master"
    basename = path.url_basename(module.url)
    if basename.endswith(".git"):
        basename = basename[:-4]  # remove trailing .git
    # NOTE: the checkout directory must be derived from the *stripped*
    # basename -- `git clone foo.git` creates a directory named `foo`.
    # The previous revision built mod_path before stripping.
    mod_path = os.path.join(module.fetchto, basename)
    if module.isfetched:
        # repository already on disk: only switch branches
        cmd = "(cd {0} && git checkout {1})"
        cmd = cmd.format(mod_path, module.branch)
    else:
        cmd = "(cd {0} && git clone -b {2} {1})"
        cmd = cmd.format(module.fetchto, module.url, module.branch)
    rval = True
    p.vprint(cmd)
    if os.system(cmd) != 0:
        rval = False
    if module.revision and rval:
        os.chdir(mod_path)
        cmd = "git checkout " + module.revision
        p.vprint(cmd)
        if os.system(cmd) != 0:
            rval = False
        os.chdir(cur_dir)
    # NOTE(review): isfetched is set even on failure, as before --
    # callers may rely on it; confirm before tightening.
    module.isfetched = True
    module.path = mod_path
    return rval
def remove_dir_from_disk(self):
    """Delete this module's fetched directory tree, then prune any
    now-empty parent directories left behind."""
    if not self.isfetched:
        return
    import shutil
    import os
    p.vprint("Removing " + self.path)
    shutil.rmtree(self.path)
    # Walk upwards removing empty parents; os.path.dirname is portable,
    # unlike the previous split('/') approach.
    parent = os.path.dirname(self.path)
    while parent:
        try:
            p.vprint("Trying to remove " + parent)
            os.rmdir(parent)
        except OSError:
            #a catologue is not empty - we are done
            break
        parent = os.path.dirname(parent)
def __check_ise_version(self):
    """Return the installed ISE version string, deduced from xst."""
    import subprocess
    import re
    which_proc = subprocess.Popen('which xst', shell=True,
                                  stdin=subprocess.PIPE,
                                  stdout=subprocess.PIPE, close_fds=True)
    which_lines = which_proc.stdout.readlines()
    if not which_lines:
        p.error("Xilinx binaries are not in the PATH variable\n"
                "Can't determine ISE version")
        quit()
    xst_path = str(which_lines[0].strip())
    # the installation path usually embeds the version, e.g. .../13.1/
    path_version_re = re.compile(".*?(\d\d\.\d).*")
    found = path_version_re.match(xst_path)
    if found:
        ise_version = found.group(1)
    else:
        # no version in the path: ask xst itself and parse its banner
        help_proc = subprocess.Popen('xst -h', shell=True,
                                     stdin=subprocess.PIPE,
                                     stdout=subprocess.PIPE,
                                     close_fds=True)
        banner = help_proc.stdout.readlines()[0]
        banner = banner.strip()
        banner_re = re.compile(
            'Release\s(?P<major>\d|\d\d)[^\d](?P<minor>\d|\d\d)\s.*')
        found = banner_re.match(banner)
        if not found:
            p.error("xst output is not in expected format: " + banner + "\n"
                    "Can't determine ISE version")
            return None
        ise_version = ''.join(
            (found.group('major'), '.', found.group('minor')))
    p.vprint("ISE version: " + ise_version)
    return ise_version
def new(self, path, library=None, vcom_opt=None, vlog_opt=None,
        include_dirs=None):
    """Create the SourceFile subclass matching the file's extension.

    Returns None for unrecognized extensions. Raises RuntimeError
    when the path is empty or None.
    """
    if path is None or path == "":
        raise RuntimeError("Expected a file path, got: " + str(path))
    if not os.path.isabs(path):
        path = os.path.abspath(path)
    tmp = path.rsplit('.')
    extension = tmp[len(tmp) - 1]
    p.vprint("SFF> " + path)
    # HDL sources need the extra constructor arguments
    if extension in ('vhd', 'vhdl', 'vho'):
        return VHDLFile(path, library, vcom_opt)
    if extension in ('v', 'vh', 'vo', 'vm'):
        return VerilogFile(path, library, vlog_opt, include_dirs)
    if extension in ('sv', 'svh'):
        return SVFile(path, library, vlog_opt, include_dirs)
    # the remaining file types take only the path
    simple_types = {'ngc': NGCFile, 'ucf': UCFFile, 'cdc': CDCFile,
                    'wb': WBGenFile, 'tcl': TCLFile, 'xise': XISEFile,
                    'ise': XISEFile, 'stp': SignalTapFile,
                    'sdc': SDCFile, 'qip': QIPFile, 'dpf': DPFFile}
    cls = simple_types.get(extension)
    if cls is not None:
        return cls(path)
    # unknown extension -- same as before, the caller gets None
    return None
def fetch_all(self, unfetched_only=False):
    """Fetch every module in the pool, transitively.

    When unfetched_only is set, modules already on disk are not
    re-fetched; only their submodules are examined.
    """
    fetcher = self.ModuleFetcher()
    pending = [m for m in self]
    while pending:
        current = pending.pop()
        if unfetched_only and current.isfetched:
            # already on disk: just collect its submodules
            discovered = current.submodules()
        else:
            discovered = fetcher.fetch_single_module(current)
        for mod in discovered:
            if mod.isfetched:
                p.vprint("NOT appended to fetch queue: " + str(mod.url))
            else:
                p.vprint("Appended to fetch queue: " + str(mod.url))
                self._add(mod)
                pending.append(mod)
def fetch_all(self, unfetched_only=False):
    """Walk the module pool, fetching modules and their submodules."""
    fetcher = self.ModuleFetcher()
    fetch_queue = list(self)
    while len(fetch_queue) > 0:
        cur_mod = fetch_queue.pop()
        already_there = unfetched_only and cur_mod.isfetched
        if already_there:
            # no download needed, only inspect the submodules
            new_modules = cur_mod.submodules()
        else:
            new_modules = fetcher.fetch_single_module(cur_mod)
        for mod in new_modules:
            if not mod.isfetched:
                p.vprint("Appended to fetch queue: " + str(mod.url))
                self._add(mod)
                fetch_queue.append(mod)
            else:
                p.vprint("NOT appended to fetch queue: " + str(mod.url))
def make_list_of_modules(self):
    """Return a flat list containing this module and all of its
    transitive submodules (local, git and svn).

    Aborts the program when an unfetched module is encountered.
    """
    p.vprint("Making list of modules for " + str(self))
    new_modules = [self]
    modules = [self]
    while len(new_modules) > 0:
        cur_module = new_modules.pop()
        if not cur_module.isfetched:
            p.error("Unfetched module in modules list: " + str(cur_module))
            quit()
        if cur_module.manifest is None:
            p.vprint("No manifest in " + str(cur_module))
            continue
        cur_module.parse_manifest()
        # local, git and svn submodules are handled identically --
        # collected in the same order as before (local, git, svn)
        for module in cur_module.local + cur_module.git + cur_module.svn:
            modules.append(module)
            new_modules.append(module)
    # NOTE: `modules` always contains at least `self`, so the old
    # "No modules were found" branch could never trigger; it was removed.
    return modules
def __fetch_from_svn(self, module):
    """Check out `module` from svn into module.fetchto.

    Returns True when the checkout command succeeded. Always marks
    the module as fetched and records its on-disk path.
    """
    if not os.path.exists(module.fetchto):
        os.mkdir(module.fetchto)
    starting_dir = os.getcwd()
    os.chdir(module.fetchto)
    # optionally pin a revision using svn's url@rev syntax
    if module.revision:
        checkout_from = module.url + '@' + module.revision
    else:
        checkout_from = module.url
    cmd = "svn checkout {0} ".format(checkout_from) + module.basename
    p.vprint(cmd)
    success = (os.system(cmd) == 0)
    os.chdir(starting_dir)
    module.isfetched = True
    module.path = os.path.join(module.fetchto, module.basename)
    return success
def run_remote_synthesis(self):
    """Synthesize the project on a remote machine over ssh.

    Generates run.tcl, copies all required files (sources, the tcl
    script and the ISE project) to the remote host, runs xtclsh there
    and transfers the results back into the local project directory.
    Aborts the program when the ssh connection or the remote
    synthesis fails.
    """
    ssh = self.connection
    cwd = os.getcwd()
    p.vprint("The program will be using ssh connection: " + str(ssh))
    if not ssh.is_good():
        p.error("SSH connection failure. Remote host doesn't response.")
        quit()
    if not os.path.exists(self.top_module.fetchto):
        p.warning(
            "There are no modules fetched. Are you sure it's correct?")
    files = self.modules_pool.build_very_global_file_list()
    # tcl = self.__search_tcl_file()
    # if tcl == None:
    self.__generate_tcl()
    tcl = "run.tcl"
    sff = SourceFileFactory()
    files.add(sff.new(tcl))
    files.add(sff.new(self.top_module.syn_project))
    dest_folder = ssh.transfer_files_forth(
        files, dest_folder=self.top_module.syn_name)
    # the upload uses `rsync -R`, which recreates the full local path
    # under dest_folder -- hence the dest_folder + cwd concatenation
    syn_cmd = "cd " + dest_folder + cwd + " && xtclsh run.tcl"
    p.vprint("Launching synthesis on " + str(ssh) + ": " + syn_cmd)
    ret = ssh.system(syn_cmd)
    if ret == 1:
        p.error("Synthesis failed. Nothing will be transfered back")
        quit()
    # step up one directory so the fetched tree lands next to us
    cur_dir = os.path.basename(cwd)
    os.chdir("..")
    ssh.transfer_files_back(what=dest_folder + cwd, where=".")
    os.chdir(cur_dir)
def new(self, path, library=None, vcom_opt=None, vlog_opt=None,
        include_dirs=None):
    """Instantiate the SourceFile subclass matching the extension of
    `path`. Unknown extensions yield None."""
    if path is None or path == "":
        raise RuntimeError("Expected a file path, got: " + str(path))
    if not os.path.isabs(path):
        path = os.path.abspath(path)
    pieces = path.rsplit('.')
    ext = pieces[-1]
    p.vprint("SFF> " + path)
    result = None
    # HDL files carry extra per-tool options
    if ext in ('vhd', 'vhdl', 'vho'):
        result = VHDLFile(path, library, vcom_opt)
    elif ext in ('v', 'vh', 'vo', 'vm'):
        result = VerilogFile(path, library, vlog_opt, include_dirs)
    elif ext in ('sv', 'svh'):
        result = SVFile(path, library, vlog_opt, include_dirs)
    elif ext == 'ngc':
        result = NGCFile(path)
    elif ext == 'ucf':
        result = UCFFile(path)
    elif ext == 'cdc':
        result = CDCFile(path)
    elif ext == 'wb':
        result = WBGenFile(path)
    elif ext == 'tcl':
        result = TCLFile(path)
    elif ext in ('xise', 'ise'):
        result = XISEFile(path)
    elif ext == 'stp':
        result = SignalTapFile(path)
    elif ext == 'sdc':
        result = SDCFile(path)
    elif ext == 'qip':
        result = QIPFile(path)
    elif ext == 'dpf':
        result = DPFFile(path)
    return result
def run_remote_synthesis(self):
    """Run ISE synthesis remotely: push the project files over ssh,
    execute xtclsh there and pull the results back."""
    conn = self.connection
    local_cwd = os.getcwd()
    p.vprint("The program will be using ssh connection: " + str(conn))
    if not conn.is_good():
        p.error("SSH connection failure. Remote host doesn't response.")
        quit()
    if not os.path.exists(self.top_module.fetchto):
        p.warning("There are no modules fetched. Are you sure it's correct?")
    file_set = self.modules_pool.build_very_global_file_list()
    # tcl = self.__search_tcl_file()
    # if tcl == None:
    self.__generate_tcl()
    tcl_script = "run.tcl"
    factory = SourceFileFactory()
    file_set.add(factory.new(tcl_script))
    file_set.add(factory.new(self.top_module.syn_project))
    dest_folder = conn.transfer_files_forth(
        file_set, dest_folder=self.top_module.syn_name)
    remote_cmd = "cd " + dest_folder + local_cwd + " && xtclsh run.tcl"
    p.vprint("Launching synthesis on " + str(conn) + ": " + remote_cmd)
    if conn.system(remote_cmd) == 1:
        p.error("Synthesis failed. Nothing will be transfered back")
        quit()
    parent_name = os.path.basename(local_cwd)
    os.chdir("..")
    conn.transfer_files_back(what=dest_folder + local_cwd, where=".")
    os.chdir(parent_name)
def fetch_single_module(self, module):
    """Fetch one module (when remote) and return its direct submodules."""
    import global_mod
    p.vprint("Fetching module: " + str(module))
    if module.source == "local":
        # local modules need no download
        p.vprint("ModPath: " + module.path)
    else:
        p.printhr()
        p.info("Fetching module: " + str(module) +
               " [parent: " + str(module.parent) + "]")
        if module.source == "svn":
            p.info("[svn] Fetching to " + module.fetchto)
            self.__fetch_from_svn(module)
        if module.source == "git":
            p.info("[git] Fetching to " + module.fetchto)
            self.__fetch_from_git(module)
    module.parse_manifest()
    # collect submodules in the same order as before: local, svn, git
    new_modules = module.local + module.svn + module.git
    return new_modules
def fetch(self, unfetched_only=False):
    """Trigger fetching of all modules the project needs."""
    p.info("Fetching needed modules.")
    pool = self.modules_pool
    pool.fetch_all(unfetched_only)
    p.vprint(str(pool))
def parse_manifest(self):
    """Parse this module's Manifest file and populate the module's
    attributes from the resulting option map: submodule lists (local,
    svn, git), the source-file set, per-tool option strings and
    synthesis settings.

    Returns early when the module was already parsed or is not yet
    fetched. Marks the module as parsed at the end and recurses into
    all submodules.
    """
    # guard: parse only once, and only for modules present on disk
    if self.isparsed == True or self.isfetched == False:
        return
    if self.manifest == None:
        self.manifest = self.__search_for_manifest()
    if self.path == None:
        raise RuntimeError()
    manifest_parser = ManifestParser()
    # For non-top modules
    if(self.parent != None):
        # inherit target/action/device settings from the top module by
        # injecting them as code before the manifest is evaluated
        manifest_parser.add_arbitrary_code("target=\""+str(global_mod.top_module.target)+"\"")
        manifest_parser.add_arbitrary_code("action=\""+str(global_mod.top_module.action)+"\"")
        # syn_device and sim_tool will be set for non-top modules
        manifest_parser.add_arbitrary_code("syn_device=\""+str(global_mod.top_module.syn_device)+"\"")
    manifest_parser.add_arbitrary_code("__manifest=\""+self.path+"\"")
    manifest_parser.add_arbitrary_code(global_mod.options.arbitrary_code)
    if self.manifest == None:
        p.vprint("No manifest found in module "+str(self))
    else:
        manifest_parser.add_manifest(self.manifest)
        p.vprint("Parsing manifest file: " + str(self.manifest))
    opt_map = None
    try:
        opt_map = manifest_parser.parse()
    except NameError as ne:
        p.echo("Error while parsing {0}:\n{1}: {2}.".format(self.manifest, type(ne), ne))
        quit()
    # a fetchto given in the manifest (relative to this module)
    # overrides the one inherited from the parent
    if(opt_map["fetchto"] != None):
        fetchto = path_mod.rel2abs(opt_map["fetchto"], self.path)
        self.fetchto = fetchto
    else:
        fetchto = self.fetchto
    if self.ise == None:
        self.ise = "13.1"
    # local submodules must be listed with relative paths
    if "local" in opt_map["modules"]:
        local_paths = self.__make_list(opt_map["modules"]["local"])
        local_mods = []
        for path in local_paths:
            if path_mod.is_abs_path(path):
                p.error("Found an absolute path (" + path + ") in a manifest")
                p.rawprint("(" + self.path + ")")
                quit()
            path = path_mod.rel2abs(path, self.path)
            local_mods.append(self.pool.new_module(parent=self, url=path, source="local", fetchto=fetchto))
        self.local = local_mods
    else:
        self.local = []
    # per-tool option strings taken verbatim from the manifest
    self.vmap_opt = opt_map["vmap_opt"]
    self.vcom_opt = opt_map["vcom_opt"]
    self.vsim_opt = opt_map["vsim_opt"]
    self.vlog_opt = opt_map["vlog_opt"]
    #if self.vlog_opt == "":
    #    self.vlog_opt = global_mod.top_module.vlog_opt
    #if self.vcom_opt == "":
    #    self.vcom_opt = global_mod.top_module.vcom_opt
    #if self.vsim_opt == "":
    #    self.vsim_opt = global_mod.top_module.vsim_opt
    # if self.vmap_opt == "":
    #     self.vmap_opt = global_mod.top_module.vmap_opt
    self.library = opt_map["library"]
    # include_dirs may be a single string or a list of strings
    self.include_dirs = []
    if opt_map["include_dirs"] != None:
        if isinstance(opt_map["include_dirs"], basestring):
            self.include_dirs.append(opt_map["include_dirs"])
        else:
            self.include_dirs.extend(opt_map["include_dirs"])
    for dir in self.include_dirs:
        if path_mod.is_abs_path(dir):
            p.warning(self.path + " contains absolute path to an include directory: " + dir)
        if not os.path.exists(dir):
            p.warning(self.path + " has an unexisting include directory: " + dir)
    # the file list: relative paths are resolved, absolute ones dropped
    if opt_map["files"] == []:
        self.files = SourceFileSet()
    else:
        opt_map["files"] = self.__make_list(opt_map["files"])
        paths = []
        for path in opt_map["files"]:
            if not path_mod.is_abs_path(path):
                path = path_mod.rel2abs(path, self.path)
                paths.append(path)
            else:
                p.warning(path + " is an absolute path. Omitting.")
            if not os.path.exists(path):
                p.error("File listed in " + self.manifest.path + " doesn't exist: " + path +".\nExiting.")
                quit()
        from srcfile import VerilogFile, VHDLFile
        self.files = self.__create_file_list_from_paths(paths=paths);
        # propagate the module-wide tool options onto each file
        for f in self.files:
            if isinstance(f, VerilogFile):
                f.vsim_opt = self.vsim_opt
            elif isinstance(f, VHDLFile):
                f.vcom_opt = self.vcom_opt
    # remote submodules (svn and git)
    if "svn" in opt_map["modules"]:
        opt_map["modules"]["svn"] = self.__make_list(opt_map["modules"]["svn"])
        svn_mods = []
        for url in opt_map["modules"]["svn"]:
            svn_mods.append(self.pool.new_module(parent=self, url=url, source="svn", fetchto=fetchto))
        self.svn = svn_mods
    else:
        self.svn = []
    if "git" in opt_map["modules"]:
        opt_map["modules"]["git"] = self.__make_list(opt_map["modules"]["git"])
        git_mods = []
        for url in opt_map["modules"]["git"]:
            git_mods.append(self.pool.new_module(parent=self, url=url, source="git", fetchto=fetchto))
        self.git = git_mods
    else:
        self.git = []
    # synthesis settings
    self.target = opt_map["target"]
    self.action = opt_map["action"]
    if opt_map["syn_name"] == None and opt_map["syn_project"] != None:
        self.syn_name = opt_map["syn_project"][:-5] #cut out .xise from the end
    else:
        self.syn_name = opt_map["syn_name"]
    self.syn_device = opt_map["syn_device"];
    self.syn_grade = opt_map["syn_grade"];
    self.syn_package= opt_map["syn_package"];
    self.syn_project = opt_map["syn_project"];
    self.syn_top = opt_map["syn_top"];
    self.isparsed = True
    # recurse into everything we just discovered
    for m in self.submodules():
        m.parse_manifest()
def transfer_files_back(self, what, where):
    """Copy `what` from the remote machine into local `where` via rsync."""
    self.__check()
    remote_spec = self.ssh_user + "@" + self.ssh_server + ":" + what
    rsync_cmd = "rsync -av " + remote_spec + ' ' + where
    p.vprint(rsync_cmd)
    os.system(rsync_cmd)
def parse_manifest(self):
    """Parse this module's Manifest and fill in the module attributes:
    submodule lists (local, svn, git), the source-file set, tool
    option strings and synthesis settings.

    No-op when already parsed or not yet fetched. Sets `isparsed` and
    recurses into all discovered submodules at the end.
    """
    # parse at most once, and only for modules that exist on disk
    if self.isparsed == True or self.isfetched == False:
        return
    if self.manifest == None:
        self.manifest = self.__search_for_manifest()
    if self.path == None:
        raise RuntimeError()
    manifest_parser = ManifestParser()
    # For non-top modules: pre-inject target/action/device taken from
    # the top module so the manifest code can refer to them
    if (self.parent != None):
        manifest_parser.add_arbitrary_code(
            "target=\"" + str(global_mod.top_module.target) + "\"")
        manifest_parser.add_arbitrary_code(
            "action=\"" + str(global_mod.top_module.action) + "\"")
        # syn_device and sim_tool will be set for non-top modules
        manifest_parser.add_arbitrary_code(
            "syn_device=\"" + str(global_mod.top_module.syn_device) + "\"")
    manifest_parser.add_arbitrary_code("__manifest=\"" + self.path + "\"")
    manifest_parser.add_arbitrary_code(global_mod.options.arbitrary_code)
    if self.manifest == None:
        p.vprint("No manifest found in module " + str(self))
    else:
        manifest_parser.add_manifest(self.manifest)
        p.vprint("Parsing manifest file: " + str(self.manifest))
    opt_map = None
    try:
        opt_map = manifest_parser.parse()
    except NameError as ne:
        p.echo("Error while parsing {0}:\n{1}: {2}.".format(
            self.manifest, type(ne), ne))
        quit()
    # fetchto in the manifest (relative to this module) overrides ours
    if (opt_map["fetchto"] != None):
        fetchto = path_mod.rel2abs(opt_map["fetchto"], self.path)
        self.fetchto = fetchto
    else:
        fetchto = self.fetchto
    if self.ise == None:
        self.ise = "13.1"
    # local submodules must be given with relative paths
    if "local" in opt_map["modules"]:
        local_paths = self.__make_list(opt_map["modules"]["local"])
        local_mods = []
        for path in local_paths:
            if path_mod.is_abs_path(path):
                p.error("Found an absolute path (" + path + ") in a manifest")
                p.rawprint("(" + self.path + ")")
                quit()
            path = path_mod.rel2abs(path, self.path)
            local_mods.append(
                self.pool.new_module(parent=self, url=path, source="local",
                                     fetchto=fetchto))
        self.local = local_mods
    else:
        self.local = []
    # per-tool option strings, taken verbatim from the manifest
    self.vmap_opt = opt_map["vmap_opt"]
    self.vcom_opt = opt_map["vcom_opt"]
    self.vsim_opt = opt_map["vsim_opt"]
    self.vlog_opt = opt_map["vlog_opt"]
    #if self.vlog_opt == "":
    #    self.vlog_opt = global_mod.top_module.vlog_opt
    #if self.vcom_opt == "":
    #    self.vcom_opt = global_mod.top_module.vcom_opt
    #if self.vsim_opt == "":
    #    self.vsim_opt = global_mod.top_module.vsim_opt
    # if self.vmap_opt == "":
    #     self.vmap_opt = global_mod.top_module.vmap_opt
    self.library = opt_map["library"]
    # include_dirs may be either a single string or a list of strings
    self.include_dirs = []
    if opt_map["include_dirs"] != None:
        if isinstance(opt_map["include_dirs"], basestring):
            self.include_dirs.append(opt_map["include_dirs"])
        else:
            self.include_dirs.extend(opt_map["include_dirs"])
    for dir in self.include_dirs:
        if path_mod.is_abs_path(dir):
            p.warning(self.path + " contains absolute path to an include directory: " + dir)
        if not os.path.exists(dir):
            p.warning(self.path + " has an unexisting include directory: " + dir)
    # build the file set: relative paths resolved, absolute ones dropped
    if opt_map["files"] == []:
        self.files = SourceFileSet()
    else:
        opt_map["files"] = self.__make_list(opt_map["files"])
        paths = []
        for path in opt_map["files"]:
            if not path_mod.is_abs_path(path):
                path = path_mod.rel2abs(path, self.path)
                paths.append(path)
            else:
                p.warning(path + " is an absolute path. Omitting.")
            if not os.path.exists(path):
                p.error("File listed in " + self.manifest.path
                        + " doesn't exist: " + path + ".\nExiting.")
                quit()
        from srcfile import VerilogFile, VHDLFile
        self.files = self.__create_file_list_from_paths(paths=paths)
        # propagate module-wide tool options onto each file
        for f in self.files:
            if isinstance(f, VerilogFile):
                f.vsim_opt = self.vsim_opt
            elif isinstance(f, VHDLFile):
                f.vcom_opt = self.vcom_opt
    # remote submodules (svn and git)
    if "svn" in opt_map["modules"]:
        opt_map["modules"]["svn"] = self.__make_list(
            opt_map["modules"]["svn"])
        svn_mods = []
        for url in opt_map["modules"]["svn"]:
            svn_mods.append(
                self.pool.new_module(parent=self, url=url, source="svn",
                                     fetchto=fetchto))
        self.svn = svn_mods
    else:
        self.svn = []
    if "git" in opt_map["modules"]:
        opt_map["modules"]["git"] = self.__make_list(
            opt_map["modules"]["git"])
        git_mods = []
        for url in opt_map["modules"]["git"]:
            git_mods.append(
                self.pool.new_module(parent=self, url=url, source="git",
                                     fetchto=fetchto))
        self.git = git_mods
    else:
        self.git = []
    # synthesis settings
    self.target = opt_map["target"]
    self.action = opt_map["action"]
    if opt_map["syn_name"] == None and opt_map["syn_project"] != None:
        self.syn_name = opt_map[
            "syn_project"][:-5]  #cut out .xise from the end
    else:
        self.syn_name = opt_map["syn_name"]
    self.syn_device = opt_map["syn_device"]
    self.syn_grade = opt_map["syn_grade"]
    self.syn_package = opt_map["syn_package"]
    self.syn_project = opt_map["syn_project"]
    self.syn_top = opt_map["syn_top"]
    self.isparsed = True
    # recurse into every submodule discovered above
    for m in self.submodules():
        m.parse_manifest()
def solve(self, fileset):
    """Order the dependable files in `fileset` so that every file comes
    after the files it depends on, and record cross-file dependency
    links in each file's `dep_depends_on` list.

    Uses an iterative bubble-like pass (bounded by max_iter) that swaps
    a file behind any later file providing something it requires, then
    resolves VHDL and Verilog requirements explicitly, and finally
    follows SystemVerilog includes breadth-first.

    Returns a new SourceFileSet, or None when max_iter passes were not
    enough (suspected cyclic dependency).
    """
    n_iter = 0
    max_iter = 100
    import copy
    fset = fileset.filter(IDependable);
    f_nondep = []
    done = False
    # keep swapping until a full pass makes no change (or we give up)
    while not done and (n_iter < max_iter):
        n_iter = n_iter+1
        done = True
        for f in fset:
            if not f._dep_fixed:
                idx = fset.index(f)
                k = self.__lookup_post_provider(files=fset, start_index=idx, file=f);
                if k:
                    done = False
                    #swap
                    fset[idx], fset[k] = fset[k], fset[idx]
    if(n_iter == max_iter):
        p.error("Maximum number of iterations reached when trying to solve the dependencies."+
        "Perhaps a cyclic inter-dependency problem.");
        return None
    # files with a fixed dependency index are set aside and pre-sorted
    for f in fset:
        if f._dep_fixed:
            f_nondep.append(copy.copy(f))
            del f
    f_nondep.sort(key=lambda f: f.dep_index)
    from srcfile import VHDLFile, VerilogFile
    # resolve VHDL requirements: req is a (library, unit) pair
    for f in [file for file in fset if isinstance(file, VHDLFile)]:
        p.vprint(f.path)
        if f.dep_requires:
            for req in f.dep_requires:
                pf = self.__find_provider_vhdl_file([file for file in fset if isinstance(file, VHDLFile)], req)
                if not pf:
                    p.error("Missing dependency in file "+str(f)+": " + req[0]+'.'+req[1])
                else:
                    p.vprint("--> " + pf.path);
                    if pf.path != f.path:
                        f.dep_depends_on.append(pf)
        #get rid of duplicates by making a set from the list and vice versa
        f.dep_depends_on = list(set(f.dep_depends_on))
    import srcfile as sf
    # resolve Verilog requirements (missing ones are only warnings)
    for f in [file for file in fset if isinstance(file, VerilogFile)]:
        p.vprint(f.path)
        if f.dep_requires:
            for req in f.dep_requires:
                pf = self.__find_provider_verilog_file(req, f)
                if not pf:
                    p.warning("Cannot find depending for file "+str(f)+": "+req)
                else:
                    p.vprint("--> " + pf.path)
                    f.dep_depends_on.append(pf)
        #get rid of duplicates by making a set from the list and vice versa
        f.dep_depends_on = list(set(f.dep_depends_on))
    # assemble the result: fixed-index files first, then the rest
    newobj = sf.SourceFileSet();
    newobj.add(f_nondep);
    for f in fset:
        try:
            if not f._dep_fixed:
                newobj.add(f)
        except:
            newobj.add(f)
    #search for SV includes (BFS algorithm)
    from srcfile import SVFile
    for f in [file for file in newobj if isinstance(file, SVFile)]:
        stack = f.dep_depends_on[:]
        while stack:
            qf = stack.pop(0)
            if qf.dep_requires:
                # inherit the requirements of every transitive include
                f.dep_requires.extend(qf.dep_requires)
                for req in qf.dep_requires:
                    pf = self.__find_provider_verilog_file(req, f)
                    if not pf:
                        p.warning("Cannot find include for file "+str(f)+": "+req)
                    else:
                        p.vprint("--> " + pf.path)
                        f.dep_depends_on.append(pf)
                        stack.append(pf)
        #get rid of duplicates by making a set from the list and vice versa
        f.dep_depends_on = list(set(f.dep_depends_on))
    for k in newobj:
        p.vprint(str(k.dep_index) + " " + k.path + str(k._dep_fixed))
    return newobj
def main():
    """Parse the hdlmake command line and dispatch the requested actions.

    Builds the optparse parser, handles the project-independent options
    (--manifest-help, --version), loads the top Manifest via ModulePool,
    then maps each chosen option to the corresponding HdlmakeKernel method.
    """
    usage = "usage: %prog [options]\n"
    usage += "type %prog --help to get help message"
    parser = optparse.OptionParser(usage=usage)
    parser.add_option("--manifest-help", action="store_true",
                      dest="manifest_help",
                      help="print manifest file variables description")
    parser.add_option("--make-vsim", dest="make_vsim", action="store_true",
                      default=None, help="generate a ModelSim simulation Makefile")
    parser.add_option("--make-isim", dest="make_isim", action="store_true",
                      default=None,
                      help="generate a ISE Simulation (ISim) simulation Makefile")
    parser.add_option("--make-fetch", dest="make_fetch", action="store_true",
                      default=None, help="generate a makefile for modules' fetching")
    parser.add_option("--make-ise", dest="make_ise", action="store_true",
                      default=None, help="generate a makefile for local ISE synthesis")
    parser.add_option("--make-remote", dest="make_remote", action="store_true",
                      default=None, help="generate a makefile for remote synthesis")
    parser.add_option("-f", "--fetch", action="store_true", dest="fetch",
                      default=None,
                      help="fetch and/or update remote modules listed in Manifest")
    parser.add_option("--clean", action="store_true", dest="clean",
                      default=None, help="remove all modules fetched for this one")
    parser.add_option("--list", action="store_true", dest="list",
                      default=None, help="List all modules together with their files")
    parser.add_option("--list-files", action="store_true", dest="list_files",
                      default=None,
                      help="List all files in a from of a space-separated string")
    # BUG FIX: the option string was "--merge-cores=name"; optparse splits
    # user input at '=', so an option registered WITH '=' in its name can
    # never match. Plain "--merge-cores" makes "--merge-cores=name" work.
    parser.add_option("--merge-cores", default=None, dest="merge_cores",
                      help="Merges entire synthesizable content of an project into a pair of VHDL/Verilog files")
    # BUG FIX: the concatenated help strings were missing a space
    # ("...list of projectfiles").
    parser.add_option("--ise-proj", action="store_true", dest="ise_proj",
                      default=None,
                      help="create/update an ise project including list of project "
                           "files")
    parser.add_option("--quartus-proj", action="store_true", dest="quartus_proj",
                      default=None,
                      help="create/update a quartus project including list of project "
                           "files")
    parser.add_option("-l", "--synthesize-locally", dest="local",
                      default=None, action="store_true",
                      help="perform a local synthesis")
    parser.add_option("-r", "--synthesize-remotelly", dest="remote",
                      default=None, action="store_true",
                      help="perform a remote synthesis")
    parser.add_option("--synth-server", dest="synth_server", default=None,
                      help="use given SERVER for remote synthesis", metavar="SERVER")
    parser.add_option("--synth-user", dest="synth_user", default=None,
                      help="use given USER for remote synthesis", metavar="USER")
    parser.add_option("--force-ise", dest="force_ise", default=None, type=float,
                      help="Force given ISE version to be used in"
                           " synthesis,use 0 for current version", metavar="ISE")
    parser.add_option("--py", dest="arbitrary_code", default="",
                      help="add arbitrary code to all manifests' evaluation")
    # BUG FIX: default was the STRING "false", which is truthy, so
    # options.verbose / options.print_version tested as "set" even when the
    # flag was absent. Boolean False is the correct default for store_true.
    parser.add_option("-v", "--verbose", dest="verbose", action="store_true",
                      default=False, help="verbose mode")
    parser.add_option("--version", dest="print_version", action="store_true",
                      default=False, help="print version id of this Hdlmake build")
    (options, _) = parser.parse_args()
    global_mod.options = options

    # HANDLE PROJECT INDEPENDENT OPTIONS
    if options.manifest_help == True:
        from manifest_parser import ManifestParser
        ManifestParser().help()
        quit()
    if options.print_version == True:
        p.print_version()
        quit()
    # Check later if a simulation tool should have been specified
    if options.make_isim == True:
        global_mod.sim_tool = "isim"
    elif options.make_vsim == True:
        global_mod.sim_tool = "vsim"
    p.info("Simulation tool: " + str(global_mod.sim_tool))
    p.vprint("LoadTopManifest")

    pool = ModulePool()
    pool.new_module(parent=None, url=os.getcwd(), source="local", fetchto=".")
    if pool.get_top_module().manifest == None:
        # Repaired string literal (was broken across a raw newline in source).
        p.rawprint("No manifest found. At least an empty one is needed")
        p.rawprint("To see some help, type hdlmake --help")
        quit()
    global_mod.top_module = pool.get_top_module()
    global_mod.global_target = global_mod.top_module.target

    ssh = Connection(ssh_user=options.synth_user,
                     ssh_server=options.synth_server)
    from hdlmake_kernel import HdlmakeKernel
    kernel = HdlmakeKernel(modules_pool=pool, connection=ssh, options=options)

    # Maps an option attribute name to the kernel method it triggers.
    options_kernel_mapping = {
        "fetch": "fetch",
        "make_vsim": "generate_vsim_makefile",
        "make_isim": "generate_isim_makefile",
        "ise_proj": "generate_ise_project",
        "quartus_proj": "generate_quartus_project",
        "local": "run_local_synthesis",
        "remote": "run_remote_synthesis",
        "make_fetch": "generate_fetch_makefile",
        "make_ise": "generate_ise_makefile",
        "make_remote": "generate_remote_synthesis_makefile",
        "list": "list_modules",
        "clean": "clean_modules",
        "merge_cores": "merge_cores"
    }
    sth_chosen = False
    import traceback
    for option, function in options_kernel_mapping.items():
        try:
            is_set = getattr(options, option)
            if is_set:
                sth_chosen = True
                getattr(kernel, function)()
        # Modernized from the Py2-only "except Exception, unknown_error"
        # form; "as" is valid in Python 2.6+ and Python 3.
        except Exception as unknown_error:
            p.echo("Oooops! We've got an error. Here is the appropriate info:\n")
            p.print_version()
            print(unknown_error)
            traceback.print_exc()
def main():
    """Parse the hdlmake command line and dispatch the requested actions.

    Builds the optparse parser, handles the project-independent options
    (--manifest-help, --version), loads the top Manifest via ModulePool,
    then maps each chosen option to the corresponding HdlmakeKernel method.
    """
    usage = "usage: %prog [options]\n"
    usage += "type %prog --help to get help message"
    parser = optparse.OptionParser(usage=usage)
    parser.add_option("--manifest-help", action="store_true",
                      dest="manifest_help",
                      help="print manifest file variables description")
    parser.add_option("--make-vsim", dest="make_vsim", action="store_true",
                      default=None, help="generate a ModelSim simulation Makefile")
    parser.add_option("--make-isim", dest="make_isim", action="store_true",
                      default=None,
                      help="generate a ISE Simulation (ISim) simulation Makefile")
    parser.add_option("--make-fetch", dest="make_fetch", action="store_true",
                      default=None, help="generate a makefile for modules' fetching")
    parser.add_option("--make-ise", dest="make_ise", action="store_true",
                      default=None, help="generate a makefile for local ISE synthesis")
    parser.add_option("--make-remote", dest="make_remote", action="store_true",
                      default=None, help="generate a makefile for remote synthesis")
    parser.add_option("-f", "--fetch", action="store_true", dest="fetch",
                      default=None,
                      help="fetch and/or update remote modules listed in Manifest")
    parser.add_option("--clean", action="store_true", dest="clean",
                      default=None, help="remove all modules fetched for this one")
    parser.add_option("--list", action="store_true", dest="list",
                      default=None, help="List all modules together with their files")
    parser.add_option("--list-files", action="store_true", dest="list_files",
                      default=None,
                      help="List all files in a from of a space-separated string")
    # BUG FIX: the option string was "--merge-cores=name"; optparse splits
    # user input at '=', so an option registered WITH '=' in its name can
    # never match. Plain "--merge-cores" makes "--merge-cores=name" work.
    parser.add_option("--merge-cores", default=None, dest="merge_cores",
                      help="Merges entire synthesizable content of an project into a pair of VHDL/Verilog files")
    # BUG FIX: the concatenated help strings were missing a space
    # ("...list of projectfiles").
    parser.add_option("--ise-proj", action="store_true", dest="ise_proj",
                      default=None,
                      help="create/update an ise project including list of project "
                           "files")
    parser.add_option("--quartus-proj", action="store_true", dest="quartus_proj",
                      default=None,
                      help="create/update a quartus project including list of project "
                           "files")
    parser.add_option("-l", "--synthesize-locally", dest="local",
                      default=None, action="store_true",
                      help="perform a local synthesis")
    parser.add_option("-r", "--synthesize-remotelly", dest="remote",
                      default=None, action="store_true",
                      help="perform a remote synthesis")
    parser.add_option("--synth-server", dest="synth_server", default=None,
                      help="use given SERVER for remote synthesis", metavar="SERVER")
    parser.add_option("--synth-user", dest="synth_user", default=None,
                      help="use given USER for remote synthesis", metavar="USER")
    parser.add_option("--force-ise", dest="force_ise", default=None, type=float,
                      help="Force given ISE version to be used in"
                           " synthesis,use 0 for current version", metavar="ISE")
    parser.add_option("--py", dest="arbitrary_code", default="",
                      help="add arbitrary code to all manifests' evaluation")
    # BUG FIX: default was the STRING "false", which is truthy, so
    # options.verbose / options.print_version tested as "set" even when the
    # flag was absent. Boolean False is the correct default for store_true.
    parser.add_option("-v", "--verbose", dest="verbose", action="store_true",
                      default=False, help="verbose mode")
    parser.add_option("--version", dest="print_version", action="store_true",
                      default=False, help="print version id of this Hdlmake build")
    (options, _) = parser.parse_args()
    global_mod.options = options

    # HANDLE PROJECT INDEPENDENT OPTIONS
    if options.manifest_help == True:
        from manifest_parser import ManifestParser
        ManifestParser().help()
        quit()
    if options.print_version == True:
        p.print_version()
        quit()
    # Check later if a simulation tool should have been specified
    if options.make_isim == True:
        global_mod.sim_tool = "isim"
    elif options.make_vsim == True:
        global_mod.sim_tool = "vsim"
    p.info("Simulation tool: " + str(global_mod.sim_tool))
    p.vprint("LoadTopManifest")

    pool = ModulePool()
    pool.new_module(parent=None, url=os.getcwd(), source="local", fetchto=".")
    if pool.get_top_module().manifest == None:
        # Repaired string literal (was broken across a raw newline in source).
        p.rawprint("No manifest found. At least an empty one is needed")
        p.rawprint("To see some help, type hdlmake --help")
        quit()
    global_mod.top_module = pool.get_top_module()
    global_mod.global_target = global_mod.top_module.target

    ssh = Connection(ssh_user=options.synth_user,
                     ssh_server=options.synth_server)
    from hdlmake_kernel import HdlmakeKernel
    kernel = HdlmakeKernel(modules_pool=pool, connection=ssh, options=options)

    # Maps an option attribute name to the kernel method it triggers.
    options_kernel_mapping = {
        "fetch": "fetch",
        "make_vsim": "generate_vsim_makefile",
        "make_isim": "generate_isim_makefile",
        "ise_proj": "generate_ise_project",
        "quartus_proj": "generate_quartus_project",
        "local": "run_local_synthesis",
        "remote": "run_remote_synthesis",
        "make_fetch": "generate_fetch_makefile",
        "make_ise": "generate_ise_makefile",
        "make_remote": "generate_remote_synthesis_makefile",
        "list": "list_modules",
        "clean": "clean_modules",
        "merge_cores": "merge_cores"
    }
    sth_chosen = False
    import traceback
    for option, function in options_kernel_mapping.items():
        try:
            is_set = getattr(options, option)
            if is_set:
                sth_chosen = True
                getattr(kernel, function)()
        # Modernized from the Py2-only "except Exception, unknown_error"
        # form; "as" is valid in Python 2.6+ and Python 3.
        except Exception as unknown_error:
            p.echo("Oooops! We've got an error. Here is the appropriate info:\n")
            p.print_version()
            print(unknown_error)
            traceback.print_exc()
def transfer_files_back(self, what, where):
    """Copy files back from the remote synthesis machine with rsync.

    what  -- remote path (on self.ssh_server) to fetch
    where -- local destination path
    """
    self.__check()
    # BUG FIX: paths were interpolated unquoted into a shell command, so
    # spaces or shell metacharacters in either path would break (or worse,
    # be executed by) the shell. Quote them, consistent with
    # transfer_files_forth(), which already uses pipes.quote.
    from pipes import quote
    rsync_cmd = ("rsync -av "
                 + quote(self.ssh_user + "@" + self.ssh_server + ":" + what)
                 + " " + quote(where))
    p.vprint(rsync_cmd)
    os.system(rsync_cmd)
def fetch(self, unfetched_only=False):
    """Fetch the needed modules and report the resulting pool contents.

    unfetched_only -- when True, only modules not yet fetched are processed.
    """
    p.info("Fetching needed modules.")
    pool = self.modules_pool
    pool.fetch_all(unfetched_only)
    p.vprint(str(pool))