def get_default_platform():
    """Return the platform class ASE should emulate.

    Precedence: the OPAE_ASE_DEFAULT_PLATFORM environment override,
    then the fme-platform-class.txt file from a platform release tree
    (located via BBS_LIB_PATH or OPAE_PLATFORM_ROOT), and finally the
    'intg_xeon' default.
    """
    override = os.environ.get('OPAE_ASE_DEFAULT_PLATFORM')
    if override is not None:
        return override

    # FPGA platform releases store the platform class in their
    # hw/lib tree.  Two environment variables may point to this tree.
    if 'BBS_LIB_PATH' in os.environ:
        # Legacy variable, shared with afu_synth_setup and HW releases
        hw_lib_dir = os.environ['BBS_LIB_PATH'].rstrip('/')
    elif 'OPAE_PLATFORM_ROOT' in os.environ:
        # Currently documented variable, pointing to a platform release
        hw_lib_dir = os.path.join(
            os.environ['OPAE_PLATFORM_ROOT'].rstrip('/'), 'hw/lib')
    else:
        hw_lib_dir = None

    if hw_lib_dir is None:
        return 'intg_xeon'

    # The release library stores the platform class.  Match the ASE
    # environment to the current platform.
    fname = os.path.join(hw_lib_dir, 'fme-platform-class.txt')
    try:
        with open(fname, 'r') as fd:
            return fd.read().strip()
    except Exception:
        ase_functions.begin_red_fontcolor()
        sys.stderr.write('Warning: expected to find FPGA platform ' +
                         'in {0}\n\n'.format(fname))
        ase_functions.end_red_fontcolor()

    return 'intg_xeon'
def errorExit(msg):
    """Report *msg* in red on stderr, clean up, and exit with status 1."""
    ase_functions.begin_red_fontcolor()
    sys.stderr.write("Error: " + msg + "\n")
    ase_functions.end_red_fontcolor()

    # Try to remove ase_sources.mk to make it clear something went
    # wrong; best effort only, so any failure here is ignored.
    try:
        os.remove('ase_sources.mk')
    except Exception:
        pass

    sys.exit(1)
def show_help():
    """Print command-line usage for this script, highlighted in red."""
    ase_functions.begin_red_fontcolor()
    for line in (
            "INCORRECT command, CORRECT (required) usage is:",
            "python generate_ase_environment.py <dir 1> [dir 2] ... [dir n] [-t <VCS|QUESTA>]",
            "",
            "Required switches => ",
            " <dir1> => Atleast one sources directory is required",
            " [dir2]...[dir n] => Other optional directories with sources",
            "",
            "Optional switches => ",
            " -h,--help => Show this help message",
            " -t,--tool => Enter tool type as 'VCS' or 'QUESTA'",
            ""):
        print(line)
    ase_functions.end_red_fontcolor()
def commands_list_getoutput(cmd):
    """Run *cmd* (an argv list) and return its stdout decoded as text.

    Exits via errorExit() with a helpful message when the executable is
    not on PATH.  Any other OSError is re-raised; a non-zero exit status
    prints the command's output in red and re-raises CalledProcessError.
    """
    # Bug fix: 'os.errno' was an accident of os importing errno and was
    # removed in Python 3.6 -- use the errno module directly.
    import errno

    try:
        byte_out = subprocess.check_output(cmd)
        str_out = byte_out.decode()
    except OSError as e:
        if e.errno == errno.ENOENT:
            msg = cmd[0] + " not found on PATH!\n"
            msg += "The installed OPAE SDK bin directory must be on " + \
                   "the PATH environment variable."
            errorExit(msg)
        else:
            raise
    except subprocess.CalledProcessError as e:
        ase_functions.begin_red_fontcolor()
        sys.stderr.write(e.output)
        ase_functions.end_red_fontcolor()
        raise
    return str_out
def get_default_platform():
    """Return the platform class ASE should emulate.

    OPAE_ASE_DEFAULT_PLATFORM always wins.  Otherwise consult the
    fme-platform-class.txt file in the BBS release library, falling
    back to 'intg_xeon' when neither source is available.
    """
    override = os.environ.get('OPAE_ASE_DEFAULT_PLATFORM')
    if override is not None:
        return override

    # FPGA platform releases use BBS_LIB_PATH to specify a path to
    # the release library.  The library stores the platform class.
    # Match the ASE environment to the current platform.
    if 'BBS_LIB_PATH' in os.environ:
        fname = os.path.join(os.environ['BBS_LIB_PATH'],
                             'fme-platform-class.txt')
        try:
            with open(fname, 'r') as fd:
                return fd.read().strip()
        except Exception:
            ase_functions.begin_red_fontcolor()
            sys.stderr.write('Warning: expected to find FPGA platform ' +
                             'in {0}\n\n'.format(fname))
            ase_functions.end_red_fontcolor()

    return 'intg_xeon'
def auto_find_sources(fd):
    """Discover RTL sources under args.dirlist and emit makefile variables.

    Writes DUT_VHD_SRC_LIST / DUT_VLOG_SRC_LIST / DUT_INCDIR entries to
    the makefile open on *fd*, warns when duplicate module names are
    found, and returns the path of an AFU descriptor JSON file if one
    exists (None otherwise).
    """
    # Prepare list of candidate directories.  Materialize into a list:
    # a lazy filter object has no len() and would be exhausted by its
    # first traversal, yet we must iterate this list many times below.
    print("Valid directories supplied => ")
    valid_dirlist = [p for p in args.dirlist if os.path.exists(p)]
    if len(valid_dirlist) == 0:
        # This line should never be reached since the directory list was
        # already checked after argument parsing.
        errorExit("No source directories specified")

    # Check if VHDL files exist, populate if any
    print("")
    print("Finding VHDL files ... ")
    vhdl_text = ""
    vhdl_filepaths = ""
    for extn in VHD_EXTENSIONS:
        for src_dir in valid_dirlist:
            for path in search_file("*" + extn, src_dir):
                vhdl_text += path + '\n'
    if len(vhdl_text) != 0:
        vhdl_text += "\n"
    if len(vhdl_text.strip()) != 0:
        with open(VHDL_FILE_LIST, "w") as flist:
            flist.write(vhdl_text)
        vhdl_filepaths = vhdl_text
        print("DUT_VHD_SRC_LIST = " + VHDL_FILE_LIST)
        fd.write("DUT_VHD_SRC_LIST = " + VHDL_FILE_LIST + " \n\n")
    else:
        print("No VHDL files were found !")

    # Check if V/SV files exist, populate if any
    print("")
    print("Finding {System}Verilog files ... ")
    vlog_text = ""
    vlog_filepaths = ""
    # Package files must be listed (compiled) before other sources.
    # Accumulate packages across ALL directories and extensions; the
    # old per-directory reassignment meant the dedup test below only
    # saw the last directory's packages.
    pkgfiles = []
    for extn in VLOG_EXTENSIONS:
        for src_dir in valid_dirlist:
            for path in search_file("*pkg*" + extn, src_dir):
                pkgfiles.append(path)
                vlog_text += path + '\n'
    for extn in VLOG_EXTENSIONS:
        for src_dir in valid_dirlist:
            for path in search_file("*" + extn, src_dir):
                if path not in pkgfiles:
                    vlog_text += path + '\n'
    if len(vlog_text) != 0:
        with open(VLOG_FILE_LIST, "w") as flist:
            flist.write(vlog_text)
        vlog_filepaths = vlog_text
        print("DUT_VLOG_SRC_LIST = " + VLOG_FILE_LIST)
        fd.write("DUT_VLOG_SRC_LIST = " + VLOG_FILE_LIST + " \n\n")
    else:
        print("No {System}Verilog files were found !")
    vlog_filepaths = remove_dups(VLOG_FILE_LIST, args.exclude)

    # Recursively find and add directory locations for VH
    print("")
    print("Finding include directories ... ")
    # use absolute path names in DUT_INCDIR to keep Questa happy
    incdir_text = ""
    for src_dir in valid_dirlist:
        for path in search_dir("*", src_dir):
            incdir_text += path + '\n'
    incdir_text = incdir_text.replace("\n", "+")
    if len(incdir_text) != 0:
        print("DUT_INCDIR = " + incdir_text)
        fd.write("DUT_INCDIR = " + incdir_text + "\n\n")

    # Module repetition check: a module defined in more than one file
    # will break RTL compilation, so warn about it here.
    vhdl_filepaths = vhdl_filepaths.replace("\n", " ").split()
    vlog_filepaths = vlog_filepaths.replace("\n", " ").split()
    all_filepaths = vhdl_filepaths + vlog_filepaths
    module_namelist = []
    module_files = defaultdict(list)
    for filepath in all_filepaths:
        with open(filepath) as src:
            for line in src:
                strip_line = line.strip()
                if strip_line.startswith("//"):
                    continue
                elif strip_line.startswith("module"):
                    words = strip_line.split()
                    # Guard against a bare "module" keyword on its own line
                    if len(words) < 2:
                        continue
                    modname = words[1]
                    module_files[modname].append(filepath)
                    module_namelist.append(modname)
    if has_duplicates(module_files):
        ase_functions.begin_red_fontcolor()
        print("\n")
        print("Duplicate module names were found in the RTL file lists.")
        print("Please remove them manually as RTL compilation is expected " +
              "to FAIL !")
        ase_functions.end_red_fontcolor()

    # Search for a JSON file describing the AFU.  Collect every
    # candidate -- the old code kept only the last match, which
    # defeated the loop over candidates that follows.
    json_file = None
    json_candidates = []
    for src_dir in valid_dirlist:
        json_candidates.extend(search_file("*.json", src_dir))
    for js in json_candidates:
        try:
            with open(js, 'r') as f:
                db = json.load(f)
            afu_image = db['afu-image']
            # If we get this far without an exception the JSON file looks
            # like an AFU descriptor.
            json_file = js
            break
        except ValueError:
            ase_functions.begin_red_fontcolor()
            sys.stderr.write("Error: reading JSON file {0}".format(js))
            ase_functions.end_red_fontcolor()
            raise
        except KeyError:
            # Ignore key error -- maybe the file isn't an AFU descriptor
            pass

    # Print auto-find instructions
    print("")
    ase_functions.begin_green_fontcolor()
    print("NOTES TO USER => ")
    print("* This script assumes File Extensions: ")
    print(" * VHDL : .vhd")
    print(" * V/SV : .sv .vs .v")
    print(' * If you use arbitrary extensions, please edit this script to '
          'reflect them, and re-run the script')
    print("* See ase_sources.mk and check for correctness")
    print("* See if DUT_INCDIR has all the locations mentioned")
    print(" * If a directory is missing, append it separated by '+' symbol")
    ase_functions.end_green_fontcolor()
    print("")

    return json_file
#######################################################################
# Prepare list of candidate directories
print("Valid directories supplied => ")
str_dirlist = ""
special_chars_in_path = 0
for loc in arg_list:
    loc = os.path.abspath(loc)
    # Remember whether any path contains characters that will need
    # special handling later (SPECIAL_CHARS is defined at file top).
    if set(SPECIAL_CHARS).intersection(loc):
        special_chars_in_path = 1
    if os.path.isdir(loc):
        valid_dirlist.append(loc)
        str_dirlist = str_dirlist + loc + "/ "
        print("\t", loc)

# If no legal directories, error out
if len(valid_dirlist) == 0:
    ase_functions.begin_red_fontcolor()
    print("No Valid source directories were specified ... please re-run script with legal directory name")
    show_help()
    ase_functions.end_red_fontcolor()
    # Exit with a nonzero status: this is an error path and callers
    # (makefiles, wrapper scripts) must be able to detect the failure.
    # The old code exited 0 here, reporting success on failure.
    sys.exit(1)

########################################################
###             Write Makefile snippet               ###
########################################################
fd = open("ase_sources.mk", "w")

# Print Information in ase_sources.mk
fd.write("####################################################################\n")
fd.write("#                                                                  #\n")
fd.write("#             Xeon(R) + FPGA AFU Simulation Environment            #\n")
fd.write("# File generated by AALSDK/ase/scripts/generate_ase_environment.py #\n")
def auto_find_sources(fd):
    """Discover RTL sources under args.dirlist and emit makefile variables.

    Writes DUT_VHD_SRC_LIST / DUT_VLOG_SRC_LIST / DUT_INCDIR entries to
    the makefile open on *fd*, warns when duplicate module names are
    found, and returns the path of an AFU descriptor JSON file if one
    exists (None otherwise).
    """
    # Prepare list of candidate directories.  Materialize into a list:
    # a lazy filter object has no len() and would be exhausted by its
    # first traversal, yet we must iterate this list many times below.
    print("Valid directories supplied => ")
    valid_dirlist = [p for p in args.dirlist if os.path.exists(p)]
    if len(valid_dirlist) == 0:
        # This line should never be reached since the directory list was
        # already checked after argument parsing.
        errorExit("No source directories specified")

    # Check if VHDL files exist, populate if any
    print("")
    print("Finding VHDL files ... ")
    vhdl_text = ""
    vhdl_filepaths = ""
    for extn in VHD_EXTENSIONS:
        for src_dir in valid_dirlist:
            for path in search_file("*" + extn, src_dir):
                vhdl_text += path + '\n'
    if len(vhdl_text) != 0:
        vhdl_text += "\n"
    if len(vhdl_text.strip()) != 0:
        with open(VHDL_FILE_LIST, "w") as flist:
            flist.write(vhdl_text)
        vhdl_filepaths = vhdl_text
        print("DUT_VHD_SRC_LIST = " + VHDL_FILE_LIST)
        fd.write("DUT_VHD_SRC_LIST = " + VHDL_FILE_LIST + " \n\n")
    else:
        print("No VHDL files were found !")

    # Check if V/SV files exist, populate if any
    print("")
    print("Finding {System}Verilog files ... ")
    vlog_text = ""
    vlog_filepaths = ""
    # Package files must be listed (compiled) before other sources.
    # Accumulate packages across ALL directories and extensions; the
    # old per-directory reassignment meant the dedup test below only
    # saw the last directory's packages.
    pkgfiles = []
    for extn in VLOG_EXTENSIONS:
        for src_dir in valid_dirlist:
            for path in search_file("*pkg*" + extn, src_dir):
                pkgfiles.append(path)
                vlog_text += path + '\n'
    for extn in VLOG_EXTENSIONS:
        for src_dir in valid_dirlist:
            for path in search_file("*" + extn, src_dir):
                if path not in pkgfiles:
                    vlog_text += path + '\n'
    if len(vlog_text) != 0:
        with open(VLOG_FILE_LIST, "w") as flist:
            flist.write(vlog_text)
        vlog_filepaths = vlog_text
        print("DUT_VLOG_SRC_LIST = " + VLOG_FILE_LIST)
        fd.write("DUT_VLOG_SRC_LIST = " + VLOG_FILE_LIST + " \n\n")
    else:
        print("No {System}Verilog files were found !")
    vlog_filepaths = remove_dups(VLOG_FILE_LIST, args.exclude)

    # Recursively find and add directory locations for VH
    print("")
    print("Finding include directories ... ")
    # use absolute path names in DUT_INCDIR to keep Questa happy
    incdir_text = ""
    for src_dir in valid_dirlist:
        for path in search_dir("*", src_dir):
            incdir_text += path + '\n'
    incdir_text = incdir_text.replace("\n", "+")
    if len(incdir_text) != 0:
        print("DUT_INCDIR = " + incdir_text)
        fd.write("DUT_INCDIR = " + incdir_text + "\n\n")

    # Module repetition check: a module defined in more than one file
    # will break RTL compilation, so warn about it here.
    vhdl_filepaths = vhdl_filepaths.replace("\n", " ").split()
    vlog_filepaths = vlog_filepaths.replace("\n", " ").split()
    all_filepaths = vhdl_filepaths + vlog_filepaths
    module_namelist = []
    module_files = defaultdict(list)
    for filepath in all_filepaths:
        with open(filepath) as src:
            for line in src:
                strip_line = line.strip()
                if strip_line.startswith("//"):
                    continue
                elif strip_line.startswith("module"):
                    words = strip_line.split()
                    # Guard against a bare "module" keyword on its own line
                    if len(words) < 2:
                        continue
                    modname = words[1]
                    module_files[modname].append(filepath)
                    module_namelist.append(modname)
    if has_duplicates(module_files):
        ase_functions.begin_red_fontcolor()
        print("\n")
        print("Duplicate module names were found in the RTL file lists.")
        print("Please remove them manually as RTL compilation is expected " +
              "to FAIL !")
        ase_functions.end_red_fontcolor()

    # Search for a JSON file describing the AFU.  Collect every
    # candidate -- the old code kept only the last match, which
    # defeated the loop over candidates that follows.
    json_file = None
    json_candidates = []
    for src_dir in valid_dirlist:
        json_candidates.extend(search_file("*.json", src_dir))
    for js in json_candidates:
        try:
            with open(js, 'r') as f:
                db = json.load(f)
            afu_image = db['afu-image']
            # If we get this far without an exception the JSON file looks
            # like an AFU descriptor.
            json_file = js
            break
        except ValueError:
            ase_functions.begin_red_fontcolor()
            sys.stderr.write("Error: reading JSON file {0}".format(js))
            ase_functions.end_red_fontcolor()
            raise
        except KeyError:
            # Ignore key error -- maybe the file isn't an AFU descriptor
            pass

    # Print auto-find instructions
    print("")
    ase_functions.begin_green_fontcolor()
    print("NOTES TO USER => ")
    print("* This script assumes File Extensions: ")
    print(" * VHDL : .vhd")
    print(" * V/SV : .sv .vs .v")
    print(' * If you use arbitrary extensions, please edit this script to '
          'reflect them, and re-run the script')
    print("* See ase_sources.mk and check for correctness")
    print("* See if DUT_INCDIR has all the locations mentioned")
    print(" * If a directory is missing, append it separated by '+' symbol")
    ase_functions.end_green_fontcolor()
    print("")

    return json_file