def create_library(self, library_name, path):
    """
    Create the library at *path* with vlib when missing and (re-)map
    *library_name* to it with vmap when no valid mapping exists.
    """
    library_dir = dirname(path)
    if not file_exists(library_dir):
        os.makedirs(library_dir)

    if not file_exists(path):
        vlib_proc = Process(['vlib', '-unix', path])
        vlib_proc.consume_output(callback=None)

    # Query the current mapping. vmap exits non-zero when the library is
    # unmapped, but its output is still inspected below, so swallow that.
    try:
        proc = Process(
            ['vmap', '-modelsimini', self._modelsim_ini, library_name])
        proc.consume_output(callback=None)
    except Process.NonZeroExitCode:
        pass

    # Re-map when vmap explicitly reports no mapping, or when the mapped
    # directory it reports no longer exists on disk.
    match = self._vmap_pattern.search(proc.output)
    do_vmap = ('No mapping for library' in proc.output
               or (match is not None and not file_exists(match.group('dir'))))

    if do_vmap:
        proc = Process([
            'vmap', '-modelsimini', self._modelsim_ini, library_name, path
        ])
        proc.consume_output(callback=None)
def create_library(self, library_name, path, mapped_libraries=None):
    """
    Create and map a library_name to path
    """
    if mapped_libraries is None:
        mapped_libraries = {}

    parent = dirname(abspath(path))
    if not file_exists(parent):
        os.makedirs(parent)

    if not file_exists(path):
        process = Process(
            [join(self._prefix, "vlib"), library_name, path],
            cwd=dirname(self._library_cfg),
            env=self.get_env(),
        )
        process.consume_output(callback=None)

    # Skip vmap when the library is already mapped to this exact path.
    if library_name in mapped_libraries and mapped_libraries[library_name] == path:
        return

    process = Process(
        [join(self._prefix, "vmap"), library_name, path],
        cwd=dirname(self._library_cfg),
        env=self.get_env(),
    )
    process.consume_output(callback=None)
def _needs_recompile(self, dependency_graph, source_file):
    """
    Return True when source_file must be recompiled: its stored hash is
    missing or stale, or a dependency's hash file was written after ours.
    """
    new_hash = source_file.md5()
    hash_file_name = self._hash_file_name_of(source_file)

    if not ostools.file_exists(hash_file_name):
        logger.debug("%s has no vunit_hash file at %s and must be recompiled",
                     source_file.name, hash_file_name)
        return True

    if ostools.read_file(hash_file_name) != new_hash:
        logger.debug("%s has different hash than last time and must be recompiled",
                     source_file.name)
        return True

    for dependency in dependency_graph.get_dependencies(source_file):
        dependency_hash_file = self._hash_file_name_of(dependency)
        if not ostools.file_exists(dependency_hash_file):
            continue
        # A dependency hash file newer than ours means the dependency was
        # compiled after this file was.
        if (ostools.get_modification_time(dependency_hash_file)
                > ostools.get_modification_time(hash_file_name)):
            logger.debug("%s has dependency compiled earlier and must be recompiled",
                         source_file.name)
            return True

    logger.debug("%s has same hash file and must not be recompiled",
                 source_file.name)
    return False
def _needs_recompile(self, dependency_graph, source_file):
    """
    Returns True if the source_file needs to be recompiled
    given the dependency_graph, the file contents and the last modification time
    """
    expected_hash = source_file.content_hash
    hash_file_name = self._hash_file_name_of(source_file)

    if not ostools.file_exists(hash_file_name):
        LOGGER.debug("%s has no vunit_hash file at %s and must be recompiled",
                     source_file.name, hash_file_name)
        return True

    if ostools.read_file(hash_file_name) != expected_hash:
        LOGGER.debug("%s has different hash than last time and must be recompiled",
                     source_file.name)
        return True

    for dependency in dependency_graph.get_direct_dependencies(source_file):
        dependency_hash_file = self._hash_file_name_of(dependency)
        if not ostools.file_exists(dependency_hash_file):
            continue
        if more_recent(dependency_hash_file, hash_file_name):
            LOGGER.debug("%s has dependency compiled earlier and must be recompiled",
                         source_file.name)
            return True

    LOGGER.debug("%s has same hash file and must not be recompiled",
                 source_file.name)
    return False
def create_library(self, library_name, path):
    """
    Ensure the vlib library at *path* exists and that *library_name*
    is mapped to it in the modelsim.ini file.
    """
    parent = dirname(path)
    if not file_exists(parent):
        os.makedirs(parent)

    if not file_exists(path):
        vlib_proc = Process(['vlib', '-unix', path])
        vlib_proc.consume_output(callback=None)

    # Ask vmap for the current mapping; a non-zero exit simply means the
    # library is not mapped yet, so ignore it and inspect the output.
    try:
        proc = Process(['vmap', '-modelsimini', self._modelsim_ini, library_name])
        proc.consume_output(callback=None)
    except Process.NonZeroExitCode:
        pass

    # A mapping is (re-)created when vmap reports none, or when the
    # reported directory has disappeared from disk.
    match = self._vmap_pattern.search(proc.output)
    if 'No mapping for library' in proc.output:
        do_vmap = True
    elif match:
        do_vmap = not file_exists(match.group('dir'))
    else:
        do_vmap = False

    if do_vmap:
        proc = Process(['vmap', '-modelsimini', self._modelsim_ini,
                        library_name, path])
        proc.consume_output(callback=None)
def _create_modelsim_ini(self):
    """
    Create the modelsim.ini file if it does not exist
    """
    if file_exists(self._modelsim_ini):
        return

    parent = dirname(self._modelsim_ini)
    if not file_exists(parent):
        os.makedirs(parent)

    # Copy the installation's template modelsim.ini verbatim; binary mode
    # avoids any newline translation.
    with open(join(self._prefix, "..", "modelsim.ini"), 'rb') as template, \
            open(self._modelsim_ini, 'wb') as target:
        target.write(template.read())
def _create_modelsim_ini(self):
    """
    Create the modelsim.ini file if it does not exist
    """
    if not file_exists(self._modelsim_ini):
        # Seed it with a copy of the installation's template file.
        write_file(self._modelsim_ini,
                   read_file(join(self._prefix, "..", "modelsim.ini")))
def _preprocess(self, library_name, file_name, preprocessors):
    """
    Preprocess file_name within library_name using explicit preprocessors
    if preprocessors is None then use implicit globally defined processors
    """
    # @TODO dependency checking etc...

    if preprocessors is None:
        preprocessors = [self._location_preprocessor, self._check_preprocessor]
        preprocessors = [pre for pre in preprocessors if pre is not None]

    preprocessors = self._external_preprocessors + preprocessors

    if len(preprocessors) == 0:
        return file_name

    code = ostools.read_file(file_name)
    for preprocessor in preprocessors:
        code = preprocessor.run(code, basename(file_name))

    # Choose an output name that does not collide with an already written
    # preprocessed file; prepend an increasing index until it is unique.
    pp_file_name = join(self._preprocessed_path, library_name,
                        basename(file_name))
    idx = 1
    while ostools.file_exists(pp_file_name):
        LOGGER.debug("Preprocessed file exists '%s', adding prefix", pp_file_name)
        pp_file_name = join(self._preprocessed_path,
                            library_name,
                            "%i_%s" % (idx, basename(file_name)))
        idx += 1

    ostools.write_file(pp_file_name, code)
    return pp_file_name
def add_source_file(self,  # pylint: disable=too-many-arguments
                    file_name, library_name, file_type='vhdl',
                    include_dirs=None, defines=None,
                    vhdl_standard='2008', no_parse=False):
    """
    Add a file_name as a source file in library_name with file_type

    :param no_parse: Do not parse file contents
    """
    if not ostools.file_exists(file_name):
        raise ValueError("File %r does not exist" % file_name)

    LOGGER.debug('Adding source file %s to library %s', file_name, library_name)
    self._validate_library_name(library_name)
    library = self._libraries[library_name]

    if file_type == "vhdl":
        # include_dirs only make sense for Verilog sources.
        assert include_dirs is None
        source_file = VHDLSourceFile(file_name,
                                     library,
                                     vhdl_parser=self._vhdl_parser,
                                     vhdl_standard=vhdl_standard,
                                     no_parse=no_parse)
        library.add_vhdl_design_units(source_file.design_units)
    elif file_type == "verilog":
        source_file = VerilogSourceFile(file_name,
                                        library,
                                        self._verilog_parser,
                                        include_dirs,
                                        defines,
                                        no_parse)
        library.add_verilog_design_units(source_file.design_units)
    else:
        raise ValueError(file_type)

    library.add_source_file(source_file)
    self._source_files_in_order.append(source_file)
    return source_file
def add_source_file(self,  # pylint: disable=too-many-arguments
                    file_name, library_name, file_type='vhdl',
                    include_dirs=None, defines=None,
                    vhdl_standard=None, no_parse=False):
    """
    Add a file_name as a source file in library_name with file_type

    :param no_parse: Do not parse file contents
    """
    if not ostools.file_exists(file_name):
        raise ValueError("File %r does not exist" % file_name)

    LOGGER.debug('Adding source file %s to library %s', file_name, library_name)
    library = self._libraries[library_name]

    if file_type == "vhdl":
        # include_dirs only make sense for Verilog sources.
        assert include_dirs is None
        # Fall back to the library's VHDL standard when none was given.
        effective_standard = (library.vhdl_standard
                              if vhdl_standard is None else vhdl_standard)
        source_file = VHDLSourceFile(file_name,
                                     library,
                                     vhdl_parser=self._vhdl_parser,
                                     vhdl_standard=effective_standard,
                                     no_parse=no_parse)
        library.add_vhdl_design_units(source_file.design_units)
    elif file_type == "verilog":
        source_file = VerilogSourceFile(file_name,
                                        library,
                                        self._verilog_parser,
                                        include_dirs,
                                        defines,
                                        no_parse)
        library.add_verilog_design_units(source_file.design_units)
    else:
        raise ValueError(file_type)

    library.add_source_file(source_file)
    self._source_files_in_order.append(source_file)
    return source_file
def merge_coverage(self, file_name, args=None):
    """
    Merge coverage from all test cases
    """
    if self._persistent_shell is not None:
        # Teardown to ensure ucdb file was written.
        self._persistent_shell.teardown()

    if args is None:
        args = []

    # vcover reads the list of input files from a text file on disk.
    coverage_files = join(self._output_path, "coverage_files.txt")
    vcover_cmd = ([join(self._prefix, "vcover"), "merge", "-inputs"]
                  + [coverage_files]
                  + args
                  + [file_name])

    with open(coverage_files, "w") as fptr:
        for coverage_file in self._coverage_files:
            if file_exists(coverage_file):
                fptr.write(str(coverage_file) + "\n")
            else:
                LOGGER.warning("Missing coverage file: %s", coverage_file)

    print("Merging coverage files into %s..." % file_name)
    vcover_merge_process = Process(vcover_cmd, env=self.get_env())
    vcover_merge_process.consume_output()
    print("Done merging coverage files")
def _preprocess(self, library_name, file_name, preprocessors):
    """
    Preprocess file_name within library_name using the given preprocessors.

    When preprocessors is None the implicit, globally defined processors
    are used. Returns the name of the preprocessed file, or file_name
    unchanged when no preprocessor applies.
    """
    # @TODO dependency checking etc...

    if preprocessors is None:
        preprocessors = [self._location_preprocessor,
                         self._check_preprocessor]
        # Fixed: 'p is not None' instead of 'not p is None' (PEP 8 / E714)
        preprocessors = [p for p in preprocessors if p is not None]

    preprocessors = self._external_preprocessors + preprocessors

    if not preprocessors:
        return file_name

    code = ostools.read_file(file_name)
    for preprocessor in preprocessors:
        code = preprocessor.run(code, basename(file_name))

    pp_file_name = join(self._preprocessed_path, library_name,
                        basename(file_name))

    idx = 1
    while ostools.file_exists(pp_file_name):
        # Fixed: lazy %-args instead of eager string formatting in the call
        logger.debug("Preprocessed file exists '%s', adding prefix",
                     pp_file_name)
        pp_file_name = join(self._preprocessed_path,
                            library_name,
                            "%i_%s" % (idx, basename(file_name)))
        idx += 1

    ostools.write_file(pp_file_name, code)
    return pp_file_name
def _preprocess(self, library_name, file_name, preprocessors):
    """
    Preprocess file_name within library_name; fall back to the implicit
    globally defined processors when preprocessors is None. Returns the
    preprocessed file name, or file_name when nothing applies.
    """
    # @TODO dependency checking etc...

    if preprocessors is None:
        preprocessors = [self._location_preprocessor, self._check_preprocessor]
        # Fixed: 'p is not None' instead of 'not p is None' (PEP 8 / E714)
        preprocessors = [p for p in preprocessors if p is not None]

    preprocessors = self._external_preprocessors + preprocessors

    if not preprocessors:
        return file_name

    code = ostools.read_file(file_name)
    for preprocessor in preprocessors:
        code = preprocessor.run(code, basename(file_name))

    pp_file_name = join(self._preprocessed_path, library_name,
                        basename(file_name))

    idx = 1
    while ostools.file_exists(pp_file_name):
        # Fixed: lazy %-args instead of eager string formatting in the call
        logger.debug("Preprocessed file exists '%s', adding prefix",
                     pp_file_name)
        pp_file_name = join(self._preprocessed_path,
                            library_name,
                            "%i_%s" % (idx, basename(file_name)))
        idx += 1

    ostools.write_file(pp_file_name, code)
    return pp_file_name
def _determine_partial_pass(self, output_path):
    """
    Determine which individual test cases passed by scanning the
    test_runner_trace.csv log for test case entry points.

    @TODO is this a good way?
    """
    log_file = join(output_path, "test_runner_trace.csv")

    # Default every test case to FAILED until the log proves otherwise.
    retval = {}
    for name in self.test_cases:
        retval[name] = FAILED

    if not ostools.file_exists(log_file):
        return retval

    test_log = ostools.read_file(log_file)

    test_starts = []
    for test_name in self._test_cases:
        if "Test Runner,Test case: " + test_name in test_log:
            test_starts.append(test_name)

    # Every started test except the last one must have completed.
    for test_name in test_starts[:-1]:
        retval[self._full_name(test_name)] = PASSED

    for test_name in self._test_cases:
        # Fixed: 'not in' instead of 'not ... in' (PEP 8 / E713)
        if test_name not in test_starts:
            retval[self._full_name(test_name)] = SKIPPED

    return retval
def _determine_partial_pass(self, output_path):
    """
    In case of simulation failure determine which of the individual test
    cases failed. This is done by reading the test_runner_trace.csv file
    and checking for test case entry points.
    """
    log_file = join(output_path, "test_runner_trace.csv")

    # Assume failure for every test case until the log proves otherwise.
    retval = {name: FAILED for name in self.test_cases}

    if not ostools.file_exists(log_file):
        return retval

    test_log = ostools.read_file(log_file)

    test_starts = [test_name
                   for test_name in self._test_cases
                   if "Test Runner,Test case: " + test_name in test_log]

    # Every started test except the last one is known to have finished.
    for test_name in test_starts[:-1]:
        retval[self._full_name(test_name)] = PASSED

    for test_name in self._test_cases:
        if test_name not in test_starts:
            retval[self._full_name(test_name)] = SKIPPED

    return retval
def _create_modelsim_ini(self):
    """
    Create the modelsim.ini file if it does not exist
    """
    if file_exists(self._sim_cfg_file_name):
        return

    parent = dirname(self._sim_cfg_file_name)
    if not file_exists(parent):
        os.makedirs(parent)

    # The VUNIT_MODELSIM_INI environment variable overrides the simulator
    # installation's template file.
    template = os.environ.get("VUNIT_MODELSIM_INI",
                              join(self._prefix, "..", "modelsim.ini"))
    with open(template, 'rb') as fread, \
            open(self._sim_cfg_file_name, 'wb') as fwrite:
        fwrite.write(fread.read())
def post_process(self, output_path):
    """
    Merge coverage from all test cases,
    """
    if self._coverage is None:
        return

    # Teardown to ensure acdb file was written.
    del self._persistent_shell

    merged_coverage_file = join(output_path, "merged_coverage.acdb")

    # Build a single 'acdb merge' command listing every existing input.
    merge_command = "acdb merge"
    for coverage_file in self._coverage_files:
        if file_exists(coverage_file):
            merge_command += " -i {%s}" % coverage_file.replace('\\', '/')
        else:
            LOGGER.warning("Missing coverage file: %s", coverage_file)
    merge_command += " -o {%s}" % merged_coverage_file.replace('\\', '/')

    vcover_cmd = [join(self._prefix, 'vsim'),
                  '-c',
                  '-do',
                  '%s; quit;' % merge_command]

    print("Merging coverage files into %s..." % merged_coverage_file)
    vcover_merge_process = Process(vcover_cmd, env=self.get_env())
    vcover_merge_process.consume_output()
    print("Done merging coverage files")
def merge_coverage(self, file_name, args=None):
    """
    Merge coverage from all test cases,
    """
    # Teardown to ensure acdb file was written.
    # NOTE(review): assumes self._persistent_shell is always set here —
    # confirm callers never reach this without a persistent shell.
    self._persistent_shell.teardown()

    merge_command = "acdb merge"
    for coverage_file in self._coverage_files:
        if file_exists(coverage_file):
            merge_command += " -i {%s}" % coverage_file.replace('\\', '/')
        else:
            LOGGER.warning("Missing coverage file: %s", coverage_file)

    if args is not None:
        merge_command += " " + " ".join("{%s}" % arg for arg in args)

    merge_command += " -o {%s}" % file_name.replace('\\', '/')

    # The merge command is written to a tcl script which vsim sources.
    merge_script_name = join(self._output_path, "acdb_merge.tcl")
    with open(merge_script_name, "w") as fptr:
        fptr.write(merge_command + "\n")

    vcover_cmd = [join(self._prefix, 'vsim'),
                  '-c',
                  '-do',
                  'source %s; quit;' % merge_script_name.replace('\\', '/')]

    print("Merging coverage files into %s..." % file_name)
    vcover_merge_process = Process(vcover_cmd, env=self.get_env())
    vcover_merge_process.consume_output()
    print("Done merging coverage files")
def post_process(self, output_path):
    """
    Merge coverage from all test cases, top hierarchy level is removed
    since it has different name in each test case
    """
    if self._coverage is None:
        return

    # Teardown to ensure ucdb file was written.
    del self._persistent_shell

    merged_coverage_file = join(output_path, "merged_coverage.ucdb")
    vcover_cmd = [join(self._prefix, 'vcover'),
                  'merge',
                  '-strip', '1',
                  merged_coverage_file]

    for coverage_file in self._coverage_files:
        if file_exists(coverage_file):
            vcover_cmd.append(coverage_file)
        else:
            LOGGER.warning("Missing coverage file: %s", coverage_file)

    print("Merging coverage files into %s..." % merged_coverage_file)
    vcover_merge_process = Process(vcover_cmd, env=self.get_env())
    vcover_merge_process.consume_output()
    print("Done merging coverage files")
def merge_coverage(self, file_name, args=None):
    """
    Merge coverage from all test cases,
    """
    # Abort the generated tcl script on the first error.
    merge_command = "onerror {quit -code 1}\n"
    merge_command += "acdb merge"

    for coverage_file in self._coverage_files:
        if file_exists(coverage_file):
            merge_command += " -i {%s}" % fix_path(coverage_file)
        else:
            LOGGER.warning("Missing coverage file: %s", coverage_file)

    if args is not None:
        merge_command += " " + " ".join("{%s}" % arg for arg in args)

    merge_command += " -o {%s}" % fix_path(file_name) + "\n"

    merge_script_name = join(self._output_path, "acdb_merge.tcl")
    with open(merge_script_name, "w") as fptr:
        fptr.write(merge_command + "\n")

    vcover_cmd = [join(self._prefix, 'vsimsa'),
                  '-tcl',
                  '%s' % fix_path(merge_script_name)]

    print("Merging coverage files into %s..." % file_name)
    vcover_merge_process = Process(vcover_cmd, env=self.get_env())
    vcover_merge_process.consume_output()
    print("Done merging coverage files")
def add_source_file(self, file_name, library_name, file_type='vhdl',
                    include_dirs=None, defines=None):
    """
    Add a file_name as a source file in library_name with file_type
    """
    if not ostools.file_exists(file_name):
        raise ValueError("File %r does not exist" % file_name)

    LOGGER.info('Adding source file %s to library %s', file_name, library_name)
    self._validate_library_name(library_name)
    library = self._libraries[library_name]

    if file_type == "vhdl":
        # include_dirs only make sense for Verilog sources.
        assert include_dirs is None
        source_file = VHDLSourceFile(file_name,
                                     library,
                                     vhdl_parser=self._vhdl_parser)
        library.add_vhdl_design_units(source_file.design_units)
    elif file_type == "verilog":
        source_file = VerilogSourceFile(file_name,
                                        library,
                                        self._verilog_parser,
                                        include_dirs,
                                        defines)
        library.add_verilog_design_units(source_file.design_units)
    else:
        raise ValueError(file_type)

    library.add_source_file(source_file)
    self._source_files_in_order.append(source_file)
    return source_file
def scan_tests_from_file(self, file_name):
    """
    Scan file for test cases and pragmas
    """
    if not file_exists(file_name):
        raise ValueError("File %r does not exist" % file_name)

    def parse(content):
        """
        Parse pragmas and test case names
        """
        pragmas = _find_pragmas(content, file_name)
        test_case_names = _find_test_cases(content, file_name)
        return pragmas, test_case_names

    # Parsing results are cached keyed on the file contents.
    pragmas, test_case_names = cached("test_bench.parse",
                                      parse,
                                      file_name,
                                      encoding=HDL_FILE_ENCODING,
                                      database=self._database)

    default_config = Configuration(DEFAULT_NAME, self.design_unit)
    if "fail_on_warning" in pragmas:
        default_config.set_sim_option("vhdl_assert_stop_level", "warning")
    self._configs = OrderedDict({default_config.name: default_config})

    # Individual runs only apply when test cases exist and
    # run_all_in_same_sim was not requested.
    self._individual_tests = ("run_all_in_same_sim" not in pragmas
                              and len(test_case_names) > 0)
    self.test_cases = [TestCase(name,
                                self.design_unit,
                                self._individual_tests,
                                default_config.copy())
                       for name in test_case_names]
def merge_coverage(self, file_name, args=None):
    """
    Merge coverage from all test cases,
    """
    if self._persistent_shell is not None:
        # Teardown to ensure acdb file was written.
        self._persistent_shell.teardown()

    merge_command = "acdb merge"
    for coverage_file in self._coverage_files:
        if file_exists(coverage_file):
            merge_command += " -i {%s}" % coverage_file.replace('\\', '/')
        else:
            LOGGER.warning("Missing coverage file: %s", coverage_file)

    if args is not None:
        merge_command += " " + " ".join("{%s}" % arg for arg in args)

    merge_command += " -o {%s}" % file_name.replace('\\', '/')

    # The merge command is written to a tcl script which vsim sources.
    merge_script_name = join(self._output_path, "acdb_merge.tcl")
    with open(merge_script_name, "w") as fptr:
        fptr.write(merge_command + "\n")

    vcover_cmd = [join(self._prefix, 'vsim'),
                  '-c',
                  '-do',
                  'source %s; quit;' % merge_script_name.replace('\\', '/')]

    print("Merging coverage files into %s..." % file_name)
    vcover_merge_process = Process(vcover_cmd, env=self.get_env())
    vcover_merge_process.consume_output()
    print("Done merging coverage files")
def run(self, output_path):
    """
    Run the test case using the output_path
    """
    if not call_pre_config(self._config.pre_config, output_path):
        return False

    enabled_test_cases = [self._test_case]
    config = _add_runner_cfg(self._config, output_path, enabled_test_cases)

    sim_ok = self._simulator_if.simulate(
        join(output_path, self._simulator_if.name),
        self._name,
        config,
        elaborate_only=self._elaborate_only)

    if self._elaborate_only:
        return sim_ok

    # The results file is written by the test bench itself; its absence
    # means the simulation never completed.
    vunit_results_file = join(output_path, "vunit_results")
    if not ostools.file_exists(vunit_results_file):
        return False
    test_results = ostools.read_file(vunit_results_file)

    expected_results = ""
    if self._test_case is not None:
        expected_results += "test_start:%s\n" % self._test_case
    expected_results += "test_suite_done\n"

    if test_results != expected_results:
        return False

    if self._config.post_check is None:
        return True
    return self._config.post_check(output_path)
def create_library(self, library_name, library_path, mapped_libraries=None):
    """
    Create and map a library_name to library_path
    """
    if mapped_libraries is None:
        mapped_libraries = {}

    if not file_exists(abspath(library_path)):
        os.makedirs(abspath(library_path))

    # The 64-bit object subdirectory is created alongside the library.
    if not file_exists(abspath(library_path + "/64/")):
        os.makedirs(abspath(library_path + "/64/"))

    # Nothing to do when the mapping is already correct.
    if library_name in mapped_libraries and mapped_libraries[library_name] == library_path:
        return

    vcs = SetupFile.parse(self._vcssetup)
    vcs[library_name] = library_path
    vcs.write(self._vcssetup)
def create_library(self, library_name, path, mapped_libraries=None):
    """
    Create and map a library_name to path
    """
    if mapped_libraries is None:
        mapped_libraries = {}

    if not file_exists(dirname(path)):
        os.makedirs(dirname(path))

    if not file_exists(path):
        process = Process([join(self._prefix, 'vlib'), '-unix', path])
        process.consume_output(callback=None)

    # Skip vmap when the library is already mapped to this exact path.
    if library_name in mapped_libraries and mapped_libraries[library_name] == path:
        return

    process = Process([join(self._prefix, 'vmap'),
                       '-modelsimini', self._modelsim_ini,
                       library_name, path])
    process.consume_output(callback=None)
def scan_tests_from_file(self, file_name):
    """
    Scan file for test cases and attributes
    """
    if not file_exists(file_name):
        raise ValueError("File %r does not exist" % file_name)

    def parse(content):
        """
        Parse attributes and test case names
        """
        tests, attributes = _find_tests_and_attributes(content, file_name)
        return tests, attributes

    # Parsing results are cached keyed on the file contents.
    tests, attributes = cached("test_bench.parse",
                               parse,
                               file_name,
                               encoding=HDL_FILE_ENCODING,
                               database=self._database,
                               newline='')

    for attr in attributes:
        if _is_user_attribute(attr.name):
            raise RuntimeError("File global attributes are not yet supported: %s in %s line %i"
                               % (attr.name, file_name, attr.location.lineno))

    for test in tests:
        for attr in test.attributes:
            if attr.name in _VALID_ATTRIBUTES:
                raise RuntimeError("Attribute %s is global and cannot be associated with test %s: %s line %i"
                                   % (attr.name, test.name, file_name, attr.location.lineno))

    attribute_names = [attr.name for attr in attributes]

    default_config = Configuration(DEFAULT_NAME, self.design_unit)
    if "fail_on_warning" in attribute_names:
        default_config.set_sim_option("vhdl_assert_stop_level", "warning")
    self._configs = OrderedDict({default_config.name: default_config})

    explicit_tests = [test for test in tests if test.is_explicit]
    if explicit_tests:
        # All tests shall be explicit when there are at least one explicit test
        assert len(tests) == len(explicit_tests)
        self._implicit_test = None
    else:
        # There can only be one implicit test
        assert len(tests) == 1
        self._implicit_test = tests[0]

    self._individual_tests = ("run_all_in_same_sim" not in attribute_names
                              and len(explicit_tests) > 0)
    self._test_cases = [TestConfigurationVisitor(test,
                                                 self.design_unit,
                                                 self._individual_tests,
                                                 default_config.copy())
                        for test in explicit_tests]
def create_library(self, library_name, path, mapped_libraries=None):
    """
    Create and map a library_name to path
    """
    if mapped_libraries is None:
        mapped_libraries = {}

    parent = dirname(abspath(path))
    if not file_exists(parent):
        os.makedirs(parent)

    if not file_exists(path):
        process = Process([join(self._prefix, 'vlib'), library_name, path],
                          cwd=dirname(self._library_cfg))
        process.consume_output(callback=None)

    # Skip vmap when the library is already mapped to this exact path.
    if library_name in mapped_libraries and mapped_libraries[library_name] == path:
        return

    process = Process([join(self._prefix, 'vmap'), library_name, path],
                      cwd=dirname(self._library_cfg))
    process.consume_output(callback=None)
def _create_library_cfg(self):
    """
    Create the library.cfg file if it does not exist
    """
    if not file_exists(self._sim_cfg_file_name):
        # Seed the config with an include of the builtin library.cfg.
        with open(self._sim_cfg_file_name, "w") as ofile:
            ofile.write('$INCLUDE = "%s"\n' % self._builtin_library_cfg)
def _create_library_cfg(self):
    """
    Create the library.cfg file if it does not exist
    """
    if not file_exists(self._sim_cfg_file_name):
        # Seed the config with an include of the installation's library.cfg.
        with open(self._sim_cfg_file_name, "w") as ofile:
            ofile.write('$INCLUDE = "%s"\n'
                        % join(self._prefix, "..", "vlib", "library.cfg"))
def _create_library_cfg(self):
    """
    Create the library.cfg file if it does not exist
    """
    if not file_exists(self._library_cfg):
        # Seed the config with an include of the installation's library.cfg.
        with open(self._library_cfg, "w") as ofile:
            ofile.write('$INCLUDE = "%s"\n'
                        % join(self._prefix, "..", "vlib", "library.cfg"))
def create_library(self, library_name, path, mapped_libraries=None):
    """
    Create and map a library_name to path
    """
    if mapped_libraries is None:
        mapped_libraries = {}

    parent = dirname(abspath(path))
    if not file_exists(parent):
        os.makedirs(parent)

    if not file_exists(path):
        process = Process([join(self._prefix, 'vlib'), '-unix', path],
                          env=self.get_env())
        process.consume_output(callback=None)

    # Skip the update when the library is already mapped to this path.
    if library_name in mapped_libraries and mapped_libraries[library_name] == path:
        return

    # Record the mapping in the modelsim.ini [Library] section.
    cfg = parse_modelsimini(self._sim_cfg_file_name)
    cfg.set("Library", library_name, path)
    write_modelsimini(cfg, self._sim_cfg_file_name)
def create_library(self, library_name, path, mapped_libraries=None):
    """
    Create and map a library_name to path
    """
    if mapped_libraries is None:
        mapped_libraries = {}

    parent = dirname(abspath(path))
    if not file_exists(parent):
        os.makedirs(parent)

    if not file_exists(path):
        process = Process([join(self._prefix, 'vlib'), '-unix', path])
        process.consume_output(callback=None)

    # Skip the update when the library is already mapped to this path.
    if library_name in mapped_libraries and mapped_libraries[library_name] == path:
        return

    # Persist the mapping in the modelsim.ini [Library] section.
    cfg = RawConfigParser()
    cfg.read(self._modelsim_ini)
    cfg.set("Library", library_name, path)
    with open(self._modelsim_ini, "w") as optr:
        cfg.write(optr)
def add_source_file(  # pylint: disable=too-many-arguments
    self,
    file_name,
    library_name,
    file_type="vhdl",
    include_dirs=None,
    defines=None,
    vhdl_standard: Optional[VHDLStandard] = None,
    no_parse=False,
):
    """
    Add a file_name as a source file in library_name with file_type

    :param no_parse: Do not parse file contents
    """
    if not ostools.file_exists(file_name):
        raise ValueError("File %r does not exist" % file_name)

    LOGGER.debug("Adding source file %s to library %s", file_name, library_name)
    library = self._libraries[library_name]

    if file_type == "vhdl":
        # include_dirs only make sense for Verilog sources.
        assert include_dirs is None
        source_file: SourceFile = VHDLSourceFile(
            file_name,
            library,
            vhdl_parser=self._vhdl_parser,
            database=self._database,
            # Fall back to the library's VHDL standard when none was given.
            vhdl_standard=library.vhdl_standard if vhdl_standard is None else vhdl_standard,
            no_parse=no_parse,
        )
    elif file_type in VERILOG_FILE_TYPES:
        source_file = VerilogSourceFile(
            file_type,
            file_name,
            library,
            verilog_parser=self._verilog_parser,
            database=self._database,
            include_dirs=include_dirs,
            defines=defines,
            no_parse=no_parse,
        )
    else:
        raise ValueError(file_type)

    # The library deduplicates; only track files actually newly added.
    old_source_file = library.add_source_file(source_file)
    if id(source_file) == id(old_source_file):
        self._source_files_in_order.append(source_file)

    return old_source_file
def _create_modelsim_ini(self):
    """
    Create the modelsim.ini file
    """
    parent = dirname(self._sim_cfg_file_name)
    if not file_exists(parent):
        os.makedirs(parent)

    # The VUNIT_MODELSIM_INI environment variable overrides the simulator
    # installation's template file.
    template = os.environ.get("VUNIT_MODELSIM_INI",
                              join(self._prefix, "..", "modelsim.ini"))
    with open(template, 'rb') as fread, \
            open(self._sim_cfg_file_name, 'wb') as fwrite:
        fwrite.write(fread.read())
def create_library(self, library_name, path, mapped_libraries=None):
    """
    Create and map a library_name to path
    """
    if mapped_libraries is None:
        mapped_libraries = {}

    parent = dirname(abspath(path))
    if not file_exists(parent):
        os.makedirs(parent)

    if not file_exists(path):
        vlib_process = Process([join(self._prefix, 'vlib'), '-unix', path])
        vlib_process.consume_output(callback=None)

    # Nothing to do when the library is already mapped to this path.
    if library_name in mapped_libraries and mapped_libraries[library_name] == path:
        return

    # Persist the mapping in the modelsim.ini [Library] section.
    cfg = RawConfigParser()
    cfg.read(self._modelsim_ini)
    cfg.set("Library", library_name, path)
    with open(self._modelsim_ini, "w") as optr:
        cfg.write(optr)
def _get_compile_timestamps(self, files):
    """
    Return a dictionary of mapping file to the timestamp when it
    was compiled or None if it was not compiled
    """
    # Cache timestamps to avoid duplicate file operations
    timestamps = {}
    for source_file in files:
        hash_file = self._hash_file_name_of(source_file)
        timestamps[source_file] = (
            ostools.get_modification_time(hash_file)
            if ostools.file_exists(hash_file)
            else None)
    return timestamps
def create_library(self, library_name, library_path, mapped_libraries=None):
    """
    Create and map a library_name to library_path
    """
    if mapped_libraries is None:
        mapped_libraries = {}

    parent = dirname(abspath(library_path))
    if not file_exists(parent):
        os.makedirs(parent)

    # Nothing to do when the mapping is already correct.
    if library_name in mapped_libraries and mapped_libraries[library_name] == library_path:
        return

    cds = CDSFile.parse(self._cdslib)
    cds[library_name] = library_path
    cds.write(self._cdslib)
def describe_location(location, first=True):
    """
    Describe the location as a string
    """
    if location is None:
        return "Unknown location"

    ((file_name, (start, end)), previous) = location

    retval = ""
    # Describe the chained previous location first, recursively.
    if previous is not None:
        retval += describe_location(previous, first=False) + "\n"

    if file_name is None:
        retval += "Unknown Python string"
        return retval

    if not file_exists(file_name):
        retval += "Unknown location in %s" % file_name
        return retval

    contents = read_file(file_name)

    prefix = "at" if first else "from"

    # Walk the file tracking the absolute character offset of each line so
    # the (start, end) offsets can be mapped to a line number.
    offset = 0
    for lineno, line in enumerate(contents.splitlines()):
        line_start = offset
        line_end = line_start + len(line)
        if line_start <= start <= line_end:
            retval += "%s %s line %i:\n" % (
                prefix,
                simplify_path(file_name),
                lineno + 1,
            )
            retval += line + "\n"
            # Underline the referenced span with '~', clipped to this line.
            retval += (" " * (start - line_start)) + ("~" * (min(line_end - 1, end) - start + 1))
            return retval
        offset = line_end + 1

    return retval
def _read_test_results(self, file_name):
    """
    Read test results from vunit_results file
    """
    results = {name: FAILED for name in self._test_cases}

    if not ostools.file_exists(file_name):
        return results

    test_results = ostools.read_file(file_name)

    test_starts = []
    test_suite_done = False
    for line in test_results.splitlines():
        if line.startswith("test_start:"):
            test_starts.append(line[len("test_start:"):])
        elif line.startswith("test_suite_done"):
            test_suite_done = True

    # Every started test except possibly the last is known to have passed;
    # the last one only passed when the whole suite completed.
    for idx, test_name in enumerate(test_starts):
        is_last = idx == len(test_starts) - 1
        if test_suite_done or not is_last:
            results[test_name] = PASSED

    for test_name in self._test_cases:
        # Anonymous test case
        if test_name is None:
            results[test_name] = PASSED if test_suite_done else FAILED
            continue
        if test_name not in test_starts:
            results[test_name] = SKIPPED

    # Sanity check: the results file must not mention unknown tests.
    for test_name in results:
        if test_name not in self._test_cases:
            raise RuntimeError("Got unknown test case %s" % test_name)

    return results
def scan_tests_from_file(self, file_name):
    """
    Scan file for test cases and pragmas
    """
    if not file_exists(file_name):
        raise ValueError("File %r does not exist" % file_name)

    def parse(content):
        """
        Parse pragmas and test case names
        """
        pragmas = _find_pragmas(content, file_name)
        tests = _find_tests(content, file_name)
        return pragmas, tests

    # Parsing results are cached keyed on the file contents.
    pragmas, tests = cached("test_bench.parse",
                            parse,
                            file_name,
                            encoding=HDL_FILE_ENCODING,
                            database=self._database)

    default_config = Configuration(DEFAULT_NAME, self.design_unit)
    if "fail_on_warning" in pragmas:
        default_config.set_sim_option("vhdl_assert_stop_level", "warning")
    self._configs = OrderedDict({default_config.name: default_config})

    explicit_tests = [test for test in tests if test.is_explicit]
    if explicit_tests:
        # All tests shall be explicit when there are at least one explicit test
        assert len(tests) == len(explicit_tests)
        self._implicit_test = None
    else:
        # There can only be one implicit test
        assert len(tests) == 1
        self._implicit_test = tests[0]

    self._individual_tests = ("run_all_in_same_sim" not in pragmas
                              and len(explicit_tests) > 0)
    self._test_cases = [TestConfigurationVisitor(test,
                                                 self.design_unit,
                                                 self._individual_tests,
                                                 default_config.copy())
                        for test in explicit_tests]
def _create_modelsim_ini(self):
    """
    Create the modelsim.ini file if it does not exist by running
    'vmap -c' in its directory.

    Raises subprocess.CalledProcessError when vmap fails, after logging
    the failure and printing vmap's output.
    """
    if file_exists(self._modelsim_ini):
        return

    # Fixed: join() with a single argument was a no-op; dirname suffices.
    cwd = dirname(self._modelsim_ini)

    try:
        env = os.environ.copy()
        # Fixed: 'del env["MODELSIM"]' raised an uncaught KeyError when the
        # variable was not set; pop with a default removes it only if present.
        env.pop("MODELSIM", None)
        subprocess.check_output([join(self._prefix, 'vmap'), '-c'],
                                cwd=cwd,
                                stderr=subprocess.PIPE,
                                env=env)
    except subprocess.CalledProcessError as exc:
        LOGGER.error("Failed to create %s by running 'vmap -c' in %s exit code was %i",
                     self._modelsim_ini, cwd, exc.returncode)
        print("== Output of 'vmap -c' " + ("=" * 60))
        print(exc.output)
        print("=======================" + ("=" * 60))
        raise
def add_source_file(self,  # pylint: disable=too-many-arguments
                    file_name,
                    library_name,
                    file_type='vhdl',
                    include_dirs=None,
                    defines=None,
                    vhdl_standard=None,
                    no_parse=False):
    """
    Add a file_name as a source file in library_name with file_type

    :param no_parse: Do not parse file contents
    """
    if not ostools.file_exists(file_name):
        raise ValueError("File %r does not exist" % file_name)

    LOGGER.debug('Adding source file %s to library %s',
                 file_name, library_name)
    library = self._libraries[library_name]

    if file_type == "vhdl":
        assert include_dirs is None
        # Fall back to the library default standard when none is given
        standard = (library.vhdl_standard
                    if vhdl_standard is None else vhdl_standard)
        source_file = VHDLSourceFile(file_name,
                                     library,
                                     vhdl_parser=self._vhdl_parser,
                                     database=self._database,
                                     vhdl_standard=standard,
                                     no_parse=no_parse)
    elif file_type in VERILOG_FILE_TYPES:
        source_file = VerilogSourceFile(file_type,
                                        file_name,
                                        library,
                                        verilog_parser=self._verilog_parser,
                                        database=self._database,
                                        include_dirs=include_dirs,
                                        defines=defines,
                                        no_parse=no_parse)
    else:
        raise ValueError(file_type)

    old_source_file = library.add_source_file(source_file)
    # Track insertion order only when the file was actually new
    if source_file is old_source_file:
        self._source_files_in_order.append(source_file)

    return old_source_file
def run(self, output_path):
    """
    Run the test case using the output_path
    """
    if not call_pre_config(self._pre_config, output_path):
        return False

    generics = {}
    if self._has_runner_cfg:
        generics["runner_cfg"] = encode_dict({
            "enabled_test_cases": encode_test_case(self._test_case),
            "output path": output_path.replace("\\", "/") + "/",
            "active python runner": True,
        })

    sim_ok = self._test_bench.run(output_path,
                                  generics,
                                  elaborate_only=self._elaborate_only)
    if self._elaborate_only:
        return sim_ok

    results_file = join(output_path, "vunit_results")
    if not ostools.file_exists(results_file):
        return False

    # The results file must match exactly what a clean run would write
    expected = ""
    if self._test_case is not None:
        expected += "test_start:%s\n" % self._test_case
    expected += "test_suite_done\n"

    if ostools.read_file(results_file) != expected:
        return False

    if self._post_check is None:
        return True

    return self._post_check(output_path)
def describe_location(location, first=True):
    """
    Describe the location as a string
    """
    if location is None:
        return "Unknown location"

    ((file_name, (start, end)), previous) = location

    # Prepend the description of the chained previous location, if any
    retval = ("" if previous is None
              else describe_location(previous, first=False) + "\n")

    if file_name is None:
        return retval + "Unknown Python string"

    if not file_exists(file_name):
        return retval + "Unknown location in %s" % file_name

    prefix = "at" if first else "from"

    offset = 0
    for lineno, line in enumerate(read_file(file_name).splitlines()):
        line_end = offset + len(line)
        if offset <= start <= line_end:
            # Underline the span with '~', clipped to the end of this line
            marker_len = min(line_end - 1, end) - start + 1
            retval += "%s %s line %i:\n" % (prefix,
                                            simplify_path(file_name),
                                            lineno + 1)
            retval += line + "\n"
            retval += (" " * (start - offset)) + ("~" * marker_len)
            return retval
        offset = line_end + 1

    return retval
def merge_coverage(self, file_name, args=None):
    """
    Merge coverage from all test cases into file_name using 'vcover merge'.

    :param file_name: Path of the merged coverage output file
    :param args: Optional list of extra arguments passed to vcover
    """
    # Guard against a missing persistent shell; the sibling variant of
    # this method performs the same None check before teardown.
    if self._persistent_shell is not None:
        # Teardown to ensure ucdb file was written.
        self._persistent_shell.teardown()

    if args is None:
        args = []

    vcover_cmd = [join(self._prefix, 'vcover'), 'merge'] + args + [file_name]
    for coverage_file in self._coverage_files:
        if file_exists(coverage_file):
            vcover_cmd.append(coverage_file)
        else:
            LOGGER.warning("Missing coverage file: %s", coverage_file)

    print("Merging coverage files into %s..." % file_name)
    vcover_merge_process = Process(vcover_cmd, env=self.get_env())
    vcover_merge_process.consume_output()
    print("Done merging coverage files")
def _read_test_results(self, output_path):
    """
    Read test results from vunit_results file
    """
    results_file = join(output_path, "vunit_results")

    retval = {name: FAILED for name in self.test_cases}

    if not ostools.file_exists(results_file):
        return retval

    started = []
    suite_done = False
    for line in ostools.read_file(results_file).splitlines():
        if line.startswith("test_start:"):
            started.append(self._full_name(line[len("test_start:"):]))
        elif line.startswith("test_suite_done"):
            suite_done = True

    # Every started test except possibly the last is known to have passed;
    # the last passes only if the suite completed after it.
    num_passed = len(started) if suite_done else len(started) - 1
    for name in started[:max(num_passed, 0)]:
        retval[name] = PASSED

    for name in self.test_cases:
        if name not in started:
            retval[name] = SKIPPED

    return retval
def merge_coverage(self, file_name, args=None):
    """
    Merge coverage from all test cases
    """
    if self._persistent_shell is not None:
        # Teardown to ensure ucdb file was written.
        self._persistent_shell.teardown()

    extra_args = [] if args is None else args

    merge_cmd = [join(self._prefix, 'vcover'), 'merge']
    merge_cmd += extra_args
    merge_cmd.append(file_name)

    for coverage_file in self._coverage_files:
        if not file_exists(coverage_file):
            LOGGER.warning("Missing coverage file: %s", coverage_file)
            continue
        merge_cmd.append(coverage_file)

    print("Merging coverage files into %s..." % file_name)
    merge_process = Process(merge_cmd, env=self.get_env())
    merge_process.consume_output()
    print("Done merging coverage files")
def add_source_file(self, file_name, library_name, file_type='vhdl', include_dirs=None):
    """
    Add a file_name as a source file in library_name with file_type
    """
    if not ostools.file_exists(file_name):
        raise ValueError("File %r does not exist" % file_name)

    LOGGER.info('Adding source file %s to library %s',
                file_name, library_name)
    self._validate_library_name(library_name)
    target_library = self._libraries[library_name]

    if file_type == "vhdl":
        assert include_dirs is None
        new_file = VHDLSourceFile(file_name,
                                  target_library,
                                  vhdl_parser=self._vhdl_parser)
        target_library.add_vhdl_design_units(new_file.design_units)
    elif file_type == "verilog":
        new_file = VerilogSourceFile(file_name,
                                     target_library,
                                     self._verilog_parser,
                                     include_dirs)
        target_library.add_verilog_design_units(new_file.design_units)
    else:
        raise ValueError(file_type)

    target_library.add_source_file(new_file)
    self._source_files_in_order.append(new_file)
    return new_file
def scan_tests_from_file(self, file_name):
    """
    Scan a file other than the one containing the test bench for tests.
    Useful when the top level test bench does not itself contain the
    tests.  This structure is not the preferred way of doing things in
    VUnit, but the method exists to accommodate legacy needs.

    :param file_name: The name of another file to scan for tests

    .. warning::
       The nested module containing the tests needs to be given the
       ``runner_cfg`` parameter or generic by the instantiating top level
       test bench.  The nested module should not call its parameter or
       generic `runner_cfg` but rather `nested_runner_cfg` to avoid the
       VUnit test scanner detecting and running it as a test bench.  In
       SystemVerilog the ``NESTED_TEST_SUITE`` macro should be used
       instead of the ``TEST_SUITE`` macro.
    """
    if not ostools.file_exists(file_name):
        raise ValueError("File %r does not exist" % file_name)

    self._config.scan_tests_from_file(self._scope, file_name)
def post_process(self, output_path):
    """
    Merge coverage from all test cases, top hierarchy level is removed
    since it has different name in each test case
    """
    if self._coverage is None:
        return

    # Teardown to ensure ucdb file was written.
    self.teardown()

    merged_file = join(output_path, "merged_coverage.ucdb")

    # '-strip 1' drops the per-test-case top hierarchy level
    merge_cmd = [join(self._prefix, 'vcover'),
                 'merge',
                 '-strip', '1',
                 merged_file]

    for ucdb_file in self._coverage_files:
        if not file_exists(ucdb_file):
            LOGGER.warning("Missing coverage ucdb file: %s", ucdb_file)
        else:
            merge_cmd.append(ucdb_file)

    print("Merging coverage files into %s..." % merged_file)
    merge_process = Process(merge_cmd)
    merge_process.consume_output()
    print("Done merging coverage files")