class VUnit(object):  # pylint: disable=too-many-instance-attributes, too-many-public-methods
    """
    The public interface of VUnit

    Wraps project management, preprocessing, compilation and test running
    behind one facade object.  Instances are normally created via the
    ``from_argv``/``from_args`` class methods rather than ``__init__``.
    """

    @classmethod
    def from_argv(cls, argv=None, compile_builtins=True):
        """
        Create VUnit instance from command line arguments
        Can take arguments from 'argv' if not None instead of sys.argv
        """
        args = VUnitCLI().parse_args(argv=argv)
        return cls.from_args(args, compile_builtins=compile_builtins)

    @classmethod
    def from_args(cls, args, compile_builtins=True):
        """
        Create VUnit instance from args namespace

        Translates the parsed CLI namespace into keyword arguments for
        ``__init__``.
        """
        def test_filter(name):
            # A test is kept if it matches any of the user supplied patterns
            return any(fnmatch(name, pattern) for pattern in args.test_patterns)

        return cls(output_path=args.output_path,
                   clean=args.clean,
                   vhdl_standard=select_vhdl_standard(),
                   use_debug_codecs=args.use_debug_codecs,
                   no_color=args.no_color,
                   verbose=args.verbose,
                   xunit_xml=args.xunit_xml,
                   log_level=args.log_level,
                   test_filter=test_filter,
                   list_only=args.list,
                   compile_only=args.compile,
                   elaborate_only=args.elaborate,
                   compile_builtins=compile_builtins,
                   simulator_factory=SimulatorFactory(args),
                   num_threads=args.num_threads,
                   exit_0=args.exit_0)

    def __init__(self,  # pylint: disable=too-many-locals, too-many-arguments
                 output_path,
                 simulator_factory,
                 clean=False,
                 use_debug_codecs=False,
                 no_color=False,
                 verbose=False,
                 xunit_xml=None,
                 log_level="warning",
                 test_filter=None,
                 list_only=False,
                 compile_only=False,
                 elaborate_only=False,
                 vhdl_standard='2008',
                 compile_builtins=True,
                 num_threads=1,
                 exit_0=False):
        # NOTE: the constructor has side effects beyond attribute setup: it
        # configures logging, (re)creates the output directory, creates the
        # project (with its persistent parse database) and, unless disabled,
        # compiles the VUnit builtin libraries.
        self._configure_logging(log_level)
        self._output_path = output_path
        if no_color:
            self._printer = NO_COLOR_PRINTER
        else:
            self._printer = COLOR_PRINTER
        self._verbose = verbose
        self._xunit_xml = xunit_xml
        # Default filter accepts every test when the caller supplied none
        self._test_filter = test_filter if test_filter is not None else lambda name: True
        self._list_only = list_only
        self._compile_only = compile_only
        self._vhdl_standard = vhdl_standard
        # tb_filter is a module-level helper (imported elsewhere in this
        # file) selecting which entities count as test benches
        self._tb_filter = tb_filter
        self._configuration = TestConfiguration(elaborate_only=elaborate_only)
        self._external_preprocessors = []
        self._location_preprocessor = None
        self._check_preprocessor = None
        self._use_debug_codecs = use_debug_codecs
        self._simulator_factory = simulator_factory
        self._create_output_path(clean)
        self._project = None
        self._create_project()
        self._num_threads = num_threads
        self._exit_0 = exit_0
        if compile_builtins:
            self.add_builtins(library_name="vunit_lib")

    def _create_project(self):
        """
        Create Project instance
        """
        database = self._create_database()
        self._project = Project(
            vhdl_parser=CachedVHDLParser(database=database),
            depend_on_package_body=self._simulator_factory.package_users_depend_on_bodies())

    def _create_database(self):
        """
        Create a persistent database to store expensive parse results

        Check for Python version used to create the database is the same as
        the running python instance or re-create
        """
        project_database_file_name = join(self._output_path, "project_database")
        create_new = False
        key = b"version"
        # Version stamp couples a schema number (4) with the interpreter
        # version; any mismatch forces a rebuild of the cache
        version = str((4, sys.version)).encode()
        database = None
        try:
            database = DataBase(project_database_file_name)
            create_new = (key not in database) or (database[key] != version)
        except KeyboardInterrupt:
            raise
        except:  # pylint: disable=bare-except
            # Deliberate catch-all: a corrupt cache must never break the run,
            # we log the traceback and start from a fresh database instead
            traceback.print_exc()
            create_new = True

        if create_new:
            database = DataBase(project_database_file_name, new=True)
        database[key] = version

        return PickledDataBase(database)

    @staticmethod
    def _configure_logging(log_level):
        """
        Configure logging based on log_level string
        """
        level = getattr(logging, log_level.upper())
        logging.basicConfig(filename=None, format='%(levelname)7s - %(message)s', level=level)

    def add_external_library(self, library_name, path):
        """
        Add external black box library
        """
        self._project.add_library(library_name, abspath(path), is_external=True)
        return self._create_library_facade(library_name)

    def add_library(self, library_name):
        """
        Add vunit managed white box library
        """
        path = join(self._simulator_factory.simulator_output_path, "libraries", library_name)
        self._project.add_library(library_name, abspath(path))
        return self._create_library_facade(library_name)

    def library(self, library_name):
        """
        Get reference to library

        Raises KeyError when the library has not been added.
        """
        if not self._project.has_library(library_name):
            raise KeyError(library_name)
        return self._create_library_facade(library_name)

    def _create_library_facade(self, library_name):
        """
        Create a Library object to be exposed to users
        """
        return LibraryFacade(library_name, self, self._project, self._configuration)

    def set_generic(self, name, value):
        """
        Globally set generic
        """
        # Generic names are case-insensitive in VHDL, hence the lower()
        self._configuration.set_generic(name.lower(), value, scope=create_scope())

    def set_parameter(self, name, value):
        """
        Globally set parameter

        Alias of set_generic (Verilog terminology).
        """
        self.set_generic(name, value)

    def set_sim_option(self, name, value):
        """
        Globally set simulation option
        """
        self._configuration.set_sim_option(name, value, scope=create_scope())

    def set_pli(self, value):
        """
        Globally set pli
        """
        self._configuration.set_pli(value, scope=create_scope())

    def disable_ieee_warnings(self):
        """
        Globally disable ieee warnings
        """
        self._configuration.disable_ieee_warnings(scope=create_scope())

    def add_source_files(self, pattern, library_name, preprocessors=None, include_dirs=None):
        """
        Add source files matching wildcard pattern to library
        """
        for file_name in glob(pattern):
            self.add_source_file(file_name, library_name, preprocessors, include_dirs)

    def add_source_file(self, file_name, library_name, preprocessors=None, include_dirs=None):
        """
        Add source file to library
        """
        file_type = file_type_of(file_name)

        if file_type == "verilog":
            include_dirs = include_dirs if include_dirs is not None else []
            # Always make the VUnit Verilog include directory available
            add_verilog_include_dir(include_dirs)

        file_name = self._preprocess(library_name, abspath(file_name), preprocessors)
        self._project.add_source_file(file_name,
                                      library_name,
                                      file_type=file_type,
                                      include_dirs=include_dirs)

    def _preprocess(self, library_name, file_name, preprocessors):
        """
        Preprocess file_name within library_name using explicit preprocessors
        if preprocessors is None then use implicit globally defined processors

        Returns the (possibly new) file name to add to the project.
        """
        # @TODO dependency checking etc...

        if preprocessors is None:
            preprocessors = [self._location_preprocessor, self._check_preprocessor]
            preprocessors = [p for p in preprocessors if p is not None]
            preprocessors = self._external_preprocessors + preprocessors

        if len(preprocessors) == 0:
            # Nothing to do; keep the original file untouched
            return file_name

        code = ostools.read_file(file_name)
        for preprocessor in preprocessors:
            code = preprocessor.run(code, basename(file_name))

        pp_file_name = join(self._preprocessed_path, library_name, basename(file_name))

        # Identical base names from different directories may collide within
        # one library; disambiguate with a numeric prefix
        idx = 1
        while ostools.file_exists(pp_file_name):
            LOGGER.debug("Preprocessed file exists '%s', adding prefix", pp_file_name)
            pp_file_name = join(self._preprocessed_path,
                                library_name,
                                "%i_%s" % (idx, basename(file_name)))
            idx += 1

        ostools.write_file(pp_file_name, code)
        return pp_file_name

    def add_preprocessor(self, preprocessor):
        """
        Add a custom preprocessor to be used on all files, must be called
        before adding any files
        """
        self._external_preprocessors.append(preprocessor)

    def enable_location_preprocessing(self, additional_subprograms=None):
        """
        Enable location preprocessing, must be called before adding any files
        """
        preprocessor = LocationPreprocessor()
        if additional_subprograms is not None:
            for subprogram in additional_subprograms:
                preprocessor.add_subprogram(subprogram)
        self._location_preprocessor = preprocessor

    def enable_check_preprocessing(self):
        """
        Enable check preprocessing, must be called before adding any files
        """
        self._check_preprocessor = CheckPreprocessor()

    def main(self):
        """
        Run vunit main function and exit with code
        """
        # NOTE(review): uses the builtin exit() which is provided by the
        # site module; sys.exit() would be more robust -- confirm intent
        try:
            all_ok = self._main()
        except KeyboardInterrupt:
            exit(1)
        except CompileError:
            exit(1)
        except TestScannerError:
            exit(1)
        except:  # pylint: disable=bare-except
            traceback.print_exc()
            exit(1)

        # --exit-0 forces exit code 0 even when tests failed
        if (not all_ok) and (not self._exit_0):
            exit(1)

        exit(0)

    def _main(self):
        """
        Base vunit main function without performing exit

        Returns True when all tests passed.
        """
        if self._list_only:
            return self._main_list_only()

        if self._compile_only:
            return self._main_compile_only()

        simulator_if = self._create_simulator_if()
        test_cases = self._create_tests(simulator_if)
        self._compile(simulator_if)

        start_time = ostools.get_time()
        report = TestReport(printer=self._printer)
        try:
            self._run_test(test_cases, report)
            simulator_if.post_process(self._simulator_factory.simulator_output_path)
        except KeyboardInterrupt:
            print()
            LOGGER.debug("_main: Caught Ctrl-C shutting down")
        finally:
            # Release simulator resources deterministically before reporting
            del test_cases
            del simulator_if

        report.set_real_total_time(ostools.get_time() - start_time)
        self._post_process(report)

        return report.all_ok()

    def _main_list_only(self):
        """
        Main function when only listing test cases
        """
        # No simulator needed just to enumerate test names
        simulator_if = None
        test_suites = self._create_tests(simulator_if)

        num_tests = 0
        for test_suite in test_suites:
            for name in test_suite.test_cases:
                print(name)
                num_tests += 1
        print("Listed %i tests" % num_tests)
        return True

    def _main_compile_only(self):
        """
        Main function when only compiling
        """
        simulator_if = self._create_simulator_if()
        self._compile(simulator_if)
        return True

    def _create_output_path(self, clean):
        """
        Create or re-create the output path if necessary
        """
        if clean:
            ostools.renew_path(self._output_path)
        elif not exists(self._output_path):
            os.makedirs(self._output_path)

        # Preprocessed files are always regenerated from scratch
        ostools.renew_path(self._preprocessed_path)

    def _create_simulator_if(self):
        """
        Create a simulator interface instance
        """
        return self._simulator_factory.create()

    @property
    def vhdl_standard(self):
        # The VHDL standard in use, e.g. '2008'
        return self._vhdl_standard

    @property
    def _preprocessed_path(self):
        # Directory holding preprocessor output, per library
        return join(self._output_path, "preprocessed")

    @property
    def codecs_path(self):
        # Directory holding generated com codec packages
        return join(self._output_path, "codecs")

    @property
    def use_debug_codecs(self):
        # Whether the slower, human readable com codecs are used
        return self._use_debug_codecs

    def _create_tests(self, simulator_if):
        """
        Create the test suites by scanning the project
        """
        scanner = TestScanner(simulator_if, self._configuration)
        test_list = scanner.from_project(self._project, entity_filter=self._tb_filter)
        test_list.keep_matches(self._test_filter)
        return test_list

    def _compile(self, simulator_if):
        """
        Compile entire project
        """
        simulator_if.compile_project(self._project, self._vhdl_standard)

    def _run_test(self, test_cases, report):
        """
        Run the test suites and return the report
        """
        runner = TestRunner(report,
                            join(self._output_path, "tests"),
                            verbose=self._verbose,
                            num_threads=self._num_threads)
        runner.run(test_cases)

    def _post_process(self, report):
        """
        Print the report to stdout and optionally write it to an XML file
        """
        report.print_str()

        if self._xunit_xml is not None:
            xml = report.to_junit_xml_str()
            ostools.write_file(self._xunit_xml, xml)

    def add_builtins(self, library_name="vunit_lib", mock_lang=False, mock_log=False):
        """
        Add vunit VHDL builtin libraries
        """
        library = self.add_library(library_name)
        add_vhdl_builtins(library, self._vhdl_standard, mock_lang, mock_log)

    def add_com(self, library_name="vunit_lib", use_debug_codecs=None):
        """
        Add communication package
        """
        if not self._project.has_library(library_name):
            library = self.add_library(library_name)
        else:
            library = self.library(library_name)

        if use_debug_codecs is not None:
            self._use_debug_codecs = use_debug_codecs

        # Calls the module-level add_com helper, not this method
        add_com(library, self._vhdl_standard,
                use_debug_codecs=self._use_debug_codecs)

    def add_array_util(self, library_name="vunit_lib"):
        """
        Add array utility package
        """
        library = self.library(library_name)
        add_array_util(library, self._vhdl_standard)

    def add_osvvm(self, library_name="osvvm"):
        """
        Add osvvm library
        """
        if not self._project.has_library(library_name):
            library = self.add_library(library_name)
        else:
            library = self.library(library_name)
        add_osvvm(library)

    def get_project_compile_order(self, target=None):
        """
        Get all project files in compile order.  An optional target file may
        be specified causing only its direct and indirect dependencies to be
        included.
        """
        if target is not None:
            target = abspath(target)
        return self._project.get_dependencies_in_compile_order(target=target)
class TestProject(unittest.TestCase):  # pylint: disable=too-many-public-methods
    """
    Test the Project class

    Each test runs inside a freshly created scratch directory which is
    removed again in tearDown.  Helper methods such as add_source_file,
    assert_compiles, update and hash_file_name_of are defined further down
    in this class (outside the current view).
    """

    def setUp(self):
        # Work inside a throw-away directory so relative file names are safe
        self.output_path = join(dirname(__file__), "test_project_out")
        renew_path(self.output_path)
        self.project = Project()
        self.cwd = os.getcwd()
        os.chdir(self.output_path)

    def tearDown(self):
        # Restore cwd before deleting the scratch directory
        os.chdir(self.cwd)
        if exists(self.output_path):
            rmtree(self.output_path)

    def test_parses_entity_architecture(self):
        self.project.add_library("lib", "work_path")
        # Add architecture before entity to test that they are paired later
        self.add_source_file("lib", "file2.vhd", """\ architecture arch3 of foo is begin end architecture; """)
        file1 = self.add_source_file("lib", "file1.vhd", """\ entity foo is end entity; architecture arch of foo is begin end architecture; architecture arch2 of foo is begin end architecture; """)
        self.assert_has_entity(file1, "foo", architecture_names=["arch", "arch2", "arch3"])
        # Architectures added afterwards must also be paired with the entity
        self.add_source_file("lib", "file3.vhd", """\ architecture arch4 of foo is begin end architecture; """)
        self.assert_has_entity(file1, "foo", architecture_names=["arch", "arch2", "arch3", "arch4"])
        self.assert_has_architecture("file1.vhd", "arch", "foo")
        self.assert_has_architecture("file1.vhd", "arch2", "foo")
        self.assert_has_architecture("file2.vhd", "arch3", "foo")
        self.assert_has_architecture("file3.vhd", "arch4", "foo")

    def test_parses_entity_architecture_with_generics(self):
        self.project.add_library("lib", "work_path")
        file1 = self.add_source_file("lib", "file1.vhd", """\ entity foo is generic ( testing_that_foo : boolean; testing_that_bar : boolean); end entity; architecture arch of foo is begin end architecture; """)
        self.assert_has_entity(file1, "foo", generic_names=["testing_that_bar", "testing_that_foo"], architecture_names=["arch"])
        self.assert_has_architecture("file1.vhd", "arch", "foo")

    def test_parses_package(self):
        self.project.add_library("lib", "work_path")
        self.add_source_file("lib", "file1.vhd", """\ package foo is end package; package body foo is begin end package body; """)
        self.assert_has_package("file1.vhd", "foo")
        self.assert_has_package_body("file1.vhd", "foo")

    def test_finds_entity_instantiation_dependencies(self):
        file1, file2, file3 = self.create_dummy_three_file_project()
        self.assert_compiles(file1, before=file2)
        self.assert_compiles(file2, before=file3)

    def test_primary_with_same_name_in_multiple_libraries_secondary_dependency(self):
        self.project.add_library("lib1", "lib1_path")
        self.project.add_library("lib2", "lib2_path")
        foo_arch = self.add_source_file("lib1", "foo_arch.vhd", """ architecture arch of foo is begin end architecture; """)
        foo1_ent = self.add_source_file("lib1", "foo1_ent.vhd", """ entity foo is port (signal bar : boolean); end entity; """)
        # Same entity name in another library must not satisfy the dependency
        self.add_source_file("lib2", "foo2_ent.vhd", """ entity foo is end entity; """)
        self.assert_compiles(foo1_ent, before=foo_arch)

    def test_multiple_identical_file_names_with_different_path_in_same_library(self):
        self.project.add_library("lib", "lib_path")
        a_foo = self.add_source_file("lib", join("a", "foo.vhd"), """ entity a_foo is end entity; """)
        b_foo = self.add_source_file("lib", join("b", "foo.vhd"), """ entity b_foo is end entity; """)
        self.assert_should_recompile([a_foo, b_foo])
        self.update(a_foo)
        self.assert_should_recompile([b_foo])
        self.update(b_foo)
        self.assert_should_recompile([])

    def test_finds_entity_architecture_dependencies(self):
        self.project.add_library("lib", "lib_path")
        entity = self.add_source_file("lib", "entity.vhd", """ entity foo is end entity; """)
        arch1 = self.add_source_file("lib", "arch1.vhd", """ architecture arch1 of foo is begin end architecture; """)
        arch2 = self.add_source_file("lib", "arch2.vhd", """ architecture arch2 of foo is begin end architecture; """)
        self.assert_compiles(entity, before=arch1)
        self.assert_compiles(entity, before=arch2)

    def test_finds_package_dependencies(self):
        self.project.add_library("lib", "lib_path")
        package = self.add_source_file("lib", "package.vhd", """ package foo is end package; """)
        body = self.add_source_file("lib", "body.vhd", """ package body foo is begin end package body; """)
        self.assert_compiles(package, before=body)

    def create_module_package_and_body(self, add_body=True):
        """
        Help function to create a three file project with a package,
        a package body and a module using the package
        """
        self.project.add_library("lib", "lib_path")
        package = self.add_source_file("lib", "package.vhd", """ package pkg is end package; """)

        body = None
        if add_body:
            body = self.add_source_file("lib", "body.vhd", """ package body pkg is begin end package body; """)

        self.project.add_library("lib2", "work_path")
        module = self.add_source_file("lib2", "module.vhd", """ library lib; use lib.pkg.all; entity module is end entity; architecture arch of module is begin end architecture; """)
        return package, body, module

    def test_finds_use_package_dependencies_case_insensitive(self):
        # Mixed-case library/use clauses must still resolve dependencies
        for library_clause, use_clause in itertools.combinations(("lib", "Lib"), 2):
            self.project = Project()
            self.project.add_library("Lib", "lib_path")
            package = self.add_source_file("Lib", "package.vhd", """ package pkg is end package; package body PKG is begin end package body; """)
            self.project.add_library("lib2", "lib2_path")
            module = self.add_source_file("lib2", "module.vhd", """ library {library_clause}; use {use_clause}.PKG.all; """.format(library_clause=library_clause, use_clause=use_clause))
            self.assert_compiles(package, before=module)

    def test_error_on_case_insensitive_library_name_conflict(self):
        self.project.add_library("Lib", "lib_path1")
        try:
            self.project.add_library("lib", "lib_path1")
        except RuntimeError as exception:
            self.assertEqual(str(exception), "Library name 'lib' not case-insensitive unique. " "Library name 'Lib' previously defined")
        else:
            raise AssertionError("RuntimeError not raised")

    def test_finds_use_package_dependencies(self):
        package, body, module = self.create_module_package_and_body()
        self.assert_compiles(package, before=body)
        self.assert_compiles(package, before=module)
        # Users of a package depend on its declaration, not its body
        self.assert_not_compiles(body, before=module)

    def test_finds_extra_package_body_dependencies(self):
        # Some simulators require users to depend on the body as well
        self.project = Project(depend_on_package_body=True)
        package, body, module = self.create_module_package_and_body()
        self.assert_compiles(package, before=body)
        self.assert_compiles(body, before=module)
        self.assert_compiles(package, before=module)

    def test_that_package_can_have_no_body(self):
        self.project = Project(depend_on_package_body=True)
        package, _, module = self.create_module_package_and_body(add_body=False)
        self.assert_compiles(package, before=module)

    def test_package_instantiation_dependencies_on_generic_package(self):
        self.project.add_library("pkg_lib", "pkg_lib_path")
        pkg = self.add_source_file("pkg_lib", "pkg.vhd", """ package pkg is end package; """)
        self.project.add_library("lib", "lib_path")
        ent = self.add_source_file("lib", "ent.vhd", """ library pkg_lib; entity ent is end entity; architecture a of ent is package pkg_inst is new pkg_lib.pkg; begin end architecture; """)
        self.assert_compiles(pkg, before=ent)

    def test_package_instantiation_dependencies_on_instantiated_package(self):
        self.project.add_library("lib", "lib_path")
        generic_pkg = self.add_source_file("lib", "generic_pkg.vhd", """ package generic_pkg is generic (foo : boolean); end package; """)
        instance_pkg = self.add_source_file("lib", "instance_pkg.vhd", """ package instance_pkg is new work.generic_pkg generic map (foo => false); """)
        user = self.add_source_file("lib", "user.vhd", """ use work.instance_pkg; """)
        self.assert_compiles(generic_pkg, before=instance_pkg)
        self.assert_compiles(instance_pkg, before=user)

    def test_finds_context_dependencies(self):
        self.project.add_library("lib", "lib_path")
        context = self.add_source_file("lib", "context.vhd", """ context foo is end context; """)
        self.project.add_library("lib2", "work_path")
        module = self.add_source_file("lib2", "module.vhd", """ library lib; context lib.foo; entity module is end entity; architecture arch of module is begin end architecture; """)
        self.assert_compiles(context, before=module)

    def test_finds_configuration_dependencies(self):
        self.project.add_library("lib", "lib_path")
        cfg = self.add_source_file("lib", "cfg.vhd", """ configuration cfg of ent is end configuration; """)
        ent = self.add_source_file("lib", "ent.vhd", """ entity ent is end entity; """)
        ent_a1 = self.add_source_file("lib", "ent_a1.vhd", """ architecture a1 of ent is begin end architecture; """)
        ent_a2 = self.add_source_file("lib", "ent_a2.vhd", """ architecture a2 of ent is begin end architecture; """)
        # A configuration depends on the entity and all of its architectures
        self.assert_compiles(ent, before=cfg)
        self.assert_compiles(ent_a1, before=cfg)
        self.assert_compiles(ent_a2, before=cfg)

    def test_finds_configuration_reference_dependencies(self):
        self.project.add_library("lib", "lib_path")
        cfg = self.add_source_file("lib", "cfg.vhd", """ configuration cfg of ent is end configuration; """)
        self.add_source_file("lib", "ent.vhd", """ entity ent is end entity; """)
        self.add_source_file("lib", "ent_a.vhd", """ architecture a of ent is begin end architecture; """)
        top = self.add_source_file("lib", "top.vhd", """ entity top is end entity; architecture a of top is for inst : comp use configuration work.cfg; begin inst : comp; end architecture; """)
        self.assert_compiles(cfg, before=top)

    def test_specific_architecture_reference_dependencies(self):
        """
        GHDL dependes also on architecture when specificially mentioned
        """
        self.project.add_library("lib", "lib_path")
        self.add_source_file("lib", "ent.vhd", """ entity ent is end entity; """)
        ent_a1 = self.add_source_file("lib", "ent_a1.vhd", """ architecture a1 of ent is begin end architecture; """)
        ent_a2 = self.add_source_file("lib", "ent_a2.vhd", """ architecture a2 of ent is begin end architecture; """)
        top1 = self.add_source_file("lib", "top1.vhd", """ entity top1 is end entity; architecture a of top1 is begin inst : entity work.ent(a1); end architecture; """)
        top2 = self.add_source_file("lib", "top2.vhd", """ entity top2 is end entity; architecture a of top2 is for inst : comp use entity work.ent(a2); begin inst : comp; end architecture; """)
        self.assert_compiles(ent_a1, before=top1)
        self.assert_compiles(ent_a2, before=top2)

    @mock.patch("vunit.project.LOGGER")
    def test_warning_on_missing_specific_architecture_reference(self, mock_logger):
        self.project.add_library("lib", "lib_path")
        self.add_source_file("lib", "ent.vhd", """ entity ent is end entity; """)
        self.add_source_file("lib", "arch.vhd", """ architecture a1 of ent is begin end architecture; """)
        # Architecture a2 is referenced but never defined -> expect a warning
        self.add_source_file("lib", "top.vhd", """ entity top1 is end entity; architecture a of top1 is begin inst1 : entity work.ent(a1); inst2 : entity work.ent(a2); # Missing end architecture; """)
        self.project.get_files_in_compile_order()
        warning_calls = mock_logger.warning.call_args_list
        # Expand the lazy %-style log arguments to the final message
        log_msg = warning_calls[0][0][0] % warning_calls[0][0][1:]
        self.assertEqual(len(warning_calls), 1)
        self.assertIn("top.vhd", log_msg)
        self.assertIn("a2", log_msg)
        self.assertIn("lib.ent", log_msg)

    def test_error_on_duplicate_file(self):
        self.project.add_library("lib", "lib_path")
        file1 = self.add_source_file("lib", "file.vhd", "")
        self.assertRaises(RuntimeError, self.add_source_file, "lib", "file.vhd", "")
        self.assertEqual(self.project.get_source_files_in_order(), [file1])

    def _test_warning_on_duplicate(self, code, message, verilog=False):
        """
        Utility function to test adding the same duplicate code under
        file.vhd and file_copy.vhd where the duplication should cause a
        warning message.
        """
        suffix = "v" if verilog else "vhd"

        self.add_source_file("lib", "file." + suffix, code)

        with mock.patch("vunit.project.LOGGER") as mock_logger:
            self.add_source_file("lib", "file_copy." + suffix, code)
            warning_calls = mock_logger.warning.call_args_list
            log_msg = warning_calls[0][0][0] % warning_calls[0][0][1:]
            self.assertEqual(len(warning_calls), 1)
            self.assertEqual(log_msg, message)

    def test_warning_on_duplicate_entity(self):
        self.project.add_library("lib", "lib_path")
        self._test_warning_on_duplicate( """ entity ent is end entity; """, "file_copy.vhd: entity 'ent' previously defined in file.vhd")

    def test_warning_on_duplicate_package(self):
        self.project.add_library("lib", "lib_path")
        self._test_warning_on_duplicate( """ package pkg is end package; """, "file_copy.vhd: package 'pkg' previously defined in file.vhd")

    def test_warning_on_duplicate_configuration(self):
        self.project.add_library("lib", "lib_path")
        self._test_warning_on_duplicate( """ configuration cfg of ent is end configuration; """, "file_copy.vhd: configuration 'cfg' previously defined in file.vhd")

    def test_warning_on_duplicate_package_body(self):
        self.project.add_library("lib", "lib_path")
        self.add_source_file("lib", "pkg.vhd", """ package pkg is end package; """)
        self._test_warning_on_duplicate( """ package body pkg is end package bodY; """, "file_copy.vhd: package body 'pkg' previously defined in file.vhd")

    def test_warning_on_duplicate_architecture(self):
        self.project.add_library("lib", "lib_path")
        self.add_source_file("lib", "ent.vhd", """ entity ent is end entity; """)
        # A differently named architecture must not trigger the warning
        self.add_source_file("lib", "arch.vhd", """ architecture a_no_duplicate of ent is begin end architecture; """)
        self._test_warning_on_duplicate( """ architecture a of ent is begin end architecture; """, "file_copy.vhd: architecture 'a' previously defined in file.vhd")

    def test_warning_on_duplicate_context(self):
        self.project.add_library("lib", "lib_path")
        self._test_warning_on_duplicate( """ context ctx is end context; """, "file_copy.vhd: context 'ctx' previously defined in file.vhd")

    def test_warning_on_duplicate_verilog_module(self):
        self.project.add_library("lib", "lib_path")
        self._test_warning_on_duplicate( """ module foo; endmodule """, "file_copy.v: module 'foo' previously defined in file.v", verilog=True)

    def test_warning_on_duplicate_verilog_package(self):
        self.project.add_library("lib", "lib_path")
        self._test_warning_on_duplicate( """ package pkg; endpackage """, "file_copy.v: package 'pkg' previously defined in file.v", verilog=True)

    def test_should_recompile_all_files_initially(self):
        file1, file2, file3 = self.create_dummy_three_file_project()
        self.assert_should_recompile([file1, file2, file3])
        # Asking twice without updating must give the same answer
        self.assert_should_recompile([file1, file2, file3])

    def test_updating_creates_hash_files(self):
        files = self.create_dummy_three_file_project()

        for source_file in files:
            self.update(source_file)
            self.assertTrue(exists(self.hash_file_name_of(source_file)))

    def test_should_not_recompile_updated_files(self):
        file1, file2, file3 = self.create_dummy_three_file_project()

        self.update(file1)
        self.assert_should_recompile([file2, file3])

        self.update(file2)
        self.assert_should_recompile([file3])

        self.update(file3)
        self.assert_should_recompile([])

    def test_should_recompile_files_affected_by_change(self):
        file1, file2, file3 = self.create_dummy_three_file_project()

        self.update(file1)
        self.update(file2)
        self.update(file3)
        self.assert_should_recompile([])

        file1, file2, file3 = self.create_dummy_three_file_project()
        self.assert_should_recompile([])

        # Changing file1 invalidates everything that depends on it
        file1, file2, file3 = self.create_dummy_three_file_project(update_file1=True)
        self.assert_should_recompile([file1, file2, file3])

    def test_should_recompile_files_after_changing_compile_options(self):
        file1, file2, file3 = self.create_dummy_three_file_project()

        self.update(file1)
        self.update(file2)
        self.update(file3)
        self.assert_should_recompile([])

        file2.set_compile_option("ghdl.flags", ["--no-vital-checks"])
        self.assert_should_recompile([file2, file3])

    def test_should_recompile_files_after_changing_vhdl_standard(self):
        write_file("file_name.vhd", "")

        self.project = Project()
        self.project.add_library("lib", "lib_path")
        source_file = self.project.add_source_file("file_name.vhd", library_name="lib", vhdl_standard='2008')
        self.assert_should_recompile([source_file])
        self.update(source_file)
        self.assert_should_recompile([])

        # Same file, different standard -> must recompile
        self.project = Project()
        self.project.add_library("lib", "lib_path")
        source_file = self.project.add_source_file("file_name.vhd", library_name="lib", vhdl_standard='2002')
        self.assert_should_recompile([source_file])

    def test_add_compile_option(self):
        self.project.add_library("lib", "lib_path")
        file1 = self.add_source_file("lib", "file.vhd", "")
        file1.add_compile_option("ghdl.flags", ["--foo"])
        self.assertEqual(file1.get_compile_option("ghdl.flags"), ["--foo"])
        # add appends, set replaces
        file1.add_compile_option("ghdl.flags", ["--bar"])
        self.assertEqual(file1.get_compile_option("ghdl.flags"), ["--foo", "--bar"])
        file1.set_compile_option("ghdl.flags", ["--xyz"])
        self.assertEqual(file1.get_compile_option("ghdl.flags"), ["--xyz"])

    def test_compile_option_validation(self):
        self.project.add_library("lib", "lib_path")
        source_file = self.add_source_file("lib", "file.vhd", "")
        self.assertRaises(ValueError, source_file.set_compile_option, "foo", None)
        self.assertRaises(ValueError, source_file.set_compile_option, "ghdl.flags", None)
        self.assertRaises(ValueError, source_file.add_compile_option, "ghdl.flags", None)
        self.assertRaises(ValueError, source_file.get_compile_option, "foo")

    def test_should_recompile_files_affected_by_change_with_later_timestamp(self):
        file1, file2, file3 = self.create_dummy_three_file_project()

        self.update(file1)
        self.update(file2)
        self.update(file3)
        self.assert_should_recompile([])

        file1, file2, file3 = self.create_dummy_three_file_project()
        self.assert_should_recompile([])

        file1, file2, file3 = self.create_dummy_three_file_project(update_file1=True)
        self.assert_should_recompile([file1, file2, file3])

        # tick() advances the file system clock to get a later timestamp
        tick()
        self.update(file1)
        self.assert_should_recompile([file2, file3])

    def test_should_recompile_files_missing_hash(self):
        file1, file2, file3 = self.create_dummy_three_file_project()

        self.update(file1)
        self.update(file2)
        self.update(file3)
        self.assert_should_recompile([])

        # Removing the stored hash of file2 invalidates it and its dependents
        os.remove(self.hash_file_name_of(file2))
        self.assert_should_recompile([file2, file3])

    def test_finds_component_instantiation_dependencies(self):
        self.project.add_library("toplib", "work_path")
        top = self.add_source_file("toplib", "top.vhd", """\ entity top is end entity; architecture arch of top is begin labelFoo : component foo generic map(WIDTH => 16) port map(clk => '1', rst => '0', in_vec => record_reg.input_signal, output => some_signal(UPPER_CONSTANT-1 downto LOWER_CONSTANT+1)); label2Foo : foo2 port map(clk => '1', rst => '0', output => "00"); end architecture; """)

        self.project.add_library("libcomp1", "work_path")
        comp1 = self.add_source_file("libcomp1", "comp1.vhd", """\ entity foo is end entity; architecture arch of foo is begin end architecture; """)

        self.project.add_library("libcomp2", "work_path")
        comp2 = self.add_source_file("libcomp2", "comp2.vhd", """\ entity foo2 is end entity; architecture arch of foo2 is begin end architecture; """)

        self.assert_has_component_instantiation("top.vhd", "foo")
        self.assert_has_component_instantiation("top.vhd", "foo2")

        dependencies = self.project.get_dependencies_in_compile_order([top], implementation_dependencies=True)
        self.assertIn(comp1, dependencies)
        self.assertIn(comp2, dependencies)

    def test_get_dependencies_in_compile_order_without_target(self):
        self.create_dummy_three_file_project()
        deps = self.project.get_dependencies_in_compile_order()
        self.assertEqual(len(deps), 3)
        self.assertTrue(deps[0] == self.project.get_source_files_in_order()[0])
        self.assertTrue(deps[1] == self.project.get_source_files_in_order()[1])
        self.assertTrue(deps[2] == self.project.get_source_files_in_order()[2])

    def test_get_dependencies_in_compile_order_with_target(self):
        self.create_dummy_three_file_project()
        deps = self.project.get_dependencies_in_compile_order( target_files=[self.project.get_source_files_in_order()[1]])
        self.assertEqual(len(deps), 2)
        self.assertTrue(deps[0] == self.project.get_source_files_in_order()[0])
        self.assertTrue(deps[1] == self.project.get_source_files_in_order()[1])

        # To test that indirect dependencies are included
        deps = self.project.get_dependencies_in_compile_order( target_files=[self.project.get_source_files_in_order()[2]])
        self.assertEqual(len(deps), 3)
        self.assertTrue(deps[0] == self.project.get_source_files_in_order()[0])
        self.assertTrue(deps[1] == self.project.get_source_files_in_order()[1])
        self.assertTrue(deps[2] == self.project.get_source_files_in_order()[2])

    def test_compiles_same_file_into_different_libraries(self):
        pkgs = []
        second_pkgs = []
        self.project.add_library("lib", "lib_path")
        other_pkg = self.add_source_file("lib", "other_pkg.vhd", """ package other_pkg is end package other_pkg; """)
        for lib in ["lib1", "lib2"]:
            self.project.add_library(lib, lib + "_path")
            pkgs.append(self.add_source_file(lib, "pkg.vhd", """ library lib; use lib.other_pkg.all; package pkg is end package pkg; """))
            second_pkgs.append(self.add_source_file(lib, lib + "_pkg.vhd", """ use work.pkg.all; package second_pkg is end package second_pkg; """))

        # Same file name in different libraries must get distinct hash files
        self.assertNotEqual(self.hash_file_name_of(pkgs[0]), self.hash_file_name_of(pkgs[1]))
        self.assertEqual(len(self.project.get_files_in_compile_order()), 5)
        self.assert_compiles(other_pkg, before=pkgs[0])
        self.assert_compiles(other_pkg, before=pkgs[1])
        # 'work' resolves within each library, so deps stay library-local
        self.assert_compiles(pkgs[0], before=second_pkgs[0])
        self.assert_compiles(pkgs[1], before=second_pkgs[1])

    def test_has_verilog_module(self):
        self.project.add_library("lib", "lib_path")
        self.add_source_file("lib", "module.v", """\ module name; endmodule """)
        library = self.project.get_library("lib")
        modules = library.get_modules()
        self.assertEqual(len(modules), 1)

    def test_finds_verilog_package_import_dependencies(self):
        self.project.add_library("lib", "lib_path")
        pkg = self.add_source_file("lib", "pkg.sv", """\ package pkg; endpackage """)
        module = self.add_source_file("lib", "module.sv", """\ module name; import pkg::*; endmodule """)
        self.assert_compiles(pkg, before=module)

    def test_finds_verilog_package_reference_dependencies(self):
        self.project.add_library("lib", "lib_path")
        pkg = self.add_source_file("lib", "pkg.sv", """\ package pkg; endpackage """)
        module = self.add_source_file("lib", "module.sv", """\ module name; pkg::func(); endmodule """)
        self.assert_compiles(pkg, before=module)

    def test_verilog_package_reference_is_case_sensitive(self):
        # Unlike VHDL, (System)Verilog identifiers are case sensitive
        self.project = Project()
        self.project.add_library("lib", "lib_path")
        pkg = self.add_source_file("lib", "pkg.sv", """\ package Pkg; endpackage """)
        module = self.add_source_file("lib", "module.sv", """\ module name; pkg::func(); endmodule """)
        self.assert_not_compiles(pkg, before=module)

        self.project = Project()
        self.project.add_library("lib", "lib_path")
        pkg = self.add_source_file("lib", "pkg.sv", """\ package pkg; endpackage """)
        module = self.add_source_file("lib", "module.sv", """\ module name; Pkg::func(); endmodule """)
        self.assert_not_compiles(pkg, before=module)

    def test_finds_verilog_module_instantiation_dependencies(self):
        self.project.add_library("lib", "lib_path")
        module1 = self.add_source_file("lib", "module1.sv", """\ module module1; endmodule """)
        module2 = self.add_source_file("lib", "module2.sv", """\ module module2; module1 inst(); endmodule """)
        self.assert_compiles(module1, before=module2)

    def test_verilog_module_instantiation_is_case_sensitive(self):
        self.project = Project()
        self.project.add_library("lib", "lib_path")
        module1 = self.add_source_file("lib", "module1.sv", """\ module Module1; endmodule """)
        module2 = self.add_source_file("lib", "module2.sv", """\ module module2; module1 inst(); endmodule """)
        self.assert_not_compiles(module1, before=module2)

        self.project = Project()
        self.project.add_library("lib", "lib_path")
        module1 = self.add_source_file("lib", "module1.sv", """\ module module1; endmodule """)
        module2 = self.add_source_file("lib", "module2.sv", """\ module module2; Module1 inst(); endmodule """)
        self.assert_not_compiles(module1, before=module2)

    def test_finds_verilog_module_instantiation_dependencies_in_vhdl(self):
        # Mixed-language dependency: VHDL entity instantiating a SV module
        self.project.add_library("lib1", "lib_path")
        self.project.add_library("lib2", "lib_path")
        module1 = self.add_source_file("lib1", "module1.sv", """\ module module1; endmodule """)
        module2 = self.add_source_file("lib2", "module2.vhd", """\ library lib1; entity ent is end entity; architecture a of ent is begin inst : entity lib1.module1; end architecture; """)
        self.assert_compiles(module1, before=module2)

    def test_finds_verilog_include_dependencies(self):
        def create_project():
            """
            Create the test project
            """
            self.project = Project()
            self.project.add_library("lib", "lib_path")
            return self.add_source_file("lib", "module.sv", """\ `include "include.svh" """)

        write_file("include.svh", """\ module name; endmodule """)
        module = create_project()
        self.assert_should_recompile([module])

        for src_file in self.project.get_files_in_compile_order():
            self.update(src_file)
        create_project()
        self.assert_should_recompile([])

        # Changing only the included file must invalidate the includer
        write_file("include.svh", """\ module other_name; endmodule """)
        module = create_project()
        self.assert_should_recompile([module])

    def test_verilog_defines_affects_dependency_scanning(self):
        # Without the define the module is compiled out by the `ifdef
        self.project.add_library("lib", "lib_path")
        self.add_source_file("lib", "module.v", """\ `ifdef foo module mod; endmodule `endif """)
        library = self.project.get_library("lib")
        modules = library.get_modules()
        self.assertEqual(len(modules), 0)

        self.project = Project()
        self.project.add_library("lib", "lib_path")
        self.add_source_file("lib", "module.v", """\ `ifdef foo module mod; endmodule `endif """, defines={"foo": ""})
        library = self.project.get_library("lib")
        modules = library.get_modules()
        self.assertEqual(len(modules), 1)

    def test_recompile_when_updating_defines(self):
        contents1 = """ module mod1; endmodule """
        contents2 = """ module mod2; endmodule """
        self.project = Project()
        self.project.add_library("lib", "lib_path")
        mod1 = 
self.add_source_file("lib", "module1.v", contents1) mod2 = self.add_source_file("lib", "module2.v", contents2) self.assert_should_recompile([mod1, mod2]) self.update(mod1) self.update(mod2) self.assert_should_recompile([]) self.project = Project() self.project.add_library("lib", "lib_path") mod1 = self.add_source_file("lib", "module1.v", contents1, defines={"foo": "bar"}) mod2 = self.add_source_file("lib", "module2.v", contents2) self.assert_should_recompile([mod1]) self.update(mod1) self.update(mod2) self.assert_should_recompile([]) self.project = Project() self.project.add_library("lib", "lib_path") mod1 = self.add_source_file("lib", "module1.v", contents1, defines={"foo": "other_bar"}) mod2 = self.add_source_file("lib", "module2.v", contents2) self.assert_should_recompile([mod1]) self.update(mod1) self.update(mod2) self.assert_should_recompile([]) def test_manual_dependencies(self): self.project.add_library("lib", "lib_path") ent1 = self.add_source_file("lib", "ent1.vhd", """\ entity ent1 is end ent1; architecture arch of ent1 is begin end architecture; """) ent2 = self.add_source_file("lib", "ent2.vhd", """\ entity ent2 is end ent2; architecture arch of ent2 is begin end architecture; """) self.project.add_manual_dependency(ent2, depends_on=ent1) self.assert_compiles(ent1, before=ent2) @mock.patch("vunit.project.LOGGER", autospec=True) def test_circular_dependencies_causes_error(self, logger): self.project.add_library("lib", "lib_path") self.add_source_file("lib", "ent1.vhd", """\ entity ent1 is end ent1; architecture arch of ent1 is begin ent2_inst : entity work.ent2; end architecture; """) self.add_source_file("lib", "ent2.vhd", """\ entity ent2 is end ent2; architecture arch of ent2 is begin ent1_inst : entity work.ent1; end architecture; """) self.assertRaises(CompileError, self.project.get_files_in_compile_order) logger.error.assert_called_once_with( "Found circular dependency:\n%s", "ent1.vhd ->\n" "ent2.vhd ->\n" "ent1.vhd") def 
test_order_of_adding_libraries_is_kept(self): for order in itertools.combinations(range(4), 4): project = Project() for idx in order: project.add_library("lib%i" % idx, "lib%i_path" % idx) library_names = [lib.name for lib in project.get_libraries()] self.assertEqual(library_names, ["lib%i" % idx for idx in order]) def test_file_type_of(self): self.assertEqual(file_type_of("file.vhd"), "vhdl") self.assertEqual(file_type_of("file.vhdl"), "vhdl") self.assertEqual(file_type_of("file.sv"), "systemverilog") self.assertEqual(file_type_of("file.v"), "verilog") self.assertEqual(file_type_of("file.vams"), "verilog") self.assertRaises(RuntimeError, file_type_of, "file.foo") def create_dummy_three_file_project(self, update_file1=False): """ Create a projected containing three dummy files optionally only updating file1 """ self.project = Project() self.project.add_library("lib", "work_path") if update_file1: file1 = self.add_source_file("lib", "file1.vhd", """\ entity module1 is end entity; architecture arch of module1 is begin end architecture; """) else: file1 = self.add_source_file("lib", "file1.vhd", """\ entity module1 is end entity; architecture arch of module1 is begin report "Updated"; end architecture; """) file2 = self.add_source_file("lib", "file2.vhd", """\ entity module2 is end entity; architecture arch of module2 is begin module1_inst : entity lib.module1; end architecture; """) file3 = self.add_source_file("lib", "file3.vhd", """\ entity module3 is end entity; architecture arch of module3 is begin module1_inst : entity work.module2; end architecture; """) return file1, file2, file3 def test_add_source_file_has_vhdl_standard(self): write_file("file.vhd", "") for std in ('93', '2002', '2008'): project = Project() project.add_library("lib", "lib_path") source_file = project.add_source_file("file.vhd", library_name="lib", file_type='vhdl', vhdl_standard=std) self.assertEqual(source_file.get_vhdl_standard(), std) def 
test_add_source_file_detects_illegal_vhdl_standard(self): write_file("file.vhd", "") project = Project() project.add_library("lib", "lib_path") self.assertRaises(ValueError, project.add_source_file, "file.vhd", library_name="lib", file_type='vhdl', vhdl_standard='2007') def test_add_source_file_has_no_parse_vhdl(self): for no_parse in (True, False): project = Project() file_name = "file.vhd" write_file(file_name, """ entity ent is end entity; """) project.add_library("lib", "work_path") source_file = project.add_source_file(file_name, "lib", file_type=file_type_of(file_name), no_parse=no_parse) self.assertEqual(len(source_file.design_units), int(not no_parse)) def test_add_source_file_has_no_parse_verilog(self): for no_parse in (True, False): project = Project() file_name = "file.v" write_file(file_name, """ module mod; endmodule """) project.add_library("lib", "work_path") source_file = project.add_source_file(file_name, "lib", file_type=file_type_of(file_name), no_parse=no_parse) self.assertEqual(len(source_file.design_units), int(not no_parse)) @mock.patch("vunit.project.LOGGER") def test_no_warning_builtin_library_reference(self, mock_logger): self.project.add_library("lib", "lib_path") self.add_source_file("lib", "ent.vhd", """ use std.foo.all; use ieee.bar.all; use builtin_lib.all; """) self.project.add_builtin_library("builtin_lib") self.project.get_files_in_compile_order() warning_calls = mock_logger.warning.call_args_list self.assertEqual(len(warning_calls), 0) def test_add_external_library(self): os.makedirs("lib_path") self.project.add_library("lib", "lib_path", is_external=True) def test_add_external_library_must_exist(self): try: self.project.add_library("lib2", "lib_path2", is_external=True) except ValueError as err: self.assertEqual(str(err), "External library 'lib_path2' does not exist") else: assert False, "ValueError not raised" def test_add_external_library_must_be_a_directory(self): write_file("lib_path3", "") try: 
self.project.add_library("lib3", "lib_path3", is_external=True) except ValueError as err: self.assertEqual(str(err), "External library must be a directory. Got 'lib_path3'") else: assert False, "ValueError not raised" def add_source_file(self, library_name, file_name, contents, defines=None): """ Convenient wrapper arround project.add_source_file """ write_file(file_name, contents) source_file = self.project.add_source_file(file_name, library_name, file_type=file_type_of(file_name), defines=defines) return source_file def hash_file_name_of(self, source_file): """ Get the hash file name of a source_file """ return self.project._hash_file_name_of(source_file) # pylint: disable=protected-access def update(self, source_file): """ Wrapper arround project.update """ self.project.update(source_file) def assert_should_recompile(self, source_files): self.assert_count_equal(source_files, self.project.get_files_in_compile_order()) def assert_compiles(self, source_file, before): """ Assert that the compile order of source_file is before the file named 'before'. """ for src_file in self.project.get_files_in_compile_order(): self.update(src_file) self.assert_should_recompile([]) tick() self.update(source_file) self.assertIn(before, self.project.get_files_in_compile_order()) def assert_not_compiles(self, source_file, before): """ Assert that the compile order of source_file is not before the file named 'before'. 
""" for src_file in self.project.get_files_in_compile_order(): self.update(src_file) self.assert_should_recompile([]) tick() self.update(source_file) self.assertNotIn(before, self.project.get_files_in_compile_order()) def assert_has_package_body(self, source_file_name, package_name): """ Assert that there is a package body with package_name withing source_file_name """ unit = self._find_design_unit(source_file_name, "package body", package_name, False, package_name) self.assertIsNotNone(unit) def assert_has_package(self, source_file_name, name): """ Assert that there is a package with name withing source_file_name """ unit = self._find_design_unit(source_file_name, "package", name) self.assertIsNotNone(unit) def assert_has_entity(self, source_file, name, generic_names=None, architecture_names=None): """ Assert that there is an entity with name withing source_file that has architectures with architecture_names. """ generic_names = [] if generic_names is None else generic_names architecture_names = [] if architecture_names is None else architecture_names for entity in source_file.library.get_entities(): if entity.name == name: self.assert_count_equal(entity.generic_names, generic_names) self.assert_count_equal(entity.architecture_names, architecture_names) return self.assertFalse("Did not find entity " + name + "in " + source_file.name) def assert_has_architecture(self, source_file_name, name, entity_name): """ Assert that there is an architecture with name of entity_name within source_file_name """ unit = self._find_design_unit(source_file_name, "architecture", name, False, entity_name) self.assertIsNotNone(unit) def assert_has_component_instantiation(self, source_file_name, component_name): """ Assert that there is a component instantion with component with source_file_name """ found_comp = False for source_file in self.project.get_source_files_in_order(): for component in source_file.depending_components: if component == component_name: found_comp = True 
self.assertTrue(found_comp, "Did not find component " + component_name + " in " + source_file_name) def _find_design_unit(self, # pylint: disable=too-many-arguments source_file_name, design_unit_type, design_unit_name, is_primary=True, primary_design_unit_name=None): """ Utility fnction to find and return a design unit """ for source_file in self.project.get_source_files_in_order(): for design_unit in source_file.design_units: if design_unit.unit_type != design_unit_type: continue if design_unit.name != design_unit_name: continue self.assertEqual(design_unit.is_primary, is_primary) self.assertEqual(source_file.name, source_file_name) if not is_primary: self.assertEqual(design_unit.primary_design_unit, primary_design_unit_name) return design_unit return None def assert_count_equal(self, values1, values2): # Python 2.7 compatability self.assertEqual(sorted(values1), sorted(values2))
class VUnit(object):  # pylint: disable=too-many-instance-attributes, too-many-public-methods
    """
    The public interface of VUnit

    :example:

    .. code-block:: python

       from vunit import VUnit
    """

    @classmethod
    def from_argv(cls, argv=None, compile_builtins=True):
        """
        Create VUnit instance from command line arguments.

        :param argv: Use explicit argv instead of actual command line argument
        :param compile_builtins: Do not compile builtins. Used for VUnit internal testing.
        :returns: A :class:`.VUnit` object instance

        :example:

        .. code-block:: python

           from vunit import VUnit
           prj = VUnit.from_argv()
        """
        args = VUnitCLI().parse_args(argv=argv)
        return cls.from_args(args, compile_builtins=compile_builtins)

    @classmethod
    def from_args(cls, args, compile_builtins=True):
        """
        Create VUnit instance from args namespace.
        Intended for users who adds custom command line options.
        See :class:`vunit.vunit_cli.VUnitCLI` class to learn about
        adding custom command line options.

        :param args: The parsed argument namespace object
        :param compile_builtins: Do not compile builtins. Used for VUnit internal testing.
        :returns: A :class:`.VUnit` object instance
        """
        def test_filter(name):
            # Keep tests matching any of the command line test patterns
            return any(fnmatch(name, pattern) for pattern in args.test_patterns)

        return cls(output_path=args.output_path,
                   clean=args.clean,
                   vhdl_standard=select_vhdl_standard(),
                   use_debug_codecs=args.use_debug_codecs,
                   no_color=args.no_color,
                   verbose=args.verbose,
                   xunit_xml=args.xunit_xml,
                   log_level=args.log_level,
                   test_filter=test_filter,
                   list_only=args.list,
                   list_files_only=args.files,
                   compile_only=args.compile,
                   keep_compiling=args.keep_compiling,
                   elaborate_only=args.elaborate,
                   compile_builtins=compile_builtins,
                   simulator_factory=SimulatorFactory(args),
                   num_threads=args.num_threads,
                   exit_0=args.exit_0)

    def __init__(self,  # pylint: disable=too-many-locals, too-many-arguments
                 output_path,
                 simulator_factory,
                 clean=False,
                 use_debug_codecs=False,
                 no_color=False,
                 verbose=False,
                 xunit_xml=None,
                 log_level="warning",
                 test_filter=None,
                 list_only=False,
                 list_files_only=False,
                 compile_only=False,
                 keep_compiling=False,
                 elaborate_only=False,
                 vhdl_standard='2008',
                 compile_builtins=True,
                 num_threads=1,
                 exit_0=False):
        self._configure_logging(log_level)
        self._elaborate_only = elaborate_only
        self._output_path = abspath(output_path)

        if no_color:
            self._printer = NO_COLOR_PRINTER
        else:
            self._printer = COLOR_PRINTER

        self._verbose = verbose
        self._xunit_xml = xunit_xml

        # No explicit filter means every test is kept
        self._test_filter = test_filter if test_filter is not None else lambda name: True
        self._list_only = list_only
        self._list_files_only = list_files_only
        self._compile_only = compile_only
        self._keep_compiling = keep_compiling
        self._vhdl_standard = vhdl_standard

        self._tb_filter = tb_filter
        self._configuration = TestConfiguration()
        self._external_preprocessors = []
        self._location_preprocessor = None
        self._check_preprocessor = None
        self._use_debug_codecs = use_debug_codecs

        self._simulator_factory = simulator_factory
        self._create_output_path(clean)

        self._project = None
        self._create_project()
        self._num_threads = num_threads
        self._exit_0 = exit_0

        if compile_builtins:
            self.add_builtins(library_name="vunit_lib")

    def _create_project(self):
        """
        Create Project instance
        """
        database = self._create_database()
        self._project = Project(
            vhdl_parser=CachedVHDLParser(database=database),
            verilog_parser=VerilogParser(database=database),
            depend_on_package_body=self._simulator_factory.package_users_depend_on_bodies())

    def _create_database(self):
        """
        Create a persistent database to store expensive parse results

        Check for Python version used to create the database is the
        same as the running python instance or re-create
        """
        project_database_file_name = join(self._output_path, "project_database")
        create_new = False
        key = b"version"
        version = str((6, sys.version)).encode()
        database = None
        try:
            database = DataBase(project_database_file_name)
            create_new = (key not in database) or (database[key] != version)
        except KeyboardInterrupt:
            raise
        except:  # pylint: disable=bare-except
            # Any corruption of the on-disk cache is recovered by re-creating it
            traceback.print_exc()
            create_new = True

        if create_new:
            database = DataBase(project_database_file_name, new=True)
        database[key] = version

        return PickledDataBase(database)

    @staticmethod
    def _configure_logging(log_level):
        """
        Configure logging based on log_level string
        """
        level = getattr(logging, log_level.upper())
        logging.basicConfig(filename=None, format='%(levelname)7s - %(message)s', level=level)

    def add_external_library(self, library_name, path):
        """
        Add an externally compiled library as a black-box

        :param library_name: The name of the external library
        :param path: The path to the external library
        :returns: The created :class:`.Library` object

        :example:

        .. code-block:: python

           prj.add_external_library("unisim", "path/to/unisim/")
        """
        self._project.add_library(library_name, abspath(path), is_external=True)
        return self._create_library_facade(library_name)

    def add_library(self, library_name):
        """
        Add a library managed by VUnit.

        :param library_name: The name of the library
        :returns: The created :class:`.Library` object

        :example:

        .. code-block:: python

           library = prj.add_library("lib")
        """
        path = join(self._simulator_factory.simulator_output_path,
                    "libraries", library_name)
        self._project.add_library(library_name, abspath(path))
        return self._create_library_facade(library_name)

    def library(self, library_name):
        """
        Get a library

        :param library_name: The name of the library
        :returns: A :class:`.Library` object
        """
        if not self._project.has_library(library_name):
            raise KeyError(library_name)
        return self._create_library_facade(library_name)

    def _create_library_facade(self, library_name):
        """
        Create a Library object to be exposed to users
        """
        return Library(library_name, self, self._project, self._configuration)

    def set_generic(self, name, value):
        """
        Globally set a value of generic

        :param name: The name of the generic
        :param value: The value of the generic

        :example:

        .. code-block:: python

           prj.set_generic("data_width", 16)
        """
        self._configuration.set_generic(name.lower(), value, scope=create_scope())

    def set_parameter(self, name, value):
        """
        Globally set value of parameter

        :param name: The name of the parameter
        :param value: The value of the parameter

        :example:

        .. code-block:: python

           prj.set_parameter("data_width", 16)
        """
        self._configuration.set_generic(name, value, scope=create_scope())

    def set_sim_option(self, name, value):
        """
        Globally set simulation option

        :param name: |simulation_options|
        :param value: The value of the simulation option

        :example:

        .. code-block:: python

           prj.set_sim_option("ghdl.flags", ["--no-vital-checks"])
        """
        self._configuration.set_sim_option(name, value, scope=create_scope())

    def set_compile_option(self, name, value):
        """
        Globally set compile option

        :param name: |compile_option|
        :param value: The value of the compile option

        :example:

        .. code-block:: python

           prj.set_compile_option("ghdl.flags", ["--no-vital-checks"])
        """
        for source_file in self._project.get_source_files_in_order():
            source_file.set_compile_option(name, value)

    def add_compile_option(self, name, value):
        """
        Globally add compile option

        :param name: |compile_option|
        :param value: The value of the compile option
        """
        for source_file in self._project.get_source_files_in_order():
            source_file.add_compile_option(name, value)

    def set_pli(self, value):
        """
        Globally Set pli

        :param value: A list of PLI object file names
        """
        self._configuration.set_pli(value, scope=create_scope())

    def disable_ieee_warnings(self):
        """
        Globally disable ieee warnings
        """
        self._configuration.disable_ieee_warnings(scope=create_scope())

    def get_source_file(self, file_name, library_name=None):
        """
        Get a source file

        :param file_name: The name of the file as a relative or absolute path
        :param library_name: The name of a specific library to search if not all libraries
        :returns: A :class:`.SourceFile` object
        """
        files = self.get_source_files(file_name, library_name, allow_empty=True)
        if len(files) > 1:
            raise ValueError("Found file named '%s' in multiple-libraries, "
                             "add explicit library_name." % file_name)
        elif len(files) == 0:
            if library_name is None:
                raise ValueError("Found no file named '%s'" % file_name)
            else:
                raise ValueError("Found no file named '%s' in library '%s'"
                                 % (file_name, library_name))
        return files[0]

    def get_source_files(self, pattern="*", library_name=None, allow_empty=False):
        """
        Get a list of source files

        :param pattern: A wildcard pattern matching either an absolute or relative path
        :param library_name: The name of a specific library to search if not all libraries
        :param allow_empty: To disable an error if no files matched the pattern
        :returns: A :class:`.SourceFileList` object
        """
        results = []
        for source_file in self._project.get_source_files_in_order():
            if library_name is not None:
                if source_file.library.name != library_name:
                    continue

            # Match either the absolute or the simplified relative path
            if not (fnmatch(abspath(source_file.name), pattern) or
                    fnmatch(ostools.simplify_path(source_file.name), pattern)):
                continue

            results.append(SourceFile(source_file, self._project, self))

        if (not allow_empty) and len(results) == 0:
            raise ValueError(("Pattern %r did not match any file. "
                              "Use allow_empty=True to avoid exception,") % pattern)

        return SourceFileList(results)

    def add_source_files(self,  # pylint: disable=too-many-arguments
                         files, library_name, preprocessors=None,
                         include_dirs=None, defines=None, allow_empty=False):
        """
        Add source files matching wildcard pattern to library

        :param files: A wildcard pattern matching the files to add or a list of files
        :param library_name: The name of the library to add files into
        :param include_dirs: A list of include directories
        :param defines: A dictionary containing Verilog defines to be set
        :param allow_empty: To disable an error if no files matched the pattern
        :returns: A list of files (:class:`.SourceFileList`) which were added

        :example:

        .. code-block:: python

           prj.add_source_files("*.vhd", "lib")
        """
        # Fix: the condition was inverted.  Only a lone string pattern must
        # be wrapped into a list; an iterable of patterns is looped directly.
        # The inverted test wrapped lists (making glob() receive a list) and
        # iterated single strings character by character.
        if not _is_iterable_not_string(files):
            files = [files]

        file_names = []
        for pattern in files:
            new_file_names = glob(pattern)
            if (not allow_empty) and len(new_file_names) == 0:
                raise ValueError(("Pattern %r did not match any file. "
                                  "Use allow_empty=True to avoid exception,") % pattern)
            file_names += new_file_names

        return SourceFileList(source_files=[
            self.add_source_file(file_name, library_name, preprocessors, include_dirs, defines)
            for file_name in file_names])

    def add_source_file(self, file_name, library_name, preprocessors=None,
                        include_dirs=None, defines=None):
        """
        Add source file to library

        :param file_name: The name of the file
        :param library_name: The name of the library to add the file into
        :param include_dirs: A list of include directories
        :param defines: A dictionary containing Verilog defines to be set
        :returns: The :class:`.SourceFile` which was added

        :example:

        .. code-block:: python

           prj.add_source_file("file.vhd", "lib")
        """
        file_type = file_type_of(file_name)
        if file_type == "verilog":
            include_dirs = include_dirs if include_dirs is not None else []
            include_dirs = add_verilog_include_dir(include_dirs)

        file_name = self._preprocess(library_name, abspath(file_name), preprocessors)
        return SourceFile(self._project.add_source_file(file_name,
                                                        library_name,
                                                        file_type=file_type,
                                                        include_dirs=include_dirs,
                                                        defines=defines),
                          self._project, self)

    def _preprocess(self, library_name, file_name, preprocessors):
        """
        Preprocess file_name within library_name using explicit preprocessors
        if preprocessors is None then use implicit globally defined processors
        """
        # @TODO dependency checking etc...

        if preprocessors is None:
            preprocessors = [self._location_preprocessor, self._check_preprocessor]
            preprocessors = [p for p in preprocessors if p is not None]
            preprocessors = self._external_preprocessors + preprocessors

        if len(preprocessors) == 0:
            return file_name

        code = ostools.read_file(file_name)
        for preprocessor in preprocessors:
            code = preprocessor.run(code, basename(file_name))

        pp_file_name = join(self._preprocessed_path, library_name, basename(file_name))

        idx = 1
        while ostools.file_exists(pp_file_name):
            LOGGER.debug("Preprocessed file exists '%s', adding prefix", pp_file_name)
            pp_file_name = join(self._preprocessed_path,
                                library_name,
                                "%i_%s" % (idx, basename(file_name)))
            idx += 1

        ostools.write_file(pp_file_name, code)
        return pp_file_name

    def add_preprocessor(self, preprocessor):
        """
        Add a custom preprocessor to be used on all files, must be called before adding any files
        """
        self._external_preprocessors.append(preprocessor)

    def enable_location_preprocessing(self, additional_subprograms=None):
        """
        Enable location preprocessing, must be called before adding any files
        """
        preprocessor = LocationPreprocessor()
        if additional_subprograms is not None:
            for subprogram in additional_subprograms:
                preprocessor.add_subprogram(subprogram)
        self._location_preprocessor = preprocessor

    def enable_check_preprocessing(self):
        """
        Enable check preprocessing, must be called before adding any files
        """
        self._check_preprocessor = CheckPreprocessor()

    def main(self):
        """
        Run vunit main function and exit
        """
        try:
            all_ok = self._main()
        except KeyboardInterrupt:
            exit(1)
        except CompileError:
            exit(1)
        except TestScannerError:
            exit(1)
        except SystemExit:
            exit(1)
        except:  # pylint: disable=bare-except
            traceback.print_exc()
            exit(1)

        if (not all_ok) and (not self._exit_0):
            exit(1)

        exit(0)

    def _main(self):
        """
        Base vunit main function without performing exit
        """
        if self._list_only:
            return self._main_list_only()

        if self._list_files_only:
            return self._main_list_files_only()

        if self._compile_only:
            return self._main_compile_only()

        simulator_if = self._create_simulator_if()
        test_cases = self._create_tests(simulator_if)
        self._compile(simulator_if)

        start_time = ostools.get_time()
        report = TestReport(printer=self._printer)
        try:
            self._run_test(test_cases, report)
            simulator_if.post_process(self._simulator_factory.simulator_output_path)
        except KeyboardInterrupt:
            print()
            LOGGER.debug("_main: Caught Ctrl-C shutting down")
        finally:
            del test_cases
            del simulator_if

        report.set_real_total_time(ostools.get_time() - start_time)
        self._post_process(report)

        return report.all_ok()

    def _main_list_only(self):
        """
        Main function when only listing test cases
        """
        simulator_if = None
        test_suites = self._create_tests(simulator_if)

        for test_suite in test_suites:
            for name in test_suite.test_cases:
                print(name)
        print("Listed %i tests" % test_suites.num_tests())
        return True

    def _main_list_files_only(self):
        """
        Main function when only listing files
        """
        files = self.get_compile_order()
        for source_file in files:
            print("%s, %s" % (source_file.library.name, source_file.name))
        print("Listed %i files" % len(files))
        return True

    def _main_compile_only(self):
        """
        Main function when only compiling
        """
        simulator_if = self._create_simulator_if()
        self._compile(simulator_if)
        return True

    def _create_output_path(self, clean):
        """
        Create or re-create the output path if necessary
        """
        if clean:
            ostools.renew_path(self._output_path)
        elif not exists(self._output_path):
            os.makedirs(self._output_path)

        ostools.renew_path(self._preprocessed_path)

    def _create_simulator_if(self):
        """
        Create a simulator interface instance
        """
        return self._simulator_factory.create()

    @property
    def vhdl_standard(self):
        return self._vhdl_standard

    @property
    def _preprocessed_path(self):
        return join(self._output_path, "preprocessed")

    @property
    def codecs_path(self):
        return join(self._output_path, "codecs")

    @property
    def use_debug_codecs(self):
        return self._use_debug_codecs

    def _create_tests(self, simulator_if):
        """
        Create the test suites by scanning the project
        """
        scanner = TestScanner(simulator_if,
                              self._configuration,
                              elaborate_only=self._elaborate_only)
        test_list = scanner.from_project(self._project, entity_filter=self._tb_filter)

        if test_list.num_tests() == 0:
            LOGGER.warning("Test scanner found no test benches using current filter rule:\n%s",
                           self._tb_filter.__doc__)

        test_list.keep_matches(self._test_filter)
        return test_list

    def _compile(self, simulator_if):
        """
        Compile entire project
        """
        simulator_if.compile_project(self._project, self._vhdl_standard,
                                     continue_on_error=self._keep_compiling)

    def _run_test(self, test_cases, report):
        """
        Run the test suites and return the report
        """
        runner = TestRunner(report,
                            join(self._output_path, "tests"),
                            verbose=self._verbose,
                            num_threads=self._num_threads)
        runner.run(test_cases)

    def _post_process(self, report):
        """
        Print the report to stdout and optionally write it to an XML file
        """
        report.print_str()

        if self._xunit_xml is not None:
            xml = report.to_junit_xml_str()
            ostools.write_file(self._xunit_xml, xml)

    def add_builtins(self, library_name="vunit_lib", mock_lang=False, mock_log=False):
        """
        Add vunit VHDL builtin libraries
        """
        library = self.add_library(library_name)
        supports_context = self._simulator_factory.supports_vhdl_2008_contexts()
        add_vhdl_builtins(library, self._vhdl_standard, mock_lang, mock_log,
                          supports_context=supports_context)

    def add_com(self, library_name="vunit_lib", use_debug_codecs=None):
        """
        Add communication package

        :param use_debug_codecs: Use human readable debug codecs

           `None`: Use command line argument setting

           `False`: Never use debug codecs

           `True`: Always use debug codecs
        """
        if not self._project.has_library(library_name):
            library = self.add_library(library_name)
        else:
            library = self.library(library_name)

        if use_debug_codecs is not None:
            self._use_debug_codecs = use_debug_codecs

        supports_context = self._simulator_factory.supports_vhdl_2008_contexts()
        add_com(library, self._vhdl_standard,
                use_debug_codecs=self._use_debug_codecs,
                supports_context=supports_context)

    def add_array_util(self, library_name="vunit_lib"):
        """
        Add array utility package
        """
        library = self.library(library_name)
        add_array_util(library, self._vhdl_standard)

    def add_osvvm(self, library_name="osvvm"):
        """
        Add osvvm library
        """
        if not self._project.has_library(library_name):
            library = self.add_library(library_name)
        else:
            library = self.library(library_name)
        add_osvvm(library)

    def get_compile_order(self, source_files=None):
        """
        Get the compile order of all or specific source files and
        their dependencies

        :param source_files: A list of :class:`.SourceFile` objects or `None` meaning all
        :returns: A list of :class:`.SourceFile` objects in compile order.
        """
        if source_files is None:
            source_files = self.get_source_files()

        target_files = [source_file._source_file  # pylint: disable=protected-access
                        for source_file in source_files]
        source_files = self._project.get_dependencies_in_compile_order(target_files)
        return SourceFileList([SourceFile(source_file, self._project, self)
                               for source_file in source_files])
class TestProject(unittest.TestCase):  # pylint: disable=too-many-public-methods
    """
    Test the Project class
    """

    def setUp(self):
        # Each test runs inside a fresh scratch directory so that the relative
        # file names created by the tests are isolated per test.
        self.output_path = join(dirname(__file__), "test_project_out")
        renew_path(self.output_path)
        self.project = Project()
        self.cwd = os.getcwd()
        os.chdir(self.output_path)

    def tearDown(self):
        # Restore the working directory before removing the scratch area.
        os.chdir(self.cwd)
        if exists(self.output_path):
            rmtree(self.output_path)

    def test_parses_entity_architecture(self):
        self.project.add_library("lib", "work_path")
        self.add_source_file("lib", "file1.vhd", """\
entity foo is
end entity;

architecture arch of foo is
begin
end architecture;

architecture arch2 of foo is
begin
end architecture;
""")

        self.add_source_file("lib", "file2.vhd", """\
architecture arch3 of foo is
begin
end architecture;
""")

        # The entity aggregates architectures found in all files of the library.
        self.assert_has_entity("file1.vhd", "foo",
                               architecture_names=["arch", "arch2", "arch3"])
        self.assert_has_architecture("file1.vhd", "arch", "foo")
        self.assert_has_architecture("file1.vhd", "arch2", "foo")
        self.assert_has_architecture("file2.vhd", "arch3", "foo")

    def test_parses_entity_architecture_with_generics(self):
        self.project.add_library("lib", "work_path")
        self.add_source_file("lib", "file1.vhd", """\
entity foo is
  generic (
    testing_that_foo : boolean;
    testing_that_bar : boolean);
end entity;

architecture arch of foo is
begin
end architecture;
""")

        self.assert_has_entity("file1.vhd", "foo",
                               generic_names=["testing_that_bar", "testing_that_foo"],
                               architecture_names=["arch"])
        self.assert_has_architecture("file1.vhd", "arch", "foo")

    def test_parses_package(self):
        self.project.add_library("lib", "work_path")
        self.add_source_file("lib", "file1.vhd", """\
package foo is
end package;

package body foo is
begin
end package body;
""")
        self.assert_has_package("file1.vhd", "foo")
        self.assert_has_package_body("file1.vhd", "foo")

    def test_finds_entity_instantiation_dependencies(self):
        self.create_dummy_three_file_project()
        self.assert_compiles("file1.vhd", before="file2.vhd")
        self.assert_compiles("file2.vhd", before="file3.vhd")

    def test_primary_with_same_name_in_multiple_libraries_secondary_dependency(self):
        self.project.add_library("lib1", "lib1_path")
        self.project.add_library("lib2", "lib2_path")

        self.add_source_file("lib1", "foo_arch.vhd", """
architecture arch of foo is
begin
end architecture;
""")

        self.add_source_file("lib1", "foo1_ent.vhd", """
entity foo is
port (signal bar : boolean);
end entity;
""")

        self.add_source_file("lib2", "foo2_ent.vhd", """
entity foo is
end entity;
""")

        self.update("foo_arch.vhd")
        self.update("foo1_ent.vhd")
        self.update("foo2_ent.vhd")
        self.assert_should_recompile([])

        tick()
        # Touching the entity in lib1 must only trigger recompilation of the
        # architecture in lib1, not anything depending on lib2's same-named entity.
        self.update("foo1_ent.vhd")
        self.assert_should_recompile(["foo_arch.vhd"])

    def test_multiple_identical_file_names_with_different_path_in_same_library(self):
        self.project.add_library("lib", "lib_path")
        self.add_source_file("lib", join("a", "foo.vhd"), """
entity a_foo is
end entity;
""")
        self.add_source_file("lib", join("b", "foo.vhd"), """
entity b_foo is
end entity;
""")
        self.assert_should_recompile([join("a", "foo.vhd"), join("b", "foo.vhd")])
        self.update(join("a", "foo.vhd"))
        self.update(join("b", "foo.vhd"))
        self.assert_should_recompile([])

    def test_finds_entity_architecture_dependencies(self):
        self.project.add_library("lib", "lib_path")
        self.add_source_file("lib", "entity.vhd", """
entity foo is
end entity;
""")
        self.add_source_file("lib", "arch1.vhd", """
architecture arch1 of foo is
begin
end architecture;
""")
        self.add_source_file("lib", "arch2.vhd", """
architecture arch2 of foo is
begin
end architecture;
""")
        self.assert_compiles("entity.vhd", before="arch1.vhd")
        self.assert_compiles("entity.vhd", before="arch2.vhd")

    def test_finds_package_dependencies(self):
        self.project.add_library("lib", "lib_path")
        self.add_source_file("lib", "package.vhd", """
package foo is
end package;
""")
        self.add_source_file("lib", "body.vhd", """
package body foo is
begin
end package body;
""")
        self.assert_compiles("package.vhd", before="body.vhd")

    def create_module_package_and_body(self, add_body=True):
        """
        Helper function to create a three file project with a package,
        a package body and a module using the package
        """
        self.project.add_library("lib", "lib_path")
        self.add_source_file("lib", "package.vhd", """
package pkg is
end package;
""")
        if add_body:
            self.add_source_file("lib", "body.vhd", """
package body pkg is
begin
end package body;
""")

        self.project.add_library("lib2", "work_path")
        self.add_source_file("lib2", "module.vhd", """
library lib;
use lib.pkg.all;

entity module is
end entity;

architecture arch of module is
begin
end architecture;
""")

    def test_finds_use_package_dependencies(self):
        self.create_module_package_and_body()
        self.assert_compiles("package.vhd", before="body.vhd")
        self.assert_compiles("package.vhd", before="module.vhd")
        # Without depend_on_package_body, a module does not depend on the body.
        self.assert_not_compiles("body.vhd", before="module.vhd")

    def test_finds_extra_package_body_dependencies(self):
        self.project = Project(depend_on_package_body=True)
        self.create_module_package_and_body()
        self.assert_compiles("package.vhd", before="body.vhd")
        self.assert_compiles("body.vhd", before="module.vhd")
        self.assert_compiles("package.vhd", before="module.vhd")

    def test_that_package_can_have_no_body(self):
        self.project = Project(depend_on_package_body=True)
        self.create_module_package_and_body(add_body=False)
        self.assert_compiles("package.vhd", before="module.vhd")

    def test_finds_context_dependencies(self):
        self.project.add_library("lib", "lib_path")
        self.add_source_file("lib", "context.vhd", """
context foo is
end context;
""")

        self.project.add_library("lib2", "work_path")
        self.add_source_file("lib2", "module.vhd", """
library lib;
context lib.foo;

entity module is
end entity;

architecture arch of module is
begin
end architecture;
""")
        self.assert_compiles("context.vhd", before="module.vhd")

    def test_finds_configuration_dependencies(self):
        self.project.add_library("lib", "lib_path")
        self.add_source_file("lib", "cfg.vhd", """
configuration cfg of ent is
end configuration;
""")
        self.add_source_file("lib", "ent.vhd", """
entity ent is
end entity;
""")
        self.add_source_file("lib", "ent_a1.vhd", """
architecture a1 of ent is
begin
end architecture;
""")
        self.add_source_file("lib", "ent_a2.vhd", """
architecture a2 of ent is
begin
end architecture;
""")
        # A configuration depends on its entity and on all its architectures.
        self.assert_compiles("ent.vhd", before="cfg.vhd")
        self.assert_compiles("ent_a1.vhd", before="cfg.vhd")
        self.assert_compiles("ent_a2.vhd", before="cfg.vhd")

    def test_finds_configuration_reference_dependencies(self):
        self.project.add_library("lib", "lib_path")
        self.add_source_file("lib", "cfg.vhd", """
configuration cfg of ent is
end configuration;
""")
        self.add_source_file("lib", "ent.vhd", """
entity ent is
end entity;
""")
        self.add_source_file("lib", "ent_a.vhd", """
architecture a of ent is
begin
end architecture;
""")
        self.add_source_file("lib", "top.vhd", """
entity top is
end entity;

architecture a of top is
  for inst : comp use configuration work.cfg;
begin
  inst : comp;
end architecture;
""")
        self.assert_compiles("cfg.vhd", before="top.vhd")

    def test_specific_architecture_reference_dependencies(self):
        """
        GHDL depends also on the architecture when it is specifically mentioned
        """
        self.project.add_library("lib", "lib_path")
        self.add_source_file("lib", "ent.vhd", """
entity ent is
end entity;
""")
        self.add_source_file("lib", "ent_a1.vhd", """
architecture a1 of ent is
begin
end architecture;
""")
        self.add_source_file("lib", "ent_a2.vhd", """
architecture a2 of ent is
begin
end architecture;
""")
        self.add_source_file("lib", "top1.vhd", """
entity top1 is
end entity;

architecture a of top1 is
begin
  inst : entity work.ent(a1);
end architecture;
""")
        self.add_source_file("lib", "top2.vhd", """
entity top2 is
end entity;

architecture a of top2 is
  for inst : comp use entity work.ent(a2);
begin
  inst : comp;
end architecture;
""")
        self.assert_compiles("ent_a1.vhd", before="top1.vhd")
        self.assert_compiles("ent_a2.vhd", before="top2.vhd")

    @mock.patch("vunit.project.LOGGER")
    def test_warning_on_missing_specific_architecture_reference(self, mock_logger):
        self.project.add_library("lib", "lib_path")
        self.add_source_file("lib", "ent.vhd", """
entity ent is
end entity;
""")
        self.add_source_file("lib", "arch.vhd", """
architecture a1 of ent is
begin
end architecture;
""")
        # NOTE(review): "# Missing" below is not VHDL comment syntax ("--");
        # the tolerant parser appears to ignore it -- confirm intended.
        self.add_source_file("lib", "top.vhd", """
entity top1 is
end entity;

architecture a of top1 is
begin
  inst1 : entity work.ent(a1);
  inst2 : entity work.ent(a2); # Missing
end architecture;
""")

        self.project.get_files_in_compile_order()
        warning_calls = mock_logger.warning.call_args_list
        # Render the lazy %-style log message into its final text.
        log_msg = warning_calls[0][0][0] % warning_calls[0][0][1:]
        self.assertEqual(len(warning_calls), 1)
        self.assertIn("top.vhd", log_msg)
        self.assertIn("a2", log_msg)
        self.assertIn("lib.ent", log_msg)

    def _test_warning_on_duplicate(self, mock_logger, code, message):
        """
        Utility function to test adding the same duplicate code under
        file.vhd and file_copy.vhd where the duplication should cause a
        warning message.
        """
        self.add_source_file("lib", "file.vhd", code)

        warning_calls = mock_logger.warning.call_args_list
        self.assertEqual(len(warning_calls), 0)

        self.add_source_file("lib", "file_copy.vhd", code)

        warning_calls = mock_logger.warning.call_args_list
        self.assertEqual(len(warning_calls), 1)
        # Render the lazy %-style log message into its final text.
        log_msg = warning_calls[0][0][0] % warning_calls[0][0][1:]
        self.assertEqual(log_msg, message)

    @mock.patch("vunit.project.LOGGER")
    def test_warning_on_duplicate_entity(self, mock_logger):
        self.project.add_library("lib", "lib_path")
        self._test_warning_on_duplicate(
            mock_logger,
            """
entity ent is
end entity;
""",
            "file_copy.vhd: entity 'ent' previously defined in file.vhd")

    @mock.patch("vunit.project.LOGGER")
    def test_warning_on_duplicate_package(self, mock_logger):
        self.project.add_library("lib", "lib_path")
        self._test_warning_on_duplicate(
            mock_logger,
            """
package pkg is
end package;
""",
            "file_copy.vhd: package 'pkg' previously defined in file.vhd")

    @mock.patch("vunit.project.LOGGER")
    def test_warning_on_duplicate_configuration(self, mock_logger):
        self.project.add_library("lib", "lib_path")
        self._test_warning_on_duplicate(
            mock_logger,
            """
configuration cfg of ent is
end configuration;
""",
            "file_copy.vhd: configuration 'cfg' previously defined in file.vhd")

    @mock.patch("vunit.project.LOGGER")
    def test_warning_on_duplicate_package_body(self, mock_logger):
        self.project.add_library("lib", "lib_path")
        self.add_source_file("lib", "pkg.vhd", """
package pkg is
end package;
""")

        # The mixed-case "bodY" exercises case-insensitive VHDL parsing.
        self._test_warning_on_duplicate(
            mock_logger,
            """
package body pkg is
end package bodY;
""",
            "file_copy.vhd: package body 'pkg' previously defined in file.vhd")

    @mock.patch("vunit.project.LOGGER")
    def test_warning_on_duplicate_architecture(self, mock_logger):
        self.project.add_library("lib", "lib_path")
        self.add_source_file("lib", "ent.vhd", """
entity ent is
end entity;
""")
        self.add_source_file("lib", "arch.vhd", """
architecture a_no_duplicate of ent is
begin
end architecture;
""")

        self._test_warning_on_duplicate(
            mock_logger,
            """
architecture a of ent is
begin
end architecture;
""",
            "file_copy.vhd: architecture 'a' previously defined in file.vhd")

    @mock.patch("vunit.project.LOGGER")
    def test_warning_on_duplicate_context(self, mock_logger):
        self.project.add_library("lib", "lib_path")
        self._test_warning_on_duplicate(
            mock_logger,
            """
context ctx is
end context;
""",
            "file_copy.vhd: context 'ctx' previously defined in file.vhd")

    def test_should_recompile_all_files_initially(self):
        self.create_dummy_three_file_project()
        self.assert_should_recompile(["file1.vhd", "file2.vhd", "file3.vhd"])
        # Asking again without updating must give the same answer.
        self.assert_should_recompile(["file1.vhd", "file2.vhd", "file3.vhd"])

    def test_updating_creates_hash_files(self):
        self.create_dummy_three_file_project()

        for file_name in ["file1.vhd", "file2.vhd", "file3.vhd"]:
            self.update(file_name)
            self.assertTrue(exists(self.hash_file_name_of(file_name)))

    def test_should_not_recompile_updated_files(self):
        self.create_dummy_three_file_project()

        self.update("file1.vhd")
        self.assert_should_recompile(["file2.vhd", "file3.vhd"])

        self.update("file2.vhd")
        self.assert_should_recompile(["file3.vhd"])

        self.update("file3.vhd")
        self.assert_should_recompile([])

    def test_should_recompile_files_affected_by_change(self):
        self.create_dummy_three_file_project()

        self.update("file1.vhd")
        self.update("file2.vhd")
        self.update("file3.vhd")
        self.assert_should_recompile([])

        # Re-creating the project with identical contents changes nothing.
        self.create_dummy_three_file_project()
        self.assert_should_recompile([])

        # Changing file1 invalidates its dependents transitively.
        self.create_dummy_three_file_project(update_file1=True)
        self.assert_should_recompile(["file1.vhd", "file2.vhd", "file3.vhd"])

    def test_should_recompile_files_affected_by_change_with_later_timestamp(self):
        self.create_dummy_three_file_project()

        self.update("file1.vhd")
        self.update("file2.vhd")
        self.update("file3.vhd")
        self.assert_should_recompile([])

        self.create_dummy_three_file_project()
        self.assert_should_recompile([])

        self.create_dummy_three_file_project(update_file1=True)
        self.assert_should_recompile(["file1.vhd", "file2.vhd", "file3.vhd"])

        tick()
        self.update("file1.vhd")
        self.assert_should_recompile(["file2.vhd", "file3.vhd"])

    def test_should_recompile_files_missing_hash(self):
        self.create_dummy_three_file_project()

        self.update("file1.vhd")
        self.update("file2.vhd")
        self.update("file3.vhd")
        self.assert_should_recompile([])

        # Losing the hash of file2 forces file2 and its dependents to rebuild.
        os.remove(self.hash_file_name_of("file2.vhd"))
        self.assert_should_recompile(["file2.vhd", "file3.vhd"])

    def test_finds_component_instantiation_dependencies(self):
        self.project = Project(depend_on_components=True)
        self.project.add_library("toplib", "work_path")
        self.add_source_file("toplib", "top.vhd", """\
entity top is
end entity;

architecture arch of top is
begin
    labelFoo : component foo
    generic map(WIDTH => 16)
    port map(clk => '1',
             rst => '0',
             in_vec => record_reg.input_signal,
             output => some_signal(UPPER_CONSTANT-1 downto LOWER_CONSTANT+1));
    label2Foo : foo2
    port map(clk => '1',
             rst => '0',
             output => "00");
end architecture;
""")

        self.project.add_library("libcomp1", "work_path")
        self.add_source_file("libcomp1", "comp1.vhd", """\
entity foo is
end entity;

architecture arch of foo is
begin
end architecture;
""")

        self.project.add_library("libcomp2", "work_path")
        self.add_source_file("libcomp2", "comp2.vhd", """\
entity foo2 is
end entity;

architecture arch of foo2 is
begin
end architecture;
""")

        self.assert_has_component_instantiation("top.vhd", "foo")
        self.assert_has_component_instantiation("top.vhd", "foo2")
        self.assert_compiles("comp1.vhd", before="top.vhd")
        self.assert_compiles("comp2.vhd", before="top.vhd")

    def test_get_dependencies_in_compile_order_without_target(self):
        self.create_dummy_three_file_project(False)
        deps = self.project.get_dependencies_in_compile_order(target=None)
        self.assertEqual(len(deps), 3)
        self.assertTrue(deps[0] == self.project.get_source_files_in_order()[0])
        self.assertTrue(deps[1] == self.project.get_source_files_in_order()[1])
        self.assertTrue(deps[2] == self.project.get_source_files_in_order()[2])

    def test_get_dependencies_in_compile_order_with_target(self):
        self.create_dummy_three_file_project(False)
        deps = self.project.get_dependencies_in_compile_order(
            target=self.project.get_source_files_in_order()[1].name)
        self.assertEqual(len(deps), 2)
        self.assertTrue(deps[0] == self.project.get_source_files_in_order()[0])
        self.assertTrue(deps[1] == self.project.get_source_files_in_order()[1])

        # To test that indirect dependencies are included
        deps = self.project.get_dependencies_in_compile_order(
            target=self.project.get_source_files_in_order()[2].name)
        self.assertEqual(len(deps), 3)
        self.assertTrue(deps[0] == self.project.get_source_files_in_order()[0])
        self.assertTrue(deps[1] == self.project.get_source_files_in_order()[1])
        self.assertTrue(deps[2] == self.project.get_source_files_in_order()[2])

    def test_has_verilog_module(self):
        self.project.add_library("lib", "lib_path")
        self.add_source_file("lib", "module.v", """\
module name;
endmodule
""")
        library = self.project.get_library("lib")
        modules = library.get_modules()
        self.assertEqual(len(modules), 1)

    def test_finds_verilog_package_dependencies(self):
        self.project.add_library("lib", "lib_path")
        self.add_source_file("lib", "pkg.sv", """\
package pkg;
endpackage
""")
        self.add_source_file("lib", "module.sv", """\
module name;
  import pkg::*;
endmodule
""")
        self.assert_compiles("pkg.sv", before="module.sv")

    def test_finds_verilog_module_instantiation_dependencies(self):
        self.project.add_library("lib", "lib_path")
        self.add_source_file("lib", "module1.sv", """\
module module1;
endmodule
""")
        self.add_source_file("lib", "module2.sv", """\
module module2;
  module1 inst();
endmodule
""")
        self.assert_compiles("module1.sv", before="module2.sv")

    def test_finds_verilog_include_dependencies(self):
        def create_project():
            """
            Create the test project
            """
            self.project = Project()
            self.project.add_library("lib", "lib_path")
            self.add_source_file("lib", "module.sv", """\
`include "include.svh"
""")

        write_file("include.svh", """\
module name;
endmodule
""")
        create_project()
        self.assert_should_recompile(["module.sv"])

        for src_file in self.project.get_files_in_compile_order():
            self.update(src_file.name)
        create_project()
        self.assert_should_recompile([])

        # Changing the included file must invalidate the including file.
        write_file("include.svh", """\
module other_name;
endmodule
""")
        create_project()
        self.assert_should_recompile(["module.sv"])

    def test_file_type_of(self):
        self.assertEqual(file_type_of("file.vhd"), "vhdl")
        self.assertEqual(file_type_of("file.vhdl"), "vhdl")
        self.assertEqual(file_type_of("file.sv"), "verilog")
        self.assertEqual(file_type_of("file.v"), "verilog")
        self.assertRaises(RuntimeError, file_type_of, "file.foo")

    def create_dummy_three_file_project(self, update_file1=False):
        """
        Create a project containing three dummy files,
        optionally only updating file1
        """
        self.project = Project()
        self.project.add_library("lib", "work_path")

        # NOTE(review): the update_file1 branches look swapped relative to the
        # flag name (the default content carries the "Updated" report), but the
        # tests only rely on the content changing between the two variants --
        # confirm before "fixing".
        if update_file1:
            self.add_source_file("lib", "file1.vhd", """\
entity module1 is
end entity;

architecture arch of module1 is
begin
end architecture;
""")
        else:
            self.add_source_file("lib", "file1.vhd", """\
entity module1 is
end entity;

architecture arch of module1 is
begin
  report "Updated";
end architecture;
""")
        self.add_source_file("lib", "file2.vhd", """\
entity module2 is
end entity;

architecture arch of module2 is
begin
  module1_inst : entity lib.module1;
end architecture;
""")

        self.add_source_file("lib", "file3.vhd", """\
entity module3 is
end entity;

architecture arch of module3 is
begin
  module1_inst : entity work.module2;
end architecture;
""")

    def add_source_file(self, library_name, file_name, contents):
        """
        Convenient wrapper around project.add_source_file
        """
        write_file(file_name, contents)
        self.project.add_source_file(file_name,
                                     library_name,
                                     file_type=file_type_of(file_name))

    def hash_file_name_of(self, file_name):
        """
        Get the hash file name of a file with 'file_name'
        """
        return self.project._hash_file_name_of(self.get_source_file(file_name))  # pylint: disable=protected-access

    def get_source_file(self, file_name):
        """
        Wrapper around project.get_source_file
        """
        return self.project.get_source_file(file_name)

    def update(self, file_name):
        """
        Wrapper around project.update
        """
        self.project.update(self.get_source_file(file_name))

    def assert_should_recompile(self, file_names):
        self.assert_count_equal(file_names,
                                [dep.name for dep in self.project.get_files_in_compile_order()])

    def assert_compiles(self, file_name, before):
        """
        Assert that the compile order of file_name is before the file named 'before'.
        """
        # Bring the project to a fully-updated state, then touch file_name and
        # check that 'before' is among the files needing recompilation.
        for src_file in self.project.get_files_in_compile_order():
            self.update(src_file.name)
        self.assert_should_recompile([])
        tick()
        self.update(file_name)
        self.assertIn(before, [dep.name for dep in self.project.get_files_in_compile_order()])

    def assert_not_compiles(self, file_name, before):
        """
        Assert that the compile order of file_name is not before the file named 'before'.
        """
        for src_file in self.project.get_files_in_compile_order():
            self.update(src_file.name)
        self.assert_should_recompile([])
        tick()
        self.update(file_name)
        self.assertNotIn(before, [dep.name for dep in self.project.get_files_in_compile_order()])

    def assert_has_package_body(self, source_file_name, package_name):
        """
        Assert that there is a package body with package_name within source_file_name
        """
        unit = self._find_design_unit(source_file_name,
                                      "package body",
                                      package_name,
                                      False, package_name)
        self.assertIsNotNone(unit)

    def assert_has_package(self, source_file_name, name):
        """
        Assert that there is a package with name within source_file_name
        """
        unit = self._find_design_unit(source_file_name,
                                      "package",
                                      name)
        self.assertIsNotNone(unit)

    def assert_has_entity(self, source_file_name, name,
                          generic_names=None,
                          architecture_names=None):
        """
        Assert that there is an entity with name within source_file_name
        that has architectures with architecture_names.
        """
        source_file = self.get_source_file(source_file_name)
        generic_names = [] if generic_names is None else generic_names
        architecture_names = [] if architecture_names is None else architecture_names

        for entity in source_file.library.get_entities():
            if entity.name == name:
                self.assert_count_equal(entity.generic_names, generic_names)
                self.assert_count_equal(entity.architecture_names, architecture_names)
                return

        # Fixed: missing space before "in" in the failure message.
        self.assertFalse("Did not find entity " + name + " in " + source_file_name)

    def assert_has_architecture(self, source_file_name, name, entity_name):
        """
        Assert that there is an architecture with name of entity_name
        within source_file_name
        """
        unit = self._find_design_unit(source_file_name,
                                      "architecture",
                                      name, False, entity_name)
        self.assertIsNotNone(unit)

    def assert_has_component_instantiation(self, source_file_name, component_name):
        """
        Assert that there is a component instantiation with component_name
        within source_file_name
        """
        # NOTE(review): this matches the component in any source file, not only
        # source_file_name -- confirm whether the file should be checked too.
        found_comp = False
        for source_file in self.project.get_source_files_in_order():
            for component in source_file.depending_components:
                if component == component_name:
                    found_comp = True

        self.assertTrue(found_comp,
                        "Did not find component " + component_name + " in " + source_file_name)

    def _find_design_unit(self,  # pylint: disable=too-many-arguments
                          source_file_name,
                          design_unit_type,
                          design_unit_name,
                          is_primary=True,
                          primary_design_unit_name=None):
        """
        Utility function to find and return a design unit
        """
        for source_file in self.project.get_source_files_in_order():
            for design_unit in source_file.design_units:
                if design_unit.unit_type != design_unit_type:
                    continue
                if design_unit.name != design_unit_name:
                    continue
                self.assertEqual(design_unit.is_primary, is_primary)
                self.assertEqual(source_file.name, source_file_name)
                if not is_primary:
                    self.assertEqual(design_unit.primary_design_unit,
                                     primary_design_unit_name)
                return design_unit
        return None

    def assert_count_equal(self, values1, values2):
        # Python 2.7 compatibility: assertCountEqual is not available there.
        self.assertEqual(sorted(values1), sorted(values2))
class TestProject(unittest.TestCase):  # pylint: disable=too-many-public-methods
    """
    Test the Project class: parsing of VHDL/Verilog design units,
    dependency scanning, compile ordering, incremental-recompile hashing
    and duplicate-unit warnings.

    Each test runs inside a scratch directory (created in setUp, removed
    in tearDown) so the relative file names used by the fixtures resolve
    consistently.
    """

    def setUp(self):
        self.output_path = join(dirname(__file__), "test_project_out")
        renew_path(self.output_path)
        self.project = Project()
        self.cwd = os.getcwd()
        os.chdir(self.output_path)

    def tearDown(self):
        os.chdir(self.cwd)
        if exists(self.output_path):
            rmtree(self.output_path)

    def test_parses_entity_architecture(self):
        self.project.add_library("lib", "work_path")
        file1 = self.add_source_file("lib", "file1.vhd", """\
entity foo is
end entity;

architecture arch of foo is
begin
end architecture;

architecture arch2 of foo is
begin
end architecture;
""")
        self.add_source_file("lib", "file2.vhd", """\
architecture arch3 of foo is
begin
end architecture;
""")
        # Architectures of an entity are collected across files in the library.
        self.assert_has_entity(file1, "foo", architecture_names=["arch", "arch2", "arch3"])
        self.assert_has_architecture("file1.vhd", "arch", "foo")
        self.assert_has_architecture("file1.vhd", "arch2", "foo")
        self.assert_has_architecture("file2.vhd", "arch3", "foo")

    def test_parses_entity_architecture_with_generics(self):
        self.project.add_library("lib", "work_path")
        file1 = self.add_source_file("lib", "file1.vhd", """\
entity foo is
  generic (
    testing_that_foo : boolean;
    testing_that_bar : boolean);
end entity;

architecture arch of foo is
begin
end architecture;
""")
        self.assert_has_entity(file1, "foo",
                               generic_names=["testing_that_bar", "testing_that_foo"],
                               architecture_names=["arch"])
        self.assert_has_architecture("file1.vhd", "arch", "foo")

    def test_parses_package(self):
        self.project.add_library("lib", "work_path")
        self.add_source_file("lib", "file1.vhd", """\
package foo is
end package;

package body foo is
begin
end package body;
""")
        self.assert_has_package("file1.vhd", "foo")
        self.assert_has_package_body("file1.vhd", "foo")

    def test_finds_entity_instantiation_dependencies(self):
        file1, file2, file3 = self.create_dummy_three_file_project()
        self.assert_compiles(file1, before=file2)
        self.assert_compiles(file2, before=file3)

    def test_primary_with_same_name_in_multiple_libraries_secondary_dependency(self):
        # An architecture must depend on the entity in ITS OWN library,
        # not on a same-named entity in another library.
        self.project.add_library("lib1", "lib1_path")
        self.project.add_library("lib2", "lib2_path")

        foo_arch = self.add_source_file("lib1", "foo_arch.vhd", """
architecture arch of foo is
begin
end architecture;
""")
        foo1_ent = self.add_source_file("lib1", "foo1_ent.vhd", """
entity foo is
  port (signal bar : boolean);
end entity;
""")
        self.add_source_file("lib2", "foo2_ent.vhd", """
entity foo is
end entity;
""")
        self.assert_compiles(foo1_ent, before=foo_arch)

    def test_multiple_identical_file_names_with_different_path_in_same_library(self):
        self.project.add_library("lib", "lib_path")
        a_foo = self.add_source_file("lib", join("a", "foo.vhd"), """
entity a_foo is
end entity;
""")
        b_foo = self.add_source_file("lib", join("b", "foo.vhd"), """
entity b_foo is
end entity;
""")
        # The recompile-hash must be keyed on the full path, not just the
        # base name, so updating one "foo.vhd" leaves the other pending.
        self.assert_should_recompile([a_foo, b_foo])
        self.update(a_foo)
        self.assert_should_recompile([b_foo])
        self.update(b_foo)
        self.assert_should_recompile([])

    def test_finds_entity_architecture_dependencies(self):
        self.project.add_library("lib", "lib_path")
        entity = self.add_source_file("lib", "entity.vhd", """
entity foo is
end entity;
""")
        arch1 = self.add_source_file("lib", "arch1.vhd", """
architecture arch1 of foo is
begin
end architecture;
""")
        arch2 = self.add_source_file("lib", "arch2.vhd", """
architecture arch2 of foo is
begin
end architecture;
""")
        self.assert_compiles(entity, before=arch1)
        self.assert_compiles(entity, before=arch2)

    def test_finds_package_dependencies(self):
        self.project.add_library("lib", "lib_path")
        package = self.add_source_file("lib", "package.vhd", """
package foo is
end package;
""")
        body = self.add_source_file("lib", "body.vhd", """
package body foo is
begin
end package body;
""")
        self.assert_compiles(package, before=body)

    def create_module_package_and_body(self, add_body=True):
        """
        Helper to create a three-file project with a package, an optional
        package body and a module in another library using the package.

        :param add_body: When False, no package body file is created and
                         None is returned in its place.
        :returns: (package, body, module) source-file objects
        """
        self.project.add_library("lib", "lib_path")
        package = self.add_source_file("lib", "package.vhd", """
package pkg is
end package;
""")
        body = None
        if add_body:
            body = self.add_source_file("lib", "body.vhd", """
package body pkg is
begin
end package body;
""")
        self.project.add_library("lib2", "work_path")
        module = self.add_source_file("lib2", "module.vhd", """
library lib;
use lib.pkg.all;

entity module is
end entity;

architecture arch of module is
begin
end architecture;
""")
        return package, body, module

    def test_finds_use_package_dependencies(self):
        package, body, module = self.create_module_package_and_body()
        self.assert_compiles(package, before=body)
        self.assert_compiles(package, before=module)
        # By default a package user does NOT depend on the package body.
        self.assert_not_compiles(body, before=module)

    def test_finds_extra_package_body_dependencies(self):
        self.project = Project(depend_on_package_body=True)
        package, body, module = self.create_module_package_and_body()
        self.assert_compiles(package, before=body)
        self.assert_compiles(body, before=module)
        self.assert_compiles(package, before=module)

    def test_that_package_can_have_no_body(self):
        self.project = Project(depend_on_package_body=True)
        package, _, module = self.create_module_package_and_body(add_body=False)
        self.assert_compiles(package, before=module)

    def test_package_instantiation_dependencies(self):
        self.project.add_library("pkg_lib", "pkg_lib_path")
        pkg = self.add_source_file("pkg_lib", "pkg.vhd", """
package pkg is
end package;
""")
        self.project.add_library("lib", "lib_path")
        ent = self.add_source_file("lib", "ent.vhd", """
library pkg_lib;

entity ent is
end entity;

architecture a of ent is
  package pkg_inst is new pkg_lib.pkg;
begin
end architecture;
""")
        self.assert_compiles(pkg, before=ent)

    def test_finds_context_dependencies(self):
        self.project.add_library("lib", "lib_path")
        context = self.add_source_file("lib", "context.vhd", """
context foo is
end context;
""")
        self.project.add_library("lib2", "work_path")
        module = self.add_source_file("lib2", "module.vhd", """
library lib;
context lib.foo;

entity module is
end entity;

architecture arch of module is
begin
end architecture;
""")
        self.assert_compiles(context, before=module)

    def test_finds_configuration_dependencies(self):
        self.project.add_library("lib", "lib_path")
        cfg = self.add_source_file("lib", "cfg.vhd", """
configuration cfg of ent is
end configuration;
""")
        ent = self.add_source_file("lib", "ent.vhd", """
entity ent is
end entity;
""")
        ent_a1 = self.add_source_file("lib", "ent_a1.vhd", """
architecture a1 of ent is
begin
end architecture;
""")
        ent_a2 = self.add_source_file("lib", "ent_a2.vhd", """
architecture a2 of ent is
begin
end architecture;
""")
        # A configuration depends on its entity and on every architecture
        # of that entity.
        self.assert_compiles(ent, before=cfg)
        self.assert_compiles(ent_a1, before=cfg)
        self.assert_compiles(ent_a2, before=cfg)

    def test_finds_configuration_reference_dependencies(self):
        self.project.add_library("lib", "lib_path")
        cfg = self.add_source_file("lib", "cfg.vhd", """
configuration cfg of ent is
end configuration;
""")
        self.add_source_file("lib", "ent.vhd", """
entity ent is
end entity;
""")
        self.add_source_file("lib", "ent_a.vhd", """
architecture a of ent is
begin
end architecture;
""")
        top = self.add_source_file("lib", "top.vhd", """
entity top is
end entity;

architecture a of top is
  for inst : comp use configuration work.cfg;
begin
  inst : comp;
end architecture;
""")
        self.assert_compiles(cfg, before=top)

    def test_specific_architecture_reference_dependencies(self):
        """
        GHDL also depends on the architecture when it is specifically mentioned
        """
        self.project.add_library("lib", "lib_path")
        self.add_source_file("lib", "ent.vhd", """
entity ent is
end entity;
""")
        ent_a1 = self.add_source_file("lib", "ent_a1.vhd", """
architecture a1 of ent is
begin
end architecture;
""")
        ent_a2 = self.add_source_file("lib", "ent_a2.vhd", """
architecture a2 of ent is
begin
end architecture;
""")
        top1 = self.add_source_file("lib", "top1.vhd", """
entity top1 is
end entity;

architecture a of top1 is
begin
  inst : entity work.ent(a1);
end architecture;
""")
        top2 = self.add_source_file("lib", "top2.vhd", """
entity top2 is
end entity;

architecture a of top2 is
  for inst : comp use entity work.ent(a2);
begin
  inst : comp;
end architecture;
""")
        self.assert_compiles(ent_a1, before=top1)
        self.assert_compiles(ent_a2, before=top2)

    @mock.patch("vunit.project.LOGGER")
    def test_warning_on_missing_specific_architecture_reference(self, mock_logger):
        self.project.add_library("lib", "lib_path")
        self.add_source_file("lib", "ent.vhd", """
entity ent is
end entity;
""")
        self.add_source_file("lib", "arch.vhd", """
architecture a1 of ent is
begin
end architecture;
""")
        # Fix: VHDL comments use "--", not "#"; the fixture previously
        # contained "# Missing" which is not valid VHDL comment syntax.
        self.add_source_file("lib", "top.vhd", """
entity top1 is
end entity;

architecture a of top1 is
begin
  inst1 : entity work.ent(a1);
  inst2 : entity work.ent(a2); -- Missing
end architecture;
""")
        self.project.get_files_in_compile_order()
        warning_calls = mock_logger.warning.call_args_list
        # Reconstruct the rendered message from the lazy %-style args.
        log_msg = warning_calls[0][0][0] % warning_calls[0][0][1:]
        self.assertEqual(len(warning_calls), 1)
        self.assertIn("top.vhd", log_msg)
        self.assertIn("a2", log_msg)
        self.assertIn("lib.ent", log_msg)

    def _test_warning_on_duplicate(self, mock_logger, code, message):
        """
        Utility to add the same code under file.vhd and file_copy.vhd,
        asserting that only the duplicate triggers exactly one warning
        with the given message.
        """
        self.add_source_file("lib", "file.vhd", code)

        warning_calls = mock_logger.warning.call_args_list
        self.assertEqual(len(warning_calls), 0)

        self.add_source_file("lib", "file_copy.vhd", code)

        warning_calls = mock_logger.warning.call_args_list
        self.assertEqual(len(warning_calls), 1)
        log_msg = warning_calls[0][0][0] % warning_calls[0][0][1:]
        self.assertEqual(log_msg, message)

    @mock.patch("vunit.project.LOGGER")
    def test_warning_on_duplicate_entity(self, mock_logger):
        self.project.add_library("lib", "lib_path")
        self._test_warning_on_duplicate(
            mock_logger,
            """
entity ent is
end entity;
""",
            "file_copy.vhd: entity 'ent' previously defined in file.vhd")

    @mock.patch("vunit.project.LOGGER")
    def test_warning_on_duplicate_package(self, mock_logger):
        self.project.add_library("lib", "lib_path")
        self._test_warning_on_duplicate(
            mock_logger,
            """
package pkg is
end package;
""",
            "file_copy.vhd: package 'pkg' previously defined in file.vhd")

    @mock.patch("vunit.project.LOGGER")
    def test_warning_on_duplicate_configuration(self, mock_logger):
        self.project.add_library("lib", "lib_path")
        self._test_warning_on_duplicate(
            mock_logger,
            """
configuration cfg of ent is
end configuration;
""",
            "file_copy.vhd: configuration 'cfg' previously defined in file.vhd")

    @mock.patch("vunit.project.LOGGER")
    def test_warning_on_duplicate_package_body(self, mock_logger):
        self.project.add_library("lib", "lib_path")
        self.add_source_file("lib", "pkg.vhd", """
package pkg is
end package;
""")
        # "bodY" mixed case is deliberate: identifier matching is
        # case-insensitive in VHDL.
        self._test_warning_on_duplicate(
            mock_logger,
            """
package body pkg is
end package bodY;
""",
            "file_copy.vhd: package body 'pkg' previously defined in file.vhd")

    @mock.patch("vunit.project.LOGGER")
    def test_warning_on_duplicate_architecture(self, mock_logger):
        self.project.add_library("lib", "lib_path")
        self.add_source_file("lib", "ent.vhd", """
entity ent is
end entity;
""")
        # A differently-named architecture must NOT trigger the warning.
        self.add_source_file("lib", "arch.vhd", """
architecture a_no_duplicate of ent is
begin
end architecture;
""")
        self._test_warning_on_duplicate(
            mock_logger,
            """
architecture a of ent is
begin
end architecture;
""",
            "file_copy.vhd: architecture 'a' previously defined in file.vhd")

    @mock.patch("vunit.project.LOGGER")
    def test_warning_on_duplicate_context(self, mock_logger):
        self.project.add_library("lib", "lib_path")
        self._test_warning_on_duplicate(
            mock_logger,
            """
context ctx is
end context;
""",
            "file_copy.vhd: context 'ctx' previously defined in file.vhd")

    def test_should_recompile_all_files_initially(self):
        file1, file2, file3 = self.create_dummy_three_file_project()
        self.assert_should_recompile([file1, file2, file3])
        # Asking twice must not change the answer (query has no side effects).
        self.assert_should_recompile([file1, file2, file3])

    def test_updating_creates_hash_files(self):
        files = self.create_dummy_three_file_project()
        for source_file in files:
            self.update(source_file)
            self.assertTrue(exists(self.hash_file_name_of(source_file)))

    def test_should_not_recompile_updated_files(self):
        file1, file2, file3 = self.create_dummy_three_file_project()
        self.update(file1)
        self.assert_should_recompile([file2, file3])
        self.update(file2)
        self.assert_should_recompile([file3])
        self.update(file3)
        self.assert_should_recompile([])

    def test_should_recompile_files_affected_by_change(self):
        file1, file2, file3 = self.create_dummy_three_file_project()
        self.update(file1)
        self.update(file2)
        self.update(file3)
        self.assert_should_recompile([])

        # Re-creating the identical project changes nothing.
        file1, file2, file3 = self.create_dummy_three_file_project()
        self.assert_should_recompile([])

        # Changing file1's contents invalidates file1 and its dependents.
        file1, file2, file3 = self.create_dummy_three_file_project(update_file1=True)
        self.assert_should_recompile([file1, file2, file3])

    def test_should_recompile_files_after_changing_compile_options(self):
        file1, file2, file3 = self.create_dummy_three_file_project()
        self.update(file1)
        self.update(file2)
        self.update(file3)
        self.assert_should_recompile([])

        file2.set_compile_option("ghdl.flags", ["--no-vital-checks"])
        self.assert_should_recompile([file2, file3])

    def test_should_recompile_files_after_changing_vhdl_standard(self):
        write_file("file_name.vhd", "")

        self.project = Project()
        self.project.add_library("lib", "lib_path")
        source_file = self.project.add_source_file("file_name.vhd", library_name="lib",
                                                   vhdl_standard='2008')
        self.assert_should_recompile([source_file])
        self.update(source_file)
        self.assert_should_recompile([])

        # Same file, different VHDL standard => hash changes => recompile.
        self.project = Project()
        self.project.add_library("lib", "lib_path")
        source_file = self.project.add_source_file("file_name.vhd", library_name="lib",
                                                   vhdl_standard='2002')
        self.assert_should_recompile([source_file])

    def test_add_compile_option(self):
        file1, _, _ = self.create_dummy_three_file_project()
        file1.add_compile_option("ghdl.flags", ["--foo"])
        self.assertEqual(file1.get_compile_option("ghdl.flags"), ["--foo"])

        # add_ appends, set_ replaces.
        file1.add_compile_option("ghdl.flags", ["--bar"])
        self.assertEqual(file1.get_compile_option("ghdl.flags"), ["--foo", "--bar"])

        file1.set_compile_option("ghdl.flags", ["--xyz"])
        self.assertEqual(file1.get_compile_option("ghdl.flags"), ["--xyz"])

    def test_should_recompile_files_affected_by_change_with_later_timestamp(self):
        file1, file2, file3 = self.create_dummy_three_file_project()
        self.update(file1)
        self.update(file2)
        self.update(file3)
        self.assert_should_recompile([])

        file1, file2, file3 = self.create_dummy_three_file_project()
        self.assert_should_recompile([])

        file1, file2, file3 = self.create_dummy_three_file_project(update_file1=True)
        self.assert_should_recompile([file1, file2, file3])

        # tick() advances wall-clock so the new hash has a later timestamp.
        tick()
        self.update(file1)
        self.assert_should_recompile([file2, file3])

    def test_should_recompile_files_missing_hash(self):
        file1, file2, file3 = self.create_dummy_three_file_project()
        self.update(file1)
        self.update(file2)
        self.update(file3)
        self.assert_should_recompile([])

        os.remove(self.hash_file_name_of(file2))
        # file2's dependents (file3) must also be recompiled.
        self.assert_should_recompile([file2, file3])

    def test_finds_component_instantiation_dependencies(self):
        self.project = Project(depend_on_components=True)
        self.project.add_library("toplib", "work_path")
        top = self.add_source_file("toplib", "top.vhd", """\
entity top is
end entity;

architecture arch of top is
begin
    labelFoo : component foo
    generic map(WIDTH => 16)
    port map(clk => '1',
             rst => '0',
             in_vec => record_reg.input_signal,
             output => some_signal(UPPER_CONSTANT-1 downto LOWER_CONSTANT+1));
    label2Foo : foo2
    port map(clk => '1',
             rst => '0',
             output => "00");
end architecture;
""")
        self.project.add_library("libcomp1", "work_path")
        comp1 = self.add_source_file("libcomp1", "comp1.vhd", """\
entity foo is
end entity;

architecture arch of foo is
begin
end architecture;
""")
        self.project.add_library("libcomp2", "work_path")
        comp2 = self.add_source_file("libcomp2", "comp2.vhd", """\
entity foo2 is
end entity;

architecture arch of foo2 is
begin
end architecture;
""")
        self.assert_has_component_instantiation("top.vhd", "foo")
        self.assert_has_component_instantiation("top.vhd", "foo2")
        self.assert_compiles(comp1, before=top)
        self.assert_compiles(comp2, before=top)

    def test_get_dependencies_in_compile_order_without_target(self):
        self.create_dummy_three_file_project()
        deps = self.project.get_dependencies_in_compile_order()
        self.assertEqual(len(deps), 3)
        self.assertTrue(deps[0] == self.project.get_source_files_in_order()[0])
        self.assertTrue(deps[1] == self.project.get_source_files_in_order()[1])
        self.assertTrue(deps[2] == self.project.get_source_files_in_order()[2])

    def test_get_dependencies_in_compile_order_with_target(self):
        self.create_dummy_three_file_project()
        deps = self.project.get_dependencies_in_compile_order(
            target_files=[self.project.get_source_files_in_order()[1]])
        self.assertEqual(len(deps), 2)
        self.assertTrue(deps[0] == self.project.get_source_files_in_order()[0])
        self.assertTrue(deps[1] == self.project.get_source_files_in_order()[1])

        # To test that indirect dependencies are included
        deps = self.project.get_dependencies_in_compile_order(
            target_files=[self.project.get_source_files_in_order()[2]])
        self.assertEqual(len(deps), 3)
        self.assertTrue(deps[0] == self.project.get_source_files_in_order()[0])
        self.assertTrue(deps[1] == self.project.get_source_files_in_order()[1])
        self.assertTrue(deps[2] == self.project.get_source_files_in_order()[2])

    def test_compiles_same_file_into_different_libraries(self):
        pkgs = []
        second_pkgs = []
        self.project.add_library("lib", "lib_path")
        other_pkg = self.add_source_file("lib", "other_pkg.vhd", """
package other_pkg is
end package other_pkg;
""")
        for lib in ["lib1", "lib2"]:
            self.project.add_library(lib, lib + "_path")
            pkgs.append(self.add_source_file(lib, "pkg.vhd", """
library lib;
use lib.other_pkg.all;
package pkg is
end package pkg;
"""))
            second_pkgs.append(self.add_source_file(lib, lib + "_pkg.vhd", """
use work.pkg.all;
package second_pkg is
end package second_pkg;
"""))
        # Same file name compiled into two libraries must get distinct hashes.
        self.assertNotEqual(self.hash_file_name_of(pkgs[0]),
                            self.hash_file_name_of(pkgs[1]))
        self.assertEqual(len(self.project.get_files_in_compile_order()), 5)
        self.assert_compiles(other_pkg, before=pkgs[0])
        self.assert_compiles(other_pkg, before=pkgs[1])
        # "work" must resolve to each file's own library.
        self.assert_compiles(pkgs[0], before=second_pkgs[0])
        self.assert_compiles(pkgs[1], before=second_pkgs[1])

    def test_has_verilog_module(self):
        self.project.add_library("lib", "lib_path")
        self.add_source_file("lib", "module.v", """\
module name;
endmodule
""")
        library = self.project.get_library("lib")
        modules = library.get_modules()
        self.assertEqual(len(modules), 1)

    def test_finds_verilog_package_import_dependencies(self):
        self.project.add_library("lib", "lib_path")
        pkg = self.add_source_file("lib", "pkg.sv", """\
package pkg;
endpackage
""")
        module = self.add_source_file("lib", "module.sv", """\
module name;
  import pkg::*;
endmodule
""")
        self.assert_compiles(pkg, before=module)

    def test_finds_verilog_package_reference_dependencies(self):
        self.project.add_library("lib", "lib_path")
        pkg = self.add_source_file("lib", "pkg.sv", """\
package pkg;
endpackage
""")
        module = self.add_source_file("lib", "module.sv", """\
module name;
  pkg::func();
endmodule
""")
        self.assert_compiles(pkg, before=module)

    def test_finds_verilog_module_instantiation_dependencies(self):
        self.project.add_library("lib", "lib_path")
        module1 = self.add_source_file("lib", "module1.sv", """\
module module1;
endmodule
""")
        module2 = self.add_source_file("lib", "module2.sv", """\
module module2;
  module1 inst();
endmodule
""")
        self.assert_compiles(module1, before=module2)

    def test_finds_verilog_include_dependencies(self):
        def create_project():
            """
            Create the test project
            """
            self.project = Project()
            self.project.add_library("lib", "lib_path")
            return self.add_source_file("lib", "module.sv", """\
`include "include.svh"
""")

        write_file("include.svh", """\
module name;
endmodule
""")
        module = create_project()
        self.assert_should_recompile([module])

        for src_file in self.project.get_files_in_compile_order():
            self.update(src_file)
        create_project()
        self.assert_should_recompile([])

        # Changing only the included file must invalidate the includer.
        write_file("include.svh", """\
module other_name;
endmodule
""")
        module = create_project()
        self.assert_should_recompile([module])

    def test_verilog_defines_affects_dependency_scanning(self):
        self.project.add_library("lib", "lib_path")
        self.add_source_file("lib", "module.v", """\
`ifdef foo
module mod;
endmodule
`endif
""")
        library = self.project.get_library("lib")
        modules = library.get_modules()
        self.assertEqual(len(modules), 0)

        self.project = Project()
        self.project.add_library("lib", "lib_path")
        self.add_source_file("lib", "module.v", """\
`ifdef foo
module mod;
endmodule
`endif
""", defines={"foo": ""})
        library = self.project.get_library("lib")
        modules = library.get_modules()
        self.assertEqual(len(modules), 1)

    def test_recompile_when_updating_defines(self):
        contents = """
module mod;
endmodule
"""
        self.project = Project()
        self.project.add_library("lib", "lib_path")
        mod1 = self.add_source_file("lib", "module1.v", contents)
        mod2 = self.add_source_file("lib", "module2.v", contents)
        self.assert_should_recompile([mod1, mod2])
        self.update(mod1)
        self.update(mod2)
        self.assert_should_recompile([])

        # Adding a define to mod1 only must invalidate mod1 only.
        self.project = Project()
        self.project.add_library("lib", "lib_path")
        mod1 = self.add_source_file("lib", "module1.v", contents,
                                    defines={"foo": "bar"})
        mod2 = self.add_source_file("lib", "module2.v", contents)
        self.assert_should_recompile([mod1])
        self.update(mod1)
        self.update(mod2)
        self.assert_should_recompile([])

        # Changing the define's value must invalidate mod1 again.
        self.project = Project()
        self.project.add_library("lib", "lib_path")
        mod1 = self.add_source_file("lib", "module1.v", contents,
                                    defines={"foo": "other_bar"})
        mod2 = self.add_source_file("lib", "module2.v", contents)
        self.assert_should_recompile([mod1])
        self.update(mod1)
        self.update(mod2)
        self.assert_should_recompile([])

    def test_manual_dependencies(self):
        self.project.add_library("lib", "lib_path")
        ent1 = self.add_source_file("lib", "ent1.vhd", """\
entity ent1 is
end ent1;

architecture arch of ent1 is
begin
end architecture;
""")
        ent2 = self.add_source_file("lib", "ent2.vhd", """\
entity ent2 is
end ent2;

architecture arch of ent2 is
begin
end architecture;
""")
        self.project.add_manual_dependency(ent2, depends_on=ent1)
        self.assert_compiles(ent1, before=ent2)

    @mock.patch("vunit.project.LOGGER", autospec=True)
    def test_circular_dependencies_causes_error(self, logger):
        self.project.add_library("lib", "lib_path")
        self.add_source_file("lib", "ent1.vhd", """\
entity ent1 is
end ent1;

architecture arch of ent1 is
begin
  ent2_inst : entity work.ent2;
end architecture;
""")
        self.add_source_file("lib", "ent2.vhd", """\
entity ent2 is
end ent2;

architecture arch of ent2 is
begin
  ent1_inst : entity work.ent1;
end architecture;
""")
        self.assertRaises(CompileError, self.project.get_files_in_compile_order)
        logger.error.assert_called_once_with(
            "Found circular dependency:\n%s",
            "ent1.vhd ->\n"
            "ent2.vhd ->\n"
            "ent1.vhd")

    def test_order_of_adding_libraries_is_kept(self):
        # Fix: combinations(range(4), 4) yields exactly one tuple
        # (0, 1, 2, 3), so the original never exercised a different
        # insertion order; permutations covers all 24 orders.
        for order in itertools.permutations(range(4)):
            project = Project()
            for idx in order:
                project.add_library("lib%i" % idx, "lib%i_path" % idx)
            library_names = [lib.name for lib in project.get_libraries()]
            self.assertEqual(library_names, ["lib%i" % idx for idx in order])

    def test_file_type_of(self):
        self.assertEqual(file_type_of("file.vhd"), "vhdl")
        self.assertEqual(file_type_of("file.vhdl"), "vhdl")
        self.assertEqual(file_type_of("file.sv"), "verilog")
        self.assertEqual(file_type_of("file.v"), "verilog")
        self.assertEqual(file_type_of("file.vams"), "verilog")
        self.assertRaises(RuntimeError, file_type_of, "file.foo")

    def create_dummy_three_file_project(self, update_file1=False):
        """
        Create a project containing three dummy files where
        file3 depends on file2 which depends on file1.

        :param update_file1: When True, file1 gets modified contents
                             (an extra report statement) so its hash
                             differs from the default variant.
        :returns: (file1, file2, file3) source-file objects
        """
        self.project = Project()
        self.project.add_library("lib", "work_path")

        # Fix: the branches were previously swapped — the "Updated" variant
        # was produced when update_file1 was False, contradicting the flag.
        if update_file1:
            file1 = self.add_source_file("lib", "file1.vhd", """\
entity module1 is
end entity;

architecture arch of module1 is
begin
  report "Updated";
end architecture;
""")
        else:
            file1 = self.add_source_file("lib", "file1.vhd", """\
entity module1 is
end entity;

architecture arch of module1 is
begin
end architecture;
""")
        file2 = self.add_source_file("lib", "file2.vhd", """\
entity module2 is
end entity;

architecture arch of module2 is
begin
  module1_inst : entity lib.module1;
end architecture;
""")
        file3 = self.add_source_file("lib", "file3.vhd", """\
entity module3 is
end entity;

architecture arch of module3 is
begin
  module1_inst : entity work.module2;
end architecture;
""")
        return file1, file2, file3

    def test_add_source_file_has_vhdl_standard(self):
        write_file("file.vhd", "")

        for std in ('93', '2002', '2008'):
            project = Project()
            project.add_library("lib", "lib_path")
            source_file = project.add_source_file("file.vhd", library_name="lib",
                                                  file_type='vhdl', vhdl_standard=std)
            self.assertEqual(source_file.get_vhdl_standard(), std)

    def test_add_source_file_detects_illegal_vhdl_standard(self):
        write_file("file.vhd", "")
        project = Project()
        project.add_library("lib", "lib_path")
        self.assertRaises(ValueError, project.add_source_file, "file.vhd",
                          library_name="lib", file_type='vhdl', vhdl_standard='2007')

    def test_add_source_file_has_no_parse_vhdl(self):
        for no_parse in (True, False):
            project = Project()
            file_name = "file.vhd"
            write_file(file_name, """
entity ent is
end entity;
""")
            project.add_library("lib", "work_path")
            source_file = project.add_source_file(file_name, "lib",
                                                  file_type=file_type_of(file_name),
                                                  no_parse=no_parse)
            # With no_parse the file yields zero design units.
            self.assertEqual(len(source_file.design_units), int(not no_parse))

    def test_add_source_file_has_no_parse_verilog(self):
        for no_parse in (True, False):
            project = Project()
            file_name = "file.v"
            write_file(file_name, """
module mod;
endmodule
""")
            project.add_library("lib", "work_path")
            source_file = project.add_source_file(file_name, "lib",
                                                  file_type=file_type_of(file_name),
                                                  no_parse=no_parse)
            self.assertEqual(len(source_file.design_units), int(not no_parse))

    def add_source_file(self, library_name, file_name, contents, defines=None):
        """
        Convenience wrapper around project.add_source_file that first
        writes the contents to disk.
        """
        write_file(file_name, contents)
        source_file = self.project.add_source_file(file_name,
                                                   library_name,
                                                   file_type=file_type_of(file_name),
                                                   defines=defines)
        return source_file

    def hash_file_name_of(self, source_file):
        """
        Get the hash file name of a source_file
        """
        return self.project._hash_file_name_of(source_file)  # pylint: disable=protected-access

    def update(self, source_file):
        """
        Wrapper around project.update
        """
        self.project.update(source_file)

    def assert_should_recompile(self, source_files):
        self.assert_count_equal(source_files, self.project.get_files_in_compile_order())

    def assert_compiles(self, source_file, before):
        """
        Assert that modifying source_file makes 'before' appear in the
        pending compile order, i.e. source_file compiles before it.
        """
        for src_file in self.project.get_files_in_compile_order():
            self.update(src_file)
        self.assert_should_recompile([])
        tick()
        self.update(source_file)
        self.assertIn(before, self.project.get_files_in_compile_order())

    def assert_not_compiles(self, source_file, before):
        """
        Assert that modifying source_file does NOT put 'before' in the
        pending compile order, i.e. there is no dependency between them.
        """
        for src_file in self.project.get_files_in_compile_order():
            self.update(src_file)
        self.assert_should_recompile([])
        tick()
        self.update(source_file)
        self.assertNotIn(before, self.project.get_files_in_compile_order())

    def assert_has_package_body(self, source_file_name, package_name):
        """
        Assert that there is a package body with package_name within source_file_name
        """
        unit = self._find_design_unit(source_file_name,
                                      "package body",
                                      package_name,
                                      False, package_name)
        self.assertIsNotNone(unit)

    def assert_has_package(self, source_file_name, name):
        """
        Assert that there is a package with name within source_file_name
        """
        unit = self._find_design_unit(source_file_name,
                                      "package",
                                      name)
        self.assertIsNotNone(unit)

    def assert_has_entity(self, source_file, name,
                          generic_names=None,
                          architecture_names=None):
        """
        Assert that there is an entity with name within source_file
        that has architectures with architecture_names.
        """
        generic_names = [] if generic_names is None else generic_names
        architecture_names = [] if architecture_names is None else architecture_names

        for entity in source_file.library.get_entities():
            if entity.name == name:
                self.assert_count_equal(entity.generic_names, generic_names)
                self.assert_count_equal(entity.architecture_names, architecture_names)
                return

        # assertFalse on a non-empty string always fails; used here as
        # fail() with a message. Fix: missing space before "in".
        self.assertFalse("Did not find entity " + name + " in " + source_file.name)

    def assert_has_architecture(self, source_file_name, name, entity_name):
        """
        Assert that there is an architecture with name of entity_name
        within source_file_name
        """
        unit = self._find_design_unit(source_file_name,
                                      "architecture",
                                      name, False, entity_name)
        self.assertIsNotNone(unit)

    def assert_has_component_instantiation(self, source_file_name, component_name):
        """
        Assert that there is a component instantiation with component_name
        within source_file_name
        """
        found_comp = False
        for source_file in self.project.get_source_files_in_order():
            for component in source_file.depending_components:
                if component == component_name:
                    found_comp = True

        self.assertTrue(found_comp,
                        "Did not find component " + component_name + " in " + source_file_name)

    def _find_design_unit(self,  # pylint: disable=too-many-arguments
                          source_file_name,
                          design_unit_type,
                          design_unit_name,
                          is_primary=True,
                          primary_design_unit_name=None):
        """
        Utility function to find and return a design unit.

        Returns None when no unit of the given type/name exists; when a
        match is found it also asserts the containing file name and
        primary/secondary relationship.
        """
        for source_file in self.project.get_source_files_in_order():
            for design_unit in source_file.design_units:
                if design_unit.unit_type != design_unit_type:
                    continue
                if design_unit.name != design_unit_name:
                    continue
                self.assertEqual(design_unit.is_primary, is_primary)
                self.assertEqual(source_file.name, source_file_name)
                if not is_primary:
                    self.assertEqual(design_unit.primary_design_unit,
                                     primary_design_unit_name)
                return design_unit
        return None

    def assert_count_equal(self, values1, values2):
        # Python 2.7 compatibility: assertCountEqual is not available there,
        # so compare sorted copies instead.
        self.assertEqual(sorted(values1), sorted(values2))
class VUnit(object): # pylint: disable=too-many-instance-attributes, too-many-public-methods """ The public interface of VUnit :example: .. code-block:: python from vunit import VUnit """ @classmethod def from_argv(cls, argv=None, compile_builtins=True): """ Create VUnit instance from command line arguments. :param argv: Use explicit argv instead of actual command line argument :param compile_builtins: Do not compile builtins. Used for VUnit internal testing. :returns: A :class:`.VUnit` object instance :example: .. code-block:: python from vunit import VUnit prj = VUnit.from_argv() """ args = VUnitCLI().parse_args(argv=argv) return cls.from_args(args, compile_builtins=compile_builtins) @classmethod def from_args(cls, args, compile_builtins=True): """ Create VUnit instance from args namespace. Intended for users who adds custom command line options. See :class:`vunit.vunit_cli.VUnitCLI` class to learn about adding custom command line options. :param args: The parsed argument namespace object :param compile_builtins: Do not compile builtins. Used for VUnit internal testing. 
:returns: A :class:`.VUnit` object instance """ def test_filter(name): return any(fnmatch(name, pattern) for pattern in args.test_patterns) return cls(output_path=args.output_path, clean=args.clean, vhdl_standard=select_vhdl_standard(), use_debug_codecs=args.use_debug_codecs, no_color=args.no_color, verbose=args.verbose, xunit_xml=args.xunit_xml, log_level=args.log_level, test_filter=test_filter, list_only=args.list, list_files_only=args.files, compile_only=args.compile, keep_compiling=args.keep_compiling, elaborate_only=args.elaborate, compile_builtins=compile_builtins, simulator_factory=SimulatorFactory(args), num_threads=args.num_threads, exit_0=args.exit_0) def __init__(self, # pylint: disable=too-many-locals, too-many-arguments output_path, simulator_factory, clean=False, use_debug_codecs=False, no_color=False, verbose=False, xunit_xml=None, log_level="warning", test_filter=None, list_only=False, list_files_only=False, compile_only=False, keep_compiling=False, elaborate_only=False, vhdl_standard='2008', compile_builtins=True, num_threads=1, exit_0=False): self._configure_logging(log_level) self._elaborate_only = elaborate_only self._output_path = abspath(output_path) if no_color: self._printer = NO_COLOR_PRINTER else: self._printer = COLOR_PRINTER self._verbose = verbose self._xunit_xml = xunit_xml self._test_filter = test_filter if test_filter is not None else lambda name: True self._list_only = list_only self._list_files_only = list_files_only self._compile_only = compile_only self._keep_compiling = keep_compiling self._vhdl_standard = vhdl_standard self._tb_filter = tb_filter self._configuration = TestConfiguration() self._external_preprocessors = [] self._location_preprocessor = None self._check_preprocessor = None self._use_debug_codecs = use_debug_codecs self._simulator_factory = simulator_factory self._create_output_path(clean) self._project = None self._create_project() self._num_threads = num_threads self._exit_0 = exit_0 if compile_builtins: 
self.add_builtins(library_name="vunit_lib")

def _create_project(self):
    """
    Create the internal Project instance using cached VHDL and Verilog parsers.
    """
    database = self._create_database()
    self._project = Project(
        vhdl_parser=CachedVHDLParser(database=database),
        verilog_parser=VerilogParser(database=database),
        depend_on_package_body=self._simulator_factory.package_users_depend_on_bodies())

def _create_database(self):
    """
    Create a persistent database to store expensive parse results.

    Check that the Python version used to create the database is the same
    as the running Python instance, otherwise re-create the database.

    :returns: A :class:`PickledDataBase` wrapping the on-disk database
    """
    project_database_file_name = join(self._output_path, "project_database")
    create_new = False
    key = b"version"
    # The leading integer is the database format version; bump it to
    # invalidate databases written by older VUnit versions.
    version = str((6, sys.version)).encode()
    database = None
    try:
        database = DataBase(project_database_file_name)
        create_new = (key not in database) or (database[key] != version)
    except KeyboardInterrupt:
        raise
    except:  # pylint: disable=bare-except
        # A corrupt database is not fatal; log it and rebuild from scratch.
        traceback.print_exc()
        create_new = True

    if create_new:
        database = DataBase(project_database_file_name, new=True)
    database[key] = version

    return PickledDataBase(database)

@staticmethod
def _configure_logging(log_level):
    """
    Configure the root logger based on the log_level string.

    :param log_level: One of "debug", "info", "warning", "error"
                      (any name of a standard logging level, case-insensitive)
    """
    level = getattr(logging, log_level.upper())
    logging.basicConfig(filename=None,
                        format='%(levelname)7s - %(message)s',
                        level=level)

def add_external_library(self, library_name, path):
    """
    Add an externally compiled library as a black-box

    :param library_name: The name of the external library
    :param path: The path to the external library
    :returns: The created :class:`.Library` object

    :example:

    .. code-block:: python

       prj.add_external_library("unisim", "path/to/unisim/")

    """
    self._project.add_library(library_name, abspath(path), is_external=True)
    return self._create_library_facade(library_name)

def add_library(self, library_name):
    """
    Add a library managed by VUnit.

    :param library_name: The name of the library
    :returns: The created :class:`.Library` object

    :example:

    .. code-block:: python

       library = prj.add_library("lib")

    """
    path = join(self._simulator_factory.simulator_output_path,
                "libraries", library_name)
    self._project.add_library(library_name, abspath(path))
    return self._create_library_facade(library_name)

def library(self, library_name):
    """
    Get a previously added library

    :param library_name: The name of the library
    :returns: A :class:`.Library` object
    :raises KeyError: If no library with that name has been added
    """
    if not self._project.has_library(library_name):
        raise KeyError(library_name)
    return self._create_library_facade(library_name)

def _create_library_facade(self, library_name):
    """
    Create a Library facade object to be exposed to users.
    """
    return Library(library_name, self, self._project, self._configuration)

def set_generic(self, name, value):
    """
    Globally set a value of generic

    :param name: The name of the generic
    :param value: The value of the generic

    :example:

    .. code-block:: python

       prj.set_generic("data_width", 16)

    """
    # VHDL generic names are case-insensitive, so normalize to lower case.
    self._configuration.set_generic(name.lower(), value, scope=create_scope())

def set_parameter(self, name, value):
    """
    Globally set value of parameter

    :param name: The name of the parameter
    :param value: The value of the parameter

    :example:

    .. code-block:: python

       prj.set_parameter("data_width", 16)

    """
    # Verilog parameter names are case-sensitive; keep the name as given.
    self._configuration.set_generic(name, value, scope=create_scope())

def set_sim_option(self, name, value):
    """
    Globally set simulation option

    :param name: |simulation_options|
    :param value: The value of the simulation option

    :example:

    .. code-block:: python

       prj.set_sim_option("ghdl.flags", ["--no-vital-checks"])

    """
    self._configuration.set_sim_option(name, value, scope=create_scope())

def set_compile_option(self, name, value):
    """
    Globally set compile option on all source files

    :param name: |compile_option|
    :param value: The value of the compile option

    :example:

    .. code-block:: python

       prj.set_compile_option("ghdl.flags", ["--no-vital-checks"])

    """
    for source_file in self._project.get_source_files_in_order():
        source_file.set_compile_option(name, value)

def add_compile_option(self, name, value):
    """
    Globally add compile option to all source files

    :param name: |compile_option|
    :param value: The value of the compile option
    """
    for source_file in self._project.get_source_files_in_order():
        source_file.add_compile_option(name, value)

def set_pli(self, value):
    """
    Globally set pli

    :param value: A list of PLI object file names
    """
    self._configuration.set_pli(value, scope=create_scope())

def disable_ieee_warnings(self):
    """
    Globally disable ieee warnings
    """
    self._configuration.disable_ieee_warnings(scope=create_scope())

def get_source_file(self, file_name, library_name=None):
    """
    Get a source file

    :param file_name: The name of the file as a relative or absolute path
    :param library_name: The name of a specific library to search if not all libraries
    :returns: A :class:`.SourceFile` object
    :raises ValueError: If no file, or more than one file, matches
    """
    files = self.get_source_files(file_name, library_name, allow_empty=True)
    if len(files) > 1:
        raise ValueError("Found file named '%s' in multiple-libraries, "
                         "add explicit library_name." % file_name)
    elif len(files) == 0:
        if library_name is None:
            raise ValueError("Found no file named '%s'" % file_name)
        else:
            raise ValueError("Found no file named '%s' in library '%s'"
                             % (file_name, library_name))
    return files[0]

def get_source_files(self, pattern="*", library_name=None, allow_empty=False):
    """
    Get a list of source files

    :param pattern: A wildcard pattern matching either an absolute or relative path
    :param library_name: The name of a specific library to search if not all libraries
    :param allow_empty: To disable an error if no files matched the pattern
    :returns: A :class:`.SourceFileList` object
    """
    results = []
    for source_file in self._project.get_source_files_in_order():
        if library_name is not None:
            if source_file.library.name != library_name:
                continue

        # Match the pattern against both the absolute and the simplified
        # (relative) form of the file name so either style works.
        if not (fnmatch(abspath(source_file.name), pattern) or
                fnmatch(ostools.simplify_path(source_file.name), pattern)):
            continue

        results.append(SourceFile(source_file, self._project, self))

    if (not allow_empty) and len(results) == 0:
        raise ValueError(("Pattern %r did not match any file. "
                          "Use allow_empty=True to avoid exception,") % pattern)

    return SourceFileList(results)

def add_source_files(self,  # pylint: disable=too-many-arguments
                     files, library_name, preprocessors=None,
                     include_dirs=None, defines=None, allow_empty=False):
    """
    Add source files matching wildcard pattern to library

    :param files: A wildcard pattern matching the files to add or a list of files
    :param library_name: The name of the library to add files into
    :param include_dirs: A list of include directories
    :param defines: A dictionary containing Verilog defines to be set
    :param allow_empty: To disable an error if no files matched the pattern
    :returns: A list of files (:class:`.SourceFileList`) which were added

    :example:

    .. code-block:: python

       prj.add_source_files("*.vhd", "lib")

    """
    # BUGFIX: the condition was inverted. A single pattern string must be
    # wrapped into a list; an iterable of patterns must be left as-is.
    # Previously a list argument was nested (glob() then received a list)
    # and a string argument was iterated character by character.
    if not _is_iterable_not_string(files):
        files = [files]

    file_names = []
    for pattern in files:
        new_file_names = glob(pattern)

        if (not allow_empty) and len(new_file_names) == 0:
            raise ValueError(("Pattern %r did not match any file. "
                              "Use allow_empty=True to avoid exception,") % pattern)

        file_names += new_file_names

    return SourceFileList(source_files=[
        self.add_source_file(file_name, library_name, preprocessors,
                             include_dirs, defines)
        for file_name in file_names])

def add_source_file(self, file_name, library_name,
                    preprocessors=None, include_dirs=None, defines=None):
    """
    Add source file to library

    :param file_name: The name of the file
    :param library_name: The name of the library to add the file into
    :param include_dirs: A list of include directories
    :param defines: A dictionary containing Verilog defines to be set
    :returns: The :class:`.SourceFile` which was added

    :example:

    .. code-block:: python

       prj.add_source_file("file.vhd", "lib")

    """
    file_type = file_type_of(file_name)

    if file_type == "verilog":
        # Verilog files always get the builtin include directory appended.
        include_dirs = include_dirs if include_dirs is not None else []
        include_dirs = add_verilog_include_dir(include_dirs)

    file_name = self._preprocess(library_name, abspath(file_name), preprocessors)
    return SourceFile(self._project.add_source_file(file_name,
                                                    library_name,
                                                    file_type=file_type,
                                                    include_dirs=include_dirs,
                                                    defines=defines),
                      self._project,
                      self)

def _preprocess(self, library_name, file_name, preprocessors):
    """
    Preprocess file_name within library_name using explicit preprocessors.

    If preprocessors is None then use the implicit globally defined
    processors. Returns the (possibly new) file name to compile.
    """
    # @TODO dependency checking etc...

    if preprocessors is None:
        preprocessors = [self._location_preprocessor, self._check_preprocessor]
        preprocessors = [p for p in preprocessors if p is not None]
        preprocessors = self._external_preprocessors + preprocessors

    if len(preprocessors) == 0:
        # Nothing to do; compile the original file directly.
        return file_name

    code = ostools.read_file(file_name)
    for preprocessor in preprocessors:
        code = preprocessor.run(code, basename(file_name))

    pp_file_name = join(self._preprocessed_path, library_name, basename(file_name))

    # Avoid clobbering an existing preprocessed file with the same base
    # name by prefixing an increasing index until the name is free.
    idx = 1
    while ostools.file_exists(pp_file_name):
        LOGGER.debug("Preprocessed file exists '%s', adding prefix", pp_file_name)
        pp_file_name = join(self._preprocessed_path, library_name,
                            "%i_%s" % (idx, basename(file_name)))
        idx += 1

    ostools.write_file(pp_file_name, code)
    return pp_file_name

def add_preprocessor(self, preprocessor):
    """
    Add a custom preprocessor to be used on all files,
    must be called before adding any files.
    """
    self._external_preprocessors.append(preprocessor)

def enable_location_preprocessing(self, additional_subprograms=None):
    """
    Enable location preprocessing, must be called before adding any files.

    :param additional_subprograms: Extra subprogram names to preprocess
    """
    preprocessor = LocationPreprocessor()
    if additional_subprograms is not None:
        for subprogram in additional_subprograms:
            preprocessor.add_subprogram(subprogram)
    self._location_preprocessor = preprocessor

def enable_check_preprocessing(self):
    """
    Enable check preprocessing, must be called before adding any files.
    """
    self._check_preprocessor = CheckPreprocessor()

def main(self):
    """
    Run vunit main function and exit.

    This is the top-level error boundary: known error types exit with
    status 1, unexpected exceptions are printed before exiting.
    """
    try:
        all_ok = self._main()
    except KeyboardInterrupt:
        # Use sys.exit rather than the site-injected exit() builtin,
        # which is not guaranteed to exist in library code.
        sys.exit(1)
    except CompileError:
        sys.exit(1)
    except TestScannerError:
        sys.exit(1)
    except SystemExit:
        sys.exit(1)
    except:  # pylint: disable=bare-except
        traceback.print_exc()
        sys.exit(1)

    if (not all_ok) and (not self._exit_0):
        sys.exit(1)

    sys.exit(0)

def _main(self):
    """
    Base vunit main function without performing exit.

    :returns: True when all tests passed
    """
    if self._list_only:
        return self._main_list_only()

    # NOTE(review): self._list_files_only is not set in the __init__
    # visible here -- confirm it is assigned before _main runs.
    if self._list_files_only:
        return self._main_list_files_only()

    if self._compile_only:
        return self._main_compile_only()

    simulator_if = self._create_simulator_if()
    test_cases = self._create_tests(simulator_if)
    self._compile(simulator_if)

    start_time = ostools.get_time()
    report = TestReport(printer=self._printer)
    try:
        self._run_test(test_cases, report)
        simulator_if.post_process(self._simulator_factory.simulator_output_path)
    except KeyboardInterrupt:
        print()
        LOGGER.debug("_main: Caught Ctrl-C shutting down")
    finally:
        del test_cases
        del simulator_if

    report.set_real_total_time(ostools.get_time() - start_time)
    self._post_process(report)
    return report.all_ok()

def _main_list_only(self):
    """
    Main function when only listing test cases
    """
    simulator_if = None
    test_suites = self._create_tests(simulator_if)

    for test_suite in test_suites:
        for name in test_suite.test_cases:
            print(name)
    print("Listed %i tests" % test_suites.num_tests())
    return True

def _main_list_files_only(self):
    """
    Main function when only listing files
    """
    files = self.get_compile_order()
    for source_file in files:
        print("%s, %s" % (source_file.library.name, source_file.name))
    print("Listed %i files" % len(files))
    return True

def _main_compile_only(self):
    """
    Main function when only compiling
    """
    simulator_if = self._create_simulator_if()
    self._compile(simulator_if)
    return True

def _create_output_path(self, clean):
    """
    Create or re-create the output path if necessary.

    :param clean: When True the output path is wiped and re-created
    """
    if clean:
        ostools.renew_path(self._output_path)
    elif not exists(self._output_path):
        os.makedirs(self._output_path)

    # The preprocessed files are always regenerated, so always renew.
    ostools.renew_path(self._preprocessed_path)

def _create_simulator_if(self):
    """
    Create a simulator interface instance
    """
    return self._simulator_factory.create()

@property
def vhdl_standard(self):
    # The VHDL standard in use, e.g. '2008'.
    return self._vhdl_standard

@property
def _preprocessed_path(self):
    # Directory where preprocessed source files are written.
    return join(self._output_path, "preprocessed")

@property
def codecs_path(self):
    # Directory where generated com codecs are written.
    return join(self._output_path, "codecs")

@property
def use_debug_codecs(self):
    # Whether human-readable debug codecs are used for com.
    return self._use_debug_codecs

def _create_tests(self, simulator_if):
    """
    Create the test suites by scanning the project.

    :returns: The filtered test list
    """
    # NOTE(review): self._elaborate_only is not set in the __init__
    # visible here (it is passed to TestConfiguration instead) -- confirm.
    scanner = TestScanner(simulator_if,
                          self._configuration,
                          elaborate_only=self._elaborate_only)
    test_list = scanner.from_project(self._project, entity_filter=self._tb_filter)

    if test_list.num_tests() == 0:
        LOGGER.warning("Test scanner found no test benches using current filter rule:\n%s",
                       self._tb_filter.__doc__)

    test_list.keep_matches(self._test_filter)
    return test_list

def _compile(self, simulator_if):
    """
    Compile entire project
    """
    # NOTE(review): self._keep_compiling is not set in the __init__
    # visible here -- confirm it is assigned before compilation.
    simulator_if.compile_project(self._project, self._vhdl_standard,
                                 continue_on_error=self._keep_compiling)

def _run_test(self, test_cases, report):
    """
    Run the test suites and fill in the report
    """
    runner = TestRunner(report,
                        join(self._output_path, "tests"),
                        verbose=self._verbose,
                        num_threads=self._num_threads)
    runner.run(test_cases)

def _post_process(self, report):
    """
    Print the report to stdout and optionally write it to an XML file
    """
    report.print_str()

    if self._xunit_xml is not None:
        xml = report.to_junit_xml_str()
        ostools.write_file(self._xunit_xml, xml)

def add_builtins(self, library_name="vunit_lib", mock_lang=False, mock_log=False):
    """
    Add vunit VHDL builtin libraries

    :param library_name: The name of the library to add the builtins into
    :param mock_lang: Use mocked version of the lang package
    :param mock_log: Use mocked version of the log package
    """
    library = self.add_library(library_name)
    supports_context = self._simulator_factory.supports_vhdl_2008_contexts()
    add_vhdl_builtins(library, self._vhdl_standard, mock_lang, mock_log,
                      supports_context=supports_context)

def add_com(self, library_name="vunit_lib", use_debug_codecs=None):
    """
    Add communication package

    :param use_debug_codecs: Use human readable debug codecs

       `None`: Use command line argument setting

       `False`: Never use debug codecs

       `True`: Always use debug codecs
    """
    if not self._project.has_library(library_name):
        library = self.add_library(library_name)
    else:
        library = self.library(library_name)

    # An explicit argument overrides the command line setting.
    if use_debug_codecs is not None:
        self._use_debug_codecs = use_debug_codecs

    supports_context = self._simulator_factory.supports_vhdl_2008_contexts()
    add_com(library, self._vhdl_standard,
            use_debug_codecs=self._use_debug_codecs,
            supports_context=supports_context)

def add_array_util(self, library_name="vunit_lib"):
    """
    Add array utility package
    """
    library = self.library(library_name)
    add_array_util(library, self._vhdl_standard)

def add_osvvm(self, library_name="osvvm"):
    """
    Add osvvm library
    """
    if not self._project.has_library(library_name):
        library = self.add_library(library_name)
    else:
        library = self.library(library_name)
    add_osvvm(library)

def get_compile_order(self, source_files=None):
    """
    Get the compile order of all or specific source files and their dependencies

    :param source_files: A list of :class:`.SourceFile` objects or `None` meaing all
    :returns: A list of :class:`.SourceFile` objects in compile order.
    """
    if source_files is None:
        source_files = self.get_source_files()

    # Unwrap the facade objects to the underlying project source files.
    target_files = [source_file._source_file  # pylint: disable=protected-access
                    for source_file in source_files]
    source_files = self._project.get_dependencies_in_compile_order(target_files)
    return SourceFileList([SourceFile(source_file, self._project, self)
                           for source_file in source_files])