def test(parse_builtins):  # type: (...) -> None
    """Builtins must be parsed and an unresolvable dependency reported."""
    scratch_dir = tempfile.mkdtemp()
    server = DummyServer(Path(scratch_dir))
    with tempfile.NamedTemporaryFile(suffix=".vhd") as filename:
        diagnostics = server.getMessagesWithText(
            Path(filename.name),
            "library lib; use lib.pkg.all; library builtin; use builtin.foo;",
        )
        parse_builtins.assert_called()
        logIterable("Diags", diagnostics, _logger.info)
        expected_dependency = RequiredDesignUnit(
            name=Identifier("pkg"),
            library=Identifier("lib"),
            owner=Path(filename.name),
            locations=[Location(0, 17)],
        )
        it.assertCountEqual(
            diagnostics,
            [UnresolvedDependency(expected_dependency, Location(0, 17))],
        )
def test_GetDependencies(self):
    """Dependencies extracted from the fixture file match expectations."""
    deps = [
        IncludedPath(
            owner=Path(self.filename),
            name=VerilogIdentifier("some/include"),
            locations=(Location(line=0, column=8), ),
        )
    ]
    # Package/scope references are only reported for SystemVerilog sources
    if self.filetype in ("sv", "svh"):
        for unit_name, line, column in (
            ("some_package", 2, 7),
            ("another_package", 3, 8),
            ("foo", 12, 14),
        ):
            deps.append(
                RequiredDesignUnit(
                    owner=Path(self.filename),
                    name=VerilogIdentifier(unit_name),
                    library=None,
                    locations=(Location(line=line, column=column), ),
                )
            )
    self.assertCountEqual(self.source.getDependencies(), deps)
def test_ExpandWithRecursiveWildcardsAndRelativePaths(self):
    # type: (...) -> Any
    """
    Recursive wildcards are only available on Python3, expected result
    will be different but we're not porting it back
    """
    config = {"sources": [p.join("**", "*.sv")]}
    _logger.info("config:\n%s", pformat(config))
    if six.PY3:
        # '**' matches any depth on Python 3
        found = (
            self.join("some_sv.sv"),
            self.join("dir_0", "some_sv.sv"),
            self.join("dir_0", "dir_1", "some_sv.sv"),
            self.join("dir_2", "some_sv.sv"),
            self.join("dir_2", "dir_3", "some_sv.sv"),
        )
    else:
        # On Python 2, '**' behaves like a single-level wildcard
        found = (
            self.join("dir_0", "some_sv.sv"),
            self.join("dir_2", "some_sv.sv"),
        )
    expected = (SourceEntry(Path(name), None, (), (), ()) for name in found)
    self.assertCountEqual(flattenConfig(config, root_path=self.base_path), expected)
def _getServerByProjectFile(project_file):
    # type: (Optional[str]) -> Server
    """
    Returns the Server object that corresponds to the given project file.
    If the object doesn't exists yet it gets created and then returned
    """
    _logger.debug("project_file: %s", project_file)
    # Clients may send the literal string "none" to mean "no project file"
    if isinstance(project_file, str) and project_file.lower() == "none":
        project_file = None

    # If there's no project file to use, create a temporary path to use
    if project_file is None:
        root_dir = Path(tempfile.mkdtemp())
    else:
        root_dir = Path(p.dirname(project_file))

    if root_dir not in servers:
        _logger.info("Creating server for %s (root=%s)", project_file, root_dir)
        try:
            server = Server(root_dir=root_dir)
            if project_file is not None:
                server.setConfig(project_file, origin=ConfigFileOrigin.user)
            _logger.debug("Created new project server for '%s'", project_file)
        except (IOError, OSError):
            # Setting up the real checker failed; fall back to a bare server
            _logger.info("Failed to create checker, reverting to fallback")
            server = Server(None)
        servers[root_dir] = server

    return servers[root_dir]
def test_LintFileOnOpening(self):
    """Opening a file must trigger a lint via getMessagesByPath."""
    source = p.join(TEST_PROJECT, "basic_library", "clk_en_generator.vhd")
    fake_diags = [CheckerDiagnostic(filename=Path(source), text="some text")]
    with patch(
        "hdl_checker.lsp.Server.getMessagesByPath", return_value=fake_diags
    ) as meth:
        self.assertCountEqual(
            self._checkLintFileOnOpen(source),
            [lsp.checkerDiagToLspDict(CheckerDiagnostic(text="some text"))],
        )
        meth.assert_called_once_with(Path(source))
def test():
    """Opening a file must trigger a lint via getMessagesByPath."""
    source = p.join(TEST_PROJECT, "another_library", "foo.vhd")
    fake_diags = [CheckerDiagnostic(filename=Path(source), text="some text")]
    with patch(
        "hdl_checker.lsp.Server.getMessagesByPath", return_value=fake_diags
    ) as meth:
        it.assertCountEqual(
            checkLintFileOnOpen(source),
            [lsp.checkerDiagToLspDict(CheckerDiagnostic(text="some text"))],
        )
        meth.assert_called_once_with(Path(source))
def _expand(config, ref_path):
    # type: (Dict[str, Any], str) -> Iterable[SourceEntry]
    """
    Expands the sources defined in the config dict into a list of tuples
    """
    # Collect per-filetype build flags as (single, dependencies, global)
    flags = {}
    for filetype in FileType:
        scopes = config.pop(filetype.value, {}).pop("flags", {})
        flags[filetype] = (
            scopes.get(BuildFlagScope.single.value, ()),
            scopes.get(BuildFlagScope.dependencies.value, ()),
            scopes.get(BuildFlagScope.all.value, ()),
        )

    sources = config.pop("sources", None)

    # If no sources were defined, search ref_path
    if sources is None:
        _logger.debug("No sources found, will search %s", ref_path)
        sources = (x.name for x in findRtlSourcesByPath(Path(ref_path)))

    for entry in sources:
        source = JsonSourceEntry.make(entry)
        # Relative path expressions are resolved against ref_path
        path_expr = source.path_expr
        if not p.isabs(path_expr):
            path_expr = p.join(ref_path, path_expr)

        for match in glob(path_expr):
            path = Path(match, ref_path)
            try:
                filetype = FileType.fromPath(path)
            except UnknownTypeExtension:
                _logger.warning("Won't include non RTL file '%s'", path)
                continue

            single, dependencies, global_ = flags[filetype]
            yield SourceEntry(
                path,
                source.library,
                tuple(global_) + tuple(single) + tuple(source.flags),
                tuple(global_) + tuple(dependencies),
            )
def test():
    """Commenting out the constant declaration silences the diagnostic."""
    filename = Path(p.join(TEST_PROJECT, "another_library", "foo.vhd"))
    # Read via a context manager so the handle is closed deterministically
    # (the original leaked the file object returned by open())
    with open(str(filename), "r") as fd:
        code = fd.read().split("\n")
    # Comment out line 28 (0-based) where ADDR_WIDTH is declared
    code[28] = "-- " + code[28]
    writeListToFile(str(filename), code)
    diagnostics = it.project.getMessagesByPath(filename)
    try:
        it.assertNotIn(
            ObjectIsNeverUsed(
                object_type="constant",
                object_name="ADDR_WIDTH",
                line_number=28,
                column_number=13,
            ),
            diagnostics,
        )
    finally:
        # Remove the comment we added
        code[28] = code[28][3:]
        writeListToFile(str(filename), code)
def __init__(self, root_dir):
    # type: (Path) -> None
    """Set up paths, database and fallback builder, then recover any cache."""
    # Root dir is the absolute path to use when any path passed on is
    # relative
    self.root_dir = root_dir
    # Work dir is the directory that HDL Checker uses as a scratch
    # pad, everything within it may be deleted or changed
    self.work_dir = Path(p.join(str(self.root_dir), WORK_PATH))

    self._lock = RLock()
    self.config_file = None  # type: Optional[WatchedFile]
    self._database = Database()
    # Fallback builder is always available; a real one may replace it later
    # (presumably via setConfig — confirm against the rest of the class)
    self._builder = Fallback(self.work_dir, self._database)

    self._setupIfNeeded()
    self._recoverCacheIfPossible()

    # Use this to know which methods should be cache
    self._cached_methods = {
        getattr(self, x)
        for x in dir(self)
        if hasattr(getattr(self, x), "cache_clear")
    }

    if six.PY2:
        self._handleUiWarning(_PYTHON_27_WARNING_MSG)
def test_converting_to_lsp(self, diag_type, severity):
    # type: (...) -> Any
    """A CheckerDiagnostic maps onto the equivalent LSP diagnostic dict."""
    _logger.info("Running %s and %s", diag_type, severity)
    diag = CheckerDiagnostic(
        checker="hdl_checker test",
        text="some diag",
        filename=Path("filename"),
        line_number=0,
        column_number=0,
        error_code="error code",
        severity=diag_type,
    )
    expected = {
        "source": "hdl_checker test",
        "range": {
            "start": {"line": 0, "character": 0},
            "end": {"line": 0, "character": 0},
        },
        "message": "some diag",
        "severity": severity,
        "code": "error code",
    }
    self.assertEqual(lsp.checkerDiagToLspDict(diag), expected)
def setup():
    # type: (...) -> Any
    """Write a sample msim project file for the tests to parse."""
    it.path = Path(tempfile.mktemp())
    contents = toBytes("""
single_build_flags[vhdl] = -single_build_flag_0
dependencies_build_flags[vhdl] = --vhdl-batch
global_build_flags[vhdl] = -globalvhdl -global-vhdl-flag
single_build_flags[verilog] = -single_build_flag_0
dependencies_build_flags[verilog] = --verilog-batch
global_build_flags[verilog] = -globalverilog -global-verilog-flag
single_build_flags[systemverilog] = -single_build_flag_0
dependencies_build_flags[systemverilog] = --systemverilog-batch
global_build_flags[systemverilog] = -globalsystemverilog -global-systemverilog-flag
builder = msim
target_dir = .build
vhdl work sample_file.vhd -sample_file_flag
vhdl work sample_package.vhdl -sample_package_flag
vhdl work TESTBENCH.VHD -build-in some way
vhdl lib {0}
verilog work foo.v -some-flag some value
systemverilog work bar.sv some sv flag
""".format(SOME_ABS_PATH))

    with open(it.path.name, "wb") as fd:
        fd.write(contents)
        fd.flush()
def _getCacheFilename(self):
    # type: () -> Path
    """
    The cache file name will always be inside the path returned by
    self._getWorkingPath and defaults to cache.json
    """
    # Path(name, base) — resolves CACHE_NAME relative to the work dir
    # (presumably; confirm against the project's Path implementation)
    return Path(CACHE_NAME, self.work_dir)
def getMessagesByPath():
    # type: (...) -> Any
    """
    Get messages for a given projec_file/path pair
    """
    forms = bottle.request.forms  # pylint: disable=no-member
    project_file = forms.get("project_file")
    path = Path(forms.get("path"))
    content = forms.get("content", None)

    _logger.debug(
        "Getting messages for '%s', '%s', %s",
        project_file,
        path,
        "no content" if content is None else "with content",
    )

    server = _getServerByProjectFile(project_file)
    if content is None:
        messages = server.getMessagesByPath(path)
    else:
        messages = server.getMessagesWithText(path, content)

    _logger.info("messages: %s", [x.toDict() for x in messages])

    # Messages at this point need to be serializable so that bottle can send
    # them over
    return {"messages": tuple(x.toDict() for x in messages)}
def _getBuilderMessages(self, path):
    # type: (Path) -> Iterable[CheckerDiagnostic]
    """
    Builds the given path taking care of recursively building its
    dependencies first
    """
    _logger.debug("Building '%s'", str(path))

    path = Path(path, self.root_dir)
    # Build every dependency first; only their errors are forwarded
    # (warnings from dependencies would be noise for the checked file)
    for dep_library, dep_path in self.database.getBuildSequence(
            path, self.builder.builtin_libraries):
        for record in self._buildAndHandleRebuilds(
                dep_path, dep_library, scope=BuildFlagScope.dependencies):
            if record.severity in (DiagType.ERROR, DiagType.STYLE_ERROR):
                yield record

    _logger.debug("Built dependencies, now actually building '%s'", str(path))

    # Fall back to the default library when the database has none for path
    library = self.database.getLibrary(path)
    for record in self._buildAndHandleRebuilds(
            path,
            library if library is not None else DEFAULT_LIBRARY,
            scope=BuildFlagScope.single,
            forced=True,
    ):
        yield record
def setUpClass(cls):
    """Create a Verilog/SystemVerilog fixture file and parse it."""
    cls.filename = NamedTemporaryFile(suffix="." + cls.filetype).name
    fixture_lines = [
        '`include "some/include"',
        "",
        "import some_package::*;",
        "import another_package :: some_name ;",
        "",
        "module clock_divider",
        " #(parameter DIVISION = 5)",
        " (// Usual ports",
        " input clk,",
        " input rst,",
        " // Output clock divided",
        " output clk_div);",
        " localparam foo::bar = std::randomize(cycles);",
        "endmodule",
        "",
        "package \\m$gPkg! ;",
        " integer errCnt = 0;",
        " integer warnCnt = 0;",
        "endpackage",
        "",
    ]
    writeListToFile(cls.filename, fixture_lines)
    cls.source = VerilogParser(Path(cls.filename))
def test():
    """The unused-signal diagnostic must be reported for foo.vhd."""
    filename = p.join(TEST_PROJECT, "another_library", "foo.vhd")
    diagnostics = it.project.getMessagesByPath(Path(filename))
    expected = ObjectIsNeverUsed(
        filename=Path(filename),
        line_number=28,
        column_number=11,
        object_type="signal",
        object_name="neat_signal",
    )
    it.assertIn(expected, diagnostics)
    it.assertTrue(it.project.database.paths)
def test():
    """Rebuilding a project drops its server entry and cleans it up."""
    project_file = _path("hello.prj")
    # Use a context manager so the handle is closed deterministically
    # (the original discarded the file object returned by open())
    with open(project_file, "w") as fd:
        fd.write("")

    server = MagicMock()
    servers = MagicMock()
    servers.__getitem__.side_effect = {
        Path(p.dirname(project_file)): server
    }.__getitem__

    with patch.object(hdl_checker.handlers, "servers", servers):
        it.app.post("/rebuild_project", {"project_file": project_file})

        # Check the object was removed from the servers list
        servers.__delitem__.assert_called_once_with(
            Path(p.dirname(project_file)))
        # Check the original server cleaned things up
        server.clean.assert_called_once()
def test_FilterGitPaths(self):
    # type: (...) -> Any
    """Git-ignored paths are filtered out, untracked/out-of-repo kept."""
    self.assertTrue(isGitRepo(Path(self.base_path)))

    candidates = (Path(x) for x in self.paths)
    result = list(filterGitIgnoredPaths(Path(self.base_path), candidates))
    _logger.info("Result: %s", result)

    kept = (
        self.join("regular_file"),
        self.join("untracked_file"),
        self.out_of_repo,
    )
    self.assertCountEqual(result, (Path(x) for x in kept))
def test(handle_ui_info):
    """An empty config file must produce the 'no sources' UI message."""
    path = tempfile.mkdtemp()
    config = p.join(path, "config.json")
    source = p.join(path, "source.vhd")
    # Make sure the files exist; use context managers so the handles are
    # closed deterministically (the original leaked both file objects)
    for name in (config, source):
        with open(name, "w") as fd:
            fd.write("")

    project = DummyServer(_Path(path))
    project.setConfig(Path(config))
    # Get messages of anything to trigger reading the config
    project.getMessagesByPath(Path(source))

    handle_ui_info.assert_called_once_with("No sources were added")
    removeIfExists(path)
def __init__(
    self,
    design_units,  # type: Iterable[Dict[str, str]]
    library=None,  # type: str
    dependencies=None,  # type: Iterable[MockDep]
    filename=None,  # type: Optional[str]
):
    """Mock source whose design units and dependencies are given upfront."""
    self._design_units = list(design_units or [])

    # Derive a filename from the first design unit when none was given
    if filename is not None:
        self._filename = Path(p.join(self.base_path, filename))
    else:
        library = "lib_not_set" if library is None else library
        self._filename = Path(
            p.join(
                self.base_path,
                library,
                "_{}.vhd".format(self._design_units[0]["name"]),
            ))

    self.filetype = FileType.fromPath(self.filename)
    self.flags = []  # type: ignore
    self.library = library

    self._dependencies = []  # type: List[RequiredDesignUnit]
    for spec in dependencies or []:
        # A dependency is either (name, ) or (library, name)
        name = spec[0]
        lib = "work"
        try:
            lib, name = spec  # type: ignore
        except ValueError:
            pass
        self._dependencies.append(
            RequiredDesignUnit(
                self._filename,
                Identifier(name, False),
                Identifier(lib, False),
            ))

    self._createMockFile()
def getVunitSources(builder):
    # type: (AnyValidBuilder) -> Iterable[Tuple[Path, Optional[str], BuildFlags]]
    "Gets VUnit sources according to the file types supported by builder"
    if not foundVunit():
        return

    _logger.debug("VUnit installation found")
    sources = []  # type: List[vunit.source_file.SourceFile]

    # Prefer VHDL VUnit
    if FileType.vhdl in builder.file_types:
        sources += _getSourcesFromVUnitModule(VUnit_VHDL)
        _logger.debug("Added VUnit VHDL files")

    supports_sv = FileType.systemverilog in builder.file_types
    if supports_sv:
        _logger.debug("Builder supports Verilog, adding VUnit Verilog files")
        builder.addExternalLibrary(FileType.verilog, Identifier("vunit_lib", False))
        sources += _getSourcesFromVUnitModule(VUnit_Verilog)

    if not sources:
        _logger.info("Vunit found but no file types are supported by %s", builder)
        return

    for src in sources:
        # Get extra flags for building VUnit sources
        try:
            extra_flags = _VUNIT_FLAGS[BuilderName(
                builder.builder_name)][src.vhdl_standard]
        except KeyError:
            extra_flags = tuple()

        yield Path(p.abspath(src.name)), src.library.name, extra_flags

    if supports_sv:
        # VUnit's SystemVerilog headers are needed as well
        for header in findRtlSourcesByPath(Path(p.dirname(vunit.__file__))):
            if _isHeader(header):
                yield Path(header), None, ()
def test_RaisesSanityError(self, builder_class):
    # type: (...) -> Any
    """Every real builder must fail its sanity check in this environment."""
    if builder_class is Fallback:
        raise self.skipTest("Fallback won't raise any exception")

    _logger.info("Testing builder %s", builder_class.builder_name)

    target = Path(
        p.join(TEST_TEMP_PATH, "_%s" % builder_class.builder_name))
    with self.assertRaises(SanityCheckError):
        _ = builder_class(target, None)
def _gitLsFiles(path_to_repo, recurse_submodules=False):
    # type: (Path, bool) -> Iterable[Path]
    "Lists files from a git repository"
    cmd = ["git", "-C", path_to_repo.abspath, "ls-files"]
    if recurse_submodules:
        cmd.append("--recurse-submodules")

    output = subp.check_output(cmd, stderr=subp.STDOUT).decode()
    for line in output.split("\n"):
        # Skip the trailing empty entry after the final newline
        if line:
            yield Path(line, path_to_repo.abspath)
def test_DependencyInfoForPathNotFound(self):
    """Hover info reports when the dependency's source can't be found."""
    owner = Path(p.join(TEST_PROJECT, "another_library", "foo.vhd"))
    dependency = RequiredDesignUnit(
        name=Identifier("clock_divider"),
        library=Identifier("basic_library"),
        owner=owner,
        locations=(),
    )
    info = self.server._getDependencyInfoForHover(dependency)
    self.assertEqual(
        info,
        "Couldn't find a source defining 'basic_library.clock_divider'",
    )
def test_ReportDependencyInfo(self):
    """Hover info reports the resolved path and library of a dependency."""
    owner = Path(p.join(TEST_PROJECT, "another_library", "foo.vhd"))
    dependency = RequiredDesignUnit(
        name=Identifier("clock_divider"),
        library=Identifier("basic_library"),
        owner=owner,
        locations=(),
    )
    info = self.server._getDependencyInfoForHover(dependency)
    self.assertEqual(info, 'Path "some_path", library "some_library"')
def test():
    """Diagnostics are found even when a relative path is passed in."""
    filename = p.relpath(
        p.join(TEST_PROJECT, "another_library", "foo.vhd"),
        str(it.project.root_dir),
    )
    it.assertFalse(p.isabs(filename))

    diagnostics = it.project.getMessagesByPath(Path(filename))
    expected = ObjectIsNeverUsed(
        filename=Path(p.join(TEST_PROJECT, "another_library", "foo.vhd")),
        line_number=28,
        column_number=11,
        object_type="signal",
        object_name="neat_signal",
    )
    it.assertIn(expected, diagnostics)
def test_SystemverilogOnlyBuilder(self, get_source_files, find_rtl_sources):
    """SV-only builders get VUnit VHDL sources plus SV headers."""
    get_source_files.side_effect = [[
        SourceFile(name=_path("path_0.vhd"), library="libary_0"),
        SourceFile(name=_path("path_1.vhd"), library="libary_1"),
    ]]
    find_rtl_sources.return_value = [
        Path(_path("some_header.vh")),
        Path(_path("some_header.svh")),
    ]

    builder = MagicMock()
    builder.builder_name = "msim"
    builder.file_types = {FileType.systemverilog}

    self.assertTrue(foundVunit(), "Need VUnit for this test")

    # Should only have VHDL files
    sources = list(getVunitSources(builder))

    get_source_files.assert_called_once()
    find_rtl_sources.assert_called_once()

    expected = {
        (Path(_path("path_0.vhd")), "libary_0", ("-2008", )),
        (Path(_path("path_1.vhd")), "libary_1", ("-2008", )),
        (Path(_path("some_header.vh")), None, ()),
        (Path(_path("some_header.svh")), None, ()),
    }
    self.assertCountEqual(sources, expected)
def rebuildProject():
    # type: (...) -> Any
    """
    Rebuilds the current project
    """
    _logger.info("Rebuilding project")
    project_file = bottle.request.forms.get("project_file")  # pylint: disable=no-member
    server = _getServerByProjectFile(project_file)
    server.clean()
    _logger.debug("Removing and recreating server object")
    # Drop the cached server so the next lookup recreates it from scratch.
    # NOTE(review): p.dirname(project_file) assumes project_file is a real
    # path here (not None/"none") — confirm against the client contract
    root_dir = Path(p.dirname(project_file))
    del servers[root_dir]
    _getServerByProjectFile(project_file)
def test_IncludeFolderShouldSearch(self):
    # type: (...) -> None
    """Including a folder triggers a recursive search for RTL sources."""
    folder = mkdtemp()

    with patch("hdl_checker.parser_utils.findRtlSourcesByPath") as meth:
        meth.return_value = ["sources.vhd"]
        result = list(getIncludedConfigs((folder, ), self.base_path))
        meth.assert_called_once_with(Path(folder))

    _logger.info("Result:\n%s", pformat(result))
    self.assertCountEqual(
        result, [(folder, {"sources": ("sources.vhd", )})])
def test_ParseXvhdlResult(self, path):
    # type: (...) -> Any
    """XVHDL output lines are parsed into BuilderDiag records."""
    if not isinstance(self.builder, XVHDL):
        raise unittest2.SkipTest("XVHDL only test")

    # Error record: 1-based line 12 becomes 0-based line 11
    error_line = "ERROR: [VRFC 10-1412] syntax error near ) [%s:12]" % path
    self.assertEqual(
        list(self.builder._makeRecords(error_line)),
        [
            BuilderDiag(
                builder_name=self.builder_name,
                text="syntax error near )",
                filename=Path(path),
                line_number=11,
                error_code="VRFC 10-1412",
                severity=DiagType.ERROR,
            )
        ],
    )

    # Warning record: 1-based line 119 becomes 0-based line 118
    warning_line = (
        "WARNING: [VRFC 10-1256] possible infinite loop; process "
        "does not have a wait statement [%s:119]" % path)
    self.assertEqual(
        list(self.builder._makeRecords(warning_line)),
        [
            BuilderDiag(
                builder_name=self.builder_name,
                text=
                "possible infinite loop; process does not have a wait statement",
                filename=Path(path),
                line_number=118,
                error_code="VRFC 10-1256",
                severity=DiagType.WARNING,
            )
        ],
    )