Example 1
def preprocess(self, code, file_name="fn.v", include_paths=None):
    """
    Tokenize & Preprocess
    """
    tokenizer = VerilogTokenizer()
    preprocessor = VerilogPreprocessor(tokenizer)
    write_file(file_name, code)
    tokens = tokenizer.tokenize(code, file_name=file_name)
    defines = {}
    included_files = []
    with mock.patch("vunit.parsing.verilog.preprocess.LOGGER", autospec=True) as logger:
        tokens = preprocessor.preprocess(tokens, defines, include_paths, included_files)
    return PreprocessResult(self, tokens, defines,
                            [file_name for _, file_name in included_files if file_name is not None],
                            logger)
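For orientation, a minimal sketch of how this helper might be called from a test; the `include directive, file name and include path are illustrative, not taken from the source:

# Hypothetical call, assuming this method lives in a TestCase-style class
# alongside the vunit test helpers (PreprocessResult, write_file, mock).
result = self.preprocess('`include "included.v"',
                         include_paths=["include/dir"])
# PreprocessResult bundles the test case, the preprocessed tokens, the
# collected defines, the included file names and the mocked LOGGER.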
Example 2
def tokenize(code, file_name="fn.v"):
    """
    Tokenize
    """
    tokenizer = VerilogTokenizer()
    return tokenizer.tokenize(code, file_name=file_name)
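A quick usage sketch of this helper; the input is hypothetical and the kind/value attributes follow the token API exercised by the tests below:

tokens = list(tokenize("`define foo"))
for token in tokens:
    # Every token exposes kind and value, as used by TestVerilogTokenizer.check.
    print(token.kind, repr(token.value))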
Example 3
class TestVerilogTokenizer(TestCase):
    """
    Test of the Verilog tokenizer
    """

    def test_tokenizes_define(self):
        self.check("`define name",
                   [PREPROCESSOR(value="define"),
                    WHITESPACE(value=" "),
                    IDENTIFIER(value="name")])

    def test_tokenizes_string_literal(self):
        self.check('"hello"',
                   [STRING(value='hello')])

        self.check('"hel""lo"',
                   [STRING(value='hel'),
                    STRING(value='lo')])

        self.check(r'"h\"ello"',
                   [STRING(value=r'h\"ello')])

        self.check(r'"h\"ello"',
                   [STRING(value=r'h\"ello')])

        self.check(r'"\"ello"',
                   [STRING(value=r'\"ello')])

        self.check(r'"\"\""',
                   [STRING(value=r'\"\"')])

    def test_tokenizes_single_line_comment(self):
        self.check("// asd",
                   [COMMENT(value=" asd")])

        self.check("asd// asd",
                   [IDENTIFIER(value="asd"),
                    COMMENT(value=" asd")])

        self.check("asd// asd //",
                   [IDENTIFIER(value="asd"),
                    COMMENT(value=" asd //")])

    def test_tokenizes_multi_line_comment(self):
        self.check("/* asd */",
                   [MULTI_COMMENT(value=" asd ")])

        self.check("/* /* asd */",
                   [MULTI_COMMENT(value=" /* asd ")])

        self.check("/* /* asd */",
                   [MULTI_COMMENT(value=" /* asd ")])

        self.check("/* 1 \n 2 */",
                   [MULTI_COMMENT(value=" 1 \n 2 ")])

        self.check("/* 1 \r\n 2 */",
                   [MULTI_COMMENT(value=" 1 \r\n 2 ")])

    def test_tokenizes_semi_colon(self):
        self.check("asd;",
                   [IDENTIFIER(value="asd"),
                    SEMI_COLON(value='')])

    def test_tokenizes_newline(self):
        self.check("asd\n",
                   [IDENTIFIER(value="asd"),
                    NEWLINE(value='')])

    def test_tokenizes_comma(self):
        self.check(",",
                   [COMMA(value='')])

    def test_tokenizes_parenthesis(self):
        self.check("()",
                   [LPAR(value=''),
                    RPAR(value='')])

    def test_tokenizes_hash(self):
        self.check("#",
                   [HASH(value='')])

    def test_tokenizes_equal(self):
        self.check("=",
                   [EQUAL(value='')])

    def test_escaped_newline_ignored(self):
        self.check("a\\\nb",
                   [IDENTIFIER(value='a'),
                    IDENTIFIER(value='b')])

    def test_tokenizes_keywords(self):
        self.check("module",
                   [MODULE(value='')])
        self.check("endmodule",
                   [ENDMODULE(value='')])
        self.check("package",
                   [PACKAGE(value='')])
        self.check("endpackage",
                   [ENDPACKAGE(value='')])
        self.check("parameter",
                   [PARAMETER(value='')])
        self.check("import",
                   [IMPORT(value='')])

    def test_has_location_information(self):
        self.check("`define foo", [
            PREPROCESSOR(value="define", location=(("fn.v", (0, 6)), None)),
            WHITESPACE(value=" ", location=(("fn.v", (7, 7)), None)),
            IDENTIFIER(value="foo", location=(("fn.v", (8, 10)), None)),
        ], strip_loc=False)

    def setUp(self):
        self.tokenizer = VerilogTokenizer()

    def check(self, code, tokens, strip_loc=True):
        """
        Helper method to test tokenizer
        Tokenize code and check that it matches tokens
        optionally strip location information in comparison
        """

        def preprocess(tokens):
            if strip_loc:
                return [token.kind(token.value, None) for token in tokens]
            else:
                return tokens

        self.assertEqual(preprocess(list(self.tokenizer.tokenize(code, "fn.v"))),
                         tokens)
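As test_has_location_information above shows, a token location has the shape ((file_name, (start, end)), previous) with inclusive zero-based character offsets; reading the second element as a link for include/macro expansion chains is an assumption here. A small sketch using the tokenize helper from Example 2:

tokens = list(tokenize("`define foo"))
print(tokens[0].location)  # (("fn.v", (0, 6)), None)  -> the `define directive
print(tokens[2].location)  # (("fn.v", (8, 10)), None) -> the foo identifier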
Example 4
def setUp(self):
    self.tokenizer = VerilogTokenizer()
Example 5
File: parser.py Project: wzab/vunit
def __init__(self, database=None):
    self._tokenizer = VerilogTokenizer()
    self._preprocessor = VerilogPreprocessor(self._tokenizer)
    self._database = database
    self._content_cache = {}
Example 6
File: parser.py Project: wzab/vunit
class VerilogParser(object):
    """
    Parse a single Verilog file
    """
    def __init__(self, database=None):
        self._tokenizer = VerilogTokenizer()
        self._preprocessor = VerilogPreprocessor(self._tokenizer)
        self._database = database
        self._content_cache = {}

    def parse(self, file_name, include_paths=None, defines=None):
        """
        Parse Verilog code
        """

        defines = {} if defines is None else defines
        include_paths = [] if include_paths is None else include_paths
        include_paths = [dirname(file_name)] + include_paths

        cached = self._lookup_parse_cache(file_name, include_paths, defines)
        if cached is not None:
            return cached

        initial_defines = dict(
            (key, Macro(key, self._tokenizer.tokenize(value)))
            for key, value in defines.items())
        code = read_file(file_name, encoding=HDL_FILE_ENCODING)
        tokens = self._tokenizer.tokenize(code, file_name=file_name)
        included_files = []
        pp_tokens = self._preprocessor.preprocess(
            tokens,
            include_paths=include_paths,
            defines=initial_defines,
            included_files=included_files)

        included_files_for_design_file = [
            name for _, name in included_files if name is not None
        ]
        result = VerilogDesignFile.parse(pp_tokens,
                                         included_files_for_design_file)

        if self._database is None:
            return result

        self._store_result(file_name, result, included_files, defines)
        return result

    @staticmethod
    def _key(file_name):
        """
        Returns the database key for parse results of file_name
        """
        return ("CachedVerilogParser.parse(%s)" % abspath(file_name)).encode()

    def _store_result(self, file_name, result, included_files, defines):
        """
        Store parse result back into the cache
        """
        new_included_files = [(short_name, full_name,
                               self._content_hash(full_name))
                              for short_name, full_name in included_files]
        key = self._key(file_name)
        self._database[key] = self._content_hash(
            file_name), new_included_files, defines, result
        return result

    def _content_hash(self, file_name):
        """
        Hash the contents of the file
        """
        if file_name is None or not exists(file_name):
            return None
        if file_name not in self._content_cache:
            self._content_cache[file_name] = file_content_hash(
                file_name, encoding=HDL_FILE_ENCODING, database=self._database)
        return self._content_cache[file_name]

    def _lookup_parse_cache(self, file_name, include_paths, defines):
        """
        Look up cached parse results, returning None when missing or stale
        """
        # pylint: disable=too-many-return-statements

        if self._database is None:
            return None

        key = self._key(file_name)
        if key not in self._database:
            return None

        old_content_hash, old_included_files, old_defines, old_result = self._database[
            key]
        if old_defines != defines:
            return None

        if old_content_hash != self._content_hash(file_name):
            return None

        for include_str, included_file_name, last_content_hash in old_included_files:
            if last_content_hash != self._content_hash(included_file_name):
                return None

            if find_included_file(include_paths,
                                  include_str) != included_file_name:
                return None

        LOGGER.debug("Re-using cached Verilog parse results for %s", file_name)

        return old_result
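A sketch of how this parser might be driven; the file name, include path and define below are illustrative, not taken from the source:

# Hypothetical usage. With database=None the cache lookup and store are skipped.
parser = VerilogParser()
design_file = parser.parse("top.v",
                           include_paths=["rtl/include"],
                           defines={"WIDTH": "8"})
# parse() returns the result of VerilogDesignFile.parse on the preprocessed
# tokens together with the list of resolved include files.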
Example 8
class TestVerilogTokenizer(TestCase):
    """
    Test of the Verilog tokenizer
    """

    def test_tokenizes_define(self):
        self.check("`define name",
                   [PREPROCESSOR(value="define"),
                    WHITESPACE(value=" "),
                    IDENTIFIER(value="name")])

    def test_tokenizes_string_literal(self):
        self.check('"hello"',
                   [STRING(value='hello')])

        self.check('"hel""lo"',
                   [STRING(value='hel'),
                    STRING(value='lo')])

        self.check(r'"h\"ello"',
                   [STRING(value='h"ello')])

        self.check(r'"h\"ello"',
                   [STRING(value='h"ello')])

        self.check(r'"\"ello"',
                   [STRING(value='"ello')])

        self.check(r'"\"\""',
                   [STRING(value='""')])

        self.check(r'''"hi
there"''',
                   [STRING(value='hi\nthere')])

        self.check(r'''"hi\
there"''',
                   [STRING(value='hithere')])

    def test_tokenizes_single_line_comment(self):
        self.check("// asd",
                   [COMMENT(value=" asd")])

        self.check("asd// asd",
                   [IDENTIFIER(value="asd"),
                    COMMENT(value=" asd")])

        self.check("asd// asd //",
                   [IDENTIFIER(value="asd"),
                    COMMENT(value=" asd //")])

    def test_tokenizes_multi_line_comment(self):
        self.check("/* asd */",
                   [MULTI_COMMENT(value=" asd ")])

        self.check("/* /* asd */",
                   [MULTI_COMMENT(value=" /* asd ")])

        self.check("/* /* asd */",
                   [MULTI_COMMENT(value=" /* asd ")])

        self.check("/* 1 \n 2 */",
                   [MULTI_COMMENT(value=" 1 \n 2 ")])

        self.check("/* 1 \r\n 2 */",
                   [MULTI_COMMENT(value=" 1 \r\n 2 ")])

    def test_tokenizes_semi_colon(self):
        self.check("asd;",
                   [IDENTIFIER(value="asd"),
                    SEMI_COLON(value='')])

    def test_tokenizes_newline(self):
        self.check("asd\n",
                   [IDENTIFIER(value="asd"),
                    NEWLINE(value='')])

    def test_tokenizes_comma(self):
        self.check(",",
                   [COMMA(value='')])

    def test_tokenizes_parenthesis(self):
        self.check("()",
                   [LPAR(value=''),
                    RPAR(value='')])

    def test_tokenizes_hash(self):
        self.check("#",
                   [HASH(value='')])

    def test_tokenizes_equal(self):
        self.check("=",
                   [EQUAL(value='')])

    def test_escaped_newline_ignored(self):
        self.check("a\\\nb",
                   [IDENTIFIER(value='a'),
                    IDENTIFIER(value='b')])

    def test_tokenizes_keywords(self):
        self.check("module",
                   [MODULE(value='')])
        self.check("endmodule",
                   [ENDMODULE(value='')])
        self.check("package",
                   [PACKAGE(value='')])
        self.check("endpackage",
                   [ENDPACKAGE(value='')])
        self.check("parameter",
                   [PARAMETER(value='')])
        self.check("import",
                   [IMPORT(value='')])

    def test_has_location_information(self):
        self.check("`define foo", [
            PREPROCESSOR(value="define", location=(("fn.v", (0, 6)), None)),
            WHITESPACE(value=" ", location=(("fn.v", (7, 7)), None)),
            IDENTIFIER(value="foo", location=(("fn.v", (8, 10)), None)),
        ], strip_loc=False)

    def setUp(self):
        self.tokenizer = VerilogTokenizer()

    def check(self, code, tokens, strip_loc=True):
        """
        Helper method to test tokenizer
        Tokenize code and check that it matches tokens
        optionally strip location information in comparison
        """

        def preprocess(tokens):  # pylint: disable=missing-docstring
            if strip_loc:
                return [token.kind(token.value, None) for token in tokens]

            return tokens

        self.assertEqual(preprocess(list(self.tokenizer.tokenize(code, "fn.v"))),
                         tokens)
Example 11
class VerilogParser(object):
    """
    Parse a single Verilog file
    """

    def __init__(self, database=None):
        self._tokenizer = VerilogTokenizer()
        self._preprocessor = VerilogPreprocessor(self._tokenizer)
        self._database = database
        self._content_cache = {}

    def parse(self, file_name, include_paths=None, defines=None):
        """
        Parse Verilog code
        """

        defines = {} if defines is None else defines
        include_paths = [] if include_paths is None else include_paths
        include_paths = [dirname(file_name)] + include_paths

        cached = self._lookup_parse_cache(file_name, include_paths, defines)
        if cached is not None:
            return cached

        initial_defines = dict((key, Macro(key, self._tokenizer.tokenize(value)))
                               for key, value in defines.items())
        code = read_file(file_name, encoding=HDL_FILE_ENCODING)
        tokens = self._tokenizer.tokenize(code, file_name=file_name)
        included_files = []
        pp_tokens = self._preprocessor.preprocess(tokens,
                                                  include_paths=include_paths,
                                                  defines=initial_defines,
                                                  included_files=included_files)

        included_files_for_design_file = [name for _, name in included_files if name is not None]
        result = VerilogDesignFile.parse(pp_tokens, included_files_for_design_file)

        if self._database is None:
            return result

        self._store_result(file_name, result, included_files, defines)
        return result

    @staticmethod
    def _key(file_name):
        """
        Returns the database key for parse results of file_name
        """
        return ("CachedVerilogParser.parse(%s)" % abspath(file_name)).encode()

    def _store_result(self, file_name, result, included_files, defines):
        """
        Store parse result back into the cache
        """
        new_included_files = [(short_name, full_name, self._content_hash(full_name))
                              for short_name, full_name in included_files]
        key = self._key(file_name)
        self._database[key] = self._content_hash(file_name), new_included_files, defines, result
        return result

    def _content_hash(self, file_name):
        """
        Hash the contents of the file
        """
        if file_name is None or not exists(file_name):
            return None
        if file_name not in self._content_cache:
            self._content_cache[file_name] = file_content_hash(file_name,
                                                               encoding=HDL_FILE_ENCODING,
                                                               database=self._database)
        return self._content_cache[file_name]

    def _lookup_parse_cache(self, file_name, include_paths, defines):
        """
        Look up cached parse results, returning None when missing or stale
        """
        # pylint: disable=too-many-return-statements

        if self._database is None:
            return None

        key = self._key(file_name)
        if key not in self._database:
            return None

        old_content_hash, old_included_files, old_defines, old_result = self._database[key]
        if old_defines != defines:
            return None

        if old_content_hash != self._content_hash(file_name):
            return None

        for include_str, included_file_name, last_content_hash in old_included_files:
            if last_content_hash != self._content_hash(included_file_name):
                return None

            if find_included_file(include_paths, include_str) != included_file_name:
                return None

        LOGGER.debug("Re-using cached Verilog parse results for %s", file_name)

        return old_result
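Since this example repeats the parser shown above, one addition is worth noting: passing a database activates the cache round-trip implemented by _store_result and _lookup_parse_cache. A minimal sketch, assuming a plain dict can stand in for vunit's database object:

# Hypothetical: a dict-like mapping used as the parse cache.
database = {}
parser = VerilogParser(database=database)
first = parser.parse("top.v")   # parsed, then stored under _key("top.v")
second = parser.parse("top.v")  # reused when content hashes and defines match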