def test_preprocess_define_with_default_values(self):
    defines = {}
    tokens = preprocess(tokenize("`define foo(arg1, arg2=default)arg1 arg2"), defines)
    self.assertEqual(tokens, [])
    self.assertEqual(
        defines,
        {
            "foo": Macro(
                "foo",
                tokenize("arg1 arg2"),
                args=("arg1", "arg2"),
                defaults={"arg2": tokenize("default")},
            )
        },
    )

def test_preprocess_include_directive(self):
    self.write_file("include.svh", "hello hey")
    included_files = []
    tokens = preprocess(
        tokenize('`include "include.svh"'),
        include_paths=[self.output_path],
        included_files=included_files,
    )
    self.assertEqual(tokens, tokenize("hello hey"))
    self.assertEqual(included_files, [join(self.output_path, "include.svh")])

def test_preprocess_substitute_define_with_one_arg(self):
    tokens = preprocess(
        tokenize(
            """\
`define foo(arg)arg 123
`foo(hello hey)"""
        )
    )
    self.assertEqual(tokens, tokenize("hello hey 123"))

def test_preprocess_substitute_define_with_multiple_args(self):
    tokens = preprocess(
        tokenize(
            """\
`define foo(arg1, arg2)arg1,arg2
`foo(1 2, hello)"""
        )
    )
    self.assertEqual(tokens, tokenize("1 2, hello"))

def test_preprocess_substitute_define_without_args(self):
    tokens = preprocess(
        tokenize(
            """\
`define foo bar \"abc\"
`foo"""
        )
    )
    self.assertEqual(tokens, tokenize('bar "abc"'))

def test_preprocess_substitute_define_with_default_values(self):
    defines = {}
    tokens = preprocess(
        tokenize(
            """\
`define foo(arg1, arg2=default)arg1 arg2
`foo(1)"""
        ),
        defines,
    )
    self.assertEqual(tokens, tokenize("1 default"))

def test_tokenizes_single_line_comment(self):
    self.assertEqual(list(tokenize("// asd")), [Token(tokenizer.COMMENT, value=" asd")])
    self.assertEqual(
        list(tokenize("asd// asd")),
        [Token(tokenizer.IDENTIFIER, value="asd"), Token(tokenizer.COMMENT, value=" asd")],
    )
    self.assertEqual(
        list(tokenize("asd// asd //")),
        [Token(tokenizer.IDENTIFIER, value="asd"), Token(tokenizer.COMMENT, value=" asd //")],
    )

def test_tokenizes_multi_line_comment(self):
    self.assertEqual(list(tokenize("/* asd */")), [Token(tokenizer.MULTI_COMMENT, value=" asd ")])
    self.assertEqual(list(tokenize("/* /* asd */")), [Token(tokenizer.MULTI_COMMENT, value=" /* asd ")])
    self.assertEqual(list(tokenize("/* 1 \n 2 */")), [Token(tokenizer.MULTI_COMMENT, value=" 1 \n 2 ")])
    self.assertEqual(list(tokenize("/* 1 \r\n 2 */")), [Token(tokenizer.MULTI_COMMENT, value=" 1 \r\n 2 ")])

def preprocess(tokens, defines=None, include_paths=None, included_files=None):
    """
    Pre-process tokens while filling in defines
    """
    stream = TokenStream(tokens)
    include_paths = [] if include_paths is None else include_paths
    included_files = [] if included_files is None else included_files
    defines = {} if defines is None else defines

    result = []
    while not stream.eof:
        token = stream.pop()

        # Non-preprocessor tokens pass through untouched
        if token.kind != tokenizer.PREPROCESSOR:
            result.append(token)
            continue

        if token.value == "define":
            macro = define(stream)
            defines[macro.name] = macro

        elif token.value == "include":
            # Resolve the quoted file name against the include paths, in order
            stream.skip_until(tokenizer.STRING)
            file_name = stream.pop().value

            full_name = None
            for include_path in include_paths:
                full_name = join(include_path, file_name)
                if exists(full_name):
                    break
            else:
                assert False, "Could not find %r in include paths" % file_name
            included_files.append(full_name)

            # Included files are recursively pre-processed with the same defines
            with open(full_name, "r") as fptr:
                included_tokens = tokenize(fptr.read())
            result += preprocess(included_tokens, defines, include_paths, included_files)

        elif token.value in defines:
            # Expand a previously recorded macro at its call site
            macro = defines[token.value]
            if macro.num_args == 0:
                values = []
            else:
                values = parse_macro_actuals(stream)
            result += macro.expand(values)

    return result

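# Illustrative usage sketch (not part of the original source): how preprocess
# records a `define and expands it at a later call site. The macro name
# "greet" and this demo function are hypothetical; the call pattern mirrors
# the substitution tests above.
def _demo_preprocess_usage():
    defines = {}
    result = preprocess(
        tokenize(
            """\
`define greet(name)hello name
`greet(world)"""
        ),
        defines,
    )
    # The define directive itself emits no tokens; only the expansion remains,
    # and the macro stays available in the defines dictionary.
    assert result == tokenize("hello world")
    assert "greet" in defines
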
def test_tokenizes_keywords(self):
    self.assertEqual(list(tokenize("module")), [Token(tokenizer.MODULE, value="")])
    self.assertEqual(list(tokenize("endmodule")), [Token(tokenizer.ENDMODULE, value="")])
    self.assertEqual(list(tokenize("package")), [Token(tokenizer.PACKAGE, value="")])
    self.assertEqual(list(tokenize("endpackage")), [Token(tokenizer.ENDPACKAGE, value="")])
    self.assertEqual(list(tokenize("parameter")), [Token(tokenizer.PARAMETER, value="")])
    self.assertEqual(list(tokenize("import")), [Token(tokenizer.IMPORT, value="")])

@classmethod
def parse(cls, code, file_name, include_paths=None):
    """
    Parse verilog file
    """
    include_paths = [] if include_paths is None else include_paths
    tokens = tokenize(code)
    included_files = []
    pp_tokens = preprocess(
        tokens,
        include_paths=[dirname(file_name)] + include_paths,
        included_files=included_files,
    )

    # Strip layout and comment tokens before structural scanning
    tokens = [
        token
        for token in pp_tokens
        if token.kind
        not in (
            tokenizer.WHITESPACE,
            tokenizer.COMMENT,
            tokenizer.NEWLINE,
            tokenizer.MULTI_COMMENT,
        )
    ]
    return cls(
        modules=VerilogModule.find(tokens),
        packages=VerilogPackage.find(tokens),
        imports=cls.find_imports(tokens),
        instances=cls.find_instances(tokens),
        included_files=included_files,
    )

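# Illustrative usage sketch (not from the original source). The enclosing
# class name VerilogDesignFile and the .modules attribute are assumptions
# made for this example; parse tokenizes, pre-processes and strips layout
# tokens before scanning for modules, packages, imports and instances.
def _demo_parse_usage():
    design_file = VerilogDesignFile.parse(  # hypothetical class name
        "module top; endmodule",
        file_name="top.sv",
    )
    return design_file.modules  # assumed attribute set by the constructor
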
def test_tokenizes_string_literal(self):
    self.assertEqual(list(tokenize('"hello"')), [Token(tokenizer.STRING, value='hello')])
    self.assertEqual(
        list(tokenize('"hel""lo"')),
        [Token(tokenizer.STRING, value='hel'), Token(tokenizer.STRING, value='lo')],
    )
    self.assertEqual(list(tokenize(r'"h\"ello"')), [Token(tokenizer.STRING, value=r'h\"ello')])
    self.assertEqual(list(tokenize(r'"\"ello"')), [Token(tokenizer.STRING, value=r'\"ello')])
    self.assertEqual(list(tokenize(r'"\"\""')), [Token(tokenizer.STRING, value=r'\"\"')])

def test_tokenizes_equal(self):
    self.assertEqual(list(tokenize("=")), [Token(tokenizer.EQUAL, value="")])

def test_tokenizes_hash(self):
    self.assertEqual(list(tokenize("#")), [Token(tokenizer.HASH, value="")])

def test_tokenizes_parenthesis(self):
    self.assertEqual(
        list(tokenize("()")),
        [Token(tokenizer.LPAR, value=""), Token(tokenizer.RPAR, value="")],
    )

def test_tokenizes_comma(self):
    self.assertEqual(list(tokenize(",")), [Token(tokenizer.COMMA, value="")])

def test_tokenizes_newline(self):
    self.assertEqual(
        list(tokenize("asd\n")),
        [Token(tokenizer.IDENTIFIER, value="asd"), Token(tokenizer.NEWLINE, value="")],
    )

def test_tokenizes_semi_colon(self):
    self.assertEqual(
        list(tokenize("asd;")),
        [Token(tokenizer.IDENTIFIER, value="asd"), Token(tokenizer.SEMI_COLON, value="")],
    )

def test_non_preprocess_tokens_are_kept(self):
    defines = {}
    tokens = tokenize('"hello"ident/*comment*///comment')
    pp_tokens = preprocess(tokenize('"hello"ident/*comment*///comment'), defines)
    self.assertEqual(pp_tokens, tokens)
    self.assertEqual(defines, {})

def test_preprocess_define_without_value(self):
    defines = {}
    tokens = preprocess(tokenize("`define foo"), defines)
    self.assertEqual(tokens, [])
    self.assertEqual(defines, {"foo": Macro("foo")})

def test_tokenizes_define(self):
    self.assertEqual(
        list(tokenize("`define name")),
        [
            Token(tokenizer.PREPROCESSOR, value="define"),
            Token(tokenizer.WHITESPACE, value=""),
            Token(tokenizer.IDENTIFIER, value="name"),
        ],
    )

def test_preprocess_define_with_value(self):
    defines = {}
    tokens = preprocess(tokenize('`define foo bar "abc"'), defines)
    self.assertEqual(tokens, [])
    self.assertEqual(defines, {"foo": Macro("foo", tokenize('bar "abc"'))})

def test_escaped_newline_ignored(self):
    self.assertEqual(
        list(tokenize("a\\\nb")),
        [Token(tokenizer.IDENTIFIER, value="a"), Token(tokenizer.IDENTIFIER, value="b")],
    )

def test_preprocess_define_with_lpar_value(self):
    defines = {}
    tokens = preprocess(tokenize("`define foo (bar)"), defines)
    self.assertEqual(tokens, [])
    self.assertEqual(defines, {"foo": Macro("foo", tokenize("(bar)"))})

def test_preprocess_define_with_one_arg(self):
    defines = {}
    tokens = preprocess(tokenize("`define foo(arg)arg 123"), defines)
    self.assertEqual(tokens, [])
    self.assertEqual(defines, {"foo": Macro("foo", tokenize("arg 123"), args=("arg",))})

def test_preprocess_define_with_multiple_args(self):
    defines = {}
    tokens = preprocess(tokenize("`define foo( arg1, arg2)arg1 arg2"), defines)
    self.assertEqual(tokens, [])
    self.assertEqual(
        defines,
        {"foo": Macro("foo", tokenize("arg1 arg2"), args=("arg1", "arg2"))},
    )