def test_lexer_with_comments1(setup_basic):
    """Inline ';' comments and a leading '*' comment line must be stripped,
    leaving the same token stream as the comment-free ``setup_basic`` input."""
    # NOTE(review): reconstructed line breaks in this literal — the leading
    # '*' comment line must end with '\n' to yield the NEWL popped below.
    str_ = '''* Some comment here
X1 a b testdev; COMMENT ABOUT M1 pins ; SOME MORE COMMENTS ABOUT PARAMETERS
+ x=1f y=0.1; AND A FW MORE FOR GOOD MEASURE
'''
    tokens = list(SpiceParser._generate_tokens(str_))
    # The comment-only first line contributes a single NEWL; discard it
    # before the pairwise comparison against the reference stream.
    assert tokens.pop(0).type == 'NEWL'
    reference = SpiceParser._generate_tokens(setup_basic)
    matches = (
        tok1.type == tok2.type and tok1.value == tok2.value
        for tok1, tok2 in zip(tokens, reference)
    )
    assert all(matches), tokens
def test_lexer_continuation(setup_basic):
    """A backslash line-continuation (with or without a trailing space before
    the newline) must join the two physical lines into one logical statement,
    so no NEWL token appears between the parameters.

    The original wrote the identical expected-token list and assertion twice;
    both input variants are now checked in a single loop.
    """
    expected = [
        'NAME', 'NAME', 'EQUALS', 'NUMBER',
        'NAME', 'EQUALS', 'NUMBER', 'NEWL',
    ]
    for str_ in (
        # continuation backslash followed by a space, then the newline
        "param fin_p_diff2sing=6 \\ \nwidth_n_diff2sing=10\n",
        # continuation backslash immediately before the newline
        "param fin_p_diff2sing=6 \\\nwidth_n_diff2sing=10\n",
    ):
        tokens = list(SpiceParser._generate_tokens(str_))
        assert [tok.type for tok in tokens] == expected
def test_lexer_basic(setup_basic):
    """A single-line instantiation tokenizes to names, '=' signs, and numbers."""
    expected = [
        'NAME', 'NAME', 'NAME', 'NAME',
        'NAME', 'EQUALS', 'NUMBER',
        'NAME', 'EQUALS', 'NUMBER',
    ]
    observed = [tok.type for tok in SpiceParser._generate_tokens(setup_basic)]
    assert observed == expected
def test_lexer_multiline(setup_multiline):
    """A multi-line netlist yields NEWL separators and an EXPR for the
    expression-valued parameter on the second statement."""
    expected = [
        'NEWL',
        'NAME', 'NAME', 'NAME', 'NAME',
        'NAME', 'EQUALS', 'NUMBER',
        'NAME', 'EQUALS', 'NUMBER',
        'NEWL',
        'NAME', 'NAME', 'NAME', 'NAME',
        'NAME', 'EQUALS', 'EXPR',
        'NEWL',
    ]
    observed = [tok.type for tok in SpiceParser._generate_tokens(setup_multiline)]
    assert observed == expected
def test_lexer_annotation(setup_annotation):
    """An annotated subcircuit tokenizes with DECL at the '.subckt'/'.ends'
    boundaries and ANNOTATION tokens for the '@:' style annotations."""
    expected = [
        # .subckt declaration line with two annotations
        'NEWL', 'DECL', 'NAME', 'NAME', 'NAME', 'NAME', 'NAME', 'NAME',
        'NUMBER', 'NAME', 'ANNOTATION', 'ANNOTATION',
        # plain statement
        'NEWL', 'NAME', 'NAME', 'NAME', 'NUMBER',
        # statement carrying a single annotation
        'NEWL', 'NAME', 'NAME', 'NAME', 'NUMBER', 'ANNOTATION',
        # two device instantiations with parameter assignments
        'NEWL', 'NAME', 'NAME', 'NAME', 'NAME', 'NUMBER', 'NAME',
        'NAME', 'EQUALS', 'NUMBER', 'NAME', 'EQUALS', 'NUMBER',
        'NEWL', 'NAME', 'NAME', 'NAME', 'NAME', 'NUMBER', 'NAME',
        'NAME', 'EQUALS', 'NUMBER', 'NAME', 'EQUALS', 'NUMBER',
        # closing .ends
        'NEWL', 'DECL', 'NEWL',
    ]
    observed = [tok.type for tok in SpiceParser._generate_tokens(setup_annotation)]
    assert observed == expected
def library():
    """Build a SpiceParser library from the bundled template netlists.

    Parses ``basic_template.sp`` then ``user_template.sp`` from the sibling
    ``files`` directory and returns the parser's accumulated library.

    The original duplicated the open/read/parse sequence for each file;
    the two files are now processed by one loop (order preserved, since
    user templates may build on basic ones).
    """
    parser = SpiceParser()
    align_home = pathlib.Path(__file__).resolve().parent.parent / "files"
    for lib_name in ("basic_template.sp", "user_template.sp"):
        with open(align_home / lib_name) as f:
            parser.parse(f.read())
    return parser.library
def parser():
    """Provide a fresh, empty SpiceParser instance."""
    return SpiceParser()
def get_parser():
    """Return a SpiceParser preloaded with the model-statements file.

    Reads the path held in the module-level ``model_statemenets`` name
    (defined elsewhere in this file) and parses its full contents.
    """
    preloaded = SpiceParser()
    with open(model_statemenets) as f:
        contents = f.read()
    preloaded.parse(contents)
    return preloaded