def test_Tokenizer_single_line(self):
    """A simple prefix assignment tokenizes into its atoms plus a statement-terminating newline.

    The source literal is multiline on purpose: the tokenizer emits a '\n'
    token for the line terminator, which the expected token list asserts.
    """
    t = prefix_tokenizer.Tokenizer('''
= a 5
''')
    tokens = values(t.tokens)
    self.assertEqual(
        tokens,
        ['=', 'a', '5', '\n']
    )
def test_Tokenizer_heredoc(self):
    """A <<MSG heredoc collapses its body lines into one string token.

    'hello' and 'world' sit on separate source lines; the tokenizer joins
    them with an escaped newline into the single token '"hello\\nworld"'.
    """
    t = prefix_tokenizer.Tokenizer('''
= a <<MSG
hello
world
MSG
''')
    tokens = values(t.tokens)
    self.assertEqual(
        tokens,
        ['=', 'a', '"hello\\nworld"', '\n']
    )
def test_range_syntax_inclusive(self):
    """Inclusive range sugar `0..10` expands to a (range 0 11) call.

    The upper bound is bumped by one because `..` is inclusive while the
    underlying `range` form is half-open.
    """
    t = prefix_tokenizer.Tokenizer('''
0..10
''')
    tokens = values(t.tokens)
    self.assertEqual(
        tokens,
        ['(', 'range', '0', '11', ')', '\n']
    )
def test_Tokenizer_anonymous(self):
    """Brace-delimited anonymous-function shorthand expands to (fn-shorthand ...).

    `{+ $ $}` becomes an explicit `(fn-shorthand + $ $)` token group, left
    in place as the first argument to `reduce`.
    """
    t = prefix_tokenizer.Tokenizer('''
reduce {+ $ $} a
''')
    tokens = values(t.tokens)
    self.assertEqual(
        tokens,
        ['reduce', '(', 'fn-shorthand', '+', '$', '$', ')', 'a', '\n']
    )