Example #1
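Iterating the lexer inside a peeking() context leaves the token stream intact, so the same sequence of tokens can be walked again afterwards.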
	def test_peeking_iter(self):
		symbols = "+-*/"
		lexer = Lexer(symbols)

		tokens = [Token(TokenType.Symbol, c, ((1, i), (1, i + 1))) for i, c in enumerate(symbols, start=1)]
		tokens.append(Token(TokenType.EndOfStream, "", ((1, len(symbols) + 1), (1, len(symbols) + 1))))

		def test():
			for i, (actual, expected) in enumerate(zip_longest(lexer, tokens)):
				with self.subTest(i=i, actual=actual, expected=expected):
					self.assertToken(actual, expected)

		# The first two passes run inside peeking(), so the stream is rewound each
		# time; only the third pass consumes the tokens for real.
		with self.subTest(iteration=1), lexer.peeking():
			test()
		with self.subTest(iteration=2), lexer.peeking():
			test()
		with self.subTest(iteration=3):
			test()
Example #2
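Nested peeking() contexts each rewind the lexer to their own entry point when they exit.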
	def test_nested_peeking(self):
		lexer = Lexer("1 2 3")

		self.assertToken(lexer.next(), TokenType.Integer, "1")

		# Exiting a peeking() context rewinds the lexer to where that context was
		# entered; nested contexts each rewind to their own entry point.
		with lexer.peeking():
			self.assertToken(lexer.next(), TokenType.Integer, "2")

			with lexer.peeking():
				self.assertToken(lexer.next(), TokenType.Integer, "3")
				self.assertToken(lexer.next(), TokenType.EndOfStream)

			self.assertToken(lexer.next(), TokenType.Integer, "3")
			self.assertToken(lexer.next(), TokenType.EndOfStream)

		self.assertToken(lexer.next(), TokenType.Integer, "2")
		self.assertToken(lexer.next(), TokenType.Integer, "3")
		self.assertToken(lexer.next(), TokenType.EndOfStream)
Example #3
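save() can be called at several nesting levels; each call commits the tokens consumed so far within its own context.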
	def test_nested_peeking_save_multiple(self):
		lexer = Lexer("1 2 3 4 5")

		self.assertToken(lexer.next(), TokenType.Integer, "1")

		with lexer.peeking() as p1:
			self.assertToken(lexer.next(), TokenType.Integer, "2")

			with lexer.peeking() as p2:
				self.assertToken(lexer.next(), TokenType.Integer, "3")

				# Commit "3" at the inner level: only the "4" read below is rewound
				# when p2 exits.
				p2.save()

				self.assertToken(lexer.next(), TokenType.Integer, "4")

			# Commit "2" and "3" at the outer level as well, so they stay consumed
			# after p1 exits; the "4" read next is rewound again.
			p1.save()

			self.assertToken(lexer.next(), TokenType.Integer, "4")

		self.assertToken(lexer.next(), TokenType.Integer, "4")
		self.assertToken(lexer.next(), TokenType.Integer, "5")
		self.assertToken(lexer.next(), TokenType.EndOfStream)
Example #4
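Peeking ahead over the remaining symbols never consumes the token that the next real call to next() should return.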
	def test_peeking(self):
		symbols = "+-*/"
		lexer = Lexer(symbols)

		# At each position, peek ahead through every remaining symbol, then check
		# that the next real token was left unconsumed.
		for i in range(len(symbols)):
			next_expected = symbols[i]
			with self.subTest(next_token=next_expected):
				with lexer.peeking():
					for j in range(len(symbols) - i):
						expected = symbols[i + j]
						with self.subTest(peek_token=expected):
							self.assertToken(lexer.next(), TokenType.Symbol, expected)
				self.assertToken(lexer.next(), TokenType.Symbol, next_expected)
Example #5
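save() keeps the tokens consumed before the call; only tokens read after it are rewound when the context exits.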
	def test_peeking_save(self):
		lexer = Lexer("1 2 3 4 5")

		self.assertToken(lexer.next(), TokenType.Integer, "1")

		with lexer.peeking() as p:
			self.assertToken(lexer.next(), TokenType.Integer, "2")
			self.assertToken(lexer.next(), TokenType.Integer, "3")

			# Keep "2" and "3" consumed; only the "4" read after save() is rewound
			# when the context exits.
			p.save()

			self.assertToken(lexer.next(), TokenType.Integer, "4")

		self.assertToken(lexer.next(), TokenType.Integer, "4")
		self.assertToken(lexer.next(), TokenType.Integer, "5")
		self.assertToken(lexer.next(), TokenType.EndOfStream)
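
None of the examples above shows the Lexer internals. The sketch below illustrates one way a peeking()/save() API with this behaviour could be implemented, as a buffered reader over a generic token source; the class and attribute names (PeekHandle, PeekableReader, _marks, _pos, _buffer) are assumptions made for illustration, not the project's actual code.

class PeekHandle:
	"""Handle returned by peeking(); save() commits tokens consumed so far."""

	def __init__(self, reader, index):
		self._reader = reader
		self._index = index  # this context's slot on the reader's mark stack

	def save(self):
		# Advance this context's rewind point to the current position, so the
		# tokens consumed before save() survive the context's exit.
		self._reader._marks[self._index] = self._reader._pos

	def __enter__(self):
		return self

	def __exit__(self, *exc):
		# Rewind to the mark: the entry position, unless save() advanced it.
		self._reader._pos = self._reader._marks.pop()
		return False


class PeekableReader:
	"""Buffered token reader with the peeking()/save() semantics shown above."""

	def __init__(self, tokens):
		self._source = iter(tokens)  # real tokenisation would happen here
		self._buffer = []            # every token produced so far
		self._pos = 0                # index of the next token to hand out
		self._marks = []             # one rewind point per open peeking() context

	def peeking(self):
		self._marks.append(self._pos)
		return PeekHandle(self, len(self._marks) - 1)

	def next(self):
		if self._pos == len(self._buffer):
			self._buffer.append(next(self._source))
		token = self._buffer[self._pos]
		self._pos += 1
		return token


Replaying the scenario from Example #5 against this sketch:

reader = PeekableReader(["1", "2", "3", "4", "5"])
assert reader.next() == "1"
with reader.peeking() as p:
	assert reader.next() == "2"
	assert reader.next() == "3"
	p.save()                     # keep "2" and "3" consumed
	assert reader.next() == "4"  # rewound when the context exits
assert reader.next() == "4"
assert reader.next() == "5"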