Example #1
	def __init__(self, value):
		super().__init__(value)

		rd = CodeReader(value)
		rd.consume_exact('#pragma')
		rd.consume_inline_whitespace()
		self.name = rd.consume_identifier()
		rd.consume_inline_whitespace()

		if rd.has_identifier():
			self.value = rd.consume_identifier()  # identifier without quotes

		elif rd.has_number():

			n = rd.consume_number()

			# try decimal first, then hex, then binary
			try:
				self.value = int(n, 10)
			except ValueError:
				try:
					self.value = int(n, 16)
				except ValueError:
					try:
						self.value = int(n, 2)
					except ValueError:
						rd.error('Could not parse number: %s' % n)

		elif rd.has_string():
			self.value = rd.consume_string()[1:-1]  # crop quotes

		else:
			self.value = True  # boolean directive (flag)

		# coerce 'true'/'false' (any case) to booleans
		v = self.value
		if type(v) is str:
			self.value = {'true': True, 'false': False}.get(v.lower(), v)
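
The decimal -> hex -> binary fallback above is worth pinning down, so here is a minimal, self-contained sketch of the same logic as a plain function (parse_pragma_number is a hypothetical name, not part of the class above). One quirk of the order: a '0b...' binary literal is also a valid hexadecimal string, since 'b' is a hex digit, so the base-16 attempt catches it before base 2 is ever tried.

def parse_pragma_number(n):
	# same order as __init__ above: decimal, then hex, then binary
	for base in (10, 16, 2):
		try:
			return int(n, base)
		except ValueError:
			continue
	raise ValueError('Could not parse number: %s' % n)

assert parse_pragma_number('42') == 42
assert parse_pragma_number('0x2A') == 42
# '0b10' is read as hex 0x0b10 (2832), not binary 0b10 (2)
assert parse_pragma_number('0b10') == 0x0b10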
Example #2
	def _tokenize(self):
		""" Parse expression sub-tokens """

		rd = CodeReader(self.value)

		while not rd.has_end():
			rd.sweep()

			if rd.has_identifier():
				# an identifier: either a variable
				# or a function call

				s = rd.consume_identifier()
				t = T_Name(s)
				self.tokens.append(t)

				rd.sweep()

				if rd.has_bracket():
					# array index
					s = rd.consume_block()
					t = T_Bracket(s)
					self.tokens.append(t)

				elif rd.has_paren():
					# paren with arguments for the function
					s = rd.consume_block()
					t = T_Paren(s)

					t.set_type(ParenType.ARGVALS)

					self.tokens.append(t)

			elif rd.has_paren():
				# Parenthesised sub-expression
				s = rd.consume_block()
				t = T_Paren(s)
				t.set_type(ParenType.EXPR)
				self.tokens.append(t)

			elif rd.has_number():
				# Number literal
				s = rd.consume_number()
				t = T_Number(s)
				self.tokens.append(t)

			elif (len(self.tokens) > 0
				and type(self.tokens[-1]) is T_Operator
				and rd.matches(r'[-+]\s*[0-9a-z_]+')):

				# Sign preceding a value; the value itself is
				# consumed on the next pass through the loop
				sign = rd.consume()
				if sign == '+':
					sign = ''  # unary plus is a no-op

				rd.sweep()

				if sign == '-':
					# rewrite '-x' as '-1 * x'
					self.tokens.append(T_Number('-1'))
					self.tokens.append(T_Operator('*'))

			elif rd.has_operator():
				# Operator
				s = rd.consume_operator()
				t = T_Operator(s)
				self.tokens.append(t)

			elif rd.has_char():
				# Char literal
				s = rd.consume_char()
				t = T_Char(s)
				self.tokens.append(t)

			elif rd.has_string():
				# String literal
				s = rd.consume_string()
				t = T_String(s)
				self.tokens.append(t)

			else:
				raise Exception('Unexpected expression token near ' + rd.peek(10))

		for t in self.tokens:
			if t.is_composite():
				t.tokenize()
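
To make the sign branch above concrete, here is a toy, self-contained tokenizer (hypothetical; plain strings stand in for CodeReader and the T_* classes) that applies the same rewrite: a '-' following an operator expands to '-1 *', a unary '+' is dropped, and everything else passes through.

import re

def toy_tokenize(expr):
	tokens = []
	for tok in re.findall(r'[A-Za-z_]\w*|\d+|[-+*/]', expr):
		if tok in ('+', '-') and tokens and tokens[-1] in ('+', '-', '*', '/'):
			if tok == '-':
				# same rewrite as above: unary minus becomes '-1 *'
				tokens += ['-1', '*']
			# unary plus is simply dropped
		else:
			tokens.append(tok)
	return tokens

assert toy_tokenize('a * -b') == ['a', '*', '-1', '*', 'b']
assert toy_tokenize('a * +b') == ['a', '*', 'b']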
Example #3
	def _tokenize(self):
		""" Parse expression sub-tokens """

		rd = CodeReader(self.value)

		while not rd.has_end():
			rd.sweep()

			if rd.has_identifier():
				# an identifier: either a variable
				# or a function call

				s = rd.consume_identifier()
				t = T_Name(s)
				self.tokens.append(t)

				rd.sweep()

				if rd.has_bracket():
					# array index
					s = rd.consume_block()
					t = T_Bracket(s)
					self.tokens.append(t)

				elif rd.has_paren():
					# paren with arguments for the function
					s = rd.consume_block()
					t = T_Paren(s)

					t.set_type(ParenType.ARGVALS)

					self.tokens.append(t)

			elif rd.has_paren():
				# Parenthesised sub-expression
				s = rd.consume_block()
				t = T_Paren(s)
				t.set_type(ParenType.EXPR)
				self.tokens.append(t)

			elif rd.has_number():
				# Number literal
				s = rd.consume_number()
				t = T_Number(s)
				self.tokens.append(t)

			elif ((len(self.tokens) == 0
				or type(self.tokens[-1]) is T_Operator)
				and rd.matches(r'[-+]\s*[0-9a-z_]+')):

				# Unary sign at the start of the expression
				# or right after an operator
				sign = rd.consume()
				if sign == '+':
					sign = ''  # unary plus is a no-op

				rd.sweep()

				if sign == '-':
					# dedicated unary-minus token
					self.tokens.append(T_Operator('@-'))

			elif rd.has_operator():
				# Operator
				s = rd.consume_operator()
				t = T_Operator(s)
				self.tokens.append(t)

			elif rd.has_char():
				# Char literal
				s = rd.consume_char()
				t = T_Char(s)
				self.tokens.append(t)

			elif rd.has_string():
				# String literal
				s = rd.consume_string()
				t = T_String(s)
				self.tokens.append(t)

			else:
				raise Exception('Unexpected expression token near ' + rd.peek(10))

		for t in self.tokens:
			if t.is_composite():
				t.tokenize()
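
Example #3 changes the sign branch in two ways: it also fires when the expression starts with a sign (len(self.tokens) == 0), and a '-' now yields a dedicated unary-minus token T_Operator('@-') instead of the '-1 *' rewrite, deferring the actual negation to a later stage. A toy sketch of that variant, again with plain strings standing in for the real token classes:

import re

def toy_tokenize_v3(expr):
	tokens = []
	for tok in re.findall(r'[A-Za-z_]\w*|\d+|[-+*/]', expr):
		if tok in ('+', '-') and (not tokens or tokens[-1] in ('+', '-', '*', '/', '@-')):
			if tok == '-':
				tokens.append('@-')  # unary-minus marker, resolved later
			# unary plus is dropped
		else:
			tokens.append(tok)
	return tokens

assert toy_tokenize_v3('-b') == ['@-', 'b']
assert toy_tokenize_v3('a * -b') == ['a', '*', '@-', 'b']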