def __parse_application_target(self, target_name, it):
    """Parse the body of an "application" target and register it.

    Recognized keys: the common target parameters, the C++ parameters,
    plus "link_with" and "library_dirs". A newline ends the body.
    """
    link_with = variables.Variable()
    library_dirs = variables.Variable()

    common_parameters = CommonTargetParameters(
        os.path.dirname(self.filename), self.name, target_name)
    cxx_parameters = CxxParameters()

    while True:
        token = it.next()
        if token == lexer.Token.LITERAL:
            # Try the shared parameter groups first; fall back to the
            # application-specific keys.
            if self.__try_parse_target_common_parameters(
                    common_parameters, token, it):
                pass
            elif self.__try_parse_cxx_parameters(cxx_parameters, token, it):
                pass
            elif token.content == "link_with":
                link_with = self.__parse_list(it)
            elif token.content == "library_dirs":
                library_dirs = self.__parse_list(it)
            else:
                ui.parse_error(token)
        elif token == lexer.Token.NEWLINE:
            break
        else:
            ui.parse_error(token)

    targets.add_target(
        targets.Application(common_parameters, cxx_parameters,
                            link_with, library_dirs))
def __parse_application_target(self, target_name, it):
    """Parse an "application" target body; build and register the target."""
    linked = variables.Variable()
    lib_dirs = variables.Variable()
    common_parameters = CommonTargetParameters(
        os.path.dirname(self.filename), self.name, target_name)
    cxx_parameters = CxxParameters()

    while True:
        token = it.next()
        if token == lexer.Token.NEWLINE:
            break  # a bare newline terminates the target body
        if token == lexer.Token.LITERAL:
            # Shared parameter groups get the first chance at the key.
            if self.__try_parse_target_common_parameters(
                    common_parameters, token, it):
                continue
            if self.__try_parse_cxx_parameters(cxx_parameters, token, it):
                continue
            if token.content == "link_with":
                linked = self.__parse_list(it)
            elif token.content == "library_dirs":
                lib_dirs = self.__parse_list(it)
            else:
                ui.parse_error(token)
        else:
            ui.parse_error(token)

    target = targets.Application(common_parameters, cxx_parameters,
                                 linked, lib_dirs)
    targets.add_target(target)
def __parse_phony(self, target_name, it):
    """Parse a "phony" target body and register a targets.Phony.

    Recognized keys: the common target parameters plus "artefacts" and
    "prerequisites". A newline ends the body.
    """
    common_parameters = CommonTargetParameters(
        os.path.dirname(self.filename), self.name, target_name)
    # NOTE: an unused CxxParameters() local was removed here — a phony
    # target never reads C++ parameters.
    while True:
        token = it.next()
        if token == lexer.Token.LITERAL:
            if self.__try_parse_target_common_parameters(
                    common_parameters, token, it):
                pass
            elif token.content == "artefacts":
                common_parameters.artefacts = self.__parse_list(it)
            elif token.content == "prerequisites":
                common_parameters.prerequisites = self.__parse_list(it)
            else:
                ui.parse_error(token)
        elif token == lexer.Token.NEWLINE:
            break
        else:
            ui.parse_error(token)
    targets.add_target(targets.Phony(common_parameters))
def __parse(self):
    """Drive the parse over self.tokens until the stream is exhausted."""
    token_stream = iter(self.tokens)
    try:
        # __parse_directive consumes tokens until it hits something it
        # does not recognize; False means the input was not a directive.
        recognized = self.__parse_directive(token_stream)
        if not recognized:
            ui.parse_error(msg="unknown :(")
    except StopIteration:
        # Running off the end of the token stream is the normal way out.
        ui.debug("eof")
def _parse_configuration_export(self, it):
    """Parse an export list of the form "( value: $variable ... )".

    Returns a list of variables.Variable objects, one per pair.
    """
    exported = []
    token = it.next()
    if token == lexer.Token.OPEN_PARENTHESIS:
        while True:
            token = it.next()
            if token in [lexer.Token.LITERAL, lexer.Token.VARIABLE]:
                # The value precedes the colon; the target variable
                # name follows it.
                value = self._token_to_variable(token)
                token = it.next()
                if token == lexer.Token.COLON:
                    token = it.next()
                    if token == lexer.Token.VARIABLE:
                        exported.append(variables.Variable(
                            self.name, token.content, value))
                    else:
                        ui.parse_error(token, msg="expected variable")
                else:
                    ui.parse_error(token, msg="expected colon")
            elif token == lexer.Token.CLOSE_PARENTHESIS:
                break
            else:
                ui.parse_error(token)
    else:
        ui.parse_error(token)
    ui.debug("colon list: " + str(exported))
    return exported
def _parse_configuration_export(self, it):
    """Read a parenthesized "value: $variable" list into Variable objects."""
    result = []
    opener = it.next()
    if opener == lexer.Token.OPEN_PARENTHESIS:
        while True:
            token = it.next()
            if token in [lexer.Token.LITERAL, lexer.Token.VARIABLE]:
                exported_value = self._token_to_variable(token)
                separator = it.next()
                if separator == lexer.Token.COLON:
                    name_token = it.next()
                    if name_token == lexer.Token.VARIABLE:
                        # Bind the value to the named variable in this
                        # module's namespace.
                        result.append(variables.Variable(
                            self.name, name_token.content, exported_value))
                    else:
                        ui.parse_error(name_token, msg="expected variable")
                else:
                    ui.parse_error(separator, msg="expected colon")
            elif token == lexer.Token.CLOSE_PARENTHESIS:
                break
            else:
                ui.parse_error(token)
    else:
        ui.parse_error(opener)
    ui.debug("colon list: " + str(result))
    return result
def __parse_directive(self, it):
    """Dispatch top-level directives (set/append/target/configuration).

    Blank lines are skipped. Returns False on the first non-literal,
    non-newline token; otherwise loops until the iterator is exhausted.
    """
    while True:
        token = it.next()
        if token == lexer.Token.NEWLINE:
            continue  # tolerate empty lines between directives
        if token == lexer.Token.LITERAL:
            keyword = token.content
            if keyword in ("set", "append"):
                self.__parse_set_or_append(it, keyword == "append")
            elif keyword == "target":
                self.__parse_target(it)
            elif keyword == "configuration":
                self.__parse_configuration(it)
            else:
                ui.parse_error(token, msg="expected directive")
        else:
            return False
def __tokenize(self, buf):
    """Consume buf, trying each tokenizer until something matches.

    Every matcher is invoked on each pass (the list is built eagerly,
    as in the original any([...]) form), so all of them get a chance to
    advance the buffer.
    """
    matchers = (
        self.__try_tokenize_comment,
        self.__try_tokenize_slash_newline,
        self.__try_tokenize_simple_chars,
        self.__try_tokenize_quoted_literal,
        self.__try_tokenize_variable_or_literal,
        self.__try_tokenize_whitespace,
        self.__try_tokenize_multiline_literal,
    )
    while not buf.eof():
        outcomes = [match(buf) for match in matchers]
        if not any(outcomes):
            ui.parse_error(msg="unexpected character: " + str(buf.value()))
        if buf.eof():
            break
def __tokenize(self, buf):
    """Tokenize buf until EOF; abort on a character nothing recognizes.

    Note: all __try_tokenize_* calls run on every iteration (list first,
    any() second), matching the original's eager evaluation.
    """
    while not buf.eof():
        attempts = [
            self.__try_tokenize_comment(buf),
            self.__try_tokenize_slash_newline(buf),
            self.__try_tokenize_simple_chars(buf),
            self.__try_tokenize_quoted_literal(buf),
            self.__try_tokenize_variable_or_literal(buf),
            self.__try_tokenize_whitespace(buf),
            self.__try_tokenize_multiline_literal(buf),
        ]
        if not any(attempts):
            ui.parse_error(msg="unexpected character: " + str(buf.value()))
        if buf.eof():
            break
def __parse_list(self, it):
    """Parse a parenthesized list of values into a variables.Variable."""
    collected = variables.Variable()
    opener = it.next()
    if opener == lexer.Token.OPEN_PARENTHESIS:
        while True:
            token = it.next()
            item = self._token_to_variable(token)
            if item:
                collected.content.append(item)
            elif token == lexer.Token.CLOSE_PARENTHESIS:
                break
            else:
                ui.parse_error(token)
    else:
        ui.parse_error(opener)
    return collected
def eval(self):
    """Resolve this variable reference and return its evaluated value.

    A qualified name has the form "$module.name"; splitting on "." and
    rewriting self.module/self.name normalizes it. An unqualified name
    (no dot) leaves both fields untouched — the original code contained
    no-op self-assignments for that case, removed here.
    """
    ui.debug("evaluating {!s}".format(self))
    parts = self.name.split(".")
    if len(parts) == 2:
        self.module = parts[0][1:]   # lose the leading "$"
        self.name = "$" + parts[1]
    global modules
    if self.module not in modules:
        ui.parse_error(msg="no such module: " + self.module)
    if self.name not in modules[self.module]:
        ui.fatal("{!s} does not exist".format(self))
    return modules[self.module][self.name].eval()
def __parse_static_library(self, target_name, it):
    """Parse a "static_library" target body and register the target."""
    common_parameters = CommonTargetParameters(
        os.path.dirname(self.filename), self.name, target_name)
    cxx_parameters = CxxParameters()
    while True:
        token = it.next()
        if token == lexer.Token.LITERAL:
            # Only the shared parameter groups are valid keys here.
            if self.__try_parse_target_common_parameters(
                    common_parameters, token, it):
                pass
            elif self.__try_parse_cxx_parameters(cxx_parameters, token, it):
                pass
            else:
                ui.parse_error(token)
        elif token == lexer.Token.NEWLINE:
            break
        else:
            ui.parse_error(token)
    targets.add_target(
        targets.StaticLibrary(common_parameters, cxx_parameters))
def eval(self):
    """Expand every ${name} reference in self.content.

    Returns a single-element list holding the fully expanded string.
    Raises a parse error (via ui.parse_error) when "$" is not followed
    by "{".
    """
    ui.debug("evaluating {!s}: ".format(self))
    s = self.content
    ret = []

    # Small state machine scanning "text ${var} text".
    STATE_READING = 1
    STATE_WAITING_FOR_PARENTHESIS = 2
    STATE_READING_NAME = 3

    variable_name = '$'
    state = STATE_READING

    for c in s:
        if state == STATE_READING:
            if c == "$":
                state = STATE_WAITING_FOR_PARENTHESIS
            else:
                ret.append(c)
        elif state == STATE_WAITING_FOR_PARENTHESIS:
            if c == "{":
                state = STATE_READING_NAME
            else:
                ui.parse_error(msg="expecting { after $")
        elif state == STATE_READING_NAME:
            if c == "}":
                ui.debug("variable: " + variable_name)
                variable = ReferenceToVariable(self.module, variable_name)
                # Variables evaluate to lists; flatten with spaces.
                ret.append(" ".join(variable.eval()))
                variable_name = '$'
                state = STATE_READING
            else:
                variable_name += c
    # The original had a second, unreachable "elif state ==
    # STATE_READING_NAME" branch here (identical condition) — removed.
    # NOTE(review): an unterminated "${..." at end of input is silently
    # dropped — confirm whether that should be a parse error.
    return ["".join(ret)]
def __parse_configuration(self, it):
    """Parse a "configuration" block and register it.

    The first literal is the configuration name; the remaining keys are
    list-valued settings, except "export" which uses the colon-list form.
    """
    configuration = configurations.Configuration()

    # First token after the directive is the configuration's name.
    name_token = it.next()
    if name_token == lexer.Token.LITERAL:
        configuration.name = name_token.content
    else:
        ui.parse_error(name_token)

    # Every key except "export" stores a plain parsed list on the
    # configuration attribute of the same name.
    list_valued = {
        "compiler": "compiler",
        "archiver": "archiver",
        "application_suffix": "application_suffix",
        "compiler_flags": "compiler_flags",
        "linker_flags": "linker_flags",
    }

    while True:
        token = it.next()
        if token == lexer.Token.LITERAL:
            attr = list_valued.get(token.content)
            if attr is not None:
                setattr(configuration, attr, self.__parse_list(it))
            elif token.content == "export":
                configuration.export = self._parse_configuration_export(it)
            else:
                ui.parse_error(token)
        elif token == lexer.Token.NEWLINE:
            break
        else:
            ui.parse_error(token)

    ui.debug("configuration parsed:" + str(configuration))
    configurations.add_configuration(configuration)
def eval(self):
    """Expand ${name} variable references embedded in self.content.

    Returns a one-element list containing the expanded string; calls
    ui.parse_error when "$" is not immediately followed by "{".
    """
    ui.debug("evaluating {!s}: ".format(self))
    text = self.content
    expanded = []

    # Scanner states for "literal text ${variable} literal text".
    STATE_READING = 1
    STATE_WAITING_FOR_PARENTHESIS = 2
    STATE_READING_NAME = 3

    variable_name = "$"
    state = STATE_READING

    for c in text:
        if state == STATE_READING:
            if c == "$":
                state = STATE_WAITING_FOR_PARENTHESIS
            else:
                expanded.append(c)
        elif state == STATE_WAITING_FOR_PARENTHESIS:
            if c == "{":
                state = STATE_READING_NAME
            else:
                ui.parse_error(msg="expecting { after $")
        elif state == STATE_READING_NAME:
            if c == "}":
                ui.debug("variable: " + variable_name)
                variable = ReferenceToVariable(self.module, variable_name)
                # A variable evaluates to a list of strings.
                expanded.append(" ".join(variable.eval()))
                variable_name = "$"
                state = STATE_READING
            else:
                variable_name += c
    # Removed: a duplicate, unreachable "elif state == STATE_READING_NAME"
    # branch that repeated the condition above.
    # NOTE(review): a trailing unterminated "${..." is silently discarded
    # — confirm whether that should be reported.
    return ["".join(expanded)]
def __parse_phony(self, target_name, it):
    """Parse the body of a "phony" target and register it.

    Accepts the common target parameters plus "artefacts" and
    "prerequisites"; a newline terminates the body.
    """
    common_parameters = CommonTargetParameters(
        os.path.dirname(self.filename), self.name, target_name)
    # Removed an unused CxxParameters() instance — targets.Phony takes
    # only the common parameters.
    while True:
        token = it.next()
        if token == lexer.Token.LITERAL:
            if self.__try_parse_target_common_parameters(
                    common_parameters, token, it):
                pass
            elif token.content == "artefacts":
                common_parameters.artefacts = self.__parse_list(it)
            elif token.content == "prerequisites":
                common_parameters.prerequisites = self.__parse_list(it)
            else:
                ui.parse_error(token)
        elif token == lexer.Token.NEWLINE:
            break
        else:
            ui.parse_error(token)
    target = targets.Phony(common_parameters)
    targets.add_target(target)
def __parse_set_or_append(self, it, append):
    """Handle a "set"/"append" directive for a variable.

    With append=False the first value replaces the variable and any
    further values on the line are appended; with append=True every
    value is appended.
    """
    head = it.next()
    if head == lexer.Token.VARIABLE:
        variable_name = head.content
    else:
        ui.parse_error(head)

    appending = append
    while True:
        token = it.next()
        value = self._token_to_variable(token)
        if value:
            if appending:
                variables.append(self.name, variable_name, value)
            else:
                # First value of a plain "set": overwrite, then switch
                # to appending for the rest of the line.
                variables.add(self.name, variable_name, value)
                appending = True
        elif token == lexer.Token.NEWLINE:
            break
        else:
            ui.parse_error(token)
def __parse_static_library(self, target_name, it):
    """Parse a "static_library" target body; build and register it."""
    common_parameters = CommonTargetParameters(
        os.path.dirname(self.filename), self.name, target_name)
    cxx_parameters = CxxParameters()
    while True:
        token = it.next()
        if token == lexer.Token.LITERAL:
            handled = (
                self.__try_parse_target_common_parameters(
                    common_parameters, token, it)
                or self.__try_parse_cxx_parameters(
                    cxx_parameters, token, it))
            if not handled:
                ui.parse_error(token)
        elif token == lexer.Token.NEWLINE:
            break
        else:
            ui.parse_error(token)
    target = targets.StaticLibrary(common_parameters, cxx_parameters)
    targets.add_target(target)
def __parse_target(self, it):
    """Read a target's type and name, then delegate to its body parser."""
    token = it.next()
    if token == lexer.Token.LITERAL:
        target_type = token.content
        token = it.next()
        if token == lexer.Token.LITERAL:
            target_name = token.content
        else:
            ui.parse_error(token)
    else:
        ui.parse_error(token)

    # One body parser per supported target type.
    body_parsers = {
        "application": self.__parse_application_target,
        "static_library": self.__parse_static_library,
        "phony": self.__parse_phony,
    }
    parser = body_parsers.get(target_type)
    if parser is not None:
        parser(target_name, it)
    else:
        ui.parse_error(token, msg="unknown target type: " + target_type)