Example #1
    def __init__(self, tokens, parenthesis=None, ending=None):
        assert (ending in (None, ',', ';'))
        assert (parenthesis in (None, '()', '[]', '{}'))
        assert (isinstance(tokens, list))
        for i, s in enumerate(tokens):
            assert(isinstance(s, (Type, Keyword, Preprocessor, Statement, ParserType)))

        self._parenthesis = parenthesis

        if self._parenthesis:
            tokens = [ParserKeyword(parenthesis[0])] + tokens + [ParserKeyword(parenthesis[1])]

        super().__init__(tokens)
        self._ending = None
        self.ending = ending
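
A minimal usage sketch for this constructor. The class name `Statement` and the
token types are assumptions taken from the assertions above, not a verified API:

# Hypothetical: wrap a variable in parentheses and terminate it with ';'
stmt = Statement([Variable('x')], parenthesis='()', ending=';')
# the token stream is now roughly:
# [ParserKeyword('('), Variable('x'), ParserKeyword(')'), ParserKeyword(';')]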
Example #2
    # presumably the setter of an `ending` property, given the
    # `self.ending = ending` assignment in __init__ above
    @ending.setter
    def ending(self, ending):
        assert (ending in (None, ';', ','))
        # swap the trailing delimiter: drop the old token (if any), then
        # append the new one (if any)
        if self._ending is not None:
            del self._tokens[-1]
        if ending is not None:
            self._tokens.append(ParserKeyword(ending))
        self._ending = ending
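
Continuing the sketch from Example #1, a hedged illustration of the
replace-the-trailing-delimiter behaviour:

stmt.ending = ','   # drops the trailing ';' and appends ParserKeyword(',')
stmt.ending = None  # removes the trailing delimiter altogether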
Example #3
def identify_token(token):
    """
    Converts a raw token from the tokenizer into its corresponding BaseType.
    """
    if isinstance(token, (Comment, String)):
        return token
    if token == ' ':
        return Space()
    if token == '\t':
        return Tab()
    if token == '\\\n':
        return BrokenEndOfLine()
    if token in ('(', ')', '[', ']', '{', '}', ',', ';'):
        return ParserKeyword(token)
    if token in ('\n', '\r\n'):
        return EndOfLine(token)
    if token in ('true', 'false'):
        return Boolean(token == 'true')
    # numeric literals: try int first so that '1' stays integral, then fall
    # back to float (handles e.g. '0.5' and '1e5')
    try:
        return Number(int(token))
    except ValueError:
        pass
    try:
        return Number(float(token))
    except ValueError:
        pass
    if token in PREPROCESSORS:
        return Preprocessor(token)
    if token.lower() in NAMESPACES:
        return Namespace(token)
    elif token.lower() in KEYWORDS:
        return Keyword(token)
    else:
        return Variable(token)
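
A few hedged illustrations of the classification order (KEYWORDS, NAMESPACES
and the token classes are assumed to come from the surrounding modules):

identify_token('\t')      # -> Tab()
identify_token('false')   # -> Boolean(False)
identify_token('2')       # -> Number(2): the int() branch matches first
identify_token('0.5')     # -> Number(0.5): int() fails, float() succeeds
identify_token('player')  # -> Keyword or Variable, depending on KEYWORDS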
Example #4
    def _with_commas(self):
        # Yield the values interleaved with comma keywords: v0, ',', v1, ...
        if not self._values:
            # covers both None and an empty list; next() on an empty iterator
            # inside a generator would surface as RuntimeError (PEP 479)
            return
        it = iter(self._values)
        yield next(it)
        for x in it:
            yield ParserKeyword(',')
            yield x
Example #5
def _analyze_array(tokens, analyze_tokens, tokens_until):
    result = []
    part = []
    first_comma_found = False
    for token in tokens:
        if token == ParserKeyword(','):
            first_comma_found = True
            if not part:
                raise SQFParserError(get_coord(tokens_until), 'Array cannot have an empty element')
            result.append(analyze_tokens(part))
            part = []
        else:
            part.append(token)

    # an empty array is a valid array
    if part == [] and first_comma_found:
        raise SQFParserError(get_coord(tokens_until), 'Array cannot have an empty element')
    elif tokens:
        result.append(analyze_tokens(part))
    return result
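
A hedged trace of the splitter, with a stand-in lambda in place of the real
`analyze_tokens` callback:

inner = [Number(1), ParserKeyword(','), Number(2)]
_analyze_array(inner, lambda part: part, inner)  # -> [[Number(1)], [Number(2)]]
_analyze_array([], lambda part: part, [])        # -> [] (an empty array is valid)
# a dangling comma, e.g. [Number(1), ParserKeyword(',')], raises
# SQFParserError('Array cannot have an empty element')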
Example #6
    def update_tokens(self):
        # Rebuild the raw token stream: '[' + values interleaved with ',' + ']'
        self._tokens = [ParserKeyword('[')] + list(self._with_commas()) + [ParserKeyword(']')]
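
Combined with `_with_commas` from Example #4, this rebuilds the full token
stream of an array; a hedged trace on a hypothetical instance `arr`:

arr._values = [Number(1), Number(2)]
arr.update_tokens()
# arr._tokens == [ParserKeyword('['), Number(1), ParserKeyword(','),
#                 Number(2), ParserKeyword(']')]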
Example #7
def parse_block(all_tokens,
                analyze_tokens,
                start=0,
                initial_lvls=None,
                stop_statement='both',
                defines=None):
    if not initial_lvls:
        initial_lvls = _LEVELS
    if defines is None:
        defines = defaultdict(dict)
    lvls = initial_lvls.copy()

    statements = []
    tokens = []
    i = start
    if not all_tokens:
        return Statement([]), 0

    while i < len(all_tokens):
        token = all_tokens[i]

        # begin #ifdef controls
        if lvls['ifdef'] and token in OPEN_PARENTHESIS:
            lvls['ifdef_open_close'] += 1

        stop = False
        if token in (Preprocessor('#ifdef'), Preprocessor('#ifndef')):
            stop = True
            lvls['ifdef'] += 1
            expression, size = parse_block(all_tokens,
                                           _analyze_simple,
                                           i + 1,
                                           lvls,
                                           stop_statement,
                                           defines=defines)
            lvls['ifdef'] -= 1
            if lvls['ifdef'] == 0:
                assert (isinstance(expression, IfDefStatement))
                replacing_expression = parse_ifdef_block(
                    expression, defines, get_coord(all_tokens[:i - 1]))

                new_all_tokens = sqf.base_type.get_all_tokens(
                    tokens + replacing_expression)

                result, _ = parse_block(new_all_tokens,
                                        analyze_tokens,
                                        0,
                                        None,
                                        stop_statement,
                                        defines=defines)

                expression.prepend(tokens)

                expression = IfDefResult(expression, result.tokens)
                statements.append(expression)

                len_expression = len(expression.get_all_tokens())

                i += len_expression - len(tokens) - 1
                tokens = []
            else:
                tokens.append(expression)
                i += size + 1
        # finish ifdef: either the pending #ifdef can be closed here (the token
        # ends the statement or closes a tracked parenthesis), or we are nested
        # more than one #ifdef deep and hit #endif
        elif is_finish_ifdef_condition(tokens, lvls) and (
                is_end_statement(token, stop_statement)
                or is_finish_ifdef_parenthesis(token, lvls)
        ) or lvls['ifdef'] > 1 and token == Preprocessor('#endif'):

            if token != EndOfFile() and token not in CLOSE_PARENTHESIS:
                tokens.append(token)

            if_def = finish_ifdef(tokens, all_tokens, start, statements)
            return if_def, i - start
        # parse during ifdef
        elif lvls['ifdef'] != 0:
            stop = True
            tokens.append(token)

        # end ifdef controls
        if lvls['ifdef'] and token in (STOP_KEYWORDS['single'] +
                                       CLOSE_PARENTHESIS):
            lvls['ifdef_open_close'] -= 1
            if lvls['ifdef_open_close'] < 0:
                lvls['ifdef_open_close'] = 0

        if stop:
            pass
        # try to match a #define'd name and collect its arguments
        elif str(token) in defines:  # is a define
            stop, define_statement, arg_indexes = find_match_if_def(
                all_tokens, i, defines, token)

            if stop:
                arg_number = len(define_statement.args)

                # number of tokens the macro call spans: the name itself, plus
                # '(' and ')' when there are arguments, plus the arguments and
                # the commas between them
                extra_tokens_to_move = 1 + 2 * (
                    arg_number != 0) + 2 * arg_number - 1 * (arg_number != 0)

                replaced_expression = all_tokens[i:i + extra_tokens_to_move]

                # the `all_tokens` after replacement
                replacing_expression = replace_in_expression(
                    define_statement.expression, define_statement.args,
                    arg_indexes, all_tokens)

                new_all_tokens = all_tokens[:i - len(
                    tokens)] + tokens + replacing_expression + all_tokens[
                        i + extra_tokens_to_move:]

                new_start = i - len(tokens)

                expression, size = parse_block(new_all_tokens,
                                               analyze_tokens,
                                               new_start,
                                               lvls,
                                               stop_statement,
                                               defines=defines)

                # the all_tokens of the statement before replacement
                original_tokens_taken = len(replaced_expression) - len(
                    replacing_expression) + size

                original_tokens = all_tokens[i - len(tokens):i - len(tokens) +
                                             original_tokens_taken]

                if isinstance(expression, Statement):
                    expression = expression.content[0]

                if type(original_tokens[-1]) in (EndOfLine, Comment,
                                                 EndOfFile):
                    del original_tokens[-1]
                    original_tokens_taken -= 1

                expression = DefineResult(original_tokens, define_statement,
                                          expression)
                statements.append(expression)

                i += original_tokens_taken - len(tokens) - 1

                tokens = []
        if stop:
            pass
        elif token == ParserKeyword('['):
            lvls['[]'] += 1
            expression, size = parse_block(all_tokens,
                                           analyze_tokens,
                                           i + 1,
                                           lvls,
                                           stop_statement='single',
                                           defines=defines)
            lvls['[]'] -= 1
            tokens.append(expression)
            i += size + 1
        elif token == ParserKeyword('('):
            lvls['()'] += 1
            expression, size = parse_block(all_tokens,
                                           analyze_tokens,
                                           i + 1,
                                           lvls,
                                           stop_statement,
                                           defines=defines)
            lvls['()'] -= 1
            tokens.append(expression)
            i += size + 1
        elif token == ParserKeyword('{'):
            lvls['{}'] += 1
            expression, size = parse_block(all_tokens,
                                           analyze_tokens,
                                           i + 1,
                                           lvls,
                                           stop_statement,
                                           defines=defines)
            lvls['{}'] -= 1
            tokens.append(expression)
            i += size + 1
        elif token == ParserKeyword(']'):
            if lvls['[]'] == 0:
                raise SQFParenthesisError(
                    get_coord(all_tokens[:i]),
                    'Trying to close a square bracket that was never opened.')

            if statements:
                if isinstance(statements[0], DefineResult):
                    statements[0]._tokens = [
                        Array(
                            _analyze_array(statements[0]._tokens,
                                           analyze_tokens, all_tokens[:i]))
                    ]
                    return statements[0], i - start
                else:
                    raise SQFParserError(
                        get_coord(all_tokens[:i]),
                        'A statement %s cannot be in an array' %
                        Statement(statements))

            return Array(_analyze_array(tokens, analyze_tokens,
                                        all_tokens[:i])), i - start
        elif token == ParserKeyword(')'):
            if lvls['()'] == 0:
                raise SQFParenthesisError(
                    get_coord(all_tokens[:i]),
                    'Trying to close a parenthesis that was never opened.')

            if tokens:
                statements.append(analyze_tokens(tokens))

            return Statement(statements, parenthesis=True), i - start
        elif token == ParserKeyword('}'):
            if lvls['{}'] == 0:
                raise SQFParenthesisError(
                    get_coord(all_tokens[:i]),
                    'Trying to close a brace that was never opened.')

            if tokens:
                statements.append(analyze_tokens(tokens))

            return Code(statements), i - start
        # end of statement when not in preprocessor states
        elif all(lvls[lvl_type] == 0
                 for lvl_type in ('#define', '#include')) and is_end_statement(
                     token, stop_statement):
            if type(token) != EndOfFile:
                tokens.append(token)
            if tokens:
                statements.append(analyze_tokens(tokens))

            tokens = []
        elif token in (Preprocessor('#define'), Preprocessor('#include')):
            # note that `token` itself is not appended here: it is recovered
            # later, via all_tokens[start - 1], once the directive's statement ends
            if tokens:
                # a pre-processor starts a new statement
                statements.append(analyze_tokens(tokens))
                tokens = []

            lvls[token.value] += 1
            expression, size = parse_block(all_tokens,
                                           analyze_tokens,
                                           i + 1,
                                           lvls,
                                           stop_statement,
                                           defines=defines)
            lvls[token.value] -= 1

            statements.append(expression)
            i += size
        elif token == Keyword('#') and lvls['#define'] != 0:
            # inside a #define, the SQF `#` command is superseded by the
            # preprocessor's stringification operator
            tokens.append(Preprocessor('#'))
        elif type(token) in (EndOfLine, Comment, EndOfFile) and any(
                lvls[x] != 0 for x in {'#define', '#include'}):
            # pick up the token that triggered this statement
            tokens.insert(0, all_tokens[start - 1])
            if tokens[0] == Preprocessor('#define'):
                define_statement = _analyze_define(tokens)
                defines[define_statement.variable_name][len(
                    define_statement.args)] = define_statement
                statements.append(define_statement)
            else:
                statements.append(analyze_tokens(tokens))

            return Statement(statements), i - start
        elif type(token) != EndOfFile:
            tokens.append(token)
        i += 1

    if is_finish_ifdef_condition(tokens, lvls):
        return finish_ifdef(tokens, all_tokens, start, statements), i - start

    for lvl_type in ('[]', '()', '{}', 'ifdef'):
        if lvls[lvl_type] != 0:
            message = 'Parenthesis "%s" not closed' % lvl_type[0]
            if lvl_type == 'ifdef':
                message = '#ifdef statement not closed'

            raise SQFParenthesisError(get_coord(all_tokens[:start - 1]),
                                      message)

    if tokens:
        statements.append(analyze_tokens(tokens))

    return Statement(statements), i - start
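
A hedged sketch of a top-level invocation. The trailing `EndOfFile()` sentinel
and the `analyze_tokens` callback are inferred from how the body consumes them,
not a verified public entry point:

raw = ['x', ' ', '=', ' ', '1', ';']
stream = [identify_token(t) for t in raw] + [EndOfFile()]
result, consumed = parse_block(stream, analyze_tokens)
# `result` is the root Statement; `consumed` is the number of tokens read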
Example #8
def is_finish_ifdef_parenthesis(token, lvls):
    for lvl_type in ('()', '[]', '{}'):
        if lvls[lvl_type] != 0 and token == ParserKeyword(lvl_type[1]):
            return True
    return False
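
For instance, with one `()` level open, a closing `)` finishes the pending
#ifdef context while an unrelated `]` does not:

lvls = _LEVELS.copy()
lvls['()'] = 1
is_finish_ifdef_parenthesis(ParserKeyword(')'), lvls)  # -> True
is_finish_ifdef_parenthesis(ParserKeyword(']'), lvls)  # -> False: no open '[]'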
Example #9
def rindex(the_list, value):
    return len(the_list) - the_list[::-1].index(value) - 1


_LEVELS = {
    '[]': 0,
    '()': 0,
    '{}': 0,
    '#include': 0,
    '#define': 0,
    'ifdef': 0,
    'ifdef_open_close': 0
}

STOP_KEYWORDS = {
    'single': (ParserKeyword(';'), ),
    'both': (ParserKeyword(';'), ParserKeyword(',')),
}

OPEN_PARENTHESIS = (ParserKeyword('['), ParserKeyword('('), ParserKeyword('{'))
CLOSE_PARENTHESIS = (ParserKeyword(']'), ParserKeyword(')'),
                     ParserKeyword('}'))


def get_coord(tokens):
    return sqf.base_type.get_coord(''.join([str(x) for x in tokens]))


def add_coords(coord1, tokens):
    coord2 = get_coord(tokens)
    return coord1[0] + coord2[0], coord1[1] + coord2[1] - 1
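
A hedged illustration of the coordinate helpers; the exact origin convention is
defined by `sqf.base_type.get_coord`, which is assumed rather than shown here:

# coordinate just past everything serialized so far
coord = get_coord([Variable('x'), Space(), Keyword('=')])
# shift that coordinate by a few more trailing tokens
line, column = add_coords(coord, [Space(), Number(1)])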