Example 1
def identify_token(token):
    """
    The function that converts a token from tokenize to a BaseType.
    """
    if isinstance(token, (Comment, String)):
        return token
    if token == ' ':
        return Space()
    if token == '\t':
        return Tab()
    if token == '\\\n':
        return BrokenEndOfLine()
    if token in ('(', ')', '[', ']', '{', '}', ',', ';'):
        return ParserKeyword(token)
    if token in ('\n', '\r\n'):
        return EndOfLine(token)
    if token in ('true', 'false'):
        return Boolean(token == 'true')
    try:
        return Number(int(token))
    except ValueError:
        pass
    try:
        return Number(float(token))
    except ValueError:
        pass
    if token in PREPROCESSORS:
        return Preprocessor(token)
    if token.lower() in NAMESPACES:
        return Namespace(token)
    elif token.lower() in KEYWORDS:
        return Keyword(token)
    else:
        return Variable(token)
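
A self-contained sketch of the same cascading-classification idea, stripped of the library's token classes (classify, the KEYWORDS set and the tuple return values below are placeholders, not the project's API):

KEYWORDS = {'if', 'then', 'else', 'for'}   # illustrative subset only

def classify(token):
    """Classify a raw token string by falling through a chain of checks."""
    if token in ('true', 'false'):
        return 'Boolean', token == 'true'
    try:
        return 'Number', int(token)      # try the stricter parse first
    except ValueError:
        pass
    try:
        return 'Number', float(token)    # then fall back to floats
    except ValueError:
        pass
    if token.lower() in KEYWORDS:
        return 'Keyword', token
    return 'Variable', token             # anything else is treated as a name

print(classify('42'), classify('3.5'), classify('then'), classify('_x'))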
Example 2
def parse_switch(interpreter, code):
    conditions = []
    default_used = False

    for statement in code.base_tokens:
        base_tokens = statement.base_tokens

        # evaluate all the base_tokens, trying to obtain their values
        values = []
        for token in base_tokens:
            v = interpreter.value(token)
            values.append(v)

        if type(values[0]) != SwitchType:
            interpreter.exception(
                SQFParserError(
                    statement.position,
                    'Switch code can only start with "case" or "default"'))

        if values[0].keyword == Keyword('default'):
            if default_used:
                interpreter.exception(
                    SQFParserError(
                        code.position,
                        'Switch code contains more than 1 `default`'))
            default_used = True
            assert (isinstance(values[0].result, Code))
            conditions.append(('default', values[0].result))
        else:
            case_condition = values[0].result
            if len(values) == 1:
                conditions.append((case_condition, None))
            else:
                assert (len(values) == 3 and values[1] == Keyword(':'))
                outcome_statement = values[2]
                conditions.append((case_condition, outcome_statement))

    return conditions
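
The function above only collects (condition, outcome) pairs; matching them against a value is a separate step. A rough, standalone sketch of that second step, mirroring the None outcomes stored for code-less cases (run_switch and the lambdas are illustrative, not the interpreter's API):

def run_switch(value, conditions):
    """Walk (condition, outcome) pairs collected by a parse step like the one above."""
    default_outcome = None
    matched = False
    for condition, outcome in conditions:
        if condition == 'default':
            default_outcome = outcome
            continue
        if matched or condition == value:
            matched = True
            if outcome is not None:      # a code-less case shares the next case's code
                return outcome()
    return default_outcome() if default_outcome is not None else None

conditions = [(1, None), (2, lambda: 'one or two'), ('default', lambda: 'other')]
print(run_switch(1, conditions))   # 'one or two'
print(run_switch(9, conditions))   # 'other'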
Example 3
    def test_with_comments(self):
        # _x=2;/* the two
        #  the three
        #  the four
        #  */
        # _x=3
        s = Statement([
                Statement([
                    V('_x'),
                    Keyword('='),
                    N(2)], ending=';'),
                Statement([
                    Statement([Comment('/* the two \n the three\n the four\n */'),
                               EndOfLine('\n'),
                               V('_x')]),
                    Keyword('='),
                    N(3)
                ])
        ])

        s.set_position((1, 1))

        self.assertEqual(Keyword('='), s[1][1])
        self.assertEqual((5, 3), s[1][1].position)
Example 4
 def __init__(self, action=None):
     super().__init__(Code, Keyword('forEach'), Array, None, action)
Example 5
import math

from sqf.common_expressions import TryCatchExpression, ForEachExpression, \
    WhileDoExpression, ForFromToDoExpression, ForSpecDoExpression, SwitchDoExpression, \
    IfThenSpecExpression, IfThenElseExpression, IfThenExpression, IfThenExitWithExpression
from sqf.types import Keyword, Namespace, Number, Array, Code, Type, Boolean, String, Nothing, Variable
from sqf.exceptions import SQFParserError
from sqf.keywords import OP_ARITHMETIC, OP_COMPARISON, OP_LOGICAL
from sqf.expressions import BinaryExpression, UnaryExpression
from sqf.interpreter_types import SwitchType

OP_OPERATIONS = {
    # Arithmetic
    Keyword('+'): lambda x, y: x + y,
    Keyword('-'): lambda x, y: x - y,
    Keyword('*'): lambda x, y: x * y,
    Keyword('/'): lambda x, y: x / y,
    Keyword('%'): lambda x, y: x % y,
    Keyword('mod'): lambda x, y: x % y,
    Keyword('^'): lambda x, y: x**y,
    Keyword('max'): lambda x, y: max(x, y),
    Keyword('floor'): lambda x: math.floor(x),

    # Comparison
    Keyword('=='): lambda x, y: x == y,
    Keyword('!='): lambda x, y: x != y,
    Keyword('<'): lambda x, y: x < y,
    Keyword('>'): lambda x, y: x > y,
    Keyword('<='): lambda x, y: x <= y,
    Keyword('>='): lambda x, y: x >= y,
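
Because the table maps an operator token to a callable, evaluating a binary expression reduces to a dictionary lookup followed by a call. A minimal standalone version with plain strings as keys (OPS and apply_binary are illustrative names):

import math

OPS = {
    '+': lambda x, y: x + y,
    'mod': lambda x, y: x % y,
    '^': lambda x, y: x ** y,
    'max': lambda x, y: max(x, y),
    'floor': lambda x: math.floor(x),    # unary entries live in the same table
}

def apply_binary(op, lhs, rhs):
    """Dispatch a binary operator through the table."""
    return OPS[op](lhs, rhs)

print(apply_binary('^', 2, 10))    # 1024
print(apply_binary('max', 3, 7))   # 7
print(OPS['floor'](2.9))           # 2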
Example 6
 def test_keyword(self):
     self.assertEqual(Keyword('forEach'), Keyword('foreach'))
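
This passes because Keyword equality ignores case. A minimal sketch of how such a comparison can be implemented (CIKeyword is an illustrative stand-in, not the library's class):

class CIKeyword:
    """Keyword-like token whose equality and hash ignore case."""

    def __init__(self, value):
        self.value = value

    def __eq__(self, other):
        return (isinstance(other, CIKeyword)
                and self.value.lower() == other.value.lower())

    def __hash__(self):
        return hash(self.value.lower())   # keep hashing consistent with __eq__

    def __str__(self):
        return self.value                 # printing preserves the original spelling

assert CIKeyword('forEach') == CIKeyword('foreach')
assert str(CIKeyword('for')) == 'for'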
Example 7
 def test_reservedtoken(self):
     self.assertEqual('for', str(Keyword('for')))
Example 8
 def __init__(self, action=None):
     super().__init__(IfType, Keyword('then'), Code, None, action)
Example 9
 def __init__(self):
     super().__init__(Keyword('if'), Boolean, IfType, lambda v, i: v)
Example 10
 def __init__(self, action=None):
     super().__init__(TryType, Keyword('catch'), Code, None, action)
Example 11
 def __init__(self):
     super().__init__(Keyword('try'), Code, TryType, lambda v, i: v)
Example 12
COMMON_EXPRESSIONS = [
    CaseExpression(),
    DefaultExpression(),
    WithExpression(),
    WithDoExpression(),
    TryExpression(),
    TryCatchExpression(),
    ForEachExpression(),
    WhileExpression(),
    WhileDoExpression(),
    ForExpression(),
    ForFromExpression(),
    ForFromToExpression(),
    ForFromToStepExpression(),
    ForFromToDoExpression(),
    ForSpecExpression(),
    ForSpecDoExpression(),
    SwitchExpression(),
    SwitchDoExpression(),
    IfExpression(),
    ElseExpression(),
    IfThenSpecExpression(),
    IfThenElseExpression(),
    IfThenExpression(),
    IfThenExitWithExpression(),
    UnaryExpression(Keyword('params'), Array, Boolean,
                    lambda rhs_v, i: i.add_params(rhs_v)),
    BinaryExpression(Type, Keyword('params'), Array, Boolean,
                     lambda lhs_v, rhs_v, i: i.add_params(rhs_v)),
]
Example 13
def parse_block(all_tokens,
                analyze_tokens,
                start=0,
                initial_lvls=None,
                stop_statement='both',
                defines=None):
    if not initial_lvls:
        initial_lvls = _LEVELS
    if defines is None:
        defines = defaultdict(dict)
    lvls = initial_lvls.copy()

    statements = []
    tokens = []
    i = start
    if not all_tokens:
        return Statement([]), 0

    while i < len(all_tokens):
        token = all_tokens[i]

        # begin #ifdef controls
        if lvls['ifdef'] and token in OPEN_PARENTHESIS:
            lvls['ifdef_open_close'] += 1

        stop = False
        if token in (Preprocessor('#ifdef'), Preprocessor('#ifndef')):
            stop = True
            lvls['ifdef'] += 1
            expression, size = parse_block(all_tokens,
                                           _analyze_simple,
                                           i + 1,
                                           lvls,
                                           stop_statement,
                                           defines=defines)
            lvls['ifdef'] -= 1
            if lvls['ifdef'] == 0:
                assert (isinstance(expression, IfDefStatement))
                replacing_expression = parse_ifdef_block(
                    expression, defines, get_coord(all_tokens[:i - 1]))

                new_all_tokens = sqf.base_type.get_all_tokens(
                    tokens + replacing_expression)

                result, _ = parse_block(new_all_tokens,
                                        analyze_tokens,
                                        0,
                                        None,
                                        stop_statement,
                                        defines=defines)

                expression.prepend(tokens)

                expression = IfDefResult(expression, result.tokens)
                statements.append(expression)

                len_expression = len(expression.get_all_tokens())

                i += len_expression - len(tokens) - 1
                tokens = []
            else:
                tokens.append(expression)
                i += size + 1
        # finish ifdef
        elif is_finish_ifdef_condition(tokens, lvls) and (
                is_end_statement(token, stop_statement)
                or is_finish_ifdef_parenthesis(token, lvls)
        ) or lvls['ifdef'] > 1 and token == Preprocessor('#endif'):

            if token != EndOfFile() and token not in CLOSE_PARENTHESIS:
                tokens.append(token)

            if_def = finish_ifdef(tokens, all_tokens, start, statements)
            return if_def, i - start
        # parse during ifdef
        elif lvls['ifdef'] != 0:
            stop = True
            tokens.append(token)

        # end ifdef controls
        if lvls['ifdef'] and token in (STOP_KEYWORDS['single'] +
                                       CLOSE_PARENTHESIS):
            lvls['ifdef_open_close'] -= 1
            if lvls['ifdef_open_close'] < 0:
                lvls['ifdef_open_close'] = 0

        if stop:
            pass
        # try to match a #defined and get the arguments
        elif str(token) in defines:  # is a define
            stop, define_statement, arg_indexes = find_match_if_def(
                all_tokens, i, defines, token)

            if stop:
                arg_number = len(define_statement.args)

                extra_tokens_to_move = 1 + 2 * (
                    arg_number != 0) + 2 * arg_number - 1 * (arg_number != 0)

                replaced_expression = all_tokens[i:i + extra_tokens_to_move]

                # the `all_tokens` after replacement
                replacing_expression = replace_in_expression(
                    define_statement.expression, define_statement.args,
                    arg_indexes, all_tokens)

                new_all_tokens = all_tokens[:i - len(
                    tokens)] + tokens + replacing_expression + all_tokens[
                        i + extra_tokens_to_move:]

                new_start = i - len(tokens)

                expression, size = parse_block(new_all_tokens,
                                               analyze_tokens,
                                               new_start,
                                               lvls,
                                               stop_statement,
                                               defines=defines)

                # the all_tokens of the statement before replacement
                original_tokens_taken = len(replaced_expression) - len(
                    replacing_expression) + size

                original_tokens = all_tokens[i - len(tokens):i - len(tokens) +
                                             original_tokens_taken]

                if isinstance(expression, Statement):
                    expression = expression.content[0]

                if type(original_tokens[-1]) in (EndOfLine, Comment,
                                                 EndOfFile):
                    del original_tokens[-1]
                    original_tokens_taken -= 1

                expression = DefineResult(original_tokens, define_statement,
                                          expression)
                statements.append(expression)

                i += original_tokens_taken - len(tokens) - 1

                tokens = []
        if stop:
            pass
        elif token == ParserKeyword('['):
            lvls['[]'] += 1
            expression, size = parse_block(all_tokens,
                                           analyze_tokens,
                                           i + 1,
                                           lvls,
                                           stop_statement='single',
                                           defines=defines)
            lvls['[]'] -= 1
            tokens.append(expression)
            i += size + 1
        elif token == ParserKeyword('('):
            lvls['()'] += 1
            expression, size = parse_block(all_tokens,
                                           analyze_tokens,
                                           i + 1,
                                           lvls,
                                           stop_statement,
                                           defines=defines)
            lvls['()'] -= 1
            tokens.append(expression)
            i += size + 1
        elif token == ParserKeyword('{'):
            lvls['{}'] += 1
            expression, size = parse_block(all_tokens,
                                           analyze_tokens,
                                           i + 1,
                                           lvls,
                                           stop_statement,
                                           defines=defines)
            lvls['{}'] -= 1
            tokens.append(expression)
            i += size + 1
        elif token == ParserKeyword(']'):
            if lvls['[]'] == 0:
                raise SQFParenthesisError(
                    get_coord(all_tokens[:i]),
                    'Trying to close right parenthesis without them opened.')

            if statements:
                if isinstance(statements[0], DefineResult):
                    statements[0]._tokens = [
                        Array(
                            _analyze_array(statements[0]._tokens,
                                           analyze_tokens, all_tokens[:i]))
                    ]
                    return statements[0], i - start
                else:
                    raise SQFParserError(
                        get_coord(all_tokens[:i]),
                        'A statement %s cannot be in an array' %
                        Statement(statements))

            return Array(_analyze_array(tokens, analyze_tokens,
                                        all_tokens[:i])), i - start
        elif token == ParserKeyword(')'):
            if lvls['()'] == 0:
                raise SQFParenthesisError(
                    get_coord(all_tokens[:i]),
                    'Trying to close parenthesis without opened parenthesis.')

            if tokens:
                statements.append(analyze_tokens(tokens))

            return Statement(statements, parenthesis=True), i - start
        elif token == ParserKeyword('}'):
            if lvls['{}'] == 0:
                raise SQFParenthesisError(
                    get_coord(all_tokens[:i]),
                    'Trying to close brackets without opened brackets.')

            if tokens:
                statements.append(analyze_tokens(tokens))

            return Code(statements), i - start
        # end of statement when not in preprocessor states
        elif all(lvls[lvl_type] == 0
                 for lvl_type in ('#define', '#include')) and is_end_statement(
                     token, stop_statement):
            if type(token) != EndOfFile:
                tokens.append(token)
            if tokens:
                statements.append(analyze_tokens(tokens))

            tokens = []
        elif token in (Preprocessor('#define'), Preprocessor('#include')):
            # notice that `token` is ignored here. It will be picked up in the end
            if tokens:
                # a pre-processor starts a new statement
                statements.append(analyze_tokens(tokens))
                tokens = []

            lvls[token.value] += 1
            expression, size = parse_block(all_tokens,
                                           analyze_tokens,
                                           i + 1,
                                           lvls,
                                           stop_statement,
                                           defines=defines)
            lvls[token.value] -= 1

            statements.append(expression)
            i += size
        elif token == Keyword('#') and lvls['#define'] != 0:
            # The # sqf command is superseded by the preprocessor directive's stringification command
            tokens.append(Preprocessor('#'))
        elif type(token) in (EndOfLine, Comment, EndOfFile) and any(
                lvls[x] != 0 for x in {'#define', '#include'}):
            tokens.insert(
                0,
                all_tokens[start -
                           1])  # pick the token that triggered the statement
            if tokens[0] == Preprocessor('#define'):
                define_statement = _analyze_define(tokens)
                defines[define_statement.variable_name][len(
                    define_statement.args)] = define_statement
                statements.append(define_statement)
            else:
                statements.append(analyze_tokens(tokens))

            return Statement(statements), i - start
        elif type(token) != EndOfFile:
            tokens.append(token)
        i += 1

    if is_finish_ifdef_condition(tokens, lvls):
        return finish_ifdef(tokens, all_tokens, start, statements), i - start

    for lvl_type in ('[]', '()', '{}', 'ifdef'):
        if lvls[lvl_type] != 0:
            message = 'Parenthesis "%s" not closed' % lvl_type[0]
            if lvl_type == 'ifdef':
                message = '#ifdef statement not closed'

            raise SQFParenthesisError(get_coord(all_tokens[:start - 1]),
                                      message)

    if tokens:
        statements.append(analyze_tokens(tokens))

    return Statement(statements), i - start
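
Stripped of the preprocessor and #define handling, the core of parse_block is a recursive descent over brackets: recurse on every opener, return on the matching closer, and raise when levels do not balance. A much-reduced standalone sketch of that shape (parse and PAIRS are illustrative names, not the project's API):

PAIRS = {'[': ']', '(': ')', '{': '}'}

def parse(tokens, i=0, closer=None):
    """Return (nested_list, next_index); recurse on openers, stop on the expected closer."""
    out = []
    while i < len(tokens):
        tok = tokens[i]
        if tok in PAIRS:                       # opener: parse the inner block
            inner, i = parse(tokens, i + 1, PAIRS[tok])
            out.append(inner)
        elif tok in PAIRS.values():
            if tok != closer:
                raise SyntaxError('unexpected %r at %d' % (tok, i))
            return out, i + 1                  # matching closer: hand back to the caller
        else:
            out.append(tok)
            i += 1
    if closer is not None:
        raise SyntaxError('missing %r' % closer)
    return out, i

print(parse(['a', '(', 'b', '[', 'c', ']', ')', 'd'])[0])  # ['a', ['b', ['c']], 'd']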
Example 14
 def __init__(self):
     super().__init__(Keyword('default'), Type, SwitchType, lambda v, i:
                      (self.keyword, v))
Example 15
 def __init__(self):
     super().__init__(Keyword('with'), Namespace, WithType, lambda v, i: v)
Example 16
 def __init__(self, action=None):
     super().__init__(SwitchType, Keyword('do'), Code, None, action)
Example 17
 def __init__(self, action=None):
     if action is None:
         action = lambda lhs, rhs, i: i.execute_code(
             rhs, namespace_name=lhs.namespace.value)
     super().__init__(WithType, Keyword('do'), Code, None, action)
Example 18
 def __init__(self):
     super().__init__(Keyword('while'), Code, WhileType, lambda v, i: v)
Example 19
 def __init__(self):
     super().__init__(Keyword('for'), Array, ForSpecType, lambda v, i: v)
Example 20
 def __init__(self):
     super().__init__(Code, Keyword('else'), Code, ElseType,
                      lambda lhs, rhs, i: (lhs, rhs))
Example 21
 def __init__(self, action=None):
     super().__init__(ForSpecType, Keyword('do'), Code, None, action)
Example 22
 def test_code(self):
     self.assertEqual('{_x=2;}', str(Code([Statement([V('_x'), Keyword('='), N(2)], ending=';')])))
Example 23
 def __init__(self):
     super().__init__(Keyword('for'), String, ForType, lambda rhs, i: rhs)
Example 24
 def test_keyword(self):
     s = Statement([Space(), Keyword('for')])
     s.set_position((1, 1))
     self.assertEqual((1, 2), s[1].position)
Example 25
 def __init__(self):
     super().__init__(
         ForType, Keyword('step'), Number, ForType, lambda lhs, rhs, i:
         (lhs.variable, lhs.from_, lhs.to, rhs))
Example 26
for expression in EXPRESSIONS:
    if isinstance(expression, BinaryExpression):
        op = expression.types_or_values[1]
        BINARY_OPERATORS.add(op.value.lower())
    elif isinstance(expression, UnaryExpression):
        op = expression.types_or_values[0]
        UNARY_OPERATORS.add(op.value.lower())
    else:
        op = expression.types_or_values[0]
        NULARY_OPERATORS.add(op)

    KEYWORDS.add(op.value.lower())

OP_ARITHMETIC = [
    Keyword(s) for s in ('+', '-', '*', '/', '%', 'mod', '^', 'max', 'floor')
]

OP_LOGICAL = [Keyword(s) for s in ('&&', 'and', '||', 'or')]

OP_COMPARISON = [
    Keyword(s) for s in ('==', 'isequalto', '!=', '<', '>', '<=', '>=', '>>')
]

NAMESPACES = {
    'missionnamespace', 'profilenamespace', 'uinamespace', 'parsingnamespace'
}

# namespaces are parsed as such
KEYWORDS = KEYWORDS - NAMESPACES
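
The set difference keeps the keyword and namespace lookup sets disjoint, so a token can only match one of them during classification. A tiny standalone illustration (category and the literal sets below are placeholders):

NAMESPACES = {'missionnamespace', 'profilenamespace', 'uinamespace', 'parsingnamespace'}
KEYWORDS = {'if', 'then', 'for', 'missionnamespace'} - NAMESPACES   # drop any overlap

def category(token):
    if token.lower() in NAMESPACES:
        return 'Namespace'
    if token.lower() in KEYWORDS:
        return 'Keyword'
    return 'Variable'

print(category('missionNamespace'), category('then'), category('_x'))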
Example 27
    TryCatchExpression(),
    ForEachExpression(),
    WhileExpression(),
    WhileDoExpression(),
    ForExpression(),
    ForFromExpression(),
    ForFromToExpression(),
    ForFromToStepExpression(),
    ForFromToDoExpression(),
    ForSpecExpression(),
    ForSpecDoExpression(),
    SwitchExpression(),
    SwitchDoExpression(),
    IfExpression(),
    ElseExpression(),
    IfThenSpecExpression(),
    IfThenElseExpression(),
    IfThenExpression(),
    IfThenExitWithExpression(),
    UnaryExpression(Keyword('params'), Array, Boolean,
                    lambda rhs_v, i: i.add_params(rhs_v)),
    BinaryExpression(Type, Keyword('params'), Array, Boolean,
                     lambda lhs_v, rhs_v, i: i.add_params(rhs_v, lhs_v)),
    UnaryExpression(Keyword('call'), Code, None,
                    lambda rhs_v, i: i.execute_code(rhs_v)),
    BinaryExpression(
        Type, Keyword('call'), Code, None,
        lambda lhs_v, rhs_v, i: i.execute_code(rhs_v,
                                               extra_scope={"_this": lhs_v})),
]