예제 #1
0
		full_output = self.type + "(" + output + ")" + space + "{" + newline
		
		output = ""
		for expression in self.expressions:
			output = output + (tab * self.get_indent_level()) + expression.to_script() + newline
		
		full_output = full_output + output + (tab * (self.get_indent_level() - 1)) + "}"

		return full_output
	
	def read_expression(tokenizer, tree):
		"""Parse a switch/switch$ statement: condition in parentheses, body in braces."""
		# Step back two characters so we can inspect the character just before
		# the current position; a "$" selects the string variant (switch$).
		tokenizer.file.give_character_back()
		tokenizer.file.give_character_back()
		marker = tokenizer.file.read_character()
		switch_type = "switch$" if marker == "$" else "switch"
		tokenizer.file.read_character()  # re-consume the character we stepped over

		expression = SwitchExpression(switch_type, tokenizer=tokenizer)

		# Condition runs up to the closing ")".
		tokenizer.tokenize(stop_ats=[closing_parenthesis_token], tree=expression)
		expression.convert_expressions_to_conditionals()

		# Skip ahead to "{", then consume the body through "}".
		tokenizer.tokenize(stop_ats=[opening_curly_bracket_token], tree=expression)
		tokenizer.tokenize(stop_ats=[closing_curly_bracket_token], tree=expression)

		return expression

# Register both keyword spellings so the tokenizer dispatches matching
# input to SwitchExpression.read_expression.
Expression.add_keyword_regex(valid_switch, SwitchExpression)
Expression.add_keyword_regex(valid_switch_string, SwitchExpression)
예제 #2
0
        full_output = "package " + self.name_symbol.to_script(
        ) + space + "{" + newline

        output = ""
        for expression in self.expressions:
            output = output + (tab * self.get_indent_level()
                               ) + expression.to_script() + newline

        full_output = full_output + output + (
            tab * (self.get_indent_level() - 1)) + "};"

        return full_output

    def read_expression(tokenizer, tree):
        """Parse a package declaration: `package <name> { ... }`."""
        expression = PackageExpression(tokenizer=tokenizer)

        # Hand the keyword's trailing character back so reads line up.
        tokenizer.file.give_character_back()

        # Everything before "{" is the package name.
        tokenizer.tokenize(stop_ats=[opening_curly_bracket_token], tree=expression)
        expression.convert_expression_to_name()

        # Package body runs until the matching "}".
        tokenizer.tokenize(stop_ats=[closing_curly_bracket_token], tree=expression)

        return expression


# Register the package keyword with the expression dispatcher.
Expression.add_keyword_regex(valid_package, PackageExpression)
예제 #3
0
            ) + tokenizer.file.read_character()
            if tokenizer.buffer != "else if":
                tokenizer.file.give_character_back(ignore_whitespace=True)
                tokenizer.file.give_character_back(ignore_whitespace=True)

                tokenizer.buffer = "else"

        expression.type = tokenizer.buffer

        if tokenizer.buffer != "else":
            tokenizer.file.read_character()  # absorb first "("
            tokenizer.tokenize(stop_ats=[closing_parenthesis_token],
                               tree=expression)
            expression.move_expressions()

        tokenizer.tokenize(
            give_back_stop_ats=[opening_curly_bracket_token, semicolon_token],
            tree=expression)

        # figure out if this is a single line if-statement or not
        if tokenizer.file.read_character() == "{":
            tokenizer.tokenize(stop_ats=[closing_curly_bracket_token],
                               tree=expression)
        else:
            tokenizer.file.give_character_back()

        return expression


# Register if/else-if/else keywords with the expression dispatcher.
Expression.add_keyword_regex(valid_conditional, ConditionalExpression)
예제 #4
0
class ReturnExpression(Expression):
    """A `return` statement, optionally carrying a value expression."""

    def __init__(self, tokenizer=None):
        super().__init__(tokenizer=tokenizer)

    def __str__(self):
        return f"ReturnExpression({self.expressions})"

    def __repr__(self):
        return str(self)

    def to_script(self):
        # Concatenate the returned value's sub-expressions; prefix a single
        # space only when there is actually a value ("return x;" vs "return;").
        value = "".join(expression.to_script() for expression in self.expressions)
        output = f" {value}" if value else ""
        return f"return{output}{self.handle_semicolon()}"

    def read_expression(tokenizer, tree):
        """Parse everything up to (but not consuming) the terminating ";"."""
        expression = ReturnExpression(tokenizer=tokenizer)

        tokenizer.file.give_character_back()
        tokenizer.tokenize(give_back_stop_ats=[semicolon_token],
                           tree=expression)
        return expression


# Register the return keyword with the expression dispatcher.
Expression.add_keyword_regex(valid_return, ReturnExpression)
예제 #5
0
from eggscript_src.expressions.expression import Expression
from eggscript_src.regex import valid_break


class BreakExpression(Expression):
    """A bare `break` statement inside a loop or switch body."""

    def __init__(self, tokenizer=None):
        super().__init__(tokenizer=tokenizer)

    def __str__(self):
        return "BreakExpression()"

    def __repr__(self):
        return str(self)

    def to_script(self):
        # Emit the keyword plus whatever terminator handle_semicolon() decides.
        return f"break{self.handle_semicolon()}"

    def read_expression(tokenizer, tree):
        """`break` takes no operands; just realign the stream and build the node."""
        tokenizer.file.give_character_back()
        return BreakExpression(tokenizer=tokenizer)


# Register the break keyword with the expression dispatcher.
Expression.add_keyword_regex(valid_break, BreakExpression)
예제 #6
0
        return full_output

    def read_expression(tokenizer, tree):
        """Parse `for(init; condition; increment) { body }` (or a one-line body)."""
        expression = ForLoopExpression(tokenizer=tokenizer)

        # The header has three ";"/")"-delimited sections; consume each in turn
        # and move it into its dedicated slot on the expression.
        tokenizer.tokenize(stop_ats=[semicolon_token], tree=expression)
        expression.move_initiation_expressions()

        tokenizer.tokenize(stop_ats=[semicolon_token], tree=expression)
        expression.move_conditional_expressions()

        tokenizer.tokenize(stop_ats=[closing_parenthesis_token],
                           tree=expression)
        expression.move_increment_expressions()

        tokenizer.tokenize(
            give_back_stop_ats=[opening_curly_bracket_token, semicolon_token],
            tree=expression)

        # "{" means a braced body; anything else means a single-statement loop,
        # so hand the character back for the next reader.
        if tokenizer.file.read_character() == "{":
            tokenizer.tokenize(stop_ats=[closing_curly_bracket_token],
                               tree=expression)
        else:
            tokenizer.file.give_character_back()

        return expression


# Register the for keyword with the expression dispatcher.
Expression.add_keyword_regex(valid_for, ForLoopExpression)
from eggscript_src.expressions.expression import Expression
from eggscript_src.regex import valid_continue


class ContinueExpression(Expression):
    """A bare `continue` statement inside a loop body."""

    def __init__(self, tokenizer=None):
        super().__init__(tokenizer=tokenizer)

    def __str__(self):
        return "ContinueExpression()"

    def __repr__(self):
        return str(self)

    def to_script(self):
        # Emit the keyword plus whatever terminator handle_semicolon() decides.
        return f"continue{self.handle_semicolon()}"

    def read_expression(tokenizer, tree):
        """`continue` takes no operands; just realign the stream and build the node."""
        tokenizer.file.give_character_back()
        return ContinueExpression(tokenizer=tokenizer)


# Register the continue keyword with the expression dispatcher.
Expression.add_keyword_regex(valid_continue, ContinueExpression)
예제 #8
0
        space = " "
        tab = "\t"
        if get_config("minify") == True:
            newline = ""
            space = ""
            tab = ""

        full_output = f"default:" + newline

        output = ""
        for expression in self.expressions:
            output = output + (tab * self.get_indent_level()
                               ) + expression.to_script() + newline

        full_output = full_output + output

        return full_output

    def read_expression(tokenizer, tree):
        """Parse the body of a `default:` label inside a switch block."""
        expression = DefaultExpression(tokenizer=tokenizer)

        # The body extends until the next `case`, the next `default`, or the
        # switch's closing "}" — each of which is handed back unconsumed so the
        # enclosing parser can process it.
        tokenizer.tokenize(
            give_back_stop_ats=[closing_curly_bracket_token],
            buffer_give_back_stop_at=[valid_case, valid_default],
            tree=expression)

        return expression


# Register the default keyword with the expression dispatcher.
Expression.add_keyword_regex(valid_default, DefaultExpression)
		for conditional_expression in self.conditional_expressions:
			output = output + conditional_expression.to_script()

		full_output = "while(" + output + ")" + space + "{" + newline
		
		output = ""
		for expression in self.expressions:
			output = output + (tab * self.get_indent_level()) + expression.to_script() + newline
		
		full_output = full_output + output + (tab * (self.get_indent_level() - 1)) + "}"

		return full_output
	
	def read_expression(tokenizer, tree):
		"""Parse `while(condition) { body }` (or a single-statement body)."""
		expression = WhileLoopExpression(tokenizer=tokenizer)

		# Loop condition runs up to the closing ")".
		tokenizer.tokenize(stop_ats=[closing_parenthesis_token], tree=expression)
		expression.convert_expressions_to_conditionals()

		tokenizer.tokenize(give_back_stop_ats=[opening_curly_bracket_token, semicolon_token], tree=expression)

		# "{" means a braced body; anything else means a single-statement loop,
		# so hand the character back for the next reader.
		if tokenizer.file.read_character() == "{":
			tokenizer.tokenize(stop_ats=[closing_curly_bracket_token], tree=expression)
		else:
			tokenizer.file.give_character_back()

		return expression

# Register the while keyword with the expression dispatcher.
Expression.add_keyword_regex(valid_while, WhileLoopExpression)
예제 #10
0
                tab * (self.get_indent_level() - 1)) + "}"

        return full_output + self.handle_semicolon()

    def read_expression(tokenizer, tree):
        """Parse `new ClassName(args) { optional body }`."""
        expression = NewObjectExpression(tokenizer=tokenizer)
        tokenizer.file.give_character_back()

        # Class name sits before "(".
        tokenizer.tokenize(stop_ats=[opening_parenthesis_token],
                           tree=expression)
        expression.convert_expressions_to_class()

        # Constructor arguments run up to ")".
        tokenizer.tokenize(stop_ats=[closing_parenthesis_token],
                           tree=expression)
        expression.convert_expressions_to_arguments()

        tokenizer.tokenize(
            give_back_stop_ats=[opening_curly_bracket_token, semicolon_token],
            tree=expression)

        # An optional "{...}" initializer body may follow the argument list.
        next_character = tokenizer.file.read_character()
        if opening_curly_bracket_token.match(next_character):
            tokenizer.tokenize(stop_ats=[closing_curly_bracket_token],
                               tree=expression)
        else:
            tokenizer.file.give_character_back()

        return expression


# Register the new keyword with the expression dispatcher.
Expression.add_keyword_regex(valid_new, NewObjectExpression)
        tokenizer.tokenize(stop_ats=[opening_parenthesis_token],
                           tree=expression)
        expression.convert_expression_to_class()

        tokenizer.tokenize(stop_ats=[closing_parenthesis_token],
                           give_back_stop_ats=[colon_token],
                           tree=expression)

        if tokenizer.file.read_character() == ":":
            inheritance_expression = InheritanceExpression(tokenizer=tokenizer)
            inheritance_expression.child_class = expression.expressions[0]
            tokenizer.tokenize(stop_ats=[closing_parenthesis_token],
                               tree=inheritance_expression)
            inheritance_expression.convert_expression_to_super_class()

            expression.convert_expression_to_name(
                expression=inheritance_expression)
        else:
            expression.convert_expression_to_name()
            tokenizer.file.give_character_back()

        tokenizer.tokenize(stop_ats=[opening_curly_bracket_token],
                           tree=expression)
        tokenizer.tokenize(stop_ats=[closing_curly_bracket_token],
                           tree=expression)

        return expression


# Register the datablock keyword with the expression dispatcher.
Expression.add_keyword_regex(valid_datablock, DatablockExpression)
예제 #12
0
                               ) + expression.to_script() + newline

        full_output = full_output + output + (
            tab * (self.get_indent_level() - 1)) + "}"

        return full_output

    def read_expression(tokenizer, tree):
        """Parse `function name(args) { body }`."""
        expression = FunctionExpression(tokenizer=tokenizer)

        tokenizer.file.give_character_back()

        # Function name sits before "(".
        tokenizer.tokenize(
            stop_ats=[opening_parenthesis_token],
            inheritable_give_back_stop_at=[opening_parenthesis_token],
            tree=expression)
        expression.convert_expression_to_name()

        # Parameter list runs up to ")".
        tokenizer.tokenize(stop_ats=[closing_parenthesis_token],
                           tree=expression)
        expression.convert_expressions_to_arguments()

        # Skip ahead to "{", then consume the body through "}".
        tokenizer.tokenize(stop_ats=[opening_curly_bracket_token],
                           tree=expression)
        tokenizer.tokenize(stop_ats=[closing_curly_bracket_token],
                           tree=expression)

        return expression


# Register the function keyword with the expression dispatcher.
Expression.add_keyword_regex(valid_function, FunctionExpression)