Пример #1
0
def main(input):
	"""Run the interpreter pipeline on *input*: lex, parse, evaluate.

	Returns the parser's error message (a str) when parsing fails,
	otherwise the result of evaluating the parse tree.
	"""
	tokens = lexer.get_tokens(input)

	parser_tree = parser.parse(tokens)

	# The parser signals failure by returning an error string instead of
	# a tree; use isinstance instead of the brittle type() == comparison.
	if isinstance(parser_tree, str):
		return parser_tree
	return evaluator.eval(parser_tree)
Пример #2
0
def text_to_tokens(text, extensions=None, strict=False):
    """Tokenize *text* and return its tokens grouped by line number.

    Parameters
    ----------
    text : str
        Source text to tokenize.
    extensions : list, optional
        Extra token definitions, merged with the built-in extended set.
        Ignored when *strict* is true.
    strict : bool
        When true, use only the core token definitions.
    """
    if strict:
        # Strict mode: core definitions only.  (The original referenced
        # the never-assigned built-in extension list here and raised
        # UnboundLocalError on every strict call.)
        definitions = tokens.TK_DEFINITIONS
    else:
        built_in_exts = _extensions._EXTENDED_DEFINITIONS
        extensions = built_in_exts + extensions if extensions else built_in_exts
        # Splice extensions in just before the final catch-all definition.
        definitions = (tokens.TK_DEFINITIONS[:-1] + extensions +
                       tokens.TK_DEFINITIONS[-1:])
    tks = list(lexer.get_tokens(text, definitions))

    # Group tokens on the same lines together
    # by first sorting them by lineno.
    sorted_tokens = sort_tokens(tks)
    grouped_tokens = group_tokens(sorted_tokens)
    return grouped_tokens
Пример #3
0
def text_to_tokens(text, extensions=None, strict=False):
    """Tokenize *text*, returning its tokens grouped per source line.

    ``extensions`` are extra token definitions merged with the built-in
    extended set; they are ignored when ``strict`` is true, in which
    case only the core ``tokens.TK_DEFINITIONS`` are used.
    """
    if strict:
        definitions = tokens.TK_DEFINITIONS
        # NOTE: the original code unconditionally referenced
        # built_in_exts, which is never assigned in strict mode, so
        # every strict=True call crashed with UnboundLocalError.
    else:
        built_in_exts = _extensions._EXTENDED_DEFINITIONS
        if extensions:
            extensions = built_in_exts + extensions
        else:
            extensions = built_in_exts
        # Insert the extensions before the trailing catch-all entry.
        definitions = (tokens.TK_DEFINITIONS[:-1] +
                       extensions +
                       tokens.TK_DEFINITIONS[-1:])
    tks = list(lexer.get_tokens(text, definitions))

    # Group tokens on the same lines together
    # by first sorting them by lineno.
    sorted_tokens = sort_tokens(tks)
    grouped_tokens = group_tokens(sorted_tokens)
    return grouped_tokens
Пример #4
0
	else:
		subtree = posint()
		if subtree:
			tree['R3'].append(subtree)
			result = tree
	return result

@level_increment
@printer
def KEY(key):
	"""Match the keyword named *key* against the current token.

	On a match, returns a {'KEY': token} node and advances the token
	stream; returns None at end of input or on a mismatch.
	"""
	if current_token is None or current_token != KEYWORDS[key]:
		return None
	node = {'KEY': current_token}
	get_next_token()
	return node


if __name__ == "__main__":
	# Manual smoke test: lex an example file and run the parser on it.
	from main import set_logging
	from lexer import get_tokens
	set_logging(logging.DEBUG, logging.DEBUG, "./../log/")
	# The context manager guarantees the file is closed even if
	# readlines() raises (the original leaked the handle on error).
	with open('./../Input/example1.txt', 'r') as code_file:
		code_lines = code_file.readlines()
	chain_lines = get_tokens(code_lines)
	runParser(chain_lines)
Пример #5
0
import pandas as pd
import numpy as np
from lexer import get_tokens

# Tokenize the input program up front — presumably get_tokens populates
# module-level state consumed below (TODO confirm its side effects).
get_tokens("code.txt")
# Marker for the empty production in grammar rules (named after ε).
EPSILON = "e"


def get_productions(X):
    """Return every production X->A of the grammar as an LR(0) item,
    i.e. with a dot prepended to the right-hand side."""
    items = []
    for rule in grammar:
        left, right = rule.split('->')
        # Keep only productions with X on the left-hand side.
        if left != X:
            continue
        # The leading dot marks "nothing consumed yet".
        items.append('->'.join([left, '.' + right]))
    return items


def closure(I):
    # This function calculates the closure of the set of items I
    for production, a in I:
        # This means that the dot is at the end and can be ignored
        if production.endswith("."):
            continue
        lhs, rhs = production.split('->')
        alpha, B_beta = rhs.split('.')
        B = B_beta[0]
        beta = B_beta[1:]
Пример #6
0
        local_variables[term] = list()
        local_variables[term].append(in_val)
    else:
        try:
            local_variables[term][index] = in_val
        except IndexError:
            local_variables[term].append(in_val)


def runCompiler(tree):
    """Classify the parse *tree* and hand the result to the executer.

    Relies on the module-level names ``primary_code`` and
    ``input_parameters``, which ``get_classification`` is presumably
    expected to populate — TODO confirm against that function.
    NOTE: uses the Python 2 print statement; this module targets Python 2.
    """
    from executer import execute

    get_classification(tree)
    print "Start executing..."

    execute(primary_code, input_parameters)


if __name__ == "__main__":
    # NOTE: the original had `global input_parameters` / `global input`
    # here; `global` at module level is a no-op, so they were removed.
    set_logging(logging.DEBUG, logging.DEBUG, "./../log/")
    # The context manager guarantees the file handle is released even
    # if readlines() raises (the original leaked it on error).
    with open("./../Input/example1.txt", "r") as code_file:
        code_lines = code_file.readlines()
    chain_lines, input_variables = get_tokens(code_lines)
    # list(map(int, ...)) behaves identically under Python 2 and 3.
    input_parameters = list(map(int, input_variables))
    tree = runParser(chain_lines)
    runCompiler(tree)
Пример #7
0
def main():
    """Run the full AtomC pipeline (lex -> parse -> bind symbols ->
    validate -> compile -> execute) over each file in ``tests``.

    ``only`` restricts the run to a subset of test files; leave it
    empty to process every file in the directory.
    """
    only = ['9.c']
    for test in os.listdir('tests'):
        if only and test not in only:
            continue
        print('================== Analyzing file %s ==================' % test)

        with open(os.path.join('tests', test), 'rt') as infile:
            try:
                tokens = lexer.get_tokens(infile)
                # Dump the token stream grouped by source line.
                for ln, ltokens in itertools.groupby(tokens, lambda t: t.line):
                    print("Line %d: " % ln + ' '.join(map(str, ltokens)))

                syntax_parser = parser.SyntaxParser(tokens)

                for rule_name, pred in rules.syntax_rules.items():
                    syntax_parser.add_named_rule(rule_name, pred)

                syntax_parser.set_root_rule('unit', rules.root_rule)

                unit_node = syntax_parser.get_syntax_tree()
                if unit_node is not None:
                    print(
                        "===================== PARSED SYNTAX ====================="
                    )
                    print(unit_node)
                else:
                    print("SYNTAX PARSE FAILED")
                    # BUG FIX: the original fell through and dereferenced
                    # unit_node (None) below; skip to the next test file.
                    continue

                builtin_symbol_table = symbols.SymbolTable(
                    'builtin', symbols.StorageType.BUILTIN, None)
                for f in builtin.all_builtins:
                    builtin_symbol_table.add_symbol(f)

                global_symbol_table = symbols.SymbolTable(
                    'global', symbols.StorageType.GLOBAL, builtin_symbol_table)
                unit_node.bind_symbol_table(global_symbol_table)
                print(builtin_symbol_table)

                unit_node.validate()
                print(
                    "============== TYPE VALIDATION SUCCESSFUL ==============="
                )
                # alloc space for globals, zero-initialized
                mem = stack.DataStack(8192)
                globals_size = sum(
                    symbol.type.sizeof
                    for symbol in global_symbol_table.symbols
                    if symbol.storage == symbols.StorageType.GLOBAL)
                global_mem = mem.alloc(globals_size)
                mem.write_at(global_mem, b'\0' * globals_size)

                # write string constants in global memory so they can be passed by address
                for node in unit_node._children:
                    if isinstance(node, tree.ConstantLiteralNode):
                        if isinstance(node.constant_type, symbols.ArrayType):
                            if node.constant_type.elem_type == symbols.TYPE_CHAR:
                                # size must cover the NUL terminator
                                assert node.constant_type.size == len(
                                    node.constant_value) + 1
                                node.addr = mem.alloc(node.constant_type.size)
                                mem.write_at(
                                    node.addr,
                                    node.constant_value.encode('utf8') + b'\0')

                program = []
                unit_node.compile(program)
                main_func = global_symbol_table.get_symbol('main')
                if not main_func or not isinstance(main_func,
                                                   symbols.FunctionSymbol):
                    raise errors.AtomCRuntimeError(
                        "main function missing or main is not a function", 0)

                # Disassembly listing, flagging the program entry point.
                for addr, instr in enumerate(program):
                    print(
                        f"{addr}: {instr} {'<----- ENTRY POINT' if addr == main_func.offset else ''}"
                    )
                print("============== COMPILATION SUCCESSFUL ===============")
                print("Running program...")
                builtin.stdout = ''

                # Append a synthetic `main()` call plus HLT as the VM's
                # bootstrap sequence, then start execution there.
                entry_point = len(program)
                bootstrap = tree.FunctionCallExpressionNode(-1, 'main', [])
                bootstrap.bind_symbol_table(global_symbol_table)
                bootstrap.validate()
                bootstrap.compile(program)
                program.append(instructions.HLT(-1))
                vm = machine.AtomCVM(mem, program, entry_point, debug=True)
                vm.execute()
                print(">>>>> PROGRAM HALTED; OUTPUT: \n" + builtin.stdout)

            except errors.AtomCError as e:
                print(e)
                return

        print('=========================================================')
Пример #8
0
from splitter import get_commands
from performer import execute_commands


# A single class carries the shell's current state so it can be passed
# through functions in one piece instead of as many parameters.
class ShellStatus:
    """Mutable, shared state of the running shell session."""

    # Whether the main loop keeps running; only the exit command is
    # supposed to flip this to False.
    is_run: bool = True

    # Local shell variables (name -> value).
    environment: dict = {}

    # Commands read their input from and write their output to these
    # string "streams".
    input_stream: str = ""
    output_stream: str = ""


# Main REPL loop: read one command line, tokenize it, split into a
# command queue, and execute it against the shared ShellStatus.
while ShellStatus.is_run:
    command = input("Type your command: ")
    # NOTE(review): get_tokens is not among this file's visible imports
    # (only get_commands and execute_commands are) — confirm it is
    # provided elsewhere, otherwise this line raises NameError.
    token_queue = get_tokens(command, ShellStatus.environment)
    command_queue = get_commands(token_queue)
    execute_commands(command_queue, ShellStatus)

    # The final command in the pipeline is an empty pipe that moves
    # output_stream into input_stream, so the result to display ends
    # up in input_stream.
    if ShellStatus.input_stream != '':
        print(ShellStatus.input_stream)