def parse_range_designated_expr(desig_expr, default_values):
    """Expand a range designation into per-offset designated expressions.

    designation(desig_expr) yields the inclusive (first, last) offset pair;
    the designated value expression and source location are shared by every
    expanded offset, each of which is parsed against default_values.
    """
    first, last = designation(desig_expr)
    # hoist the shared expression/location, evaluated once as in the original
    value_expr = exp(desig_expr)
    location = loc(desig_expr)
    for offset in xrange(first, last + 1):
        parse_designated_expr(
            OffsetDesignatedExpression(offset, value_expr, location),
            default_values)
def exhaust_remaining_blocks(token_seq):
    """Consume every remaining conditional block until PENDIF heads token_seq.

    Each iteration peeks at the current token and dispatches the matching
    block-exhausting rule with token_seq as its sole argument.
    """
    block_rules = rules(exhaust_remaining_blocks)  # dispatch table, looked up once
    while True:
        token = peek(token_seq)
        if token == TOKENS.PENDIF:
            break
        block_rules[token](token_seq)
def parse_range_designated_expr(desig_expr, default_values):
    """Parse a range designation by expanding it into one
    OffsetDesignatedExpression per offset (inclusive bounds taken from
    designation(desig_expr)) and parsing each against default_values.
    """
    first, last = designation(desig_expr)
    # the designated value expression and source location are shared by
    # every expanded offset; exhaust() forces the lazy imap pipeline
    exhaust(
        imap(
            parse_designated_expr,
            imap(OffsetDesignatedExpression, xrange(first, last + 1),
                 repeat(exp(desig_expr)), repeat(loc(desig_expr))),
            repeat(default_values)
        )
    )
def exhaust_remaining_blocks(token_seq):
    """Consume every remaining pre-processor conditional block until the
    terminating PENDIF token is at the head of token_seq.
    """
    # repeatedly peek at the head token while it is not PENDIF, look up its
    # block-exhausting rule, and apply it with token_seq as sole argument ...
    exhaust(
        imap(
            apply,
            imap(
                rules(exhaust_remaining_blocks).__getitem__,
                takewhile(TOKENS.PENDIF.__ne__, imap(peek, repeat(token_seq)))
            ),
            repeat((token_seq,))
        )
    )
def update_instruction_references(instrs):
    """Yield each instruction unchanged while recording operands that
    reference another object; once the stream is exhausted, rebind every
    recorded operand to the replacement for its (possibly omitted)
    referenced instruction.
    """
    references = []
    for instr in instrs:
        # collect every operand that is referencing something ...
        for operand in operns(instr, ()):
            if isinstance(referenced_obj(operand, None),
                          (Operand, Reference, Instruction)):
                references.append(operand)
        yield instr
    # only runs after the caller has fully consumed the generator
    for ref in references:
        ref.obj = get_new_instr(ref, referenced_obj(ref))
def set_default_initializer(initializer, default_values):
    """Apply initializer onto default_values and return default_values.

    A complete ArrayType initialized by exactly one non-designated expression
    has that single expression replicated across every element first, then
    each designated expression is parsed against default_values.
    """
    target_type = c_type(default_values)
    if isinstance(target_type, ArrayType) \
            and not isinstance(initializer[0], DesignatedExpression) \
            and len(initializer) == 1 \
            and target_type.length is not None:
        location = loc(initializer)
        # replicate the lone expression over all len(target_type) elements
        initializer = Initializer(
            enumerate(repeat(initializer[0], len(target_type))),
            target_type(location),
            location)
    for desig_expr in initializer_desig_exprs(initializer, default_values):
        parse_designated_expr(desig_expr, default_values)
    return default_values
def set_default_initializer(initializer, default_values):
    """Fold initializer into default_values and return default_values.

    Complete ArrayTypes whose initializer contains a single non-designated
    expression get that value assigned throughout the array before parsing.
    """
    if isinstance(c_type(default_values), ArrayType) \
            and not isinstance(initializer[0], DesignatedExpression) \
            and len(initializer) == 1 \
            and c_type(default_values).length is not None:
        # replicate the lone expression across every element of the array
        initializer = Initializer(
            enumerate(repeat(initializer[0], len(c_type(default_values)))),
            c_type(default_values)(loc(initializer)), loc(initializer))
    # parse each designated expression against default_values ...
    exhaust(
        imap(parse_designated_expr,
             initializer_desig_exprs(initializer, default_values),
             repeat(default_values)))
    return default_values
def pre_processor(char_stream, location):  # returns pre_processing symbol or #identifier ...
    """Read a '#'-introduced pre-processing token from char_stream.

    Emits either the token-concatenation symbol ('##') or a '#name'
    directive token, dispatching through rules(pre_processor) and falling
    back to IDENTIFIER for unrecognized names.
    """
    values = consume(char_stream)
    if peek_or_terminal(char_stream) == TOKENS.NUMBER_SIGN:  # token concatenation symbol ...
        values += consume(char_stream)
    else:
        # skip horizontal whitespace, then take the directive's letters
        _ = exhaust(takewhile({' ', '\t', '\a'}.__contains__, char_stream))
        values += ''.join(takewhile(letters.__contains__, char_stream))
    return rules(pre_processor).get(values, IDENTIFIER)(values, location)
def merge_lines(char_seq):
    """Yield chars from char_seq with backslash line-continuations spliced out.

    A '\\' immediately followed by an end-of-line sequence drops both and
    re-emits the continued line stamped with the backslash's location.
    Terminates when char_seq is exhausted (consume raising StopIteration
    ends this Python-2 generator).
    """
    while True:
        char = consume(char_seq)
        if char == '\\' and isinstance(peek(char_seq), NewLineStr):  # if current char is \ followed by end of line seq
            # discard the end-of-line token(s) being escaped ...
            _ = exhaust(takewhile(lambda token: isinstance(token, NewLineStr), char_seq))
            # re-emit the next line repositioned at the backslash's location
            for char in get_repositioned_line(char_seq, loc(char)):
                yield char
        else:
            yield char
def get_repositioned_line(char_seq, location):  # get next line ...
    """Yield the remainder of the current line as Str objects stamped with
    location, recursively merging any further backslash continuations.
    """
    while not isinstance(peek(char_seq), NewLineStr):
        char = consume(char_seq)
        if char == '\\' and isinstance(peek(char_seq), NewLineStr):
            # another continuation: drop the end-of-line token(s) ...
            _ = exhaust(takewhile(lambda token: isinstance(token, NewLineStr), char_seq))
            # ... and keep emitting the following line at the same location
            for char in get_repositioned_line(char_seq, location):
                yield char
        else:
            yield Str(char, location)
def __calc_if(expr, token_seq, macros):
    """Emit the tokens of the conditional branch selected by expr, then
    discard every remaining block up to PENDIF.

    When expr is false the current block is dropped and the handler for the
    next PELIF/PELSE token is dispatched to locate a taken branch.
    """
    tokens = get_block(
        token_seq,
        terminating_with={TOKENS.PELIF, TOKENS.PELSE, TOKENS.PENDIF})
    if not expr:
        # condition failed: throw this block away and let the elif/else
        # rule produce the branch whose tokens should be emitted
        exhaust(tokens)
        tokens = rules(__calc_if)[peek(token_seq)](token_seq, macros)
    while True:
        # emit tokens which will be pre-processed ...
        try:
            token = consume(tokens)
        except StopIteration:
            break
        yield token
    exhaust_remaining_blocks(token_seq)
def update_instruction_references( instrs ):  # update operand references, since they may referencing omitted instructions
    """Generator: yield each instruction unchanged while collecting operands
    that reference another object; after the stream is exhausted, rebind
    every collected operand to the replacement of its referenced instruction.
    """
    references = []
    for instr in instrs:
        references.extend(
            ifilter(  # get all operands that are referencing something ...
                lambda o: isinstance(referenced_obj(o, None), (Operand, Reference, Instruction)),
                operns(instr, ())))
        yield instr
    # equivalent to:
    # for ref in references:
    #     ref.obj = get_new_instr(ref, referenced_obj(ref))
    exhaust(
        imap(setattr, references, repeat('obj'),
             imap(get_new_instr, references, imap(referenced_obj, references))))
def get_repositioned_line(char_seq, location):
    """Yield the rest of the current line as Str objects stamped with
    location, recursively merging any further backslash continuations.
    """
    while not isinstance(peek(char_seq), NewLineStr):
        current = consume(char_seq)
        if current != '\\' or not isinstance(peek(char_seq), NewLineStr):
            # ordinary character: re-stamp it with the merged location
            yield Str(current, location)
        else:
            # continuation: drop the end-of-line token(s) and recurse
            exhaust(takewhile(lambda c: isinstance(c, NewLineStr), char_seq))
            for repositioned in get_repositioned_line(char_seq, location):
                yield repositioned
def pre_processor(char_stream, location):
    """Read a '#'-introduced pre-processing token from char_stream.

    Produces either the token-concatenation symbol ('##') or a '#name'
    directive token, dispatching through rules(pre_processor) with
    IDENTIFIER as the fallback for unrecognized names.
    """
    symbol = consume(char_stream)
    if peek_or_terminal(char_stream) == TOKENS.NUMBER_SIGN:
        # a second '#' follows immediately: token concatenation symbol ...
        symbol += consume(char_stream)
    else:
        # skip horizontal whitespace, then gather the directive's letters
        exhaust(takewhile(lambda c: c in (' ', '\t', '\a'), char_stream))
        symbol += ''.join(takewhile(lambda c: c in letters, char_stream))
    return rules(pre_processor).get(symbol, IDENTIFIER)(symbol, location)
def merge_lines(char_seq):
    """Yield chars from char_seq with backslash line-continuations removed.

    A '\\' directly followed by an end-of-line sequence drops both and
    re-emits the continued line repositioned at the backslash's location.
    """
    while True:
        current = consume(char_seq)
        if current != '\\' or not isinstance(peek(char_seq), NewLineStr):
            yield current
            continue
        # discard the end-of-line token(s) the backslash escapes ...
        exhaust(takewhile(lambda c: isinstance(c, NewLineStr), char_seq))
        for repositioned in get_repositioned_line(char_seq, loc(current)):
            yield repositioned
def __calc_if(expr, token_seq, macros): tokens = get_block( token_seq, terminating_with={TOKENS.PELIF, TOKENS.PELSE, TOKENS.PENDIF}) # get a single block if not expr: # if expression is false we have to exhaust ... and search for a true elif expression, else or endif _ = exhaust(tokens) tokens = rules(__calc_if)[peek(token_seq)](token_seq, macros) for t in imap( consume, repeat(tokens)): # emit tokens which will be pre-processed ... yield t exhaust_remaining_blocks(token_seq)
# NOTE(review): this chunk begins inside a class body — the first def below is
# a method of the enclosing class (IncrementExpression, per the sibling
# definition elsewhere in this file); confirm against the full source.
    def __init__(self, exp, ctype, location=LocationNotSet):
        # increment lowers to 'x += 1' semantics via the PLUS_EQUAL operator
        super(IncrementExpression, self).__init__(exp, TOKENS.PLUS_EQUAL, ctype, location)


class DecrementExpression(IncDecExpr):
    def __init__(self, exp, ctype, location=LocationNotSet):
        # decrement lowers to 'x -= 1' semantics via the MINUS_EQUAL operator
        super(DecrementExpression, self).__init__(exp, TOKENS.MINUS_EQUAL, ctype, location)


# prefix/postfix variants are distinguished only by type, not behavior here
class PrefixIncrementExpression(IncrementExpression):
    pass


class PostfixIncrementExpression(IncrementExpression):
    pass


class PrefixDecrementExpression(DecrementExpression):
    pass


class PostfixDecrementExpression(DecrementExpression):
    pass


# install one accessor function per property name onto this module
# (via get_attribute_func), e.g. oper(obj), exp(obj), ...
property_names = 'oper', 'exp', 'lvalue', 'right_exp', 'left_exp', 'designation'
exhaust( imap(setattr, repeat(current_module), property_names, imap(get_attribute_func, property_names)))
def replace_instrs(new_instr, old_instrs):
    """Replace every instruction in old_instrs with new_instr; return new_instr."""
    for old_instr in old_instrs:
        replace_instr(old_instr, new_instr)
    return new_instr
# NOTE(review): chunk begins mid-dictionary — the key of the first lambda
# (presumably a set-stack-pointer style instruction) lies outside this view.
    lambda instr, cpu, mem, _: setattr(cpu, 'stack_pointer', pop(cpu, mem)),
    LoadInstructionPointer:
    # push the address of the instruction following this one ...
    lambda instr, cpu, mem, _: push(cpu.instr_pointer + instr_size(instr), cpu, mem),
    PostfixUpdate: postfix_update,
    Allocate:
    # adjust the stack pointer by the operand word following the instruction
    lambda instr, cpu, mem, _: setattr(
        cpu, 'stack_pointer',
        cpu.stack_pointer + mem[cpu.instr_pointer + word_size]),
    Dup:
    # NOTE(review): repeat(<generator expression>, 2) yields the SAME
    # generator object twice; the second pass finds it exhausted, so the
    # popped values appear to be pushed only once rather than duplicated —
    # confirm whether tuple(...) was intended before the repeat.
    lambda instr, cpu, mem, _: exhaust(
        starmap(
            push,
            izip(
                chain.from_iterable(
                    repeat((pop(cpu, mem)
                            for _ in xrange(mem[cpu.instr_pointer + word_size] / word_size)), 2)),
                repeat(cpu),
                repeat(mem),
            ))),
    Swap:
    # NOTE(review): reversed() requires a sequence; passing a generator
    # expression raises TypeError at runtime — likely needs the pops
    # materialized (e.g. tuple(...)) first. Flagged, not silently changed.
    lambda instr, cpu, mem, _: exhaust(
        starmap(
            push,
            izip(
                reversed(
                    pop(cpu, mem)
                    for _ in xrange(mem[cpu.instr_pointer + word_size] / word_size)),
                repeat(cpu),
                repeat(mem),
            ))),
def main():
    """Command-line driver: parse compiler arguments and run exactly one of
    preprocess / assembly / compile / archive / link (to VM or executable).
    """
    cli = argparse.ArgumentParser(description='C Compiler ...')
    cli.add_argument('files', nargs='+')
    cli.add_argument('-O', '--optimize', default=0, nargs=1, help='Optimization Level')
    cli.add_argument('-E', '--preprocess', action='store_true', default=False, help='Output preprocessor and stop.')
    cli.add_argument('-S', '--assembly', action='store_true', default=False, help='Output instructions readable text.')
    cli.add_argument('-c', '--compile', action='store_true', default=False, help='Compile, but not link.')
    cli.add_argument('-static', '--static', action='store_true', default=True, help='Static Linking (default).')
    cli.add_argument('-shared', '--shared', action='store_true', default=False, help='Shared Linking.')
    cli.add_argument('--vm', action='store_true', default=False, help='Execute code on Virtual Machine.')
    cli.add_argument('-a', '--archive', action='store_true', default=False, help='Archive files into a single output')
    cli.add_argument('-o', '--output', default=[], nargs='?', action='append', help='Name of output, file(s) default is the original')
    cli.add_argument('-I', '--Include', default=[], nargs='?', action='append', help='Directories to be used by the preprocessor when searching for files.')
    cli.add_argument('-L', '--Libraries', default=[], nargs='?', action='append', help='Directories to be used by the linker when searching for libraries')
    cli.add_argument('-l', '--libraries', default=[], nargs='?', action='append', help='Name of libraries to be used when searching for symbols.')
    args = cli.parse_args()
    # standard search paths plus each input file's own directory ...
    args.Include += std_include_dirs + list(set(imap(os.path.dirname, args.files)))
    args.Libraries += std_libraries_dirs
    args.libraries += std_libraries
    # lazily enumerate every existing library file from dir x name products
    libraries = ifilter(os.path.isfile, starmap(os.path.join, product(args.Libraries, args.libraries)))
    optimizer = lambda instrs: optimize(instrs, zero_level_optimization)
    if args.optimize and args.optimize[0] == '1':
        optimizer = lambda instrs: optimize(instrs, first_level_optimization)
    if args.preprocess:
        exhaust(imap(sys.stdout.write, preprocess(args.files, args.Include)))
    elif args.assembly:
        exhaust(imap(sys.stdout.write, assembly(args.files, args.Include, libraries, optimizer)))
    elif args.compile:
        if args.output:  # if output(s) giving then check it matches the number of inputs ...
            output_files = error_if_not_value(repeat(len(args.output), 1), len(args.files)) and args.output
        else:
            output_files = imap('{0}.o.p'.format, imap(lambda f: os.path.splitext(f)[0], args.files))
        for input_file, output_file in izip(args.files, output_files):
            symbol_table = linker.library(symbols(input_file, args.Include, optimizer))
            with open(output_file, 'wb') as file_obj:
                pickle.dump(symbol_table, file_obj)
    elif args.archive:
        symbol_table = SymbolTable()
        error_if_not_value(repeat(len(args.output), 1), 1)  # archives require a single output which has no default ...
        for input_file in args.files:  # compile all files into a single symbol_table ...
            symbol_table = linker.library(symbols(input_file, args.Include, optimizer), symbol_table)
        with open(args.output[0], 'wb') as file_obj:  # dump symbol_table ...
            pickle.dump(symbol_table, file_obj)
    elif args.shared:
        raise NotImplementedError
    else:  # default: compile and statically link ...
        instructions = instrs(args.files, args.Include, libraries, optimizer)
        if args.vm:  # if we requested a vm then execute instructions ...
            vm.start(instructions)
        else:  # otherwise emit single executable file ...
            _ = args.output and error_if_not_value(repeat(len(args.output), 1), 1, Location('cc.py', '', ''))
            file_output = args.output and args.output[0] or 'a.out.p'  # if not giving an output use default a.out.p
            with open(file_output, 'wb') as file_obj:
                pickle.dump(tuple(instructions), file_obj)
def exhaust_else_block(token_seq):
    """Consume and discard an entire #else block from the token stream."""
    for _ in _else_block(token_seq, None):
        pass
def exhaust_elif_block(token_seq):
    """Consume and discard an #elif block, stopping at the next
    PELIF/PELSE/PENDIF terminator."""
    terminators = {TOKENS.PELIF, TOKENS.PELSE, TOKENS.PENDIF}
    for _ in get_block(token_seq, terminating_with=terminators):
        pass
def exhaust_elif_block(token_seq):
    """Consume and discard an #elif block up to the next PELIF/PELSE/PENDIF."""
    exhaust(
        get_block(token_seq, terminating_with={TOKENS.PELIF, TOKENS.PELSE, TOKENS.PENDIF}))
def unbind_instructions(obj, offset):
    """Strip the binding attributes ('offset', 'load_address') from obj
    and return obj.

    NOTE(review): the offset parameter is unused here — kept for the
    (obj, offset) signature callers appear to rely on; confirm before removing.
    """
    for attribute in ('offset', 'load_address'):
        delattr(obj, attribute)
    return obj
class IncrementExpression(IncDecExpr):  # Statement/Expression
    def __init__(self, exp, ctype, location=LocationNotSet):
        # increment lowers to 'x += 1' semantics via the PLUS_EQUAL operator
        super(IncrementExpression, self).__init__(exp, TOKENS.PLUS_EQUAL, ctype, location)


class DecrementExpression(IncDecExpr):
    def __init__(self, exp, ctype, location=LocationNotSet):
        # decrement lowers to 'x -= 1' semantics via the MINUS_EQUAL operator
        super(DecrementExpression, self).__init__(exp, TOKENS.MINUS_EQUAL, ctype, location)


# prefix/postfix variants are distinguished only by type, not behavior here
class PrefixIncrementExpression(IncrementExpression):
    pass


class PostfixIncrementExpression(IncrementExpression):
    pass


class PrefixDecrementExpression(DecrementExpression):
    pass


class PostfixDecrementExpression(DecrementExpression):
    pass


# install one accessor function per property name onto this module
# (via get_attribute_func), e.g. oper(obj), exp(obj), ...
property_names = 'oper', 'exp', 'lvalue', 'right_exp', 'left_exp', 'designation'
exhaust(imap(setattr, repeat(current_module), property_names, imap(get_attribute_func, property_names)))
def main():
    """Entry point: parse compiler command-line arguments and dispatch to
    one of preprocess / assembly / compile / archive / link (VM or file).
    """
    cli = argparse.ArgumentParser(description='C Compiler ...')
    cli.add_argument('files', nargs='+')
    cli.add_argument('-O', '--optimize', default=0, nargs=1, help='Optimization Level')
    cli.add_argument('-E', '--preprocess', action='store_true', default=False, help='Output preprocessor and stop.')
    cli.add_argument('-S', '--assembly', action='store_true', default=False, help='Output instructions readable text.')
    cli.add_argument('-c', '--compile', action='store_true', default=False, help='Compile, but not link.')
    cli.add_argument('-static', '--static', action='store_true', default=True, help='Static Linking (default).')
    cli.add_argument('-shared', '--shared', action='store_true', default=False, help='Shared Linking.')
    cli.add_argument('--vm', action='store_true', default=False, help='Execute code on Virtual Machine.')
    cli.add_argument('-a', '--archive', action='store_true', default=False, help='Archive files into a single output')
    cli.add_argument('-o', '--output', default=[], nargs='?', action='append', help='Name of output, file(s) default is the original')
    cli.add_argument(
        '-I', '--Include', default=[], nargs='?', action='append',
        help='Directories to be used by the preprocessor when searching for files.')
    cli.add_argument(
        '-L', '--Libraries', default=[], nargs='?', action='append',
        help='Directories to be used by the linker when searching for libraries')
    cli.add_argument(
        '-l', '--libraries', default=[], nargs='?', action='append',
        help='Name of libraries to be used when searching for symbols.')
    args = cli.parse_args()
    # standard search paths plus each input file's own directory ...
    args.Include += std_include_dirs + list(set(imap(os.path.dirname, args.files)))
    args.Libraries += std_libraries_dirs
    args.libraries += std_libraries
    # lazily enumerate existing library files from dir x name products
    libraries = ifilter(os.path.isfile, starmap(os.path.join, product(args.Libraries, args.libraries)))
    optimizer = lambda instrs: optimize(instrs, zero_level_optimization)
    if args.optimize and args.optimize[0] == '1':
        optimizer = lambda instrs: optimize(instrs, first_level_optimization)
    if args.preprocess:
        exhaust(imap(sys.stdout.write, preprocess(args.files, args.Include)))
    elif args.assembly:
        exhaust(imap(sys.stdout.write, assembly(args.files, args.Include, libraries, optimizer)))
    elif args.compile:
        if args.output:  # if output(s) giving then check it matches the number of inputs ...
            output_files = error_if_not_value(repeat(len(args.output), 1), len(args.files)) and args.output
        else:
            output_files = imap('{0}.o.p'.format, imap(lambda f: os.path.splitext(f)[0], args.files))
        for input_file, output_file in izip(args.files, output_files):
            symbol_table = linker.library(symbols(input_file, args.Include, optimizer))
            with open(output_file, 'wb') as file_obj:
                pickle.dump(symbol_table, file_obj)
    elif args.archive:
        symbol_table = SymbolTable()
        error_if_not_value(repeat(len(args.output), 1), 1)  # archives require a single output which has no default ...
        for input_file in args.files:  # compile all files into a single symbol_table ...
            symbol_table = linker.library(symbols(input_file, args.Include, optimizer), symbol_table)
        with open(args.output[0], 'wb') as file_obj:  # dump symbol_table ...
            pickle.dump(symbol_table, file_obj)
    elif args.shared:
        raise NotImplementedError
    else:  # default: compile and statically link ...
        instructions = instrs(args.files, args.Include, libraries, optimizer)
        if args.vm:  # if we requested a vm then execute instructions ...
            vm.start(instructions)
        else:  # otherwise emit single executable file ...
            _ = args.output and error_if_not_value(repeat(len(args.output), 1), 1, Location('cc.py', '', ''))
            file_output = args.output and args.output[0] or 'a.out.p'  # if not giving an output use default a.out.p
            with open(file_output, 'wb') as file_obj:
                pickle.dump(tuple(instructions), file_obj)