def link_binary(self, objcopy, src, dst):
    """Link a binary file with no bells and whistles."""
    ld_target = dst
    cmd = ([self.__command, "--entry=" + str(PlatformVar("entry"))] + listify(src) +
           self.__linker_script + self.__linker_flags_extra)
    # Use objcopy if it was given.
    if objcopy:
        (dst_base, dst_ext) = os.path.splitext(dst)
        dst_bin = dst_base + ".out"
        objcopy_cmd = [objcopy, "--output-target=binary", dst_bin, dst]
        ld_target = dst_bin
    # Otherwise link directly into binary.
    else:
        cmd += ["--oformat=binary"]
    cmd += ["-o", ld_target]
    # Run linker command.
    (so, se) = run_command(cmd)
    if 0 < len(se) and is_verbose():
        print(se)
    # Only run objcopy command if it was required.
    if objcopy:
        (so_add, se) = run_command(objcopy_cmd)
        if 0 < len(se) and is_verbose():
            print(se)
        so += so_add
    return so

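# run_command() is called throughout this file but is not part of this excerpt.
# A minimal sketch, assuming it merely wraps subprocess and returns decoded
# (stdout, stderr); the project's real helper may additionally stringify
# non-string arguments or raise on a nonzero exit status.
def run_command(cmd):
    """Run an external command, return (stdout, stderr) as text (assumed behavior)."""
    import subprocess
    proc = subprocess.Popen([str(ii) for ii in cmd], stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    (so, se) = proc.communicate()
    return (so.decode(), se.decode())
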
def generate_linker_script(self, dst, modify_start=False):
    """Get linker script from linker, improve it, write improved linker script to given file."""
    (so, se) = run_command([self.__command, "--verbose"] + self.__linker_flags_extra)
    if 0 < len(se) and is_verbose():
        print(se)
    # Linker script is the block of code between lines of multiple '=':s.
    match = re.match(r'.*\n=+\s*\n(.*)\n=+\s*\n.*', so, re.DOTALL)
    if not match:
        raise RuntimeError("could not extract script from linker output")
    ld_script = match.group(1)
    # Remove unwanted symbol definitions one at a time.
    unwanted_symbols = ["__bss_end__", "__bss_start__", "__end__", "__bss_start", "_bss_end__", "_edata", "_end"]
    for ii in unwanted_symbols:
        ld_script = re.sub(r'\n([ \f\r\t\v]+)(%s)(\s*=[^\n]+)\n' % (ii), r'\n\1/*\2\3*/\n', ld_script, flags=re.MULTILINE)
    ld_script = re.sub(r'SEGMENT_START\s*\(\s*(\S+)\s*,\s*\d*x?\d+\s*\)', r'SEGMENT_START(\1, %s)' % (str(PlatformVar("entry"))), ld_script, flags=re.MULTILINE)
    if modify_start:
        ld_script = re.sub(r'(SEGMENT_START.*\S)\s*\+\s*SIZEOF_HEADERS\s*;', r'\1;', ld_script, flags=re.MULTILINE)
    fd = open(dst, "w")
    fd.write(ld_script)
    fd.close()
    if is_verbose():
        print("Wrote linker script '%s'." % (dst))
    return ld_script

def crunch_entry_push(self, op):
    """Crunch amd64/ia32 push directives from given line listing."""
    lst = self.want_label(op)
    if not lst:
        return
    ii = lst[0] + 1
    jj = ii
    stack_decrement = 0
    reinstated_lines = []
    while True:
        current_line = self.__content[jj]
        match = re.match(r'\s*(push\w).*%(\w+)', current_line, re.IGNORECASE)
        if match:
            stack_decrement += get_push_size(match.group(1))
            jj += 1
            continue
        # Preserve comment lines as they are.
        match = re.match(r'^\s*[#;].*', current_line, re.IGNORECASE)
        if match:
            reinstated_lines += [current_line]
            jj += 1
            continue
        # Some types of lines can be in the middle of pushing.
        if is_reinstate_line(current_line):
            reinstated_lines += [current_line]
            jj += 1
            continue
        # Stop at stack decrement.
        match = re.match(r'\s*sub.*\s+[^\d]*(\d+),\s*%(rsp|esp)', current_line, re.IGNORECASE)
        if match:
            # Align to 16 bytes if necessary.
            if osname_is_linux() and osarch_is_64_bit():
                if osarch_is_amd64():
                    # Just ignore increment, there's probably enough stack.
                    self.__content[jj] = re.sub(r'subq(\s*).*', r'andq\g<1>$0xFFFFFFFFFFFFFFF0, %rsp', current_line)
                else:
                    raise RuntimeError("no stack alignment instruction for current architecture")
            else:
                total_decrement = int(match.group(1)) + stack_decrement
                self.__content[jj] = re.sub(r'\d+', str(total_decrement), current_line)
            break
        # Do nothing if suspicious instruction is found.
        if is_verbose():
            print("Unknown header instruction found, aborting erase: '%s'" % (current_line.strip()))
        return
    if is_verbose():
        print("Erasing function header from '%s': %i lines" % (op, jj - ii - len(reinstated_lines)))
    self.__content[ii:jj] = reinstated_lines

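# get_push_size() above is not defined in this excerpt. A sketch, assuming the
# push mnemonic suffix alone decides how many bytes the instruction removes
# from the stack pointer; the real helper may handle additional cases.
def get_push_size(op):
    """Return stack cost of a push mnemonic in bytes (assumed mapping)."""
    ins = op.lower()
    if "pushq" == ins:
        return 8
    elif "pushl" == ins:
        return 4
    elif "pushw" == ins:
        return 2
    raise RuntimeError("push size not known for instruction '%s'" % (op))
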
def link(self, src, dst, extra_args=[]):
    """Link a file."""
    cmd = ([self.__command, src, "-o", dst] + self.__linker_flags + self.get_library_directory_list() +
           self.get_library_list() + extra_args + self.__linker_script + self.__linker_flags_extra)
    (so, se) = run_command(cmd)
    if 0 < len(se) and is_verbose():
        print(se)
    return so

def get_library_name(self, op):
    """Get actual name of library."""
    if op.startswith("/"):
        return op
    # Check if the library is specified verbatim. If yes, no need to expand.
    if re.match(r'lib.+\.so(\..*)?', op):
        return op
    libname = "lib%s.so" % (op)
    # Search in library directories only.
    for ii in self.__library_directories:
        current_libname = locate(ii, libname)
        if not current_libname:
            continue
        # Check if the supposed shared library is a linker script.
        if file_is_ascii_text(current_libname):
            ret = read_linker_script_library_name(current_libname)
            if ret:
                if is_verbose():
                    print("'%s' is a linker script, actual library name: '%s'" % (libname, ret))
                return ret
        # Stop at first match.
        break
    return libname

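# locate() and read_linker_script_library_name() are referenced above but not
# defined in this excerpt. Sketches under stated assumptions: locate() walks a
# directory tree for an exact filename match, and the linker script reader
# mirrors the inline GROUP(...) handling in the second get_library_name()
# variant further down in this file. Both rely on the module-level os/re
# imports the surrounding code already needs.
def locate(pth, fname):
    """Search a directory tree for a file, return full path or None (assumed behavior)."""
    for root, dirs, files in os.walk(pth):
        if fname in files:
            return os.path.join(root, fname)
    return None

def read_linker_script_library_name(fname):
    """Extract the first GROUP() member from a linker script, or None if not found."""
    fd = open(fname, "r")
    contents = fd.read()
    fd.close()
    match = re.search(r'GROUP\s*\(\s*(\S+)\s+', contents, re.MULTILINE)
    if match:
        return os.path.basename(match.group(1))
    return None
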
def generate_source(self, required_symbols):
    """Generate C source that contains definitions for given symbols."""
    headers = set()
    prototypes = []
    source = []
    compiled_symbol_names = []
    ii = 0
    while ii < len(required_symbols):
        name = required_symbols[ii]
        if name in self.__symbols:
            sym = self.__symbols[name]
            # Add all dependencies to required symbols list.
            for jj in sym.get_dependencies():
                if jj not in required_symbols:
                    required_symbols += [jj]
            # Extend collected source data.
            headers = headers.union(sym.get_headers())
            prototypes += sym.get_prototypes()
            source += [sym.get_source()]
            compiled_symbol_names += [name]
        ii += 1
    if not source:
        return None
    if is_verbose():
        print("%i extra symbols required: %s" % (len(compiled_symbol_names), str(compiled_symbol_names)))
    subst = {
        "HEADERS": "\n".join(map(lambda x: "#include <%s>" % (x), headers)),
        "PROTOTYPES": "\n\n".join(prototypes),
        "SOURCE": "\n\n".join(source),
    }
    return g_template_extra_source.format(subst)

def generate_fake_bss(self, assembler, und_symbols=None, elfling=None):
    """Remove local labels that would seem to generate .bss, make a fake .bss section."""
    bss = AssemblerSectionBss()
    for ii in self.__sections:
        while True:
            entry = ii.extract_bss(und_symbols)
            if not entry:
                break
            if not entry.is_und_symbol():
                bss.add_element(entry)
    if elfling:
        bss.add_element(AssemblerBssElement(ELFLING_WORK, elfling.get_work_size()))
    bss_size = bss.get_size()
    if 0 < bss.get_alignment():
        pt_load_string = ", second PT_LOAD required"
    else:
        pt_load_string = ", one PT_LOAD sufficient"
    if is_verbose():
        outstr = "Constructed fake .bss segment: "
        if 1073741824 < bss_size:
            print("%s%1.1f Gbytes%s" % (outstr, float(bss_size) / 1073741824.0, pt_load_string))
        elif 1048576 < bss_size:
            print("%s%1.1f Mbytes%s" % (outstr, float(bss_size) / 1048576.0, pt_load_string))
        elif 1024 < bss_size:
            print("%s%1.1f kbytes%s" % (outstr, float(bss_size) / 1024.0, pt_load_string))
        else:
            print("%s%u bytes%s" % (outstr, bss_size, pt_load_string))
    self.add_sections(bss)
    return bss

def get_library_name(self, op):
    """Get actual name of library."""
    if op.startswith("/"):
        return op
    # Check if the library is specified verbatim. If yes, no need to expand.
    if re.match(r'lib.+\.so(\..*)?', op):
        return op
    libname = "lib%s.so" % (op)
    # Search in library directories only.
    for ii in self.__library_directories:
        current_libname = locate(ii, libname)
        if not current_libname:
            continue
        # Check if the supposed shared library is a linker script.
        if file_is_ascii_text(current_libname):
            fd = open(current_libname, "r")
            match = re.search(r'GROUP\s*\(\s*(\S+)\s+', fd.read(), re.MULTILINE)
            fd.close()
            if match:
                ret = os.path.basename(match.group(1))
                if is_verbose():
                    print("Using shared library '%s' instead of '%s'." % (ret, libname))
                return ret
        # Stop at first match.
        break
    return libname

def crunch_align(self):
    """Replace all .align declarations with minimal byte alignment."""
    desired = int(PlatformVar("align"))
    adjustments = []
    for ii in range(len(self.__content)):
        line = self.__content[ii]
        match = re.match(r'(\s*)\.align\s+(\d+).*', line, re.I)
        if not match:
            continue
        # Alignment smaller than the desired platform alignment can be left as it is.
        align = get_align_bytes(int(match.group(2)))
        if align <= desired:
            continue
        # Some alignment directives are necessary due to data access.
        if not can_minimize_align(align):
            continue
        self.__content[ii] = "%s.balign %i\n" % (match.group(1), desired)
        adjustments += ["%i -> %i" % (align, desired)]
    # Data sections may be reshuffled and require minimal align as first line.
    if self.__name in ["data", "rodata"]:
        match = re.match(r'(\s*)\.b?align\s.*', self.__content[0], re.I)
        if not match:
            first_align = "\t.balign %i\n" % (int(PlatformVar("align")))
            self.__content.insert(0, first_align)
    if is_verbose() and adjustments:
        print("Alignment adjustment(%s): %s" % (self.get_name(), ", ".join(adjustments)))

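# get_align_bytes() and can_minimize_align() above are not defined in this
# excerpt. A sketch of the former only, assuming the usual GNU as behavior:
# on x86 ELF targets the .align operand is already a byte count, while on
# targets such as ARM it is a power-of-two exponent. can_minimize_align() is
# presumably a veto for alignments required for correctness (e.g. aligned
# SIMD data) and is not sketched here.
def get_align_bytes(op):
    """Translate an .align operand into a byte count (assumed platform behavior)."""
    if osarch_is_amd64() or osarch_is_ia32():
        return op
    return 1 << op
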
def add_source(self, fname):
    """Add source from an assembler file."""
    fd = open(fname, "r")
    lines = fd.readlines()
    fd.close()
    current_section = AssemblerSection("text")
    sectionre = re.compile(r'^\s*\.section\s+\"?\.([a-zA-Z0-9_]+)[\.\s]')
    directivere = re.compile(r'^\s*\.(bss|data|rodata|text)')
    for ii in lines:
        # Try the full section expression first, then the plain directive expression.
        match = sectionre.match(ii)
        if not match:
            match = directivere.match(ii)
        # If match, start new section.
        if match:
            self.add_sections(current_section)
            current_section = AssemblerSection(match.group(1), ii)
        else:
            current_section.add_content(ii)
    if not current_section.empty():
        self.add_sections(current_section)
    if is_verbose():
        section_names = list(map(lambda x: x.get_name(), self.__sections))
        print("%i sections in '%s': %s" % (len(self.__sections), fname, str(section_names)))

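# Standalone illustration of the section-splitting expressions above. The
# input lines here are made up for demonstration; only re is used, mirroring
# sectionre/directivere exactly.
def _demo_section_split():
    """Show which section names the add_source() expressions would extract."""
    sectionre = re.compile(r'^\s*\.section\s+\"?\.([a-zA-Z0-9_]+)[\.\s]')
    directivere = re.compile(r'^\s*\.(bss|data|rodata|text)')
    lines = ['\t.section\t.rodata.str1.1,"aMS",@progbits,1\n', "\t.text\n", "\tmovl\t$1, %eax\n"]
    for line in lines:
        match = sectionre.match(line) or directivere.match(line)
        print(match.group(1) if match else "(content line)")
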
def parse(self):
    """Parse all source files."""
    # First, assemble GLSL chains.
    source_dict = {}
    for ii in self.__sources:
        if ii.isCommonChainName():
            continue
        chain_name = ii.getChainName()
        if chain_name in source_dict:
            source_dict[chain_name].addSource(ii)
        else:
            source_dict[chain_name] = GlslSourceChain(ii)
    for ii in self.__sources:
        if not ii.isCommonChainName():
            continue
        for jj in source_dict.keys():
            source_chain = source_dict[jj]
            if source_chain.isSourceSlotFree(ii):
                source_chain.addSource(ii)
    self.__chains = source_dict.values()
    if is_verbose():
        print("GLSL source chains: %s" % (" ; ".join(map(lambda x: str(x), self.__chains))))
    # Run parse process on sources.
    for ii in self.__sources:
        ii.parse()

def get_library_name(self, linker):
    """Get linkable library object name."""
    libname = linker.get_library_name(self.__library.get_name())
    if libname != self.__library.get_name() and is_verbose():
        print("Using shared library '%s' instead of '%s'." % (str(libname), self.__library.get_name()))
    return libname

def merge(self, op):
    """Attempt to merge with given segment."""
    highest_mergable = 0
    (head_src, bytestream_src) = self.deconstruct_tail()
    (bytestream_dst, tail_dst) = op.deconstruct_head()
    for ii in range(min(len(bytestream_src), len(bytestream_dst))):
        mergable = True
        for jj in range(ii + 1):
            if not bytestream_src[-ii - 1 + jj].mergable(bytestream_dst[jj]):
                mergable = False
                break
        if mergable:
            highest_mergable = ii + 1
    if 0 >= highest_mergable:
        return False
    if is_verbose():
        print("Merging headers %s and %s at %i bytes." % (self.__name, op.__name, highest_mergable))
    for ii in range(highest_mergable):
        bytestream_src[-highest_mergable + ii].merge(bytestream_dst[ii])
    bytestream_dst[0:highest_mergable] = []
    self.reconstruct(head_src + bytestream_src)
    op.reconstruct(bytestream_dst + tail_dst)
    return True

def replace_osname(repl_osname, reason):
    """Replace osname with given string."""
    global g_osname
    if g_osname == repl_osname:
        return
    if is_verbose():
        print("%stargeting osname '%s' instead of '%s'" % (reason, repl_osname, g_osname))
    g_osname = repl_osname

def replace_osarch(repl_osarch, reason):
    """Replace osarch with given string."""
    global g_osarch
    if g_osarch == repl_osarch:
        return
    if is_verbose():
        print("%stargeting osarch '%s' instead of '%s'" % (reason, repl_osarch, g_osarch))
    g_osarch = repl_osarch

def compile_asm(self, src, dst, whole_program=False):
    """Compile a file into assembler source."""
    cmd = ([self.get_command(), "-S", src, "-o", dst] + self.__standard + self.__compiler_flags +
           self._compiler_flags_extra + self._definitions + self._include_directories)
    if whole_program:
        cmd += self.__compiler_flags_generate_asm
    (so, se) = run_command(cmd)
    if 0 < len(se) and is_verbose():
        print(se)

def detectType(self):
    """Try to detect chain name and type from filename."""
    (self.__chain, self.__type) = detect_shader_type(self.__basename)
    if is_verbose():
        output_message = "Shader file '%s' type" % (self.__filename)
        if self.__type:
            print(output_message + (": '%s'" % (self.__type)))
        else:
            print(output_message + " not detected, assuming generic.")

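# detect_shader_type() is not part of this excerpt. A sketch that reuses the
# inline filename patterns of the detectType() variant further down in this
# file; the chain-name part is an assumption (here: the basename up to the
# first separator), so the real helper may derive it differently.
def detect_shader_type(basename):
    """Return (chain name, shader type) guessed from a filename (assumed behavior)."""
    stub = r'.*[._\-\s]%s[._\-\s].*'
    detected_type = None
    for (token, name) in (("frag", "fragment"), ("fragment", "fragment"), ("geom", "geometry"),
                          ("geometry", "geometry"), ("vert", "vertex"), ("vertex", "vertex")):
        if re.match(stub % (token), basename, re.I):
            detected_type = name
            break
    chain = re.split(r'[._\-\s]', basename)[0]
    return (chain, detected_type)
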
def compile_and_link(self, src, dst):
    """Compile and link a file directly."""
    cmd = ([self.get_command(), src, "-o", dst] + self.__standard + self.__compiler_flags +
           self._compiler_flags_extra + self._definitions + self._include_directories +
           self.get_linker_flags() + self.get_library_directory_list() + self.get_library_list())
    (so, se) = run_command(cmd)
    if 0 < len(se) and is_verbose():
        print(se)

def format(self, force):
    """Return formatted output."""
    if not self.__rename:
        if force:
            if is_verbose():
                print("WARNING: %s not locked" % (self))
            return self.__name
        return ""
    return self.__rename

def preprocess(self, op):
    """Preprocess a file, return output."""
    args = ([self.get_command(), op] + self._compiler_flags_extra + self._definitions +
            self._include_directories)
    if self.is_msvc():
        args += ["/E"]
    (so, se) = run_command(args)
    if 0 < len(se) and is_verbose():
        print(se)
    return so

def crunch(self):
    """Remove all offending content."""
    self.crunch_align()
    self.crunch_redundant()
    if osarch_is_amd64() or osarch_is_ia32():
        self.crunch_amd64_ia32()
    elif is_verbose():
        print("WARNING: no platform-dependent crunch for architecture '%s'" % g_osarch)
    self.__tag = None

def write(self):
    """Write compressed output."""
    fd = open(self.__output_name, "w")
    if not fd:
        raise RuntimeError("could not write GLSL header '%s'" % (self.__output_name))
    fd.write(self.generateHeaderOutput())
    fd.close()
    if is_verbose():
        print("Wrote GLSL header: '%s' => '%s'" % (self.__variable_name, self.__output_name))

def preprocess(self, op):
    """Preprocess a file, return output."""
    args = ([self.get_command(), op] + self._compiler_flags_extra + self._definitions +
            self._include_directories)
    if self.command_basename_startswith("cl."):
        args += ["/E"]
    (so, se) = run_command(args)
    if 0 < len(se) and is_verbose():
        print(se)
    return so

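# Neither is_msvc() (used in the first preprocess() variant above) nor
# command_basename_startswith() is defined in this excerpt. Sketches under the
# assumption that both only inspect the basename of the stored command, and
# that MSVC is recognized from a "cl." prefix exactly as in the variant above.
def command_basename_startswith(self, op):
    """Tell whether the command basename starts with given string (assumed behavior)."""
    return os.path.basename(self.get_command()).startswith(op)

def is_msvc(self):
    """Tell whether this executable looks like the Microsoft compiler driver (assumed check)."""
    return self.command_basename_startswith("cl.")
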
def link_binary(self, src, dst):
    """Link a binary file with no bells and whistles."""
    cmd = ([self.__command, "--entry=" + str(PlatformVar("entry"))] + listify(src) + ["-o", dst] +
           self.__linker_script + self.__linker_flags_extra)
    (so, se) = run_command(cmd)
    if 0 < len(se) and is_verbose():
        print(se)
    return so

def crunch_entry_push(self, op):
    """Crunch amd64/ia32 push directives from given line listing."""
    lst = self.want_label(op)
    if not lst:
        return
    ii = lst[0] + 1
    jj = ii
    stack_decrement = 0
    reinstated_lines = []
    while True:
        current_line = self.__content[jj]
        match = re.match(r'\s*(push\w).*%(\w+)', current_line, re.IGNORECASE)
        if match:
            stack_decrement += get_push_size(match.group(1))
            jj += 1
            continue
        # Preserve comment lines as they are.
        match = re.match(r'^\s*[#;].*', current_line, re.IGNORECASE)
        if match:
            reinstated_lines += [current_line]
            jj += 1
            continue
        # Some types of lines can be in the middle of pushing.
        if is_reinstate_line(current_line):
            reinstated_lines += [current_line]
            jj += 1
            continue
        # Stop at stack decrement.
        match = re.match(r'\s*sub.*\s+[^\d]*(\d+),\s*%(rsp|esp)', current_line, re.IGNORECASE)
        if match:
            total_decrement = int(match.group(1)) + stack_decrement
            # As per gcc ABI, align to 16 bytes on 64-bit architectures.
            if osarch_is_64_bit() and ((total_decrement & 0xF) != 0):
                total_decrement += 0x10 - (total_decrement & 0xF)
            self.__content[jj] = re.sub(r'\d+', str(total_decrement), current_line)
            break
        # Do nothing if suspicious instruction is found.
        if is_verbose():
            print("Unknown header instruction found, aborting erase: '%s'" % (current_line.strip()))
        return
    if is_verbose():
        print("Erasing function header from '%s': %i lines" % (op, jj - ii - len(reinstated_lines)))
    self.__content[ii:jj] = reinstated_lines

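# is_reinstate_line(), used by the crunch_entry_push() variants above, is not
# defined in this excerpt. A guess only: assembler bookkeeping directives that
# may appear between pushes (debug, CFI, type and size annotations) do not
# move the stack pointer, so they can be kept and re-emitted after the erased
# header. The real helper may accept a different set of lines.
def is_reinstate_line(op):
    """Tell whether a line can be preserved from within a crunched function header (assumed check)."""
    return bool(re.match(r'^\s*\.(cfi_\w+|loc|file|type|size)\b.*', op, re.IGNORECASE))
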
def get_extra_library_directories(self):
    """Determine extra library requirements for the compiler."""
    ret = []
    if self.is_gcc():
        (so, se) = run_command([self.get_command(), "-v"])
        match = re.search(r'COLLECT_LTO_WRAPPER\s*=\s*([^\n]+)', se, re.I | re.M)
        if match:
            ret += [os.path.dirname(match.group(1))]
    if is_verbose() and ret:
        print("Compiler '%s' requires additional library directories: %s" % (self.get_command_basename(), str(ret)))
    return ret

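# is_gcc() and get_command_basename() above are not shown in this excerpt.
# Sketches, assuming gcc is recognized purely from the command basename; the
# real checks may also probe the compiler's version output.
def get_command_basename(self):
    """Return basename of the compiler command (assumed behavior)."""
    return os.path.basename(self.get_command())

def is_gcc(self):
    """Tell whether this compiler looks like gcc/g++ (assumed check)."""
    return bool(re.search(r'(^|[^\w])(gcc|g\+\+)', self.get_command_basename()))
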
def extract_shader_payload(preprocessor, src, dst):
    """Extract only the quoted content and write a file."""
    text = preprocessor.preprocess(src)
    match = re.match(r'.*char[^"]+"(.*)"\s*;[^"]+', text, re.MULTILINE | re.DOTALL)
    if not match:
        raise RuntimeError("could not extract shader blob")
    text = re.sub(r'"\s*\n\s*"', "", match.group(1))
    fd = open(dst, "w")
    fd.write(text.replace("\\n", "\n"))
    fd.close()
    if is_verbose():
        print("Wrote shader payload: '%s'" % (dst))

def format(self, force):
    """Return formatted output."""
    ret = "."
    if self.__swizzle:
        if not self.__swizzle_export:
            if force:
                if is_verbose():
                    print("WARNING: %s swizzle status unconfirmed" % (str(self)))
                return ret + self.__name.format(force)
            return ""
        return ret + self.generateSwizzle()
    return ret + self.__name.format(force)

def format(self, substitutions=None):
    """Return formatted output."""
    ret = self.__content
    if substitutions:
        for kk in substitutions:
            vv = substitutions[kk].replace("\\", "\\\\")
            (ret, num) = re.subn(r'\[\[\s*%s\s*\]\]' % (kk), vv, ret)
            if not num:
                print("WARNING: substitution '%s' has no matches" % (kk))
    unmatched = list(set(re.findall(r'\[\[([^\]]+)\]\]', ret)))
    (ret, num) = re.subn(r'\[\[[^\]]+\]\]', "", ret)
    if num and is_verbose():
        print("Template substitutions not matched: %s (%i)" % (str(unmatched), num))
    return ret

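# Standalone illustration of the [[ KEY ]] substitution logic in format()
# above. The template text here is made up for demonstration and only re is
# used, mirroring the expressions in format() exactly.
def _demo_template_substitution():
    """Substitute one known key and erase leftover tags, as format() does."""
    content = "start\n[[ HEADERS ]]\n[[ UNUSED ]]\nend\n"
    # Known keys are replaced in place.
    (content, num) = re.subn(r'\[\[\s*%s\s*\]\]' % ("HEADERS"), "#include <stdio.h>", content)
    # Remaining tags are erased, just like unmatched substitutions in format().
    (content, num) = re.subn(r'\[\[[^\]]+\]\]', "", content)
    return content
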
def crunch_jump_pop(self, op):
    """Crunch popping before a jump."""
    lst = self.want_line(r'\s*(jmp\s+%s)\s+.*' % (op))
    if not lst:
        return
    ii = lst[0]
    jj = ii - 1
    while True:
        if (0 > jj) or not re.match(r'\s*(pop\S).*', self.__content[jj], re.IGNORECASE):
            if is_verbose():
                print("Erasing function footer before jump to '%s': %i lines" % (op, ii - jj - 1))
            self.erase(jj + 1, ii)
            break
        jj -= 1

def __init__(self, lst, explicit):
    """Constructor."""
    GlslBlock.__init__(self)
    self.__explicit = explicit
    self.__squashable = False
    self.__allow_squash = False
    # Check for degenerate scope.
    if (1 == len(lst)) and is_glsl_block_declaration(lst[0]):
        raise RuntimeError("scope with only block '%s' is degenerate" % (lst[0].format(True)))
    # Check for empty scope (likely an error).
    if 0 >= len(lst):
        if is_verbose():
            print("WARNING: empty scope")
    # Hierarchy.
    self.addChildren(lst)

def crunch_entry_push(self, op):
    """Crunch amd64/ia32 push directives from given line listing."""
    lst = self.want_label(op)
    if not lst:
        return
    ii = lst[0] + 1
    jj = ii
    stack_decrement = 0
    stack_save_decrement = 0
    reinstated_lines = []
    while True:
        current_line = self.__content[jj]
        match = re.match(r'\s*(push\w).*%(\w+)', current_line, re.IGNORECASE)
        if match:
            if is_stack_save_register(match.group(2)):
                stack_save_decrement += get_push_size(match.group(1))
            else:
                stack_decrement += get_push_size(match.group(1))
            jj += 1
            continue
        # Preserve comment lines as they are.
        match = re.match(r'^\s*[#;].*', current_line, re.IGNORECASE)
        if match:
            reinstated_lines += [current_line]
            jj += 1
            continue
        # Saving the stack pointer or sometimes initializing edx may happen in the middle of pushing.
        match = re.match(r'\s*mov\w\s+%\w+,\s*%(rbp|ebp|edx).*', current_line, re.IGNORECASE)
        if match:
            if is_stack_save_register(match.group(1)):
                stack_save_decrement = 0
            reinstated_lines += [current_line]
            jj += 1
            continue
        # Some types of lines can be in the middle of pushing.
        if is_reinstate_line(current_line):
            reinstated_lines += [current_line]
            jj += 1
            continue
        match = re.match(r'\s*sub.*\s+[^\d]*(\d+),\s*%(rsp|esp)', current_line, re.IGNORECASE)
        if match:
            total_decrement = int(match.group(1)) + stack_decrement + stack_save_decrement
            self.__content[jj] = re.sub(r'\d+', str(total_decrement), current_line)
        break
    if is_verbose():
        print("Erasing function header from '%s': %i lines" % (op, jj - ii - len(reinstated_lines)))
    self.erase(ii, jj)
    self.__content[ii:ii] = reinstated_lines

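# is_stack_save_register() is not defined in this excerpt. The expression
# above only admits rbp, ebp and edx, and the frame pointer is the register
# whose save this variant tracks separately, so the sketch assumes exactly
# that.
def is_stack_save_register(op):
    """Tell whether a register is used for saving the stack frame (assumed: the frame pointer)."""
    return op.lower() in ("rbp", "ebp")
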
def write(self, op, assembler, section_names=None):
    """Write an output assembler file or append to an existing file."""
    output = self.generate_file_output(section_names)
    if not output:
        return False
    if isinstance(op, str):
        fd = open(op, "w")
        fd.write(output)
        fd.close()
        if is_verbose():
            print("Wrote assembler source: '%s'" % (op))
    else:
        prefix = assembler.format_block_comment("sections '%s'" % (str(section_names)))
        op.write(prefix)
        op.write(output)
    return True

def crunch_amd64(self):
    """Perform platform-dependent crunching."""
    self.crunch_entry_push("_start")
    self.crunch_entry_push(ELFLING_UNCOMPRESSED)
    self.crunch_jump_pop(ELFLING_UNCOMPRESSED)
    lst = self.want_line(r'\s*(int\s+\$0x3|syscall)\s+.*')
    if lst:
        ii = lst[0] + 1
        jj = ii
        while True:
            if (jj >= len(self.__content)) or (not can_erase_footer(self.__content[jj])):
                if is_verbose():
                    print("Erasing function footer after '%s': %i lines" % (lst[1], jj - ii))
                self.erase(ii, jj)
                break
            jj += 1

def crunch_ia32(self):
    """Perform platform-dependent crunching."""
    self.crunch_entry_push("_start")
    self.crunch_entry_push(ELFLING_UNCOMPRESSED)
    self.crunch_jump_pop(ELFLING_UNCOMPRESSED)
    lst = self.want_line(r'\s*int\s+\$(0x3|0x80)\s+.*')
    if lst:
        ii = lst[0] + 1
        jj = ii
        while True:
            if len(self.__content) <= jj or re.match(r'\s*\S+:\s*', self.__content[jj]):
                if is_verbose():
                    print("Erasing function footer after interrupt '%s': %i lines" % (lst[1], jj - ii))
                self.erase(ii, jj)
                break
            jj += 1

def detectType(self):
    """Try to detect type from filename."""
    self.__type = None
    stub = r'.*[._\-\s]%s[._\-\s].*'
    if re.match(stub % ("frag"), self.__filename, re.I) or re.match(stub % ("fragment"), self.__filename, re.I):
        self.__type = "fragment"
    elif re.match(stub % ("geom"), self.__filename, re.I) or re.match(stub % ("geometry"), self.__filename, re.I):
        self.__type = "geometry"
    elif re.match(stub % ("vert"), self.__filename, re.I) or re.match(stub % ("vertex"), self.__filename, re.I):
        self.__type = "vertex"
    if is_verbose():
        output_message = "Shader file '%s' type" % (self.__filename)
        if self.__type:
            print(output_message + (": '%s'" % (self.__type)))
        else:
            print(output_message + " not detected, assuming generic.")

def crunch_align(self):
    """Replace all .align declarations with minimal byte alignment."""
    desired = int(PlatformVar("align"))
    adjustments = []
    for ii in range(len(self.__content)):
        line = self.__content[ii]
        match = re.match(r'(\s*)\.align\s+(\d+).*', line)
        if not match:
            continue
        # Get actual align byte count.
        align = get_align_bytes(int(match.group(2)))
        if align == desired:
            continue
        # Some alignment directives are necessary due to data access.
        if not can_minimize_align(align):
            continue
        self.__content[ii] = "%s.balign %i\n" % (match.group(1), desired)
        adjustments += ["%i -> %i" % (align, desired)]
    if is_verbose() and adjustments:
        print("Alignment adjustment(%s): %s" % (self.get_name(), ", ".join(adjustments)))

def sort_sections(self, assembler, data_in_front=True):
    """Sort sections into an order that is more easily compressible."""
    text_sections = []
    rodata_sections = []
    data_sections = []
    other_sections = []
    for ii in self.__sections:
        if "text" == ii.get_name():
            text_sections += [ii]
        elif "rodata" == ii.get_name():
            rodata_sections += [ii]
        elif "data" == ii.get_name():
            data_sections += [ii]
        else:
            other_sections += [ii]
    text_section_str = []
    rodata_section_str = []
    data_section_str = []
    other_section_str = []
    if text_sections:
        text_section_str = ["%i text" % (len(text_sections))]
    if rodata_sections:
        rodata_section_str = ["%i rodata" % (len(rodata_sections))]
    if data_sections:
        data_section_str = ["%i data" % (len(data_sections))]
    if other_sections:
        other_section_str = [", ".join(map(lambda x: x.get_name(), other_sections))]
    # Sort data either in front or in the back.
    if data_in_front:
        section_str = rodata_section_str + data_section_str + text_section_str + other_section_str
        self.__sections = rodata_sections + data_sections + text_sections + other_sections
    else:
        section_str = text_section_str + rodata_section_str + data_section_str + other_section_str
        self.__sections = text_sections + rodata_sections + data_sections + other_sections
    if is_verbose():
        print("Sorted sections: " + ", ".join(filter(lambda x: x, section_str)))

def main():
    """Main function."""
    default_preprocessor_list = ["cpp", "clang-cpp"]
    preprocessor = None
    parser = argparse.ArgumentParser(usage="GLSL minifying test.", formatter_class=CustomHelpFormatter, add_help=False)
    parser.add_argument("-h", "--help", action="store_true", help="Print this help string and exit.")
    parser.add_argument("--preprocessor", default=None, help="Try to use given preprocessor executable as opposed to autodetect.")
    parser.add_argument("-v", "--verbose", action="store_true", help="Print more info about what is being done.")
    parser.add_argument("source", default=[], nargs="*", help="Source file(s) to process.")
    args = parser.parse_args()
    preprocessor = args.preprocessor
    if args.help:
        print(parser.format_help().strip())
        return 0
    # Verbosity.
    if args.verbose:
        set_verbose(True)
    # Source files to process.
    if not args.source:
        raise RuntimeError("no source files to process")
    source_files = []
    for ii in args.source:
        if re.match(r'.*\.(glsl|vert|geom|frag)$', ii, re.I):
            source_files += [ii]
        else:
            raise RuntimeError("unknown source file: '%s'" % (ii))
    dl = find_executable("dnload.py", "dnload")
    if is_verbose():
        print("found dnload: '%s'" % (dl))
    sm = find_executable("shader_minifier.exe", "Shader_Minifier")
    if is_verbose():
        print("found shader_minifier: '%s'" % (sm))
    # Find preprocessor.
    if preprocessor:
        if not executable_check(preprocessor):
            raise RuntimeError("could not use supplied preprocessor '%s'" % (preprocessor))
    else:
        preprocessor_list = default_preprocessor_list
        if os.name == "nt":
            preprocessor_list = ["cl.exe"] + preprocessor_list
        preprocessor = executable_search(preprocessor_list, "preprocessor")
    if not preprocessor:
        raise RuntimeError("suitable preprocessor not found")
    preprocessor = Preprocessor(preprocessor)
    # Minify each source with both tools, extract the payloads and compress them for comparison.
    for ii in source_files:
        fname = "/tmp/" + os.path.basename(ii)
        fname_dn = fname + ".dnload"
        fname_dn_in = fname_dn + ".h"
        fname_dn_out = fname_dn + ".payload"
        fname_sm = fname + ".shaderminifier"
        fname_sm_in = fname_sm + ".h"
        fname_sm_out = fname_sm + ".payload"
        run_command(["python", dl, ii, "-o", fname_dn_in])
        if is_verbose():
            print("Wrote dnload -minified shader: '%s'" % (fname_dn_in))
        run_command(["mono", sm, ii, "-o", fname_sm_in])
        if is_verbose():
            print("Wrote shader_minifier -minified shader: '%s'" % (fname_sm_in))
        extract_shader_payload(preprocessor, fname_dn_in, fname_dn_out)
        extract_shader_payload(preprocessor, fname_sm_in, fname_sm_out)
        compress_file("lzma", fname_dn_out, fname_dn + ".lzma")
        #compress_file("xz", fname_dn_out, fname_dn + ".xz")
        compress_file("lzma", fname_sm_out, fname_sm + ".lzma")
        #compress_file("xz", fname_sm_out, fname_sm + ".xz")
    return 0

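# compress_file() and the executable lookup helpers used by main() are not in
# this excerpt. A sketch of compress_file() only, assuming the comparison just
# needs a compressed copy on disk and that Python's lzma module is an
# acceptable stand-in for an external compressor; the project's real helper
# may invoke the named command-line tool instead.
def compress_file(compressor, src, dst):
    """Compress a file, write the result and report sizes (assumed behavior)."""
    import lzma
    fd = open(src, "rb")
    data = fd.read()
    fd.close()
    if "lzma" == compressor:
        compressed = lzma.compress(data, format=lzma.FORMAT_ALONE)
    elif "xz" == compressor:
        compressed = lzma.compress(data, format=lzma.FORMAT_XZ)
    else:
        raise RuntimeError("unknown compressor '%s'" % (compressor))
    fd = open(dst, "wb")
    fd.write(compressed)
    fd.close()
    if is_verbose():
        print("Compressed '%s': %i => %i bytes" % (src, len(data), len(compressed)))
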