def main(config_path, base_dir, target_path, modes, verbose, use_cache=True):
    """Split the target ROM according to a YAML config.

    Pipeline: load config -> verify ROM sha1 -> load segment cache ->
    scan segments -> split segments -> write linker script and
    undefined-symbol files -> report warnings/statistics -> save cache.

    Args:
        config_path: path to the YAML config file.
        base_dir: base directory for generated output.
        target_path: path to the target ROM image.
        modes: processing modes to activate (e.g. "code", "img", "ld").
        verbose: when True, emit extra progress logging.
        use_cache: when True, use a pickled per-segment cache so that
            unchanged segments are skipped on subsequent runs.

    NOTE(review): a later definition of `main` in this file shadows this
    one at import time; consider removing one of the two.
    """
    global config

    # Load config
    with open(config_path) as f:
        config = yaml.load(f.read(), Loader=yaml.SafeLoader)
    options.initialize(config, config_path, base_dir, target_path)
    options.set("modes", modes)

    if verbose:
        options.set("verbose", True)

    with options.get_target_path().open("rb") as f2:
        rom_bytes = f2.read()

    if "sha1" in config:
        sha1 = hashlib.sha1(rom_bytes).hexdigest()
        # hexdigest() is lowercase; normalize the configured hash so an
        # uppercase sha1 in the yaml does not falsely mismatch.
        e_sha1 = config["sha1"].lower()
        if e_sha1 != sha1:
            log.error(f"sha1 mismatch: expected {e_sha1}, was {sha1}")

    # Create main output dir
    options.get_base_path().mkdir(parents=True, exist_ok=True)

    processed_segments: List[Segment] = []

    # Per-type statistics, keyed by the segment's stats type (see below).
    seg_sizes: Dict[str, int] = {}
    seg_split: Dict[str, int] = {}
    seg_cached: Dict[str, int] = {}

    # Load cache
    if use_cache:
        try:
            with options.get_cache_path().open("rb") as f3:
                cache = pickle.load(f3)

            if verbose:
                log.write(f"Loaded cache ({len(cache.keys())} items)")
        except Exception:
            # Best-effort: a missing/corrupt cache simply means a cold run.
            cache = {}
    else:
        cache = {}

    # invalidate entire cache if options change
    if use_cache and cache.get("__options__") != config.get("options"):
        if verbose:
            log.write("Options changed, invalidating cache")

        cache = {
            "__options__": config.get("options"),
        }

    # Initialize segments
    all_segments = initialize_segments(config["segments"])

    # Load and process symbols
    if options.mode_active("code"):
        log.write("Loading and processing symbols")
        symbols.initialize(all_segments)

    # Resolve raster/palette siblings
    if options.mode_active("img"):
        palettes.initialize(all_segments)

    def stat_type(segment) -> str:
        # Default-named "bin" segments are counted as unknown ("unk").
        if segment.type == "bin" and segment.is_name_default():
            return "unk"
        return segment.type

    # Scan
    log.write("Starting scan")
    for segment in all_segments:
        typ = stat_type(segment)

        if typ not in seg_sizes:
            seg_sizes[typ] = 0
            seg_split[typ] = 0
            seg_cached[typ] = 0
        seg_sizes[typ] += 0 if segment.size is None else segment.size

        if segment.should_scan():
            # Check cache but don't write anything
            if use_cache:
                if segment.cache() == cache.get(segment.unique_id()):
                    continue

            if segment.needs_symbols:
                segment_symbols, other_symbols = get_segment_symbols(
                    segment, all_segments
                )
                segment.given_seg_symbols = segment_symbols
                segment.given_ext_symbols = other_symbols

            segment.did_run = True
            segment.scan(rom_bytes)

            processed_segments.append(segment)

            seg_split[typ] += 1

        log.dot(status=segment.status())

    # Split
    log.write("Starting split")
    for segment in all_segments:
        # Fix: recompute the stats key per segment. Previously the stale
        # `typ` from the last scan-loop iteration was used here, crediting
        # every cache hit to the wrong segment type.
        typ = stat_type(segment)

        if use_cache:
            cached = segment.cache()

            if cached == cache.get(segment.unique_id()):
                # Cache hit
                seg_cached[typ] += 1
                continue
            else:
                # Cache miss; split
                cache[segment.unique_id()] = cached

        if segment.should_split():
            segment.split(rom_bytes)

        log.dot(status=segment.status())

    if options.mode_active("ld"):
        global linker_writer
        linker_writer = LinkerWriter()
        for segment in all_segments:
            linker_writer.add(segment)
        linker_writer.save_linker_script()
        linker_writer.save_symbol_header()

    # Write undefined_funcs_auto.txt
    to_write = [
        s
        for s in symbols.all_symbols
        if s.referenced and not s.defined and not s.dead and s.type == "func"
    ]
    if to_write:
        with open(options.get_undefined_funcs_auto_path(), "w", newline="\n") as f:
            for symbol in to_write:
                f.write(f"{symbol.name} = 0x{symbol.vram_start:X};\n")

    # write undefined_syms_auto.txt
    to_write = [
        s
        for s in symbols.all_symbols
        if s.referenced and not s.defined and not s.dead and not s.type == "func"
    ]
    if to_write:
        with open(options.get_undefined_syms_auto_path(), "w", newline="\n") as f:
            for symbol in to_write:
                f.write(f"{symbol.name} = 0x{symbol.vram_start:X};\n")

    # print warnings during split
    for segment in all_segments:
        if len(segment.warnings) > 0:
            log.write(
                f"{Style.DIM}0x{segment.rom_start:06X}{Style.RESET_ALL} {segment.type} {Style.BRIGHT}{segment.name}{Style.RESET_ALL}:"
            )
            for warn in segment.warnings:
                log.write("warning: " + warn, status="warn")
            log.write("")  # empty line

    # Statistics
    do_statistics(seg_sizes, rom_bytes, seg_split, seg_cached)

    # Save cache
    if cache != {} and use_cache:
        if verbose:
            log.write("Writing cache")
        with open(options.get_cache_path(), "wb") as f4:
            pickle.dump(cache, f4)
def main(config_path, base_dir, target_path, modes, verbose, use_cache=True):
    """Split the target ROM according to one or more YAML configs.

    Pipeline: merge configs -> verify ROM sha1 -> load segment cache ->
    configure disassembler -> scan segments -> split segments -> write
    linker script, elf section list, and undefined-symbol files ->
    report warnings/statistics -> save cache.

    Args:
        config_path: iterable of YAML config file paths, merged in order
            (later entries override earlier ones via merge_configs).
        base_dir: base directory for generated output.
        target_path: path to the target ROM image.
        modes: processing modes to activate (e.g. "code", "img", "ld").
        verbose: when True, emit extra progress logging.
        use_cache: when True, use a pickled per-segment cache so that
            unchanged segments are skipped on subsequent runs.
    """
    global config

    log.write(f"splat {VERSION} (powered by spimdisasm {spimdisasm.__version__})")

    # Load config
    config = {}
    for entry in config_path:
        with open(entry) as f:
            additional_config = yaml.load(f.read(), Loader=yaml.SafeLoader)
        config = merge_configs(config, additional_config)

    options.initialize(config, config_path, base_dir, target_path)
    options.set("modes", modes)

    if verbose:
        options.set("verbose", True)

    with options.get_target_path().open("rb") as f2:
        rom_bytes = f2.read()

    if "sha1" in config:
        sha1 = hashlib.sha1(rom_bytes).hexdigest()
        # hexdigest() is lowercase; normalize the configured hash too.
        e_sha1 = config["sha1"].lower()
        if e_sha1 != sha1:
            log.error(f"sha1 mismatch: expected {e_sha1}, was {sha1}")

    # Create main output dir
    options.get_base_path().mkdir(parents=True, exist_ok=True)

    processed_segments: List[Segment] = []

    # Per-type statistics, keyed by the segment's stats type (see below).
    seg_sizes: Dict[str, int] = {}
    seg_split: Dict[str, int] = {}
    seg_cached: Dict[str, int] = {}

    # Load cache
    if use_cache:
        try:
            with options.get_cache_path().open("rb") as f3:
                cache = pickle.load(f3)

            if verbose:
                log.write(f"Loaded cache ({len(cache.keys())} items)")
        except Exception:
            # Best-effort: a missing/corrupt cache simply means a cold run.
            cache = {}
    else:
        cache = {}

    # invalidate entire cache if options change
    if use_cache and cache.get("__options__") != config.get("options"):
        if verbose:
            log.write("Options changed, invalidating cache")

        cache = {
            "__options__": config.get("options"),
        }

    configure_disassembler()

    # Initialize segments
    all_segments = initialize_segments(config["segments"])

    # Load and process symbols
    symbols.initialize(all_segments)

    # Assign symbols to segments
    assign_symbols_to_segments()

    if options.mode_active("code"):
        symbols.initialize_spim_context(all_segments)

    # Resolve raster/palette siblings
    if options.mode_active("img"):
        palettes.initialize(all_segments)

    def stat_type(segment: Segment) -> str:
        # Default-named "bin" segments are counted as unknown ("unk").
        if segment.type == "bin" and segment.is_name_default():
            return "unk"
        return segment.type

    # Scan
    scan_bar = tqdm.tqdm(all_segments, total=len(all_segments))
    for segment in scan_bar:
        assert isinstance(segment, Segment)
        scan_bar.set_description(f"Scanning {brief_seg_name(segment, 20)}")
        typ = stat_type(segment)

        if typ not in seg_sizes:
            seg_sizes[typ] = 0
            seg_split[typ] = 0
            seg_cached[typ] = 0
        seg_sizes[typ] += 0 if segment.size is None else segment.size

        if segment.should_scan():
            # Check cache but don't write anything
            if use_cache:
                if segment.cache() == cache.get(segment.unique_id()):
                    continue

            segment.did_run = True
            segment.scan(rom_bytes)

            processed_segments.append(segment)

            seg_split[typ] += 1

    # Split
    # Fix: set the description per iteration. Previously `desc=` was built
    # once from the leftover `segment` of the scan loop, so the bar showed a
    # fixed, wrong name (and raised NameError when all_segments was empty).
    split_bar = tqdm.tqdm(all_segments, total=len(all_segments))
    for segment in split_bar:
        split_bar.set_description(f"Splitting {brief_seg_name(segment, 20)}")

        # Fix: recompute the stats key per segment. Previously the stale
        # `typ` from the last scan-loop iteration was used here, crediting
        # every cache hit to the wrong segment type.
        typ = stat_type(segment)

        if use_cache:
            cached = segment.cache()

            if cached == cache.get(segment.unique_id()):
                # Cache hit
                seg_cached[typ] += 1
                continue
            else:
                # Cache miss; split
                cache[segment.unique_id()] = cached

        if segment.should_split():
            segment.split(rom_bytes)

    if options.mode_active("ld"):
        global linker_writer
        linker_writer = LinkerWriter()
        # Same per-iteration description fix as the split loop above.
        ld_bar = tqdm.tqdm(all_segments, total=len(all_segments))
        for segment in ld_bar:
            ld_bar.set_description(
                f"Writing linker script {brief_seg_name(segment, 20)}"
            )
            linker_writer.add(segment)
        linker_writer.save_linker_script()
        linker_writer.save_symbol_header()

    # write elf_sections.txt - this only lists the generated sections in the
    # elf, not subsections that the elf combines into one section
    if options.get_create_elf_section_list_auto():
        # str.join instead of += accumulation (avoids quadratic behavior).
        section_list = "".join(
            "." + to_cname(segment.name) + "\n" for segment in all_segments
        )
        with open(options.get_elf_section_list_path(), "w", newline="\n") as f:
            f.write(section_list)

    # Write undefined_funcs_auto.txt
    if options.get_create_undefined_funcs_auto():
        to_write = [
            s
            for s in symbols.all_symbols
            if s.referenced and not s.defined and not s.dead and s.type == "func"
        ]
        to_write.sort(key=lambda x: x.vram_start)

        with open(options.get_undefined_funcs_auto_path(), "w", newline="\n") as f:
            for symbol in to_write:
                f.write(f"{symbol.name} = 0x{symbol.vram_start:X};\n")

    # write undefined_syms_auto.txt
    if options.get_create_undefined_syms_auto():
        to_write = [
            s
            for s in symbols.all_symbols
            if s.referenced
            and not s.defined
            and not s.dead
            and s.type not in {"func", "label", "jtbl_label"}
        ]
        to_write.sort(key=lambda x: x.vram_start)

        with open(options.get_undefined_syms_auto_path(), "w", newline="\n") as f:
            for symbol in to_write:
                f.write(f"{symbol.name} = 0x{symbol.vram_start:X};\n")

    # print warnings during split
    for segment in all_segments:
        if len(segment.warnings) > 0:
            log.write(
                f"{Style.DIM}0x{segment.rom_start:06X}{Style.RESET_ALL} {segment.type} {Style.BRIGHT}{segment.name}{Style.RESET_ALL}:"
            )
            for warn in segment.warnings:
                log.write("warning: " + warn, status="warn")
            log.write("")  # empty line

    # Statistics
    do_statistics(seg_sizes, rom_bytes, seg_split, seg_cached)

    # Save cache
    if cache != {} and use_cache:
        if verbose:
            log.write("Writing cache")
        with open(options.get_cache_path(), "wb") as f4:
            pickle.dump(cache, f4)