Example #1
def clean_up_path(path: Path) -> Path:
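    # First, try to express the path relative to the project's base path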
    path_resolved = path.resolve()
    base_resolved = options.get_base_path().resolve()
    try:
        return path_resolved.relative_to(base_resolved)
    except ValueError:
        pass

    # If the path wasn't relative to the splat file, use the working directory instead
    cwd = Path(os.getcwd())
    try:
        return path_resolved.relative_to(cwd)
    except ValueError:
        pass

    # If it wasn't relative to that either, just return the path as-is
    return path
Example #2
    def create_c_file(self, funcs_text, asm_out_dir, c_path):
        c_lines = self.get_c_preamble()

        for func in funcs_text:
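            # Emit one stub per function: an INCLUDE_ASM macro for GCC builds, a GLOBAL_ASM pragma otherwise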
            func_name = self.get_symbol(func, type="func",
                                        local_only=True).name
            if options.get_compiler() == "GCC":
                c_lines.append("INCLUDE_ASM(s32, \"{}\", {});".format(
                    self.name, func_name))
            else:
                asm_outpath = Path(
                    os.path.join(asm_out_dir, self.name, func_name + ".s"))
                rel_asm_outpath = os.path.relpath(asm_outpath,
                                                  options.get_base_path())
                c_lines.append(f"#pragma GLOBAL_ASM(\"{rel_asm_outpath}\")")
            c_lines.append("")

        Path(c_path).parent.mkdir(parents=True, exist_ok=True)
        with open(c_path, "w") as f:
            f.write("\n".join(c_lines))
        print(f"Wrote {self.name} to {c_path}")
Example #3
    def create_c_file(self, funcs_text, asm_out_dir, c_path):
        c_lines = self.get_c_preamble()

        for func in funcs_text:
            func_name = self.parent.get_symbol(func,
                                               type="func",
                                               local_only=True).name

            # Terrible hack to "auto-decompile" empty functions
            # TODO move disassembly into funcs_text or somewhere we can access it from here
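            # "Empty" here means the disassembly is just jr $ra (or jr $31) followed by a nop in the delay slot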
            if (options.get_auto_decompile_empty_functions()
                    and len(funcs_text[func][0]) == 3
                    and funcs_text[func][0][1][-3:] in ["$ra", "$31"]
                    and funcs_text[func][0][2][-3:] == "nop"):
                c_lines.append("void " + func_name + "(void) {")
                c_lines.append("}")
            else:
                if options.get_compiler() in [GCC, SN64]:
                    if options.get_use_legacy_include_asm():
                        rel_asm_out_dir = asm_out_dir.relative_to(
                            options.get_nonmatchings_path())
                        c_lines.append(
                            f'INCLUDE_ASM(s32, "{rel_asm_out_dir / self.name}", {func_name});'
                        )
                    else:
                        c_lines.append(
                            f'INCLUDE_ASM("{asm_out_dir / self.name}", {func_name});'
                        )
                else:
                    asm_outpath = Path(
                        os.path.join(asm_out_dir, self.name, func_name + ".s"))
                    rel_asm_outpath = os.path.relpath(asm_outpath,
                                                      options.get_base_path())
                    c_lines.append(f'#pragma GLOBAL_ASM("{rel_asm_outpath}")')
            c_lines.append("")

        Path(c_path).parent.mkdir(parents=True, exist_ok=True)
        with open(c_path, "w") as f:
            f.write("\n".join(c_lines))
        log.write(f"Wrote {self.name} to {c_path}")
Example #4
    def create_c_file(self, asm_out_dir, c_path):
        c_lines = self.get_c_preamble()

        for func in self.text_section.symbolList:
            assert isinstance(func, spimdisasm.mips.symbols.SymbolFunction)

            # Terrible hack to "auto-decompile" empty functions
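            # (i.e. the body is just jr $ra followed by a nop in the delay slot)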
            if (options.get_auto_decompile_empty_functions()
                    and func.instructions[0].isJrRa()
                    and func.instructions[1].isNop()):
                c_lines.append("void " + func.getName() + "(void) {")
                c_lines.append("}")
            else:
                if options.get_compiler() in [GCC, SN64]:
                    if options.get_use_legacy_include_asm():
                        rel_asm_out_dir = asm_out_dir.relative_to(
                            options.get_nonmatchings_path())
                        c_lines.append(
                            f'INCLUDE_ASM(s32, "{rel_asm_out_dir / self.name}", {func.getName()});'
                        )
                    else:
                        c_lines.append(
                            f'INCLUDE_ASM("{asm_out_dir / self.name}", {func.getName()});'
                        )
                else:
                    asm_outpath = Path(
                        os.path.join(asm_out_dir, self.name,
                                     func.getName() + ".s"))
                    rel_asm_outpath = os.path.relpath(asm_outpath,
                                                      options.get_base_path())
                    c_lines.append(f'#pragma GLOBAL_ASM("{rel_asm_outpath}")')
            c_lines.append("")

        Path(c_path).parent.mkdir(parents=True, exist_ok=True)
        with open(c_path, "w") as f:
            f.write("\n".join(c_lines))
        log.write(f"Wrote {self.name} to {c_path}")
Example #5
File: rnc.py Project: mkst/sssv
    def split(self, rom_bytes):
        # stage 1: decompression
        path = self.out_path()
        path.parent.mkdir(parents=True, exist_ok=True)
        # create path to temporary file
        tmp_path = path.parent / (path.name + ".tmp")
        # write out RNC file
        with open(tmp_path, "wb") as f:
            f.write(rom_bytes[self.rom_start:self.rom_end])
        # build path to rnc64 binary
        rnc64 = options.get_base_path() / "tools" / "rnc_propack_source" / "rnc64"
        # run rnc64
        args = [rnc64, "u", tmp_path, path]
        subprocess.run(args, capture_output=True, check=True)
        # remove temporary file
        tmp_path.unlink()
        # stage 2: decoding
        if self.subtype is not None:
            with open(path, "rb") as infile:
                data = infile.read()
            if self.subtype == "rgba16":
                args = [None, None, None, self.width, self.height]
                seg = N64SegRgba16(0, len(data), self.subtype, self.name,
                                   self.vram_start, self.extract,
                                   self.given_subalign, self.given_is_overlay,
                                   self.given_dir, [], args)
            elif self.subtype == "i4":
                args = [None, None, None, self.width, self.height]
                seg = N64SegI4(0, len(data), self.subtype, self.name,
                               self.vram_start, self.extract,
                               self.given_subalign, self.given_is_overlay,
                               self.given_dir, [], args)
            else:
                log.error(f"Error: Unsupported subtype: {self.subtype}")
            seg.split(data)
            path.unlink()
Example #6
File: c.py Project: Alto1772/pm64
    def create_c_file(self, funcs_text, asm_out_dir, c_path):
        c_lines = self.get_c_preamble()

        for func in funcs_text:
            func_name = self.parent.get_symbol(func, type="func", local_only=True).name

            # Terrible hack to "auto-decompile" empty functions
            # TODO move disassembly into funcs_text or somewhere we can access it from here
            if len(funcs_text[func][0]) == 3 and funcs_text[func][0][1][-3:] == "$ra" and funcs_text[func][0][2][-3:] == "nop":
                c_lines.append("void " + func_name + "(void) {")
                c_lines.append("}")
            else:
                if options.get_compiler() == "GCC":
                    c_lines.append("INCLUDE_ASM(s32, \"{}\", {});".format(self.name, func_name))
                else:
                    asm_outpath = Path(os.path.join(asm_out_dir, self.dir, self.name, func_name + ".s"))
                    rel_asm_outpath = os.path.relpath(asm_outpath, options.get_base_path())
                    c_lines.append(f"#pragma GLOBAL_ASM(\"{rel_asm_outpath}\")")
            c_lines.append("")

        Path(c_path).parent.mkdir(parents=True, exist_ok=True)
        with open(c_path, "w") as f:
            f.write("\n".join(c_lines))
        log.write(f"Wrote {self.name} to {c_path}")
Example #7
def main(config_path, base_dir, target_path, modes, verbose, use_cache=True):
    global config

    # Load config
    with open(config_path) as f:
        config = yaml.load(f.read(), Loader=yaml.SafeLoader)

    options.initialize(config, config_path, base_dir, target_path)
    options.set("modes", modes)

    if verbose:
        options.set("verbose", True)

    with options.get_target_path().open("rb") as f2:
        rom_bytes = f2.read()

    if "sha1" in config:
        sha1 = hashlib.sha1(rom_bytes).hexdigest()
        e_sha1 = config["sha1"]
        if e_sha1 != sha1:
            log.error(f"sha1 mismatch: expected {e_sha1}, was {sha1}")

    # Create main output dir
    options.get_base_path().mkdir(parents=True, exist_ok=True)

    processed_segments: List[Segment] = []

    seg_sizes: Dict[str, int] = {}
    seg_split: Dict[str, int] = {}
    seg_cached: Dict[str, int] = {}

    # Load cache
    if use_cache:
        try:
            with options.get_cache_path().open("rb") as f3:
                cache = pickle.load(f3)

            if verbose:
                log.write(f"Loaded cache ({len(cache.keys())} items)")
        except Exception:
            cache = {}
    else:
        cache = {}

    # invalidate entire cache if options change
    if use_cache and cache.get("__options__") != config.get("options"):
        if verbose:
            log.write("Options changed, invalidating cache")

        cache = {
            "__options__": config.get("options"),
        }

    # Initialize segments
    all_segments = initialize_segments(config["segments"])

    # Load and process symbols
    if options.mode_active("code"):
        log.write("Loading and processing symbols")
        symbols.initialize(all_segments)

    # Resolve raster/palette siblings
    if options.mode_active("img"):
        palettes.initialize(all_segments)

    # Scan
    log.write("Starting scan")
    for segment in all_segments:
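        # Bucket the segment by type (default-named bin segments count as "unk") and tally its size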
        typ = segment.type
        if segment.type == "bin" and segment.is_name_default():
            typ = "unk"

        if typ not in seg_sizes:
            seg_sizes[typ] = 0
            seg_split[typ] = 0
            seg_cached[typ] = 0
        seg_sizes[typ] += 0 if segment.size is None else segment.size

        if segment.should_scan():
            # Check cache but don't write anything
            if use_cache:
                if segment.cache() == cache.get(segment.unique_id()):
                    continue

            if segment.needs_symbols:
                segment_symbols, other_symbols = get_segment_symbols(
                    segment, all_segments)
                segment.given_seg_symbols = segment_symbols
                segment.given_ext_symbols = other_symbols

            segment.did_run = True
            segment.scan(rom_bytes)

            processed_segments.append(segment)

            seg_split[typ] += 1

        log.dot(status=segment.status())

    # Split
    log.write("Starting split")
    for segment in all_segments:
        if use_cache:
            cached = segment.cache()

            if cached == cache.get(segment.unique_id()):
                # Cache hit
                seg_cached[typ] += 1
                continue
            else:
                # Cache miss; split
                cache[segment.unique_id()] = cached

        if segment.should_split():
            segment.split(rom_bytes)

        log.dot(status=segment.status())

    if options.mode_active("ld"):
        global linker_writer
        linker_writer = LinkerWriter()
        for segment in all_segments:
            linker_writer.add(segment)
        linker_writer.save_linker_script()
        linker_writer.save_symbol_header()

    # Write undefined_funcs_auto.txt
    to_write = [
        s for s in symbols.all_symbols
        if s.referenced and not s.defined and not s.dead and s.type == "func"
    ]
    if len(to_write) > 0:
        with open(options.get_undefined_funcs_auto_path(), "w",
                  newline="\n") as f:
            for symbol in to_write:
                f.write(f"{symbol.name} = 0x{symbol.vram_start:X};\n")

    # write undefined_syms_auto.txt
    to_write = [
        s for s in symbols.all_symbols if s.referenced and not s.defined
        and not s.dead and not s.type == "func"
    ]
    if len(to_write) > 0:
        with open(options.get_undefined_syms_auto_path(), "w",
                  newline="\n") as f:
            for symbol in to_write:
                f.write(f"{symbol.name} = 0x{symbol.vram_start:X};\n")

    # print warnings during split
    for segment in all_segments:
        if len(segment.warnings) > 0:
            log.write(
                f"{Style.DIM}0x{segment.rom_start:06X}{Style.RESET_ALL} {segment.type} {Style.BRIGHT}{segment.name}{Style.RESET_ALL}:"
            )

            for warn in segment.warnings:
                log.write("warning: " + warn, status="warn")

            log.write("")  # empty line

    # Statistics
    do_statistics(seg_sizes, rom_bytes, seg_split, seg_cached)

    # Save cache
    if cache != {} and use_cache:
        if verbose:
            log.write("Writing cache")
        with open(options.get_cache_path(), "wb") as f4:
            pickle.dump(cache, f4)
Example #8
def clean_up_path(path: Path) -> Path:
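    # Express the path relative to the resolved project base path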
    return path.resolve().relative_to(options.get_base_path().resolve())
Example #9
def path_to_object_path(path: Path) -> Path:
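    # Map a source file path to its object file path under the build directory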
    path = options.get_build_path() / path.with_suffix(
        path.suffix + ".o").relative_to(options.get_base_path())
    return clean_up_path(path)
Example #10
def main(config_path, base_dir, target_path, modes, verbose, use_cache=True):
    global config

    log.write(f"splat {VERSION} (powered by spimdisasm {spimdisasm.__version__})")

    # Load config
    config = {}
    for entry in config_path:
        with open(entry) as f:
            additional_config = yaml.load(f.read(), Loader=yaml.SafeLoader)
        config = merge_configs(config, additional_config)

    options.initialize(config, config_path, base_dir, target_path)
    options.set("modes", modes)

    if verbose:
        options.set("verbose", True)

    with options.get_target_path().open("rb") as f2:
        rom_bytes = f2.read()

    if "sha1" in config:
        sha1 = hashlib.sha1(rom_bytes).hexdigest()
        e_sha1 = config["sha1"].lower()
        if e_sha1 != sha1:
            log.error(f"sha1 mismatch: expected {e_sha1}, was {sha1}")

    # Create main output dir
    options.get_base_path().mkdir(parents=True, exist_ok=True)

    processed_segments: List[Segment] = []

    seg_sizes: Dict[str, int] = {}
    seg_split: Dict[str, int] = {}
    seg_cached: Dict[str, int] = {}

    # Load cache
    if use_cache:
        try:
            with options.get_cache_path().open("rb") as f3:
                cache = pickle.load(f3)

            if verbose:
                log.write(f"Loaded cache ({len(cache.keys())} items)")
        except Exception:
            cache = {}
    else:
        cache = {}

    # invalidate entire cache if options change
    if use_cache and cache.get("__options__") != config.get("options"):
        if verbose:
            log.write("Options changed, invalidating cache")

        cache = {
            "__options__": config.get("options"),
        }

    configure_disassembler()

    # Initialize segments
    all_segments = initialize_segments(config["segments"])

    # Load and process symbols
    symbols.initialize(all_segments)

    # Assign symbols to segments
    assign_symbols_to_segments()

    if options.mode_active("code"):
        symbols.initialize_spim_context(all_segments)

    # Resolve raster/palette siblings
    if options.mode_active("img"):
        palettes.initialize(all_segments)

    # Scan
    scan_bar = tqdm.tqdm(all_segments, total=len(all_segments))
    for segment in scan_bar:
        assert isinstance(segment, Segment)
        scan_bar.set_description(f"Scanning {brief_seg_name(segment, 20)}")
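        # Bucket the segment by type (default-named bin segments count as "unk") and tally its size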
        typ = segment.type
        if segment.type == "bin" and segment.is_name_default():
            typ = "unk"

        if typ not in seg_sizes:
            seg_sizes[typ] = 0
            seg_split[typ] = 0
            seg_cached[typ] = 0
        seg_sizes[typ] += 0 if segment.size is None else segment.size

        if segment.should_scan():
            # Check cache but don't write anything
            if use_cache:
                if segment.cache() == cache.get(segment.unique_id()):
                    continue

            segment.did_run = True
            segment.scan(rom_bytes)

            processed_segments.append(segment)

            seg_split[typ] += 1

    # Split
    for segment in tqdm.tqdm(
        all_segments,
        total=len(all_segments),
        desc=f"Splitting {brief_seg_name(segment, 20)}",
    ):
        if use_cache:
            cached = segment.cache()

            if cached == cache.get(segment.unique_id()):
                # Cache hit
                seg_cached[typ] += 1
                continue
            else:
                # Cache miss; split
                cache[segment.unique_id()] = cached

        if segment.should_split():
            segment.split(rom_bytes)

    if options.mode_active("ld"):
        global linker_writer
        linker_writer = LinkerWriter()
        for segment in tqdm.tqdm(
            all_segments,
            total=len(all_segments),
            desc=f"Writing linker script {brief_seg_name(segment, 20)}",
        ):
            linker_writer.add(segment)
        linker_writer.save_linker_script()
        linker_writer.save_symbol_header()

        # write elf_sections.txt - this only lists the generated sections in the elf, not subsections
        # that the elf combines into one section
        if options.get_create_elf_section_list_auto():
            section_list = ""
            for segment in all_segments:
                section_list += "." + to_cname(segment.name) + "\n"
            with open(options.get_elf_section_list_path(), "w", newline="\n") as f:
                f.write(section_list)

    # Write undefined_funcs_auto.txt
    if options.get_create_undefined_funcs_auto():
        to_write = [
            s
            for s in symbols.all_symbols
            if s.referenced and not s.defined and not s.dead and s.type == "func"
        ]
        to_write.sort(key=lambda x: x.vram_start)

        with open(options.get_undefined_funcs_auto_path(), "w", newline="\n") as f:
            for symbol in to_write:
                f.write(f"{symbol.name} = 0x{symbol.vram_start:X};\n")

    # write undefined_syms_auto.txt
    if options.get_create_undefined_syms_auto():
        to_write = [
            s
            for s in symbols.all_symbols
            if s.referenced
            and not s.defined
            and not s.dead
            and s.type not in {"func", "label", "jtbl_label"}
        ]
        to_write.sort(key=lambda x: x.vram_start)

        with open(options.get_undefined_syms_auto_path(), "w", newline="\n") as f:
            for symbol in to_write:
                f.write(f"{symbol.name} = 0x{symbol.vram_start:X};\n")

    # print warnings during split
    for segment in all_segments:
        if len(segment.warnings) > 0:
            log.write(
                f"{Style.DIM}0x{segment.rom_start:06X}{Style.RESET_ALL} {segment.type} {Style.BRIGHT}{segment.name}{Style.RESET_ALL}:"
            )

            for warn in segment.warnings:
                log.write("warning: " + warn, status="warn")

            log.write("")  # empty line

    # Statistics
    do_statistics(seg_sizes, rom_bytes, seg_split, seg_cached)

    # Save cache
    if cache != {} and use_cache:
        if verbose:
            log.write("Writing cache")
        with open(options.get_cache_path(), "wb") as f4:
            pickle.dump(cache, f4)