Example #1
def disassemble_all_code(self):
  if not os.path.isfile(r"C:\devkitPro\devkitPPC\bin\powerpc-eabi-objdump.exe"):
    raise Exception(r"Failed to disassemble code: Could not find devkitPPC. devkitPPC should be installed to: C:\devkitPro\devkitPPC")
  
  rels_arc = self.get_arc("files/RELS.arc")
  out_dir = os.path.join(self.randomized_output_folder, "disassemble")
  if not os.path.isdir(out_dir):
    os.mkdir(out_dir)
  main_symbols = get_main_symbols(self)
  
  
  files_to_disassemble = get_list_of_all_rels(self)
  files_to_disassemble.append("sys/main.dol")
  
  for file_path in files_to_disassemble:
    basename_with_ext = os.path.basename(file_path)
    print(basename_with_ext)
    
    rel_file_entry = rels_arc.get_file_entry(basename_with_ext)
    if rel_file_entry:
      rel_file_entry.decompress_data_if_necessary()
      data = rel_file_entry.data
    else:
      data = self.gcm.read_file_data(file_path)
      if try_read_str(data, 0, 4) == "Yaz0":
        data = Yaz0.decompress(data)
    
    basename, file_ext = os.path.splitext(basename_with_ext)
    
    is_rel = (file_ext == ".rel")
    
    
    bin_path = os.path.join(out_dir, basename_with_ext)
    with open(bin_path, "wb") as f:
      data.seek(0)
      f.write(data.read())
    
    asm_path = os.path.join(out_dir, basename + ".asm")
    
    disassemble_file(bin_path, asm_path)
    
    if is_rel:
      demangled_map_path = os.path.join(ASM_PATH, "maps-out", basename + ".map.out")
      if os.path.isfile(demangled_map_path):
        with open(demangled_map_path, "rb") as f:
          rel_map_data = BytesIO(f.read())
      else:
        rel_map_data = self.gcm.read_file_data("files/maps/" + basename + ".map")
      rel_map_data.seek(0)
      rel_map_data = rel_map_data.read()
      
      # Copy the map file to the output directory
      rel_map_path = os.path.join(out_dir, basename + ".map")
      with open(rel_map_path, "wb") as f:
        f.write(rel_map_data)
      
      rel_map_data = rel_map_data.decode("ascii")
      add_relocations_and_symbols_to_rel(asm_path, bin_path, main_symbols, rel_map_data)
    else:
      add_symbols_to_main(asm_path, main_symbols)
Example #2
    def save_changes(self):
        hash = 0
        for char in self.name:
            hash *= 3
            hash += ord(char)
            hash &= 0xFFFF
        self.name_hash = hash

        # Set or clear compressed type bits
        if not self.is_dir and Yaz0.check_is_compressed(self.data):
            self.type |= 0x84
        else:
            self.type &= ~0x84

        type_and_name_offset = (self.type << 24) | (self.name_offset
                                                    & 0x00FFFFFF)

        if self.is_dir:
            data_offset_or_node_index = self.node_index
        else:
            data_offset_or_node_index = self.data_offset

        if self.is_dir:
            self.data_size = 0x10
        else:
            self.data_size = data_len(self.data)

        write_u16(self.rarc.data, self.entry_offset + 0x00, self.id)
        write_u16(self.rarc.data, self.entry_offset + 0x02, self.name_hash)
        write_u32(self.rarc.data, self.entry_offset + 0x04,
                  type_and_name_offset)
        write_u32(self.rarc.data, self.entry_offset + 0x08,
                  data_offset_or_node_index)
        write_u32(self.rarc.data, self.entry_offset + 0x0C, self.data_size)
        write_u32(self.rarc.data, self.entry_offset + 0x10, 0)  # Pointer to the file's data, filled in at runtime.
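The name hash in save_changes above is a rolling multiply-by-3 over the file name, masked to 16 bits. A minimal standalone sketch of the same computation (the helper name compute_name_hash is hypothetical, not part of the snippets):

def compute_name_hash(name):
    name_hash = 0
    for char in name:
        name_hash *= 3
        name_hash += ord(char)
        name_hash &= 0xFFFF  # Keep the running value within 16 bits.
    return name_hash

print(compute_name_hash("ab"))  # 389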
Example #3
    def update_compression_flags_from_data(self):
        if self.is_dir:
            self.type &= ~RARCFileAttrType.COMPRESSED
            self.type &= ~RARCFileAttrType.YAZ0_COMPRESSED
            return

        if Yaz0.check_is_compressed(self.data):
            self.type |= RARCFileAttrType.COMPRESSED
            self.type |= RARCFileAttrType.YAZ0_COMPRESSED
        elif try_read_str(self.data, 0, 4) == "Yay0":
            self.type |= RARCFileAttrType.COMPRESSED
            self.type &= ~RARCFileAttrType.YAZ0_COMPRESSED
        else:
            self.type &= ~RARCFileAttrType.COMPRESSED
            self.type &= ~RARCFileAttrType.YAZ0_COMPRESSED
Example #4
    def get_raw_file(self, file_path):
        file_path = file_path.replace("\\", "/")

        if file_path in self.raw_files_by_path:
            return self.raw_files_by_path[file_path]
        else:
            if file_path.startswith("files/rels/"):
                raise Exception("Cannot read a REL as a raw file.")

            data = self.gcm.read_file_data(file_path)

            if try_read_str(data, 0, 4) == "Yaz0":
                data = Yaz0.decompress(data)

            self.raw_files_by_path[file_path] = data
            return data
Example #5
    def __init__(self, data):
        self.data = data

        if try_read_str(self.data, 0, 4) == "Yaz0":
            self.data = Yaz0.decompress(self.data)

        data = self.data

        self.magic = read_str(data, 0, 4)
        assert self.magic == "RARC"
        self.size = read_u32(data, 4)
        self.node_list_offset = 0x40
        self.file_data_list_offset = read_u32(data, 0xC) + 0x20
        self.file_data_total_size = read_u32(data, 0x10)
        self.file_data_total_size_2 = read_u32(data, 0x14)
        self.file_data_total_size_3 = read_u32(data, 0x18)
        self.num_nodes = read_u32(data, 0x20)
        self.total_num_file_entries = read_u32(data, 0x28)
        self.file_entries_list_offset = read_u32(data, 0x2C) + 0x20
        self.string_list_size = read_u32(data, 0x30)
        self.string_list_offset = read_u32(data, 0x34) + 0x20
        self.next_free_file_id = read_u16(data, 0x38)
        self.keep_file_ids_synced_with_indexes = read_u8(data, 0x3A)

        self.nodes = []
        for node_index in range(self.num_nodes):
            offset = self.node_list_offset + node_index * Node.ENTRY_SIZE
            node = Node(self)
            node.read(offset)
            self.nodes.append(node)

        self.file_entries = []
        for file_index in range(self.total_num_file_entries):
            file_entry_offset = self.file_entries_list_offset + file_index * FileEntry.ENTRY_SIZE
            file_entry = FileEntry(self)
            file_entry.read(file_entry_offset)
            self.file_entries.append(file_entry)

        for node in self.nodes:
            for file_index in range(node.first_file_index,
                                    node.first_file_index + node.num_files):
                file_entry = self.file_entries[file_index]
                file_entry.parent_node = node
                node.files.append(file_entry)

        self.instantiated_object_files = {}
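The header parsing above relies on read_u8/read_u16/read_u32/read_str helpers that the snippets never define. A plausible minimal sketch, assuming big-endian reads at absolute offsets into a BytesIO (the real helpers may differ):

import struct

def read_u8(data, offset):
    data.seek(offset)
    return data.read(1)[0]

def read_u16(data, offset):
    data.seek(offset)
    return struct.unpack(">H", data.read(2))[0]

def read_u32(data, offset):
    data.seek(offset)
    return struct.unpack(">I", data.read(4))[0]

def read_str(data, offset, length):
    data.seek(offset)
    return data.read(length).decode("ascii")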
Example #6
    def __init__(self, data):
        self.data = data

        if try_read_str(self.data, 0, 4) == "Yaz0":
            self.data = Yaz0.decompress(self.data)

        data = self.data

        self.magic = read_str(data, 0, 4)
        assert self.magic == "RARC"
        self.size = read_u32(data, 4)
        self.file_data_list_offset = read_u32(data, 0xC) + 0x20
        self.file_data_total_size = read_u32(data, 0x10)
        self.file_data_total_size_2 = read_u32(data, 0x14)
        self.file_data_total_size_3 = read_u32(data, 0x18)
        num_nodes = read_u32(data, 0x20)
        node_list_offset = 0x40
        self.total_num_file_entries = read_u32(data, 0x28)
        file_entries_list_offset = read_u32(data, 0x2C) + 0x20
        self.string_list_offset = read_u32(data, 0x34) + 0x20

        self.nodes = []
        for node_index in range(0, num_nodes):
            offset = node_list_offset + node_index * 0x10
            node = Node(data, offset)
            self.nodes.append(node)

        self.file_entries = []
        for node in self.nodes:
            for file_index in range(node.first_file_index,
                                    node.first_file_index + node.num_files):
                file_entry_offset = file_entries_list_offset + file_index * 0x14

                file_entry = FileEntry(data, file_entry_offset, self)
                self.file_entries.append(file_entry)
                node.files.append(file_entry)

        self.instantiated_object_files = {}
Example #7
    def save_changes(self):
        hash = 0
        for char in self.name:
            hash *= 3
            hash += ord(char)
            hash &= 0xFFFF
        self.name_hash = hash

        # Set or clear compressed type bits.
        if not self.is_dir and Yaz0.check_is_compressed(self.data):
            self.type |= RARCFileAttrType.COMPRESSED
            self.type |= RARCFileAttrType.YAZ0_COMPRESSED
        else:
            self.type &= ~RARCFileAttrType.COMPRESSED
            self.type &= ~RARCFileAttrType.YAZ0_COMPRESSED

        type_and_name_offset = (self.type << 24) | (self.name_offset
                                                    & 0x00FFFFFF)

        if self.is_dir:
            data_offset_or_node_index = self.node_index
        else:
            data_offset_or_node_index = self.data_offset

        if self.is_dir:
            self.data_size = 0x10
        else:
            self.data_size = data_len(self.data)

        write_u16(self.rarc.data, self.entry_offset + 0x00, self.id)
        write_u16(self.rarc.data, self.entry_offset + 0x02, self.name_hash)
        write_u32(self.rarc.data, self.entry_offset + 0x04,
                  type_and_name_offset)
        write_u32(self.rarc.data, self.entry_offset + 0x08,
                  data_offset_or_node_index)
        write_u32(self.rarc.data, self.entry_offset + 0x0C, self.data_size)
        write_u32(self.rarc.data, self.entry_offset + 0x10,
                  0)  # Pointer to the file's data, filled at runtime.
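type_and_name_offset above packs the 8-bit type flags into the top byte and the 24-bit string-table offset into the low bytes. A small sketch of packing and unpacking with that layout (both helper names are hypothetical):

def pack_type_and_name_offset(type_value, name_offset):
    # Top byte: type flags. Low 24 bits: offset into the string list.
    return ((type_value & 0xFF) << 24) | (name_offset & 0x00FFFFFF)

def unpack_type_and_name_offset(packed):
    return (packed >> 24) & 0xFF, packed & 0x00FFFFFF

packed = pack_type_and_name_offset(0x11, 0x000005)
assert unpack_type_and_name_offset(packed) == (0x11, 0x000005)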
Example #8
    def get_raw_file(self, file_path):
        file_path = file_path.replace("\\", "/")

        if file_path in self.raw_files_by_path:
            return self.raw_files_by_path[file_path]
        else:
            if file_path.startswith("files/rels/"):
                rel_name = os.path.basename(file_path)
                rels_arc = self.get_arc("files/RELS.arc")
                rel_file_entry = rels_arc.get_file_entry(rel_name)
            else:
                rel_file_entry = None

            if rel_file_entry:
                rel_file_entry.decompress_data_if_necessary()
                data = rel_file_entry.data
            else:
                data = self.gcm.read_file_data(file_path)

            if try_read_str(data, 0, 4) == "Yaz0":
                data = Yaz0.decompress(data)

            self.raw_files_by_path[file_path] = data
            return data
Example #9
    def read(self, data):
        self.data = data

        if try_read_str(self.data, 0, 4) == "Yaz0":
            self.data = Yaz0.decompress(self.data)

        data = self.data

        self.id = read_u32(data, 0)

        self.sections = []
        self.num_sections = read_u32(data, 0xC)
        self.section_info_table_offset = read_u32(data, 0x10)
        for section_index in range(0, self.num_sections):
            section_info_offset = self.section_info_table_offset + section_index * RELSection.ENTRY_SIZE
            section = RELSection()
            section.read(data, section_info_offset)
            self.sections.append(section)

        self.name_offset = read_u32(data, 0x14)
        self.name_length = read_u32(data, 0x18)
        self.rel_format_version = read_u32(data, 0x1C)

        self.bss_size = read_u32(data, 0x20)

        relocation_data_offset_for_module = OrderedDict()
        self.relocation_table_offset = read_u32(data, 0x24)
        self.imp_table_offset = read_u32(data, 0x28)
        self.imp_table_length = read_u32(data, 0x2C)
        offset = self.imp_table_offset
        while offset < self.imp_table_offset + self.imp_table_length:
            module_num = read_u32(data, offset)
            relocation_data_offset = read_u32(data, offset + 4)
            relocation_data_offset_for_module[
                module_num] = relocation_data_offset
            offset += 8

        self.relocation_entries_for_module = OrderedDict()
        curr_section_num = None
        for module_num, relocation_data_offset in relocation_data_offset_for_module.items():
            self.relocation_entries_for_module[module_num] = []

            offset = relocation_data_offset
            prev_relocation_offset = 0
            while True:
                relocation_type = RELRelocationType(read_u8(data, offset + 2))
                if relocation_type == RELRelocationType.R_DOLPHIN_END:
                    break

                relocation_data_entry = RELRelocation()
                relocation_data_entry.read(data, offset,
                                           prev_relocation_offset,
                                           curr_section_num)
                prev_relocation_offset = relocation_data_entry.relocation_offset

                if relocation_data_entry.relocation_type == RELRelocationType.R_DOLPHIN_SECTION:
                    curr_section_num = relocation_data_entry.section_num_to_relocate_against
                    prev_relocation_offset = 0
                else:
                    self.relocation_entries_for_module[module_num].append(
                        relocation_data_entry)

                offset += RELRelocation.ENTRY_SIZE

        self.prolog_section = read_u8(data, 0x30)
        self.epilog_section = read_u8(data, 0x31)
        self.unresolved_section = read_u8(data, 0x32)
        self.prolog_offset = read_u32(data, 0x34)
        self.epilog_offset = read_u32(data, 0x38)
        self.unresolved_offset = read_u32(data, 0x3C)

        self.alignment = read_u32(data, 0x40)
        self.bss_alignment = read_u32(data, 0x44)

        # Space after this fix_size offset can be reused for other purposes.
        # Such as using the space that originally had the relocations list for .bss static variables instead.
        self.fix_size = read_u32(data, 0x48)

        self.bss_section_index = None  # The byte at offset 0x33 in the REL is reserved for this value at runtime.
        for section_index, section in enumerate(self.sections):
            if section.is_bss:
                self.bss_section_index = section_index
                section.offset = self.bss_offset
                break
Example #10
 def __init__(self, data):
     if try_read_str(data, 0, 4) == "Yaz0":
         data = Yaz0.decompress(data)
     super(BTIFile, self).__init__(data)
Example #11
def disassemble_all_code(self):
    if not os.path.isfile(
            r"C:\devkitPro\devkitPPC\bin\powerpc-eabi-objdump.exe"):
        raise Exception(
            r"Failed to disassemble code: Could not find devkitPPC. devkitPPC should be installed to: C:\devkitPro\devkitPPC"
        )

    rels_arc = self.get_arc("files/RELS.arc")
    out_dir = os.path.join(self.randomized_output_folder, "disassemble")
    if not os.path.isdir(out_dir):
        os.mkdir(out_dir)

    demangled_map_path = os.path.join(ASM_PATH, "maps-out",
                                      "framework.map.out")
    if os.path.isfile(demangled_map_path):
        with open(demangled_map_path, "rb") as f:
            framework_map_contents = BytesIO(f.read())
    else:
        framework_map_contents = self.gcm.read_file_data(
            "files/maps/framework.map")
    framework_map_contents = read_all_bytes(framework_map_contents).decode(
        "ascii")
    main_symbols = get_main_symbols(framework_map_contents)

    all_rel_paths = get_list_of_all_rels(self)
    files_to_disassemble = all_rel_paths.copy()
    files_to_disassemble.append("sys/main.dol")

    for file_path_in_gcm in files_to_disassemble:
        basename_with_ext = os.path.basename(file_path_in_gcm)

        rel_file_entry = rels_arc.get_file_entry(basename_with_ext)
        if rel_file_entry:
            rel_file_entry.decompress_data_if_necessary()
            data = rel_file_entry.data
        else:
            data = self.gcm.read_file_data(file_path_in_gcm)
            if Yaz0.check_is_compressed(data):
                data = Yaz0.decompress(data)

        basename, file_ext = os.path.splitext(basename_with_ext)

        bin_path = os.path.join(out_dir, basename_with_ext)
        with open(bin_path, "wb") as f:
            data.seek(0)
            f.write(data.read())

    all_rels_by_path = OrderedDict()
    all_rel_symbols_by_path = OrderedDict()
    for file_path_in_gcm in all_rel_paths:
        basename_with_ext = os.path.basename(file_path_in_gcm)
        basename, file_ext = os.path.splitext(basename_with_ext)

        bin_path = os.path.join(out_dir, basename_with_ext)
        rel = REL()
        rel.read_from_file(bin_path)
        all_rels_by_path[file_path_in_gcm] = rel

        demangled_map_path = os.path.join(ASM_PATH, "maps-out",
                                          basename + ".map.out")
        if os.path.isfile(demangled_map_path):
            with open(demangled_map_path, "rb") as f:
                rel_map_data = BytesIO(f.read())
        else:
            rel_map_data = self.gcm.read_file_data("files/maps/" + basename +
                                                   ".map")
        rel_map_data.seek(0)
        rel_map_data = rel_map_data.read()

        # Copy the map file to the output directory
        rel_map_path = os.path.join(out_dir, basename + ".map")
        with open(rel_map_path, "wb") as f:
            f.write(rel_map_data)

        rel_map_data = rel_map_data.decode("ascii")

        all_rel_symbols_by_path[file_path_in_gcm] = get_rel_symbols(
            rel, rel_map_data)

    for file_path_in_gcm in files_to_disassemble:
        basename_with_ext = os.path.basename(file_path_in_gcm)
        print(basename_with_ext)

        basename, file_ext = os.path.splitext(basename_with_ext)

        bin_path = os.path.join(out_dir, basename_with_ext)
        asm_path = os.path.join(out_dir, basename + ".asm")

        disassemble_file(bin_path, asm_path)

        is_rel = (file_ext == ".rel")
        if is_rel:
            add_relocations_and_symbols_to_rel(asm_path, bin_path,
                                               file_path_in_gcm, main_symbols,
                                               all_rel_symbols_by_path,
                                               all_rels_by_path)
        else:
            add_symbols_to_main(self, asm_path, main_symbols)
Example #12
    def read(self, data):
        self.data = data

        if try_read_str(self.data, 0, 4) == "Yaz0":
            self.data = Yaz0.decompress(self.data)

        data = self.data

        # Read header.
        self.magic = read_str(data, 0, 4)
        assert self.magic == "RARC"
        self.size = read_u32(data, 4)
        self.data_header_offset = read_u32(data, 0x8)
        assert self.data_header_offset == 0x20
        self.file_data_list_offset = read_u32(data,
                                              0xC) + self.data_header_offset
        self.total_file_data_size = read_u32(data, 0x10)
        self.mram_file_data_size = read_u32(data, 0x14)
        self.aram_file_data_size = read_u32(data, 0x18)
        self.unknown_1 = read_u32(data, 0x1C)
        assert self.unknown_1 == 0

        # Read data header.
        self.num_nodes = read_u32(data, self.data_header_offset + 0x00)
        self.node_list_offset = read_u32(
            data, self.data_header_offset + 0x04) + self.data_header_offset
        self.total_num_file_entries = read_u32(data,
                                               self.data_header_offset + 0x08)
        self.file_entries_list_offset = read_u32(
            data, self.data_header_offset + 0x0C) + self.data_header_offset
        self.string_list_size = read_u32(data, self.data_header_offset + 0x10)
        self.string_list_offset = read_u32(
            data, self.data_header_offset + 0x14) + self.data_header_offset
        self.next_free_file_id = read_u16(data, self.data_header_offset + 0x18)
        self.keep_file_ids_synced_with_indexes = read_u8(
            data, self.data_header_offset + 0x1A)
        self.unknown_2 = read_u8(data, self.data_header_offset + 0x1B)
        assert self.unknown_2 == 0
        self.unknown_3 = read_u32(data, self.data_header_offset + 0x1C)
        assert self.unknown_3 == 0

        self.nodes = []
        for node_index in range(self.num_nodes):
            offset = self.node_list_offset + node_index * Node.ENTRY_SIZE
            node = Node(self)
            node.read(offset)
            self.nodes.append(node)

        self.file_entries = []
        for file_index in range(self.total_num_file_entries):
            file_entry_offset = self.file_entries_list_offset + file_index * FileEntry.ENTRY_SIZE
            file_entry = FileEntry(self)
            file_entry.read(file_entry_offset)
            self.file_entries.append(file_entry)

            if file_entry.is_dir and file_entry.node_index != 0xFFFFFFFF:
                file_entry.node = self.nodes[file_entry.node_index]
                if file_entry.name not in [".", ".."]:
                    assert file_entry.node.dir_entry is None
                    file_entry.node.dir_entry = file_entry

        for node in self.nodes:
            for file_index in range(node.first_file_index,
                                    node.first_file_index + node.num_files):
                file_entry = self.file_entries[file_index]
                file_entry.parent_node = node
                node.files.append(file_entry)

        self.instantiated_object_files = {}
Example #13
 def decompress_data_if_necessary(self):
     if try_read_str(self.data, 0, 4) == "Yaz0":
         self.data = Yaz0.decompress(self.data)
         # Clear compressed type bits.
         self.type &= ~RARCFileAttrType.COMPRESSED
         self.type &= ~RARCFileAttrType.YAZ0_COMPRESSED
Example #14
    def __init__(self, data):
        if Yaz0.check_is_compressed(data):
            data = Yaz0.decompress(data)
        self.data = data

        self.read()
Example #15
    def __init__(self, file_path):
        self.file_path = file_path
        with open(self.file_path, "rb") as file:
            self.data = BytesIO(file.read())

        if try_read_str(self.data, 0, 4) == "Yaz0":
            self.data = Yaz0.decompress(self.data)

        data = self.data

        self.id = read_u32(data, 0)

        self.sections = []
        self.num_sections = read_u32(data, 0xC)
        self.section_info_table_offset = read_u32(data, 0x10)
        for section_index in range(0, self.num_sections):
            section_info_offset = self.section_info_table_offset + section_index * 8
            section = Section(data, section_info_offset)
            self.sections.append(section)

        self.relocation_data_offset_for_module = {}
        self.imp_table_offset = read_u32(data, 0x28)
        self.imp_table_length = read_u32(data, 0x2C)
        offset = self.imp_table_offset
        while offset < self.imp_table_offset + self.imp_table_length:
            module_num = read_u32(data, offset)
            relocation_data_offset = read_u32(data, offset + 4)
            self.relocation_data_offset_for_module[
                module_num] = relocation_data_offset
            offset += 8

        self.relocation_entries_for_module = {}
        curr_section_num = None
        for module_num, relocation_data_offset in self.relocation_data_offset_for_module.items():
            self.relocation_entries_for_module[module_num] = []

            offset = relocation_data_offset
            prev_relocation_offset = 0
            while True:
                relocation_type = read_u8(data, offset + 2)
                if relocation_type == 0xCB:  # R_RVL_STOP
                    break

                relocation_data_entry = RelocationDataEntry(
                    data, offset, prev_relocation_offset, curr_section_num)
                prev_relocation_offset = relocation_data_entry.relocation_offset

                if relocation_data_entry.relocation_type == 0xCA:  # R_RVL_SECT
                    curr_section_num = relocation_data_entry.section_num_to_relocate_against
                    prev_relocation_offset = 0
                else:
                    self.relocation_entries_for_module[module_num].append(
                        relocation_data_entry)

                offset += 8

        # Space after this fix_size offset can be reused for other purposes.
        # Such as using the space that originally had the relocations list for .bss static variables instead.
        self.fix_size = read_u32(data, 0x48)
        self.fix_size = (self.fix_size +
                         0x1F) & ~(0x1F)  # Round up to nearest 0x20

        self.bss_section_index = None  # The byte at offset 0x33 in the REL is reserved for this value at runtime.
        for section_index, section in enumerate(self.sections):
            if section.is_bss:
                self.bss_section_index = section_index
                section.offset = self.fix_size
                break
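The fix_size adjustment above ((value + 0x1F) & ~0x1F) is the standard round-up-to-a-multiple-of-0x20 mask trick. A quick generalized sketch (align_up is a hypothetical helper, not from the snippets):

def align_up(value, alignment=0x20):
    # Only valid when alignment is a power of two.
    return (value + alignment - 1) & ~(alignment - 1)

assert align_up(0x41) == 0x60
assert align_up(0x60) == 0x60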
Example #16
 def decompress_data_if_necessary(self):
     if Yaz0.check_is_compressed(self.data):
         self.data = Yaz0.decompress(self.data)
         self.update_compression_flags_from_data()
Example #17
 def __init__(self, data):
     if Yaz0.check_is_compressed(data):
         data = Yaz0.decompress(data)
     super(BTIFile, self).__init__(data)
Example #18
 def decompress_data_if_necessary(self):
     if try_read_str(self.data, 0, 4) == "Yaz0":
         self.data = Yaz0.decompress(self.data)
         self.type &= ~0x84  # Clear compressed type bits
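Every example above gates decompression on the 4-byte "Yaz0" magic, via either try_read_str(data, 0, 4) == "Yaz0" or Yaz0.check_is_compressed(data). A minimal sketch of that check for BytesIO data, assuming the magic comparison is all check_is_compressed does (the helper name has_yaz0_magic is hypothetical):

from io import BytesIO

def has_yaz0_magic(data):
    # Peek at the first 4 bytes without disturbing the caller's stream position.
    pos = data.tell()
    magic = data.read(4)
    data.seek(pos)
    return magic == b"Yaz0"

assert has_yaz0_magic(BytesIO(b"Yaz0" + b"\x00" * 12))
assert not has_yaz0_magic(BytesIO(b"RARC"))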