Esempio n. 1
0
 def open(self, file_path, mode = "rb"):
     """ Open the BDT file at file_path, return True on success.

     On failure the error is logged, self.opened is left unchanged and False
     is returned, matching the True/False convention of the other load/save
     methods in this project (previously both paths returned None).
     """
     try:
         self.bdt_file = open(file_path, mode)
     except OSError as exc:
         LOG.error("Error opening {}: {}".format(file_path, exc))
         return False
     self.opened = True
     return True
Esempio n. 2
0
 def _load_infos(self, bnd_info_path):
     try:
         with open(bnd_info_path, "r") as infos_file:
             infos = json.load(infos_file)
         self.magic = infos["magic"].encode("ascii")
         self.flags = infos["flags"]
     except OSError as exc:
         LOG.error("Error reading {}: {}".format(bnd_info_path, exc))
     self._set_entry_bin()
Esempio n. 3
0
 def _load_infos(self, infos_path):
     try:
         with open(infos_path, "r") as infos_file:
             infos = json.load(infos_file)
     except OSError as exc:
         LOG.error("Error reading {}: {}".format(infos_path, exc))
         return
     self.ident = infos["ident"]
     self.decoded_path = infos["path"]
     self.set_has_absolute_path()
Esempio n. 4
0
 def load(self, file_path):
     """ Load a DCX file, return True on success. """
     success = True
     try:
         with open(file_path, "rb") as dcx_file:
             self._load_header(dcx_file)
             self._load_content(dcx_file)
     except OSError as exc:
         LOG.error("Error reading '{}': {}".format(file_path, exc))
         success = False
     return success
Esempio n. 5
0
 def save(self, output_path):
     """ Save the DCX file at output_path, return True on success. """
     try:
         with open(output_path, "wb") as out_file:
             self._save_header(out_file)
             self._save_content(out_file)
     except OSError as exc:
         LOG.error("Error writing '{}': {}".format(output_path, exc))
         return False
     else:
         return True
Esempio n. 6
0
 def _load_header(self, bnd_file):
     """ Parse the BND header fields, then prepare the entry format. """
     header = read_struct(bnd_file, self.HEADER_BIN)
     self.magic = header[0]
     self.flags = header[1]
     self.num_entries = header[2]
     self.data_position = header[3]
     # Unexpected values are only worth a debug note; loading continues.
     if self.magic not in self.KNOWN_MAGICS:
         LOG.debug("Unknown magic {}".format(self.magic.decode("ascii")))
     if self.flags not in self.KNOWN_FLAGS:
         LOG.debug("Unknown flags {}".format(hex(self.flags)))
     self._set_entry_bin()
Esempio n. 7
0
 def _write_infos(self, output_path):
     infos = {
         "ident": self.ident,
         "path": self.decoded_path
     }
     json_path = output_path + ".json"
     try:
         with open(json_path, "w") as infos_file:
             json.dump(infos, infos_file)
     except OSError as exc:
         LOG.error("Error writing {}: {}".format(json_path, exc))
Esempio n. 8
0
 def _write_infos(self, output_dir):
     infos = {
         "magic": self.magic.decode("ascii"),
         "flags": self.flags
     }
     json_path = os.path.join(output_dir, self.INFOS_FILE_NAME)
     try:
         with open(json_path, "w") as infos_file:
             json.dump(infos, infos_file)
     except OSError as exc:
         LOG.error("Error writing {}: {}".format(json_path, exc))
Esempio n. 9
0
 def load_records_map(self, input_dir):
     """ Load the archive's records map that will be used to generate an
     archive with original record-to-entries map, return True on success. """
     map_path = os.path.join(input_dir, self.RECORDS_MAP_NAME)
     if not os.path.isfile(map_path):
         LOG.error("Records map file can't be found.")
         return False
     # The isfile() check above can race with the file being removed, so the
     # read is guarded too, consistently with the other load methods.
     try:
         with open(map_path, "r") as records_map_file:
             self.records_map = json.load(records_map_file)
     except OSError as exc:
         LOG.error("Error reading {}: {}".format(map_path, exc))
         return False
     return True
Esempio n. 10
0
 def load(self, file_path):
     """ Load the whole BND archive in memory, return True on success. """
     self.reset()
     loaded = True
     try:
         with open(file_path, "rb") as archive_file:
             self._load_header(archive_file)
             self._load_entries(archive_file)
     except OSError as exc:
         LOG.error("Error reading {}: {}".format(file_path, exc))
         loaded = False
     return loaded
Esempio n. 11
0
 def load_decompressed_list(self, input_dir):
     """ Load the list of files in that input dir that should be compressed
     before being imported in the archive. """
     list_path = os.path.join(input_dir, self.DECOMPRESSED_LIST_NAME)
     # Guard clause: a missing list is a normal situation, not an error.
     if not os.path.isfile(list_path):
         LOG.info("No decompressed file list found in the input dir.")
         return False
     with open(list_path, "r") as list_file:
         self.decompressed_list = json.load(list_file)
     LOG.info("Loaded decompressed file list.")
     return True
Esempio n. 12
0
 def save(self, output_path):
     """ Save the BND file at output_path, return True on success. """
     strings_block, files_block = self._generate_data()
     try:
         with open(output_path, "wb") as bnd_file:
             self._save_header(bnd_file)
             self._save_entries(bnd_file)
             bnd_file.write(strings_block)
             bnd_file.write(files_block)
     except OSError as exc:
         LOG.error("Error writing {}: {}".format(output_path, exc))
         return False
     # Fix: the success path previously fell through and returned None,
     # despite the documented True-on-success contract (cf. the DCX save()).
     return True
Esempio n. 13
0
 def _try_decompress(self, rel_path, base_rel_path, output_dir):
     """ Try to decompress the DCX at rel_path to base_rel_path, in the
     output_dir; fails if a file is already expected at base_rel_path. """
     if base_rel_path in self.filelist.values():
         LOG.info("Won't decompress {} because it conflicts with {}".format(
             rel_path, base_rel_path
         ))
         return
     relative = os.path.normpath(rel_path.lstrip("/"))
     target_path = os.path.join(output_dir, relative)
     # Remember successfully decompressed files so imports can re-compress.
     if ExternalArchive._decompress(target_path):
         self.decompressed_list.append(base_rel_path)
Esempio n. 14
0
 def _update_record(self, rel_path, data_entry):
     """ Add the data entry to the record associated with that relative path,
     return True on success. """
     try:
         index = next(( int(index)
                        for index, files in self.records_map.items()
                        if rel_path in files ))
     except StopIteration:
         LOG.error("File {} not in any record.".format(rel_path))
         return False
     record = self.bhd.records[index]
     record.entries.append(data_entry)
     return True
Esempio n. 15
0
    def extract_all_files(self, output_dir, write_infos = True):
        """ Extract all files contained in this archive in output_dir.

        When write_infos is True (default), every extracted file gets its own
        JSON infos file on disk, plus one general JSON file for the whole BND,
        so that a later import_files call can reuse the same BND and entry
        properties and avoid breaking anything when a file was edited.
        """
        for entry in self.entries:
            rel_path = entry.get_joinable_path()
            LOG.info("Extracting {}".format(rel_path))
            entry.extract_file(os.path.join(output_dir, rel_path), write_infos)
        self._write_infos(output_dir)
Esempio n. 16
0
    def extract_file(self, output_path, write_infos = True):
        """ Write entry data at output_path, return True on success.

        When write_infos is True (default), the entry's JSON infos file is
        written next to the extracted file as well.
        """
        try:
            # Fix: os.makedirs("") raises when output_path has no directory
            # part; only create parent directories when there are any.
            parent_dir = os.path.dirname(output_path)
            if parent_dir and not os.path.isdir(parent_dir):
                os.makedirs(parent_dir)
            with open(output_path, "wb") as output_file:
                output_file.write(self.data)
        except OSError as exc:
            LOG.error("Error writing {}: {}".format(output_path, exc))
            return False

        if write_infos:
            self._write_infos(output_path)

        return True
Esempio n. 17
0
    def import_file(self, file_path):
        """ Append the file's content to the open BDT file.

        Return a (position, written) pair, where position is the offset at
        which writing started and written is the number of bytes written,
        or -1 when an OSError occurred.
        """
        position = self.bdt_file.tell()
        try:
            with open(file_path, "rb") as input_file:
                content = input_file.read()
            written = self.bdt_file.write(content)
            # Pad the BDT file to 16-byte if needed.
            pad_file(self.bdt_file, 16)
        except OSError as exc:
            LOG.error("Error importing {}: {}".format(
                file_path, exc
            ))
            return position, -1
        return position, written
Esempio n. 18
0
    def import_file(self, data_dir, file_dir, file_name):
        """ Try to import the file file_name in file_dir, with data_dir as the
        archive root; create a data entry in the appropriate record, and write
        the file data in the BDT file. Return True on success. """
        file_path = os.path.join(file_dir, file_name)

        # Find rel_path: either a hashable name like "/chr/c5352.anibnd.dcx"
        # or directly a hash name like "192E66A4".
        is_unnamed = self.UNNAMED_FILE_RE.match(file_name) is not None
        if is_unnamed:
            rel_path = file_name
        else:
            # Archive-relative paths are rooted with a leading slash.
            rel_path = ExternalArchive._get_rel_path(data_dir, file_path)
            rel_path = "/" + rel_path
        LOG.info("Importing {}".format(rel_path))

        # If the file is in the decompressed list, it has to be compressed first
        # and that means we have to create its DCX file, then we update the
        # paths we use afterwards.
        if rel_path in self.decompressed_list:
            joinable_rel_path = os.path.normpath(rel_path.lstrip("/"))
            decompressed_path = os.path.join(data_dir, joinable_rel_path)
            success = ExternalArchive._compress(decompressed_path)
            if not success:
                return False
            # From here on, work with the freshly created .dcx companion file.
            rel_path = rel_path + ".dcx"
            file_path = file_path + ".dcx"

        # Import the file; the result is a (position, written bytes) pair.
        import_results = self.bdt.import_file(file_path)
        if import_results[1] == -1:  # written bytes
            return False

        # Unnamed files aren't decompressed, so their hash is already available.
        # Named files can be decompressed, therefore we don't know their
        # relative path until now.
        if is_unnamed:
            # The file name itself is the hash in hexadecimal (e.g. "192E66A4").
            entry_hash = int(file_name, 16)
        else:
            entry_hash = BhdDataEntry.hash_name(rel_path)

        data_entry = BhdDataEntry()
        data_entry.hash = entry_hash
        # Offset in the BDT and number of bytes written, from the import above.
        data_entry.offset, data_entry.size = import_results

        record_is_updated = self._update_record(rel_path, data_entry)
        return record_is_updated
Esempio n. 19
0
    def save_decompressed(self, output_path):
        """ Save the decompressed content at output_path, return True on
        success and False if an error occurred with zlib or the export. """
        try:
            plain_data = zlib.decompress(self.zlib_data)
        except zlib.error as exc:
            LOG.error("Zlib error: {}".format(exc))
            return False

        try:
            with open(output_path, "wb") as output_file:
                output_file.write(plain_data)
        except OSError as exc:
            LOG.error("Error writing '{}': {}".format(output_path, exc))
            return False

        return True
Esempio n. 20
0
    def import_file(self, file_path):
        """ Load the file at file_path and the associated informations,
        return True on success. """
        self.reset()

        # Optional JSON sidecar holding the entry's ident and original path.
        file_infos_path = file_path + ".json"
        if os.path.isfile(file_infos_path):
            self._load_infos(file_infos_path)

        try:
            with open(file_path, "rb") as input_file:
                self.data = input_file.read()
        except OSError as exc:
            # Fix: the message wrongly said "Error writing" for a read failure.
            LOG.error("Error reading {}: {}".format(file_path, exc))
            return False

        # len(data) is the size of what was actually read; avoids a second
        # stat() call racing with a concurrent file modification.
        self.data_size = len(self.data)
        self.unk2 = self.data_size

        return True
Esempio n. 21
0
    def load_decompressed(self, file_path):
        """ Compress the file content, import its content and update the
        different sizes variables. Return True on success and False if an
        error occurred with zlib or the import. """
        try:
            with open(file_path, "rb") as input_file:
                raw = input_file.read()
        except OSError as exc:
            LOG.error("Error reading '{}': {}".format(file_path, exc))
            return False

        try:
            # Level 9: favour compression ratio over speed.
            self.zlib_data = zlib.compress(raw, 9)
        except zlib.error as exc:
            LOG.error("Zlib error: {}".format(exc))
            return False

        self.sizes.uncompressed_size = os.stat(file_path).st_size
        self.sizes.compressed_size = len(self.zlib_data)
        return True
Esempio n. 22
0
    def export_file(self, entry, output_dir):
        """ Export the file corresponding to that BHD data entry, return the
        relative file path on success, None on failure """
        if not self.is_entry_valid(entry):
            LOG.error("Tried to extract a file not from this archive.")
            return None

        # Unnamed entries fall back to their 8-digit uppercase hex hash.
        rel_path = self.filelist.get(entry.hash) or "{:08X}".format(entry.hash)
        LOG.info("Extracting {}".format(rel_path))

        file_content = self.bdt.read_entry(entry.offset, entry.size)
        content_len = len(file_content)
        # A short read means the archive data is truncated or inconsistent.
        if content_len != entry.size:
            LOG.error( "Tried to read {} bytes but only {} were available "
                       "(file '{}').".format(
                entry.size, content_len, rel_path
            ))
            return None

        output_path = os.path.join(output_dir, rel_path.lstrip("/"))
        parent_dir = os.path.dirname(output_path)
        if not os.path.isdir(parent_dir):
            os.makedirs(parent_dir)
        with open(output_path, "wb") as output_file:
            output_file.write(file_content)
        return rel_path
Esempio n. 23
0
 def _save_files(self, bhd_path):
     """ Write both BHD and BDT files to disk.

     The BHD header file is saved at bhd_path; the BDT side only needs to be
     closed, since its data is presumably written incrementally as files are
     imported (see the BDT import_file method) -- TODO confirm.
     """
     LOG.info("Saving files to disk...")
     self.bhd.save(bhd_path)
     self.bdt.close()