def processFile(self, field):
    """Build a Metadata group for one archive member and register it.

    Reads filename/date/compression from *field*; sizes come from the
    "data_desc" subtree when present, otherwise from the member header.
    """
    file_meta = Metadata(self)
    file_meta.filename = field["filename"].value
    file_meta.creation_date = field["last_mod"].value
    file_meta.compression = field["compression"].display
    # Both layouts expose the same trailing field names; only the path
    # prefix differs ("data_desc/file_uncompressed_size" vs
    # "uncompressed_size").
    prefix = "data_desc/file_" if "data_desc" in field else ""
    file_meta.file_size = field[prefix + "uncompressed_size"].value
    if field[prefix + "compressed_size"].value:
        file_meta.compr_size = field[prefix + "compressed_size"].value
    # presumably derives a compression-ratio attribute from the two sizes
    # set above — helper defined elsewhere in this module
    computeCompressionRate(file_meta)
    self.addGroup(field.name, file_meta,
                  "File \"%s\"" % file_meta.get('filename'))
def processFile(self, field):
    """Collect per-member metadata from *field* and add it as a group.

    field: parser field set for one archive member; must expose
    "filename", "last_mod", "compression", and size fields either
    directly or under a "data_desc" subtree.
    """
    meta = Metadata(self)
    meta.filename = field["filename"].value
    meta.creation_date = field["last_mod"].value
    meta.compression = field["compression"].display
    if "data_desc" in field:
        # Sizes are stored in the trailing data descriptor.
        meta.file_size = field["data_desc/file_uncompressed_size"].value
        # Only record the compressed size when it is non-zero/non-empty.
        if field["data_desc/file_compressed_size"].value:
            meta.compr_size = field["data_desc/file_compressed_size"].value
    else:
        # Sizes are stored directly in the member header.
        meta.file_size = field["uncompressed_size"].value
        if field["compressed_size"].value:
            meta.compr_size = field["compressed_size"].value
    # presumably computes a compression-ratio attribute from the sizes
    # set above — helper defined elsewhere in this module
    computeCompressionRate(meta)
    self.addGroup(field.name, meta, "File \"%s\"" % meta.get('filename'))
def traverse_dir(self, p_dir_entry, p_cur_path, p_list):
    """Recursively walk an ISO 9660-style directory extent.

    p_dir_entry: directory record whose extent is scanned
    p_cur_path:  path prefix (including trailing separator) for entries found
    p_list:      flat list of parsed entry fields searched by find_entry

    Files are emitted as "file[]" metadata groups; subdirectories
    (file_flags bit 1 set) are descended into recursively.
    """
    # Byte offset of the directory's extent and total bytes to consume.
    l_loc = p_dir_entry["extent_loc"].value * SECTOR_SIZE
    l_len = p_dir_entry["size"].value
    l_read = 0
    if self.DEBUG is True:
        # Dump the byte address of every candidate field once per call.
        print(80 * "*")
        for l_index, l_field in enumerate(p_list):
            print("[%d] 0x%0.8x" % (l_index, l_field.absolute_address // 8))
        print(80 * "*")
    while l_read < l_len:
        l_entry = self.find_entry(l_loc, p_list)
        if l_entry is not None:
            if self.DEBUG:
                for l_field in l_entry:
                    print("%#x:%s=%s" % (l_field.absolute_address // 8,
                                         l_field.name, l_field.display))
            # Advance by this record's declared length.
            l_new_len = l_entry["rec_length"].value
            l_read += l_new_len
            # name_length <= 1 covers the "." and ".." self/parent records,
            # which are skipped.
            if l_entry["name_length"].value > 1:
                l_filename = self.get_filename(l_entry)
                # Bit 1 of file_flags marks a directory entry.
                if l_entry["file_flags"].value & 2:
                    if self.DEBUG:
                        print("entering directory %s" % l_filename)
                    self.traverse_dir(
                        l_entry,
                        "%s%s%s" % (p_cur_path, l_filename, sep), p_list)
                    if self.DEBUG:
                        print("leaving directory %s" % l_filename)
                else:
                    # Regular file: record dates, size and full path.
                    (acc_time, crea_time, mod_time) = self.get_dates(l_entry)
                    meta = Metadata(self)
                    meta.filename = "%s%s" % (p_cur_path, l_filename)
                    meta.last_modification = mod_time
                    meta.creation_date = crea_time
                    meta.file_size = l_entry["size"].value
                    self.addGroup("file[]", meta,
                                  "File \"%s\"" % meta.get('filename'))
                    if self.DEBUG:
                        print("adding file[] %s" % meta.get('filename'))
            l_loc = l_loc + l_new_len
        else:
            # No record at this offset: directory records never straddle a
            # sector boundary, so skip ahead to the next sector and count
            # the skipped padding against l_len.
            l_node_sec, l_node_rest = divmod(l_loc, SECTOR_SIZE)
            if self.DEBUG:
                print(
                    "no entry found at %#x, skipping %d bytes to sector boundary"
                    % (l_loc, SECTOR_SIZE - l_node_rest))
            l_loc = (l_node_sec + 1) * SECTOR_SIZE
            l_read += (SECTOR_SIZE - l_node_rest)
def useFile(self, field):
    """Record name, size, timestamp and attributes for one archive member."""
    entry = Metadata(self)
    entry.filename = field["filename"].value
    entry.file_size = field["filesize"].value
    entry.creation_date = field["timestamp"].value
    attr_value = field["attributes"].value
    # "(none)" is the parser's placeholder for an absent attribute set.
    if attr_value != "(none)":
        entry.file_attr = attr_value
    title = ("File \"%s\"" % entry.getText('filename')
             if entry.has("filename") else "File")
    self.addGroup(field.name, entry, title)
def useFile(self, field):
    """Record name, size, timestamp and attributes for one archive member.

    Group titles are passed through gettext for localisation.
    """
    entry = Metadata(self)
    entry.filename = field["filename"].value
    entry.file_size = field["filesize"].value
    entry.creation_date = field["timestamp"].value
    attr_value = field["attributes"].value
    # "(none)" is the parser's placeholder for an absent attribute set.
    if attr_value != "(none)":
        entry.file_attr = attr_value
    title = (_("File \"%s\"") % entry.getText('filename')
             if entry.has("filename") else _("File"))
    self.addGroup(field.name, entry, title)
def extract(self, mar):
    """Walk a Microsoft Archive (MAR) and register per-file metadata."""
    self.comment = "Contains %s files" % mar["nb_file"].value
    self.format_version = ("Microsoft Archive version %s"
                           % mar["version"].value)
    limit = maxNbFile(self)
    for position, entry in enumerate(mar.array("file")):
        # Stop once the configured per-archive file limit is reached.
        if limit is not None and position >= limit:
            self.warning("MAR archive contains many files, "
                         "but only first %s files are processed" % limit)
            break
        info = Metadata(self)
        info.filename = entry["filename"].value
        info.compression = "None"
        info.file_size = entry["filesize"].value
        self.addGroup(entry.name, info,
                      "File \"%s\"" % info.getText('filename'))
def extract(self, mar):
    """Extract archive-level and per-file metadata from a MAR archive.

    mar: parser root for the Microsoft Archive; exposes "nb_file",
    "version" and an array of "file" entries.
    """
    self.comment = "Contains %s files" % mar["nb_file"].value
    self.format_version = "Microsoft Archive version %s"\
        % mar["version"].value
    # Optional cap on how many entries to process (None means no limit).
    max_nb = maxNbFile(self)
    for index, field in enumerate(mar.array("file")):
        if max_nb is not None and max_nb <= index:
            self.warning("MAR archive contains many files, "
                         "but only first %s files are processed" % max_nb)
            break
        meta = Metadata(self)
        meta.filename = field["filename"].value
        # MAR entries are stored uncompressed.
        meta.compression = "None"
        meta.file_size = field["filesize"].value
        self.addGroup(field.name, meta,
                      "File \"%s\"" % meta.getText('filename'))
def extract(self, rar):
    """Extract archive-level flags and per-file metadata from a RAR archive.

    rar: parser root for the RAR file; exposes "signature",
    "/archive_start/flags/*", and arrays "new_sub_block" and "file".
    """
    l_max_nb = maxNbFile(self)

    # --- format version from the signature --------------------------------
    # FIX: default value guards against an UnboundLocalError below when the
    # signature matches none of the tested patterns (e.g. a "Rar!\x1A\x07"
    # header whose 7th byte is not \x00/\x01/\x02); the original code left
    # l_format_version unassigned in that case.
    l_format_version = "unknown"
    l_rarformat = rar["signature"].value
    if l_rarformat == b"RE~^":
        l_format_version = "1.4"
    elif l_rarformat[0:6] == b"Rar!\x1A\x07":
        if l_rarformat[6:7] == b"\x00":
            l_format_version = "1.5"  # RAR 4
        elif l_rarformat[6:7] == b"\x01":
            l_format_version = "5.0"
        elif l_rarformat[6:7] == b"\x02":
            l_format_version = "> 5.0"
    self.format_version = "RAR version %s" % l_format_version
    if l_format_version != "1.5":
        # Only the 1.5 layout is fully handled below.
        self.warning("RAR TODO: unknown format_version \"%s\" "
                     % l_format_version)

    # --- archive-level flags ----------------------------------------------
    if rar["/archive_start/flags/has_comment"].value:
        self.warning("RAR TODO: comment extraction not implemented")
        self.comment = "HACHOIR: comment extraction not implemented"
    l_has_recovery_record = rar[
        "/archive_start/flags/has_recovery_record"].value
    l_has_auth_verification = rar[
        "/archive_start/flags/has_auth_information"].value
    l_has_password = rar["/archive_start/flags/is_locked"].value
    l_is_multivolume = rar["/archive_start/flags/vol"].value
    l_is_solid = rar["/archive_start/flags/is_solid"].value
    is_first_vol = rar["/archive_start/flags/is_first_vol"].value

    # Named sub-blocks (comment, authenticity, recovery record) may set
    # flags in addition to the archive header.
    for l_index, l_field in enumerate(rar.array("new_sub_block")):
        if l_field["filename"].value == "CMT":
            self.warning("RAR TODO: comment unpacking not implemented")
            self.comment = "HACHOIR: comment unpacking not implemented"
        elif l_field["filename"].value == "AV":
            l_has_auth_verification = True
        elif l_field["filename"].value == "RR":
            l_has_recovery_record = True
        else:
            self.warning("RAR TODO: unknown sub_block \"%s\" "
                         % l_field["filename"].value)

    self.has_recovery_record = l_has_recovery_record
    self.has_auth_verification = l_has_auth_verification
    self.has_password = l_has_password
    self.is_multivolume = l_is_multivolume
    self.is_solid = l_is_solid
    self.is_first_vol = is_first_vol

    # --- per-file metadata ------------------------------------------------
    for l_index, l_field in enumerate(rar.array("file")):
        if l_max_nb is not None and l_max_nb <= l_index:
            self.warning(
                "RAR archive contains many files, but only first %s files are processed"
                % l_max_nb)
            break
        l_meta = Metadata(self)
        l_meta.filename = l_field["filename"].value
        l_meta.last_modification = l_field["ftime"].value
        l_meta.os = l_field["host_os"].display
        l_meta.application_version = l_field["version"].display
        l_meta.compression = l_field["method"].display
        l_meta.file_size = l_field["uncompressed_size"].value
        l_meta.compr_size = l_field["compressed_size"].value
        self.addGroup(l_field.name, l_meta,
                      "File \"%s\"" % l_meta.get('filename'))