def mainFooter(self):
    """Write the end-of-search summary (final offset, scanned size and
    global data rate) to stderr."""
    end_offset = self.current_offset // 8
    print(file=stderr)
    print("[+] End of search -- offset=%s (%s)"
          % (end_offset, humanFilesize(end_offset)), file=stderr)
    scanned = (self.current_offset - self.start_offset) // 8
    elapsed = time() - self.main_start
    # Skip the rate line for runs too short to measure meaningfully.
    if elapsed >= 0.1:
        rate = humanFilesize(scanned // elapsed)
        print("Total time: %s -- global rate: %s/sec"
              % (humanDuration(elapsed * 1000), rate), file=stderr)
def createDescription(self):
    """Describe the file system: EXT2 vs EXT3, total size, used size and
    block size (all human-readable)."""
    superblock = self.getSuperblock()
    block_size = 1024 << superblock["log_block_size"].value
    nb_block = superblock["blocks_count"].value
    total = nb_block * block_size
    # BUG FIX: free_blocks_count counts FREE blocks, so the previous
    # "used = free_blocks_count * block_size" reported the free space as
    # used space. Used space is the total minus the free space.
    used = total - superblock["free_blocks_count"].value * block_size
    desc = "EXT2/EXT3"
    # inode[7] (presumably the journal inode -- TODO confirm) has
    # allocated blocks only on EXT3 volumes.
    if "group[0]/inode_table/inode[7]/blocks" in self:
        if 0 < self["group[0]/inode_table/inode[7]/blocks"].value:
            desc = "EXT3"
        else:
            desc = "EXT2"
    return desc + " file system: total=%s, used=%s, block=%s" % (
        humanFilesize(total), humanFilesize(used), humanFilesize(block_size))
def processParser(self, offset, parser):
    """Process a valid parser: report the found file (and optionally
    extract it to the output directory), then reschedule progress."""
    parts = ["[+] File at %s" % (offset // 8)]
    content_size = parser.content_size
    if content_size is not None:
        parts.append(" size=%s (%s)" % (content_size // 8,
                                        humanFilesize(content_size // 8)))
    # Small (or unknown-size) files get the full description; huge ones
    # only the parser class name.
    if not content_size or content_size // 8 < FILE_MAX_SIZE:
        parts.append(": " + parser.description)
    else:
        parts.append(": " + parser.__class__.__name__)
    if self.output and content_size:
        if offset == 0 and content_size == self.size:
            parts.append(" (don't copy whole file)")
        elif content_size // 8 >= FILE_MAX_SIZE:
            parts.append(" (don't copy file, too big)")
        elif not self.filter or self.filter(parser):
            filename = self.output.createFilename(parser.filename_suffix)
            filename = self.output.writeFile(filename, self.stream,
                                             offset, content_size)
            parts.append(" => %s" % filename)
    print("".join(parts))
    self.next_progress = time() + PROGRESS_UPDATE
def __call__(self, prev):
    """Materialise the next entry of a FAT cluster chain and splice it
    into the parent field set at the cluster's address.

    *prev* is the previously created field in the chain.  Returns the new
    field, or None when the on-disk metadata is inconsistent.
    """
    name = self.path + "[]"
    # Next (address, size, is_last) triple from the cluster generator.
    address, size, last = next(self.cluster)
    if self.filesize:
        # File chain: stop once the whole declared size has been emitted.
        if self.done >= self.filesize:
            error("(FAT) bad metadata for " + self.path)
            return
        field = File(self.root, name, size=size)
        if prev.first is None:
            # First fragment carries the total file size in its description.
            field._description = 'File size: %s' % humanFilesize(
                self.filesize // 8)
        field.setSubIStream(self.createInputStream)
        # The last fragment may be shorter than a full cluster.
        field.datasize = min(self.filesize - self.done, size)
        self.done += field.datasize
    else:
        # No filesize: this chain describes a directory.
        field = Directory(self.root, name, size=size)
    # The target address must currently hold unparsed padding.
    padding = self.root.getFieldByAddress(address, feed=False)
    if not isinstance(padding, (PaddingBytes, RawBytes)):
        error("(FAT) address %u doesn't point to a padding field" % address)
        return
    if last:
        link_next = None
    else:
        # Lazily chain to the following cluster: calling link_next()
        # re-invokes this object with the field just created.
        def link_next():
            return self(field)
    field.setLinks(prev.first, link_next)
    # Replace the padding with the new field at this address.
    self.root.writeFieldsIn(padding, address, (field, ))
    return field
def extractAVI(self, headers, **kwargs):
    """Extract AVI metadata from *headers*: one group per video/audio
    stream, global header values, overall bit rate, and a comment when
    an audio/video index is present.

    Keyword argument ``scan_index`` (default True) controls whether the
    index chunk is reported.
    """
    audio_index = 1
    for stream in headers.array("stream"):
        if "stream_hdr/stream_type" not in stream:
            continue
        stream_type = stream["stream_hdr/stream_type"].value
        if stream_type == "vids":
            if "stream_hdr" in stream:
                meta = Metadata(self)
                self.extractAVIVideo(stream["stream_hdr"], meta)
                self.addGroup("video", meta, "Video stream")
        elif stream_type == "auds":
            if "stream_fmt" in stream:
                meta = Metadata(self)
                self.extractAVIAudio(stream["stream_fmt"], meta)
                self.addGroup("audio[%u]" % audio_index, meta,
                              "Audio stream")
                audio_index += 1
    if "avi_hdr" in headers:
        self.useAviHeader(headers["avi_hdr"])

    # Compute global bit rate
    if self.has("duration") and "/movie/size" in headers:
        self.bit_rate = float(
            headers["/movie/size"].value) * 8 / timedelta2seconds(
            self.get('duration'))

    # Video has index?
    # BUG FIX: the old tuple-indexing trick
    #   (True, kwargs['scan_index'])['scan_index' in kwargs]
    # evaluated kwargs['scan_index'] eagerly, raising KeyError whenever
    # the keyword was absent. dict.get() gives the intended default.
    scan_index = kwargs.get('scan_index', True)
    if scan_index and "/index" in headers:
        self.comment = "Has audio/video index (%s)" \
            % humanFilesize(headers["/index"].size // 8)
def createDescription(self):
    """One-line summary of this partition table entry."""
    if not self.isUsed():
        return "Partition header: (unused)"
    nbytes = self["size"].value * BLOCK_SIZE
    return "Partition header: %s, %s" % (self["system"].display,
                                         humanFilesize(nbytes))
def createDescription(self):
    """Describe the swap file: variant/version, page size and page count."""
    magic = self["magic"].value
    if magic == "S1SUSPEND\0":
        text = "Suspend swap file version 1"
    elif magic == "SWAPSPACE2":
        text = "Linux swap file version 2"
    else:
        text = "Linux swap file version 1"
    return "%s, page size: %s, %s pages" % (
        text, humanFilesize(PAGE_SIZE), self.getPageCount())
def __init__(self, parent, name, length, decompressor, description=None,
             parser=None, filename=None, mime_type=None, parser_class=None):
    """Bytes field holding compressed content.

    When a file name is known, normalise it to str and derive a default
    description from it; then wire up the decompressed sub-stream.
    """
    if filename and not isinstance(filename, str):
        filename = makePrintable(filename, "ISO-8859-1")
    if filename and not description:
        description = 'File "%s" (%s)' % (filename, humanFilesize(length))
    Bytes.__init__(self, parent, name, length, description)
    self.setupInputStream(decompressor, parser, filename, mime_type,
                          parser_class)
def createDescription(self):
    """Tar entry summary: path, entry type and human-readable size."""
    if self.isEmpty():
        return "Tar File (terminator, empty header)"
    path = self["name"].value
    prefix = self["prefix"].value
    if prefix:
        path = prefix + '/' + path
    return "Tar File (%s: %s, %s)" % (
        path, self["type"].display, humanFilesize(self.getOctal("size")))
def displayProgress(self):
    """
    Display progress (to stderr) of the whole process.
    Compute data rate (in byte per sec) and time estimation.
    """
    # Schedule the next refresh.
    self.next_progress = time() + PROGRESS_UPDATE
    # Position within the scanned range.
    done = self.current_offset - self.start_offset
    percent = float(done) * 100 / (self.size - self.start_offset)
    offset = self.current_offset // 8
    message = "Search: %.2f%% -- offset=%u (%s)" % (
        percent, offset, humanFilesize(offset))
    # Append data rate and ETA once a rate estimate is available.
    average = self.datarate.average
    if average:
        message += " -- %s/sec " % humanFilesize(average // 8)
        eta = float(self.size - self.current_offset) / average
        message += " -- ETA: %s" % humanDuration(eta * 1000)
    print(message, file=stderr)
def mainHeader(self):
    """Print the search header (amount of data to scan) to stderr and
    initialise per-run statistics and timers."""
    # Fix slice size if needed: a slice must hold the longest pattern.
    # NOTE(review): this reads self.patterns.max_length before
    # loadParsers() may populate the patterns -- confirm an empty
    # pattern set reports a sane max_length.
    self.slice_size = max(self.slice_size, self.patterns.max_length * 8)
    # Load parsers if none has been chosen
    if not self.patterns:
        self.loadParsers()
    # IDIOM FIX: renamed local "bytes", which shadowed the builtin type.
    nb_bytes = (self.size - self.start_offset) // 8
    print("[+] Start search on %s bytes (%s)" % (
        nb_bytes, humanFilesize(nb_bytes)), file=stderr)
    print(file=stderr)
    self.stats = {}
    self.current_offset = self.start_offset
    self.main_start = time()
def createDescription(self):
    """Describe an inode: unused, ordinary (type/size/mode), a known
    reserved inode by name, or "special"."""
    desc = "Inode %s: " % self.uniq_id
    nbytes = self["size"].value + (self['size_high'].value << 32)
    size_text = humanFilesize(nbytes)
    if self["links_count"].value == 0:
        return desc + "(unused)"
    if 11 <= self.uniq_id:
        # Ordinary inode: ids below 11 are handled as reserved below.
        return desc + "%s, size=%s, mode=%s" % (
            self.describe_file(), size_text, self['mode'].display)
    if self.uniq_id in self.inode_type_name:
        desc += self.inode_type_name[self.uniq_id]
        if self.uniq_id == 2:
            # Inode 2 additionally reports size and mode.
            desc += ", size=%s, mode=%s" % (size_text,
                                            self['mode'].display)
        return desc
    return desc + "special"
def __init__(self, parent, name, length, description=None, parser=None,
             filename=None, mime_type=None, parser_class=None):
    """Bytes field exposing its content as a tagged sub-stream.

    The tags (parser class, parser id, MIME type, file name) are attached
    when the sub-stream is created, so the right parser can be selected
    for the embedded file.
    """
    if filename and not isinstance(filename, str):
        filename = makePrintable(filename, "ISO-8859-1")
    if filename and not description:
        description = 'File "%s" (%s)' % (filename, humanFilesize(length))
    Bytes.__init__(self, parent, name, length, description)

    def tagged_stream(cis, **args):
        # Append our hints to the (possibly pre-existing) tag list.
        tags = args.setdefault("tags", [])
        if parser_class:
            tags.append(("class", parser_class))
        if parser is not None:
            tags.append(("id", parser.PARSER_TAGS["id"]))
        if mime_type:
            tags.append(("mime", mime_type))
        if filename:
            tags.append(("filename", filename))
        return cis(**args)
    self.setSubIStream(tagged_stream)
def checkPattern(self):
    """Check that the padding content repeats self.pattern.

    Returns True when the content matches; False when checking is
    disabled, no pattern is defined, or a mismatch is found.
    """
    if not config.check_padding_pattern:
        return False
    if self.pattern is None:
        return False
    # Cap the amount of data inspected for very large padding fields.
    if self._size // 8 > self.MAX_SIZE:
        self.info("only check first %s of padding"
                  % humanFilesize(self.MAX_SIZE))
        content = self._parent.stream.readBytes(self.absolute_address,
                                                self.MAX_SIZE)
    else:
        content = self.value
    pattern = self.pattern
    step = len(pattern)
    pos = 0
    while pos < len(content):
        if content[pos:pos + step] != pattern:
            self.warning("padding contents doesn't look normal"
                         " (invalid pattern at byte %u)!" % pos)
            return False
        pos += step
    return True
def __init__(self, parent, name, description=None):
    """PNG chunk field set.

    Computes the chunk's total size from its length field, then looks the
    chunk tag up in TAG_INFO to install a tag-specific name, parse
    function, description and value function; unknown tags get no
    description and no parse function.

    Raises ParserError when the chunk exceeds MAX_CHUNK_SIZE bytes.
    """
    FieldSet.__init__(self, parent, name, description)
    # Total chunk size in bits: payload length plus 3 * 4 extra bytes
    # (presumably the length, tag and CRC words -- TODO confirm).
    self._size = (self["size"].value + 3 * 4) * 8
    if MAX_CHUNK_SIZE < (self._size // 8):
        raise ParserError("PNG: Chunk is too big (%s)"
                          % humanFilesize(self._size // 8))
    tag = self["tag"].value
    self.desc_func = None
    self.value_func = None
    if tag in self.TAG_INFO:
        # TAG_INFO maps tag -> (name, parse function, description,
        # value function).
        self._name, self.parse_func, desc, value_func = self.TAG_INFO[tag]
        if value_func:
            self.value_func = value_func
            # Route value creation through the tag-specific function.
            self.createValue = self.createValueFunc
        if desc:
            if isinstance(desc, str):
                # Static description string.
                self._description = desc
            else:
                # Callable: description is computed later via desc_func.
                self.desc_func = desc
    else:
        self._description = ""
        self.parse_func = None
def createDescription(self):
    """Summarise the block group: count and human-readable block length."""
    count = self["num_blocks"].value
    each = humanFilesize(self["len"].value)
    return "%d blocks of %s" % (count, each)
def createDisplay(self):
    """Render size entries ("length", "piece_length") with humanFilesize;
    defer everything else to the default display."""
    if not self["value"].hasValue():
        return None
    if self._name not in ("length", "piece_length"):
        return FieldSet.createDisplay(self)
    return humanFilesize(self.value)
def createDescription(self):
    """NTFS Master Boot Record summary with the total size."""
    sector_bytes = self["bios/bytes_per_sector"].value
    total = self["nb_sectors"].value * sector_bytes
    return "NTFS Master Boot Record (%s)" % humanFilesize(total)