Example #1
 def parse(self):
     try:
         self.data = uf2.Uf2.from_io(self.infile)
     except (Exception, ValidationFailedError) as e:
         raise UnpackParserException(e.args)
     check_condition(self.data.uf2_block_start.block_number == 0,
                     'invalid start block')
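
Every example in this collection leans on the same two helpers. A minimal sketch of the assumed contract of check_condition() (the real definition lives elsewhere in the codebase):

    def check_condition(condition, message):
        # convert a failed sanity check into the parser-level
        # exception instead of asserting, so the caller can treat
        # it as "not this file format"
        if not condition:
            raise UnpackParserException(message)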
Example #2
    def parse(self):
        try:
            self.data = kaitai_snappy.Snappy.from_io(self.infile)
        except (Exception, ValidationFailedError) as e:
            raise UnpackParserException(e.args)

        # first chunk has to be the header, even though
        # this is already covered by the signature
        check_condition(
            self.data.chunks[0].identifier ==
            kaitai_snappy.Snappy.ChunkTypes.stream_identifier,
            "invalid first chunk")

        seen_stream_identifier = False

        self.unpacked_size = 0
        for chunk in self.data.chunks:
            if chunk.is_valid:
                # check to see if there possibly is more than one stream;
                # if so, decompress them separately
                if chunk.identifier == kaitai_snappy.Snappy.ChunkTypes.stream_identifier:
                    if not seen_stream_identifier:
                        seen_stream_identifier = True
                    else:
                        break
                # each chunk is a 4 byte header (a 1 byte type followed
                # by a 3 byte little endian length) plus the body
                self.unpacked_size += 4 + chunk.body.len_chunk.value
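
The 4 added per chunk is the fixed chunk header of the snappy framing format: one type byte followed by a three byte little endian length. A minimal sketch of reading such a header by hand (the helper name is illustrative):

    import struct

    def read_chunk_header(infile):
        # one type byte plus a 3 byte little endian length,
        # zero-padded to 4 bytes so struct can decode it
        header = infile.read(4)
        chunk_type = header[0]
        length = struct.unpack('<I', header[1:4] + b'\x00')[0]
        return chunk_type, length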
Example #3
    def calculate_unpacked_size(self):
        # According to https://web.archive.org/web/20080321063028/http://technet2.microsoft.com/windowsserver/en/library/bdeda920-1f08-4683-9ffb-7b4b50df0b5a1033.mspx?mfr=true
        # the backup GPT header is at the last sector of the disk
        #
        # There are situations, such as on some Android devices, where the
        # GPT partition table and the actual partitions are separate from
        # each other and where the LBA of the backup GPT is 0.
        #
        # There are also situations where the partition table is completely
        # unreliable, for example Android devices where certain partitions have
        # been removed from the firmware update, but where the partition table
        # has not been changed.
        try:
            self.unpacked_size = (self.data.primary.backup_lba +
                                  1) * self.data.sector_size
        except Exception as e:
            raise UnpackParserException(e.args)

        all_entries_size = self.data.primary.entries_size * self.data.primary.entries_count
        self.unpacked_size = max(
            self.unpacked_size,
            self.data.primary.entries_start * self.data.sector_size +
            all_entries_size)
        for entry in self.data.primary.entries:
            partition_start = entry.first_lba * self.data.sector_size
            partition_end = (entry.last_lba + 1) * self.data.sector_size
            # skip partitions that fall (partially) outside of the file
            if partition_end > self.fileresult.filesize:
                continue
            self.unpacked_size = max(self.unpacked_size, partition_end)
        check_condition(self.unpacked_size <= self.fileresult.filesize,
                        "partition bigger than file")
Example #4
    def parse(self):
        # For reasons unknown pyOpenSSL sometimes barfs on certs from
        # Android, so use an external tool (for now).
        check_condition(
            shutil.which('openssl') is not None, "openssl program not found")

        buf = self.infile.read(80)
        self.certtype = None
        if b'PRIVATE KEY' in buf:
            self.certtype = 'key'
        elif b'CERTIFICATE' in buf:
            self.certtype = 'certificate'

        # try to find the end of the certificate
        end_pos = -1
        self.infile.seek(0)
        self.pos = self.infile.tell()
        cert_unpacked = False

        while True:
            buf = self.infile.read(2048)
            if buf == b'':
                break
            end_pos = buf.find(b'-----END')

            if end_pos != -1:
                if self.certtype == 'key':
                    end_res = buf.find(b'KEY-----', end_pos)
                    if end_res != -1:
                        end_of_certificate = self.pos + end_res + 8
                        cert_unpacked = True
                elif self.certtype == 'certificate':
                    end_res = buf.find(b'CERTIFICATE-----', end_pos)
                    if end_res != -1:
                        end_of_certificate = self.pos + end_res + 16
                        cert_unpacked = True
                else:
                    end_res = buf.find(b'-----', end_pos + 1)
                    if end_res != -1:
                        end_of_certificate = self.pos + end_res + 5
                        cert_unpacked = True
                break

            # stop when the end of the file has been reached
            if self.infile.tell() + self.offset == self.fileresult.filesize:
                break

            # rewind a little so markers that straddle a read
            # boundary are seen whole in the next buffer
            self.infile.seek(-15, os.SEEK_CUR)
            self.pos = self.infile.tell()

        check_condition(end_pos != -1, "no end of certificate found")
        check_condition(cert_unpacked, "no certificate found")

        # check the certificate
        self.infile.seek(0)
        cert = self.infile.read(end_of_certificate)
        check_condition(
            all(chr(x) in string.printable for x in cert),
            "text cert can only contain ASCII printable characters")
        (res, self.cert_labels) = self.extract_certificate(cert)
        check_condition(res, "not a valid certificate")
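
The hard-coded offsets 8, 16 and 5 in the scan above are simply the lengths of the trailer fragments being searched for; computing them instead of hard-coding them keeps the arithmetic honest:

    # the magic offsets are just the lengths of the markers
    assert len(b'KEY-----') == 8
    assert len(b'CERTIFICATE-----') == 16
    assert len(b'-----') == 5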
Example #5
    def unpack(self):
        unpacked_files = []
        out_labels = []
        unpackdir_full = self.scan_environment.unpack_path(self.rel_unpack_dir)

        # GNU ar grew the --output option in binutils 2.34; older
        # versions will fail here with an "invalid option" error
        p = subprocess.Popen([
            'ar', 'x', self.fileresult.filename,
            '--output=%s' % unpackdir_full
        ],
                             stdout=subprocess.PIPE,
                             stderr=subprocess.PIPE)
        (outputmsg, errormsg) = p.communicate()
        # TODO: look into cleanup if unpacking fails, is it necessary?
        check_condition(p.returncode == 0, "Not a valid ar file")

        # walk the results directory
        for result in unpackdir_full.glob('**/*'):
            # first change the permissions
            result.chmod(stat.S_IRUSR | stat.S_IWUSR | stat.S_IXUSR)

            # then add the file to the result set
            file_path = result.relative_to(unpackdir_full)
            fr = FileResult(self.fileresult, self.rel_unpack_dir / file_path,
                            set())
            unpacked_files.append(fr)

        return unpacked_files
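
If the installed ar predates binutils 2.34 (and so lacks --output), a portable fallback is to run the extraction with the working directory set to the unpack directory. A sketch, assuming the same variables as above; the path must be made absolute because cwd changes:

    p = subprocess.Popen(['ar', 'x', os.path.abspath(self.fileresult.filename)],
                         stdout=subprocess.PIPE,
                         stderr=subprocess.PIPE,
                         cwd=unpackdir_full)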
Example #6
    def parse(self):
        # this parser is currently disabled
        check_condition(False, "unsupported")
        check_condition(
            shutil.which('unpack200') is not None, "pack200 program not found")

        # create a temporary directory
        temp_dir = tempfile.mkdtemp(
            dir=self.scan_environment.temporarydirectory)

        # the unpack200 tool only works on whole files. Finding out
        # where the file ends is TODO, but if there is data in front
        # of a valid pack200 file it is not a problem.
        havetmpfile = False
        if self.offset != 0:
            # the carving code was left incomplete here; a sketch of
            # the usual approach: copy the data starting at the offset
            # to a temporary file and run the tool on that instead
            temporary_file = tempfile.mkstemp(
                dir=self.scan_environment.temporarydirectory)
            os.sendfile(temporary_file[0], self.infile.fileno(),
                        self.offset,
                        self.fileresult.filesize - self.offset)
            os.fdopen(temporary_file[0]).close()
            havetmpfile = True

        # outfile_full is assumed to have been set up earlier
        if havetmpfile:
            p = subprocess.Popen(['unpack200', temporary_file[1], outfile_full],
                                 stdout=subprocess.PIPE,
                                 stderr=subprocess.PIPE,
                                 cwd=temp_dir)
        else:
            p = subprocess.Popen(
                ['unpack200', self.fileresult.filename, outfile_full],
                stdout=subprocess.PIPE,
                stderr=subprocess.PIPE,
                cwd=temp_dir)

        (outputmsg, errormsg) = p.communicate()

        if havetmpfile:
            os.unlink(temporary_file[1])
Example #7
    def parse(self):
        try:
            self.data = doom_wad.DoomWad.from_io(self.infile)
        # TODO: decide what exceptions to catch
        except (Exception, ValidationFailedError) as e:
            raise UnpackParserException(e.args)
        # this is a bit of an ugly hack to detect if the file has been
        # truncated or corrupted. In certain cases (like when scanning the
        # 'magic' database) it could be that the offset would be bigger
        # than the file itself and there would be hundreds of millions of
        # index entries for which the generated code would first try to create
        # an IndexEntry() object leading to an out of memory issue.
        filesize = self.fileresult.filesize
        check_condition(self.data.index_offset <= filesize,
                        "index offset outside of file")
        check_condition(self.data.num_index_entries > 0, "no lumps defined")

        # another ugly hack to prevent ASCII decoding errors
        # (example: when scanning mime.cache)
        try:
            for i in self.data.index:
                pass
        except Exception as e:
            raise UnpackParserException(e.args)
Example #8
 def parse(self):
     # needs LZ77 decompression
     check_condition(False, "unsupported")
     try:
         self.data = novatek.Novatek.from_io(self.infile)
     except (Exception, ValidationFailedError) as e:
         raise UnpackParserException(e.args)
Example #9
 def parse(self):
     self.file_size = self.fileresult.filesize
     try:
         self.data = llvm_ir_wrapper.LlvmIrWrapper.from_io(self.infile)
     except (Exception, ValidationFailedError) as e:
         raise UnpackParserException(e.args)
     self.unpacked_size = self.data.ofs_bytecode + self.data.len_bytecode
     check_condition(self.file_size >= self.unpacked_size,
                     "not enough data")
Example #10
 def parse(self):
     self.unpacked_size = 0
     try:
         self.data = android_imgdata.AndroidImgdata.from_io(self.infile)
         for image in self.data.images:
             self.unpacked_size = max(self.unpacked_size, image.ofs_image + image.len_image)
     except (Exception, ValidationFailedError) as e:
         raise UnpackParserException(e.args)
     check_condition(self.unpacked_size <= self.fileresult.filesize, "data outside file")
Example #11
 def parse(self):
     try:
         self.data = dhtb.Dhtb.from_io(self.infile)
     except (Exception, ValidationFailedError) as e:
         raise UnpackParserException(e.args)
     sha256 = hashlib.sha256(self.data.payload)
     check_condition(
         sha256.hexdigest() == binascii.hexlify(
             self.data.header.sha256).decode(), 'invalid hash')
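
Comparing raw digests avoids the hexlify round-trip. Assuming header.sha256 holds the raw 32-byte digest, an equivalent check is:

    check_condition(
        hashlib.sha256(self.data.payload).digest() == self.data.header.sha256,
        'invalid hash')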
Example #12
    def parse(self):
        try:
            self.data = trx.Trx.from_io(self.infile)
        except (Exception, ValidationFailedError) as e:
            raise UnpackParserException(e.args)

        computed_crc = ~zlib.crc32(self.data.raw_data) & 0xffffffff
        check_condition(self.data.preheader.crc32 == computed_crc,
                        "invalid CRC32")
Example #13
 def parse(self):
     try:
         self.data = xg3d.Xg3d.from_io(self.infile)
     except (Exception, ValidationFailedError) as e:
         raise UnpackParserException(e.args)
     check_condition(
         self.offset == 0
         and self.infile.tell() == self.fileresult.filesize,
         "carving not supported for xg3d")
Example #14
 def calculate_unpacked_size(self):
     self.unpacked_size = 0
     for p in self.data.partitions:
         self.unpacked_size = max(self.unpacked_size,
                                  (p.lba_start + p.num_sectors) * 512)
     check_condition(self.unpacked_size <= self.fileresult.filesize,
                     "partition bigger than file")
     check_condition(self.unpacked_size >= 0x1be,
                     "invalid partition table: no partitions")
Example #15
    def parse(self):
        file_size = self.fileresult.filesize
        self.is_variant = False
        try:
            self.data = android_img.AndroidImg.from_io(self.infile)
        except (Exception, ValidationFailedError) as e:
            try:
                self.infile.seek(self.offset)
                self.data = android_img_lk.AndroidImgLk.from_io(self.infile)
                self.is_variant = True
            except (Exception, ValidationFailedError) as e:
                raise UnpackParserException(e.args)

        self.unpacked_size = self.infile.tell()

        # compute the size and check against the file size
        # take padding into account
        if self.is_variant:
            self.unpacked_size = max(
                self.unpacked_size,
                self.data.header.dtb_pos + self.data.header.dt_size)
        else:
            if self.data.header_version < 3:
                page_size = self.data.header.page_size
                try:
                    unpacked_size = (
                        (page_size + self.data.header.kernel.size + page_size -
                         1) // page_size) * page_size
                except ZeroDivisionError as e:
                    raise UnpackParserException(e.args)
                if self.data.header.ramdisk.size > 0:
                    unpacked_size = (
                        (unpacked_size + self.data.header.ramdisk.size +
                         page_size - 1) // page_size) * page_size
                if self.data.header.second.size > 0:
                    unpacked_size = (
                        (unpacked_size + self.data.header.second.size +
                         page_size - 1) // page_size) * page_size
                if self.data.header_version > 0:
                    if self.data.header.recovery_dtbo.size > 0:
                        unpacked_size = (
                            (self.data.header.recovery_dtbo.offset +
                             self.data.header.recovery_dtbo.size + page_size -
                             1) // page_size) * page_size
                if self.data.header_version > 1:
                    if self.data.header.dtb.size > 0:
                        unpacked_size = (
                            (unpacked_size + self.data.header.dtb.size +
                             page_size - 1) // page_size) * page_size
                self.unpacked_size = max(self.unpacked_size, unpacked_size)
                check_condition(file_size >= self.unpacked_size,
                                "not enough data")
            elif self.data.header_version in [3, 4]:
                unpacked_size = 4096 + len(self.data.header.kernel_img) + \
                    len(self.data.header.padding1) + len(self.data.header.ramdisk_img) + \
                    len(self.data.header.padding2)
                self.unpacked_size = max(self.unpacked_size, unpacked_size)
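
The repeated ((x + page_size - 1) // page_size) * page_size expression is the usual round-up-to-multiple idiom; factoring it out would make the size computation easier to follow. A sketch (the helper name is illustrative):

    def round_up(value, page_size):
        # round value up to the next multiple of page_size,
        # e.g. round_up(5000, 4096) == 8192
        return ((value + page_size - 1) // page_size) * page_size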
Example #16
 def parse(self):
     try:
         self.data = gif.Gif.from_io(self.infile)
     except Exception as e:
         raise UnpackParserException(e.args)
     check_condition(self.data.logical_screen_descriptor.screen_width > 0,
                     "invalid width")
     check_condition(self.data.logical_screen_descriptor.screen_height > 0,
                     "invalid height")
Example #17
 def calculate_unpacked_size(self):
     # According to https://web.archive.org/web/20080321063028/http://technet2.microsoft.com/windowsserver/en/library/bdeda920-1f08-4683-9ffb-7b4b50df0b5a1033.mspx?mfr=true
     # the backup GPT header is at the last sector of the disk
     try:
         self.unpacked_size = (self.data.primary.backup_lba +
                               1) * self.data.sector_size
     except Exception as e:
         raise UnpackParserException(e.args)
     check_condition(self.unpacked_size <= self.fileresult.filesize,
                     "partition bigger than file")
Example #18
 def parse(self):
     file_size = self.fileresult.filesize
     try:
         self.data = android_msm_bootldr.AndroidMsmBootldr.from_io(self.infile)
     except (Exception, ValidationNotEqualError) as e:
         raise UnpackParserException(e.args)
     self.unpacked_size = self.data.ofs_start
     for entry in self.data.img_info:
         self.unpacked_size += entry.size
     check_condition(file_size >= self.unpacked_size, "not enough data")
Example #19
 def calculate_unpacked_size(self):
     self.unpacked_size = 0
     try:
         for p in self.data.partitions:
             self.unpacked_size = max(self.unpacked_size,
                                      (p.lba_start + p.num_sectors) * 512)
     except Exception as e:
         raise UnpackParserException(e.args)
     check_condition(self.unpacked_size >= 0x1be,
                     "invalid partition table: no partitions")
Example #20
 def parse(self):
     resource_ids = set()
     try:
         self.data = chrome_pak.ChromePak.from_io(self.infile)
     except (Exception, ValidationFailedError) as e:
         raise UnpackParserException(e.args)
     check_condition(self.data.resources[-1].id == 0,
                     "wrong resource identifier")
     check_condition(
         self.data.resources[-1].ofs_body <= self.fileresult.filesize,
         "not enough data")
Example #21
 def parse(self):
     try:
         self.data = dds.Dds.from_io(self.infile)
     except (Exception, ValidationFailedError) as e:
         raise UnpackParserException(e.args)
     compatible_flags = True
     if self.data.dds_header.flags & 0x8 == 0x8 and self.data.dds_header.flags & 0x80000 == 0x80000:
         compatible_flags = False
     check_condition(compatible_flags, "incompatible flags specified")
     check_condition(self.data.dds_header.flags & 0x80000 == 0x80000,
                     "uncompressed files currently not supported")
Example #22
 def parse(self):
     file_size = self.fileresult.filesize
     try:
         self.data = android_bootldr_qcom.AndroidBootldrQcom.from_io(
             self.infile)
     except (Exception, ValidationFailedError) as e:
         raise UnpackParserException(e.args)
     self.unpacked_size = self.data.ofs_img_bodies
     for entry in self.data.img_headers:
         self.unpacked_size += entry.len_body
     check_condition(file_size >= self.unpacked_size, "not enough data")
Example #23
 def parse(self):
     self.file_size = self.fileresult.filesize
     try:
         self.data = au.Au.from_io(self.infile)
     except (Exception, ValidationFailedError) as e:
         raise UnpackParserException(e.args)
     check_condition(self.data.header.data_size != 0xffffffff,
                     "files with unknown data size not supported")
     check_condition(
         self.file_size >= self.data.ofs_data + self.data.header.data_size,
         "not enough data")
Example #24
 def parse(self):
     try:
         self.data = pcapng.Pcapng.from_io(self.infile)
     except (Exception, ValidationFailedError) as e:
         raise UnpackParserException(e.args)
     section_header_seen = False
     for block in self.data.blocks:
         if block.header_type == pcapng.Pcapng.HeaderTypes.section_header:
             section_header_seen = True
             break
     check_condition(section_header_seen, "no section header block found")
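
The scan-and-break loop can be expressed more compactly with any(); the behavior is identical:

    section_header_seen = any(
        block.header_type == pcapng.Pcapng.HeaderTypes.section_header
        for block in self.data.blocks)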
Example #25
 def parse(self):
     try:
         self.data = pcf_font.PcfFont.from_io(self.infile)
         check_condition(self.data.num_tables > 0,
                         "invalid number of tables")
         # this is a bit of an ugly hack to detect if the file
         # has been truncated.
         for i in self.data.tables:
             a = type(i.body)
     except (Exception, ValidationNotEqualError) as e:
         raise UnpackParserException(e.args)
Example #26
 def parse(self):
     file_size = self.fileresult.filesize
     try:
         self.data = mozilla_mar.MozillaMar.from_io(self.infile)
     # EOFError is a subclass of Exception, so one clause suffices
     except (Exception, ValidationNotEqualError) as e:
         raise UnpackParserException(e.args)
     check_condition(
         self.data.file_size == self.data.ofs_index + 4 +
         self.data.index.len_index, "Wrong file size")
     check_condition(self.data.file_size <= file_size, "Not enough data")
Example #27
 def parse(self):
     resource_ids = set()
     try:
         self.data = chrome_pak.ChromePak.from_io(self.infile)
     except (Exception, ValidationNotEqualError,
             ValidationGreaterThanError) as e:
         raise UnpackParserException(e.args)
     check_condition(self.data.header.resources[-1].id == 0,
                     "wrong resource identifier")
     check_condition(
         self.data.header.resources[-1].offset <= self.fileresult.filesize,
         "not enough data")
Example #28
 def parse(self):
     try:
         self.data = apple_single_double.AppleSingleDouble.from_io(
             self.infile)
         # this is a bit of an ugly hack as the Kaitai parser is
         # not entirely complete. Use this to detect if the file
         # has been truncated.
         for i in self.data.entries:
             a = type(i.body)
     except (Exception, ValidationNotEqualError) as e:
         raise UnpackParserException(e.args)
     check_condition(self.data.num_entries > 1, "no apple double entries")
Example #29
 def parse(self):
     try:
         self.data = kaitai_lz4.Lz4.from_io(self.infile)
     except (Exception, ValidationFailedError) as e:
         raise UnpackParserException(e.args)
     for block in self.data.blocks:
         if block.is_endmark:
             break
         if self.data.frame_descriptor.flag.block_checksum:
             block_checksum = xxhash.xxh32(block.data)
             check_condition(block.checksum == block_checksum.intdigest(),
                             "invalid block checksum")
Example #30
 def parse(self):
     try:
         self.data = gif.Gif.from_io(self.infile)
     # TODO: decide what exceptions to catch
     except (Exception, ValidationNotEqualError) as e:
         raise UnpackParserException(e.args)
     check_condition(self.data.logical_screen_descriptor.screen_width > 0,
                     "invalid width")
     check_condition(self.data.logical_screen_descriptor.screen_height > 0,
                     "invalid height")