Example No. 1
    def _read_and_validate_fs_info(self):
        self.scan_info("Validating FS INFO sector...")
        fs_info_bytes = self._sector_slice(self._fs_info_sector)
        try:
            validate_fs_info(fs_info_bytes)
        except ValueError:
            if self._print_scan_info:
                # Scan mode: report the problem and keep going.
                print("Incorrect format of FS Info sector, "
                      "FAT32 validation failed.")
                self.valid = False
            else:
                raise FATReaderError("Incorrect format of FS Info sector, "
                                     "image either corrupted or it's not FAT32")
        else:
            self.scan_info("FS INFO sector is valid.")

        parser = BytesParser(fs_info_bytes)
        # Offset 0x1E8 holds the last known free cluster count;
        # 0xFFFFFFFF means the value is unknown.
        self._free_clusters = \
            parser.parse_int_unsigned(0x1e8, 4, byteorder='little')
        if self._free_clusters == 0xFFFFFFFF:
            self._free_clusters = -1
        # Offset 0x1EC holds the cluster number at which to start
        # searching for free clusters; 0xFFFFFFFF again means unknown.
        self._first_free_cluster = \
            parser.parse_int_unsigned(0x1ec, 4, byteorder='little')
        if self._first_free_cluster == 0xFFFFFFFF:
            self._first_free_cluster = -1
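
The two fields read here are the standard FAT32 FSInfo hints: the free cluster count at offset 0x1E8 and the "first free cluster" search hint at 0x1EC, each a 32-bit little-endian value where 0xFFFFFFFF means "unknown". The validate_fs_info helper itself is not shown in these excerpts; a minimal sketch of what such a check might look like, assuming it only verifies the three FSInfo signatures defined by the FAT32 specification, is:

    def validate_fs_info(fs_info_bytes):
        """Sketch: raise ValueError unless the FSInfo signatures match."""
        if len(fs_info_bytes) < 512:
            raise ValueError("FS Info sector is shorter than 512 bytes")
        lead = int.from_bytes(fs_info_bytes[0x000:0x004], 'little')
        struct_sig = int.from_bytes(fs_info_bytes[0x1e4:0x1e8], 'little')
        trail = int.from_bytes(fs_info_bytes[0x1fc:0x200], 'little')
        if lead != 0x41615252:        # bytes "RRaA"
            raise ValueError("bad FSInfo lead signature")
        if struct_sig != 0x61417272:  # bytes "rrAa"
            raise ValueError("bad FSInfo structure signature")
        if trail != 0xAA550000:
            raise ValueError("bad FSInfo trailing signature")

The project's real validate_fs_info may check more than this; the sketch only captures the signature test that would make the except ValueError branch above fire.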
Example No. 2
    def test_parse_int_end(self):
        parser = BytesParser(b'\xf4\x43\xff\x57\xa3\x55')
        self.assertEqual(0x55, parser.parse_int_unsigned(5, 1))
Example No. 3
    def test_parse_int_middle(self):
        parser = BytesParser(b'\xf4\x43\xff\x57\xa3\x55')
        self.assertEqual(0x57ff, parser.parse_int_unsigned(2, 2))
Example No. 4
    def test_parse_int_start(self):
        parser = BytesParser(b'\xf4\x43\xff\x57\xa3\x55')
        self.assertEqual(0xff43f4, parser.parse_int_unsigned(0, 3))
Example No. 5
    def test_parse_int_little_big_endian(self):
        parser = BytesParser(b'\xf4\xa3\xff')
        self.assertEqual(0xffa3f4, parser.parse_int_unsigned(0, 3))
Example No. 6
    def test_parse_int_simple(self):
        parser = BytesParser(b'\x5f')
        self.assertEqual(0x5f, parser.parse_int_unsigned(0, 1))
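
Taken together, Examples 2-6 pin down the contract of parse_int_unsigned: it reads size bytes starting at offset and interprets them as an unsigned little-endian integer by default, while Example 1 shows that the byte order can also be passed explicitly. A minimal BytesParser sketch that satisfies these tests (the project's real class is not shown here and may offer more) could be:

    class BytesParser:
        """Sketch: wraps a bytes object and decodes unsigned integers."""

        def __init__(self, data):
            self._data = data

        def parse_int_unsigned(self, offset, size, byteorder='little'):
            # Take `size` bytes at `offset` and decode them as an
            # unsigned integer, little-endian unless told otherwise.
            return int.from_bytes(self._data[offset:offset + size],
                                  byteorder)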
Example No. 7
    def _parse_dir_files(self, data, directory):
        files = list()
        long_file_name_buffer = ""
        lfn_checksum_buffer = -1
        for start in range(0, len(data) - BYTES_PER_DIR_ENTRY,
                           BYTES_PER_DIR_ENTRY):
            debug('long_file_name_buffer = "' + long_file_name_buffer + '"')
            debug('lfn_checksum_buffer = ' + str(lfn_checksum_buffer))
            entry_bytes = data[start:start + BYTES_PER_DIR_ENTRY]
            if entry_bytes[0] == 0x00:
                # directory has no more entries
                break
            if entry_bytes[0] == 0xE5:
                # unused entry
                continue
            if entry_bytes[0] == 0x05:
                # 0x05 stands in for a real leading 0xE5 byte in the name
                entry_bytes = b'\xe5' + entry_bytes[1:]

            entry_parser = BytesParser(entry_bytes)
            attributes = entry_parser.parse_int_unsigned(11, 1)

            if attributes == fsobjects.LFN:  # Long file name entry
                lfn_part, lfn_checksum = get_lfn_part(entry_bytes)
                long_file_name_buffer = lfn_part + \
                                        long_file_name_buffer
                if 0 <= lfn_checksum_buffer != lfn_checksum:
                    debug("Warning: checksum changed from {:d} to"
                          " {:d} during lfn sequence"
                          .format(lfn_checksum_buffer, lfn_checksum))
                lfn_checksum_buffer = lfn_checksum

            elif attributes & fsobjects.VOLUME_ID:
                # volume label entry, not a file - skip it
                pass
            else:
                try:
                    file = self._parse_file_entry(entry_parser,
                                                  long_file_name_buffer,
                                                  lfn_checksum_buffer)
                    requires_size_check = self.repair_file_size_mode and \
                                          not file.is_directory
                    requires_cluster_usage_logging = \
                        self.log_clusters_usage \
                        or self.log_clusters_usage_adv
                    if requires_cluster_usage_logging or \
                            requires_size_check:
                        cluster_size = self.get_cluster_size()
                        cluster_seq_num = start // cluster_size
                        entry_start_in_cluster = start % cluster_size
                        chain = self._get_cluster_chain(
                            directory._start_cluster)
                        cluster_num = chain[cluster_seq_num]
                        start_bytes, _ = \
                            self._get_cluster_start_end_relative_to_data_start(
                                cluster_num)
                        entry_start = (self._data_area_start + start_bytes +
                                       entry_start_in_cluster)
                    if requires_cluster_usage_logging:
                        self._log_file_clusters_usage(file=file,
                                                      entry_start=entry_start)
                except ValueError:
                    debug('Entry is "dot" entry, ignoring...')
                    continue

                if requires_size_check:
                    self._repair_file_size(file, entry_start)

                file.parent = directory
                files.append(file)
                long_file_name_buffer = ""
                lfn_checksum_buffer = -1
                debug(file.get_attributes_str())
        return files
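
The helper get_lfn_part used above is not included in these excerpts. In a standard FAT long-file-name entry the 13 UTF-16LE name characters are split across bytes 1-10, 14-25 and 28-31, and byte 13 carries the checksum of the matching short-name entry. A speculative sketch of such a helper, assuming it only extracts those two fields (the project's implementation may differ), is:

    def get_lfn_part(entry_bytes):
        """Sketch: return (name_fragment, checksum) of a 32-byte LFN entry."""
        checksum = entry_bytes[13]
        # The name characters are scattered across three UTF-16LE regions.
        raw = entry_bytes[1:11] + entry_bytes[14:26] + entry_bytes[28:32]
        name = raw.decode('utf-16-le')
        # Short fragments end with a 0x0000 terminator followed by
        # 0xFFFF padding; keep only the characters before them.
        terminator = name.find('\x00')
        if terminator != -1:
            name = name[:terminator]
        return name.rstrip('\uffff'), checksum

With such a helper, the loop in _parse_dir_files prepends each fragment to long_file_name_buffer because LFN entries are stored on disk in reverse order, last fragment first.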