def _find_free_clusters(self, clusters_amount):
    """Find `clusters_amount` free clusters in the active FAT.

    Scans the FAT from the cached first-free-cluster hint, collecting
    indexes of zeroed (free) entries.  On success, advances the hint to
    the entry after the last cluster handed out, persists it, and
    decreases the cached free-cluster count by the amount requested.

    Returns a list of free cluster numbers (empty for a zero request).
    Raises ValueError if `clusters_amount` is negative or the FAT does
    not contain enough free entries.
    """
    if clusters_amount < 0:
        raise ValueError("Cluster amount cannot be negative!")
    if clusters_amount == 0:
        return list()
    required = clusters_amount
    start_sector, end_sector = self._get_active_fat_start_end_sectors()
    fat = self._sector_slice(start_sector, end_sector)
    fat_parser = BytesParser(fat)
    empty_fat_value = b'\x00' * BYTES_PER_FAT32_ENTRY
    free_clusters = list()
    # NOTE(review): assumes self._first_free_cluster >= 0 here; a -1
    # ("unknown") hint would start the scan at a negative offset --
    # confirm callers guarantee a valid hint.
    for i in range(self._first_free_cluster * BYTES_PER_FAT32_ENTRY,
                   len(fat), BYTES_PER_FAT32_ENTRY):
        value = fat_parser.get_bytes(i, BYTES_PER_FAT32_ENTRY)
        debug("Looking to cluster #" + str(
            i // BYTES_PER_FAT32_ENTRY) + ", content: " + str(value) + (
            " != " if value != empty_fat_value else " == ") + str(
            empty_fat_value))
        if value == empty_fat_value:
            free_clusters.append(i // BYTES_PER_FAT32_ENTRY)
            clusters_amount -= 1
            if clusters_amount == 0:
                break
    if clusters_amount > 0:
        raise ValueError("Have not found enough free clusters "
                         "(Required: {}, Found: {})."
                         .format(required, required - clusters_amount))
    # Bug fix: the original only ran this bookkeeping when the scan loop
    # managed one more iteration after the last cluster was found, so
    # finding the final free cluster in the very last FAT entry left the
    # hint and the free-cluster count stale.  Run it unconditionally on
    # success.  The hint value matches the original break-path behavior:
    # the entry right after the last cluster handed out.
    self._first_free_cluster = free_clusters[-1] + 1
    self._update_first_free_cluster()
    self._decrease_free_clusters_amount_by(required)
    return free_clusters
def type(self, args):
    """Print a file's contents to stdout.

    `args` is either "<file name>" (decoded as ASCII with replacement of
    undecodable bytes) or "<encoding> <file name>".
    Raises DirectoryBrowserError when the file is missing or is a
    directory.
    """
    if len(args) == 0:
        print(TYPE_USAGE)
        return
    parts = args.split(" ", maxsplit=1)
    use_default_encoding = len(parts) == 1
    if use_default_encoding:
        file_name = parts[0]
    else:
        # maxsplit=1 guarantees exactly two elements here.
        encoding, file_name = parts
    file = self.find(file_name, priority="file")
    if file is None:
        raise DirectoryBrowserError('File "' + file_name + '" not found.')
    if file.is_directory:
        raise DirectoryBrowserError('"' + file_name + '" is a directory.')
    content_parser = BytesParser(file.get_file_content(self._fat_editor))
    if use_default_encoding:
        text = content_parser.parse_ascii_string_replace_errors(
            0, len(content_parser))
    else:
        text = content_parser.parse_string(0, len(content_parser),
                                           encoding=encoding)
    print(text)
def _read_and_validate_fs_info(self):
    """Validate the FS Info sector and read its cached counters.

    Sets self._free_clusters and self._first_free_cluster from the
    sector (0xFFFFFFFF, meaning "unknown", is mapped to -1).  On a
    malformed sector: in scan mode, reports it and marks the image
    invalid but still parses the counters; otherwise raises
    FATReaderError.
    """
    self.scan_info("Validating FS INFO sector...")
    fs_info_bytes = self._sector_slice(self._fs_info_sector)
    try:
        validate_fs_info(fs_info_bytes)
    except ValueError:
        if not self._print_scan_info:
            raise FATReaderError("Incorrect format of FS Info sector, "
                                 "image either corrupted or it's not FAT32")
        print("Incorrect format of FS Info sector, "
              "FAT32 validation failed.")
        self.valid = False
    else:
        self.scan_info("FS INFO sector is valid.")
    parser = BytesParser(fs_info_bytes)
    free = parser.parse_int_unsigned(0x1e8, 4, byteorder='little')
    self._free_clusters = -1 if free == 0xFFFFFFFF else free
    first_free = parser.parse_int_unsigned(0x1ec, 4, byteorder='little')
    self._first_free_cluster = \
        -1 if first_free == 0xFFFFFFFF else first_free
def _get_data(self, cluster):
    """Return the raw bytes stored in the given data-area cluster."""
    start, end = self._get_cluster_start_end_relative_to_data_start(
        cluster)
    parser = FileBytesParser(self._fat_image_file, self._data_area_start)
    data = parser.get_bytes_end(start, end)
    if DEBUG_MODE:
        debug("Getting data from cluster " + str(cluster) + ": ")
        debug("\tCluster start: " + str(self._data_area_start + start))
        debug("\tCluster end: " + str(self._data_area_start + end))
        debug("\tContent: " + BytesParser(data).hex_readable(0, len(data)))
    return data
def _write_fat_bytes(self, cluster, bytes_):
    """Write `bytes_` into the FAT entry of `cluster`, mirroring the
    write to every FAT copy in the image."""
    for fat_index in range(self.fat_amount):
        debug("Writing value {} ({:d}) to FAT #{:d}".format(
            BytesParser(bytes_).hex_readable(),
            int.from_bytes(bytes_, byteorder='little'),
            fat_index)
        )
        fat_start, _ = self._get_fat_start_end_sectors(fat_index)
        # Byte offset of this cluster's entry inside the current FAT.
        entry_offset = (self._sectors_to_bytes(fat_start)
                        + cluster * BYTES_PER_FAT32_ENTRY)
        self._write_content_to_image(entry_offset, bytes_)
def get_lfn_part(entry_bytes):
    """Extract the name fragment and checksum from one LFN directory entry.

    A long-file-name entry stores up to 13 characters spread over three
    byte ranges (1-10, 14-25, 28-31); the sequence terminates at a
    0x0000 character and is padded with 0xFFFF.

    Returns a tuple (name_fragment, checksum_byte_at_0x0D).
    """
    debug("get_lfn_part: ")
    debug("\thex: " + BytesParser(entry_bytes).hex_readable(0,
                                                            BYTES_PER_DIR_ENTRY))
    # The character slots hold 2-byte UTF-16 code units (the original
    # variable name wrongly said utf8).
    utf16_char_positions = itertools.chain(range(1, 11, 2),
                                           range(14, 26, 2),
                                           range(28, 32, 2))
    lfn_part = ""
    for pos in utf16_char_positions:
        char = entry_bytes[pos:pos + 2]
        if char == b'\x00\x00' or char == b'\xFF\xFF':
            break
        # FAT32 stores LFN characters as UTF-16LE; decode explicitly
        # little-endian instead of relying on the BOM-sniffing "utf_16"
        # codec's default byte order.
        lfn_part += char.decode("utf_16_le")
    return lfn_part, entry_bytes[0x0D]
def write_to_image(self, external_path, internal_path,
                   directory=None) -> fsobjects.File:
    """Copy the host file at `external_path` into the FAT image.

    external_path: path on the host file system.
    internal_path: path of the destination directory inside the image;
        consulted only when `directory` is None.
    directory: destination directory object; looked up via
        `internal_path` when not given.
    Returns the created File object.
    Raises FileNotFoundError if `external_path` does not exist.
    """
    path = pathlib.Path(external_path)
    if not path.exists():
        raise FileNotFoundError(str(path) + " not found.")
    if directory is None:
        directory = find_directory(self.get_root_directory(),
                                   internal_path)
    # Normalize Windows separators, then take the last path component
    # as the long name inside the image.
    name = ("/" + str(path.absolute()).replace("\\", "/")).split("/")[-1]
    short_name = fsobjects.get_short_name(name, directory=directory)
    attributes = fsobjects.DIRECTORY if path.is_dir() else 0
    creation_datetime, last_access_date, modification_datetime = \
        get_time_stamps(external_path)
    file = fsobjects.File(
        long_name=name,
        short_name=short_name,
        create_datetime=creation_datetime,
        change_datetime=modification_datetime,
        last_open_date=last_access_date,
        attributes=attributes)
    file.parent = directory
    # Write the content first: the resulting start cluster and byte size
    # are needed before the directory entry can be appended.
    first_cluster, size_bytes = self._write_external_file_content(path,
                                                                  file)
    file._start_cluster = first_cluster
    file._size_bytes = size_bytes
    self._append_content_to_dir(directory, file.to_directory_entries())
    if DEBUG_MODE:
        print(BytesParser(self.get_data_from_cluster_chain(
            directory._start_cluster)).hex_readable(0,
                                                    BYTES_PER_DIR_ENTRY))
    return file
def test_file_parse(self):
    """parse_file_info should decode a raw 32-byte directory entry."""
    raw_entry = (
        b'\x53\x48\x4F\x52\x54\x20\x20\x20\x54\x58\x54\x20\x18\x4C\xA8\x76'
        b'\xFD\x4A\xFD\x4A\x00\x00\x05\xA3\xEE\x4A\x55\x00\xA3\x06\x00\x00'
    )
    expected = fsobjects.File(
        'SHORT.TXT', '', fsobjects.ARCHIVE,
        datetime.datetime(day=29, month=7, year=2017, hour=14,
                          minute=53, second=16, microsecond=76000),
        datetime.date(day=29, month=7, year=2017),
        datetime.datetime(day=14, month=7, year=2017, hour=20,
                          minute=24, second=10),
        1699)
    actual = fateditor.parse_file_info(BytesParser(raw_entry))
    self.assertEqual(actual, expected)
def _write_content_and_get_first_cluster(self, content):
    """Write `content` into free clusters, chain them in the FAT, and
    return the number of the first cluster (-1 for empty content)."""
    if len(content) == 0:
        return -1
    cluster_size = self.get_cluster_size()
    clusters_required = math.ceil(len(content) / cluster_size)
    clusters = self._find_free_clusters(clusters_required)
    prev_cluster = -1
    for content_start, cluster_num in \
            zip(range(0, len(content), cluster_size), clusters):
        if prev_cluster != -1:
            # Link the previous cluster to this one in the FAT chain.
            self._write_fat_value(prev_cluster, cluster_num)
        content_end = content_start + cluster_size
        # Fix: the original unpacked the cluster end into `_` and then
        # mutated and used it -- give it a real name.
        cluster_start, cluster_end = \
            self._get_cluster_start_end_relative_to_data_start(
                cluster_num)
        cluster_start += self._data_area_start
        cluster_end += self._data_area_start
        debug("Writing from {:d} to {:d} (cluster {:d}):".format(
            cluster_start, cluster_end, cluster_num))
        writing_content = content[content_start:content_end]
        # Use the actual chunk length here: the final chunk may be
        # shorter than a full cluster (original passed cluster_size).
        debug(BytesParser(writing_content)
              .hex_readable(0, len(writing_content)))
        self._write_content_to_image(cluster_start, writing_content)
        if DEBUG_MODE:
            # Read-back verification of the bytes just written.
            if self._get_data(cluster_num)[:len(writing_content)] != \
                    writing_content:
                raise Exception("Content written somewhere else!")
        prev_cluster = cluster_num
    # Terminate the chain at the last allocated cluster.
    self._write_eof_fat_value(clusters[-1])
    return clusters[0]
def test_parse_date_start(self):
    """Date packed at offset 0 of the buffer."""
    # 09.08.2017(37) -> 37 08 09
    # [0000100101001011]00000000
    expected = datetime.date(year=2017, month=8, day=9)
    self.assertEqual(expected, BytesParser(b'\x09\x4B\x00').parse_date(0))
def test_parse_time_end(self):
    """Time packed at the end of the buffer (offset 2)."""
    # 17:35:54
    # 0000101100101000[0111101110001100]
    expected = datetime.time(hour=17, minute=35, second=54)
    self.assertEqual(expected,
                     BytesParser(b'\x0B\x28\x7B\x8C').parse_time(2))
def test_parse_time_middle(self):
    # 12:12:12  (the old comment wrongly said 1:25:00; the bracketed
    # two bytes 0x86 0x61 decode to hours=12, minutes=12, seconds=6*2)
    # 00001111[1000011001100001]1010111100000000
    parser = BytesParser(b'\x0F\x86\x61\xAF\x00')
    self.assertEqual(datetime.time(hour=12, minute=12, second=12),
                     parser.parse_time(1))
def _parse_dir_files(self, data, directory):
    """Parse the raw bytes of a directory's cluster chain into File
    objects.

    data: concatenated content of the directory's clusters.
    directory: the directory object being parsed; used to locate each
        entry's absolute position in the image for size repair and
        cluster-usage logging.
    Returns the list of parsed File objects (LFN and volume-id entries
    are folded in / skipped; "dot" entries are ignored).
    """
    files = list()
    long_file_name_buffer = ""
    lfn_checksum_buffer = -1
    # NOTE(review): the range excludes the final BYTES_PER_DIR_ENTRY
    # slot of `data` -- confirm the last directory entry is
    # intentionally never examined.
    for start in range(0, len(data) - BYTES_PER_DIR_ENTRY,
                       BYTES_PER_DIR_ENTRY):
        debug('long_file_name_buffer = "' + long_file_name_buffer + '"')
        debug('lfn_checksum_buffer = ' + str(lfn_checksum_buffer))
        entry_bytes = data[start:start + BYTES_PER_DIR_ENTRY]
        if entry_bytes[0] == 0x00:
            # directory has no more entries
            break
        if entry_bytes[0] == 0xE5:
            # unused entry
            continue
        if entry_bytes[0] == 0x05:
            # 0x05 escapes a real leading 0xE5 byte in the name.
            entry_bytes = b'\xe5' + entry_bytes[1:]
        entry_parser = BytesParser(entry_bytes)
        attributes = entry_parser.parse_int_unsigned(11, 1)
        if attributes == fsobjects.LFN:
            # Long file name entry: accumulate the fragment (entries are
            # stored last-fragment-first, hence the prepend).
            lfn_part, lfn_checksum = get_lfn_part(entry_bytes)
            long_file_name_buffer = lfn_part + \
                long_file_name_buffer
            if 0 <= lfn_checksum_buffer != lfn_checksum:
                debug("Warning: checksum changed from {:d} to"
                      " {:d} during lfn sequence"
                      .format(lfn_checksum_buffer, lfn_checksum))
            lfn_checksum_buffer = lfn_checksum
        elif attributes & fsobjects.VOLUME_ID:
            # Volume label entry: not a file, skip it.
            pass
        else:
            try:
                file = self._parse_file_entry(entry_parser,
                                              long_file_name_buffer,
                                              lfn_checksum_buffer)
                requires_size_check = self.repair_file_size_mode and \
                    not file.is_directory
                requires_cluster_usage_logging = \
                    self.log_clusters_usage \
                    or self.log_clusters_usage_adv
                if requires_cluster_usage_logging or \
                        requires_size_check:
                    # Translate the entry's offset inside `data` into
                    # an absolute byte position in the image: find the
                    # cluster it lives in, then add the data-area base.
                    cluster_size = self.get_cluster_size()
                    cluster_seq_num = start // cluster_size
                    entry_start_in_cluster = start % cluster_size
                    chain = self._get_cluster_chain(
                        directory._start_cluster)
                    cluster_num = chain[cluster_seq_num]
                    start_bytes, _ = \
                        self._get_cluster_start_end_relative_to_data_start(
                            cluster_num)
                    entry_start = start_bytes + \
                        entry_start_in_cluster + self._data_area_start
                    if requires_cluster_usage_logging:
                        self._log_file_clusters_usage(
                            file=file, entry_start=entry_start)
            except ValueError:
                # _parse_file_entry signals "." / ".." entries this way.
                debug('Entry is "dot" entry, ignoring...')
                continue
            if requires_size_check:
                self._repair_file_size(file, entry_start)
            file.parent = directory
            files.append(file)
            # A regular entry consumes any pending LFN sequence.
            long_file_name_buffer = ""
            lfn_checksum_buffer = -1
            debug(file.get_attributes_str())
    return files
def test_parse_int_middle(self):
    """Two little-endian bytes read from the middle of the buffer."""
    raw = b'\xf4\x43\xff\x57\xa3\x55'
    self.assertEqual(0x57ff, BytesParser(raw).parse_int_unsigned(2, 2))
def test_parse_int_little_big_endian(self):
    """Bytes f4 a3 ff are interpreted little-endian as 0xffa3f4."""
    raw = b'\xf4\xa3\xff'
    self.assertEqual(0xffa3f4, BytesParser(raw).parse_int_unsigned(0, 3))
def test_parse_date_middle(self):
    """Date packed in the middle of the buffer (offset 3)."""
    # 08.10.1998(18) -> 18 10 08
    # 110110000000101010000000[0010010101001000]10101011
    raw = b'\xd8\x0a\x80\x48\x25\xab'
    expected = datetime.date(year=1998, month=10, day=8)
    self.assertEqual(expected, BytesParser(raw).parse_date(3))
def test_parse_date_end(self):
    """Date packed at the end of the buffer (offset 5)."""
    # 01.01.2000(20) -> 20 01 01
    # 10111011110001101101010101010000010111[0010000100101000]
    raw = b'\x2e\xf1\xb5\x54\x17\x21\x28'
    expected = datetime.date(year=2000, month=1, day=1)
    self.assertEqual(expected, BytesParser(raw).parse_date(5))
def test_parse_string_start(self):
    """First 14 bytes of the UTF-16 encoding (BOM + six characters)."""
    data = "Я love Python".encode(encoding=UTF16)
    self.assertEqual("Я love",
                     BytesParser(data).parse_string(0, 14,
                                                    encoding=UTF16))
def test_parse_int_simple(self):
    """A single byte parses to its own value."""
    self.assertEqual(0x5f, BytesParser(b'\x5f').parse_int_unsigned(0, 1))
def test_parse_string_middle(self):
    """Four ASCII bytes read from offset 2."""
    data = "I love Python".encode(encoding=ASCII)
    self.assertEqual("love",
                     BytesParser(data).parse_string(2, 4, encoding=ASCII))
def test_parse_int_start(self):
    """Three little-endian bytes read from the start of the buffer."""
    raw = b'\xf4\x43\xff\x57\xa3\x55'
    self.assertEqual(0xff43f4, BytesParser(raw).parse_int_unsigned(0, 3))
def test_parse_string_end(self):
    """Last six ASCII bytes of the buffer."""
    data = "Hello, world!".encode(encoding=ASCII)
    self.assertEqual("world!",
                     BytesParser(data).parse_string(7, 6, encoding=ASCII))
def test_parse_int_end(self):
    """The final byte of the buffer parses on its own."""
    raw = b'\xf4\x43\xff\x57\xa3\x55'
    self.assertEqual(0x55, BytesParser(raw).parse_int_unsigned(5, 1))
def test_parse_time_start(self):
    """Time packed at offset 0 of the buffer."""
    # 1:25:00
    # [0010000000001011]
    expected = datetime.time(hour=1, minute=25, second=0)
    self.assertEqual(expected, BytesParser(b'\x20\x0b').parse_time(0))