def __init__(self, f: IO):
    """Parse one playlist mark item (mark type, play item ref, timing)."""
    super().__init__()
    # Annotation fixed: f.read() returns bytes, not int.
    self.reserved: bytes = f.read(1)
    self.logger.debug(f"Reserved: {self.reserved}")
    self.mark_type: MarkType = MarkType(
        read_u8(f, endianess=Endianess.BIG_ENDIAN))
    self.logger.debug(f"Mark Type: {self.mark_type}")
    self.ref_to_play_item_id: int = read_u16(
        f, endianess=Endianess.BIG_ENDIAN)
    self.logger.debug(f"Ref To Play Item ID: {self.ref_to_play_item_id}")
    self.mark_timestamp: int = read_u32(f, endianess=Endianess.BIG_ENDIAN)
    self.logger.debug(f"Mark Timestamp: {self.mark_timestamp}")
    self.entry_esp_id: int = read_u16(f, endianess=Endianess.BIG_ENDIAN)
    self.logger.debug(f"Entry ESP Id: {self.entry_esp_id}")
    self.duration: int = read_u32(f, endianess=Endianess.BIG_ENDIAN)
    self.logger.debug(f"Duration: {self.duration}")
def __init__(self, f: IO):
    """Parse an entirety-data digest record (offset, size, flags, hash)."""
    super().__init__()
    # File handle kept on the instance for later reads by the caller.
    self.f: IO = f
    self.entirety_data_offset: int = read_u32(
        f, endianess=Endianess.BIG_ENDIAN)
    self.logger.debug(f"Entirety Data Offset: {self.entirety_data_offset}")
    self.entirety_data_size: int = read_u32(f, endianess=Endianess.BIG_ENDIAN)
    self.logger.debug(f"Entirety Data Size: {self.entirety_data_size}")
    self.flags: int = read_u16(f, endianess=Endianess.BIG_ENDIAN)
    self.logger.debug(f"Flags: {hex(self.flags)}")
    # Expected to be zero; constant_check reports a mismatch.
    self.unk_1: int = read_u32(f, endianess=Endianess.BIG_ENDIAN)
    constant_check(self.logger, "Unknown 1", self.unk_1, 0x00)
    self.unk_2: int = read_u32(f, endianess=Endianess.BIG_ENDIAN)
    self.logger.info(f'Unknown 2: {self.unk_2}')
    self.unk_3: bytes = f.read(0x8)
    self.logger.info(f'Unknown 3: {hexlify(self.unk_3)}')
    # 32-byte field labeled SHA-256 in the format.
    self.sha_256_hash: bytes = f.read(0x20)
    self.logger.info(f'SHA-256 Hash: {hexlify(self.sha_256_hash)}')
def __init__(self, f: IO): super().__init__(f) #: PSARC Minor and Major File Versions self.version_major: int = read_u16(f, endianess=Endianess.BIG_ENDIAN) self.version_minor: int = read_u16(f, endianess=Endianess.BIG_ENDIAN) self.logger.info( f'Version: v{self.version_major}.{self.version_minor}') #: PSARC Compression Type self.compression_type: CompressionType = CompressionType(f.read(4)) self.logger.info(f'Compression Type: {self.compression_type}') #: PSARC TOC Length self.toc_length: int = read_u32(f, endianess=Endianess.BIG_ENDIAN) self.logger.info(f'TOC Length: {self.toc_length}') #: PSARC TOC Entry Size self.toc_entry_size: int = read_u32(f, endianess=Endianess.BIG_ENDIAN) self.logger.info(f'TOC Entry Size: {self.toc_entry_size}') #: PSARC TOC Entry Count self.toc_entry_count: int = read_u32(f, endianess=Endianess.BIG_ENDIAN) self.logger.info(f'TOC Entries: {self.toc_entry_count}') #: PSARC Block Size self.block_size: int = read_u32(f, endianess=Endianess.BIG_ENDIAN) self.logger.info(f'Block Size: {self.block_size}') #: PSARC Archive Path Type # TODO: F*****g use this actually self.archive_path_type: ArchivePathType = ArchivePathType( read_u32(f, endianess=Endianess.BIG_ENDIAN)) self.logger.info(f'Archive Path Type: {self.archive_path_type}')
def __init__(self, f: IO):
    """Parse a CLPI-style header: version plus section start addresses."""
    super().__init__(f)
    # 4-byte ASCII version tag mapped onto the BluRayVersion enum.
    self.version: BluRayVersion = BluRayVersion(f.read(4).decode("ASCII"))
    self.logger.debug(f'Version: {self.version.value}')
    self.sequence_info_start_address: int = read_u32(
        f, endianess=Endianess.BIG_ENDIAN)
    self.logger.debug(
        f'Sequence Info Start Address: {self.sequence_info_start_address}')
    self.program_info_start_address: int = read_u32(
        f, endianess=Endianess.BIG_ENDIAN)
    self.logger.debug(
        f'Program Info Start Address: {self.program_info_start_address}')
    self.cpi_start_address: int = read_u32(f, endianess=Endianess.BIG_ENDIAN)
    self.logger.debug(f'CPI Start Address: {self.cpi_start_address}')
    self.clip_mark_start_address: int = read_u32(
        f, endianess=Endianess.BIG_ENDIAN)
    self.logger.debug(
        f'Clip Mark Start Address: {self.clip_mark_start_address}')
    self.extension_data_start_address: int = read_u32(
        f, endianess=Endianess.BIG_ENDIAN)
    self.logger.debug(
        f'Extension Data Start: {self.extension_data_start_address}')
    # 96 reserved bits, expressed as 96 // 8 = 12 bytes.
    self.reserved: bytes = f.read(96 // 8)
    self.logger.debug(f'Reserved: {hexlify(self.reserved)}')
def __init__(self, f: IO):
    """Parse an Extension Data block: header, entries and raw data block."""
    super().__init__()
    self.length: int = read_u32(f, endianess=Endianess.BIG_ENDIAN)
    self.logger.debug(f"Length: {self.length}")
    # A zero length means the extension data block is absent entirely;
    # none of the remaining attributes are set in that case.
    if self.length != 0:
        self.data_block_start_address: int = read_u32(
            f, endianess=Endianess.BIG_ENDIAN)
        self.logger.debug(
            f"Data Block Start Address: {self.data_block_start_address}")
        # Annotation fixed: f.read() returns bytes, not int.
        self.reserved_for_word_align: bytes = f.read(3)
        self.logger.debug(
            f"Reserved For Word Align: {self.reserved_for_word_align}")
        self.number_of_ext_data_entries: int = read_u8(
            f, endianess=Endianess.BIG_ENDIAN)
        self.logger.debug(
            f"Number of Ext Data Entries: {self.number_of_ext_data_entries}"
        )
        self.entries: List[Entry] = list()
        for entry_index in range(self.number_of_ext_data_entries):
            self.logger.debug(f"Reading Entry {entry_index}")
            self.entries.append(Entry(f))
        # +4 presumably accounts for the length field excluding itself
        # from `length` — TODO confirm against the MPLS/CLPI spec.
        self.data_block: bytes = f.read(4 + self.length -
                                        self.data_block_start_address)
        self.logger.debug(f"Data Block: {hex_log_str(self.data_block)}")
def __init__(self, f: IO):
    """Parse a PlayItem: clip reference, timing, flags and STN table."""
    super().__init__()
    self.length: int = read_u16(f, endianess=Endianess.BIG_ENDIAN)
    self.logger.debug(f"Length: {self.length}")
    self.clip_information_file_name: str = f.read(5).decode("ASCII")
    self.logger.debug(
        f"Clip Information File Name: {self.clip_information_file_name}")
    self.clip_codec_identifier: str = f.read(4).decode("ASCII")
    self.logger.debug(
        f"Clip Codec Identifier: {self.clip_codec_identifier}")
    self.flags_1: bytes = f.read(2)
    self.logger.debug(f"Flags 1: {hex_log_str(self.flags_1)}")
    # Bit 4 of the second flags byte: multi-angle marker.
    self.is_multi_angle: bool = ((self.flags_1[1] & 0b00010000) >> 4) == 1
    self.logger.debug(f"Is Multi Angle: {self.is_multi_angle}")
    # Low nibble of the second flags byte.
    self.connection_condition: int = self.flags_1[1] & 0b00001111
    self.logger.debug(f"Connection Condition: {self.connection_condition}")
    self.ref_to_stc_id: int = read_u8(f, endianess=Endianess.BIG_ENDIAN)
    self.logger.debug(f"Reference to STC ID: {self.ref_to_stc_id}")
    self.in_time: int = read_u32(f, endianess=Endianess.BIG_ENDIAN)
    self.logger.debug(f"In Time: {self.in_time}")
    self.out_time: int = read_u32(f, endianess=Endianess.BIG_ENDIAN)
    self.logger.debug(f"Out Time: {self.out_time}")
    self.u0_mask_table: U0MaskTable = U0MaskTable(f)
    self.flags_2: bytes = f.read(1)
    self.logger.debug(f"Flags 2: {hex_log_str(self.flags_2)}")
    # Top bit of the single flags-2 byte.
    self.random_access_flag: bool = (
        (self.flags_2[0] & 0b10000000) >> 7) == 1
    self.logger.debug(f"Random Access Flag: {self.random_access_flag}")
    self.still_mode: int = read_u8(f, endianess=Endianess.BIG_ENDIAN)
    self.logger.debug(f"Still Mode: {self.still_mode}")
    # Still time is only present for still mode 0x01; otherwise the
    # two bytes are reserved.
    if self.still_mode == 0x01:
        self.still_time: int = read_u16(f, endianess=Endianess.BIG_ENDIAN)
        self.logger.debug(f"Still Time: {self.still_time}")
    else:
        self.reserved: bytes = f.read(2)
        self.logger.debug(f"Reserved: {hex_log_str(self.reserved)}")
    if self.is_multi_angle:
        self.multi_angle_entries: MultiAngleEntries = MultiAngleEntries(f)
    self.stn_table: StnTable = StnTable(f)
def __init__(self, f: IO):
    """Parse an index.bdmv-style header: version and section addresses."""
    super().__init__(f)
    # 4-byte ASCII version tag mapped onto the BluRayVersion enum.
    self.version: BluRayVersion = BluRayVersion(f.read(4).decode("ASCII"))
    self.logger.debug(f'Version: {self.version.value}')
    self.indexes_start_address: int = read_u32(f,
                                               endianess=Endianess.BIG_ENDIAN)
    self.logger.debug(f'Index Start: {self.indexes_start_address}')
    self.extension_data_start_address: int = read_u32(
        f, endianess=Endianess.BIG_ENDIAN)
    self.logger.debug(
        f'Extension Data Start: {self.extension_data_start_address}')
    # 192 reserved bits, expressed as 192 // 8 = 24 bytes.
    self.reserved: bytes = f.read(192 // 8)
    self.logger.debug(f'Reserved: {hexlify(self.reserved)}')
def __init__(self, f: IO):
    """Parse a SubPath: type, flags and its list of SubPlayItems."""
    super().__init__()
    self.length: int = read_u32(f, endianess=Endianess.BIG_ENDIAN)
    self.logger.debug(f"Length: {self.length}")
    self.reserved_1: bytes = f.read(1)
    self.logger.debug(f"Reserved 1: {hex_log_str(self.reserved_1)}")
    self.sub_path_type: SubPathType = SubPathType(
        read_u8(f, endianess=Endianess.BIG_ENDIAN))
    self.logger.debug(f"Sub Path Type: {self.sub_path_type}")
    self.flags_1: bytes = f.read(2)
    self.logger.debug(f"Flags 1: {hex_log_str(self.flags_1)}")
    # Lowest bit of the second flags byte.
    self.is_repeat_sub_path: bool = (self.flags_1[1] & 0b00000001) == 1
    self.logger.debug(f"Is Repeat Sub Path: {self.is_repeat_sub_path}")
    self.reserved_2: bytes = f.read(1)
    self.logger.debug(f"Reserved 2: {hex_log_str(self.reserved_2)}")
    self.num_sub_play_items: int = read_u8(f, endianess=Endianess.BIG_ENDIAN)
    # BUG FIX: previously logged self.length here instead of the
    # sub-play-item count just read.
    self.logger.debug(f"Num Sub Play Items: {self.num_sub_play_items}")
    self.sub_play_items: List[SubPlayItem] = []
    for index in range(self.num_sub_play_items):
        self.logger.debug(f"Reading Sub Play Item {index}")
        self.sub_play_items.append(SubPlayItem(f))
def __init__(self, f: IO):
    """Parse a PlayList: counts followed by PlayItems and SubPaths."""
    super().__init__()
    self.length: int = read_u32(f, endianess=Endianess.BIG_ENDIAN)
    self.logger.debug(f"Length: {self.length}")
    self.reserved: bytes = f.read(2)
    self.logger.debug(f"Reserved: {hex_log_str(self.reserved)}")
    self.number_of_play_items: int = read_u16(
        f, endianess=Endianess.BIG_ENDIAN)
    self.logger.debug(f"Number of Play Items: {self.number_of_play_items}")
    self.number_of_sub_paths: int = read_u16(
        f, endianess=Endianess.BIG_ENDIAN)
    self.logger.debug(f"Number of Sub Paths: {self.number_of_sub_paths}")
    # Play items precede sub paths in the stream; order matters.
    self.play_items: List[PlayItem] = list()
    for index in range(self.number_of_play_items):
        self.logger.debug(f"Reading Play Item {index}")
        self.play_items.append(PlayItem(f))
    self.sub_paths: List[SubPath] = list()
    for index in range(self.number_of_sub_paths):
        self.logger.debug(f"Reading Sub Path {index}")
        self.sub_paths.append(SubPath(f))
def __init__(self, f: IO):
    """
    Read one PKG metadata entry (id, size, payload) and validate it
    against the subclass-declared constraints.

    :param f: file handle positioned at the start of the entry
    :raises InvalidPKGMetadataSizeException: size not in possible_sizes
    :raises InvalidPKGMetadataException: payload not in possible_values
    """
    super().__init__()
    self.id = read_u32(f, endianess=Endianess.BIG_ENDIAN)
    self.data_size = read_u32(f, endianess=Endianess.BIG_ENDIAN)
    self.data = f.read(self.data_size)
    self.logger.debug(f'Identifier: {hex(self.id)}')
    self.logger.info(f'Type: {self.category_name}')
    self.logger.debug(f'Data Size: {hex(self.data_size)}')
    self.logger.info(f'Data: {hexlify(self.data)}')
    # Consistency fix: access possible_sizes/possible_values uniformly via
    # self (the original mixed `self.x` and `self.__class__.x`), and use
    # truthiness instead of `len(x) != 0`. An empty collection means the
    # subclass places no constraint on that field.
    if self.possible_sizes and self.data_size not in self.possible_sizes:
        raise InvalidPKGMetadataSizeException(self.data_size,
                                              self.possible_sizes)
    if self.possible_values and self.data not in self.possible_values:
        raise InvalidPKGMetadataException(self.data, self.possible_values)
def __init__(self, f: IO):
    """Parse a program sequence: PMT PID and its streams."""
    super().__init__()
    self.spn_program_sequence_start: int = read_u32(
        f, endianess=Endianess.BIG_ENDIAN)
    self.logger.debug(
        f"SPN Program Sequence Start: {self.spn_program_sequence_start}")
    self.program_map_pid: int = read_u16(f, endianess=Endianess.BIG_ENDIAN)
    self.logger.debug(f"Program Map PID: {self.program_map_pid}")
    self.number_of_streams_in_ps: int = read_u8(
        f, endianess=Endianess.BIG_ENDIAN)
    self.logger.debug(
        f"Number of Streams in PS: {self.number_of_streams_in_ps}")
    # Group count is read but the groups themselves are not parsed here.
    self.number_of_groups_in_ps: int = read_u8(
        f, endianess=Endianess.BIG_ENDIAN)
    self.logger.debug(
        f"Number of Groups in PS: {self.number_of_groups_in_ps}")
    self.streams: List[StreamInPS] = list()
    for stream_in_ps_index in range(self.number_of_streams_in_ps):
        self.logger.debug(f"Reading Stream in PS {stream_in_ps_index}")
        self.streams.append(StreamInPS(f))
def __init__(self, f: IO):
    """Parse a SubPlayItem: clip reference, timing and sync info."""
    super().__init__()
    self.length: int = read_u16(f, endianess=Endianess.BIG_ENDIAN)
    self.logger.debug(f"Length: {self.length}")
    self.clip_information_file_name: str = f.read(5).decode("ASCII")
    self.logger.debug(
        f"Clip Information File Name: {self.clip_information_file_name}")
    self.clip_codec_identifier: str = f.read(4).decode("ASCII")
    # BUG FIX: previously logged clip_information_file_name here instead
    # of the codec identifier just read.
    self.logger.debug(
        f"Clip Codec Identifier: {self.clip_codec_identifier}")
    self.flags_1: bytes = f.read(2)
    self.logger.debug(f"Flags 1: {hex_log_str(self.flags_1)}")
    # Bits 1-4 of the second flags byte.
    self.connection_condition: int = (self.flags_1[1] & 0b00011110) >> 1
    self.logger.debug(f"Connection Condition: {self.connection_condition}")
    # Lowest bit of the second flags byte.
    self.is_multi_clip_entries: bool = (self.flags_1[1] & 0b00000001) == 1
    self.logger.debug(
        f"Is Multi Clip Entries: {self.is_multi_clip_entries}")
    self.ref_to_stc_id: int = read_u8(f, endianess=Endianess.BIG_ENDIAN)
    self.logger.debug(f"Reference to STC ID: {self.ref_to_stc_id}")
    self.in_time: int = read_u32(f, endianess=Endianess.BIG_ENDIAN)
    self.logger.debug(f"In Time: {self.in_time}")
    self.out_time: int = read_u32(f, endianess=Endianess.BIG_ENDIAN)
    self.logger.debug(f"Out Time: {self.out_time}")
    self.sync_play_item_id: int = read_u16(f, endianess=Endianess.BIG_ENDIAN)
    self.logger.debug(f"Sync Play Item ID: {self.sync_play_item_id}")
    self.sync_start_pts_of_play_item: int = read_u32(
        f, endianess=Endianess.BIG_ENDIAN)
    self.logger.debug(
        f"Sync Start PTS of Play Item: {self.sync_start_pts_of_play_item}")
    if self.is_multi_clip_entries:
        self.multi_clip_entries: MultiClipEntries = MultiClipEntries(f)
def create(f: IO) -> 'PkgMetadata':
    """
    Factory: peek the next metadata id and dispatch to the PkgMetadata
    subclass whose class-level ``id`` matches.

    :param f: file handle positioned at the start of a metadata entry
    :return: parsed subclass instance (its __init__ re-reads the id)
    :raises InvalidPKGException: if no subclass handles the id
    """
    metadata_id: int = read_u32(f, endianess=Endianess.BIG_ENDIAN)
    # Rewind the 4 id bytes so the subclass constructor reads them again.
    f.seek(f.tell() - 4)
    for subclass in PkgMetadata.__subclasses__():
        if subclass.id == metadata_id:
            return subclass(f)
    # BUG FIX: removed leftover debug print(metadata_id) before the raise.
    raise InvalidPKGException
def __init__(self, f: IO):
    """Parse an STC sequence: PCR PID, start SPN and presentation times."""
    super().__init__()
    self.pcrp_id: int = read_u16(f, endianess=Endianess.BIG_ENDIAN)
    self.logger.debug(f"PCRP Id: {self.pcrp_id}")
    self.spn_stc_start: int = read_u32(f, endianess=Endianess.BIG_ENDIAN)
    self.logger.debug(f"SPN STC Start: {self.spn_stc_start}")
    self.presentation_start_time: int = read_u32(
        f, endianess=Endianess.BIG_ENDIAN)
    self.logger.debug(
        f"Presentation Start Time: {self.presentation_start_time}")
    self.presentation_end_time: int = read_u32(
        f, endianess=Endianess.BIG_ENDIAN)
    self.logger.debug(
        f"Presentation End Time: {self.presentation_end_time}")
def __init__(self, f: IO):
    """Parse an extension-data entry: two ids plus location and length."""
    super().__init__()
    self.id1: int = read_u16(f, endianess=Endianess.BIG_ENDIAN)
    self.logger.debug(f"ID1: {self.id1}")
    self.id2: int = read_u16(f, endianess=Endianess.BIG_ENDIAN)
    self.logger.debug(f"ID2: {self.id2}")
    self.ext_data_start_address: int = read_u32(
        f, endianess=Endianess.BIG_ENDIAN)
    self.logger.debug(
        f"Ext Data Start Address: {self.ext_data_start_address}")
    self.ext_data_length: int = read_u32(f, endianess=Endianess.BIG_ENDIAN)
    self.logger.debug(f"Ext Data Length: {self.ext_data_length}")
def __init__(self, f: IO):
    """Parse a SELF-info digest record (offset, size, hash)."""
    super().__init__()
    # File handle kept on the instance for later reads by the caller.
    self.f: IO = f
    self.self_info_offset: int = read_u32(f, endianess=Endianess.BIG_ENDIAN)
    self.logger.debug(f"SELF Info Offset: {self.self_info_offset}")
    self.self_info_size: int = read_u32(f, endianess=Endianess.BIG_ENDIAN)
    self.logger.debug(f"SELF Info Size: {self.self_info_size}")
    # Expected to be zero; constant_check reports a mismatch.
    self.unknown_1: int = read_u32(f, endianess=Endianess.BIG_ENDIAN)
    constant_check(self.logger, "Unknown 1", self.unknown_1, 0x00)
    self.unknown_2: bytes = f.read(0x10)
    self.logger.info(f'Unknown 2: {hexlify(self.unknown_2)}')
    # 32-byte field labeled SHA-256 in the format.
    self.sha_256_hash: bytes = f.read(0x20)
    self.logger.info(f'SHA-256 Hash: {hexlify(self.sha_256_hash)}')
def __init__(self, f: IO):
    """Parse the main PKG header: revision/type, metadata layout, sizes,
    content id, digests, and the PSP/Vita extended header when present."""
    super().__init__(f)
    self.revision: PkgRevision = PkgRevision(f.read(2))
    self.type: PkgType = PkgType(f.read(2))
    self.logger.info(f'PKG Revision: {self.revision}')
    self.logger.info(f'PKG Type: {self.type}')
    self.metadata_offset: int = read_u32(f, endianess=Endianess.BIG_ENDIAN)
    self.metadata_count: int = read_u32(f, endianess=Endianess.BIG_ENDIAN)
    self.metadata_size: int = read_u32(f, endianess=Endianess.BIG_ENDIAN)
    self.logger.info(f'Metadata Offset: {self.metadata_offset}')
    self.logger.info(f'Metadata Count: {self.metadata_count}')
    self.logger.info(f'Metadata Size: {self.metadata_size}')
    self.item_count: int = read_u32(f, endianess=Endianess.BIG_ENDIAN)
    self.logger.info(f'Item Count: {self.item_count}')
    self.total_size: int = read_u64(f, endianess=Endianess.BIG_ENDIAN)
    self.data_offset: int = read_u64(f, endianess=Endianess.BIG_ENDIAN)
    self.data_size: int = read_u64(f, endianess=Endianess.BIG_ENDIAN)
    self.logger.info(f'Total Size: {self.total_size}')
    self.logger.info(f'Data Offset: {self.data_offset}')
    self.logger.info(f'Data Size: {self.data_size}')
    # 0x24-byte content id, decoded as UTF-8.
    self.content_id: str = f.read(0x24).decode('utf-8')
    self.logger.info(f'Content ID: {self.content_id}')
    self.padding: bytes = f.read(0x0C)
    # Fixed-size digest/signature fields, read in file order.
    self.digest: bytes = f.read(0x10)
    self.pkg_data_riv: bytes = f.read(0x10)
    self.header_cmac_hash: bytes = f.read(0x10)
    self.header_npdrm_signature: bytes = f.read(0x28)
    self.header_sha1_hash: bytes = f.read(0x08)
    self.logger.info(f'Digest: {hexlify(self.digest)}')
    self.logger.info(f'PKG Data Riv: {self.pkg_data_riv}')
    self.logger.info(f'Header CMAC Hash: {hexlify(self.header_cmac_hash)}')
    self.logger.info(
        f'Header NPDRM Signature: {hexlify(self.header_npdrm_signature)}')
    self.logger.info(f'Header SHA1 Hash: {hexlify(self.header_sha1_hash)}')
    # PSP/Vita packages carry an additional extended header.
    if self.type == PkgType.PSP_PSVITA:
        self.ext_header: PkgExtHeader = PkgExtHeader(f)
def __init__(self, f: BytesIO):
    """Parse the SFO header: format version, table offsets, entry count."""
    super().__init__(f)
    version_data: bytes = f.read(4)
    #: SFO file format version (first byte major, remaining three minor)
    self.version: str = f'{version_data[0]}.{version_data[1]}{version_data[2]}{version_data[3]}'
    self.logger.info(f'SFO Version: {self.version}')
    #: Start offset of Key Table
    self.key_table_offset: int = read_u32(f, Endianess.LITTLE_ENDIAN)
    self.logger.info(f'Key Table Offset: {self.key_table_offset}')
    #: Start offset of Data Table
    self.data_table_offset: int = read_u32(f, Endianess.LITTLE_ENDIAN)
    self.logger.info(f'Data Table Offset: {self.data_table_offset}')
    #: Entry Count (both tables)
    self.entry_count: int = read_u32(f, Endianess.LITTLE_ENDIAN)
    self.logger.info(f'Entry Count: {self.entry_count}')
def __init__(self, f: IO): super().__init__(f) #: TODO: Find what this unknown field is self.unknown_1: int = read_u32(f, endianess=Endianess.BIG_ENDIAN) constant_check(self.logger, 'Unknown 1', self.unknown_1, 1) #: Header size self.header_size: int = read_u32(f, endianess=Endianess.BIG_ENDIAN) self.logger.info(f'Header Size: {self.header_size}') #: Data size self.data_size: int = read_u32(f, endianess=Endianess.BIG_ENDIAN) self.logger.info(f'Data Size: {self.data_size}') #: Main and EXT Headers HMAC offset TODO: Check this validity self.main_and_ext_headers_hmac_offset: int = read_u32( f, endianess=Endianess.BIG_ENDIAN) self.logger.info( f'Main and Ext Headers HMAC Offset: {self.main_and_ext_headers_hmac_offset}' ) #: Metadata Header HMAC offset TODO: Check this validity self.metadata_header_hmac_offset: int = read_u32( f, endianess=Endianess.BIG_ENDIAN) self.logger.info( f'Metadata Header HMAC Offset: {self.metadata_header_hmac_offset}') #: Tail offset self.tail_offset: int = read_u64(f, endianess=Endianess.BIG_ENDIAN) self.logger.info(f'Tail Offset: {self.tail_offset}') #: Just padding probably self.padding_1: int = read_u32(f, endianess=Endianess.BIG_ENDIAN) constant_check(self.logger, 'Padding 1', self.padding_1, 0) #: PKG Key ID self.pkg_key_id: PkgKeyID = PkgKeyID( read_u32(f, endianess=Endianess.BIG_ENDIAN)) self.logger.info(f'PKG Key ID: {self.pkg_key_id}') #: Full Header HMAC offset TODO: Check this validity self.full_header_hmac_offset: int = read_u32( f, endianess=Endianess.BIG_ENDIAN) self.logger.info( f'Full Header HMAC Offset: {self.full_header_hmac_offset}') #: Just padding self.padding_2: bytes = f.read(0x14) constant_check(self.logger, 'Padding 2', self.padding_2, bytes([0x00] * 0x14))
def __init__(self, f: IO):
    """Parse a param.sfo digest record (offset, size, version, hash)."""
    super().__init__()
    # File handle kept on the instance for later reads by the caller.
    self.f: IO = f
    self.param_offset: int = read_u32(f, endianess=Endianess.BIG_ENDIAN)
    self.logger.debug(f"Param Offset: {self.param_offset}")
    self.param_size: int = read_u16(f, endianess=Endianess.BIG_ENDIAN)
    self.logger.debug(f"Param Size: {self.param_size}")
    self.unknown_int: int = read_u32(f, endianess=Endianess.BIG_ENDIAN)
    self.logger.debug(f"Unknown Int: {self.unknown_int}")
    #: May be PSP2_SYSTEM_VER
    self.psp2_disp_version: int = read_u32(f,
                                           endianess=Endianess.BIG_ENDIAN)
    # BUG FIX: previously logged self.unknown_int here instead of the
    # version value just read.
    self.logger.debug(f"PSP 2 Disp Version: {self.psp2_disp_version}")
    # Expected to be 8 zero bytes; constant_check reports a mismatch.
    self.unknown: bytes = f.read(0x08)
    constant_check(self.logger, "Unknown", self.unknown, bytes([0x00] * 8))
    self.sha_256_hash: bytes = f.read(0x20)
    self.logger.info(f'SHA-256 Hash: {hexlify(self.sha_256_hash)}')
def __init__(self, f: IO): super().__init__() #: Key Offset (relative to key_table_offset) self.key_offset: int = read_u16(f, Endianess.LITTLE_ENDIAN) self.logger.debug(f'Key Offset: {self.key_offset}') #: Data Type self.data_type: DataType = DataType(f.read(2)) self.logger.debug(f'Data Type: {self.data_type}') #: Data Length (used bytes) self.data_length: int = read_u32(f, Endianess.LITTLE_ENDIAN) self.logger.debug(f'Data Length: {self.data_length}') #: Data Max Length self.data_max_length: int = read_u32(f, Endianess.LITTLE_ENDIAN) self.logger.debug(f'Data Max Length: {self.data_max_length}') #: Data Offset (relative to data_table_offset) self.data_offset: int = read_u32(f, Endianess.LITTLE_ENDIAN) self.logger.debug(f'Data Offset: {self.data_offset}')
def verify(f: IO) -> bool:
    """
    Verifies the uncompressed data with the help of the last 4 bytes of
    the file that contain the CRC value. The file needs to be fully
    read, excluding the last 4 bytes, for this to work.

    :param f: the file handle, positioned just before the trailing CRC
    :return: if declared CRC matches computed CRC
    """
    # Current position marks the end of the CRC-covered data.
    total_bytes_no_crc: int = f.tell()
    crc: int = read_u32(f, endianess=Endianess.LITTLE_ENDIAN)
    # Re-read everything before the CRC and checksum it.
    f.seek(0)
    computed_crc = zlib.crc32(f.read(total_bytes_no_crc))
    return crc == computed_crc
def __init__(self, f: IO):
    """Parse the PlayListMark table: count plus its mark items."""
    super().__init__()
    self.length: int = read_u32(f, endianess=Endianess.BIG_ENDIAN)
    self.logger.debug(f"Length: {self.length}")
    self.number_of_playlist_mark_items: int = read_u16(
        f, endianess=Endianess.BIG_ENDIAN)
    self.logger.debug(
        f"Number of Playlist Mark Items: {self.number_of_playlist_mark_items}"
    )
    self.playlist_mark_items: List[PlaylistMarkItem] = list()
    for index in range(self.number_of_playlist_mark_items):
        self.logger.debug(f"Reading Playlist Mark Item {index}")
        self.playlist_mark_items.append(PlaylistMarkItem(f))
def __init__(self, f: IO):
    """Parse an unknown-data digest record (offset, size, hash)."""
    super().__init__()
    # File handle kept on the instance for later reads by the caller.
    self.f: IO = f
    self.unknown_data_offset: int = read_u32(
        f, endianess=Endianess.BIG_ENDIAN)
    self.logger.debug(f"Unknown Data Offset: {self.unknown_data_offset}")
    self.unknown_data_size: int = read_u16(f,
                                           endianess=Endianess.BIG_ENDIAN)
    self.logger.debug(f"Unknown Data Size: {self.unknown_data_size}")
    self.unknown: bytes = f.read(0x20)
    self.logger.info(f'Unknown: {hexlify(self.unknown)}')
    # 32-byte field labeled SHA-256 in the format.
    self.sha_256_hash: bytes = f.read(0x20)
    self.logger.info(f'SHA-256 Hash: {hexlify(self.sha_256_hash)}')
def __init__(self, f: IO):
    """Parse ProgramInfo: length, program count, then each program."""
    super().__init__()
    self.length: int = read_u32(f, endianess=Endianess.BIG_ENDIAN)
    self.logger.debug(f"Length: {self.length}")
    # 8 reserved bits, expressed as 8 // 8 = 1 byte.
    self.reserved_1: bytes = f.read(8 // 8)
    self.logger.debug(f"Reserved 1: {hex_log_str(self.reserved_1)}")
    self.number_of_programs: int = read_u8(f,
                                           endianess=Endianess.BIG_ENDIAN)
    self.logger.debug(f"Number of Programs: {self.number_of_programs}")
    self.programs: List[Program] = list()
    for program_index in range(self.number_of_programs):
        self.logger.debug(f"Program Index {program_index}")
        self.programs.append(Program(f))
def __init__(self, f: IO):
    """Parse SequenceInfo: length, ATC sequence count, then sequences."""
    super().__init__()
    self.length: int = read_u32(f, endianess=Endianess.BIG_ENDIAN)
    self.logger.debug(f"Length: {self.length}")
    # 8 reserved bits, expressed as 8 // 8 = 1 byte.
    self.reserved_1: bytes = f.read(8 // 8)
    self.logger.debug(f"Reserved 1: {hex_log_str(self.reserved_1)}")
    self.number_of_atc_sequences: int = read_u8(
        f, endianess=Endianess.BIG_ENDIAN)
    self.logger.debug(
        f"Number of ATC Sequences: {self.number_of_atc_sequences}")
    self.atc_sequences: List[AtcSequence] = list()
    for atc_sequence_index in range(self.number_of_atc_sequences):
        self.logger.debug(f"Reading ATC Sequence {atc_sequence_index}")
        self.atc_sequences.append(AtcSequence(f))
def __init__(self, f: IO):
    """Parse an ATC sequence and its contained STC sequences."""
    super().__init__()
    self.spn_atc_start: int = read_u32(f, endianess=Endianess.BIG_ENDIAN)
    self.logger.debug(f"SPN ATC Start: {self.spn_atc_start}")
    self.number_of_stc_sequences: int = read_u8(
        f, endianess=Endianess.BIG_ENDIAN)
    self.logger.debug(
        f"Number of STC Sequences: {self.number_of_stc_sequences}")
    self.offset_stc_id: int = read_u8(f, endianess=Endianess.BIG_ENDIAN)
    self.logger.debug(f"Offset STC ID: {self.offset_stc_id}")
    self.stc_sequences: List[StcSequence] = list()
    for stc_sequence_index in range(self.number_of_stc_sequences):
        self.logger.debug(f"Reading STC Sequence {stc_sequence_index}")
        self.stc_sequences.append(StcSequence(f))
def __init__(self, f: IO):
    """Parse the index table: first playback, top menu and title objects."""
    super().__init__()
    self.length: int = read_u32(f, endianess=Endianess.BIG_ENDIAN)
    self.logger.debug(f"Length: {self.length}")
    self.first_playback_object: FirstPlaybackObject = FirstPlaybackObject(
        f)
    self.top_menu_object: TopMenuObject = TopMenuObject(f)
    self.number_of_titles: int = read_u16(f,
                                          endianess=Endianess.BIG_ENDIAN)
    self.logger.debug(f"Number of Titles: {self.number_of_titles}")
    self.titles: List[TitleObject] = list()
    for title_index in range(self.number_of_titles):
        self.logger.debug(f"Reading Title Object {title_index}")
        self.titles.append(TitleObject(f))
def __init__(self, f: IO): super().__init__() #: Name (read later after reading the name data) self.name: str = None #: 128-bit MD5 Name Digest self.hash: bytes = f.read(16) self.logger.debug(f'Hash: {hexlify(self.hash)}') #: Entry Block Index self.block_index: int = read_u32(f, endianess=Endianess.BIG_ENDIAN) self.logger.debug(f'Block Index: {self.block_index}') #: Entry Decompressed Size self.decompressed_size: int = struct.unpack( '>Q', bytes([0, 0, 0]) + f.read(5))[0] self.logger.debug(f'Decompressed Size: {self.decompressed_size}') #: Entry Offset self.offset: int = struct.unpack('>Q', bytes([0, 0, 0]) + f.read(5))[0] self.logger.debug(f'Offset: {self.offset}')
def __init__(self, path: str, verify=True):
    """
    Parse an IRD file: compressed ISO9660 header/footer blobs, region and
    file hashes, Data1/Data2 key material, PIC data and the trailing CRC.

    :param path: path to the IRD file
    :param verify: whether to verify the file's trailing CRC32
    :raises InvalidIRDCRCException: if verification is enabled and fails
    """
    super().__init__(path, IRDHeader)
    if self.version == 7:
        # Annotation fixed: read_u32 returns int, not str.
        # NOTE(review): unlike the other reads here, no endianess is
        # passed — confirm the default matches LITTLE_ENDIAN.
        self.id: int = read_u32(self.file_handle)
        self.logger.debug(f"ID (v7 only): {self.id}")
    #: ISO9660 Header Size
    self.iso_header_size: int = read_u32(self.file_handle,
                                         endianess=Endianess.LITTLE_ENDIAN)
    self.logger.debug(f"Header Size: {self.iso_header_size}")
    # TODO: Write ISO9660 Header Parser
    #: ISO9660 Header (wbits 15+16 selects a gzip-wrapped zlib stream)
    self.iso_header: bytes = zlib.decompress(
        self.file_handle.read(self.iso_header_size), 15 + 16)
    #: ISO9660 Footer Size
    self.iso_footer_size: int = read_u32(self.file_handle,
                                         endianess=Endianess.LITTLE_ENDIAN)
    self.logger.debug(f"Footer Size: {self.iso_footer_size}")
    # TODO: Write ISO9660 Footer Parser
    # BUG FIX: gzip.GzipFile(fileobj=...) requires a file-like object;
    # passing the raw bytes from read() crashed on first footer access.
    from io import BytesIO
    #: ISO9660 Footer
    self.iso_footer: IO = gzip.GzipFile(
        fileobj=BytesIO(self.file_handle.read(self.iso_footer_size)))
    #: TODO: Document this!
    self.region_count: int = read_u8(self.file_handle)
    self.logger.debug(f"Region Count: {self.region_count}")
    #: TODO: Document this!
    self.region_hashes: List[bytes] = []
    for i in range(self.region_count):
        region_hash: bytes = self.file_handle.read(16)
        self.region_hashes.append(region_hash)
        self.logger.debug(f"Region {i} hash: {hexlify(region_hash)}")
    #: Number of file hash entries
    self.file_count: int = read_u32(self.file_handle,
                                    endianess=Endianess.LITTLE_ENDIAN)
    self.logger.debug(f"File Count: {self.file_count}")
    #: File Key -> File Hash map
    self.file_map: Dict[bytes, bytes] = {}
    for i in range(self.file_count):
        file_key: bytes = self.file_handle.read(8)
        file_hash: bytes = self.file_handle.read(16)
        self.file_map[file_key] = file_hash
        self.logger.debug(
            f"File {i}: {hexlify(file_key)} -> {hexlify(file_hash)}")
    #: 4 byte padding
    self.padding: bytes = self.file_handle.read(4)
    # PIC lives before Data1/Data2 from version 9 on, after them before.
    if self.version >= 9:
        #: See http://www.t10.org/ftp/t10/document.04/04-328r0.pdf#page=43
        self.pic: bytes = self.file_handle.read(115)
        self.logger.debug(f"PIC: {hexlify(self.pic).decode('ASCII')}")
    #: Used to derive the disc AES encryption pkg_internal_fs_key
    self.data1: bytes = self.file_handle.read(16)
    #: TODO: Document this!
    self.data2: bytes = self.file_handle.read(16)
    #: Data2 Decrypted
    self.data2_decrypted: bytes = IRD.decrypt_data2(self.data2)
    #: Data2 Patched
    self.data2_patched: bytes = IRD.patch_data2(self.data2)
    self.logger.info(f"Data1: {hexlify(self.data1).decode('ASCII')}")
    self.logger.info(f"Data2: {hexlify(self.data2).decode('ASCII')}")
    self.logger.info(
        f"Data2(decrypted): {hexlify(self.data2_decrypted).decode('ASCII')}"
    )
    self.logger.info(
        f"Data2(patched): {hexlify(self.data2_patched).decode('ASCII')}")
    if self.version < 9:
        #: See http://www.t10.org/ftp/t10/document.04/04-328r0.pdf#page=43
        self.pic: bytes = self.file_handle.read(115)
        self.logger.debug(f"PIC: {hexlify(self.pic).decode('ASCII')}")
    if self.version > 7:
        #: TODO: Document this! (annotation fixed: read_u32 returns int)
        self.uid: int = read_u32(self.file_handle,
                                 endianess=Endianess.LITTLE_ENDIAN)
        self.logger.debug(f"UID: {self.uid}")
    # BUG FIX: the original `if verify and self.verify(...)` fell through
    # to the raise whenever verify=False was passed, so disabling
    # verification raised InvalidIRDCRCException unconditionally.
    if verify:
        if self.verify(self.file_handle):
            self.logger.info("CRC Verified")
        else:
            raise InvalidIRDCRCException()
    self.logger.info("File successfully parsed")