def _header_chunk(header: Optional[Dict[str, Any]], data: bytes,
                  data_out: Dict[str, Any]):
    """
    Represents .ani's header chunk, which has an identifier of "anih".
    """
    if header is not None:
        raise SyntaxError("This ani has 2 headers!")

    if len(data) == 36:
        # Strip the leading cbSize field so the offsets below start at nFrames.
        data = data[4:]

    h_data = {
        "num_frames": to_int(data[0:4]),
        "num_steps": to_int(data[4:8]),
        "width": to_int(data[8:12]),
        "height": to_int(data[12:16]),
        "bit_count": to_int(data[16:20]),
        # Plane count is hardcoded to 1, the value expected for cursors.
        "num_planes": 1,
        "display_rate": to_int(data[24:28]),
        "contains_seq": bool((to_int(data[28:32]) >> 1) & 1),
        "is_in_ico": bool(to_int(data[28:32]) & 1),
    }

    data_out["header"] = h_data
    # Default sequence and rate lists; "rate" and "seq " chunks may override them.
    data_out["seq"] = [
        i % h_data["num_frames"] for i in range(h_data["num_steps"])
    ]
    data_out["rate"] = [h_data["display_rate"]] * h_data["num_steps"]
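
# For reference, the full 36-byte "anih" header can also be unpacked in one
# shot with struct (nine little-endian 32-bit fields). This standalone helper
# only illustrates the layout sliced apart above and is not part of the
# parsing path; note that the parser above hardcodes num_planes to 1 instead
# of reading the field.
def _unpack_anih_reference(raw: bytes) -> Dict[str, int]:
    import struct
    (cb_size, n_frames, n_steps, width, height,
     bit_count, n_planes, disp_rate, flags) = struct.unpack("<9I", raw[:36])
    return {
        "num_frames": n_frames, "num_steps": n_steps, "width": width,
        "height": height, "bit_count": bit_count, "num_planes": n_planes,
        "display_rate": disp_rate, "contains_seq": bool((flags >> 1) & 1),
        "is_in_ico": bool(flags & 1),
    }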
def _read_chunk(cls, offset: int, buffer: BinaryIO,
                nominal_size: int) -> Tuple[Image.Image, int, int, int]:
    buffer.seek(offset)

    # Begin to check if this is valid...
    chunk_size = to_int(buffer.read(4))
    cls._assert(
        chunk_size == cls.IMG_CHUNK_H_SIZE,
        f"Image chunks must be {cls.IMG_CHUNK_H_SIZE} bytes!",
    )
    cls._assert(
        to_int(buffer.read(4)) == cls.CURSOR_TYPE,
        "Type does not match type in TOC!",
    )
    cls._assert(
        to_int(buffer.read(4)) == nominal_size,
        "Nominal sizes in TOC and image header don't match!",
    )
    cls._assert(
        to_int(buffer.read(4)) == 1,
        "Unsupported version of image header...",
    )

    # Checks are done, load the rest of the header as we are good from here...
    rest_of_chunk = buffer.read(20)
    width, height, x_hot, y_hot, delay = [
        to_int(rest_of_chunk[i:i + 4]) for i in range(0, len(rest_of_chunk), 4)
    ]

    cls._assert(width <= 0x7FFF, "Invalid width!")
    cls._assert(height <= 0x7FFF, "Invalid height!")

    # Clamp out-of-range hotspots to the top-left corner.
    x_hot, y_hot = (
        x_hot if (0 <= x_hot < width) else 0,
        y_hot if (0 <= y_hot < height) else 0,
    )

    # Pixels are stored row by row, so the array shape is (height, width, 4).
    img_data = np.frombuffer(
        buffer.read(width * height * 4), dtype=np.uint8
    ).reshape(height, width, 4)

    # ARGB packed in little-endian format, therefore it's actually BGRA when
    # read sequentially...
    image = Image.fromarray(img_data[:, :, (2, 1, 0, 3)], "RGBA")

    return image, x_hot, y_hot, delay
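
# A tiny self-contained check of the channel swizzle used above: Xcursor
# pixels are little-endian ARGB words, i.e. B, G, R, A byte order on disk,
# so reordering the last axis with (2, 1, 0, 3) yields RGBA. This demo
# function is illustrative only and is not called anywhere in the module.
def _demo_bgra_to_rgba() -> None:
    bgra = np.array([[[0x10, 0x20, 0x30, 0xFF]]], dtype=np.uint8)  # one pixel
    rgba = bgra[:, :, (2, 1, 0, 3)]
    assert rgba.tolist() == [[[0x30, 0x20, 0x10, 0xFF]]]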
def _rate_chunk(header: Dict[str, Any], data: bytes, data_out: Dict[str, Any]):
    """
    Represents .ani's rate chunk, which has an identifier of "rate".
    """
    if header is None:
        raise SyntaxError("rate chunk came before header!")

    if (len(data) // 4) != header["num_steps"]:
        raise SyntaxError(
            "Length of rate chunk does not match the number of steps!")

    data_out["rate"] = [to_int(data[i:i + 4]) for i in range(0, len(data), 4)]
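
# Note: the values stored in the "rate" chunk (and display_rate in the
# header) are expressed in jiffies, i.e. 1/60ths of a second. A conversion
# helper like the one below is a common follow-up step; its name is
# illustrative and it is not referenced elsewhere in the module.
def _jiffies_to_ms(jiffies: int) -> int:
    return int(jiffies * (1000 / 60))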
def _seq_chunk(header: Dict[str, Any], data: bytes, data_out: Dict[str, Any]):
    """
    Represents .ani's sequence chunk, which has an identifier of "seq ".
    """
    if header is None:
        raise SyntaxError("seq chunk came before header!")

    if (len(data) // 4) != header["num_steps"]:
        raise SyntaxError(
            "Length of sequence chunk does not match the number of steps!")

    data_out["seq"] = [to_int(data[i:i + 4]) for i in range(0, len(data), 4)]
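
# The chunk handlers above are presumably tied to their RIFF identifiers by a
# dispatch table along these lines. The actual mapping and name used by the
# .ani reader may differ; this dict is an assumption shown for orientation
# and is otherwise unused.
_ANI_CHUNKS_SKETCH = {
    b"anih": _header_chunk,
    b"rate": _rate_chunk,
    b"seq ": _seq_chunk,
}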
def read(cls, cur_file: BinaryIO) -> AnimatedCursor:
    """
    Read an xcur or X-Org cursor file from the specified file buffer.

    :param cur_file: The file buffer with xcursor data.
    :return: An AnimatedCursor object; non-animated cursors will contain
             only 1 frame.
    """
    magic_data = cur_file.read(4)

    cls._assert(cls.check(magic_data), "Not an XCursor File!!!")

    header_size = to_int(cur_file.read(4))
    cls._assert(header_size == cls.HEADER_SIZE,
                f"Header size is not {cls.HEADER_SIZE}!")

    version = to_int(cur_file.read(4))  # File version (not used).
    # Number of cursors...
    num_toc = to_int(cur_file.read(4))

    # Used to store cursor offsets per size...
    nominal_sizes = {}
    for i in range(num_toc):
        main_type = to_int(cur_file.read(4))

        if main_type == cls.CURSOR_TYPE:
            nominal_size = to_int(cur_file.read(4))
            offset = to_int(cur_file.read(4))

            if nominal_size not in nominal_sizes:
                nominal_sizes[nominal_size] = [offset]
            else:
                nominal_sizes[nominal_size].append(offset)
        else:
            # Not an image entry (e.g. a comment chunk); skip the remaining
            # 8 bytes of this 12-byte TOC entry so the next one lines up.
            cur_file.read(8)

    max_len = max(len(nominal_sizes[size]) for size in nominal_sizes)

    cursors = []
    delays = []

    for i in range(max_len):
        cursor = Cursor()
        sub_delays = []

        for size, offsets in nominal_sizes.items():
            if i < len(offsets):
                img, x_hot, y_hot, delay = cls._read_chunk(
                    offsets[i], cur_file, size)
                cursor.add(CursorIcon(img, x_hot, y_hot))
                sub_delays.append(delay)

        cursors.append(cursor)
        delays.append(max(sub_delays))

    return AnimatedCursor(cursors, delays)
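
# Minimal usage sketch for the reader above, assuming it is exposed as a
# classmethod on a format class; pass that class in as fmt_cls, since its
# real name is not shown here. Illustrative only, not called by the module.
def _example_load_xcursor(fmt_cls, path: str) -> AnimatedCursor:
    with open(path, "rb") as cursor_file:
        return fmt_cls.read(cursor_file)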
def read_chunks(
    buffer: BinaryIO,
    skip_chunks: Optional[Set[bytes]] = None,
    list_chunks: Optional[Set[bytes]] = None,
    byteorder="little",
) -> Iterator[Tuple[bytes, int, bytes]]:
    """
    Reads a valid RIFF file, yielding all of the chunks in the file...

    :param buffer: The file buffer with valid RIFF data.
    :param skip_chunks: A set of length-4 bytes objects specifying chunks which
                        are not actual chunks, but identifiers followed by
                        valid chunks.
    :param list_chunks: A set of length-4 bytes objects specifying chunks which
                        contain sub-chunks, meaning their data should be
                        sub-iterated.
    :param byteorder: The byte order of the integers in the file, "big" or
                      "little". Defaults to "little".
    :return: A generator which yields each chunk's identifier, size, and data
             as bytes.
    """
    if skip_chunks is None:
        skip_chunks = set()
    if list_chunks is None:
        list_chunks = set()

    while True:
        next_id = buffer.read(4)

        if next_id == b"":
            return

        if next_id in skip_chunks:
            continue

        size = to_int(buffer.read(4), byteorder=byteorder)

        if next_id in list_chunks:
            # List chunks hold sub-chunks; recurse into their data.
            yield from read_chunks(BytesIO(buffer.read(size)),
                                   skip_chunks, list_chunks, byteorder)
            continue

        yield next_id, size, buffer.read(size)
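
# Usage sketch for read_chunks() on an .ani file: after consuming the
# 12-byte "RIFF"<size>"ACON" preamble, the LIST chunks hold sub-chunks, and
# their "fram"/"INFO" form types are bare identifiers, so they go in
# skip_chunks. The chunk sets and function name are assumptions made for
# illustration; the module's own caller may configure this differently.
def _example_iter_ani_chunks(path: str) -> None:
    with open(path, "rb") as ani_file:
        ani_file.read(12)  # "RIFF", total size, "ACON"
        for chunk_id, size, data in read_chunks(
                ani_file, skip_chunks={b"fram", b"INFO"},
                list_chunks={b"LIST"}):
            print(chunk_id, size)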