def __save_new(self, fileobj, atoms, ilst, offset):
    # Build a new meta atom (an Apple "hdlr" handler plus the new ilst)
    # and splice it into moov.udta.
    hdlr = Atom.render("hdlr", "\x00" * 8 + "mdirappl" + "\x00" * 9)
    meta = Atom.render("meta", "\x00\x00\x00\x00" + hdlr + ilst)
    moov, udta = atoms.path("moov", "udta")
    insert_bytes(fileobj, len(meta), udta.offset + offset + 8)
    fileobj.seek(udta.offset + offset + 8)
    fileobj.write(meta)
    self.__update_parents(fileobj, [moov, udta], len(meta), offset)
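# Hedged sketch, not mutagen's actual Atom.render: it only illustrates the
# behaviour the code above assumes, namely prefixing a payload with a 32-bit
# big-endian size (header bytes included) and the 4-byte atom name, which is
# the basic MP4/M4A atom layout. render_atom_sketch is a hypothetical name;
# 64-bit extended sizes are ignored here.
import struct

def render_atom_sketch(name, data):
    # The size field counts the 8 header bytes plus the payload.
    return struct.pack(">I", len(data) + 8) + name + data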
def save(self, filename=None, deleteid3=False):
    """Save metadata blocks to a file.

    If no filename is given, the one most recently loaded is used.
    """

    if filename is None:
        filename = self.filename
    f = open(filename, "rb+")

    # Ensure we've got padding at the end, and only at the end.
    # If adding makes it too large, we'll scale it down later.
    self.metadata_blocks.append(Padding("\x00" * 1020))
    MetadataBlock.group_padding(self.metadata_blocks)

    header = self.__check_header(f)
    available = self.__find_audio_offset(f) - header  # "fLaC" and maybe ID3
    data = MetadataBlock.writeblocks(self.metadata_blocks)

    # Delete ID3v2
    if deleteid3 and header > 4:
        available += header - 4
        header = 4

    if len(data) > available:
        # If we have too much data, see if we can reduce padding.
        padding = self.metadata_blocks[-1]
        newlength = padding.length - (len(data) - available)
        if newlength > 0:
            padding.length = newlength
            data = MetadataBlock.writeblocks(self.metadata_blocks)
            assert len(data) == available
    elif len(data) < available:
        # If we have too little data, increase padding.
        self.metadata_blocks[-1].length += available - len(data)
        data = MetadataBlock.writeblocks(self.metadata_blocks)
        assert len(data) == available

    if len(data) != available:
        # We couldn't reduce the padding enough.
        diff = len(data) - available
        insert_bytes(f, diff, header)

    f.seek(header - 4)
    f.write("fLaC" + data)

    # Delete ID3v1
    if deleteid3:
        try:
            f.seek(-128, 2)
        except IOError:
            pass
        else:
            if f.read(3) == "TAG":
                f.seek(-128, 2)
                f.truncate()
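# Minimal usage sketch for the save() method above, assuming the mutagen
# FLAC interface; "example.flac" is a hypothetical file name. Vorbis
# comments behave like a dictionary on the FLAC object.
from mutagen.flac import FLAC

audio = FLAC("example.flac")
audio["title"] = "New Title"
audio.save(deleteid3=True)  # also strips ID3v1/ID3v2 tags if present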
def replace(klass, fileobj, old_pages, new_pages):
    """Replace old_pages with new_pages within fileobj.

    old_pages must have come from reading fileobj originally.
    new_pages are assumed to have the 'same' data as old_pages,
    and so the serial and sequence numbers will be copied, as will
    the flags for the first and last pages.

    fileobj will be resized and pages renumbered as necessary. As
    such, it must be opened r+b or w+b.
    """

    # Number the new pages starting from the first old page.
    first = old_pages[0].sequence
    for page, seq in zip(new_pages, range(first, first + len(new_pages))):
        page.sequence = seq
        page.serial = old_pages[0].serial

    new_pages[0].first = old_pages[0].first
    new_pages[0].last = old_pages[0].last
    new_pages[0].continued = old_pages[0].continued

    new_pages[-1].first = old_pages[-1].first
    new_pages[-1].last = old_pages[-1].last
    new_pages[-1].complete = old_pages[-1].complete
    if not new_pages[-1].complete and len(new_pages[-1].packets) == 1:
        new_pages[-1].position = -1L

    new_data = "".join(map(klass.write, new_pages))

    # Make room in the file for the new data.
    delta = len(new_data)
    fileobj.seek(old_pages[0].offset, 0)
    insert_bytes(fileobj, delta, old_pages[0].offset)
    fileobj.seek(old_pages[0].offset, 0)
    fileobj.write(new_data)
    new_data_end = old_pages[0].offset + delta

    # Go through the old pages and delete them. Since we shifted
    # the data down the file, we need to adjust their offsets. We
    # also need to go backwards, so we don't adjust the deltas of
    # the other pages.
    old_pages.reverse()
    for old_page in old_pages:
        adj_offset = old_page.offset + delta
        delete_bytes(fileobj, old_page.size, adj_offset)

    # Finally, if there's any discrepancy in length, we need to
    # renumber the pages for the logical stream.
    if len(old_pages) != len(new_pages):
        fileobj.seek(new_data_end, 0)
        serial = new_pages[-1].serial
        sequence = new_pages[-1].sequence + 1
        klass.renumber(fileobj, serial, sequence)
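# Hedged sketch of how replace() is typically driven when rewriting an Ogg
# comment header: unpack the original pages into packets, swap in the new
# packet data, repack into pages, and hand both page lists to replace().
# OggPage is assumed to be the class these methods belong to (it is passed
# in above as klass), and to_packets/from_packets are assumed companion
# classmethods; rewrite_first_packet_sketch is a hypothetical helper name.
def rewrite_first_packet_sketch(fileobj, old_pages, new_packet_data):
    packets = OggPage.to_packets(old_pages)
    packets[0] = new_packet_data
    new_pages = OggPage.from_packets(packets, old_pages[0].sequence)
    OggPage.replace(fileobj, old_pages, new_pages)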
def __save_existing(self, fileobj, atoms, path, data, offset):
    # Replace the old ilst atom.
    ilst = path.pop()
    delta = len(data) - ilst.length
    fileobj.seek(ilst.offset + offset)
    if delta > 0:
        insert_bytes(fileobj, delta, ilst.offset + offset)
    elif delta < 0:
        delete_bytes(fileobj, -delta, ilst.offset + offset)
    fileobj.seek(ilst.offset + offset)
    fileobj.write(data)
    self.__update_parents(fileobj, path, delta, offset)
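# Hedged sketch of the insert_bytes helper these save routines rely on
# (the real helper moves data in chunks so it can cope with large files;
# this naive version only shows the intended effect): grow the file by
# `size` bytes at `offset`, shifting the tail of the file toward the end.
# insert_bytes_sketch is a hypothetical name; delete_bytes is assumed to
# do the inverse.
def insert_bytes_sketch(fileobj, size, offset):
    fileobj.seek(0, 2)
    end = fileobj.tell()
    fileobj.seek(offset)
    tail = fileobj.read(end - offset)
    fileobj.seek(offset)
    fileobj.write("\x00" * size + tail)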
def __save_new(self, fileobj, atoms, ilst):
    # Create a brand-new meta atom (hdlr + ilst + padding) and splice it
    # into moov.udta, creating udta first if the file lacks one.
    hdlr = Atom.render("hdlr", "\x00" * 8 + "mdirappl" + "\x00" * 9)
    meta = Atom.render(
        "meta", "\x00\x00\x00\x00" + hdlr + ilst + self.__pad_ilst(ilst))
    try:
        path = atoms.path("moov", "udta")
    except KeyError:
        # moov.udta not found -- create one
        path = atoms.path("moov")
        meta = Atom.render("udta", meta)
    offset = path[-1].offset + 8
    insert_bytes(fileobj, len(meta), offset)
    fileobj.seek(offset)
    fileobj.write(meta)
    self.__update_parents(fileobj, path, len(meta))
    self.__update_offsets(fileobj, atoms, len(meta), offset)
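# Hedged sketch of what __pad_ilst is assumed to produce: a "free" padding
# atom sized so a later, slightly larger ilst can be written without
# resizing the whole file. When no explicit length is given, it is assumed
# to round the tag data up to the next 1024-byte boundary. pad_ilst_sketch
# is a hypothetical stand-in; Atom.render is the same helper used above.
def pad_ilst_sketch(data, length=None):
    if length is None:
        length = ((len(data) + 1023) & ~1023) - len(data)
    return Atom.render("free", "\x00" * length)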
def __save_existing(self, fileobj, atoms, path, data):
    # Replace the old ilst atom.
    ilst = path.pop()
    offset = ilst.offset
    length = ilst.length

    # Check for padding "free" atoms
    meta = path[-1]
    index = meta.children.index(ilst)
    try:
        prev = meta.children[index-1]
        if prev.name == "free":
            offset = prev.offset
            length += prev.length
    except IndexError:
        pass
    try:
        next = meta.children[index+1]
        if next.name == "free":
            length += next.length
    except IndexError:
        pass

    delta = len(data) - length
    if delta > 0 or (delta < 0 and delta > -8):
        data += self.__pad_ilst(data)
        delta = len(data) - length
        insert_bytes(fileobj, delta, offset)
    elif delta < 0:
        data += self.__pad_ilst(data, -delta - 8)
        delta = 0

    fileobj.seek(offset)
    fileobj.write(data)
    self.__update_parents(fileobj, path, delta)
    self.__update_offsets(fileobj, atoms, delta, offset)
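# Minimal usage sketch for the MP4 save path above, assuming the mutagen
# MP4 interface; "example.m4a" is a hypothetical file name. Depending on
# whether the file already contains an ilst atom, saving presumably ends
# up in __save_existing or __save_new.
from mutagen.mp4 import MP4

audio = MP4("example.m4a")
audio["\xa9nam"] = ["New Title"]   # iTunes-style title tag
audio.save()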