def save(self, filename=None, deleteid3=False):
    """Save metadata blocks to a file.

    If no filename is given, the one most recently loaded is used.

    Keyword arguments:
    filename -- target file; defaults to the most recently loaded one
    deleteid3 -- also strip any ID3v2 header and trailing ID3v1 tag

    Fix: the file handle was previously leaked (never closed); all work
    now happens inside try/finally so the handle is always released.
    """
    if filename is None:
        filename = self.filename
    f = open(filename, 'rb+')
    try:
        # Ensure we've got padding at the end, and only at the end.
        # If adding makes it too large, we'll scale it down later.
        self.metadata_blocks.append(Padding('\x00' * 1020))
        MetadataBlock.group_padding(self.metadata_blocks)

        header = self.__check_header(f)
        # Space available for metadata: everything between the header
        # ("fLaC" and maybe an ID3v2 tag) and the first audio frame.
        available = self.__find_audio_offset(f) - header
        data = MetadataBlock.writeblocks(self.metadata_blocks)

        # Delete ID3v2: reclaim its bytes as metadata space and shrink
        # the header to just the 4-byte "fLaC" magic.
        if deleteid3 and header > 4:
            available += header - 4
            header = 4

        if len(data) > available:
            # Too much data; see if we can shrink the trailing padding
            # block (group_padding guarantees it is last) to fit.
            padding = self.metadata_blocks[-1]
            newlength = padding.length - (len(data) - available)
            if newlength > 0:
                padding.length = newlength
                data = MetadataBlock.writeblocks(self.metadata_blocks)
                assert len(data) == available
        elif len(data) < available:
            # Too little data; grow the padding to fill the gap exactly.
            self.metadata_blocks[-1].length += (available - len(data))
            data = MetadataBlock.writeblocks(self.metadata_blocks)
            assert len(data) == available

        if len(data) != available:
            # Padding couldn't absorb the difference; physically grow
            # the file to make room.
            diff = (len(data) - available)
            insert_bytes(f, diff, header)

        # header includes the 4-byte "fLaC" magic, which we rewrite here.
        f.seek(header - 4)
        f.write("fLaC" + data)

        # Delete ID3v1 (a fixed 128-byte "TAG" trailer, if present).
        if deleteid3:
            try:
                f.seek(-128, 2)
            except IOError:
                # File shorter than 128 bytes; nothing to strip.
                pass
            else:
                if f.read(3) == "TAG":
                    f.seek(-128, 2)
                    f.truncate()
    finally:
        f.close()
def save(self, filename=None, deleteid3=False):
    """Save metadata blocks to a file.

    If no filename is given, the one most recently loaded is used.
    """
    if filename is None:
        filename = self.filename
    # NOTE(review): this handle is never closed — relies on refcounting
    # to flush/close; consider try/finally in a follow-up.
    f = open(filename, 'rb+')

    # Ensure we've got padding at the end, and only at the end.
    # If adding makes it too large, we'll scale it down later.
    self.metadata_blocks.append(Padding('\x00' * 1020))
    MetadataBlock.group_padding(self.metadata_blocks)

    header = self.__check_header(f)
    # Bytes available for metadata between the header and the audio.
    available = self.__find_audio_offset(f) - header  # "fLaC" and maybe ID3
    data = MetadataBlock.writeblocks(self.metadata_blocks)

    # Delete ID3v2: reclaim its space, shrink header to the 4-byte magic.
    if deleteid3 and header > 4:
        available += header - 4
        header = 4

    if len(data) > available:
        # If we have too much data, see if we can reduce padding.
        padding = self.metadata_blocks[-1]
        newlength = padding.length - (len(data) - available)
        if newlength > 0:
            padding.length = newlength
            data = MetadataBlock.writeblocks(self.metadata_blocks)
            assert len(data) == available
    elif len(data) < available:
        # If we have too little data, increase padding.
        self.metadata_blocks[-1].length += (available - len(data))
        data = MetadataBlock.writeblocks(self.metadata_blocks)
        assert len(data) == available

    if len(data) != available:
        # We couldn't reduce the padding enough; grow the file instead.
        diff = (len(data) - available)
        insert_bytes(f, diff, header)

    # header counts the 4-byte "fLaC" magic, rewritten along with data.
    f.seek(header - 4)
    f.write("fLaC" + data)

    # Delete ID3v1 (fixed 128-byte trailer starting with "TAG").
    if deleteid3:
        try:
            f.seek(-128, 2)
        except IOError:
            # File shorter than 128 bytes; nothing to strip.
            pass
        else:
            if f.read(3) == "TAG":
                f.seek(-128, 2)
                f.truncate()
def save(self):
    """Write the current tags back to the ASF file.

    Tags are first sorted into the three WM metadata containers based
    on their language/stream qualifiers, any missing container objects
    are created, and then the full header is re-rendered and written,
    resizing the file as needed.
    """
    # Sort attributes into the object that can legally carry them.
    self.to_extended_content_description = {}
    self.to_metadata = {}
    self.to_metadata_library = []
    for name, value in self.tags:
        if name in _standard_attribute_names:
            continue
        unqualified = value.language is None and value.stream is None
        stream_only = value.language is None and value.stream is not None
        if unqualified and name not in self.to_extended_content_description:
            # Plain attribute: extended content description object.
            self.to_extended_content_description[name] = value
        elif stream_only and name not in self.to_metadata:
            # Stream-qualified attribute: metadata object.
            self.to_metadata[name] = value
        else:
            # Language-qualified or duplicate: metadata library object.
            self.to_metadata_library.append((name, value))

    # Create any container objects the file didn't already have.
    if not self.content_description_obj:
        self.content_description_obj = ContentDescriptionObject()
        self.objects.append(self.content_description_obj)
    if not self.extended_content_description_obj:
        self.extended_content_description_obj = ExtendedContentDescriptionObject()
        self.objects.append(self.extended_content_description_obj)
    if not self.header_extension_obj:
        self.header_extension_obj = HeaderExtensionObject()
        self.objects.append(self.header_extension_obj)
    # These two live inside the header extension, so it must exist first.
    if not self.metadata_obj:
        self.metadata_obj = MetadataObject()
        self.header_extension_obj.objects.append(self.metadata_obj)
    if not self.metadata_library_obj:
        self.metadata_library_obj = MetadataLibraryObject()
        self.header_extension_obj.objects.append(self.metadata_library_obj)

    # Render every object, then prepend the 30-byte header object
    # (GUID + size + object count + two reserved bytes).
    rendered = "".join([obj.render(self) for obj in self.objects])
    rendered = (HeaderObject.GUID +
                struct.pack("<QL", len(rendered) + 30, len(self.objects)) +
                "\x01\x02" + rendered)

    fileobj = file(self.filename, "rb+")
    try:
        size = len(rendered)
        # Resize the header region in place before overwriting it.
        if size > self.size:
            insert_bytes(fileobj, size - self.size, self.size)
        if size < self.size:
            delete_bytes(fileobj, self.size - size, 0)
        fileobj.seek(0)
        fileobj.write(rendered)
    finally:
        fileobj.close()
def replace(klass, fileobj, old_pages, new_pages): """Replace old_pages with new_pages within fileobj. old_pages must have come from reading fileobj originally. new_pages are assumed to have the 'same' data as old_pages, and so the serial and sequence numbers will be copied, as will the flags for the first and last pages. fileobj will be resized and pages renumbered as necessary. As such, it must be opened r+b or w+b. """ # Number the new pages starting from the first old page. first = old_pages[0].sequence for page, seq in zip(new_pages, range(first, first + len(new_pages))): page.sequence = seq page.serial = old_pages[0].serial new_pages[0].first = old_pages[0].first new_pages[0].last = old_pages[0].last new_pages[0].continued = old_pages[0].continued new_pages[-1].first = old_pages[-1].first new_pages[-1].last = old_pages[-1].last new_pages[-1].complete = old_pages[-1].complete if not new_pages[-1].complete and len(new_pages[-1].packets) == 1: new_pages[-1].position = -1L new_data = "".join(map(klass.write, new_pages)) # Make room in the file for the new data. delta = len(new_data) fileobj.seek(old_pages[0].offset, 0) insert_bytes(fileobj, delta, old_pages[0].offset) fileobj.seek(old_pages[0].offset, 0) fileobj.write(new_data) new_data_end = old_pages[0].offset + delta # Go through the old pages and delete them. Since we shifted # the data down the file, we need to adjust their offsets. We # also need to go backwards, so we don't adjust the deltas of # the other pages. old_pages.reverse() for old_page in old_pages: adj_offset = old_page.offset + delta delete_bytes(fileobj, old_page.size, adj_offset) # Finally, if there's any discrepency in length, we need to # renumber the pages for the logical stream. if len(old_pages) != len(new_pages): fileobj.seek(new_data_end, 0) serial = new_pages[-1].serial sequence = new_pages[-1].sequence + 1 klass.renumber(fileobj, serial, sequence)
def __save_new(self, fileobj, atoms, ilst):
    """Insert a brand-new 'meta' atom (containing ilst) into the file.

    Used when the file has no existing ilst atom to replace; the meta
    atom is placed inside moov.udta, creating udta if it is missing.
    """
    # A minimal hdlr atom marking this meta atom as iTunes metadata.
    hdlr = Atom.render("hdlr", "\x00" * 8 + "mdirappl" + "\x00" * 9)
    payload = hdlr + ilst + self.__pad_ilst(ilst)
    meta = Atom.render("meta", "\x00\x00\x00\x00" + payload)
    try:
        path = atoms.path("moov", "udta")
    except KeyError:
        # moov.udta not found -- create one wrapping the meta atom.
        path = atoms.path("moov")
        meta = Atom.render("udta", meta)

    # Insert just past the parent's 8-byte header.
    insert_at = path[-1].offset + 8
    grow = len(meta)
    insert_bytes(fileobj, grow, insert_at)
    fileobj.seek(insert_at)
    fileobj.write(meta)
    # Parent sizes and chunk offsets must reflect the inserted bytes.
    self.__update_parents(fileobj, path, grow)
    self.__update_offsets(fileobj, atoms, grow, insert_at)
def __save_existing(self, fileobj, atoms, path, data):
    """Replace the file's existing ilst atom with the rendered data.

    Adjacent "free" (padding) atoms are absorbed into the writable
    region, the file is grown or shrunk to fit, and parent atom sizes
    and chunk offsets are fixed up afterwards.

    Fix: the previous-sibling check used meta.children[index - 1],
    which for index == 0 silently wrapped to children[-1] (the *last*
    child). If that happened to be a "free" atom, offset/length were
    corrupted and data was written at the wrong position. The lookup
    is now guarded with index > 0 (IndexError can never fire for a
    negative index, so the old try/except was ineffective).
    """
    # Replace the old ilst atom.
    ilst = path.pop()
    offset = ilst.offset
    length = ilst.length

    meta = path[-1]
    index = meta.children.index(ilst)
    # Absorb a "free" padding atom immediately before the ilst.
    if index > 0:
        prev = meta.children[index - 1]
        if prev.name == "free":
            offset = prev.offset
            length += prev.length
    # Absorb a "free" padding atom immediately after the ilst.
    try:
        next = meta.children[index + 1]
    except IndexError:
        pass
    else:
        if next.name == "free":
            length += next.length

    delta = len(data) - length
    if delta > 0 or (delta < 0 and delta > -8):
        # Growing, or shrinking by less than a minimal 8-byte atom
        # header: pad to a clean boundary and grow the file.
        data += self.__pad_ilst(data)
        delta = len(data) - length
        insert_bytes(fileobj, delta, offset)
    elif delta < 0:
        # Shrinking by 8+ bytes: fill the leftover space with a free
        # atom instead of resizing the file.
        data += self.__pad_ilst(data, -delta - 8)
        delta = 0

    fileobj.seek(offset)
    fileobj.write(data)
    self.__update_parents(fileobj, path, delta)
    self.__update_offsets(fileobj, atoms, delta, offset)