Example #1
    def __save_existing(self, fileobj, atoms, path, ilst_data, padding_func):
        # Replace the old ilst atom.
        ilst = path[-1]
        offset = ilst.offset
        length = ilst.length

        # Use adjacent free atom if there is one
        free = _find_padding(path)
        if free is not None:
            offset = min(offset, free.offset)
            length += free.length

        # Always add a padding atom to make things easier
        padding_overhead = len(Atom.render(b"free", b""))
        content_size = get_size(fileobj) - (offset + length)
        padding_size = length - (len(ilst_data) + padding_overhead)
        info = PaddingInfo(padding_size, content_size)
        new_padding = info._get_padding(padding_func)
        # Limit padding size so we can be sure the free atom overhead is as we
        # calculated above (see Atom.render)
        new_padding = min(0xFFFFFFFF, new_padding)

        ilst_data += Atom.render(b"free", b"\x00" * new_padding)

        resize_bytes(fileobj, length, len(ilst_data), offset)
        delta = len(ilst_data) - length

        fileobj.seek(offset)
        fileobj.write(ilst_data)
        self.__update_parents(fileobj, path[:-1], delta)
        self.__update_offsets(fileobj, atoms, delta, offset)
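The pattern above is the core resize_bytes idiom in mutagen's MP4 writer: work out how large the rewritten region has to be, grow or shrink it in place, then overwrite it. Below is a minimal sketch of the same resize-then-overwrite sequence against a throwaway temp file; it assumes resize_bytes can be imported from mutagen._util, and the offsets and payloads are invented for illustration.

import tempfile

from mutagen._util import resize_bytes

with tempfile.TemporaryFile() as fileobj:
    fileobj.write(b"HEAD" + b"OLDTAGDATA" + b"AUDIO")
    old_offset, old_length = 4, len(b"OLDTAGDATA")
    new_payload = b"NEWTAG" + b"\x00" * 8  # new tag data plus explicit padding

    # Grow or shrink the old region so the new payload fits exactly,
    # then overwrite it in place; the trailing audio bytes are preserved.
    resize_bytes(fileobj, old_length, len(new_payload), old_offset)
    fileobj.seek(old_offset)
    fileobj.write(new_payload)

    fileobj.seek(0)
    assert fileobj.read() == b"HEAD" + new_payload + b"AUDIO"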
Example #2
    def _save(self, filething, metadata_blocks, deleteid3, padding):
        f = StrictFileObject(filething.fileobj)
        header = self.__check_header(f, filething.name)
        audio_offset = self.__find_audio_offset(f)
        # "fLaC" and maybe ID3
        available = audio_offset - header

        # Delete ID3v2
        if deleteid3 and header > 4:
            available += header - 4
            header = 4

        content_size = get_size(f) - audio_offset
        assert content_size >= 0
        data = MetadataBlock._writeblocks(metadata_blocks, available,
                                          content_size, padding)
        data_size = len(data)

        resize_bytes(filething.fileobj, available, data_size, header)
        f.seek(header - 4)
        f.write(b"fLaC")
        f.write(data)

        # Delete ID3v1
        if deleteid3:
            try:
                f.seek(-128, 2)
            except IOError:
                pass
            else:
                if f.read(3) == b"TAG":
                    f.seek(-128, 2)
                    f.truncate()
Example #3
    def resize(self, new_data_size):
        """Resize the file and update the chunk sizes"""

        padding = new_data_size % 2
        resize_bytes(self._fileobj, self.data_size + self.padding(),
                     new_data_size + padding, self.data_offset)
        size_diff = new_data_size - self.data_size
        self._update_size(size_diff)
        self._fileobj.flush()
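The new_data_size % 2 term keeps the chunk data word-aligned: RIFF/AIFF-style chunks are padded to an even length, so an odd-sized payload reserves one extra byte. A quick worked check, with purely illustrative numbers:

new_data_size = 7
padding = new_data_size % 2          # 1 -> one pad byte is reserved
reserved = new_data_size + padding   # 8 bytes of space end up on disk
assert reserved % 2 == 0             # the chunk stays word-aligned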
Example #4
    def save(self, filething, deleteid3=False, padding=None):
        """Save metadata blocks to a file.

        Args:
            filething (filething)
            deleteid3 (bool): delete id3 tags while at it
            padding (PaddingFunction)

        If no filename is given, the one most recently loaded is used.
        """

        f = StrictFileObject(filething.fileobj)
        header = self.__check_header(f, filething.name)
        audio_offset = self.__find_audio_offset(f)
        # "fLaC" and maybe ID3
        available = audio_offset - header

        # Delete ID3v2
        if deleteid3 and header > 4:
            available += header - 4
            header = 4

        content_size = get_size(f) - audio_offset
        assert content_size >= 0
        data = MetadataBlock._writeblocks(
            self.metadata_blocks, available, content_size, padding)
        data_size = len(data)

        resize_bytes(filething.fileobj, available, data_size, header)
        f.seek(header - 4)
        f.write(b"fLaC")
        f.write(data)

        # Delete ID3v1
        if deleteid3:
            try:
                f.seek(-128, 2)
            except IOError:
                pass
            else:
                if f.read(3) == b"TAG":
                    f.seek(-128, 2)
                    f.truncate()
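The docstring above describes the public entry point; here is a hedged sketch of how a caller might drive it with a padding callback. "song.flac" is a placeholder path, and the callback contract (it receives a mutagen.PaddingInfo and returns the number of padding bytes to write) follows mutagen's documented PaddingFunction.

from mutagen.flac import FLAC


def tight_padding(info):
    # info.padding is the padding mutagen would leave by default; cap it
    # at 1 KiB and never return a negative amount.
    return max(0, min(info.padding, 1024))


audio = FLAC("song.flac")
audio["title"] = "An example"
audio.save(padding=tight_padding)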
Example #5
    def save(self, filething, deleteid3=False, padding=None):
        """Save metadata blocks to a file.

        Args:
            filething (filething)
            deleteid3 (bool): delete id3 tags while at it
            padding (PaddingFunction)

        If no filename is given, the one most recently loaded is used.
        """

        f = StrictFileObject(filething.fileobj)
        header = self.__check_header(f, filething.name)
        audio_offset = self.__find_audio_offset(f)
        # "fLaC" and maybe ID3
        available = audio_offset - header

        # Delete ID3v2
        if deleteid3 and header > 4:
            available += header - 4
            header = 4

        content_size = get_size(f) - audio_offset
        assert content_size >= 0
        data = MetadataBlock._writeblocks(self.metadata_blocks, available,
                                          content_size, padding)
        data_size = len(data)

        resize_bytes(filething.fileobj, available, data_size, header)
        f.seek(header - 4)
        f.write(b"fLaC")
        f.write(data)

        # Delete ID3v1
        if deleteid3:
            try:
                f.seek(-128, 2)
            except IOError:
                pass
            else:
                if f.read(3) == b"TAG":
                    f.seek(-128, 2)
                    f.truncate()
Example #6
    def save(self, filename=None, deleteid3=False, padding=None):
        """Save metadata blocks to a file.

        If no filename is given, the one most recently loaded is used.
        """

        if filename is None:
            filename = self.filename

        with open(filename, 'rb+') as f:
            header = self.__check_header(f)
            audio_offset = self.__find_audio_offset(f)
            # "fLaC" and maybe ID3
            available = audio_offset - header

            # Delete ID3v2
            if deleteid3 and header > 4:
                available += header - 4
                header = 4

            content_size = get_size(f) - audio_offset
            assert content_size >= 0
            data = MetadataBlock._writeblocks(self.metadata_blocks, available,
                                              content_size, padding)
            data_size = len(data)

            resize_bytes(f, available, data_size, header)
            f.seek(header - 4)
            f.write(b"fLaC")
            f.write(data)

            # Delete ID3v1
            if deleteid3:
                try:
                    f.seek(-128, 2)
                except IOError:
                    pass
                else:
                    if f.read(3) == b"TAG":
                        f.seek(-128, 2)
                        f.truncate()
Example #7
    def save(self, filename=None, deleteid3=False, padding=None):
        """Save metadata blocks to a file.

        If no filename is given, the one most recently loaded is used.
        """

        if filename is None:
            filename = self.filename

        with open(filename, 'rb+') as f:
            header = self.__check_header(f)
            audio_offset = self.__find_audio_offset(f)
            # "fLaC" and maybe ID3
            available = audio_offset - header

            # Delete ID3v2
            if deleteid3 and header > 4:
                available += header - 4
                header = 4

            content_size = get_size(f) - audio_offset
            assert content_size >= 0
            data = MetadataBlock._writeblocks(
                self.metadata_blocks, available, content_size, padding)
            data_size = len(data)

            resize_bytes(f, available, data_size, header)
            f.seek(header - 4)
            f.write(b"fLaC")
            f.write(data)

            # Delete ID3v1
            if deleteid3:
                try:
                    f.seek(-128, 2)
                except IOError:
                    pass
                else:
                    if f.read(3) == b"TAG":
                        f.seek(-128, 2)
                        f.truncate()
Example #8
    def save(self, filething, padding=None):
        """save(filething=None, padding=None)

        Save tag changes back to the loaded file.

        Args:
            filething (filething)
            padding (PaddingFunction)
        Raises:
            mutagen.MutagenError
        """

        # Move attributes to the right objects
        self.to_content_description = {}
        self.to_extended_content_description = {}
        self.to_metadata = {}
        self.to_metadata_library = []
        for name, value in self.tags:
            library_only = (value.data_size() > 0xFFFF or value.TYPE == GUID)
            can_cont_desc = value.TYPE == UNICODE

            if library_only or value.language is not None:
                self.to_metadata_library.append((name, value))
            elif value.stream is not None:
                if name not in self.to_metadata:
                    self.to_metadata[name] = value
                else:
                    self.to_metadata_library.append((name, value))
            elif name in ContentDescriptionObject.NAMES:
                if name not in self.to_content_description and can_cont_desc:
                    self.to_content_description[name] = value
                else:
                    self.to_metadata_library.append((name, value))
            else:
                if name not in self.to_extended_content_description:
                    self.to_extended_content_description[name] = value
                else:
                    self.to_metadata_library.append((name, value))

        # Add missing objects
        header = self._header
        if header.get_child(ContentDescriptionObject.GUID) is None:
            header.objects.append(ContentDescriptionObject())
        if header.get_child(ExtendedContentDescriptionObject.GUID) is None:
            header.objects.append(ExtendedContentDescriptionObject())
        header_ext = header.get_child(HeaderExtensionObject.GUID)
        if header_ext is None:
            header_ext = HeaderExtensionObject()
            header.objects.append(header_ext)
        if header_ext.get_child(MetadataObject.GUID) is None:
            header_ext.objects.append(MetadataObject())
        if header_ext.get_child(MetadataLibraryObject.GUID) is None:
            header_ext.objects.append(MetadataLibraryObject())

        fileobj = filething.fileobj
        # Render to file
        old_size = header.parse_size(fileobj)[0]
        data = header.render_full(self, fileobj, old_size, padding)
        size = len(data)
        resize_bytes(fileobj, old_size, size, 0)
        fileobj.seek(0)
        fileobj.write(data)
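The ASF variant accepts the same kind of padding callback. A hedged usage sketch: "clip.wma" is a placeholder path, and the assumption that plain strings get wrapped into ASF attributes follows mutagen's documented tag handling.

from mutagen.asf import ASF

audio = ASF("clip.wma")
audio["WM/AlbumArtist"] = ["Example Artist"]
audio.save(padding=lambda info: 0)  # drop all padding when rewriting the header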
Example #9
    def replace(cls, fileobj, old_pages, new_pages):
        """Replace old_pages with new_pages within fileobj.

        old_pages must have come from reading fileobj originally.
        new_pages are assumed to have the 'same' data as old_pages,
        and so the serial and sequence numbers will be copied, as will
        the flags for the first and last pages.

        fileobj will be resized and pages renumbered as necessary. As
        such, it must be opened r+b or w+b.
        """

        if not len(old_pages) or not len(new_pages):
            raise ValueError("empty pages list not allowed")

        # Number the new pages starting from the first old page.
        first = old_pages[0].sequence
        for page, seq in izip(new_pages, xrange(first,
                                                first + len(new_pages))):
            page.sequence = seq
            page.serial = old_pages[0].serial

        new_pages[0].first = old_pages[0].first
        new_pages[0].last = old_pages[0].last
        new_pages[0].continued = old_pages[0].continued

        new_pages[-1].first = old_pages[-1].first
        new_pages[-1].last = old_pages[-1].last
        new_pages[-1].complete = old_pages[-1].complete
        if not new_pages[-1].complete and len(new_pages[-1].packets) == 1:
            new_pages[-1].position = -1

        new_data = [cls.write(p) for p in new_pages]

        # Add dummy data or merge the remaining data together so multiple
        # new pages replace an old one
        pages_diff = len(old_pages) - len(new_data)
        if pages_diff > 0:
            new_data.extend([b""] * pages_diff)
        elif pages_diff < 0:
            new_data[pages_diff - 1:] = [b"".join(new_data[pages_diff - 1:])]

        # Replace pages one by one. If the sizes match no resize happens.
        offset_adjust = 0
        new_data_end = None
        assert len(old_pages) == len(new_data)
        for old_page, data in izip(old_pages, new_data):
            offset = old_page.offset + offset_adjust
            data_size = len(data)
            resize_bytes(fileobj, old_page.size, data_size, offset)
            fileobj.seek(offset, 0)
            fileobj.write(data)
            new_data_end = offset + data_size
            offset_adjust += (data_size - old_page.size)

        # Finally, if there's any discrepancy in length, we need to
        # renumber the pages for the logical stream.
        if len(old_pages) != len(new_pages):
            fileobj.seek(new_data_end, 0)
            serial = new_pages[-1].serial
            sequence = new_pages[-1].sequence + 1
            cls.renumber(fileobj, serial, sequence)
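The docstring spells out the contract of replace(): new_pages must carry the same packet data, and the method resizes and renumbers as needed. Below is a hedged sketch of the round trip mutagen's Ogg formats typically perform, namely read the header pages, rebuild them from their packets, then swap them in. "file.ogg" is a placeholder, and reading only the first page is a simplification; real code collects every page of the packet it rewrites.

from mutagen.ogg import OggPage

with open("file.ogg", "rb+") as fileobj:
    old_pages = [OggPage(fileobj)]           # first page of the stream
    packets = OggPage.to_packets(old_pages)  # raw packet data from those pages
    # ... modify the packet payloads here ...
    new_pages = OggPage.from_packets(packets, old_pages[0].sequence)
    OggPage.replace(fileobj, old_pages, new_pages)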
Example #10
    def replace(cls, fileobj, old_pages, new_pages):
        """Replace old_pages with new_pages within fileobj.

        old_pages must have come from reading fileobj originally.
        new_pages are assumed to have the 'same' data as old_pages,
        and so the serial and sequence numbers will be copied, as will
        the flags for the first and last pages.

        fileobj will be resized and pages renumbered as necessary. As
        such, it must be opened r+b or w+b.
        """

        if not len(old_pages) or not len(new_pages):
            raise ValueError("empty pages list not allowed")

        # Number the new pages starting from the first old page.
        first = old_pages[0].sequence
        for page, seq in izip(new_pages,
                              xrange(first, first + len(new_pages))):
            page.sequence = seq
            page.serial = old_pages[0].serial

        new_pages[0].first = old_pages[0].first
        new_pages[0].last = old_pages[0].last
        new_pages[0].continued = old_pages[0].continued

        new_pages[-1].first = old_pages[-1].first
        new_pages[-1].last = old_pages[-1].last
        new_pages[-1].complete = old_pages[-1].complete
        if not new_pages[-1].complete and len(new_pages[-1].packets) == 1:
            new_pages[-1].position = -1

        new_data = [cls.write(p) for p in new_pages]

        # Add dummy data or merge the remaining data together so multiple
        # new pages replace an old one
        pages_diff = len(old_pages) - len(new_data)
        if pages_diff > 0:
            new_data.extend([b""] * pages_diff)
        elif pages_diff < 0:
            new_data[pages_diff - 1:] = [b"".join(new_data[pages_diff - 1:])]

        # Replace pages one by one. If the sizes match no resize happens.
        offset_adjust = 0
        new_data_end = None
        assert len(old_pages) == len(new_data)
        for old_page, data in izip(old_pages, new_data):
            offset = old_page.offset + offset_adjust
            data_size = len(data)
            resize_bytes(fileobj, old_page.size, data_size, offset)
            fileobj.seek(offset, 0)
            fileobj.write(data)
            new_data_end = offset + data_size
            offset_adjust += (data_size - old_page.size)

        # Finally, if there's any discrepancy in length, we need to
        # renumber the pages for the logical stream.
        if len(old_pages) != len(new_pages):
            fileobj.seek(new_data_end, 0)
            serial = new_pages[-1].serial
            sequence = new_pages[-1].sequence + 1
            cls.renumber(fileobj, serial, sequence)
Example #11
    def save(self, filename=None, padding=None):
        """Save tag changes back to the loaded file.

        :param padding: A callback which returns the amount of padding to use.
            See :class:`mutagen.PaddingInfo`

        :raises mutagen.asf.error: In case saving fails
        """

        if filename is not None and filename != self.filename:
            raise ValueError("saving to another file not supported atm")

        # Move attributes to the right objects
        self.to_content_description = {}
        self.to_extended_content_description = {}
        self.to_metadata = {}
        self.to_metadata_library = []
        for name, value in self.tags:
            library_only = (value.data_size() > 0xFFFF or value.TYPE == GUID)
            can_cont_desc = value.TYPE == UNICODE

            if library_only or value.language is not None:
                self.to_metadata_library.append((name, value))
            elif value.stream is not None:
                if name not in self.to_metadata:
                    self.to_metadata[name] = value
                else:
                    self.to_metadata_library.append((name, value))
            elif name in ContentDescriptionObject.NAMES:
                if name not in self.to_content_description and can_cont_desc:
                    self.to_content_description[name] = value
                else:
                    self.to_metadata_library.append((name, value))
            else:
                if name not in self.to_extended_content_description:
                    self.to_extended_content_description[name] = value
                else:
                    self.to_metadata_library.append((name, value))

        # Add missing objects
        header = self._header
        if header.get_child(ContentDescriptionObject.GUID) is None:
            header.objects.append(ContentDescriptionObject())
        if header.get_child(ExtendedContentDescriptionObject.GUID) is None:
            header.objects.append(ExtendedContentDescriptionObject())
        header_ext = header.get_child(HeaderExtensionObject.GUID)
        if header_ext is None:
            header_ext = HeaderExtensionObject()
            header.objects.append(header_ext)
        if header_ext.get_child(MetadataObject.GUID) is None:
            header_ext.objects.append(MetadataObject())
        if header_ext.get_child(MetadataLibraryObject.GUID) is None:
            header_ext.objects.append(MetadataLibraryObject())

        # Render to file
        with open(self.filename, "rb+") as fileobj:
            old_size = header.parse_size(fileobj)[0]
            data = header.render_full(self, fileobj, old_size, padding)
            size = len(data)
            resize_bytes(fileobj, old_size, size, 0)
            fileobj.seek(0)
            fileobj.write(data)
Example #12
    def test_resize_nothing(self):
        o = self.file(b'abcd')
        resize_bytes(o, 2, 2, 1)
        self.assertEqual(self.read(o), b"abcd")
Example #13
    def test_resize_increase(self):
        o = self.file(b'abcd')
        resize_bytes(o, 2, 4, 1)
        self.assertEqual(self.read(o), b"abcd\x00d")
Example #14
    def resize(self, new_data_size):
        """Resize the file and update the chunk sizes"""

        resize_bytes(self.__fileobj, self.data_size, new_data_size,
                     self.data_offset)
        self._update_size(new_data_size)
Example #15
    def test_resize_decrease(self):
        with self.file(b'abcd') as o:
            resize_bytes(o, 2, 1, 1)
            self.assertEqual(self.read(o), b"abd")
Example #16
    def test_resize_nothing(self):
        with self.file(b'abcd') as o:
            resize_bytes(o, 2, 2, 1)
            self.assertEqual(self.read(o), b"abcd")
Example #17
    def test_resize_increase(self):
        with self.file(b'abcd') as o:
            resize_bytes(o, 2, 4, 1)
            self.assertEqual(self.read(o), b"abcd\x00d")
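The tests pin down the semantics every example above relies on: the old_size-byte region at offset is grown or shrunk at its end, a grow only makes room (callers are expected to overwrite it), and a shrink drops bytes from the end of the region. A brief recap against a plain temporary file, assuming resize_bytes can be imported from mutagen._util:

import tempfile

from mutagen._util import resize_bytes

with tempfile.TemporaryFile() as f:
    f.write(b"abcd")
    # Shrink the 2-byte region at offset 1 down to one byte: "c" is dropped.
    resize_bytes(f, 2, 1, 1)
    f.seek(0)
    assert f.read() == b"abd"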