예제 #1
0
    def _toBytes(self):
        """Serialize self.data (a rational) as a biased-signed vint numerator
        followed by a minimal vint denominator.

        Raises OverflowError if the numerator does not fit in an 8-byte vint.
        """
        num = self.data.numerator

        for width in range(1, 9):
            bias = 2**(7 * width - 1)
            # A width-byte vint holds [0, 2**(7*width) - 2]; the all-ones
            # pattern is reserved, hence the upper bound of bias - 2.
            if -bias <= num < bias - 1:
                encoded_num = toVint(num + bias, width)
                break
        else:
            raise OverflowError

        return encoded_num + toVint(self.data.denominator)
예제 #2
0
    def encodeEBMLLacing(sizes):
        """Encode frame sizes as EBML lacing data.

        Layout: one count byte, the first size as a plain vint, then each
        successive size as a biased-signed vint delta from its predecessor.

        Raises OverflowError if a delta does not fit in an 8-byte vint.
        """
        parts = [len(sizes).to_bytes(1, "big"), toVint(sizes[0])]

        for prev, cur in zip(sizes, sizes[1:]):
            delta = cur - prev

            for width in range(1, 9):
                half = 2**(7 * width - 1)
                # Signed vint bias for EBML lacing is 2**(7*width - 1) - 1.
                if -half < delta < half:
                    parts.append(toVint(delta + half - 1))
                    break
            else:
                raise OverflowError

        return b"".join(parts)
예제 #3
0
    def _writeVoid(self, size):
        """Write a Void element header occupying exactly 'size' bytes in total.

        The element is one ID byte (0xEC) plus a k-byte size field plus
        (size - 1 - k) bytes of payload; only the header is written here —
        the payload is whatever bytes already follow in the file.

        Raises:
            ValueError: if 'size' < 2 (a Void needs at least ID + size field;
                smaller values previously produced a negative payload length).
            OverflowError: if no 1..8-byte size field can represent the
                payload length (previously fell through silently with k == 8).
        """
        if size < 2:
            raise ValueError("Void element requires at least 2 bytes.")

        for k in range(1, 9):
            # A k-byte vint holds payload lengths up to 128**k - 2
            # (the all-ones pattern is reserved).
            if size - 1 - k < 128**k - 1:
                break
        else:
            raise OverflowError

        self._file.write(b"\xec")
        self._file.write(toVint(size - 1 - k, k))
예제 #4
0
    def sizeEBMLLacing(sizes):
        """Return the byte length of the EBML lacing data for 'sizes'.

        Counts one byte for the frame count, the first size as a plain vint,
        and the minimal signed-vint width for each successive delta.

        Raises OverflowError if a delta does not fit in an 8-byte vint.
        """
        total = 1 + len(toVint(sizes[0]))

        for prev, cur in zip(sizes, sizes[1:]):
            delta = cur - prev

            for width in range(1, 9):
                if -2**(7 * width - 1) < delta < 2**(7 * width - 1):
                    total += width
                    break
            else:
                raise OverflowError

        return total
예제 #5
0
    def _toBytes(self):
        """Serialize this block element body.

        Layout: track-number vint, 16-bit signed local pts, one flags byte,
        optional lacing data, then the packet payloads (compressed when the
        track specifies a compression).
        """
        trackEntry = self.trackEntry

        if trackEntry is None:
            compression = None

        else:
            compression = trackEntry.compression

            # Make every packet agree with the track-level compression setting.
            for pkt in self.packets:
                if pkt.compression != compression:
                    pkt.compression = compression

        trackNumber = toVint(self.trackNumber)
        localpts = self.localpts.to_bytes(2, "big", signed=True)

        if len(self.packets) <= 1:
            # Single packet: lacing bits cleared, no lacing data.
            flags = (self.keyFrame << 7 | self.invisible << 3
                     | self.discardable << 0).to_bytes(1, "big")
            lacingdata = b""

        else:
            flags = (self.keyFrame << 7 | self.invisible << 3
                     | self.lacing << 1 | self.discardable << 0).to_bytes(
                         1, "big")

            if self.lacing == 0b00:
                raise ValueError(
                    "Multiple packets requires lacing set to non-zero value.")

            # NOTE(review): sizes come from pkt.size even when no compression
            # applies, while the payload below is pkt.data — presumably
            # pkt.size tracks pkt.compression; confirm against the packet class.
            sizes = [pkt.size for pkt in self.packets]

            if self.lacing == 0b10:
                # Fixed-size lacing: every frame must have the same length.
                if min(sizes) != max(sizes):
                    raise ValueError(
                        "Packets with different sizes incompatible with lacing=0b10 (fixed-size lacing)."
                    )

                lacingdata = (len(self.packets) - 1).to_bytes(1, "big")

            elif self.lacing == 0b11:
                # EBML lacing covers all frame sizes except the last.
                lacingdata = self.encodeEBMLLacing(sizes[:-1])

            elif self.lacing == 0b01:
                # Xiph lacing covers all frame sizes except the last.
                lacingdata = self.encodeXiphLacing(sizes[:-1])

        if compression is None:
            payload = b"".join(pkt.data for pkt in self.packets)
        else:
            payload = b"".join(pkt.zdata for pkt in self.packets)

        return trackNumber + localpts + flags + lacingdata + payload
예제 #6
0
    def calcOverhead(self):
        """Estimate the container overhead in bytes (everything outside the
        block payloads)."""
        total = 40 + 12 + 128  # EBML Head, Segment header, Seek Head + Void

        # Tracks: element ID + size vint + each TrackEntry, plus 128 bytes slack.
        trackSizes = self.lastoverhead.get("trackEntrySizes", ())
        total += len(matroska.Tracks.ebmlID)
        total += len(toVint(sum(trackSizes)))
        total += sum(trackSizes)
        total += 128

        if self.mkvfile:
            # 'self.mkvfile' should never be None while a transcode is running,
            # but if this method is called outside self.transcode(), fall back
            # to sizes recorded by a previous transcode (see 'else:' branch),
            # provided self.loadOverhead() was called beforehand.
            total += self.mkvfile.segment.info.size()

            if len(self.mkvfile.chapters):
                total += self.mkvfile.chapters.size()

            if len(self.mkvfile.attachments):
                total += self.mkvfile.attachments.size()

            if len(self.mkvfile.tags):
                total += self.mkvfile.tags.size()

        else:
            for key in ("infoSize", "chaptersSize",
                        "attachmentsSize", "tagsSize"):
                total += self.lastoverhead.get(key, 0)

        # Per-cluster cost: Cluster ID + Timestamp ID + 4 bytes of data.
        overheadPerCluster = (len(matroska.cluster.Cluster.ebmlID) +
                              len(matroska.cluster.Timestamp.ebmlID) + 4)

        # Fall back to an estimate of one cluster per 32.768 s of duration.
        total += overheadPerCluster * self.lastoverhead.get(
            "clusterCount", int(self.duration / 32.768 + 1))

        return total
예제 #7
0
    def _size(self):
        """Return the byte length of this block's encoded body
        (header + lacing data + packet payloads)."""
        trackEntry = self.trackEntry

        if trackEntry is None:
            pktsizes = [len(pkt.data) for pkt in self.packets]

        else:
            compression = trackEntry.compression

            # Make every packet agree with the track-level compression setting.
            for packet in self.packets:
                if packet.compression != compression:
                    packet.compression = compression

            pktsizes = [pkt.size for pkt in self.packets]

        # Fixed header parts: track-number vint, 2-byte pts, 1 flags byte.
        headerSize = len(toVint(self.trackNumber)) + 2 + 1

        if len(self.packets) <= 1:
            lacingdataSize = 0

        else:
            if self.lacing == 0:
                raise ValueError(
                    "Multiple packets requires lacing set to non-zero value.")

            if self.lacing == 0b10:
                if min(pktsizes) != max(pktsizes):
                    raise ValueError(
                        "Packets with different sizes incompatible with lacing=0b10 (fixed-size lacing)."
                    )

                # Fixed-size lacing stores only the frame-count byte.
                lacingdataSize = 1

            elif self.lacing == 0b11:
                lacingdataSize = self.sizeEBMLLacing(pktsizes[:-1])

            elif self.lacing == 0b01:
                lacingdataSize = self.sizeXiphLacing(pktsizes[:-1])

        return headerSize + lacingdataSize + sum(pktsizes)
예제 #8
0
    def close(self):
        """Writes Void elements in unallocated space and closes file."""
        if self._modified and self._file.writable():
            # Known children as a (startOffset -> endOffset) mapping, sorted
            # by start offset.
            L = sorted(self._knownChildren.items())
            # Walk consecutive child pairs (with a (0, 0) sentinel in front)
            # and fill every gap between one child's end and the next child's
            # start with a Void element.
            for (s1, e1), (s2, e2) in zip([(0, 0)] + L[:-1], L):
                if e1 < s2:
                    self.seek(e1)
                    self._writeVoid(s2 - e1)

            if len(L):
                # End offset of the last child = end of this element's data.
                (s, e) = max(L)

            else:
                # No children: the data region is empty.
                e = 0

            # Drop anything past the last child, then rewrite the size field.
            self.seek(e)
            self._file.truncate()
            # NOTE(review): negative offset — presumably self.seek() treats it
            # as relative to this element's data start, landing on the
            # _sizesize-byte size field just before the data; confirm against
            # the seek() implementation.
            self.seek(-self._sizesize)
            self._file.write(toVint(e, self._sizesize))
        self._file.close()
예제 #9
0
 def _size(self):
     """Total byte length of self.data with each entry encoded as a vint."""
     total = 0
     for value in self.data:
         total += len(toVint(value))
     return total
예제 #10
0
 def _toBytes(self):
     """Concatenate the vint encodings of every entry in self.data."""
     parts = [toVint(value) for value in self.data]
     return b"".join(parts)
예제 #11
0
 def _size(self):
     """Byte length of the array element: dtype and shape children, the
     data child's ID + size vint, and the raw array bytes."""
     nbytes = self.data.size * self.data.itemsize
     dataHeader = len(EBMLArrayData.ebmlID) + len(toVint(nbytes))
     return self.dtype.size() + self.shape.size() + dataHeader + nbytes
예제 #12
0
    def iterBlocks(self, start_pts=0, startPosition=0, trackNumber=None):
        """
        Create an iterator that yields blocks contained in cluster.

        'start_pts' (in seconds): Starts iteration at first block whose presentation timestamp is ≥ start_pts
        'startPosition' (in bytes): Starts demuxing at this offset inside cluster. Child elements that begin
            before this offset are skipped.
        'trackNumber': Filters by trackNumber. Can be either an integer or list/tuple of integers.
        """

        clusterData = self.parent.readbytes(self.offsetInSegment, self.dataSize)
        timestampScale = self.segment.info.timestampScale

        def excluded(trackNumber_):
            # True when the 'trackNumber' filter rules this track out.
            if isinstance(trackNumber, (tuple, list, set)):
                return trackNumber_ not in trackNumber
            if isinstance(trackNumber, int):
                return trackNumber_ != trackNumber
            return False

        def tooEarly(localpts):
            # True when the block's absolute pts (timestampScale units → ns)
            # falls before 'start_pts' (seconds → ns).
            return (self.timestamp +
                    localpts) * timestampScale < start_pts * 10**9

        # Loop variable renamed from 'data' — it previously shadowed the
        # whole-cluster buffer passed to parseElements.
        for offset, ebmlID, sizesize, childData in parseElements(clusterData):
            if offset < startPosition:
                continue

            if ebmlID == SimpleBlock.ebmlID:
                (trackNumber_, localpts, keyframe, invisible, discardable,
                 lacing, data_) = SimpleBlock.parsepkt(childData)

                # NOTE(review): defaultDuration is unused, but the lookup also
                # validates trackNumber_ against the track table (raises
                # KeyError for unknown tracks) — kept; confirm intent.
                defaultDuration = self.segment.tracks.byTrackNumber[
                    trackNumber_].defaultDuration or 0

                if tooEarly(localpts) or excluded(trackNumber_):
                    continue

                # Rebuild the full element (ID + size vint + body) to parse it
                # into a SimpleBlock child of this cluster.
                block = SimpleBlock.fromBytes(
                    ebmlID + toVint(len(childData)) + childData, parent=self)
                block.offsetInParent = offset
                block.dataOffsetInParent = offset + len(ebmlID) + sizesize
                block.dataSize = len(childData)
                yield block

            elif ebmlID == BlockGroup.ebmlID:
                (trackNumber_, localpts, duration, keyframe, invisible,
                 discardable, lacing, data_, referencePriority,
                 referenceBlocks) = BlockGroup.parsepkt(childData)

                if tooEarly(localpts) or excluded(trackNumber_):
                    continue

                block = BlockGroup.fromBytes(
                    ebmlID + toVint(len(childData)) + childData, parent=self)
                block.offsetInParent = offset
                block.dataOffsetInParent = offset + len(ebmlID) + sizesize
                block.dataSize = len(childData)
                yield block