Example #1
0
def test_round_trip_rss():
    """Serialize an RSS2 feed to XML and verify it parses back equal."""
    build_time = datetime.utcnow().replace(microsecond=0)
    items = [
        rss_gen.RSSItem(
            title='Item Title',
            link='http://example.com/1/',
            description='item description',
            pubDate=build_time - timedelta(seconds=5),
        ),
        rss_gen.RSSItem(
            title='Second Item Title',
            link='http://example.com/2/',
            description='another item description',
            pubDate=build_time - timedelta(seconds=10),
        ),
    ]
    feed = rss_gen.RSS2(
        title='Feed Title',
        link='http://example.com/link/',
        description='feed description',
        lastBuildDate=build_time,
        items=items,
    )
    xml_buffer = BytesIO()
    feed.write_xml(xml_buffer, encoding='utf-8')
    xml_buffer.flush()
    xml_buffer.seek(0)
    assert_feeds_equal(feed, parse_rss(xml_buffer))
Example #2
0
class Collector(object):
    """Collector for map and reduce output values.

    Serializes each record with an Avro DatumWriter into an in-memory
    buffer and forwards the encoded payload to the parent process
    through ``outputClient``.
    """

    def __init__(self, scheme=None, outputClient=None):
        """
        Parameters
        ----------
        scheme - The scheme for the datums to output - can be a json string
               - or an instance of Schema
        outputClient - The output client used to send messages to the parent

        Raises
        ------
        ValueError - if outputClient is None.
        """
        if not isinstance(scheme, schema.Schema):
            scheme = schema.parse(scheme)

        if outputClient is None:
            raise ValueError("output client can't be none.")

        self.scheme = scheme
        self.buff = StringIO()
        self.encoder = avio.BinaryEncoder(self.buff)

        self.datum_writer = avio.DatumWriter(writers_schema=self.scheme)
        self.outputClient = outputClient

    def collect(self, record, partition=None):
        """Collect a map or reduce output value.

        Parameters
        ----------
        record - The record to write
        partition - Indicates the partition for a pre-partitioned map output
                  - currently not supported
        """
        # BUG FIX: reset the position *before* truncating. truncate(0)
        # shrinks the buffer but does not move the stream position, so the
        # original order (truncate, then write) made the encoder write at
        # the old offset, padding the gap with garbage after the first call.
        self.buff.seek(0)
        self.buff.truncate(0)

        self.datum_writer.write(record, self.encoder)
        self.buff.flush()
        # Rewind so read() returns the freshly encoded datum.
        self.buff.seek(0)

        if partition is None:
            self.outputClient.request("output", {"datum": self.buff.read()})
        else:
            self.outputClient.request("outputPartitioned", {
                "datum": self.buff.read(),
                "partition": partition
            })
Example #3
0
class UniversalBytesIO(object):
    """A write-only, in-memory byte stream that encodes text on write.

    Wraps a ``BytesIO`` (or a caller-supplied container) and converts
    every written value to bytes using ``charset`` (defaulting to
    ``settings.DEFAULT_CHARSET``).
    """

    def __init__(self, container=None, charset=None):
        self.charset = charset or settings.DEFAULT_CHARSET
        self._container = BytesIO() if container is None else container

    # These methods partially implement the file-like object interface.
    # See https://docs.python.org/3/library/io.html#io.IOBase

    def close(self):
        self._container.close()

    def write(self, content):
        self._container.write(self.make_bytes(content))

    def flush(self):
        self._container.flush()

    def tell(self):
        return self._container.tell()

    def readable(self):
        return False

    def seekable(self):
        return False

    def writable(self):
        return True

    def writelines(self, lines):
        for line in lines:
            self.write(line)

    def make_bytes(self, value):
        """Turn a value into a bytestring encoded in the output charset."""
        if isinstance(value, bytes):
            # Already bytes: wrapping in bytes() again only made a copy.
            return value
        if isinstance(value, six.text_type):
            # encode() already returns bytes; the extra bytes() call was
            # a redundant copy.
            return value.encode(self.charset)

        # Handle non-string types
        return force_bytes(value, self.charset)

    def get_string_value(self):
        return self._container.getvalue().decode(self.charset)

    def getvalue(self):
        return self._container.getvalue()

    if sys.version_info[0:2] < (3, 5):

        def seek(self, *args, **kwargs):
            # No-op shim for Python < 3.5 callers; the stream reports
            # seekable() == False.
            pass
Example #4
0
class UniversalBytesIO(object):
    """In-memory, write-only byte stream; written text is encoded with
    the configured charset before hitting the underlying container."""

    def __init__(self, container=None, charset=None):
        self.charset = charset or settings.DEFAULT_CHARSET
        if container is None:
            self._container = BytesIO()
        else:
            self._container = container

    # Partial file-like object interface (io.IOBase).

    def close(self):
        self._container.close()

    def write(self, content):
        encoded = self.make_bytes(content)
        self._container.write(encoded)

    def flush(self):
        self._container.flush()

    def tell(self):
        return self._container.tell()

    def readable(self):
        return False

    def seekable(self):
        return False

    def writable(self):
        return True

    def writelines(self, lines):
        for chunk in lines:
            self.write(chunk)

    def make_bytes(self, value):
        """Turn a value into a bytestring encoded in the output charset."""
        if isinstance(value, bytes):
            return bytes(value)
        if isinstance(value, six.text_type):
            return bytes(value.encode(self.charset))
        # Non-string values are coerced through force_bytes.
        return force_bytes(value, self.charset)

    def get_string_value(self):
        raw = self._container.getvalue()
        return raw.decode(self.charset)

    def getvalue(self):
        return self._container.getvalue()

    if sys.version_info[0:2] < (3, 5):
        def seek(self, *args, **kwargs):
            # No-op: the stream is not seekable.
            pass
Example #5
0
    def process(self, request, ids):
        """Render delivery PDFs for the shipments of the given order ids.

        Returns a single PDF response when exactly one shipment matches,
        a zip archive when several do, or a JsonResponse with errors.
        """
        if isinstance(ids, six.string_types) and ids == "all":
            return JsonResponse(
                {"error": ugettext("Selecting all is not supported.")})
        shipment_ids = set(
            Shipment.objects.filter(order_id__in=ids).values_list("id",
                                                                  flat=True))
        if len(shipment_ids) == 1:
            try:
                # BUG FIX: the PDF must be fetched with the *shipment* id;
                # the previous code passed ids[0], which is an order id.
                shipment_id = shipment_ids.pop()
                response = get_delivery_pdf(request, shipment_id)
                response['Content-Disposition'] = (
                    'attachment; filename=shipment_%s_delivery.pdf'
                    % shipment_id)
                return response
            except Exception as e:
                msg = e.message if hasattr(e, "message") else e
                return JsonResponse({"error": force_text(msg)})
        buff = BytesIO()
        archive = zipfile.ZipFile(buff, 'w', zipfile.ZIP_DEFLATED)

        added = 0
        errors = []
        for shipment_id in shipment_ids:
            try:
                pdf_file = get_delivery_pdf(request, shipment_id)
                filename = "shipment_%d_delivery.pdf" % shipment_id
                archive.writestr(filename, pdf_file.content)
                added += 1
            except Exception as e:
                # Collect per-shipment failures; keep going with the rest.
                msg = e.message if hasattr(e, "message") else e
                errors.append(force_text(msg))
                continue
        if added:
            archive.close()
            buff.flush()
            ret_zip = buff.getvalue()
            buff.close()
            response = HttpResponse(content_type='application/zip')
            response['Content-Disposition'] = (
                'attachment; filename=order_delivery_pdf.zip')
            response.write(ret_zip)
            return response
        return JsonResponse({"errors": errors})
Example #6
0
    def process(self, request, ids):
        """Return order confirmation PDFs: one PDF, a zip, or error JSON."""
        if isinstance(ids, six.string_types) and ids == "all":
            return JsonResponse(
                {"error": ugettext("Error! Selecting all is not supported.")},
                status=400)
        if len(ids) == 1:
            only_id = ids[0]
            try:
                response = get_confirmation_pdf(request, only_id)
                disposition = (
                    'attachment; filename=order_%s_confirmation.pdf' % only_id)
                response['Content-Disposition'] = disposition
                return response
            except Exception as e:
                msg = e.message if hasattr(e, "message") else e
                return JsonResponse({"error": force_text(msg)}, status=400)

        buff = BytesIO()
        archive = zipfile.ZipFile(buff, 'w', zipfile.ZIP_DEFLATED)
        added = 0
        errors = []
        for order_id in ids:
            try:
                pdf_file = get_confirmation_pdf(request, order_id)
                archive.writestr(
                    "order_%d_confirmation.pdf" % order_id, pdf_file.content)
            except Exception as e:
                msg = e.message if hasattr(e, "message") else e
                errors.append(force_text(msg))
            else:
                added += 1
        if not added:
            return JsonResponse({"errors": errors}, status=400)
        archive.close()
        buff.flush()
        payload = buff.getvalue()
        buff.close()
        response = HttpResponse(content_type='application/zip')
        response['Content-Disposition'] = (
            'attachment; filename=order_confirmation_pdf.zip')
        response.write(payload)
        return response
Example #7
0
    def process(self, request, ids):
        """Return shipment delivery PDFs: one PDF, a zip, or error JSON."""
        if isinstance(ids, six.string_types) and ids == "all":
            return JsonResponse({"error": ugettext("Selecting all is not supported.")})
        shipment_ids = set(Shipment.objects.filter(order_id__in=ids).values_list("id", flat=True))
        if len(shipment_ids) == 1:
            try:
                only_shipment = shipment_ids.pop()
                pdf_response = get_delivery_pdf(request, only_shipment)
                pdf_response['Content-Disposition'] = 'attachment; filename=shipment_%s_delivery.pdf' % only_shipment
                return pdf_response
            except Exception as e:
                msg = e.message if hasattr(e, "message") else e
                return JsonResponse({"error": force_text(msg)})
        buff = BytesIO()
        archive = zipfile.ZipFile(buff, 'w', zipfile.ZIP_DEFLATED)

        added = 0
        errors = []
        for shipment_id in shipment_ids:
            try:
                pdf_file = get_delivery_pdf(request, shipment_id)
                archive.writestr("shipment_%d_delivery.pdf" % shipment_id, pdf_file.content)
            except Exception as e:
                msg = e.message if hasattr(e, "message") else e
                errors.append(force_text(msg))
            else:
                added += 1
        if not added:
            return JsonResponse({"errors": errors})
        archive.close()
        buff.flush()
        payload = buff.getvalue()
        buff.close()
        response = HttpResponse(content_type='application/zip')
        response['Content-Disposition'] = 'attachment; filename=order_delivery_pdf.zip'
        response.write(payload)
        return response
Example #8
0
    def process(self, request, ids):
        """Return order delivery PDFs: one PDF, a zip archive, or errors.

        CONSISTENCY FIX: like the sibling mass actions, reject the
        "select all" sentinel explicitly; previously ids == "all" would
        fall through and iterate over the characters of the string.
        """
        if isinstance(ids, six.string_types) and ids == "all":
            return JsonResponse(
                {"error": ugettext("Selecting all is not supported.")})
        if len(ids) == 1:
            try:
                response = get_delivery_pdf(request, ids[0])
                response['Content-Disposition'] = (
                    'attachment; filename=order_%s_delivery.pdf' % ids[0])
                return response
            except Exception as e:
                msg = e.message if hasattr(e, "message") else e
                return JsonResponse({"error": force_text(msg)})
        buff = BytesIO()
        archive = zipfile.ZipFile(buff, 'w', zipfile.ZIP_DEFLATED)

        added = 0
        errors = []
        for order_id in ids:
            try:
                pdf_file = get_delivery_pdf(request, order_id)
                filename = "order_%d_delivery.pdf" % order_id
                archive.writestr(filename, pdf_file.content)
                added += 1
            except Exception as e:
                # Collect per-order failures; keep going with the rest.
                msg = e.message if hasattr(e, "message") else e
                errors.append(force_text(msg))
                continue
        if added:
            archive.close()
            buff.flush()
            ret_zip = buff.getvalue()
            buff.close()
            response = HttpResponse(content_type='application/zip')
            response['Content-Disposition'] = (
                'attachment; filename=order_delivery_pdf.zip')
            response.write(ret_zip)
            return response
        return JsonResponse({"errors": errors})
Example #9
0
    def process(self, request, ids):
        """Return order confirmation PDFs: one PDF, a zip, or error JSON."""
        if isinstance(ids, six.string_types) and ids == "all":
            return JsonResponse({"error": ugettext("Selecting all is not supported.")}, status=400)
        if len(ids) == 1:
            try:
                pdf_response = get_confirmation_pdf(request, ids[0])
                pdf_response['Content-Disposition'] = 'attachment; filename=order_%s_confirmation.pdf' % ids[0]
                return pdf_response
            except Exception as e:
                msg = e.message if hasattr(e, "message") else e
                return JsonResponse({"error": force_text(msg)}, status=400)

        buff = BytesIO()
        archive = zipfile.ZipFile(buff, 'w', zipfile.ZIP_DEFLATED)
        added = 0
        errors = []
        for order_id in ids:
            try:
                pdf_file = get_confirmation_pdf(request, order_id)
                archive.writestr("order_%d_confirmation.pdf" % order_id, pdf_file.content)
            except Exception as e:
                msg = e.message if hasattr(e, "message") else e
                errors.append(force_text(msg))
            else:
                added += 1
        if not added:
            return JsonResponse({"errors": errors}, status=400)
        archive.close()
        buff.flush()
        payload = buff.getvalue()
        buff.close()
        response = HttpResponse(content_type='application/zip')
        response['Content-Disposition'] = 'attachment; filename=order_confirmation_pdf.zip'
        response.write(payload)
        return response
Example #10
0
File: tee.py  Project: pashinin/restkit
class TeeInput(object):
    """File-like reader that "tees" everything read from ``stream`` into
    a temporary buffer (``self.tmp``), so the body can be re-read or
    seeked after it has been consumed once.
    """

    # Size of each read against the underlying stream.
    CHUNK_SIZE = conn.CHUNK_SIZE

    def __init__(self, stream):
        self.buf = StringIO()
        # True once the underlying stream has been fully consumed.
        self.eof = False

        if isinstance(stream, six.string_types):
            # Plain string body: expose it as a stream and keep the tee
            # buffer in memory.
            stream = StringIO(stream)
            self.tmp = StringIO()
        else:
            # Unknown/large body: spool the tee buffer to a temp file.
            self.tmp = tempfile.TemporaryFile()

        self.stream = stream

    def __enter__(self):
        return self

    def __exit__(self, exc_type, exc_val, traceback):
        return

    def seek(self, offset, whence=0):
        """ naive implementation of seek """
        current_size = self._tmp_size()
        # diff > 0 means the target position lies beyond the buffered data.
        diff = 0
        if whence == 0:
            diff = offset - current_size
        elif whence == 2:
            diff = (self.tmp.tell() + offset) - current_size
        elif whence == 3 and not self.eof:
            # we read until the end
            # NOTE(review): whence == 3 is not a standard io whence value;
            # here it appears to mean "consume the rest of the stream" —
            # confirm against callers.
            while True:
                self.tmp.seek(0, 2)
                if not self._tee(self.CHUNK_SIZE):
                    break

        if not self.eof and diff > 0:
            # Pull the missing bytes from the stream into the tee buffer.
            self._ensure_length(StringIO(), diff)
        self.tmp.seek(offset, whence)

    def flush(self):
        self.tmp.flush()

    def read(self, length=-1):
        """ read """
        if self.eof:
            # Stream fully buffered: serve directly from the tee buffer.
            return self.tmp.read(length)

        if length < 0:
            # Read to EOF: remaining buffered data first, then the stream.
            buf = StringIO()
            buf.write(self.tmp.read())
            while True:
                chunk = self._tee(self.CHUNK_SIZE)
                if not chunk:
                    break
                buf.write(chunk)
            return buf.getvalue()
        else:
            dest = StringIO()
            # Bytes already buffered but not yet returned to the caller.
            diff = self._tmp_size() - self.tmp.tell()
            if not diff:
                dest.write(self._tee(length))
                return self._ensure_length(dest, length)
            else:
                l = min(diff, length)
                dest.write(self.tmp.read(l))
                return self._ensure_length(dest, length)

    def readline(self, size=-1):
        # NOTE(review): ``size`` is accepted but not honored below.
        if self.eof:
            return self.tmp.readline()

        orig_size = self._tmp_size()
        if self.tmp.tell() == orig_size:
            # Buffer exhausted: fetch one more chunk before reading a line.
            if not self._tee(self.CHUNK_SIZE):
                return ''
            self.tmp.seek(orig_size)

        # now we can get line
        line = self.tmp.readline()
        if line.find("\n") >=0:
            return line

        # The line continues past the buffered data: keep teeing chunks
        # until a newline shows up or the stream ends.
        buf = StringIO()
        buf.write(line)
        while True:
            orig_size = self.tmp.tell()
            data = self._tee(self.CHUNK_SIZE)
            if not data:
                break
            self.tmp.seek(orig_size)
            buf.write(self.tmp.readline())
            if data.find("\n") >= 0:
                break
        return buf.getvalue()

    def readlines(self, sizehint=0):
        """Read lines until EOF, or until ``sizehint`` bytes are collected."""
        total = 0
        lines = []
        line = self.readline()
        while line:
            lines.append(line)
            total += len(line)
            if 0 < sizehint <= total:
                break
            line = self.readline()
        return lines

    def close(self):
        if not self.eof:
            # we didn't read until the end
            # NOTE(review): _close_unreader is not defined in this class —
            # presumably provided by a subclass or mixin; verify.
            self._close_unreader()
        return self.tmp.close()

    def next(self):
        r = self.readline()
        if not r:
            raise StopIteration
        return r
    __next__ = next

    def __iter__(self):
        return self

    def _tee(self, length):
        """ fetch partial body"""
        buf2 = self.buf
        buf2.seek(0, 2)
        chunk = self.stream.read(length)
        if chunk:
            # Append the chunk to the tee buffer and leave the position at
            # the end so the next write appends.
            self.tmp.write(chunk)
            self.tmp.flush()
            self.tmp.seek(0, 2)
            return chunk

        # Stream exhausted.
        self._finalize()
        return ""

    def _finalize(self):
        """ here we wil fetch final trailers
        if any."""
        self.eof = True

    def _tmp_size(self):
        # Real files report their size via fstat; StringIO via getvalue().
        if hasattr(self.tmp, 'fileno'):
            return int(os.fstat(self.tmp.fileno())[6])
        else:
            return len(self.tmp.getvalue())

    def _ensure_length(self, dest, length):
        # Top up ``dest`` from the stream until it holds ``length`` bytes
        # (or the stream ends); returns the collected data.
        if len(dest.getvalue()) < length:
            data = self._tee(length - len(dest.getvalue()))
            dest.write(data)
        return dest.getvalue()
Example #11
0
class File(ClarityElement):
    """
    This is a file in Clarity. It is also a Python file (more or less).
    You can read, write, and do everything else you can normally do with a Python file.
    NOTE: nothing will be committed to Clarity until you call close, or commit.
    """

    UNIVERSAL_TAG = "{http://genologics.com/ri/file}file"

    def __init__(self, lims, uri=None, xml_root=None, name=None, limsid=None):
        super(File, self).__init__(lims, uri, xml_root, name, limsid)
        self._data = None  # lazily populated IO stream; see the .data property
        self._dirty = False  # True when local data has not been committed
        self.content_type = 'text/plain'
        self.writeable = True
        self.only_write_locally = False  # if True, commit() only flushes to disk
        self.mode = "r"

    @classmethod
    def new_empty(cls, attachment_point_element, name=None):
        """
        Create a new empty :class:`File`.

        :param attachment_point_element: An element to attach the file to.
        :type attachment_point_element: ClarityElement
        :param name: A name for the file.
        :type name: str
        :rtype: File
        """
        root = ETree.Element(cls.UNIVERSAL_TAG)

        f = File(uri=None, xml_root=root, lims=attachment_point_element.lims)
        if name is not None:
            f.name = name
        f.attached_to = attachment_point_element.uri
        return f

    @classmethod
    def new_from_local(cls, attachment_point_element, local_file_path, mode="r+b"):
        """
        Create a new :class:`File` from a local file.

        :param attachment_point_element: An element to attach the file to.
        :type attachment_point_element: ClarityElement
        :param local_file_path: Path to the local file.
        :type local_file_path: str
        :param mode: Mode to open the file with.
        :type mode: str
        :rtype: File
        """
        root = ETree.Element(cls.UNIVERSAL_TAG)

        f = File(uri=None, xml_root=root, lims=attachment_point_element.lims)

        f.name = local_file_path
        f.attached_to = attachment_point_element.uri
        f._data = open(local_file_path, mode)
        # Mark dirty so commit() uploads the local contents.
        f._dirty = True

        return f

    # XML-backed element properties (delegated to subnodes of the file node).
    name = subnode_property('original-location')
    attached_to = subnode_property('attached-to')
    content_location = subnode_property('content-location')
    is_published = subnode_property('is-published', typename=types.BOOLEAN)

    @property
    def is_binary_mode(self):
        """
        Whether the current file mode is binary ("b" in mode).

        :type: bool
        """
        return "b" in self.mode

    def pipe_to(self, target_file_object):
        """
        Download this file's contents from Clarity and write them to
        ``target_file_object``; also records the response Content-Type.

        :raises FileNotFoundException: if the file does not exist in Clarity.
        """
        response = self.lims.raw_request('GET', self.uri + '/download')
        self.content_type = response.headers.get("Content-Type")

        if self.is_binary_mode:
            file_contents = response.content
        else:
            # Text mode: use content if it is already a str, otherwise the
            # decoded text (Python 2/3 difference).
            file_contents = response.content if isinstance(response.content, string_types) else response.text

        target_file_object.write(file_contents)

    def replace_and_commit_from_local(self, local_file_path, content_type='text/plain', mode="r+b"):
        """Replace this file's contents with a local file's, then commit."""
        self.mode = mode
        other_file = open(local_file_path, self.mode)
        self.replace_and_commit(other_file, local_file_path, content_type)
        other_file.close()

    def replace_and_commit(self, stream, name, content_type='text/plain'):
        """Replace this file's contents with ``stream``'s data and commit.

        :raises Exception: if the file is not writeable.
        """
        if not self.writeable:
            raise Exception("file not writeable")
        self.name = name
        self.data.write(stream.read())
        self.content_type = content_type
        self._dirty = True
        self.commit()

    @property
    def data(self):
        """
        :return: The file data IO stream.
        :rtype:  io.IOBase
        """
        if self._data is None:
            if self.only_write_locally:
                pathstrippedname = os.path.basename(self.name)
                if os.path.exists(self.name):
                    file_name = self.name
                else:
                    file_name = pathstrippedname

                self._data = open(file_name, self.mode)
            else:
                # In-memory buffer; type depends on text vs binary mode.
                self._data = BytesIO() if self.is_binary_mode else StringIO()

            if self.uri is not None:
                try:
                    log.debug("Getting file contents from lims...")

                    # convenient!
                    self.pipe_to(self._data)
                    self._data.seek(0)

                except FileNotFoundException:
                    log.debug("File not found at %s" % self.uri)

                    # this is ok, we just leave the buffer empty.
                    # uri = None means we will need a new uri, later, allocated through glsstorage.
                    self.uri = None

        return self._data

    # Implementation for standard io.IOBase methods to support being used as a file:
    def read(self, n=-1):
        return self.data.read(n)

    def readline(self, length=None):
        return self.data.readline(length)

    def readlines(self, sizehint=0):
        return self.data.readlines(sizehint)

    def write(self, s):
        """Write ``s`` to the buffer and mark the file dirty for commit."""
        if not self.writeable:
            raise Exception("file not writeable")

        self._dirty = True
        return self.data.write(s)

    def writelines(self, iterable):
        """Write each item to the buffer and mark the file dirty for commit."""
        if not self.writeable:
            raise Exception("file not writeable")

        self._dirty = True
        return self.data.writelines(iterable)

    def flush(self):
        # No-op: data only reaches Clarity via commit()/close().
        return

    def getvalue(self):
        return self.data.getvalue()

    def truncate(self, size=None):
        """Truncate the buffer; truncating an unopened file to None just
        creates an empty buffer."""
        if not self.writeable:
            raise Exception("file not writeable")

        self._dirty = True

        if size is None and self._data is None:
            self._data = BytesIO() if self.is_binary_mode else StringIO()
        else:
            self._data.truncate(size)

    def tell(self):
        return self.data.tell()

    def isatty(self):
        return False

    def close(self):
        """
        Commit the file and close the data stream.
        """
        self.commit()
        return self.data.close()

    def __iter__(self):
        return self.data.__iter__()

    def seek(self, pos, mode=0):
        return self.data.seek(pos, mode)

    def readable(self):
        return self.data.readable()

    def writable(self):
        return self.data.writable()

    def seekable(self):
        return self.data.seekable()

    # end file-like functions

    def seek_to_end(self):
        """Move the stream position to the end of the buffer."""
        return self.data.seek(0, 2)

    def commit(self):
        """Upload local data to Clarity (allocating storage and a files
        entry first). No-op when read-only or nothing was ever loaded;
        only flushes to disk when ``only_write_locally`` is set."""
        if not self.writeable or self._data is None:
            return

        if self.only_write_locally:
            self._data.flush()
            return

        if self.name is None:
            raise Exception("Value for .name required.")

        if self.uri is not None:
            # If we are overwriting an existing file, first delete to
            # allow name to be changed.
            self.lims.raw_request('DELETE', self.uri)
            self.uri = None

        # first we get an allocation from glsstorage
        self.post_and_parse(self.lims.root_uri + '/glsstorage')

        # then we post ourselves to files, which gives us a uri.
        self.post_and_parse(self.lims.root_uri + '/files')

        if self._dirty:
            # Upload from position 0, then restore the caller's position.
            old_pos = self.data.tell()
            self.data.seek(0)
            self.lims.raw_request('POST', self.uri + '/upload',
                                  files={'file': (self.name, self.data, self.content_type)}
                                  )
            self._dirty = False
            self.data.seek(old_pos)
Example #12
0
File: tee.py  Project: pashinin/restkit
class TeeInput(object):
    """File-like reader that "tees" everything read from ``stream`` into
    a temporary buffer (``self.tmp``), so the body can be re-read or
    seeked after it has been consumed once.
    """

    # Size of each read against the underlying stream.
    CHUNK_SIZE = conn.CHUNK_SIZE

    def __init__(self, stream):
        self.buf = StringIO()
        # True once the underlying stream has been fully consumed.
        self.eof = False

        if isinstance(stream, six.string_types):
            # Plain string body: expose it as a stream and keep the tee
            # buffer in memory.
            stream = StringIO(stream)
            self.tmp = StringIO()
        else:
            # Unknown/large body: spool the tee buffer to a temp file.
            self.tmp = tempfile.TemporaryFile()

        self.stream = stream

    def __enter__(self):
        return self

    def __exit__(self, exc_type, exc_val, traceback):
        return

    def seek(self, offset, whence=0):
        """ naive implementation of seek """
        current_size = self._tmp_size()
        # diff > 0 means the target position lies beyond the buffered data.
        diff = 0
        if whence == 0:
            diff = offset - current_size
        elif whence == 2:
            diff = (self.tmp.tell() + offset) - current_size
        elif whence == 3 and not self.eof:
            # we read until the end
            # NOTE(review): whence == 3 is not a standard io whence value;
            # here it appears to mean "consume the rest of the stream" —
            # confirm against callers.
            while True:
                self.tmp.seek(0, 2)
                if not self._tee(self.CHUNK_SIZE):
                    break

        if not self.eof and diff > 0:
            # Pull the missing bytes from the stream into the tee buffer.
            self._ensure_length(StringIO(), diff)
        self.tmp.seek(offset, whence)

    def flush(self):
        self.tmp.flush()

    def read(self, length=-1):
        """ read """
        if self.eof:
            # Stream fully buffered: serve directly from the tee buffer.
            return self.tmp.read(length)

        if length < 0:
            # Read to EOF: remaining buffered data first, then the stream.
            buf = StringIO()
            buf.write(self.tmp.read())
            while True:
                chunk = self._tee(self.CHUNK_SIZE)
                if not chunk:
                    break
                buf.write(chunk)
            return buf.getvalue()
        else:
            dest = StringIO()
            # Bytes already buffered but not yet returned to the caller.
            diff = self._tmp_size() - self.tmp.tell()
            if not diff:
                dest.write(self._tee(length))
                return self._ensure_length(dest, length)
            else:
                l = min(diff, length)
                dest.write(self.tmp.read(l))
                return self._ensure_length(dest, length)

    def readline(self, size=-1):
        # NOTE(review): ``size`` is accepted but not honored below.
        if self.eof:
            return self.tmp.readline()

        orig_size = self._tmp_size()
        if self.tmp.tell() == orig_size:
            # Buffer exhausted: fetch one more chunk before reading a line.
            if not self._tee(self.CHUNK_SIZE):
                return ''
            self.tmp.seek(orig_size)

        # now we can get line
        line = self.tmp.readline()
        if line.find("\n") >= 0:
            return line

        # The line continues past the buffered data: keep teeing chunks
        # until a newline shows up or the stream ends.
        buf = StringIO()
        buf.write(line)
        while True:
            orig_size = self.tmp.tell()
            data = self._tee(self.CHUNK_SIZE)
            if not data:
                break
            self.tmp.seek(orig_size)
            buf.write(self.tmp.readline())
            if data.find("\n") >= 0:
                break
        return buf.getvalue()

    def readlines(self, sizehint=0):
        """Read lines until EOF, or until ``sizehint`` bytes are collected."""
        total = 0
        lines = []
        line = self.readline()
        while line:
            lines.append(line)
            total += len(line)
            if 0 < sizehint <= total:
                break
            line = self.readline()
        return lines

    def close(self):
        if not self.eof:
            # we didn't read until the end
            # NOTE(review): _close_unreader is not defined in this class —
            # presumably provided by a subclass or mixin; verify.
            self._close_unreader()
        return self.tmp.close()

    def next(self):
        r = self.readline()
        if not r:
            raise StopIteration
        return r

    __next__ = next

    def __iter__(self):
        return self

    def _tee(self, length):
        """ fetch partial body"""
        buf2 = self.buf
        buf2.seek(0, 2)
        chunk = self.stream.read(length)
        if chunk:
            # Append the chunk to the tee buffer and leave the position at
            # the end so the next write appends.
            self.tmp.write(chunk)
            self.tmp.flush()
            self.tmp.seek(0, 2)
            return chunk

        # Stream exhausted.
        self._finalize()
        return ""

    def _finalize(self):
        """ here we wil fetch final trailers
        if any."""
        self.eof = True

    def _tmp_size(self):
        # Real files report their size via fstat; StringIO via getvalue().
        if hasattr(self.tmp, 'fileno'):
            return int(os.fstat(self.tmp.fileno())[6])
        else:
            return len(self.tmp.getvalue())

    def _ensure_length(self, dest, length):
        # Top up ``dest`` from the stream until it holds ``length`` bytes
        # (or the stream ends); returns the collected data.
        if len(dest.getvalue()) < length:
            data = self._tee(length - len(dest.getvalue()))
            dest.write(data)
        return dest.getvalue()