Example 1
        def downloadStream(self):
            readable_fh, writable_fh = os.pipe()
            with os.fdopen(readable_fh, 'r') as readable:
                with os.fdopen(writable_fh, 'w') as writable:
                    def writer():
                        try:
                            if self.content is not None:
                                writable.write(self.content)
                            elif self.version:
                                headers = self._s3EncryptionHeaders()
                                key = self.outer.filesBucket.get_key(self.fileID, validate=False)
                                key.get_contents_to_file(writable,
                                                         headers=headers,
                                                         version_id=self.version)
                            else:
                                assert False
                        finally:
                            # This close() will send EOF to the reading end and ultimately cause
                            # the yield to return. It also makes the implicit .close() done by the
                            # enclosing "with" context redundant, but that should be OK since
                            # .close() on file objects is idempotent.
                            writable.close()

                    thread = ExceptionalThread(target=writer)
                    thread.start()
                    yield readable
                    thread.join()
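
The downloadStream generators above yield exactly once, so they are meant to be consumed as context managers (in Toil they are presumably wrapped with contextlib.contextmanager; the decorator is not shown in these snippets). Below is a minimal, self-contained sketch of the same pipe-plus-writer-thread pattern, with a plain threading.Thread standing in for ExceptionalThread and a hypothetical produce_data callable in place of the S3 download:

import os
import threading
from contextlib import contextmanager

@contextmanager
def download_stream(produce_data):
    # produce_data is a hypothetical callable that receives the writable end
    # of the pipe; it stands in for writing inlined content or fetching the
    # S3 key as in the examples above.
    readable_fh, writable_fh = os.pipe()
    with os.fdopen(readable_fh, 'r') as readable:
        with os.fdopen(writable_fh, 'w') as writable:
            def writer():
                try:
                    produce_data(writable)
                finally:
                    # Closing the writable end sends EOF to the reading end.
                    writable.close()

            thread = threading.Thread(target=writer)
            thread.start()
            yield readable
            thread.join()

# Usage: stream the content without buffering all of it in memory first.
with download_stream(lambda w: w.write('Hello, world!\n')) as readable:
    print(readable.read())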
Example 2
        def downloadStream(self):
            readable_fh, writable_fh = os.pipe()
            with os.fdopen(readable_fh, 'r') as readable:
                with os.fdopen(writable_fh, 'w') as writable:
                    def writer():
                        try:
                            if self.content is not None:
                                writable.write(self.content)
                            elif self.version:
                                headers = self._s3EncryptionHeaders()
                                key = self.outer.filesBucket.get_key(self.fileID, validate=False)
                                key.get_contents_to_file(writable,
                                                         headers=headers,
                                                         version_id=self.version)
                            else:
                                assert False
                        finally:
                            # This close() will send EOF to the reading end and ultimately cause
                            # the yield to return. It also makes the implicit .close() done by the
                            # enclosing "with" context redundant, but that should be OK since
                            # .close() on file objects is idempotent.
                            writable.close()

                    thread = ExceptionalThread(target=writer)
                    thread.start()
                    yield readable
                    thread.join()
Example 3
        def uploadStream(self, multipart=True, allowInlining=True):
            store = self.outer
            readable_fh, writable_fh = os.pipe()
            with os.fdopen(readable_fh, 'r') as readable:
                with os.fdopen(writable_fh, 'w') as writable:

                    def multipartReader():
                        buf = readable.read(self.outer.partSize)
                        if allowInlining and len(
                                buf) <= self._maxInlinedSize():
                            self.content = buf
                        else:
                            headers = self._s3EncryptionHeaders()
                            upload = store.filesBucket.initiate_multipart_upload(
                                key_name=self.fileID, headers=headers)
                            try:
                                for part_num in itertools.count():
                                    # There must be at least one part, even if the file is empty.
                                    if len(buf) == 0 and part_num > 0: break
                                    upload.upload_part_from_file(
                                        fp=StringIO(buf),
                                        # S3 part numbers are 1-based
                                        part_num=part_num + 1,
                                        headers=headers)
                                    if len(buf) == 0: break
                                    buf = readable.read(self.outer.partSize)
                            except:
                                upload.cancel_upload()
                                raise
                            else:
                                self.version = upload.complete_upload(
                                ).version_id

                    def reader():
                        buf = readable.read()
                        if allowInlining and len(
                                buf) <= self._maxInlinedSize():
                            self.content = buf
                        else:
                            key = store.filesBucket.new_key(
                                key_name=self.fileID)
                            buf = StringIO(buf)
                            headers = self._s3EncryptionHeaders()
                            assert buf.len == key.set_contents_from_file(
                                fp=buf, headers=headers)
                            self.version = key.version_id

                    thread = ExceptionalThread(
                        target=multipartReader if multipart else reader)
                    thread.start()
                    yield writable
                # The writable is now closed. This will send EOF to the readable and cause that
                # thread to finish.
                thread.join()
                assert bool(self.version) == (self.content is None)
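
uploadStream reverses the direction: the caller gets the writable end of the pipe while a background thread drains the readable end and performs the upload. A minimal sketch of that write-side pattern, again with threading.Thread standing in for ExceptionalThread and a hypothetical consume_data callable in place of the actual S3 upload:

import os
import threading
from contextlib import contextmanager

@contextmanager
def upload_stream(consume_data):
    # consume_data is a hypothetical callable that receives the readable end
    # of the pipe; it stands in for the multipart or single-shot S3 upload.
    readable_fh, writable_fh = os.pipe()
    with os.fdopen(readable_fh, 'r') as readable:
        with os.fdopen(writable_fh, 'w') as writable:
            thread = threading.Thread(target=lambda: consume_data(readable))
            thread.start()
            yield writable
        # Leaving the inner "with" closes the writable end, which sends EOF
        # to the reader thread and lets it finish.
        thread.join()

# Usage: whatever the caller writes is streamed to the consumer.
chunks = []
with upload_stream(lambda r: chunks.append(r.read())) as writable:
    writable.write('part one\n')
    writable.write('part two\n')
print(''.join(chunks))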
Example 4
    def _downloadStream(self, key, encrypt=True):
        readable_fh, writable_fh = os.pipe()
        with os.fdopen(readable_fh, 'r') as readable:
            with os.fdopen(writable_fh, 'w') as writable:
                def writer():
                    headers = self.encryptedHeaders if encrypt else self.headerValues
                    try:
                        key.get_file(writable, headers=headers)
                    finally:
                        writable.close()

                thread = ExceptionalThread(target=writer)
                thread.start()
                yield readable
                thread.join()
Example 5
        def uploadStream(self, multipart=True, allowInlining=True):
            store = self.outer
            readable_fh, writable_fh = os.pipe()
            with os.fdopen(readable_fh, 'r') as readable:
                with os.fdopen(writable_fh, 'w') as writable:
                    def multipartReader():
                        buf = readable.read(self.outer.partSize)
                        if allowInlining and len(buf) <= self._maxInlinedSize():
                            self.content = buf
                        else:
                            headers = self._s3EncryptionHeaders()
                            upload = store.filesBucket.initiate_multipart_upload(
                                key_name=self.fileID,
                                headers=headers)
                            try:
                                for part_num in itertools.count():
                                    # There must be at least one part, even if the file is empty.
                                    if len(buf) == 0 and part_num > 0: break
                                    upload.upload_part_from_file(fp=StringIO(buf),
                                                                 # S3 part numbers are 1-based
                                                                 part_num=part_num + 1,
                                                                 headers=headers)
                                    if len(buf) == 0: break
                                    buf = readable.read(self.outer.partSize)
                            except:
                                upload.cancel_upload()
                                raise
                            else:
                                self.version = upload.complete_upload().version_id

                    def reader():
                        buf = readable.read()
                        if allowInlining and len(buf) <= self._maxInlinedSize():
                            self.content = buf
                        else:
                            key = store.filesBucket.new_key(key_name=self.fileID)
                            buf = StringIO(buf)
                            headers = self._s3EncryptionHeaders()
                            assert buf.len == key.set_contents_from_file(fp=buf, headers=headers)
                            self.version = key.version_id

                    thread = ExceptionalThread(target=multipartReader if multipart else reader)
                    thread.start()
                    yield writable
                # The writable is now closed. This will send EOF to the readable and cause that
                # thread to finish.
                thread.join()
                assert bool(self.version) == (self.content is None)
Example 6
    def _downloadStream(self, key, encrypt=True):
        readable_fh, writable_fh = os.pipe()
        with os.fdopen(readable_fh, 'r') as readable:
            with os.fdopen(writable_fh, 'w') as writable:

                def writer():
                    headers = self.encryptedHeaders if encrypt else self.headerValues
                    try:
                        key.get_file(writable, headers=headers)
                    finally:
                        writable.close()

                thread = ExceptionalThread(target=writer)
                thread.start()
                yield readable
                thread.join()
Example 7
    def _uploadStream(self, key, update=False, encrypt=True):
        readable_fh, writable_fh = os.pipe()
        with os.fdopen(readable_fh, 'r') as readable:
            with os.fdopen(writable_fh, 'w') as writable:

                def writer():
                    headers = self.encryptedHeaders if encrypt else self.headerValues
                    if update:
                        try:
                            key.set_contents_from_stream(readable,
                                                         headers=headers)
                        except boto.exception.GSDataError:
                            if encrypt:
                                # https://github.com/boto/boto/issues/3518
                                # see self._writeFile for more
                                pass
                            else:
                                raise
                    else:
                        try:
                            # The if_generation kwarg ensures that the existing key matches the given
                            # generation (version) before anything is modified. Setting
                            # if_generation=0 ensures that the key does not exist remotely.
                            key.set_contents_from_stream(readable,
                                                         headers=headers,
                                                         if_generation=0)
                        except (boto.exception.GSResponseError,
                                boto.exception.GSDataError) as e:
                            if isinstance(e, boto.exception.GSResponseError):
                                if e.status == 412:
                                    raise ConcurrentFileModificationException(
                                        key.name)
                                else:
                                    raise e
                            elif encrypt:
                                # https://github.com/boto/boto/issues/3518
                                # see self._writeFile for more
                                pass
                            else:
                                raise

                thread = ExceptionalThread(target=writer)
                thread.start()
                yield writable
            thread.join()
Example 8
    def _downloadStream(self, jobStoreFileID, container):
        # The reason this is not in the writer is so that we catch non-existent blobs early.

        blobProps = container.get_blob_properties(blob_name=jobStoreFileID)

        encrypted = strict_bool(blobProps['x-ms-meta-encrypted'])
        if encrypted and self.keyPath is None:
            raise AssertionError(
                'Content is encrypted but no key was provided.')

        readable_fh, writable_fh = os.pipe()
        with os.fdopen(readable_fh, 'r') as readable:
            with os.fdopen(writable_fh, 'w') as writable:

                def writer():
                    try:
                        chunkStartPos = 0
                        fileSize = int(blobProps['Content-Length'])
                        while chunkStartPos < fileSize:
                            chunkEndPos = chunkStartPos + self._maxAzureBlockBytes - 1
                            buf = container.get_blob(
                                blob_name=jobStoreFileID,
                                x_ms_range="bytes=%d-%d" %
                                (chunkStartPos, chunkEndPos))
                            if encrypted:
                                buf = encryption.decrypt(buf, self.keyPath)
                            writable.write(buf)
                            chunkStartPos = chunkEndPos + 1
                    finally:
                        # Ensure readers aren't left blocking if this thread crashes.
                        # This close() will send EOF to the reading end and ultimately cause the
                        # yield to return. It also makes the implicit .close() done by the enclosing
                        # "with" context redundant, but that should be OK since .close() on file
                        # objects is idempotent.
                        writable.close()

                thread = ExceptionalThread(target=writer)
                thread.start()
                yield readable
                thread.join()
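
The writer above walks the blob in fixed-size chunks using inclusive byte ranges, as in the HTTP Range header convention; the final range may extend past the end of the blob, in which case the service simply returns fewer bytes. A tiny stand-alone sketch of the same boundary arithmetic (byte_ranges is illustrative, not from the source):

def byte_ranges(file_size, chunk_size):
    # Yields inclusive (start, end) pairs like the x_ms_range values above.
    start = 0
    while start < file_size:
        end = start + chunk_size - 1
        yield start, end
        start = end + 1

# e.g. a 10-byte blob read in 4-byte chunks:
print(list(byte_ranges(10, 4)))  # [(0, 3), (4, 7), (8, 11)]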
Example 9
 def _uploadStream(self, key, update=False, encrypt=True):
     readable_fh, writable_fh = os.pipe()
     with os.fdopen(readable_fh, 'r') as readable:
         with os.fdopen(writable_fh, 'w') as writable:
             def writer():
                 headers = self.encryptedHeaders if encrypt else self.headerValues
                 if update:
                     try:
                         key.set_contents_from_stream(readable, headers=headers)
                     except boto.exception.GSDataError:
                         if encrypt:
                             # https://github.com/boto/boto/issues/3518
                             # see self._writeFile for more
                             pass
                         else:
                             raise
                 else:
                     try:
                         # The if_generation kwarg ensures that the existing key matches the given
                         # generation (version) before anything is modified. Setting
                         # if_generation=0 ensures that the key does not exist remotely.
                         key.set_contents_from_stream(readable, headers=headers, if_generation=0)
                     except (boto.exception.GSResponseError, boto.exception.GSDataError) as e:
                         if isinstance(e, boto.exception.GSResponseError):
                             if e.status == 412:
                                 raise ConcurrentFileModificationException(key.name)
                             else:
                                 raise e
                         elif encrypt:
                             # https://github.com/boto/boto/issues/3518
                             # see self._writeFile for more
                             pass
                         else:
                             raise
             thread = ExceptionalThread(target=writer)
             thread.start()
             yield writable
         thread.join()
Example 10
    def _downloadStream(self, jobStoreFileID, container):
        # The reason this is not in the writer is so that we catch non-existent blobs early.

        blobProps = container.get_blob_properties(blob_name=jobStoreFileID)

        encrypted = strict_bool(blobProps['x-ms-meta-encrypted'])
        if encrypted and self.keyPath is None:
            raise AssertionError('Content is encrypted but no key was provided.')

        readable_fh, writable_fh = os.pipe()
        with os.fdopen(readable_fh, 'r') as readable:
            with os.fdopen(writable_fh, 'w') as writable:
                def writer():
                    try:
                        chunkStartPos = 0
                        fileSize = int(blobProps['Content-Length'])
                        while chunkStartPos < fileSize:
                            chunkEndPos = chunkStartPos + self._maxAzureBlockBytes - 1
                            buf = container.get_blob(blob_name=jobStoreFileID,
                                                     x_ms_range="bytes=%d-%d" % (chunkStartPos,
                                                                                 chunkEndPos))
                            if encrypted:
                                buf = encryption.decrypt(buf, self.keyPath)
                            writable.write(buf)
                            chunkStartPos = chunkEndPos + 1
                    finally:
                        # Ensure readers aren't left blocking if this thread crashes.
                        # This close() will send EOF to the reading end and ultimately cause the
                        # yield to return. It also makes the implicit .close() done by the enclosing
                        # "with" context redundant, but that should be OK since .close() on file
                        # objects is idempotent.
                        writable.close()

                thread = ExceptionalThread(target=writer)
                thread.start()
                yield readable
                thread.join()
Example 11
    def _uploadStream(self, jobStoreFileID, container, checkForModification=False, encrypted=None):
        """
        :param encrypted: True to enforce encryption (will raise an exception unless a key is set),
        False to prevent encryption, or None to encrypt if a key is set.
        """
        if checkForModification:
            try:
                expectedVersion = container.get_blob_properties(blob_name=jobStoreFileID)['etag']
            except WindowsAzureMissingResourceError:
                expectedVersion = None

        if encrypted is None:
            encrypted = self.keyPath is not None
        elif encrypted:
            if self.keyPath is None:
                raise RuntimeError('Encryption requested but no key was provided')

        maxBlockSize = self._maxAzureBlockBytes
        if encrypted:
            # There is a small overhead for encrypted data.
            maxBlockSize -= encryption.overhead
        readable_fh, writable_fh = os.pipe()
        with os.fdopen(readable_fh, 'r') as readable:
            with os.fdopen(writable_fh, 'w') as writable:
                def reader():
                    blockIDs = []
                    try:
                        while True:
                            buf = readable.read(maxBlockSize)
                            if len(buf) == 0:
                                # We're safe to break here even if we never read anything, since
                                # putting an empty block list creates an empty blob.
                                break
                            if encrypted:
                                buf = encryption.encrypt(buf, self.keyPath)
                            blockID = self._newFileID()
                            container.put_block(blob_name=jobStoreFileID,
                                                block=buf,
                                                blockid=blockID)
                            blockIDs.append(blockID)
                    except:
                        # This is guaranteed to delete any uncommitted
                        # blocks.
                        container.delete_blob(blob_name=jobStoreFileID)
                        raise

                    if checkForModification and expectedVersion is not None:
                        # Acquire a (60-second) write lock,
                        leaseID = container.lease_blob(blob_name=jobStoreFileID,
                                                       x_ms_lease_action='acquire')['x-ms-lease-id']
                        # check for modification,
                        blobProperties = container.get_blob_properties(blob_name=jobStoreFileID)
                        if blobProperties['etag'] != expectedVersion:
                            container.lease_blob(blob_name=jobStoreFileID,
                                                 x_ms_lease_action='release',
                                                 x_ms_lease_id=leaseID)
                            raise ConcurrentFileModificationException(jobStoreFileID)
                        # commit the file,
                        container.put_block_list(blob_name=jobStoreFileID,
                                                 block_list=blockIDs,
                                                 x_ms_lease_id=leaseID,
                                                 x_ms_meta_name_values=dict(
                                                     encrypted=str(encrypted)))
                        # then release the lock.
                        container.lease_blob(blob_name=jobStoreFileID,
                                             x_ms_lease_action='release',
                                             x_ms_lease_id=leaseID)
                    else:
                        # No need to check for modification, just blindly write over whatever
                        # was there.
                        container.put_block_list(blob_name=jobStoreFileID,
                                                 block_list=blockIDs,
                                                 x_ms_meta_name_values=dict(
                                                     encrypted=str(encrypted)))

                thread = ExceptionalThread(target=reader)
                thread.start()
                yield writable
            # The writable is now closed. This will send EOF to the readable and cause that
            # thread to finish.
            thread.join()
Example 12
    def _uploadStream(self,
                      jobStoreFileID,
                      container,
                      checkForModification=False,
                      encrypted=None):
        """
        :param encrypted: True to enforce encryption (will raise an exception unless a key is set),
        False to prevent encryption, or None to encrypt if a key is set.
        """
        if checkForModification:
            try:
                expectedVersion = container.get_blob_properties(
                    blob_name=jobStoreFileID)['etag']
            except AzureMissingResourceHttpError:
                expectedVersion = None

        if encrypted is None:
            encrypted = self.keyPath is not None
        elif encrypted:
            if self.keyPath is None:
                raise RuntimeError(
                    'Encryption requested but no key was provided')

        maxBlockSize = self._maxAzureBlockBytes
        if encrypted:
            # There is a small overhead for encrypted data.
            maxBlockSize -= encryption.overhead
        readable_fh, writable_fh = os.pipe()
        with os.fdopen(readable_fh, 'r') as readable:
            with os.fdopen(writable_fh, 'w') as writable:

                def reader():
                    blockIDs = []
                    try:
                        while True:
                            buf = readable.read(maxBlockSize)
                            if len(buf) == 0:
                                # We're safe to break here even if we never read anything, since
                                # putting an empty block list creates an empty blob.
                                break
                            if encrypted:
                                buf = encryption.encrypt(buf, self.keyPath)
                            blockID = self._newFileID()
                            container.put_block(blob_name=jobStoreFileID,
                                                block=buf,
                                                blockid=blockID)
                            blockIDs.append(blockID)
                    except:
                        # This is guaranteed to delete any uncommitted
                        # blocks.
                        container.delete_blob(blob_name=jobStoreFileID)
                        raise

                    if checkForModification and expectedVersion is not None:
                        # Acquire a (60-second) write lock,
                        leaseID = container.lease_blob(
                            blob_name=jobStoreFileID,
                            x_ms_lease_action='acquire')['x-ms-lease-id']
                        # check for modification,
                        blobProperties = container.get_blob_properties(
                            blob_name=jobStoreFileID)
                        if blobProperties['etag'] != expectedVersion:
                            container.lease_blob(blob_name=jobStoreFileID,
                                                 x_ms_lease_action='release',
                                                 x_ms_lease_id=leaseID)
                            raise ConcurrentFileModificationException(
                                jobStoreFileID)
                        # commit the file,
                        container.put_block_list(blob_name=jobStoreFileID,
                                                 block_list=blockIDs,
                                                 x_ms_lease_id=leaseID,
                                                 x_ms_meta_name_values=dict(
                                                     encrypted=str(encrypted)))
                        # then release the lock.
                        container.lease_blob(blob_name=jobStoreFileID,
                                             x_ms_lease_action='release',
                                             x_ms_lease_id=leaseID)
                    else:
                        # No need to check for modification, just blindly write over whatever
                        # was there.
                        container.put_block_list(blob_name=jobStoreFileID,
                                                 block_list=blockIDs,
                                                 x_ms_meta_name_values=dict(
                                                     encrypted=str(encrypted)))

                thread = ExceptionalThread(target=reader)
                thread.start()
                yield writable
            # The writable is now closed. This will send EOF to the readable and cause that
            # thread to finish.
            thread.join()
Example 13
class ReadablePipe(object):
    """
    An object-oriented wrapper for os.pipe. Clients should subclass it, implement
    :meth:`.writeTo` to place data into the writable end of the pipe, then instantiate the class
    as a context manager to get the readable end. See the example below.

    >>> import sys, shutil
    >>> class MyPipe(ReadablePipe):
    ...     def writeTo(self, writable):
    ...         writable.write('Hello, world!\\n')
    >>> with MyPipe() as readable:
    ...     shutil.copyfileobj(readable, sys.stdout)
    Hello, world!

    Each instance of this class creates a thread and invokes the :meth:`.writeTo` method in that
    thread. The thread will be join()ed upon normal exit from the context manager, i.e. the body
    of the `with` statement. If an exception occurs, the thread will not be joined but a
    well-behaved :meth:`.writeTo` implementation will terminate shortly thereafter due to the
    pipe having been closed.

    Now, exceptions in the writer thread will be reraised in the main thread:

    >>> class MyPipe(ReadablePipe):
    ...     def writeTo(self, writable):
    ...         raise RuntimeError('Hello, world!')
    >>> with MyPipe() as readable:
    ...     pass
    Traceback (most recent call last):
    ...
    RuntimeError: Hello, world!

    More complicated, less illustrative tests:

    Same as above, but proving that handles are closed:

    >>> x = os.dup(0); os.close(x)
    >>> class MyPipe(ReadablePipe):
    ...     def writeTo(self, writable):
    ...         raise RuntimeError('Hello, world!')
    >>> with MyPipe() as readable:
    ...     pass
    Traceback (most recent call last):
    ...
    RuntimeError: Hello, world!
    >>> y = os.dup(0); os.close(y); x == y
    True

    Exceptions in the body of the with statement aren't masked, and handles are closed:

    >>> x = os.dup(0); os.close(x)
    >>> class MyPipe(ReadablePipe):
    ...     def writeTo(self, writable):
    ...         pass
    >>> with MyPipe() as readable:
    ...     raise RuntimeError('Hello, world!')
    Traceback (most recent call last):
    ...
    RuntimeError: Hello, world!
    >>> y = os.dup(0); os.close(y); x == y
    True
    """

    __metaclass__ = ABCMeta

    @abstractmethod
    def writeTo(self, writable):
        """
        Implement this method to write data to the writable end of the pipe.

        :param file writable: the file object representing the writable end of the pipe. Do not
        explicitly invoke the close() method of the object; that will be done automatically.
        """
        raise NotImplementedError()

    def _writer(self):
        with os.fdopen(self.writable_fh, 'w') as writable:
            # FIXME: another race here, causing a redundant attempt to close in the main thread
            self.writable_fh = None  # signal to parent thread that we've taken over
            self.writeTo(writable)

    def __init__(self):
        super(ReadablePipe, self).__init__()
        self.writable_fh = None
        self.readable = None
        self.thread = None

    def __enter__(self):
        readable_fh, self.writable_fh = os.pipe()
        self.readable = os.fdopen(readable_fh, 'r')
        self.thread = ExceptionalThread(target=self._writer)
        self.thread.start()
        return self.readable

    def __exit__(self, exc_type, exc_val, exc_tb):
        try:
            if exc_type is None:
                if self.thread is not None:
                    # reraises any exception that was raised in the thread
                    self.thread.join()
        finally:
            self.readable.close()
            # The responsibility for closing the writable end is generally that of the writer
            # thread. To cover the small window before the writer takes over we also close it here.
            writable_fh = self.writable_fh
            if writable_fh is not None:
                # FIXME: This is still racy. The writer thread could close it now, and someone
                # else may immediately open a new file, reusing the file handle.
                os.close(writable_fh)
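
ReadablePipe factors out exactly the pipe, writer-thread and join boilerplate that the earlier downloadStream/_downloadStream examples repeat inline. A hedged sketch of a subclass that streams a local file through the pipe (FileReadablePipe is illustrative, not part of the source):

import os
import shutil
import sys
import tempfile

class FileReadablePipe(ReadablePipe):
    # Illustrative subclass: writeTo copies a local file into the pipe, so the
    # consumer can read it incrementally instead of loading it into memory.
    def __init__(self, path):
        super(FileReadablePipe, self).__init__()
        self.path = path

    def writeTo(self, writable):
        with open(self.path) as f:
            shutil.copyfileobj(f, writable)

# Usage: consume the readable end exactly as in the doctests above.
with tempfile.NamedTemporaryFile(mode='w', suffix='.txt', delete=False) as tmp:
    tmp.write('streamed through the pipe\n')
with FileReadablePipe(tmp.name) as readable:
    shutil.copyfileobj(readable, sys.stdout)
os.remove(tmp.name)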
Example 14
File: utils.py Project: HPCBio/toil
class ReadablePipe(object):
    """
    An object-oriented wrapper for os.pipe. Clients should subclass it, implement
    :meth:`.writeTo` to place data into the writable end of the pipe, then instantiate the class
    as a context manager to get the readable end. See the example below.

    >>> import sys, shutil
    >>> class MyPipe(ReadablePipe):
    ...     def writeTo(self, writable):
    ...         writable.write('Hello, world!\\n')
    >>> with MyPipe() as readable:
    ...     shutil.copyfileobj(readable, sys.stdout)
    Hello, world!

    Each instance of this class creates a thread and invokes the :meth:`.writeTo` method in that
    thread. The thread will be join()ed upon normal exit from the context manager, i.e. the body
    of the `with` statement. If an exception occurs, the thread will not be joined but a
    well-behaved :meth:`.writeTo` implementation will terminate shortly thereafter due to the
    pipe having been closed.

    Now, exceptions in the writer thread will be reraised in the main thread:

    >>> class MyPipe(ReadablePipe):
    ...     def writeTo(self, writable):
    ...         raise RuntimeError('Hello, world!')
    >>> with MyPipe() as readable:
    ...     pass
    Traceback (most recent call last):
    ...
    RuntimeError: Hello, world!

    More complicated, less illustrative tests:

    Same as above, but proving that handles are closed:

    >>> x = os.dup(0); os.close(x)
    >>> class MyPipe(ReadablePipe):
    ...     def writeTo(self, writable):
    ...         raise RuntimeError('Hello, world!')
    >>> with MyPipe() as readable:
    ...     pass
    Traceback (most recent call last):
    ...
    RuntimeError: Hello, world!
    >>> y = os.dup(0); os.close(y); x == y
    True

    Exceptions in the body of the with statement aren't masked, and handles are closed:

    >>> x = os.dup(0); os.close(x)
    >>> class MyPipe(ReadablePipe):
    ...     def writeTo(self, writable):
    ...         pass
    >>> with MyPipe() as readable:
    ...     raise RuntimeError('Hello, world!')
    Traceback (most recent call last):
    ...
    RuntimeError: Hello, world!
    >>> y = os.dup(0); os.close(y); x == y
    True
    """

    __metaclass__ = ABCMeta

    @abstractmethod
    def writeTo(self, writable):
        """
        Implement this method to write data to the writable end of the pipe.

        :param file writable: the file object representing the writable end of the pipe. Do not
        explicitly invoke the close() method of the object; that will be done automatically.
        """
        raise NotImplementedError()

    def _writer(self):
        with os.fdopen(self.writable_fh, "w") as writable:
            # FIXME: another race here, causing a redundant attempt to close in the main thread
            self.writable_fh = None  # signal to parent thread that we've taken over
            self.writeTo(writable)

    def __init__(self):
        super(ReadablePipe, self).__init__()
        self.writable_fh = None
        self.readable = None
        self.thread = None

    def __enter__(self):
        readable_fh, self.writable_fh = os.pipe()
        self.readable = os.fdopen(readable_fh, "r")
        self.thread = ExceptionalThread(target=self._writer)
        self.thread.start()
        return self.readable

    def __exit__(self, exc_type, exc_val, exc_tb):
        try:
            if exc_type is None:
                if self.thread is not None:
                    # reraises any exception that was raised in the thread
                    self.thread.join()
        finally:
            self.readable.close()
            # The responsibility for closing the writable end is generally that of the writer
            # thread. To cover the small window before the writer takes over we also close it here.
            writable_fh = self.writable_fh
            if writable_fh is not None:
                # FIXME: This is still racy. The writer thread could close it now, and someone
                # else may immediately open a new file, reusing the file handle.
                os.close(writable_fh)
Example 15
class ReadablePipe(with_metaclass(ABCMeta, object)):
    """
    An object-oriented wrapper for os.pipe. Clients should subclass it, implement
    :meth:`.writeTo` to place data into the writable end of the pipe, then instantiate the class
    as a context manager to get the readable end. See the example below.

    >>> import sys, shutil
    >>> class MyPipe(ReadablePipe):
    ...     def writeTo(self, writable):
    ...         writable.write('Hello, world!\\n')
    >>> with MyPipe() as readable:
    ...     shutil.copyfileobj(readable, sys.stdout)
    Hello, world!

    Each instance of this class creates a thread and invokes the :meth:`.writeTo` method in that
    thread. The thread will be join()ed upon normal exit from the context manager, i.e. the body
    of the `with` statement. If an exception occurs, the thread will not be joined but a
    well-behaved :meth:`.writeTo` implementation will terminate shortly thereafter due to the
    pipe having been closed.

    Now, exceptions in the writer thread will be reraised in the main thread:

    >>> class MyPipe(ReadablePipe):
    ...     def writeTo(self, writable):
    ...         raise RuntimeError('Hello, world!')
    >>> with MyPipe() as readable:
    ...     pass
    Traceback (most recent call last):
    ...
    RuntimeError: Hello, world!

    More complicated, less illustrative tests:

    Same as above, but proving that handles are closed:

    >>> x = os.dup(0); os.close(x)
    >>> class MyPipe(ReadablePipe):
    ...     def writeTo(self, writable):
    ...         raise RuntimeError('Hello, world!')
    >>> with MyPipe() as readable:
    ...     pass
    Traceback (most recent call last):
    ...
    RuntimeError: Hello, world!
    >>> y = os.dup(0); os.close(y); x == y
    True

    Exceptions in the body of the with statement aren't masked, and handles are closed:

    >>> x = os.dup(0); os.close(x)
    >>> class MyPipe(ReadablePipe):
    ...     def writeTo(self, writable):
    ...         pass
    >>> with MyPipe() as readable:
    ...     raise RuntimeError('Hello, world!')
    Traceback (most recent call last):
    ...
    RuntimeError: Hello, world!
    >>> y = os.dup(0); os.close(y); x == y
    True
    """
    @abstractmethod
    def writeTo(self, writable):
        """
        Implement this method to write data to the writable end of the pipe.

        :param file writable: the file object representing the writable end of the pipe. Do not
        explicitly invoke the close() method of the object; that will be done automatically.
        """
        raise NotImplementedError()

    def _writer(self):
        try:
            with os.fdopen(self.writable_fh, 'w') as writable:
                self.writeTo(writable)
        except IOError as e:
            # The other side of the pipe may have been closed by the
            # reading thread, which is OK.
            if e.errno != errno.EPIPE:
                raise

    def __init__(self):
        super(ReadablePipe, self).__init__()
        self.writable_fh = None
        self.readable = None
        self.thread = None

    def __enter__(self):
        readable_fh, self.writable_fh = os.pipe()
        self.readable = os.fdopen(readable_fh, 'r')
        self.thread = ExceptionalThread(target=self._writer)
        self.thread.start()
        return self.readable

    def __exit__(self, exc_type, exc_val, exc_tb):
        # Close the read end of the pipe. The writing thread may
        # still be writing to the other end, but this will wake it up
        # if that's the case.
        self.readable.close()
        try:
            if self.thread is not None:
                # reraises any exception that was raised in the thread
                self.thread.join()
        except:
            if exc_type is None:
                # Only raise the child exception if there wasn't
                # already an exception in the main thread
                raise
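
This last variant differs from the earlier ones in two ways: __exit__ closes the readable end before joining, which wakes up a writeTo that is still blocked on a full pipe, and _writer swallows the resulting EPIPE, since a vanished reader is a normal way for the stream to end. A small stand-alone illustration of that broken-pipe behaviour (not taken from the source):

import errno
import os
import threading

readable_fh, writable_fh = os.pipe()
readable = os.fdopen(readable_fh, 'r')

def writer():
    try:
        with os.fdopen(writable_fh, 'w') as writable:
            # Write more than the pipe buffer holds so the OS notices that
            # the reading end has gone away.
            writable.write('x' * (1 << 20))
    except IOError as e:
        # Mirrors the check in _writer above: a broken pipe only means the
        # reader stopped listening, which is fine.
        if e.errno != errno.EPIPE:
            raise
        print('writer saw EPIPE and treated it as a clean shutdown')

readable.close()  # the reader abandons the pipe before the writer starts
thread = threading.Thread(target=writer)
thread.start()
thread.join()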