Example #1
0
    def _stream_write_internal(
        self,
        path,
        fp,
        content_type=None,
        content_encoding=None,
        cancel_on_error=True,
        size=filelike.READ_UNTIL_END,
    ):
        """
        Writes the data found in the file-like stream to the given path, with optional limit
        on size.

        Note that this method returns a *tuple* of (bytes_written, write_error) and should
        *not* raise an exception (such as IOError) if a problem uploading occurred. ALWAYS check
        the returned tuple on calls to this method.
        """
        # Minimum size of upload part size on S3 is 5MB
        self._initialize_cloud_conn()
        path = self._init_path(path)
        key = self._key_class(self._cloud_bucket, path)

        if content_type is not None:
            key.set_metadata("Content-Type", content_type)

        if content_encoding is not None:
            key.set_metadata("Content-Encoding", content_encoding)

        # Bound the stream when an explicit size was given so we never upload
        # more bytes than the caller requested.
        if size != filelike.READ_UNTIL_END:
            fp = filelike.StreamSlice(fp, 0, size)

        # TODO figure out how to handle cancel_on_error=False
        try:
            key.set_contents_from_stream(fp)
        except Exception as ex:
            # Catch all upload failures, not just IOError: boto can raise its
            # own exception types here, and the documented contract is to
            # return (0, error) rather than raise. This also matches the
            # boto3-based implementation of this method.
            return 0, ex

        return key.size, None
Example #2
0
    def _stream_write_internal(
        self,
        path,
        fp,
        content_type=None,
        content_encoding=None,
        cancel_on_error=True,
        size=filelike.READ_UNTIL_END,
    ):
        """
        Write the contents of the file-like stream *fp* to *path*, optionally capping the
        number of bytes consumed at *size*.

        Returns a *tuple* of (bytes_written, write_error) and does *not* raise (e.g. IOError)
        when the upload fails; callers must ALWAYS inspect the returned tuple.
        """
        # Minimum size of upload part size on S3 is 5MB
        self._initialize_cloud_conn()
        path = self._init_path(path)
        target = self.get_cloud_bucket().Object(path)

        # Only forward metadata arguments that were actually supplied.
        put_kwargs = {}
        if content_type is not None:
            put_kwargs["ContentType"] = content_type
        if content_encoding is not None:
            put_kwargs["ContentEncoding"] = content_encoding

        if size != filelike.READ_UNTIL_END:
            fp = filelike.StreamSlice(fp, 0, size)

        with BytesIO() as staging:
            # Stage the bytes into the buffer for use with the multipart upload file API
            written = self.stream_write_to_fp(fp, staging, size)
            staging.seek(0)

            # TODO figure out how to handle cancel_on_error=False
            try:
                target.put(Body=staging, **put_kwargs)
            except Exception as ex:
                return 0, ex

        return written, None
Example #3
0
 def _chunk_generator(self, chunk_list):
     """Lazily yield a bounded read stream for each chunk in *chunk_list*."""
     for piece in chunk_list:
         source = self.stream_read_file(piece.path)
         yield filelike.StreamSlice(source, 0, piece.length)