Example #1
 def drop_cache(self, fd, offset, length):
     """Method for no-oping buffer cache drop method."""
     if not self.keep_cache:
         drop_buffer_cache(fd, offset, length)
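The drop_buffer_cache helper these snippets call is not shown here. As a minimal stand-in, assuming Python 3.3+ on a POSIX system (the no-op fallback and the exact behavior are mine, not the original helper's implementation), it can be approximated with os.posix_fadvise:

 import os

 def drop_buffer_cache(fd, offset, length):
     """Ask the kernel to evict the given byte range of fd from the page cache."""
     if hasattr(os, 'posix_fadvise'):
         # POSIX_FADV_DONTNEED hints that the data will not be accessed again soon.
         os.posix_fadvise(fd, offset, length, os.POSIX_FADV_DONTNEED)
     # On platforms without posix_fadvise this is a silent no-op.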
Example #2
        with file.mkstemp() as (fd, tmppath):
            if 'content-length' in request.headers:
                fallocate(fd, int(request.headers['content-length']))
            reader = request.environ['wsgi.input'].read
            for chunk in iter(lambda: reader(self.network_chunk_size), ''):
                upload_size += len(chunk)
                if time.time() > upload_expiration:
                    return HTTPRequestTimeout(request=request)
                etag.update(chunk)
                while chunk:
                    written = os.write(fd, chunk)
                    chunk = chunk[written:]
                # For large files sync every 512MB (by default) written
                if upload_size - last_sync >= self.bytes_per_sync:
                    tpool.execute(os.fdatasync, fd)
                    drop_buffer_cache(fd, last_sync, upload_size - last_sync)
                    last_sync = upload_size

            if 'content-length' in request.headers and \
                    int(request.headers['content-length']) != upload_size:
                return Response(status='499 Client Disconnect')
            etag = etag.hexdigest()
            if 'etag' in request.headers and \
                    request.headers['etag'].lower() != etag:
                return HTTPUnprocessableEntity(request=request)
            metadata = {
                'X-Timestamp': request.headers['x-timestamp'],
                'Content-Type': request.headers['content-type'],
                'ETag': etag,
                'Content-Length': str(os.fstat(fd).st_size),
            }
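The excerpt above depends on surrounding state (upload_size, last_sync, etag, upload_expiration, self.bytes_per_sync) defined elsewhere. A self-contained sketch of the same pattern, chunked writes with a periodic fdatasync and page-cache drop, using only the standard library (write_stream and its parameters are illustrative names, assuming Python 3 on a POSIX system, not the original code):

 import hashlib
 import os
 import tempfile

 def write_stream(read_chunk, chunk_size=65536, bytes_per_sync=512 * 1024 * 1024):
     """Write a chunked stream to a temp file, syncing and dropping the
     page cache every bytes_per_sync bytes written."""
     etag = hashlib.md5()
     upload_size = 0
     last_sync = 0
     fd, path = tempfile.mkstemp()
     try:
         for chunk in iter(lambda: read_chunk(chunk_size), b''):
             upload_size += len(chunk)
             etag.update(chunk)
             # os.write may write fewer bytes than requested; loop until the chunk is flushed.
             while chunk:
                 written = os.write(fd, chunk)
                 chunk = chunk[written:]
             # For large streams, sync and drop the just-written span periodically.
             if upload_size - last_sync >= bytes_per_sync:
                 os.fdatasync(fd)
                 os.posix_fadvise(fd, last_sync, upload_size - last_sync,
                                  os.POSIX_FADV_DONTNEED)
                 last_sync = upload_size
     finally:
         os.close(fd)
     return path, upload_size, etag.hexdigest()

For example, write_stream(io.BytesIO(b'x' * (1 << 20)).read) writes one megabyte to a temp file and returns its path, byte count, and MD5 hex digest.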