def upload_data(self, file_or_str):
    """
    Accepts a file-like object or string and uploads it. Files are
    automatically uploaded in 4mb chunks. Implements the tus protocol.
    """
    # Wrap plain strings in a file-like buffer so both inputs share one path.
    if isinstance(file_or_str, six.string_types):
        data = StringIO(file_or_str)
    else:
        data = file_or_str

    chunk_size = 4 * 1024 * 1024
    total_bytes = utils.file_length(data)

    # Open an upload session announcing the full payload size.
    request_raw('post', self._data_path,
                headers={'Entity-Length': str(total_bytes)})

    # Stream the payload one chunk at a time (tus PATCH semantics).
    for chunk, offset in utils.read_in_chunks(data, chunk_size):
        request_raw(
            'patch',
            self._data_path,
            headers={'Offset': str(offset),
                     'Content-Type': 'application/offset+octet-stream'},
            body=chunk,
        )

    # Finalize the upload session on the server.
    request_raw('post', self._commit_path)

    # Poll until server-side processing settles one way or the other.
    while True:
        status = self.upload_status
        if status.status in ('Processing Successful', 'Processing Failed'):
            return status
        time.sleep(analyzere.upload_poll_interval)
def test_file_length():
    # Length is computed from the whole buffer...
    stream = StringIO('foobar')
    assert utils.file_length(stream) == 6
    # ...and must not depend on the current read position.
    stream.seek(3)
    assert utils.file_length(stream) == 6
def upload_data(self, file_or_str, chunk_size=None, poll_interval=None,
                upload_callback=lambda x: None,
                commit_callback=lambda x: None):
    """
    Accepts a file-like object or string and uploads it. Files are
    automatically uploaded in chunks. The default chunk size is
    `analyzere.upload_chunk_size` and can be overwritten by specifying
    the number of bytes in the ``chunk_size`` variable.

    Accepts an optional poll_interval for temporarily overriding the
    default value `analyzere.upload_poll_interval`.

    Implements the tus protocol.

    Takes optional callbacks that return the percentage complete for the
    given "phase" of upload: upload/commit. Callback values are returned
    as 10.0 for 10%

    Raises TypeError if either callback argument is not callable.
    """
    # Resolve module-level defaults at call time. Using the module
    # attributes directly as `def`-time defaults would freeze their
    # values at import, silently ignoring later changes to
    # analyzere.upload_chunk_size / analyzere.upload_poll_interval.
    if chunk_size is None:
        chunk_size = analyzere.upload_chunk_size
    if poll_interval is None:
        poll_interval = analyzere.upload_poll_interval

    # Validate callbacks up front; TypeError is the conventional error
    # for a value of the wrong kind (and is still an Exception subclass
    # for any caller catching broadly).
    if not callable(upload_callback):
        raise TypeError('provided upload_callback is not callable')
    if not callable(commit_callback):
        raise TypeError('provided commit_callback is not callable')

    file_obj = (StringIO(file_or_str)
                if isinstance(file_or_str, six.string_types) else file_or_str)

    # Upload file with known entity size if file object supports random
    # access.
    length = None
    if hasattr(file_obj, 'seek'):
        length = utils.file_length(file_obj)
        request_raw('post', self._data_path,
                    headers={'Entity-Length': str(length)})
    else:
        request_raw('post', self._data_path)

    # Upload chunks, reporting percentage progress when the total size
    # is known (offset is the start of the chunk just sent).
    for chunk, offset in utils.read_in_chunks(file_obj, chunk_size):
        headers = {
            'Offset': str(offset),
            'Content-Type': 'application/offset+octet-stream'
        }
        request_raw('patch', self._data_path, headers=headers, body=chunk)
        if length:
            upload_callback(offset * 100.0 / length)
    upload_callback(100.0)

    # Commit the session
    request_raw('post', self._commit_path)

    # Block until data has finished processing, surfacing server-side
    # commit progress through commit_callback while we wait.
    while True:
        resp = self.upload_status
        if (resp.status == 'Processing Successful' or
                resp.status == 'Processing Failed'):
            commit_callback(100.0)
            return resp
        else:
            commit_callback(float(resp.commit_progress))
            time.sleep(poll_interval)