Example #1
 def _chunked_upload(self, fp, size, progress_callback):
     url = self._client.get_url(self._url_template_content_chunked,
                                path=self.path)
     chunks = list(
         base.split_file_into_chunks(
             fp, size, self._upload_chunk_size))  # need count of chunks
     chunk_count = len(chunks)
     headers = {}
     for chunk_number, chunk in enumerate(chunks, 1):  # count from 1 not 0
         headers['x-egnyte-chunk-num'] = "%d" % chunk_number
         headers['content-length'] = str(chunk.size)
         if chunk_number == chunk_count:  # last chunk
             headers['x-egnyte-last-chunk'] = "true"
         retries = max(self._upload_retries, 1)
         while retries > 0:
             r = self._client.POST(url, data=chunk, headers=headers)
             server_sha = r.headers['x-egnyte-chunk-sha512-checksum']
             our_sha = chunk.sha.hexdigest()
             if server_sha == our_sha:
                 break
             retries -= 1
             # TODO: retry network errors too
             # TODO: refactor common parts of chunked and standard upload
         if retries == 0:
             raise exc.ChecksumError("Failed to upload file chunk", {
                 "chunk_number": chunk_number,
                 "start_position": chunk.position
             })
         exc.default.check_response(r)
         if chunk_number == 1:
             headers['x-egnyte-upload-id'] = r.headers['x-egnyte-upload-id']
         if progress_callback is not None:
             progress_callback(self, size,
                               chunk_number * self._upload_chunk_size)
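
Both variants on this page depend on base.split_file_into_chunks and on chunk objects that expose size, position and a sha hash; those helpers are not shown here. A rough sketch of what the loop expects (the _Chunk class and its internals are assumptions inferred from usage, not the egnyte sources):

    import hashlib

    class _Chunk:
        # Hypothetical stand-in for the objects yielded by base.split_file_into_chunks.
        # The real objects are also passed directly as the POST body, so they must be
        # readable (and rewindable for retries); that part is omitted here.
        def __init__(self, data, position):
            self.data = data
            self.position = position          # byte offset of the chunk within the file
            self.size = len(data)             # used for the content-length header
            self.sha = hashlib.sha512(data)   # compared with x-egnyte-chunk-sha512-checksum

    def split_file_into_chunks(fp, size, chunk_size):
        # Yield consecutive chunks of at most chunk_size bytes until size bytes are read.
        position = 0
        while position < size:
            data = fp.read(min(chunk_size, size - position))
            if not data:
                break
            yield _Chunk(data, position)
            position += len(data)
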
Example #2
 def _chunked_upload(self, fp, size, progress_callback):
     url = self._client.get_url(self._url_template_content_chunked, path=self.path)
     chunks = list(base.split_file_into_chunks(fp, size, self._upload_chunk_size))  # need count of chunks
     chunk_count = len(chunks)
     headers = {}
     for chunk_number, chunk in enumerate(chunks, 1):  # count from 1 not 0
         headers['x-egnyte-chunk-num'] = "%d" % chunk_number
         headers['content-length'] = str(chunk.size)  # header values must be strings
         if chunk_number == chunk_count:  # last chunk
             headers['x-egnyte-last-chunk'] = "true"
         retries = max(self._upload_retries, 1)
         while retries > 0:
             r = self._client.POST(url, data=chunk, headers=headers)
             server_sha = r.headers['x-egnyte-chunk-sha512-checksum']
             our_sha = chunk.sha.hexdigest()
             if server_sha == our_sha:
                 break
             retries -= 1
             # TODO: retry network errors too
             # TODO: refactor common parts of chunked and standard upload
         if retries == 0:
             raise exc.ChecksumError("Failed to upload file chunk", {"chunk_number": chunk_number, "start_position": chunk.position})
         exc.default.check_response(r)
         if chunk_number == 1:
             headers['x-egnyte-upload-id'] = r.headers['x-egnyte-upload-id']
         if progress_callback is not None:
             progress_callback(self, size, chunk_number * self._upload_chunk_size)
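
Both variants report progress after every chunk by calling progress_callback(self, size, chunk_number * chunk_size), so the last value can slightly overshoot size on the final chunk. A minimal callback matching that call shape might look like:

    def print_progress(file_obj, total_bytes, uploaded_bytes):
        # file_obj is the uploading object (it has a .path in these examples),
        # total_bytes is the full file size, uploaded_bytes the bytes sent so far.
        done = min(uploaded_bytes, total_bytes)
        pct = 100.0 * done / total_bytes if total_bytes else 100.0
        print("%s: %d/%d bytes (%.1f%%)" % (getattr(file_obj, 'path', '?'),
                                            done, total_bytes, pct))
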
Example #3
 def _chunked_upload(self, fp, size, threads, chunksize_mb,
                     progress_callback):
     url = self._client.get_url(self._url_template_content_chunked,
                                path=self.path)
     chunks = list(
         base.split_file_into_chunks(fp, size, chunksize_mb *
                                     MEGABYTES))  # need count of chunks
     chunk_count = len(chunks)
     # upload first chunk
     chunk = chunks.pop(0)  # pop the first chunk; pop() with no argument would take the last one
     headers = self._upload_single_chunk(url,
                                         1,
                                         chunk,
                                         size,
                                         chunksize_mb,
                                         progress_callback,
                                         is_last_chunk=chunk_count == 1)
     upload_id = headers['x-egnyte-upload-id']
     # upload intermediate chunks in separate threads
     if chunk_count > 2:
         n = 1
         # batch the intermediate chunks into groups of `threads` items
         for chunk_slice in six.moves.zip_longest(*[iter(chunks[:-1])] *
                                                  threads):
             workers = []  # separate name, so the `threads` parameter is not shadowed
             for c in chunk_slice:
                 if c:  # zip_longest pads the last batch with None
                     n += 1
                     workers.append(
                         threading.Thread(target=self._upload_single_chunk,
                                          args=(url, n, c, size,
                                                chunksize_mb,
                                                progress_callback),
                                          kwargs={'upload_id': upload_id}))
             for t in workers:
                 t.start()
             for t in workers:
                 t.join()
     # upload last chunk
     if chunk_count > 1:
         self._upload_single_chunk(url,
                                   chunk_count,
                                   chunks[-1],
                                   size,
                                   chunksize_mb,
                                   progress_callback,
                                   upload_id=upload_id,
                                   is_last_chunk=True)
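
Examples #3 and #4 delegate the per-chunk work to self._upload_single_chunk, which is not shown on this page. Judging from its call sites and from the inline loop in Examples #1 and #2, a sketch of such a helper (signature and behaviour assumed, retries omitted) could be:

    def _upload_single_chunk(self, url, chunk_number, chunk, size, chunksize_mb,
                             progress_callback, upload_id=None, is_last_chunk=False):
        # Hypothetical reconstruction based on the inline loop in Examples #1 and #2.
        headers = {'x-egnyte-chunk-num': str(chunk_number),
                   'content-length': str(chunk.size)}
        if upload_id is not None:
            headers['x-egnyte-upload-id'] = upload_id
        if is_last_chunk:
            headers['x-egnyte-last-chunk'] = "true"
        r = self._client.POST(url, data=chunk, headers=headers)
        exc.default.check_response(r)
        if r.headers['x-egnyte-chunk-sha512-checksum'] != chunk.sha.hexdigest():
            raise exc.ChecksumError("Failed to upload file chunk",
                                    {"chunk_number": chunk_number,
                                     "start_position": chunk.position})
        if progress_callback is not None:
            progress_callback(self, size, chunk_number * chunksize_mb * MEGABYTES)
        return r.headers  # the first chunk's response headers carry x-egnyte-upload-id
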
Example #4
 def _chunked_upload(self, fp, size, threads, chunksize_mb, progress_callback):
     url = self._client.get_url(self._url_template_content_chunked, path=self.path)
     chunks = list(base.split_file_into_chunks(fp, size, chunksize_mb*MEGABYTES))  # need count of chunks
     chunk_count = len(chunks)
     # upload first chunk
     chunk = chunks.pop(0)  # pop the first chunk; pop() with no argument would take the last one
     headers = self._upload_single_chunk(url, 1, chunk, size, chunksize_mb, progress_callback, is_last_chunk=chunk_count==1)
     upload_id = headers['x-egnyte-upload-id']
     # upload intermediate chunks in separate threads
     if chunk_count > 2:
         n = 1
         for chunk_slice in six.moves.zip_longest(*[iter(chunks[:-1])]*threads):  # batch intermediate chunks into groups of `threads` items
             workers = []  # separate name, so the `threads` parameter is not shadowed
             for c in chunk_slice:
                 if c:  # zip_longest pads the last batch with None
                     n += 1
                     workers.append(threading.Thread(target=self._upload_single_chunk, args=(url, n, c, size, chunksize_mb, progress_callback), kwargs={'upload_id': upload_id}))
             for t in workers:
                 t.start()
             for t in workers:
                 t.join()
     # upload last chunk
     if chunk_count > 1:
         self._upload_single_chunk(url, chunk_count, chunks[-1], size, chunksize_mb, progress_callback, upload_id=upload_id, is_last_chunk=True)
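
Examples #3 and #4 fan the intermediate chunks out to worker threads in batches: zip_longest(*[iter(seq)] * n) walks a single shared iterator n items per batch and pads the last batch with None (hence the if c: guard). A self-contained illustration of that grouping idiom:

    from itertools import zip_longest  # six.moves.zip_longest resolves to this on Python 3

    def grouper(seq, n):
        # The same iterator is repeated n times, so zip_longest pulls n consecutive
        # items per tuple and pads the final tuple with None.
        return zip_longest(*[iter(seq)] * n)

    print(list(grouper(range(7), 3)))
    # [(0, 1, 2), (3, 4, 5), (6, None, None)]
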