def _start(self):
     if self._check_if_bucket_exists():
         url = (
             f'https://www.googleapis.com/upload/storage/v1/b/'
             f'{self._bucket.name}/o?uploadType=resumable'
         )
         self._request = requests.ResumableUpload(
             upload_url=url, chunk_size=self._chunk_size
         )
         self._request.initiate(
             transport=self._transport,
             content_type='video/mp4',
             stream=self,
             stream_final=False,
             metadata={
                 'name': self._blob.name,
                 'metadata': {
                     'id': str(self._blob_aditional_info.get('id')),
                     'anime': str(self._blob_aditional_info.get('anime')),
                     'episode': str(self._blob_aditional_info.get('episode'))
                 }
             },
         )
     else:
         raise ValueError(f'Bucket {self._bucket.name} does not exist')
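The _check_if_bucket_exists helper is not part of this snippet. A minimal sketch, assuming self._bucket is a google.cloud.storage.Bucket and self._client is the storage client it was created from, could look like this:

def _check_if_bucket_exists(self):
    # Hypothetical helper, not from the original example: verify the target
    # bucket exists before initiating the resumable upload. Bucket.exists()
    # performs a GET on the bucket resource and returns False on a 404.
    return self._bucket.exists(client=self._client)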
Example #2
def _resumable_upload_helper(authorized_transport,
                             stream,
                             cleanup,
                             headers=None,
                             checksum=None):
    blob_name = os.path.basename(stream.name)
    # Make sure to clean up the uploaded blob when we are done.
    cleanup(blob_name, authorized_transport)
    check_does_not_exist(authorized_transport, blob_name)
    # Create the actual upload object.
    chunk_size = resumable_media.UPLOAD_CHUNK_SIZE
    upload = resumable_requests.ResumableUpload(utils.RESUMABLE_UPLOAD,
                                                chunk_size,
                                                headers=headers,
                                                checksum=checksum)
    # Initiate the upload.
    metadata = {u"name": blob_name, u"metadata": {u"direction": u"north"}}
    response = upload.initiate(authorized_transport, stream, metadata,
                               JPEG_CONTENT_TYPE)
    # Make sure ``initiate`` succeeded and did not mangle the stream.
    check_initiate(response, upload, stream, authorized_transport, metadata)
    # Actually upload the file in chunks.
    num_chunks = transmit_chunks(upload, authorized_transport, blob_name,
                                 metadata[u"metadata"])
    assert num_chunks == get_num_chunks(upload.total_bytes, chunk_size)
    # Download the content to make sure it's "working as expected".
    stream.seek(0)
    actual_contents = stream.read()
    check_content(blob_name,
                  actual_contents,
                  authorized_transport,
                  headers=headers)
    # Make sure the upload is tombstoned.
    check_tombstoned(upload, authorized_transport)
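get_num_chunks is a helper defined elsewhere in the test suite; the assertion above only needs the ceiling of total_bytes / chunk_size, so a minimal sketch could be:

def get_num_chunks(total_bytes, chunk_size):
    # Ceiling division: a trailing partial chunk still costs one request.
    num_chunks, remainder = divmod(total_bytes, chunk_size)
    if remainder > 0:
        num_chunks += 1
    return num_chunks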
Example #3
 def test_smaller_than_chunk_size(self, authorized_transport, bucket, cleanup):
     blob_name = os.path.basename(ICO_FILE)
     chunk_size = resumable_media.UPLOAD_CHUNK_SIZE
     # Make sure to clean up the uploaded blob when we are done.
     cleanup(blob_name, authorized_transport)
     check_does_not_exist(authorized_transport, blob_name)
     # Make sure the blob is smaller than the chunk size.
     total_bytes = os.path.getsize(ICO_FILE)
     assert total_bytes < chunk_size
     # Create the actual upload object.
     upload = resumable_requests.ResumableUpload(utils.RESUMABLE_UPLOAD, chunk_size)
     # Initiate the upload.
     metadata = {u"name": blob_name}
     with open(ICO_FILE, u"rb") as stream:
         response = upload.initiate(
             authorized_transport,
             stream,
             metadata,
             ICO_CONTENT_TYPE,
             stream_final=False,
         )
         # Make sure ``initiate`` succeeded and did not mangle the stream.
         check_initiate(response, upload, stream, authorized_transport, metadata)
         # Make sure total bytes was never set.
         assert upload.total_bytes is None
         # Make the **ONLY** request.
         response = upload.transmit_next_chunk(authorized_transport)
         self._check_range_sent(response, 0, total_bytes - 1, total_bytes)
         check_response(response, blob_name, total_bytes=total_bytes)
         # Download the content to make sure it's "working as expected".
         stream.seek(0)
         actual_contents = stream.read()
         check_content(blob_name, actual_contents, authorized_transport)
         # Make sure the upload is tombstoned.
         check_tombstoned(upload, authorized_transport)
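_check_range_sent is another helper from the same test class and is not shown here; a hedged sketch of the check it is expected to perform on the Content-Range header of the request that was just transmitted:

def _check_range_sent(self, response, start, end, total):
    # The transport attaches the prepared request to the response object.
    content_range = response.request.headers['content-range']
    if start is None and end is None:
        # A size-only header is sent when the final chunk carried no payload.
        assert content_range == 'bytes */{}'.format(total)
    else:
        assert content_range == 'bytes {}-{}/{}'.format(start, end, total)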
Example #4
def test_resumable_upload_bad_chunk_size(authorized_transport, img_stream):
    blob_name = os.path.basename(img_stream.name)
    # Create the actual upload object.
    upload = resumable_requests.ResumableUpload(
        utils.RESUMABLE_UPLOAD, resumable_media.UPLOAD_CHUNK_SIZE
    )
    # Modify the ``upload`` **after** construction so we can
    # use a bad chunk size.
    upload._chunk_size = 1024
    assert upload._chunk_size < resumable_media.UPLOAD_CHUNK_SIZE
    # Initiate the upload.
    metadata = {u"name": blob_name}
    response = upload.initiate(
        authorized_transport, img_stream, metadata, JPEG_CONTENT_TYPE
    )
    # Make sure ``initiate`` succeeded and did not mangle the stream.
    check_initiate(response, upload, img_stream, authorized_transport, metadata)
    # Make the first request and verify that it fails.
    check_bad_chunk(upload, authorized_transport)
    # Reset the chunk size (and the stream) and verify the "resumable"
    # URL is unusable.
    upload._chunk_size = resumable_media.UPLOAD_CHUNK_SIZE
    img_stream.seek(0)
    upload._invalid = False
    check_bad_chunk(upload, authorized_transport)
Example #5
def _resumable_upload_recover_helper(authorized_transport, cleanup,
                                     headers=None):
    blob_name = u'some-bytes.bin'
    chunk_size = resumable_media.UPLOAD_CHUNK_SIZE
    data = b'123' * chunk_size  # 3 chunks worth.
    # Make sure to clean up the uploaded blob when we are done.
    cleanup(blob_name, authorized_transport)
    check_does_not_exist(authorized_transport, blob_name)
    # Create the actual upload object.
    upload = resumable_requests.ResumableUpload(
        utils.RESUMABLE_UPLOAD, chunk_size, headers=headers)
    # Initiate the upload.
    metadata = {u'name': blob_name}
    stream = io.BytesIO(data)
    response = upload.initiate(
        authorized_transport, stream, metadata, BYTES_CONTENT_TYPE)
    # Make sure ``initiate`` succeeded and did not mangle the stream.
    check_initiate(response, upload, stream, authorized_transport, metadata)
    # Make the first request.
    response = upload.transmit_next_chunk(authorized_transport)
    assert response.status_code == resumable_media.PERMANENT_REDIRECT
    # Call upload.recover().
    sabotage_and_recover(upload, stream, authorized_transport, chunk_size)
    # Now stream what remains.
    num_chunks = transmit_chunks(
        upload, authorized_transport, blob_name, None,
        num_chunks=1, content_type=BYTES_CONTENT_TYPE)
    assert num_chunks == 3
    # Download the content to make sure it's "working as expected".
    actual_contents = stream.getvalue()
    check_content(
        blob_name, actual_contents, authorized_transport, headers=headers)
    # Make sure the upload is tombstoned.
    check_tombstoned(upload, authorized_transport)
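sabotage_and_recover is also defined elsewhere; a sketch of what it is assumed to do, namely corrupt the local upload state on purpose and then use ResumableUpload.recover() to resynchronize with the server:

def sabotage_and_recover(upload, stream, transport, chunk_size):
    assert upload.bytes_uploaded == chunk_size
    assert stream.tell() == chunk_size
    # Fake a failure: mark the upload invalid and rewind the stream.
    upload._invalid = True
    stream.seek(0)
    # recover() asks the server which bytes it has received, clears the
    # invalid flag and repositions the stream accordingly.
    upload.recover(transport)
    assert not upload.invalid
    assert upload.bytes_uploaded == chunk_size
    assert stream.tell() == chunk_size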
Example #6
 def start(self):
     url = (f'https://www.googleapis.com/upload/storage/v1/b/'
            f'{self._bucket.name}/o?uploadType=resumable')
     self._request = requests.ResumableUpload(upload_url=url,
                                              chunk_size=self._chunk_size)
     self._request.initiate(
         transport=self._transport,
         content_type='application/octet-stream',
         stream=self,
         stream_final=False,
         metadata={'name': self._blob.name},
     )
Example #7
 def test_interleave_writes(self, authorized_transport, bucket, cleanup,
                            checksum):
     blob_name = u"some-moar-stuff.bin"
     chunk_size = resumable_media.UPLOAD_CHUNK_SIZE
     # Make sure to clean up the uploaded blob when we are done.
     cleanup(blob_name, authorized_transport)
     check_does_not_exist(authorized_transport, blob_name)
     # Start out the blob as a single chunk (but we will add to it).
     stream = io.BytesIO(b"Z" * chunk_size)
     # Create the actual upload object.
     upload = resumable_requests.ResumableUpload(utils.RESUMABLE_UPLOAD,
                                                 chunk_size,
                                                 checksum=checksum)
     # Initiate the upload.
     metadata = {u"name": blob_name}
     response = upload.initiate(
         authorized_transport,
         stream,
         metadata,
         BYTES_CONTENT_TYPE,
         stream_final=False,
     )
     # Make sure ``initiate`` succeeded and did not mangle the stream.
     check_initiate(response, upload, stream, authorized_transport,
                    metadata)
     # Make sure total bytes was never set.
     assert upload.total_bytes is None
     # Make three requests.
     response0 = upload.transmit_next_chunk(authorized_transport)
     self._check_partial(upload, response0, chunk_size, 1)
     # Add another chunk before sending.
     self._add_bytes(stream, b"K" * chunk_size)
     response1 = upload.transmit_next_chunk(authorized_transport)
     self._check_partial(upload, response1, chunk_size, 2)
     # Add more bytes, but make sure less than a full chunk.
     last_chunk = 155
     self._add_bytes(stream, b"r" * last_chunk)
     response2 = upload.transmit_next_chunk(authorized_transport)
     assert upload.finished
     # Verify the "clean-up" request.
     total_bytes = 2 * chunk_size + last_chunk
     assert upload.bytes_uploaded == total_bytes
     check_response(
         response2,
         blob_name,
         actual_contents=stream.getvalue(),
         total_bytes=total_bytes,
         content_type=BYTES_CONTENT_TYPE,
     )
     self._check_range_sent(response2, 2 * chunk_size, total_bytes - 1,
                            total_bytes)
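_add_bytes is a small helper on the same test class; a sketch that matches its use above (append more data without disturbing the position the next chunk will be read from):

@staticmethod
def _add_bytes(stream, data):
    # The stream has been fully consumed, so the current position is also the
    # end of the buffer: writing here appends the new bytes.
    curr_pos = stream.tell()
    stream.write(data)
    # Restore the position so the next transmit_next_chunk() resumes correctly.
    stream.seek(curr_pos)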
Example #8
    def test_finish_at_chunk(self, authorized_transport, bucket, cleanup,
                             checksum):
        blob_name = u"some-clean-stuff.bin"
        chunk_size = resumable_media.UPLOAD_CHUNK_SIZE
        # Make sure to clean up the uploaded blob when we are done.
        cleanup(blob_name, authorized_transport)
        check_does_not_exist(authorized_transport, blob_name)
        # Make sure the blob size is an exact multiple of the chunk size.
        data = b"ab" * chunk_size
        total_bytes = len(data)
        stream = io.BytesIO(data)
        # Create the actual upload object.
        upload = resumable_requests.ResumableUpload(utils.RESUMABLE_UPLOAD,
                                                    chunk_size,
                                                    checksum=checksum)
        # Initiate the upload.
        metadata = {u"name": blob_name}
        response = upload.initiate(
            authorized_transport,
            stream,
            metadata,
            BYTES_CONTENT_TYPE,
            stream_final=False,
        )
        # Make sure ``initiate`` succeeded and did not mangle the stream.
        check_initiate(response, upload, stream, authorized_transport,
                       metadata)
        # Make sure total bytes was never set.
        assert upload.total_bytes is None
        # Make three requests.
        response0 = upload.transmit_next_chunk(authorized_transport)
        self._check_partial(upload, response0, chunk_size, 1)

        response1 = upload.transmit_next_chunk(authorized_transport)
        self._check_partial(upload, response1, chunk_size, 2)

        response2 = upload.transmit_next_chunk(authorized_transport)
        assert upload.finished
        # Verify the "clean-up" request.
        assert upload.bytes_uploaded == 2 * chunk_size
        check_response(
            response2,
            blob_name,
            actual_contents=data,
            total_bytes=total_bytes,
            content_type=BYTES_CONTENT_TYPE,
        )
        self._check_range_sent(response2, None, None, 2 * chunk_size)
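_check_partial is called after each intermediate chunk in the two tests above; a hedged sketch of what it is expected to verify:

def _check_partial(self, upload, response, chunk_size, chunks):
    # An intermediate chunk leaves the upload unfinished, advances
    # bytes_uploaded by exactly one chunk and answers with 308.
    assert not upload.finished
    assert upload.bytes_uploaded == chunks * chunk_size
    assert upload.total_bytes is None
    assert response.status_code == resumable_media.PERMANENT_REDIRECT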
Example #9
  def begin(self):
    """Begin the streaming process.

    This method opens the resumable request and creates the destination blob
    ready for use.
    """
    self._transport = AuthorizedSession(credentials=self._credentials)
    url = (f'https://www.googleapis.com/upload/storage/v1/b/'
           f'{self._bucket.name}/o?uploadType=resumable')
    self._request = requests.ResumableUpload(
        upload_url=url, chunk_size=self._chunk_size)
    self._request.initiate(
        transport=self._transport,
        content_type='application/octet-stream',
        stream=self,
        stream_final=False,
        metadata={'name': self._blob.name},
    )
Example #10
    def upload_resumable(self, verbose=False):
        """Upload a local file to a blob on GCP Storage using a
        resumable connection.
        
        Parameters
        ----------
        verbose : bool
            Print operation status.
        """

        url = (f'https://www.googleapis.com/upload/storage/v1/b/'
               f'{self.bucket_name}/o?uploadType=resumable')

        if verbose:
            print("BlobSync.upload_resumable to: {}".format(url))

        upload = requests.ResumableUpload(upload_url=url,
                                          chunk_size=self.chunk_size)

        stream = io.FileIO(self.local_filepath, mode='r')

        transport = AuthorizedSession(credentials=self.client._credentials)

        upload.initiate(transport=transport,
                        content_type='application/octet-stream',
                        stream=stream,
                        metadata={'name': self.blob_name})
        file_name = os.path.basename(self.local_filepath)

        # start upload
        dt = datetime.datetime.now().isoformat()
        f_size = os.path.getsize(self.local_filepath)
        log_line = [
            dt, "start", file_name, "", "", "", "", f_size, "",
            self.bucket_name
        ]
        self.log.append(log_line)
        if verbose:
            print(" ".join([str(s) for s in log_line]))

        # send the file one chunk of chunk_size at a time
        while not upload.finished:
            try:
                response = upload.transmit_next_chunk(transport)
                code = str(response.status_code)
                if code == "200":
                    dt = pd.Timestamp()
                    log_line = [
                        str(dt), "chunk", file_name, self.chunk_size, code, "",
                        "", "", "", ""
                    ]
                    self.log.append(log_line)
                    if verbose:
                        print(" ".join([str(s) for s in log_line]))
            except common.InvalidResponse:
                upload.recover(transport)
            time.sleep(self.delay)

        # log completion and return the upload object
        dt = datetime.datetime.now().isoformat()
        log_line = [
            str(dt), "done", file_name, "", "", upload.bytes_uploaded,
            upload.total_bytes, "", "", ""
        ]
        self.log.append(log_line)
        if verbose:
            print(" ".join([str(s) for s in log_line]))
        return upload
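Stripped of logging, the transmit/recover loop above reduces to the following pattern (assuming an already-initiated google.resumable_media.requests.ResumableUpload and an AuthorizedSession transport):

from google.resumable_media import common

def drain_upload(upload, transport):
    """Send the remaining chunks, resynchronizing with the server on failure."""
    while not upload.finished:
        try:
            upload.transmit_next_chunk(transport)
        except common.InvalidResponse:
            # The failed chunk marked the upload invalid; recover() queries the
            # server for the confirmed byte offset and repositions the stream.
            upload.recover(transport)
    return upload.bytes_uploaded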