def _api_core_retry_to_resumable_media_retry(retry, num_retries=None):
    """Translate a ``google.api_core`` retry into a resumable-media strategy.

    Custom predicates are not translated.

    :type retry: google.api_core.Retry
    :param retry: (Optional) The google.api_core.Retry object to translate.

    :type num_retries: int
    :param num_retries: (Optional) The number of retries desired. This is
        supported for backwards compatibility and is mutually exclusive with
        `retry`.

    :rtype: google.resumable_media.RetryStrategy
    :returns: A RetryStrategy with all applicable attributes copied from
              input, or a RetryStrategy with max_retries set to 0 if None
              was input.
    """
    if retry is not None:
        if num_retries is not None:
            raise ValueError(
                "num_retries and retry arguments are mutually exclusive")
        # Copy the timing attributes of the api_core retry onto the
        # resumable-media strategy; custom predicates cannot be carried over.
        return resumable_media.RetryStrategy(
            max_sleep=retry._maximum,
            max_cumulative_retry=retry._deadline,
            initial_delay=retry._initial,
            multiplier=retry._multiplier,
        )

    if num_retries is not None:
        # Legacy integer count: cap the number of attempts directly.
        return resumable_media.RetryStrategy(max_retries=num_retries)

    # Neither argument was supplied: disable retries entirely.
    return resumable_media.RetryStrategy(max_retries=0)
def _do_multipart_upload(self, client, stream, metadata, size, num_retries):
    """Perform a multipart upload.

    :type client: :class:`~google.cloud.bigquery.client.Client`
    :param client: The client to use.

    :type stream: IO[bytes]
    :param stream: A bytes IO object open for reading.

    :type metadata: dict
    :param metadata: The metadata associated with the upload.

    :type size: int
    :param size: The number of bytes to be uploaded (which will be read
        from ``stream``). If :data:`None`, the remainder of ``stream`` is
        read and uploaded in full.

    :type num_retries: int
    :param num_retries: Number of upload retries. (Deprecated: This
        argument will be removed in a future release.)

    :rtype: :class:`~requests.Response`
    :returns: The "200 OK" response object returned after the multipart
              upload request.
    :raises: :exc:`ValueError` if the ``stream`` has fewer than ``size``
        bytes remaining.
    """
    if size is None:
        # The docstring has always promised that a missing size means
        # "upload until the stream is exhausted", but ``len(data) < None``
        # used to raise TypeError.  Read everything and skip the
        # short-read check, which is meaningless without a target size.
        data = stream.read()
    else:
        data = stream.read(size)
        if len(data) < size:
            msg = _READ_LESS_THAN_SIZE.format(size, len(data))
            raise ValueError(msg)

    transport = self._get_transport(client)
    headers = _get_upload_headers(client._connection.USER_AGENT)

    upload_url = _MULTIPART_URL_TEMPLATE.format(project=self.project)
    upload = MultipartUpload(upload_url, headers=headers)

    if num_retries is not None:
        # Deprecated path: translate the bare retry count into the
        # strategy object used by google-resumable-media.
        upload._retry_strategy = resumable_media.RetryStrategy(
            max_retries=num_retries)

    response = upload.transmit(
        transport, data, metadata, _GENERIC_CONTENT_TYPE)

    return response
def _initiate_resumable_upload(self, client, stream, metadata, num_retries):
    """Initiate a resumable upload.

    :type client: :class:`~google.cloud.bigquery.client.Client`
    :param client: The client to use.

    :type stream: IO[bytes]
    :param stream: A bytes IO object open for reading.

    :type metadata: dict
    :param metadata: The metadata associated with the upload.

    :type num_retries: int
    :param num_retries: Number of upload retries. (Deprecated: This
        argument will be removed in a future release.)

    :rtype: tuple
    :returns:
        Pair of

        * The :class:`~google.resumable_media.requests.ResumableUpload`
          that was created
        * The ``transport`` used to initiate the upload.
    """
    transport = self._get_transport(client)
    headers = _get_upload_headers(client._connection.USER_AGENT)
    upload_url = _RESUMABLE_URL_TEMPLATE.format(project=self.project)

    upload = ResumableUpload(
        upload_url, _DEFAULT_CHUNKSIZE, headers=headers)

    if num_retries is not None:
        # Deprecated path: translate the bare retry count into the
        # strategy object used by google-resumable-media.
        upload._retry_strategy = resumable_media.RetryStrategy(
            max_retries=num_retries)

    # The stream is not final yet: chunks are sent as the caller feeds
    # the upload, so the total size is left open here.
    upload.initiate(
        transport, stream, metadata, _GENERIC_CONTENT_TYPE,
        stream_final=False)

    return upload, transport