Example #1
File: batch.py  Project: nobmizue/ja_ocr
    def _finish_futures(self, responses):
        """Apply all the batch responses to the futures created.

        :type responses: list of (headers, payload) tuples.
        :param responses: List of headers and payloads from each response in
                          the batch.

        :raises: :class:`ValueError` if no requests have been deferred.
        """
        # If a bad status occurs, we track it, but don't raise an exception
        # until all futures have been populated.
        exception_args = None

        if len(self._target_objects) != len(responses):
            raise ValueError('Expected a response for every request.')

        for target_object, sub_response in zip(self._target_objects,
                                               responses):
            resp_headers, sub_payload = sub_response
            if not 200 <= resp_headers.status < 300:
                exception_args = exception_args or (resp_headers, sub_payload)
            elif target_object is not None:
                target_object._properties = sub_payload

        if exception_args is not None:
            raise make_exception(*exception_args)
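The deferred raise above relies on ``make_exception`` turning a non-2xx sub-response into the matching ``GoogleCloudError`` subclass. A minimal sketch, assuming the pre-1.0 layout where ``make_exception`` lives in ``google.cloud.exceptions``; the status and payload are illustrative:

import httplib2
from google.cloud.exceptions import GoogleCloudError, make_exception

# A failing sub-response: the headers object carries the status and the
# payload is the JSON error body for that request in the batch.
resp_headers = httplib2.Response({'status': 404})
sub_payload = '{"error": {"message": "No such object."}}'

exc = make_exception(resp_headers, sub_payload)
assert isinstance(exc, GoogleCloudError)
print(exc.code, exc.message)  # expected: 404 No such object.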
Example #2
    def _finish_futures(self, responses):
        """Apply all the batch responses to the futures created.

        :type responses: list of (headers, payload) tuples.
        :param responses: List of headers and payloads from each response in
                          the batch.

        :raises: :class:`ValueError` if no requests have been deferred.
        """
        # If a bad status occurs, we track it, but don't raise an exception
        # until all futures have been populated.
        exception_args = None

        if len(self._target_objects) != len(responses):
            raise ValueError('Expected a response for every request.')

        for target_object, sub_response in zip(self._target_objects,
                                               responses):
            resp_headers, sub_payload = sub_response
            if not 200 <= resp_headers.status < 300:
                exception_args = exception_args or (resp_headers,
                                                    sub_payload)
            elif target_object is not None:
                target_object._properties = sub_payload

        if exception_args is not None:
            raise make_exception(*exception_args)
Example #3
    def _call_fut(self, response, content, error_info=None, use_json=True):
        from google.cloud.exceptions import make_exception

        return make_exception(response,
                              content,
                              error_info=error_info,
                              use_json=use_json)
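The test helper above spells out ``make_exception``'s full signature. A small sketch of the non-JSON path, again assuming the pre-1.0 ``google.cloud.exceptions`` module; the URL passed as ``error_info`` is hypothetical:

import httplib2
from google.cloud.exceptions import make_exception

# use_json=False treats the body as plain text; error_info adds request
# context (callers below pass the request URL or method plus URL).
response = httplib2.Response({'status': 503})
exc = make_exception(response, 'backend unavailable',
                     error_info='POST https://example.invalid/upload',
                     use_json=False)
print(type(exc).__name__)  # expected: ServiceUnavailable
print(exc)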
Example #4
    def _request(self, project, method, data):
        """Make a request over the Http transport to the Cloud Datastore API.

        :type project: str
        :param project: The project to make the request for.

        :type method: str
        :param method: The API call method name (ie, ``runQuery``,
                       ``lookup``, etc)

        :type data: str
        :param data: The data to send with the API call.
                     Typically this is a serialized Protobuf string.

        :rtype: str
        :returns: The string response content from the API call.
        :raises: :class:`google.cloud.exceptions.GoogleCloudError` if the
                 response code is not 200 OK.
        """
        headers = {
            'Content-Type': 'application/x-protobuf',
            'Content-Length': str(len(data)),
            'User-Agent': self.connection.USER_AGENT,
        }
        headers, content = self.connection.http.request(
            uri=self.connection.build_api_url(project=project, method=method),
            method='POST', headers=headers, body=data)

        status = headers['status']
        if status != '200':
            error_status = status_pb2.Status.FromString(content)
            raise exceptions.make_exception(
                headers, error_status.message, use_json=False)

        return content
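On the error branch above the Datastore API returns a serialized ``google.rpc.Status`` protobuf rather than JSON, which is why ``use_json=False`` is passed. A standalone sketch of that round trip, assuming ``googleapis-common-protos`` is installed; the message text is made up:

from google.rpc import status_pb2

# Build and serialize a Status the way the server would...
error_status = status_pb2.Status(code=3, message='Invalid key path.')
wire_bytes = error_status.SerializeToString()

# ...then parse it back the way _request does before raising.
parsed = status_pb2.Status.FromString(wire_bytes)
print(parsed.code, parsed.message)  # 3 Invalid key path.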
Example #6
    def _check_response_error(request, http_response):
        """Helper for :meth:`upload_from_file`."""
        info = http_response.info
        status = int(info['status'])
        if not 200 <= status < 300:
            faux_response = httplib2.Response({'status': status})
            raise make_exception(faux_response, http_response.content,
                                 error_info=request.url)
Example #8
def _request(http, project, method, data, base_url):
    """Make a request over the Http transport to the Cloud Datastore API.

    :type http: :class:`~httplib2.Http`
    :param http: HTTP object to make requests.

    :type project: str
    :param project: The project to make the request for.

    :type method: str
    :param method: The API call method name (ie, ``runQuery``,
                   ``lookup``, etc)

    :type data: str
    :param data: The data to send with the API call.
                 Typically this is a serialized Protobuf string.

    :type base_url: str
    :param base_url: The base URL where the API lives.

    :rtype: str
    :returns: The string response content from the API call.
    :raises: :class:`google.cloud.exceptions.GoogleCloudError` if the
             response code is not 200 OK.
    """
    headers = {
        'Content-Type': 'application/x-protobuf',
        'Content-Length': str(len(data)),
        'User-Agent': connection_module.DEFAULT_USER_AGENT,
        connection_module.CLIENT_INFO_HEADER: _CLIENT_INFO,
    }
    api_url = build_api_url(project, method, base_url)
    headers, content = http.request(uri=api_url,
                                    method='POST',
                                    headers=headers,
                                    body=data)

    status = headers['status']
    if status != '200':
        error_status = status_pb2.Status.FromString(content)
        raise exceptions.make_exception(headers,
                                        error_status.message,
                                        use_json=False)

    return content
Example #9
def _raise_from_invalid_response(error, error_info=None):
    """Re-wrap and raise an ``InvalidResponse`` exception.

    :type error: :exc:`google.resumable_media.InvalidResponse`
    :param error: A caught exception from the ``google-resumable-media``
                  library.

    :type error_info: str
    :param error_info: (Optional) Extra information about the failed request.

    :raises: :class:`~google.cloud.exceptions.GoogleCloudError` corresponding
             to the failed status code
    """
    response = error.response
    faux_response = httplib2.Response({'status': response.status_code})
    raise make_exception(faux_response,
                         response.content,
                         error_info=error_info,
                         use_json=False)
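A usage sketch for the wrapper above. Instead of constructing a real ``google.resumable_media.InvalidResponse``, a stand-in exposes the only two attributes the helper reads, ``response.status_code`` and ``response.content``; the helper itself is assumed to be importable from the module shown:

from types import SimpleNamespace

# Stand-in for a caught InvalidResponse; only .response.status_code and
# .response.content are consumed by _raise_from_invalid_response.
fake_error = SimpleNamespace(
    response=SimpleNamespace(status_code=404, content=b'object not found'))

try:
    _raise_from_invalid_response(fake_error, error_info='GET /b/bucket/o/blob')
except Exception as exc:
    print(type(exc).__name__)  # expected: NotFound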
Example #10
def _error_result_to_exception(error_result):
    """Maps BigQuery error reasons to an exception.

    The reasons and their matching HTTP status codes are documented on
    the `troubleshooting errors`_ page.

    .. _troubleshooting errors: https://cloud.google.com/bigquery\
        /troubleshooting-errors

    :type error_result: Mapping[str, str]
    :param error_result: The error result from BigQuery.

    :rtype google.cloud.exceptions.GoogleCloudError:
    :returns: The mapped exception.
    """
    reason = error_result.get('reason')
    status_code = _ERROR_REASON_TO_EXCEPTION.get(
        reason, http_client.INTERNAL_SERVER_ERROR)
    # make_exception expects an httplib2 response object.
    fake_response = _FakeResponse(status=status_code)
    return exceptions.make_exception(fake_response,
                                     error_result.get('message', ''),
                                     error_info=error_result,
                                     use_json=False)
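The mapping above can be exercised without the private ``_FakeResponse`` helper, since any object with a ``status`` attribute satisfies ``make_exception``; per the troubleshooting-errors page the ``'notFound'`` reason corresponds to HTTP 404. A hedged sketch with a made-up error result:

import httplib2
from google.cloud.exceptions import make_exception

error_result = {'reason': 'notFound', 'message': 'Table my_table not found'}
faux_response = httplib2.Response({'status': 404})

exc = make_exception(faux_response, error_result.get('message', ''),
                     error_info=error_result, use_json=False)
print(type(exc).__name__, exc.code)  # expected: NotFound 404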
Example #11
    def api_request(self,
                    method,
                    path,
                    query_params=None,
                    data=None,
                    content_type=None,
                    headers=None,
                    api_base_url=None,
                    api_version=None,
                    expect_json=True,
                    _target_object=None):
        """Make a request over the HTTP transport to the API.

        You shouldn't need to use this method, but if you plan to
        interact with the API using these primitives, this is the
        correct one to use.

        :type method: str
        :param method: The HTTP method name (ie, ``GET``, ``POST``, etc).
                       Required.

        :type path: str
        :param path: The path to the resource (ie, ``'/b/bucket-name'``).
                     Required.

        :type query_params: dict or list
        :param query_params: A dictionary of keys and values (or list of
                             key-value pairs) to insert into the query
                             string of the URL.

        :type data: str
        :param data: The data to send as the body of the request. Default is
                     the empty string.

        :type content_type: str
        :param content_type: The proper MIME type of the data provided. Default
                             is None.

        :type headers: dict
        :param headers: extra HTTP headers to be sent with the request.

        :type api_base_url: str
        :param api_base_url: The base URL for the API endpoint.
                             Typically you won't have to provide this.
                             Default is the standard API base URL.

        :type api_version: str
        :param api_version: The version of the API to call.  Typically
                            you shouldn't provide this and instead use
                            the default for the library.  Default is the
                            latest API version supported by
                            google-cloud-python.

        :type expect_json: bool
        :param expect_json: If True, this method will try to parse the
                            response as JSON and raise an exception if
                            that cannot be done.  Default is True.

        :type _target_object: :class:`object`
        :param _target_object:
            (Optional) Protected argument to be used by library callers. This
            can allow custom behavior, for example, to defer an HTTP request
            and complete initialization of the object at a later time.

        :raises: Exception if the response code is not 200 OK.
        :rtype: dict or str
        :returns: The API response payload, either as a raw string or
                  a dictionary if the response is valid JSON.
        """
        url = self.build_api_url(path=path,
                                 query_params=query_params,
                                 api_base_url=api_base_url,
                                 api_version=api_version)

        # Making the executive decision that any dictionary
        # data will be sent properly as JSON.
        if data and isinstance(data, dict):
            data = json.dumps(data)
            content_type = 'application/json'

        response, content = self._make_request(method=method,
                                               url=url,
                                               data=data,
                                               content_type=content_type,
                                               headers=headers,
                                               target_object=_target_object)

        if not 200 <= response.status < 300:
            raise make_exception(response,
                                 content,
                                 error_info=method + ' ' + url)

        string_or_bytes = (six.binary_type, six.text_type)
        if content and expect_json and isinstance(content, string_or_bytes):
            content_type = response.get('content-type', '')
            if not content_type.startswith('application/json'):
                raise TypeError('Expected JSON, got %s' % content_type)
            if isinstance(content, six.binary_type):
                content = content.decode('utf-8')
            return json.loads(content)

        return content
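A hedged usage sketch of ``api_request`` through a storage client, assuming the pre-1.0 ``google-cloud-storage`` layout in which the client keeps its connection on ``_connection`` (the same attribute the ``upload_from_file`` example below uses); the bucket name is hypothetical and Application Default Credentials are assumed:

from google.cloud import storage

client = storage.Client()  # picks up Application Default Credentials

# Issues GET <api_base_url>/storage/<api_version>/b/my-bucket and decodes
# the JSON response into a dict.
bucket_resource = client._connection.api_request(method='GET',
                                                 path='/b/my-bucket')
print(bucket_resource['name'], bucket_resource.get('location'))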
Example #12
    def upload_from_file(self,
                         file_obj,
                         source_format,
                         rewind=False,
                         size=None,
                         num_retries=6,
                         allow_jagged_rows=None,
                         allow_quoted_newlines=None,
                         create_disposition=None,
                         encoding=None,
                         field_delimiter=None,
                         ignore_unknown_values=None,
                         max_bad_records=None,
                         quote_character=None,
                         skip_leading_rows=None,
                         write_disposition=None,
                         client=None,
                         job_name=None):
        """Upload the contents of this table from a file-like object.

        The content type of the upload will either be
        - The value passed in to the function (if any)
        - ``text/csv``.

        :type file_obj: file
        :param file_obj: A file handle opened in binary mode for reading.

        :type source_format: str
        :param source_format: one of 'CSV' or 'NEWLINE_DELIMITED_JSON'.
                              job configuration option; see
                              :meth:`google.cloud.bigquery.job.LoadJob`

        :type rewind: bool
        :param rewind: If True, seek to the beginning of the file handle before
                       writing the file to Cloud Storage.

        :type size: int
        :param size: The number of bytes to read from the file handle.
                     If not provided, we'll try to guess the size using
                     :func:`os.fstat`. (If the file handle is not from the
                     filesystem this won't be possible.)

        :type num_retries: int
        :param num_retries: Number of upload retries. Defaults to 6.

        :type allow_jagged_rows: bool
        :param allow_jagged_rows: job configuration option;  see
                                  :meth:`google.cloud.bigquery.job.LoadJob`.

        :type allow_quoted_newlines: bool
        :param allow_quoted_newlines: job configuration option; see
                                      :meth:`google.cloud.bigquery.job.LoadJob`.

        :type create_disposition: str
        :param create_disposition: job configuration option; see
                                   :meth:`google.cloud.bigquery.job.LoadJob`.

        :type encoding: str
        :param encoding: job configuration option; see
                         :meth:`google.cloud.bigquery.job.LoadJob`.

        :type field_delimiter: str
        :param field_delimiter: job configuration option; see
                                :meth:`google.cloud.bigquery.job.LoadJob`.

        :type ignore_unknown_values: bool
        :param ignore_unknown_values: job configuration option; see
                                      :meth:`google.cloud.bigquery.job.LoadJob`.

        :type max_bad_records: int
        :param max_bad_records: job configuration option; see
                                :meth:`google.cloud.bigquery.job.LoadJob`.

        :type quote_character: str
        :param quote_character: job configuration option; see
                                :meth:`google.cloud.bigquery.job.LoadJob`.

        :type skip_leading_rows: int
        :param skip_leading_rows: job configuration option; see
                                  :meth:`google.cloud.bigquery.job.LoadJob`.

        :type write_disposition: str
        :param write_disposition: job configuration option; see
                                  :meth:`google.cloud.bigquery.job.LoadJob`.

        :type client: :class:`~google.cloud.storage.client.Client` or
                      ``NoneType``
        :param client: Optional. The client to use.  If not passed, falls back
                       to the ``client`` stored on the current dataset.

        :type job_name: str
        :param job_name: Optional. The id of the job. Generated if not
                         explicitly passed in.

        :rtype: :class:`google.cloud.bigquery.jobs.LoadTableFromStorageJob`
        :returns: the job instance used to load the data (e.g., for
                  querying status). Note that the job is already started:
                  do not call ``job.begin()``.
        :raises: :class:`ValueError` if ``size`` is not passed in and can not
                 be determined, or if the ``file_obj`` can be detected to be
                 a file opened in text mode.
        """
        client = self._require_client(client)
        connection = client._connection
        content_type = 'application/octet-stream'

        # Rewind the file if desired.
        if rewind:
            file_obj.seek(0, os.SEEK_SET)

        mode = getattr(file_obj, 'mode', None)

        if mode is not None and mode not in ('rb', 'r+b', 'rb+'):
            raise ValueError(
                "Cannot upload files opened in text mode:  use "
                "open(filename, mode='rb') or open(filename, mode='r+b')")

        # Get the basic stats about the file.
        total_bytes = size
        if total_bytes is None:
            if hasattr(file_obj, 'fileno'):
                total_bytes = os.fstat(file_obj.fileno()).st_size
            else:
                raise ValueError('total bytes could not be determined. Please '
                                 'pass an explicit size.')
        headers = {
            'Accept': 'application/json',
            'Accept-Encoding': 'gzip, deflate',
            'User-Agent': connection.USER_AGENT,
            'content-type': 'application/json',
        }

        metadata = {
            'configuration': {
                'load': {
                    'sourceFormat': source_format,
                    'destinationTable': {
                        'projectId': self._dataset.project,
                        'datasetId': self._dataset.name,
                        'tableId': self.name,
                    }
                }
            }
        }

        if len(self._schema) > 0:
            load_config = metadata['configuration']['load']
            load_config['schema'] = {
                'fields': _build_schema_resource(self._schema)
            }

        _configure_job_metadata(metadata, allow_jagged_rows,
                                allow_quoted_newlines, create_disposition,
                                encoding, field_delimiter,
                                ignore_unknown_values, max_bad_records,
                                quote_character, skip_leading_rows,
                                write_disposition, job_name)

        upload = Upload(file_obj,
                        content_type,
                        total_bytes,
                        auto_transfer=False)

        url_builder = _UrlBuilder()
        upload_config = _UploadConfig()

        # Base URL may change once we know simple vs. resumable.
        base_url = connection.API_BASE_URL + '/upload'
        path = '/projects/%s/jobs' % (self._dataset.project, )
        upload_url = connection.build_api_url(api_base_url=base_url, path=path)

        # Use apitools 'Upload' facility.
        request = Request(upload_url,
                          'POST',
                          headers,
                          body=json.dumps(metadata))

        upload.configure_request(upload_config, request, url_builder)
        query_params = url_builder.query_params
        base_url = connection.API_BASE_URL + '/upload'
        request.url = connection.build_api_url(api_base_url=base_url,
                                               path=path,
                                               query_params=query_params)
        try:
            upload.initialize_upload(request, connection.http)
        except HttpError as err_response:
            faux_response = httplib2.Response(err_response.response)
            raise make_exception(faux_response,
                                 err_response.content,
                                 error_info=request.url)

        if upload.strategy == RESUMABLE_UPLOAD:
            http_response = upload.stream_file(use_chunks=True)
        else:
            http_response = make_api_request(connection.http,
                                             request,
                                             retries=num_retries)

        self._check_response_error(request, http_response)

        response_content = http_response.content
        if not isinstance(response_content,
                          six.string_types):  # pragma: NO COVER  Python3
            response_content = response_content.decode('utf-8')
        return client.job_from_resource(json.loads(response_content))
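A hedged usage sketch of ``upload_from_file``, assuming the pre-0.28 ``google-cloud-bigquery`` API in which datasets and tables are addressed via ``client.dataset(...)`` and ``dataset.table(...)``; the project, dataset, table, and file names are hypothetical:

from google.cloud import bigquery

client = bigquery.Client(project='my-project')
table = client.dataset('my_dataset').table('my_table')
table.reload()  # refresh metadata so any server-side schema is attached

with open('rows.csv', 'rb') as csv_file:  # must be opened in binary mode
    job = table.upload_from_file(csv_file, source_format='CSV',
                                 skip_leading_rows=1)

# The returned job is already started; poll it instead of calling begin().
print(job.state)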
Example #13
    def _call_fut(self, response, content, error_info=None, use_json=True):
        from google.cloud.exceptions import make_exception
        return make_exception(response, content, error_info=error_info,
                              use_json=use_json)
Example #14
    def api_request(self, method, path, query_params=None,
                    data=None, content_type=None,
                    api_base_url=None, api_version=None,
                    expect_json=True, _target_object=None):
        """Make a request over the HTTP transport to the API.

        You shouldn't need to use this method, but if you plan to
        interact with the API using these primitives, this is the
        correct one to use.

        :type method: string
        :param method: The HTTP method name (ie, ``GET``, ``POST``, etc).
                       Required.

        :type path: string
        :param path: The path to the resource (ie, ``'/b/bucket-name'``).
                     Required.

        :type query_params: dict or list
        :param query_params: A dictionary of keys and values (or list of
                             key-value pairs) to insert into the query
                             string of the URL.

        :type data: string
        :param data: The data to send as the body of the request. Default is
                     the empty string.

        :type content_type: string
        :param content_type: The proper MIME type of the data provided. Default
                             is None.

        :type api_base_url: string
        :param api_base_url: The base URL for the API endpoint.
                             Typically you won't have to provide this.
                             Default is the standard API base URL.

        :type api_version: string
        :param api_version: The version of the API to call.  Typically
                            you shouldn't provide this and instead use
                            the default for the library.  Default is the
                            latest API version supported by
                            google-cloud-python.

        :type expect_json: bool
        :param expect_json: If True, this method will try to parse the
                            response as JSON and raise an exception if
                            that cannot be done.  Default is True.

        :type _target_object: :class:`object` or :class:`NoneType`
        :param _target_object: Protected argument to be used by library
                               callers. This can allow custom behavior, for
                               example, to defer an HTTP request and complete
                               initialization of the object at a later time.

        :raises: Exception if the response code is not 200 OK.
        :rtype: dict or str
        :returns: The API response payload, either as a raw string or
                  a dictionary if the response is valid JSON.
        """
        url = self.build_api_url(path=path, query_params=query_params,
                                 api_base_url=api_base_url,
                                 api_version=api_version)

        # Making the executive decision that any dictionary
        # data will be sent properly as JSON.
        if data and isinstance(data, dict):
            data = json.dumps(data)
            content_type = 'application/json'

        response, content = self._make_request(
            method=method, url=url, data=data, content_type=content_type,
            target_object=_target_object)

        if not 200 <= response.status < 300:
            raise make_exception(response, content,
                                 error_info=method + ' ' + url)

        string_or_bytes = (six.binary_type, six.text_type)
        if content and expect_json and isinstance(content, string_or_bytes):
            content_type = response.get('content-type', '')
            if not content_type.startswith('application/json'):
                raise TypeError('Expected JSON, got %s' % content_type)
            if isinstance(content, six.binary_type):
                content = content.decode('utf-8')
            return json.loads(content)

        return content
Example #15
    def upload_from_file(self,
                         file_obj,
                         source_format,
                         rewind=False,
                         size=None,
                         num_retries=6,
                         allow_jagged_rows=None,
                         allow_quoted_newlines=None,
                         create_disposition=None,
                         encoding=None,
                         field_delimiter=None,
                         ignore_unknown_values=None,
                         max_bad_records=None,
                         quote_character=None,
                         skip_leading_rows=None,
                         write_disposition=None,
                         client=None):
        """Upload the contents of this table from a file-like object.

        The content type of the upload will either be
        - The value passed in to the function (if any)
        - ``text/csv``.

        :type file_obj: file
        :param file_obj: A file handle opened in binary mode for reading.

        :type source_format: str
        :param source_format: one of 'CSV' or 'NEWLINE_DELIMITED_JSON'.
                              job configuration option; see
                              :meth:`google.cloud.bigquery.job.LoadJob`

        :type rewind: bool
        :param rewind: If True, seek to the beginning of the file handle before
                       writing the file to Cloud Storage.

        :type size: int
        :param size: The number of bytes to read from the file handle.
                     If not provided, we'll try to guess the size using
                     :func:`os.fstat`. (If the file handle is not from the
                     filesystem this won't be possible.)

        :type num_retries: int
        :param num_retries: Number of upload retries. Defaults to 6.

        :type allow_jagged_rows: bool
        :param allow_jagged_rows: job configuration option;  see
                                  :meth:`google.cloud.bigquery.job.LoadJob`.

        :type allow_quoted_newlines: bool
        :param allow_quoted_newlines: job configuration option; see
                                      :meth:`google.cloud.bigquery.job.LoadJob`.

        :type create_disposition: str
        :param create_disposition: job configuration option; see
                                   :meth:`google.cloud.bigquery.job.LoadJob`.

        :type encoding: str
        :param encoding: job configuration option; see
                         :meth:`google.cloud.bigquery.job.LoadJob`.

        :type field_delimiter: str
        :param field_delimiter: job configuration option; see
                                :meth:`google.cloud.bigquery.job.LoadJob`.

        :type ignore_unknown_values: bool
        :param ignore_unknown_values: job configuration option; see
                                      :meth:`google.cloud.bigquery.job.LoadJob`.

        :type max_bad_records: int
        :param max_bad_records: job configuration option; see
                                :meth:`google.cloud.bigquery.job.LoadJob`.

        :type quote_character: str
        :param quote_character: job configuration option; see
                                :meth:`google.cloud.bigquery.job.LoadJob`.

        :type skip_leading_rows: int
        :param skip_leading_rows: job configuration option; see
                                  :meth:`google.cloud.bigquery.job.LoadJob`.

        :type write_disposition: str
        :param write_disposition: job configuration option; see
                                  :meth:`google.cloud.bigquery.job.LoadJob`.

        :type client: :class:`~google.cloud.storage.client.Client` or
                      ``NoneType``
        :param client: Optional. The client to use.  If not passed, falls back
                       to the ``client`` stored on the current dataset.

        :rtype: :class:`google.cloud.bigquery.jobs.LoadTableFromStorageJob`
        :returns: the job instance used to load the data (e.g., for
                  querying status). Note that the job is already started:
                  do not call ``job.begin()``.
        :raises: :class:`ValueError` if ``size`` is not passed in and can not
                 be determined, or if the ``file_obj`` can be detected to be
                 a file opened in text mode.
        """
        client = self._require_client(client)
        connection = client._connection
        content_type = 'application/octet-stream'

        # Rewind the file if desired.
        if rewind:
            file_obj.seek(0, os.SEEK_SET)

        mode = getattr(file_obj, 'mode', None)

        if mode is not None and mode not in ('rb', 'r+b', 'rb+'):
            raise ValueError(
                "Cannot upload files opened in text mode:  use "
                "open(filename, mode='rb') or open(filename, mode='r+b')")

        # Get the basic stats about the file.
        total_bytes = size
        if total_bytes is None:
            if hasattr(file_obj, 'fileno'):
                total_bytes = os.fstat(file_obj.fileno()).st_size
            else:
                raise ValueError('total bytes could not be determined. Please '
                                 'pass an explicit size.')
        headers = {
            'Accept': 'application/json',
            'Accept-Encoding': 'gzip, deflate',
            'User-Agent': connection.USER_AGENT,
            'content-type': 'application/json',
        }

        metadata = {
            'configuration': {
                'load': {
                    'sourceFormat': source_format,
                    'schema': {
                        'fields': _build_schema_resource(self._schema),
                    },
                    'destinationTable': {
                        'projectId': self._dataset.project,
                        'datasetId': self._dataset.name,
                        'tableId': self.name,
                    }
                }
            }
        }

        _configure_job_metadata(metadata, allow_jagged_rows,
                                allow_quoted_newlines, create_disposition,
                                encoding, field_delimiter,
                                ignore_unknown_values, max_bad_records,
                                quote_character, skip_leading_rows,
                                write_disposition)

        upload = Upload(file_obj, content_type, total_bytes,
                        auto_transfer=False)

        url_builder = _UrlBuilder()
        upload_config = _UploadConfig()

        # Base URL may change once we know simple vs. resumable.
        base_url = connection.API_BASE_URL + '/upload'
        path = '/projects/%s/jobs' % (self._dataset.project,)
        upload_url = connection.build_api_url(api_base_url=base_url, path=path)

        # Use apitools 'Upload' facility.
        request = Request(upload_url, 'POST', headers,
                          body=json.dumps(metadata))

        upload.configure_request(upload_config, request, url_builder)
        query_params = url_builder.query_params
        base_url = connection.API_BASE_URL + '/upload'
        request.url = connection.build_api_url(api_base_url=base_url,
                                               path=path,
                                               query_params=query_params)
        try:
            upload.initialize_upload(request, connection.http)
        except HttpError as err_response:
            faux_response = httplib2.Response(err_response.response)
            raise make_exception(faux_response, err_response.content,
                                 error_info=request.url)

        if upload.strategy == RESUMABLE_UPLOAD:
            http_response = upload.stream_file(use_chunks=True)
        else:
            http_response = make_api_request(connection.http, request,
                                             retries=num_retries)

        self._check_response_error(request, http_response)

        response_content = http_response.content
        if not isinstance(response_content,
                          six.string_types):  # pragma: NO COVER  Python3
            response_content = response_content.decode('utf-8')
        return client.job_from_resource(json.loads(response_content))