Code example #1
File: transfer.py  Project: kwoodson/gcloud-python
    def initialize_download(self, http_request, http):
        """Initialize this download.

        If the instance has :attr:`auto_transfer` enabled, begins the
        download immediately.

        :type http_request: :class:`~.streaming.http_wrapper.Request`
        :param http_request: the request to use to initialize this download.

        :type http: :class:`httplib2.Http` (or workalike)
        :param http: Http instance for this request.
        """
        self._ensure_uninitialized()
        url = http_request.url
        if self.auto_transfer:
            end_byte = self._compute_end_byte(0)
            self._set_range_header(http_request, 0, end_byte)
            response = make_api_request(
                self.bytes_http or http, http_request)
            if response.status_code not in self._ACCEPTABLE_STATUSES:
                raise HttpError.from_response(response)
            self._initial_response = response
            self._set_total(response.info)
            url = response.info.get('content-location', response.request_url)
        self._initialize(http, url)
        # Unless the user has requested otherwise, we want to just
        # go ahead and pump the bytes now.
        if self.auto_transfer:
            self.stream_file(use_chunks=True)
Code example #2
    def initialize_download(self, http_request, http):
        """Initialize this download.

        If the instance has :attr:`auto_transfer` enabled, begins the
        download immediately.

        :type http_request: :class:`~.streaming.http_wrapper.Request`
        :param http_request: the request to use to initialize this download.

        :type http: :class:`httplib2.Http` (or workalike)
        :param http: Http instance for this request.
        """
        self._ensure_uninitialized()
        url = http_request.url
        if self.auto_transfer:
            end_byte = self._compute_end_byte(0)
            self._set_range_header(http_request, 0, end_byte)
            response = make_api_request(self.bytes_http or http, http_request)
            if response.status_code not in self._ACCEPTABLE_STATUSES:
                raise HttpError.from_response(response)
            self._initial_response = response
            self._set_total(response.info)
            url = response.info.get('content-location', response.request_url)
        self._initialize(http, url)
        # Unless the user has requested otherwise, we want to just
        # go ahead and pump the bytes now.
        if self.auto_transfer:
            self.stream_file(use_chunks=True, headers=http_request.headers)
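
The only functional difference between the two variants above is that the newer one forwards the initial request's headers into ``stream_file``. Below is a minimal sketch of the ranged first request that ``initialize_download`` issues when ``auto_transfer`` is enabled; it assumes ``Request`` exposes a mutable ``headers`` mapping (as ``_set_range_header`` implies) and uses a hypothetical URL:

    import httplib2
    from google.cloud.streaming.http_wrapper import Request, make_api_request

    http = httplib2.Http()
    request = Request(url='https://example.com/large-file')  # hypothetical URL
    # Ask for the first chunk only, mirroring _set_range_header(request, 0, end).
    request.headers['range'] = 'bytes=0-1048575'
    response = make_api_request(http, request)
    if response.status_code not in (200, 206):     # OK / Partial Content
        raise RuntimeError('unexpected status: %d' % response.status_code)
    print('%d bytes received' % len(response.content))
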
Code example #3
File: transfer.py  Project: kwoodson/gcloud-python
    def _send_media_request(self, request, end):
        """Peform API upload request.

        Helper for _send_media_body & _send_chunk:

        :type request: :class:`google.cloud.streaming.http_wrapper.Request`
        :param request: the request to upload

        :type end: integer
        :param end: end byte of the range to be uploaded.

        :rtype: :class:`google.cloud.streaming.http_wrapper.Response`
        :returns: the response
        :raises: :exc:`~.streaming.exceptions.HttpError` if the status
                 code from the response indicates an error.
        """
        response = make_api_request(
            self.bytes_http, request, retries=self.num_retries)
        if response.status_code not in (http_client.OK, http_client.CREATED,
                                        RESUME_INCOMPLETE):
            # We want to reset our state to wherever the server left us
            # before this failed request, and then raise.
            self.refresh_upload_state()
            raise HttpError.from_response(response)
        if response.status_code == RESUME_INCOMPLETE:
            last_byte = self._last_byte(
                self._get_range_header(response))
            if last_byte + 1 != end:
                self.stream.seek(last_byte)
        return response
Code example #4
    def _send_media_request(self, request, end):
        """Peform API upload request.

        Helper for _send_media_body & _send_chunk:

        :type request: :class:`google.cloud.streaming.http_wrapper.Request`
        :param request: the request to upload

        :type end: int
        :param end: end byte of the range to be uploaded.

        :rtype: :class:`google.cloud.streaming.http_wrapper.Response`
        :returns: the response
        :raises: :exc:`~.streaming.exceptions.HttpError` if the status
                 code from the response indicates an error.
        """
        response = make_api_request(self.bytes_http,
                                    request,
                                    retries=self.num_retries)
        if response.status_code not in (http_client.OK, http_client.CREATED,
                                        RESUME_INCOMPLETE):
            # We want to reset our state to wherever the server left us
            # before this failed request, and then raise.
            self.refresh_upload_state()
            raise HttpError.from_response(response)
        if response.status_code == RESUME_INCOMPLETE:
            last_byte = self._last_byte(self._get_range_header(response))
            if last_byte + 1 != end:
                self.stream.seek(last_byte)
        return response
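
A 308 (``RESUME_INCOMPLETE``) response is the server's way of saying it persisted only part of the chunk; its ``Range`` header names the bytes it kept. A self-contained sketch of that bookkeeping, with the header format assumed from the resumable-upload protocol (``_last_byte`` itself is not shown in these examples):

    def last_byte(range_header):
        # A 308 response carries e.g. 'Range: bytes=0-524287'; the number
        # after the dash is the last byte the server actually stored.
        _, _, end = range_header.rpartition('-')
        return int(end)

    sent_end = 1048576                        # we tried to upload up to here
    stored = last_byte('bytes=0-524287')      # hypothetical server reply
    if stored + 1 != sent_end:
        # Mirrors ``self.stream.seek(last_byte)`` above: rewind so the next
        # chunk re-sends the bytes the server never saw.
        print('rewinding stream to byte', stored)
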
Code example #5
    def _get_chunk(self, start, end):
        """Retrieve a chunk of the file.

        :type start: integer
        :param start: start byte of the range.

        :type end: integer or None
        :param end: end byte of the range.

        :rtype: :class:`google.cloud.streaming.http_wrapper.Response`
        :returns: response from the chunk request.
        """
        self._ensure_initialized()
        request = Request(url=self.url)
        self._set_range_header(request, start, end=end)
        return make_api_request(
            self.bytes_http, request, retries=self.num_retries)
Code example #6
    def refresh_upload_state(self):
        """Refresh the state of a resumable upload via query to the back-end.
        """
        if self.strategy != RESUMABLE_UPLOAD:
            return
        self._ensure_initialized()
        # NOTE: Per RFC 2616[1]/7231[2], a 'PUT' request is inappropriate
        #       here:  it is intended to be used to replace the entire
        #       resource, not to query for a status.
        #
        #       If the back-end doesn't provide a way to query for this state
        #       via a 'GET' request, somebody should be spanked.
        #
        #       The violation is documented[3].
        #
        # [1] http://www.w3.org/Protocols/rfc2616/rfc2616-sec9.html#sec9.6
        # [2] http://tools.ietf.org/html/rfc7231#section-4.3.4
        # [3]
        # https://cloud.google.com/storage/docs/json_api/v1/how-tos/upload#resume-upload
        refresh_request = Request(url=self.url,
                                  http_method='PUT',
                                  headers={'Content-Range': 'bytes */*'})
        refresh_response = make_api_request(self.http,
                                            refresh_request,
                                            redirections=0,
                                            retries=self.num_retries)
        range_header = self._get_range_header(refresh_response)
        if refresh_response.status_code in (http_client.OK,
                                            http_client.CREATED):
            self._complete = True
            self._progress = self.total_size
            self.stream.seek(self.progress)
            # If we're finished, the refresh response will contain the
            # metadata originally requested. Cache it so it can be returned
            # in StreamInChunks.
            self._final_response = refresh_response
        elif refresh_response.status_code == RESUME_INCOMPLETE:
            if range_header is None:
                self._progress = 0
            else:
                self._progress = self._last_byte(range_header) + 1
            self.stream.seek(self.progress)
        else:
            raise HttpError.from_response(refresh_response)
Code example #7
File: transfer.py  Project: kwoodson/gcloud-python
    def refresh_upload_state(self):
        """Refresh the state of a resumable upload via query to the back-end.
        """
        if self.strategy != RESUMABLE_UPLOAD:
            return
        self._ensure_initialized()
        # NOTE: Per RFC 2616[1]/7231[2], a 'PUT' request is inappropriate
        #       here:  it is intended to be used to replace the entire
        #       resource, not to query for a status.
        #
        #       If the back-end doesn't provide a way to query for this state
        #       via a 'GET' request, somebody should be spanked.
        #
        #       The violation is documented[3].
        #
        # [1] http://www.w3.org/Protocols/rfc2616/rfc2616-sec9.html#sec9.6
        # [2] http://tools.ietf.org/html/rfc7231#section-4.3.4
        # [3]
        # https://cloud.google.com/storage/docs/json_api/v1/how-tos/upload#resume-upload
        refresh_request = Request(
            url=self.url, http_method='PUT',
            headers={'Content-Range': 'bytes */*'})
        refresh_response = make_api_request(
            self.http, refresh_request, redirections=0,
            retries=self.num_retries)
        range_header = self._get_range_header(refresh_response)
        if refresh_response.status_code in (http_client.OK,
                                            http_client.CREATED):
            self._complete = True
            self._progress = self.total_size
            self.stream.seek(self.progress)
            # If we're finished, the refresh response will contain the
            # metadata originally requested. Cache it so it can be returned
            # in StreamInChunks.
            self._final_response = refresh_response
        elif refresh_response.status_code == RESUME_INCOMPLETE:
            if range_header is None:
                self._progress = 0
            else:
                self._progress = self._last_byte(range_header) + 1
            self.stream.seek(self.progress)
        else:
            raise HttpError.from_response(refresh_response)
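
``refresh_upload_state`` distinguishes three outcomes of the ``Content-Range: bytes */*`` probe: done (200/201), partially stored (308 plus an optional ``Range`` header), or an error. A standalone sketch of that decision, with the status codes written as plain ints (308 is the non-standard "Resume Incomplete" code behind ``RESUME_INCOMPLETE``):

    OK, CREATED, RESUME_INCOMPLETE = 200, 201, 308

    def interpret_refresh(status_code, range_header):
        if status_code in (OK, CREATED):
            return 'complete'
        if status_code == RESUME_INCOMPLETE:
            if range_header is None:
                return 'restart from byte 0'
            last_byte = int(range_header.rpartition('-')[2])
            return 'resume from byte %d' % (last_byte + 1)
        raise RuntimeError('upload is in an error state: %d' % status_code)

    print(interpret_refresh(308, 'bytes=0-999999'))  # resume from byte 1000000
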
Code example #8
File: transfer.py  Project: Fkawala/gcloud-python
    def _get_chunk(self, start, end, headers=None):
        """Retrieve a chunk of the file.

        :type start: int
        :param start: start byte of the range.

        :type end: int
        :param end: (Optional) end byte of the range.

        :type headers: dict
        :param headers: (Optional) Headers to be used for the ``Request``.

        :rtype: :class:`google.cloud.streaming.http_wrapper.Response`
        :returns: response from the chunk request.
        """
        self._ensure_initialized()
        request = Request(url=self.url, headers=headers)
        self._set_range_header(request, start, end=end)
        return make_api_request(
            self.bytes_http, request, retries=self.num_retries)
Code example #9
    def _get_chunk(self, start, end, headers=None):
        """Retrieve a chunk of the file.

        :type start: int
        :param start: start byte of the range.

        :type end: int
        :param end: (Optional) end byte of the range.

        :type headers: dict
        :param headers: (Optional) Headers to be used for the ``Request``.

        :rtype: :class:`google.cloud.streaming.http_wrapper.Response`
        :returns: response from the chunk request.
        """
        self._ensure_initialized()
        request = Request(url=self.url, headers=headers)
        self._set_range_header(request, start, end=end)
        return make_api_request(self.bytes_http,
                                request,
                                retries=self.num_retries)
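
A hypothetical driver loop showing how ``_get_chunk`` might be called repeatedly; this stands in for the library's ``stream_file`` machinery rather than reproducing it:

    CHUNK_SIZE = 1048576
    start, total_size = 0, 3 * CHUNK_SIZE + 512

    while start < total_size:
        end = min(start + CHUNK_SIZE, total_size) - 1
        # On a real, initialized Download instance this would be:
        #     response = download._get_chunk(start, end, headers=None)
        print('would request bytes %d-%d' % (start, end))
        start = end + 1
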
Code example #10
    def initialize_upload(self, http_request, http):
        """Initialize this upload from the given http_request.

        :type http_request: :class:`~.streaming.http_wrapper.Request`
        :param http_request: the request to be used

        :type http: :class:`httplib2.Http` (or workalike)
        :param http: Http instance for this request.

        :raises: :exc:`ValueError` if the instance has not been configured
                 with a strategy.
        :rtype: :class:`~google.cloud.streaming.http_wrapper.Response`
        :returns: The response if the upload is resumable and auto transfer
                  is not used.
        """
        if self.strategy is None:
            raise ValueError(
                'No upload strategy set; did you call configure_request?')
        if self.strategy != RESUMABLE_UPLOAD:
            return
        self._ensure_uninitialized()
        http_response = make_api_request(http,
                                         http_request,
                                         retries=self.num_retries)
        if http_response.status_code != http_client.OK:
            raise HttpError.from_response(http_response)

        granularity = http_response.info.get('X-Goog-Upload-Chunk-Granularity')
        if granularity is not None:
            granularity = int(granularity)
        self._server_chunk_granularity = granularity
        url = http_response.info['location']
        self._initialize(http, url)

        # Unless the user has requested otherwise, we want to just
        # go ahead and pump the bytes now.
        if self.auto_transfer:
            return self.stream_file(use_chunks=True)
        else:
            return http_response
Code example #11
File: transfer.py  Project: kwoodson/gcloud-python
    def initialize_upload(self, http_request, http):
        """Initialize this upload from the given http_request.

        :type http_request: :class:`~.streaming.http_wrapper.Request`
        :param http_request: the request to be used

        :type http: :class:`httplib2.Http` (or workalike)
        :param http: Http instance for this request.

        :raises: :exc:`ValueError` if the instance has not been configured
                 with a strategy.
        :rtype: :class:`~google.cloud.streaming.http_wrapper.Response`
        :returns: The response if the upload is resumable and auto transfer
                  is not used.
        """
        if self.strategy is None:
            raise ValueError(
                'No upload strategy set; did you call configure_request?')
        if self.strategy != RESUMABLE_UPLOAD:
            return
        self._ensure_uninitialized()
        http_response = make_api_request(http, http_request,
                                         retries=self.num_retries)
        if http_response.status_code != http_client.OK:
            raise HttpError.from_response(http_response)

        granularity = http_response.info.get('X-Goog-Upload-Chunk-Granularity')
        if granularity is not None:
            granularity = int(granularity)
        self._server_chunk_granularity = granularity
        url = http_response.info['location']
        self._initialize(http, url)

        # Unless the user has requested otherwise, we want to just
        # go ahead and pump the bytes now.
        if self.auto_transfer:
            return self.stream_file(use_chunks=True)
        else:
            return http_response
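
The initiation POST of a resumable upload answers with the session URL in its ``location`` header, optionally alongside a chunk-granularity hint. A standalone sketch of the extraction performed above, with ``info`` standing in for ``http_response.info`` and hypothetical values:

    info = {
        'location': 'https://example.com/upload/session-123',  # hypothetical
        'X-Goog-Upload-Chunk-Granularity': '262144',
    }
    granularity = info.get('X-Goog-Upload-Chunk-Granularity')
    if granularity is not None:
        granularity = int(granularity)   # server's preferred chunk multiple
    url = info['location']               # all further chunk PUTs go here
    print(url, granularity)
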
Code example #12
File: table.py  Project: sonlac/google-cloud-python
    def upload_from_file(self,
                         file_obj,
                         source_format,
                         rewind=False,
                         size=None,
                         num_retries=6,
                         allow_jagged_rows=None,
                         allow_quoted_newlines=None,
                         create_disposition=None,
                         encoding=None,
                         field_delimiter=None,
                         ignore_unknown_values=None,
                         max_bad_records=None,
                         quote_character=None,
                         skip_leading_rows=None,
                         write_disposition=None,
                         client=None,
                         job_name=None):
        """Upload the contents of this table from a file-like object.

        The content type of the upload will be either:

        - The value passed in to the function (if any)
        - ``text/csv``.

        :type file_obj: file
        :param file_obj: A file handle opened in binary mode for reading.

        :type source_format: str
        :param source_format: one of 'CSV' or 'NEWLINE_DELIMITED_JSON'.
                              job configuration option; see
                              :meth:`google.cloud.bigquery.job.LoadJob`

        :type rewind: bool
        :param rewind: If True, seek to the beginning of the file handle before
                       writing the file to Cloud Storage.

        :type size: int
        :param size: The number of bytes to read from the file handle.
                     If not provided, we'll try to guess the size using
                     :func:`os.fstat`. (If the file handle is not from the
                     filesystem this won't be possible.)

        :type num_retries: int
        :param num_retries: Number of upload retries. Defaults to 6.

        :type allow_jagged_rows: bool
        :param allow_jagged_rows: job configuration option;  see
                                  :meth:`google.cloud.bigquery.job.LoadJob`.

        :type allow_quoted_newlines: bool
        :param allow_quoted_newlines: job configuration option; see
                                      :meth:`google.cloud.bigquery.job.LoadJob`.

        :type create_disposition: str
        :param create_disposition: job configuration option; see
                                   :meth:`google.cloud.bigquery.job.LoadJob`.

        :type encoding: str
        :param encoding: job configuration option; see
                         :meth:`google.cloud.bigquery.job.LoadJob`.

        :type field_delimiter: str
        :param field_delimiter: job configuration option; see
                                :meth:`google.cloud.bigquery.job.LoadJob`.

        :type ignore_unknown_values: bool
        :param ignore_unknown_values: job configuration option; see
                                      :meth:`google.cloud.bigquery.job.LoadJob`.

        :type max_bad_records: int
        :param max_bad_records: job configuration option; see
                                :meth:`google.cloud.bigquery.job.LoadJob`.

        :type quote_character: str
        :param quote_character: job configuration option; see
                                :meth:`google.cloud.bigquery.job.LoadJob`.

        :type skip_leading_rows: int
        :param skip_leading_rows: job configuration option; see
                                  :meth:`google.cloud.bigquery.job.LoadJob`.

        :type write_disposition: str
        :param write_disposition: job configuration option; see
                                  :meth:`google.cloud.bigquery.job.LoadJob`.

        :type client: :class:`~google.cloud.storage.client.Client` or
                      ``NoneType``
        :param client: Optional. The client to use.  If not passed, falls back
                       to the ``client`` stored on the current dataset.

        :type job_name: str
        :param job_name: Optional. The id of the job. Generated if not
                         explicitly passed in.

        :rtype: :class:`google.cloud.bigquery.jobs.LoadTableFromStorageJob`
        :returns: the job instance used to load the data (e.g., for
                  querying status). Note that the job is already started:
                  do not call ``job.begin()``.
        :raises: :class:`ValueError` if ``size`` is not passed in and can not
                 be determined, or if the ``file_obj`` can be detected to be
                 a file opened in text mode.
        """
        client = self._require_client(client)
        connection = client._connection
        content_type = 'application/octet-stream'

        # Rewind the file if desired.
        if rewind:
            file_obj.seek(0, os.SEEK_SET)

        mode = getattr(file_obj, 'mode', None)

        if mode is not None and mode not in ('rb', 'r+b', 'rb+'):
            raise ValueError(
                "Cannot upload files opened in text mode:  use "
                "open(filename, mode='rb') or open(filename, mode='r+b')")

        # Get the basic stats about the file.
        total_bytes = size
        if total_bytes is None:
            if hasattr(file_obj, 'fileno'):
                total_bytes = os.fstat(file_obj.fileno()).st_size
            else:
                raise ValueError('total bytes could not be determined. Please '
                                 'pass an explicit size.')
        headers = {
            'Accept': 'application/json',
            'Accept-Encoding': 'gzip, deflate',
            'User-Agent': connection.USER_AGENT,
            'content-type': 'application/json',
        }

        metadata = {
            'configuration': {
                'load': {
                    'sourceFormat': source_format,
                    'destinationTable': {
                        'projectId': self._dataset.project,
                        'datasetId': self._dataset.name,
                        'tableId': self.name,
                    }
                }
            }
        }

        if len(self._schema) > 0:
            load_config = metadata['configuration']['load']
            load_config['schema'] = {
                'fields': _build_schema_resource(self._schema)
            }

        _configure_job_metadata(metadata, allow_jagged_rows,
                                allow_quoted_newlines, create_disposition,
                                encoding, field_delimiter,
                                ignore_unknown_values, max_bad_records,
                                quote_character, skip_leading_rows,
                                write_disposition, job_name)

        upload = Upload(file_obj,
                        content_type,
                        total_bytes,
                        auto_transfer=False)

        url_builder = _UrlBuilder()
        upload_config = _UploadConfig()

        # Base URL may change once we know simple vs. resumable.
        base_url = connection.API_BASE_URL + '/upload'
        path = '/projects/%s/jobs' % (self._dataset.project, )
        upload_url = connection.build_api_url(api_base_url=base_url, path=path)

        # Use apitools 'Upload' facility.
        request = Request(upload_url,
                          'POST',
                          headers,
                          body=json.dumps(metadata))

        upload.configure_request(upload_config, request, url_builder)
        query_params = url_builder.query_params
        base_url = connection.API_BASE_URL + '/upload'
        request.url = connection.build_api_url(api_base_url=base_url,
                                               path=path,
                                               query_params=query_params)
        try:
            upload.initialize_upload(request, connection.http)
        except HttpError as err_response:
            faux_response = httplib2.Response(err_response.response)
            raise make_exception(faux_response,
                                 err_response.content,
                                 error_info=request.url)

        if upload.strategy == RESUMABLE_UPLOAD:
            http_response = upload.stream_file(use_chunks=True)
        else:
            http_response = make_api_request(connection.http,
                                             request,
                                             retries=num_retries)

        self._check_response_error(request, http_response)

        response_content = http_response.content
        if not isinstance(response_content,
                          six.string_types):  # pragma: NO COVER  Python3
            response_content = response_content.decode('utf-8')
        return client.job_from_resource(json.loads(response_content))
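
A hedged usage sketch for this method, assuming the dataset and table accessors of the same-era google-cloud-bigquery client API (names may differ across versions):

    from google.cloud import bigquery

    client = bigquery.Client(project='my-project')     # hypothetical project
    table = client.dataset('my_dataset').table('my_table')
    with open('rows.csv', 'rb') as csv_file:           # binary mode required
        job = table.upload_from_file(csv_file, 'CSV', skip_leading_rows=1)
    # The returned job is already started; poll it instead of calling begin().
    print(job.state)
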
Code example #13
    def _callFUT(self, *args, **kw):
        from google.cloud.streaming.http_wrapper import make_api_request
        return make_api_request(*args, **kw)
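
``_callFUT`` ("call the function under test") is the helper idiom this codebase's tests use to defer importing the function being tested until test time. A hedged sketch of how a test case might build on it:

    import unittest

    class Test_make_api_request(unittest.TestCase):

        def _callFUT(self, *args, **kw):
            from google.cloud.streaming.http_wrapper import make_api_request
            return make_api_request(*args, **kw)

        def test_is_callable(self):
            # A real test would pass stub ``http`` and ``request`` objects
            # and assert on the wrapped response, e.g.:
            #     response = self._callFUT(fake_http, fake_request, retries=1)
            self.assertTrue(callable(self._callFUT))
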
Code example #14
    def upload_from_file(self,
                         file_obj,
                         rewind=False,
                         size=None,
                         content_type=None,
                         num_retries=6,
                         client=None):
        """Upload the contents of this blob from a file-like object.

        The content type of the upload will be either:

        - The value passed in to the function (if any)
        - The value stored on the current blob
        - The default value of 'application/octet-stream'

        .. note::
           The effect of uploading to an existing blob depends on the
           "versioning" and "lifecycle" policies defined on the blob's
           bucket.  In the absence of those policies, upload will
           overwrite any existing contents.

           See the `object versioning
           <https://cloud.google.com/storage/docs/object-versioning>`_ and
           `lifecycle <https://cloud.google.com/storage/docs/lifecycle>`_
           API documents for details.

        Uploading a file with a `customer-supplied`_ encryption key:

        .. literalinclude:: storage_snippets.py
            :start-after: [START upload_from_file]
            :end-before: [END upload_from_file]

        The ``encryption_key`` should be a str or bytes with a length of at
        least 32.

        .. _customer-supplied: https://cloud.google.com/storage/docs/\
                               encryption#customer-supplied

        :type file_obj: file
        :param file_obj: A file handle open for reading.

        :type rewind: bool
        :param rewind: If True, seek to the beginning of the file handle before
                       writing the file to Cloud Storage.

        :type size: int
        :param size: The number of bytes to read from the file handle.
                     If not provided, we'll try to guess the size using
                     :func:`os.fstat`. (If the file handle is not from the
                     filesystem this won't be possible.)

        :type content_type: str
        :param content_type: Optional type of content being uploaded.

        :type num_retries: int
        :param num_retries: Number of upload retries. Defaults to 6.

        :type client: :class:`~google.cloud.storage.client.Client` or
                      ``NoneType``
        :param client: Optional. The client to use.  If not passed, falls back
                       to the ``client`` stored on the blob's bucket.

        :raises: :class:`ValueError` if size is not passed in and can not be
                 determined; :class:`google.cloud.exceptions.GoogleCloudError`
                 if the upload response returns an error status.
        """
        client = self._require_client(client)
        # Use ``_base_connection`` rather than ``_connection`` since the
        # current
        # connection may be a batch. A batch wraps a client's connection,
        # but does not store the ``http`` object. The rest (API_BASE_URL and
        # build_api_url) are also defined on the Batch class, but we just
        # use the wrapped connection since it has all three (http,
        # API_BASE_URL and build_api_url).
        connection = client._base_connection

        # Rewind the file if desired.
        if rewind:
            file_obj.seek(0, os.SEEK_SET)

        # Get the basic stats about the file.
        total_bytes = size
        if total_bytes is None:
            if hasattr(file_obj, 'fileno'):
                try:
                    total_bytes = os.fstat(file_obj.fileno()).st_size
                except (OSError, UnsupportedOperation):
                    pass  # Assuming fd is not an actual file (maybe socket).

        chunk_size = None
        strategy = None
        if self.chunk_size is not None:
            chunk_size = self.chunk_size

            if total_bytes is None:
                strategy = RESUMABLE_UPLOAD
        elif total_bytes is None:
            raise ValueError('total bytes could not be determined. Please '
                             'pass an explicit size, or supply a chunk size '
                             'for a streaming transfer.')

        upload, request, _ = self._create_upload(client,
                                                 file_obj=file_obj,
                                                 size=total_bytes,
                                                 content_type=content_type,
                                                 chunk_size=chunk_size,
                                                 strategy=strategy)

        if upload.strategy == RESUMABLE_UPLOAD:
            http_response = upload.stream_file(use_chunks=True)
        else:
            http_response = make_api_request(connection.http,
                                             request,
                                             retries=num_retries)

        self._check_response_error(request, http_response)
        response_content = http_response.content

        if not isinstance(response_content,
                          six.string_types):  # pragma: NO COVER  Python3
            response_content = response_content.decode('utf-8')
        self._set_properties(json.loads(response_content))
Code example #15
    def upload_from_file(self,
                         file_obj,
                         rewind=False,
                         size=None,
                         content_type=None,
                         num_retries=6,
                         client=None):
        """Upload the contents of this blob from a file-like object.

        The content type of the upload will be either:

        - The value passed in to the function (if any)
        - The value stored on the current blob
        - The default value of 'application/octet-stream'

        .. note::
           The effect of uploading to an existing blob depends on the
           "versioning" and "lifecycle" policies defined on the blob's
           bucket.  In the absence of those policies, upload will
           overwrite any existing contents.

           See the `object versioning
           <https://cloud.google.com/storage/docs/object-versioning>`_ and
           `lifecycle <https://cloud.google.com/storage/docs/lifecycle>`_
           API documents for details.

        Uploading a file with a `customer-supplied`_ encryption key::

            >>> from google.cloud import storage
            >>> from google.cloud.storage import Blob

            >>> client = storage.Client(project='my-project')
            >>> bucket = client.get_bucket('my-bucket')
            >>> encryption_key = 'aa426195405adee2c8081bb9e7e74b19'
            >>> blob = Blob('secure-data', bucket,
            ...             encryption_key=encryption_key)
            >>> with open('my-file', 'rb') as my_file:
            >>>     blob.upload_from_file(my_file)

        The ``encryption_key`` should be a str or bytes with a length of at
        least 32.

        .. _customer-supplied: https://cloud.google.com/storage/docs/\
                               encryption#customer-supplied

        :type file_obj: file
        :param file_obj: A file handle open for reading.

        :type rewind: bool
        :param rewind: If True, seek to the beginning of the file handle before
                       writing the file to Cloud Storage.

        :type size: int
        :param size: The number of bytes to read from the file handle.
                     If not provided, we'll try to guess the size using
                     :func:`os.fstat`. (If the file handle is not from the
                     filesystem this won't be possible.)

        :type content_type: str
        :param content_type: Optional type of content being uploaded.

        :type num_retries: int
        :param num_retries: Number of upload retries. Defaults to 6.

        :type client: :class:`~google.cloud.storage.client.Client` or
                      ``NoneType``
        :param client: Optional. The client to use.  If not passed, falls back
                       to the ``client`` stored on the blob's bucket.

        :raises: :class:`ValueError` if size is not passed in and can not be
                 determined; :class:`google.cloud.exceptions.GoogleCloudError`
                 if the upload response returns an error status.
        """
        client = self._require_client(client)
        # Use ``_base_connection`` rather than ``_connection`` since the
        # current
        # connection may be a batch. A batch wraps a client's connection,
        # but does not store the ``http`` object. The rest (API_BASE_URL and
        # build_api_url) are also defined on the Batch class, but we just
        # use the wrapped connection since it has all three (http,
        # API_BASE_URL and build_api_url).
        connection = client._base_connection
        content_type = (content_type or self._properties.get('contentType')
                        or 'application/octet-stream')

        # Rewind the file if desired.
        if rewind:
            file_obj.seek(0, os.SEEK_SET)

        # Get the basic stats about the file.
        total_bytes = size
        if total_bytes is None:
            if hasattr(file_obj, 'fileno'):
                try:
                    total_bytes = os.fstat(file_obj.fileno()).st_size
                except (OSError, UnsupportedOperation):
                    pass  # Assuming fd is not an actual file (maybe socket).

        headers = {
            'Accept': 'application/json',
            'Accept-Encoding': 'gzip, deflate',
            'User-Agent': connection.USER_AGENT,
        }

        headers.update(_get_encryption_headers(self._encryption_key))

        upload = Upload(file_obj,
                        content_type,
                        total_bytes,
                        auto_transfer=False)

        if self.chunk_size is not None:
            upload.chunksize = self.chunk_size

            if total_bytes is None:
                upload.strategy = RESUMABLE_UPLOAD
        elif total_bytes is None:
            raise ValueError('total bytes could not be determined. Please '
                             'pass an explicit size, or supply a chunk size '
                             'for a streaming transfer.')

        url_builder = _UrlBuilder(bucket_name=self.bucket.name,
                                  object_name=self.name)
        upload_config = _UploadConfig()

        # Temporary URL, until we know simple vs. resumable.
        base_url = connection.API_BASE_URL + '/upload'
        upload_url = connection.build_api_url(api_base_url=base_url,
                                              path=self.bucket.path + '/o')

        # Use apitools 'Upload' facility.
        request = Request(upload_url, 'POST', headers)

        upload.configure_request(upload_config, request, url_builder)
        query_params = url_builder.query_params
        base_url = connection.API_BASE_URL + '/upload'
        request.url = connection.build_api_url(api_base_url=base_url,
                                               path=self.bucket.path + '/o',
                                               query_params=query_params)
        upload.initialize_upload(request, connection.http)

        if upload.strategy == RESUMABLE_UPLOAD:
            http_response = upload.stream_file(use_chunks=True)
        else:
            http_response = make_api_request(connection.http,
                                             request,
                                             retries=num_retries)

        self._check_response_error(request, http_response)
        response_content = http_response.content

        if not isinstance(response_content,
                          six.string_types):  # pragma: NO COVER  Python3
            response_content = response_content.decode('utf-8')
        self._set_properties(json.loads(response_content))
Code example #16
    def _callFUT(self, *args, **kw):
        from google.cloud.streaming.http_wrapper import make_api_request
        return make_api_request(*args, **kw)
Code example #17
File: table.py  Project: kwoodson/gcloud-python
    def upload_from_file(self,
                         file_obj,
                         source_format,
                         rewind=False,
                         size=None,
                         num_retries=6,
                         allow_jagged_rows=None,
                         allow_quoted_newlines=None,
                         create_disposition=None,
                         encoding=None,
                         field_delimiter=None,
                         ignore_unknown_values=None,
                         max_bad_records=None,
                         quote_character=None,
                         skip_leading_rows=None,
                         write_disposition=None,
                         client=None):
        """Upload the contents of this table from a file-like object.

        The content type of the upload will be either:

        - The value passed in to the function (if any)
        - ``text/csv``.

        :type file_obj: file
        :param file_obj: A file handle opened in binary mode for reading.

        :type source_format: str
        :param source_format: one of 'CSV' or 'NEWLINE_DELIMITED_JSON'.
                              job configuration option; see
                              :meth:`google.cloud.bigquery.job.LoadJob`

        :type rewind: boolean
        :param rewind: If True, seek to the beginning of the file handle before
                       writing the file to Cloud Storage.

        :type size: int
        :param size: The number of bytes to read from the file handle.
                     If not provided, we'll try to guess the size using
                     :func:`os.fstat`. (If the file handle is not from the
                     filesystem this won't be possible.)

        :type num_retries: integer
        :param num_retries: Number of upload retries. Defaults to 6.

        :type allow_jagged_rows: boolean
        :param allow_jagged_rows: job configuration option;  see
                                  :meth:`google.cloud.bigquery.job.LoadJob`.

        :type allow_quoted_newlines: boolean
        :param allow_quoted_newlines: job configuration option; see
                                      :meth:`google.cloud.bigquery.job.LoadJob`.

        :type create_disposition: str
        :param create_disposition: job configuration option; see
                                   :meth:`google.cloud.bigquery.job.LoadJob`.

        :type encoding: str
        :param encoding: job configuration option; see
                         :meth:`google.cloud.bigquery.job.LoadJob`.

        :type field_delimiter: str
        :param field_delimiter: job configuration option; see
                                :meth:`google.cloud.bigquery.job.LoadJob`.

        :type ignore_unknown_values: boolean
        :param ignore_unknown_values: job configuration option; see
                                      :meth:`google.cloud.bigquery.job.LoadJob`.

        :type max_bad_records: integer
        :param max_bad_records: job configuration option; see
                                :meth:`google.cloud.bigquery.job.LoadJob`.

        :type quote_character: str
        :param quote_character: job configuration option; see
                                :meth:`google.cloud.bigquery.job.LoadJob`.

        :type skip_leading_rows: integer
        :param skip_leading_rows: job configuration option; see
                                  :meth:`google.cloud.bigquery.job.LoadJob`.

        :type write_disposition: str
        :param write_disposition: job configuration option; see
                                  :meth:`google.cloud.bigquery.job.LoadJob`.

        :type client: :class:`~google.cloud.storage.client.Client` or
                      ``NoneType``
        :param client: Optional. The client to use.  If not passed, falls back
                       to the ``client`` stored on the current dataset.

        :rtype: :class:`google.cloud.bigquery.jobs.LoadTableFromStorageJob`
        :returns: the job instance used to load the data (e.g., for
                  querying status). Note that the job is already started:
                  do not call ``job.begin()``.
        :raises: :class:`ValueError` if ``size`` is not passed in and can not
                 be determined, or if the ``file_obj`` can be detected to be
                 a file opened in text mode.
        """
        client = self._require_client(client)
        connection = client.connection
        content_type = 'application/octet-stream'

        # Rewind the file if desired.
        if rewind:
            file_obj.seek(0, os.SEEK_SET)

        mode = getattr(file_obj, 'mode', None)
        if mode is not None and mode != 'rb':
            raise ValueError(
                "Cannot upload files opened in text mode:  use "
                "open(filename, mode='rb')")

        # Get the basic stats about the file.
        total_bytes = size
        if total_bytes is None:
            if hasattr(file_obj, 'fileno'):
                total_bytes = os.fstat(file_obj.fileno()).st_size
            else:
                raise ValueError('total bytes could not be determined. Please '
                                 'pass an explicit size.')
        headers = {
            'Accept': 'application/json',
            'Accept-Encoding': 'gzip, deflate',
            'User-Agent': connection.USER_AGENT,
            'content-type': 'application/json',
        }

        metadata = {
            'configuration': {
                'load': {
                    'sourceFormat': source_format,
                    'schema': {
                        'fields': _build_schema_resource(self._schema),
                    },
                    'destinationTable': {
                        'projectId': self._dataset.project,
                        'datasetId': self._dataset.name,
                        'tableId': self.name,
                    }
                }
            }
        }

        _configure_job_metadata(metadata, allow_jagged_rows,
                                allow_quoted_newlines, create_disposition,
                                encoding, field_delimiter,
                                ignore_unknown_values, max_bad_records,
                                quote_character, skip_leading_rows,
                                write_disposition)

        upload = Upload(file_obj, content_type, total_bytes,
                        auto_transfer=False)

        url_builder = _UrlBuilder()
        upload_config = _UploadConfig()

        # Base URL may change once we know simple vs. resumable.
        base_url = connection.API_BASE_URL + '/upload'
        path = '/projects/%s/jobs' % (self._dataset.project,)
        upload_url = connection.build_api_url(api_base_url=base_url, path=path)

        # Use apitools 'Upload' facility.
        request = Request(upload_url, 'POST', headers,
                          body=json.dumps(metadata))

        upload.configure_request(upload_config, request, url_builder)
        query_params = url_builder.query_params
        base_url = connection.API_BASE_URL + '/upload'
        request.url = connection.build_api_url(api_base_url=base_url,
                                               path=path,
                                               query_params=query_params)
        upload.initialize_upload(request, connection.http)

        if upload.strategy == RESUMABLE_UPLOAD:
            http_response = upload.stream_file(use_chunks=True)
        else:
            http_response = make_api_request(connection.http, request,
                                             retries=num_retries)
        response_content = http_response.content
        if not isinstance(response_content,
                          six.string_types):  # pragma: NO COVER  Python3
            response_content = response_content.decode('utf-8')
        return client.job_from_resource(json.loads(response_content))
Code example #18
File: blob.py  Project: kwoodson/gcloud-python
    def upload_from_file(self, file_obj, rewind=False, size=None,
                         encryption_key=None, content_type=None, num_retries=6,
                         client=None):
        """Upload the contents of this blob from a file-like object.

        The content type of the upload will be either:

        - The value passed in to the function (if any)
        - The value stored on the current blob
        - The default value of 'application/octet-stream'

        .. note::
           The effect of uploading to an existing blob depends on the
           "versioning" and "lifecycle" policies defined on the blob's
           bucket.  In the absence of those policies, upload will
           overwrite any existing contents.

           See the `object versioning
           <https://cloud.google.com/storage/docs/object-versioning>`_ and
           `lifecycle <https://cloud.google.com/storage/docs/lifecycle>`_
           API documents for details.

        Uploading a file with a `customer-supplied`_ encryption key::

            >>> from google.cloud import storage
            >>> from google.cloud.storage import Blob

            >>> client = storage.Client(project='my-project')
            >>> bucket = client.get_bucket('my-bucket')
            >>> encryption_key = 'aa426195405adee2c8081bb9e7e74b19'
            >>> blob = Blob('secure-data', bucket)
            >>> with open('my-file', 'rb') as my_file:
            >>>     blob.upload_from_file(my_file,
            ...                           encryption_key=encryption_key)

        The ``encryption_key`` should be a str or bytes with a length of at
        least 32.

        .. _customer-supplied: https://cloud.google.com/storage/docs/\
                               encryption#customer-supplied

        :type file_obj: file
        :param file_obj: A file handle open for reading.

        :type rewind: boolean
        :param rewind: If True, seek to the beginning of the file handle before
                       writing the file to Cloud Storage.

        :type size: int
        :param size: The number of bytes to read from the file handle.
                     If not provided, we'll try to guess the size using
                     :func:`os.fstat`. (If the file handle is not from the
                     filesystem this won't be possible.)

        :type encryption_key: str or bytes
        :param encryption_key: Optional 32 byte encryption key for
                               customer-supplied encryption.

        :type content_type: string or ``NoneType``
        :param content_type: Optional type of content being uploaded.

        :type num_retries: integer
        :param num_retries: Number of upload retries. Defaults to 6.

        :type client: :class:`~google.cloud.storage.client.Client` or
                      ``NoneType``
        :param client: Optional. The client to use.  If not passed, falls back
                       to the ``client`` stored on the blob's bucket.

        :raises: :class:`ValueError` if size is not passed in and can not be
                 determined; :class:`google.cloud.exceptions.GoogleCloudError`
                 if the upload response returns an error status.
        """
        client = self._require_client(client)
        # Use the private ``_connection`` rather than the public
        # ``.connection``, since the public connection may be a batch. A
        # batch wraps a client's connection, but does not store the `http`
        # object. The rest (API_BASE_URL and build_api_url) are also defined
        # on the Batch class, but we just use the wrapped connection since
        # it has all three (http, API_BASE_URL and build_api_url).
        connection = client._connection
        content_type = (content_type or self._properties.get('contentType') or
                        'application/octet-stream')

        # Rewind the file if desired.
        if rewind:
            file_obj.seek(0, os.SEEK_SET)

        # Get the basic stats about the file.
        total_bytes = size
        if total_bytes is None:
            if hasattr(file_obj, 'fileno'):
                try:
                    total_bytes = os.fstat(file_obj.fileno()).st_size
                except (OSError, UnsupportedOperation):
                    pass  # Assuming fd is not an actual file (maybe socket).

        headers = {
            'Accept': 'application/json',
            'Accept-Encoding': 'gzip, deflate',
            'User-Agent': connection.USER_AGENT,
        }

        if encryption_key:
            _set_encryption_headers(encryption_key, headers)

        upload = Upload(file_obj, content_type, total_bytes,
                        auto_transfer=False)

        if self.chunk_size is not None:
            upload.chunksize = self.chunk_size

            if total_bytes is None:
                upload.strategy = RESUMABLE_UPLOAD
        elif total_bytes is None:
            raise ValueError('total bytes could not be determined. Please '
                             'pass an explicit size, or supply a chunk size '
                             'for a streaming transfer.')

        url_builder = _UrlBuilder(bucket_name=self.bucket.name,
                                  object_name=self.name)
        upload_config = _UploadConfig()

        # Temporary URL, until we know simple vs. resumable.
        base_url = connection.API_BASE_URL + '/upload'
        upload_url = connection.build_api_url(api_base_url=base_url,
                                              path=self.bucket.path + '/o')

        # Use apitools 'Upload' facility.
        request = Request(upload_url, 'POST', headers)

        upload.configure_request(upload_config, request, url_builder)
        query_params = url_builder.query_params
        base_url = connection.API_BASE_URL + '/upload'
        request.url = connection.build_api_url(api_base_url=base_url,
                                               path=self.bucket.path + '/o',
                                               query_params=query_params)
        upload.initialize_upload(request, connection.http)

        if upload.strategy == RESUMABLE_UPLOAD:
            http_response = upload.stream_file(use_chunks=True)
        else:
            http_response = make_api_request(connection.http, request,
                                             retries=num_retries)

        self._check_response_error(request, http_response)
        response_content = http_response.content

        if not isinstance(response_content,
                          six.string_types):  # pragma: NO COVER  Python3
            response_content = response_content.decode('utf-8')
        self._set_properties(json.loads(response_content))