def testEscaping(self):
  filename = '/bucket/foo'
  self.assertEqual(filename, api_utils._quote_filename(filename))

  filename = '/bucket._-bucket/foo'
  self.assertEqual(filename, api_utils._quote_filename(filename))

  filename = '/bucket/a ;/?:@&=+$,'
  self.assertEqual('/bucket/a%20%3B/%3F%3A%40%26%3D%2B%24%2C',
                   api_utils._quote_filename(filename))
def stat(filename, retry_params=None, _account_id=None):
  """Get GCSFileStat of a Google Cloud storage file.

  Args:
    filename: A Google Cloud Storage filename of form '/bucket/filename'.
    retry_params: An api_utils.RetryParams for this call to GCS. If None,
      the default one is used.
    _account_id: Internal-use only.

  Returns:
    a GCSFileStat object containing info about this file.

  Raises:
    errors.AuthorizationError: if authorization failed.
    errors.NotFoundError: if an object that's expected to exist doesn't.
  """
  common.validate_file_path(filename)
  api = storage_api._get_storage_api(retry_params=retry_params,
                                     account_id=_account_id)
  status, headers, content = api.head_object(
      api_utils._quote_filename(filename))
  errors.check_status(status, [200], filename, resp_headers=headers,
                      body=content)
  file_stat = common.GCSFileStat(
      filename=filename,
      st_size=common.get_stored_content_length(headers),
      st_ctime=common.http_time_to_posix(headers.get('last-modified')),
      etag=headers.get('etag'),
      content_type=headers.get('content-type'),
      metadata=common.get_metadata(headers))

  return file_stat
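# Usage sketch for stat() above (assumption: the module is exposed as the
# `cloudstorage` package, as in the App Engine GCS client; the object path
# '/my-bucket/report.csv' is hypothetical).
import cloudstorage

file_stat = cloudstorage.stat('/my-bucket/report.csv')
print(file_stat.st_size)       # stored content length in bytes
print(file_stat.etag)          # ETag reported by GCS
print(file_stat.content_type)  # e.g. 'text/csv'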
def _copy2(src, dst, metadata=None, retry_params=None):
  """Copy the file content from src to dst.

  Internal use only!

  Args:
    src: /bucket/filename
    dst: /bucket/filename
    metadata: a dict of metadata for this copy. If None, old metadata is
      copied. For example, {'x-goog-meta-foo': 'bar'}.
    retry_params: An api_utils.RetryParams for this call to GCS. If None,
      the default one is used.

  Raises:
    errors.AuthorizationError: if authorization failed.
    errors.NotFoundError: if an object that's expected to exist doesn't.
  """
  common.validate_file_path(src)
  common.validate_file_path(dst)

  if metadata is None:
    metadata = {}
    copy_meta = 'COPY'
  else:
    copy_meta = 'REPLACE'
  metadata.update({'x-goog-copy-source': src,
                   'x-goog-metadata-directive': copy_meta})

  api = storage_api._get_storage_api(retry_params=retry_params)
  status, resp_headers, content = api.put_object(
      api_utils._quote_filename(dst), headers=metadata)
  errors.check_status(status, [200], src, metadata, resp_headers,
                      body=content)
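# Usage sketch for the internal helper above (bucket/object paths and the
# metadata key are hypothetical). With metadata=None the source object's
# metadata is carried over (x-goog-metadata-directive: COPY); passing a dict
# replaces it (x-goog-metadata-directive: REPLACE).
_copy2('/my-bucket/src.txt', '/my-bucket/dst.txt')
_copy2('/my-bucket/src.txt', '/my-bucket/dst.txt',
       metadata={'x-goog-meta-owner': 'pipeline'})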
def open(filename,
         mode='r',
         content_type=None,
         options=None,
         read_buffer_size=storage_api.ReadBuffer.DEFAULT_BUFFER_SIZE,
         retry_params=None,
         _account_id=None):
  """Opens a Google Cloud Storage file and returns it as a File-like object.

  Args:
    filename: A Google Cloud Storage filename of form '/bucket/filename'.
    mode: 'r' for reading mode. 'w' for writing mode.
      In reading mode, the file must exist. In writing mode, a file will
      be created or overwritten.
    content_type: The MIME type of the file. str. Only valid in writing mode.
    options: A str->basestring dict to specify additional headers to pass to
      GCS e.g. {'x-goog-acl': 'private', 'x-goog-meta-foo': 'foo'}.
      Supported options are x-goog-acl, x-goog-meta-, cache-control,
      content-disposition, and content-encoding. Only valid in writing mode.
      See https://developers.google.com/storage/docs/reference-headers
      for details.
    read_buffer_size: The buffer size for read. Read keeps a buffer and
      prefetches another one. To minimize blocking for large files, always
      read by buffer size. To minimize the number of RPC requests for small
      files, set a large buffer size. Max is 30MB.
    retry_params: An instance of api_utils.RetryParams for subsequent calls
      to GCS from this file handle. If None, the default one is used.
    _account_id: Internal-use only.

  Returns:
    A reading or writing buffer that supports a File-like interface. The
    buffer must be closed after operations are done.

  Raises:
    errors.AuthorizationError: if authorization failed.
    errors.NotFoundError: if an object that's expected to exist doesn't.
    ValueError: invalid open mode or if content_type or options are specified
      in reading mode.
  """
  common.validate_file_path(filename)
  api = storage_api._get_storage_api(retry_params=retry_params,
                                     account_id=_account_id)
  filename = api_utils._quote_filename(filename)

  if mode == 'w':
    common.validate_options(options)
    return storage_api.StreamingBuffer(api, filename, content_type, options)
  elif mode == 'r':
    if content_type or options:
      raise ValueError('Options and content_type can only be specified '
                       'for writing mode.')
    return storage_api.ReadBuffer(api,
                                  filename,
                                  buffer_size=read_buffer_size)
  else:
    raise ValueError('Invalid mode %s.' % mode)
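# Usage sketch for open() above (assumption: module exposed as the
# `cloudstorage` package; the object path and x-goog-meta key are
# hypothetical). Buffers are closed explicitly, as the docstring requires.
import cloudstorage

# Writing mode: creates or overwrites the object.
gcs_file = cloudstorage.open('/my-bucket/greeting.txt', 'w',
                             content_type='text/plain',
                             options={'x-goog-meta-source': 'example'})
gcs_file.write('hello world')
gcs_file.close()

# Reading mode: the object must already exist.
gcs_file = cloudstorage.open('/my-bucket/greeting.txt', 'r')
data = gcs_file.read()
gcs_file.close()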
def _gs_copy(self, src, dst, src_etag=None):  # pragma: no cover
  """Copy |src| file to |dst| optionally checking src ETag.

  Raises cloudstorage.FatalError on precondition error.
  """
  # See cloudstorage.cloudstorage_api._copy2.
  cloudstorage.validate_file_path(src)
  cloudstorage.validate_file_path(dst)
  headers = {
      'x-goog-copy-source': src,
      'x-goog-metadata-directive': 'COPY',
  }
  if src_etag is not None:
    headers['x-goog-copy-source-if-match'] = src_etag
  api = storage_api._get_storage_api(retry_params=self._retry_params)
  status, resp_headers, content = api.put_object(
      api_utils._quote_filename(dst), headers=headers)
  errors.check_status(status, [200], src, headers, resp_headers,
                      body=content)
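# Usage sketch, as called from another method of the same class (paths are
# hypothetical): stat the source first and pass its ETag so the copy only
# succeeds if the source has not been overwritten in the meantime; on a
# precondition failure check_status raises.
src_stat = cloudstorage.stat('/my-bucket/artifact.bin')
self._gs_copy('/my-bucket/artifact.bin', '/my-bucket/artifact.bin.copy',
              src_etag=src_stat.etag)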
def delete(filename, retry_params=None, _account_id=None):
  """Delete a Google Cloud Storage file.

  Args:
    filename: A Google Cloud Storage filename of form '/bucket/filename'.
    retry_params: An api_utils.RetryParams for this call to GCS. If None,
      the default one is used.
    _account_id: Internal-use only.

  Raises:
    errors.NotFoundError: if the file doesn't exist prior to deletion.
  """
  api = storage_api._get_storage_api(retry_params=retry_params,
                                     account_id=_account_id)
  common.validate_file_path(filename)
  filename = api_utils._quote_filename(filename)
  status, resp_headers, content = api.delete_object(filename)
  errors.check_status(status, [204], filename, resp_headers=resp_headers,
                      body=content)
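# Usage sketch for delete() above (assumption: module exposed as the
# `cloudstorage` package, which re-exports NotFoundError from its errors
# module; the path is hypothetical).
import cloudstorage

try:
  cloudstorage.delete('/my-bucket/old-report.csv')
except cloudstorage.NotFoundError:
  pass  # the object was already gone; treat deletion as idempotent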
def remove_all_cached_thumbnail_images(self):
    """Removes all cached thumbnail images from GCS for this YT video."""
    gcs_folder = self.gcs_format.format(gcs_bucket=settings.GCS_BUCKET,
                                        bucket_folder=self.gcs_bucket,
                                        yt_id=self.youtube_id)

    api = storage_api._get_storage_api(None)
    futures = {}
    # Kick off all deletes asynchronously, then collect the results.
    for file_stat in gcs.listbucket(gcs_folder, delimiter="/"):
        filename = api_utils._quote_filename(file_stat.filename)
        futures[file_stat.filename] = api.delete_object_async(filename)

    for filename, future in futures.items():
        status, resp_headers, content = future.get_result()
        if status != 204:
            # logging uses printf-style placeholders, not str.format ones.
            logging.error("Could not delete thumbnail %s: %s",
                          filename, content)
        else:
            logging.info("Deleted thumbnail file %s", filename)
def EscapeUnescapeFilename(unescaped, escaped):
  """Asserts that quoting and unquoting are inverses for this pair.

  Helper nested inside a TestCase method, so `self` comes from the
  enclosing scope.
  """
  self.assertEqual(escaped, api_utils._quote_filename(unescaped))
  self.assertEqual(unescaped, api_utils._unquote_filename(escaped))
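# Example pairs the helper can be exercised with (taken from testEscaping
# above): already-safe names round-trip unchanged, while reserved characters
# are percent-encoded per path segment.
EscapeUnescapeFilename('/bucket/foo', '/bucket/foo')
EscapeUnescapeFilename('/bucket/a ;/?:@&=+$,',
                       '/bucket/a%20%3B/%3F%3A%40%26%3D%2B%24%2C')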