def get(cls, blob_keys):
  """Look up BlobInfo records for a single key or a batch of keys.

  Args:
    blob_keys: Either one key or a list of keys.  Keys may be instances
      of str, unicode or BlobKey.

  Returns:
    For a single key, the matching BlobInfo, or None when no blob exists
    for that key.  For a list of keys, a list of BlobInfo/None values in
    the same order, with None standing in for keys not found in Blobstore.
  """
  blob_keys = cls.__normalize_and_convert_keys(blob_keys)
  try:
    fetched = datastore.Get(blob_keys)
  except datastore_errors.EntityNotFoundError:
    # Single-key lookup that found nothing.
    return None
  if isinstance(fetched, datastore.Entity):
    # A single key was given, so a single entity came back.
    return BlobInfo(fetched)
  # Batch lookup: wrap each found entity, keep None for missing ones.
  return [BlobInfo(entity) if entity is not None else None
          for entity in fetched]
def _OpenBlob(self, blob_key):
  """Open the image stored under blob_key and return it as a PIL Image.

  First verifies that a BlobInfo entity exists for the key, then opens the
  stored bytes through the blobstore stub, and finally asks PIL to decode
  them.

  Args:
    blob_key: Key of the blob whose image data should be opened.

  Returns:
    An Image instance backed by the blob's stored data.

  Raises:
    apiproxy_errors.ApplicationError: UNSPECIFIED_ERROR when the blob's
      datastore record is missing; BAD_IMAGE_DATA when the stored file
      cannot be read or cannot be decoded as an image.
  """
  info_key = datastore_types.Key.from_path(
      blobstore.BLOB_INFO_KIND, blob_key, namespace='')
  # Confirm the blob's metadata record exists before touching storage.
  try:
    datastore.Get(info_key)
  except datastore_errors.Error:
    logging.exception('Blob with key %r does not exist', blob_key)
    raise apiproxy_errors.ApplicationError(
        images_service_pb.ImagesServiceError.UNSPECIFIED_ERROR)

  blobstore_stub = apiproxy_stub_map.apiproxy.GetStub("blobstore")
  try:
    stream = blobstore_stub.storage.OpenBlob(blob_key)
  except IOError:
    logging.exception('Could not get file for blob_key %r', blob_key)
    raise apiproxy_errors.ApplicationError(
        images_service_pb.ImagesServiceError.BAD_IMAGE_DATA)

  try:
    return Image.open(stream)
  except IOError:
    logging.exception('Could not open image %r for blob_key %r',
                      stream, blob_key)
    raise apiproxy_errors.ApplicationError(
        images_service_pb.ImagesServiceError.BAD_IMAGE_DATA)
def Dispatch(self, request, outfile, logfile, base_env_dict=None):
  """Handle post dispatch.

  This dispatcher will handle all uploaded files in the POST request, store
  the results in the blob-storage, close the upload session and transform
  the original request in to one where the uploaded files have external
  bodies.

  Args:
    request: The incoming upload request to rewrite.
    outfile: Stream that receives the raw HTTP status on error.
    logfile: Log stream (unused in this method).
    base_env_dict: CGI-style environment dict; REQUEST_METHOD is read and
      the dict is passed through to cgi.FieldStorage.

  Returns:
    New AppServerRequest indicating request forward to upload success
    handler, or None when an error status has been written to outfile.
  """
  # Uploads must be POSTs; reject anything else outright.
  if base_env_dict['REQUEST_METHOD'] != 'POST':
    outfile.write('Status: 400\n\n')
    return

  # The upload session key is embedded in the URL itself.
  upload_key = re.match(UPLOAD_URL_PATTERN, request.relative_url).group(1)
  try:
    upload_session = datastore.Get(upload_key)
  except datastore_errors.EntityNotFoundError:
    upload_session = None

  if upload_session:
    success_path = upload_session['success_path']

    upload_form = cgi.FieldStorage(fp=request.infile,
                                   headers=request.headers,
                                   environ=base_env_dict)

    try:
      # Convert the uploaded files to blobs and rewrite the form parts as
      # external-body references.
      mime_message_string = self.__cgi_handler.GenerateMIMEMessageString(
          upload_form)
      # The session is one-shot: delete it before forwarding.
      datastore.Delete(upload_session)
      self.current_session = upload_session

      # Split the generated MIME message at the first blank line.
      # find('\n\n') + 1 leaves the first '\n' inside header_text, so
      # header_text ends with a newline; content starts just past the
      # second '\n'.
      header_end = mime_message_string.find('\n\n') + 1
      content_start = header_end + 1
      header_text = mime_message_string[:header_end]
      content_text = mime_message_string[content_start:]

      # Re-append a Content-Length header plus the header/body separator.
      complete_headers = ('%s'
                          'Content-Length: %d\n'
                          '\n') % (header_text, len(content_text))

      # Forward the rewritten request to the success handler with admin
      # rights so the handler can finish the upload.
      return appserver.AppServerRequest(
          success_path,
          None,
          mimetools.Message(cStringIO.StringIO(complete_headers)),
          cStringIO.StringIO(content_text),
          force_admin=True)
    except appserver_upload.InvalidMIMETypeFormatError:
      outfile.write('Status: 400\n\n')
  else:
    # Unknown or already-consumed upload session.
    logging.error('Could not find session for %s', upload_key)
    outfile.write('Status: 404\n\n')
def _Dynamic_FetchData(self, request, response):
  """Fetch a blob fragment from a blob by its blob-key.

  Fetches a blob fragment using its blob-key.  Both the start index and
  the end index are inclusive.  Valid requests for information outside of
  the range of the blob return a partial string or empty string if
  entirely out of range.

  Args:
    request: A fully initialized FetchDataRequest instance.
    response: A FetchDataResponse instance.

  Raises:
    ApplicationError when application has the following errors:
      DATA_INDEX_OUT_OF_RANGE: Start index is negative or the end index
        precedes the start index.
      BLOB_FETCH_SIZE_TOO_LARGE: Requested blob fragment is larger than
        MAX_BLOB_FRAGMENT_SIZE.
      BLOB_NOT_FOUND: If invalid blob-key is provided or is not found.
  """
  start_index = request.start_index()
  if start_index < 0:
    raise apiproxy_errors.ApplicationError(
        blobstore_service_pb.BlobstoreServiceError.DATA_INDEX_OUT_OF_RANGE)

  end_index = request.end_index()
  if end_index < start_index:
    raise apiproxy_errors.ApplicationError(
        blobstore_service_pb.BlobstoreServiceError.DATA_INDEX_OUT_OF_RANGE)

  # Both indexes are inclusive, hence the +1.
  fetch_size = end_index - start_index + 1
  if fetch_size > blobstore.MAX_BLOB_FETCH_SIZE:
    raise apiproxy_errors.ApplicationError(
        blobstore_service_pb.BlobstoreServiceError.BLOB_FETCH_SIZE_TOO_LARGE)

  blob_key = request.blob_key()
  blob_info_key = datastore.Key.from_path(blobstore.BLOB_INFO_KIND,
                                          blob_key,
                                          namespace='')
  # Existence check only; the bound exception object was unused, so it is
  # no longer captured.
  try:
    datastore.Get(blob_info_key)
  except datastore_errors.EntityNotFoundError:
    raise apiproxy_errors.ApplicationError(
        blobstore_service_pb.BlobstoreServiceError.BLOB_NOT_FOUND)
  # NOTE(review): response is never populated in this visible block — the
  # actual data copy presumably happens in code not shown here; confirm.
def __get_value(self, name):
  """Read property `name` from the backing entity, loading it lazily.

  The datastore entity behind this BlobInfo is fetched on first access and
  cached on the instance.  This method supports the attribute protocol and
  should never be invoked directly.

  Args:
    name: Name of the entity property to read.

  Returns:
    The value of that property on the underlying entity.

  Raises:
    AttributeError: If the entity has no property named `name`.
  """
  if self.__entity is None:
    # First access: materialize the entity from the datastore.
    entity_key = datastore_types.Key.from_path(
        self.kind(), str(self.__key), namespace='')
    self.__entity = datastore.Get(entity_key)
  try:
    return self.__entity[name]
  except KeyError:
    # Surface missing properties as attribute errors to the caller.
    raise AttributeError(name)
def GenerateBlobKey(time_func=time.time, random_func=random.random):
  """Create a BlobKey that is unique within the BlobInfo datastore.

  A candidate key is built by md5-digesting the current timestamp together
  with a random number and base64 url-safe encoding the digest.  When the
  candidate collides with an existing BlobInfo entity, a new random number
  is drawn and the process repeats, up to ten attempts.

  Args:
    time_func: Function used for generating the timestamp.  Used for
      dependency injection.  Allows for predictable results during tests.
      Must return a floating point UTC timestamp.
    random_func: Function used for generating the random number.  Used for
      dependency injection.  Allows for predictable results during tests.

  Returns:
    String version of BlobKey that is unique within the BlobInfo
    datastore.  None if ten consecutive candidates all collided.
  """
  # The timestamp is fixed once; only the random component is re-drawn.
  timestamp = str(time_func())
  for _ in range(10):
    digester = md5.md5()
    digester.update(timestamp)
    digester.update(str(random_func()))
    candidate = base64.urlsafe_b64encode(digester.digest())
    datastore_key = datastore.Key.from_path(blobstore.BLOB_INFO_KIND,
                                            candidate,
                                            namespace='')
    try:
      datastore.Get(datastore_key)
      # Get succeeded, so an entity with this key already exists: retry.
    except datastore_errors.EntityNotFoundError:
      # No such entity — the candidate key is unique.
      return candidate
  return None
def DownloadRewriter(response, request_headers):
  """Intercepts blob download key and rewrites response with large download.

  Checks for the X-AppEngine-BlobKey header in the response.  If found, it
  will discard the body of the request and replace it with the blob content
  indicated.

  If a valid blob is not found, it will send a 404 to the client.

  If the application itself provides a content-type header, it will override
  the content-type stored in the action blob.

  If Content-Range header is provided, blob will be partially served.  The
  application can set blobstore.BLOB_RANGE_HEADER if the size of the blob is
  not known.  If Range is present, and not blobstore.BLOB_RANGE_HEADER, will
  use Range instead.

  Args:
    response: Response object to be rewritten.
    request_headers: Original request headers.  Looks for 'Range' header to
      copy to response.
  """
  blob_key = response.headers.getheader(blobstore.BLOB_KEY_HEADER)
  if blob_key:
    # The marker header is internal; never expose it to the client.
    del response.headers[blobstore.BLOB_KEY_HEADER]

    try:
      blob_info = datastore.Get(
          datastore.Key.from_path(blobstore.BLOB_INFO_KIND,
                                  blob_key,
                                  namespace=''))

      content_range_header = response.headers.getheader('Content-Range')
      blob_size = blob_info['size']

      # The app-set range header takes precedence over the client's Range.
      range_header = response.headers.getheader(blobstore.BLOB_RANGE_HEADER)
      if range_header is not None:
        del response.headers[blobstore.BLOB_RANGE_HEADER]
      else:
        range_header = request_headers.getheader('Range')

      def not_satisfiable():
        """Short circuit response and return 416 error."""
        response.status_code = 416
        response.status_message = 'Requested Range Not Satisfiable'
        response.body = cStringIO.StringIO('')
        response.headers['Content-Length'] = '0'
        del response.headers['Content-Type']
        del response.headers['Content-Range']

      if range_header:
        start, end = ParseRangeHeader(range_header)
        if start is not None:
          if end is None:
            # Open-ended range; a negative start is a suffix range counted
            # from the end of the blob.
            if start >= 0:
              content_range_start = start
            else:
              content_range_start = blob_size + start
            content_range = byterange.ContentRange(
                content_range_start, blob_size - 1, blob_size)
            # NOTE(review): the -1 adjustments below appear to compensate
            # for ContentRange treating `stop` as exclusive when
            # stringified — confirm against the byterange implementation.
            content_range.stop -= 1
            content_range_header = str(content_range)
          else:
            # Explicit start-end range from the header.
            range = byterange.ContentRange(start, end, blob_size)
            range.stop -= 1
            content_range_header = str(range)
          response.headers['Content-Range'] = content_range_header
        else:
          # Unparseable range start: 416.
          not_satisfiable()
          return

      # Re-read Content-Range (possibly just set above) to derive the slice
      # of the blob to serve; default to the whole blob.
      content_range = response.headers.getheader('Content-Range')
      content_length = blob_size
      start = 0
      end = content_length
      if content_range is not None:
        parsed_start, parsed_end = ParseContentRangeHeader(content_range)
        if parsed_start is not None:
          start = parsed_start
          content_range = byterange.ContentRange(start, parsed_end, blob_size)
          content_range.stop -= 1
          # Clamp the advertised end so it never reaches past the blob;
          # blob_size - 2 pairs with the exclusive-stop adjustment above —
          # TODO confirm the intended inclusive end is blob_size - 1.
          content_range.stop = min(content_range.stop, blob_size - 2)
          content_length = min(parsed_end, blob_size - 1) - start + 1
          response.headers['Content-Range'] = str(content_range)
        else:
          not_satisfiable()
          return

      # Replace the response body with the requested slice of blob bytes.
      blob_stream = GetBlobStorage().OpenBlob(blob_key)
      blob_stream.seek(start)
      response.body = cStringIO.StringIO(blob_stream.read(content_length))
      response.headers['Content-Length'] = str(content_length)

      # An app-provided Content-Type wins over the stored blob's type.
      if not response.headers.getheader('Content-Type'):
        response.headers['Content-Type'] = blob_info['content_type']
      response.large_response = True

    except datastore_errors.EntityNotFoundError:
      # Missing BlobInfo: scrub the response down to a bare 500.
      response.status_code = 500
      response.status_message = 'Internal Error'
      response.body = cStringIO.StringIO()

      if response.headers.getheader('status'):
        del response.headers['status']
      if response.headers.getheader('location'):
        del response.headers['location']
      if response.headers.getheader('content-type'):
        del response.headers['content-type']

      logging.error('Could not find blob with key %s.', blob_key)