def _OpenImageData(self, image_data):
  """Open an image described by an ImageData protocol buffer.

  Args:
    image_data: ImageData protocol buffer carrying either inline image
      bytes or a blob-key reference (never both).

  Returns:
    Image object for the data passed in or referenced by blob-key.

  Raises:
    ApplicationError: INVALID_BLOB_KEY when both content and a blob-key
      are provided, or NOT_IMAGE when the decoded image is not one of the
      supported formats.

  NOTE: 'content' is a required proto field, so it is present but must be
  the empty string whenever a blob-key is supplied.
  """
  # Inline content and a blob-key are mutually exclusive.
  if image_data.content() and image_data.has_blob_key():
    raise apiproxy_errors.ApplicationError(
        images_service_pb.ImagesServiceError.INVALID_BLOB_KEY)

  if image_data.has_blob_key():
    image = self._OpenBlob(image_data.blob_key())
  else:
    image = self._OpenImage(image_data.content())

  # Only the formats supported by the production service are accepted.
  if image.format not in ("BMP", "GIF", "ICO", "JPEG", "PNG", "TIFF"):
    raise apiproxy_errors.ApplicationError(
        images_service_pb.ImagesServiceError.NOT_IMAGE)
  return image
def _OpenBlob(self, blob_key):
  """Fetch a blob by its key and decode it as an image.

  Args:
    blob_key: string key identifying the blob.

  Returns:
    PIL Image decoded from the blob's stored bytes.

  Raises:
    ApplicationError: UNSPECIFIED_ERROR when no BlobInfo entity exists for
      the key; BAD_IMAGE_DATA when the stored file cannot be opened or is
      not decodable as an image.
  """
  # The BlobInfo entity must exist in the datastore before we consult
  # blob storage at all.
  info_key = datastore_types.Key.from_path(
      blobstore.BLOB_INFO_KIND, blob_key, namespace='')
  try:
    datastore.Get(info_key)
  except datastore_errors.Error:
    logging.exception('Blob with key %r does not exist', blob_key)
    raise apiproxy_errors.ApplicationError(
        images_service_pb.ImagesServiceError.UNSPECIFIED_ERROR)

  storage = apiproxy_stub_map.apiproxy.GetStub("blobstore").storage
  try:
    blob_file = storage.OpenBlob(blob_key)
  except IOError:
    logging.exception('Could not get file for blob_key %r', blob_key)
    raise apiproxy_errors.ApplicationError(
        images_service_pb.ImagesServiceError.BAD_IMAGE_DATA)

  try:
    return Image.open(blob_file)
  except IOError:
    logging.exception('Could not open image %r for blob_key %r',
                      blob_file, blob_key)
    raise apiproxy_errors.ApplicationError(
        images_service_pb.ImagesServiceError.BAD_IMAGE_DATA)
def _Resize(self, image, transform):
  """Use PIL to resize the given image with the given transform.

  Args:
    image: PIL.Image.Image object to resize.
    transform: images_service_pb.Transform to use when resizing.

  Returns:
    PIL.Image.Image with the resize performed on it.

  Raises:
    ApplicationError: BAD_TRANSFORM_DATA when a requested dimension is
      negative or greater than 4000.
  """
  def checked_dimension(present, value):
    # Unset dimensions default to 0; set ones must lie in [0, 4000].
    if not present:
      return 0
    if not 0 <= value <= 4000:
      raise apiproxy_errors.ApplicationError(
          images_service_pb.ImagesServiceError.BAD_TRANSFORM_DATA)
    return value

  width = checked_dimension(transform.has_width(), transform.width())
  height = checked_dimension(transform.has_height(), transform.height())

  current_width, current_height = image.size
  new_width, new_height = self._CalculateNewDimensions(
      current_width, current_height, width, height)

  return image.resize((new_width, new_height), Image.ANTIALIAS)
def _Dynamic_PurgeQueue(self, request, response):
  """Local purge implementation of TaskQueueService.PurgeQueue.

  Deletes every task currently stored in the named queue.

  Args:
    request: A taskqueue_service_pb.TaskQueuePurgeQueueRequest.
    response: A taskqueue_service_pb.TaskQueuePurgeQueueResponse.
  """
  queue_name = request.queue_name()
  if not queue_name:
    raise apiproxy_errors.ApplicationError(
        taskqueue_service_pb.TaskQueueServiceError.INVALID_QUEUE_NAME)

  queues = self._app_queues.get(request.app_id(), {})
  # The default queue always exists; any other name must refer to a
  # known, non-tombstoned queue.
  if queue_name != DEFAULT_QUEUE_NAME:
    if queue_name not in queues:
      raise apiproxy_errors.ApplicationError(
          taskqueue_service_pb.TaskQueueServiceError.UNKNOWN_QUEUE)
    if queues[queue_name] is None:
      raise apiproxy_errors.ApplicationError(
          taskqueue_service_pb.TaskQueueServiceError.TOMBSTONED_QUEUE)

  store = self.GetDummyTaskStore(request.app_id(), queue_name)
  for task in store.Lookup(store.Count()):
    store.Delete(task.task_name())

  self.FlushQueue(queue_name)
def _Dynamic_Fetch(self, request, response):
  """Trivial implementation of URLFetchService::Fetch().

  Args:
    request: the fetch to perform, a URLFetchRequest
    response: the fetch response, a URLFetchResponse
  """
  # Map the proto enum onto the HTTP verb; only POST and PUT carry a body.
  method_table = {
      urlfetch_service_pb.URLFetchRequest.GET: ('GET', False),
      urlfetch_service_pb.URLFetchRequest.POST: ('POST', True),
      urlfetch_service_pb.URLFetchRequest.HEAD: ('HEAD', False),
      urlfetch_service_pb.URLFetchRequest.PUT: ('PUT', True),
      urlfetch_service_pb.URLFetchRequest.DELETE: ('DELETE', False),
  }
  if request.method() not in method_table:
    logging.error('Invalid method: %s', request.method())
    raise apiproxy_errors.ApplicationError(
        urlfetch_service_pb.URLFetchServiceError.UNSPECIFIED_ERROR)
  method, has_payload = method_table[request.method()]
  payload = request.payload() if has_payload else None

  (protocol, host, path, parameters, query,
   fragment) = urlparse.urlparse(request.url())

  if protocol not in ('http', 'https'):
    logging.error('Invalid protocol: %s', protocol)
    raise apiproxy_errors.ApplicationError(
        urlfetch_service_pb.URLFetchServiceError.INVALID_URL)

  if not host:
    logging.error('Missing host.')
    raise apiproxy_errors.ApplicationError(
        urlfetch_service_pb.URLFetchServiceError.FETCH_ERROR)

  # Strip headers that user code is not allowed to control.
  sanitized_headers = self._SanitizeHttpHeaders(
      _UNTRUSTED_REQUEST_HEADERS, request.header_list())
  request.clear_header()
  request.header_list().extend(sanitized_headers)

  if request.has_deadline():
    deadline = request.deadline()
  else:
    deadline = _API_CALL_DEADLINE

  self._RetrieveURL(request.url(), payload, method,
                    request.header_list(), request, response,
                    follow_redirects=request.followredirects(),
                    deadline=deadline)
def _Dynamic_Histogram(self, request, response):
  """Trivial implementation of ImagesService::Histogram.

  Based off documentation of the PIL library at
  http://www.pythonware.com/library/pil/handbook/index.htm

  Args:
    request: ImagesHistogramRequest, contains the image.
    response: ImagesHistogramResponse, contains histogram of the image.
  """
  # _OpenImageData already raises NOT_IMAGE for unsupported formats, so
  # the duplicate format check that used to live here was dead code and
  # has been removed.
  image = self._OpenImageData(request.image())

  image = image.convert("RGBA")
  red = [0] * 256
  green = [0] * 256
  blue = [0] * 256
  # Premultiply each channel by its alpha before bucketing, mirroring the
  # production backend's histogram semantics.
  for pixel in image.getdata():
    red[int((pixel[0] * pixel[3]) / 255)] += 1
    green[int((pixel[1] * pixel[3]) / 255)] += 1
    blue[int((pixel[2] * pixel[3]) / 255)] += 1
  histogram = response.mutable_histogram()
  for value in red:
    histogram.add_red(value)
  for value in green:
    histogram.add_green(value)
  for value in blue:
    histogram.add_blue(value)
def _ValidateCropArg(self, arg):
  """Validate a single fractional Crop-transform boundary.

  Args:
    arg: float in [0.0, 1.0] giving a crop edge as a fraction of the
      image dimension.

  Raises:
    ApplicationError: BAD_TRANSFORM_DATA when arg is not a float or lies
      outside [0.0, 1.0].
  """
  arg_is_valid = isinstance(arg, float) and 0.0 <= arg <= 1.0
  if not arg_is_valid:
    raise apiproxy_errors.ApplicationError(
        images_service_pb.ImagesServiceError.BAD_TRANSFORM_DATA)
def __MakeCallDone(self):
  """Finish an RPC: translate the stub's result dict into final state.

  Moves the RPC into FINISHING state, records CPU usage, and then either
  parses the successful response bytes into self.response or converts the
  reported error code into the matching exception object, which is stored
  in self.__exception for later raising.
  """
  self.__state = RPC.FINISHING
  self.cpu_usage_mcycles = self.__result_dict['cpu_usage_mcycles']
  if self.__result_dict['error'] == APPLICATION_ERROR:
    # Application-level error: propagate the code and detail verbatim.
    self.__exception = apiproxy_errors.ApplicationError(
        self.__result_dict['application_error'],
        self.__result_dict['error_detail'])
  elif self.__result_dict['error'] == CAPABILITY_DISABLED:
    # Use the service-supplied detail when present, otherwise a generic
    # message naming the disabled call.
    if self.__result_dict['error_detail']:
      self.__exception = apiproxy_errors.CapabilityDisabledError(
          self.__result_dict['error_detail'])
    else:
      self.__exception = apiproxy_errors.CapabilityDisabledError(
          "The API call %s.%s() is temporarily unavailable." % (
              self.package, self.call))
  elif self.__result_dict['error'] == FEATURE_DISABLED:
    self.__exception = apiproxy_errors.FeatureNotEnabledError(
        self.__result_dict['error_detail'])
  elif self.__result_dict['error'] in _ExceptionsMap:
    # Known error codes map to (exception class, message template) pairs.
    exception_entry = _ExceptionsMap[self.__result_dict['error']]
    self.__exception = exception_entry[0](
        exception_entry[1] % (self.package, self.call))
  else:
    # No recognized error: deserialize the response payload.  A parse
    # failure is surfaced to the caller like any other RPC error.
    try:
      self.response.ParseFromString(self.__result_dict['result_string'])
    except Exception, e:
      self.__exception = e
def _Dynamic_FetchData(self, request, response):
  """Fetch a blob fragment from a blob by its blob-key.

  Fetches a blob fragment using its blob-key.  Start index is inclusive,
  end index is inclusive.  Valid requests for information outside of the
  range of the blob return a partial string or empty string if entirely
  out of range.

  Args:
    request: A fully initialized FetchDataRequest instance.
    response: A FetchDataResponse instance.

  Raises:
    ApplicationError when application has the following errors:
      INDEX_OUT_OF_RANGE: Index is negative or end > start.
      BLOB_FETCH_SIZE_TOO_LARGE: Request blob fragment is larger than
        MAX_BLOB_FRAGMENT_SIZE.
      BLOB_NOT_FOUND: If invalid blob-key is provided or is not found.
  """
  start = request.start_index()
  end = request.end_index()
  # Indexes are inclusive; start must be non-negative and no greater
  # than end.
  if start < 0 or end < start:
    raise apiproxy_errors.ApplicationError(
        blobstore_service_pb.BlobstoreServiceError.DATA_INDEX_OUT_OF_RANGE)

  if end - start + 1 > blobstore.MAX_BLOB_FETCH_SIZE:
    raise apiproxy_errors.ApplicationError(
        blobstore_service_pb.BlobstoreServiceError.BLOB_FETCH_SIZE_TOO_LARGE)

  # The BlobInfo entity must exist for the key to be considered valid.
  blob_info_key = datastore.Key.from_path(
      blobstore.BLOB_INFO_KIND, request.blob_key(), namespace='')
  try:
    datastore.Get(blob_info_key)
  except datastore_errors.EntityNotFoundError:
    raise apiproxy_errors.ApplicationError(
        blobstore_service_pb.BlobstoreServiceError.BLOB_NOT_FOUND)
  # NOTE(review): only the validation portion is visible in this excerpt;
  # the response is never populated here — confirm against the full file.
def _Dynamic_Composite(self, request, response):
  """Implementation of ImagesService::Composite.

  Based off documentation of the PIL library at
  http://www.pythonware.com/library/pil/handbook/index.htm

  Args:
    request: ImagesCompositeRequest, contains image request info.
    response: ImagesCompositeResponse, contains transformed image.
  """
  # The canvas color arrives as 32-bit ARGB; convert to RGBA and
  # premultiply to match the backend's pixel representation.
  width = request.canvas().width()
  height = request.canvas().height()
  color = _ArgbToRgbaTuple(request.canvas().color())
  color = _BackendPremultiplication(color)
  canvas = Image.new("RGBA", (width, height), color)
  sources = []
  # Canvas dimensions must be in (0, 4000] on both axes.
  if (not request.canvas().width() or request.canvas().width() > 4000 or
      not request.canvas().height() or request.canvas().height() > 4000):
    raise apiproxy_errors.ApplicationError(
        images_service_pb.ImagesServiceError.BAD_TRANSFORM_DATA)
  if not request.image_size():
    raise apiproxy_errors.ApplicationError(
        images_service_pb.ImagesServiceError.BAD_TRANSFORM_DATA)
  if not request.options_size():
    raise apiproxy_errors.ApplicationError(
        images_service_pb.ImagesServiceError.BAD_TRANSFORM_DATA)
  if request.options_size() > images.MAX_COMPOSITES_PER_REQUEST:
    raise apiproxy_errors.ApplicationError(
        images_service_pb.ImagesServiceError.BAD_TRANSFORM_DATA)
  for image in request.image_list():
    sources.append(self._OpenImageData(image))

  for options in request.options_list():
    # Each composite option must reference a valid source image, a valid
    # anchor position, and an opacity in [0, 1].
    if (options.anchor() < images.TOP_LEFT or
        options.anchor() > images.BOTTOM_RIGHT):
      raise apiproxy_errors.ApplicationError(
          images_service_pb.ImagesServiceError.BAD_TRANSFORM_DATA)
    if options.source_index() >= len(sources) or options.source_index() < 0:
      raise apiproxy_errors.ApplicationError(
          images_service_pb.ImagesServiceError.BAD_TRANSFORM_DATA)
    if options.opacity() < 0 or options.opacity() > 1:
      raise apiproxy_errors.ApplicationError(
          images_service_pb.ImagesServiceError.BAD_TRANSFORM_DATA)
    source = sources[options.source_index()]
    # The anchor enum is laid out in a 3x3 grid; derive fractional x/y
    # anchor positions (0, 0.5, 1).  The division of the anchor by 3 is
    # intentionally integer division (Python 2 `/` on ints).
    x_anchor = (options.anchor() % 3) * 0.5
    y_anchor = (options.anchor() / 3) * 0.5
    x_offset = int(options.x_offset() + x_anchor * (width - source.size[0]))
    y_offset = int(options.y_offset() + y_anchor * (height - source.size[1]))
    # Apply opacity via a constant-alpha mask when pasting.
    alpha = options.opacity() * 255
    mask = Image.new("L", source.size, alpha)
    canvas.paste(source, (x_offset, y_offset), mask)
  response_value = self._EncodeImage(canvas, request.canvas().output())
  response.mutable_image().set_content(response_value)
def _Dynamic_UpdateStorageLimit(self, request, response):
  """Local implementation of TaskQueueService.UpdateStorageLimit.

  Args:
    request: A taskqueue_service_pb.TaskQueueUpdateStorageLimitRequest.
    response: A taskqueue_service_pb.TaskQueueUpdateStorageLimitResponse.

  Raises:
    ApplicationError: INVALID_REQUEST when the requested limit is negative
      or exceeds 1000 terabytes.
  """
  # 1000 TiB is the ceiling accepted by the service.
  maximum_limit = 1000 * (1024 ** 4)
  if not 0 <= request.limit() <= maximum_limit:
    raise apiproxy_errors.ApplicationError(
        taskqueue_service_pb.TaskQueueServiceError.INVALID_REQUEST)

  response.set_new_limit(request.limit())
def _Dynamic_DeleteQueue(self, request, response):
  """Local delete implementation of TaskQueueService.DeleteQueue.

  Marks the queue as tombstoned rather than removing its entry.

  Args:
    request: A taskqueue_service_pb.TaskQueueDeleteQueueRequest.
    response: A taskqueue_service_pb.TaskQueueDeleteQueueResponse.
  """
  queue_name = request.queue_name()
  if not queue_name:
    raise apiproxy_errors.ApplicationError(
        taskqueue_service_pb.TaskQueueServiceError.INVALID_QUEUE_NAME)

  queues = self._app_queues.get(request.app_id(), {})
  if queue_name not in queues:
    raise apiproxy_errors.ApplicationError(
        taskqueue_service_pb.TaskQueueServiceError.UNKNOWN_QUEUE)
  if queues[queue_name] is None:
    # Deleting an already-deleted queue is an error.
    raise apiproxy_errors.ApplicationError(
        taskqueue_service_pb.TaskQueueServiceError.TOMBSTONED_QUEUE)

  # A None entry is the tombstone marker.
  queues[queue_name] = None
def _Dynamic_BulkAdd(self, request, response):
  """Add many tasks to a queue using a single request.

  Args:
    request: The taskqueue_service_pb.TaskQueueBulkAddRequest. See
        taskqueue_service.proto.
    response: The taskqueue_service_pb.TaskQueueBulkAddResponse. See
        taskqueue_service.proto.
  """
  assert request.add_request_size(), 'taskqueue should prevent empty requests'

  if not self._IsValidQueue(request.add_request(0).queue_name()):
    raise apiproxy_errors.ApplicationError(
        taskqueue_service_pb.TaskQueueServiceError.UNKNOWN_QUEUE)

  error_found = False
  # Results whose tasks were auto-named; their chosen names are echoed
  # back to the caller at the end.
  task_results_with_chosen_names = []

  # First pass: validate every add request.  Valid entries are marked
  # SKIPPED until the batch as a whole is known to be clean.
  for add_request in request.add_request_list():
    task_result = response.add_taskresult()
    error = self._VerifyTaskQueueAddRequest(add_request)
    if error == taskqueue_service_pb.TaskQueueServiceError.OK:
      if not add_request.task_name():
        # Auto-assign a unique name so the store can index the task.
        chosen_name = self._ChooseTaskName()
        add_request.set_task_name(chosen_name)
        task_results_with_chosen_names.append(task_result)
      task_result.set_result(
          taskqueue_service_pb.TaskQueueServiceError.SKIPPED)
    else:
      error_found = True
      task_result.set_result(error)
  # All-or-nothing: any invalid entry aborts the whole batch.
  if error_found:
    return

  if request.add_request(0).has_transaction():
    self._TransactionalBulkAdd(request)
  elif request.add_request(0).has_app_id():
    self._DummyTaskStoreBulkAdd(request, response)
  else:
    self._NonTransactionalBulkAdd(request, response)

  # Second pass: flip surviving SKIPPED markers to OK and report any
  # auto-chosen task names.
  for add_request, task_result in zip(request.add_request_list(),
                                      response.taskresult_list()):
    if (task_result.result() ==
        taskqueue_service_pb.TaskQueueServiceError.SKIPPED):
      task_result.set_result(taskqueue_service_pb.TaskQueueServiceError.OK)
    if task_result in task_results_with_chosen_names:
      task_result.set_chosen_task_name(add_request.task_name())
def _Dynamic_CreateChannel(self, request, response):
  """Implementation of channel.get_channel.

  Args:
    request: A ChannelServiceRequest.
    response: A ChannelServiceResponse
  """
  # In the dev stub the client id is simply the application key itself.
  application_key = request.application_key()
  if application_key:
    response.set_client_id(application_key)
  else:
    raise apiproxy_errors.ApplicationError(
        channel_service_pb.ChannelServiceError.INVALID_CHANNEL_KEY)
def _OpenImage(self, image):
  """Open image bytes supplied as a string.

  Args:
    image: str, raw encoded image data.

  Returns:
    Image decoded from the supplied data.

  Raises:
    ApplicationError: NOT_IMAGE for empty input; BAD_IMAGE_DATA when PIL
      cannot decode the bytes.
  """
  if not image:
    raise apiproxy_errors.ApplicationError(
        images_service_pb.ImagesServiceError.NOT_IMAGE)

  try:
    return Image.open(StringIO.StringIO(image))
  except IOError:
    raise apiproxy_errors.ApplicationError(
        images_service_pb.ImagesServiceError.BAD_IMAGE_DATA)
def _Dynamic_Add(self, request, response):
  """Add a single task by delegating to the BulkAdd implementation."""
  bulk_request = taskqueue_service_pb.TaskQueueBulkAddRequest()
  bulk_response = taskqueue_service_pb.TaskQueueBulkAddResponse()

  bulk_request.add_add_request().CopyFrom(request)
  self._Dynamic_BulkAdd(bulk_request, bulk_response)

  assert bulk_response.taskresult_size() == 1
  task_result = bulk_response.taskresult(0)

  if task_result.result() != taskqueue_service_pb.TaskQueueServiceError.OK:
    raise apiproxy_errors.ApplicationError(task_result.result())
  if task_result.has_chosen_task_name():
    # Propagate any auto-generated task name back to the caller.
    response.set_chosen_task_name(task_result.chosen_task_name())
def _TransactionalBulkAdd(self, request):
  """Uses datastore.AddActions to associate tasks with a transaction.

  Args:
    request: The taskqueue_service_pb.TaskQueueBulkAddRequest containing the
      tasks to add. N.B. all tasks in the request have been validated and
      assigned unique names.
  """
  try:
    apiproxy_stub_map.MakeSyncCall(
        'datastore_v3', 'AddActions', request, api_base_pb.VoidProto())
  except apiproxy_errors.ApplicationError, e:
    # Datastore error codes are offset by DATASTORE_ERROR so callers can
    # distinguish them from native task queue error codes.
    raise apiproxy_errors.ApplicationError(
        e.application_error +
        taskqueue_service_pb.TaskQueueServiceError.DATASTORE_ERROR,
        e.error_detail)
def _ProcessTransforms(self, image, transforms):
  """Execute PIL operations based on transform values.

  Args:
    image: PIL.Image.Image instance, image to manipulate.
    transforms: list of ImagesTransformRequest.Transform objects.

  Returns:
    PIL.Image.Image with transforms performed on it.

  Raises:
    ApplicationError: BAD_TRANSFORM_DATA if more transforms are supplied
      than MAX_TRANSFORMS_PER_REQUEST allows.
  """
  new_image = image
  if len(transforms) > images.MAX_TRANSFORMS_PER_REQUEST:
    raise apiproxy_errors.ApplicationError(
        images_service_pb.ImagesServiceError.BAD_TRANSFORM_DATA)
  for transform in transforms:
    if transform.has_width() or transform.has_height():
      new_image = self._Resize(new_image, transform)
    elif transform.has_rotate():
      new_image = self._Rotate(new_image, transform)
    elif transform.has_horizontal_flip():
      new_image = new_image.transpose(Image.FLIP_LEFT_RIGHT)
    elif transform.has_vertical_flip():
      new_image = new_image.transpose(Image.FLIP_TOP_BOTTOM)
    elif (transform.has_crop_left_x() or
          transform.has_crop_top_y() or
          transform.has_crop_right_x() or
          transform.has_crop_bottom_y()):
      new_image = self._Crop(new_image, transform)
    elif transform.has_autolevels():
      # Autolevels is not simulated locally; it only takes effect in
      # production.
      logging.info("I'm Feeling Lucky autolevels will be visible once this "
                   "application is deployed.")
    else:
      # Fixed the garbled log message ("Found no transformations found
      # to perform.").
      logging.warn("Found no transformations to perform.")
  return new_image
def _Dynamic_UpdateQueue(self, request, unused_response):
  """Local implementation of the UpdateQueue RPC in TaskQueueService.

  Must adhere to the '_Dynamic_' naming convention for stubbing to work.
  See taskqueue_service.proto for a full description of the RPC.

  Args:
    request: A taskqueue_service_pb.TaskQueueUpdateQueueRequest.
    unused_response: A taskqueue_service_pb.TaskQueueUpdateQueueResponse.
                     Not used.
  """
  queues = self._app_queues.setdefault(request.app_id(), {})
  queue_name = request.queue_name()

  # A None entry marks a deleted (tombstoned) queue; it cannot be
  # resurrected by an update.  The False default distinguishes "absent"
  # from the None tombstone.
  if queues.get(queue_name, False) is None:
    raise apiproxy_errors.ApplicationError(
        taskqueue_service_pb.TaskQueueServiceError.TOMBSTONED_QUEUE)

  # Store a defensive copy so later mutation of the request cannot alter
  # the stored queue definition.
  stored_queue = self._QueueDetails()
  stored_queue.CopyFrom(request)
  queues[queue_name] = stored_queue
def _Rotate(self, image, transform):
  """Use PIL to rotate the given image with the given transform.

  Args:
    image: PIL.Image.Image object to rotate.
    transform: images_service_pb.Transform specifying the rotation.

  Returns:
    PIL.Image.Image rotated as requested.

  Raises:
    ApplicationError: BAD_TRANSFORM_DATA when degrees is negative or not
      a multiple of 90.
  """
  degrees = transform.rotate()
  if degrees < 0 or degrees % 90 != 0:
    raise apiproxy_errors.ApplicationError(
        images_service_pb.ImagesServiceError.BAD_TRANSFORM_DATA)

  # The API rotates clockwise while PIL rotates counterclockwise, so
  # rotate by the complement within a full turn.
  return image.rotate(360 - degrees % 360)
def _Dynamic_SendChannelMessage(self, request, response):
  """Implementation of channel.send_message.

  Queues a message to be retrieved by the client when it polls.

  Args:
    request: A SendMessageRequest.
    response: A VoidProto.
  """
  application_key = request.application_key()

  if not request.message():
    raise apiproxy_errors.ApplicationError(
        channel_service_pb.ChannelServiceError.BAD_MESSAGE)

  # Publish to the local channel endpoint over plain HTTP; the HTTP
  # response is intentionally never read.
  headers = {'Content-Type': 'text/plain',
             'Last-Modified': rfc1123_date()}
  conn = httplib.HTTPConnection(self._address)
  conn.request("POST", "/_ah/publish?id=%s" % application_key,
               request.message(), headers)
  conn.close()
def Add(self, request):
  """Inserts a new task into the store.

  Args:
    request: A taskqueue_service_pb.TaskQueueAddRequest.

  Raises:
    apiproxy_errors.ApplicationError: If a task with the same name is
      already in the store.
  """
  # _sorted_by_name holds tuples keyed by task name; bisect gives us the
  # would-be insertion point to check for a duplicate.
  pos = bisect.bisect_left(self._sorted_by_name, (request.task_name(),))
  if (pos < len(self._sorted_by_name) and
      self._sorted_by_name[pos][0] == request.task_name()):
    raise apiproxy_errors.ApplicationError(
        taskqueue_service_pb.TaskQueueServiceError.TASK_ALREADY_EXISTS)

  now = datetime.datetime.utcnow()
  now_sec = time.mktime(now.timetuple())
  task = taskqueue_service_pb.TaskQueueQueryTasksResponse_Task()
  task.set_task_name(request.task_name())
  task.set_eta_usec(request.eta_usec())
  task.set_creation_time_usec(now_sec * 1e6)
  task.set_url(request.url())
  task.set_method(request.method())
  # BUG FIX: copy headers from the incoming *request*, not from the
  # freshly created (and therefore empty) task — the original loop
  # iterated task.header_list() and silently dropped every header.
  for keyvalue in request.header_list():
    header = task.add_header()
    header.set_key(keyvalue.key())
    header.set_value(keyvalue.value())
  if request.has_description():
    task.set_description(request.description())
  if request.has_body():
    task.set_body(request.body())
  if request.has_crontimetable():
    task.mutable_crontimetable().set_schedule(
        request.crontimetable().schedule())
    task.mutable_crontimetable().set_timezone(
        request.crontimetable().timezone())
  self._InsertTask(task)
          # (Fragment: tail of a _RetrieveURL-style redirect loop whose
          # beginning lies outside this chunk — indentation reconstructed.)
          # Content-Length must reflect the (possibly truncated) stored body.
          header_value = str(len(response.content()))
          header_proto = response.add_header()
          header_proto.set_key(header_key)
          header_proto.set_value(header_value)
        if len(http_response_data) > MAX_RESPONSE_SIZE:
          response.set_contentwastruncated(True)
        if request.url() != url:
          response.set_finalurl(url)
        # Non-redirect response: stop following the chain.
        break
    else:
      # The for-loop exhausted MAX_REDIRECTS without breaking.
      error_msg = 'Too many repeated redirects'
      logging.error(error_msg)
      raise apiproxy_errors.ApplicationError(
          urlfetch_service_pb.URLFetchServiceError.FETCH_ERROR, error_msg)

  def _SanitizeHttpHeaders(self, untrusted_headers, headers):
    """Cleans "unsafe" headers from the HTTP request/response.

    Args:
      untrusted_headers: set of untrusted headers names
      headers: list of string pairs, first is header name and the second
        is header's value
    """
    # Collect the offending names only so they can be logged.
    prohibited_headers = [h.key() for h in headers
                          if h.key().lower() in untrusted_headers]
    if prohibited_headers:
      logging.warn('Stripped prohibited headers from URLFetch request: %s',
                   prohibited_headers)
    # Return a generator yielding only the trusted headers.
    return (h for h in headers if h.key().lower() not in untrusted_headers)
def _RetrieveURL(self, url, payload, method, headers, request, response,
                 follow_redirects=True, deadline=_API_CALL_DEADLINE):
  """Retrieves a URL.

  Args:
    url: String containing the URL to access.
    payload: Request payload to send, if any; None if no payload.
    method: HTTP method to use (e.g., 'GET')
    headers: List of additional header objects to use for the request.
    request: Request object from original request.
    response: Response object to populate with the response data.
    follow_redirects: optional setting (defaulting to True) for whether or not
      we should transparently follow redirects (up to MAX_REDIRECTS)
    deadline: Number of seconds to wait for the urlfetch to finish.

  Raises:
    Raises an apiproxy_errors.ApplicationError exception with FETCH_ERROR
    in cases where:
      - MAX_REDIRECTS is exceeded
      - The protocol of the redirected URL is bad or missing.
  """
  last_protocol = ''
  last_host = ''

  # Each iteration performs one HTTP round trip; iterations beyond the
  # first only happen while following redirects.
  for redirect_number in xrange(MAX_REDIRECTS + 1):
    parsed = urlparse.urlparse(url)
    protocol, host, path, parameters, query, fragment = parsed

    port = urllib.splitport(urllib.splituser(host)[1])[1]

    # The dev server only warns; production would reject this port.
    if not _IsAllowedPort(port):
      logging.warning(
          'urlfetch received %s ; port %s is not allowed in production!' %
          (url, port))

    if protocol and not host:
      logging.error('Missing host on redirect; target url is %s' % url)
      raise apiproxy_errors.ApplicationError(
          urlfetch_service_pb.URLFetchServiceError.FETCH_ERROR)

    # Relative redirects reuse the previous scheme and host.
    if not host and not protocol:
      host = last_host
      protocol = last_protocol

    adjusted_headers = {
        'User-Agent':
        'AppEngine-Google; (+http://code.google.com/appengine)',
        'Host': host,
        'Accept-Encoding': 'gzip',
    }
    if payload is not None:
      adjusted_headers['Content-Length'] = len(payload)
    if method == 'POST' and payload:
      adjusted_headers['Content-Type'] = 'application/x-www-form-urlencoded'

    # Caller headers override the defaults, except User-Agent, which is
    # prepended to the stub's own agent string.
    for header in headers:
      if header.key().title().lower() == 'user-agent':
        adjusted_headers['User-Agent'] = (
            '%s %s' % (header.value(), adjusted_headers['User-Agent']))
      else:
        adjusted_headers[header.key().title()] = header.value()

    logging.debug('Making HTTP request: host = %s, '
                  'url = %s, payload = %s, headers = %s',
                  host, url, payload, adjusted_headers)
    try:
      if protocol == 'http':
        connection = httplib.HTTPConnection(host)
      elif protocol == 'https':
        connection = httplib.HTTPSConnection(host)
      else:
        error_msg = 'Redirect specified invalid protocol: "%s"' % protocol
        logging.error(error_msg)
        raise apiproxy_errors.ApplicationError(
            urlfetch_service_pb.URLFetchServiceError.FETCH_ERROR, error_msg)

      last_protocol = protocol
      last_host = host

      if query != '':
        full_path = path + '?' + query
      else:
        full_path = path

      # The deadline is applied through the process-global default socket
      # timeout and restored afterwards.
      orig_timeout = socket.getdefaulttimeout()
      try:
        socket.setdefaulttimeout(deadline)
        connection.request(method, full_path, payload, adjusted_headers)
        http_response = connection.getresponse()
        if method == 'HEAD':
          http_response_data = ''
        else:
          http_response_data = http_response.read()
      finally:
        socket.setdefaulttimeout(orig_timeout)
        connection.close()
    except (httplib.error, socket.error, IOError), e:
      raise apiproxy_errors.ApplicationError(
          urlfetch_service_pb.URLFetchServiceError.FETCH_ERROR, str(e))

    if http_response.status in REDIRECT_STATUSES and follow_redirects:
      # Follow the Location header on the next loop iteration.
      url = http_response.getheader('Location', None)
      if url is None:
        error_msg = 'Redirecting response was missing "Location" header'
        logging.error(error_msg)
        raise apiproxy_errors.ApplicationError(
            urlfetch_service_pb.URLFetchServiceError.FETCH_ERROR, error_msg)
    else:
      response.set_statuscode(http_response.status)
      # Transparently decompress gzip bodies before truncating.
      if http_response.getheader('content-encoding') == 'gzip':
        gzip_stream = StringIO.StringIO(http_response_data)
        gzip_file = gzip.GzipFile(fileobj=gzip_stream)
        http_response_data = gzip_file.read()
      response.set_content(http_response_data[:MAX_RESPONSE_SIZE])

      for header_key, header_value in http_response.getheaders():
        # Drop content-encoding: gzip since the body was decompressed.
        if (header_key.lower() == 'content-encoding' and
            header_value == 'gzip'):
          continue
        if header_key.lower() == 'content-length':
          # Content-Length must match the (possibly truncated) body.
          header_value = str(len(response.content()))
        header_proto = response.add_header()
        header_proto.set_key(header_key)
        header_proto.set_value(header_value)

      if len(http_response_data) > MAX_RESPONSE_SIZE:
        response.set_contentwastruncated(True)

      if request.url() != url:
        response.set_finalurl(url)

      break
def BadRequest(message):
  """Raise a datastore BAD_REQUEST ApplicationError with the given message.

  Args:
    message: str, detail text attached to the error.

  Raises:
    apiproxy_errors.ApplicationError: always.
  """
  raise apiproxy_errors.ApplicationError(datastore_pb.Error.BAD_REQUEST,
                                         message)