Example #1
    def _AddTransactionalBulkTask(self, request, response):
        """ Add a transactional task.

    Args:
      request: A taskqueue_service_pb.TaskQueueAddRequest.
      response: A taskqueue_service_pb.TaskQueueAddResponse.
    Returns:
      The taskqueue response.
    """
        for add_request in request.add_request_list():
            task_result = response.add_taskresult()

            task_name = None
            if add_request.has_task_name():
                task_name = add_request.task_name()

            if not task_name:
                task_name = self._ChooseTaskName()

            namespaced_name = '_'.join(
                ['task', self.__app_id,
                 add_request.queue_name(), task_name])

            add_request.set_task_name(task_name)
            task_result.set_chosen_task_name(namespaced_name)

        for add_request, task_result in zip(request.add_request_list(),
                                            response.taskresult_list()):
            task_result.set_result(
                taskqueue_service_pb.TaskQueueServiceError.OK)

        # All tasks should have been validated and assigned a unique name by this point.
        try:
            apiproxy_stub_map.MakeSyncCall('datastore_v3', 'AddActions',
                                           request, api_base_pb.VoidProto())
        except apiproxy_errors.ApplicationError, e:
            raise apiproxy_errors.ApplicationError(
                e.application_error +
                taskqueue_service_pb.TaskQueueServiceError.DATASTORE_ERROR,
                e.error_detail)
Example #2
    def __insert_v3_entity(self, v3_entity, v3_txn):
        """Inserts a v3 entity.

    Args:
      v3_entity: an entity_pb.EntityProto
      v3_txn: a datastore_pb.Transaction or None

    Returns:
      a tuple (the number of index writes that occurred,
               the entity key)

    Raises:
      ApplicationError: if the entity already exists
    """
        if not v3_txn:

            v3_txn = datastore_pb.Transaction()
            v3_begin_txn_req = datastore_pb.BeginTransactionRequest()
            v3_begin_txn_req.set_app(v3_entity.key().app())
            self.__make_v3_call('BeginTransaction', v3_begin_txn_req, v3_txn)
            _, key = self.__insert_v3_entity(v3_entity, v3_txn)
            v3_resp = datastore_pb.CommitResponse()
            self.__make_v3_call('Commit', v3_txn, v3_resp)
            return (v3_resp.cost().index_writes(), key)

        if datastore_pbs.is_complete_v3_key(v3_entity.key()):
            v3_get_req = datastore_pb.GetRequest()
            v3_get_req.mutable_transaction().CopyFrom(v3_txn)
            v3_get_req.key_list().append(v3_entity.key())
            v3_get_resp = datastore_pb.GetResponse()
            self.__make_v3_call('Get', v3_get_req, v3_get_resp)
            if v3_get_resp.entity(0).has_entity():
                raise apiproxy_errors.ApplicationError(
                    datastore_pb.Error.BAD_REQUEST, 'Entity already exists.')
        v3_put_req = datastore_pb.PutRequest()
        v3_put_req.mutable_transaction().CopyFrom(v3_txn)
        v3_put_req.entity_list().append(v3_entity)
        v3_put_resp = datastore_pb.PutResponse()
        self.__make_v3_call('Put', v3_put_req, v3_put_resp)
        return (v3_put_resp.cost().index_writes(), v3_put_resp.key(0))
Example #3
  def _RemoteSend(self, request, response, method):
    """ Sends a request remotely to the search server.

    Args:
      request: A request object.
      response: A response object to be filled in.
      method: A str, the dynamic function doing the call.
    """
    if not self.__search_location:
      raise search.InternalError("Search service not configured.")

    api_request = remote_api_pb.Request()
    api_request.set_method(method)
    api_request.set_service_name("search")
    api_request.set_request(request.Encode())

    api_response = remote_api_pb.Response()
    api_response = api_request.sendCommand(self.__search_location,
      "",
      api_response,
      1,
      False,
      KEY_LOCATION,
      CERT_LOCATION)

    if api_response.has_application_error():
      error_pb = api_response.application_error()
      logging.error(error_pb.detail())
      raise apiproxy_errors.ApplicationError(error_pb.code(),
                                             error_pb.detail())

    if api_response.has_exception():
      raise api_response.exception()

    if not api_response or not api_response.has_response():
      raise search.InternalError(
          'No response from search server on %s requests.' % method)

    response.ParseFromString(api_response.response())
Example #4
    def _Dynamic_AddActions(self, request, _):
        """Associates the creation of one or more tasks with a transaction.

    Args:
      request: A taskqueue_service_pb.TaskQueueBulkAddRequest containing the
          tasks that should be created when the transaction is committed.
    """
        if ((len(self.__tx_actions) + request.add_request_size()) >
                _MAX_ACTIONS_PER_TXN):
            raise apiproxy_errors.ApplicationError(
                datastore_pb.Error.BAD_REQUEST,
                'Too many messages, maximum allowed %s' % _MAX_ACTIONS_PER_TXN)

        new_actions = []
        for add_request in request.add_request_list():
            self.__ValidateTransaction(add_request.transaction())
            clone = taskqueue_service_pb.TaskQueueAddRequest()
            clone.CopyFrom(add_request)
            clone.clear_transaction()
            new_actions.append(clone)

        self.__tx_actions.extend(new_actions)
Example #5
  def __get_task_function(self, request):
    """ Returns a function pointer to a celery task.

    Loads the celery worker module for the app and looks up the queue's
    task function in it.

    Args:
      request: A taskqueue_service_pb.TaskQueueAddRequest.
    Returns:
      A function pointer to a celery task.
    Raises:
      apiproxy_errors.ApplicationError: If the worker module or queue
        function cannot be found.
    """
    try:
      task_module = __import__(TaskQueueConfig.\
          get_celery_worker_module_name(request.app_id()))
      task_func = getattr(
          task_module,
          TaskQueueConfig.get_queue_function_name(request.queue_name()))
      return task_func
    except AttributeError, attribute_error:
      logging.exception(attribute_error)
      raise apiproxy_errors.ApplicationError(
          taskqueue_service_pb.TaskQueueServiceError.UNKNOWN_QUEUE)
Example #6
  def _Rotate(self, image, transform):
    """Use PIL to rotate the given image with the given transform.

    Args:
      image: PIL.Image.Image object to rotate.
      transform: images_service_pb.Transform to use when rotating.

    Returns:
      PIL.Image.Image with transforms performed on it.

    Raises:
      BadRequestError if the rotate data given is bad.
    """
    degrees = transform.rotate()
    if degrees < 0 or degrees % 90 != 0:
      raise apiproxy_errors.ApplicationError(
          images_service_pb.ImagesServiceError.BAD_TRANSFORM_DATA)
    degrees %= 360


    degrees = 360 - degrees
    return image.rotate(degrees)
Example #7
  def _MakeRealSyncCall(self, service, call, request, response):
    request_pb = remote_api_pb.Request()
    request_pb.set_service_name(service)
    request_pb.set_method(call)
    request_pb.set_request(request.Encode())

    response_pb = remote_api_pb.Response()
    encoded_request = request_pb.Encode()
    encoded_response = self._server.Send(self._path, encoded_request)
    response_pb.ParseFromString(encoded_response)
    
    if response_pb.has_application_error():
      error_pb = response_pb.application_error()
      raise apiproxy_errors.ApplicationError(error_pb.code(),
                                             error_pb.detail())
    elif response_pb.has_exception():
      raise pickle.loads(response_pb.exception())
    elif response_pb.has_java_exception():
      raise UnknownJavaServerError("An unknown error has occured in the "
                                   "Java remote_api handler for this call.")
    else:
      response.ParseFromString(response_pb.response())
Example #8
def TranslateSystemErrors(method):
    """Decorator to catch and translate socket.error to ApplicationError.

  Args:
    method: An unbound method of APIProxyStub or a subclass.

  Returns:
    The method, altered such that it catches socket.error, socket.timeout and
    socket.gaierror and re-raises the required apiproxy_errors.ApplicationError.
  """
    def WrappedMethod(self, *args, **kwargs):
        try:
            return method(self, *args, **kwargs)
        except socket.gaierror, e:
            raise apiproxy_errors.ApplicationError(
                RemoteSocketServiceError.GAI_ERROR,
                'system_error:%u error_detail:"%s"' % (e.errno, e.strerror))
        except socket.timeout, e:
            raise apiproxy_errors.ApplicationError(
                RemoteSocketServiceError.SYSTEM_ERROR,
                'system_error:%u error_detail:"%s"' %
                (errno.EAGAIN, os.strerror(errno.EAGAIN)))
        except socket.error, e:
            raise apiproxy_errors.ApplicationError(
                RemoteSocketServiceError.SYSTEM_ERROR,
                'system_error:%u error_detail:"%s"' % (e.errno, e.strerror))
    return WrappedMethod
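As a usage illustration only (the stub class and method names below are hypothetical, not from the SDK), such a decorator is applied to a stub's _Dynamic_* methods so that raw socket failures reach the caller as apiproxy_errors.ApplicationError:

import socket

class ExampleSocketStub(object):
    # Hypothetical method; TranslateSystemErrors (defined above) wraps it so
    # gaierror/timeout/error are re-raised as ApplicationError.
    @TranslateSystemErrors
    def _Dynamic_Resolve(self, request, response):
        return socket.gethostbyname('example.com')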
Example #9
    def _MakeRemoteSyncCall(self, service, call, request, response):
        """Send an RPC to a remote_api endpoint."""
        request_pb = remote_api_pb.Request()
        request_pb.set_service_name(service)
        request_pb.set_method(call)
        request_pb.set_request(request.Encode())

        response_pb = remote_api_pb.Response()
        encoded_request = request_pb.Encode()
        try:
            urlfetch_response = urlfetch.fetch(self.remote_url,
                                               encoded_request,
                                               urlfetch.POST,
                                               self.extra_headers,
                                               follow_redirects=False,
                                               deadline=10)
        except Exception as e:

            logging.exception('Fetch failed to %s', self.remote_url)
            raise FetchFailed(e)
        if urlfetch_response.status_code != 200:
            logging.error('Fetch failed to %s; Status %s; body %s',
                          self.remote_url, urlfetch_response.status_code,
                          urlfetch_response.content)
            raise FetchFailed(urlfetch_response.status_code)
        response_pb.ParseFromString(urlfetch_response.content)

        if response_pb.has_application_error():
            error_pb = response_pb.application_error()
            raise apiproxy_errors.ApplicationError(error_pb.code(),
                                                   error_pb.detail())
        elif response_pb.has_exception():
            raise pickle.loads(response_pb.exception())
        elif response_pb.has_java_exception():
            raise UnknownJavaServerError(
                'An unknown error has occurred in the '
                'Java remote_api handler for this call.')
        else:
            response.ParseFromString(response_pb.response())
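For orientation, a hedged sketch of the server side of this contract: a remote_api handler reports failures either as a structured application_error or as a pickled exception, and the client code above turns those back into ApplicationError or re-raises them. The field names follow remote_api_pb as used above; treat the handler shape itself as illustrative.

import pickle
from google.appengine.ext.remote_api import remote_api_pb
from google.appengine.runtime import apiproxy_errors

def encode_result(result_or_error):
    # Builds the wire-level Response the client-side code above parses.
    response_pb = remote_api_pb.Response()
    if isinstance(result_or_error, apiproxy_errors.ApplicationError):
        # Surfaces on the client as the has_application_error() branch.
        error_pb = response_pb.mutable_application_error()
        error_pb.set_code(result_or_error.application_error)
        error_pb.set_detail(result_or_error.error_detail)
    elif isinstance(result_or_error, Exception):
        # Surfaces on the client as the has_exception() branch.
        response_pb.set_exception(pickle.dumps(result_or_error))
    else:
        # Normal case: an encoded response protobuf.
        response_pb.set_response(result_or_error.Encode())
    return response_pb.Encode()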
Example #10
    def Add(self, request):
        """Inserts a new task into the store.

    Args:
      request: A taskqueue_service_pb.TaskQueueAddRequest.

    Raises:
      apiproxy_errors.ApplicationError: If a task with the same name is already
      in the store.
    """

        pos = bisect.bisect_left(self._sorted_by_name, (request.task_name(), ))
        if (pos < len(self._sorted_by_name)
                and self._sorted_by_name[pos][0] == request.task_name()):
            raise apiproxy_errors.ApplicationError(
                taskqueue_service_pb.TaskQueueServiceError.TASK_ALREADY_EXISTS)

        now = datetime.datetime.utcnow()
        now_sec = time.mktime(now.timetuple())
        task = taskqueue_service_pb.TaskQueueQueryTasksResponse_Task()
        task.set_task_name(request.task_name())
        task.set_eta_usec(request.eta_usec())
        task.set_creation_time_usec(now_sec * 1e6)
        task.set_url(request.url())
        task.set_method(request.method())
        for keyvalue in request.header_list():
            header = task.add_header()
            header.set_key(keyvalue.key())
            header.set_value(keyvalue.value())
        if request.has_description():
            task.set_description(request.description())
        if request.has_body():
            task.set_body(request.body())
        if request.has_crontimetable():
            task.mutable_crontimetable().set_schedule(
                request.crontimetable().schedule())
            task.mutable_crontimetable().set_timezone(
                request.crontimetable().timezone())
        self._InsertTask(task)
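A rough usage sketch of the duplicate-name check above, assuming `store` is an instance of the class defining Add() and that the Python 2 SDK import paths below are available:

from google.appengine.api.taskqueue import taskqueue_service_pb
from google.appengine.runtime import apiproxy_errors

request = taskqueue_service_pb.TaskQueueAddRequest()
request.set_task_name('task-0001')
request.set_eta_usec(0)
request.set_url('/_ah/queue/default')
request.set_method(taskqueue_service_pb.TaskQueueAddRequest.POST)

store.Add(request)                      # first insert succeeds
try:
    store.Add(request)                  # same name again
except apiproxy_errors.ApplicationError, e:
    assert (e.application_error ==
            taskqueue_service_pb.TaskQueueServiceError.TASK_ALREADY_EXISTS)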
Example #11
    def _Dynamic_Next(self, next_request, query_result):
        assert next_request.offset() == 0
        cursor_id = next_request.cursor().cursor()
        if cursor_id not in self.__queries:
            raise apiproxy_errors.ApplicationError(
                datastore_pb.Error.BAD_REQUEST,
                'Cursor %d not found' % cursor_id)
        query = self.__queries[cursor_id]

        if query is None:

            query_result.set_more_results(False)
            return
        else:
            if next_request.has_count():
                query.set_count(next_request.count())
            else:
                query.clear_count()

        self._Dynamic_RunQuery(query, query_result, cursor_id)

        query_result.set_skipped_results(0)
Example #12
    def __update_v3_entity(self, v3_entity, v3_txn):
        """Updates a v3 entity.

    Args:
      v3_entity: an entity_pb.EntityProto
      v3_txn: a datastore_pb.Transaction or None

    Returns:
      the number of index writes that occurred

    Raises:
      ApplicationError: if the entity does not exist
    """
        if not v3_txn:

            v3_txn = datastore_pb.Transaction()
            v3_begin_txn_req = datastore_pb.BeginTransactionRequest()
            v3_begin_txn_req.set_app(v3_entity.key().app())
            self.__make_v3_call('BeginTransaction', v3_begin_txn_req, v3_txn)
            self.__update_v3_entity(v3_entity, v3_txn)
            v3_resp = datastore_pb.CommitResponse()
            self.__make_v3_call('Commit', v3_txn, v3_resp)
            return v3_resp.cost().index_writes()

        v3_get_req = datastore_pb.GetRequest()
        v3_get_req.mutable_transaction().CopyFrom(v3_txn)
        v3_get_req.key_list().append(v3_entity.key())
        v3_get_resp = datastore_pb.GetResponse()
        self.__make_v3_call('Get', v3_get_req, v3_get_resp)
        if not v3_get_resp.entity(0).has_entity():
            raise apiproxy_errors.ApplicationError(
                datastore_v4_pb.Error.BAD_REQUEST, 'Entity does not exist.')
        v3_put_req = datastore_pb.PutRequest()
        v3_put_req.mutable_transaction().CopyFrom(v3_txn)
        v3_put_req.entity_list().append(v3_entity)
        v3_put_resp = datastore_pb.PutResponse()
        self.__make_v3_call('Put', v3_put_req, v3_put_resp)
        return v3_put_resp.cost().index_writes()
Example #13
  def _Dynamic_Histogram(self, request, response):
    """Trivial implementation of an API.

    Based off documentation of the PIL library at
    http://www.pythonware.com/library/pil/handbook/index.htm

    Args:
      request: ImagesHistogramRequest - Contains the image.
      response: ImagesHistogramResponse - Contains histogram of the image.

    Raises:
      ApplicationError: Image was of an unsupported format.
    """
    image = self._OpenImageData(request.image())

    img_format = image.format
    if img_format not in FORMAT_LIST:
      raise apiproxy_errors.ApplicationError(
          images_service_pb.ImagesServiceError.NOT_IMAGE)
    image = image.convert(RGBA)
    red = [0] * 256
    green = [0] * 256
    blue = [0] * 256
    for pixel in image.getdata():
      red[int((pixel[0] * pixel[3]) / 255)] += 1
      green[int((pixel[1] * pixel[3]) / 255)] += 1
      blue[int((pixel[2] * pixel[3]) / 255)] += 1
    histogram = response.mutable_histogram()
    for value in red:
      histogram.add_red(value)
    for value in green:
      histogram.add_green(value)
    for value in blue:
      histogram.add_blue(value)
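The loop above weights each channel by the pixel's alpha before binning (premultiplication); a tiny self-contained check of that arithmetic:

# A fully opaque mid-grey pixel lands in bin 128; at alpha 127 the same
# channel value is weighted down to bin 63.
opaque = (128, 128, 128, 255)
translucent = (128, 128, 128, 127)
assert int((opaque[0] * opaque[3]) / 255) == 128
assert int((translucent[0] * translucent[3]) / 255) == 63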
Example #14
    def _Dynamic_SendChannelMessage(self, request, response):
        """Implementation of channel.send_message.

    Queues a message to be retrieved by the client when it polls.

    Args:
      request: A SendMessageRequest.
      response: A VoidProto.
    """
        application_key = request.application_key()

        if not request.message():
            raise apiproxy_errors.ApplicationError(
                channel_service_pb.ChannelServiceError.BAD_MESSAGE)

        conn = httplib.HTTPConnection(self._address)
        headers = {
            'Content-Type': 'text/plain',
            'Last-Modified': rfc1123_date()
        }
        conn.request("POST", "/_ah/publish?id=%s" % application_key,
                     request.message(), headers)
        conn.close()
Example #15
  def _Dynamic_GetSocketOptions(self, request, response):
    state = self._LookupSocket(request.socket_descriptor())
    for opt in request.options_list():
      if (opt.level() ==
          remote_socket_service_pb.SocketOption.SOCKET_SOL_SOCKET and
          opt.option() ==
          remote_socket_service_pb.SocketOption.SOCKET_SO_ERROR):
        ret = response.add_options()
        ret.set_level(opt.level())
        ret.set_option(opt.option())
        ret.set_value(
            state.sock.getsockopt(socket.SOL_SOCKET, socket.SO_ERROR, 1024))
      else:
        value = self._mock_options.GetMockValue(opt.level(), opt.option())
        if value is None:
          raise apiproxy_errors.ApplicationError(
              RemoteSocketServiceError.PERMISSION_DENIED,
              'Attempt to get blocked socket option.')

        ret = response.add_options()
        ret.set_level(opt.level())
        ret.set_option(opt.option())
        ret.set_value(value)
Example #16
    def __filter_binding(self, key, value, operation, prototype):
        if key in prototype:
            key += self.__filter_suffix(prototype[key])

        if key == "__key__":
            key = "_id"
            value = self.__id_for_key(value._ToPb())
        else:
            value = self.__create_mongo_value_for_value(value)

        if operation == "<":
            return (key, {'$lt': value})
        elif operation == '<=':
            return (key, {'$lte': value})
        elif operation == '>':
            return (key, {'$gt': value})
        elif operation == '>=':
            return (key, {'$gte': value})
        elif operation == '==':
            return (key, value)
        raise apiproxy_errors.ApplicationError(
            datastore_pb.Error.BAD_REQUEST,
            "Can't handle operation %r." % operation)
Example #17
    def _Dynamic_Commit(self, transaction, transaction_response):
        if not self.__transactions.has_key(transaction.handle()):
            raise apiproxy_errors.ApplicationError(
                datastore_pb.Error.BAD_REQUEST,
                'Transaction handle %d not found' % transaction.handle())

        self.__tx_snapshot = {}
        try:
            self.__WriteDatastore()

            for action in self.__tx_actions:
                try:
                    apiproxy_stub_map.MakeSyncCall('taskqueue', 'Add', action,
                                                   api_base_pb.VoidProto())
                except apiproxy_errors.ApplicationError, e:
                    logging.warning(
                        'Transactional task %s has been dropped, %s', action,
                        e)
                    pass

        finally:
            self.__tx_actions = []
            self.__tx_lock.release()
Example #18
  def _Dynamic_AllocateIds(self, req, resp):
    v3_stub = apiproxy_stub_map.apiproxy.GetStub(V3_SERVICE_NAME)
    try:
      self.__service_validator.validate_allocate_ids_req(req)

      if req.allocate_list():
        v3_refs = self.__entity_converter.v4_to_v3_references(
            req.allocate_list())

        v3_full_refs = v3_stub._AllocateIds(v3_refs)
        resp.allocated_list().extend(
            self.__entity_converter.v3_to_v4_keys(v3_full_refs))
      elif req.reserve_list():
        v3_refs = self.__entity_converter.v4_to_v3_references(
            req.reserve_list())

        v3_stub._AllocateIds(v3_refs)
    except datastore_pbs.InvalidConversionError, e:
      raise apiproxy_errors.ApplicationError(
          datastore_v4_pb.Error.BAD_REQUEST, str(e))
Example #19
  def __ReadPickled(self, filename):
    """Reads a pickled object from the given file and returns it.
    """
    self.__file_lock.acquire()

    try:
      try:
        if filename and filename != '/dev/null' and os.path.isfile(filename):
          return pickle.load(open(filename, 'rb'))
        else:
          logging.warning('Could not read datastore data from %s', filename)
      except (AttributeError, LookupError, ImportError, NameError, TypeError,
              ValueError, struct.error, pickle.PickleError), e:


        raise apiproxy_errors.ApplicationError(
            datastore_pb.Error.INTERNAL_ERROR,
            'Could not read data from %s. Try running with the '
            '--clear_datastore flag. Cause:\n%r' % (filename, e))
    finally:
      self.__file_lock.release()

    return []
Example #20
    def _Dynamic_Next(self, next_request, query_result):
        cursor = next_request.cursor().cursor()
        query_result.set_more_results(False)

        if cursor == 0:  # we exited early from the query w/ no results...
            return

        try:
            cursor = self.__queries[cursor]
        except KeyError:
            raise apiproxy_errors.ApplicationError(
                datastore_pb.Error.BAD_REQUEST, 'Cursor %d not found' % cursor)

        count = next_request.count()
        if count == 0:
            count = 1
        for _ in range(count):
            try:
                query_result.result_list().append(
                    self.__entity_for_mongo_document(cursor.next()))
            except StopIteration:
                return
        query_result.set_more_results(True)
Example #21
    def _Rotate(self, image, transform):
        """Use PIL to rotate the given image with the given transform.

    Args:
      image: PIL.Image.Image object to rotate.
      transform: images_service_pb.Transform to use when rotating.

    Returns:
      PIL.Image.Image with transforms performed on it.

    Raises:
      BadRequestError if the rotate data given is bad.
    """
        degrees = transform.rotate()
        if degrees < 0 or degrees % 90 != 0:
            raise apiproxy_errors.ApplicationError(
                images_service_pb.ImagesServiceError.BAD_TRANSFORM_DATA)
        degrees %= 360

        degrees = 360 - degrees
        # AppScale: An update to the pillow library makes the expand parameter
        # necessary to behave the same way as GAE does.
        return image.rotate(degrees, expand=True)
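A small PIL-only illustration of why the expand flag matters for the 90/270 degree cases: without it Pillow keeps the original canvas size and crops the rotated content.

from PIL import Image

img = Image.new('RGB', (200, 100))
assert img.rotate(90, expand=True).size == (100, 200)  # canvas follows the rotation
assert img.rotate(90).size == (200, 100)               # canvas unchanged, content cropped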
Example #22
  def _Dynamic_Commit(self, req, resp):
    try:
      self.__service_validator.validate_commit_req(req)
      if req.has_transaction():
        resp.mutable_deprecated_mutation_result()
        resp.mutable_deprecated_mutation_result().CopyFrom(
            self.__apply_v4_deprecated_mutation(
                req.deprecated_mutation(), req.transaction()))
        v3_req = self.__service_converter.v4_commit_req_to_v3_txn(req)
        v3_resp = datastore_pb.CommitResponse()
        self.__make_v3_call('Commit', v3_req, v3_resp)
        total_index_updates = (
            resp.mutable_deprecated_mutation_result().index_updates() +
            v3_resp.cost().index_writes())
        resp.mutable_deprecated_mutation_result().set_index_updates(
            total_index_updates)
      else:
        resp.mutable_deprecated_mutation_result().CopyFrom(
            self.__apply_v4_deprecated_mutation(
                req.deprecated_mutation(), None))
    except datastore_pbs.InvalidConversionError, e:
      raise apiproxy_errors.ApplicationError(
          datastore_v4_pb.Error.BAD_REQUEST, str(e))
Example #23
  def _RemoteSend(self, request, response, method):
    """Sends a request remotely to the datastore server."""
    tag = self.__app_id
    self._maybeSetDefaultAuthDomain() 
    user = users.GetCurrentUser()
    if user != None:
      tag += ":" + user.email()
      tag += ":" + user.nickname()
      tag += ":" + user.auth_domain()
    api_request = remote_api_pb.Request()
    api_request.set_method(method)
    api_request.set_service_name("datastore_v3")
    api_request.set_request(request.Encode())

    api_response = remote_api_pb.Response()
    api_response = api_request.sendCommand(self.__datastore_location,
      tag,
      api_response,
      1,
      self.__is_encrypted, 
      KEY_LOCATION,
      CERT_LOCATION)

    if not api_response or not api_response.has_response():
      raise datastore_errors.InternalError(
          'No response from db server on %s requests.' % method)
    
    if api_response.has_application_error():
      error_pb = api_response.application_error()
      logging.error(error_pb.detail())
      raise apiproxy_errors.ApplicationError(error_pb.code(),
                                             error_pb.detail())

    if api_response.has_exception():
      raise api_response.exception()
   
    response.ParseFromString(api_response.response())
Example #24
    def _MakeCallDone(self):
        self._state = RPC.FINISHING
        self.cpu_usage_mcycles = self._result_dict['cpu_usage_mcycles']
        if self._result_dict['error'] == APPLICATION_ERROR:
            appl_err = self._result_dict['application_error']
            if appl_err == MEMCACHE_UNAVAILABLE and self.package == 'memcache':

                self._exception = apiproxy_errors.CapabilityDisabledError(
                    'The memcache service is temporarily unavailable. %s' %
                    self._result_dict['error_detail'])
            else:

                self._exception = apiproxy_errors.ApplicationError(
                    appl_err, self._result_dict['error_detail'])
        elif self._result_dict['error'] == CAPABILITY_DISABLED:

            if self._result_dict['error_detail']:
                self._exception = apiproxy_errors.CapabilityDisabledError(
                    self._result_dict['error_detail'])
            else:
                self._exception = apiproxy_errors.CapabilityDisabledError(
                    "The API call %s.%s() is temporarily unavailable." %
                    (self.package, self.call))
        elif self._result_dict['error'] == FEATURE_DISABLED:
            self._exception = apiproxy_errors.FeatureNotEnabledError(
                self._result_dict['error_detail'])
        elif self._result_dict['error'] in _ExceptionsMap:
            exception_entry = _ExceptionsMap[self._result_dict['error']]
            self._exception = exception_entry[0](exception_entry[1] %
                                                 (self.package, self.call))
        else:
            try:
                self.response.ParseFromString(
                    self._result_dict['result_string'])
            except Exception as e:
                self._exception = e
        self._Callback()
Example #25
    def _Dynamic_RunQuery(self, query, query_result, request_id=None):
        """Send a query request to the datastore server. """
        if query.has_transaction():
            if not query.has_ancestor():
                raise apiproxy_errors.ApplicationError(
                    datastore_pb.Error.BAD_REQUEST,
                    'Only ancestor queries are allowed inside transactions.')
        (filters, orders) = datastore_index.Normalize(query.filter_list(),
                                                      query.order_list(), [])

        old_datastore_stub_util.FillUsersInQuery(filters)

        if not query.has_app():
            query.set_app(self.project_id)
        self.__ValidateAppId(query.app())

        self._RemoteSend(query, query_result, "RunQuery", request_id)
        results = query_result.result_list()
        for result in results:
            old_datastore_stub_util.PrepareSpecialPropertiesForLoad(result)

        last_cursor = None
        if query_result.has_compiled_cursor():
            last_cursor = query_result.compiled_cursor()

        if query_result.more_results():
            new_cursor = InternalCursor(query, last_cursor, len(results))
            cursor_id = self.__getCursorID()
            cursor = query_result.mutable_cursor()
            cursor.set_app(self.project_id)
            cursor.set_cursor(cursor_id)
            self.__queries[cursor_id] = new_cursor

        if query.compile():
            compiled_query = query_result.mutable_compiled_query()
            compiled_query.set_keys_only(query.keys_only())
            compiled_query.mutable_primaryscan().set_index_name(query.Encode())
Example #26
def _FindIndexToUse(query, indexes):
    """ Matches the query with one of the composite indexes. 

  Args:
    query: A datastore_pb.Query.
    indexes: A list of entity_pb.CompositeIndex.
  Returns:
    The composite index from the list that matches the query, or None if
    there is no match.
  """
    if not query.has_kind():
        return None

    index_list = __IndexListForQuery(query)
    if index_list == []:
        return None

    index_match = index_list[0]
    for index in indexes:
        if index_match.Equals(index.definition()):
            return index

    raise apiproxy_errors.ApplicationError(datastore_pb.Error.NEED_INDEX,
                                           'Query requires an index')
Example #27
    def _Dynamic_AllocateIds(self, allocate_ids_request,
                             allocate_ids_response):
        model_key = allocate_ids_request.model_key()

        self.__ValidateAppId(model_key.app())

        if allocate_ids_request.has_size() and allocate_ids_request.has_max():
            raise apiproxy_errors.ApplicationError(
                datastore_pb.Error.BAD_REQUEST,
                'Both size and max cannot be set.')
        try:
            self.__id_lock.acquire()
            start = self.__next_id
            if allocate_ids_request.has_size():
                self.__next_id += allocate_ids_request.size()
            elif allocate_ids_request.has_max():
                self.__next_id = max(self.__next_id,
                                     allocate_ids_request.max() + 1)
            end = self.__next_id - 1
        finally:
            self.__id_lock.release()

        allocate_ids_response.set_start(start)
        allocate_ids_response.set_end(end)
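The size/max bookkeeping above boils down to the following; a standalone sketch with illustrative names (not the stub's own helpers):

def allocate(next_id, size=None, max_id=None):
    # A size-based request hands out a contiguous block; a max-based request
    # only advances the counter past max_id and may return an empty range.
    start = next_id
    if size is not None:
        next_id += size
    elif max_id is not None:
        next_id = max(next_id, max_id + 1)
    return start, next_id - 1, next_id

assert allocate(1, size=10) == (1, 10, 11)           # ids 1..10 handed out
assert allocate(11, max_id=500) == (11, 500, 501)    # reserve everything up to 500
assert allocate(501, max_id=100) == (501, 500, 501)  # already past max: empty range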
Example #28
    def StoreBlob(self, blob_key, blob_stream):
        """Store blob stream to disk.

    Args:
      blob_key: Blob key of blob to store.
      blob_stream: Stream or stream-like object that will generate blob content.
    """
        blob_key = self._BlobKey(blob_key)
        block_count = 0
        try:
            while True:
                block = blob_stream.read(blobstore.MAX_BLOB_FETCH_SIZE)
                if not block:
                    break
                entity = datastore.Entity(_BLOB_CHUNK_KIND_,
                                          name=str(blob_key) + "__" +
                                          str(block_count),
                                          namespace='')
                entity.update({'block': datastore_types.Blob(block)})
                datastore.Put(entity)
                block_count += 1
        except datastore_errors.EntityNotFoundError, err:
            raise apiproxy_errors.ApplicationError(
                blobstore_service_pb.BlobstoreServiceError.BLOB_NOT_FOUND)
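Chunks are stored under key names of the form '<blob_key>__<block index>', so a blob can be reassembled by fetching consecutive chunk entities until one is missing. A rough sketch under that assumption (this is not the stub's own retrieval code; _BLOB_CHUNK_KIND_ is the constant used above):

from google.appengine.api import datastore, datastore_errors

def read_blob(blob_key):
    blocks, index = [], 0
    while True:
        key = datastore.Key.from_path(_BLOB_CHUNK_KIND_,
                                      '%s__%d' % (blob_key, index),
                                      namespace='')
        try:
            entity = datastore.Get(key)
        except datastore_errors.EntityNotFoundError:
            break
        blocks.append(str(entity['block']))
        index += 1
    return ''.join(blocks)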
Example #29
class CloudDatastoreV1Stub(apiproxy_stub.APIProxyStub):
  """Implementation of the Cloud Datastore V1 API.

  This proxies requests to the v3 service."""


  THREADSAFE = False

  def __init__(self, app_id):
    assert _CLOUD_DATASTORE_ENABLED, (
        'Cannot initialize the Cloud Datastore'
        ' stub without installing the Cloud'
        ' Datastore client libraries.')
    apiproxy_stub.APIProxyStub.__init__(self, SERVICE_NAME)
    self.__app_id = app_id
    self._id_resolver = _StubIdResolver([app_id])
    self.__entity_converter = datastore_pbs.get_entity_converter(
        self._id_resolver)
    self.__service_converter = datastore_stub_util.get_service_converter(
        self._id_resolver)
    self.__service_validator = cloud_datastore_validator.get_service_validator(
        self._id_resolver)

  def _Dynamic_BeginTransaction(self, req, resp):


    try:
      self.__service_validator.validate_begin_transaction_req(req)
      v3_req = self.__service_converter.v1_to_v3_begin_transaction_req(
          self.__app_id, req)
    except datastore_pbs.InvalidConversionError, e:
      raise apiproxy_errors.ApplicationError(datastore_pb.Error.BAD_REQUEST,
                                             str(e))
    except cloud_datastore_validator.ValidationError, e:
      raise apiproxy_errors.ApplicationError(datastore_pb.Error.BAD_REQUEST,
                                             str(e))
Example #30
  def _Dynamic_RunQuery(self, req, resp):
    try:
      self.__normalize_v4_run_query_request(req)
      self.__service_validator.validate_run_query_req(req)

      v3_stub = apiproxy_stub_map.apiproxy.GetStub(V3_SERVICE_NAME)

      new_req, filter_predicate = self._SplitGeospatialFilters(req)
      if (issubclass(v3_stub.__class__, datastore_stub_util.BaseDatastore)
          and filter_predicate is not None):
        v3_req = self.__service_converter.v4_run_query_req_to_v3_query(new_req)
        v3_resp = datastore_pb.QueryResult()
        v3_stub._Dynamic_RunQuery(v3_req, v3_resp, filter_predicate)
      else:
        v3_req = self.__service_converter.v4_run_query_req_to_v3_query(req)
        v3_resp = datastore_pb.QueryResult()
        self.__make_v3_call('RunQuery', v3_req, v3_resp)
    except datastore_pbs.InvalidConversionError, e:
      raise apiproxy_errors.ApplicationError(
          datastore_v4_pb.Error.BAD_REQUEST, str(e))