Example No. 1
  def __ValidateAppId(self, app_id):
    """Verify that this is the stub for app_id.

    Args:
      app_id: An application ID.

    Raises:
      datastore_errors.BadRequestError: if this is not the stub for app_id.
    """
    assert app_id
    if not self.__trusted and app_id != self.__app_id:
      raise datastore_errors.BadRequestError(
          'app %s cannot access app %s\'s data' % (self.__app_id, app_id))
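
A standalone sketch of the same check, with hypothetical names (`validate_app_id` and the app ids are illustrative, not part of the stub above); it shows when the BadRequestError fires:

from google.appengine.api import datastore_errors

def validate_app_id(stub_app_id, app_id, trusted=False):
    # Same logic as __ValidateAppId above, unrolled for illustration.
    assert app_id
    if not trusted and app_id != stub_app_id:
        raise datastore_errors.BadRequestError(
            "app %s cannot access app %s's data" % (stub_app_id, app_id))

validate_app_id('my-app', 'my-app')       # same app id: passes silently
validate_app_id('my-app', 'other-app')    # raises BadRequestError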
Example No. 2
  def __ValidateKey(self, key):
    """Validate this key.

    Args:
      key: entity_pb.Reference

    Raises:
      datastore_errors.BadRequestError: if the key is invalid
    """
    assert isinstance(key, entity_pb.Reference)

    self.__ValidateAppId(key.app())

    for elem in key.path().element_list():
      if elem.has_id() == elem.has_name():
        raise datastore_errors.BadRequestError(
          'each key path element should have id or name but not both: %r' % key)
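
A small sketch of a key that this check rejects, assuming the SDK's `entity_pb` module; the kind `Item` and the id/name values are illustrative:

from google.appengine.datastore import entity_pb

key = entity_pb.Reference()
key.set_app('my-app')
elem = key.mutable_path().add_element()
elem.set_type('Item')
elem.set_id(7)
elem.set_name('seven')   # id *and* name on the same path element

# Passed through __ValidateKey (e.g. during a Put), this key is rejected with
# "each key path element should have id or name but not both".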
Example No. 3
    def _Dynamic_RunQuery(self, query, query_result):
        if query.keys_only():
            query_result.set_keys_only(True)

        num_components = len(query.filter_list()) + len(query.order_list())
        if query.has_ancestor():
            num_components += 1
        if num_components > _MAX_QUERY_COMPONENTS:
            raise apiproxy_errors.ApplicationError(
                datastore_pb.Error.BAD_REQUEST,
                ('query is too large. may not have more than %s filters'
                 ' + sort orders ancestor total' % _MAX_QUERY_COMPONENTS))

        app = query.app()

        query_result.mutable_cursor().set_cursor(0)
        query_result.set_more_results(False)

        if self.__require_indexes:
            (required, kind, ancestor, props,
             num_eq_filters) = datastore_index.CompositeIndexForQuery(query)
            if required:
                index = entity_pb.CompositeIndex()
                index.mutable_definition().set_entity_type(kind)
                index.mutable_definition().set_ancestor(ancestor)
                for (k, v) in props:
                    p = index.mutable_definition().add_property()
                    p.set_name(k)
                    p.set_direction(v)

                if props and not self.__has_index(index):
                    raise apiproxy_errors.ApplicationError(
                        datastore_pb.Error.NEED_INDEX,
                        "This query requires a composite index that is not defined. "
                        "You must update the index.yaml file in your application root."
                    )

        collection = query.kind()
        if query.has_name_space():
            collection = (query.name_space() + _NAMESPACE_CONCAT_STR +
                          collection)

        clone = datastore_pb.Query()
        clone.CopyFrom(query)
        clone.clear_hint()
        if clone in self.__query_history:
            self.__query_history[clone] += 1
        else:
            self.__query_history[clone] = 1

        # HACK we need to get one Entity from this collection so we know what the
        # property types are (because we need to construct queries that depend on
        # the types of the properties)...
        try:
            prototype = self.__db[collection].find_one()
        except pymongo.errors.InvalidName:
            raise datastore_errors.BadRequestError('query without kind')
        if prototype is None:
            return
        prototype = datastore.Entity._FromPb(
            self.__entity_for_mongo_document(prototype))

        spec = {}

        if query.has_ancestor():
            spec["_id"] = re.compile("^%s.*$" %
                                     self.__id_for_key(query.ancestor()))

        operators = {
            datastore_pb.Query_Filter.LESS_THAN: '<',
            datastore_pb.Query_Filter.LESS_THAN_OR_EQUAL: '<=',
            datastore_pb.Query_Filter.GREATER_THAN: '>',
            datastore_pb.Query_Filter.GREATER_THAN_OR_EQUAL: '>=',
            datastore_pb.Query_Filter.EQUAL: '==',
        }

        for filt in query.filter_list():
            assert filt.op() != datastore_pb.Query_Filter.IN

            prop = filt.property(0).name().decode('utf-8')
            op = operators[filt.op()]

            filter_val_list = [
                datastore_types.FromPropertyPb(filter_prop)
                for filter_prop in filt.property_list()
            ]

            (key, value) = self.__filter_binding(prop, filter_val_list[0], op,
                                                 prototype)

            if key in spec:
                if (not isinstance(spec[key], types.DictType)
                        and not isinstance(value, types.DictType)):
                    if spec[key] != value:
                        return
                elif not isinstance(spec[key], types.DictType):
                    value["$in"] = [spec[key]]
                    spec[key] = value
                elif not isinstance(value, types.DictType):
                    spec[key]["$in"] = [value]
                else:
                    spec[key].update(value)
            else:
                spec[key] = value

        offset = 0
        # Cursor magic
        if query.has_compiled_cursor():
            offset, query_pb, unused_spec, incl = self._DecodeCompiledCursor(
                query.compiled_cursor())

        cursor = self.__db[collection].find(spec)

        order = self.__translate_order_for_mongo(query.order_list(), prototype)
        if order is None:
            return
        if order:
            cursor = cursor.sort(order)

        if query.offset() == datastore._MAX_INT_32:
            query.set_offset(0)
            query.set_limit(datastore._MAX_INT_32)

        if offset:
            cursor = cursor.skip(int(offset))
        elif query.has_offset() and query.offset() != _MAX_QUERY_OFFSET:
            cursor = cursor.skip(int(query.offset()))
        if query.has_limit():
            cursor = cursor.limit(int(query.limit()))

        self.__cursor_lock.acquire()
        cursor_index = self.__next_cursor
        self.__next_cursor += 1
        self.__cursor_lock.release()
        self.__queries[cursor_index] = cursor

        # Cursor magic
        compiled_cursor = query_result.mutable_compiled_cursor()
        position = compiled_cursor.add_position()
        query_info = self._MinimalQueryInfo(query)
        cloned_cursor = cursor.clone()
        results = list(cloned_cursor)
        if results:
            start_key = _CURSOR_CONCAT_STR.join(
                (str(len(results) + offset), query_info.Encode(),
                 self.__entity_for_mongo_document(results[-1]).Encode()))
            # Populate query result
            result_list = query_result.result_list()
            for doc in results:
                result_list.append(self.__entity_for_mongo_document(doc))
            query_result.set_skipped_results(len(results))
            position.set_start_key(str(start_key))
            position.set_start_inclusive(False)
        del cloned_cursor

        query_result.mutable_cursor().set_cursor(cursor_index)
        query_result.set_more_results(False)
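
One error path above is easy to exercise in isolation: a query whose filters plus sort orders (plus ancestor) exceed `_MAX_QUERY_COMPONENTS` is rejected with a BAD_REQUEST application error before any MongoDB work happens. A rough sketch, where `stub` is a hypothetical instance of this MongoDB-backed datastore stub:

from google.appengine.datastore import datastore_pb
from google.appengine.runtime import apiproxy_errors

query = datastore_pb.Query()
query.set_app('my-app')
query.set_kind('Item')
for i in range(_MAX_QUERY_COMPONENTS + 1):   # one component too many
    f = query.add_filter()
    f.set_op(datastore_pb.Query_Filter.EQUAL)
    prop = f.add_property()
    prop.set_name('p%d' % i)
    prop.mutable_value().set_int64value(i)

query_result = datastore_pb.QueryResult()
try:
    stub._Dynamic_RunQuery(query, query_result)
except apiproxy_errors.ApplicationError as e:
    assert e.application_error == datastore_pb.Error.BAD_REQUEST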
Example No. 4
    def transaction(self, callback, **ctx_options):

        options = _make_ctx_options(ctx_options, TransactionOptions)
        propagation = TransactionOptions.propagation(options)
        if propagation is None:
            propagation = TransactionOptions.NESTED

        mode = datastore_rpc.TransactionMode.READ_WRITE
        if ctx_options.get('read_only', False):
            mode = datastore_rpc.TransactionMode.READ_ONLY

        parent = self
        if propagation == TransactionOptions.NESTED:
            if self.in_transaction():
                raise datastore_errors.BadRequestError(
                    'Nested transactions are not supported.')
        elif propagation == TransactionOptions.MANDATORY:
            if not self.in_transaction():
                raise datastore_errors.BadRequestError(
                    'Requires an existing transaction.')
            result = callback()
            if isinstance(result, tasklets.Future):
                result = yield result
            raise tasklets.Return(result)
        elif propagation == TransactionOptions.ALLOWED:
            if self.in_transaction():
                result = callback()
                if isinstance(result, tasklets.Future):
                    result = yield result
                raise tasklets.Return(result)
        elif propagation == TransactionOptions.INDEPENDENT:
            while parent.in_transaction():
                parent = parent._parent_context
                if parent is None:
                    raise datastore_errors.BadRequestError(
                        'Context without non-transactional ancestor')
        else:
            raise datastore_errors.BadArgumentError(
                'Invalid propagation value (%s).' % (propagation, ))

        app = TransactionOptions.app(options) or key_module._DefaultAppId()

        retries = TransactionOptions.retries(options)
        if retries is None:
            retries = 3
        yield parent.flush()

        transaction = None
        tconn = None
        for _ in range(1 + max(0, retries)):
            previous_transaction = (transaction if mode
                                    == datastore_rpc.TransactionMode.READ_WRITE
                                    else None)
            transaction = yield (parent._conn.async_begin_transaction(
                options, app, previous_transaction, mode))
            tconn = datastore_rpc.TransactionalConnection(
                adapter=parent._conn.adapter,
                config=parent._conn.config,
                transaction=transaction,
                _api_version=parent._conn._api_version)
            tctx = parent.__class__(
                conn=tconn,
                auto_batcher_class=parent._auto_batcher_class,
                parent_context=parent)
            tctx._old_ds_conn = datastore._GetConnection()
            ok = False
            try:
                # Inherit the parent's memcache policies, then install the
                # transactional context and connection for the callback.
                tctx.set_memcache_policy(parent.get_memcache_policy())
                tctx.set_memcache_timeout_policy(
                    parent.get_memcache_timeout_policy())
                tasklets.set_context(tctx)
                datastore._SetConnection(tconn)
                try:
                    try:
                        result = callback()
                        if isinstance(result, tasklets.Future):
                            result = yield result
                    finally:
                        yield tctx.flush()
                except GeneratorExit:
                    raise
                except Exception:
                    t, e, tb = sys.exc_info()
                    tconn.async_rollback(options)
                    if issubclass(t, datastore_errors.Rollback):
                        # The callback raised Rollback: the transaction is
                        # rolled back and the tasklet returns None.
                        return
                    else:
                        six.reraise(t, e, tb)
                else:
                    ok = yield tconn.async_commit(options)
                    if ok:
                        parent._cache.update(tctx._cache)
                        yield parent._clear_memcache(tctx._cache)
                        raise tasklets.Return(result)

            finally:
                datastore._SetConnection(tctx._old_ds_conn)
                del tctx._old_ds_conn
                if ok:
                    # Commit succeeded: run callbacks registered via
                    # call_on_commit() inside the transaction.
                    for on_commit_callback in tctx._on_commit_queue:
                        on_commit_callback()

        tconn.async_rollback(options)
        raise datastore_errors.TransactionFailedError(
            'The transaction could not be committed. Please try again.')
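
For context, this is the tasklet behind `ndb.transaction()`; a minimal usage sketch under a standard ndb setup (the `Counter` model and the retry count are illustrative):

from google.appengine.ext import ndb

class Counter(ndb.Model):                  # hypothetical model for the sketch
    value = ndb.IntegerProperty(default=0)

def change_counter():
    counter = Counter.get_or_insert('global')
    counter.value += 1
    counter.put()
    return counter.value

# Runs change_counter() in a transaction; NESTED propagation means calling this
# inside an existing transaction raises BadRequestError, and if no attempt
# commits after the retries a TransactionFailedError is raised.
new_value = ndb.transaction(change_counter, retries=3)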
Example No. 5
def MockNdbTransaction(func, **options):
    # `calls` and `original_step_put` are closure variables defined in the
    # enclosing (not shown) test: the first call fails with BadRequestError,
    # later calls delegate to the original implementation.
    if len(calls) < 1:
        calls.append(1)
        raise datastore_errors.BadRequestError('Other reason')
    return original_step_put(func, **options)

def MockNdbTransaction(func, **options):
    # Variant wrapping `original_ndb_transaction`: again the first call raises
    # BadRequestError and subsequent calls are delegated.
    if len(calls) < 1:
        calls.append(1)
        raise datastore_errors.BadRequestError()
    return original_ndb_transaction(func, **options)
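
These mocks look like they come from a retry test; a sketch of how one of them might be wired up, where the patch target (`ndb.transaction`), the wrapped original, and `run_code_under_test` are assumptions based on the names above:

import mock

from google.appengine.ext import ndb

calls = []
original_ndb_transaction = ndb.transaction   # assumed original being wrapped

with mock.patch.object(ndb, 'transaction', MockNdbTransaction):
    run_code_under_test()   # hypothetical: the first transaction attempt sees
                            # BadRequestError; a retry reaches the real
                            # ndb.transaction and can succeed.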