Example #1
  def test_is_zigzag_merge_join(self):
    zookeeper = flexmock()
    zookeeper.should_receive("get_transaction_id").and_return(1)
    zookeeper.should_receive("get_valid_transaction_id").and_return(1)
    zookeeper.should_receive("register_updated_key").and_return(1)
    zookeeper.should_receive("acquire_lock").and_return(True)
    zookeeper.should_receive("release_lock").and_return(True)
    db_batch = flexmock()
    db_batch.should_receive("batch_delete").and_return(None)
    db_batch.should_receive("batch_put_entity").and_return(None)
    db_batch.should_receive("batch_get_entity").and_return(None)

    query = datastore_pb.Query()
    dd = DatastoreDistributed(db_batch, zookeeper) 
    db_batch.should_receive("remove_exists_filters").and_return({})
    self.assertEquals(dd.is_zigzag_merge_join(query, {}, {}), False)
    filter_info = {"prop1":[(datastore_pb.Query_Filter.EQUAL, "1")],
      "prop2": [(datastore_pb.Query_Filter.EQUAL, "2")]}
    db_batch.should_receive("remove_exists_filters").and_return(filter_info)
         
    self.assertEquals(dd.is_zigzag_merge_join(query, filter_info, []), True)

    filter_info = {"prop1":[(datastore_pb.Query_Filter.EQUAL, "1")],
      "prop1": [(datastore_pb.Query_Filter.EQUAL, "2")]}
    self.assertEquals(dd.is_zigzag_merge_join(query, filter_info, []), False)
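The rule these assertions imply can be sketched as follows. This is an inference from the test, not the actual DatastoreDistributed implementation: a zigzag merge join needs at least two distinct properties, each constrained only by equality filters.

def is_zigzag_merge_join_sketch(filter_info):
    # Inferred sketch: eligible only when two or more distinct properties
    # carry nothing but EQUAL filters. A dict with a duplicated key (as in
    # the last assertion above) collapses to one property and fails.
    eq_props = [prop for prop, filters in filter_info.items()
                if filters and all(op == datastore_pb.Query_Filter.EQUAL
                                   for op, _ in filters)]
    return len(eq_props) >= 2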
Example #2
  def run_query(self, http_request_data):
    """ High level function for running queries.

    Args:
      http_request_data: Stores the protocol buffer request from the AppServer.
    Returns:
      Returns an encoded query response.
    """
    global datastore_access
    query = datastore_pb.Query(http_request_data)
    clone_qr_pb = UnprocessedQueryResult()
    try:
      yield datastore_access._dynamic_run_query(query, clone_qr_pb)
    except dbconstants.BadRequest as error:
      raise gen.Return(('', datastore_pb.Error.BAD_REQUEST, str(error)))
    except zktransaction.ZKBadRequest as error:
      logger.exception(
        'Illegal arguments in transaction during {}'.format(query))
      raise gen.Return(('', datastore_pb.Error.BAD_REQUEST, str(error)))
    except zktransaction.ZKInternalException as error:
      logger.exception('ZKInternalException during {}'.format(query))
      raise gen.Return(('', datastore_pb.Error.INTERNAL_ERROR, str(error)))
    except zktransaction.ZKTransactionException as error:
      logger.exception('Concurrent transaction during {}'.format(query))
      raise gen.Return(
        ('', datastore_pb.Error.CONCURRENT_TRANSACTION, str(error)))
    except dbconstants.AppScaleDBConnectionError as error:
      logger.exception('DB connection error during query')
      raise gen.Return(('', datastore_pb.Error.INTERNAL_ERROR, str(error)))
    raise gen.Return((clone_qr_pb.Encode(), 0, ''))
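A hypothetical caller sketch: run_query is a Tornado coroutine that resolves to an (encoded_response, error_code, error_detail) tuple, so a handler might unpack it like this (handle_query and service are placeholder names):

from tornado import gen

@gen.coroutine
def handle_query(service, http_request_data):
    # `service` is a placeholder for the object that defines run_query above.
    response, errcode, errdetail = yield service.run_query(http_request_data)
    if errcode != 0:
        # Non-zero codes map to datastore_pb.Error values, per the returns above.
        raise Exception('query failed ({0}): {1}'.format(errcode, errdetail))
    raise gen.Return(response)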
Example #3
  def test_kindless_query(self):
    query = datastore_pb.Query()
    ancestor = query.mutable_ancestor()
    entity_proto1 = self.get_new_entity_proto("test", "test_kind", "nancy", "prop1name", 
                                              "prop1val", ns="blah")
    entity_key = entity_proto1.key()
    get_req = datastore_pb.GetRequest()
    key = get_req.add_key() 
    key.MergeFrom(entity_key)
    
    tombstone1 = {'key': {APP_ENTITY_SCHEMA[0]:TOMBSTONE, APP_ENTITY_SCHEMA[1]: 1}}
    db_batch = flexmock()
    db_batch.should_receive("batch_get_entity").and_return(
               {"test\x00blah\x00test_kind:nancy\x01": 
                 {
                   APP_ENTITY_SCHEMA[0]: entity_proto1.Encode(),
                   APP_ENTITY_SCHEMA[1]: 1
                 }
               })

    db_batch.should_receive("batch_put_entity").and_return(None)
    entity_proto1 = {'test\x00blah\x00test_kind:nancy\x01':{APP_ENTITY_SCHEMA[0]:entity_proto1.Encode(),
                      APP_ENTITY_SCHEMA[1]: 1}}
    db_batch.should_receive("range_query").and_return([entity_proto1, tombstone1]).and_return([])
    zookeeper = flexmock()
    zookeeper.should_receive("get_valid_transaction_id").and_return(1)
    zookeeper.should_receive("acquire_lock").and_return(True)
    dd = DatastoreDistributed(db_batch, zookeeper) 
    filter_info = {
      '__key__' : [[0, 0]]
    }
    dd.kindless_query(query, filter_info, None)
Example #4
def _get_entities(ds_access, kind, namespace, order, start, count):
  """Returns a list and a count of entities of the given kind.

  Args:
    ds_access: A DatastoreDistributed client.
    kind: A string representing the name of the kind of the entities to
        return.
    namespace: A string representing the namespace of the entities to return.
    order: A string containing the name of the property to sort the results
        by. A "-" prefix indicates descending order, e.g. "-age".
    start: The number of initial entities to skip in the result set.
    count: The maximum number of entities to return.

  Returns:
    A tuple of (list of datastore.Entity, total entity count).
  """
  query = datastore_pb.Query()
  query.set_name_space(namespace)
  query.set_app(ds_access.project_id)
  query.set_kind(kind)
  query.set_compile(True)

  if order:
    query_order = query.add_order()
    if order.startswith('-'):
      query_order.set_direction(datastore_pb.Query_Order.DESCENDING)
      query_order.set_property(order[1:])
    else:
      query_order.set_direction(datastore_pb.Query_Order.ASCENDING)
      query_order.set_property(order)

  # Count queries just take note of the skipped results.
  count_query = datastore_pb.Query()
  count_query.CopyFrom(query)
  count_query.set_offset(1000)
  count_query.set_limit(0)
  result = datastore_pb.QueryResult()
  ds_access._Dynamic_RunQuery(count_query, result)
  total = result.skipped_results()

  query.set_limit(count)
  query.set_offset(start)
  result = datastore_pb.QueryResult()
  ds_access._Dynamic_RunQuery(query, result)
  entities = [datastore.Entity.FromPb(entity_pb)
              for entity_pb in result.result_list()]

  return entities, total
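A hypothetical usage sketch; ds_access stands in for a DatastoreDistributed client as in the other examples, and the kind and property names are placeholders:

entities, total = _get_entities(ds_access, kind='Greeting', namespace='',
                                order='-date', start=0, count=20)
print('showing %d of %d entities' % (len(entities), total))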
Example #5
    def _to_pb(self, conn, query_options):
        """Returns the internal only pb representation."""
        pb = datastore_pb.Query()

        pb.set_app(self.__app.encode('utf-8'))
        datastore_types.SetNamespace(pb, self.__namespace)
        if self.__kind is not None:
            pb.set_kind(self.__kind.encode('utf-8'))
        if self.__ancestor:
            pb.mutable_ancestor().CopyFrom(self.__ancestor)

        if self.__filter_predicate:
            for f in self.__filter_predicate._to_pbs():
                pb.add_filter().CopyFrom(f)

        if self.__order:
            for order in self.__order._to_pbs():
                pb.add_order().CopyFrom(order)

        if QueryOptions.keys_only(query_options, conn.config):
            pb.set_keys_only(True)

        if QueryOptions.produce_cursors(query_options, conn.config):
            pb.set_compile(True)

        limit = QueryOptions.limit(query_options, conn.config)
        if limit is not None:
            pb.set_limit(limit)

        count = QueryOptions.prefetch_size(query_options, conn.config)
        if count is None:
            count = QueryOptions.batch_size(query_options, conn.config)
        if count is not None:
            pb.set_count(count)

        if query_options.offset:
            pb.set_offset(query_options.offset)

        if query_options.start_cursor is not None:
            pb.mutable_compiled_cursor().CopyFrom(
                query_options.start_cursor._to_pb())

        if query_options.end_cursor is not None:
            pb.mutable_end_compiled_cursor().CopyFrom(
                query_options.end_cursor._to_pb())

        if ((query_options.hint == QueryOptions.ORDER_FIRST and self.__order)
                or (query_options.hint == QueryOptions.ANCESTOR_FIRST
                    and self.__ancestor)
                or (query_options.hint == QueryOptions.FILTER_FIRST
                    and pb.filter_size() > 0)):
            pb.set_hint(query_options.hint)

        conn._set_request_read_policy(pb, query_options)
        conn._set_request_transaction(pb)

        return pb
Example #6
    def MakeSyncCall(self, service, call, request, response):
        if service == 'datastore_v3' and call == 'RunQuery':
            clone = datastore_pb.Query()
            clone.CopyFrom(request)
            clone.clear_hint()
            self.__query_history[clone] = self.__query_history.get(clone, 0) + 1
        super(RecordingSocketApiProxyStub,
              self).MakeSyncCall(service, call, request, response)
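The design choice worth noting here: the request is cloned and its hint cleared before being used as a dictionary key, so requests that differ only in planner hints count as the same query. A minimal sketch of that normalization idea (other stubs below also clear limit, offset, and count):

def record_query(history, request):
    # Strip per-request noise so logically identical queries hash together.
    clone = datastore_pb.Query()
    clone.CopyFrom(request)
    clone.clear_hint()
    history[clone] = history.get(clone, 0) + 1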
Example #7
    def _Dynamic_RunQuery(self, query, query_result):
        super(DatastoreSqliteStub, self)._Dynamic_RunQuery(query, query_result)

        clone = datastore_pb.Query()
        clone.CopyFrom(query)
        clone.clear_hint()
        clone.clear_limit()
        clone.clear_count()
        clone.clear_offset()
        self.__query_history[clone] = self.__query_history.get(clone, 0) + 1
Example #8
  def test_ancestor_query(self):
    query = datastore_pb.Query()
    ancestor = query.mutable_ancestor()
    entity_proto1 = self.get_new_entity_proto(
      "test", "test_kind", "nancy", "prop1name", "prop1val", ns="blah")
    entity_key = entity_proto1.key()
    get_req = datastore_pb.GetRequest()
    key = get_req.add_key()
    key.MergeFrom(entity_key)
    ancestor.MergeFrom(entity_key)

    async_result = gen.Future()
    async_result.set_result({
      "test\x00blah\x00test_kind:nancy\x01": {
        APP_ENTITY_SCHEMA[0]: entity_proto1.Encode(),
        APP_ENTITY_SCHEMA[1]: 1
      }
    })

    filter_info = []
    tombstone1 = {'key': {APP_ENTITY_SCHEMA[0]:TOMBSTONE, APP_ENTITY_SCHEMA[1]: 1}}
    db_batch = flexmock()
    db_batch.should_receive('record_reads').and_return(ASYNC_NONE)
    db_batch.should_receive('valid_data_version_sync').and_return(True)
    db_batch.should_receive("batch_get_entity").and_return(async_result)

    entity_proto1 = {
      'test\x00blah\x00test_kind:nancy\x01': {
        APP_ENTITY_SCHEMA[0]: entity_proto1.Encode(),
        APP_ENTITY_SCHEMA[1]: 1
      }
    }
    async_result_1 = gen.Future()
    async_result_1.set_result([entity_proto1, tombstone1])
    async_result_2 = gen.Future()
    async_result_2.set_result([])

    db_batch.should_receive("range_query").\
      and_return(async_result_1).\
      and_return(async_result_2)

    zk_client = flexmock()
    zk_client.should_receive('add_listener')

    zookeeper = flexmock(handle=zk_client)
    zookeeper.should_receive("get_valid_transaction_id").and_return(1)
    zookeeper.should_receive("acquire_lock").and_return(True)
    zookeeper.should_receive("is_in_transaction").and_return(False)
    transaction_manager = flexmock()
    dd = DatastoreDistributed(db_batch, transaction_manager, zookeeper)
    yield dd.ancestor_query(query, filter_info)
    # Now with a transaction
    transaction = query.mutable_transaction()
    transaction.set_handle(2)
    yield dd.ancestor_query(query, filter_info)
Example #9
  def _Dynamic_RunQuery(self, query, query_result):
    super(DatastoreFileStub, self)._Dynamic_RunQuery(query, query_result)

    clone = datastore_pb.Query()
    clone.CopyFrom(query)
    clone.clear_hint()
    clone.clear_limit()
    clone.clear_offset()
    if clone in self.__query_history:
      self.__query_history[clone] += 1
    else:
      self.__query_history[clone] = 1
Example #10
    def Read(self):
        """ Reads the datastore and history files into memory.

    The in-memory query history is cleared, but the datastore is *not*
    cleared; the entities in the files are merged into the entities in memory.
    If you want them to overwrite the in-memory datastore, call Clear() before
    calling Read().

    If the datastore file contains an entity with the same app name, kind, and
    key as an entity already in the datastore, the entity from the file
    overwrites the entity in the datastore.

    Also sets __next_id to one greater than the highest id allocated so far.
    """
        pb_exceptions = (ProtocolBuffer.ProtocolBufferDecodeError, LookupError,
                         TypeError, ValueError)
        error_msg = ('Data in %s is corrupt or a different version. '
                     'Try running with the --clear_datastore flag.\n%r')

        if self.__datastore_file and self.__datastore_file != '/dev/null':
            for encoded_entity in self.__ReadPickled(self.__datastore_file):
                try:
                    entity = entity_pb.EntityProto(encoded_entity)
                except pb_exceptions, e:
                    raise datastore_errors.InternalError(
                        error_msg % (self.__datastore_file, e))

                last_path = entity.key().path().element_list()[-1]
                app_kind = (entity.key().app(), last_path.type())
                kind_dict = self.__entities.setdefault(app_kind, {})
                kind_dict[entity.key()] = entity

                if last_path.has_id() and last_path.id() >= self.__next_id:
                    self.__next_id = last_path.id() + 1

            self.__query_history = {}
            for encoded_query, count in self.__ReadPickled(
                    self.__history_file):
                try:
                    query_pb = datastore_pb.Query(encoded_query)
                except pb_exceptions, e:
                    raise datastore_errors.InternalError(
                        error_msg % (self.__history_file, e))

                if query_pb in self.__query_history:
                    self.__query_history[query_pb] += count
                else:
                    self.__query_history[query_pb] = count
Example #11
    def run_query(self, http_request_data):
        """ High level function for running queries.

    Args:
      http_request_data: Stores the protocol buffer request from the AppServer.
    Returns:
      Returns an encoded query response.
    """
        global datastore_access
        query = datastore_pb.Query(http_request_data)
        clone_qr_pb = UnprocessedQueryResult()
        try:
            datastore_access._dynamic_run_query(query, clone_qr_pb)
        except zktransaction.ZKBadRequest, zkie:
            logger.exception(
                'Illegal arguments in transaction during {}'.format(query))
            return (clone_qr_pb.Encode(), datastore_pb.Error.BAD_REQUEST,
                    "Illegal arguments for transaction. {0}".format(str(zkie)))
Example #12
def _WidenQueryProto(query_pb):
    """Return a simple query that is a superset of the requested query.

  Args:
    query_pb: A datastore_pb.Query object that requires a composite index.

  Returns:
    A datastore_pb.Query object that does not require a composite index, or
    None if the original query cannot be widened.
  """

    # Check for features that cannot be handled.
    if (query_pb.has_compiled_cursor() or query_pb.has_end_compiled_cursor()):
        return None

    # Assume that most fields carry over intact.
    wide_pb = datastore_pb.Query()
    wide_pb.CopyFrom(query_pb)

    # Remove any offset/limit since we'll apply those later.
    wide_pb.clear_offset()
    wide_pb.clear_limit()

    # Only keep EQUAL filters.
    eq_filters = [f for f in query_pb.filter_list()
                  if f.op() == datastore_pb.Query_Filter.EQUAL]
    wide_pb.clear_filter()
    for f in eq_filters:
        wide_pb.add_filter().CopyFrom(f)

    # Remove orders.
    #
    # TODO: technically we could support a single ascending
    # order, but since we're going to buffer everything in memory it
    # doesn't matter if we leave any orders in the widened query.  If in
    # the future we stream results for queries that are only widened due
    # to filters then it might be beneficial to leave the orders intact
    # if they consist of a single ascending order.
    wide_pb.clear_order()

    # The keys-only field must be set to False since the full entities are
    # required for post-processing.
    wide_pb.set_keys_only(False)

    return wide_pb
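A hypothetical round trip under the assumptions above: a query mixing an EQUAL filter with an inequality filter is widened to the EQUAL-only superset, and the caller is expected to re-apply the dropped filters and orders in memory. App id and kind are placeholders.

query = datastore_pb.Query()
query.set_app('myapp')            # placeholder app id
query.set_kind('Greeting')        # placeholder kind
query.add_filter().set_op(datastore_pb.Query_Filter.EQUAL)
query.add_filter().set_op(datastore_pb.Query_Filter.GREATER_THAN)

wide = _WidenQueryProto(query)
assert wide.filter_size() == 1    # only the EQUAL filter survives
assert not wide.keys_only()       # full entities kept for post-processing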
Example #13
  def _get_kinds(cls, ds_access, namespace):
    """ Returns a sorted list of kind names present in the given namespace.

    Args:
      ds_access: A DatastoreDistributed client.
      namespace: A string specifying the datastore namespace.
    Returns:
      A list of strings specifying kind names.
    """
    assert namespace is not None
    query = datastore_pb.Query()
    query.set_name_space(namespace)
    query.set_app(ds_access.project_id)
    query.set_kind('__kind__')
    result = datastore_pb.QueryResult()
    ds_access._Dynamic_RunQuery(query, result)
    kinds = [entity.key().path().element(0).name()
             for entity in result.result_list()]
    return sorted(kinds)
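Hypothetical usage; the enclosing class is not shown in this snippet, so AdminHandler is a placeholder for whatever class owns this classmethod:

kinds = AdminHandler._get_kinds(ds_access, namespace='')
assert kinds == sorted(kinds)     # names come back sorted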
Example #14
    def _DecodeCompiledCursor(self, query, compiled_cursor):
        """Converts a compiled_cursor into a cursor_entity.

    Returns:
      (cursor_entity, inclusive): a datastore_pb.EntityProto and if it should
      be included in the result set.
    """
        assert len(compiled_cursor.position_list()) == 1

        position = compiled_cursor.position(0)
        entity_as_pb = datastore_pb.EntityProto()
        (query_info_encoded,
         entity_encoded) = position.start_key().split(_CURSOR_CONCAT_STR, 1)
        query_info_pb = datastore_pb.Query()
        query_info_pb.ParseFromString(query_info_encoded)
        self._ValidateQuery(query, query_info_pb)

        entity_as_pb.ParseFromString(entity_encoded)
        return (entity_as_pb, position.start_inclusive())
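The inverse operation is not shown in this example, but from the split above a start_key is presumably the encoded query-info pb concatenated with the encoded entity pb (Example #22 below builds a three-part variant that also prepends a result count). A sketch of that assumed encoder:

def _encode_compiled_cursor_sketch(query_info_pb, entity_proto):
    # Assumed inverse of the split above; _CURSOR_CONCAT_STR is the same
    # separator _DecodeCompiledCursor splits on.
    return _CURSOR_CONCAT_STR.join(
        (query_info_pb.Encode(), entity_proto.Encode()))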
Example #15
  def test_zigzag_merge_join(self):
    zk_client = flexmock()
    zk_client.should_receive('add_listener')

    zookeeper = flexmock(handle=zk_client)
    db_batch = flexmock()
    db_batch.should_receive('valid_data_version_sync').and_return(True)

    query = datastore_pb.Query()
    transaction_manager = flexmock()
    dd = DatastoreDistributed(db_batch, transaction_manager, zookeeper)
    flexmock(dd).should_receive("is_zigzag_merge_join").and_return(False)
    result = yield dd.zigzag_merge_join(None, None, None)
    self.assertEquals(result, (None, False))

    filter_info = {
      "prop1": [(datastore_pb.Query_Filter.EQUAL, "1")],
      "prop2": [(datastore_pb.Query_Filter.EQUAL, "2")]
    }
    result = yield dd.zigzag_merge_join(query, filter_info, [])
    self.assertEquals(result, (None, False))
Example #16
    def _DecodeCompiledCursor(self, compiled_cursor):
        """Converts a compiled_cursor into a cursor_entity.

    Args:
      compiled_cursor: Cursor instance to decode.

    Returns:
      (offset, query_pb, cursor_entity, inclusive)
    """
        assert len(compiled_cursor.position_list()) == 1

        position = compiled_cursor.position(0)
        entity_pb = datastore_pb.EntityProto()
        (count, query_info_encoded,
         entity_encoded) = position.start_key().split(_CURSOR_CONCAT_STR)
        query_info_pb = datastore_pb.Query()
        query_info_pb.ParseFromString(query_info_encoded)
        entity_pb.ParseFromString(entity_encoded)
        offset = int(count) + query_info_pb.offset()
        return (offset, query_info_pb,
                datastore.Entity._FromPb(entity_pb,
                                         True), position.start_inclusive())
Example #17
  def test_ordered_ancestor_query(self):
    query = datastore_pb.Query()
    ancestor = query.mutable_ancestor()
    entity_proto1 = self.get_new_entity_proto("test", "test_kind", "nancy", "prop1name",
                                              "prop1val", ns="blah")
    entity_key = entity_proto1.key()
    get_req = datastore_pb.GetRequest()
    key = get_req.add_key()
    key.MergeFrom(entity_key)
    ancestor.MergeFrom(entity_key)

    filter_info = []
    tombstone1 = {'key': {APP_ENTITY_SCHEMA[0]:TOMBSTONE, APP_ENTITY_SCHEMA[1]: 1}}
    db_batch = flexmock()
    db_batch.should_receive('valid_data_version').and_return(True)
    db_batch.should_receive("batch_get_entity").and_return(
               {"test\x00blah\x00test_kind:nancy\x01":
                 {
                   APP_ENTITY_SCHEMA[0]: entity_proto1.Encode(),
                   APP_ENTITY_SCHEMA[1]: 1
                 }
               })
    db_batch.should_receive('record_reads')

    db_batch.should_receive("batch_put_entity").and_return(None)
    entity_proto1 = {'test\x00blah\x00test_kind:nancy\x01':{APP_ENTITY_SCHEMA[0]:entity_proto1.Encode(),
                      APP_ENTITY_SCHEMA[1]: 1}}
    db_batch.should_receive("range_query").and_return([entity_proto1, tombstone1]).and_return([])
    zookeeper = flexmock()
    zookeeper.should_receive("get_valid_transaction_id").and_return(1)
    zookeeper.should_receive("acquire_lock").and_return(True)
    zookeeper.should_receive("is_in_transaction").and_return(False)
    dd = DatastoreDistributed(db_batch, zookeeper)
    dd.ordered_ancestor_query(query, filter_info, None)

    # Now with a transaction
    transaction = query.mutable_transaction()
    transaction.set_handle(2)
    dd.ordered_ancestor_query(query, filter_info, None) 
Example #18
    def _GetEntitiesInEntityGroup(self, entity_group):
        query = datastore_pb.Query()
        query.set_app(entity_group.app())
        if entity_group.name_space():
            query.set_name_space(entity_group.name_space())
        query.mutable_ancestor().CopyFrom(entity_group)

        filter_info = self.__GenerateFilterInfo(query.filter_list(), query)
        order_info = self.__GenerateOrderInfo(query.order_list())
        sql_stmt, params = self.__KindQuery(query, filter_info, order_info)

        conn = self._GetConnection()
        try:
            db_cursor = conn.execute(sql_stmt, params)
            entities = (entity_pb.EntityProto(row[1])
                        for row in db_cursor.fetchall())
            return dict(
                (datastore_types.ReferenceToKeyValue(entity.key()), entity)
                for entity in entities)
        finally:
            self._ReleaseConnection(conn)
Example #19
  def test_extract_entities_from_composite_indexes(self):
    project_id = 'guestbook'
    props = ['prop1', 'prop2']
    db_batch = flexmock()
    db_batch.should_receive('valid_data_version_sync').and_return(True)
    transaction_manager = flexmock()
    dd = DatastoreDistributed(db_batch, transaction_manager,
                              self.get_zookeeper())
    query = datastore_pb.Query()
    for prop_name in props:
      query.add_property_name(prop_name)

    index = query.add_composite_index()
    definition = index.mutable_definition()
    for prop_name in props:
      prop = definition.add_property()
      prop.set_name(prop_name)

    entity_id = 1524699263329044
    val1 = entity_pb.PropertyValue()
    val1.set_int64value(5)
    val2 = entity_pb.PropertyValue()
    val2.set_stringvalue('test')
    index_key = '\x00'.join(
      [project_id, 'namespace', 'index1', str(encode_index_pb(val1)),
       str(encode_index_pb(val2)), 'Greeting:{}\x01'.format(entity_id)])

    index_results = [{index_key: {'reference': 'ignored-ref'}}]
    entities = dd._extract_entities_from_composite_indexes(
      query, index_results, index)
    self.assertEqual(len(entities), 1)
    returned_entity = entity_pb.EntityProto(entities[0])
    self.assertEqual(returned_entity.property_size(), 2)
    self.assertEqual(returned_entity.key().path().element(0).type(), 'Greeting')
    self.assertEqual(returned_entity.key().path().element(0).id(), entity_id)
    self.assertEqual(returned_entity.property(0).name(), 'prop1')
    self.assertEqual(returned_entity.property(0).value().int64value(), 5)
    self.assertEqual(returned_entity.property(1).name(), 'prop2')
    self.assertEqual(returned_entity.property(1).value().stringvalue(), 'test')
Example #20
  def test_zigzag_merge_join(self):
    zookeeper = flexmock()
    zookeeper.should_receive("get_transaction_id").and_return(1)
    zookeeper.should_receive("get_valid_transaction_id").and_return(1)
    zookeeper.should_receive("register_updated_key").and_return(1)
    zookeeper.should_receive("acquire_lock").and_return(True)
    zookeeper.should_receive("release_lock").and_return(True)
    db_batch = flexmock()
    db_batch.should_receive("batch_delete").and_return(None)
    db_batch.should_receive("batch_put_entity").and_return(None)
    db_batch.should_receive("batch_get_entity").and_return(None)

    query = datastore_pb.Query()
    dd = DatastoreDistributed(db_batch, zookeeper) 
    flexmock(dd).should_receive("is_zigzag_merge_join").and_return(False)
    self.assertEquals(dd.zigzag_merge_join(None, None, None), None)

    filter_info = {"prop1":[(datastore_pb.Query_Filter.EQUAL, "1")],
      "prop2": [(datastore_pb.Query_Filter.EQUAL, "2")]}
    flexmock(query).should_receive("kind").and_return("kind")
    flexmock(dd).should_receive("get_table_prefix").and_return("prefix")
    flexmock(dd).should_receive("__apply_filters").and_return([])
    flexmock(query).should_receive("limit").and_return(1)
    self.assertEquals(dd.zigzag_merge_join(query, filter_info, []), None)
Example #21
    def _DecodeCompiledCursor(self, query, compiled_cursor):
        """Converts a compiled_cursor into a cursor_entity.

    Returns:
      (cursor_entity, inclusive): a datastore_pb.EntityProto and if it should
      be included in the result set.
    """
        assert len(compiled_cursor.position_list()) == 1

        position = compiled_cursor.position(0)
        entity_as_pb = datastore_pb.EntityProto()
        if position.start_key():
            (query_info_encoded, entity_encoded) = \
                       position.start_key().split(_CURSOR_CONCAT_STR, 1)
            query_info_pb = datastore_pb.Query()
            query_info_pb.ParseFromString(query_info_encoded)
            entity_as_pb.ParseFromString(entity_encoded)
        else:
            # Java doesn't include a start_key(), so we will create the last
            # entity from the position variable.
            entity_as_pb.key().MergeFrom(position.key_)
            entity_as_pb.entity_group().MergeFrom(position.key_.path_)
        return (entity_as_pb, position.start_inclusive())
Example #22
    def _Dynamic_RunQuery(self, query, query_result):
        if query.keys_only():
            query_result.set_keys_only(True)

        num_components = len(query.filter_list()) + len(query.order_list())
        if query.has_ancestor():
            num_components += 1
        if num_components > _MAX_QUERY_COMPONENTS:
            raise apiproxy_errors.ApplicationError(
                datastore_pb.Error.BAD_REQUEST,
                ('query is too large. may not have more than %s filters'
                 ' + sort orders ancestor total' % _MAX_QUERY_COMPONENTS))

        app = query.app()

        query_result.mutable_cursor().set_cursor(0)
        query_result.set_more_results(False)

        if self.__require_indexes:
            (required, kind, ancestor, props,
             num_eq_filters) = (datastore_index.CompositeIndexForQuery(query))
            if required:
                index = entity_pb.CompositeIndex()
                index.mutable_definition().set_entity_type(kind)
                index.mutable_definition().set_ancestor(ancestor)
                for (k, v) in props:
                    p = index.mutable_definition().add_property()
                    p.set_name(k)
                    p.set_direction(v)

                if props and not self.__has_index(index):
                    raise apiproxy_errors.ApplicationError(
                        datastore_pb.Error.NEED_INDEX,
                        "This query requires a composite index that is not defined. "
                        "You must update the index.yaml file in your application root."
                    )

        collection = query.kind()
        if query.has_name_space():
            collection = (query.name_space() + _NAMESPACE_CONCAT_STR +
                          collection)

        clone = datastore_pb.Query()
        clone.CopyFrom(query)
        clone.clear_hint()
        if clone in self.__query_history:
            self.__query_history[clone] += 1
        else:
            self.__query_history[clone] = 1

        # HACK we need to get one Entity from this collection so we know what the
        # property types are (because we need to construct queries that depend on
        # the types of the properties)...
        try:
            prototype = self.__db[collection].find_one()
        except pymongo.errors.InvalidName:
            raise datastore_errors.BadRequestError('query without kind')
        if prototype is None:
            return
        prototype = datastore.Entity._FromPb(
            self.__entity_for_mongo_document(prototype))

        spec = {}

        if query.has_ancestor():
            spec["_id"] = re.compile("^%s.*$" %
                                     self.__id_for_key(query.ancestor()))

        operators = {
            datastore_pb.Query_Filter.LESS_THAN: '<',
            datastore_pb.Query_Filter.LESS_THAN_OR_EQUAL: '<=',
            datastore_pb.Query_Filter.GREATER_THAN: '>',
            datastore_pb.Query_Filter.GREATER_THAN_OR_EQUAL: '>=',
            datastore_pb.Query_Filter.EQUAL: '==',
        }

        for filt in query.filter_list():
            assert filt.op() != datastore_pb.Query_Filter.IN

            prop = filt.property(0).name().decode('utf-8')
            op = operators[filt.op()]

            filter_val_list = [
                datastore_types.FromPropertyPb(filter_prop)
                for filter_prop in filt.property_list()
            ]

            (key, value) = self.__filter_binding(prop, filter_val_list[0], op,
                                                 prototype)

            if key in spec:
                if (not isinstance(spec[key], types.DictType)
                        and not isinstance(value, types.DictType)):
                    if spec[key] != value:
                        return
                elif not isinstance(spec[key], types.DictType):
                    value["$in"] = [spec[key]]
                    spec[key] = value
                elif not isinstance(value, types.DictType):
                    spec[key]["$in"] = [value]
                else:
                    spec[key].update(value)
            else:
                spec[key] = value

        offset = 0
        # Cursor magic
        if query.has_compiled_cursor():
            offset, query_pb, unused_spec, incl = self._DecodeCompiledCursor(
                query.compiled_cursor())

        cursor = self.__db[collection].find(spec)

        order = self.__translate_order_for_mongo(query.order_list(), prototype)
        if order is None:
            return
        if order:
            cursor = cursor.sort(order)

        if query.offset() == datastore._MAX_INT_32:
            query.set_offset(0)
            query.set_limit(datastore._MAX_INT_32)

        if offset:
            cursor = cursor.skip(int(offset))
        elif query.has_offset() and query.offset() != _MAX_QUERY_OFFSET:
            cursor = cursor.skip(int(query.offset()))
        if query.has_limit():
            cursor = cursor.limit(int(query.limit()))

        self.__cursor_lock.acquire()
        cursor_index = self.__next_cursor
        self.__next_cursor += 1
        self.__cursor_lock.release()
        self.__queries[cursor_index] = cursor

        # Cursor magic
        compiled_cursor = query_result.mutable_compiled_cursor()
        position = compiled_cursor.add_position()
        query_info = self._MinimalQueryInfo(query)
        cloned_cursor = cursor.clone()
        results = list(cloned_cursor)
        if results:
            start_key = _CURSOR_CONCAT_STR.join(
                (str(len(results) + offset), query_info.Encode(),
                 self.__entity_for_mongo_document(results[-1]).Encode()))
            # Populate query result
            result_list = query_result.result_list()
            for doc in results:
                result_list.append(self.__entity_for_mongo_document(doc))
            query_result.set_skipped_results(len(results))
            position.set_start_key(str(start_key))
            position.set_start_inclusive(False)
        del cloned_cursor

        query_result.mutable_cursor().set_cursor(cursor_index)
        query_result.set_more_results(False)
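Illustrative only: the filter evaluation above leans on repr() and eval() to build and execute a comparison expression at runtime, e.g.:

fixed_entity_val, op, filter_val = 5, '<', 7
comp = u'%r %s %r' % (fixed_entity_val, op, filter_val)   # u'5 < 7'
assert eval(comp) is True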
Example #23
    def _Dynamic_RunQuery(self, query, query_result):
        if query.has_offset() and query.offset() > _MAX_QUERY_OFFSET:
            raise apiproxy_errors.ApplicationError(
                datastore_pb.Error.BAD_REQUEST, 'Too big query offset.')

        if query.keys_only():
            query_result.set_keys_only(True)

        num_components = len(query.filter_list()) + len(query.order_list())
        if query.has_ancestor():
            num_components += 1
        if num_components > _MAX_QUERY_COMPONENTS:
            raise apiproxy_errors.ApplicationError(
                datastore_pb.Error.BAD_REQUEST,
                ('query is too large. may not have more than %s filters'
                 ' + sort orders ancestor total' % _MAX_QUERY_COMPONENTS))

        app = query.app()

        query_result.mutable_cursor().set_cursor(0)
        query_result.set_more_results(False)

        if self.__require_indexes:
            required, kind, ancestor, props, num_eq_filters = datastore_index.CompositeIndexForQuery(
                query)
            if required:
                index = entity_pb.CompositeIndex()
                index.mutable_definition().set_entity_type(kind)
                index.mutable_definition().set_ancestor(ancestor)
                for (k, v) in props:
                    p = index.mutable_definition().add_property()
                    p.set_name(k)
                    p.set_direction(v)

                if props and not self.__has_index(index):
                    raise apiproxy_errors.ApplicationError(
                        datastore_pb.Error.NEED_INDEX,
                        "This query requires a composite index that is not defined. "
                        "You must update the index.yaml file in your application root."
                    )

        collection = query.kind()

        clone = datastore_pb.Query()
        clone.CopyFrom(query)
        clone.clear_hint()
        if clone in self.__query_history:
            self.__query_history[clone] += 1
        else:
            self.__query_history[clone] = 1

        # HACK we need to get one Entity from this collection so we know what the
        # property types are (because we need to construct queries that depend on
        # the types of the properties)...
        prototype = self.__db[collection].find_one()
        if prototype is None:
            return
        prototype = datastore.Entity._FromPb(
            self.__entity_for_mongo_document(prototype))

        spec = {}

        if query.has_ancestor():
            spec["_id"] = re.compile("^%s.*$" %
                                     self.__id_for_key(query.ancestor()))

        operators = {
            datastore_pb.Query_Filter.LESS_THAN: '<',
            datastore_pb.Query_Filter.LESS_THAN_OR_EQUAL: '<=',
            datastore_pb.Query_Filter.GREATER_THAN: '>',
            datastore_pb.Query_Filter.GREATER_THAN_OR_EQUAL: '>=',
            datastore_pb.Query_Filter.EQUAL: '==',
        }

        for filt in query.filter_list():
            assert filt.op() != datastore_pb.Query_Filter.IN

            prop = filt.property(0).name().decode('utf-8')
            op = operators[filt.op()]

            filter_val_list = [
                datastore_types.FromPropertyPb(filter_prop)
                for filter_prop in filt.property_list()
            ]

            (key, value) = self.__filter_binding(prop, filter_val_list[0], op,
                                                 prototype)

            if key in spec:
                if not isinstance(spec[key],
                                  types.DictType) and not isinstance(
                                      value, types.DictType):
                    if spec[key] != value:
                        return
                elif not isinstance(spec[key], types.DictType):
                    value["$in"] = [spec[key]]
                    spec[key] = value
                elif not isinstance(value, types.DictType):
                    spec[key]["$in"] = [value]
                else:
                    spec[key].update(value)
            else:
                spec[key] = value

        cursor = self.__db[collection].find(spec)

        order = self.__translate_order_for_mongo(query.order_list(), prototype)
        if order is None:
            return
        if order:
            cursor = cursor.sort(order)

        if query.has_offset():
            cursor = cursor.skip(query.offset())
        if query.has_limit():
            cursor = cursor.limit(query.limit())

        self.__cursor_lock.acquire()
        cursor_index = self.__next_cursor
        self.__next_cursor += 1
        self.__cursor_lock.release()
        self.__queries[cursor_index] = cursor

        query_result.mutable_cursor().set_cursor(cursor_index)
        query_result.set_more_results(True)
Example #24
    def _Dynamic_RunQuery(self, query, query_result):
        if not self.__tx_lock.acquire(False):
            raise apiproxy_errors.ApplicationError(
                datastore_pb.Error.BAD_REQUEST,
                "Can't query inside a transaction.")
        else:
            self.__tx_lock.release()

        app = self.ResolveAppId(query.app())

        if self.__require_indexes:
            required_index = datastore_index.CompositeIndexForQuery(query)
            if required_index is not None:
                kind, ancestor, props, num_eq_filters = required_index
                required_key = kind, ancestor, props
                indexes = self.__indexes.get(app)
                if not indexes:
                    raise apiproxy_errors.ApplicationError(
                        datastore_pb.Error.BAD_REQUEST,
                        "This query requires a composite index, but none are defined. "
                        "You must create an index.yaml file in your application root."
                    )
                eq_filters_set = set(props[:num_eq_filters])
                remaining_filters = props[num_eq_filters:]
                for index in indexes:
                    definition = datastore_admin.ProtoToIndexDefinition(index)
                    index_key = datastore_index.IndexToKey(definition)
                    if required_key == index_key:
                        break
                    if num_eq_filters > 1 and (kind,
                                               ancestor) == index_key[:2]:
                        this_props = index_key[2]
                        this_eq_filters_set = set(this_props[:num_eq_filters])
                        this_remaining_filters = this_props[num_eq_filters:]
                        if (eq_filters_set == this_eq_filters_set and
                                remaining_filters == this_remaining_filters):
                            break
                else:
                    raise apiproxy_errors.ApplicationError(
                        datastore_pb.Error.BAD_REQUEST,
                        "This query requires a composite index that is not defined. "
                        "You must update the index.yaml file in your application root."
                    )

        try:
            query.set_app(app)
            results = self.__entities[app, query.kind()].values()
            results = [datastore.Entity._FromPb(pb) for pb in results]
        except KeyError:
            results = []

        if query.has_ancestor():
            ancestor_path = query.ancestor().path().element_list()

            def is_descendant(entity):
                path = entity.key()._Key__reference.path().element_list()
                return path[:len(ancestor_path)] == ancestor_path

            results = filter(is_descendant, results)

        operators = {
            datastore_pb.Query_Filter.LESS_THAN: '<',
            datastore_pb.Query_Filter.LESS_THAN_OR_EQUAL: '<=',
            datastore_pb.Query_Filter.GREATER_THAN: '>',
            datastore_pb.Query_Filter.GREATER_THAN_OR_EQUAL: '>=',
            datastore_pb.Query_Filter.EQUAL: '==',
        }

        for filt in query.filter_list():
            assert filt.op() != datastore_pb.Query_Filter.IN

            prop = filt.property(0).name().decode('utf-8')
            op = operators[filt.op()]

            def passes(entity):
                """ Returns True if the entity passes the filter, False otherwise. """
                entity_vals = entity.get(prop, [])
                if type(entity_vals) is not types.ListType:
                    entity_vals = [entity_vals]

                entity_property_list = [
                    datastore_types.ToPropertyPb(prop, value)
                    for value in entity_vals
                ]

                for entity_prop in entity_property_list:
                    fixed_entity_val = datastore_types.FromPropertyPb(
                        entity_prop)

                    for filter_prop in filt.property_list():
                        filter_val = datastore_types.FromPropertyPb(
                            filter_prop)

                        comp = u'%r %s %r' % (fixed_entity_val, op, filter_val)

                        logging.log(logging.DEBUG - 1,
                                    'Evaling filter expression "%s"', comp)

                        if eval(comp):
                            return True

                return False

            results = filter(passes, results)

        for order in query.order_list():
            prop = order.property().decode('utf-8')
            results = [entity for entity in results if prop in entity]

        def order_compare(a, b):
            """ Return a negative, zero or positive number depending on whether
      entity a is considered smaller than, equal to, or larger than b,
      according to the query's orderings. """
            for o in query.order_list():
                prop = o.property().decode('utf-8')

                a_values = a[prop]
                if not isinstance(a_values, types.ListType):
                    a_values = [a_values]

                b_values = b[prop]
                if not isinstance(b_values, types.ListType):
                    b_values = [b_values]

                cmped = cmp(min(a_values), min(b_values))

                if o.direction() is datastore_pb.Query_Order.DESCENDING:
                    cmped = -cmped

                if cmped != 0:
                    return cmped

            return 0

        results.sort(order_compare)

        if query.has_limit():
            results = results[:query.limit()]

        clone = datastore_pb.Query()
        clone.CopyFrom(query)
        clone.clear_hint()
        if clone in self.__query_history:
            self.__query_history[clone] += 1
        else:
            self.__query_history[clone] = 1
        self.__WriteHistory()

        results = [e._ToPb() for e in results]
        self.__cursor_lock.acquire()
        cursor = self.__next_cursor
        self.__next_cursor += 1
        self.__cursor_lock.release()
        self.__queries[cursor] = (results, len(results))

        query_result.mutable_cursor().set_cursor(cursor)
        query_result.set_more_results(len(results) > 0)
Example #25
    def _Dynamic_RunQuery(self, query, query_result):
        if query.has_transaction():
            if not query.has_ancestor():
                raise apiproxy_errors.ApplicationError(
                    datastore_pb.Error.BAD_REQUEST,
                    'Only ancestor queries are allowed inside transactions.')
        (filters, orders) = datastore_index.Normalize(query.filter_list(),
                                                      query.order_list())

        datastore_stub_util.FillUsersInQuery(filters)

        query_response = datastore_pb.QueryResult()
        query.set_app(self.__app_id)
        self._RemoteSend(query, query_response, "RunQuery")
        results = query_response.result_list()
        results = [datastore.Entity._FromPb(r) for r in results]

        operators = {
            datastore_pb.Query_Filter.LESS_THAN: '<',
            datastore_pb.Query_Filter.LESS_THAN_OR_EQUAL: '<=',
            datastore_pb.Query_Filter.GREATER_THAN: '>',
            datastore_pb.Query_Filter.GREATER_THAN_OR_EQUAL: '>=',
            datastore_pb.Query_Filter.EQUAL: '==',
        }

        def has_prop_indexed(entity, prop):
            """Returns True if prop is in the entity and is indexed."""
            if prop in datastore_types._SPECIAL_PROPERTIES:
                return True
            elif prop in entity.unindexed_properties():
                return False

            values = entity.get(prop, [])
            if not isinstance(values, (tuple, list)):
                values = [values]

            for value in values:
                if type(value) not in datastore_types._RAW_PROPERTY_TYPES:
                    return True
            return False

        def order_compare_entities(a, b):
            """ Return a negative, zero or positive number depending on whether
      entity a is considered smaller than, equal to, or larger than b,
      according to the query's orderings. """
            cmped = 0
            for o in orders:
                prop = o.property().decode('utf-8')

                reverse = (o.direction() is
                           datastore_pb.Query_Order.DESCENDING)

                a_val = datastore._GetPropertyValue(a, prop)
                if isinstance(a_val, list):
                    a_val = sorted(a_val,
                                   order_compare_properties,
                                   reverse=reverse)[0]

                b_val = datastore._GetPropertyValue(b, prop)
                if isinstance(b_val, list):
                    b_val = sorted(b_val,
                                   order_compare_properties,
                                   reverse=reverse)[0]

                cmped = order_compare_properties(a_val, b_val)

                if o.direction() is datastore_pb.Query_Order.DESCENDING:
                    cmped = -cmped

                if cmped != 0:
                    return cmped

            if cmped == 0:
                return cmp(a.key(), b.key())

        def order_compare_entities_pb(a, b):
            """ Return a negative, zero or positive number depending on whether
      entity a is considered smaller than, equal to, or larger than b,
      according to the query's orderings. a and b are protobuf-encoded
      entities."""
            return order_compare_entities(datastore.Entity.FromPb(a),
                                          datastore.Entity.FromPb(b))

        def order_compare_properties(x, y):
            """Return a negative, zero or positive number depending on whether
      property value x is considered smaller than, equal to, or larger than
      property value y. If x and y are different types, they're compared based
      on the type ordering used in the real datastore, which is based on the
      tag numbers in the PropertyValue PB.
      """
            if isinstance(x, datetime.datetime):
                x = datastore_types.DatetimeToTimestamp(x)
            if isinstance(y, datetime.datetime):
                y = datastore_types.DatetimeToTimestamp(y)

            x_type = self._PROPERTY_TYPE_TAGS.get(x.__class__)
            y_type = self._PROPERTY_TYPE_TAGS.get(y.__class__)

            if x_type == y_type:
                try:
                    return cmp(x, y)
                except TypeError:
                    return 0
            else:
                return cmp(x_type, y_type)

        clone = datastore_pb.Query()
        clone.CopyFrom(query)
        clone.clear_hint()
        clone.clear_limit()
        clone.clear_offset()
        results = [r._ToPb() for r in results]
        for result in results:
            datastore_stub_util.PrepareSpecialPropertiesForLoad(result)

        datastore_stub_util.ValidateQuery(query, filters, orders,
                                          _MAX_QUERY_COMPONENTS)

        cursor = datastore_stub_util.ListCursor(query, results,
                                                order_compare_entities_pb)
        self.__queries = cursor

        if query.has_count():
            count = query.count()
        elif query.has_limit():
            count = query.limit()
        else:
            count = _BATCH_SIZE

        cursor.PopulateQueryResult(query_result,
                                   count,
                                   query.offset(),
                                   compile=query.compile())

        if query.compile():
            compiled_query = query_result.mutable_compiled_query()
            compiled_query.set_keys_only(query.keys_only())
            compiled_query.mutable_primaryscan().set_index_name(query.Encode())
Example #26
  def _Dynamic_RunQuery(self, query, query_result):
    if query.has_transaction():
      self.__ValidateTransaction(query.transaction())
      if not query.has_ancestor():
        raise apiproxy_errors.ApplicationError(
          datastore_pb.Error.BAD_REQUEST,
          'Only ancestor queries are allowed inside transactions.')
      entities = self.__tx_snapshot
    else:
      entities = self.__entities

    app_id = query.app()
    namespace = query.name_space()
    self.__ValidateAppId(app_id)

    if query.has_offset() and query.offset() > _MAX_QUERY_OFFSET:
      raise apiproxy_errors.ApplicationError(
          datastore_pb.Error.BAD_REQUEST, 'Too big query offset.')

    num_components = len(query.filter_list()) + len(query.order_list())
    if query.has_ancestor():
      num_components += 1
    if num_components > _MAX_QUERY_COMPONENTS:
      raise apiproxy_errors.ApplicationError(
          datastore_pb.Error.BAD_REQUEST,
          ('query is too large. may not have more than %s filters'
           ' + sort orders ancestor total' % _MAX_QUERY_COMPONENTS))

    (filters, orders) = datastore_index.Normalize(query.filter_list(),
                                                  query.order_list())

    if self.__require_indexes:
      required, kind, ancestor, props, num_eq_filters = datastore_index.CompositeIndexForQuery(query)
      if required:
        required_key = kind, ancestor, props
        indexes = self.__indexes.get(app_id)
        if not indexes:
          raise apiproxy_errors.ApplicationError(
              datastore_pb.Error.NEED_INDEX,
              "This query requires a composite index, but none are defined. "
              "You must create an index.yaml file in your application root.")
        eq_filters_set = set(props[:num_eq_filters])
        remaining_filters = props[num_eq_filters:]
        for index in indexes:
          definition = datastore_index.ProtoToIndexDefinition(index)
          index_key = datastore_index.IndexToKey(definition)
          if required_key == index_key:
            break
          if num_eq_filters > 1 and (kind, ancestor) == index_key[:2]:
            this_props = index_key[2]
            this_eq_filters_set = set(this_props[:num_eq_filters])
            this_remaining_filters = this_props[num_eq_filters:]
            if (eq_filters_set == this_eq_filters_set and
                remaining_filters == this_remaining_filters):
              break
        else:
          raise apiproxy_errors.ApplicationError(
              datastore_pb.Error.NEED_INDEX,
              "This query requires a composite index that is not defined. "
              "You must update the index.yaml file in your application root.")

    try:
      query.set_app(app_id)
      datastore_types.SetNamespace(query, namespace)
      encoded = datastore_types.EncodeAppIdNamespace(app_id, namespace)
      if query.has_kind():
        results = entities[encoded, query.kind()].values()
        results = [entity.native for entity in results]
      else:
        results = []
        for key in entities:
          if key[0] == encoded:
            results += [entity.native for entity in entities[key].values()]
    except KeyError:
      results = []

    if query.has_ancestor():
      ancestor_path = query.ancestor().path().element_list()
      def is_descendant(entity):
        path = entity.key()._Key__reference.path().element_list()
        return path[:len(ancestor_path)] == ancestor_path
      results = filter(is_descendant, results)

    operators = {datastore_pb.Query_Filter.LESS_THAN:             '<',
                 datastore_pb.Query_Filter.LESS_THAN_OR_EQUAL:    '<=',
                 datastore_pb.Query_Filter.GREATER_THAN:          '>',
                 datastore_pb.Query_Filter.GREATER_THAN_OR_EQUAL: '>=',
                 datastore_pb.Query_Filter.EQUAL:                 '==',
                 }

    def has_prop_indexed(entity, prop):
      """Returns True if prop is in the entity and is indexed."""
      if prop in datastore_types._SPECIAL_PROPERTIES:
        return True
      elif prop in entity.unindexed_properties():
        return False

      values = entity.get(prop, [])
      if not isinstance(values, (tuple, list)):
        values = [values]

      for value in values:
        if type(value) not in datastore_types._RAW_PROPERTY_TYPES:
          return True
      return False

    for filt in filters:
      assert filt.op() != datastore_pb.Query_Filter.IN

      prop = filt.property(0).name().decode('utf-8')
      op = operators[filt.op()]

      filter_val_list = [datastore_types.FromPropertyPb(filter_prop)
                         for filter_prop in filt.property_list()]

      def passes_filter(entity):
        """Returns True if the entity passes the filter, False otherwise.

        The filter being evaluated is filt, the current filter that we're on
        in the list of filters in the query.
        """
        if not has_prop_indexed(entity, prop):
          return False

        try:
          entity_vals = datastore._GetPropertyValue(entity, prop)
        except KeyError:
          entity_vals = []

        if not isinstance(entity_vals, list):
          entity_vals = [entity_vals]

        for fixed_entity_val in entity_vals:
          for filter_val in filter_val_list:
            fixed_entity_type = self._PROPERTY_TYPE_TAGS.get(
              fixed_entity_val.__class__)
            filter_type = self._PROPERTY_TYPE_TAGS.get(filter_val.__class__)
            if fixed_entity_type == filter_type:
              comp = u'%r %s %r' % (fixed_entity_val, op, filter_val)
            elif op != '==':
              comp = '%r %s %r' % (fixed_entity_type, op, filter_type)
            else:
              continue

            logging.log(logging.DEBUG - 1,
                        'Evaling filter expression "%s"', comp)

            try:
              ret = eval(comp)
              if ret and ret != NotImplementedError:
                return True
            except TypeError:
              pass

        return False

      results = filter(passes_filter, results)

    for order in orders:
      prop = order.property().decode('utf-8')
      results = [entity for entity in results if has_prop_indexed(entity, prop)]

    def order_compare_entities(a, b):
      """ Return a negative, zero or positive number depending on whether
      entity a is considered smaller than, equal to, or larger than b,
      according to the query's orderings. """
      cmped = 0
      for o in orders:
        prop = o.property().decode('utf-8')

        reverse = (o.direction() is datastore_pb.Query_Order.DESCENDING)

        a_val = datastore._GetPropertyValue(a, prop)
        if isinstance(a_val, list):
          a_val = sorted(a_val, order_compare_properties, reverse=reverse)[0]

        b_val = datastore._GetPropertyValue(b, prop)
        if isinstance(b_val, list):
          b_val = sorted(b_val, order_compare_properties, reverse=reverse)[0]

        cmped = order_compare_properties(a_val, b_val)

        if o.direction() is datastore_pb.Query_Order.DESCENDING:
          cmped = -cmped

        if cmped != 0:
          return cmped

      if cmped == 0:
        return cmp(a.key(), b.key())

    def order_compare_properties(x, y):
      """Return a negative, zero or positive number depending on whether
      property value x is considered smaller than, equal to, or larger than
      property value y. If x and y are different types, they're compared based
      on the type ordering used in the real datastore, which is based on the
      tag numbers in the PropertyValue PB.
      """
      if isinstance(x, datetime.datetime):
        x = datastore_types.DatetimeToTimestamp(x)
      if isinstance(y, datetime.datetime):
        y = datastore_types.DatetimeToTimestamp(y)

      x_type = self._PROPERTY_TYPE_TAGS.get(x.__class__)
      y_type = self._PROPERTY_TYPE_TAGS.get(y.__class__)

      if x_type == y_type:
        try:
          return cmp(x, y)
        except TypeError:
          return 0
      else:
        return cmp(x_type, y_type)

    results.sort(order_compare_entities)

    clone = datastore_pb.Query()
    clone.CopyFrom(query)
    clone.clear_hint()
    if clone in self.__query_history:
      self.__query_history[clone] += 1
    else:
      self.__query_history[clone] = 1

    cursor = _Cursor(query, results, order_compare_entities)
    self.__queries[cursor.cursor] = cursor

    if query.has_count():
      count = query.count()
    elif query.has_limit():
      count = query.limit()
    else:
      count = _BATCH_SIZE

    cursor.PopulateQueryResult(query_result, count, compile=query.compile())

    if query.compile():
      compiled_query = query_result.mutable_compiled_query()
      compiled_query.set_keys_only(query.keys_only())
      compiled_query.mutable_primaryscan().set_index_name(query.Encode())
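

# A minimal standalone sketch of the eval-based filter matching used above.
# The tag values and helper name here are illustrative assumptions, not the
# stub's real internals: a value is only compared against a filter value when
# both map to the same type tag, mirroring the
# fixed_entity_type == filter_type check.
_EXAMPLE_TYPE_TAGS = {int: 1, long: 1, str: 2, unicode: 2}

def _example_passes(entity_val, op, filter_val):
  """Evaluates 'entity_val <op> filter_val' the way the stub does."""
  if (_EXAMPLE_TYPE_TAGS.get(type(entity_val)) !=
      _EXAMPLE_TYPE_TAGS.get(type(filter_val))):
    return False
  comp = u'%r %s %r' % (entity_val, op, filter_val)
  try:
    ret = eval(comp)
    return bool(ret and ret is not NotImplemented)
  except TypeError:
    return False

# _example_passes(3, '<', 5)    -> True
# _example_passes(3, '<', u'5') -> False: mismatched type tags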


class DatastoreFileStub(apiproxy_stub.APIProxyStub):
  """ Persistent stub for the Python datastore API.

  Stores all entities in memory, and persists them to a file as pickled
  protocol buffers. A DatastoreFileStub instance handles a single app's data
  and is backed by files on disk.
  """

  _PROPERTY_TYPE_TAGS = {
    datastore_types.Blob: entity_pb.PropertyValue.kstringValue,
    bool: entity_pb.PropertyValue.kbooleanValue,
    datastore_types.Category: entity_pb.PropertyValue.kstringValue,
    datetime.datetime: entity_pb.PropertyValue.kint64Value,
    datastore_types.Email: entity_pb.PropertyValue.kstringValue,
    float: entity_pb.PropertyValue.kdoubleValue,
    datastore_types.GeoPt: entity_pb.PropertyValue.kPointValueGroup,
    datastore_types.IM: entity_pb.PropertyValue.kstringValue,
    int: entity_pb.PropertyValue.kint64Value,
    datastore_types.Key: entity_pb.PropertyValue.kReferenceValueGroup,
    datastore_types.Link: entity_pb.PropertyValue.kstringValue,
    long: entity_pb.PropertyValue.kint64Value,
    datastore_types.PhoneNumber: entity_pb.PropertyValue.kstringValue,
    datastore_types.PostalAddress: entity_pb.PropertyValue.kstringValue,
    datastore_types.Rating: entity_pb.PropertyValue.kint64Value,
    str: entity_pb.PropertyValue.kstringValue,
    datastore_types.Text: entity_pb.PropertyValue.kstringValue,
    type(None): 0,
    unicode: entity_pb.PropertyValue.kstringValue,
    users.User: entity_pb.PropertyValue.kUserValueGroup,
    }
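
  # Note: several Python types share one tag above (str, unicode, Text,
  # Email, Link, etc. all map to kstringValue), so their values order
  # directly against each other, while, e.g., int and str never do:
  #   tags = DatastoreFileStub._PROPERTY_TYPE_TAGS
  #   tags[str] == tags[datastore_types.Text]   # True
  #   tags[int] == tags[str]                    # False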

  WRITE_ONLY = entity_pb.CompositeIndex.WRITE_ONLY
  READ_WRITE = entity_pb.CompositeIndex.READ_WRITE
  DELETED = entity_pb.CompositeIndex.DELETED
  ERROR = entity_pb.CompositeIndex.ERROR

  _INDEX_STATE_TRANSITIONS = {
    WRITE_ONLY: frozenset((READ_WRITE, DELETED, ERROR)),
    READ_WRITE: frozenset((DELETED,)),
    ERROR: frozenset((DELETED,)),
    DELETED: frozenset((ERROR,)),
  }
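
  # For example, a WRITE_ONLY index may become readable, but a READ_WRITE
  # index can only be deleted:
  #   READ_WRITE in _INDEX_STATE_TRANSITIONS[WRITE_ONLY]  -> True
  #   WRITE_ONLY in _INDEX_STATE_TRANSITIONS[READ_WRITE]  -> False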

  def __init__(self,
               app_id,
               datastore_file,
               history_file,
               require_indexes=False,
               service_name='datastore_v3'):
    """Constructor.

    Initializes and loads the datastore from the backing files, if they exist.

    Args:
      app_id: string
      datastore_file: string, stores all entities across sessions.  Pass
          None to skip file persistence.
      history_file: string, stores query history.  May be None, as with
          datastore_file.
      require_indexes: bool, default False.  If True, composite indexes must
          exist in index.yaml for queries that need them.
      service_name: Service name expected for all calls.
    """
    super(DatastoreFileStub, self).__init__(service_name)

    assert isinstance(app_id, basestring) and app_id != ''
    self.__app_id = app_id
    self.__datastore_file = datastore_file
    self.__history_file = history_file

    # (app id, kind) -> {key: _StoredEntity} for every stored entity.
    self.__entities = {}

    # (app id, kind) -> cached schema for that kind.
    self.__schema_cache = {}

    # Snapshot of entity state taken when a transaction begins.
    self.__tx_snapshot = {}

    # Integer cursor id -> outstanding query results.
    self.__queries = {}

    # Open transaction handles.
    self.__transactions = {}

    # App id -> list of composite index definitions.
    self.__indexes = {}
    self.__require_indexes = require_indexes

    # Query proto -> number of times that query has been run.
    self.__query_history = {}

    self.__next_id = 1
    self.__next_cursor = 1
    self.__next_tx_handle = 1
    self.__next_index_id = 1
    self.__id_lock = threading.Lock()
    self.__cursor_lock = threading.Lock()
    self.__tx_handle_lock = threading.Lock()
    self.__index_id_lock = threading.Lock()
    self.__tx_lock = threading.Lock()
    self.__entities_lock = threading.Lock()
    self.__file_lock = threading.Lock()
    self.__indexes_lock = threading.Lock()

    self.Read()
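
  # Hypothetical usage (paths are placeholders; assumes the SDK's standard
  # apiproxy registration pattern):
  #   stub = DatastoreFileStub('myapp', '/tmp/myapp.datastore',
  #                            '/tmp/myapp.datastore.history')
  #   apiproxy_stub_map.apiproxy.RegisterStub('datastore_v3', stub)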

  def Clear(self):
    """ Clears the datastore by deleting all currently stored entities and
    queries. """
    self.__entities = {}
    self.__queries = {}
    self.__transactions = {}
    self.__query_history = {}
    self.__schema_cache = {}

  def _AppKindForKey(self, key):
    """ Get (app, kind) tuple from given key.

    The (app, kind) tuple is used as an index into several internal
    dictionaries, e.g. __entities.

    Args:
      key: entity_pb.Reference

    Returns:
      Tuple (app, kind), both are unicode strings.
    """
    last_path = key.path().element_list()[-1]
    return key.app(), last_path.type()
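
  # Example with hypothetical values: a key in app 'myapp' whose path ends
  # in an element of type 'Person' yields (u'myapp', u'Person'), the tuple
  # used to index self.__entities.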

  def _StoreEntity(self, entity):
    """ Store the given entity.

    Args:
      entity: entity_pb.EntityProto
    """
    key = entity.key()
    app_kind = self._AppKindForKey(key)
    if app_kind not in self.__entities:
      self.__entities[app_kind] = {}
    self.__entities[app_kind][key] = _StoredEntity(entity)

    # Drop the cached schema for this kind; the new entity may change it.
    if app_kind in self.__schema_cache:
      del self.__schema_cache[app_kind]

  READ_PB_EXCEPTIONS = (ProtocolBuffer.ProtocolBufferDecodeError, LookupError,
                        TypeError, ValueError)
  READ_ERROR_MSG = ('Data in %s is corrupt or a different version. '
                    'Try running with the --clear_datastore flag.\n%r')
  READ_PY250_MSG = ('Are you using FloatProperty and/or GeoPtProperty? '
                    'Unfortunately loading float values from the datastore '
                    'file does not work with Python 2.5.0. '
                    'Please upgrade to a newer Python 2.5 release or use '
                    'the --clear_datastore flag.\n')

  def Read(self):
    """ Reads the datastore and history files into memory.

    The in-memory query history is cleared, but the datastore is *not*
    cleared; the entities in the files are merged into the entities in memory.
    If you want them to overwrite the in-memory datastore, call Clear() before
    calling Read().

    If the datastore file contains an entity with the same app name, kind, and
    key as an entity already in the datastore, the entity from the file
    overwrites the entity in the datastore.

    Also sets __next_id to one greater than the highest id allocated so far.
    """
    if self.__datastore_file and self.__datastore_file != '/dev/null':
      for encoded_entity in self.__ReadPickled(self.__datastore_file):
        try:
          entity = entity_pb.EntityProto(encoded_entity)
        except self.READ_PB_EXCEPTIONS, e:
          raise datastore_errors.InternalError(self.READ_ERROR_MSG %
                                               (self.__datastore_file, e))
        except struct.error, e:
          if (sys.version_info[0:3] == (2, 5, 0)
              and e.message.startswith('unpack requires a string argument')):
            raise datastore_errors.InternalError(self.READ_PY250_MSG +
                                                 self.READ_ERROR_MSG %
                                                 (self.__datastore_file, e))
          else:
            raise

        self._StoreEntity(entity)

        last_path = entity.key().path().element_list()[-1]
        if last_path.has_id() and last_path.id() >= self.__next_id:
          self.__next_id = last_path.id() + 1

      self.__query_history = {}
      for encoded_query, count in self.__ReadPickled(self.__history_file):
        try:
          query_pb = datastore_pb.Query(encoded_query)
        except self.READ_PB_EXCEPTIONS, e:
          raise datastore_errors.InternalError(self.READ_ERROR_MSG %
                                               (self.__history_file, e))

        if query_pb in self.__query_history:
          self.__query_history[query_pb] += count
        else:
          self.__query_history[query_pb] = count
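
  # Example: Read() merges file contents into memory, so a reload that
  # discards in-memory state takes two calls:
  #   stub.Clear()
  #   stub.Read()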

  def _Dynamic_RunQuery(self, query, query_result):
    # Probe the transaction lock without blocking: if the acquire fails, a
    # transaction is in progress, and queries are not allowed inside one.
    if not self.__tx_lock.acquire(False):
      raise apiproxy_errors.ApplicationError(
          datastore_pb.Error.BAD_REQUEST, 'Can\'t query inside a transaction.')
    else:
      self.__tx_lock.release()

    if query.has_offset() and query.offset() > _MAX_QUERY_OFFSET:
      raise apiproxy_errors.ApplicationError(
          datastore_pb.Error.BAD_REQUEST, 'Too big query offset.')

    num_components = len(query.filter_list()) + len(query.order_list())
    if query.has_ancestor():
      num_components += 1
    if num_components > _MAX_QUERY_COMPONENTS:
      raise apiproxy_errors.ApplicationError(
          datastore_pb.Error.BAD_REQUEST,
          ('query is too large. filters + sort orders + ancestor may not'
           ' exceed %s in total' % _MAX_QUERY_COMPONENTS))

    app = query.app()

    if self.__require_indexes:
      (required, kind, ancestor, props,
       num_eq_filters) = datastore_index.CompositeIndexForQuery(query)
      if required:
        required_key = kind, ancestor, props
        indexes = self.__indexes.get(app)
        if not indexes:
          raise apiproxy_errors.ApplicationError(
              datastore_pb.Error.NEED_INDEX,
              "This query requires a composite index, but none are defined. "
              "You must create an index.yaml file in your application root.")
        eq_filters_set = set(props[:num_eq_filters])
        remaining_filters = props[num_eq_filters:]
        for index in indexes:
          definition = datastore_admin.ProtoToIndexDefinition(index)
          index_key = datastore_index.IndexToKey(definition)
          if required_key == index_key:
            break
          if num_eq_filters > 1 and (kind, ancestor) == index_key[:2]:
            this_props = index_key[2]
            this_eq_filters_set = set(this_props[:num_eq_filters])
            this_remaining_filters = this_props[num_eq_filters:]
            if (eq_filters_set == this_eq_filters_set and
                remaining_filters == this_remaining_filters):
              break
        else:
          # for/else: the loop exhausted all indexes without a break, so no
          # defined composite index matches this query.
          raise apiproxy_errors.ApplicationError(
              datastore_pb.Error.NEED_INDEX,
              "This query requires a composite index that is not defined. "
              "You must update the index.yaml file in your application root.")

    try:
      query.set_app(app)
      results = self.__entities[app, query.kind()].values()
      results = [entity.native for entity in results]
    except KeyError:
      results = []

    if query.has_ancestor():
      ancestor_path = query.ancestor().path().element_list()
      def is_descendant(entity):
        # Reach into the Key's private reference to compare raw paths: an
        # entity is a descendant iff its path starts with the ancestor path.
        path = entity.key()._Key__reference.path().element_list()
        return path[:len(ancestor_path)] == ancestor_path
      results = filter(is_descendant, results)

    operators = {datastore_pb.Query_Filter.LESS_THAN:             '<',
                 datastore_pb.Query_Filter.LESS_THAN_OR_EQUAL:    '<=',
                 datastore_pb.Query_Filter.GREATER_THAN:          '>',
                 datastore_pb.Query_Filter.GREATER_THAN_OR_EQUAL: '>=',
                 datastore_pb.Query_Filter.EQUAL:                 '==',
                 }

    for filt in query.filter_list():
      # IN filters are expanded into multiple EQUAL queries by the
      # higher-level datastore API, so they should never reach the stub.
      assert filt.op() != datastore_pb.Query_Filter.IN

      prop = filt.property(0).name().decode('utf-8')
      op = operators[filt.op()]

      filter_val_list = [datastore_types.FromPropertyPb(filter_prop)
                         for filter_prop in filt.property_list()]

      def passes(entity):
        """ Returns True if the entity passes the filter, False otherwise. """
        if prop in datastore_types._SPECIAL_PROPERTIES:
          entity_vals = self.__GetSpecialPropertyValue(entity, prop)
        else:
          entity_vals = entity.get(prop, [])

        if not isinstance(entity_vals, list):
          entity_vals = [entity_vals]

        for fixed_entity_val in entity_vals:
          if type(fixed_entity_val) in datastore_types._RAW_PROPERTY_TYPES:
            continue

          for filter_val in filter_val_list:
            fixed_entity_type = self._PROPERTY_TYPE_TAGS.get(
              fixed_entity_val.__class__)
            filter_type = self._PROPERTY_TYPE_TAGS.get(filter_val.__class__)
            if fixed_entity_type == filter_type:
              comp = u'%r %s %r' % (fixed_entity_val, op, filter_val)
            elif op != '==':
              comp = '%r %s %r' % (fixed_entity_type, op, filter_type)
            else:
              continue

            logging.log(logging.DEBUG - 1,
                        'Evaling filter expression "%s"', comp)

            try:
              ret = eval(comp)
              # NotImplemented, not NotImplementedError, marks unorderable
              # comparison results.
              if ret and ret is not NotImplemented:
                return True
            except TypeError:
              pass

        return False

      results = filter(passes, results)

    def has_prop_indexed(entity, prop):
      """Returns True if prop is in the entity and is not a raw property, or
      is a special property."""
      if prop in datastore_types._SPECIAL_PROPERTIES:
        return True

      values = entity.get(prop, [])
      if not isinstance(values, (tuple, list)):
        values = [values]

      for value in values:
        if type(value) not in datastore_types._RAW_PROPERTY_TYPES:
          return True
      return False

    for order in query.order_list():
      prop = order.property().decode('utf-8')
      results = [entity for entity in results
                 if has_prop_indexed(entity, prop)]

    def order_compare_entities(a, b):
      """ Return a negative, zero or positive number depending on whether
      entity a is considered smaller than, equal to, or larger than b,
      according to the query's orderings. """
      cmped = 0
      for o in query.order_list():
        prop = o.property().decode('utf-8')

        reverse = (o.direction() == datastore_pb.Query_Order.DESCENDING)

        if prop in datastore_types._SPECIAL_PROPERTIES:
          a_val = self.__GetSpecialPropertyValue(a, prop)
          b_val = self.__GetSpecialPropertyValue(b, prop)
        else:
          a_val = a[prop]
          if isinstance(a_val, list):
            a_val = sorted(a_val, order_compare_properties, reverse=reverse)[0]

          b_val = b[prop]
          if isinstance(b_val, list):
            b_val = sorted(b_val, order_compare_properties, reverse=reverse)[0]

        cmped = order_compare_properties(a_val, b_val)

        if reverse:
          cmped = -cmped

        if cmped != 0:
          return cmped

      if cmped == 0:
        return cmp(a.key(), b.key())

    def order_compare_properties(x, y):
      """Return a negative, zero or positive number depending on whether
      property value x is considered smaller than, equal to, or larger than
      property value y. If x and y are different types, they're compared based
      on the type ordering used in the real datastore, which is based on the
      tag numbers in the PropertyValue PB.
      """
      if isinstance(x, datetime.datetime):
        x = datastore_types.DatetimeToTimestamp(x)
      if isinstance(y, datetime.datetime):
        y = datastore_types.DatetimeToTimestamp(y)

      x_type = self._PROPERTY_TYPE_TAGS.get(x.__class__)
      y_type = self._PROPERTY_TYPE_TAGS.get(y.__class__)

      if x_type == y_type:
        try:
          return cmp(x, y)
        except TypeError:
          return 0
      else:
        return cmp(x_type, y_type)

    results.sort(order_compare_entities)

    offset = 0
    limit = len(results)
    if query.has_offset():
      offset = query.offset()
    if query.has_limit():
      limit = query.limit()
    if limit > _MAXIMUM_RESULTS:
      limit = _MAXIMUM_RESULTS
    results = results[offset:limit + offset]

    clone = datastore_pb.Query()
    clone.CopyFrom(query)
    clone.clear_hint()
    if clone in self.__query_history:
      self.__query_history[clone] += 1
    else:
      self.__query_history[clone] = 1
    self.__WriteHistory()

    self.__cursor_lock.acquire()
    cursor = self.__next_cursor
    self.__next_cursor += 1
    self.__cursor_lock.release()
    self.__queries[cursor] = (results, len(results))

    query_result.mutable_cursor().set_cursor(cursor)
    query_result.set_more_results(len(results) > 0)
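
    # The cursor handed back is a plain integer allocated under
    # __cursor_lock; it maps to the (results, total count) pair stored in
    # self.__queries and is consumed by subsequent Next calls.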