def _get_entities(ds_access, kind, namespace, order, start, count):
  """Fetches a page of entities of a kind along with the total match count.

  Args:
    ds_access: A DatastoreDistributed client.
    kind: A string, the kind name of the entities to return.
    namespace: A string, the namespace of the entities to return.
    order: A string naming the property to sort results by. A "-" prefix
      indicates descending order, e.g. "-age".
    start: The number of initial entities to skip in the result set.
    count: The maximum number of entities to return.

  Returns:
    A tuple of (list of datastore.Entity, total entity count).
  """
  query = datastore_pb.Query()
  query.set_name_space(namespace)
  query.set_app(ds_access.project_id)
  query.set_kind(kind)
  query.set_compile(True)

  if order:
    query_order = query.add_order()
    if order.startswith('-'):
      query_order.set_direction(datastore_pb.Query_Order.DESCENDING)
      query_order.set_property(order[1:])
    else:
      query_order.set_direction(datastore_pb.Query_Order.ASCENDING)
      query_order.set_property(order)

  # A zero-limit copy of the query with a large offset reports the number
  # of matching entities via skipped_results.
  count_query = datastore_pb.Query()
  count_query.CopyFrom(query)
  count_query.set_offset(1000)
  count_query.set_limit(0)
  count_result = datastore_pb.QueryResult()
  ds_access._Dynamic_RunQuery(count_query, count_result)
  total = count_result.skipped_results()

  # Now fetch the requested page.
  query.set_limit(count)
  query.set_offset(start)
  page_result = datastore_pb.QueryResult()
  ds_access._Dynamic_RunQuery(query, page_result)
  entities = [datastore.Entity.FromPb(entity_pb)
              for entity_pb in page_result.result_list()]
  return entities, total
def _Dynamic_Count(self, query, integer64proto):
  """Counts query results by running the query and reading its cursor state.

  The reported value is capped at _MAXIMUM_RESULTS. The cursor created by
  the query is discarded afterwards.
  """
  query_result = datastore_pb.QueryResult()
  self._Dynamic_RunQuery(query, query_result)
  cursor_id = query_result.cursor().cursor()
  capped_count = min(self.__queries[cursor_id].count, _MAXIMUM_RESULTS)
  integer64proto.set_value(capped_count)
  del self.__queries[cursor_id]
def run_query(self, query):
  """Executes a datastore query and returns the decoded entities.

  Yields on the underlying RPC; the final entity list is delivered through
  gen.Return (Tornado coroutine convention).
  """
  encoded_response = yield self._make_request('RunQuery',
                                              query._ToPb().Encode())
  response_pb = datastore_pb.QueryResult(encoded_response)
  entities = [Entity.FromPb(pb) for pb in response_pb.result_list()]
  raise gen.Return(entities)
def _Dynamic_RunQuery(self, req, resp):
  """Handles a v1 RunQuery request by converting it to a v3 query.

  Validation or conversion failures surface as BAD_REQUEST; a failure to
  convert the v3 response back to v1 surfaces as INTERNAL_ERROR.
  """
  self.__normalize_v1_run_query_request(req)
  try:
    self.__service_validator.validate_run_query_req(req)
    v3_req = self.__service_converter.v1_run_query_req_to_v3_query(req)
  except (datastore_pbs.InvalidConversionError,
          cloud_datastore_validator.ValidationError) as e:
    raise apiproxy_errors.ApplicationError(
        datastore_pb.Error.BAD_REQUEST, str(e))
  v3_resp = datastore_pb.QueryResult()
  self.__make_v3_call('RunQuery', v3_req, v3_resp)
  try:
    v1_resp = self.__service_converter.v3_to_v1_run_query_resp(v3_resp)
    projection = req.query.projection
    if projection:
      key_only = (len(projection) == 1
                  and projection[0].property.name == '__key__')
      v1_resp.batch.entity_result_type = (
          googledatastore.EntityResult.KEY_ONLY if key_only
          else googledatastore.EntityResult.PROJECTION)
  except datastore_pbs.InvalidConversionError as e:
    raise apiproxy_errors.ApplicationError(
        datastore_pb.Error.INTERNAL_ERROR, str(e))
  resp.CopyFrom(v1_resp)
def _Dynamic_Count(self, query, integer64proto):
  """Runs the query and reports the stored result count for its cursor.

  The (results, count) pair cached for the cursor is consumed and removed.
  """
  query_result = datastore_pb.QueryResult()
  self._Dynamic_RunQuery(query, query_result)
  cursor_id = query_result.cursor().cursor()
  _, total = self.__queries[cursor_id]
  integer64proto.set_value(total)
  del self.__queries[cursor_id]
def _Dynamic_RunQuery(self, req, resp):
  """Handles a v4 RunQuery request by delegating to the v3 service.

  Validation and request-conversion errors map to BAD_REQUEST; response
  conversion errors map to INTERNAL_ERROR.
  """
  try:
    self.__normalize_v4_run_query_request(req)
    self.__service_validator.validate_run_query_req(req)
    v3_req = self.__service_converter.v4_run_query_req_to_v3_query(req)
    v3_resp = datastore_pb.QueryResult()
    self.__make_v3_call('RunQuery', v3_req, v3_resp)
  except (datastore_pbs.InvalidConversionError,
          datastore_v4_validator.ValidationError) as e:
    raise apiproxy_errors.ApplicationError(
        datastore_v4_pb.Error.BAD_REQUEST, str(e))
  try:
    v4_resp = self.__service_converter.v3_to_v4_run_query_resp(v3_resp)
    projection = req.query().projection_list()
    if projection:
      result_type = (datastore_v4_pb.EntityResult.KEY_ONLY
                     if projection == ['__key__']
                     else datastore_v4_pb.EntityResult.PROJECTION)
      v4_resp.mutable_batch().set_entity_result_type(result_type)
  except datastore_pbs.InvalidConversionError as e:
    raise apiproxy_errors.ApplicationError(
        datastore_v4_pb.Error.INTERNAL_ERROR, str(e))
  resp.CopyFrom(v4_resp)
def run_query(self, query):
  """Runs a query, following continuation batches until exhausted.

  Fetches the first batch, then keeps advancing the offset and re-issuing
  RunQuery while the server reports more results and the previous batch
  was full (>= 100 results). Delivers all entities via gen.Return.
  """
  query_pb = query._ToPb()
  encoded_response = yield self._make_request('RunQuery', query_pb.Encode())
  batch = datastore_pb.QueryResult(encoded_response)
  entities = [Entity.FromPb(pb) for pb in batch.result_list()]
  while batch.has_more_results() and len(batch.result_list()) >= 100:
    query_pb.set_offset(query_pb.offset() + len(batch.result_list()))
    encoded_response = yield self._make_request('RunQuery',
                                                query_pb.Encode())
    batch = datastore_pb.QueryResult(encoded_response)
    entities.extend(Entity.FromPb(pb) for pb in batch.result_list())
  raise gen.Return(entities)
def _Dynamic_ContinueQuery(self, req, resp):
  """Handles a v4 ContinueQuery request by issuing a v3 Next call.

  Args:
    req: The v4 ContinueQuery request.
    resp: The v4 ContinueQuery response to populate.

  Raises:
    apiproxy_errors.ApplicationError: BAD_REQUEST when the request cannot
      be converted to a v3 Next request.
  """
  try:
    self.__service_validator.validate_continue_query_req(req)
    v3_req = self.__service_converter.v4_to_v3_next_req(req)
    v3_resp = datastore_pb.QueryResult()
    self.__make_v3_call('Next', v3_req, v3_resp)
  # `except ... as e` replaces the Python 2-only `except ..., e` form,
  # matching the style used by the other handlers in this file.
  except datastore_pbs.InvalidConversionError as e:
    raise apiproxy_errors.ApplicationError(
        datastore_v4_pb.Error.BAD_REQUEST, str(e))
  # NOTE(review): nothing is copied into `resp` here — presumably the v3
  # response still needs converting to v4; confirm against the full
  # RunQuery handler flow.
def _Dynamic_Count(self, query, integer64proto):
  """Counts results for a query, honoring an explicit limit if present.

  Runs the query with a count hint (the query's limit, or
  _MAXIMUM_RESULTS) and reads the total from the resulting cursor, which
  is then discarded.
  """
  self.__ValidateAppId(query.app())
  limit = query.limit() if query.has_limit() else _MAXIMUM_RESULTS
  query_result = datastore_pb.QueryResult()
  self._Dynamic_RunQuery(query, query_result, count=limit)
  cursor_id = query_result.cursor().cursor()
  integer64proto.set_value(self.__queries[cursor_id].count)
  del self.__queries[cursor_id]
def _Dynamic_RunQuery(self, req, resp):
  """Converts a v4 RunQuery request to v3 and executes it.

  Args:
    req: The v4 RunQuery request.
    resp: The v4 RunQuery response to populate.

  Raises:
    apiproxy_errors.ApplicationError: BAD_REQUEST if the request cannot be
      converted to a v3 query.
  """
  try:
    self.__normalize_v4_run_query_request(req)
    self.__service_validator.validate_run_query_req(req)
    v3_req = self.__service_converter.v4_run_query_req_to_v3_query(req)
    v3_resp = datastore_pb.QueryResult()
    self.__make_v3_call('RunQuery', v3_req, v3_resp)
  # `except ... as e` replaces the Python 2-only `except ..., e` form,
  # matching the style used by the other handlers in this file.
  except datastore_pbs.InvalidConversionError as e:
    raise apiproxy_errors.ApplicationError(
        datastore_v4_pb.Error.BAD_REQUEST, str(e))
  # NOTE(review): the v3 response is not converted into `resp` here —
  # compare with the fuller v4 handler that builds a v4 batch; confirm
  # whether the conversion step was dropped.
def _Dynamic_RunQuery(self, req, resp):
  """Handles a v4 RunQuery, routing geospatial filters to the local v3 stub.

  If the registered v3 stub is a BaseDatastore and the request contains
  geospatial filters, the non-geospatial part of the query runs locally
  with the geospatial predicate applied in-process; otherwise the whole
  query is forwarded through the normal v3 call path.
  """
  try:
    self.__normalize_v4_run_query_request(req)
    self.__service_validator.validate_run_query_req(req)
    v3_stub = apiproxy_stub_map.apiproxy.GetStub(V3_SERVICE_NAME)
    split_req, geo_predicate = self._SplitGeospatialFilters(req)
    v3_resp = datastore_pb.QueryResult()
    supports_predicate = issubclass(v3_stub.__class__,
                                    datastore_stub_util.BaseDatastore)
    if supports_predicate and geo_predicate is not None:
      v3_req = self.__service_converter.v4_run_query_req_to_v3_query(
          split_req)
      v3_stub._Dynamic_RunQuery(v3_req, v3_resp, geo_predicate)
    else:
      v3_req = self.__service_converter.v4_run_query_req_to_v3_query(req)
      self.__make_v3_call('RunQuery', v3_req, v3_resp)
  except (datastore_pbs.InvalidConversionError,
          datastore_v4_validator.ValidationError) as e:
    raise apiproxy_errors.ApplicationError(
        datastore_v4_pb.Error.BAD_REQUEST, str(e))
  try:
    v4_resp = self.__service_converter.v3_to_v4_run_query_resp(v3_resp)
    projection = req.query().projection_list()
    if projection:
      result_type = (datastore_v4_pb.EntityResult.KEY_ONLY
                     if projection == ['__key__']
                     else datastore_v4_pb.EntityResult.PROJECTION)
      v4_resp.mutable_batch().set_entity_result_type(result_type)
  except datastore_pbs.InvalidConversionError as e:
    raise apiproxy_errors.ApplicationError(
        datastore_v4_pb.Error.INTERNAL_ERROR, str(e))
  resp.CopyFrom(v4_resp)
def _Dynamic_Count(self, query, integer64proto):
  """Counts query results, clamping to the query limit when one is set.

  A cursor id of 0 means the query exited early with no results.
  """
  query_result = datastore_pb.QueryResult()
  self._Dynamic_RunQuery(query, query_result)
  cursor_id = query_result.cursor().cursor()
  if cursor_id == 0:
    integer64proto.set_value(0)
    return
  cursor = self.__queries.pop(cursor_id)
  total = cursor.count()
  if query.has_limit() and total > query.limit():
    total = query.limit()
  integer64proto.set_value(total)
def _make_query_result_rpc_call(self, name, config, req):
  """Makes either a RunQuery or Next call that will modify the instance.

  Args:
    name: A string, the name of the call to invoke.
    config: The datastore_rpc.Configuration to use for the call.
    req: The request to send with the call.

  Returns:
    A UserRPC object that can be used to fetch the result of the RPC.
  """
  response = datastore_pb.QueryResult()
  return self.__conn.make_rpc_call(config, name, req, response,
                                   self.__query_result_hook)
def _Dynamic_RunQuery(self, request, response):
  """Handle a RunQuery request.

  We handle RunQuery by executing a Query and a Next and returning the
  result of the Next request.
  """
  first_result = datastore_pb.QueryResult()
  apiproxy_stub_map.MakeSyncCall('datastore_v3', 'RunQuery', request,
                                 first_result)
  next_req = datastore_pb.NextRequest()
  next_req.mutable_cursor().CopyFrom(first_result.cursor())
  next_req.set_count(request.limit())
  apiproxy_stub_map.MakeSyncCall('datastore_v3', 'Next', next_req,
                                 response)
def _Dynamic_RunQuery(self, req, resp):
  """Handles a v4 RunQuery, using the local v3 stub for geospatial filters.

  If the v3 stub is a BaseDatastore and the request carries geospatial
  filters, the split query runs on the local stub with the predicate
  applied; otherwise the original query goes through the v3 call path.

  Raises:
    apiproxy_errors.ApplicationError: BAD_REQUEST on conversion failure.
  """
  try:
    self.__normalize_v4_run_query_request(req)
    self.__service_validator.validate_run_query_req(req)
    v3_stub = apiproxy_stub_map.apiproxy.GetStub(V3_SERVICE_NAME)
    new_req, filter_predicate = self._SplitGeospatialFilters(req)
    if (issubclass(v3_stub.__class__, datastore_stub_util.BaseDatastore)
        and filter_predicate is not None):
      # Local stub supports filter predicates, so run the split query there.
      v3_req = self.__service_converter.v4_run_query_req_to_v3_query(
          new_req)
      v3_resp = datastore_pb.QueryResult()
      v3_stub._Dynamic_RunQuery(v3_req, v3_resp, filter_predicate)
    else:
      v3_req = self.__service_converter.v4_run_query_req_to_v3_query(req)
      v3_resp = datastore_pb.QueryResult()
      self.__make_v3_call('RunQuery', v3_req, v3_resp)
  # `except ... as e` replaces the Python 2-only `except ..., e` form,
  # matching the style used by the other handlers in this file.
  except datastore_pbs.InvalidConversionError as e:
    raise apiproxy_errors.ApplicationError(
        datastore_v4_pb.Error.BAD_REQUEST, str(e))
def _Dynamic_RunQuery(self, request, response):
  """Handle a RunQuery request.

  We handle RunQuery by executing a Query and a Next and returning the
  result of the Next request. This method is DEPRECATED, but left in
  place for older clients.
  """
  first_batch = datastore_pb.QueryResult()
  self.__call('datastore_v3', 'RunQuery', request, first_batch)
  if first_batch.result_size() > 0:
    # Results arrived with the initial batch; no Next call needed.
    response.CopyFrom(first_batch)
    return
  next_req = datastore_pb.NextRequest()
  next_req.mutable_cursor().CopyFrom(first_batch.cursor())
  next_req.set_count(request.limit())
  self.__call('datastore_v3', 'Next', next_req, response)
def next_callback(rpc, entities, exception, callback=None):
  """Callback for a datastore Next RPC.

  Trims the batch to the requested count, accumulates entities into
  `entities`, and chains another Next RPC while more results remain.
  Errors are collected into `exception` rather than raised.

  Args:
    rpc: The completed RPC whose request is a NextRequest and whose
      response is a QueryResult.
    entities: A mutable list that accumulates fetched entities.
    exception: A mutable list that collects datastore/apiproxy errors.
    callback: Optional callable invoked with `rpc` when this step is done.
  """
  try:
    assert isinstance(
        rpc.request, datastore_pb.NextRequest), "request should be a query"
    assert isinstance(
        rpc.response,
        datastore_pb.QueryResult), "response should be a QueryResult"
    result = next_rpc_handler(rpc)
    entity_list = process_query_result(result)
    count = rpc.request.count()
    if len(entity_list) > count:
      del entity_list[count:]
    entities += entity_list
    if result.more_results() and len(entity_list) < count:
      # Chain another Next RPC for the remaining results.
      remaining = count - len(entity_list)
      req = datastore_pb.NextRequest()
      req.set_count(remaining)
      req.mutable_cursor().CopyFrom(rpc.response.cursor())
      next_result = datastore_pb.QueryResult()
      nextrpc = create_rpc(deadline=rpc.deadline)
      nextrpc.callback = lambda: next_callback(
          nextrpc, entities, exception, callback=callback)
      nextrpc.make_call('Next', req, next_result)
      # Fix: the original appended `nextrpc` to rpc.runner unconditionally
      # AND again inside the `if` below, queueing the same RPC twice when
      # a runner was present. Append exactly once.
      if rpc.runner:
        rpc.runner.append(nextrpc)
      else:
        nextrpc.Wait()
  # `except ... as exp` replaces the Python 2-only `except ..., exp` form.
  except (datastore_errors.Error, apiproxy_errors.Error) as exp:
    logging.debug("Exception (Next):%s", exp)
    exception.append(exp)
  if callback:
    callback(rpc)
def _get_kinds(cls, ds_access, namespace):
  """ Returns a sorted list of kind names present in the given namespace.

  Args:
    ds_access: A DatastoreDistributed client.
    namespace: A string specifying the datastore namespace.

  Returns:
    A list of string specifying kind names.
  """
  assert namespace is not None
  kind_query = datastore_pb.Query()
  kind_query.set_name_space(namespace)
  kind_query.set_app(ds_access.project_id)
  kind_query.set_kind('__kind__')
  query_result = datastore_pb.QueryResult()
  ds_access._Dynamic_RunQuery(kind_query, query_result)
  # Each __kind__ entity's key path encodes the kind name in its first
  # element.
  return sorted(entity.key().path().element(0).name()
                for entity in query_result.result_list())
def _Dynamic_RunQuery(self, query, query_result):
  """Runs a query against the remote datastore and populates query_result.

  The query is forwarded via _RemoteSend; the returned entities are then
  re-filtered for ancestry, pruned for unindexed order properties,
  re-sorted locally, and paginated behind a cursor stored in
  self.__queries.

  Args:
    query: A datastore_pb.Query request.
    query_result: A datastore_pb.QueryResult to populate.

  Raises:
    apiproxy_errors.ApplicationError: BAD_REQUEST for a non-ancestor
      query inside a transaction.
  """
  # Transactions only permit ancestor queries.
  if query.has_transaction():
    if not query.has_ancestor():
      raise apiproxy_errors.ApplicationError(
          datastore_pb.Error.BAD_REQUEST,
          'Only ancestor queries are allowed inside transactions.')
  (filters, orders) = datastore_index.Normalize(query.filter_list(),
                                                query.order_list())
  datastore_stub_util.FillUsersInQuery(filters)
  query_response = datastore_pb.QueryResult()
  query.set_app(self.__app_id)
  self._RemoteSend(query, query_response, "RunQuery")
  results = query_response.result_list()
  results = [datastore.Entity._FromPb(r) for r in results]
  # Re-check ancestry locally: keep only entities whose key path starts
  # with the ancestor's full path.
  if query.has_ancestor():
    ancestor_path = query.ancestor().path().element_list()
    def is_descendant(entity):
      path = entity.key()._Key__reference.path().element_list()
      return path[:len(ancestor_path)] == ancestor_path
    results = filter(is_descendant, results)
  operators = {
      datastore_pb.Query_Filter.LESS_THAN: '<',
      datastore_pb.Query_Filter.LESS_THAN_OR_EQUAL: '<=',
      datastore_pb.Query_Filter.GREATER_THAN: '>',
      datastore_pb.Query_Filter.GREATER_THAN_OR_EQUAL: '>=',
      datastore_pb.Query_Filter.EQUAL: '==',
  }

  def has_prop_indexed(entity, prop):
    """Returns True if prop is in the entity and is indexed."""
    if prop in datastore_types._SPECIAL_PROPERTIES:
      return True
    elif prop in entity.unindexed_properties():
      return False
    values = entity.get(prop, [])
    if not isinstance(values, (tuple, list)):
      values = [values]
    for value in values:
      if type(value) not in datastore_types._RAW_PROPERTY_TYPES:
        return True
    return False

  # NOTE(review): this loop only decodes each filter's operator and
  # values into locals; the decoded values are never applied to
  # `results` here — confirm whether filtering happens server-side.
  for filt in filters:
    assert filt.op() != datastore_pb.Query_Filter.IN
    prop = filt.property(0).name().decode('utf-8')
    op = operators[filt.op()]
    filter_val_list = [
        datastore_types.FromPropertyPb(filter_prop)
        for filter_prop in filt.property_list()
    ]
  # Entities lacking an indexed value for an ordered property are dropped,
  # mirroring real datastore behavior.
  for order in orders:
    prop = order.property().decode('utf-8')
    results = [
        entity for entity in results if has_prop_indexed(entity, prop)
    ]

  def order_compare_entities(a, b):
    """Return a negative, zero or positive number depending on whether
    entity a is considered smaller than, equal to, or larger than b,
    according to the query's orderings.
    """
    cmped = 0
    for o in orders:
      prop = o.property().decode('utf-8')
      reverse = (o.direction() is datastore_pb.Query_Order.DESCENDING)
      a_val = datastore._GetPropertyValue(a, prop)
      if isinstance(a_val, list):
        a_val = sorted(a_val, order_compare_properties, reverse=reverse)[0]
      b_val = datastore._GetPropertyValue(b, prop)
      if isinstance(b_val, list):
        b_val = sorted(b_val, order_compare_properties, reverse=reverse)[0]
      cmped = order_compare_properties(a_val, b_val)
      if o.direction() is datastore_pb.Query_Order.DESCENDING:
        cmped = -cmped
      if cmped != 0:
        return cmped
    if cmped == 0:
      # Tie-break on the key so the ordering is fully deterministic.
      return cmp(a.key(), b.key())

  def order_compare_entities_pb(a, b):
    """Return a negative, zero or positive number depending on whether
    entity a is considered smaller than, equal to, or larger than b,
    according to the query's orderings. a and b are protobuf-encoded
    entities."""
    return order_compare_entities(datastore.Entity.FromPb(a),
                                  datastore.Entity.FromPb(b))

  def order_compare_properties(x, y):
    """Return a negative, zero or positive number depending on whether
    property value x is considered smaller than, equal to, or larger
    than property value y. If x and y are different types, they're
    compared based on the type ordering used in the real datastore,
    which is based on the tag numbers in the PropertyValue PB.
    """
    if isinstance(x, datetime.datetime):
      x = datastore_types.DatetimeToTimestamp(x)
    if isinstance(y, datetime.datetime):
      y = datastore_types.DatetimeToTimestamp(y)
    x_type = self._PROPERTY_TYPE_TAGS.get(x.__class__)
    y_type = self._PROPERTY_TYPE_TAGS.get(y.__class__)
    if x_type == y_type:
      try:
        return cmp(x, y)
      except TypeError:
        return 0
    else:
      return cmp(x_type, y_type)

  # Python 2 list.sort with a comparison function.
  results.sort(order_compare_entities)
  clone = datastore_pb.Query()
  clone.CopyFrom(query)
  clone.clear_hint()
  clone.clear_limit()
  clone.clear_offset()
  #if clone in self.__query_history:
  #  self.__query_history[clone] += 1
  #else:
  #  self.__query_history[clone] = 1
  results = [r._ToPb() for r in results]
  for result in results:
    datastore_stub_util.PrepareSpecialPropertiesForLoad(result)
  datastore_stub_util.ValidateQuery(query, filters, orders,
                                    _MAX_QUERY_COMPONENTS)
  cursor = datastore_stub_util.ListCursor(query, results,
                                          order_compare_entities_pb)
  self.__queries[cursor.cursor] = cursor
  # Batch size: explicit count, else the query limit, else the default.
  if query.has_count():
    count = query.count()
  elif query.has_limit():
    count = query.limit()
  else:
    count = _BATCH_SIZE
  cursor.PopulateQueryResult(query_result, count, query.offset(),
                             compile=query.compile())
  if query.compile():
    compiled_query = query_result.mutable_compiled_query()
    compiled_query.set_keys_only(query.keys_only())
    compiled_query.mutable_primaryscan().set_index_name(query.Encode())
datastore_pb.Error.BAD_REQUEST, str(e)) def _Dynamic_RunQuery(self, req, resp): self.__normalize_v1_run_query_request(req) try: self.__service_validator.validate_run_query_req(req) v3_req = self.__service_converter.v1_run_query_req_to_v3_query(req) except datastore_pbs.InvalidConversionError, e: raise apiproxy_errors.ApplicationError( datastore_pb.Error.BAD_REQUEST, str(e)) except cloud_datastore_validator.ValidationError, e: raise apiproxy_errors.ApplicationError( datastore_pb.Error.BAD_REQUEST, str(e)) v3_resp = datastore_pb.QueryResult() self.__make_v3_call('RunQuery', v3_req, v3_resp) try: v1_resp = self.__service_converter.v3_to_v1_run_query_resp(v3_resp) if req.query.projection: if (len(req.query.projection) == 1 and req.query.projection[0].property.name == '__key__'): result_type = googledatastore.EntityResult.KEY_ONLY else: result_type = googledatastore.EntityResult.PROJECTION v1_resp.batch.entity_result_type = result_type except datastore_pbs.InvalidConversionError, e: raise apiproxy_errors.ApplicationError( datastore_pb.Error.INTERNAL_ERROR, str(e)) resp.CopyFrom(v1_resp)
def _Dynamic_RunQuery(self, query, query_result):
  """Send a query request to the datastore server.

  Forwards the query via _RemoteSend, then wraps the returned entities in
  a time-stamped ListCursor (old_datastore_stub_util) and populates
  query_result from it, preserving the server-reported skipped_results.

  Args:
    query: A datastore_pb.Query request.
    query_result: A datastore_pb.QueryResult to populate.

  Raises:
    apiproxy_errors.ApplicationError: BAD_REQUEST for a non-ancestor
      query inside a transaction.
  """
  # Transactions only permit ancestor queries.
  if query.has_transaction():
    if not query.has_ancestor():
      raise apiproxy_errors.ApplicationError(
          datastore_pb.Error.BAD_REQUEST,
          'Only ancestor queries are allowed inside transactions.')
  (filters, orders) = datastore_index.Normalize(query.filter_list(),
                                                query.order_list(), [])
  old_datastore_stub_util.FillUsersInQuery(filters)
  query_response = datastore_pb.QueryResult()
  if not query.has_app():
    query.set_app(self.__app_id)
  self.__ValidateAppId(query.app())
  self._RemoteSend(query, query_response, "RunQuery")
  # Remember the server-reported skip count so it can be re-applied to
  # the locally built result below.
  skipped_results = 0
  if query_response.has_skipped_results():
    skipped_results = query_response.skipped_results()

  # NOTE(review): has_prop_indexed is defined but not referenced in this
  # version of the handler — possibly left over from a sibling
  # implementation that prunes unindexed order properties.
  def has_prop_indexed(entity, prop):
    """Returns True if prop is in the entity and is indexed."""
    if prop in datastore_types._SPECIAL_PROPERTIES:
      return True
    elif prop in entity.unindexed_properties():
      return False
    values = entity.get(prop, [])
    if not isinstance(values, (tuple, list)):
      values = [values]
    for value in values:
      if type(value) not in datastore_types._RAW_PROPERTY_TYPES:
        return True
    return False

  def order_compare_entities(a, b):
    """Return a negative, zero or positive number depending on whether
    entity a is considered smaller than, equal to, or larger than b,
    according to the query's orderings.
    """
    cmped = 0
    for o in orders:
      prop = o.property().decode('utf-8')
      reverse = (o.direction() is datastore_pb.Query_Order.DESCENDING)
      a_val = datastore._GetPropertyValue(a, prop)
      if isinstance(a_val, list):
        a_val = sorted(a_val, order_compare_properties, reverse=reverse)[0]
      b_val = datastore._GetPropertyValue(b, prop)
      if isinstance(b_val, list):
        b_val = sorted(b_val, order_compare_properties, reverse=reverse)[0]
      cmped = order_compare_properties(a_val, b_val)
      if o.direction() is datastore_pb.Query_Order.DESCENDING:
        cmped = -cmped
      if cmped != 0:
        return cmped
    if cmped == 0:
      # Tie-break on the key so the ordering is fully deterministic.
      return cmp(a.key(), b.key())

  def order_compare_entities_pb(a, b):
    """Return a negative, zero or positive number depending on whether
    entity a is considered smaller than, equal to, or larger than b,
    according to the query's orderings. a and b are protobuf-encoded
    entities."""
    return order_compare_entities(datastore.Entity.FromPb(a),
                                  datastore.Entity.FromPb(b))

  def order_compare_properties(x, y):
    """Return a negative, zero or positive number depending on whether
    property value x is considered smaller than, equal to, or larger
    than property value y. If x and y are different types, they're
    compared based on the type ordering used in the real datastore,
    which is based on the tag numbers in the PropertyValue PB.
    """
    if isinstance(x, datetime.datetime):
      x = datastore_types.DatetimeToTimestamp(x)
    if isinstance(y, datetime.datetime):
      y = datastore_types.DatetimeToTimestamp(y)
    x_type = self._PROPERTY_TYPE_TAGS.get(x.__class__)
    y_type = self._PROPERTY_TYPE_TAGS.get(y.__class__)
    if x_type == y_type:
      try:
        return cmp(x, y)
      except TypeError:
        return 0
    else:
      return cmp(x_type, y_type)

  results = query_response.result_list()
  for result in results:
    old_datastore_stub_util.PrepareSpecialPropertiesForLoad(result)
  old_datastore_stub_util.ValidateQuery(query, filters, orders,
                                        _MAX_QUERY_COMPONENTS)
  cursor = old_datastore_stub_util.ListCursor(query, results,
                                              order_compare_entities_pb)
  # Expire stale cursors, then store this one with its creation time.
  self.__cleanup_old_cursors()
  self.__queries[cursor.cursor] = cursor, datetime.datetime.now()
  # Batch size: explicit count, else the query limit, else the default.
  if query.has_count():
    count = query.count()
  elif query.has_limit():
    count = query.limit()
  else:
    count = _BATCH_SIZE
  cursor.PopulateQueryResult(query_result, count, query.offset(),
                             compile=query.compile())
  query_result.set_skipped_results(skipped_results)
  if query.compile():
    compiled_query = query_result.mutable_compiled_query()
    compiled_query.set_keys_only(query.keys_only())
    compiled_query.mutable_primaryscan().set_index_name(query.Encode())
def _Dynamic_RunQuery(self, req, resp):
  """Handles a v1 RunQuery, creating ad-hoc transactions when needed.

  A transaction is used when the caller asks for one ('new_transaction'
  or an explicit transaction id) or, implicitly, for strongly consistent
  ancestor queries. Any ad-hoc transaction this method creates is rolled
  back if the request fails.

  Args:
    req: The v1 RunQuery request.
    resp: The v1 RunQuery response to populate.

  Raises:
    apiproxy_errors.ApplicationError: BAD_REQUEST on validation or
      request-conversion failure; INTERNAL_ERROR if the v3 response
      cannot be converted back to v1.
  """
  self.__normalize_v1_run_query_request(req)
  snapshot_version = None
  txn = None
  # Tracks an ad-hoc transaction this method started, so it can be rolled
  # back on any failure below.
  txn_to_cleanup = None
  new_txn = None
  try:
    try:
      self.__service_validator.validate_run_query_req(req)
      if req.read_options.WhichOneof('consistency_type') == 'new_transaction':
        new_txn = self.__begin_adhoc_txn(req)
      v3_req = self.__service_converter.v1_run_query_req_to_v3_query(
          req, new_txn=new_txn)
      if new_txn:
        txn = new_txn
        txn_to_cleanup = new_txn
      elif req.read_options.transaction:
        # Caller supplied an existing transaction; nothing to clean up.
        txn = req.read_options.transaction
      elif (v3_req.has_ancestor() and
            req.read_options.read_consistency !=
            googledatastore.ReadOptions.EVENTUAL and
            v3_req.kind != '__property__'):
        # Strongly consistent ancestor query: run it in an ad-hoc
        # transaction so a snapshot version can be reported.
        txn = self.__begin_adhoc_txn(req)
        txn_to_cleanup = txn
        v3_req.transaction = txn
    except datastore_pbs.InvalidConversionError as e:
      raise apiproxy_errors.ApplicationError(datastore_pb.Error.BAD_REQUEST,
                                             str(e))
    except cloud_datastore_validator.ValidationError as e:
      raise apiproxy_errors.ApplicationError(datastore_pb.Error.BAD_REQUEST,
                                             str(e))
    v3_resp = datastore_pb.QueryResult()
    self.__make_v3_call('RunQuery', v3_req, v3_resp)
    if txn:
      # Look up a key that cannot exist ('__none__', id 1) inside the
      # transaction; the 'missing' entry carries the snapshot version.
      lookup = googledatastore.LookupRequest()
      lookup.project_id = req.partition_id.project_id
      lookup.database_id = req.partition_id.database_id
      lookup.read_options.transaction = txn
      key = lookup.keys.add()
      key.partition_id.CopyFrom(req.partition_id)
      # NOTE(review): this reads req.database_id, not
      # req.partition_id.database_id as three lines above — confirm which
      # field is intended.
      key.partition_id.database_id = req.database_id
      path = key.path.add()
      path.kind = '__none__'
      path.id = 1
      lookup_response = googledatastore.LookupResponse()
      self._Dynamic_Lookup(lookup, lookup_response)
      snapshot_version = lookup_response.missing[0].version
    try:
      v1_resp = self.__service_converter.v3_to_v1_run_query_resp(
          v3_resp, new_txn=new_txn)
      if req.query.projection:
        if (len(req.query.projection) == 1 and
            req.query.projection[0].property.name == '__key__'):
          result_type = googledatastore.EntityResult.KEY_ONLY
        else:
          result_type = googledatastore.EntityResult.PROJECTION
        v1_resp.batch.entity_result_type = result_type
      if snapshot_version:
        v1_resp.batch.snapshot_version = snapshot_version
    except datastore_pbs.InvalidConversionError as e:
      raise apiproxy_errors.ApplicationError(
          datastore_pb.Error.INTERNAL_ERROR, str(e))
  except:
    # Roll back any ad-hoc transaction before re-raising.
    if txn_to_cleanup:
      self.__rollback_adhoc_txn(req, txn_to_cleanup)
    raise
  resp.CopyFrom(v1_resp)
def _Dynamic_Count(self, query, integer64proto, request_id=None):
  """Get the number of entities for a query.

  Runs the query (forwarding request_id) and reports the size of the
  returned result set.
  """
  query_result = datastore_pb.QueryResult()
  self._Dynamic_RunQuery(query, query_result, request_id)
  integer64proto.set_value(query_result.result_size())
def _Dynamic_Count(self, query, integer64proto):
  """Runs the query and reports how many results it returned."""
  query_result = datastore_pb.QueryResult()
  self._Dynamic_RunQuery(query, query_result)
  integer64proto.set_value(query_result.result_size())