def test_stale_getmore(self):
    """A getMore aimed at a server no longer in the topology must fail.

    Simulates a cursor whose member goes down and is removed from the
    topology before the getMore message is sent, and verifies that
    MongoClient._send_message_with_response raises AutoReconnect.
    """
    with self.assertRaises(AutoReconnect):
        # connect=False keeps construction lazy; the short server
        # selection timeout makes the failure surface quickly.
        stale_client = MongoClient(
            host,
            port,
            connect=False,
            serverSelectionTimeoutMS=100,
            replicaSet=client_context.replica_set_name)
        stale_client._send_message_with_response(
            operation=message._GetMore('collection', 101, 1234),
            address=('not-a-member', 27017))
def _refresh(self): """Refreshes the cursor with more data from Mongo. Returns the length of self.__data after refresh. Will exit early if self.__data is already non-empty. Raises OperationFailure when the cursor cannot be refreshed due to an error on the query. """ if len(self.__data) or self.__killed: return len(self.__data) if self.__id is None: # Query ntoreturn = self.__batch_size if self.__limit: if self.__batch_size: ntoreturn = min(self.__limit, self.__batch_size) else: ntoreturn = self.__limit self.__send_message(_Query(self.__query_flags, self.__collection.database.name, self.__collection.name, self.__skip, ntoreturn, self.__query_spec(), self.__projection, self.__codec_options, self.__read_preference, self.__limit, self.__batch_size, self.__read_concern)) if not self.__id: self.__killed = True elif self.__id: # Get More if self.__limit: limit = self.__limit - self.__retrieved if self.__batch_size: limit = min(limit, self.__batch_size) else: limit = self.__batch_size # Exhaust cursors don't send getMore messages. if self.__exhaust: self.__send_message(None) else: self.__send_message(_GetMore(self.__collection.database.name, self.__collection.name, limit, self.__id, self.__codec_options, self.__max_await_time_ms)) else: # Cursor id is zero nothing else to return self.__killed = True return len(self.__data)
async def _refresh(self) -> None: """Refreshes the cursor with more data from the server. Returns the length of self.__data after refresh. Will exit early if self.__data is already non-empty. Raises OperationFailure when the cursor cannot be refreshed due to an error on the query. """ if len(self.__data) or self.__killed: return len(self.__data) if self.__id: # Get More dbname, collname = self.__ns.split('.', 1) try: data = await self.__connection.perform_operation( _GetMore(dbname, collname, self.__batch_size, self.__id, self.__collection.codec_options)) except EOFError: self.__killed = True raise try: doc = helpers._unpack_response(data, self.__id, self.__collection.codec_options) helpers._check_command_response(doc['data'][0]) except OperationFailure: self.__killed = True raise cursor = doc['data'][0]['cursor'] documents = cursor['nextBatch'] self.__id = cursor['id'] self.__retrieved += len(documents) if self.__id == 0: self.__killed = True self.__data = deque(documents) else: # Cursor id is zero nothing else to return self.__killed = True return len(self.__data)
def _refresh(self): """Refreshes the cursor with more data from the server. Returns the length of self.__data after refresh. Will exit early if self.__data is already non-empty. Raises OperationFailure when the cursor cannot be refreshed due to an error on the query. """ if len(self.__data) or self.__killed: return len(self.__data) if self.__id: # Get More self.__send_message( _GetMore(self.__ns, self.__batch_size, self.__id)) else: # Cursor id is zero nothing else to return self.__killed = True return len(self.__data)
async def _refresh(self) -> None: if len(self.__data) or self.__killed: return 0 if self.__connection is None: self.__connection = await self.__collection.database.client.get_connection() is_query = False if self.__id is None: is_query = True data = await self.__connection.perform_operation( _Query(self.__query_flags, self.__collection.database.name, self.__collection.name, self.__skip, self.__query_spec(), self.__projection, self.__codec_options, self.__read_preference, self.__limit, self.__batch_size, self.__read_concern, self.__collation) ) elif self.__id: if self.__limit: limit = self.__limit - self.__retrieved if self.__batch_size: limit = min(limit, self.__batch_size) else: limit = self.__batch_size try: data = await self.__connection.perform_operation( _GetMore(self.__collection.database.name, self.__collection.name, limit, self.__id, self.__codec_options, self.__max_await_time_ms) ) except EOFError: self.__killed = True raise else: self.__killed = True self.__data = data = None if data: doc = helpers._unpack_response(response=data, cursor_id=self.__id, codec_options=self.__codec_options) helpers._check_command_response(doc['data'][0]) if not self.__explain: cursor = doc['data'][0]['cursor'] self.__id = cursor['id'] if is_query: documents = cursor['firstBatch'] else: documents = cursor['nextBatch'] self.__data = deque(documents) self.__retrieved += len(documents) else: self.__id = doc['cursor_id'] self.__data = deque(doc['data']) self.__retrieved += doc['number_returned'] if self.__id == 0: self.__killed = True if self.__limit and self.__id and self.__limit <= self.__retrieved: await self.close() return len(self.__data)