def _list_collections(self, sock_info, slave_okay, criteria=None, session=None):
    """Internal listCollections helper."""
    filter_doc = criteria or {}
    cmd = SON([("listCollections", 1), ("cursor", {})])
    if filter_doc:
        cmd["filter"] = filter_doc
    if sock_info.max_wire_version > 2:
        # Wire version 3+: the server implements the listCollections command.
        cmd_coll = self["$cmd"]
        with self.__client._tmp_session(session, close=False) as s:
            cursor = self._command(
                sock_info, cmd, slave_okay, session=s)["cursor"]
            return CommandCursor(
                cmd_coll, cursor, sock_info.address,
                session=s, explicit_session=session is not None)
    # Legacy servers: query system.namespaces directly.
    ns_coll = self["system.namespaces"]
    res = _first_batch(
        sock_info, ns_coll.database.name, ns_coll.name, filter_doc, 0,
        slave_okay, CodecOptions(), ReadPreference.PRIMARY, cmd,
        self.client._event_listeners, session=None)
    first_batch = res["data"]
    cursor = {
        "id": res["cursor_id"],
        "firstBatch": first_batch,
        "ns": ns_coll.full_name,
    }
    return CommandCursor(ns_coll, cursor, sock_info.address)
def _list_collections(self, sock_info, slave_okay, criteria=None):
    """Internal listCollections helper."""
    filter_doc = criteria or {}
    cmd = SON([("listCollections", 1), ("cursor", {})])
    if filter_doc:
        cmd["filter"] = filter_doc
    if sock_info.max_wire_version > 2:
        # Modern servers implement the listCollections command.
        cmd_coll = self["$cmd"]
        cursor = self._command(sock_info, cmd, slave_okay)["cursor"]
        return CommandCursor(cmd_coll, cursor, sock_info.address)
    # Fall back to querying system.namespaces on legacy servers.
    ns_coll = self["system.namespaces"]
    res = _first_batch(
        sock_info, ns_coll.database.name, ns_coll.name, filter_doc, 0,
        slave_okay, CodecOptions(), ReadPreference.PRIMARY, cmd,
        self.client._event_listeners)
    first_batch = res["data"]
    cursor = {
        "id": res["cursor_id"],
        "firstBatch": first_batch,
        "ns": ns_coll.full_name,
    }
    # Need to tell the cursor how many docs were in the first batch.
    return CommandCursor(ns_coll, cursor, sock_info.address, len(first_batch))
def test_cursor_transfer(self):
    # This is just a test, don't try this at home...
    client = client_context.rs_or_standalone_client
    db = client.pymongo_test
    db.test.delete_many({})
    db.test.insert_many([{'_id': n} for n in range(200)])

    class NoopManager(CursorManager):
        def __init__(self, client):
            super(NoopManager, self).__init__(client)

        def close(self, dummy, dummy2):
            # Do absolutely nothing...
            pass

    client.set_cursor_manager(NoopManager)
    self.addCleanup(client.set_cursor_manager, CursorManager)
    cursor = db.test.find().batch_size(10)
    docs = [next(cursor)]
    # Closing is a no-op under NoopManager, so the cursor keeps yielding
    # the rest of its first batch.
    cursor.close()
    docs.extend(cursor)
    self.assertEqual(len(docs), 10)
    # Re-attach to the still-open server cursor via a CommandCursor.
    cmd_cursor = {'id': cursor.cursor_id, 'firstBatch': []}
    ccursor = CommandCursor(cursor.collection, cmd_cursor, cursor.address,
                            retrieved=cursor.retrieved)
    docs.extend(ccursor)
    self.assertEqual(len(docs), 200)
def test_get_more_failure(self):
    address = self.client.address
    coll = self.client.pymongo_test.test
    # A bogus cursor id guarantees the getMore fails on the server.
    fake_cursor = {"id": 12345, "firstBatch": [], "ns": coll.full_name}
    cursor = CommandCursor(coll, fake_cursor, address)
    try:
        next(cursor)
    except Exception:
        pass
    results = self.listener.results
    started = results.get('started')
    failed = results.get('failed')
    self.assertIsNone(results.get('succeeded'))
    self.assertTrue(isinstance(started, monitoring.CommandStartedEvent))
    expected_cmd = SON([('getMore', 12345), ('collection', 'test')])
    self.assertEqual(expected_cmd, started.command)
    self.assertEqual('getMore', started.command_name)
    self.assertEqual(self.client.address, started.connection_id)
    self.assertEqual('pymongo_test', started.database_name)
    self.assertTrue(isinstance(started.request_id, int))
    self.assertTrue(isinstance(failed, monitoring.CommandFailedEvent))
    self.assertTrue(isinstance(failed.duration_micros, int))
    self.assertEqual('getMore', failed.command_name)
    self.assertTrue(isinstance(failed.request_id, int))
    self.assertEqual(cursor.address, failed.connection_id)
    self.assertEqual(0, failed.failure.get("ok"))
def collection_names(self, include_system_collections=True):
    """Get a list of all the collection names in this database.

    :Parameters:
      - `include_system_collections` (optional): if ``False`` list will
        not include system collections (e.g ``system.indexes``)
    """
    client = self.connection
    client._ensure_connected(True)
    slave_okay = not client._rs_client and not client.is_mongos
    if client.max_wire_version > 2:
        res, addr = self._command(
            "listCollections", cursor={},
            read_preference=ReadPreference.PRIMARY,
            slave_okay=slave_okay)
        if "collections" in res:
            # MongoDB 2.8rc2 returns the list inline.
            results = res["collections"]
        else:
            # MongoDB >= 2.8rc3 returns a cursor document.
            results = CommandCursor(self["$cmd"], res["cursor"], addr)
        names = [doc["name"] for doc in results]
    else:
        names = [doc["name"] for doc in
                 self["system.namespaces"].find(
                     slave_okay=slave_okay, _must_use_master=True)]
    # Strip the "<dbname>." prefix and drop special ($-containing) names.
    prefix = self.__name + "."
    names = [n[len(prefix):] for n in names
             if n.startswith(prefix) and "$" not in n]
    if not include_system_collections:
        names = [n for n in names if not n.startswith("system.")]
    return names
def test_cursor_transfer(self):
    # This is just a test, don't try this at home...
    self.db.test.remove({})
    self.db.test.insert({'_id': n} for n in xrange(200))

    class NullManager(CursorManager):
        def __init__(self, connection):
            super(NullManager, self).__init__(connection)

        def close(self, dummy):
            # Do absolutely nothing...
            pass

    client = self.db.connection
    try:
        client.set_cursor_manager(NullManager)
        cursor = self.db.test.find().batch_size(10)
        docs = [cursor.next()]
        # close() is a no-op under NullManager, so iteration continues
        # through the rest of the first batch.
        cursor.close()
        docs.extend(cursor)
        self.assertEqual(len(docs), 10)
        # Re-attach to the still-open server cursor via a CommandCursor.
        cmd_cursor = {'id': cursor.cursor_id, 'firstBatch': []}
        ccursor = CommandCursor(cursor.collection, cmd_cursor,
                                cursor.conn_id, retrieved=cursor.retrieved)
        docs.extend(ccursor)
        self.assertEqual(len(docs), 200)
    finally:
        client.set_cursor_manager(CursorManager)
def customCursorToCursor(custom_cursor, username, password, db_name, coll_name,
                         address=('molspace.rc.fas.harvard.edu', 27017)):
    """Wrap a raw cursor document in a PyMongo CommandCursor.

    :Parameters:
      - `custom_cursor`: a cursor document (``{'id': ..., 'firstBatch': ...,
        'ns': ...}``) as returned by a server command.
      - `username` / `password` / `db_name` / `coll_name`: credentials and
        namespace forwarded to ``mongoConnectionCreator``.
      - `address` (optional): ``(host, port)`` pair of the server that owns
        the cursor. Defaults to the previously hard-coded molspace host so
        existing callers are unaffected.

    :Returns:
      Tuple of ``(CommandCursor, db_name, coll_name, client)`` as produced
      by ``mongoConnectionCreator``.
    """
    (pdb, pcoll, pclient) = mongoConnectionCreator(
        username, password, db_name, coll_name)
    return (CommandCursor(pcoll, custom_cursor, address), pdb, pcoll, pclient)
def recreate_cursor(collection, cursor_id, retrieved, batch_size):
    """Create and return a Cursor object based on an existing cursor in
    the server.

    If cursor_id is invalid, the returned cursor will raise
    OperationFailure on read. If batch_size is -1, then all remaining
    documents on the cursor are returned.

    :Parameters:
      - `collection`: the Collection the cursor was opened against.
      - `cursor_id`: server-side cursor id; ``0`` means already exhausted.
      - `retrieved`: number of documents already consumed from the cursor.
      - `batch_size`: batch size to apply to the recreated cursor.

    :Returns:
      A CommandCursor attached to the existing server cursor, or ``None``
      when ``cursor_id`` is 0.
    """
    if cursor_id == 0:
        # The server cursor is already exhausted; nothing to recreate.
        return None
    cursor_info = {'id': cursor_id, 'firstBatch': []}
    # Lazy %-style arguments avoid the formatting cost when INFO logging
    # is disabled (str.format would always run eagerly).
    _logger.info(
        "collection: %s cursor_info: %s retrieved %s batch_size %s",
        collection, cursor_id, retrieved, batch_size)
    cursor = CommandCursor(collection, cursor_info, 0, retrieved=retrieved)
    cursor.batch_size(batch_size)
    return cursor
def _list_collections(self, sock_info, secondary_okay, session,
                      read_preference, **kwargs):
    """Internal listCollections helper."""
    cmd_coll = self.get_collection("$cmd", read_preference=read_preference)
    spec = SON([("listCollections", 1), ("cursor", {})])
    spec.update(kwargs)
    with self.__client._tmp_session(session, close=False) as tmp_session:
        response = self._command(
            sock_info, spec, secondary_okay,
            read_preference=read_preference, session=tmp_session)
        cmd_cursor = CommandCursor(
            cmd_coll, response["cursor"], sock_info.address,
            session=tmp_session, explicit_session=session is not None)
        cmd_cursor._maybe_pin_connection(sock_info)
        return cmd_cursor
def _list_collections(self, sock_info, slave_okay, session,
                      read_preference, **kwargs):
    """Internal listCollections helper."""
    coll = self.get_collection("$cmd", read_preference=read_preference)
    if sock_info.max_wire_version > 2:
        # Wire version 3+: the server supports listCollections.
        cmd = SON([("listCollections", 1), ("cursor", {})])
        cmd.update(kwargs)
        with self.__client._tmp_session(session, close=False) as tmp_session:
            cursor = self._command(
                sock_info, cmd, slave_okay,
                read_preference=read_preference,
                session=tmp_session)["cursor"]
            return CommandCursor(
                coll, cursor, sock_info.address,
                session=tmp_session,
                explicit_session=session is not None)
    # Legacy servers: emulate listCollections by aggregating over
    # system.namespaces, stripping the "<dbname>." prefix from names.
    match = _INDEX_REGEX
    if "filter" in kwargs:
        match = {"$and": [_INDEX_REGEX, kwargs["filter"]]}
    dblen = len(self.name.encode("utf8") + b".")
    pipeline = [
        {"$project": {"name": {"$substr": ["$name", dblen, -1]},
                      "options": 1}},
        {"$match": match},
    ]
    cmd = SON([("aggregate", "system.namespaces"),
               ("pipeline", pipeline),
               ("cursor", kwargs.get("cursor", {}))])
    cursor = self._command(sock_info, cmd, slave_okay)["cursor"]
    return CommandCursor(coll, cursor, sock_info.address)
def _run_aggregation_cmd(self, session, explicit_session):
    """Run the full aggregation pipeline for this ChangeStream and return
    the corresponding CommandCursor.
    """
    read_preference = self._target._read_preference_for(session)
    client = self._database.client
    with client._socket_for_reads(
            read_preference, session) as (sock_info, slave_ok):
        cmd = SON([("aggregate", self._aggregation_target),
                   ("pipeline", self._full_pipeline()),
                   ("cursor", {})])
        result = sock_info.command(
            self._database.name, cmd, slave_ok, read_preference,
            self._target.codec_options,
            parse_write_concern_error=True,
            read_concern=self._target.read_concern,
            collation=self._collation,
            session=session,
            client=self._database.client)
        cursor = result["cursor"]
        # Record the server's operationTime for resumability when no
        # explicit resume point was given and the server is new enough.
        if (self._start_at_operation_time is None
                and self._resume_token is None
                and cursor.get("_id") is None
                and sock_info.max_wire_version >= 7):
            self._start_at_operation_time = result["operationTime"]
        _, collname = cursor["ns"].split(".", 1)
        agg_coll = self._database.get_collection(
            collname,
            codec_options=self._target.codec_options,
            read_preference=read_preference,
            write_concern=self._target.write_concern,
            read_concern=self._target.read_concern)
        return CommandCursor(
            agg_coll, cursor, sock_info.address,
            batch_size=self._batch_size or 0,
            max_await_time_ms=self._max_await_time_ms,
            session=session,
            explicit_session=explicit_session)
def _list_collections(self, sock_info, slave_okay, criteria=None):
    """Internal listCollections helper."""
    filter_doc = criteria or {}
    if sock_info.max_wire_version > 2:
        # Modern servers: run the listCollections command.
        spec = SON([("listCollections", 1), ("cursor", {})])
        if filter_doc:
            spec["filter"] = filter_doc
        coll = self["$cmd"]
        cursor = self._command(sock_info, spec, slave_okay)["cursor"]
    else:
        # Legacy servers: query system.namespaces directly.
        coll = self["system.namespaces"]
        res = _first_batch(sock_info, coll.full_name, filter_doc, 0,
                           slave_okay, CodecOptions(),
                           ReadPreference.PRIMARY)
        cursor = {
            "id": res["cursor_id"],
            "firstBatch": res["data"],
            "ns": coll.full_name,
        }
    return CommandCursor(coll, cursor, sock_info.address)
def _cmd(session, server, sock_info, slave_ok):
    # Build and run the change-stream aggregation, returning a
    # CommandCursor over the result. Reads `self`, `read_preference`,
    # and `explicit_session` from the enclosing scope.
    cmd = SON([("aggregate", self._aggregation_target),
               ("pipeline", self._full_pipeline()),
               ("cursor", {})])
    result = sock_info.command(
        self._database.name, cmd, slave_ok, read_preference,
        self._target.codec_options,
        parse_write_concern_error=True,
        read_concern=self._target.read_concern,
        collation=self._collation,
        session=session,
        client=self._database.client)
    cursor = result["cursor"]
    # Capture the operationTime for resumability when no explicit resume
    # point was supplied and the server is new enough.
    if (self._start_at_operation_time is None
            and self._resume_token is None
            and self._start_after is None
            and sock_info.max_wire_version >= 7):
        self._start_at_operation_time = result["operationTime"]
    _, collname = cursor["ns"].split(".", 1)
    agg_coll = self._database.get_collection(
        collname,
        codec_options=self._target.codec_options,
        read_preference=read_preference,
        write_concern=self._target.write_concern,
        read_concern=self._target.read_concern)
    return CommandCursor(
        agg_coll, cursor, sock_info.address,
        batch_size=self._batch_size or 0,
        max_await_time_ms=self._max_await_time_ms,
        session=session,
        explicit_session=explicit_session)