def test_multiple_connections(self):
        a = get_connection(auto_start_request=False)
        b = get_connection(auto_start_request=False)
        self.assertEqual(1, len(a._Connection__pool.sockets))
        self.assertEqual(1, len(b._Connection__pool.sockets))

        a.start_request()
        a.test.test.find_one()
        self.assertEqual(0, len(a._Connection__pool.sockets))
        a.end_request()
        self.assertEqual(1, len(a._Connection__pool.sockets))
        self.assertEqual(1, len(b._Connection__pool.sockets))
        a_sock = one(a._Connection__pool.sockets)

        b.end_request()
        self.assertEqual(1, len(a._Connection__pool.sockets))
        self.assertEqual(1, len(b._Connection__pool.sockets))

        b.start_request()
        b.test.test.find_one()
        self.assertEqual(1, len(a._Connection__pool.sockets))
        self.assertEqual(0, len(b._Connection__pool.sockets))

        b.end_request()
        b_sock = one(b._Connection__pool.sockets)
        b.test.test.find_one()
        a.test.test.find_one()

        self.assertEqual(b_sock,
                         b._Connection__pool.get_socket((b.host, b.port)))
        self.assertEqual(a_sock,
                         a._Connection__pool.get_socket((a.host, a.port)))
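These tests lean on two shared helpers, get_connection() and one(), whose definitions are not shown in this listing. A minimal sketch of what they are assumed to look like (the environment-variable names and defaults are illustrative, not taken from the source):

import os

from pymongo import Connection  # the pre-MongoClient API these tests target


def get_connection(*args, **kwargs):
    # Assumed helper: build a test Connection, with optional host/port
    # overrides from the environment.
    host = os.environ.get("DB_IP", "localhost")
    port = int(os.environ.get("DB_PORT", 27017))
    return Connection(host, port, *args, **kwargs)


def one(s):
    # Assumed helper: return the single element of a one-item collection,
    # e.g. the lone socket left in a connection pool's socket set.
    return next(iter(s))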
    def test_mongos_connection(self):
        c = get_connection()
        is_mongos = utils.is_mongos(c)

        # Test default mode, PRIMARY
        cursor = c.pymongo_test.test.find()
        if is_mongos:
            self.assertEqual(
                {'mode': 'primary'},
                cursor._Cursor__query_spec().get('$readPreference')
            )
        else:
            self.assertFalse(
                '$readPreference' in cursor._Cursor__query_spec())

        # Test non-PRIMARY modes which can be combined with tags
        for mode, mongos_mode in (
            (ReadPreference.PRIMARY_PREFERRED, 'primaryPreferred'),
            (ReadPreference.SECONDARY, 'secondary'),
            (ReadPreference.SECONDARY_PREFERRED, 'secondaryPreferred'),
            (ReadPreference.NEAREST, 'nearest'),
        ):
            for tag_sets in (
                None, [{}]
            ):
                c = get_connection(
                    read_preference=mode,
                    tag_sets=tag_sets)

                self.assertEqual(is_mongos, c.is_mongos)
                cursor = c.pymongo_test.test.find()
                if is_mongos:
                    self.assertEqual(
                        {'mode': mongos_mode},
                        cursor._Cursor__query_spec().get('$readPreference')
                    )
                else:
                    self.assertFalse(
                        '$readPreference' in cursor._Cursor__query_spec())

            for tag_sets in (
                [{'dc': 'la'}],
                [{'dc': 'la'}, {'dc': 'sf'}],
                [{'dc': 'la'}, {'dc': 'sf'}, {}],
            ):
                c = get_connection(
                    read_preference=mode,
                    tag_sets=tag_sets)

                self.assertEqual(is_mongos, c.is_mongos)
                cursor = c.pymongo_test.test.find()
                if is_mongos:
                    self.assertEqual(
                        {'mode': mongos_mode, 'tags': tag_sets},
                        cursor._Cursor__query_spec().get('$readPreference'))
                else:
                    self.assertFalse(
                        '$readPreference' in cursor._Cursor__query_spec())
    def test_uuid_queries(self):
        if not should_test_uuid:
            raise SkipTest("No uuid module")

        c = get_connection()
        coll = c.pymongo_test.test
        coll.drop()

        uu = uuid.uuid4()
        # Wrap uu.bytes in binary_type to work
        # around http://bugs.python.org/issue7380.
        coll.insert({'uuid': Binary(binary_type(uu.bytes), 3)})
        self.assertEqual(1, coll.count())

        # Test UUIDLegacy queries.
        coll.uuid_subtype = 4
        self.assertEqual(0, coll.find({'uuid': uu}).count())
        cur = coll.find({'uuid': UUIDLegacy(uu)})
        self.assertEqual(1, cur.count())
        retrieved = cur.next()
        self.assertEqual(uu, retrieved['uuid'])

        # Test regular UUID queries (using subtype 4).
        coll.insert({'uuid': uu})
        self.assertEqual(2, coll.count())
        cur = coll.find({'uuid': uu})
        self.assertEqual(1, cur.count())
        retrieved = cur.next()
        self.assertEqual(uu, retrieved['uuid'])

        # Test both.
        cur = coll.find({'uuid': {'$in': [uu, UUIDLegacy(uu)]}})
        self.assertEqual(2, cur.count())
        coll.drop()
Example #4
    def test_uuid_queries(self):
        if not should_test_uuid:
            raise SkipTest("No uuid module")

        c = get_connection()
        coll = c.pymongo_test.test
        coll.drop()

        uu = uuid.uuid4()
        # Wrap uu.bytes in binary_type to work
        # around http://bugs.python.org/issue7380.
        coll.insert({'uuid': Binary(binary_type(uu.bytes), 3)})
        self.assertEqual(1, coll.count())

        # Test UUIDLegacy queries.
        coll.uuid_subtype = 4
        self.assertEqual(0, coll.find({'uuid': uu}).count())
        cur = coll.find({'uuid': UUIDLegacy(uu)})
        self.assertEqual(1, cur.count())
        retrieved = cur.next()
        self.assertEqual(uu, retrieved['uuid'])

        # Test regular UUID queries (using subtype 4).
        coll.insert({'uuid': uu})
        self.assertEqual(2, coll.count())
        cur = coll.find({'uuid': uu})
        self.assertEqual(1, cur.count())
        retrieved = cur.next()
        self.assertEqual(uu, retrieved['uuid'])

        # Test both.
        cur = coll.find({'uuid': {'$in': [uu, UUIDLegacy(uu)]}})
        self.assertEqual(2, cur.count())
        coll.drop()
    def test_authenticate_and_request(self):
        if (is_mongos(self.connection) and not
            version.at_least(self.connection, (2, 0, 0))):
            raise SkipTest("Auth with sharding requires MongoDB >= 2.0.0")
        # Database.authenticate() needs to be in a request - check that it
        # always runs in a request, and that it restores the request state
        # (in or not in a request) properly when it's finished.
        self.assertTrue(self.connection.auto_start_request)
        db = self.connection.pymongo_test
        db.system.users.remove({})
        db.remove_user("mike")
        db.add_user("mike", "password")
        self.assertTrue(self.connection.in_request())
        self.assertTrue(db.authenticate("mike", "password"))
        self.assertTrue(self.connection.in_request())

        no_request_cx = get_connection(auto_start_request=False)
        no_request_db = no_request_cx.pymongo_test
        self.assertFalse(no_request_cx.in_request())
        self.assertTrue(no_request_db.authenticate("mike", "password"))
        self.assertFalse(no_request_cx.in_request())

        # just make sure there are no exceptions here
        db.logout()
        no_request_db.logout()
    def test_only_secondary_ok_commands_have_read_prefs(self):
        c = get_connection(read_preference=ReadPreference.SECONDARY)
        is_mongos = utils.is_mongos(c)
        if not is_mongos:
            raise SkipTest("Only mongos have read_prefs added to the spec")

        # Ensure secondary_ok_commands have readPreference
        for cmd in secondary_ok_commands:
            if cmd == "mapreduce":  # map reduce is a special case
                continue
            command = SON([(cmd, 1)])
            cursor = c.pymongo_test["$cmd"].find(command.copy())
            # White-listed commands also have to be wrapped in $query
            command = SON([("$query", command)])
            command["$readPreference"] = {"mode": "secondary"}
            self.assertEqual(command, cursor._Cursor__query_spec())

        # map_reduce inline should have read prefs
        command = SON([("mapreduce", "test"), ("out", {"inline": 1})])
        cursor = c.pymongo_test["$cmd"].find(command.copy())
        # White-listed commands also have to be wrapped in $query
        command = SON([("$query", command)])
        command["$readPreference"] = {"mode": "secondary"}
        self.assertEqual(command, cursor._Cursor__query_spec())

        # map_reduce that outputs to a collection shouldn't have read prefs
        command = SON([("mapreduce", "test"), ("out", {"mrtest": 1})])
        cursor = c.pymongo_test["$cmd"].find(command.copy())
        self.assertEqual(command, cursor._Cursor__query_spec())

        # Other commands shouldn't be changed
        for cmd in ("drop", "create", "any-future-cmd"):
            command = SON([(cmd, 1)])
            cursor = c.pymongo_test["$cmd"].find(command.copy())
            self.assertEqual(command, cursor._Cursor__query_spec())
    def test_authenticate_and_request(self):
        if (is_mongos(self.connection)
                and not version.at_least(self.connection, (2, 0, 0))):
            raise SkipTest("Auth with sharding requires MongoDB >= 2.0.0")
        # Database.authenticate() needs to be in a request - check that it
        # always runs in a request, and that it restores the request state
        # (in or not in a request) properly when it's finished.
        self.assertTrue(self.connection.auto_start_request)
        db = self.connection.pymongo_test
        db.system.users.remove({})
        db.remove_user("mike")
        db.add_user("mike", "password")
        self.assertTrue(self.connection.in_request())
        self.assertTrue(db.authenticate("mike", "password"))
        self.assertTrue(self.connection.in_request())

        no_request_cx = get_connection(auto_start_request=False)
        no_request_db = no_request_cx.pymongo_test
        self.assertFalse(no_request_cx.in_request())
        self.assertTrue(no_request_db.authenticate("mike", "password"))
        self.assertFalse(no_request_cx.in_request())

        # just make sure there are no exceptions here
        db.logout()
        no_request_db.logout()
    def test_only_secondary_ok_commands_have_read_prefs(self):
        c = get_connection(read_preference=ReadPreference.SECONDARY)
        is_mongos = utils.is_mongos(c)
        if not is_mongos:
            raise SkipTest("Only mongos have read_prefs added to the spec")

        # Ensure secondary_ok_commands have readPreference
        for cmd in secondary_ok_commands:
            if cmd == 'mapreduce':  # map reduce is a special case
                continue
            command = SON([(cmd, 1)])
            cursor = c.pymongo_test["$cmd"].find(command.copy())
            command['$readPreference'] = {'mode': 'secondary'}
            self.assertEqual(command, cursor._Cursor__query_spec())

        # map_reduce inline should have read prefs
        command = SON([('mapreduce', 'test'), ('out', {'inline': 1})])
        cursor = c.pymongo_test["$cmd"].find(command.copy())
        command['$readPreference'] = {'mode': 'secondary'}
        self.assertEqual(command, cursor._Cursor__query_spec())

        # map_reduce that outputs to a collection shouldn't have read prefs
        command = SON([('mapreduce', 'test'), ('out', {'mrtest': 1})])
        cursor = c.pymongo_test["$cmd"].find(command.copy())
        self.assertEqual(command, cursor._Cursor__query_spec())

        # Other commands shouldn't be changed
        for cmd in ('drop', 'create', 'any-future-cmd'):
            command = SON([(cmd, 1)])
            cursor = c.pymongo_test["$cmd"].find(command.copy())
            self.assertEqual(command, cursor._Cursor__query_spec())
    def test_only_secondary_ok_commands_have_read_prefs(self):
        c = get_connection(read_preference=ReadPreference.SECONDARY)
        is_mongos = utils.is_mongos(c)
        if not is_mongos:
            raise SkipTest("Only mongos have read_prefs added to the spec")

        # Ensure secondary_ok_commands have readPreference
        for cmd in secondary_ok_commands:
            if cmd == 'mapreduce':  # map reduce is a special case
                continue
            command = SON([(cmd, 1)])
            cursor = c.pymongo_test["$cmd"].find(command.copy())
            # White-listed commands also have to be wrapped in $query
            command = SON([('$query', command)])
            command['$readPreference'] = {'mode': 'secondary'}
            self.assertEqual(command, cursor._Cursor__query_spec())

        # map_reduce inline should have read prefs
        command = SON([('mapreduce', 'test'), ('out', {'inline': 1})])
        cursor = c.pymongo_test["$cmd"].find(command.copy())
        # White-listed commands also have to be wrapped in $query
        command = SON([('$query', command)])
        command['$readPreference'] = {'mode': 'secondary'}
        self.assertEqual(command, cursor._Cursor__query_spec())

        # map_reduce that outputs to a collection shouldn't have read prefs
        command = SON([('mapreduce', 'test'), ('out', {'mrtest': 1})])
        cursor = c.pymongo_test["$cmd"].find(command.copy())
        self.assertEqual(command, cursor._Cursor__query_spec())

        # Other commands shouldn't be changed
        for cmd in ('drop', 'create', 'any-future-cmd'):
            command = SON([(cmd, 1)])
            cursor = c.pymongo_test["$cmd"].find(command.copy())
            self.assertEqual(command, cursor._Cursor__query_spec())
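secondary_ok_commands, iterated above, is the driver's whitelist of commands that may run on a secondary. An illustrative (assumed, not exhaustive) subset, just for reading these assertions:

# Illustrative subset only -- the driver's actual whitelist is longer and
# version-dependent.
secondary_ok_commands = frozenset([
    'group', 'aggregate', 'collstats', 'dbstats', 'count',
    'distinct', 'geonear', 'mapreduce',
])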
Example #10
    def _get_connection(self):
        """
        Intended for overriding in TestThreadsAuthReplicaSet. This method
        returns a Connection here, and a ReplicaSetConnection in
        test_threads_replica_set_connection.py.
        """
        # Regular test connection
        return get_connection()
Example #11
    def setUp(self):
        self.db = get_connection().pymongo_test
        self.db.drop_collection("fs.files")
        self.db.drop_collection("fs.chunks")
        self.db.drop_collection("alt.files")
        self.db.drop_collection("alt.chunks")
        self.fs = gridfs.GridFS(self.db)
        self.alt = gridfs.GridFS(self.db, "alt")
Example #12
    def _get_connection(self):
        """
        Intended for overriding in TestThreadsAuthReplicaSet. This method
        returns a Connection here, and a ReplicaSetConnection in
        test_threads_replica_set_connection.py.
        """
        # Regular test connection
        return get_connection()
    def setUp(self):
        self.db = get_connection().pymongo_test
        self.db.drop_collection("fs.files")
        self.db.drop_collection("fs.chunks")
        self.db.drop_collection("alt.files")
        self.db.drop_collection("alt.chunks")
        self.fs = gridfs.GridFS(self.db)
        self.alt = gridfs.GridFS(self.db, "alt")
Example #14
def teardown():
    c = get_connection()

    c.drop_database("pymongo-pooling-tests")
    c.drop_database("pymongo_test")
    c.drop_database("pymongo_test1")
    c.drop_database("pymongo_test2")
    c.drop_database("pymongo_test_mike")
    c.drop_database("pymongo_test_bernie")
    def setUp(self):
        self.c = get_connection(auto_start_request=False)

        # reset the db
        db = self.c[DB]
        db.unique.drop()
        db.test.drop()
        db.unique.insert({"_id": "mike"})
        db.test.insert([{} for i in range(1000)])
    def test_authenticate_multiple(self):
        conn = get_connection()
        if (is_mongos(conn) and not
            version.at_least(self.connection, (2, 0, 0))):
            raise SkipTest("Auth with sharding requires MongoDB >= 2.0.0")
        if not server_started_with_auth(conn):
            raise SkipTest("Authentication is not enabled on server")

        # Setup
        users_db = conn.pymongo_test
        admin_db = conn.admin
        other_db = conn.pymongo_test1
        users_db.system.users.remove(safe=True)
        admin_db.system.users.remove(safe=True)
        users_db.test.remove(safe=True)
        other_db.test.remove(safe=True)

        admin_db.add_user('admin', 'pass')
        self.assertTrue(admin_db.authenticate('admin', 'pass'))

        admin_db.add_user('ro-admin', 'pass', read_only=True)
        users_db.add_user('user', 'pass')

        admin_db.logout()
        self.assertRaises(OperationFailure, users_db.test.find_one)

        # Regular user should be able to query its own db, but
        # no other.
        users_db.authenticate('user', 'pass')
        self.assertEqual(0, users_db.test.count())
        self.assertRaises(OperationFailure, other_db.test.find_one)

        # Admin read-only user should be able to query any db,
        # but not write.
        admin_db.authenticate('ro-admin', 'pass')
        self.assertEqual(0, other_db.test.count())
        self.assertRaises(OperationFailure,
                          other_db.test.insert, {}, safe=True)

        # Force close all sockets
        conn.disconnect()

        # We should still be able to write to the regular user's db
        self.assertTrue(users_db.test.remove(safe=True))
        # And read from other dbs...
        self.assertEqual(0, other_db.test.count())
        # But still not write to other dbs...
        self.assertRaises(OperationFailure,
                          other_db.test.insert, {}, safe=True)

        # Cleanup
        admin_db.logout()
        users_db.logout()
        self.assertTrue(admin_db.authenticate('admin', 'pass'))
        self.assertTrue(admin_db.system.users.remove(safe=True))
        self.assertEqual(0, admin_db.system.users.count())
        self.assertTrue(users_db.system.users.remove(safe=True))
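server_started_with_auth(), used in the skip check above, is another suite helper. A plausible sketch (getCmdLineOpts is a real admin command, but the helper's name and its exact argv checks are assumptions):

def server_started_with_auth(connection):
    # Assumed helper: inspect the server's command line to decide whether
    # authentication was enabled at startup.
    argv = connection.admin.command('getCmdLineOpts')['argv']
    return '--auth' in argv or '--keyFile' in argv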
    def test_pool_with_fork(self):
        # Test that separate Connections have separate Pools, and that the
        # driver can create a new Connection after forking
        if sys.platform == "win32":
            raise SkipTest("Can't test forking on Windows")

        try:
            from multiprocessing import Process, Pipe
        except ImportError:
            raise SkipTest("No multiprocessing module")

        a = get_connection(auto_start_request=False)
        a.test.test.remove(safe=True)
        a.test.test.insert({'_id': 1}, safe=True)
        a.test.test.find_one()
        self.assertEqual(1, len(a._Connection__pool.sockets))
        a_sock = one(a._Connection__pool.sockets)

        def loop(pipe):
            c = get_connection(auto_start_request=False)
            self.assertEqual(1, len(c._Connection__pool.sockets))
            c.test.test.find_one()
            self.assertEqual(1, len(c._Connection__pool.sockets))
            pipe.send(one(c._Connection__pool.sockets).sock.getsockname())

        cp1, cc1 = Pipe()
        cp2, cc2 = Pipe()

        p1 = Process(target=loop, args=(cc1,))
        p2 = Process(target=loop, args=(cc2,))

        p1.start()
        p2.start()

        p1.join(1)
        p2.join(1)

        p1.terminate()
        p2.terminate()

        p1.join()
        p2.join()

        cc1.close()
        cc2.close()

        b_sock = cp1.recv()
        c_sock = cp2.recv()
        self.assertTrue(a_sock.sock.getsockname() != b_sock)
        self.assertTrue(a_sock.sock.getsockname() != c_sock)
        self.assertTrue(b_sock != c_sock)
        self.assertEqual(a_sock,
                         a._Connection__pool.get_socket((a.host, a.port)))
    def test_file_exists(self):
        db = get_connection(w=1).pymongo_test
        fs = gridfs.GridFS(db)

        oid = fs.put(b("hello"))
        self.assertRaises(FileExists, fs.put, b("world"), _id=oid)

        one = fs.new_file(_id=123)
        one.write(b("some content"))
        one.close()

        two = fs.new_file(_id=123)
        self.assertRaises(FileExists, two.write, b('x' * 262146))
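b(), used for the byte literals above and the base64 blobs further down, is presumably a Python 2/3 bytes-literal shim along these lines (a sketch, not the driver's actual implementation):

import sys


def b(s):
    # Assumed shim: always return bytes, whichever major Python version runs.
    if sys.version_info[0] >= 3:
        return s.encode('latin-1')
    return s  # on Python 2, str is already bytes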
Example #20
    def test_low_network_timeout(self):
        db = None
        i = 0
        n = 10
        while db is None and i < n:
            try:
                db = get_connection(network_timeout=0.0001).pymongo_test
            except AutoReconnect:
                i += 1
        if i == n:
            raise SkipTest("Couldn't connect with network_timeout=0.0001")

        threads = []
        for _ in range(4):
            t = IgnoreAutoReconnect(db.test, 100)
            t.start()
            threads.append(t)

        joinall(threads)
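joinall() here is presumably just a join loop over the worker threads, which the next variant of the same test spells out inline:

def joinall(threads):
    # Equivalent to the explicit `for t in threads: t.join()` used below.
    for t in threads:
        t.join()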
Example #21
    def test_low_network_timeout(self):
        db = None
        i = 0
        n = 10
        while db is None and i < n:
            try:
                db = get_connection(network_timeout=0.0001).pymongo_test
            except AutoReconnect:
                i += 1
        if i == n:
            raise SkipTest("Couldn't connect with network_timeout=0.0001")

        threads = []
        for _ in range(4):
            t = IgnoreAutoReconnect(db.test, 100)
            t.start()
            threads.append(t)

        for t in threads:
            t.join()
Example #22
    def test_authenticate_and_request(self):
        # Database.authenticate() needs to be in a request - check that it
        # always runs in a request, and that it restores the request state
        # (in or not in a request) properly when it's finished.
        self.assertTrue(self.connection.auto_start_request)
        db = self.connection.pymongo_test
        db.system.users.remove({})
        db.remove_user("mike")
        db.add_user("mike", "password")
        self.assertTrue(self.connection.in_request())
        self.assertTrue(db.authenticate("mike", "password"))
        self.assertTrue(self.connection.in_request())

        no_request_cx = get_connection(auto_start_request=False)
        no_request_db = no_request_cx.pymongo_test
        self.assertFalse(no_request_cx.in_request())
        self.assertTrue(no_request_db.authenticate("mike", "password"))
        self.assertFalse(no_request_cx.in_request())

        # just make sure there are no exceptions here
        db.logout()
        no_request_db.logout()
    def test_legacy_csharp_uuid(self):
        if not should_test_uuid:
            raise SkipTest("No uuid module")

        # Generated by the .net driver
        from_csharp = b('ZAAAABBfaWQAAAAAAAVuZXdndWlkABAAAAAD+MkoCd/Jy0iYJ7Vhl'
                        'iF3BAJuZXdndWlkc3RyaW5nACUAAAAwOTI4YzlmOC1jOWRmLTQ4Y2'
                        'ItOTgyNy1iNTYxOTYyMTc3MDQAAGQAAAAQX2lkAAEAAAAFbmV3Z3V'
                        'pZAAQAAAAA9MD0oXQe6VOp7mK4jkttWUCbmV3Z3VpZHN0cmluZwAl'
                        'AAAAODVkMjAzZDMtN2JkMC00ZWE1LWE3YjktOGFlMjM5MmRiNTY1A'
                        'ABkAAAAEF9pZAACAAAABW5ld2d1aWQAEAAAAAPRmIO2auc/Tprq1Z'
                        'oQ1oNYAm5ld2d1aWRzdHJpbmcAJQAAAGI2ODM5OGQxLWU3NmEtNGU'
                        'zZi05YWVhLWQ1OWExMGQ2ODM1OAAAZAAAABBfaWQAAwAAAAVuZXdn'
                        'dWlkABAAAAADISpriopuTEaXIa7arYOCFAJuZXdndWlkc3RyaW5nA'
                        'CUAAAA4YTZiMmEyMS02ZThhLTQ2NGMtOTcyMS1hZWRhYWQ4MzgyMT'
                        'QAAGQAAAAQX2lkAAQAAAAFbmV3Z3VpZAAQAAAAA98eg0CFpGlPihP'
                        'MwOmYGOMCbmV3Z3VpZHN0cmluZwAlAAAANDA4MzFlZGYtYTQ4NS00'
                        'ZjY5LThhMTMtY2NjMGU5OTgxOGUzAAA=')

        data = base64.b64decode(from_csharp)

        # Test decoding
        docs = bson.decode_all(data, SON, False, OLD_UUID_SUBTYPE)
        for d in docs:
            self.assertNotEqual(d['newguid'], uuid.UUID(d['newguidstring']))

        docs = bson.decode_all(data, SON, False, UUID_SUBTYPE)
        for d in docs:
            self.assertNotEqual(d['newguid'], uuid.UUID(d['newguidstring']))

        docs = bson.decode_all(data, SON, False, JAVA_LEGACY)
        for d in docs:
            self.assertNotEqual(d['newguid'], uuid.UUID(d['newguidstring']))

        docs = bson.decode_all(data, SON, False, CSHARP_LEGACY)
        for d in docs:
            self.assertEqual(d['newguid'], uuid.UUID(d['newguidstring']))

        # Test encoding
        encoded = b('').join([bson.BSON.encode(doc,
                                               uuid_subtype=OLD_UUID_SUBTYPE)
                              for doc in docs])
        self.assertNotEqual(data, encoded)

        encoded = b('').join([bson.BSON.encode(doc, uuid_subtype=UUID_SUBTYPE)
                              for doc in docs])
        self.assertNotEqual(data, encoded)

        encoded = b('').join([bson.BSON.encode(doc, uuid_subtype=JAVA_LEGACY)
                              for doc in docs])
        self.assertNotEqual(data, encoded)

        encoded = b('').join([bson.BSON.encode(doc, uuid_subtype=CSHARP_LEGACY)
                              for doc in docs])
        self.assertEqual(data, encoded)

        # Test insert and find
        conn = get_connection()
        coll = conn.pymongo_test.csharp_uuid
        coll.uuid_subtype = CSHARP_LEGACY

        coll.insert(docs, safe=True)
        self.assertEqual(5, coll.count())
        for d in coll.find():
            self.assertEqual(d['newguid'], uuid.UUID(d['newguidstring']))

        coll.uuid_subtype = OLD_UUID_SUBTYPE
        for d in coll.find():
            self.assertNotEqual(d['newguid'], d['newguidstring'])
        conn.pymongo_test.drop_collection('csharp_uuid')
    def setUp(self):
        if not json_util.json_lib:
            raise SkipTest("No json or simplejson module")

        self.db = get_connection().pymongo_test
Example #26
    def _test_pool(self, use_request):
        """
        Test that the connection pool prevents both threads and greenlets from
        using a socket at the same time.

        Sequence:
        gr0: start a slow find()
        gr1: start a fast find()
        gr1: get results
        gr0: get results
        """
        cx = get_connection(use_greenlets=self.use_greenlets,
                            auto_start_request=False)

        db = cx.pymongo_test
        if not version.at_least(db.connection, (1, 7, 2)):
            raise SkipTest("Need at least MongoDB version 1.7.2 to use"
                           " db.eval(nolock=True)")

        db.test.remove(safe=True)
        db.test.insert({'_id': 1}, safe=True)

        history = []

        def find_fast():
            if use_request:
                cx.start_request()

            history.append('find_fast start')

            # With greenlets and the old connection._Pool, this would throw
            # AssertionError: "This event is already used by another
            # greenlet"
            self.assertEqual({'_id': 1}, db.test.find_one())
            history.append('find_fast done')

            if use_request:
                cx.end_request()

        def find_slow():
            if use_request:
                cx.start_request()

            history.append('find_slow start')

            # Javascript function that pauses 5 sec. 'nolock' allows find_fast
            # to start and finish while we're waiting for this.
            fn = delay(5)
            self.assertEqual({
                'ok': 1.0,
                'retval': True
            }, db.command('eval', fn, nolock=True))

            history.append('find_slow done')

            if use_request:
                cx.end_request()

        if self.use_greenlets:
            gr0, gr1 = Greenlet(find_slow), Greenlet(find_fast)
            gr0.start()
            gr1.start_later(.1)
        else:
            gr0 = threading.Thread(target=find_slow)
            gr0.setDaemon(True)
            gr1 = threading.Thread(target=find_fast)
            gr1.setDaemon(True)

            gr0.start()
            time.sleep(.1)
            gr1.start()

        gr0.join()
        gr1.join()

        self.assertEqual([
            'find_slow start',
            'find_fast start',
            'find_fast done',
            'find_slow done',
        ], history)
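delay(), called by find_slow above, builds the server-side JavaScript that keeps the slow query busy. A sketch of the kind of helper assumed here (the exact JavaScript and signature are illustrative):

def delay(sec):
    # Assumed helper: JavaScript that sleeps `sec` seconds on the server and
    # then returns true, usable with db.eval or as a $where predicate.
    return "function() { sleep(%f * 1000); return true; }" % sec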
    def setUp(self):
        self.connection = get_connection()
        self.db = self.connection.pymongo_test
    def setUp(self):
        self.db = Database(get_connection(), "pymongo_test")
    def _test_pool(self, use_request):
        """
        Test that the connection pool prevents both threads and greenlets from
        using a socket at the same time.

        Sequence:
        gr0: start a slow find()
        gr1: start a fast find()
        gr1: get results
        gr0: get results
        """
        cx = get_connection(use_greenlets=self.use_greenlets,
                            auto_start_request=False)

        db = cx.pymongo_test
        db.test.remove(safe=True)
        db.test.insert({'_id': 1}, safe=True)

        history = []

        def find_fast():
            if use_request:
                cx.start_request()

            history.append('find_fast start')

            # With greenlets and the old connection._Pool, this would throw
            # AssertionError: "This event is already used by another
            # greenlet"
            self.assertEqual({'_id': 1}, db.test.find_one())
            history.append('find_fast done')

            if use_request:
                cx.end_request()

        def find_slow():
            if use_request:
                cx.start_request()

            history.append('find_slow start')

            # Javascript function that pauses N seconds per document
            fn = delay(10)
            if (is_mongos(db.connection)
                    or not version.at_least(db.connection, (1, 7, 2))):
                # mongos doesn't support eval so we have to use $where
                # which is less reliable in this context.
                self.assertEqual(1, db.test.find({"$where": fn}).count())
            else:
                # 'nolock' allows find_fast to start and finish while we're
                # waiting for this to complete.
                self.assertEqual({
                    'ok': 1.0,
                    'retval': True
                }, db.command('eval', fn, nolock=True))

            history.append('find_slow done')

            if use_request:
                cx.end_request()

        if self.use_greenlets:
            gr0, gr1 = Greenlet(find_slow), Greenlet(find_fast)
            gr0.start()
            gr1.start_later(.1)
        else:
            gr0 = threading.Thread(target=find_slow)
            gr0.setDaemon(True)
            gr1 = threading.Thread(target=find_fast)
            gr1.setDaemon(True)

            gr0.start()
            time.sleep(.1)
            gr1.start()

        gr0.join()
        gr1.join()

        self.assertEqual([
            'find_slow start',
            'find_fast start',
            'find_fast done',
            'find_slow done',
        ], history)
    def test_legacy_java_uuid(self):
        if not should_test_uuid:
            raise SkipTest("No uuid module")

        # Generated by the Java driver
        from_java = b('bAAAAAdfaWQAUCBQxkVm+XdxJ9tOBW5ld2d1aWQAEAAAAAMIQkfACFu'
                      'Z/0RustLOU/G6Am5ld2d1aWRzdHJpbmcAJQAAAGZmOTk1YjA4LWMwND'
                      'ctNDIwOC1iYWYxLTUzY2VkMmIyNmU0NAAAbAAAAAdfaWQAUCBQxkVm+'
                      'XdxJ9tPBW5ld2d1aWQAEAAAAANgS/xhRXXv8kfIec+dYdyCAm5ld2d1'
                      'aWRzdHJpbmcAJQAAAGYyZWY3NTQ1LTYxZmMtNGI2MC04MmRjLTYxOWR'
                      'jZjc5Yzg0NwAAbAAAAAdfaWQAUCBQxkVm+XdxJ9tQBW5ld2d1aWQAEA'
                      'AAAAPqREIbhZPUJOSdHCJIgaqNAm5ld2d1aWRzdHJpbmcAJQAAADI0Z'
                      'DQ5Mzg1LTFiNDItNDRlYS04ZGFhLTgxNDgyMjFjOWRlNAAAbAAAAAdf'
                      'aWQAUCBQxkVm+XdxJ9tRBW5ld2d1aWQAEAAAAANjQBn/aQuNfRyfNyx'
                      '29COkAm5ld2d1aWRzdHJpbmcAJQAAADdkOGQwYjY5LWZmMTktNDA2My'
                      '1hNDIzLWY0NzYyYzM3OWYxYwAAbAAAAAdfaWQAUCBQxkVm+XdxJ9tSB'
                      'W5ld2d1aWQAEAAAAAMtSv/Et1cAQUFHUYevqxaLAm5ld2d1aWRzdHJp'
                      'bmcAJQAAADQxMDA1N2I3LWM0ZmYtNGEyZC04YjE2LWFiYWY4NzUxNDc'
                      '0MQAA')

        data = base64.b64decode(from_java)

        # Test decoding
        docs = bson.decode_all(data, SON, False, OLD_UUID_SUBTYPE)
        for d in docs:
            self.assertNotEqual(d['newguid'], uuid.UUID(d['newguidstring']))

        docs = bson.decode_all(data, SON, False, UUID_SUBTYPE)
        for d in docs:
            self.assertNotEqual(d['newguid'], uuid.UUID(d['newguidstring']))

        docs = bson.decode_all(data, SON, False, CSHARP_LEGACY)
        for d in docs:
            self.assertNotEqual(d['newguid'], uuid.UUID(d['newguidstring']))

        docs = bson.decode_all(data, SON, False, JAVA_LEGACY)
        for d in docs:
            self.assertEqual(d['newguid'], uuid.UUID(d['newguidstring']))

        # Test encoding
        encoded = b('').join([bson.BSON.encode(doc,
                                               uuid_subtype=OLD_UUID_SUBTYPE)
                              for doc in docs])
        self.assertNotEqual(data, encoded)

        encoded = b('').join([bson.BSON.encode(doc, uuid_subtype=UUID_SUBTYPE)
                              for doc in docs])
        self.assertNotEqual(data, encoded)

        encoded = b('').join([bson.BSON.encode(doc, uuid_subtype=CSHARP_LEGACY)
                              for doc in docs])
        self.assertNotEqual(data, encoded)

        encoded = b('').join([bson.BSON.encode(doc, uuid_subtype=JAVA_LEGACY)
                              for doc in docs])
        self.assertEqual(data, encoded)

        # Test insert and find
        conn = get_connection()
        coll = conn.pymongo_test.java_uuid
        coll.uuid_subtype = JAVA_LEGACY

        coll.insert(docs, safe=True)
        self.assertEqual(5, coll.count())
        for d in coll.find():
            self.assertEqual(d['newguid'], uuid.UUID(d['newguidstring']))

        coll.uuid_subtype = OLD_UUID_SUBTYPE
        for d in coll.find():
            self.assertNotEqual(d['newguid'], d['newguidstring'])
        conn.pymongo_test.drop_collection('java_uuid')
    def test_max_pool_size_with_end_request_only(self):
        # Call end_request() but not start_request()
        c = get_connection(max_pool_size=4, auto_start_request=False)
        self._test_max_pool_size(c, 0, 1)
    def get_connection(self, *args, **kwargs):
        opts = kwargs.copy()
        opts['use_greenlets'] = self.use_greenlets
        return get_connection(*args, **opts)
    def _test_pool(self, use_request):
        """
        Test that the connection pool prevents both threads and greenlets from
        using a socket at the same time.

        Sequence:
        gr0: start a slow find()
        gr1: start a fast find()
        gr1: get results
        gr0: get results
        """
        cx = get_connection(
            use_greenlets=self.use_greenlets,
            auto_start_request=False
        )

        db = cx.pymongo_test
        if not version.at_least(db.connection, (1, 7, 2)):
            raise SkipTest("Need at least MongoDB version 1.7.2 to use"
                           " db.eval(nolock=True)")
        
        db.test.remove(safe=True)
        db.test.insert({'_id': 1}, safe=True)

        history = []

        def find_fast():
            if use_request:
                cx.start_request()

            history.append('find_fast start')

            # With greenlets and the old connection._Pool, this would throw
            # AssertionError: "This event is already used by another
            # greenlet"
            self.assertEqual({'_id': 1}, db.test.find_one())
            history.append('find_fast done')

            if use_request:
                cx.end_request()

        def find_slow():
            if use_request:
                cx.start_request()

            history.append('find_slow start')

            # Javascript function that pauses 5 sec. 'nolock' allows find_fast
            # to start and finish while we're waiting for this.
            fn = delay(5)
            self.assertEqual(
                {'ok': 1.0, 'retval': True},
                db.command('eval', fn, nolock=True))

            history.append('find_slow done')

            if use_request:
                cx.end_request()

        if self.use_greenlets:
            gr0, gr1 = Greenlet(find_slow), Greenlet(find_fast)
            gr0.start()
            gr1.start_later(.1)
        else:
            gr0 = threading.Thread(target=find_slow)
            gr0.setDaemon(True)
            gr1 = threading.Thread(target=find_fast)
            gr1.setDaemon(True)

            gr0.start()
            time.sleep(.1)
            gr1.start()

        gr0.join()
        gr1.join()

        self.assertEqual([
            'find_slow start',
            'find_fast start',
            'find_fast done',
            'find_slow done',
        ], history)
Example #34
    def setUp(self):
        self.db = Database(get_connection(), "pymongo_test")
    def test_mongos_connection(self):
        c = get_connection()
        is_mongos = utils.is_mongos(c)

        # Test default mode, PRIMARY
        cursor = c.pymongo_test.test.find()
        if is_mongos:
            # We only set $readPreference if it's something other than
            # PRIMARY to avoid problems with mongos versions that don't
            # support read preferences.
            self.assertEqual(
                None,
                cursor._Cursor__query_spec().get('$readPreference'))
        else:
            self.assertFalse('$readPreference' in cursor._Cursor__query_spec())

        # Copy these constants for brevity
        PRIMARY_PREFERRED = ReadPreference.PRIMARY_PREFERRED
        SECONDARY = ReadPreference.SECONDARY
        SECONDARY_PREFERRED = ReadPreference.SECONDARY_PREFERRED
        NEAREST = ReadPreference.NEAREST
        SLAVE_OKAY = _QUERY_OPTIONS['slave_okay']

        # Test non-PRIMARY modes which can be combined with tags
        for kwarg, value, mongos_mode in (
            ('read_preference', PRIMARY_PREFERRED, 'primaryPreferred'),
            ('read_preference', SECONDARY, 'secondary'),
            ('read_preference', SECONDARY_PREFERRED, 'secondaryPreferred'),
            ('read_preference', NEAREST, 'nearest'),
            ('slave_okay', True, 'secondaryPreferred'),
            ('slave_okay', False, 'primary'),
        ):
            for tag_sets in (None, [{}]):
                # Create a connection e.g. with read_preference=NEAREST or
                # slave_okay=True
                c = get_connection(tag_sets=tag_sets, **{kwarg: value})

                self.assertEqual(is_mongos, c.is_mongos)
                cursor = c.pymongo_test.test.find()
                if is_mongos:
                    # We don't set $readPreference for SECONDARY_PREFERRED
                    # unless tags are in use. slaveOkay has the same effect.
                    if mongos_mode == 'secondaryPreferred':
                        self.assertEqual(
                            None,
                            cursor._Cursor__query_spec().get(
                                '$readPreference'))

                        self.assertTrue(cursor._Cursor__query_options()
                                        & SLAVE_OKAY)

                    # Don't send $readPreference for PRIMARY either
                    elif mongos_mode == 'primary':
                        self.assertEqual(
                            None,
                            cursor._Cursor__query_spec().get(
                                '$readPreference'))

                        self.assertFalse(cursor._Cursor__query_options()
                                         & SLAVE_OKAY)
                    else:
                        self.assertEqual({'mode': mongos_mode},
                                         cursor._Cursor__query_spec().get(
                                             '$readPreference'))

                        self.assertTrue(cursor._Cursor__query_options()
                                        & SLAVE_OKAY)
                else:
                    self.assertFalse(
                        '$readPreference' in cursor._Cursor__query_spec())

            for tag_sets in (
                [{'dc': 'la'}],
                [{'dc': 'la'}, {'dc': 'sf'}],
                [{'dc': 'la'}, {'dc': 'sf'}, {}],
            ):
                if kwarg == 'slave_okay':
                    # Can't use tags with slave_okay True or False, need a
                    # real read preference
                    self.assertRaises(ConfigurationError,
                                      get_connection,
                                      tag_sets=tag_sets,
                                      **{kwarg: value})

                    continue

                c = get_connection(tag_sets=tag_sets, **{kwarg: value})

                self.assertEqual(is_mongos, c.is_mongos)
                cursor = c.pymongo_test.test.find()
                if is_mongos:
                    self.assertEqual(
                        {'mode': mongos_mode, 'tags': tag_sets},
                        cursor._Cursor__query_spec().get('$readPreference'))
                else:
                    self.assertFalse(
                        '$readPreference' in cursor._Cursor__query_spec())
    def test_mongos_connection(self):
        c = get_connection()
        is_mongos = utils.is_mongos(c)

        # Test default mode, PRIMARY
        cursor = c.pymongo_test.test.find()
        if is_mongos:
            self.assertEqual(
                {'mode': 'primary'},
                cursor._Cursor__query_spec().get('$readPreference'))
        else:
            self.assertFalse('$readPreference' in cursor._Cursor__query_spec())

        # Test non-PRIMARY modes which can be combined with tags
        for mode, mongos_mode in (
            (ReadPreference.PRIMARY_PREFERRED, 'primaryPreferred'),
            (ReadPreference.SECONDARY, 'secondary'),
            (ReadPreference.SECONDARY_PREFERRED, 'secondaryPreferred'),
            (ReadPreference.NEAREST, 'nearest'),
        ):
            for tag_sets in (None, [{}]):
                c = get_connection(read_preference=mode, tag_sets=tag_sets)

                self.assertEqual(is_mongos, c.is_mongos)
                cursor = c.pymongo_test.test.find()
                if is_mongos:
                    self.assertEqual(
                        {'mode': mongos_mode},
                        cursor._Cursor__query_spec().get('$readPreference'))
                else:
                    self.assertFalse(
                        '$readPreference' in cursor._Cursor__query_spec())

            for tag_sets in (
                [{'dc': 'la'}],
                [{'dc': 'la'}, {'dc': 'sf'}],
                [{'dc': 'la'}, {'dc': 'sf'}, {}],
            ):
                c = get_connection(read_preference=mode, tag_sets=tag_sets)

                self.assertEqual(is_mongos, c.is_mongos)
                cursor = c.pymongo_test.test.find()
                if is_mongos:
                    self.assertEqual(
                        {'mode': mongos_mode, 'tags': tag_sets},
                        cursor._Cursor__query_spec().get('$readPreference'))
                else:
                    self.assertFalse(
                        '$readPreference' in cursor._Cursor__query_spec())
    def _test_pool(self, use_request):
        """
        Test that the connection pool prevents both threads and greenlets from
        using a socket at the same time.

        Sequence:
        gr0: start a slow find()
        gr1: start a fast find()
        gr1: get results
        gr0: get results
        """
        results = {
            'find_fast_result': None,
            'find_slow_result': None,
        }

        cx = get_connection(
            use_greenlets=self.use_greenlets,
            auto_start_request=False
        )

        db = cx.pymongo_test
        db.test.remove(safe=True)
        db.test.insert({'_id': 1}, safe=True)

        history = []

        def find_fast():
            if use_request:
                cx.start_request()

            history.append('find_fast start')

            # With the old connection._Pool, this would throw
            # AssertionError: "This event is already used by another
            # greenlet"
            results['find_fast_result'] = list(db.test.find())
            history.append('find_fast done')

            if use_request:
                cx.end_request()

        def find_slow():
            if use_request:
                cx.start_request()

            history.append('find_slow start')

            # Javascript function that pauses
            where = delay(2)
            results['find_slow_result'] = list(db.test.find(
                    {'$where': where}
            ))

            history.append('find_slow done')

            if use_request:
                cx.end_request()

        if self.use_greenlets:
            gr0, gr1 = Greenlet(find_slow), Greenlet(find_fast)
            gr0.start()
            gr1.start_later(.1)
        else:
            gr0 = threading.Thread(target=find_slow)
            gr1 = threading.Thread(target=find_fast)
            gr0.start()
            time.sleep(.1)
            gr1.start()

        gr0.join()
        gr1.join()

        self.assertEqual([{'_id': 1}], results['find_slow_result'])

        # Fails if there's a bug in socket allocation, since find_fast won't
        # complete
        self.assertEqual([{'_id': 1}], results['find_fast_result'])

        self.assertEqual([
            'find_slow start',
            'find_fast start',
            'find_fast done',
            'find_slow done',
        ], history)
Example #38
    def test_legacy_csharp_uuid(self):
        if not should_test_uuid:
            raise SkipTest("No uuid module")

        # Generated by the .net driver
        from_csharp = b('ZAAAABBfaWQAAAAAAAVuZXdndWlkABAAAAAD+MkoCd/Jy0iYJ7Vhl'
                        'iF3BAJuZXdndWlkc3RyaW5nACUAAAAwOTI4YzlmOC1jOWRmLTQ4Y2'
                        'ItOTgyNy1iNTYxOTYyMTc3MDQAAGQAAAAQX2lkAAEAAAAFbmV3Z3V'
                        'pZAAQAAAAA9MD0oXQe6VOp7mK4jkttWUCbmV3Z3VpZHN0cmluZwAl'
                        'AAAAODVkMjAzZDMtN2JkMC00ZWE1LWE3YjktOGFlMjM5MmRiNTY1A'
                        'ABkAAAAEF9pZAACAAAABW5ld2d1aWQAEAAAAAPRmIO2auc/Tprq1Z'
                        'oQ1oNYAm5ld2d1aWRzdHJpbmcAJQAAAGI2ODM5OGQxLWU3NmEtNGU'
                        'zZi05YWVhLWQ1OWExMGQ2ODM1OAAAZAAAABBfaWQAAwAAAAVuZXdn'
                        'dWlkABAAAAADISpriopuTEaXIa7arYOCFAJuZXdndWlkc3RyaW5nA'
                        'CUAAAA4YTZiMmEyMS02ZThhLTQ2NGMtOTcyMS1hZWRhYWQ4MzgyMT'
                        'QAAGQAAAAQX2lkAAQAAAAFbmV3Z3VpZAAQAAAAA98eg0CFpGlPihP'
                        'MwOmYGOMCbmV3Z3VpZHN0cmluZwAlAAAANDA4MzFlZGYtYTQ4NS00'
                        'ZjY5LThhMTMtY2NjMGU5OTgxOGUzAAA=')

        data = base64.b64decode(from_csharp)

        # Test decoding
        docs = bson.decode_all(data, SON, False, OLD_UUID_SUBTYPE)
        for d in docs:
            self.assertNotEqual(d['newguid'], uuid.UUID(d['newguidstring']))

        docs = bson.decode_all(data, SON, False, UUID_SUBTYPE)
        for d in docs:
            self.assertNotEqual(d['newguid'], uuid.UUID(d['newguidstring']))

        docs = bson.decode_all(data, SON, False, JAVA_LEGACY)
        for d in docs:
            self.assertNotEqual(d['newguid'], uuid.UUID(d['newguidstring']))

        docs = bson.decode_all(data, SON, False, CSHARP_LEGACY)
        for d in docs:
            self.assertEqual(d['newguid'], uuid.UUID(d['newguidstring']))

        # Test encoding
        encoded = b('').join([
            bson.BSON.encode(doc, uuid_subtype=OLD_UUID_SUBTYPE)
            for doc in docs
        ])
        self.assertNotEqual(data, encoded)

        encoded = b('').join(
            [bson.BSON.encode(doc, uuid_subtype=UUID_SUBTYPE) for doc in docs])
        self.assertNotEqual(data, encoded)

        encoded = b('').join(
            [bson.BSON.encode(doc, uuid_subtype=JAVA_LEGACY) for doc in docs])
        self.assertNotEqual(data, encoded)

        encoded = b('').join([
            bson.BSON.encode(doc, uuid_subtype=CSHARP_LEGACY) for doc in docs
        ])
        self.assertEqual(data, encoded)

        # Test insert and find
        conn = get_connection()
        conn.pymongo_test.drop_collection('csharp_uuid')
        coll = conn.pymongo_test.csharp_uuid
        coll.uuid_subtype = CSHARP_LEGACY

        coll.insert(docs, safe=True)
        self.assertEqual(5, coll.count())
        for d in coll.find():
            self.assertEqual(d['newguid'], uuid.UUID(d['newguidstring']))

        coll.uuid_subtype = OLD_UUID_SUBTYPE
        for d in coll.find():
            self.assertNotEqual(d['newguid'], d['newguidstring'])
        conn.pymongo_test.drop_collection('csharp_uuid')
    def _test_pool(self, use_greenlets, use_request):
        """
        Test that the connection pool prevents both threads and greenlets from
        using a socket at the same time.

        Sequence:
        gr0: start a slow find()
        gr1: start a fast find()
        gr1: get results
        gr0: get results
        """
        if use_greenlets:
            try:
                from gevent import monkey, Greenlet
            except ImportError:
                raise SkipTest('gevent not installed')

            # Note we don't do patch_thread() or patch_all() - we're
            # testing here that patch_thread() is unnecessary for
            # the connection pool to work properly.
            monkey.patch_socket()

        try:
            results = {
                'find_fast_result': None,
                'find_slow_result': None,
            }

            cx = get_connection(
                use_greenlets=use_greenlets,
                auto_start_request=False
            )

            db = cx.pymongo_test
            db.test.remove(safe=True)
            db.test.insert({'_id': 1})

            history = []

            def find_fast():
                if use_request:
                    cx.start_request()

                history.append('find_fast start')

                # With the old connection._Pool, this would throw
                # AssertionError: "This event is already used by another
                # greenlet"
                results['find_fast_result'] = list(db.test.find())
                history.append('find_fast done')

                if use_request:
                    cx.end_request()

            def find_slow():
                if use_request:
                    cx.start_request()

                history.append('find_slow start')

                # Javascript function that pauses
                where = delay(2)
                results['find_slow_result'] = list(db.test.find(
                    {'$where': where}
                ))

                history.append('find_slow done')

                if use_request:
                    cx.end_request()

            if use_greenlets:
                gr0, gr1 = Greenlet(find_slow), Greenlet(find_fast)
                gr0.start()
                gr1.start_later(.1)
            else:
                gr0 = threading.Thread(target=find_slow)
                gr1 = threading.Thread(target=find_fast)
                gr0.start()
                time.sleep(.1)
                gr1.start()

            gr0.join()
            gr1.join()

            self.assertEqual([{'_id': 1}], results['find_slow_result'])

            # Fails if there's a bug in socket allocation, since find_fast
            # won't complete
            self.assertEqual([{'_id': 1}], results['find_fast_result'])

            self.assertEqual([
                'find_slow start',
                'find_fast start',
                'find_fast done',
                'find_slow done',
            ], history)

        finally:
            # Undo Gevent patching
            reload(socket)
    def setUp(self):
        if not json_util.json_lib:
            raise SkipTest("No json or simplejson module")

        self.db = get_connection().pymongo_test
    def setUp(self):
        self.connection = get_connection()
Example #42
    def test_legacy_java_uuid(self):
        if not should_test_uuid:
            raise SkipTest("No uuid module")

        # Generated by the Java driver
        from_java = b('bAAAAAdfaWQAUCBQxkVm+XdxJ9tOBW5ld2d1aWQAEAAAAAMIQkfACFu'
                      'Z/0RustLOU/G6Am5ld2d1aWRzdHJpbmcAJQAAAGZmOTk1YjA4LWMwND'
                      'ctNDIwOC1iYWYxLTUzY2VkMmIyNmU0NAAAbAAAAAdfaWQAUCBQxkVm+'
                      'XdxJ9tPBW5ld2d1aWQAEAAAAANgS/xhRXXv8kfIec+dYdyCAm5ld2d1'
                      'aWRzdHJpbmcAJQAAAGYyZWY3NTQ1LTYxZmMtNGI2MC04MmRjLTYxOWR'
                      'jZjc5Yzg0NwAAbAAAAAdfaWQAUCBQxkVm+XdxJ9tQBW5ld2d1aWQAEA'
                      'AAAAPqREIbhZPUJOSdHCJIgaqNAm5ld2d1aWRzdHJpbmcAJQAAADI0Z'
                      'DQ5Mzg1LTFiNDItNDRlYS04ZGFhLTgxNDgyMjFjOWRlNAAAbAAAAAdf'
                      'aWQAUCBQxkVm+XdxJ9tRBW5ld2d1aWQAEAAAAANjQBn/aQuNfRyfNyx'
                      '29COkAm5ld2d1aWRzdHJpbmcAJQAAADdkOGQwYjY5LWZmMTktNDA2My'
                      '1hNDIzLWY0NzYyYzM3OWYxYwAAbAAAAAdfaWQAUCBQxkVm+XdxJ9tSB'
                      'W5ld2d1aWQAEAAAAAMtSv/Et1cAQUFHUYevqxaLAm5ld2d1aWRzdHJp'
                      'bmcAJQAAADQxMDA1N2I3LWM0ZmYtNGEyZC04YjE2LWFiYWY4NzUxNDc'
                      '0MQAA')

        data = base64.b64decode(from_java)

        # Test decoding
        docs = bson.decode_all(data, SON, False, OLD_UUID_SUBTYPE)
        for d in docs:
            self.assertNotEqual(d['newguid'], uuid.UUID(d['newguidstring']))

        docs = bson.decode_all(data, SON, False, UUID_SUBTYPE)
        for d in docs:
            self.assertNotEqual(d['newguid'], uuid.UUID(d['newguidstring']))

        docs = bson.decode_all(data, SON, False, CSHARP_LEGACY)
        for d in docs:
            self.assertNotEqual(d['newguid'], uuid.UUID(d['newguidstring']))

        docs = bson.decode_all(data, SON, False, JAVA_LEGACY)
        for d in docs:
            self.assertEqual(d['newguid'], uuid.UUID(d['newguidstring']))

        # Test encoding
        encoded = b('').join([
            bson.BSON.encode(doc, uuid_subtype=OLD_UUID_SUBTYPE)
            for doc in docs
        ])
        self.assertNotEqual(data, encoded)

        encoded = b('').join(
            [bson.BSON.encode(doc, uuid_subtype=UUID_SUBTYPE) for doc in docs])
        self.assertNotEqual(data, encoded)

        encoded = b('').join([
            bson.BSON.encode(doc, uuid_subtype=CSHARP_LEGACY) for doc in docs
        ])
        self.assertNotEqual(data, encoded)

        encoded = b('').join(
            [bson.BSON.encode(doc, uuid_subtype=JAVA_LEGACY) for doc in docs])
        self.assertEqual(data, encoded)

        # Test insert and find
        conn = get_connection()
        conn.pymongo_test.drop_collection('java_uuid')
        coll = conn.pymongo_test.java_uuid
        coll.uuid_subtype = JAVA_LEGACY

        coll.insert(docs, safe=True)
        self.assertEqual(5, coll.count())
        for d in coll.find():
            self.assertEqual(d['newguid'], uuid.UUID(d['newguidstring']))

        coll.uuid_subtype = OLD_UUID_SUBTYPE
        for d in coll.find():
            self.assertNotEqual(d['newguid'], d['newguidstring'])
        conn.pymongo_test.drop_collection('java_uuid')
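
The encode/decode assertions above come down to one fact: the same UUID maps to different BSON bytes under different uuid_subtype settings, so writer and reader must agree on the subtype. A minimal standalone sketch of that round trip, assuming the PyMongo 2.x constants are importable from bson.binary (this sketch is not part of the test suite):

import uuid

import bson
from bson.binary import JAVA_LEGACY, OLD_UUID_SUBTYPE

doc = {'newguid': uuid.uuid4()}

# The same document encodes to different bytes under the two legacy
# subtypes, which is why mixing them silently corrupts UUIDs.
java_bytes = bson.BSON.encode(doc, uuid_subtype=JAVA_LEGACY)
old_bytes = bson.BSON.encode(doc, uuid_subtype=OLD_UUID_SUBTYPE)
assert java_bytes != old_bytes

# Decoding with the matching subtype recovers the original UUID.
decoded = bson.decode_all(java_bytes, dict, False, JAVA_LEGACY)[0]
assert decoded['newguid'] == doc['newguid']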
    def get_connection(self, *args, **kwargs):
        opts = kwargs.copy()
        opts['use_greenlets'] = self.use_greenlets
        return get_connection(*args, **opts)
    def test_mongos_connection(self):
        c = get_connection()
        is_mongos = utils.is_mongos(c)

        # Test default mode, PRIMARY
        cursor = c.pymongo_test.test.find()
        if is_mongos:
            # We only set $readPreference if it's something other than
            # PRIMARY to avoid problems with mongos versions that don't
            # support read preferences.
            self.assertEqual(
                None,
                cursor._Cursor__query_spec().get('$readPreference')
            )
        else:
            self.assertFalse(
                '$readPreference' in cursor._Cursor__query_spec())

        # Copy these constants for brevity
        PRIMARY_PREFERRED = ReadPreference.PRIMARY_PREFERRED
        SECONDARY = ReadPreference.SECONDARY
        SECONDARY_PREFERRED = ReadPreference.SECONDARY_PREFERRED
        NEAREST = ReadPreference.NEAREST
        SLAVE_OKAY = _QUERY_OPTIONS['slave_okay']

        # Test non-PRIMARY modes which can be combined with tags
        for kwarg, value, mongos_mode in (
            ('read_preference', PRIMARY_PREFERRED, 'primaryPreferred'),
            ('read_preference', SECONDARY, 'secondary'),
            ('read_preference', SECONDARY_PREFERRED, 'secondaryPreferred'),
            ('read_preference', NEAREST, 'nearest'),
            ('slave_okay', True, 'secondaryPreferred'),
            ('slave_okay', False, 'primary')
        ):
            for tag_sets in (
                None, [{}]
            ):
                # Create a connection e.g. with read_preference=NEAREST or
                # slave_okay=True
                c = get_connection(tag_sets=tag_sets, **{kwarg: value})

                self.assertEqual(is_mongos, c.is_mongos)
                cursor = c.pymongo_test.test.find()
                if is_mongos:
                    # We don't set $readPreference for SECONDARY_PREFERRED
                    # unless tags are in use. slaveOkay has the same effect.
                    if mongos_mode == 'secondaryPreferred':
                        self.assertEqual(
                            None,
                            cursor._Cursor__query_spec().get('$readPreference'))

                        self.assertTrue(
                            cursor._Cursor__query_options() & SLAVE_OKAY)

                    # Don't send $readPreference for PRIMARY either
                    elif mongos_mode == 'primary':
                        self.assertEqual(
                            None,
                            cursor._Cursor__query_spec().get('$readPreference'))

                        self.assertFalse(
                            cursor._Cursor__query_options() & SLAVE_OKAY)
                    else:
                        self.assertEqual(
                            {'mode': mongos_mode},
                            cursor._Cursor__query_spec().get('$readPreference'))

                        self.assertTrue(
                            cursor._Cursor__query_options() & SLAVE_OKAY)
                else:
                    self.assertFalse(
                        '$readPreference' in cursor._Cursor__query_spec())

            for tag_sets in (
                [{'dc': 'la'}],
                [{'dc': 'la'}, {'dc': 'sf'}],
                [{'dc': 'la'}, {'dc': 'sf'}, {}],
            ):
                if kwarg == 'slave_okay':
                    # Can't use tags with slave_okay True or False, need a
                    # real read preference
                    self.assertRaises(
                        ConfigurationError,
                        get_connection, tag_sets=tag_sets, **{kwarg: value})

                    continue

                c = get_connection(tag_sets=tag_sets, **{kwarg: value})

                self.assertEqual(is_mongos, c.is_mongos)
                cursor = c.pymongo_test.test.find()
                if is_mongos:
                    self.assertEqual(
                        {'mode': mongos_mode, 'tags': tag_sets},
                        cursor._Cursor__query_spec().get('$readPreference'))
                else:
                    self.assertFalse(
                        '$readPreference' in cursor._Cursor__query_spec())
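
The branching above encodes a single rule: mongos only needs an explicit $readPreference for modes other than PRIMARY, and for SECONDARY_PREFERRED only when real (non-empty) tag sets are in play; the remaining cases are covered by the slaveOk wire flag or by nothing at all. A hypothetical helper, not part of PyMongo, that mirrors what the test expects to find in the query spec:

def expected_mongos_read_pref(mongos_mode, tag_sets=None):
    """Return the $readPreference doc the test expects, or None."""
    # An empty {} tag set is a catch-all and doesn't count as "real" tags.
    real_tags = [t for t in (tag_sets or []) if t]
    if mongos_mode == 'primary':
        return None
    if mongos_mode == 'secondaryPreferred' and not real_tags:
        return None  # the slaveOk flag alone covers this case
    doc = {'mode': mongos_mode}
    if real_tags:
        doc['tags'] = tag_sets
    return doc

assert expected_mongos_read_pref('primary') is None
assert expected_mongos_read_pref('secondaryPreferred') is None
assert expected_mongos_read_pref('secondary') == {'mode': 'secondary'}
assert expected_mongos_read_pref('nearest', [{'dc': 'la'}, {}]) == \
    {'mode': 'nearest', 'tags': [{'dc': 'la'}, {}]}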
    def test_max_pool_size_with_leaked_request(self):
        # Call start_request() but not end_request() -- when threads die, they
        # should return their request sockets to the pool.
        c = get_connection(max_pool_size=4, auto_start_request=False)
        self._test_max_pool_size(c, 1, 0)
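
The comment describes what _test_max_pool_size (not shown here) verifies. A minimal sketch of the leaked-request scenario itself, reusing the suite's get_connection helper:

import threading

c = get_connection(max_pool_size=4, auto_start_request=False)

def leak_request():
    c.start_request()
    c.pymongo_test.test.find_one()
    # Deliberately no end_request(): when this thread dies, the pool is
    # expected to reclaim the thread's request socket.

t = threading.Thread(target=leak_request)
t.start()
t.join()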
    def setUp(self):
        self.db = get_connection().pymongo_test
        self.db.fs.files.remove({})
        self.db.fs.chunks.remove({})
Example #47
    def setUp(self):
        self.db = get_connection().pymongo_test
        self.db.fs.files.remove({})
        self.db.fs.chunks.remove({})
    def _test_pool(self, use_request):
        """
        Test that the connection pool prevents both threads and greenlets from
        using a socket at the same time.

        Sequence:
        gr0: start a slow find()
        gr1: start a fast find()
        gr1: get results
        gr0: get results
        """
        cx = get_connection(
            use_greenlets=self.use_greenlets,
            auto_start_request=False
        )

        db = cx.pymongo_test
        db.test.remove(safe=True)
        db.test.insert({'_id': 1}, safe=True)

        history = []

        def find_fast():
            if use_request:
                cx.start_request()

            history.append('find_fast start')

            # With greenlets and the old connection._Pool, this would throw
            # AssertionError: "This event is already used by another
            # greenlet"
            self.assertEqual({'_id': 1}, db.test.find_one())
            history.append('find_fast done')

            if use_request:
                cx.end_request()

        def find_slow():
            if use_request:
                cx.start_request()

            history.append('find_slow start')

            # Javascript function that pauses N seconds per document
            fn = delay(10)
            if (is_mongos(db.connection) or not
                version.at_least(db.connection, (1, 7, 2))):
                # mongos doesn't support eval, and this server version may
                # not support eval's nolock option, so fall back to $where,
                # which is less reliable in this context.
                self.assertEqual(1, db.test.find({"$where": fn}).count())
            else:
                # 'nolock' allows find_fast to start and finish while we're
                # waiting for this to complete.
                self.assertEqual({'ok': 1.0, 'retval': True},
                                 db.command('eval', fn, nolock=True))

            history.append('find_slow done')

            if use_request:
                cx.end_request()

        if self.use_greenlets:
            gr0, gr1 = Greenlet(find_slow), Greenlet(find_fast)
            gr0.start()
            gr1.start_later(.1)
        else:
            gr0 = threading.Thread(target=find_slow)
            gr0.setDaemon(True)
            gr1 = threading.Thread(target=find_fast)
            gr1.setDaemon(True)

            gr0.start()
            time.sleep(.1)
            gr1.start()

        gr0.join()
        gr1.join()

        self.assertEqual([
            'find_slow start',
            'find_fast start',
            'find_fast done',
            'find_slow done',
        ], history)
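
The history assertion is what actually proves the concurrency claim: if the fast find had been blocked on the slow find's socket, 'find_fast done' could not appear before 'find_slow done'. The same ordering check in isolation, with the database calls replaced by plain sleeps (purely illustrative, no MongoDB involved):

import threading
import time

history = []

def slow():
    history.append('slow start')
    time.sleep(1)          # stands in for the long-running find
    history.append('slow done')

def fast():
    history.append('fast start')
    history.append('fast done')

t0 = threading.Thread(target=slow)
t1 = threading.Thread(target=fast)
t0.start()
time.sleep(.1)             # let the slow worker start first
t1.start()
t0.join()
t1.join()

assert history == ['slow start', 'fast start', 'fast done', 'slow done']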
    def setUp(self):
        self.connection = get_connection()
Example #50
    def setUp(self):
        self.connection = get_connection()
        self.db = self.connection.pymongo_test