Example No. 1
    def test_Mixed_Unordered_Unack(self):
        w0 = self.coll.with_options(write_concern=WriteConcern(w=0))
        yield w0.bulk_write([
            InsertOne({"_id": 2}),
            UpdateOne({"_id": 2}, {"$set": {
                'x': 1
            }}),
            InsertOne({"_id": 1}),
            UpdateOne({"_id": 1}, {"$set": {
                'x': 2
            }})
        ],
                            ordered=False)

        docs = yield self.coll.find()
        self.assertEqual(len(docs), 2)
        self.assertIn({"_id": 1, 'x': 2}, docs)
        self.assertIn({"_id": 2, 'x': 1}, docs)
Example No. 2
def add_user(name, email, hashedpw):
    """
    Given a name, email, and password, inserts a document with those credentials
    into the `users` collection.
    """
    """
    Ticket: Durable Writes

    Please increase the durability of this method by using a non-default write
    concern with ``insert_one``.
    """

    try:
        users = db.users.with_options(write_concern=WriteConcern(w=1))
        users.insert_one({"name": name, "email": email, "password": hashedpw})
        return {"success": True}
    except DuplicateKeyError:
        return {"error": "A user with the given email already exists."}
Example No. 3
 def test_unacknowledged_write(self):
     unacknowledged = WriteConcern(w=0)
     collection = self.db.get_collection('test',
                                         write_concern=unacknowledged)
     with self.assertRaises(ConfigurationError):
         collection.update_one({'hello': 'world'},
                               {'$set': {
                                   'hello': 'moon'
                               }},
                               collation=self.collation)
     bulk = collection.initialize_ordered_bulk_op()
     bulk.find({
         'hello': 'world'
     }, collation=self.collation).update_one({'$set': {
         'hello': 'moon'
     }})
     with self.assertRaises(ConfigurationError):
         bulk.execute()
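For contrast, the same collation is accepted once the write is acknowledged; a minimal sketch assuming MongoDB 3.4+ and an arbitrary local collection:

from pymongo import MongoClient
from pymongo.collation import Collation

coll = MongoClient().test.get_collection('test')  # default, acknowledged write concern
coll.update_one(
    {'hello': 'world'},
    {'$set': {'hello': 'moon'}},
    collation=Collation(locale='en_US', strength=2),
)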
Example No. 4
    async def insert_many(
        self,
        collection: str,
        documents: Iterable[_DocumentIn],
        ordered: bool = True,
        bypass_document_validation: bool = False,
        session: Optional[AsyncIOMotorClientSession] = None
    ) -> InsertManyResult:
        """
        Insert an iterable of documents.

        :param collection: collection name.
        :param documents: an iterable of documents to insert.
        :param ordered: if True (the default), documents are inserted on the server
                        serially, in the order provided, and an error aborts all
                        remaining inserts. If False, documents may be inserted on the
                        server in arbitrary order, possibly in parallel, and all
                        document inserts are attempted.
        :param bypass_document_validation: if True, allows the write to opt out of document-level validation. Defaults to: False.
        :param session: an optional session for the asyncio Motor client. Defaults to: None.
        :return: an instance of InsertManyResult.
        """
        insert_many_args = SimpleNamespace(
            documents=documents,
            ordered=ordered,
            bypass_document_validation=bypass_document_validation
        )

        use_consistency               = deepcopy(self._use_consistency)
        use_consistency.write_concern = WriteConcern(w='majority')
        use_consistency.read_concern  = ReadConcern(level='majority')

        if Ver(self.version) >= Ver(PY_MONGODB_VERSION_ENOUGH):
            if session and not isinstance(session, AsyncIOMotorClientSession):
                async with await self.client.start_session() as session_inner:
                    async with session_inner.start_transaction(**vars(use_consistency)):
                        insert_many_args.session = session_inner
                        return await self.db[collection].insert_many(**vars(insert_many_args))
            else:
                async with session.start_transaction(**vars(use_consistency)):
                    insert_many_args.session = session
                    return await self.db[collection].insert_many(**vars(insert_many_args))

        return await self.db[collection].insert_many(**vars(insert_many_args))
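Stripped of the wrapper plumbing, the write path above is plain Motor; a sketch assuming a replica set (transactions require one), a collection that already exists, and made-up database and collection names:

import asyncio

from motor.motor_asyncio import AsyncIOMotorClient
from pymongo import WriteConcern
from pymongo.read_concern import ReadConcern


async def insert_docs(docs):
    client = AsyncIOMotorClient()
    coll = client.appdb.events
    async with await client.start_session() as session:
        async with session.start_transaction(
                write_concern=WriteConcern(w="majority"),
                read_concern=ReadConcern(level="majority")):
            # The transaction commits when the inner block exits cleanly.
            return await coll.insert_many(docs, ordered=True, session=session)


asyncio.run(insert_docs([{"kind": "signup"}, {"kind": "login"}]))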
Example No. 5
 def wrapper(session, *args, **kwargs):
     while True:
         try:
             with session.start_transaction(
                 read_concern=ReadConcern(level="snapshot"),
                 write_concern=WriteConcern(w="majority"),
                 read_preference=ReadPreference.PRIMARY
             ):
                 # The transaction committed successfully and the
                 # function returned its result
                 return txn_func(session, *args, **kwargs)
         except (ConnectionFailure, OperationFailure) as ex:
             if ex.has_error_label("TransientTransactionError"):
                 print(
                     "INFO: TransientTransactionError, "
                     "retrying the transaction ..."
                 )
                 continue
             raise ErrorDataDB("Something went wrong while attempting the transaction")
Example No. 6
    def parse_options(opts):
        if 'readPreference' in opts:
            opts['read_preference'] = parse_read_preference(
                opts.pop('readPreference'))

        if 'writeConcern' in opts:
            opts['write_concern'] = WriteConcern(
                **dict(opts.pop('writeConcern')))

        if 'readConcern' in opts:
            opts['read_concern'] = ReadConcern(**dict(opts.pop('readConcern')))

        if 'maxTimeMS' in opts:
            opts['max_time_ms'] = opts.pop('maxTimeMS')

        if 'maxCommitTimeMS' in opts:
            opts['max_commit_time_ms'] = opts.pop('maxCommitTimeMS')

        return dict(opts)
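Illustrative input and output for parse_options (the option values are invented):

opts = parse_options({
    'writeConcern': {'w': 'majority', 'wtimeout': 5000},
    'readConcern': {'level': 'local'},
    'maxTimeMS': 1000,
})
# opts now carries 'write_concern', 'read_concern' and 'max_time_ms' keys
# holding WriteConcern / ReadConcern instances and a plain int, ready to be
# passed to a driver call as keyword arguments.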
Example No. 7
    def remove(self, spec_or_id=None, multi=True, **kwargs):

        warnings.warn(
            "remove is deprecated. Use delete_one or delete_many "
            "instead.",
            DeprecationWarning,
            stacklevel=2)
        if spec_or_id is None:
            spec_or_id = {}
        if not isinstance(spec_or_id, abc.Mapping):
            spec_or_id = {"_id": spec_or_id}
        write_concern = None
        collation = validate_collation_or_none(kwargs.pop('collation', None))
        if kwargs:
            write_concern = WriteConcern(**kwargs)
        return self._delete_retryable(spec_or_id,
                                      multi,
                                      write_concern,
                                      collation=collation)
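The deprecation warning points at delete_one and delete_many; a sketch of the modern calls, attaching an explicit write concern via with_options where one is needed (client address and collection name are made up):

from pymongo import MongoClient, WriteConcern

coll = MongoClient().test.things

# remove(spec, multi=True)  ->  delete_many(spec)
coll.delete_many({"status": "stale"})

# remove(some_id)  ->  delete_one({"_id": some_id}), here with w="majority"
durable = coll.with_options(write_concern=WriteConcern(w="majority"))
durable.delete_one({"_id": "abc123"})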
Example No. 8
    def test_auth_during_failover(self):
        self.assertTrue(self.db.authenticate('user', 'userpass'))
        db = self.db.client.get_database(
            self.db.name, write_concern=WriteConcern(w=3, wtimeout=3000))
        self.assertTrue(db.foo.insert_one({'foo': 'bar'}))
        self.db.logout()
        self.assertRaises(OperationFailure, self.db.foo.find_one)

        primary = self.c.primary
        ha_tools.kill_members(['%s:%d' % primary], 2)

        # Let monitor notice primary's gone
        time.sleep(2 * self.heartbeat_frequency)
        self.assertFalse(primary == self.c.primary)

        # Make sure we can still authenticate
        self.assertTrue(self.db.authenticate('user', 'userpass'))
        # And still query.
        self.db.read_preference = PRIMARY_PREFERRED
        self.assertEqual('bar', self.db.foo.find_one()['foo'])
Example No. 9
 def _finish_transaction(self, command_name, retrying):
     # Transaction spec says that after the initial commit attempt,
     # subsequent commitTransaction commands should be upgraded to use
     # w:"majority" and set a default value of 10 seconds for wtimeout.
     wc = self._transaction.opts.write_concern
     if retrying and command_name == "commitTransaction":
         wc_doc = wc.document
         wc_doc["w"] = "majority"
         wc_doc.setdefault("wtimeout", 10000)
         wc = WriteConcern(**wc_doc)
     cmd = SON([(command_name, 1)])
     if self._transaction.recovery_token:
         cmd['recoveryToken'] = self._transaction.recovery_token
     with self._client._socket_for_writes(self) as sock_info:
         return self._client.admin._command(
             sock_info,
             cmd,
             session=self,
             write_concern=wc,
             parse_write_concern_error=True)
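The upgrade step relies on WriteConcern.document handing back a plain, copyable dict; a quick illustration of the same round trip outside the session machinery:

from pymongo import WriteConcern

wc = WriteConcern(w=1)
wc_doc = wc.document              # a copy: {'w': 1}
wc_doc["w"] = "majority"
wc_doc.setdefault("wtimeout", 10000)
upgraded = WriteConcern(**wc_doc)
print(upgraded.document)          # now carries w='majority' and wtimeout=10000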
Example No. 10
    def test_mongo_connect_and_pool(self):
        clear_all()
        TestDoc._pymongo_collection = {}
        import threading
        pool_size = 100
        client = connect(
            conn_name='test_connect',
            db_names=['test'],
            max_pool_size=pool_size,
            waitQueueTimeoutMS=1000,
            allow_async=True,
        ).sync
        self.assertEquals(client.write_concern, WriteConcern(w=1))
        self.assertEquals(client.max_pool_size, pool_size)

        self._clear()
        self._feed_data(50000)
        global total
        total = 0

        def thread_read():
            global total
            cur = threading.current_thread()
            try:
                it = TestDoc.find_iter({}, limit=1000)
                count = 0
                for x in it:
                    count += 1
                    pass
                total += 1
            except ConnectionFailure:
                return

        t_list = []
        for i in xrange(1000):
            t = threading.Thread(target=thread_read, name="%d" % i)
            t.start()
            t_list.append(t)
        for t in t_list:
            t.join()
        print '%d read threads end successfully' % total
Example No. 11
    def test_errors(self):
        # We must call getlasterror, etc. on same socket as last operation.
        db = rs_or_single_client(maxPoolSize=1).pymongo_test
        db.reset_error_history()
        self.assertEqual(None, db.error())
        if client_context.supports_getpreverror:
            self.assertEqual(None, db.previous_error())

        db.test.insert_one({"_id": 1})
        unacked = db.test.with_options(write_concern=WriteConcern(w=0))

        unacked.insert_one({"_id": 1})
        self.assertTrue(db.error())
        if client_context.supports_getpreverror:
            self.assertTrue(db.previous_error())

        unacked.insert_one({"_id": 1})
        self.assertTrue(db.error())

        if client_context.supports_getpreverror:
            prev_error = db.previous_error()
            self.assertEqual(prev_error["nPrev"], 1)
            del prev_error["nPrev"]
            prev_error.pop("lastOp", None)
            error = db.error()
            error.pop("lastOp", None)
            # getLastError includes "connectionId" in recent
            # server versions, getPrevError does not.
            error.pop("connectionId", None)
            self.assertEqualReply(error, prev_error)

        db.test.find_one()
        self.assertEqual(None, db.error())
        if client_context.supports_getpreverror:
            self.assertTrue(db.previous_error())
            self.assertEqual(db.previous_error()["nPrev"], 2)

        db.reset_error_history()
        self.assertEqual(None, db.error())
        if client_context.supports_getpreverror:
            self.assertEqual(None, db.previous_error())
Example No. 12
    def test_stressed_rollback(self):
        """Stress test for a rollback with many documents."""
        self.opman.start()

        c = self.main_conn.test.mc
        docman = self.opman.doc_managers[0]
        c2 = c.with_options(write_concern=WriteConcern(w=2))
        c2.insert_many([{'i': i} for i in range(STRESS_COUNT)])
        assert_soon(lambda: c2.count() == STRESS_COUNT)
        condition = lambda: len(docman._search()) == STRESS_COUNT
        assert_soon(condition, ("Was expecting %d documents in DocManager, "
                                "but %d found instead." %
                                (STRESS_COUNT, len(docman._search()))))

        primary_conn = self.repl_set.primary.client()
        self.repl_set.primary.stop(destroy=False)
        new_primary_conn = self.repl_set.secondary.client()

        admin = new_primary_conn.admin
        assert_soon(
            lambda: retry_until_ok(admin.command, "isMaster")['ismaster'])

        retry_until_ok(c.insert_many, [{
            'i': str(STRESS_COUNT + i)
        } for i in range(STRESS_COUNT)])

        self.repl_set.secondary.stop(destroy=False)

        self.repl_set.primary.start()
        admin = primary_conn.admin
        assert_soon(
            lambda: retry_until_ok(admin.command, "isMaster")['ismaster'])
        self.repl_set.secondary.start()

        assert_soon(lambda: retry_until_ok(c.count) == STRESS_COUNT)
        assert_soon(condition, ("Was expecting %d documents in DocManager, "
                                "but %d found instead." %
                                (STRESS_COUNT, len(docman._search()))))

        self.opman.join()
Example No. 13
    def execute_no_results(self, sock_info, generator):
        """Execute all operations, returning no results (w=0).
        """
        coll = self.collection
        # If ordered is True we have to send GLE or use write
        # commands so we can abort on the first error.
        write_concern = WriteConcern(w=int(self.ordered))
        op_id = _randint()

        for run in generator:
            try:
                if run.op_type == _INSERT:
                    coll._insert(sock_info,
                                 run.ops,
                                 self.ordered,
                                 write_concern=write_concern,
                                 op_id=op_id)
                elif run.op_type == _UPDATE:
                    for operation in run.ops:
                        doc = operation['u']
                        check_keys = True
                        if doc and next(iter(doc)).startswith('$'):
                            check_keys = False
                        coll._update(sock_info,
                                     operation['q'],
                                     doc,
                                     operation['upsert'],
                                     check_keys,
                                     operation['multi'],
                                     write_concern=write_concern,
                                     op_id=op_id,
                                     ordered=self.ordered)
                else:
                    for operation in run.ops:
                        coll._delete(sock_info, operation['q'],
                                     not operation['limit'], write_concern,
                                     op_id, self.ordered)
            except OperationFailure:
                if self.ordered:
                    break
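The WriteConcern(w=int(self.ordered)) line is the whole trick: an ordered run gets w=1, so the first failure is reported and aborts the run, while an unordered run stays at w=0. A small check of that mapping:

from pymongo import WriteConcern

print(WriteConcern(w=int(True)).acknowledged)    # True  -> errors are observable
print(WriteConcern(w=int(False)).acknowledged)   # False -> fire-and-forget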
Example No. 14
def update(schema, match, new_data, collection, append=False, db_name=None):
    """
        Update documents based on match query.

        :schema      - Marshmallow schema class
        :match       - id as a string, or a dict filter query
        :new_data    - dict of data to update
        :collection  - collection name
        :append      - if True, APPEND new data to existing fields; if False, SET the fields to new data
        :db_name     - use another db if needed; defaults to MONGO_DATABASE_NAME from .env

        Returns the number of modified documents.

        If validation fails, returns a string with the error message.

    """
    db_name = return_db(db_name)
    collection_name = return_collection_name(collection)
    with Connect.get_connection() as mongo_connection:
        collection = mongo_connection[db_name][collection_name]
        parsed = parse_match(match)
        # Prefer the full query, then the _id, then fall back to the distinct key.
        match = parsed['query'] or parsed['_id']
        if not match:
            match = {'_id': parsed['distinct_key']}

        new_data = validate_data(schema, new_data)
        if isinstance(new_data, str):
            return new_data

        _filter = {"_id": match["_id"]} if "_id" in match else match
        updated_docs_nbr = collection.with_options(
            write_concern=WriteConcern("majority")).update_many(
                filter=_filter,
                update=_append_query(new_data) if append else {
                    "$set": new_data
                },
                upsert=True).modified_count

        return updated_docs_nbr
Example No. 15
    def parse_options(opts):
        if 'readPreference' in opts:
            opts['read_preference'] = parse_read_preference(
                opts.pop('readPreference'))

        if 'writeConcern' in opts:
            opts['write_concern'] = WriteConcern(
                **dict(opts.pop('writeConcern')))

        if 'readConcern' in opts:
            opts['read_concern'] = ReadConcern(
                **dict(opts.pop('readConcern')))

        if 'maxTimeMS' in opts:
            opts['max_time_ms'] = opts.pop('maxTimeMS')

        if 'maxCommitTimeMS' in opts:
            opts['max_commit_time_ms'] = opts.pop('maxCommitTimeMS')

        if 'hint' in opts:
            hint = opts.pop('hint')
            if not isinstance(hint, string_type):
                hint = list(iteritems(hint))
            opts['hint'] = hint

        # Properly format 'hint' arguments for the Bulk API tests.
        if 'requests' in opts:
            reqs = opts.pop('requests')
            for req in reqs:
                args = req.pop('arguments')
                if 'hint' in args:
                    hint = args.pop('hint')
                    if not isinstance(hint, string_type):
                        hint = list(iteritems(hint))
                    args['hint'] = hint
                req['arguments'] = args
            opts['requests'] = reqs

        return dict(opts)
Example No. 16
def load_data():
    """
    read redcap data and import to mongodb
    """

    import_from_redcap.get_json()

    # connect to mongo
    myclient = MongoClient(MONGO_ADDR)

    # database
    db = myclient["mongodb"]
    collection = db["surveys"]

    # Loading or Opening the json file
    with open(OUT_PUT_JSON_PATH) as file:
        file_data = json.load(file)

    # delete data and replace with new data
    collection.delete_many({})
    collection.with_options(write_concern=WriteConcern(
        w=0)).insert_many(file_data)
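With w=0 the reload is fire-and-forget, so duplicate-key or validation failures are silently dropped. A hedged acknowledged variant that surfaces them, reusing the collection and file_data names from the function above:

from pymongo.errors import BulkWriteError

try:
    result = collection.insert_many(file_data, ordered=False)
    print("inserted %d survey documents" % len(result.inserted_ids))
except BulkWriteError as exc:
    # exc.details carries the per-document "writeErrors" list.
    print("some inserts failed:", exc.details["writeErrors"][:3])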
Example No. 17
    def test_writes_with_failover(self):
        c = MongoClient(
            self.seed,
            replicaSet=self.name,
            serverSelectionTimeoutMS=self.server_selection_timeout)
        wait_until(lambda: c.primary, "discover primary")
        wait_until(lambda: len(c.secondaries) == 2, "discover secondaries")
        primary = c.primary
        w = len(c.secondaries) + 1
        db = c.get_database("pymongo_test",
                            write_concern=WriteConcern(w=w))
        db.test.delete_many({})
        db.test.insert_one({'foo': 'bar'})
        self.assertEqual('bar', db.test.find_one()['foo'])

        killed = ha_tools.kill_primary(9)
        self.assertTrue(bool(len(killed)))

        # Wait past pool's check interval, so it throws an error from
        # get_socket().
        time.sleep(1)

        # Verify that we only raise AutoReconnect, not some other error,
        # while we wait for new primary.
        for _ in xrange(10000):
            try:
                db.test.insert_one({'bar': 'baz'})

                # No error, found primary.
                break
            except AutoReconnect:
                time.sleep(.01)
        else:
            self.fail("Couldn't connect to new primary")

        # Found new primary.
        self.assertTrue(c.primary)
        self.assertTrue(primary != c.primary)
        self.assertEqual('baz', db.test.find_one({'bar': 'baz'})['bar'])
Example No. 18
    async def delete_one(
        self,
        collection: str,
        filter: Mapping[str, Any],
        collation: Optional[_Collation] = None,
        hint: Optional[_IndexKeyHint] = None,  # backlog on versions above
        session: Optional[AsyncIOMotorClientSession] = None,
        **kwds: Any
    ) -> DeleteResult:
        """
        Delete a single document matching the filter.

        :param collection: collection name.
        :param filter: the query selection criteria.
        :param collation: specifies the collation to use for the operation. Defaults to: None.
        :param hint: an index to use to support the query predicate specified either by its string
                     name, or in the same format as passed to create_index() (e.g. [('field', ASCENDING)]). Defaults to: None.
        :param session: an optional session for the asyncio Motor client. Defaults to: None.
        :return: an instance of DeleteResult.
        """
        delete_one_args = SimpleNamespace(filter=filter, collation=collation, **kwds)

        use_consistency               = deepcopy(self._use_consistency)
        use_consistency.write_concern = WriteConcern(w='majority')
        use_consistency.read_concern  = ReadConcern(level='majority')

        if Ver(self.version) >= Ver(PY_MONGODB_VERSION_ENOUGH):
            if session and not isinstance(session, AsyncIOMotorClientSession):
                async with await self.client.start_session() as session_inner:
                    async with session_inner.start_transaction(**vars(use_consistency)):
                        delete_one_args.session = session_inner
                        return await self.db[collection].delete_one(**vars(delete_one_args))
            else:
                async with session.start_transaction(**vars(use_consistency)):
                    delete_one_args.session = session
                    return await self.db[collection].delete_one(**vars(delete_one_args))

        return await self.db[collection].delete_one(**vars(delete_one_args))
Example No. 19
        def update_employee_info(session):
            employees_coll = session.client.hr.employees
            events_coll = session.client.reporting.events

            with session.start_transaction(
                    read_concern=ReadConcern("snapshot"),
                    write_concern=WriteConcern(w="majority")):
                employees_coll.update_one({"employee": 3},
                                          {"$set": {
                                              "status": "Inactive"
                                          }},
                                          session=session)
                events_coll.insert_one(
                    {
                        "employee": 3,
                        "status": {
                            "new": "Inactive",
                            "old": "Active"
                        }
                    },
                    session=session)

                while True:
                    try:
                        # Commit uses write concern set at transaction start.
                        session.commit_transaction()
                        print("Transaction committed.")
                        break
                    except (ConnectionFailure, OperationFailure) as exc:
                        # Can retry commit
                        if exc.has_error_label(
                                "UnknownTransactionCommitResult"):
                            print("UnknownTransactionCommitResult, retrying "
                                  "commit operation ...")
                            continue
                        else:
                            print("Error during commit ...")
                            raise
Example No. 20
    def execute(self, write_concern):
        """Execute operations.
        """
        if not self.ops:
            raise InvalidOperation('No operations to execute')
        if self.executed:
            raise InvalidOperation('Bulk operations can '
                                   'only be executed once.')
        self.executed = True
        write_concern = (WriteConcern(**write_concern) if
                         write_concern else self.collection.write_concern)

        if self.ordered:
            generator = self.gen_ordered()
        else:
            generator = self.gen_unordered()

        client = self.collection.database.client
        with client._socket_for_writes() as sock_info:
            if sock_info.max_wire_version < 5 and self.uses_collation:
                raise ConfigurationError(
                    'Must be connected to MongoDB 3.4+ to use a collation.')
            if sock_info.max_wire_version < 6 and self.uses_array_filters:
                raise ConfigurationError(
                    'Must be connected to MongoDB 3.6+ to use arrayFilters.')
            if not write_concern.acknowledged:
                if self.uses_collation:
                    raise ConfigurationError(
                        'Collation is unsupported for unacknowledged writes.')
                if self.uses_array_filters:
                    raise ConfigurationError(
                        'arrayFilters is unsupported for unacknowledged '
                        'writes.')
                self.execute_no_results(sock_info, generator)
            elif sock_info.max_wire_version > 1:
                return self.execute_command(sock_info, generator, write_concern)
            else:
                return self.execute_legacy(sock_info, generator, write_concern)
Example No. 21
    async def insert_one(
        self,
        collection: str,
        document: _DocumentIn,
        bypass_document_validation: bool = False,
        session: Optional[AsyncIOMotorClientSession] = None
    ) -> InsertOneResult:
        """
        Insert a single document.

        :param collection: collection name.
        :param document: the document to insert. Must be a mutable mapping type. If the
                         document does not have an _id field one will be added automatically.
        :param bypass_document_validation: if True, allows the write to opt-out of document level validation. Defaults to: False.
        :param session: an optional session for the asyncio Motor client. Defaults to: None.
        :return: an instance of InsertOneResult.
        """
        insert_one_args = SimpleNamespace(
            document=document,
            bypass_document_validation=bypass_document_validation
        )

        use_consistency               = deepcopy(self._use_consistency)
        use_consistency.write_concern = WriteConcern(w='majority')
        use_consistency.read_concern  = ReadConcern(level='majority')

        if Ver(self.version) >= Ver(PY_MONGODB_VERSION_ENOUGH):
            if session and not isinstance(session, AsyncIOMotorClientSession):
                async with await self.client.start_session() as session_inner:
                    async with session_inner.start_transaction(**vars(use_consistency)):
                        insert_one_args.session = session_inner
                        return await self.db[collection].insert_one(**vars(insert_one_args))
            else:
                async with session.start_transaction(**vars(use_consistency)):
                    insert_one_args.session = session
                    return await self.db[collection].insert_one(**vars(insert_one_args))

        return await self.db[collection].insert_one(**vars(insert_one_args))
Example No. 22
 def execute_command_no_results(self, sock_info, generator):
     """Execute write commands with OP_MSG and w=0 WriteConcern, ordered.
     """
     full_result = {
         "writeErrors": [],
         "writeConcernErrors": [],
         "nInserted": 0,
         "nUpserted": 0,
         "nMatched": 0,
         "nModified": 0,
         "nRemoved": 0,
         "upserted": [],
     }
     # Ordered bulk writes have to be acknowledged so that we stop
     # processing at the first error, even when the application
     # specified unacknowledged writeConcern.
     write_concern = WriteConcern()
     op_id = _randint()
     try:
         self._execute_command(generator, write_concern, None, sock_info,
                               op_id, False, full_result)
     except OperationFailure:
         pass
Example No. 23
def add_user(name, email, hashedpw):
    """
    Given a name, email, and password, inserts a document with those credentials
    into the `users` collection.
    """
    """
    Ticket: Durable Writes

    Please increase the durability of this method by using a non-default write
    concern with ``insert_one``.
    """

    try:
        # TODO: User Management
        # Insert a user with the "name", "email", and "password" fields.
        new_user = {"name": name, "email": email, "password": hashedpw}
        # TODO: Durable Writes
        # Use a more durable Write Concern for this operation.
        # with_options returns a new Collection; keep it and insert through it.
        users = db.users.with_options(write_concern=WriteConcern(w=2))
        users.insert_one(new_user)
        return {"success": True}
    except DuplicateKeyError:
        return {"error": "A user with the given email already exists."}
Example No. 24
    def execute(self, write_concern, session):
        """Execute operations.
        """
        if not self.ops:
            raise InvalidOperation('No operations to execute')
        if self.executed:
            raise InvalidOperation('Bulk operations can '
                                   'only be executed once.')
        self.executed = True
        write_concern = (WriteConcern(**write_concern) if
                         write_concern else self.collection.write_concern)

        if self.ordered:
            generator = self.gen_ordered()
        else:
            generator = self.gen_unordered()

        client = self.collection.database.client
        if not write_concern.acknowledged:
            with client._socket_for_writes() as sock_info:
                self.execute_no_results(sock_info, generator)
        else:
            return self.execute_command(generator, write_concern, session)
Example No. 25
    def run_test(self):
        valid = test_case['valid']

        if 'writeConcern' in test_case:
            normalized = normalize_write_concern(test_case['writeConcern'])
            if not valid:
                self.assertRaises((ConfigurationError, ValueError),
                                  WriteConcern, **normalized)
            else:
                concern = WriteConcern(**normalized)
                self.assertEqual(concern.document,
                                 test_case['writeConcernDocument'])
                self.assertEqual(concern.acknowledged,
                                 test_case['isAcknowledged'])
                self.assertEqual(concern.is_server_default,
                                 test_case['isServerDefault'])
        if 'readConcern' in test_case:
            # Any string for 'level' is equally valid
            concern = ReadConcern(**test_case['readConcern'])
            self.assertEqual(concern.document,
                             test_case['readConcernDocument'])
            self.assertEqual(not bool(concern.level),
                             test_case['isServerDefault'])
Example No. 26
    def test_mongos(self):
        shard = client_context.client.config.shards.find_one()['host']
        num_members = shard.count(',') + 1
        if num_members == 1:
            raise SkipTest("Need a replica set shard to test.")
        coll = client_context.client.pymongo_test.get_collection(
            "test", write_concern=WriteConcern(w=num_members))
        coll.drop()
        res = coll.insert_many([{} for _ in range(5)])
        first_id = res.inserted_ids[0]
        last_id = res.inserted_ids[-1]

        # Note - this isn't a perfect test since there's no way to
        # tell what shard member a query ran on.
        for pref in (Primary(), PrimaryPreferred(), Secondary(),
                     SecondaryPreferred(), Nearest()):
            qcoll = coll.with_options(read_preference=pref)
            results = list(qcoll.find().sort([("_id", 1)]))
            self.assertEqual(first_id, results[0]["_id"])
            self.assertEqual(last_id, results[-1]["_id"])
            results = list(qcoll.find().sort([("_id", -1)]))
            self.assertEqual(first_id, results[-1]["_id"])
            self.assertEqual(last_id, results[0]["_id"])
Example No. 27
        def update_employee_info(session):
            employees_coll = session.client.hr.employees
            events_coll = session.client.reporting.events

            with session.start_transaction(
                    read_concern=ReadConcern("snapshot"),
                    write_concern=WriteConcern(w="majority")):
                employees_coll.update_one({"employee": 3},
                                          {"$set": {
                                              "status": "Inactive"
                                          }},
                                          session=session)
                events_coll.insert_one(
                    {
                        "employee": 3,
                        "status": {
                            "new": "Inactive",
                            "old": "Active"
                        }
                    },
                    session=session)

                commit_with_retry(session)
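commit_with_retry is not shown in this snippet; a sketch consistent with the inline retry loop of Example No. 19:

from pymongo.errors import ConnectionFailure, OperationFailure


def commit_with_retry(session):
    while True:
        try:
            # Commit uses the write concern set at transaction start.
            session.commit_transaction()
            print("Transaction committed.")
            break
        except (ConnectionFailure, OperationFailure) as exc:
            # Only this label means the commit itself is safe to retry.
            if exc.has_error_label("UnknownTransactionCommitResult"):
                print("UnknownTransactionCommitResult, retrying commit ...")
                continue
            print("Error during commit ...")
            raise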
Example No. 28
    def save(self, force_insert=None, validate=True, **kwargs):
        """Save the :class:`~mongoengine.Document` to the database. If the
        document already exists, it will be updated, otherwise it will be
        created.

        :param force_insert: only try to create a new document, don't allow
            updates of existing documents
        :param validate: validates the document; set to ``False`` to skip.
        """
        if self._meta['hash_field']:
            # if we're hashing the ID and it hasn't been set yet, autogenerate it
            from ..fields import ObjectIdField
            if self._meta['hash_field'] == self._meta['id_field'] and \
               not self.id and isinstance(self._fields['id'], ObjectIdField):
                self.id = ObjectId()

            self['shard_hash'] = self._hash(self[self._meta['hash_field']])

        if force_insert is None:
            force_insert = self._meta['force_insert']

        if validate:
            self.validate()
        doc = self.to_mongo()
        try:
            w = self._meta.get('write_concern', 1)
            collection = self._pymongo(write_concern=WriteConcern(w=w))
            if force_insert or "_id" not in doc:
                pk_value = collection.insert_one(doc).inserted_id
            else:
                collection.replace_one({'_id': doc['_id']}, doc)
                pk_value = doc['_id']
        except (pymongo.errors.OperationFailure), err:
            message = 'Could not save document (%s)'
            if u'duplicate key' in unicode(err):
                message = u'Tried to save duplicate unique keys (%s)'
            raise OperationError(message % unicode(err))
Example No. 29
    def test_database_aggregation_fake_cursor(self):
        coll_name = "test_output"
        if client_context.version < (4, 3):
            db_name = "admin"
            write_stage = {"$out": coll_name}
        else:
            # SERVER-43287 disallows writing with $out to the admin db, use
            # $merge instead.
            db_name = "pymongo_test"
            write_stage = {
                "$merge": {"into": {"db": db_name, "coll": coll_name}}}
        output_coll = self.client[db_name][coll_name]
        output_coll.drop()
        self.addCleanup(output_coll.drop)

        admin = self.admin.with_options(write_concern=WriteConcern(w=0))
        pipeline = self.pipeline[:]
        pipeline.append(write_stage)
        with admin.aggregate(pipeline) as cursor:
            with self.assertRaises(StopIteration):
                next(cursor)

        result = wait_until(output_coll.find_one, "read unacknowledged write")
        self.assertEqual(result["dummy"], self.result["dummy"])
Example No. 30
    def remove_user(self, name):
        """Remove user `name` from this :class:`Database`.

        User `name` will no longer have permissions to access this
        :class:`Database`.

        :Parameters:
          - `name`: the name of the user to remove
        """
        try:
            cmd = SON([("dropUser", name)])
            # Don't send {} as writeConcern.
            if self.write_concern.acknowledged and self.write_concern.document:
                cmd["writeConcern"] = self.write_concern.document
            self.command(cmd)
        except OperationFailure as exc:
            # See comment in add_user try / except above.
            if exc.code in common.COMMAND_NOT_FOUND_CODES:
                coll = self.system.users
                if not self.write_concern.acknowledged:
                    coll = coll.with_options(write_concern=WriteConcern())
                coll.delete_one({"user": name})
                return
            raise
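The same dropUser command can be issued directly, attaching the writeConcern field just as the method above does (database and user names are invented):

from bson.son import SON
from pymongo import MongoClient

db = MongoClient().mydb
cmd = SON([("dropUser", "app_reader")])
cmd["writeConcern"] = {"w": "majority"}
db.command(cmd)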