    def test_with_options(self):
        codec_options = DECIMAL_CODECOPTS
        read_preference = ReadPreference.SECONDARY_PREFERRED
        write_concern = WriteConcern(j=True)
        read_concern = ReadConcern(level="majority")

        # List of all options to compare.
        allopts = [
            'name', 'client', 'codec_options', 'read_preference',
            'write_concern', 'read_concern'
        ]

        db1 = self.client.get_database('with_options_test',
                                       codec_options=codec_options,
                                       read_preference=read_preference,
                                       write_concern=write_concern,
                                       read_concern=read_concern)

        # Case 1: swap no options
        db2 = db1.with_options()
        for opt in allopts:
            self.assertEqual(getattr(db1, opt), getattr(db2, opt))

        # Case 2: swap all options
        newopts = {
            'codec_options': CodecOptions(),
            'read_preference': ReadPreference.PRIMARY,
            'write_concern': WriteConcern(w=1),
            'read_concern': ReadConcern(level="local")
        }
        db2 = db1.with_options(**newopts)
        for opt in newopts:
            self.assertEqual(getattr(db2, opt),
                             newopts.get(opt, getattr(db1, opt)))
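
A point worth noting: with_options never mutates the original object, it returns a configured clone. A minimal sketch of that property, reusing db1 from above (db3 is an illustrative name):

# db1 keeps its original settings after with_options.
db3 = db1.with_options(read_preference=ReadPreference.PRIMARY)
assert db1.read_preference == ReadPreference.SECONDARY_PREFERRED
assert db3.read_preference == ReadPreference.PRIMARY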
Example #2
    def test_read_concern(self):
        """Test readConcern is not validated by the driver."""
        # Read concern 'local' is not allowed for $changeStream.
        coll = self.coll.with_options(read_concern=ReadConcern('local'))
        with self.assertRaises(OperationFailure):
            coll.watch()

        # Does not error.
        coll = self.coll.with_options(read_concern=ReadConcern('majority'))
        with coll.watch():
            pass
Example #3
def fetch(match, collection, as_list=True, db_name=None):
    """
        Get data from mongo, based on match dict or string id.

        :match      - _id as string (will return a dict)
                    - mongo dict filter (will return a list of results)
                    - field_name as string (will return distinct values for that field)

        :collection - collection name
        :as_list    - set as_list to False to get a generator
        :db_name    - specify another db if needed; defaults to MONGO_DATABASE_NAME from .env

        If something fails, returns a string with the error message.

    """

    match = parse_match(match)

    db_name = return_db(db_name)
    collection_name = return_collection_name(collection)
    with Connect.get_connection() as mongo_connection:
        collection = mongo_connection[db_name][collection_name]
        if not isinstance(collection, Collection):
            return collection

        if match['_id']:
            found_docs = list(collection.find(match['_id']))
            # Guard against an empty result set before indexing.
            doc = found_docs[0] if found_docs else []
            if match['oid']:
                doc = parse_doc(doc)
            return doc

        if match['distinct_key']:
            found_docs = collection.with_options(
                read_concern=ReadConcern("majority")).distinct(
                    match['distinct_key'])
        elif match['query_tuple']:
            found_docs = collection.with_options(
                read_concern=ReadConcern("majority")).find(
                    *match['query_tuple'])
        else:
            found_docs = collection.with_options(
                read_concern=ReadConcern("majority")).find(match['query'])

        if as_list:
            return [parse_doc(doc) for doc in found_docs]

        return (parse_doc(doc) for doc in found_docs)
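
A hedged usage sketch for fetch, assuming the parse_match conventions from the docstring (collection and field names are illustrative):

# By string id: returns a single dict.
user = fetch('5f1b2c3d4e5f6a7b8c9d0e1f', 'users')
# By filter dict: returns a list, read with "majority" read concern.
active = fetch({'status': 'active'}, 'users')
# Or stream results as a generator.
for doc in fetch({'status': 'active'}, 'users', as_list=False):
    print(doc)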
Example #4
    def test_read_concern(self):
        rc = ReadConcern()
        self.assertIsNone(rc.level)
        self.assertTrue(rc.ok_for_legacy)

        rc = ReadConcern('majority')
        self.assertEqual('majority', rc.level)
        self.assertFalse(rc.ok_for_legacy)

        rc = ReadConcern('local')
        self.assertEqual('local', rc.level)
        self.assertTrue(rc.ok_for_legacy)

        self.assertRaises(TypeError, ReadConcern, 42)
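
For reference, ReadConcern also exposes the document it sends to the server; a minimal sketch:

from pymongo.read_concern import ReadConcern

# The default read concern serializes to an empty document,
# which is what makes it "ok for legacy" servers.
assert ReadConcern().document == {}
assert ReadConcern('majority').document == {'level': 'majority'}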
Example #5
    async def test_causal_consistency(self):
        # Causal consistency examples
        client = self.cx
        self.addCleanup(env.sync_cx.drop_database, "test")
        await client.test.drop_collection("items")
        await client.test.items.insert_one({
            "sku": "111",
            "name": "Peanuts",
            "start": datetime.datetime.today()
        })

        # Start Causal Consistency Example 1
        async with await client.start_session(causal_consistency=True) as s1:
            current_date = datetime.datetime.today()
            items = client.get_database(
                "test",
                read_concern=ReadConcern("majority"),
                write_concern=WriteConcern("majority", wtimeout=1000),
            ).items
            await items.update_one({
                "sku": "111",
                "end": None
            }, {"$set": {
                "end": current_date
            }},
                                   session=s1)
            await items.insert_one(
                {
                    "sku": "nuts-111",
                    "name": "Pecans",
                    "start": current_date
                },
                session=s1)
        # End Causal Consistency Example 1

        # Start Causal Consistency Example 2
        async with await client.start_session(causal_consistency=True) as s2:
            s2.advance_cluster_time(s1.cluster_time)
            s2.advance_operation_time(s1.operation_time)

            items = client.get_database(
                "test",
                read_preference=ReadPreference.SECONDARY,
                read_concern=ReadConcern("majority"),
                write_concern=WriteConcern("majority", wtimeout=1000),
            ).items
            async for item in items.find({"end": None}, session=s2):
                print(item)
Example #6
def execute_transfers_in_txn(conn, source_acct_list, destin_acct_list,
                             source_coll_list, destin_coll_list,
                             num_transfers_per_txn):
    session = conn.start_session()
    session.start_transaction(read_concern=ReadConcern('snapshot'),
                              write_concern=WriteConcern('majority',
                                                         wtimeout=1000))

    for i in range(num_transfers_per_txn):
        transfer_amount = random.randint(0, 15)

        source_acct = source_acct_list.pop()
        destin_acct = destin_acct_list.pop()
        source_coll = source_coll_list.pop()
        destin_coll = destin_coll_list.pop()

        source_coll.update_one({'cust_id': source_acct},
                               {'$inc': {
                                   'balance': -transfer_amount
                               }},
                               session=session)
        time.sleep(0.1)
        destin_coll.update_one({'cust_id': destin_acct},
                               {'$inc': {
                                   'balance': transfer_amount
                               }},
                               session=session)

        print(
            str(transfer_amount) + " transferred from: " +
            str(source_coll.name) + "." + str(source_acct) + " to " +
            str(destin_coll.name) + "." + str(destin_acct))

    session.commit_transaction()
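
The loop above commits manually and never aborts on failure. PyMongo 3.9+ also offers ClientSession.with_transaction, which commits on success and retries transient errors; a hedged sketch (database, collection, and account ids are illustrative):

from pymongo import MongoClient
from pymongo.read_concern import ReadConcern
from pymongo.write_concern import WriteConcern

def transfer_ten(session):
    # Illustrative callback: debit one account, credit another.
    accounts = session.client.bank.accounts
    accounts.update_one({'cust_id': 1}, {'$inc': {'balance': -10}},
                        session=session)
    accounts.update_one({'cust_id': 2}, {'$inc': {'balance': 10}},
                        session=session)

client = MongoClient()
with client.start_session() as session:
    session.with_transaction(transfer_ten,
                             read_concern=ReadConcern('snapshot'),
                             write_concern=WriteConcern('majority',
                                                        wtimeout=1000))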
Example #7
def _setup_logging_dataset(store,
                           dsname,
                           logger,
                           collection=None,
                           size=10 * 1024 * 1024,
                           reset=False):
    # setup the dataset
    assert dsname, 'need a valid dsname, got {}'.format(dsname)
    if reset:
        store.drop(dsname, force=True)
    collection = collection or store.collection(dsname)
    # https://api.mongodb.com/python/current/api/pymongo/write_concern.html#pymongo.write_concern.WriteConcern
    FireAndForget = WriteConcern(w=0)
    ReadFast = ReadConcern('local')
    collection = collection.with_options(write_concern=FireAndForget,
                                         read_concern=ReadFast)
    store.put(collection, dsname)
    if collection.estimated_document_count() == 0:
        # initialize. we insert directly into the collection because the logger instance is not set up yet
        record = _make_record('SYSTEM', 999, 'system', 'log init', 'log init')
        collection.insert_one(record)
        store.mongodb.command('convertToCapped', collection.name, size=size)
    # ensure indexed
    idxs = collection.index_information()
    for idx in ('levelname', 'levelno', 'created'):
        if idx not in idxs:
            collection.create_index(idx)
    return collection
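
Once converted to capped, the collection can be tailed like a log file; a hedged sketch (levelname matches the index created above, msg is an assumed field of _make_record):

import time
import pymongo

def tail_logs(collection):
    # A tailable-await cursor stays open on the capped collection and
    # waits server-side for new documents.
    cursor = collection.find(cursor_type=pymongo.CursorType.TAILABLE_AWAIT)
    while cursor.alive:
        for doc in cursor:
            print(doc.get('levelname'), doc.get('msg'))
        time.sleep(1)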
Example #8
def replace_with_correct_contig(mongo_source, assembly_accession, study_accession, incorrect_contig, correct_contig,
                                num_variants_to_replace):
    sve_collection = mongo_source.mongo_handle[mongo_source.db_name]["submittedVariantEntity"]
    filter_criteria = {'seq': assembly_accession, 'study': study_accession, 'contig': incorrect_contig}
    cursor = sve_collection.with_options(read_concern=ReadConcern("majority")) \
        .find(filter_criteria, no_cursor_timeout=True).limit(num_variants_to_replace)
    insert_statements = []
    drop_statements = []
    total_inserted, total_dropped = 0, 0
    try:
        for variant in cursor:
            original_id = get_SHA1(variant)
            assert variant['_id'] == original_id, "Original id is different from the one calculated %s != %s" % (
                variant['_id'], original_id)
            variant['contig'] = correct_contig
            variant['_id'] = get_SHA1(variant)
            insert_statements.append(pymongo.InsertOne(variant))
            drop_statements.append(pymongo.DeleteOne({'_id': original_id}))
        result_insert = sve_collection.with_options(write_concern=WriteConcern(w="majority", wtimeout=1200000)) \
            .bulk_write(requests=insert_statements, ordered=False)
        total_inserted += result_insert.inserted_count
        result_drop = sve_collection.with_options(write_concern=WriteConcern(w="majority", wtimeout=1200000)) \
            .bulk_write(requests=drop_statements, ordered=False)
        total_dropped += result_drop.deleted_count
        logger.info('%s / %s new documents inserted' % (total_inserted, num_variants_to_replace))
        logger.info('%s / %s old documents dropped' % (total_dropped, num_variants_to_replace))
    except Exception as e:
        print(traceback.format_exc())
        raise e
    finally:
        cursor.close()
    return total_inserted
Example #9
def cleanup_metadata(settings_xml_file, db_name):
    logger.info(f'Cleaning up {db_name}...')
    with get_mongo_connection_handle('production',
                                     settings_xml_file) as mongo_conn:
        db = mongo_conn[db_name]
        metadata_collection = db[annotation_metadata_collection_name]
        temp_collection = db[temp_collection_name]

        majority_read = ReadConcern('majority')
        majority_write = WriteConcern(w='majority', wtimeout=1200000)

        query = {'ct': {'$exists': True}}
        results = [
            x for x in metadata_collection.with_options(
                read_concern=majority_read).find(query, no_cursor_timeout=True)
        ]

        insert_result = temp_collection.with_options(
            write_concern=majority_write).insert_many(results)
        logger.info(
            f'Inserted {len(insert_result.inserted_ids)} documents into {temp_collection_name}'
        )

        delete_result = metadata_collection.with_options(
            write_concern=majority_write).delete_many(query)
        logger.info(
            f'Deleted {delete_result.deleted_count} documents from {annotation_metadata_collection_name}'
        )

        metadata_collection.drop_indexes()
        logger.info(
            f'Dropped non-id indexes from {annotation_metadata_collection_name}'
        )
Example #10
def find_ids_of_declustered_variants(mongo_source, output_dir):
    affected_assemblies = [
        "GCA_001522545.2", "GCA_900700415.1", "GCA_003254395.2", "GCA_003957565.2",
        "GCA_000188115.3", "GCA_000219495.2", "GCA_000512255.2", "GCA_000001515.5",
        "GCA_000003195.3", "GCA_011100615.1", "GCA_002880775.3", "GCA_014441545.1",
        "GCA_000003025.6", "GCA_000751015.1", "GCA_002114115.1", "GCA_000181335.4",
        "GCA_000002315.5", "GCA_000146605.4", "GCA_000372685.2", "GCA_015227675.1",
        "GCA_002863925.1", "GCA_000298735.1", "GCA_001433935.1", "GCA_000002035.4",
        "GCA_001858045.3", "GCA_000001215.4", "GCA_000004515.4", "GCA_902167145.1",
        "GCA_000001635.9", "GCA_002263795.2", "GCA_001704415.1", "GCA_000002775.3",
        "GCA_000224145.1", "GCA_003339765.3", "GCA_008746955.1"
    ]

    logger.info(f"""Number of Affected Assemblies :  {len(affected_assemblies)}""")
    dbsnp_sve_collection = mongo_source.mongo_handle[mongo_source.db_name]["dbsnpSubmittedVariantEntity"]

    for assembly in affected_assemblies:
        logger.info('Running for assembly: ' + assembly)

        filter_criteria = {'remappedFrom': {'$exists': True}, 'rs': {'$exists': False}, 'seq': assembly}
        cursor = dbsnp_sve_collection.with_options(read_concern=ReadConcern("majority")) \
            .find(filter_criteria, no_cursor_timeout=True)

        with open(f"""{output_dir}/{assembly}.txt""", "a") as file:
            for variant in cursor:
                file.write(variant['_id'] + '\n')
Example #11
        def update_employee_info(session):
            employees_coll = session.client.hr.employees
            events_coll = session.client.reporting.events

            with session.start_transaction(
                    read_concern=ReadConcern("snapshot"),
                    write_concern=WriteConcern(w="majority")):
                employees_coll.update_one(
                    {"employee": 3}, {"$set": {"status": "Inactive"}},
                    session=session)
                events_coll.insert_one(
                    {"employee": 3, "status": {
                        "new": "Inactive", "old": "Active"}},
                    session=session)

                while True:
                    try:
                        # Commit uses write concern set at transaction start.
                        session.commit_transaction()
                        print("Transaction committed.")
                        break
                    except (ConnectionFailure, OperationFailure) as exc:
                        # Can retry commit
                        if exc.has_error_label(
                                "UnknownTransactionCommitResult"):
                            print("UnknownTransactionCommitResult, retrying "
                                  "commit operation ...")
                            continue
                        else:
                            print("Error during commit ...")
                            raise
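
The loop above retries only the commit; the companion pattern from the transactions documentation retries the whole callback when the server labels an error transient. A minimal sketch:

def run_transaction_with_retry(txn_func, session):
    while True:
        try:
            txn_func(session)  # performs transaction, including commit
            break
        except (ConnectionFailure, OperationFailure) as exc:
            # Retry the entire transaction on transient errors.
            if exc.has_error_label("TransientTransactionError"):
                print("TransientTransactionError, retrying transaction ...")
                continue
            raise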
Example #12
def mongo_transaction(doc, mongoClient, db_from, col_from, db_to, col_to,
                      logging_info):
    from pymongo.read_concern import ReadConcern
    from pymongo.write_concern import WriteConcern
    from pymongo.read_preferences import ReadPreference
    wc_majority = WriteConcern("majority", wtimeout=2000)
    session = mongoClient.start_session()
    import logging
    logging.info(logging_info + ' start')
    session.start_transaction(read_concern=ReadConcern('local'),
                              write_concern=wc_majority)
    # Important: you must pass the session to the operations.
    collection_from = mongoClient[db_from][col_from]
    collection_to = mongoClient[db_to][col_to]
    collection_from.find_one_and_delete({'_id': doc['_id']}, session=session)
    collection_to.replace_one({'_id': doc['_id']},
                              doc,
                              upsert=True,
                              session=session)
    mongoClient['log']['controller_log'].insert_one(
        {
            'info': logging_info,
            "utctime": datetime.datetime.utcnow()
        },
        session=session)
    session.commit_transaction()
    session.end_session()
Example #13
def most_active_commenters():
    """
    Returns a list of the top 20 most frequent commenters.
    """
    """
    Ticket: User Report

    Construct a pipeline to find the users who comment the most on MFlix, sort
    by the number of comments, and then only return the 20 documents with the
    highest values.

    No field projection necessary.
    """
    # TODO: User Report
    # Return the 20 users who have commented the most on MFlix.
    group = {"$group": {"_id": "$email", "count": {"$sum": 1}}}
    sort = {"$sort": {"count": -1}}
    limit = {"$limit": 20}
    pipeline = [group, sort, limit]

    # or
    # pipeline = [{'$sortByCount': '$email'}, {"$limit": 20}]

    rc = ReadConcern("majority")  # you may want to change this read concern!
    comments = db.comments.with_options(read_concern=rc)
    result = comments.aggregate(pipeline)
    return list(result)
Example #14
    def test_map_reduce_out(self):
        coll = self.db.get_collection('coll',
                                      read_concern=ReadConcern('local'))
        try:
            tuple(
                coll.map_reduce('function() { emit(this._id, this.value); }',
                                'function(key, values) { return 42; }',
                                out='output_collection'))
        except OperationFailure:
            # "ns doesn't exist"
            pass
        self.assertNotIn('readConcern',
                         self.listener.results['started'][0].command)

        if client_context.version.at_least(3, 1, 9, -1):
            self.listener.results.clear()
            try:
                tuple(
                    coll.map_reduce(
                        'function() { emit(this._id, this.value); }',
                        'function(key, values) { return 42; }',
                        out={'inline': 1}))
            except OperationFailure:
                # "ns doesn't exist"
                pass
            self.assertEqual(
                {'level': 'local'},
                self.listener.results['started'][0].command['readConcern'])
Example #15
    def run_scenario(self):
        # Cleanup state and load data (if provided).
        drop_collections(self.db)
        data = scenario_def.get('data')
        if data:
            self.db.test.with_options(write_concern=WriteConcern(
                w="majority")).insert_many(scenario_def['data'])

        # Run operations and check results or errors.
        expected_result = test.get('outcome', {}).get('result')
        expected_error = test.get('outcome', {}).get('error')
        if expected_error is True:
            with self.assertRaises(PyMongoError):
                run_operation(self.db.test, test)
        else:
            result = run_operation(self.db.test, test)
            check_result(self, expected_result, result)

        # Assert final state is expected.
        expected_c = test['outcome'].get('collection')
        if expected_c is not None:
            expected_name = expected_c.get('name')
            if expected_name is not None:
                db_coll = self.db[expected_name]
            else:
                db_coll = self.db.test
            db_coll = db_coll.with_options(read_concern=ReadConcern(
                level="local"))
            self.assertEqual(list(db_coll.find()), expected_c['data'])
Example #16
    def test_sub_collection(self):
        # Verify that a collection with a dotted name inherits options from its
        # parent collection.
        write_concern = WriteConcern(w=2, j=True)
        read_concern = ReadConcern("majority")
        read_preference = Secondary([{"dc": "sf"}])
        codec_options = CodecOptions(tz_aware=True,
                                     uuid_representation=JAVA_LEGACY)

        coll1 = self.db.get_collection(
            "test",
            write_concern=write_concern,
            read_concern=read_concern,
            read_preference=read_preference,
            codec_options=codec_options,
        )

        coll2 = coll1.subcollection
        coll3 = coll1["subcollection"]

        for c in [coll1, coll2, coll3]:
            self.assertEqual(write_concern, c.write_concern)
            self.assertEqual(read_concern, c.read_concern)
            self.assertEqual(read_preference, c.read_preference)
            self.assertEqual(codec_options, c.codec_options)
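
The dotted child is a real collection whose name embeds the parent's; a minimal sketch of what the test relies on:

coll2 = coll1.subcollection
assert coll2.name == "test.subcollection"          # dotted name
assert coll2.write_concern == coll1.write_concern  # options inherited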
Example #17
        async def update_employee_info(session):
            employees_coll = session.client.hr.employees
            events_coll = session.client.reporting.events

            async with session.start_transaction(
                    read_concern=ReadConcern("snapshot"),
                    write_concern=WriteConcern(w="majority"),
                    read_preference=ReadPreference.PRIMARY,
            ):
                await employees_coll.update_one(
                    {"employee": 3}, {"$set": {
                        "status": "Inactive"
                    }},
                    session=session)
                await events_coll.insert_one(
                    {
                        "employee": 3,
                        "status": {
                            "new": "Inactive",
                            "old": "Active"
                        }
                    },
                    session=session)

                await commit_with_retry(session)
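
commit_with_retry is not defined in this example; a sketch matching the documented pattern, mirroring the error-label loop from Example #11 in async form:

async def commit_with_retry(session):
    while True:
        try:
            # Commit uses the write concern set at transaction start.
            await session.commit_transaction()
            print("Transaction committed.")
            break
        except (ConnectionFailure, OperationFailure) as exc:
            if exc.has_error_label("UnknownTransactionCommitResult"):
                print("UnknownTransactionCommitResult, retrying "
                      "commit operation ...")
                continue
            print("Error during commit ...")
            raise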
Example #18
    def test_transaction_options_validation(self):
        default_options = TransactionOptions()
        self.assertIsNone(default_options.read_concern)
        self.assertIsNone(default_options.write_concern)
        self.assertIsNone(default_options.read_preference)
        self.assertIsNone(default_options.max_commit_time_ms)
        # No error when valid options are provided.
        TransactionOptions(read_concern=ReadConcern(),
                           write_concern=WriteConcern(),
                           read_preference=ReadPreference.PRIMARY,
                           max_commit_time_ms=10000)
        with self.assertRaisesRegex(TypeError, "read_concern must be "):
            TransactionOptions(read_concern={})
        with self.assertRaisesRegex(TypeError, "write_concern must be "):
            TransactionOptions(write_concern={})
        with self.assertRaisesRegex(
                ConfigurationError,
                "transactions do not support unacknowledged write concern"):
            TransactionOptions(write_concern=WriteConcern(w=0))
        with self.assertRaisesRegex(TypeError,
                                    "is not valid for read_preference"):
            TransactionOptions(read_preference={})
        with self.assertRaisesRegex(
                TypeError, "max_commit_time_ms must be an integer or None"):
            TransactionOptions(max_commit_time_ms="10000")
Example #19
def most_active_commenters():
    """
    Returns a list of the top 20 most frequent commenters.
    """
    """
    Ticket: User Report

    Construct a pipeline to find the users who comment the most on MFlix, sort
    by the number of comments, and then only return the 20 documents with the
    highest values.

    No field projection necessary.
    """
    # TODO: User Report
    # Return the 20 users who have commented the most on MFlix.
    pipeline = [{
        "$group": {
            '_id': '$email',
            'count': {
                "$sum": 1
            }
        }
    }, {
        "$sort": {
            "count": -1
        }
    }, {
        "$limit": 20
    }]

    # rc = db.comments.read_concern # you may want to change this read concern!
    rc = ReadConcern(level='majority')
    comments = db.comments.with_options(read_concern=rc)
    result = comments.aggregate(pipeline)
    return list(result)
Example #20
    def find_and_remove_MIA(self,
                            worker_name,
                            register_collection_name='availableWorker'):
        self.get_worker_health(worker_name)
        import time
        time.sleep(5)
        from pymongo.read_concern import ReadConcern
        from pymongo.write_concern import WriteConcern
        from pymongo.read_preferences import ReadPreference
        wc_majority = WriteConcern("majority", wtimeout=2000)
        session = self.client.start_session()
        session.start_transaction(read_concern=ReadConcern('local'),
                                  write_concern=wc_majority)
        record = self.client[worker_db][self.name].find_one_and_delete(
            {'data.sender': worker_name})

        if record is None:
            print('lost touch with worker ' + worker_name)
            # kill_worker
            self.client[worker_db][register_collection_name].delete_many(
                {"_id": worker_name})
            pass
        else:
            print('worker reported alive ' + worker_name)
            self.client['log']['health_history'].insert_one(record)
        session.commit_transaction()
        session.end_session()
        import logging
        logging.info('removed worker ' + worker_name + ' from ' +
                     register_collection_name)
        pass
Example #21
def aggregate(pipeline, collection, as_list=True, db_name=None):
    """
        Fetch documents based on pipeline queries.
        https://docs.mongodb.com/manual/reference/operator/aggregation-pipeline/

        :pipeline   - list of query stages
        :collection - collection name
        :as_list    - set as_list to False to get a generator
        :db_name    - specify another db if needed; defaults to MONGO_DATABASE_NAME from .env

        If something fails, returns a string with the error message.

    """
    db_name = return_db(db_name)
    collection_name = return_collection_name(collection)
    with Connect.get_connection() as mongo_connection:
        collection = mongo_connection[db_name][collection_name]
        if not isinstance(collection, Collection):
            return collection

        found_docs = collection.with_options(
            read_concern=ReadConcern("majority")).aggregate(pipeline,
                                                            allowDiskUse=True)

        if as_list:
            return [parse_doc(doc) for doc in found_docs]

        return (parse_doc(doc) for doc in found_docs)
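
A hedged usage sketch (collection and field names are illustrative):

# Count orders per customer, largest first.
pipeline = [
    {'$group': {'_id': '$customer_id', 'total': {'$sum': 1}}},
    {'$sort': {'total': -1}},
]
rows = aggregate(pipeline, 'orders')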
Example #22
def get_insert_statements(sve_collection, contig_equivalents):
    wrong_contigs = list(contig_equivalents.keys())
    filter_criteria = {
        'seq': 'GCA_000001895.4',
        'study': 'PRJEB42012',
        'contig': {
            '$in': wrong_contigs
        }
    }
    cursor = sve_collection.with_options(
        read_concern=ReadConcern("majority")).find(filter_criteria,
                                                   no_cursor_timeout=True)
    insert_statements = []
    drop_statements = []
    try:
        for variant in cursor:
            original_id = get_SHA1(variant)
            assert variant['_id'] == original_id, f"Original id is different from the one calculated " \
                                                  f"{variant['_id']} != {original_id}"
            variant['contig'] = contig_equivalents[variant['contig']]
            variant['_id'] = get_SHA1(variant)
            insert_statements.append(pymongo.InsertOne(variant))
            drop_statements.append(pymongo.DeleteOne({'_id': original_id}))
    except Exception as e:
        print(traceback.format_exc())
        raise e
    finally:
        cursor.close()

    return insert_statements, drop_statements
Example #23
    def test_causal_consistency(self):
        # Causal consistency examples
        client = self.client
        self.addCleanup(client.drop_database, 'test')
        client.test.drop_collection('items')
        client.test.items.insert_one({
            'sku': "111",
            'name': 'Peanuts',
            'start': datetime.datetime.today()
        })

        # Start Causal Consistency Example 1
        with client.start_session(causal_consistency=True) as s1:
            current_date = datetime.datetime.today()
            items = client.get_database('test',
                                        read_concern=ReadConcern('majority'),
                                        write_concern=WriteConcern(
                                            'majority', wtimeout=1000)).items
            items.update_one({
                'sku': "111",
                'end': None
            }, {'$set': {
                'end': current_date
            }},
                             session=s1)
            items.insert_one(
                {
                    'sku': "nuts-111",
                    'name': "Pecans",
                    'start': current_date
                },
                session=s1)
        # End Causal Consistency Example 1

        # Start Causal Consistency Example 2
        with client.start_session(causal_consistency=True) as s2:
            s2.advance_cluster_time(s1.cluster_time)
            s2.advance_operation_time(s1.operation_time)

            items = client.get_database(
                'test',
                read_preference=ReadPreference.SECONDARY,
                read_concern=ReadConcern('majority'),
                write_concern=WriteConcern('majority', wtimeout=1000)).items
            for item in items.find({'end': None}, session=s2):
                print(item)
Example #24
    def test_inline_map_reduce(self):
        coll = self.db.get_collection('coll',
                                      read_concern=ReadConcern('local'))
        tuple(coll.inline_map_reduce(
            'function() { emit(this._id, this.value); }',
            'function(key, values) { return 42; }'))
        self.assertEqual(
            {'level': 'local'},
            self.listener.results['started'][0].command['readConcern'])
Example #25
    def test_invalid_read_concern(self):
        coll = self.db.get_collection(
            'coll', read_concern=ReadConcern('majority'))
        self.assertRaisesRegex(
            ConfigurationError,
            'read concern level of majority is not valid '
            'with a max wire version of [0-3]',
            coll.count)
Example #26
def most_active_commenters():
    """
    Returns a list of the top 20 most frequent commenters.
    """
    """
    Ticket: User Report

    Construct a pipeline to find the users who comment the most on MFlix, sort
    by the number of comments, and then only return the 20 documents with the
    highest values.

    No field projection necessary.
    """
    """
    comments

    _id: 5a9427648b0beebeb69579cc
    name: "Andrea Le"
    email: "*****@*****.**"
    movie_id: 573a1390f29313caabcd418c
    text: "Rem officiis eaque repellendus amet eos doloribus. Porro dolor volupta..."
    date: 2012-03-26T23:20:16.000+00:00

    users

    _id: 59b99db4cfa9a34dcd7885b6
    name: "Ned Stark"
    email: "*****@*****.**"
    password: "******"""

    # TODO: User Report
    # Return the 20 users who have commented the most on MFlix.
    pipeline = [
        {
            "$group": {
                "_id": "$email",
                "count": {
                    "$sum": 1
                }
            }
        },
        {
            "$sort": {
                "count": -1
            }
        },
        {
            "$limit": 20
        },
    ]

    # rc = db.comments.read_concern  # you may want to change this read concern!
    rc = ReadConcern("majority")
    comments = db.comments.with_options(read_concern=rc)
    result = comments.aggregate(pipeline)
    return list(result)
Example #27
    def test_aggregate_out(self):
        coll = self.db.get_collection('coll',
                                      read_concern=ReadConcern('local'))
        try:
            tuple(coll.aggregate([{'$match': {'field': 'value'}},
                                  {'$out': 'output_collection'}]))
        except OperationFailure:
            # "ns doesn't exist"
            pass
        self.assertNotIn('readConcern',
                         self.listener.results['started'][0].command)
Example #28
    def __init__(
        self,
        database: str,
        driver: str,
        host: str,
        port: int,
        *,
        username: str = _Undefined,
        password: str = _Undefined,
        read_preference: str = _Undefined,
        replica_set: Optional[str] = None,
        replica_read_preference: str = _Undefined,
        loop: Optional[asyncio.AbstractEventLoop] = None
    ) -> None:
        """
        Singleton constructor of MongoDB control instance.

        :param driver: mongo connection driver.
        :param host: mongo host.
        :param port: mongo port.
        :param database: mongo database name.
        :param username: mongo user name.
        :param password: mongo user password.
        :param read_preference: mongo read settings. Defaults to: _Undefined
        :param replica_set: mongo replication name. Defaults to: None
        :param replica_read_preference: mongo read from replication. Defaults to: _Undefined
        :param loop: asyncio event loop.
        """
        self._driver = driver or 'mongodb://'
        self._host = host
        self._port = port
        self._database = database
        self._username = username
        self._password = password

        self._read_preference = self._MongoDBReadPreference.get(read_preference)
        self._replica_set = replica_set
        self._replica_read_preference = self._MongoDBReadPreference.get(replica_read_preference)

        self._io_loop = loop

        self._use_consistency = SimpleNamespace(
            read_preference=self._read_preference,
            write_concern=WriteConcern(w=1),
            read_concern=ReadConcern(level='local')
        )

        connect_args = SimpleNamespace(
            database=self._database,
            host=self._host,
            password=self._password,
            port=self._port,
            username=self._username,
        )
        self._connect(**vars(connect_args))
Example #29
def parse_collection_options(opts):
    if 'readPreference' in opts:
        opts['read_preference'] = parse_read_preference(
            opts.pop('readPreference'))

    if 'writeConcern' in opts:
        opts['write_concern'] = WriteConcern(**dict(opts.pop('writeConcern')))

    if 'readConcern' in opts:
        opts['read_concern'] = ReadConcern(**dict(opts.pop('readConcern')))
    return opts
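
A hedged usage sketch, assuming a test-spec style options document and a pymongo Database named db:

# camelCase spec keys become with_options keyword arguments.
opts = parse_collection_options({'readConcern': {'level': 'majority'},
                                 'writeConcern': {'w': 1, 'j': True}})
coll = db.test.with_options(**opts)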
Example #30
    async def update_one(
        self,
        collection: str,
        filter: Mapping[str, Any],
        update: Optional[Union[Mapping[str, Any], _Pipeline]] = None,
        upsert: bool = False,
        bypass_document_validation: bool = False,
        collation: Optional[_Collation] = None,
        array_filters: Optional[List[Mapping[str, Any]]] = None,
        hint: Optional[_IndexKeyHint] = None,  # not passed through yet; supported only on newer server versions
        session: Optional[AsyncIOMotorClientSession] = None,
        **kwds: Any
    ) -> UpdateResult:
        """
        Update a single document matching the filter.

        :param collection: collection name.
        :param filter: the query selection criteria.
        :param update: the modifications to apply. Defaults to: None.
        :param upsert: if True, perform an insert if no documents match the filter. Defaults to: False.
        :param bypass_document_validation: if True, allows the write to opt out of document-level validation. Defaults to: False.
        :param collation: an instance of Collation. Defaults to: None.
        :param array_filters: a list of filters specifying which array elements an update should apply. Defaults to: None.
        :param hint: an index to use to support the query predicate specified either by its string
                     name, or in the same format as passed to create_index() (e.g. [('field', ASCENDING)]).
        :param session: an optional AsyncIOMotorClientSession to run the operation in. Defaults to: None.
        :return: an instance of UpdateResult.
        """
        update_one_args = SimpleNamespace(
            filter=filter,
            update={'$set': update},
            upsert=upsert,
            array_filters=array_filters,
            bypass_document_validation=bypass_document_validation,
            collation=collation,
            **kwds
        )

        use_consistency = deepcopy(self._use_consistency)
        use_consistency.write_concern = WriteConcern(w='majority')
        use_consistency.read_concern = ReadConcern(level='majority')

        if Ver(self.version) >= Ver(PY_MONGODB_VERSION_ENOUGH):
            if not isinstance(session, AsyncIOMotorClientSession):
                async with await self.client.start_session() as session_inner:
                    async with session_inner.start_transaction(**vars(use_consistency)):
                        update_one_args.session = session_inner
                        return await self.db[collection].update_one(**vars(update_one_args))
            else:
                async with session.start_transaction(**vars(use_consistency)):
                    update_one_args.session = session
                    return await self.db[collection].update_one(**vars(update_one_args))

        return await self.db[collection].update_one(**vars(update_one_args))