def benchmark_mongo4(self):
    """Run a 50/50 read / read-modify-write workload in batched transactions.

    Operations are grouped into multi-document transactions of
    ``batch_size`` ops each, committed with majority read and write
    concern (MongoDB 4.x style).

    Returns:
        str: a human-readable summary of operation counts.
    """
    rc = read_concern.ReadConcern("majority")
    wc = write_concern.WriteConcern("majority")
    batch_size = 1000
    print(f"Batch size: {batch_size}")
    with self.collection.database.client.start_session() as session:
        # Integer (floor) division instead of int(a / b): identical result
        # without going through float, which can lose precision for large
        # operation counts. Any partial trailing batch is skipped, as before.
        for i in range(self.operations // batch_size):
            ops = random.choices([READ, READMOD], [50, 50], k=batch_size)
            with session.start_transaction(read_concern=rc, write_concern=wc):
                for op in ops:
                    if op == READ:
                        self.num_read += 1
                        self.collection.find_one(
                            {"item": self.req_set.pop()}, session=session
                        )
                    elif op == READMOD:
                        self.num_readmod += 1
                        self.collection.find_one_and_update(
                            {"item": self.req_set.pop()},
                            {"$set": {"title": f"Updated at operation {i}"}},
                            session=session,
                        )
    return (
        f"📖 Number of reads: {self.num_read}\n"
        + f"✍️ Number of read_modify_writes: {self.num_readmod}\n"
        + f"🔎 {(self.num_read / self.operations) * 100}% reads"
    )
def benchmark_mongo4(self):
    """Run a 95/5 read / insert workload in batched transactions.

    Reads fetch the most recently inserted document (``self.read_id``);
    inserts create a new document and remember its id for subsequent
    reads. Each batch of ``batch_size`` ops runs in one transaction with
    majority read and write concern.

    Returns:
        str: a human-readable summary of operation counts.
    """
    rc = read_concern.ReadConcern("majority")
    wc = write_concern.WriteConcern("majority")
    batch_size = 1000
    print(f"Batch size: {batch_size}")
    with self.collection.database.client.start_session() as session:
        # // instead of int(a / b): same truncation, no float round-trip.
        for i in range(self.operations // batch_size):
            ops = random.choices([READ, INSERT], [95, 5], k=batch_size)
            with session.start_transaction(read_concern=rc, write_concern=wc):
                for op in ops:
                    if op == READ:
                        self.num_read += 1
                        self.collection.find_one(
                            {"_id": self.read_id}, session=session
                        )
                    elif op == INSERT:
                        self.num_insert += 1
                        # BUG FIX: the original omitted session=session here,
                        # so the insert ran OUTSIDE the surrounding
                        # transaction (operations must receive the session to
                        # participate in it). The sibling benchmarks pass the
                        # session to every operation.
                        self.read_id = self.collection.insert_one(
                            {
                                "item": self.records + i,
                                "qty": 100 + i,
                                "tags": ["tag"],
                                "title": "title",
                            },
                            session=session,
                        ).inserted_id
    return (
        f"📖 Number of reads: {self.num_read}\n"
        + f"✍️ Number of inserts: {self.num_insert}\n"
        + f"🔎 {(self.num_read / self.operations) * 100}% reads"
    )
class Meta:
    """ODM settings for the registered-applications collection."""

    # Storage target.
    connection_alias = 'Application'
    collection_name = 'registeredApps'

    # Consistency/durability: journaled writes, majority-level reads,
    # reads served by the nearest replica-set member.
    write_concern = wc.WriteConcern(j=True)
    read_concern = rc.ReadConcern(level='majority')
    read_preference = ReadPreference.NEAREST

    cascade = True
    ignore_unknown_fields = True

    # Unique lookup keys for an app, plus date fields for range queries.
    indexes = [
        IndexModel('appId', name='appIdIndex', unique=True, sparse=True),
        IndexModel('appHash', name='appHashIndex', unique=True, sparse=True),
        IndexModel('appCrypt', name='appCryptIndex', unique=True, sparse=True),
        IndexModel('createdDate', name='createdDateIndex', sparse=True),
        IndexModel('validUntil', name='validUntilIndex', sparse=True),
        IndexModel('appManager.email', name='managerEmailIndex', sparse=True,
                   unique=True)
    ]
class Meta:
    """ODM settings for the global analytics collection (no indexes)."""

    # Storage target.
    connection_alias = 'Posts'
    collection_name = 'global_analytics'

    # Consistency/durability: journaled writes, majority-level reads,
    # reads served by the nearest replica-set member.
    write_concern = wc.WriteConcern(j=True)
    read_concern = rc.ReadConcern(level='majority')
    read_preference = ReadPreference.NEAREST

    cascade = True
    ignore_unknown_fields = True
class Meta:
    """ODM settings for the application metadata collection (no indexes)."""

    # Storage target.
    connection_alias = 'Application'
    collection_name = 'appsMeta'

    # Consistency/durability: journaled writes, majority-level reads,
    # reads served by the nearest replica-set member.
    write_concern = wc.WriteConcern(j=True)
    read_concern = rc.ReadConcern(level='majority')
    read_preference = ReadPreference.NEAREST

    cascade = True
    ignore_unknown_fields = True
def get_database(runner):
    """Return the benchmark database handle for *runner*.

    The transactional MongoDB runner gets a database configured with
    majority read and write concern (required for meaningful transaction
    benchmarks); every other runner gets the driver defaults.

    Args:
        runner: runner identifier (compared against TRANSACTIONAL_MONGO).

    Returns:
        A pymongo Database for "fdb-benchmark".
    """
    client = get_client(runner)
    # Direct equality instead of membership in a single-element list:
    # same behavior, no throwaway list allocation.
    if runner == TRANSACTIONAL_MONGO:
        wc = write_concern.WriteConcern("majority")
        rc = read_concern.ReadConcern("majority")
        return client.get_database(
            "fdb-benchmark", write_concern=wc, read_concern=rc
        )
    return client.get_database("fdb-benchmark")
class Meta:
    """ODM settings for the comment-rank collection."""

    # Storage target.
    connection_alias = 'Comments'
    collection_name = 'comment_ranks'

    # Consistency/durability: journaled writes, majority-level reads,
    # reads served by the nearest replica-set member.
    write_concern = wc.WriteConcern(j=True)
    read_concern = rc.ReadConcern(level='majority')
    read_preference = ReadPreference.NEAREST

    cascade = True
    ignore_unknown_fields = True

    # NOTE(review): unique=True on 'rankType' allows only one document per
    # rank type collection-wide — confirm this is intended.
    indexes = [
        IndexModel('userId', name='rankUserIdIndex', unique=True, sparse=True),
        IndexModel('commentId', name='rankCommentIdIndex', unique=True,
                   sparse=True),
        IndexModel('rankType', name='rankTypeIndex', unique=True, sparse=True)
    ]
class Meta:
    """ODM settings for the channels collection."""

    # Storage target.
    connection_alias = 'Channels'
    collection_name = 'channels'

    # Consistency/durability: journaled writes, majority-level reads,
    # reads served by the nearest replica-set member.
    write_concern = wc.WriteConcern(j=True)
    read_concern = rc.ReadConcern(level='majority')
    read_preference = ReadPreference.NEAREST

    cascade = True
    ignore_unknown_fields = True

    # Unique channel identity keys plus non-unique membership lookups.
    indexes = [
        IndexModel('channelUsername', name='channelUsernameIndex', unique=True,
                   sparse=True),
        IndexModel('channelId', name='channelIdIndex', unique=True,
                   sparse=True),
        IndexModel('channelCreator', name='channelCreatorIndex', sparse=True),
        IndexModel('channelAdmins', name='channelAdminsIndex', sparse=True),
        IndexModel('channelBot', name='channelBotIndex', sparse=True)
    ]
def benchmark_mongo4(self):
    """Run a 95/5 range-scan / insert workload in batched transactions.

    Reads are short range scans of random length 1-10 starting at a
    random item id; inserts within a batch are buffered and written with
    a single ``insert_many`` at the end of the transaction. Each batch of
    ``batch_size`` ops runs with majority read and write concern.

    Returns:
        str: a human-readable summary including the average scan length.
    """
    rc = read_concern.ReadConcern("majority")
    wc = write_concern.WriteConcern("majority")
    batch_size = 1000
    print(f"Batch size: {batch_size}")
    with self.collection.database.client.start_session() as session:
        # // instead of int(a / b): same truncation, no float round-trip.
        for i in range(self.operations // batch_size):
            ops = random.choices([READ, INSERT], [95, 5], k=batch_size)
            with session.start_transaction(read_concern=rc, write_concern=wc):
                inserts = []
                for op in ops:
                    if op == READ:
                        scan_length = random.randint(1, 10)
                        start_id = self.req_set.pop()
                        # Clamp so the scan window never runs past the last
                        # record. Algebraically identical to the original
                        # start_id - ((start_id + scan_length) - records).
                        if start_id + scan_length > self.records:
                            start_id = self.records - scan_length
                        self.num_read += 1
                        self.run_scan_length += scan_length
                        # Materialize the cursor so the scan actually
                        # executes inside the transaction.
                        list(
                            self.collection.find(
                                {"item": {"$gte": start_id}},
                                session=session,
                            ).limit(scan_length)
                        )
                    elif op == INSERT:
                        self.num_insert += 1
                        inserts.append(
                            {
                                "item": self.records + i,
                                "qty": 100 + i,
                                "tags": ["tag"],
                                "title": "title",
                            }
                        )
                if inserts:
                    self.collection.insert_many(inserts, session=session)
    # Robustness: a run could (improbably) contain zero reads; report an
    # average of 0 instead of raising ZeroDivisionError.
    avg_scan = self.run_scan_length / self.num_read if self.num_read else 0.0
    return (
        f"📖 Number of reads: {self.num_read}\n"
        + f"✍️ Number of inserts: {self.num_insert}\n"
        + f"🔎 {(self.num_read / self.operations) * 100}% reads\n"
        + f"Average scan length: {avg_scan}"
    )
def benchmark_mongo4(self):
    """Run a read-only workload: one find_one per operation, batched
    into transactions of ``batch_size`` with majority read/write concern.

    Returns:
        str: a human-readable summary of the read count.
    """
    rc = read_concern.ReadConcern("majority")
    wc = write_concern.WriteConcern("majority")
    batch_size = 1000
    print(f"Batch size: {batch_size}")
    with self.collection.database.client.start_session() as session:
        # // instead of int(a / b): same truncation, no float round-trip.
        # Loop variables are unused, so name them `_` per convention.
        for _ in range(self.operations // batch_size):
            with session.start_transaction(read_concern=rc, write_concern=wc):
                for _ in range(batch_size):
                    self.num_read += 1
                    self.collection.find_one(
                        {"item": self.req_set.pop()}, session=session
                    )
    return (
        f"📖 Number of reads: {self.num_read}\n"
        + f"🔎 {(self.num_read / self.operations) * 100}% reads"
    )
class Meta:
    """ODM settings for the user-reactions collection."""

    # Storage target.
    connection_alias = 'Posts'
    collection_name = 'reactions'

    # Consistency/durability: journaled writes, majority-level reads,
    # reads served by the nearest replica-set member.
    write_concern = wc.WriteConcern(j=True)
    read_concern = rc.ReadConcern(level='majority')
    read_preference = ReadPreference.NEAREST

    cascade = True
    ignore_unknown_fields = True

    # Non-unique lookup indexes for reaction queries.
    indexes = [
        IndexModel('userId', name='userReactionUserIdIndex', sparse=True),
        IndexModel('reactionIndex', name='userReactionReactionIndexIndex',
                   sparse=True),
        IndexModel('reactionDate', name='userReactionDateIndex', sparse=True),
        IndexModel('postId', name='postIdIndex', sparse=True)
    ]
class Meta:
    """ODM settings for the bots collection."""

    # Storage target.
    connection_alias = 'Users'
    collection_name = 'bots'

    # Consistency/durability: journaled writes, majority-level reads,
    # reads served by the nearest replica-set member.
    write_concern = wc.WriteConcern(j=True)
    read_concern = rc.ReadConcern(level='majority')
    read_preference = ReadPreference.NEAREST

    cascade = True
    ignore_unknown_fields = True

    # Each identity key of a bot must be unique.
    indexes = [
        IndexModel('botUsername', name='botUsernameIndex', unique=True,
                   sparse=True),
        IndexModel('userId', name='botIdIndex', unique=True, sparse=True),
        IndexModel('botToken', name='botTokenIndex', unique=True, sparse=True)
    ]
class Meta:
    """ODM settings for the posts collection."""

    # Storage target.
    connection_alias = 'Posts'
    collection_name = 'posts'

    # Consistency/durability: journaled writes, majority-level reads,
    # reads served by the nearest replica-set member.
    write_concern = wc.WriteConcern(j=True)
    read_concern = rc.ReadConcern(level='majority')
    read_preference = ReadPreference.NEAREST

    cascade = True
    ignore_unknown_fields = True

    # Unique post identity (postId, groupHash) plus non-unique lookups.
    indexes = [
        IndexModel('creator', name='postCreatorIndex', sparse=True),
        IndexModel('postId', name='postIdIndex', unique=True, sparse=True),
        IndexModel('groupHash', name='postGroupHashIndex', unique=True,
                   sparse=True),
        IndexModel('messageId', name='postMessageIdIndex', sparse=True),
        IndexModel('channelId', name='postChannelIdIndex', sparse=True),
        IndexModel('createdDate', name='postCreatedDateIndex', sparse=True)
    ]
# Example multi-database transaction via ClientSession.with_transaction.
# Alternate (sharded) connection string kept for reference:
# uriString = 'mongodb://mongos0.example.com:27017,mongos1.example.com:27017/'
from pymongo import MongoClient, WriteConcern, read_concern, ReadPreference

uriString = 'mongodb://*****:*****@localhost:27017'
client = MongoClient(uriString)
wc_majority = WriteConcern("majority", wtimeout=1000)

# Prerequisite: both collections must already exist before they can be
# touched inside a transaction, so seed one document into each.
db_one = client.get_database("mydb1", write_concern=wc_majority)
db_two = client.get_database("mydb2", write_concern=wc_majority)
db_one.foo.insert_one({'abc': 0})
db_two.bar.insert_one({'xyz': 0})


def callback(session):
    """Transaction body: write one document into each database.

    Every operation MUST receive the session, otherwise it runs outside
    the transaction.
    """
    foo_coll = session.client.mydb1.foo
    bar_coll = session.client.mydb2.bar
    foo_coll.insert_one({'abc': 1}, session=session)
    bar_coll.insert_one({'xyz': 999}, session=session)


# with_transaction starts the transaction, runs the callback, and
# commits — retrying or aborting on error as appropriate.
with client.start_session() as session:
    session.with_transaction(
        callback,
        read_concern=read_concern.ReadConcern('local'),
        write_concern=wc_majority,
        read_preference=ReadPreference.PRIMARY,
    )
def start(self):
    """Begin a transaction on the wrapped session.

    Uses snapshot-level reads and majority-acknowledged writes.
    """
    snapshot_reads = read_concern.ReadConcern('snapshot')
    majority_writes = write_concern.WriteConcern('majority')
    self.session.start_transaction(snapshot_reads, majority_writes)