def setUp(self):
    '''Open a fresh Mongo connection, clear testdb and start an
    aggregator with a 1 second buffer.'''
    self.buff_time = 1
    self.conn = MongoClient(**MONGO_DB_SETTINGS)
    self.conn.drop_database('testdb')
    self.b = MongoQueryAggregator(self.buff_time, MONGO_DB_SETTINGS, logger)
    self.b.start()
def test_3(self):
    '''Insert 6 documents split across 2 dbs, update them all via
    upsert, then verify both db contents and aggregator results.'''
    mongo_agg = MongoQueryAggregator(MONGO_DB_SETTINGS, 0.1, 5)
    docs = [{'id': 1}, {'id': 2}, {'id': 3}, {'id': 4}, {'id': 5}, {'id': 6}]
    self.conn['testdb1'].profiles.insert(docs[:3])
    self.conn['testdb2'].profiles.insert(docs[3:])
    mongo_agg.testdb1.profiles.find({}).upsert().update(
        {'$set': {'status': 'updated'}})
    mongo_agg.testdb2.profiles.find({}).upsert().update(
        {'$set': {'status': 'updated'}})
    time.sleep(0.1)
    # query just to flush older queries
    mongo_agg.testdb2.profiles.find({'css': 'sacas'}).update(
        {'$set': docs[0]})
    aggregators_expected_results = {
        ('testdb1', 'profiles'): Counter({'nModified': 3, 'nMatched': 3}),
        ('testdb2', 'profiles'): Counter({'nModified': 3, 'nMatched': 3}),
    }
    # collect what actually landed in both dbs
    output_data = []
    for doc in self.conn.testdb1.profiles.find():
        output_data.append(doc)
    for doc in self.conn.testdb2.profiles.find():
        output_data.append(doc)
    self.assertEqual(aggregators_expected_results, mongo_agg.get_results())
    # every original doc should now carry the updated status
    for doc in docs:
        doc['status'] = 'updated'
    self.assertListEqual(output_data, docs)
def test_4(self):
    '''Insert documents into multiple dbs and check both the mongo
    data and the aggregator results.'''
    mongo_agg = MongoQueryAggregator(MONGO_DB_SETTINGS, 0.1, 5)
    dbs_to_data = {
        'testdb1': [{'name': 'User2', 'id': 1}, {'name': 'User2', 'id': 2}],
        'testdb2': [{'name': 'User3', 'id': 3}],
        'testdb3': [{'name': 'User5', 'id': 5},
                    {'name': 'User6', 'id': 6},
                    {'name': 'User7', 'id': 8}],
    }
    for db_name in dbs_to_data:
        for doc in dbs_to_data[db_name]:
            mongo_agg[db_name].profiles.insert(doc)
    time.sleep(0.1)
    # dummy data to flush older data
    mongo_agg.testdb1.profiles.insert({'key': 1})
    for db_name in dbs_to_data:
        data = self.conn[db_name].profiles.find().sort([('id', 1)])
        self.assertEqual(data.count(), len(dbs_to_data[db_name]))
        docs_in_db = []
        for doc in data:
            docs_in_db.append(doc)
        self.assertListEqual(docs_in_db, dbs_to_data[db_name])
    aggregators_expected_results = {
        ('testdb1', 'profiles'): Counter({'nInserted': 2}),
        ('testdb3', 'profiles'): Counter({'nInserted': 3}),
        ('testdb2', 'profiles'): Counter({'nInserted': 1}),
    }
    aggregators_results = mongo_agg.get_results()
    self.assertEqual(aggregators_expected_results, aggregators_results)
def test_2(self):
    '''Insert 6 documents with max_ops_limit=5: the sixth insert
    flushes the first five, so exactly five should be in mongodb.'''
    mongo_agg = MongoQueryAggregator(MONGO_DB_SETTINGS, 1, 5)
    docs = [{'name': 'User1', 'id': 1},
            {'name': 'User2', 'id': 2},
            {'name': 'User3', 'id': 3},
            {'name': 'User4', 'id': 4},
            {'name': 'User5', 'id': 5},
            {'name': 'User6', 'id': 6}]
    # while inserting 6 records, the 6th record flushes the first 5 to db
    for doc in docs:
        mongo_agg.testdb1.profiles.insert(doc)
    cursor = self.conn['testdb1'].profiles.find().sort([('id', 1)])
    self.assertEqual(cursor.count(), 5)
    mongo_docs = [doc for doc in cursor]
    # only the first five docs should have been flushed
    self.assertListEqual(mongo_docs, docs[:5])
def test_2(self):
    '''Upsert 6 documents through the aggregator; the 6th operation
    flushes the first five to mongo (max_ops_limit=5).'''
    mongo_agg = MongoQueryAggregator(MONGO_DB_SETTINGS, 0.1, 5)
    docs = [{'id': 1}, {'id': 2}, {'id': 3}, {'id': 4}, {'id': 5}, {'id': 6}]
    # while inserting 6 records, the 6th record flushes the first 5 to db
    for doc in docs:
        mongo_agg.testdb1.profiles.find(doc).upsert().update({'$set': doc})
    cursor = self.conn['testdb1'].profiles.find(
        {}, {'_id': 0, 'id': 1}).sort([('id', 1)])
    self.assertEqual(cursor.count(), 5)
    mongo_docs = [doc for doc in cursor]
    # only the first five docs should have been flushed
    self.assertListEqual(mongo_docs, docs[:5])
def test_1(self):
    '''Insert 2 documents, upsert-update both, then verify db contents
    and aggregator results once the 0.1 sec buffer flushes.'''
    mongo_agg = MongoQueryAggregator(MONGO_DB_SETTINGS, 0.1, 10)
    docs = [{'name': 'User1', 'id': 1}, {'name': 'User2', 'id': 2}]
    # seed the buffer with the documents we will update
    mongo_agg.testdb1.profiles.insert(docs[0])
    mongo_agg.testdb1.profiles.insert(docs[1])
    mongo_agg.testdb1.profiles.find({'id': 1}).upsert().update(
        {'$set': {'updated': True}})
    mongo_agg.testdb1.profiles.find({'id': 2}).upsert().update(
        {'$set': {'updated': True}})
    # nothing flushed yet: interval not elapsed, op limit not reached
    self.assertEqual(self.conn['testdb1'].profiles.count(), 0)
    time.sleep(0.1)
    mongo_agg.testdb1.profiles.insert({'dummy': 1})  # just to flush old data
    data = self.conn['testdb1'].profiles.find()
    self.assertEqual(self.conn['testdb1'].profiles.count(), 2)
    data_in_dbs = [doc for doc in data]
    for doc in docs:
        doc['updated'] = True
    self.assertListEqual(docs, data_in_dbs)
    aggregators_expected_results = {
        ('testdb1', 'profiles'): Counter(
            {'nModified': 2, 'nMatched': 2, 'nInserted': 2}),
    }
    self.assertEqual(aggregators_expected_results, mongo_agg.get_results())
def test_3(self):
    '''With max_ops_limit=5 the 6th insert flushes the first five;
    here documents go to multiple dbs.'''
    mongo_agg = MongoQueryAggregator(MONGO_DB_SETTINGS, 1, 5)
    docs = [{'name': 'User1', 'id': 1},
            {'name': 'User2', 'id': 2},
            {'name': 'User3', 'id': 3},
            {'name': 'User4', 'id': 4},
            {'name': 'User5', 'id': 5},
            {'name': 'User6', 'id': 6}]
    # while inserting 6 records, the 6th record flushes the first 5 to db
    for doc in docs:
        mongo_agg.testdb1.profiles.insert(doc)
    mongo_agg.testdb2.profiles.insert({'name': 'User1', 'id': 1})
    cursor = self.conn['testdb1'].profiles.find().sort([('id', 1)])
    docs_in_db = [doc for doc in cursor]
    self.assertListEqual(docs_in_db, docs[:5])
    aggregators_expected_results = {
        ('testdb1', 'profiles'): Counter({'nInserted': 5}),
    }
    aggregators_results = mongo_agg.get_results()
    self.assertEqual(aggregators_expected_results, aggregators_results)
class TestUpsertDoc(unittest.TestCase):
    '''Exercises upsert-style updates through the aggregator.'''

    def setUp(self):
        self.buff_time = 1
        self.conn = MongoClient(**MONGO_DB_SETTINGS)
        self.conn.drop_database('testdb')
        self.b = MongoQueryAggregator(self.buff_time, MONGO_DB_SETTINGS,
                                      logger)
        self.b.start()

    def tearDown(self):
        self.conn.drop_database('testdb')
        self.conn.close()
        self.conn = None
        self.b.stop()

    def test_docs_insert(self):
        '''Insert three docs, wait past the buffer, upsert a new value
        for each key, wait again and check only new values remain.'''
        for key in (1, 2, 3):
            self.b.testdb.testtable.insert({'key': key})
        sleep(2 * self.buff_time)
        for old, new in ((1, 6), (2, 7), (3, 8)):
            self.b.testdb.testtable.find({'key': old}).upsert().update(
                {'$set': {'key': new}})
        sleep(2 * self.buff_time)
        coll = self.conn['testdb']['testtable']
        for doc in coll.find():
            self.assertEqual(doc['key'] in [6, 7, 8], True)
class TestBulk(unittest.TestCase):
    '''Bulk-volume smoke test for the aggregator.'''

    def setUp(self):
        self.buff_time = 1
        self.conn = MongoClient(**MONGO_DB_SETTINGS)
        self.conn.drop_database('testdb')
        self.b = MongoQueryAggregator(self.buff_time, MONGO_DB_SETTINGS,
                                      logger)
        self.b.start()

    def tearDown(self):
        self.conn.drop_database('testdb')
        self.conn.close()
        self.conn = None
        self.b.stop()

    def test_docs_insert(self):
        '''Buffer one million inserts and verify they all reach mongo.'''
        for _ in range(1000000):
            self.b.testdb.testtable.insert({'key': 1})
        sleep(5 * self.buff_time)
        coll = self.conn['testdb']['testtable']
        self.assertEqual(coll.count(), 1000000)
def test_1(self):
    '''Insert 2 documents 0.1 sec apart: the first stays buffered until
    the second insert (after the interval) flushes it to mongo.'''
    mongo_agg = MongoQueryAggregator(MONGO_DB_SETTINGS, 0.1, 10)
    docs = [{'name': 'User1', 'id': 1}, {'name': 'User2', 'id': 2}]
    mongo_agg.testdb1.profiles.insert(docs[0])
    # added one doc to aggregator, it should not be inserted to DB yet
    # as time interval 0.1 sec is not passed and doc limit of 10 is not
    # crossed
    self.assertEqual(self.conn['testdb1'].profiles.count(), 0)
    time.sleep(0.1)
    # docs[1] is inserted after 0.1 sec so it should flush older data,
    # so docs[0] should be inserted to mongo
    mongo_agg.testdb1.profiles.insert(docs[1])
    data = self.conn['testdb1'].profiles.find()
    self.assertEqual(data.count(), 1)
    for doc in data:
        self.assertEqual(doc, docs[0])
def test_1(self):
    '''Update one doc, wait 0.1 sec so a later update flushes it, then
    verify the flushed data.'''
    mongo_agg = MongoQueryAggregator(MONGO_DB_SETTINGS, 0.1, 10)
    docs = [{'name': 'User1', 'id': 1}, {'name': 'User2', 'id': 2}]
    self.conn['testdb1'].profiles.insert(docs)
    mongo_agg.testdb1.profiles.find({'id': 1}).upsert().update(
        {'$set': {'id2': 1}})
    time.sleep(0.1)
    # issued after the interval, so it flushes the buffered update above
    mongo_agg.testdb1.profiles.find({'id': 2}).upsert().update(
        {'$set': {'id2': 2}})
    self.assertEqual(self.conn['testdb1'].profiles.count(), 2)
    self.assertEqual(self.conn['testdb1'].profiles.count({'id2': 1}), 1)
    for doc in self.conn['testdb1'].profiles.find(
            {'id': 1}, {'id2': 1, '_id': 0}):
        self.assertEqual(doc['id2'], 1)
from pymongo import MongoClient from moquag import MongoQueryAggregator import thread from time import sleep from tests.settings import MONGO_DB_SETTINGS, logger buff_time = 0.5 conn = MongoClient(**MONGO_DB_SETTINGS) b = MongoQueryAggregator( buff_time, MONGO_DB_SETTINGS, logger) b.start() def write_to_db(threadName, delay, table): count = 0 while True: sleep(delay) count += 1 b.testdb[table].insert({'key': 1}) try: thread.start_new_thread(write_to_db, ("Thread-1", 0.001, 'profiles')) thread.start_new_thread(write_to_db, ("Thread-1", 0.001, 'profiles')) thread.start_new_thread(write_to_db, ("Thread-1", 0.001, 'profiles')) thread.start_new_thread(write_to_db, ("Thread-1", 0.001, 'events')) thread.start_new_thread(write_to_db, ("Thread-1", 0.001, 'events')) thread.start_new_thread(write_to_db, ("Thread-1", 0.001, 'events')) thread.start_new_thread(write_to_db, ("Thread-1", 0.001, 'events')) thread.start_new_thread(write_to_db, ("Thread-1", 0.001, 'events')) thread.start_new_thread(write_to_db, ("Thread-1", 0.001, 'user-details')) thread.start_new_thread(write_to_db, ("Thread-10", 0.001, 'user-details'))
def setUp(self):
    '''Open a Mongo connection and start an aggregator with a
    5 second buffer.'''
    self.buff_time = 5
    self.conn = MongoClient(**MONGO_DB_SETTINGS)
    self.b = MongoQueryAggregator(self.buff_time, MONGO_DB_SETTINGS, logger)
    self.b.start()
def test_2(self):
    '''Insert/update multiple documents across multiple dbs, doing more
    operations than max_ops_limit, and check the dbs.'''
    mongo_agg = MongoQueryAggregator(MONGO_DB_SETTINGS, 0.1, 5)
    dbs_to_data_to_insert = {
        'testdb1': [{'id': 1}, {'id': 2}],
        'testdb2': [{'id': 3}],
        'testdb3': [{'id': 5}, {'id': 6}, {'id': 8}],
    }
    db_to_data_to_update = {
        'testdb1': [({'id': 1}, {'name': 'new1'})],
        'testdb2': [({'id': 100}, {'name': 'new100'})],  # will insert
        'testdb3': [({'id': 5}, {'name': 'new5'}),
                    ({'id': 6}, {'name': 'new6'})],
    }
    for db_name, docs in dbs_to_data_to_insert.iteritems():
        for doc in docs:
            mongo_agg[db_name].profiles.insert(doc)
    for db_name, data in db_to_data_to_update.iteritems():
        for search_query, update_query in data:
            mongo_agg[db_name].profiles.find(search_query).upsert().update(
                {'$set': update_query})
    time.sleep(0.1)
    mongo_agg.testdb1.profiles.insert({'dummy': 1})  # just to flush old data
    # expected data on dbs
    expected_data = {
        'testdb1': [{'id': 1, 'name': 'new1'}, {'id': 2}],
        'testdb2': [{'id': 3}, {'id': 100, 'name': 'new100'}],
        'testdb3': [{'id': 5, 'name': 'new5'},
                    {'id': 6, 'name': 'new6'},
                    {'id': 8}],
    }
    for db in expected_data:
        cursor = self.conn[db].profiles.find(
            {}, {'name': 1, 'id': 1, '_id': 0})
        # db data count check
        self.assertEqual(cursor.count(), len(expected_data[db]))
        data_in_db = [doc for doc in cursor]
        self.assertListEqual(data_in_db, expected_data[db])
    aggregators_expected_results = {
        ('testdb1', 'profiles'): Counter(
            {'nInserted': 2, 'nModified': 1, 'nMatched': 1}),
        ('testdb3', 'profiles'): Counter(
            {'nInserted': 3, 'nModified': 2, 'nMatched': 2}),
        ('testdb2', 'profiles'): Counter(
            {'nUpserted': 1, 'nInserted': 1}),
    }
    self.assertEqual(mongo_agg.get_results(), aggregators_expected_results)
def test_5(self):
    '''Testing get_buffered_query_count.'''
    mongo_agg = MongoQueryAggregator(MONGO_DB_SETTINGS, 1, 50)
    dbs_to_data_to_insert = {
        ('testdb1', 'profiles'): [{'id': 1}, {'id': 2}],
        ('testdb1', 'events'): [{'id': 3}],
        ('testdb3', 'users_details'): [{'id': 5}, {'id': 6}, {'id': 8}],
    }
    db_to_data_to_update = {
        ('testdb1', 'profiles'): [({'id': 1}, {'name': 'new1'})],
        ('testdb1', 'events'): [({'id': 100}, {'name': 'new100'})],
        ('testdb3', 'users_details'): [({'id': 5}, {'name': 'new5'}),
                                       ({'id': 6}, {'name': 'new6'})],
    }
    for (db_name, collection_name), docs in \
            dbs_to_data_to_insert.iteritems():
        for doc in docs:
            mongo_agg[db_name][collection_name].insert(doc)
    for (db_name, collection_name), data in \
            db_to_data_to_update.iteritems():
        for search_query, update_query in data:
            mongo_agg[db_name][collection_name].find(
                search_query).upsert().update({'$set': update_query})
    # per (collection, db) counts of queries still sitting in the buffer
    expected_query_count = {
        ('events', 'testdb1'): {'insert': 1, 'find': 1},
        ('profiles', 'testdb1'): {'insert': 2, 'find': 1},
        ('users_details', 'testdb3'): {'insert': 3, 'find': 2},
    }
    query_count = mongo_agg.get_buffered_query_count()
    self.assertEqual(expected_query_count, query_count)