class ConfigReaderTest(unittest.TestCase):
    """Unit tests for ConfigParser.parseConfig.

    Parses a sample configuration string and compares the resulting
    ConfigSection tree against a hand-built expected tree.
    """

    def setUp(self):
        # NOTE(review): meaning of the constructor argument `1` is not visible
        # here -- presumably a parser mode/version; confirm against ConfigParser.
        self.parser = ConfigParser(1)

    def _assertEqualSections(self, section1, section2):
        """Recursively assert that two ConfigSection trees are identical.

        Compares attribute names and values at this level, then section
        names, then recurses into each child section.
        """
        attributeNames1 = section1.getAttributeNames()
        attributeNames2 = section2.getAttributeNames()
        # Previously the msg argument was "" on every assertion, which adds
        # no information (and hides the default diff if longMessage is False).
        self.assertEqual(attributeNames1, attributeNames2,
                         "attribute name lists differ")
        for attribute in attributeNames1:
            self.assertEqual(section1.getAttribute(attribute),
                             section2.getAttribute(attribute),
                             "value of attribute %r differs" % attribute)
        sectionNames1 = section1.getSectionNames()
        sectionNames2 = section2.getSectionNames()
        self.assertEqual(sectionNames1, sectionNames2,
                         "section name lists differ")
        for section in sectionNames1:
            self._assertEqualSections(section1.getSection(section),
                                      section2.getSection(section))

    def test_configParser(self):
        """Parse a representative config and compare to the expected tree."""
        configString = """
        attr1 = 'long value';
        attr2 = [value1, value2];
        [section1]
        {
            [section11]
            {
                attr111 = ['el1', 'el2', 'el3'];
            }
            attr11 = 'val val';
        }
        [section2]
        {
            attr21 = val;
        }
        dict1 = {val1 : 'long val', 'val val' : value};
        """
        result = self.parser.parseConfig(configString)

        # Build the expected tree bottom-up: leaf sections first, then parents.
        section11 = ConfigSection()
        section11.addAttribute('attr111', ('el1', 'el2', 'el3'))
        section1 = ConfigSection()
        section1.addAttribute('attr11', 'val val')
        section1.addSection('section11', section11)
        section2 = ConfigSection()
        section2.addAttribute('attr21', 'val')
        topSection = ConfigSection()
        topSection.addSection('section1', section1)
        topSection.addSection('section2', section2)
        topSection.addAttribute('attr1', 'long value')
        topSection.addAttribute('attr2', ('value1', 'value2'))
        topSection.addAttribute('dict1',
                                {'val1': 'long val', 'val val': 'value'})
        expected = topSection

        self._assertEqualSections(result, expected)
def _run_pool_test(testFunction, poolSize, threadCount):
    """Map testFunction over range(threadCount) on a pool of poolSize workers.

    Returns the list of per-call results. The pool is always closed and
    joined, even if map raises (the original copy-pasted pattern leaked the
    pool on error).
    """
    pool = mp.Pool(poolSize)
    try:
        return pool.map(testFunction, range(threadCount))
    finally:
        pool.close()
        pool.join()


def main():
    """Benchmark read/write/conditional-read against four databases.

    Reads the config path from argv[1], runs each operation across a
    process pool, writes per-operation CSV results, and plots them.
    """
    # Command line argument: path to the DB configuration file.
    filePath = sys.argv[1]
    poolSize = 50
    threadCount = 500
    threadSleepTime = 5  # cool-down between benchmark phases (seconds)

    # Configurations
    influxConfiguration = ConfigParser.parseInfluxDbConfig(filePath)
    cassandraConfiguration = ConfigParser.parseCassandraDbConfig(filePath)
    postgresConfiguration = ConfigParser.parsePostgresDbConfig(filePath)
    mongoConfiguration = ConfigParser.parseMongoDbConfig(filePath)

    influxDbTest = InfluxDbTest(influxConfiguration)
    cassandraDbTest = CassandraDbTest(cassandraConfiguration)
    postgresDbTest = PostgresDbTest(postgresConfiguration)
    mongoDbTest = MongoDbTest(mongoConfiguration)
    writerToCsv = TestResultCsvWriter()

    print("READ OPERATION STARTED")

    # INFLUX READ.  NOTE(review): only the influx read is guarded by a
    # config check in the original; the other backends run unconditionally.
    # That asymmetry looks unintentional -- confirm before guarding them too.
    if influxConfiguration is not None:
        readInfluxPool = _run_pool_test(influxDbTest.performReadTest,
                                        poolSize, threadCount)
        writerToCsv.writeToCsv(readInfluxPool, "read_influx.csv")
        print("Influx read done")
        time.sleep(threadSleepTime)

    # CASSANDRA READ
    readCassandraPool = _run_pool_test(cassandraDbTest.performReadTest,
                                       poolSize, threadCount)
    writerToCsv.writeToCsv(readCassandraPool, "read_cassandra.csv")
    print("Cassandra read done")
    time.sleep(threadSleepTime)

    # POSTGRES READ
    readPostgresPool = _run_pool_test(postgresDbTest.performReadTest,
                                      poolSize, threadCount)
    writerToCsv.writeToCsv(readPostgresPool, "read_postgres.csv")
    print("Postgres read done")
    time.sleep(threadSleepTime)

    # MONGO READ
    readMongoPool = _run_pool_test(mongoDbTest.performReadTest,
                                   poolSize, threadCount)
    writerToCsv.writeToCsv(readMongoPool, "read_mongo.csv")
    print("Mongo read done")
    time.sleep(threadSleepTime)

    print("WRITE OPERATIONS STARTED")

    # INFLUX WRITE (original sleeps before writing this CSV, unlike the
    # other write sections -- ordering preserved).
    writeInfluxPool = _run_pool_test(influxDbTest.performWriteTest,
                                     poolSize, threadCount)
    time.sleep(threadSleepTime)
    writerToCsv.writeToCsv(writeInfluxPool, "write_influx.csv")
    print("Influx Db Write done")

    # CASSANDRA WRITE
    writeCassandraPool = _run_pool_test(cassandraDbTest.performWriteTest,
                                        poolSize, threadCount)
    writerToCsv.writeToCsv(writeCassandraPool, "write_cassandra.csv")
    time.sleep(threadSleepTime)
    print("Cassandra Db Write done")

    # POSTGRES WRITE
    writePostgresPool = _run_pool_test(postgresDbTest.performWriteTest,
                                       poolSize, threadCount)
    writerToCsv.writeToCsv(writePostgresPool, "write_postgres.csv")
    time.sleep(threadSleepTime)
    print("Postgres Db Write done")

    # MONGO WRITE
    writeMongoPool = _run_pool_test(mongoDbTest.performWriteTest,
                                    poolSize, threadCount)
    writerToCsv.writeToCsv(writeMongoPool, "write_mongo.csv")
    time.sleep(threadSleepTime)
    print("Mongo Db Write done")

    # SELECT WITH CONDITION
    # INFLUX
    readInfluxWithConditionPool = _run_pool_test(
        influxDbTest.performReadWithConditionTest, poolSize, threadCount)
    writerToCsv.writeToCsv(readInfluxWithConditionPool,
                           "read_with_condition_influx.csv")
    print("Influx Db Read With condition done")
    time.sleep(threadSleepTime)

    # CASSANDRA
    readCassandraWithConditionPool = _run_pool_test(
        cassandraDbTest.performReadWithConditionTest, poolSize, threadCount)
    writerToCsv.writeToCsv(readCassandraWithConditionPool,
                           "read_with_condition_cassandra.csv")
    print("Cassandra Db Read With condition done")
    time.sleep(threadSleepTime)

    # POSTGRES
    readPostgresWithConditionPool = _run_pool_test(
        postgresDbTest.performReadWithConditionTest, poolSize, threadCount)
    writerToCsv.writeToCsv(readPostgresWithConditionPool,
                           "read_with_condition_postgres.csv")
    print("Postgres Db Read With condition done")
    time.sleep(threadSleepTime)

    # MONGO
    readMongoWithConditionPool = _run_pool_test(
        mongoDbTest.performReadWithConditionTest, poolSize, threadCount)
    writerToCsv.writeToCsv(readMongoWithConditionPool,
                           "read_with_condition_mongo.csv")
    print("Mongo Db Read With condition done")

    # Plot the collected results.
    visualization = Visualization()
    visualization.dbReadResultPlot()
    visualization.dbWriteResultPlot()
    visualization.dbReadWithConditionResultPlot()
def setUp(self):
    """Create a fresh ConfigParser before each test.

    NOTE(review): this appears to duplicate the setUp already defined in
    ConfigReaderTest above, and no enclosing class is visible here --
    confirm whether this fragment is dead code or belongs to another
    TestCase. The argument `1` is presumably a parser mode/version;
    verify against the ConfigParser constructor.
    """
    self.parser = ConfigParser(1)