Example #1
    def test_load_reader_real2(self):
        reader1 = ApiritifLoadReader(self.obj.log)
        reader1.engine = EngineEmul()
        reader1.register_file(RESOURCES_DIR + "jmeter/jtl/apiritif-results/apiritif-0.csv")
        reader1.register_file(RESOURCES_DIR + "jmeter/jtl/apiritif-results/apiritif-1.csv")

        reader2 = ApiritifLoadReader(self.obj.log)
        reader2.engine = EngineEmul()
        reader2.register_file(RESOURCES_DIR + "jmeter/jtl/apiritif-results/apiritif--10.csv")
        reader2.register_file(RESOURCES_DIR + "jmeter/jtl/apiritif-results/apiritif--11.csv")

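        # consolidate both readers under a single aggregator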
        reader = ConsolidatingAggregator()
        reader.engine = EngineEmul()
        reader.add_underling(reader1)
        reader.add_underling(reader2)

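        # the very first fetch yields no datapoints yet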
        items = list(reader.datapoints())
        self.assertEqual(0, len(items))

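        # drain the aggregator; consolidated concurrency must never exceed 4
        # (two readers with two result files each)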
        all_items = []
        while True:
            items = list(reader.datapoints())
            all_items.extend(items)
            if not items:
                break

            for point in items:
                cnc = point[DataPoint.CURRENT][''][KPISet.CONCURRENCY]
                logging.info("%s: %s", point[DataPoint.TIMESTAMP], cnc)
                self.assertLessEqual(cnc, 4)
                cnc1 = point[DataPoint.CUMULATIVE][''][KPISet.CONCURRENCY]
                self.assertLessEqual(cnc1, 4)

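        # the final datapoint reports the full combined concurrency of 4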
        self.assertEqual(4, all_items[-1][DataPoint.CURRENT][''][KPISet.CONCURRENCY])

    def test_inconsistent(self):
        self.skipTest("just keep this code for future troubleshooting")
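        # feed a previously captured bzt.log into a ResultsFromBZA reader to inspect
        # how the aggregator consolidates it second by second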
        agg = ConsolidatingAggregator()
        obj = ResultsFromBZA(MasterFromLog(data={'id': 0}))
        with open("/tmp/downloads/bzt.log") as fhd:
            obj.master.loglines = fhd.readlines()

        class Listener(AggregatorListener):
            def aggregated_second(self, data):
                for x in data[DataPoint.CURRENT].values():
                    fail_ratio = x[KPISet.FAILURES] / x[KPISet.SAMPLE_COUNT]  # failure rate for this label
                    obj.log.debug("TS: %s %s %s", data[DataPoint.TIMESTAMP],
                                  x[KPISet.SAMPLE_COUNT], fail_ratio)

        agg.add_underling(obj)
        agg.add_listener(Listener())
        agg.prepare()
        agg.startup()
        try:
            while not agg.check():
                pass  # 1537973736 fail, prev  1537973735 1537973734 1537973733
        except NormalShutdown:
            obj.log.warning("Shutting down")
        agg.shutdown()
        agg.post_process()
Example #4
    def test_load_reader_real2(self):
        reader1 = ApiritifLoadReader(self.obj.log)
        reader1.engine = EngineEmul()
        reader1.register_file(RESOURCES_DIR + "jmeter/jtl/apiritif-results/apiritif-0.csv")
        reader1.register_file(RESOURCES_DIR + "jmeter/jtl/apiritif-results/apiritif-1.csv")

        reader2 = ApiritifLoadReader(self.obj.log)
        reader2.engine = EngineEmul()
        reader2.register_file(RESOURCES_DIR + "jmeter/jtl/apiritif-results/apiritif--10.csv")
        reader2.register_file(RESOURCES_DIR + "jmeter/jtl/apiritif-results/apiritif--11.csv")

        reader = ConsolidatingAggregator()
        reader.engine = EngineEmul()
        reader.add_underling(reader1)
        reader.add_underling(reader2)

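        # a single datapoints() call yields all 39 consolidated seconds at once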
        items = list(reader.datapoints())
        self.assertEqual(39, len(items))
        self.assertEqual(4, items[-1][DataPoint.CURRENT][''][KPISet.CONCURRENCY])