def test_stop_counting_criteria(self):
        obj = PassFailStatus()
        obj.engine = EngineEmul()
        obj.parameters = {"criteria": ["avg-rt>10ms for 2s, continue as failed"]}
        obj.prepare()
        obj.get_widget()
        start_time = time.time()

        for _n in range(0, 10):
            point = random_datapoint(start_time)
            point[DataPoint.CURRENT]['']["avg_rt"] = 1.0
            obj.aggregated_second(point)
            obj.check()
            start_time += 1

        self.assertEqual(obj.widget.text_widget.text, "Failed: avg-rt>10ms for 10 sec\n")

        for _n in range(0, 10):
            point = random_datapoint(start_time)
            point[DataPoint.CURRENT]['']["avg_rt"] = 0.01
            obj.aggregated_second(point)
            obj.check()
            start_time += 1

        self.assertEqual(obj.widget.text_widget.text, "")
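For readers unfamiliar with the helper used throughout these tests: random_datapoint comes from the Taurus test suite and builds a DataPoint with randomized KPI values. The sketch below approximates the nested structure the tests index into, using plain dicts; the string keys mirror the DataPoint.CURRENT / DataPoint.CUMULATIVE constants and the KPISet fields referenced above, but this is an illustrative assumption, not bzt's actual implementation.

import random
import time


def sketch_random_datapoint(ts):
    # Plain-dict approximation of a bzt DataPoint: one KPISet-like dict per label,
    # where '' is the overall ("ALL") label these tests read and mutate.
    kpiset = {
        "avg_rt": random.random(),            # average response time, seconds
        "rc": {},                             # response-code counters, e.g. {"200": 5}
        "errors": [],                         # error descriptors
        "perc": {"90.0": random.random()},    # percentile values, e.g. p90
        "bytes": random.randint(1, 1000),     # transferred bytes
    }
    return {
        "ts": ts,
        "current": {"": dict(kpiset)},        # per-second stats (DataPoint.CURRENT)
        "cumulative": {"": dict(kpiset)},     # running totals (DataPoint.CUMULATIVE)
    }


point = sketch_random_datapoint(time.time())
point["current"][""]["avg_rt"] = 1.0          # the same mutation the test above performs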
    def test_stop_counting_criteria(self):
        self.configure(
            {"criteria": ["avg-rt>10ms for 2s, continue as failed"]})
        self.obj.prepare()
        self.obj.get_widget()
        start_time = time.time()

        for _n in range(0, 10):
            point = random_datapoint(start_time)
            point[DataPoint.CURRENT]['']["avg_rt"] = 1.0
            self.obj.aggregated_second(point)
            self.obj.check()
            start_time += 1

        self.assertEqual(self.obj.widget.text_widget.text,
                         "Failed: avg-rt>10ms for 10 sec\n")

        for _n in range(0, 10):
            point = random_datapoint(start_time)
            point[DataPoint.CURRENT]['']["avg_rt"] = 0.01
            self.obj.aggregated_second(point)
            self.obj.check()
            start_time += 1

        self.assertEqual(self.obj.widget.text_widget.text, "")
    def test_check(self):
        client = BlazeMeterClientEmul(logging.getLogger(''))
        client.results.append({"marker": "ping", 'result': {}})
        client.results.append({"marker": "projects", 'result': []})

        client.results.append({"marker": "project-create", 'result': {
            "id": time.time(),
            "name": "boo",
            "userId": time.time(),
            "description": None,
            "created": time.time(),
            "updated": time.time(),
            "organizationId": None
        }})
        client.results.append({"marker": "tests", 'result': {}})
        client.results.append({"marker": "test-create", 'result': {'id': 'unittest1'}})
        client.results.append(
            {"marker": "sess-start", 'result': {'session': {'id': 'sess1', 'userId': 1}, 'signature': ''}})
        client.results.append({"marker": "first push", 'result': {'session': {}}})
        # client.results.append(None)  # first check error stats
        client.results.append({"marker": "mon push", "result": True})
        client.results.append({"marker": "second push", 'result': {'session': {"statusCode": 140, 'status': 'ENDED'}}})
        # client.results.append(None)  # second check error stats
        client.results.append({"marker": "post-proc push", 'result': {'session': {}}})
        client.results.append({"marker": "upload1", "result": True})  # post-proc error stats
        client.results.append({"marker": "terminate", 'result': {'session': {}}})

        obj = BlazeMeterUploader()
        obj.parameters['project'] = 'Proj name'
        obj.settings['token'] = '123'
        obj.settings['browser-open'] = 'none'
        obj.engine = EngineEmul()
        shutil.copy(__file__, os.path.join(obj.engine.artifacts_dir, os.path.basename(__file__)))
        obj.client = client
        obj.prepare()
        obj.startup()
        for x in range(0, 31):
            obj.aggregated_second(random_datapoint(x))
        mon = [{"ts": 1, "source": "local", "cpu": 1, "mem": 2, "bytes-recv": 100, "other": 0}]
        obj.monitoring_data(mon)
        obj.check()
        for x in range(32, 65):
            obj.aggregated_second(random_datapoint(x))
        try:
            obj.check()
            self.fail()
        except KeyboardInterrupt:
            pass
        obj.aggregated_second(random_datapoint(10))
        obj.shutdown()
        obj.post_process()
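The BlazeMeterClientEmul pattern above prepares one canned response per expected API call, tagged with a human-readable "marker". A minimal sketch of that kind of test double, assuming a simple FIFO of prepared results (an illustration only, not the real emulator class):

class CannedClientSketch(object):
    """FIFO-based stand-in for an HTTP API client, for unit tests."""

    def __init__(self):
        self.results = []   # prepared responses, appended in the order calls are expected
        self.requests = []  # record of issued requests, inspected by assertions later

    def request(self, url, data=None):
        self.requests.append({"url": url, "data": data})
        reply = self.results.pop(0)  # raises IndexError if the test prepared too few results
        return reply["result"]


client = CannedClientSketch()
client.results.append({"marker": "ping", "result": {}})
assert client.request("https://a.blazemeter.com/api/ping") == {}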
    def test_check(self):
        client = BlazeMeterClientEmul(logging.getLogger(''))
        client.results.append({"marker": "ping", 'result': {}})
        client.results.append({"marker": "projects", 'result': []})

        client.results.append({"marker": "project-create", 'result': {
            "id": time.time(),
            "name": "boo",
            "userId": time.time(),
            "description": None,
            "created": time.time(),
            "updated": time.time(),
            "organizationId": None
        }})
        client.results.append({"marker": "tests", 'result': {}})
        client.results.append({"marker": "test-create", 'result': {'id': 'unittest1'}})
        client.results.append(
            {"marker": "sess-start", 'result': {'session': {'id': 'sess1', 'userId': 1}, 'signature': ''}})
        client.results.append({"marker": "first push", 'result': {'session': {}}})
        # client.results.append(None)  # first check error stats
        client.results.append(
            {"marker": "second push", 'result': {'session': {"statusCode": 140, 'status': 'ENDED'}}})
        # client.results.append(None)  # second check error stats
        client.results.append({"marker": "post-proc push", 'result': {'session': {}}})
        client.results.append({"marker": "upload1", "result": True})  # post-proc error stats
        client.results.append({"marker": "terminate", 'result': {'session': {}}})

        obj = BlazeMeterUploader()
        obj.parameters['project'] = 'Proj name'
        obj.settings['token'] = '123'
        obj.settings['browser-open'] = 'none'
        obj.engine = EngineEmul()
        shutil.copy(__file__, os.path.join(obj.engine.artifacts_dir, os.path.basename(__file__)))
        obj.client = client
        obj.prepare()
        obj.startup()
        for x in range(0, 31):
            obj.aggregated_second(random_datapoint(x))
        obj.check()
        for x in range(32, 65):
            obj.aggregated_second(random_datapoint(x))
        try:
            obj.check()
            self.fail()
        except KeyboardInterrupt:
            pass
        obj.aggregated_second(random_datapoint(10))
        obj.shutdown()
        obj.post_process()
    def test_within(self):
        obj = PassFailStatus()
        obj.engine = EngineEmul()
        obj.parameters = {"criteria": [
            "fail>10% within 5s",
            "fail>1000 within 5s",
            "avg-rt>100ms within 10s",
        ]}
        obj.prepare()

        start_time = time.time()
        for _n in range(0, 20):
            point = random_datapoint(start_time)
            obj.aggregated_second(point)
            if _n % 2 == 0:
                try:
                    obj.check()
                except KeyboardInterrupt:
                    pass

            try:
                obj.check()
            except KeyboardInterrupt:
                pass
            start_time += 1
    def test_bytes(self):
        self.configure({
            "criteria":
            [  # the generated bytes value always falls in the range from 1 to 1000
                "bytes>0 for 1s, continue as successful",
                "bytes<1kb for 1s, continue as successful",
                "bytes<1mib for 1s, continue as successful",
                "bytes<1b for 1s, continue as failed",
                "bytes>1024 for 1s, continue as failed",
            ]
        })
        self.obj.prepare()

        self.assertEqual(self.obj.processors[0].criteria[1].threshold,
                         1024)  # conversion check
        self.assertEqual(self.obj.processors[0].criteria[2].threshold,
                         1024 * 1024)
        self.assertEqual(self.obj.processors[0].criteria[3].threshold, 1)

        for n in range(0, 10):
            point = random_datapoint(n)
            self.obj.aggregated_second(point)
            self.obj.check()

        self.obj.shutdown()
        self.assertTrue(self.obj.criteria[0].is_triggered)
        self.assertTrue(self.obj.criteria[1].is_triggered)
        self.assertTrue(self.obj.criteria[2].is_triggered)
        self.assertFalse(self.obj.criteria[3].is_triggered)
        self.assertFalse(self.obj.criteria[4].is_triggered)
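The threshold assertions in test_bytes rely on binary unit conversion: "1kb" becomes 1024 bytes and "1mib" becomes 1024 * 1024. A minimal sketch of that conversion, inferred from the expected values above rather than taken from bzt's own parser:

# Suffix multipliers inferred from the assertions above; bzt's parser may accept more units.
_MULTIPLIERS = {"b": 1, "kb": 1024, "mib": 1024 * 1024}


def parse_byte_threshold(value):
    value = value.strip().lower()
    for suffix in sorted(_MULTIPLIERS, key=len, reverse=True):
        if value.endswith(suffix):
            return int(value[:-len(suffix)]) * _MULTIPLIERS[suffix]
    return int(value)   # bare numbers are already byte counts


assert parse_byte_threshold("1kb") == 1024
assert parse_byte_threshold("1mib") == 1024 * 1024
assert parse_byte_threshold("1b") == 1
assert parse_byte_threshold("1024") == 1024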
    def test_rc_within(self):
        obj = PassFailStatus()
        obj.engine = EngineEmul()
        obj.parameters = {
            "criteria": [
                "rc413>10 within 3s, stop as successful",  # this one to cover branch that caused bug
                "rc413>10 within 10s, stop as failed",
            ]
        }
        obj.prepare()
        self.assertEquals(len(obj.criteria), 2)

        for n in range(0, 10):
            point = random_datapoint(n)
            rcs = point[DataPoint.CURRENT][''][KPISet.RESP_CODES]
            rcs['413'] = 3
            obj.aggregated_second(point)
            try:
                obj.check()
            except AutomatedShutdown:
                break

            self.assertLess(n, 3)

        obj.shutdown()
        obj.post_process()
        self.assertFalse(obj.criteria[0].is_triggered)
        self.assertTrue(obj.criteria[1].is_triggered)
    def test_rc_over2(self):
        obj = PassFailStatus()
        obj.engine = EngineEmul()
        obj.parameters = {
            "criteria": [
                "rc200>8 over 3s",
            ]
        }
        obj.prepare()

        for n in range(0, 10):
            point = random_datapoint(n)
            rcs = point[DataPoint.CURRENT][''][KPISet.RESP_CODES]
            rcs['200'] = 5
            # logging.debug("Datapoint %s: %s", n, point)
            obj.aggregated_second(point)
            try:
                obj.check()
            except AutomatedShutdown:
                break

            self.assertLess(n, 3)

        self.assertTrue(obj.criteria[0].is_triggered)
        obj.shutdown()
        obj.post_process()
    def test_no_notes_for_public_reporting(self):
        client = BlazeMeterClientEmul(logging.getLogger(''))
        client.results.append({"marker": "ping", 'result': {}})
        client.results.extend([{'result': {}} for _ in range(6)])

        obj = BlazeMeterUploader()
        obj.parameters['project'] = 'Proj name'
        obj.settings['token'] = ''  # public reporting
        obj.settings['browser-open'] = 'none'
        obj.engine = EngineEmul()
        obj.client = client
        obj.prepare()

        client.session_id = 'sess1'
        client.master_id = 'master1'

        obj.engine.stopping_reason = ValueError('wrong value')
        obj.aggregated_second(random_datapoint(10))
        obj.kpi_buffer[-1][DataPoint.CUMULATIVE][''][KPISet.ERRORS] = [
            {'msg': 'Forbidden', 'cnt': 10, 'type': KPISet.ERRTYPE_ASSERT, 'urls': [], KPISet.RESP_CODES: '111'},
            {'msg': 'Allowed', 'cnt': 20, 'type': KPISet.ERRTYPE_ERROR, 'urls': [], KPISet.RESP_CODES: '222'}]
        obj.send_monitoring = obj.send_custom_metrics = obj.send_custom_tables = False
        obj.post_process()

        # check for note appending in _postproc_phase3()
        reqs = [{'url': '', 'data': ''} for _ in range(4)]     # pad with empty templates so there are at least 4 entries
        reqs = (reqs + obj.client.requests)[-4:]
        self.assertNotIn('api/latest/sessions/sess1', reqs[0]['url'])
        self.assertNotIn('api/latest/sessions/sess1', reqs[1]['url'])
        self.assertNotIn('api/latest/masters/master1', reqs[2]['url'])
        self.assertNotIn('api/latest/masters/master1', reqs[3]['url'])
        if reqs[1]['data']:
            self.assertNotIn('ValueError: wrong value', reqs[1]['data'])
        if reqs[3]['data']:
            self.assertNotIn('ValueError: wrong value', reqs[3]['data'])
    def test_within(self):
        self.configure({
            "criteria": [
                "fail>10% within 5s",
                "fail>1000 within 5s",
                "avg-rt>100ms within 10s",
            ]
        })
        self.obj.prepare()

        start_time = time.time()
        for _n in range(0, 20):
            point = random_datapoint(start_time)
            self.obj.aggregated_second(point)
            if _n % 2 == 0:
                try:
                    self.obj.check()
                except KeyboardInterrupt:
                    pass

            try:
                self.obj.check()
            except KeyboardInterrupt:
                pass
            start_time += 1
    def test_some_errors(self):
        client = BlazeMeterClientEmul(logging.getLogger(''))
        client.results.append({"marker": "ping", 'result': {}})
        client.results.append({"marker": "projects", 'result': []})
        client.results.append({"marker": "project-create", 'result': {
            "id": time.time(),
            "name": "boo",
            "userId": time.time(),
            "description": None,
            "created": time.time(),
            "updated": time.time(),
            "organizationId": None
        }})
        client.results.append({"marker": "tests", 'result': {}})
        client.results.append({"marker": "test-create", 'result': {'id': 'unittest1'}})
        client.results.append(
            {"marker": "sess-start",
             "result": {
                 'session': {'id': 'sess1', 'userId': 1},
                 'master': {'id': 'master1', 'userId': 1},
                 'signature': ''}})
        client.results.append({"marker": "post-proc push", 'result': {'session': {}}})
        client.results.append({"marker": "upload1", "result": True})  # post-proc error stats
        client.results.append({"marker": "terminate", 'result': {'session': {}}})
        client.results.append({"marker": "terminate2", 'result': {'session': {}}})
        client.results.append({"marker": "sess-e", "result": {'session': {'id': 'sess1', 'note': 'n'}}})
        client.results.append({"marker": "sess-e", "result": {'session': {}}})
        client.results.append({"marker": "sess-e", "result": {'master': {'id': 'sess1', 'note': 'n'}}})
        client.results.append({"marker": "sess-e", "result": {'master': {}}})
        client.results.append({"marker": "upload-file", "result": {}})

        obj = BlazeMeterUploader()
        obj.parameters['project'] = 'Proj name'
        obj.settings['token'] = '123'
        obj.settings['browser-open'] = 'none'
        obj.engine = EngineEmul()
        obj.client = client
        obj.prepare()
        obj.startup()
        obj.engine.stopping_reason = ValueError('wrong value')
        obj.aggregated_second(random_datapoint(10))
        obj.kpi_buffer[-1][DataPoint.CURRENT][''][KPISet.ERRORS] = [
            {'msg': 'Forbidden', 'cnt': 10, 'type': KPISet.ERRTYPE_ASSERT, 'urls': [], KPISet.RESP_CODES: '111'},
            {'msg': 'Allowed', 'cnt': 20, 'type': KPISet.ERRTYPE_ERROR, 'urls': [], KPISet.RESP_CODES: '222'}]
        obj.post_process()
        self.assertEqual(0, len(client.results))
        data = json.loads(client.requests[6]['data'])
        self.assertEqual(1, len(data['labels']))
        total_item = data['labels'][0]
        self.assertEqual('ALL', total_item['name'])
        self.assertEqual(total_item['assertions'], [{
            'failureMessage': 'Forbidden',
            'failures': 10,
            'name': 'All Assertions'}])
        self.assertEqual(total_item['errors'], [{
            'm': 'Allowed',
            'count': 20,
            'rc': '222'}])
    def test_prepare2(self):
        self.configure({"criteria": ["avg-rt>10ms, continue as non-failed"]})
        self.obj.prepare()
        self.assertGreater(len(self.obj.criteria), 0)

        for n in range(0, 10):
            point = random_datapoint(n)
            self.obj.aggregated_second(point)
            self.obj.check()

        self.obj.post_process()
    def test_prepare3(self):
        self.configure({"criteria": ["avg-rt>10ms for 3s, continue as failed"]})
        self.obj.prepare()
        self.assertGreater(len(self.obj.criteria), 0)

        for n in range(0, 10):
            point = random_datapoint(n)
            point[DataPoint.CURRENT][''][KPISet.AVG_RESP_TIME] = 1
            self.obj.aggregated_second(point)
            self.obj.check()

        self.assertRaises(AutomatedShutdown, self.obj.post_process)
    def test_prepare2(self):
        obj = PassFailStatus()
        obj.parameters = {"criterias": ["avg-rt>10ms, continue as non-failed"]}
        obj.prepare()
        self.assertGreater(len(obj.criterias), 0)

        for n in range(0, 10):
            point = random_datapoint(n)
            obj.aggregated_second(point)
            obj.check()

        obj.post_process()
    def test_long_kpi(self):
        obj = FinalStatus()
        obj.engine = EngineEmul()
        obj.parameters = BetterDict.from_dict({"dump-xml": obj.engine.create_artifact("status", ".xml")})

        datapoint = random_datapoint(time.time())
        datapoint[datapoint.CUMULATIVE][""]["stdev_rt"] = long(0)
        obj.aggregated_second(datapoint)
        obj.startup()
        obj.shutdown()

        obj.post_process()
    def test_widget(self):
        self.configure({"criteria": ["avg-rt>10ms for 2s, continue as failed"]})
        self.obj.prepare()
        self.obj.get_widget()
        start_time = time.time()

        for _n in range(0, 10):
            point = random_datapoint(start_time + _n)
            point[DataPoint.CURRENT]['']["avg_rt"] = 1.0
            self.obj.aggregated_second(point)
            self.obj.check()

        self.assertEqual(self.obj.widget.text_widget.text, "Failed: avg-rt>10ms for 10 sec\n")
    def test_dump(self):
        obj = FinalStatus()
        obj.engine = EngineEmul()
        obj.parameters = BetterDict.from_dict({
            "dump-xml": obj.engine.create_artifact("status", ".xml"),
            "dump-csv": obj.engine.create_artifact("status", ".csv")
        })
        self.sniff_log(obj.log)

        obj.aggregated_second(random_datapoint(time.time()))
        obj.startup()
        obj.shutdown()
        obj.post_process()
        self.assertIn("XML", self.log_recorder.info_buff.getvalue())
    def test_dump(self):
        obj = FinalStatus()
        obj.engine = EngineEmul()
        obj.parameters = BetterDict()
        log_recorder = RecordingHandler()
        obj.log.addHandler(log_recorder)
        obj.parameters.merge({
            "dump-xml": obj.engine.create_artifact("status", ".xml"),
            "dump-csv": obj.engine.create_artifact("status", ".csv")
        })

        obj.aggregated_second(random_datapoint(time.time()))
        obj.post_process()
        self.assertIn("XML", log_recorder.info_buff.getvalue())
    def test_check(self):
        client = BlazeMeterClientEmul(logging.getLogger(''))
        client.results.append({"marker": "ping", 'result': {}})
        client.results.append({"marker": "tests", 'result': {}})
        client.results.append({"marker": "test-create", 'result': {'id': 'unittest1'}})
        client.results.append(
            {"marker": "sess-start", 'result': {'session': {'id': 'sess1', 'userId': 1}, 'signature': ''}})
        client.results.append({"marker": "first push", 'result': {'session': {}}})
        # client.results.append(None)  # first check error stats
        client.results.append(
            {"marker": "second push", 'result': {'session': {"statusCode": 140, 'status': 'ENDED'}}})
        # client.results.append(None)  # second check error stats
        client.results.append({"marker": "post-proc push", 'result': {'session': {}}})
        client.results.append({"marker": "upload1", "result": True})  # post-proc error stats
        client.results.append({"marker": "terminate", 'result': {'session': {}}})

        obj = BlazeMeterUploader()
        obj.settings['token'] = '123'
        obj.settings['browser-open'] = 'none'
        obj.engine = EngineEmul()
        shutil.copy(__file__, os.path.join(obj.engine.artifacts_dir, os.path.basename(__file__)))
        obj.client = client
        obj.prepare()
        obj.startup()
        for x in range(0, 31):
            obj.aggregated_second(random_datapoint(x))
        obj.check()
        for x in range(32, 65):
            obj.aggregated_second(random_datapoint(x))
        try:
            obj.check()
            self.fail()
        except KeyboardInterrupt:
            pass
        obj.aggregated_second(random_datapoint(10))
        obj.shutdown()
        obj.post_process()
    def test_percentiles_track(self):
        self.configure({"criteria": ["p90>0ms"]})
        self.obj.prepare()
        self.assertGreater(len(self.obj.criteria), 0)

        for n in range(0, 10):
            point = random_datapoint(n)
            self.obj.aggregated_second(point)
            self.obj.check()

        self.obj.shutdown()
        try:
            self.obj.post_process()
            self.fail()
        except AutomatedShutdown:
            pass
    def test_prepare3(self):
        obj = PassFailStatus()
        obj.parameters = {"criterias": ["avg-rt>10ms for 3s, continue as failed"]}
        obj.prepare()
        self.assertGreater(len(obj.criterias), 0)

        for n in range(0, 10):
            point = random_datapoint(n)
            point[DataPoint.CURRENT][''][KPISet.AVG_RESP_TIME] = 1
            obj.aggregated_second(point)
            obj.check()

        try:
            obj.post_process()
            self.fail()
        except AutomatedShutdown:
            pass
    def test_rc_over1(self):
        self.configure({"criteria": [
            "rc200<8 over 5s",
        ]})

        self.obj.prepare()

        for n in range(0, 10):
            point = random_datapoint(n)
            rcs = point[DataPoint.CURRENT][''][KPISet.RESP_CODES]
            rcs['200'] = 3
            self.obj.aggregated_second(point)
            self.obj.check()
            self.assertFalse(self.obj.criteria[0].is_triggered)

        self.obj.shutdown()
        self.obj.post_process()
    def test_cumulative_criteria_post_process(self):
        self.configure({"criteria": [
            "p90>0ms, continue as failed",
            "avg-rt>0ms, continue as failed",
        ]})
        self.obj.prepare()
        self.assertEquals(len(self.obj.criteria), 2)

        for n in range(0, 10):
            point = random_datapoint(n)
            self.obj.aggregated_second(point)
            self.obj.check()

        self.obj.shutdown()
        self.assertRaises(AutomatedShutdown, self.obj.post_process)
        for crit in self.obj.criteria:
            self.assertTrue(crit.is_triggered)
    def test_within(self):
        obj = PassFailStatus()
        obj.parameters = {"criterias": [
            "fail>10% within 5s",
            "fail>1000 within 5s",
            "avg-rt>100ms within 10s",
        ]}
        obj.prepare()

        start_time = time.time()
        for _n in range(0, 20):
            point = random_datapoint(start_time)
            obj.aggregated_second(point)
            if _n % 2 == 0:
                obj.check()
            obj.check()
            start_time += 1
    def test_some_errors(self):
        client = BlazeMeterClientEmul(logging.getLogger(''))
        client.results.append({"marker": "ping", 'result': {}})
        client.results.append({"marker": "projects", 'result': []})
        client.results.append({"marker": "project-create", 'result': {
            "id": time.time(),
            "name": "boo",
            "userId": time.time(),
            "description": None,
            "created": time.time(),
            "updated": time.time(),
            "organizationId": None
        }})
        client.results.append({"marker": "tests", 'result': {}})
        client.results.append({"marker": "test-create", 'result': {'id': 'unittest1'}})
        client.results.append(
            {"marker": "sess-start",
             "result": {
                 'session': {'id': 'sess1', 'userId': 1},
                 'master': {'id': 'master1', 'userId': 1},
                 'signature': ''}})
        client.results.append({"marker": "post-proc push", 'result': {'session': {}}})
        client.results.append({"marker": "upload1", "result": True})  # post-proc error stats
        client.results.append({"marker": "terminate", 'result': {'session': {}}})
        client.results.append({"marker": "terminate2", 'result': {'session': {}}})
        client.results.append({"marker": "sess-e", "result": {'session': {'id': 'sess1', 'note': 'n'}}})
        client.results.append({"marker": "sess-e", "result": {'session': {}}})
        client.results.append({"marker": "sess-e", "result": {'master': {'id': 'sess1', 'note': 'n'}}})
        client.results.append({"marker": "sess-e", "result": {'master': {}}})
        client.results.append({"marker": "upload-file", "result": {}})

        obj = BlazeMeterUploader()
        obj.parameters['project'] = 'Proj name'
        obj.settings['token'] = '123'
        obj.settings['browser-open'] = 'none'
        obj.engine = EngineEmul()
        obj.client = client
        obj.prepare()
        obj.startup()
        obj.engine.stopping_reason = ValueError('wrong value')
        obj.aggregated_second(random_datapoint(10))
        obj.kpi_buffer[-1][DataPoint.CUMULATIVE][''][KPISet.ERRORS] = [
            {'msg': 'Forbidden', 'cnt': 7373, 'type': KPISet.ERRTYPE_ASSERT, 'urls': [], KPISet.RESP_CODES: '403'},
            {'msg': 'Allowed', 'cnt': 7373, 'type': KPISet.ERRTYPE_ERROR, 'urls': [], KPISet.RESP_CODES: '403'}]
        obj.post_process()
    def test_percentiles_track(self):
        obj = PassFailStatus()
        obj.engine = EngineEmul()
        obj.parameters = {"criteria": ["p90>0ms"]}
        obj.prepare()
        self.assertGreater(len(obj.criteria), 0)

        for n in range(0, 10):
            point = random_datapoint(n)
            obj.aggregated_second(point)
            obj.check()

        obj.shutdown()
        try:
            obj.post_process()
            self.fail()
        except AutomatedShutdown:
            pass
    def test_prepare(self):
        config = json.loads(open(RESOURCES_DIR + "json/passfail.json").read())
        self.configure(config['reporting'][0])
        self.obj.prepare()
        self.assertGreater(len(self.obj.criteria), 0)

        for n in range(0, 10):
            point = random_datapoint(n)
            ROOT_LOGGER.info("%s: %s", n, point)
            self.obj.aggregated_second(point)
            try:
                self.obj.check()
            except AutomatedShutdown:
                pass

        try:
            self.obj.post_process()
        except AutomatedShutdown:
            pass
    def test_executor_level(self):
        executor = ModuleMock()
        executor.engine = self.obj.engine
        self.obj.engine.provisioning.executors.append(executor)
        executor.execution.merge({"criteria": ["p90>0ms"], "scenario": {"criteria": ["p50>0ms"]}})
        self.obj.prepare()
        self.assertEqual(len(self.obj.criteria), 2)

        for n in range(0, 10):
            point = random_datapoint(n)
            self.obj.aggregated_second(point)
            self.obj.check()

        self.obj.shutdown()
        try:
            self.obj.post_process()
            self.fail()
        except AutomatedShutdown:
            pass
    def test_cumulative_criteria_post_process(self):
        obj = PassFailStatus()
        obj.engine = EngineEmul()
        obj.parameters = {"criteria": [
            "p90>0ms, continue as failed",
            "avg-rt>0ms, continue as failed",
        ]}
        obj.prepare()
        self.assertEquals(len(obj.criteria), 2)

        for n in range(0, 10):
            point = random_datapoint(n)
            obj.aggregated_second(point)
            obj.check()

        obj.shutdown()
        self.assertRaises(AutomatedShutdown, obj.post_process)
        for crit in obj.criteria:
            self.assertTrue(crit.is_triggered)
    def test_public_report(self):
        mock = BZMock()
        mock.mock_get.update({
            'https://a.blazemeter.com/api/v4/tests?workspaceId=1&name=Taurus+Test': {"result": []}
        })

        mock.mock_post.update({
            'https://a.blazemeter.com/api/v4/projects': {"result": {'id': 1}},
            'https://a.blazemeter.com/api/v4/tests': {'result': {'id': 'unittest1'}},
            'https://a.blazemeter.com/api/v4/tests/unittest1/start-external': {"result": {
                'session': {'id': 'sess1', 'userId': 1, 'testId': 1},
                'master': {'id': 'master1', 'userId': 1},
                'signature': ''
            }},
            'https://a.blazemeter.com/api/v4/masters/master1/public-token': {'result': {'publicToken': 'publicToken'}},
            'https://data.blazemeter.com/submit.php?session_id=sess1&signature=&test_id=1&user_id=1&pq=0&target=labels_bulk&update=1': {
                "result": {'session': {}}},
            'https://a.blazemeter.com/api/v4/image/sess1/files?signature=': {'result': True},
        })

        log_recorder = RecordingHandler()

        obj = BlazeMeterUploader()
        obj.settings['token'] = '123'
        obj.settings['browser-open'] = 'none'
        obj.settings['public-report'] = True
        obj.settings['send-monitoring'] = False
        obj.engine = EngineEmul()
        mock.apply(obj._user)
        obj.log.addHandler(log_recorder)
        obj.prepare()
        obj.startup()
        obj.aggregated_second(random_datapoint(10))
        obj.check()
        obj.shutdown()
        obj.post_process()

        log_buff = log_recorder.info_buff.getvalue()
        log_line = "Public report link: https://a.blazemeter.com/app/?public-token=publicToken#/masters/master1/summary"
        self.assertIn(log_line, log_buff)
        logging.warning("\n".join([x['url'] for x in mock.requests]))
        self.assertEqual(14, len(mock.requests))
    def test_prepare(self):
        obj = PassFailStatus()
        config = json.loads(open(__dir__() + "/../json/passfail.json").read())
        obj.parameters = config['reporting'][0]
        obj.prepare()
        self.assertGreater(len(obj.criterias), 0)

        for n in range(0, 10):
            point = random_datapoint(n)
            logging.info("%s: %s", n, point)
            obj.aggregated_second(point)
            try:
                obj.check()
            except AutomatedShutdown:
                pass

        try:
            obj.post_process()
        except AutomatedShutdown:
            pass
    def test_monitoring(self):
        self.configure({
            "criteria": [{
                "class": "bzt.modules.monitoring.MonitoringCriteria",
                "subject": "local/cpu",
                "condition": ">",
                "threshold": 90,
                "timeframe": "5s"
            }]
        })

        self.obj.prepare()

        for n in range(0, 10):
            point = random_datapoint(n)
            self.obj.aggregated_second(point)
            self.obj.check()

        self.obj.shutdown()
        self.assertFalse(self.obj.criteria[0].is_triggered)
    def test_prepare_label_issue(self):
        # https://groups.google.com/forum/?utm_medium=email&utm_source=footer#!msg/codename-taurus/PWjU7xVucZ0/WkjUAbE1EwAJ
        self.configure({"criteria": ["avg-rt of spaced label>10ms"]})
        self.obj.prepare()
        self.assertGreater(len(self.obj.criteria), 0)

        for n in range(0, 10):
            point = random_datapoint(n)
            point[DataPoint.CUMULATIVE]['spaced label'] = point[DataPoint.CUMULATIVE]['']
            point[DataPoint.CURRENT]['spaced label'] = point[DataPoint.CURRENT]['']
            self.obj.aggregated_second(point)
            self.obj.check()

        self.obj.shutdown()

        try:
            self.obj.post_process()
            self.fail()
        except AutomatedShutdown:
            pass
    def test_rc_over1(self):
        obj = PassFailStatus()
        obj.engine = EngineEmul()
        obj.parameters = {
            "criteria": [
                "rc200<8 over 5s",
            ]
        }
        obj.prepare()

        for n in range(0, 10):
            point = random_datapoint(n)
            rcs = point[DataPoint.CURRENT][''][KPISet.RESP_CODES]
            rcs['200'] = 3
            obj.aggregated_second(point)
            obj.check()
            self.assertFalse(obj.criteria[0].is_triggered)

        obj.shutdown()
        obj.post_process()
    def test_rc_over2(self):
        self.configure({"criteria": [
            "rc200>8 over 3s",
        ]})
        self.obj.prepare()

        for n in range(0, 10):
            point = random_datapoint(n)
            rcs = point[DataPoint.CURRENT][''][KPISet.RESP_CODES]
            rcs['200'] = 5
            self.obj.aggregated_second(point)
            try:
                self.obj.check()
            except AutomatedShutdown:
                break

            self.assertLess(n, 3)

        self.assertTrue(self.obj.criteria[0].is_triggered)
        self.obj.shutdown()
        self.obj.post_process()
    def test_public_report(self):
        client = BlazeMeterClientEmul(logging.getLogger(''))
        client.timeout = 1
        client.results.append({"marker": "ping", 'result': {}})
        client.results.append({"marker": "tests", 'result': {}})
        client.results.append({"marker": "test-create", 'result': {'id': 'unittest1'}})
        client.results.append(
            {"marker": "sess-start",
             "result": {
                 'session': {'id': 'sess1', 'userId': 1},
                 'master': {'id': 'master1', 'userId': 1},
                 'signature': ''}})
        client.results.append({"marker": "share-report", 'result': {'publicToken': 'publicToken'}})
        client.results.append({"marker": "first push", 'result': {'session': {}}})
        client.results.append({"marker": "post-proc push", 'result': {'session': {}}})
        client.results.append({"marker": "artifacts push", 'result': True})
        client.results.append({"marker": "logs push", 'result': True})
        client.results.append({"marker": "terminate", 'result': {'session': {}}})

        log_recorder = RecordingHandler()

        obj = BlazeMeterUploader()
        obj.settings['token'] = '123'
        obj.settings['browser-open'] = 'none'
        obj.settings['public-report'] = True
        obj.settings['send-monitoring'] = False
        obj.engine = EngineEmul()
        obj.client = client
        obj.log.addHandler(log_recorder)
        obj.prepare()
        obj.startup()
        obj.aggregated_second(random_datapoint(10))
        obj.check()
        obj.shutdown()
        obj.post_process()
        self.assertEqual(0, len(client.results))

        log_buff = log_recorder.info_buff.getvalue()
        log_line = "Public report link: https://a.blazemeter.com/app/?public-token=publicToken#/masters/master1/summary"
        self.assertIn(log_line, log_buff)
    def test_public_report(self):
        mock = BZMock()
        mock.mock_get.update({
            'https://a.blazemeter.com/api/v4/tests?workspaceId=1&name=Taurus+Test': {"result": []}
        })

        mock.mock_post.update({
            'https://a.blazemeter.com/api/v4/projects': {"result": {'id': 1}},
            'https://a.blazemeter.com/api/v4/tests': {'result': {'id': 'unittest1'}},
            'https://a.blazemeter.com/api/v4/tests/unittest1/start-external': {"result": {
                'session': {'id': 'sess1', 'userId': 1, 'testId': 1},
                'master': {'id': 'master1', 'userId': 1},
                'signature': ''
            }},
            'https://a.blazemeter.com/api/v4/masters/master1/public-token': {'result': {'publicToken': 'publicToken'}},
            'https://data.blazemeter.com/submit.php?session_id=sess1&signature=&test_id=1&user_id=1&pq=0&target=labels_bulk&update=1': {
                "result": {'session': {}}},
            'https://data.blazemeter.com/api/v4/image/sess1/files?signature=': {'result': True},
        })

        obj = BlazeMeterUploader()
        obj.settings['token'] = '123'
        obj.settings['browser-open'] = 'none'
        obj.settings['public-report'] = True
        obj.settings['send-monitoring'] = False
        obj.engine = EngineEmul()
        mock.apply(obj._user)
        self.sniff_log(obj.log)
        obj.prepare()
        obj.startup()
        obj.aggregated_second(random_datapoint(10))
        obj.check()
        obj.shutdown()
        obj.post_process()

        log_buff = self.log_recorder.info_buff.getvalue()
        log_line = "Public report link: https://a.blazemeter.com/app/?public-token=publicToken#/masters/master1/summary"
        self.assertIn(log_line, log_buff)
        ROOT_LOGGER.warning("\n".join([x['url'] for x in mock.requests]))
        self.assertEqual(14, len(mock.requests))
    def test_no_notes_for_public_reporting(self):
        mock = BZMock()
        mock.mock_post.update({
            'https://a.blazemeter.com/api/v4/sessions/1/terminate-external': {},
            'https://data.blazemeter.com/submit.php?session_id=1&signature=None&test_id=1&user_id=1&pq=0&target=labels_bulk&update=1': {},
        })

        obj = BlazeMeterUploader()
        obj.parameters['project'] = 'Proj name'
        obj.settings['token'] = ''  # public reporting
        obj.settings['browser-open'] = 'none'
        obj.engine = EngineEmul()
        mock.apply(obj._user)
        obj.prepare()

        obj._session = Session(obj._user, {'id': 1, 'testId': 1, 'userId': 1})
        obj._master = Master(obj._user, {'id': 1})

        obj.engine.stopping_reason = ValueError('wrong value')
        obj.aggregated_second(random_datapoint(10))
        obj.kpi_buffer[-1][DataPoint.CUMULATIVE][''][KPISet.ERRORS] = [
            {'msg': 'Forbidden', 'cnt': 10, 'type': KPISet.ERRTYPE_ASSERT, 'urls': [], KPISet.RESP_CODES: '111',
             'tag': ""},
            {'msg': 'Allowed', 'cnt': 20, 'type': KPISet.ERRTYPE_ERROR, 'urls': [], KPISet.RESP_CODES: '222'}]
        obj.send_monitoring = False
        obj.post_process()

        # TODO: looks like this whole block of checks is useless
        # check for note appending in _postproc_phase3()
        reqs = [{'url': '', 'data': ''} for _ in range(4)]  # pad with empty templates so there are at least 4 entries
        reqs = (reqs + mock.requests)[-4:]
        self.assertNotIn('api/v4/sessions/1', reqs[0]['url'])
        self.assertNotIn('api/v4/sessions/1', reqs[1]['url'])
        self.assertNotIn('api/v4/masters/1', reqs[2]['url'])
        self.assertNotIn('api/v4/masters/1', reqs[3]['url'])
        if reqs[1]['data']:
            self.assertNotIn('ValueError: wrong value', reqs[1]['data'])
        if reqs[3]['data']:
            self.assertNotIn('ValueError: wrong value', reqs[3]['data'])
    def test_prepare_label_issue(self):
        # https://groups.google.com/forum/?utm_medium=email&utm_source=footer#!msg/codename-taurus/PWjU7xVucZ0/WkjUAbE1EwAJ
        obj = PassFailStatus()
        obj.engine = EngineEmul()
        obj.parameters = {"criteria": ["avg-rt of spaced label>10ms"]}
        obj.prepare()
        self.assertGreater(len(obj.criteria), 0)

        for n in range(0, 10):
            point = random_datapoint(n)
            point[DataPoint.CUMULATIVE]['spaced label'] = point[DataPoint.CUMULATIVE]['']
            point[DataPoint.CURRENT]['spaced label'] = point[DataPoint.CURRENT]['']
            obj.aggregated_second(point)
            obj.check()

        obj.shutdown()

        try:
            obj.post_process()
            self.fail()
        except AutomatedShutdown:
            pass
    def test_rc_within(self):
        self.configure({"criteria": [
            "rc413>10 within 3s, stop as successful",  # this one to cover branch that caused bug
            "rc413>10 within 10s, stop as failed",
        ]})
        self.obj.prepare()
        self.assertEquals(len(self.obj.criteria), 2)

        for n in range(0, 10):
            point = random_datapoint(n)
            rcs = point[DataPoint.CURRENT][''][KPISet.RESP_CODES]
            rcs['413'] = 3
            self.obj.aggregated_second(point)
            try:
                self.obj.check()
            except AutomatedShutdown:
                break

            self.assertLess(n, 3)

        self.obj.shutdown()
        self.obj.post_process()
        self.assertFalse(self.obj.criteria[0].is_triggered)
        self.assertTrue(self.obj.criteria[1].is_triggered)
    def test_some_errors(self):
        mock = BZMock()
        mock.mock_get.update({
            'https://a.blazemeter.com/api/v4/tests?projectId=1&name=Taurus+Test': {"result": []},
            'https://a.blazemeter.com/api/v4/projects?workspaceId=1&name=Proj+name': {"result": []},
            'https://a.blazemeter.com/api/v4/sessions/1': {"result": {'id': 1, "note": "somenote"}},
            'https://a.blazemeter.com/api/v4/masters/1': {"result": {'id': 1, "note": "somenote"}},
        })
        mock.mock_post.update({
            'https://a.blazemeter.com/api/v4/projects': {"result": {'id': 1}},
            'https://a.blazemeter.com/api/v4/tests': {"result": {'id': 1}},
            'https://a.blazemeter.com/api/v4/tests/1/start-external': {"result": {
                "session": {'id': 1, "testId": 1, "userId": 1},
                "master": {'id': 1},
                "signature": "sign"
            }},
            'https://data.blazemeter.com/api/v4/image/1/files?signature=sign': {"result": True},
            'https://data.blazemeter.com/submit.php?session_id=1&signature=sign&test_id=1&user_id=1' +
            '&pq=0&target=labels_bulk&update=1': {},
            'https://a.blazemeter.com/api/v4/sessions/1/stop': {"result": True},
            'https://data.blazemeter.com/submit.php?session_id=1&signature=sign&test_id=1&user_id=1&pq=0&target=engine_health&update=1': {
                'result': {'session': {}}}
        })

        mock.mock_patch.update({
            'https://a.blazemeter.com/api/v4/sessions/1': {"result": {"id": 1, "note": "somenote"}},
            'https://a.blazemeter.com/api/v4/masters/1': {"result": {"id": 1, "note": "somenote"}},
        })

        obj = BlazeMeterUploader()
        mock.apply(obj._user)
        obj.parameters['project'] = 'Proj name'
        obj.settings['token'] = '123'
        obj.settings['browser-open'] = 'none'
        obj.engine = EngineEmul()
        obj.prepare()
        obj.startup()
        obj.engine.stopping_reason = ValueError('wrong value')
        obj.aggregated_second(random_datapoint(10))
        obj.kpi_buffer[-1][DataPoint.CUMULATIVE][''][KPISet.ERRORS] = [
            {'msg': 'Forbidden', 'cnt': 10, 'type': KPISet.ERRTYPE_ASSERT, 'urls': [], KPISet.RESP_CODES: '111',
             'tag': None},
            {'msg': 'Allowed', 'cnt': 20, 'type': KPISet.ERRTYPE_ERROR, 'urls': [], KPISet.RESP_CODES: '222'},
            {'msg': 'Not Found', 'cnt': 10, 'type': KPISet.ERRTYPE_SUBSAMPLE, 'urls': {'/non': '404'},
             KPISet.RESP_CODES: '404', 'tag': None}
        ]
        obj.post_process()
        obj.log.info("Requests: %s", mock.requests)

        # check for note appending in _postproc_phase3()
        reqs = mock.requests[-4:]
        self.assertIn('api/v4/sessions/1', reqs[0]['url'])
        self.assertIn('api/v4/sessions/1', reqs[1]['url'])
        self.assertIn('api/v4/masters/1', reqs[2]['url'])
        self.assertIn('api/v4/masters/1', reqs[3]['url'])
        self.assertIn('ValueError: wrong value', str(reqs[1]['data']))
        self.assertIn('ValueError: wrong value', str(reqs[3]['data']))

        labels = mock.requests[8]['data']
        if not isinstance(labels, str):
            labels = labels.decode("utf-8")
        obj.log.info("Labels: %s", labels)
        data = json.loads(str(labels))
        self.assertEqual(1, len(data['labels']))
        total_item = data['labels'][0]
        self.assertEqual('ALL', total_item['name'])
        self.assertEqual(total_item['assertions'],
                         [{'failureMessage': 'Forbidden', 'failures': 10, 'name': 'All Assertions'}])
        self.assertEqual(total_item['errors'], [{'m': 'Allowed', 'count': 20, 'rc': '222'}])
        self.assertEqual(total_item['failedEmbeddedResources'],
                         [{'url': '/non', 'count': 10, 'rc': '404', 'rm': 'Not Found'}])
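Unlike the FIFO-style emulator seen earlier, the BZMock examples key canned responses by HTTP verb and full URL. A rough sketch of that idea (an approximation for illustration; the real mock also applies itself to the uploader's user object and records more detail):

class UrlKeyedMockSketch(object):
    """Canned responses selected by (method, url) rather than by call order."""

    def __init__(self):
        self.mock_get = {}
        self.mock_post = {}
        self.mock_patch = {}
        self.requests = []  # every issued request, for later assertions

    def request(self, method, url, data=None):
        self.requests.append({"method": method, "url": url, "data": data})
        table = getattr(self, "mock_" + method.lower())
        return table[url]   # a KeyError means the test forgot to mock this endpoint


mock = UrlKeyedMockSketch()
mock.mock_get["https://a.blazemeter.com/api/v4/masters/1"] = {"result": {"id": 1}}
assert mock.request("GET", "https://a.blazemeter.com/api/v4/masters/1")["result"]["id"] == 1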