def test_401(self):
     obj = BlazeMeterUploader()
     obj.engine = EngineEmul()
     mock = BZMock(obj._user)
     mock.mock_get.update({
         'https://a.blazemeter.com/api/v4/web/version': HTTPError(None, None, None, None, None, ),
     })
     self.assertRaises(HTTPError, obj.prepare)
 def test_monitoring_buffer_limit_option_client_emul(self):
     """Monitoring buffer must never exceed 'monitoring-buffer-limit' per source.

     NOTE(review): renamed from test_monitoring_buffer_limit_option — this file
     defined three methods under that name, so all but the last were shadowed
     and never executed by the test runner.
     """
     obj = BlazeMeterUploader()
     obj.engine = EngineEmul()
     obj.client = BlazeMeterClientEmul(logging.getLogger(''))
     obj.client.results.append({"marker": "ping", 'result': {}})
     obj.settings["monitoring-buffer-limit"] = 100
     obj.prepare()
     for i in range(1000):
         mon = [{"ts": i, "source": "local", "cpu": float(i) / 1000 * 100, "mem": 2, "bytes-recv": 100, "other": 0}]
         obj.monitoring_data(mon)
         # cap must hold after every single feed, not just at the end
         for source, buffer in iteritems(obj.monitoring_buffer.data):
             self.assertLessEqual(len(buffer), 100)
     # all queued emulator responses must have been consumed
     self.assertEqual(0, len(obj.client.results))
 def test_monitoring_buffer_limit_option_buffer_only(self):
     """Monitoring buffer cap check without asserting response consumption.

     NOTE(review): renamed from test_monitoring_buffer_limit_option — this file
     defined three methods under that name, so all but the last were shadowed
     and never executed by the test runner.
     """
     obj = BlazeMeterUploader()
     obj.engine = EngineEmul()
     obj.client = BlazeMeterClientEmul(logging.getLogger(''))
     obj.client.results.append({"marker": "ping", 'result': {}})
     obj.settings["monitoring-buffer-limit"] = 100
     obj.prepare()
     for i in range(1000):
         mon = [{"ts": i, "source": "local", "cpu": float(i) / 1000 * 100, "mem": 2, "bytes-recv": 100, "other": 0}]
         obj.monitoring_data(mon)
         # cap must hold after every single feed, not just at the end
         for source, buffer in iteritems(obj.monitoring_buffer.data):
             self.assertLessEqual(len(buffer), 100)
 def test_monitoring_buffer_limit_option(self):
     """Monitoring buffer must be trimmed to 'monitoring-buffer-limit' per source."""
     obj = BlazeMeterUploader()
     obj.engine = EngineEmul()
     mock = BZMock(obj._user)
     obj.settings["monitoring-buffer-limit"] = 100
     obj.prepare()
     for i in range(1000):
         mon = [{"ts": i, "source": "local", "cpu": float(i) / 1000 * 100, "mem": 2, "bytes-recv": 100, "other": 0}]
         obj.monitoring_data(mon)
         # the cap must hold after every feed, not only at the end
         for source, buffer in iteritems(obj.monitoring_buffer.data):
             self.assertLessEqual(len(buffer), 100)
     # prepare() should have issued exactly one request through the mocked transport
     self.assertEqual(1, len(mock.requests))
# Example #5
 def test_results_link_blazemeter(self):
     """get_bza_report_info() must surface the BlazeMeter reporter's results URL."""
     obj = JUnitXMLReporter()
     obj.engine = EngineEmul()
     obj.engine.provisioning = Local()
     obj.engine.reporters.append(BlazeMeterUploader())
     obj.engine.provisioning.client = BlazeMeterClientEmul(obj.log)
     rep = obj.engine.reporters[0]
     rep.client.results_url = 'url2'
     rep.parameters.merge({'test': 'test2'})
     report_info = obj.get_bza_report_info()
     self.assertEqual(report_info,
                      [('BlazeMeter report link: url2\n', 'test2')])
# Example #6
    def test_blazemeter_report_link_merge_params(self):
        """XML status dump must contain the BlazeMeter report URL.

        NOTE(review): renamed from test_blazemeter_report_link — a second
        method with that exact name follows in this file and shadowed this
        one, so it never ran.
        """
        obj = FinalStatus()
        obj.engine = EngineEmul()
        obj.parameters = BetterDict()
        xml_report = obj.engine.create_artifact("status", ".xml")
        obj.parameters.merge({
            "dump-xml": xml_report,
        })

        rep = BlazeMeterUploader()
        rep.results_url = "http://report/link"
        obj.engine.reporters.append(rep)

        obj.aggregated_second(self.__get_datapoint())
        obj.post_process()

        self.assertTrue(os.path.exists(xml_report))
        with open(xml_report) as fds:
            report_content = fds.read()
        self.assertIn('<ReportURL>http://report/link</ReportURL>',
                      report_content)
    def test_blazemeter_report_link(self):
        """XML status dump must contain the BlazeMeter report URL (startup/shutdown cycle)."""
        obj = FinalStatus()
        obj.engine = EngineEmul()
        xml_report = obj.engine.create_artifact("status", ".xml")
        obj.parameters = BetterDict.from_dict({
            "dump-xml": xml_report,
        })

        rep = BlazeMeterUploader()
        rep.results_url = "http://report/link"
        obj.engine.reporters.append(rep)

        obj.startup()
        obj.shutdown()

        obj.aggregated_second(self.__get_datapoint())
        obj.post_process()

        self.assertTrue(os.path.exists(xml_report))
        with open(xml_report) as fds:
            report_content = fds.read()
        self.assertIn('<ReportURL>http://report/link</ReportURL>', report_content)
# Example #8
    def test_new_project_existing_test(self):
        """Named project absent but a same-named test exists in the workspace:
        prepare() must POST a new project and then a new test under it."""
        obj = BlazeMeterUploader()
        mock = BZMock(obj._user)
        mock.mock_get.update({
            'https://a.blazemeter.com/api/v4/tests?workspaceId=1&name=Taurus+Test':
            {
                'result': [{
                    'id': 1,
                    'name': 'Taurus Test',
                    'configuration': {
                        "type": 'external'
                    }
                }]
            },
            'https://a.blazemeter.com/api/v4/tests?projectId=1&name=Taurus+Test':
            {
                'result': []
            }
        })

        mock.mock_post.update({
            'https://a.blazemeter.com/api/v4/projects': {
                "result": {
                    "id": 1
                }
            },
            'https://a.blazemeter.com/api/v4/tests': {
                "result": {
                    "id": 1
                }
            },
        })

        obj.parameters['project'] = 'Proj name'
        obj.settings['token'] = '123'
        obj.settings['browser-open'] = 'none'
        obj.engine = EngineEmul()
        obj.prepare()
        # request indices below depend on the fixed call sequence prepare()
        # issues through the mocked transport
        self.assertEquals('https://a.blazemeter.com/api/v4/projects',
                          mock.requests[4]['url'])
        self.assertEquals('POST', mock.requests[4]['method'])
        self.assertEquals('https://a.blazemeter.com/api/v4/tests',
                          mock.requests[6]['url'])
        self.assertEquals('POST', mock.requests[6]['method'])
    def test_new_project_new_test(self):
        """Neither project nor test exist: prepare() must create both via POST."""
        obj = BlazeMeterUploader()
        mock = BZMock(obj._user)
        mock.mock_get.update({
            'https://a.blazemeter.com/api/v4/tests?workspaceId=1&name=Taurus+Test': {'result': []},
            'https://a.blazemeter.com/api/v4/projects?workspaceId=1': {'result': []}
        })

        mock.mock_post.update({
            'https://a.blazemeter.com/api/v4/projects': {"result": {"id": 1}},
            'https://a.blazemeter.com/api/v4/tests': {"result": {"id": 1}},
        })

        obj.settings['token'] = '123'
        obj.settings['browser-open'] = 'none'
        obj.engine = EngineEmul()
        obj.prepare()
        # request indices depend on the fixed call sequence prepare() makes
        self.assertEquals('https://a.blazemeter.com/api/v4/projects', mock.requests[6]['url'])
        self.assertEquals('POST', mock.requests[6]['method'])
        self.assertEquals('https://a.blazemeter.com/api/v4/tests', mock.requests[7]['url'])
        self.assertEquals('POST', mock.requests[7]['method'])
# Example #10
    def test_existing_project_new_test(self):
        """Project already exists, test does not: prepare() must only create the test."""
        obj = BlazeMeterUploader()
        mock = BZMock(obj._user)
        mock.mock_get.update({
            'https://a.blazemeter.com/api/v4/tests?projectId=1&name=Taurus+Test': {'result': []},
            'https://a.blazemeter.com/api/v4/projects?workspaceId=1': {'result': [
                {'id': 1, 'name': 'Proj name'}
            ]}
        })

        mock.mock_post.update({
            'https://a.blazemeter.com/api/v4/projects': {"result": {"id": 1}},
            'https://a.blazemeter.com/api/v4/tests': {"result": {"id": 1}},
        })

        obj.parameters['project'] = 'Proj name'
        obj.settings['token'] = '123'
        obj.settings['browser-open'] = 'none'
        obj.engine = EngineEmul()
        obj.prepare()
        # only one POST expected: the test creation (no project POST)
        self.assertEquals('https://a.blazemeter.com/api/v4/tests', mock.requests[6]['url'])
        self.assertEquals('POST', mock.requests[6]['method'])
# Example #11
 def test_anonymous_feeding(self):
     """Without a token, the uploader must create an anonymous session and feed it."""
     obj = BlazeMeterUploader()
     obj.engine = EngineEmul()
     obj.browser_open = False
     mock = BZMock(obj._user)
     mock.mock_post.update({
         'https://a.blazemeter.com/api/v4/sessions': {
             "result": {
                 "signature": "sign",
                 "publicTokenUrl": "publicUrl",
                 "session": {
                     "id": 1,
                     "testId": 1,
                     "userId": 1
                 },
                 "master": {
                     "id": 1
                 },
             }
         },
         'https://data.blazemeter.com/submit.php?session_id=1&signature=sign&test_id=1&user_id=1&pq=0&target=labels_bulk&update=1':
         {},
         'https://data.blazemeter.com/api/v4/image/1/files?signature=sign':
         {
             "result": True
         },
         'https://data.blazemeter.com/submit.php?session_id=1&signature=sign&test_id=1&user_id=1&pq=0&target=engine_health&update=1':
         {
             'result': {
                 'session': {}
             }
         },
     })
     obj.prepare()
     obj.startup()
     obj.check()
     obj.shutdown()
     obj.post_process()
     # session id must come from the mocked anonymous-session response
     self.assertEquals(1, obj._session['id'])
     self.assertEqual(6, len(mock.requests),
                      "Requests were: %s" % mock.requests)
# Example #12
    def test_check_with_monitoring(self):
        """Full uploader lifecycle against BZMock: KPI and monitoring pushes
        (including one expected monitoring-push failure), a server-forced
        ENDED status that must raise KeyboardInterrupt, then post_process
        artifact uploads.

        NOTE(review): renamed from test_check — this file defines several
        methods under that one name, so all but the last were shadowed and
        never executed by the test runner.
        """
        mock = BZMock()
        mock.mock_get.update({
            'https://a.blazemeter.com/api/v4/tests?workspaceId=1&name=Taurus+Test':
            {
                "result": []
            },
            'https://a.blazemeter.com/api/v4/tests?projectId=1&name=Taurus+Test':
            {
                "result": []
            },
            'https://a.blazemeter.com/api/v4/projects?workspaceId=1&name=Proj+name':
            {
                "result": []
            },
        })
        mock.mock_post.update({
            'https://a.blazemeter.com/api/v4/projects': {
                "result": {
                    "id": 1,
                    "name": "boo",
                    "userId": 2,
                    "description": None,
                    "created": time.time(),
                    "updated": time.time(),
                    "organizationId": None
                }
            },
            'https://a.blazemeter.com/api/v4/tests': {
                "result": {
                    'id': 1
                }
            },
            'https://a.blazemeter.com/api/v4/tests/1/start-external': {
                "result": {
                    'session': {
                        'id': 1,
                        'userId': 1,
                        'testId': 1
                    },
                    'master': {
                        'id': 1,
                        'userId': 1
                    },
                    'signature': 'sign'
                }
            },
            # a list value means responses are consumed in order per request
            'https://data.blazemeter.com/submit.php?session_id=1&signature=sign&test_id=1&user_id=1&pq=0&target=labels_bulk&update=1':
            [
                {},
                {
                    "result": {
                        'session': {
                            "statusCode": 140,
                            'status': 'ENDED'
                        }
                    }
                },
                {},
            ],
            'https://data.blazemeter.com/api/v4/image/1/files?signature=sign':
            [
                IOError("monitoring push expected fail"),
                {
                    "result": True
                },
                {
                    "result": True
                },
                {
                    "result": True
                },
                {
                    "result": True
                },
                {
                    "result": True
                },
                {
                    "result": True
                },
                {
                    "result": True
                },
                {
                    "result": True
                },
            ],
            'https://a.blazemeter.com/api/v4/sessions/1/stop': {},
            'https://data.blazemeter.com/submit.php?session_id=1&signature=sign&test_id=1&user_id=1&pq=0&target=engine_health&update=1':
            {
                "result": {
                    'session': {}
                }
            }
        })

        obj = BlazeMeterUploader()
        obj.parameters['project'] = 'Proj name'
        obj.settings['token'] = '123'
        obj.settings['browser-open'] = 'none'
        obj.engine = EngineEmul()
        shutil.copy(
            __file__,
            os.path.join(obj.engine.artifacts_dir, os.path.basename(__file__)))
        mock.apply(obj._user)
        obj._user.timeout = 0.1
        obj.prepare()
        obj.startup()
        for x in range(0, 31):
            obj.aggregated_second(random_datapoint(x))
        mon = [{
            "ts": 1,
            "source": "local",
            "cpu": 1,
            "mem": 2,
            "bytes-recv": 100,
            "other": 0
        }]
        obj.monitoring_data(mon)
        obj.check()
        for x in range(32, 65):
            obj.aggregated_second(random_datapoint(x))
        # force a dispatch so the next check sees the ENDED status response
        obj.last_dispatch = time.time() - 2 * obj.send_interval
        self.assertRaises(KeyboardInterrupt, obj.check)
        obj.aggregated_second(random_datapoint(10))
        obj.shutdown()
        log_file = obj.engine.create_artifact('log', '.tmp')
        handler = logging.FileHandler(log_file)
        obj.engine.log.parent.addHandler(handler)
        obj.engine.config.get('modules').get('shellexec').get(
            'env')['TAURUS_INDEX_ALL'] = 1
        obj.post_process()
        self.assertEqual(20, len(mock.requests))
        obj.engine.log.parent.removeHandler(handler)
# Example #13
    def test_no_notes_for_public_reporting(self):
        """With public reporting (empty token), no session/master notes must be posted."""
        mock = BZMock()
        mock.mock_post.update({
            'https://a.blazemeter.com/api/v4/sessions/1/terminate-external':
            {},
            'https://data.blazemeter.com/submit.php?session_id=1&signature=None&test_id=1&user_id=1&pq=0&target=labels_bulk&update=1':
            {},
        })

        obj = BlazeMeterUploader()
        obj.parameters['project'] = 'Proj name'
        obj.settings['token'] = ''  # public reporting
        obj.settings['browser-open'] = 'none'
        obj.engine = EngineEmul()
        mock.apply(obj._user)
        obj.prepare()

        # inject session/master directly; there is no real start-up against the API
        obj._session = Session(obj._user, {'id': 1, 'testId': 1, 'userId': 1})
        obj._master = Master(obj._user, {'id': 1})

        obj.engine.stopping_reason = ValueError('wrong value')
        obj.aggregated_second(random_datapoint(10))
        obj.kpi_buffer[-1][DataPoint.CUMULATIVE][''][KPISet.ERRORS] = [{
            'msg':
            'Forbidden',
            'cnt':
            10,
            'type':
            KPISet.ERRTYPE_ASSERT,
            'urls': [],
            KPISet.RESP_CODES:
            '111',
            'tag':
            ""
        }, {
            'msg':
            'Allowed',
            'cnt':
            20,
            'type':
            KPISet.ERRTYPE_ERROR,
            'urls': [],
            KPISet.RESP_CODES:
            '222'
        }]
        obj.send_monitoring = False
        obj.post_process()

        # TODO: looks like this whole block of checks is useless
        # check for note appending in _postproc_phase3()
        reqs = [{
            'url': '',
            'data': ''
        } for _ in range(4)]  # add template for minimal size
        reqs = (reqs + mock.requests)[-4:]
        self.assertNotIn('api/v4/sessions/1', reqs[0]['url'])
        self.assertNotIn('api/v4/sessions/1', reqs[1]['url'])
        self.assertNotIn('api/v4/masters/1', reqs[2]['url'])
        self.assertNotIn('api/v4/masters/1', reqs[3]['url'])
        if reqs[1]['data']:
            self.assertNotIn('ValueError: wrong value', reqs[1]['data'])
        if reqs[3]['data']:
            self.assertNotIn('ValueError: wrong value', reqs[3]['data'])
# Example #14
    def test_check_custom_metrics(self):
        """Lifecycle test via BlazeMeterClientEmul with custom metrics/tables
        enabled; all queued emulator responses must be consumed.

        NOTE(review): renamed from test_check — this file defines several
        methods under that one name, so all but the last were shadowed and
        never executed by the test runner.
        """
        client = BlazeMeterClientEmul(logging.getLogger(''))
        client.timeout = 1
        client.results.append({"marker": "ping", 'result': {}})
        client.results.append({"marker": "projects", 'result': []})

        client.results.append({
            "marker": "project-create",
            'result': {
                "id": time.time(),
                "name": "boo",
                "userId": time.time(),
                "description": None,
                "created": time.time(),
                "updated": time.time(),
                "organizationId": None
            }
        })
        client.results.append({"marker": "tests", 'result': {}})
        client.results.append({
            "marker": "test-create",
            'result': {
                'id': 'unittest1'
            }
        })
        client.results.append({
            "marker": "sess-start",
            "result": {
                'session': {
                    'id': 'sess1',
                    'userId': 1
                },
                'master': {
                    'id': 'master1',
                    'userId': 1
                },
                'signature': ''
            }
        })
        client.results.append({
            "marker": "first push",
            'result': {
                'session': {}
            }
        })
        # failed pushes are expected to be tolerated and retried
        client.results.append(IOError("monitoring push expected fail"))
        client.results.append({"marker": "mon push", "result": True})
        client.results.append(IOError("custom metric push expected fail"))
        client.results.append({
            "marker": "custom metrics push",
            "result": True
        })
        client.results.append({
            "marker": "second push",
            'result': {
                'session': {
                    "statusCode": 140,
                    'status': 'ENDED'
                }
            }
        })
        client.results.append({
            "marker": "post-proc push",
            'result': {
                'session': {}
            }
        })
        client.results.append({
            "marker": "post process monitoring push",
            "result": True
        })
        client.results.append({
            "marker": "post process custom metrics push",
            "result": True
        })
        client.results.append({"marker": "artifacts push", 'result': True})
        client.results.append({"marker": "logs push", 'result': True})
        client.results.append({
            "marker": "terminate",
            'result': {
                'session': {}
            }
        })

        obj = BlazeMeterUploader()
        obj.parameters['project'] = 'Proj name'
        obj.settings['token'] = '123'
        obj.settings['browser-open'] = 'none'
        obj.settings['send-custom-metrics'] = True
        obj.settings['send-custom-tables'] = True
        obj.engine = EngineEmul()
        # os.path.join keeps the copy inside artifacts_dir even when the
        # directory path has no trailing separator
        shutil.copy(__file__,
                    os.path.join(obj.engine.artifacts_dir, os.path.basename(__file__)))
        obj.client = client
        obj.prepare()
        obj.startup()
        for x in range(0, 31):
            obj.aggregated_second(random_datapoint(x))
        mon = [{
            "ts": 1,
            "source": "local",
            "cpu": 1,
            "mem": 2,
            "bytes-recv": 100,
            "other": 0
        }, {
            "ts": 1,
            "source": "chrome",
            "memory": 32,
            "cpu": 23
        }]
        obj.monitoring_data(mon)
        obj.check()
        for x in range(32, 65):
            obj.aggregated_second(random_datapoint(x))
        self.assertRaises(KeyboardInterrupt, obj.check)
        obj.aggregated_second(random_datapoint(10))
        obj.shutdown()
        log_file = obj.engine.create_artifact('log', '.tmp')
        obj.engine.log.parent.handlers.append(logging.FileHandler(log_file))
        obj.post_process()
        self.assertEqual(0, len(client.results))
# Example #15
    def test_public_report(self):
        """With 'public-report' enabled, a public-token link must be logged."""
        mock = BZMock()
        mock.mock_get.update({
            'https://a.blazemeter.com/api/v4/tests?workspaceId=1&name=Taurus+Test': {"result": []}
        })

        mock.mock_post.update({
            'https://a.blazemeter.com/api/v4/projects': {"result": {'id': 1}},
            'https://a.blazemeter.com/api/v4/tests': {'result': {'id': 'unittest1'}},
            'https://a.blazemeter.com/api/v4/tests/unittest1/start-external': {"result": {
                'session': {'id': 'sess1', 'userId': 1, 'testId': 1},
                'master': {'id': 'master1', 'userId': 1},
                'signature': ''
            }},
            'https://a.blazemeter.com/api/v4/masters/master1/public-token': {'result': {'publicToken': 'publicToken'}},
            'https://data.blazemeter.com/submit.php?session_id=sess1&signature=&test_id=1&user_id=1&pq=0&target=labels_bulk&update=1': {
                "result": {'session': {}}},
            'https://data.blazemeter.com/api/v4/image/sess1/files?signature=': {'result': True},
        })

        obj = BlazeMeterUploader()
        obj.settings['token'] = '123'
        obj.settings['browser-open'] = 'none'
        obj.settings['public-report'] = True
        obj.settings['send-monitoring'] = False
        obj.engine = EngineEmul()
        mock.apply(obj._user)
        self.sniff_log(obj.log)
        obj.prepare()
        obj.startup()
        obj.aggregated_second(random_datapoint(10))
        obj.check()
        obj.shutdown()
        obj.post_process()

        # the public link must be printed at INFO level during startup
        log_buff = self.log_recorder.info_buff.getvalue()
        log_line = "Public report link: https://a.blazemeter.com/app/?public-token=publicToken#/masters/master1/summary"
        self.assertIn(log_line, log_buff)
        ROOT_LOGGER.warning("\n".join([x['url'] for x in mock.requests]))
        self.assertEqual(14, len(mock.requests))
    def test_multiple_reporters_one_monitoring(self):
        """Two independent uploaders must be able to consume the same monitoring feed."""
        uploaders = []
        for _ in range(2):
            uploader = BlazeMeterUploader()
            uploader.engine = EngineEmul()
            uploader.client = BlazeMeterClientEmul(logging.getLogger(''))
            uploader.client.results.append({"marker": "ping", 'result': {}})
            uploaders.append(uploader)

        for uploader in uploaders:
            uploader.prepare()

        # feed the identical monitoring payload to both reporters on every tick
        for tick in range(10):
            mon = [{"ts": tick, "source": "local", "cpu": float(tick) / 1000 * 100, "mem": 2, "bytes-recv": 100, "other": 0}]
            for uploader in uploaders:
                uploader.monitoring_data(mon)
    def test_some_errors_payload(self):
        """Assertion-type errors go to 'assertions' and plain errors to 'errors'
        in the labels payload pushed during post_process.

        NOTE(review): renamed from test_some_errors — another method with that
        exact name appears later in this file and shadowed this one, so it
        never ran.
        """
        client = BlazeMeterClientEmul(logging.getLogger(''))
        client.results.append({"marker": "ping", 'result': {}})
        client.results.append({"marker": "projects", 'result': []})
        client.results.append({"marker": "project-create", 'result': {
            "id": time.time(),
            "name": "boo",
            "userId": time.time(),
            "description": None,
            "created": time.time(),
            "updated": time.time(),
            "organizationId": None
        }})
        client.results.append({"marker": "tests", 'result': {}})
        client.results.append({"marker": "test-create", 'result': {'id': 'unittest1'}})
        client.results.append(
            {"marker": "sess-start",
             "result": {
                 'session': {'id': 'sess1', 'userId': 1},
                 'master': {'id': 'master1', 'userId': 1},
                 'signature': ''}})
        client.results.append({"marker": "post-proc push", 'result': {'session': {}}})
        client.results.append({"marker": "upload1", "result": True})  # post-proc error stats
        client.results.append({"marker": "terminate", 'result': {'session': {}}})
        client.results.append({"marker": "terminate2", 'result': {'session': {}}})
        client.results.append({"marker": "sess-e", "result": {'session': {'id': 'sess1', 'note': 'n'}}})
        client.results.append({"marker": "sess-e", "result": {'session': {}}})
        client.results.append({"marker": "sess-e", "result": {'master': {'id': 'sess1', 'note': 'n'}}})
        client.results.append({"marker": "sess-e", "result": {'master': {}}})
        client.results.append({"marker": "upload-file", "result": {}})

        obj = BlazeMeterUploader()
        obj.parameters['project'] = 'Proj name'
        obj.settings['token'] = '123'
        obj.settings['browser-open'] = 'none'
        obj.engine = EngineEmul()
        obj.client = client
        obj.prepare()
        obj.startup()
        obj.engine.stopping_reason = ValueError('wrong value')
        obj.aggregated_second(random_datapoint(10))
        obj.kpi_buffer[-1][DataPoint.CURRENT][''][KPISet.ERRORS] = [
            {'msg': 'Forbidden', 'cnt': 10, 'type': KPISet.ERRTYPE_ASSERT, 'urls': [], KPISet.RESP_CODES: '111'},
            {'msg': 'Allowed', 'cnt': 20, 'type': KPISet.ERRTYPE_ERROR, 'urls': [], KPISet.RESP_CODES: '222'}]
        obj.post_process()
        self.assertEqual(0, len(client.results))
        # request 6 is the labels push; verify how errors were serialized
        data = json.loads(client.requests[6]['data'])
        self.assertEqual(1, len(data['labels']))
        total_item = data['labels'][0]
        self.assertEqual('ALL', total_item['name'])
        self.assertEqual(total_item['assertions'], [{
            'failureMessage': 'Forbidden',
            'failures': 10,
            'name': 'All Assertions'}])
        self.assertEqual(total_item['errors'], [{
            'm': 'Allowed',
            'count': 20,
            'rc': '222'}])
# Example #18
    def test_check_simple(self):
        """Minimal lifecycle via BlazeMeterClientEmul: no project parameter,
        no monitoring; the ENDED status on the second push must raise
        KeyboardInterrupt.

        NOTE(review): renamed from test_check — this file defines several
        methods under that one name, so all but the last were shadowed and
        never executed by the test runner.
        """
        client = BlazeMeterClientEmul(logging.getLogger(''))
        client.results.append({"marker": "ping", 'result': {}})
        client.results.append({"marker": "tests", 'result': {}})
        client.results.append({
            "marker": "test-create",
            'result': {
                'id': 'unittest1'
            }
        })
        client.results.append({
            "marker": "sess-start",
            'result': {
                'session': {
                    'id': 'sess1',
                    'userId': 1
                },
                'signature': ''
            }
        })
        client.results.append({
            "marker": "first push",
            'result': {
                'session': {}
            }
        })
        # client.results.append(None)  # first check error stats
        client.results.append({
            "marker": "second push",
            'result': {
                'session': {
                    "statusCode": 140,
                    'status': 'ENDED'
                }
            }
        })
        # client.results.append(None)  # second check error stats
        client.results.append({
            "marker": "post-proc push",
            'result': {
                'session': {}
            }
        })
        client.results.append({
            "marker": "upload1",
            "result": True
        })  # post-proc error stats
        client.results.append({
            "marker": "terminate",
            'result': {
                'session': {}
            }
        })

        obj = BlazeMeterUploader()
        obj.settings['token'] = '123'
        obj.settings['browser-open'] = 'none'
        obj.engine = EngineEmul()
        # os.path.join keeps the copy inside artifacts_dir even when the
        # directory path has no trailing separator
        shutil.copy(__file__,
                    os.path.join(obj.engine.artifacts_dir, os.path.basename(__file__)))
        obj.client = client
        obj.prepare()
        obj.startup()
        obj.aggregated_second(random_datapoint(0))
        obj.aggregated_second(random_datapoint(1))
        obj.aggregated_second(random_datapoint(2))
        obj.aggregated_second(random_datapoint(3))
        obj.aggregated_second(random_datapoint(4))
        obj.check()
        obj.aggregated_second(random_datapoint(5))
        obj.aggregated_second(random_datapoint(6))
        obj.aggregated_second(random_datapoint(7))
        obj.aggregated_second(random_datapoint(8))
        obj.aggregated_second(random_datapoint(9))
        try:
            obj.check()
            self.fail()
        except KeyboardInterrupt:
            pass
        obj.aggregated_second(random_datapoint(10))
        obj.shutdown()
        obj.post_process()
# Example #19
    def test_excluded_cumulative(self):
        """post_process() must tolerate a datapoint with an empty CUMULATIVE section
        (as produced when ramp-up data is excluded)."""
        mock = BZMock()
        mock.mock_get.update({
            'https://a.blazemeter.com/api/v4/tests?projectId=1&name=Taurus+Test':
            {
                "result": []
            },
        })
        mock.mock_post.update({
            'https://a.blazemeter.com/api/v4/projects': {
                "result": {
                    'id': 1
                }
            },
            'https://a.blazemeter.com/api/v4/tests': {
                "result": {
                    'id': 1
                }
            },
            'https://a.blazemeter.com/api/v4/tests/1/start-external': {
                "result": {
                    "session": {
                        'id': 1,
                        "testId": 1,
                        "userId": 1
                    },
                    "master": {
                        'id': 1
                    },
                    "signature": "sign"
                }
            },
            'https://data.blazemeter.com/api/v4/image/1/files?signature=sign':
            {
                "result": True
            },
            'https://data.blazemeter.com/submit.php?session_id=1&signature=sign&test_id=1&user_id=1' + '&pq=0&target=labels_bulk&update=1':
            {},
            'https://data.blazemeter.com/submit.php?session_id=1&signature=sign&test_id=1&user_id=1&pq=0&target=engine_health&update=1':
            {
                'result': {
                    'session': {}
                }
            }
        })

        obj = BlazeMeterUploader()
        mock.apply(obj._user)
        obj.parameters['project'] = 'Proj name'
        obj.settings['token'] = '123'
        obj.settings['browser-open'] = 'none'
        obj.engine = EngineEmul()
        obj.prepare()
        obj.startup()
        obj.aggregated_second(random_datapoint(10))
        obj.kpi_buffer[-1][DataPoint.CUMULATIVE] = {
        }  # remove cumulative when ramp-up data is excluded
        obj.post_process(
        )  # no 'Cumulative KPISet is non-consistent' exception here
 def test_direct_feeding(self):
     """With explicit session-id/signature parameters the uploader must feed
     that session directly, without creating a test or session."""
     obj = BlazeMeterUploader()
     obj.engine = EngineEmul()
     mock = BZMock(obj._user)
     mock.mock_post.update({
         'https://data.blazemeter.com/submit.php?session_id=direct&signature=sign&test_id=None&user_id=None&pq=0&target=labels_bulk&update=1': {},
         'https://a.blazemeter.com/api/v4/image/direct/files?signature=sign': {"result": True},
     })
     obj.parameters['session-id'] = 'direct'
     obj.parameters['signature'] = 'sign'
     obj.prepare()
     obj.startup()
     obj.check()
     obj.shutdown()
     obj.post_process()
     # session id comes straight from parameters, not from any API response
     self.assertEquals('direct', obj._session['id'])
     self.assertEqual(2, len(mock.requests))
    def test_check(self):
        """Lifecycle via BlazeMeterClientEmul with one monitoring push; the
        ENDED status returned on the second labels push must make check()
        raise KeyboardInterrupt."""
        client = BlazeMeterClientEmul(logging.getLogger(''))
        client.results.append({"marker": "ping", 'result': {}})
        client.results.append({"marker": "projects", 'result': []})

        client.results.append({"marker": "project-create", 'result': {
            "id": time.time(),
            "name": "boo",
            "userId": time.time(),
            "description": None,
            "created": time.time(),
            "updated": time.time(),
            "organizationId": None
        }})
        client.results.append({"marker": "tests", 'result': {}})
        client.results.append({"marker": "test-create", 'result': {'id': 'unittest1'}})
        client.results.append(
            {"marker": "sess-start",
             "result": {
                 'session': {'id': 'sess1', 'userId': 1},
                 'master': {'id': 'master1', 'userId': 1},
                 'signature': ''}})
        client.results.append({"marker": "first push", 'result': {'session': {}}})
        client.results.append({"marker": "mon push", "result": True})
        client.results.append({"marker": "second push", 'result': {'session': {"statusCode": 140, 'status': 'ENDED'}}})
        client.results.append({"marker": "post-proc push", 'result': {'session': {}}})
        client.results.append({"marker": "upload1", "result": True})  # post-proc error stats
        client.results.append({"marker": "terminate", 'result': {'session': {}}})

        obj = BlazeMeterUploader()
        obj.parameters['project'] = 'Proj name'
        obj.settings['token'] = '123'
        obj.settings['browser-open'] = 'none'
        obj.engine = EngineEmul()
        # os.path.join keeps the copy inside artifacts_dir even when the
        # directory path has no trailing separator (plain '+' could write
        # the file outside the artifacts dir)
        shutil.copy(__file__, os.path.join(obj.engine.artifacts_dir, os.path.basename(__file__)))
        obj.client = client
        obj.prepare()
        obj.startup()
        for x in range(0, 31):
            obj.aggregated_second(random_datapoint(x))
        mon = [{"ts": 1, "source": "local", "cpu": 1, "mem": 2, "bytes-recv": 100, "other": 0}]
        obj.monitoring_data(mon)
        obj.check()
        for x in range(32, 65):
            obj.aggregated_second(random_datapoint(x))
        self.assertRaises(KeyboardInterrupt, obj.check)
        obj.aggregated_second(random_datapoint(10))
        obj.shutdown()
        obj.post_process()
    def test_some_errors(self):
        """Post-process path when the engine stopped with an error:
        error stats must be uploaded and session/master notes updated."""
        client = BlazeMeterClientEmul(logging.getLogger(''))
        project_info = {
            "id": time.time(),
            "name": "boo",
            "userId": time.time(),
            "description": None,
            "created": time.time(),
            "updated": time.time(),
            "organizationId": None
        }
        session_start = {
            'session': {'id': 'sess1', 'userId': 1},
            'master': {'id': 'master1', 'userId': 1},
            'signature': ''
        }
        client.results.extend([
            {"marker": "ping", 'result': {}},
            {"marker": "projects", 'result': []},
            {"marker": "project-create", 'result': project_info},
            {"marker": "tests", 'result': {}},
            {"marker": "test-create", 'result': {'id': 'unittest1'}},
            {"marker": "sess-start", "result": session_start},
            {"marker": "post-proc push", 'result': {'session': {}}},
            {"marker": "upload1", "result": True},  # post-proc error stats
            {"marker": "terminate", 'result': {'session': {}}},
            {"marker": "terminate2", 'result': {'session': {}}},
            {"marker": "sess-e", "result": {'session': {'id': 'sess1', 'note': 'n'}}},
            {"marker": "sess-e", "result": {'session': {}}},
            {"marker": "sess-e", "result": {'master': {'id': 'sess1', 'note': 'n'}}},
            {"marker": "sess-e", "result": {'master': {}}},
            {"marker": "upload-file", "result": {}},
        ])

        uploader = BlazeMeterUploader()
        uploader.parameters['project'] = 'Proj name'
        uploader.settings['token'] = '123'
        uploader.settings['browser-open'] = 'none'
        uploader.engine = EngineEmul()
        uploader.client = client
        uploader.prepare()
        uploader.startup()
        uploader.engine.stopping_reason = ValueError('wrong value')
        uploader.aggregated_second(random_datapoint(10))
        last_cumulative_errors = [
            {'msg': 'Forbidden', 'cnt': 7373, 'type': KPISet.ERRTYPE_ASSERT, 'urls': [], KPISet.RESP_CODES: '403'},
            {'msg': 'Allowed', 'cnt': 7373, 'type': KPISet.ERRTYPE_ERROR, 'urls': [], KPISet.RESP_CODES: '403'}]
        uploader.kpi_buffer[-1][DataPoint.CUMULATIVE][''][KPISet.ERRORS] = last_cumulative_errors
        uploader.post_process()
    def test_extend_datapoints(self):
        """Verify the KPI body format conversion under extended aggregation:
        every datapoint sent to BlazeMeter must key its per-label data by
        numeric test-state labels (0/1/2) rather than raw KPI field names."""
        # check reported data format conversion for test state filtering on BM side

        def get_mock(origin_func, store):
            # generate replacement for BlazemeterUploader._dpoint_serializer.get_kpi_body
            def mock_get_kpi_body(data, isfinal):
                store.append(data)  # save received data for verifying
                return origin_func(
                    data, isfinal)  # call original get_kpi_body as well

            return mock_get_kpi_body

        mock = BZMock()
        mock.mock_get.update({
            '1': {
                "result": []
            },
            'https://a.blazemeter.com/api/v4/tests?projectId=1&name=Taurus+Test':
            {
                "result": []
            },
            '3': {
                "result": []
            },
        })
        # NOTE(review): where a URL maps to a list, responses are presumably
        # consumed one per request in order — confirm against BZMock
        mock.mock_post.update({
            'https://a.blazemeter.com/api/v4/projects': {
                "result": {
                    "id": 1
                }
            },
            'https://a.blazemeter.com/api/v4/tests': {
                "result": {
                    'id': 1
                }
            },
            'https://a.blazemeter.com/api/v4/tests/1/start-external': {
                "result": {
                    'session': {
                        'id': 1,
                        'userId': 1,
                        'testId': 1
                    },
                    'master': {
                        'id': 1,
                        'userId': 1
                    },
                    'signature': 'sign'
                }
            },
            'https://data.blazemeter.com/submit.php?session_id=1&signature=sign&test_id=1&user_id=1&pq=0&target=labels_bulk&update=1':
            [
                {},
                {
                    "result": {
                        'session': {
                            "statusCode": 140,
                            'status': 'ENDED'
                        }
                    }
                },
                {},
            ],
            'https://data.blazemeter.com/api/v4/image/1/files?signature=sign':
            [
                IOError("monitoring push expected fail"),
                {
                    "result": True
                },
                {
                    "result": True
                },
                {
                    "result": True
                },
                {
                    "result": True
                },
                {
                    "result": True
                },
                {
                    "result": True
                },
                {
                    "result": True
                },
                {
                    "result": True
                },
            ],
            'https://a.blazemeter.com/api/v4/sessions/1/stop': {},
            'https://data.blazemeter.com/submit.php?session_id=1&signature=sign&test_id=1&user_id=1&pq=0&target=engine_health&update=1':
            {
                "result": {
                    'session': {}
                }
            }
        })

        obj = BlazeMeterUploader()
        # intercept serialized KPI bodies for later inspection
        sent_data_points = []
        obj._dpoint_serializer.get_kpi_body = get_mock(
            obj._dpoint_serializer.get_kpi_body, sent_data_points)
        obj.parameters['project'] = 'Proj name'
        obj.settings['token'] = '123'
        obj.settings['browser-open'] = 'none'
        obj.engine = EngineEmul()
        aggregator = ConsolidatingAggregator()
        aggregator.engine = obj.engine
        aggregator.settings['extend-aggregation'] = True
        reader = MockReader()
        watcher = MockReader()

        reader.buffer_scale_idx = '100.0'
        # data format: t_stamp, label, conc, r_time, con_time, latency, r_code, error, trname, byte_count
        reader.data.append((1, "a", 1, 1, 1, 1, 200, None, '', 1))
        reader.data.append((2, "b", 1, 2, 2, 2, 200, 'OK', '', 2))
        reader.data.append((2, "b", 1, 3, 3, 3, 404, "Not Found", '', 3))
        reader.data.append((2, "c", 1, 4, 4, 4, 200, None, '', 4))
        reader.data.append((3, "d", 1, 5, 5, 5, 200, None, '', 5))
        reader.data.append((5, "b", 1, 6, 6, 6, 200, None, '', 6))
        reader.data.append((5, "c", 1, 7, 7, 7, 200, None, '', 7))
        original_labels = list(d[1] for d in reader.data)

        aggregator.add_underling(reader)
        aggregator.add_listener(watcher)
        obj.engine.aggregator = aggregator

        mock.apply(obj._user)
        obj._user.timeout = 0.001

        # drive aggregator and uploader through the full lifecycle in lockstep
        obj.engine.aggregator.prepare()
        obj.prepare()

        obj.engine.aggregator.startup()
        obj.startup()

        obj.engine.aggregator.check()
        obj.check()

        obj.engine.aggregator.shutdown()
        obj.shutdown()

        obj.engine.aggregator.post_process()
        obj.post_process()

        sent_data_points = sent_data_points[0] + sent_data_points[1]

        # each label's payload must be a dict keyed only by state labels 0/1/2
        state_labels = [0, 1, 2]
        for dp in sent_data_points:
            for data in dp['cumulative'], dp['current']:
                for label in data:
                    self.assertIn(label, original_labels + [''])
                    self.assertIsInstance(data[label], dict)
                    for key in data[label]:
                        self.assertIn(key, state_labels)
# --- Example #24 (separator from code-example aggregation; vote count: 0) ---
    def test_public_report(self):
        """When 'public-report' is enabled, a public token must be requested
        and the shareable report link logged at INFO level."""
        mock = BZMock()
        mock.mock_get.update({
            'https://a.blazemeter.com/api/v4/tests?workspaceId=1&name=Taurus+Test': {"result": []},
        })

        started_session = {'id': 'sess1', 'userId': 1, 'testId': 1}
        started_master = {'id': 'master1', 'userId': 1}
        mock.mock_post.update({
            'https://a.blazemeter.com/api/v4/projects': {"result": {'id': 1}},
            'https://a.blazemeter.com/api/v4/tests': {'result': {'id': 'unittest1'}},
            'https://a.blazemeter.com/api/v4/tests/unittest1/start-external': {
                "result": {'session': started_session, 'master': started_master, 'signature': ''}},
            'https://a.blazemeter.com/api/v4/masters/master1/public-token': {
                'result': {'publicToken': 'publicToken'}},
            'https://data.blazemeter.com/submit.php?session_id=sess1&signature=&test_id=1&user_id=1&pq=0&target=labels_bulk&update=1': {
                "result": {'session': {}}},
            'https://data.blazemeter.com/api/v4/image/sess1/files?signature=': {'result': True},
        })

        uploader = BlazeMeterUploader()
        uploader.settings['token'] = '123'
        uploader.settings['browser-open'] = 'none'
        uploader.settings['public-report'] = True
        uploader.settings['send-monitoring'] = False
        uploader.engine = EngineEmul()
        mock.apply(uploader._user)
        self.sniff_log(uploader.log)

        uploader.prepare()
        uploader.startup()
        uploader.aggregated_second(random_datapoint(10))
        uploader.check()
        uploader.shutdown()
        uploader.post_process()

        log_buff = self.log_recorder.info_buff.getvalue()
        log_line = "Public report link: https://a.blazemeter.com/app/?public-token=publicToken#/masters/master1/summary"
        self.assertIn(log_line, log_buff)
        ROOT_LOGGER.warning("\n".join([request['url'] for request in mock.requests]))
        self.assertEqual(14, len(mock.requests))
# --- Example #25 (separator from code-example aggregation; vote count: 0) ---
    def test_check(self):
        """Full lifecycle against a scripted client emulator (no master object).

        The 'second push' result marks the session ENDED (statusCode 140),
        after which check() is expected to raise KeyboardInterrupt.
        """
        client = BlazeMeterClientEmul(logging.getLogger(''))
        client.results.append({"marker": "ping", 'result': {}})
        client.results.append({"marker": "projects", 'result': []})

        client.results.append({
            "marker": "project-create",
            'result': {
                "id": time.time(),
                "name": "boo",
                "userId": time.time(),
                "description": None,
                "created": time.time(),
                "updated": time.time(),
                "organizationId": None
            }
        })
        client.results.append({"marker": "tests", 'result': {}})
        client.results.append({
            "marker": "test-create",
            'result': {
                'id': 'unittest1'
            }
        })
        client.results.append({
            "marker": "sess-start",
            'result': {
                'session': {
                    'id': 'sess1',
                    'userId': 1
                },
                'signature': ''
            }
        })
        client.results.append({
            "marker": "first push",
            'result': {
                'session': {}
            }
        })
        client.results.append({"marker": "mon push", "result": True})
        client.results.append({
            "marker": "second push",
            'result': {
                'session': {
                    "statusCode": 140,
                    'status': 'ENDED'
                }
            }
        })
        client.results.append({
            "marker": "post-proc push",
            'result': {
                'session': {}
            }
        })
        client.results.append({
            "marker": "upload1",
            "result": True
        })  # post-proc error stats
        client.results.append({
            "marker": "terminate",
            'result': {
                'session': {}
            }
        })

        obj = BlazeMeterUploader()
        obj.parameters['project'] = 'Proj name'
        obj.settings['token'] = '123'
        obj.settings['browser-open'] = 'none'
        obj.engine = EngineEmul()
        # os.path.join instead of concatenation: don't rely on artifacts_dir
        # having a trailing separator
        shutil.copy(__file__,
                    os.path.join(obj.engine.artifacts_dir, os.path.basename(__file__)))
        obj.client = client
        obj.prepare()
        obj.startup()
        for x in range(0, 31):
            obj.aggregated_second(random_datapoint(x))
        mon = [{
            "ts": 1,
            "source": "local",
            "cpu": 1,
            "mem": 2,
            "bytes-recv": 100,
            "other": 0
        }]
        obj.monitoring_data(mon)
        obj.check()
        for x in range(32, 65):
            obj.aggregated_second(random_datapoint(x))
        # assertRaises replaces the manual try/except/self.fail() pattern,
        # consistent with the sibling test_check variants in this file
        self.assertRaises(KeyboardInterrupt, obj.check)
        obj.aggregated_second(random_datapoint(10))
        obj.shutdown()
        obj.post_process()
    def test_check(self):
        """Lifecycle test with custom metrics/tables enabled and injected
        transient IOErrors: monitoring and custom-metric pushes each fail once
        and must be retried; all queued results must be consumed by the end.
        """
        client = BlazeMeterClientEmul(logging.getLogger(''))
        client.timeout = 1
        client.results.append({"marker": "ping", 'result': {}})
        client.results.append({"marker": "projects", 'result': []})

        client.results.append({"marker": "project-create", 'result': {
            "id": time.time(),
            "name": "boo",
            "userId": time.time(),
            "description": None,
            "created": time.time(),
            "updated": time.time(),
            "organizationId": None
        }})
        client.results.append({"marker": "tests", 'result': {}})
        client.results.append({"marker": "test-create", 'result': {'id': 'unittest1'}})
        client.results.append(
            {"marker": "sess-start",
             "result": {
                 'session': {'id': 'sess1', 'userId': 1},
                 'master': {'id': 'master1', 'userId': 1},
                 'signature': ''}})
        client.results.append({"marker": "first push", 'result': {'session': {}}})
        client.results.append(IOError("monitoring push expected fail"))
        client.results.append({"marker": "mon push", "result": True})
        client.results.append(IOError("custom metric push expected fail"))
        client.results.append({"marker": "custom metrics push", "result": True})
        client.results.append({"marker": "second push", 'result': {'session': {"statusCode": 140, 'status': 'ENDED'}}})
        client.results.append({"marker": "post-proc push", 'result': {'session': {}}})
        client.results.append({"marker": "post process monitoring push", "result": True})
        client.results.append({"marker": "post process custom metrics push", "result": True})
        client.results.append({"marker": "artifacts push", 'result': True})
        client.results.append({"marker": "logs push", 'result': True})
        client.results.append({"marker": "terminate", 'result': {'session': {}}})

        obj = BlazeMeterUploader()
        obj.parameters['project'] = 'Proj name'
        obj.settings['token'] = '123'
        obj.settings['browser-open'] = 'none'
        obj.settings['send-custom-metrics'] = True
        obj.settings['send-custom-tables'] = True
        obj.engine = EngineEmul()
        # os.path.join instead of concatenation: don't rely on artifacts_dir
        # having a trailing separator
        shutil.copy(__file__, os.path.join(obj.engine.artifacts_dir, os.path.basename(__file__)))
        obj.client = client
        obj.prepare()
        obj.startup()
        for x in range(0, 31):
            obj.aggregated_second(random_datapoint(x))
        mon = [{"ts": 1, "source": "local", "cpu": 1, "mem": 2, "bytes-recv": 100, "other": 0},
               {"ts": 1, "source": "chrome", "memory": 32, "cpu": 23}]
        obj.monitoring_data(mon)
        obj.check()
        for x in range(32, 65):
            obj.aggregated_second(random_datapoint(x))
        self.assertRaises(KeyboardInterrupt, obj.check)
        obj.aggregated_second(random_datapoint(10))
        obj.shutdown()
        log_file = obj.engine.create_artifact('log', '.tmp')
        obj.engine.log.parent.handlers.append(logging.FileHandler(log_file))
        obj.engine.config.get('modules').get('shellexec').get('env')['TAURUS_INDEX_ALL'] = 1
        obj.post_process()
        # emulator queue fully drained proves every expected request was made
        self.assertEqual(0, len(client.results))
# --- Example #27 (separator from code-example aggregation; vote count: 0) ---
    def test_multiple_reporters_one_monitoring(self):
        """Two uploaders must be able to consume the same monitoring feed
        independently without interfering with each other."""
        uploaders = []
        for _ in range(2):
            uploader = BlazeMeterUploader()
            uploader.engine = EngineEmul()
            BZMock(uploader._user)
            uploaders.append(uploader)

        for uploader in uploaders:
            uploader.prepare()

        for tick in range(10):
            sample = [{"ts": tick, "source": "local", "cpu": float(tick) / 1000 * 100,
                       "mem": 2, "bytes-recv": 100, "other": 0}]
            for uploader in uploaders:
                uploader.monitoring_data(sample)
# --- Example #28 (separator from code-example aggregation; vote count: 0) ---
    def test_xml_format_sample_labels(self):
        """Verify JUnit XML produced from cumulative sample-label KPI data:
        one testcase per non-empty label, with error/failure children derived
        from the KPI error entries, and the BlazeMeter report link embedded
        in system-out."""
        # generate xml, compare hash

        obj = JUnitXMLReporter()
        obj.engine = EngineEmul()
        # a BlazeMeterUploader sibling reporter supplies the report link
        rep = BlazeMeterUploader()
        rep.results_url = "http://report/123"
        obj.engine.reporters.append(rep)

        path_from_config = tempfile.mktemp(suffix='.xml', prefix='junit-xml-sample-labels',
                                           dir=obj.engine.artifacts_dir)

        # data-source: finalstats by default
        obj.parameters = BetterDict.from_dict({"filename": path_from_config})

        obj.prepare()

        datapoint = DataPoint(0, [])
        cumul_data = datapoint[DataPoint.CUMULATIVE]

        # '' is the overall (all-labels) bucket
        cumul_data[""] = KPISet.from_dict({
            KPISet.AVG_CONN_TIME: 7.890211417203362e-06,
            KPISet.RESP_TIMES: Counter({
                0.0: 32160, 0.001: 24919, 0.002: 1049, 0.003: 630, 0.004: 224, 0.005: 125,
                0.006: 73, 0.007: 46, 0.008: 32, 0.009: 20, 0.011: 8, 0.01: 8, 0.017: 3,
                0.016: 3, 0.014: 3, 0.013: 3, 0.04: 2, 0.012: 2, 0.079: 1, 0.081: 1,
                0.019: 1, 0.015: 1}),
            KPISet.ERRORS: [{'msg': 'Forbidden', 'cnt': 7300, 'type': 0,
                             'urls': Counter({'http://192.168.1.1/anotherquery': 7300}),
                             KPISet.RESP_CODES: '403'},
                            {'msg': 'Assertion failed: text /smth/ not found', 'cnt': 73, 'type': 1,
                             'urls': Counter({'http://192.168.1.1/anotherquery': 73}),
                             KPISet.RESP_CODES: '200'},
                            ],
            KPISet.STDEV_RESP_TIME: 0.04947974228872108,
            KPISet.AVG_LATENCY: 0.0002825639815220692,
            KPISet.RESP_CODES: Counter({'304': 29656, '403': 29656, '200': 2}),
            KPISet.PERCENTILES: {'95.0': 0.001, '0.0': 0.0, '99.9': 0.008, '90.0': 0.001,
                                 '100.0': 0.081, '99.0': 0.003, '50.0': 0.0},
            KPISet.SUCCESSES: 29658,
            KPISet.SAMPLE_COUNT: 59314,
            KPISet.CONCURRENCY: 0,
            KPISet.AVG_RESP_TIME: 0.0005440536804127192,
            KPISet.FAILURES: 29656})

        # label with no errors -> expected to yield a passing testcase
        cumul_data["http://192.168.1.1/somequery"] = KPISet.from_dict({
            KPISet.AVG_CONN_TIME: 9.609548856969457e-06,
            KPISet.RESP_TIMES: Counter({
                0.0: 17219, 0.001: 11246, 0.002: 543, 0.003: 341,
                0.004: 121,
                0.005: 66, 0.006: 36, 0.007: 33, 0.008: 18,
                0.009: 12, 0.011: 6,
                0.01: 5, 0.013: 2, 0.017: 2, 0.012: 2, 0.079: 1,
                0.016: 1,
                0.014: 1, 0.019: 1, 0.04: 1, 0.081: 1}),
            KPISet.ERRORS: [],
            KPISet.STDEV_RESP_TIME: 0.04073402130687656,
            KPISet.AVG_LATENCY: 1.7196034796682178e-06,
            KPISet.RESP_CODES: Counter({'304': 29656, '200': 2}),
            KPISet.PERCENTILES: {'95.0': 0.001, '0.0': 0.0,
                                 '99.9': 0.009,
                                 '90.0': 0.001,
                                 '100.0': 0.081,
                                 '99.0': 0.004,
                                 '50.0': 0.0},
            KPISet.SUCCESSES: 29658,
            KPISet.SAMPLE_COUNT: 29658,
            KPISet.CONCURRENCY: 0,
            KPISet.AVG_RESP_TIME: 0.0005164542450603551, KPISet.FAILURES: 0})

        # label whose samples all failed -> testcase with error + failure children
        cumul_data["http://192.168.1.1/anotherquery"] = KPISet.from_dict({
            KPISet.AVG_CONN_TIME: 6.1707580253574335e-06,
            KPISet.RESP_TIMES: Counter({0.0: 14941, 0.001: 13673, 0.002: 506,
                                        0.003: 289, 0.004: 103,
                                        0.005: 59, 0.006: 37, 0.008: 14,
                                        0.007: 13, 0.009: 8, 0.01: 3,
                                        0.011: 2, 0.016: 2, 0.014: 2,
                                        0.017: 1, 0.013: 1, 0.015: 1,
                                        0.04: 1}),
            KPISet.ERRORS: [
                {'msg': 'Forbidden', 'cnt': 7300, 'type': 0,
                 'urls': Counter({'http://192.168.1.1/anotherquery': 7300}),
                 KPISet.RESP_CODES: '403'},
                {'msg': 'Assertion failed: text /smth/ not found', 'cnt': 73, 'type': 1,
                 'urls': Counter({'http://192.168.1.1/anotherquery': 73}),
                 KPISet.RESP_CODES: '200'},
            ],
            KPISet.STDEV_RESP_TIME: 0.032465137860758844,
            KPISet.AVG_LATENCY: 0.0005634272997032645,
            KPISet.RESP_CODES: Counter({'403': 29656}),
            KPISet.PERCENTILES: {'95.0': 0.001, '0.0': 0.0,
                                 '99.9': 0.008, '90.0': 0.001,
                                 '100.0': 0.04, '99.0': 0.003,
                                 '50.0': 0.0},
            KPISet.SUCCESSES: 0,
            KPISet.SAMPLE_COUNT: 29656,
            KPISet.CONCURRENCY: 0,
            KPISet.AVG_RESP_TIME: 0.0005716549770704078,
            KPISet.FAILURES: 29656})

        cumul_data["http://192.168.100.100/somequery"] = KPISet.from_dict({
            KPISet.AVG_CONN_TIME: 9.609548856969457e-06,
            KPISet.RESP_TIMES: Counter({
                0.0: 17219, 0.001: 11246, 0.002: 543,
                0.003: 341, 0.004: 121,
                0.005: 66, 0.006: 36, 0.007: 33, 0.008: 18,
                0.009: 12, 0.011: 6,
                0.01: 5, 0.013: 2, 0.017: 2, 0.012: 2,
                0.079: 1, 0.016: 1,
                0.014: 1, 0.019: 1, 0.04: 1, 0.081: 1}),
            KPISet.ERRORS: [],
            KPISet.STDEV_RESP_TIME: 0.04073402130687656,
            KPISet.AVG_LATENCY: 1.7196034796682178e-06,
            KPISet.RESP_CODES: Counter({'304': 29656, '200': 2}),
            KPISet.PERCENTILES: {'95.0': 0.001, '0.0': 0.0,
                                 '99.9': 0.009, '90.0': 0.001,
                                 '100.0': 0.081, '99.0': 0.004,
                                 '50.0': 0.0},
            KPISet.SUCCESSES: 29658,
            KPISet.SAMPLE_COUNT: 29658,
            KPISet.CONCURRENCY: 0,
            KPISet.AVG_RESP_TIME: 0.0005164542450603551,
            KPISet.FAILURES: 0})

        obj.aggregated_second(datapoint)

        obj.post_process()

        with open(obj.report_file_path, 'rb') as fds:
            f_contents = fds.read()

        ROOT_LOGGER.info("File: %s", f_contents)
        xml_tree = etree.fromstring(f_contents)
        self.assertEqual('testsuites', xml_tree.tag)
        suite = xml_tree.getchildren()[0]
        self.assertEqual('testsuite', suite.tag)
        self.assertListEqual(['sample_labels', "bzt"], suite.values())
        test_cases = suite.getchildren()
        # 3 testcases: one per non-empty label ('' overall bucket is skipped)
        self.assertEqual(3, len(test_cases))
        self.assertEqual('testcase', test_cases[0].tag)
        self.assertEqual('error', test_cases[0].getchildren()[1].tag)
        self.assertEqual('failure', test_cases[0].getchildren()[2].tag)
        self.assertEqual('system-out', test_cases[0].getchildren()[0].tag)
        self.assertIn('BlazeMeter report link: http://report/123', test_cases[0].getchildren()[0].text)
# --- Example #29 (separator from code-example aggregation; vote count: 0) ---
    def test_xml_format_passfail(self):
        """Verify JUnit XML produced from pass-fail criteria: one testcase per
        criterion, with an <error> child only for triggered criteria."""
        obj = JUnitXMLReporter()
        obj.engine = EngineEmul()
        obj.parameters = BetterDict()

        pass_fail1 = PassFailStatus()

        fc1_triggered = DataCriteria(
            {
                'stop': True,
                'label': 'Sample 1 Triggered',
                'fail': True,
                'timeframe': -1,
                'threshold': '150ms',
                'condition': '<',
                'subject': 'avg-rt'
            }, pass_fail1)

        fc1_not_triggered = DataCriteria(
            {
                'stop': True,
                'label': 'Sample 1 Not Triggered',
                'fail': True,
                'timeframe': -1,
                'threshold': '300ms',
                'condition': '>',
                'subject': 'avg-rt'
            }, pass_fail1)

        pass_fail2 = PassFailStatus()

        # NOTE(review): owner is pass_fail1 although this criterion is appended
        # to pass_fail2 below — looks like a copy-paste slip; confirm whether
        # DataCriteria's owner argument matters here before changing
        fc2_triggered = DataCriteria(
            {
                'stop': True,
                'label': 'Sample 2 Triggered',
                'fail': True,
                'timeframe': -1,
                'threshold': '150ms',
                'condition': '<=',
                'subject': 'avg-rt'
            }, pass_fail1)

        # NOTE(review): same pass_fail1-vs-pass_fail2 question as above
        fc2_not_triggered = DataCriteria(
            {
                'stop': True,
                'label': 'Sample 2 Not Triggered',
                'fail': True,
                'timeframe': -1,
                'threshold': '300ms',
                'condition': '=',
                'subject': 'avg-rt'
            }, pass_fail1)

        pass_fail1.criterias.append(fc1_triggered)
        pass_fail1.criterias.append(fc1_not_triggered)
        pass_fail2.criterias.append(fc2_triggered)
        pass_fail2.criterias.append(fc2_not_triggered)

        # mark one criterion per status object as triggered
        fc1_triggered.is_triggered = True
        fc2_triggered.is_triggered = True

        obj.engine.reporters.append(pass_fail1)
        obj.engine.reporters.append(pass_fail2)
        obj.engine.reporters.append(BlazeMeterUploader())

        path_from_config = tempfile.mktemp(suffix='.xml',
                                           prefix='junit-xml_passfail',
                                           dir=obj.engine.artifacts_dir)

        obj.parameters.merge({
            "filename": path_from_config,
            "data-source": "pass-fail"
        })
        obj.prepare()
        obj.last_second = DataPoint(0)
        obj.post_process()

        with open(obj.report_file_path, 'rb') as fds:
            f_contents = fds.read()

        xml_tree = etree.fromstring(f_contents)
        self.assertEqual('testsuite', xml_tree.tag)
        # 4 criteria -> 4 testcases; children 0 and 2 (the triggered ones)
        # carry an <error> element
        self.assertEqual(4, len(xml_tree.getchildren()))
        self.assertEqual('testcase', xml_tree.getchildren()[0].tag)
        self.assertEqual('error',
                         xml_tree.getchildren()[0].getchildren()[0].tag)
        self.assertEqual('error',
                         xml_tree.getchildren()[2].getchildren()[0].tag)
    def test_no_notes_for_public_reporting(self):
        """With an empty token (public/anonymous reporting) the post-process
        phase must not attach session/master notes containing the stopping
        reason."""
        client = BlazeMeterClientEmul(logging.getLogger(''))
        client.results.append({"marker": "ping", 'result': {}})
        client.results.extend([{'result': {}} for _ in range(6)])

        uploader = BlazeMeterUploader()
        uploader.parameters['project'] = 'Proj name'
        uploader.settings['token'] = ''  # public reporting
        uploader.settings['browser-open'] = 'none'
        uploader.engine = EngineEmul()
        uploader.client = client
        uploader.prepare()

        client.session_id = 'sess1'
        client.master_id = 'master1'

        uploader.engine.stopping_reason = ValueError('wrong value')
        uploader.aggregated_second(random_datapoint(10))
        uploader.kpi_buffer[-1][DataPoint.CUMULATIVE][''][KPISet.ERRORS] = [
            {'msg': 'Forbidden', 'cnt': 10, 'type': KPISet.ERRTYPE_ASSERT, 'urls': [], KPISet.RESP_CODES: '111'},
            {'msg': 'Allowed', 'cnt': 20, 'type': KPISet.ERRTYPE_ERROR, 'urls': [], KPISet.RESP_CODES: '222'}]
        uploader.send_monitoring = uploader.send_custom_metrics = uploader.send_custom_tables = False
        uploader.post_process()

        # check for note appending in _postproc_phase3(): pad with empty
        # request templates so the tail slice always has four entries
        padding = [{'url': '', 'data': ''} for _ in range(4)]
        last_four = (padding + uploader.client.requests)[-4:]
        self.assertNotIn('api/latest/sessions/sess1', last_four[0]['url'])
        self.assertNotIn('api/latest/sessions/sess1', last_four[1]['url'])
        self.assertNotIn('api/latest/masters/master1', last_four[2]['url'])
        self.assertNotIn('api/latest/masters/master1', last_four[3]['url'])
        for request in (last_four[1], last_four[3]):
            if request['data']:
                self.assertNotIn('ValueError: wrong value', request['data'])
# --- Example #31 (separator from code-example aggregation; vote count: 0) ---
    def test_no_notes_for_public_reporting(self):
        """BZMock variant: with an empty token (public reporting) post_process
        must not send session/master notes containing the stopping reason."""
        mock = BZMock()
        mock.mock_post.update({
            'https://a.blazemeter.com/api/v4/sessions/1/terminate-external': {},
            'https://data.blazemeter.com/submit.php?session_id=1&signature=None&test_id=1&user_id=1&pq=0&target=labels_bulk&update=1': {},
        })

        obj = BlazeMeterUploader()
        obj.parameters['project'] = 'Proj name'
        obj.settings['token'] = ''  # public reporting
        obj.settings['browser-open'] = 'none'
        obj.engine = EngineEmul()
        mock.apply(obj._user)
        obj.prepare()

        # inject pre-existing session/master as if startup already happened
        obj._session = Session(obj._user, {'id': 1, 'testId': 1, 'userId': 1})
        obj._master = Master(obj._user, {'id': 1})

        obj.engine.stopping_reason = ValueError('wrong value')
        obj.aggregated_second(random_datapoint(10))
        obj.kpi_buffer[-1][DataPoint.CUMULATIVE][''][KPISet.ERRORS] = [
            {'msg': 'Forbidden', 'cnt': 10, 'type': KPISet.ERRTYPE_ASSERT, 'urls': [], KPISet.RESP_CODES: '111',
             'tag': ""},
            {'msg': 'Allowed', 'cnt': 20, 'type': KPISet.ERRTYPE_ERROR, 'urls': [], KPISet.RESP_CODES: '222'}]
        obj.send_monitoring = False
        obj.post_process()

        # TODO: looks like this whole block of checks is useless
        # check for note appending in _postproc_phase3()
        # padding guarantees the tail slice always has four entries
        reqs = [{'url': '', 'data': ''} for _ in range(4)]  # add template for minimal size
        reqs = (reqs + mock.requests)[-4:]
        self.assertNotIn('api/v4/sessions/1', reqs[0]['url'])
        self.assertNotIn('api/v4/sessions/1', reqs[1]['url'])
        self.assertNotIn('api/v4/masters/1', reqs[2]['url'])
        self.assertNotIn('api/v4/masters/1', reqs[3]['url'])
        if reqs[1]['data']:
            self.assertNotIn('ValueError: wrong value', reqs[1]['data'])
        if reqs[3]['data']:
            self.assertNotIn('ValueError: wrong value', reqs[3]['data'])
    def test_check(self):
        """Full lifecycle against the emulated client: prepare/startup, two
        check() cycles, a server-initiated stop (statusCode 140 / ENDED in the
        "second push" result triggers KeyboardInterrupt), then shutdown and
        post-processing which uploads artifacts."""
        client = BlazeMeterClientEmul(logging.getLogger(''))
        client.results.append({"marker": "ping", 'result': {}})
        client.results.append({"marker": "tests", 'result': {}})
        client.results.append({"marker": "test-create", 'result': {'id': 'unittest1'}})
        client.results.append(
            {"marker": "sess-start", 'result': {'session': {'id': 'sess1', 'userId': 1}, 'signature': ''}})
        client.results.append({"marker": "first push", 'result': {'session': {}}})
        # client.results.append(None)  # first check error stats
        client.results.append(
            {"marker": "second push", 'result': {'session': {"statusCode": 140, 'status': 'ENDED'}}})
        # client.results.append(None)  # second check error stats
        client.results.append({"marker": "post-proc push", 'result': {'session': {}}})
        client.results.append({"marker": "upload1", "result": True})  # post-proc error stats
        client.results.append({"marker": "terminate", 'result': {'session': {}}})

        obj = BlazeMeterUploader()
        obj.settings['token'] = '123'
        obj.settings['browser-open'] = 'none'
        obj.engine = EngineEmul()
        # Fix: join directory and file name with os.path.join() -- plain string
        # concatenation omits the path separator, dropping the copy next to
        # (not inside) the artifacts dir (other test_check variant does this right).
        shutil.copy(__file__, os.path.join(obj.engine.artifacts_dir, os.path.basename(__file__)))
        obj.client = client
        obj.prepare()
        obj.startup()
        for x in range(0, 31):
            obj.aggregated_second(random_datapoint(x))
        obj.check()
        for x in range(32, 65):
            obj.aggregated_second(random_datapoint(x))
        # second check() hits the ENDED status and must interrupt the test run
        self.assertRaises(KeyboardInterrupt, obj.check)
        obj.aggregated_second(random_datapoint(10))
        obj.shutdown()
        obj.post_process()
Example #33
0
    def test_some_errors(self):
        """Drive the full uploader lifecycle through the emulated client and
        verify that every queued response (project creation, session start,
        error-note pushes, file upload) gets consumed."""
        emul = BlazeMeterClientEmul(logging.getLogger(''))
        now = time.time  # shorthand for the timestamp fields below
        emul.results.append({"marker": "ping", 'result': {}})
        emul.results.append({"marker": "projects", 'result': []})
        emul.results.append({"marker": "project-create", 'result': {
            "id": now(), "name": "boo", "userId": now(),
            "description": None, "created": now(), "updated": now(),
            "organizationId": None,
        }})
        emul.results.append({"marker": "tests", 'result': {}})
        emul.results.append({"marker": "test-create", 'result': {'id': 'unittest1'}})
        emul.results.append({"marker": "sess-start", "result": {
            'session': {'id': 'sess1', 'userId': 1},
            'master': {'id': 'master1', 'userId': 1},
            'signature': '',
        }})
        emul.results.append({"marker": "post-proc push", 'result': {'session': {}}})
        emul.results.append({"marker": "upload1", "result": True})  # post-proc error stats
        emul.results.append({"marker": "terminate", 'result': {'session': {}}})
        emul.results.append({"marker": "terminate2", 'result': {'session': {}}})
        emul.results.append({"marker": "sess-e", "result": {'session': {'id': 'sess1', 'note': 'n'}}})
        emul.results.append({"marker": "sess-e", "result": {'session': {}}})
        emul.results.append({"marker": "sess-e", "result": {'master': {'id': 'sess1', 'note': 'n'}}})
        emul.results.append({"marker": "sess-e", "result": {'master': {}}})
        emul.results.append({"marker": "upload-file", "result": {}})

        obj = BlazeMeterUploader()
        obj.parameters['project'] = 'Proj name'
        obj.settings['token'] = '123'
        obj.settings['browser-open'] = 'none'
        obj.engine = EngineEmul()
        obj.client = emul
        obj.prepare()
        obj.startup()
        obj.engine.stopping_reason = ValueError('wrong value')
        obj.aggregated_second(random_datapoint(10))
        # replace the cumulative errors with a known assertion + error pair
        obj.kpi_buffer[-1][DataPoint.CUMULATIVE][''][KPISet.ERRORS] = [
            {'msg': 'Forbidden', 'cnt': 7373, 'type': KPISet.ERRTYPE_ASSERT,
             'urls': [], KPISet.RESP_CODES: '403'},
            {'msg': 'Allowed', 'cnt': 7373, 'type': KPISet.ERRTYPE_ERROR,
             'urls': [], KPISet.RESP_CODES: '403'},
        ]
        obj.post_process()
        # every prepared response must have been consumed by the lifecycle
        self.assertEqual(0, len(emul.results))
Example #34
0
    def test_xml_format_sample_labels(self):
        """Feed a fixed cumulative datapoint into JUnitXMLReporter (with a
        BlazeMeter reporter attached) and verify the structure of the
        generated JUnit XML, including the BlazeMeter report link."""
        obj = JUnitXMLReporter()
        obj.engine = EngineEmul()
        rep = BlazeMeterUploader()
        rep.results_url = "http://report/123"
        obj.engine.reporters.append(rep)
        obj.parameters = BetterDict()

        path_from_config = tempfile.mktemp(suffix='.xml', prefix='junit-xml-sample-labels',
                                           dir=obj.engine.artifacts_dir)
        obj.parameters.merge({"filename": path_from_config})  # data-source: finalstats by default
        obj.prepare()

        def make_errors():
            # same two error entries are shared by '' and 'anotherquery' labels;
            # built fresh on each call to avoid aliasing between KPISets
            return [
                {'msg': 'Forbidden', 'cnt': 7300, 'type': 0,
                 'urls': Counter({'http://192.168.1.1/anotherquery': 7300}), KPISet.RESP_CODES: '403'},
                {'msg': 'Assertion failed: text /smth/ not found', 'cnt': 73, 'type': 1,
                 'urls': Counter({'http://192.168.1.1/anotherquery': 73}), KPISet.RESP_CODES: '200'},
            ]

        def somequery_kpis():
            # identical stats are assigned to two different labels below
            return {
                KPISet.AVG_CONN_TIME: 9.609548856969457e-06,
                KPISet.RESP_TIMES: Counter({
                    0.0: 17219, 0.001: 11246, 0.002: 543, 0.003: 341, 0.004: 121,
                    0.005: 66, 0.006: 36, 0.007: 33, 0.008: 18, 0.009: 12,
                    0.011: 6, 0.01: 5, 0.013: 2, 0.017: 2, 0.012: 2,
                    0.079: 1, 0.016: 1, 0.014: 1, 0.019: 1, 0.04: 1, 0.081: 1}),
                KPISet.ERRORS: [],
                KPISet.STDEV_RESP_TIME: 0.04073402130687656,
                KPISet.AVG_LATENCY: 1.7196034796682178e-06,
                KPISet.RESP_CODES: Counter({'304': 29656, '200': 2}),
                KPISet.PERCENTILES: {'95.0': 0.001, '0.0': 0.0, '99.9': 0.009, '90.0': 0.001,
                                     '100.0': 0.081, '99.0': 0.004, '50.0': 0.0},
                KPISet.SUCCESSES: 29658,
                KPISet.SAMPLE_COUNT: 29658,
                KPISet.CONCURRENCY: 0,
                KPISet.AVG_RESP_TIME: 0.0005164542450603551,
                KPISet.FAILURES: 0,
            }

        datapoint = DataPoint(0, [])
        cumul_data = datapoint[DataPoint.CUMULATIVE]

        # overall ('' label) statistics
        cumul_data[""] = KPISet.from_dict({
            KPISet.AVG_CONN_TIME: 7.890211417203362e-06,
            KPISet.RESP_TIMES: Counter({
                0.0: 32160, 0.001: 24919, 0.002: 1049, 0.003: 630, 0.004: 224,
                0.005: 125, 0.006: 73, 0.007: 46, 0.008: 32, 0.009: 20,
                0.011: 8, 0.01: 8, 0.017: 3, 0.016: 3, 0.014: 3, 0.013: 3,
                0.04: 2, 0.012: 2, 0.079: 1, 0.081: 1, 0.019: 1, 0.015: 1}),
            KPISet.ERRORS: make_errors(),
            KPISet.STDEV_RESP_TIME: 0.04947974228872108,
            KPISet.AVG_LATENCY: 0.0002825639815220692,
            KPISet.RESP_CODES: Counter({'304': 29656, '403': 29656, '200': 2}),
            KPISet.PERCENTILES: {'95.0': 0.001, '0.0': 0.0, '99.9': 0.008, '90.0': 0.001,
                                 '100.0': 0.081, '99.0': 0.003, '50.0': 0.0},
            KPISet.SUCCESSES: 29658,
            KPISet.SAMPLE_COUNT: 59314,
            KPISet.CONCURRENCY: 0,
            KPISet.AVG_RESP_TIME: 0.0005440536804127192,
            KPISet.FAILURES: 29656,
        })

        cumul_data["http://192.168.1.1/somequery"] = KPISet.from_dict(somequery_kpis())

        cumul_data["http://192.168.1.1/anotherquery"] = KPISet.from_dict({
            KPISet.AVG_CONN_TIME: 6.1707580253574335e-06,
            KPISet.RESP_TIMES: Counter({
                0.0: 14941, 0.001: 13673, 0.002: 506, 0.003: 289, 0.004: 103,
                0.005: 59, 0.006: 37, 0.008: 14, 0.007: 13, 0.009: 8,
                0.01: 3, 0.011: 2, 0.016: 2, 0.014: 2, 0.017: 1, 0.013: 1,
                0.015: 1, 0.04: 1}),
            KPISet.ERRORS: make_errors(),
            KPISet.STDEV_RESP_TIME: 0.032465137860758844,
            KPISet.AVG_LATENCY: 0.0005634272997032645,
            KPISet.RESP_CODES: Counter({'403': 29656}),
            KPISet.PERCENTILES: {'95.0': 0.001, '0.0': 0.0, '99.9': 0.008, '90.0': 0.001,
                                 '100.0': 0.04, '99.0': 0.003, '50.0': 0.0},
            KPISet.SUCCESSES: 0,
            KPISet.SAMPLE_COUNT: 29656,
            KPISet.CONCURRENCY: 0,
            KPISet.AVG_RESP_TIME: 0.0005716549770704078,
            KPISet.FAILURES: 29656,
        })

        cumul_data["http://192.168.100.100/somequery"] = KPISet.from_dict(somequery_kpis())

        obj.aggregated_second(datapoint)
        obj.post_process()

        with open(obj.report_file_path, 'rb') as fds:
            f_contents = fds.read()

        logging.info("File: %s", f_contents)
        xml_tree = etree.fromstring(f_contents)
        self.assertEqual('testsuites', xml_tree.tag)
        suite = xml_tree.getchildren()[0]
        self.assertEqual('testsuite', suite.tag)
        self.assertListEqual(['sample_labels', "bzt"], suite.values())
        test_cases = suite.getchildren()
        self.assertEqual(3, len(test_cases))
        self.assertEqual('testcase', test_cases[0].tag)
        # children order: system-out first, then error, then failure
        self.assertEqual('error', test_cases[0].getchildren()[1].tag)
        self.assertEqual('failure', test_cases[0].getchildren()[2].tag)
        self.assertEqual('system-out', test_cases[0].getchildren()[0].tag)
        self.assertIn('BlazeMeter report link: http://report/123',
                      test_cases[0].getchildren()[0].text)
Example #35
0
    def test_some_errors(self):
        """End-to-end error reporting through BZMock: assertions, errors and
        failed embedded resources must land in the labels payload, and the
        engine's stopping reason must be appended to session/master notes."""
        mock = BZMock()
        mock.mock_get.update({
            'https://a.blazemeter.com/api/v4/tests?projectId=1&name=Taurus+Test': {"result": []},
            'https://a.blazemeter.com/api/v4/projects?workspaceId=1&name=Proj+name': {"result": []},
            'https://a.blazemeter.com/api/v4/sessions/1': {"result": {'id': 1, "note": "somenote"}},
            'https://a.blazemeter.com/api/v4/masters/1': {"result": {'id': 1, "note": "somenote"}},
        })
        mock.mock_post.update({
            'https://a.blazemeter.com/api/v4/projects': {"result": {'id': 1}},
            'https://a.blazemeter.com/api/v4/tests': {"result": {'id': 1}},
            'https://a.blazemeter.com/api/v4/tests/1/start-external': {"result": {
                "session": {'id': 1, "testId": 1, "userId": 1},
                "master": {'id': 1},
                "signature": "sign",
            }},
            'https://data.blazemeter.com/api/v4/image/1/files?signature=sign': {"result": True},
            'https://data.blazemeter.com/submit.php?session_id=1&signature=sign&test_id=1&user_id=1&pq=0&target=labels_bulk&update=1': {},
            'https://a.blazemeter.com/api/v4/sessions/1/stop': {"result": True},
            'https://data.blazemeter.com/submit.php?session_id=1&signature=sign&test_id=1&user_id=1&pq=0&target=engine_health&update=1': {
                'result': {'session': {}}},
        })
        mock.mock_patch.update({
            'https://a.blazemeter.com/api/v4/sessions/1': {"result": {"id": 1, "note": "somenote"}},
            'https://a.blazemeter.com/api/v4/masters/1': {"result": {"id": 1, "note": "somenote"}},
        })

        obj = BlazeMeterUploader()
        mock.apply(obj._user)
        obj.parameters['project'] = 'Proj name'
        obj.settings['token'] = '123'
        obj.settings['browser-open'] = 'none'
        obj.engine = EngineEmul()
        obj.prepare()
        obj.startup()
        obj.engine.stopping_reason = ValueError('wrong value')
        obj.aggregated_second(random_datapoint(10))
        # one assertion-type, one error-type, one subsample-type error
        obj.kpi_buffer[-1][DataPoint.CUMULATIVE][''][KPISet.ERRORS] = [
            {'msg': 'Forbidden', 'cnt': 10, 'type': KPISet.ERRTYPE_ASSERT,
             'urls': [], KPISet.RESP_CODES: '111', 'tag': None},
            {'msg': 'Allowed', 'cnt': 20, 'type': KPISet.ERRTYPE_ERROR,
             'urls': [], KPISet.RESP_CODES: '222'},
            {'msg': 'Not Found', 'cnt': 10, 'type': KPISet.ERRTYPE_SUBSAMPLE,
             'urls': {'/non': '404'}, KPISet.RESP_CODES: '404', 'tag': None},
        ]
        obj.post_process()
        obj.log.info("Requests: %s", mock.requests)

        # check for note appending in _postproc_phase3()
        final_reqs = mock.requests[-4:]
        self.assertIn('api/v4/sessions/1', final_reqs[0]['url'])
        self.assertIn('api/v4/sessions/1', final_reqs[1]['url'])
        self.assertIn('api/v4/masters/1', final_reqs[2]['url'])
        self.assertIn('api/v4/masters/1', final_reqs[3]['url'])
        self.assertIn('ValueError: wrong value', str(final_reqs[1]['data']))
        self.assertIn('ValueError: wrong value', str(final_reqs[3]['data']))

        labels = mock.requests[8]['data']
        labels = labels if isinstance(labels, str) else labels.decode("utf-8")
        obj.log.info("Labels: %s", labels)
        data = json.loads(str(labels))
        self.assertEqual(1, len(data['labels']))
        total_item = data['labels'][0]
        self.assertEqual('ALL', total_item['name'])
        self.assertEqual(total_item['assertions'],
                         [{'failureMessage': 'Forbidden', 'failures': 10, 'name': 'All Assertions'}])
        self.assertEqual(total_item['errors'], [{'m': 'Allowed', 'count': 20, 'rc': '222'}])
        self.assertEqual(total_item['failedEmbeddedResources'],
                         [{'url': '/non', 'count': 10, 'rc': '404', 'rm': 'Not Found'}])
    def test_public_report(self):
        """Public-report mode: sharing the master must log the public link at
        INFO level, and every queued emulator response must be consumed."""
        queued = [
            {"marker": "ping", 'result': {}},
            {"marker": "tests", 'result': {}},
            {"marker": "test-create", 'result': {'id': 'unittest1'}},
            {"marker": "sess-start", "result": {
                'session': {'id': 'sess1', 'userId': 1},
                'master': {'id': 'master1', 'userId': 1},
                'signature': ''}},
            {"marker": "share-report", 'result': {'publicToken': 'publicToken'}},
            {"marker": "first push", 'result': {'session': {}}},
            {"marker": "post-proc push", 'result': {'session': {}}},
            {"marker": "artifacts push", 'result': True},
            {"marker": "logs push", 'result': True},
            {"marker": "terminate", 'result': {'session': {}}},
        ]
        client = BlazeMeterClientEmul(logging.getLogger(''))
        client.timeout = 1
        client.results.extend(queued)

        log_recorder = RecordingHandler()

        obj = BlazeMeterUploader()
        obj.settings['token'] = '123'
        obj.settings['browser-open'] = 'none'
        obj.settings['public-report'] = True
        obj.settings['send-monitoring'] = False
        obj.engine = EngineEmul()
        obj.client = client
        obj.log.addHandler(log_recorder)
        obj.prepare()
        obj.startup()
        obj.aggregated_second(random_datapoint(10))
        obj.check()
        obj.shutdown()
        obj.post_process()
        self.assertEqual(0, len(client.results))

        # the shared link must have been written to the INFO log
        log_buff = log_recorder.info_buff.getvalue()
        log_line = "Public report link: https://a.blazemeter.com/app/?public-token=publicToken#/masters/master1/summary"
        self.assertIn(log_line, log_buff)
Example #37
0
 def test_direct_feeding(self):
     """Session id and signature supplied directly: the uploader must attach
     to the existing session and finish online cleanly (9 requests total),
     even with a stopping reason set before post_process()."""
     obj = BlazeMeterUploader()
     self.sniff_log(obj.log)
     obj.engine = EngineEmul()
     mock = BZMock(obj._user)
     submit = 'https://data.blazemeter.com/submit.php?session_id=direct&signature=sign&test_id=None&user_id=None'
     mock.mock_post.update({
         submit + '&pq=0&target=labels_bulk&update=1': {},
         'https://data.blazemeter.com/api/v4/image/direct/files?signature=sign': {"result": True},
         'https://a.blazemeter.com/api/v4/sessions/direct/stop': {"result": True},
         submit + '&pq=0&target=engine_health&update=1': {'result': {'session': {}}},
     })
     mock.mock_get.update({
         'https://a.blazemeter.com/api/v4/sessions/direct': {"result": {}},
     })
     mock.mock_patch.update({
         'https://a.blazemeter.com/api/v4/sessions/direct': {"result": {}},
     })
     obj.parameters['session-id'] = 'direct'
     obj.parameters['signature'] = 'sign'
     obj.settings['token'] = 'FakeToken'
     obj.prepare()
     obj.startup()
     obj.check()
     obj.shutdown()
     obj.engine.stopping_reason = TaurusException("To cover")
     obj.post_process()
     # no warnings about failing to finish online; session id preserved
     self.assertNotIn("Failed to finish online", self.log_recorder.warn_buff.getvalue())
     self.assertEquals('direct', obj._session['id'])
     self.assertEqual(9, len(mock.requests), "Requests were: %s" % mock.requests)
Example #38
0
 def test_direct_feeding(self):
     """Direct-feeding scenario: with 'session-id' and 'signature' parameters
     given, the uploader attaches to an existing session instead of creating
     one, and finishes cleanly with exactly 9 mocked requests."""
     obj = BlazeMeterUploader()
     self.sniff_log(obj.log)
     obj.engine = EngineEmul()
     mock = BZMock(obj._user)
     mock.mock_post.update({
         'https://data.blazemeter.com/submit.php?session_id=direct&signature=sign&test_id=None&user_id=None&pq=0&target=labels_bulk&update=1': {},
         'https://data.blazemeter.com/api/v4/image/direct/files?signature=sign': {"result": True},
         'https://a.blazemeter.com/api/v4/sessions/direct/stop': {"result": True},
         'https://data.blazemeter.com/submit.php?session_id=direct&signature=sign&test_id=None&user_id=None&pq=0&target=engine_health&update=1': {
             'result': {'session': {}}}
     })
     mock.mock_get.update({
         'https://a.blazemeter.com/api/v4/sessions/direct': {"result": {}}
     })
     mock.mock_patch.update({
         'https://a.blazemeter.com/api/v4/sessions/direct': {"result": {}}
     })
     obj.parameters['session-id'] = 'direct'
     obj.parameters['signature'] = 'sign'
     obj.settings['token'] = 'FakeToken'
     obj.prepare()
     obj.startup()
     obj.check()
     obj.shutdown()
     # stopping reason set after shutdown still must not break post_process()
     obj.engine.stopping_reason = TaurusException("To cover")
     obj.post_process()
     self.assertNotIn("Failed to finish online", self.log_recorder.warn_buff.getvalue())
     self.assertEquals('direct', obj._session['id'])
     self.assertEqual(9, len(mock.requests), "Requests were: %s" % mock.requests)
Example #39
0
    def test_multiple_reporters_one_monitoring(self):
        """Two uploader instances may consume the same monitoring stream
        independently without interfering with each other."""
        uploaders = []
        for _ in range(2):
            uploader = BlazeMeterUploader()
            uploader.engine = EngineEmul()
            BZMock(uploader._user)
            uploaders.append(uploader)

        for uploader in uploaders:
            uploader.prepare()

        for i in range(10):
            sample = [{"ts": i, "source": "local", "cpu": float(i) / 1000 * 100,
                       "mem": 2, "bytes-recv": 100, "other": 0}]
            # feed the identical sample to both reporters
            for uploader in uploaders:
                uploader.monitoring_data(sample)
Example #40
0
 def test_anonymous_feeding(self):
     """Without a token the uploader creates an anonymous session via the
     /sessions endpoint and pushes data through it; six requests expected."""
     obj = BlazeMeterUploader()
     obj.engine = EngineEmul()
     obj.browser_open = False
     mock = BZMock(obj._user)
     anon_session = {
         "signature": "sign",
         "publicTokenUrl": "publicUrl",
         "session": {"id": 1, "testId": 1, "userId": 1},
         "master": {"id": 1},
     }
     mock.mock_post.update({
         'https://a.blazemeter.com/api/v4/sessions': {"result": anon_session},
         'https://data.blazemeter.com/submit.php?session_id=1&signature=sign&test_id=1&user_id=1&pq=0&target=labels_bulk&update=1': {},
         'https://data.blazemeter.com/api/v4/image/1/files?signature=sign': {"result": True},
         'https://data.blazemeter.com/submit.php?session_id=1&signature=sign&test_id=1&user_id=1&pq=0&target=engine_health&update=1': {
             'result': {'session': {}}},
     })
     obj.prepare()
     obj.startup()
     obj.check()
     obj.shutdown()
     obj.post_process()
     self.assertEquals(1, obj._session['id'])
     self.assertEqual(6, len(mock.requests), "Requests were: %s" % mock.requests)
Example #41
0
    def test_check(self):
        """BZMock-based lifecycle test: project/test creation, monitoring push
        failure tolerance (first image-files call raises IOError), a
        server-initiated ENDED status that raises KeyboardInterrupt, and a
        final request-count check after post-processing."""
        mock = BZMock()
        mock.mock_get.update({
            'https://a.blazemeter.com/api/v4/tests?workspaceId=1&name=Taurus+Test': {"result": []},
            'https://a.blazemeter.com/api/v4/tests?projectId=1&name=Taurus+Test': {"result": []},
            'https://a.blazemeter.com/api/v4/projects?workspaceId=1&name=Proj+name': {"result": []},
        })
        mock.mock_post.update({
            'https://a.blazemeter.com/api/v4/projects': {"result": {
                "id": 1,
                "name": "boo",
                "userId": 2,
                "description": None,
                "created": time.time(),
                "updated": time.time(),
                "organizationId": None
            }},
            'https://a.blazemeter.com/api/v4/tests': {"result": {'id': 1}},
            'https://a.blazemeter.com/api/v4/tests/1/start-external': {"result": {
                'session': {'id': 1, 'userId': 1, 'testId': 1},
                'master': {'id': 1, 'userId': 1},
                'signature': 'sign'}},
            # second labels_bulk response carries ENDED -> KeyboardInterrupt on check()
            'https://data.blazemeter.com/submit.php?session_id=1&signature=sign&test_id=1&user_id=1&pq=0&target=labels_bulk&update=1': [
                {},
                {"result": {'session': {"statusCode": 140, 'status': 'ENDED'}}},
                {},
            ],
            # first monitoring push fails deliberately; uploader must tolerate it
            'https://data.blazemeter.com/api/v4/image/1/files?signature=sign': [
                IOError("monitoring push expected fail"),
                {"result": True},
                {"result": True},
                {"result": True},
                {"result": True},
                {"result": True},
                {"result": True},
                {"result": True},
                {"result": True},
            ],
            'https://a.blazemeter.com/api/v4/sessions/1/stop': {},
            'https://data.blazemeter.com/submit.php?session_id=1&signature=sign&test_id=1&user_id=1&pq=0&target=engine_health&update=1':
                {"result": {'session': {}}}
        })

        obj = BlazeMeterUploader()
        obj.parameters['project'] = 'Proj name'
        obj.settings['token'] = '123'
        obj.settings['browser-open'] = 'none'
        obj.engine = EngineEmul()
        # copy this test file into artifacts so post_process() has something to upload
        shutil.copy(__file__, os.path.join(obj.engine.artifacts_dir, os.path.basename(__file__)))
        mock.apply(obj._user)
        obj._user.timeout = 0.1
        obj.prepare()
        obj.startup()
        for x in range(0, 31):
            obj.aggregated_second(random_datapoint(x))
        mon = [{"ts": 1, "source": "local", "cpu": 1, "mem": 2, "bytes-recv": 100, "other": 0}]
        obj.monitoring_data(mon)
        obj.check()
        for x in range(32, 65):
            obj.aggregated_second(random_datapoint(x))
        # force the dispatch interval to elapse so check() actually pushes
        obj.last_dispatch = time.time() - 2 * obj.send_interval
        self.assertRaises(KeyboardInterrupt, obj.check)
        obj.aggregated_second(random_datapoint(10))
        obj.shutdown()
        log_file = obj.engine.create_artifact('log', '.tmp')
        handler = logging.FileHandler(log_file)
        obj.engine.log.parent.addHandler(handler)
        obj.engine.config.get('modules').get('shellexec').get('env')['TAURUS_INDEX_ALL'] = 1
        obj.post_process()
        # total request count is sensitive to the exact call sequence above
        self.assertEqual(20, len(mock.requests))
        obj.engine.log.parent.removeHandler(handler)
Example #42
0
 def test_proxy(self):
     """Proxy settings handling: prepare() with proxy config must install a
     urllib opener; a second prepare() without proxy must leave it alone."""
     client = BlazeMeterClientEmul(logging.getLogger(''))
     client.results.append({"marker": "ping", 'result': {}})
     client.results.append({"marker": "tests", 'result': {}})
     client.results.append({
         "marker": "test-create",
         'result': {
             'id': 'unittest1'
         }
     })
     obj = BlazeMeterUploader()
     obj.settings['token'] = '123'
     obj.settings['browser-open'] = 'none'
     obj.engine = EngineEmul()
     obj.engine.config.merge({
         "settings": {
             'proxy': {
                 "username": "******",
                 "password": "******",
                 "address": "http://127.0.0.1:8080"
             }
         }
     })
     obj.client = client
     # urllib2 on py2, urllib.request on py3; _opener is the module's installed opener
     try:
         from urllib2 import _opener
     except ImportError:
         from urllib.request import _opener
     old_opener = _opener
     obj.prepare()
     try:
         from urllib2 import _opener
     except ImportError:
         from urllib.request import _opener
     new_opener = _opener
     self.assertNotEqual(old_opener, new_opener)  # test if opener installed
     obj = BlazeMeterUploader()
     obj.settings['token'] = '123'
     obj.settings['browser-open'] = 'none'
     obj.engine = EngineEmul()
     client.results.append({"marker": "ping", 'result': {}})
     client.results.append({"marker": "tests", 'result': {}})
     client.results.append({
         "marker": "test-create",
         'result': {
             'id': 'unittest1'
         }
     })
     obj.client = client
     try:
         from urllib2 import _opener
     except ImportError:
         from urllib.request import _opener
     # NOTE(review): this assigns a LOCAL _opener, so the assertIsNone below
     # checks the local name, not the urllib module's opener -- the assertion
     # is trivially true and does not verify prepare()'s behavior; confirm intent.
     _opener = None
     obj.prepare()
     self.assertIsNone(_opener)
 def test_proxy(self):
     """Compact variant of the proxy test: prepare() with proxy settings
     installs a urllib opener (old vs new opener differ)."""
     client = BlazeMeterClientEmul(logging.getLogger(''))
     client.results.append({"marker": "ping", 'result': {}})
     client.results.append({"marker": "tests", 'result': {}})
     client.results.append({"marker": "test-create", 'result': {'id': 'unittest1'}})
     obj = BlazeMeterUploader()
     obj.settings['token'] = '123'
     obj.settings['browser-open'] = 'none'
     obj.engine = EngineEmul()
     obj.engine.config.merge(
         {"settings": {'proxy': {"username": "******", "password": "******", "address": "http://127.0.0.1:8080"}}})
     obj.client = client
     # urllib2 on py2, urllib.request on py3
     try:
         from urllib2 import _opener
     except ImportError:
         from urllib.request import _opener
     old_opener = _opener
     obj.prepare()
     try:
         from urllib2 import _opener
     except ImportError:
         from urllib.request import _opener
     new_opener = _opener
     self.assertNotEqual(old_opener, new_opener)  # test if opener installed
     obj = BlazeMeterUploader()
     obj.settings['token'] = '123'
     obj.settings['browser-open'] = 'none'
     obj.engine = EngineEmul()
     client.results.append({"marker": "ping", 'result': {}})
     client.results.append({"marker": "tests", 'result': {}})
     client.results.append({"marker": "test-create", 'result': {'id': 'unittest1'}})
     obj.client = client
     try:
         from urllib2 import _opener
     except ImportError:
         from urllib.request import _opener
     # NOTE(review): local rebinding -- assertIsNone checks this local name,
     # not the module-level opener; the assertion is vacuous as written.
     _opener = None
     obj.prepare()
     self.assertIsNone(_opener)
# Example #44
# 0
    def test_some_errors(self):
        """Full prepare/startup/post_process cycle that injects assertion, error
        and sub-sample failures, then verifies both the note-patching requests
        and the labels payload pushed to the server."""
        mock = BZMock()

        mock.mock_get.update({
            'https://a.blazemeter.com/api/v4/tests?projectId=1&name=Taurus+Test': {"result": []},
            'https://a.blazemeter.com/api/v4/projects?workspaceId=1&name=Proj+name': {"result": []},
            'https://a.blazemeter.com/api/v4/sessions/1': {"result": {'id': 1, "note": "somenote"}},
            'https://a.blazemeter.com/api/v4/masters/1': {"result": {'id': 1, "note": "somenote"}},
        })

        start_external_response = {"result": {
            "session": {'id': 1, "testId": 1, "userId": 1},
            "master": {'id': 1},
            "signature": "sign"
        }}
        mock.mock_post.update({
            'https://a.blazemeter.com/api/v4/projects': {"result": {'id': 1}},
            'https://a.blazemeter.com/api/v4/tests': {"result": {'id': 1}},
            'https://a.blazemeter.com/api/v4/tests/1/start-external': start_external_response,
            'https://data.blazemeter.com/api/v4/image/1/files?signature=sign': {"result": True},
            'https://data.blazemeter.com/submit.php?session_id=1&signature=sign&test_id=1&user_id=1&pq=0&target=labels_bulk&update=1': {},
            'https://a.blazemeter.com/api/v4/sessions/1/stop': {"result": True},
            'https://data.blazemeter.com/submit.php?session_id=1&signature=sign&test_id=1&user_id=1&pq=0&target=engine_health&update=1': {
                'result': {'session': {}}}
        })

        mock.mock_patch.update({
            'https://a.blazemeter.com/api/v4/sessions/1': {"result": {"id": 1, "note": "somenote"}},
            'https://a.blazemeter.com/api/v4/masters/1': {"result": {"id": 1, "note": "somenote"}},
        })

        obj = BlazeMeterUploader()
        mock.apply(obj._user)
        obj.parameters['project'] = 'Proj name'
        obj.settings['token'] = '123'
        obj.settings['browser-open'] = 'none'
        obj.engine = EngineEmul()
        obj.prepare()
        obj.startup()
        obj.engine.stopping_reason = ValueError('wrong value')
        obj.aggregated_second(random_datapoint(10))

        # one of each error kind: assertion, plain error, failed sub-sample
        injected_errors = [
            {'msg': 'Forbidden', 'cnt': 10, 'type': KPISet.ERRTYPE_ASSERT, 'urls': [], KPISet.RESP_CODES: '111',
             'tag': None},
            {'msg': 'Allowed', 'cnt': 20, 'type': KPISet.ERRTYPE_ERROR, 'urls': [], KPISet.RESP_CODES: '222'},
            {'msg': 'Not Found', 'cnt': 10, 'type': KPISet.ERRTYPE_SUBSAMPLE, 'urls': {'/non': '404'},
             KPISet.RESP_CODES: '404', 'tag': None},
        ]
        obj.kpi_buffer[-1][DataPoint.CUMULATIVE][''][KPISet.ERRORS] = injected_errors
        obj.post_process()
        obj.log.info("Requests: %s", mock.requests)

        # check for note appending in _postproc_phase3()
        sess_req, sess_note_req, master_req, master_note_req = mock.requests[-4:]
        self.assertIn('api/v4/sessions/1', sess_req['url'])
        self.assertIn('api/v4/sessions/1', sess_note_req['url'])
        self.assertIn('api/v4/masters/1', master_req['url'])
        self.assertIn('api/v4/masters/1', master_note_req['url'])
        self.assertIn('ValueError: wrong value', str(sess_note_req['data']))
        self.assertIn('ValueError: wrong value', str(master_note_req['data']))

        raw_labels = mock.requests[8]['data']
        if not isinstance(raw_labels, str):
            raw_labels = raw_labels.decode("utf-8")
        obj.log.info("Labels: %s", raw_labels)
        payload = json.loads(str(raw_labels))
        self.assertEqual(1, len(payload['labels']))
        total_item = payload['labels'][0]
        self.assertEqual('ALL', total_item['name'])
        self.assertEqual(total_item['assertions'],
                         [{'failureMessage': 'Forbidden', 'failures': 10, 'name': 'All Assertions'}])
        self.assertEqual(total_item['errors'], [{'m': 'Allowed', 'count': 20, 'rc': '222'}])
        self.assertEqual(total_item['failedEmbeddedResources'],
                         [{'url': '/non', 'count': 10, 'rc': '404', 'rm': 'Not Found'}])
# Example #45
# 0
    def test_xml_format_passfail(self):
        """JUnit XML reporter in pass-fail mode: every criterion becomes a
        testcase, triggered ones carry an <error> child, and the cloud report
        link is written into system-out.

        Fixes: fc2_* criteria were constructed with owner ``pass_fail1`` (a
        copy-paste slip — they belong to ``pass_fail2``), and the deprecated
        ``Element.getchildren()`` (removed in Python 3.9's ElementTree) is
        replaced with plain element indexing/list().
        """
        obj = JUnitXMLReporter()
        obj.engine = EngineEmul()
        obj.parameters = BetterDict()
        obj.engine.provisioning = CloudProvisioning()
        obj.engine.provisioning.results_url = "http://test/report/123"

        pass_fail1 = PassFailStatus()

        fc1_triggered = DataCriterion(
            {
                'stop': True,
                'label': 'Sample 1 Triggered',
                'fail': True,
                'timeframe': -1,
                'threshold': '150ms',
                'condition': '<',
                'subject': 'avg-rt'
            }, pass_fail1)

        fc1_not_triggered = DataCriterion(
            {
                'stop': True,
                'label': 'Sample 1 Not Triggered',
                'fail': True,
                'timeframe': -1,
                'threshold': '300ms',
                'condition': '>',
                'subject': 'avg-rt'
            }, pass_fail1)

        pass_fail2 = PassFailStatus()

        # owner fixed: these belong to pass_fail2, not pass_fail1
        fc2_triggered = DataCriterion(
            {
                'stop': True,
                'label': 'Sample 2 Triggered',
                'fail': True,
                'timeframe': -1,
                'threshold': '150ms',
                'condition': '<=',
                'subject': 'avg-rt'
            }, pass_fail2)

        fc2_not_triggered = DataCriterion(
            {
                'stop': True,
                'label': 'Sample 2 Not Triggered',
                'fail': True,
                'timeframe': -1,
                'threshold': '300ms',
                'condition': '=',
                'subject': 'avg-rt'
            }, pass_fail2)

        pass_fail1.criteria.append(fc1_triggered)
        pass_fail1.criteria.append(fc1_not_triggered)
        pass_fail2.criteria.append(fc2_triggered)
        pass_fail2.criteria.append(fc2_not_triggered)

        fc1_triggered.is_triggered = True
        fc2_triggered.is_triggered = True

        obj.engine.reporters.append(pass_fail1)
        obj.engine.reporters.append(pass_fail2)
        obj.engine.reporters.append(BlazeMeterUploader())

        # mktemp is fine here: the path lives inside the per-test artifacts dir
        path_from_config = tempfile.mktemp(suffix='.xml',
                                           prefix='junit-xml_passfail',
                                           dir=obj.engine.artifacts_dir)

        obj.parameters.merge({
            "filename": path_from_config,
            "data-source": "pass-fail"
        })
        obj.prepare()
        obj.last_second = DataPoint(0)
        obj.post_process()

        with open(obj.report_file_path, 'rb') as fds:
            f_contents = fds.read()

        logging.info("File: %s", f_contents)
        xml_tree = etree.fromstring(f_contents)
        self.assertEqual('testsuites', xml_tree.tag)
        suite = xml_tree[0]
        self.assertEqual('testsuite', suite.tag)
        test_cases = list(suite)  # getchildren() removed in py3.9
        self.assertEqual(4, len(test_cases))
        self.assertEqual('testcase', test_cases[0].tag)
        self.assertEqual('error', test_cases[0][1].tag)
        self.assertEqual('error', test_cases[2][1].tag)

        sys_out = test_cases[0][0]
        self.assertEqual('system-out', sys_out.tag)
        self.assertIn('BlazeMeter report link: http://test/report/123',
                      sys_out.text)
    def test_some_errors(self):
        """Legacy client-emulator variant: drive prepare/startup/post_process
        with canned responses and verify the note-patching requests and the
        error-stats payload sent to the server."""
        client = BlazeMeterClientEmul(logging.getLogger(''))

        # canned server responses, consumed strictly in order
        canned_responses = [
            {"marker": "ping", 'result': {}},
            {"marker": "projects", 'result': []},
            {"marker": "project-create", 'result': {
                "id": time.time(),
                "name": "boo",
                "userId": time.time(),
                "description": None,
                "created": time.time(),
                "updated": time.time(),
                "organizationId": None
            }},
            {"marker": "tests", 'result': {}},
            {"marker": "test-create", 'result': {'id': 'unittest1'}},
            {"marker": "sess-start",
             "result": {
                 'session': {'id': 'sess1', 'userId': 1},
                 'master': {'id': 'master1', 'userId': 1},
                 'signature': ''}},
            {"marker": "post-proc push", 'result': {'session': {}}},
            {"marker": "upload1", "result": True},  # post-proc error stats
            {"marker": "terminate", 'result': {'session': {}}},
            {"marker": "terminate2", 'result': {'session': {}}},
            {"marker": "sess-e", "result": {'session': {'id': 'sess1', 'note': 'n'}}},
            {"marker": "sess-e", "result": {'session': {}}},
            {"marker": "sess-e", "result": {'master': {'id': 'sess1', 'note': 'n'}}},
            {"marker": "sess-e", "result": {'master': {}}},
            {"marker": "upload-file", "result": {}},
        ]
        client.results.extend(canned_responses)

        obj = BlazeMeterUploader()
        obj.parameters['project'] = 'Proj name'
        obj.settings['token'] = '123'
        obj.settings['browser-open'] = 'none'
        obj.engine = EngineEmul()
        obj.client = client
        obj.prepare()
        obj.startup()
        obj.engine.stopping_reason = ValueError('wrong value')
        obj.aggregated_second(random_datapoint(10))
        injected_errors = [
            {'msg': 'Forbidden', 'cnt': 10, 'type': KPISet.ERRTYPE_ASSERT, 'urls': [], KPISet.RESP_CODES: '111'},
            {'msg': 'Allowed', 'cnt': 20, 'type': KPISet.ERRTYPE_ERROR, 'urls': [], KPISet.RESP_CODES: '222'},
        ]
        obj.kpi_buffer[-1][DataPoint.CUMULATIVE][''][KPISet.ERRORS] = injected_errors
        obj.post_process()

        # check for note appending in _postproc_phase3()
        tail = obj.client.requests[-4:]
        expected_fragments = ['api/latest/sessions/sess1', 'api/latest/sessions/sess1',
                              'api/latest/masters/master1', 'api/latest/masters/master1']
        for request, fragment in zip(tail, expected_fragments):
            self.assertIn(fragment, request['url'])
        self.assertIn('ValueError: wrong value', tail[1]['data'])
        self.assertIn('ValueError: wrong value', tail[3]['data'])

        self.assertEqual(0, len(client.results))  # every canned response consumed
        payload = json.loads(client.requests[6]['data'])
        self.assertEqual(1, len(payload['labels']))
        total_item = payload['labels'][0]
        self.assertEqual('ALL', total_item['name'])
        self.assertEqual(total_item['assertions'], [{
            'failureMessage': 'Forbidden',
            'failures': 10,
            'name': 'All Assertions'}])
        self.assertEqual(total_item['errors'], [{
            'm': 'Allowed',
            'count': 20,
            'rc': '222'}])