def test_json(self):
        obj = self.obj

        mock = MockReader()
        mock.buffer_scale_idx = '100.0'
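        # data format: t_stamp, label, conc, r_time, con_time, latency, r_code, error, trname, byte_count
        # buffer_scale_idx apparently selects the response-time percentile that drives
        # dynamic buffer sizing; '100.0' ties it to the maximum response time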
        mock.data.append((1, "", 1, r(), r(), r(), 200, None, '', 0))
        mock.data.append((2, "", 1, r(), r(), r(), 200, None, '', 0))
        mock.data.append((2, "", 1, r(), r(), r(), 200, None, '', 0))
        mock.data.append((3, "", 1, r(), r(), r(), 200, None, '', 0))
        mock.data.append((3, "", 1, r(), r(), r(), 200, None, '', 0))
        mock.data.append((4, "", 1, r(), r(), r(), 200, None, '', 0))
        mock.data.append((4, "", 1, r(), r(), r(), 200, None, '', 0))

        obj.add_listener(mock)

        for point in mock.datapoints(True):
            pass

        for point in mock.results:
            serialized = json.loads(to_json(point))
            rt_keys = serialized["current"][""]["rt"].keys()
            for key in rt_keys:
                rt = float(key)
                self.assertGreaterEqual(rt, 1.0)
                self.assertLessEqual(rt, 2.0)
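
# The r()/rc()/err() helpers used throughout these examples are not defined in
# this excerpt. Below is a minimal sketch of plausible stand-ins, constrained by
# the assertions above (test_json expects every "rt" key to fall between 1.0 and
# 2.0, and r(1000) in test_speed must stay in that range too); the originals may
# differ. random_string/random_url/choice, used by the reader factories further
# down, come from the random module and test utilities and are omitted here too.
from random import random

def r(mult=5):
    # hypothetical: a response time between 1.0 and 2.0 seconds
    return 1 + int(mult * random()) / 1000.0

def rc():
    # hypothetical: a random HTTP-style response code, '100'..'400'
    return "%s00" % (int(4 * random()) + 1)

def err():
    # hypothetical: roughly one sample in fifty carries an error message
    return "Some Error" if int(50 * random()) == 0 else None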

    def test_speed(self):
        obj = self.obj

        mock = MockReader()
        mock.buffer_scale_idx = '100.0'
        obj.add_listener(mock)

        res = {}
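        # res maps sample count -> total time spent consuming datapoints for that batch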
        # current measurements show ~25K samples/sec
        for cnt in (10000, 25000, 50000):
            for a in range(0, cnt):
                sample = (cnt, "", 1, r(1000), r(1000), r(1000), rc(), err(), '', 0)
                mock.data.append(sample)
            before = time.time()
            for point in mock.datapoints():
                pass
            after = time.time()
            res[cnt] = after - before
            ROOT_LOGGER.info("Times: %s", res)

            while mock.results:
                point = mock.results.pop(0)
                overall = point[DataPoint.CURRENT]['']
                self.assertTrue(len(overall[KPISet.PERCENTILES]) > 0)

        for point in mock.datapoints(True):
            pass

    def test_extend_data_avg(self):
        self.obj.settings['extend-aggregation'] = True
        reader = MockReader()
        watcher = MockReader()

        reader.buffer_scale_idx = '100.0'
        # data format: t_stamp, label, conc, r_time, con_time, latency, r_code, error, trname, byte_count
        reader.data.append((1, "a", 1, 1, 1, 1, '200', None, '', 1))
        reader.data.append((2, "b", 1, 2, 2, 2, '200', 'OK', '', 2))
        reader.data.append((2, "b", 1, 3, 3, 3, '404', "Not Found", '', 3))
        reader.data.append((2, "c", 1, 4, 4, 4, '200', None, '', 4))
        reader.data.append((3, "d", 1, 5, 5, 5, '200', None, '', 5))
        reader.data.append((5, "b", 1, 6, 6, 6, '200', None, '', 6))
        reader.data.append((5, "c", 1, 7, 7, 7, '200', None, '', 7))

        self.obj.add_underling(reader)
        self.obj.add_listener(watcher)

        self.obj.prepare()
        self.obj.startup()
        self.obj.check()
        self.obj.shutdown()
        self.obj.post_process()

        converted_data = [self.obj.converter(dp) for dp in watcher.results]
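        # at t=1 only label 'a' reported, so its success avg_rt must equal the overall ('') one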
        a, overall = (converted_data[0]["current"][key]["success"]["avg_rt"] for key in ("a", ""))
        self.assertEqual(a, overall)

        b, c, overall = (converted_data[-1]["current"][key]["success"]["avg_rt"] for key in ("b", "c", ""))
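        # the last datapoint covers t=5, where 'b' (r_time 6) and 'c' (r_time 7)
        # contribute one sample each, so the overall average is (6 + 7) / 2 = 6.5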
        self.assertEqual(overall, (b + c) / 2.0)

    def test_new_aggregator(self):
        # aggregator's config
        self.obj.extend_aggregation = True

        reader = MockReader()
        watcher = MockReader()

        # executor/reporter prepare level
        self.obj.add_underling(reader)
        self.obj.add_listener(watcher)

        # send rules to underlings
        self.obj.startup()

        reader.buffer_scale_idx = '100.0'
        # data format: t_stamp, label, conc, r_time, con_time, latency, r_code, error, trname, byte_count
        reader.data.append((1, "a", 1, 1, 1, 1, 200, None, '', 0))
        reader.data.append((2, "b", 1, 2, 2, 2, 200, 'OK', '', 0))
        reader.data.append((2, "b", 1, 3, 3, 3, 404, "Not Found", '', 0))
        reader.data.append((2, "c", 1, 4, 4, 4, 200, None, '', 0))
        reader.data.append((3, "d", 1, 5, 5, 5, 200, None, '', 0))
        reader.data.append((4, "b", 1, 6, 6, 6, 200, None, '', 0))

        # let's collect data to seconds and send something aggregated to watcher
        self.obj.shutdown()
        self.obj.post_process()

        data_points = watcher.results[-1][DataPoint.CUMULATIVE]
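        # extended aggregation evidently splits label 'b' into one entry per distinct
        # sample state (b-0, b-1, b-2) and adds the overall '' entry: 7 keys in total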
        self.assertEqual(7, len(data_points))
        sample_labels = {'a-0', 'b-0', 'b-1', 'b-2', 'c-0', 'd-0', ''}
        self.assertEqual(sample_labels, set(data_points.keys()))

    def test_max_concurrency(self):
        mock = MockReader()
        # data format: t_stamp, label, conc, r_time, con_time, latency, r_code, error, trname, byte_count
        mock.data.append((1, "a", 1, 1, 1, 1, 200, None, '', 0))
        mock.data.append((1, "b", 3, 2, 2, 2, 200, None, '', 0))
        mock.data.append((1, "c", 2, 4, 4, 4, 200, None, '', 0))

        data_point = list(mock.datapoints(True))[0]
        self.assertEqual(3, data_point[DataPoint.CURRENT][''].concurrency)
        self.assertEqual(3, data_point[DataPoint.CUMULATIVE][''].concurrency)

def get_fail_reader_alot(offset=0):
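    # one failing sample per second, with a random error message of growing length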
    mock = MockReader()
    for x in range(2, 200):
        rnd = int(random() * x)
        mock.data.append((x + offset, "first", 1, r(), r(), r(), 200,
                          (random_string(1 + rnd)), '', 0))
    return mock

def get_success_reader_alot(prefix='', offset=0):
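    # one successful sample per second, each under a fresh randomly-named label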
    mock = MockReader()
    for x in range(2, 100):
        rnd = int(random() * x)
        mock.data.append((x + offset, prefix + random_string(1 + rnd), 1, r(),
                          r(), r(), 200, '', '', 0))
    return mock

    def test_sample_ignores(self):
        mock = MockReader()
        mock.ignored_labels = ["ignore"]
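        # ignored labels are evidently matched as prefixes, not exact names:
        # 'ignore1' and 'ignore2' below are filtered out too, as asserted further down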
        mock.buffer_scale_idx = '100.0'
        mock.data.append((1, "ignore", 1, r(), r(), r(), 200, None, '', 0))
        mock.data.append((2, "ignore1", 1, r(), r(), r(), 200, None, '', 0))
        mock.data.append((2, "ignore2", 1, r(), r(), r(), 200, None, '', 0))
        mock.data.append((3, "not-ignore", 1, r(), r(), r(), 200, None, '', 0))
        mock.data.append((3, "not-ignore", 1, r(), r(), r(), 200, None, '', 0))
        mock.data.append((4, "", 1, r(), r(), r(), 200, None, '', 0))
        mock.data.append((4, "", 1, r(), r(), r(), 200, None, '', 0))

        for point in mock.datapoints(True):
            self.assertNotIn("ignore", point[DataPoint.CUMULATIVE].keys())
            self.assertNotIn("ignore1", point[DataPoint.CUMULATIVE].keys())
            self.assertNotIn("ignore2", point[DataPoint.CUMULATIVE].keys())

def get_success_reader_shrinking_labels(max_label_size=20, count=500):
    mock = MockReader()
    half_size = max_label_size // 2
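    # label length shrinks linearly from max_label_size down towards half that size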
    for x in range(2, count):
        target_size = max_label_size - int(float(half_size) * float(x) / float(count))
        label = random_url(target_size)
        mock.data.append((x, label, 1, r(), r(), r(), 200, '', '', 0))
    return mock

    def test_extend_data(self):
        # test migrated from taurus-cloud (LDJSONExtractor tests)

        # check aggregated results for the following hierarchy:
        # {...
        # 'current': {
        # <label>:
        # {'success':{..}, 'http_errors':{..}, 'jmeter_errors':{..},
        # 'success_jmeter_errors':{..}, 'http_errors_jmeter_errors':{..}, 'success_http_errors':{..},
        # '':{..}}},
        # '': <the same states>} # end of 'current' record
        # ...}
        self.obj.settings['extend-aggregation'] = True
        reader = MockReader()
        watcher = MockListener()
        watcher.engine = self.obj.engine

        reader.buffer_scale_idx = '100.0'
        # data format: t_stamp, label, conc, r_time, con_time, latency, r_code, error, trname, byte_count
        reader.data.append((1, "a", 1, 1, 1, 1, '200', None, '', 1))
        reader.data.append((2, "b", 1, 2, 2, 2, '200', 'OK', '', 2))
        reader.data.append((2, "b", 1, 3, 3, 3, '404', "Not Found", '', 3))
        reader.data.append((2, "c", 1, 4, 4, 4, '200', None, '', 4))
        reader.data.append((3, "d", 1, 5, 5, 5, '200', None, '', 5))
        reader.data.append((5, "b", 1, 6, 6, 6, '200', None, '', 6))
        reader.data.append((5, "c", 1, 7, 7, 7, '200', None, '', 7))
        original_labels = list(d[1] for d in reader.data)

        self.obj.add_underling(reader)
        self.obj.add_listener(watcher)

        self.obj.prepare()
        self.obj.startup()
        self.obj.check()
        self.obj.shutdown()
        self.obj.post_process()

        self.assertEqual(4, len(watcher.results))
        allowed_states = set(SAMPLE_STATES + AGGREGATED_STATES + (ConsolidatingAggregator.OVERALL_STATE,))
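        # every KPI bucket written per label must be either a per-sample state,
        # an aggregated state, or the synthetic OVERALL_STATE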

        for dp in watcher.results:
            written_kpis = dp['current']
            for label in written_kpis:
                self.assertIn(label, original_labels + [''], f"Wrong original label: {label}")
                for state in written_kpis[label].keys():
                    self.assertIn(state, allowed_states, f"Wrong state '{state}' for label '{label}'")

    def test_new_reader(self):
        mock = MockReader()
        mock.buffer_scale_idx = '100.0'
        # data format: t_stamp, label, conc, r_time, con_time, latency, r_code, error, trname, byte_count
        mock.data.append((1, "a", 1, 1, 1, 1, 200, None, '', 0))
        mock.data.append((2, "b", 1, 2, 2, 2, 200, None, '', 0))
        mock.data.append((2, "b", 1, 3, 3, 3, 404, "Not Found", '', 0))
        mock.data.append((2, "c", 1, 4, 4, 4, 200, None, '', 0))
        mock.data.append((3, "d", 1, 5, 5, 5, 200, None, '', 0))
        mock.data.append((4, "b", 1, 6, 6, 6, 200, None, '', 0))

        list(mock.datapoints(True))

        failed = mock.results[1]
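        # results[1] is the datapoint for second 2, which holds the 404 sample for 'b'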
        self.assertEqual(2, failed['ts'])

        for kpis in (failed['current'], failed['cumulative']):
            self.assertEqual(1, kpis['b']['fail'])

def get_fail_reader(offset=0):
    mock = MockReader()
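    # timestamps arrive out of order (7 before 3), apparently to exercise reordering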
    mock.data.append((1 + offset, "first", 1, r(), r(), r(), 200, 'FAILx3', '', 0))
    mock.data.append((2 + offset, "first", 1, r(), r(), r(), 200, 'FAILx1', '', 0))
    mock.data.append((5 + offset, "first", 1, r(), r(), r(), 200, None, '', 0))
    mock.data.append((7 + offset, "second", 1, r(), r(), r(), 200, 'FAILx3', '', 0))
    mock.data.append((3 + offset, "first", 1, r(), r(), r(), 200, 'FAILx3', '', 0))
    mock.data.append((6 + offset, "second", 1, r(), r(), r(), 200, 'unique FAIL', '', 0))
    return mock

def get_success_reader_selected_labels(offset=0):
    mock = MockReader()
    labels = ['http://blazedemo.com/reserve.php',
              'http://blazedemo.com/purchase.php',
              'http://blazedemo.com/vacation.html',
              'http://blazedemo.com/confirmation.php',
              'http://blazedemo.com/another.php']
    for x in range(2, 200):
        mock.data.append((x + offset, choice(labels), 1, r(), r(), r(), 200, '', '', 0))
    return mock

    def test_1(self):
        obj = self.obj

        mock = MockReader()
        mock.buffer_scale_idx = '100.0'
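        # data format: t_stamp, label, conc, r_time, con_time, latency, r_code, error, trname, byte_count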
        mock.data.append((1, "", 1, r(), r(), r(), 200, None, '', 0))
        mock.data.append((2, "", 1, r(), r(), r(), 200, None, '', 0))
        mock.data.append((2, "", 1, r(), r(), r(), 200, None, '', 0))
        mock.data.append((3, "", 1, r(), r(), r(), 200, None, '', 0))
        mock.data.append((3, "", 1, r(), r(), r(), 200, None, '', 0))
        mock.data.append((4, "", 1, r(), r(), r(), 200, None, '', 0))
        mock.data.append((4, "", 1, r(), r(), r(), 200, None, '', 0))

        obj.add_listener(mock)

        for point in mock.datapoints():
            self.assertNotEqual(0, point[DataPoint.CUMULATIVE][''][KPISet.CONCURRENCY])

        mock.data.append((2, "", 1, r(), r(), r(), 200, None, '', 0))
        mock.data.append((2, "", 1, r(), r(), r(), 200, None, '', 0))

        for point in mock.datapoints():
            pass

        for point in mock.datapoints(True):
            pass

        for point in mock.results:
            overall = point[DataPoint.CURRENT]['']
            self.assertTrue(len(overall[KPISet.PERCENTILES]) > 0)

def get_success_reader(offset=0):
    mock = MockReader()
    mock.data.append((1 + offset, "first", 1, r(), r(), r(), 200, None, '', 0))
    mock.data.append((2 + offset, "second", 1, r(), r(), r(), 200, None, '', 0))
    mock.data.append((2 + offset, "first", 1, r(), r(), r(), 200, None, '', 0))
    mock.data.append((3 + offset, "second", 1, r(), r(), r(), 200, None, '', 0))
    mock.data.append((3 + offset, "first", 1, r(), r(), r(), 200, None, '', 0))
    mock.data.append((4 + offset, "third", 1, r(), r(), r(), 200, None, '', 0))
    mock.data.append((4 + offset, "first", 1, r(), r(), r(), 200, None, '', 0))
    mock.data.append((6 + offset, "second", 1, r(), r(), r(), 200, None, '', 0))
    mock.data.append((6 + offset, "third", 1, r(), r(), r(), 200, None, '', 0))
    mock.data.append((6 + offset, "first", 1, r(), r(), r(), 200, None, '', 0))
    mock.data.append((5 + offset, "first", 1, r(), r(), r(), 200, None, '', 0))
    return mock

    def test_negative_response_time_scaling_crash(self):
        self.obj.track_percentiles = [0.0, 50.0, 95.0, 99.0, 100.0]
        self.obj.prepare()

        self.sniff_log(self.obj.log)

        mock = MockReader()
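        # response times are negated here: the aggregator must log a warning about
        # them (asserted below) instead of crashing in percentile scaling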
        mock.data.append((1, "first", 1, -r(), r(), r(), 200, 'FAILx3', '', 0))
        mock.data.append((2, "first", 1, -r(), r(), r(), 200, 'FAILx1', '', 0))
        mock.data.append((5, "first", 1, -r(), r(), r(), 200, None, '', 0))
        mock.data.append((7, "second", 1, -r(), r(), r(), 200, 'FAILx3', '', 0))
        mock.data.append((3, "first", 1, -r(), r(), r(), 200, 'FAILx3', '', 0))
        mock.data.append((6, "second", 1, -r(), r(), r(), 200, 'unique FAIL', '', 0))

        self.obj.add_underling(mock)

        self.obj.check()
        for point in self.obj.datapoints():
            self.obj.log.info(to_json(point))

        self.assertIn("Negative response time reported", self.log_recorder.warn_buff.getvalue())

    def test_0buffer_scaling(self):
        obj = self.obj

        mock = MockReader()
        obj.add_listener(mock)

        # data format: t_stamp, label, conc, r_time, con_time, latency, r_code, error, trname, byte_count
        mock.min_buffer_len = 1
        mock.buffer_len = 1
        mock.buffer_multiplier = 1
        mock.buffer_scale_idx = '50.0'
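        # '50.0' presumably ties buffer sizing to the median response time: the buffer
        # should grow while response times climb (2 -> 3 -> 4) and shrink once they
        # drop back to 1. datapoints() is drained twice per step, apparently to let
        # the dynamic buffer settle.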

        buffer_len = mock.buffer_len
        for i in range(5):
            mock.data.append((100 + i, "", 1, 2, 2, 2, 200, None, '', 0))
        points = list(mock.datapoints())
        points = list(mock.datapoints())
        self.assertTrue(mock.buffer_len > buffer_len)
        buffer_len = mock.buffer_len

        for i in range(10):
            mock.data.append((200 + i, "", 1, 3, 3, 3, 200, None, '', 0))
        points = list(mock.datapoints())
        points = list(mock.datapoints())
        self.assertTrue(mock.buffer_len > buffer_len)
        buffer_len = mock.buffer_len

        for i in range(20):
            mock.data.append((300 + i, "", 1, 4, 4, 4, 200, None, '', 0))
        points = list(mock.datapoints())
        points = list(mock.datapoints())
        self.assertTrue(mock.buffer_len > buffer_len)
        buffer_len = mock.buffer_len

        for i in range(15):
            mock.data.append((400 + i, "", 1, 1, 1, 1, 200, None, '', 0))
        points = list(mock.datapoints())
        points = list(mock.datapoints())
        self.assertTrue(mock.buffer_len < buffer_len)
        buffer_len = mock.buffer_len

        for i in range(30):
            mock.data.append((500 + i, "", 1, 1, 1, 1, 200, None, '', 0))
        points = list(mock.datapoints())
        points = list(mock.datapoints())
        self.assertTrue(mock.buffer_len < buffer_len)

    def configure(self, config):
        super(TestExternalResultsLoader, self).configure(config)
        self.results_listener = MockReader()
        self.obj.engine.aggregator = ConsolidatingAggregator()
        self.obj.engine.aggregator.engine = self.obj.engine
        self.obj.engine.aggregator.add_listener(self.results_listener)

    def test_extend_datapoints(self):
        # check reported data format conversion for test state filtering on BM side

        def get_mock(origin_func, store):
            # generate replacement for BlazemeterUploader._dpoint_serializer.get_kpi_body
            def mock_get_kpi_body(data, isfinal):
                store.append(data)  # save received data for verifying
                return origin_func(data, isfinal)  # call the original get_kpi_body as well

            return mock_get_kpi_body

        mock = BZMock()
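        # canned API responses for the BlazeMeter endpoints the uploader will hit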
        mock.mock_get.update({
            '1': {
                "result": []
            },
            'https://a.blazemeter.com/api/v4/tests?projectId=1&name=Taurus+Test':
            {
                "result": []
            },
            '3': {
                "result": []
            },
        })
        mock.mock_post.update({
            'https://a.blazemeter.com/api/v4/projects': {
                "result": {
                    "id": 1
                }
            },
            'https://a.blazemeter.com/api/v4/tests': {
                "result": {
                    'id': 1
                }
            },
            'https://a.blazemeter.com/api/v4/tests/1/start-external': {
                "result": {
                    'session': {
                        'id': 1,
                        'userId': 1,
                        'testId': 1
                    },
                    'master': {
                        'id': 1,
                        'userId': 1
                    },
                    'signature': 'sign'
                }
            },
            'https://data.blazemeter.com/submit.php?session_id=1&signature=sign&test_id=1&user_id=1&pq=0&target=labels_bulk&update=1':
            [
                {},
                {
                    "result": {
                        'session': {
                            "statusCode": 140,
                            'status': 'ENDED'
                        }
                    }
                },
                {},
            ],
            'https://data.blazemeter.com/api/v4/image/1/files?signature=sign':
                [IOError("monitoring push expected fail")] + [{"result": True} for _ in range(8)],
            'https://a.blazemeter.com/api/v4/sessions/1/stop': {},
            'https://data.blazemeter.com/submit.php?session_id=1&signature=sign&test_id=1&user_id=1&pq=0&target=engine_health&update=1':
            {
                "result": {
                    'session': {}
                }
            }
        })

        obj = BlazeMeterUploader()
        sent_data_points = []
        obj._dpoint_serializer.get_kpi_body = get_mock(
            obj._dpoint_serializer.get_kpi_body, sent_data_points)
        obj.parameters['project'] = 'Proj name'
        obj.settings['token'] = '123'
        obj.settings['browser-open'] = 'none'
        obj.engine = EngineEmul()
        aggregator = ConsolidatingAggregator()
        aggregator.engine = obj.engine
        aggregator.settings['extend-aggregation'] = True
        reader = MockReader()
        watcher = MockReader()

        reader.buffer_scale_idx = '100.0'
        # data format: t_stamp, label, conc, r_time, con_time, latency, r_code, error, trname, byte_count
        reader.data.append((1, "a", 1, 1, 1, 1, 200, None, '', 1))
        reader.data.append((2, "b", 1, 2, 2, 2, 200, 'OK', '', 2))
        reader.data.append((2, "b", 1, 3, 3, 3, 404, "Not Found", '', 3))
        reader.data.append((2, "c", 1, 4, 4, 4, 200, None, '', 4))
        reader.data.append((3, "d", 1, 5, 5, 5, 200, None, '', 5))
        reader.data.append((5, "b", 1, 6, 6, 6, 200, None, '', 6))
        reader.data.append((5, "c", 1, 7, 7, 7, 200, None, '', 7))
        original_labels = list(d[1] for d in reader.data)

        aggregator.add_underling(reader)
        aggregator.add_listener(watcher)
        obj.engine.aggregator = aggregator

        mock.apply(obj._user)
        obj._user.timeout = 0.001

        obj.engine.aggregator.prepare()
        obj.prepare()

        obj.engine.aggregator.startup()
        obj.startup()

        obj.engine.aggregator.check()
        obj.check()

        obj.engine.aggregator.shutdown()
        obj.shutdown()

        obj.engine.aggregator.post_process()
        obj.post_process()

        sent_data_points = sent_data_points[0] + sent_data_points[1]

        state_labels = [0, 1, 2]
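        # after the extended-aggregation conversion, each label's KPI dict is keyed
        # by a numeric sample-state index (0, 1 or 2) rather than by state name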
        for dp in sent_data_points:
            for data in dp['cumulative'], dp['current']:
                for label in data:
                    self.assertIn(label, original_labels + [''])
                    self.assertIsInstance(data[label], dict)
                    for key in data[label]:
                        self.assertIn(key, state_labels)