Esempio n. 1
0
 def setUp(self):
     """Create an ExternalResultsLoader wired to an emulated engine."""
     super(TestExternalResultsLoader, self).setUp()
     self.obj = ExternalResultsLoader()
     self.obj.engine = EngineEmul()
Esempio n. 2
0
    def test_check(self):
        """
        Drive a full BlazeMeterUploader lifecycle against a scripted client.

        The emulator replays canned responses in order: ping, tests listing,
        test creation, session start, two data pushes, a post-process push,
        a file upload and terminate.  The second push reports an ENDED
        session, which check() is expected to surface as KeyboardInterrupt.
        """
        client = BlazeMeterClientEmul(logging.getLogger(''))
        client.results.append({"marker": "ping", 'result': {}})
        client.results.append({"marker": "tests", 'result': {}})
        client.results.append({
            "marker": "test-create",
            'result': {
                'id': 'unittest1'
            }
        })
        client.results.append({
            "marker": "sess-start",
            'result': {
                'session': {
                    'id': 'sess1',
                    'userId': 1
                },
                'signature': ''
            }
        })
        client.results.append({
            "marker": "first push",
            'result': {
                'session': {}
            }
        })
        client.results.append({
            "marker": "second push",
            'result': {
                'session': {
                    "statusCode": 140,
                    'status': 'ENDED'
                }
            }
        })
        client.results.append({
            "marker": "post-proc push",
            'result': {
                'session': {}
            }
        })
        client.results.append({
            "marker": "upload1",
            "result": True
        })  # post-proc error stats
        client.results.append({
            "marker": "terminate",
            'result': {
                'session': {}
            }
        })

        obj = BlazeMeterUploader()
        obj.settings['token'] = '123'
        obj.settings['browser-open'] = 'none'
        obj.engine = EngineEmul()
        # use os.path.join instead of raw string concatenation: artifacts_dir
        # may not end with a separator, which would place the copy outside of
        # the artifacts dir (other tests in this file already use join)
        shutil.copy(__file__,
                    os.path.join(obj.engine.artifacts_dir, os.path.basename(__file__)))
        obj.client = client
        obj.prepare()
        obj.startup()
        for x in range(0, 31):
            obj.aggregated_second(random_datapoint(x))
        obj.check()
        for x in range(32, 65):
            obj.aggregated_second(random_datapoint(x))
        # the emulated "second push" reports ENDED, so check() must interrupt
        self.assertRaises(KeyboardInterrupt, obj.check)
        obj.aggregated_second(random_datapoint(10))
        obj.shutdown()
        obj.post_process()
Esempio n. 3
0
    def test_install_tools(self):
        """
        Check automatic installation of selenium-server, junit and hamcrest.

        Download links are temporarily redirected to local resource files
        served via file:// URLs, and restored in the ``finally`` block so
        other tests still see the original module-level constants.
        """
        dummy_installation_path = __dir__(
        ) + "/../../build/tmp/selenium-taurus"
        base_link = "file:///" + __dir__() + "/../../resources/selenium"

        # start from a clean slate so the existence asserts below are valid
        shutil.rmtree(os.path.dirname(dummy_installation_path),
                      ignore_errors=True)

        # remember original links so the finally block can restore them
        selenium_server_link = java.SELENIUM_DOWNLOAD_LINK
        junit_link = java.JUNIT_DOWNLOAD_LINK
        junit_mirrors = java.JUNIT_MIRRORS_SOURCE
        hamcrest_link = java.HAMCREST_DOWNLOAD_LINK
        try:
            java.SELENIUM_DOWNLOAD_LINK = base_link + "/selenium-server-standalone-2.46.0.jar"
            java.JUNIT_DOWNLOAD_LINK = base_link + "/junit-4.12.jar"
            # NOTE(review): unlike the other links there is no "/" before
            # "unicode_file", so this resolves to ".../seleniumunicode_file" —
            # confirm whether that is the intended resource path
            java.JUNIT_MIRRORS_SOURCE = base_link + "unicode_file"
            java.HAMCREST_DOWNLOAD_LINK = base_link + "/hamcrest-core-1.3.jar"

            self.assertFalse(os.path.exists(dummy_installation_path))

            self.obj = JUnitTester()
            self.obj.engine = EngineEmul()
            self.obj.settings.merge({
                "selenium-server":
                os.path.join(dummy_installation_path, "selenium-server.jar"),
                "hamcrest-core":
                os.path.join(dummy_installation_path, "tools", "junit",
                             "hamcrest-core.jar"),
                "path":
                os.path.join(dummy_installation_path, "tools", "junit",
                             "junit.jar")
            })

            self.obj.execution.merge({
                "scenario": {
                    "script":
                    __dir__() + "/../../resources/selenium/junit/jar/"
                },
                "runner": "junit"
            })
            self.obj.install_required_tools()
            self.obj.prepare()
            self.assertIsInstance(self.obj, JUnitTester)
            # all three tools must have been "downloaded" into place
            self.assertTrue(
                os.path.exists(
                    os.path.join(dummy_installation_path,
                                 "selenium-server.jar")))
            self.assertTrue(
                os.path.exists(
                    os.path.join(dummy_installation_path, "tools", "junit",
                                 "junit.jar")))
            self.assertTrue(
                os.path.exists(
                    os.path.join(dummy_installation_path, "tools", "junit",
                                 "hamcrest-core.jar")))
        finally:
            java.SELENIUM_DOWNLOAD_LINK = selenium_server_link
            java.JUNIT_DOWNLOAD_LINK = junit_link
            java.HAMCREST_DOWNLOAD_LINK = hamcrest_link
            java.JUNIT_MIRRORS_SOURCE = junit_mirrors
Esempio n. 4
0
 def setUp(self):
     """Create a JMX2YAML converter (no input file) plus an emulated engine."""
     super(TestConverter, self).setUp()
     self.engine = EngineEmul()
     self.obj = JMX2YAML(file_name=None, options=FakeOptions())
Esempio n. 5
0
 def setUp(self):
     """Provide an emulated engine for converter tests."""
     super(TestConverter, self).setUp()
     self.engine = EngineEmul()
Esempio n. 6
0
    def test_check(self):
        """
        Full BlazeMeterUploader lifecycle over BZMock-ed HTTP endpoints.

        Mocks cover project/test creation, external session start, labels
        and engine-health submits, image/file uploads and custom metrics;
        two of the mocked endpoints raise IOError first to exercise the
        retry/error handling paths.  The labels_bulk submit returns ENDED
        on its second call, which check() must turn into KeyboardInterrupt.
        """
        mock = BZMock()
        mock.mock_get.update({
            'https://a.blazemeter.com/api/v4/tests?workspaceId=1&name=Taurus+Test': {"result": []},
            'https://a.blazemeter.com/api/v4/tests?projectId=1&name=Taurus+Test': {"result": []}
        })
        mock.mock_post.update({
            'https://a.blazemeter.com/api/v4/projects': {"result": {
                "id": 1,
                "name": "boo",
                "userId": 2,
                "description": None,
                "created": time.time(),
                "updated": time.time(),
                "organizationId": None
            }},
            'https://a.blazemeter.com/api/v4/tests': {"result": {'id': 1}},
            'https://a.blazemeter.com/api/v4/tests/1/start-external': {"result": {
                'session': {'id': 1, 'userId': 1, 'testId': 1},
                'master': {'id': 1, 'userId': 1},
                'signature': 'sign'}},
            # list value => responses replayed in order across calls
            'https://data.blazemeter.com/submit.php?session_id=1&signature=sign&test_id=1&user_id=1&pq=0&target=labels_bulk&update=1': [
                {},
                {"result": {'session': {"statusCode": 140, 'status': 'ENDED'}}},
                {},
            ],
            # first call fails deliberately to exercise error handling
            'https://a.blazemeter.com/api/v4/image/1/files?signature=sign': [
                IOError("monitoring push expected fail"),
                {"result": True},
                {"result": True},
                {"result": True},
                {"result": True},
                {"result": True},
                {"result": True},
                {"result": True},
                {"result": True},
            ],
            'https://a.blazemeter.com/api/v4/data/masters/1/custom-metrics': [
                IOError("custom metric push expected fail"),
                {"result": True},
                {"result": True},
            ],
            'https://a.blazemeter.com/api/v4/sessions/1/stop': {},
            'https://data.blazemeter.com/submit.php?session_id=1&signature=sign&test_id=1&user_id=1&pq=0&target=engine_health&update=1':
                {"result": {'session': {}}}
        })

        obj = BlazeMeterUploader()
        obj.parameters['project'] = 'Proj name'
        obj.settings['token'] = '123'
        obj.settings['browser-open'] = 'none'
        obj.settings['send-custom-metrics'] = True
        obj.settings['send-custom-tables'] = True
        obj.engine = EngineEmul()
        shutil.copy(__file__, os.path.join(obj.engine.artifacts_dir, os.path.basename(__file__)))
        mock.apply(obj._user)
        obj._user.timeout = 0.1
        obj.prepare()
        obj.startup()
        for x in range(0, 31):
            obj.aggregated_second(random_datapoint(x))
        mon = [{"ts": 1, "source": "local", "cpu": 1, "mem": 2, "bytes-recv": 100, "other": 0},
               {"ts": 1, "source": "chrome", "memory": 32, "cpu": 23}]
        obj.monitoring_data(mon)
        obj.check()
        for x in range(32, 65):
            obj.aggregated_second(random_datapoint(x))
        # force the next check() past the send interval so it actually pushes
        obj.last_dispatch = time.time() - 2 * obj.send_interval
        self.assertRaises(KeyboardInterrupt, obj.check)
        obj.aggregated_second(random_datapoint(10))
        obj.shutdown()
        log_file = obj.engine.create_artifact('log', '.tmp')
        handler = logging.FileHandler(log_file)
        obj.engine.log.parent.addHandler(handler)
        obj.engine.config.get('modules').get('shellexec').get('env')['TAURUS_INDEX_ALL'] = 1
        obj.post_process()
        # exact number of HTTP calls made during the whole lifecycle
        self.assertEqual(23, len(mock.requests))
        obj.engine.log.parent.removeHandler(handler)
Esempio n. 7
0
    def test_simple(self):
        """
        End-to-end PBenchExecutor run: prepare, startup, poll check() until
        the run finishes, then shutdown and post-process, driving the
        consolidating aggregator alongside the executor at each phase.
        """
        obj = PBenchExecutor()
        obj.engine = EngineEmul()
        obj.engine.aggregator = ConsolidatingAggregator()
        obj.engine.aggregator.add_listener(DataPointLogger())
        obj.engine.config.merge({"provisioning": "test"})

        # prefer a real local phantom installation when present, otherwise
        # fall back to the bundled stub script
        if os.path.exists("/home/undera/Sources/phantom"
                          ):  # FIXME: not good, get rid of it
            obj.settings.merge({
                "path":
                "/home/undera/Sources/phantom/bin/phantom",
                "modules-path":
                "/home/undera/Sources/phantom/lib/phantom",
            })
        else:
            obj.settings.merge({
                "path":
                os.path.join(os.path.dirname(__file__), '..', "phantom.sh"),
            })

        obj.execution.merge({
            "log-responses": "proto_error",
            # "iterations": 5000000,
            "concurrency": 10,
            "throughput": 1000,
            "ramp-up": "1m",
            # "steps": 5,
            "hold-for": "15",
            "scenario": {
                "timeout":
                1,
                "default-address":
                "http://localhost:33",
                "headers": {
                    "Connection": "close"
                },
                "requests": [
                    # "/",
                    {
                        "url": "/api",
                        "method": "POST",
                        "headers": {
                            "Content-Length": 0
                        },
                        "body": {
                            "param": "value"
                        }
                    }
                ]
            }
        })
        obj.engine.aggregator.prepare()
        obj.prepare()

        obj.engine.aggregator.startup()
        obj.startup()

        # poll until the executor reports completion
        while not obj.check():
            logging.debug("Running...")
            obj.engine.aggregator.check()
            time.sleep(1)

        obj.shutdown()
        obj.engine.aggregator.shutdown()

        obj.post_process()
        obj.engine.aggregator.post_process()
Esempio n. 8
0
 def test_problematic(self):
     """A bogus value under the engine's 'modules' config must make prepare() raise ToolError."""
     obj = InstallChecker()
     obj.engine = EngineEmul()
     obj.engine.config.get("modules")["err"] = "hello there"
     self.assertRaises(ToolError, obj.prepare)
Esempio n. 9
0
 def setUp(self):
     """Make bundled locust resources importable and build a LocustIOExecutor with local provisioning."""
     sys.path.append(__dir__() + "/../locust/")
     self.obj = LocustIOExecutor()
     self.obj.engine = EngineEmul()
     self.obj.engine.config['provisioning'] = 'local'
 def test_xml_report_test_duration_failed_prepare(self):
     """post_process() must tolerate a FinalStatus whose start_time was never set (failed prepare)."""
     obj = FinalStatus()
     obj.engine = EngineEmul()
     obj.parameters = BetterDict()
     obj.aggregated_second(self.__get_datapoint(ts=100))
     obj.post_process()  # shouldn't raise ValueError because obj.start_time is None
Esempio n. 11
0
 def test_regular(self):
     """Valid 'base' and 'dummy' module entries must make prepare() raise NormalShutdown."""
     checker = InstallChecker()
     checker.engine = EngineEmul()
     modules = checker.engine.config.get("modules")
     modules["base"] = "%s.%s" % (EngineModule.__module__, EngineModule.__name__)
     modules["dummy"] = "%s.%s" % (ModuleMock.__module__, ModuleMock.__name__)
     self.assertRaises(NormalShutdown, checker.prepare)
Esempio n. 12
0
 def setUp(self):
     """Create a NoseTester bound to an emulated engine."""
     super(TestNoseRunner, self).setUp()
     self.obj = NoseTester()
     self.obj.engine = EngineEmul()
Esempio n. 13
0
 def test_func_reader(self):
     """FuncSamplesReader yields 4 samples from the ldjson report, with expected case names and statuses."""
     reader = FuncSamplesReader(__dir__() + "/../../resources/selenium/report.ldjson", EngineEmul(), logging.getLogger(), None)
     samples = list(reader.read())
     self.assertEqual(4, len(samples))
     expected = [("testFailure", "FAILED"), ("testBroken", "BROKEN"), ("testSuccess", "PASSED")]
     for sample, (case_name, status) in zip(samples, expected):
         self.assertEqual(sample.test_case, case_name)
         self.assertEqual(sample.status, status)
Esempio n. 14
0
 def setUp(self):
     """Create a TsungExecutor bound to an emulated engine, sharing its env."""
     super(TestTsungConfig, self).setUp()
     self.obj = TsungExecutor()
     self.obj.engine = EngineEmul()
     self.obj.env = self.obj.engine.env
Esempio n. 15
0
    def test_script_generation(self):
        """
        Generate a grinder requests script from a scenario and verify its
        contents via regexes: default address, request methods and URLs,
        sleeps derived from think-time, headers, timeout in millis and the
        store-cookie flag.
        """
        obj = GrinderExecutor()
        obj.engine = EngineEmul()
        obj.settings.merge(
            {'path': __dir__() + "/../resources/grinder/fake_grinder.jar"})
        obj.execution.merge({
            "scenario": {
                "default-address":
                "http://blazedemo.com",
                "headers": {
                    "My-Header": "Its-Value",
                    "Another-Header": "Another-Value",
                },
                "timeout":
                "30s",
                "think-time":
                "2s",
                "store-cookie":
                True,
                "requests": [
                    '/',
                    {
                        'url': 'http://example.com/',
                        'method': 'POST',
                        'think-time': "1s",
                        'headers': {
                            'Custom': 'Header',
                        }
                    },
                ]
            }
        })
        obj.prepare()
        # 'with' guarantees the generated script file is closed
        # (the original open(...).read() leaked the file handle)
        script_path = os.path.join(obj.engine.artifacts_dir,
                                   'grinder_requests.py')
        with open(script_path) as script_file:
            script = script_file.read()

        default_addr = re.findall(r"url='http://blazedemo.com'", script)
        self.assertEqual(1, len(default_addr))

        requests = re.findall(r"request\.([A-Z]+)\('(.+?)'", script)
        self.assertEqual(2, len(requests))
        self.assertEqual(requests[0], ('GET', '/'))
        self.assertEqual(requests[1], ('POST', 'http://example.com/'))

        # think-time values are converted to millisecond sleeps
        sleeps = re.findall(r"grinder\.sleep\((.+)\)", script)
        self.assertEqual(3, len(sleeps))
        self.assertEqual(sleeps[0], 'sleep_time, 0')
        self.assertEqual(sleeps[1], '2000')
        self.assertEqual(sleeps[2], '1000')

        headers = re.findall(r"NVPair\('(.+)', '(.+)'\)", script)
        self.assertEqual(3, len(headers))
        self.assertIn(("My-Header", "Its-Value"), headers)
        self.assertIn(("Another-Header", "Another-Value"), headers)
        self.assertIn(("Custom", "Header"), headers)

        # "30s" scenario timeout becomes 30000 ms
        timeout = re.findall(r"defaults.setTimeout\((\d+)\)", script)
        self.assertEqual(1, len(timeout))
        self.assertEqual(timeout[0], '30000')

        cookies = re.findall(r"defaults.setUseCookies\(1\)", script)
        self.assertEqual(1, len(cookies))
Esempio n. 16
0
 def setUp(self):
     """Create a Proxy2JMXEmul instance bound to an emulated engine."""
     self.obj = Proxy2JMXEmul()
     self.obj.engine = EngineEmul()
Esempio n. 17
0
 def setUp(self):
     """Provide an emulated engine for Swagger2YAML tests."""
     super(TestSwagger2YAML, self).setUp()
     self.engine = EngineEmul()
Esempio n. 18
0
 def setUp(self):
     """Create a RobotExecutor bound to an emulated engine, sharing its env."""
     super(TestRobotExecutor, self).setUp()
     self.obj = RobotExecutor()
     self.obj.engine = EngineEmul()
     self.obj.env = self.obj.engine.env
Esempio n. 19
0
    def test_some_errors(self):
        """
        Upload a datapoint carrying assertion and non-assertion errors while
        the engine has a stopping_reason, then verify the note-append PATCH
        calls made in post-processing and the serialized labels payload
        (assertions vs. plain errors are reported in separate fields).
        """
        mock = BZMock()
        mock.mock_get.update({
            'https://a.blazemeter.com/api/v4/tests?projectId=1&name=Taurus+Test': {"result": []},
            'https://a.blazemeter.com/api/v4/sessions/1': {"result": {'id': 1, "note": "somenote"}},
            'https://a.blazemeter.com/api/v4/masters/1': {"result": {'id': 1, "note": "somenote"}},
        })
        mock.mock_post.update({
            'https://a.blazemeter.com/api/v4/projects': {"result": {'id': 1}},
            'https://a.blazemeter.com/api/v4/tests': {"result": {'id': 1}},
            'https://a.blazemeter.com/api/v4/tests/1/start-external': {"result": {
                "session": {'id': 1, "testId": 1, "userId": 1},
                "master": {'id': 1},
                "signature": "sign"
            }},
            'https://a.blazemeter.com/api/v4/image/1/files?signature=sign': {"result": True},
            'https://data.blazemeter.com/submit.php?session_id=1&signature=sign&test_id=1&user_id=1' +
            '&pq=0&target=labels_bulk&update=1': {},
            'https://a.blazemeter.com/api/v4/sessions/1/stop': {"result": True},
            'https://data.blazemeter.com/submit.php?session_id=1&signature=sign&test_id=1&user_id=1&pq=0&target=engine_health&update=1': {'result': {'session': {}}}
        })

        mock.mock_patch.update({
            'https://a.blazemeter.com/api/v4/sessions/1': {"result": {"id": 1, "note": "somenote"}},
            'https://a.blazemeter.com/api/v4/masters/1': {"result": {"id": 1, "note": "somenote"}},
        })

        obj = BlazeMeterUploader()
        mock.apply(obj._user)
        obj.parameters['project'] = 'Proj name'
        obj.settings['token'] = '123'
        obj.settings['browser-open'] = 'none'
        obj.engine = EngineEmul()
        obj.prepare()
        obj.startup()
        # the stopping reason is expected to be appended to session/master notes
        obj.engine.stopping_reason = ValueError('wrong value')
        obj.aggregated_second(random_datapoint(10))
        # inject one assertion-type and one plain error into cumulative KPIs
        obj.kpi_buffer[-1][DataPoint.CUMULATIVE][''][KPISet.ERRORS] = [
            {'msg': 'Forbidden', 'cnt': 10, 'type': KPISet.ERRTYPE_ASSERT, 'urls': [], KPISet.RESP_CODES: '111'},
            {'msg': 'Allowed', 'cnt': 20, 'type': KPISet.ERRTYPE_ERROR, 'urls': [], KPISet.RESP_CODES: '222'}]
        obj.post_process()
        obj.log.info("Requests: %s", mock.requests)

        # check for note appending in _postproc_phase3()
        reqs = mock.requests[-4:]
        self.assertIn('api/v4/sessions/1', reqs[0]['url'])
        self.assertIn('api/v4/sessions/1', reqs[1]['url'])
        self.assertIn('api/v4/masters/1', reqs[2]['url'])
        self.assertIn('api/v4/masters/1', reqs[3]['url'])
        self.assertIn('ValueError: wrong value', str(reqs[1]['data']))
        self.assertIn('ValueError: wrong value', str(reqs[3]['data']))

        # request #8 is the labels_bulk submit; decode and inspect the payload
        labels = mock.requests[8]['data']
        if not isinstance(labels, str):
            labels = labels.decode("utf-8")
        obj.log.info("Labels: %s", labels)
        data = json.loads(str(labels))
        self.assertEqual(1, len(data['labels']))
        total_item = data['labels'][0]
        self.assertEqual('ALL', total_item['name'])
        self.assertEqual(total_item['assertions'],
                         [{'failureMessage': 'Forbidden', 'failures': 10, 'name': 'All Assertions'}])
        self.assertEqual(total_item['errors'], [{'m': 'Allowed', 'count': 20, 'rc': '222'}])
Esempio n. 20
0
 def setUp(self):
     """Create an ApiritifNoseExecutor bound to an emulated engine, sharing its env."""
     super(TestNoseRunner, self).setUp()
     self.obj = ApiritifNoseExecutor()
     self.obj.engine = EngineEmul()
     self.obj.env = self.obj.engine.env
Esempio n. 21
0
def get_grinder():
    """Build a GrinderExecutor wired to an emulated engine and the fake grinder jar."""
    executor = GrinderExecutor()
    executor.engine = EngineEmul()
    executor.env = executor.engine.env
    executor.settings.merge({'path': RESOURCES_DIR + "grinder/fake_grinder.jar"})
    return executor
Esempio n. 22
0
 def setUp(self):
     """Create an ApiritifNoseExecutor bound to an emulated engine."""
     super(TestApiritifScriptGenerator, self).setUp()
     self.obj = ApiritifNoseExecutor()
     self.obj.engine = EngineEmul()
Esempio n. 23
0
def get_gatling():
    """Build a GatlingExecutor pointed at the fake gatling tool from test resources."""
    executor = GatlingExecutor()
    executor.engine = EngineEmul()
    tool_path = os.path.abspath(RESOURCES_DIR + "gatling/gatling" + EXE_SUFFIX)
    executor.settings.merge({"path": tool_path})
    return executor
Esempio n. 24
0
 def setUp(self):
     """Create an emulated engine and resolve local paths config for engine tests."""
     super(TestEngine, self).setUp()
     self.obj = EngineEmul()
     self.paths = local_paths_config()
Esempio n. 25
0
 def getGatling(self):
     """Create a GatlingExecutor configured with the local fake gatling binary."""
     executor = GatlingExecutor()
     executor.engine = EngineEmul()
     tool_path = os.path.abspath(__dir__() + "/../gatling/gatling" + EXE_SUFFIX)
     executor.settings.merge({"path": tool_path})
     return executor
Esempio n. 26
0
 def setUp(self):
     """Create a ScenarioExecutor wired to an emulated engine, sharing the engine's env."""
     super(TestScenarioExecutor, self).setUp()
     self.engine = EngineEmul()
     self.executor = ScenarioExecutor()
     self.executor.engine = self.engine
     self.executor.env = self.executor.engine.env
Esempio n. 27
0
def get_pbench():
    """Build a PBenchExecutor with an emulated engine and the stub phantom.sh tool."""
    executor = PBenchExecutor()
    executor.engine = EngineEmul()
    executor.env = executor.engine.env
    executor.settings.merge({"path": join(RESOURCES_DIR, "pbench", "phantom.sh")})
    return executor
Esempio n. 28
0
 def _get_pbench(self):
     """Build a bare PBenchExecutor with empty settings and an empty engine config."""
     obj = PBenchExecutor()
     obj.engine = EngineEmul()
     obj.settings = BetterDict()
     obj.engine.config = BetterDict()
     return obj
Esempio n. 29
0
 def configure(self, jtl_file):
     """Create a FuncJTLReader over the given JTL file, backed by an emulated engine."""
     engine = EngineEmul()
     self.obj = FuncJTLReader(jtl_file, engine, logging.getLogger(''))
Esempio n. 30
0
    def test_public_report(self):
        """
        With 'public-report' enabled, the uploader must request a public
        token for the master and log the resulting public report link;
        the total number of HTTP calls is also pinned.
        """
        mock = BZMock()
        mock.mock_get.update({
            'https://a.blazemeter.com/api/v4/tests?workspaceId=1&name=Taurus+Test':
            {
                "result": []
            }
        })

        mock.mock_post.update({
            'https://a.blazemeter.com/api/v4/projects': {
                "result": {
                    'id': 1
                }
            },
            'https://a.blazemeter.com/api/v4/tests': {
                'result': {
                    'id': 'unittest1'
                }
            },
            'https://a.blazemeter.com/api/v4/tests/unittest1/start-external': {
                "result": {
                    'session': {
                        'id': 'sess1',
                        'userId': 1,
                        'testId': 1
                    },
                    'master': {
                        'id': 'master1',
                        'userId': 1
                    },
                    'signature': ''
                }
            },
            # the endpoint exercised by the public-report feature
            'https://a.blazemeter.com/api/v4/masters/master1/public-token': {
                'result': {
                    'publicToken': 'publicToken'
                }
            },
            'https://data.blazemeter.com/submit.php?session_id=sess1&signature=&test_id=1&user_id=1&pq=0&target=labels_bulk&update=1':
            {
                "result": {
                    'session': {}
                }
            },
            'https://data.blazemeter.com/api/v4/image/sess1/files?signature=':
            {
                'result': True
            },
        })

        obj = BlazeMeterUploader()
        obj.settings['token'] = '123'
        obj.settings['browser-open'] = 'none'
        obj.settings['public-report'] = True
        obj.settings['send-monitoring'] = False
        obj.engine = EngineEmul()
        mock.apply(obj._user)
        # capture log output so the public link line can be asserted on
        self.sniff_log(obj.log)
        obj.prepare()
        obj.startup()
        obj.aggregated_second(random_datapoint(10))
        obj.check()
        obj.shutdown()
        obj.post_process()

        log_buff = self.log_recorder.info_buff.getvalue()
        log_line = "Public report link: https://a.blazemeter.com/app/?public-token=publicToken#/masters/master1/summary"
        self.assertIn(log_line, log_buff)
        logging.warning("\n".join([x['url'] for x in mock.requests]))
        self.assertEqual(14, len(mock.requests))