def test_no_notes_for_public_reporting(self):
        """With an empty token (anonymous/public reporting) the uploader must not
        post session/master notes carrying the engine's stopping reason."""
        client = BlazeMeterClientEmul(logging.getLogger(''))
        client.results.append({"marker": "ping", 'result': {}})
        # pre-queue empty responses for the remaining API calls of the flow
        client.results.extend([{'result': {}} for _ in range(6)])

        obj = BlazeMeterUploader()
        obj.parameters['project'] = 'Proj name'
        obj.settings['token'] = ''  # public reporting
        obj.settings['browser-open'] = 'none'
        obj.engine = EngineEmul()
        obj.client = client
        obj.prepare()

        client.session_id = 'sess1'
        client.master_id = 'master1'

        # a stopping reason is what would normally be appended as a note
        obj.engine.stopping_reason = ValueError('wrong value')
        obj.aggregated_second(random_datapoint(10))
        obj.kpi_buffer[-1][DataPoint.CUMULATIVE][''][KPISet.ERRORS] = [
            {'msg': 'Forbidden', 'cnt': 10, 'type': KPISet.ERRTYPE_ASSERT, 'urls': [], KPISet.RESP_CODES: '111'},
            {'msg': 'Allowed', 'cnt': 20, 'type': KPISet.ERRTYPE_ERROR, 'urls': [], KPISet.RESP_CODES: '222'}]
        obj.send_monitoring = obj.send_custom_metrics = obj.send_custom_tables = False
        obj.post_process()

        # check for note appending in _postproc_phase3()
        reqs = [{'url': '', 'data': ''} for _ in range(4)]     # add template for minimal size
        reqs = (reqs + obj.client.requests)[-4:]
        self.assertNotIn('api/latest/sessions/sess1', reqs[0]['url'])
        self.assertNotIn('api/latest/sessions/sess1', reqs[1]['url'])
        self.assertNotIn('api/latest/masters/master1', reqs[2]['url'])
        self.assertNotIn('api/latest/masters/master1', reqs[3]['url'])
        if reqs[1]['data']:
            self.assertNotIn('ValueError: wrong value', reqs[1]['data'])
        if reqs[3]['data']:
            self.assertNotIn('ValueError: wrong value', reqs[3]['data'])
# Example #2
    def test_create_collection(self):
        """When deprecated API is disabled, prepare() must create a cloud
        collection test (CloudCollectionTest) rather than a legacy test."""
        obj = CloudProvisioning()
        obj.engine = EngineEmul()
        obj.engine.config.merge({
            ScenarioExecutor.EXEC: {
                "executor": "mock",
            },
            "modules": {
                "mock": ModuleMock.__module__ + "." + ModuleMock.__name__
            },
            "provisioning": "mock"
        })
        obj.parameters = obj.engine.config['execution']

        obj.settings.merge({
            "token": "FakeToken",
            'default-location': "us-west-1",
            "delete-test-files": False,
            "use-deprecated-api": False
        })
        obj.client = client = BlazeMeterClientEmul(obj.log)
        # responses are consumed in call order — keep this sequence intact
        client.results.append({"result": []})  # find collection
        client.results.append({"result": []})  # find test
        client.results.append(self.__get_user_info())  # user
        client.results.append({})  # upload files
        client.results.append(
            {"result": {
                "name": "Taurus Collection",
                "items": []
            }})  # transform config to collection
        client.results.append({"result": {"id": 42}})  # create collection

        obj.prepare()
        self.assertIsInstance(obj.test, CloudCollectionTest)
# Example #3
    def test_aaskip_reporting(self):
        """Cloud provisioning must drop all 'blazemeter' entries from the
        reporting list (the cloud reports there already), keeping the rest
        in their original order."""
        obj = CloudProvisioning()
        obj.engine = EngineEmul()
        obj.engine.config.merge({
            ScenarioExecutor.EXEC: {
                "executor": "mock",
            },
            "modules": {
                "mock": ModuleMock.__module__ + "." + ModuleMock.__name__,
                "blazemeter": ModuleMock.__module__ + "." + ModuleMock.__name__,
                "second_reporter": ModuleMock.__module__ + "." + ModuleMock.__name__,
                "third_reporter": ModuleMock.__module__ + "." + ModuleMock.__name__,
            },
            "provisioning": "mock",
            # both plain-string and dict reporter forms must be filtered
            "reporting": ["blazemeter",
                          {"module": "blazemeter", "option": "value"},
                          "second_reporter",
                          {"module": "third_reporter"}]
        })
        obj.parameters = obj.engine.config['execution']

        obj.settings["token"] = "FakeToken"
        obj.settings['default-location'] = "us-west-1"
        obj.client = client = BlazeMeterClientEmul(obj.log)
        # responses are consumed in call order — keep this sequence intact
        client.results.append(self.__get_user_info())  # user
        client.results.append({"result": []})  # tests
        client.results.append({"result": {"id": id(client)}})  # create test
        client.results.append({"files": []})  # test files
        client.results.append({})  # upload files

        obj.prepare()
        modules = [reporter['module'] for reporter in obj.engine.config['reporting']]
        # assertEqual: assertEquals is a deprecated unittest alias
        self.assertEqual(modules, ['second_reporter', 'third_reporter'])
# Example #4
    def test_widget_cloud_collection(self):
        """The status widget must aggregate per-location agent counts from the
        sessions reported for a cloud collection test."""
        obj = CloudProvisioning()
        obj.client = BlazeMeterClientEmul(logging.getLogger(''))
        obj.test = CloudCollectionTest(obj.client, None, None, None, None,
                                       logging.getLogger(''))
        # first response: master status with 10 ready servers in one location
        obj.client.results.append({
            "result": {
                "sessions": [{
                    "id": "session-id",
                    "locationId": "loc-name",
                    "readyStatus": {
                        "servers": ["server" for _ in range(10)]
                    },
                }]
            }
        })
        # second response: session names used by the widget for display
        obj.client.results.append({
            "result": {
                "sessions": [{
                    "id": "session-id",
                    "name": "loc-name/scenario",
                    "configuration": {}
                }]
            }
        })
        obj.test.get_master_status()
        widget = obj.get_widget()
        widget.update()

        self.assertEqual("None #None\n scenario:\n  Agents in loc-name: 10\n",
                         widget.text.get_text()[0])
# Example #5
    def test_nonexistent_location(self):
        """Requesting a location unknown to the account must make prepare()
        raise TaurusConfigError."""
        obj = CloudProvisioning()
        obj.engine = EngineEmul()
        obj.engine.config.merge({
            ScenarioExecutor.EXEC: {
                "executor": "mock",
                "concurrency": 5500,
            },
            "modules": {
                "mock": ModuleMock.__module__ + "." + ModuleMock.__name__
            },
            "locations": {
                "us-not-found": 1,  # not present in the emulated user info
            },
            "provisioning": "mock"
        })
        obj.parameters = obj.engine.config['execution']
        obj.engine.aggregator = ConsolidatingAggregator()

        obj.settings["token"] = "FakeToken"
        obj.settings["browser-open"] = False
        obj.settings["use-deprecated-api"] = False

        obj.client = client = BlazeMeterClientEmul(obj.log)
        client.results.append({"result": []})  # collections
        client.results.append({"result": []})  # tests
        client.results.append(self.__get_user_info())  # user
        self.assertRaises(TaurusConfigError, obj.prepare)
    def test_widget(self):
        """The widget must tolerate sessions without location data at first
        and render per-location server counts once they appear."""
        obj = CloudProvisioning()
        obj.client = BlazeMeterClientEmul(logging.getLogger(''))
        obj.client.results.append({"result": []})
        # session initially has no location/serversCount
        obj.client.results.append({
            "result": {
                "sessions": [{
                    "name": "executor/scenario/location",
                    "configuration": {}
                }]
            }
        })

        # later the configuration carries the location and server count
        obj.client.results.append({
            "result": {
                "sessions": [{
                    "name": "executor/scenario/location",
                    "configuration": {
                        "location": "loc-name",
                        "serversCount": "10"
                    }
                }]
            }
        })

        widget = obj.get_widget()
        widget.update()
        widget.update()
        widget.update()
        widget.update()

        self.assertEqual(
            "None #None\n executor scenario:\n  Agents in loc-name: 10\n",
            widget.text.get_text()[0])
# Example #7
    def test_sandbox_default_location(self):
        """Without explicit locations the execution must fall back to the
        account's default location (us-west-1 in the emulated user info)."""
        obj = CloudProvisioning()
        obj.engine = EngineEmul()
        obj.engine.config.merge({
            ScenarioExecutor.EXEC: {
                "executor": "mock",
                "concurrency": 5500,
            },
            "modules": {
                "mock": ModuleMock.__module__ + "." + ModuleMock.__name__
            },
            "provisioning": "mock"
        })
        obj.parameters = obj.engine.config['execution']
        obj.engine.aggregator = ConsolidatingAggregator()

        obj.settings["token"] = "FakeToken"
        obj.settings["browser-open"] = False
        obj.client = client = BlazeMeterClientEmul(obj.log)
        # responses are consumed in call order — keep this sequence intact
        client.results.append({"result": []})  # collections
        client.results.append({"result": []})  # tests
        client.results.append(self.__get_user_info())  # user
        client.results.append({"result": {"id": id(client)}})  # create test
        client.results.append({"files": []})  # create test
        client.results.append({})  # upload files
        obj.prepare()
        exec_locations = obj.executors[0].execution['locations']
        # assertEqual: assertEquals is a deprecated unittest alias
        self.assertEqual(1, exec_locations['us-west-1'])
# Example #8
    def test_collection_defloc_sandbox(self):
        """For collection tests without explicit locations, the default harbor
        location from the user info must be assigned to the execution."""
        obj = CloudProvisioning()
        obj.engine = EngineEmul()
        obj.engine.config.merge({
            ScenarioExecutor.EXEC: {
                "executor": "mock",
                "concurrency": 5500,
            },
            "modules": {
                "mock": ModuleMock.__module__ + "." + ModuleMock.__name__
            },
            "provisioning": "mock"
        })
        obj.parameters = obj.engine.config['execution']
        obj.engine.aggregator = ConsolidatingAggregator()

        obj.settings["token"] = "FakeToken"
        obj.settings["browser-open"] = False
        obj.settings["use-deprecated-api"] = False
        obj.client = client = BlazeMeterClientEmul(obj.log)
        # responses are consumed in call order — keep this sequence intact
        client.results.append({"result": []})  # find collection
        client.results.append({"result": []})  # find test
        client.results.append(self.__get_user_info())  # user
        client.results.append({})  # upload files
        client.results.append(
            {"result": {
                "name": "Taurus Collection",
                "items": []
            }})  # transform config to collection
        client.results.append({"result": {"id": 42}})  # create collection
        obj.prepare()
        exec_locations = obj.executors[0].execution['locations']
        expected_location = 'harbor-5591335d8588531f5cde3a04'
        self.assertIn(expected_location, exec_locations)
        # assertEqual: assertEquals is a deprecated unittest alias
        self.assertEqual(1, exec_locations[expected_location])
# Example #9
    def test_delete_test_files(self):
        """With 'delete-test-files' enabled, existing test files must be
        removed from the found test before uploading new ones."""
        obj = CloudProvisioning()
        obj.engine = EngineEmul()
        obj.engine.config.merge({
            ScenarioExecutor.EXEC: {
                "executor": "mock",
            },
            "modules": {
                "mock": ModuleMock.__module__ + "." + ModuleMock.__name__
            },
            "provisioning": "mock"
        })
        obj.parameters = obj.engine.config['execution']

        obj.settings.merge({"token": "FakeToken",
                            "delete-test-files": True,
                            'default-location': "us-west-1",
                            })
        obj.client = client = BlazeMeterClientEmul(obj.log)
        # responses are consumed in call order — keep this sequence intact
        client.results.append(self.__get_user_info())  # user
        client.results.append({"result": [{"id": 5174715,
                                           "name": "Taurus Cloud Test",
                                           "configuration": {"type": "taurus"},}]})  # find test
        client.results.append({"files": [{"hash": "hash1", "name": "file1"},
                                         {"hash": "hash1", "name": "file2"}]})  # test files
        client.results.append({"removed": ["hash1", "hash2"]})  # remove test files
        client.results.append({})  # upload files

        obj.prepare()
        self.assertTrue(client.delete_files_before_test)
# Example #10
    def test_reuse_project_id(self):
        """When a project id is configured, an existing test belonging to that
        project must be reused instead of a new one being created."""
        obj = CloudProvisioning()
        obj.engine = EngineEmul()
        obj.engine.config.merge({
            ScenarioExecutor.EXEC: {
                "executor": "mock",
            },
            "modules": {
                "mock": ModuleMock.__module__ + "." + ModuleMock.__name__
            },
            "provisioning": "mock"
        })
        obj.parameters = obj.engine.config['execution']

        obj.settings.merge({
            "token": "FakeToken",
            'default-location': "us-west-1",
            "delete-test-files": False,
            "project": 1428  # must match projectId of the found test below
        })
        obj.client = client = BlazeMeterClientEmul(obj.log)
        client.results.append({"result": []})  # collections
        client.results.append({
            "result": [{
                "id": 5174715,
                "projectId": 1428,
                "name": "Taurus Cloud Test",
                "configuration": {
                    "type": "taurus"
                }
            }]
        })  # find test
        client.results.append(self.__get_user_info())  # user
        client.results.append({})  # upload files
        # no "create test" response queued: prepare() must not need one
        obj.prepare()
# Example #11
    def test_no_settings(self):
        """With no locations in execution config, the provisioning default
        location must be applied with weight 1."""
        obj = CloudProvisioning()
        obj.engine = EngineEmul()
        obj.engine.config.merge({
            ScenarioExecutor.EXEC: {
                "executor": "mock",
            },
            "modules": {
                "mock": ModuleMock.__module__ + "." + ModuleMock.__name__
            },
            "provisioning": "mock"
        })
        obj.parameters = obj.engine.config['execution']

        obj.settings["token"] = "FakeToken"
        obj.settings['default-location'] = "us-west-1"
        obj.client = client = BlazeMeterClientEmul(obj.log)
        # responses are consumed in call order — keep this sequence intact
        client.results.append(self.__get_user_info())  # user
        client.results.append({"result": []})  # tests
        client.results.append({"result": {"id": id(client)}})  # create test
        client.results.append({"files": []})  # test files
        client.results.append({})  # upload files

        obj.prepare()
        # assertEqual: assertEquals is a deprecated unittest alias
        self.assertEqual(1, obj.executors[0].execution['locations']['us-west-1'])
    def test_multiple_reporters_one_monitoring(self):
        """Two independent uploaders must both accept the same monitoring data
        stream without interfering with each other."""
        obj1 = BlazeMeterUploader()
        obj1.engine = EngineEmul()
        obj1.client = BlazeMeterClientEmul(logging.getLogger(''))
        obj1.client.results.append({"marker": "ping", 'result': {}})

        obj2 = BlazeMeterUploader()
        obj2.engine = EngineEmul()
        obj2.client = BlazeMeterClientEmul(logging.getLogger(''))
        obj2.client.results.append({"marker": "ping", 'result': {}})

        obj1.prepare()
        obj2.prepare()

        # feed identical monitoring points into both uploaders
        for i in range(10):
            mon = [{"ts": i, "source": "local", "cpu": float(i) / 1000 * 100, "mem": 2, "bytes-recv": 100, "other": 0}]
            obj1.monitoring_data(mon)
            obj2.monitoring_data(mon)
# Example #13
 def test_results_link_cloud(self):
     """When provisioning is CloudProvisioning, the JUnit XML reporter must
     report the cloud results URL with its configured test name."""
     obj = JUnitXMLReporter()
     obj.engine = EngineEmul()
     obj.engine.provisioning = CloudProvisioning()
     obj.engine.provisioning.client = BlazeMeterClientEmul(obj.log)
     prov = obj.engine.provisioning
     prov.client.results_url = 'url1'
     prov.settings.merge({'test': 'test1'})
     report_info = obj.get_bza_report_info()
     self.assertEqual(report_info, [('Cloud report link: url1\n', 'test1')])
# Example #14
    def test_full_collection(self):
        """Full lifecycle of a cloud collection test (prepare/startup/check/
        shutdown/post_process); location weights from the execution config
        must be kept as-is."""
        obj = CloudProvisioning()
        obj.engine = EngineEmul()
        obj.engine.config.merge({
            ScenarioExecutor.EXEC: {
                "executor": "mock",
                "concurrency": 5500,
                "locations": {
                    "us-east-1": 1,
                    "us-west": 2
                }
            },
            "modules": {
                "mock": ModuleMock.__module__ + "." + ModuleMock.__name__
            },
            "provisioning": "mock"
        })
        obj.parameters = obj.engine.config['execution']
        obj.engine.aggregator = ConsolidatingAggregator()

        obj.settings["token"] = "FakeToken"
        obj.settings["browser-open"] = False
        obj.settings["use-deprecated-api"] = False
        obj.client = client = BlazeMeterClientEmul(obj.log)
        # responses are consumed in call order — keep this sequence intact
        client.results.append({"result": []})  # collections
        client.results.append({"result": []})  # tests
        client.results.append(self.__get_user_info())  # user
        client.results.append({"files": []})  # upload files
        client.results.append(
            {"result": {
                "name": "Taurus Collection",
                "items": []
            }})  # transform config to collection
        client.results.append({"result": {
            "id": id(client)
        }})  # create collection
        client.results.append({"result": {"id": id(obj)}})  # start
        client.results.append({"result": {
            "id": id(obj),
            "sessions": []
        }})  # get master
        client.results.append({"result": []})  # get master sessions
        client.results.append({})  # terminate

        obj.prepare()
        # assertEqual: assertEquals is a deprecated unittest alias
        self.assertEqual(1,
                         obj.executors[0].execution['locations']['us-east-1'])
        self.assertEqual(2,
                         obj.executors[0].execution['locations']['us-west'])

        obj.startup()
        obj.check()
        obj.shutdown()
        obj.post_process()
# Example #15
 def test_results_link_blazemeter(self):
     """With local provisioning plus a BlazeMeterUploader reporter, the JUnit
     XML reporter must surface the uploader's report link and test name."""
     obj = JUnitXMLReporter()
     obj.engine = EngineEmul()
     obj.engine.provisioning = Local()
     obj.engine.reporters.append(BlazeMeterUploader())
     obj.engine.provisioning.client = BlazeMeterClientEmul(obj.log)
     rep = obj.engine.reporters[0]
     rep.client.results_url = 'url2'
     rep.parameters.merge({'test': 'test2'})
     report_info = obj.get_bza_report_info()
     self.assertEqual(report_info,
                      [('BlazeMeter report link: url2\n', 'test2')])
    def test_public_report(self):
        """With 'public-report' enabled, the uploader must request a public
        token and log the shareable report link."""
        client = BlazeMeterClientEmul(logging.getLogger(''))
        client.timeout = 1
        # responses are consumed in call order — keep this sequence intact
        client.results.append({"marker": "ping", 'result': {}})
        client.results.append({"marker": "tests", 'result': {}})
        client.results.append({"marker": "test-create", 'result': {'id': 'unittest1'}})
        client.results.append(
            {"marker": "sess-start",
             "result": {
                 'session': {'id': 'sess1', 'userId': 1},
                 'master': {'id': 'master1', 'userId': 1},
                 'signature': ''}})
        client.results.append({"marker": "share-report", 'result': {'publicToken': 'publicToken'}})
        client.results.append({"marker": "first push", 'result': {'session': {}}})
        client.results.append({"marker": "post-proc push", 'result': {'session': {}}})
        client.results.append({"marker": "artifacts push", 'result': True})
        client.results.append({"marker": "logs push", 'result': True})
        client.results.append({"marker": "terminate", 'result': {'session': {}}})

        log_recorder = RecordingHandler()

        obj = BlazeMeterUploader()
        obj.settings['token'] = '123'
        obj.settings['browser-open'] = 'none'
        obj.settings['public-report'] = True
        obj.settings['send-monitoring'] = False
        obj.engine = EngineEmul()
        obj.client = client
        obj.log.addHandler(log_recorder)
        obj.prepare()
        obj.startup()
        obj.aggregated_second(random_datapoint(10))
        obj.check()
        obj.shutdown()
        obj.post_process()
        # all queued responses must have been consumed by the full lifecycle
        self.assertEqual(0, len(client.results))

        log_buff = log_recorder.info_buff.getvalue()
        log_line = "Public report link: https://a.blazemeter.com/app/?public-token=publicToken#/masters/master1/summary"
        self.assertIn(log_line, log_buff)
 def test_monitoring_buffer_limit_option(self):
     """The 'monitoring-buffer-limit' setting must cap every per-source
     monitoring buffer, no matter how many points are fed in."""
     obj = BlazeMeterUploader()
     obj.engine = EngineEmul()
     obj.client = BlazeMeterClientEmul(logging.getLogger(''))
     obj.client.results.append({"marker": "ping", 'result': {}})
     obj.settings["monitoring-buffer-limit"] = 100
     obj.prepare()
     for i in range(1000):
         mon = [{"ts": i, "source": "local", "cpu": float(i) / 1000 * 100, "mem": 2, "bytes-recv": 100, "other": 0}]
         obj.monitoring_data(mon)
         # 'buff' rather than 'buffer': avoid shadowing the builtin name
         for source, buff in iteritems(obj.monitoring_buffer.data):
             self.assertLessEqual(len(buff), 100)
     self.assertEqual(0, len(obj.client.results))
# Example #18
    def test_locations_on_both_levels(self):
        """When locations are given both globally and per-execution, the
        per-execution ones win, the global key is dropped from the generated
        cloud config, and a warning is logged."""
        obj = CloudProvisioning()
        obj.engine = EngineEmul()
        obj.engine.config.merge({
            ScenarioExecutor.EXEC: [{
                "executor": "mock",
                "concurrency": 5500,
                "locations": {
                    "eu-west-1": 1,
                }
            }],
            "locations": {
                "ams3": 1,
            },
            "modules": {
                "mock": ModuleMock.__module__ + "." + ModuleMock.__name__
            },
            "provisioning":
            "mock"
        })
        obj.parameters = obj.engine.config['execution'][0]
        obj.engine.aggregator = ConsolidatingAggregator()
        log_recorder = RecordingHandler()
        obj.log.addHandler(log_recorder)

        obj.settings["token"] = "FakeToken"
        obj.settings["browser-open"] = False
        obj.settings["use-deprecated-api"] = False
        obj.client = client = BlazeMeterClientEmul(obj.log)
        # responses are consumed in call order — keep this sequence intact
        client.results.append({"result": []})  # find test
        client.results.append({"result": []})  # find collection
        client.results.append(self.__get_user_info())  # user
        client.results.append({})  # upload files
        client.results.append(
            {"result": {
                "name": "Taurus Collection",
                "items": []
            }})  # transform config to collection
        client.results.append({"result": {"id": 42}})  # create collection
        obj.prepare()

        # use a context manager so the config file handle is always closed
        with open(os.path.join(obj.engine.artifacts_dir, "cloud.yml")) as fds:
            cloud_config = yaml.load(fds)
        self.assertNotIn("locations", cloud_config)
        for execution in cloud_config["execution"]:
            self.assertIn("locations", execution)
        log_buff = log_recorder.warn_buff.getvalue()
        self.assertIn(
            "Each execution has locations specified, global locations won't have any effect",
            log_buff)
# Example #19
    def test_check(self):
        """Full uploader lifecycle: prepare creates a project and a test,
        check() pushes buffered KPI data, and an 'ENDED' session status in a
        push response must make check() raise KeyboardInterrupt."""
        client = BlazeMeterClientEmul(logging.getLogger(''))
        # responses are consumed in call order — keep this sequence intact
        client.results.append({"marker": "ping", 'result': {}})
        client.results.append({"marker": "projects", 'result': []})

        client.results.append({"marker": "project-create", 'result': {
            "id": time.time(),
            "name": "boo",
            "userId": time.time(),
            "description": None,
            "created": time.time(),
            "updated": time.time(),
            "organizationId": None
        }})
        client.results.append({"marker": "tests", 'result': {}})
        client.results.append({"marker": "test-create", 'result': {'id': 'unittest1'}})
        client.results.append(
            {"marker": "sess-start", 'result': {'session': {'id': 'sess1', 'userId': 1}, 'signature': ''}})
        client.results.append({"marker": "first push", 'result': {'session': {}}})
        # client.results.append(None)  # first check error stats
        client.results.append(
            {"marker": "second push", 'result': {'session': {"statusCode": 140, 'status': 'ENDED'}}})
        # client.results.append(None)  # second check error stats
        client.results.append({"marker": "post-proc push", 'result': {'session': {}}})
        client.results.append({"marker": "upload1", "result": True})  # post-proc error stats
        client.results.append({"marker": "terminate", 'result': {'session': {}}})

        obj = BlazeMeterUploader()
        obj.parameters['project'] = 'Proj name'
        obj.settings['token'] = '123'
        obj.settings['browser-open'] = 'none'
        obj.engine = EngineEmul()
        # put an artifact file in place so the post-process upload has content
        shutil.copy(__file__, obj.engine.artifacts_dir + os.path.basename(__file__))
        obj.client = client
        obj.prepare()
        obj.startup()
        for x in range(0, 31):
            obj.aggregated_second(random_datapoint(x))
        obj.check()
        for x in range(32, 65):
            obj.aggregated_second(random_datapoint(x))
        try:
            obj.check()
            # the 'ENDED' status above must abort the run
            self.fail()
        except KeyboardInterrupt:
            pass
        obj.aggregated_second(random_datapoint(10))
        obj.shutdown()
        obj.post_process()
# Example #20
    def test_settings_from_blazemeter_mod(self):
        """Settings from the 'blazemeter' module config must be picked up by
        cloud provisioning, but explicit provisioning settings take priority."""
        obj = CloudProvisioning()
        obj.engine = EngineEmul()
        obj.engine.config.merge({
            ScenarioExecutor.EXEC: {
                "executor": "mock",
                "concurrency": 5500,
                "locations": {
                    "us-east-1": 1,
                    "us-west": 1,
                }
            },
            "modules": {
                "mock": ModuleMock.__module__ + "." + ModuleMock.__name__,
                "blazemeter": {
                    "class": ModuleMock.__module__ + "." + ModuleMock.__name__,
                    "token": "bmtoken",
                    "detach": True,
                    "browser-open": None,
                    "check-interval": 10.0,
                }
            },
            "provisioning": "mock"
        })
        obj.parameters = obj.engine.config['execution']
        obj.engine.aggregator = ConsolidatingAggregator()

        # these should override 'blazemeter' settings
        obj.settings["check-interval"] = 20.0
        obj.settings["browser-open"] = "both"

        obj.client = client = BlazeMeterClientEmul(obj.log)
        client.results.append({"result": []})  # collection
        client.results.append({"result": []})  # tests
        client.results.append(self.__get_user_info())  # user
        client.results.append({"result": {"id": id(client)}})  # create test
        client.results.append({"files": []})  # create test
        client.results.append({})  # upload files

        obj.prepare()

        self.assertEqual(obj.detach, True)
        self.assertEqual(obj.browser_open, "both")
        self.assertEqual(obj.client.token, "bmtoken")
        self.assertEqual(obj.check_interval, 20.0)

        self.assertEqual(client.results, [])
# Example #21
    def test_simple(self):
        """Basic cloud run lifecycle; the widget must show the final location
        weights (doubled relative to config by the provisioning logic —
        presumably via user-info limits; verify against __get_user_info)."""
        obj = CloudProvisioning()
        obj.engine = EngineEmul()
        obj.engine.config.merge({
            "execution": {
                "executor": "mock",
                "concurrency": 5500,
                "locations": {
                    "us-east-1": 1,
                    "us-west": 2
                }
            },
            "modules": {
                "mock": ModuleMock.__module__ + "." + ModuleMock.__name__
            },
            "provisioning": "mock"
        })
        obj.parameters = obj.engine.config['execution']
        obj.engine.aggregator = ConsolidatingAggregator()

        obj.settings["token"] = "FakeToken"
        obj.client = client = BlazeMeterClientEmul(obj.log)
        # responses are consumed in call order — keep this sequence intact
        client.results.append(self.__get_user_info())  # user
        client.results.append({"result": []})  # tests
        client.results.append({"result": {"id": id(client)}})  # create test
        client.results.append({})  # upload files
        client.results.append({"result": {"id": id(obj)}})  # start
        client.results.append({"result": {"id": id(obj)}})  # get master
        client.results.append({})  # terminate

        obj.prepare()
        widget = obj.get_widget()
        # assertEqual: assertEquals is a deprecated unittest alias
        self.assertEqual(2,
                         obj.executors[0].execution['locations']['us-east-1'])
        self.assertEqual(4,
                         obj.executors[0].execution['locations']['us-west'])

        obj.startup()
        obj.check()
        widget.render((200, ), False)
        txt = widget.text.get_text()[0]
        logging.info("Text: '%s'", txt)
        self.assertIn("us-east-1: 2", txt)
        self.assertIn("us-west: 4", txt)
        obj.shutdown()
        obj.post_process()
# Example #22
    def test_check_interval(self):
        """'check-interval' must throttle check(): calls made before the
        interval elapses must not hit the API (queued responses prove it)."""
        obj = CloudProvisioning()
        obj.engine = EngineEmul()
        obj.engine.config.merge({
            ScenarioExecutor.EXEC: {
                "executor": "mock",
                "concurrency": 5500,
                "locations": {
                    "us-east-1": 1,
                    "us-west": 1,
                }
            },
            "modules": {
                "mock": ModuleMock.__module__ + "." + ModuleMock.__name__
            },
            "provisioning": "mock"
        })
        obj.parameters = obj.engine.config['execution']
        obj.engine.aggregator = ConsolidatingAggregator()

        obj.settings["token"] = "FakeToken"
        obj.settings["browser-open"] = False
        obj.settings["check-interval"] = "1s"
        obj.client = client = BlazeMeterClientEmul(obj.log)
        # responses are consumed in call order — keep this sequence intact
        client.results.append({"result": []})  # collection
        client.results.append({"result": []})  # tests
        client.results.append(self.__get_user_info())  # user
        client.results.append({"result": {"id": id(client)}})  # create test
        client.results.append({"files": []})  # create test
        client.results.append({})  # upload files
        client.results.append({"result": {"id": id(obj)}})  # start test
        client.results.append({"result": {"id": id(obj)}})  # status
        client.results.append({"result": []})  # sessions
        client.results.append({"result": {"id": id(obj)}})  # status
        client.results.append({"result": []})  # sessions

        obj.prepare()
        obj.startup()
        obj.check()  # this one should work
        obj.check()  # this one should be skipped
        time.sleep(1)
        obj.check()  # this one should work
        obj.check()  # this one should skip

        # exactly two checks consumed responses, so the queue is empty
        self.assertEqual(client.results, [])
# Example #23
    def test_pack_and_send_to_blazemeter(self):
        """A directory-based selenium scenario must be zipped into the
        artifacts dir and an Unpacker service entry added to the config."""
        obj = CloudProvisioning()
        obj.engine = EngineEmul()

        obj.engine.config.merge({
            "execution": {
                "executor": "selenium",
                "concurrency": 5500,
                "locations": {
                    "us-east-1": 1,
                    "us-west": 2
                },
                "scenario": {
                    "script": __dir__() + "/../selenium/java_package"
                }
            },
            "modules": {
                "selenium": "bzt.modules.selenium.SeleniumExecutor",
                "cloud": "bzt.modules.blazemeter.CloudProvisioning"
            },
            "provisioning": "cloud"
        })

        obj.parameters = obj.engine.config['execution']
        obj.settings["token"] = "FakeToken"
        obj.client = client = BlazeMeterClientEmul(obj.log)
        # responses are consumed in call order — keep this sequence intact
        client.results.append(self.__get_user_info())  # user
        client.results.append({"result": []})  # tests
        client.results.append({"result": {"id": id(client)}})  # create test
        client.results.append({"files": []})  # create test
        client.results.append({})  # upload files
        client.results.append({"result": {"id": id(obj)}})  # start
        client.results.append({"result": {"id": id(obj)}})  # get master
        client.results.append({"result": []})  # get master sessions
        client.results.append({})  # terminate

        obj.prepare()

        unpack_cfgs = obj.engine.config.get(Service.SERV)
        self.assertEqual(len(unpack_cfgs), 1)
        self.assertEqual(unpack_cfgs[0]['module'], Unpacker.UNPACK)
        self.assertEqual(unpack_cfgs[0][Unpacker.FILES], ['java_package.zip'])
        self.assertTrue(
            zipfile.is_zipfile(obj.engine.artifacts_dir + '/java_package.zip'))
Example #24
0
    def test_toplevel_locations(self):
        """Top-level `locations` / `locations-weighted` keys must survive into
        the dumped cloud config while being stripped from each execution item.
        """
        obj = CloudProvisioning()
        obj.engine = EngineEmul()
        obj.engine.config.merge({
            ScenarioExecutor.EXEC: {
                "executor": "mock",
                "concurrency": 5500,
            },
            "modules": {
                "mock": ModuleMock.__module__ + "." + ModuleMock.__name__
            },
            "locations": {
                "us-east-1": 1,
                "us-west": 2
            },
            "locations-weighted": True,
            "provisioning": "mock"
        })
        obj.parameters = obj.engine.config['execution']
        obj.engine.aggregator = ConsolidatingAggregator()

        obj.settings["token"] = "FakeToken"
        obj.settings["browser-open"] = False
        obj.settings["use-deprecated-api"] = False
        obj.client = client = BlazeMeterClientEmul(obj.log)
        # queued responses are consumed in this exact order by prepare()
        client.results.append({"result": []})  # collections
        client.results.append({"result": []})  # tests
        client.results.append(self.__get_user_info())  # user
        client.results.append({})  # upload files
        client.results.append(
            {"result": {
                "name": "Taurus Collection",
                "items": []
            }})  # transform config to collection
        client.results.append({"result": {"id": 42}})  # create collection
        obj.prepare()

        # FIX: the original `yaml.load(open(...))` leaked the file handle and
        # relied on yaml.load's deprecated implicit Loader; use a context
        # manager and the explicit FullLoader (same semantics, no warning)
        with open(os.path.join(obj.engine.artifacts_dir, "cloud.yml")) as fds:
            conf = yaml.load(fds, Loader=yaml.FullLoader)
        self.assertIn('locations', conf)
        self.assertIn('locations-weighted', conf)
        self.assertEqual(conf['locations']['us-east-1'], 1)
        self.assertEqual(conf['locations']['us-west'], 2)
        self.assertNotIn('locations', conf['execution'][0])
Example #25
0
    def test_cloud_config_cleanup(self):
        """Schedule-related keys must not leak into the cloud execution config."""
        prov = CloudProvisioning()
        prov.engine = EngineEmul()
        prov.engine.config.merge({
            ScenarioExecutor.EXEC: {
                "executor": "mock",
                "concurrency": 5500,
                "locations": {"us-east-1": 1, "us-west": 2},
            },
            "modules": {"mock": ModuleMock.__module__ + "." + ModuleMock.__name__},
            "provisioning": "mock",
        })
        prov.parameters = prov.engine.config['execution']
        prov.engine.aggregator = ConsolidatingAggregator()

        prov.settings["token"] = "FakeToken"
        prov.client = emul = BlazeMeterClientEmul(prov.log)
        # mock responses, consumed in order during prepare()
        emul.results.extend([
            self.__get_user_info(),        # user
            {"result": []},                # tests
            {"result": {"id": id(emul)}},  # create test
            {"files": []},                 # create test
            {},                            # upload files
            {"result": {"id": id(prov)}},  # start
            {"result": {"id": id(prov)}},  # get master
            {"result": []},                # get master sessions
            {},                            # terminate
        ])
        prov.prepare()

        cloud_config = prov.get_config_for_cloud()
        execution = cloud_config["execution"][0]
        for forbidden in ("throughput", "ramp-up", "hold-for", "steps"):
            self.assertNotIn(forbidden, execution)
Example #26
0
    def test_detach(self):
        """With `detach` set, startup() drains the request queue and the later
        lifecycle phases must not issue any further API calls."""
        prov = CloudProvisioning()
        prov.engine = EngineEmul()
        prov.engine.config.merge({
            ScenarioExecutor.EXEC: {
                "executor": "mock",
                "concurrency": 55,
                "locations": {"us-east-1": 1, "us-west": 2},
            },
            "modules": {"mock": ModuleMock.__module__ + "." + ModuleMock.__name__},
            "provisioning": "mock",
        })
        prov.parameters = prov.engine.config['execution']

        prov.settings["token"] = "FakeToken"
        prov.settings["detach"] = True
        prov.settings["browser-open"] = False
        prov.client = emul = BlazeMeterClientEmul(prov.log)
        # mock responses, consumed in order
        emul.results.extend([
            {"result": []},                # collections
            {"result": []},                # tests
            self.__get_user_info(),        # user
            {"result": {"id": id(emul)}},  # create test
            {"files": []},                 # create test
            {},                            # upload files
            {"result": {"id": id(prov)}},  # start
        ])

        prov.prepare()
        self.assertEqual(1, len(emul.results))
        prov.startup()
        self.assertEqual([], emul.results)
        prov.check()
        prov.shutdown()
        prov.post_process()
Example #27
0
    def test_dump_locations_new_style(self):
        """`dump-locations` must list available locations and abort the run
        with ManualShutdown instead of starting a test."""
        prov = CloudProvisioning()
        prov.engine = EngineEmul()
        recorder = RecordingHandler()
        prov.log.addHandler(recorder)

        prov.settings["dump-locations"] = True
        prov.settings["token"] = "FakeToken"
        prov.settings["use-deprecated-api"] = False
        prov.client = BlazeMeterClientEmul(prov.log)
        prov.client.results.append(self.__get_user_info())
        self.assertRaises(ManualShutdown, prov.prepare)

        warn_text = recorder.warn_buff.getvalue()
        self.assertIn(
            "Dumping available locations instead of running the test",
            warn_text)
        info_text = recorder.info_buff.getvalue()
        for expected in ("Location: DFW\tDallas (Rackspace)",
                         "Location: us-west-2\tUS West (Oregon)",
                         "Location: harbor-5591335d8588531f5cde3a04\tSandbox"):
            self.assertIn(expected, info_text)

        prov.post_process()
Example #28
0
    def test_datapoint(self):
        """ResultsFromBZA should translate master label/KPI/summary responses
        into DataPoints carrying the expected cumulative percentiles."""
        emul = BlazeMeterClientEmul(logging.getLogger(""))
        # response 1: label listing
        emul.results.append({
            "api_version": 2,
            "error": None,
            "result": [
                {"sessions": ["r-t-5746a8e38569a"], "id": "ALL", "name": "ALL"},
                {"sessions": ["r-t-5746a8e38569a"],
                 "id": "e843ff89a5737891a10251cbb0db08e5",
                 "name": "http://blazedemo.com/"},
            ],
        })
        # response 2: per-second KPI data for the ALL label
        emul.results.append({
            "api_version": 2,
            "error": None,
            "result": [{
                "labelId": "ALL",
                "labelName": "ALL",
                "label": "ALL",
                "kpis": [{"n": 1, "na": 1, "ec": 0, "p90": 0, "t_avg": 817,
                          "lt_avg": 82, "by_avg": 0, "n_avg": 1, "ec_avg": 0,
                          "ts": 1464248743}],
            }],
        })
        # response 3: aggregate summary; both labels share identical stats
        summary_stats = {
            "samples": 152,
            "avgResponseTime": 786,
            "90line": 836,
            "95line": 912,
            "99line": 1050,
            "minResponseTime": 531,
            "maxResponseTime": 1148,
            "avgLatency": 81,
            "geoMeanResponseTime": None,
            "stDev": 108,
            "duration": 119,
            "avgBytes": 0,
            "avgThroughput": 1.2773109243697,
            "medianResponseTime": 0,
            "errorsCount": 0,
            "errorsRate": 0,
            "hasLabelPassedThresholds": None,
        }
        all_label = {"labelId": "ALL", "labelName": "ALL"}
        all_label.update(summary_stats)
        url_label = {"labelId": "e843ff89a5737891a10251cbb0db08e5",
                     "labelName": "http://blazedemo.com/"}
        url_label.update(summary_stats)
        emul.results.append({"api_version": 2, "error": None,
                             "result": [all_label, url_label]})

        reader = ResultsFromBZA(emul)
        reader.master_id = 0

        points = list(reader.datapoints(True))
        cumulative = points[0][DataPoint.CUMULATIVE]['']
        self.assertEquals(1050, cumulative[KPISet.PERCENTILES]['99.0'])
Example #29
0
    def test_some_errors(self):
        """post_process() must drain the whole mocked response queue even when
        the datapoint carries error entries and the engine has a stop reason."""
        emul = BlazeMeterClientEmul(logging.getLogger(''))
        # mock responses, consumed strictly in order through the lifecycle
        emul.results.extend([
            {"marker": "ping", 'result': {}},
            {"marker": "projects", 'result': []},
            {"marker": "project-create", 'result': {
                "id": time.time(),
                "name": "boo",
                "userId": time.time(),
                "description": None,
                "created": time.time(),
                "updated": time.time(),
                "organizationId": None,
            }},
            {"marker": "tests", 'result': {}},
            {"marker": "test-create", 'result': {'id': 'unittest1'}},
            {"marker": "sess-start",
             "result": {'session': {'id': 'sess1', 'userId': 1},
                        'master': {'id': 'master1', 'userId': 1},
                        'signature': ''}},
            {"marker": "post-proc push", 'result': {'session': {}}},
            {"marker": "upload1", "result": True},  # post-proc error stats
            {"marker": "terminate", 'result': {'session': {}}},
            {"marker": "terminate2", 'result': {'session': {}}},
            {"marker": "sess-e",
             "result": {'session': {'id': 'sess1', 'note': 'n'}}},
            {"marker": "sess-e", "result": {'session': {}}},
            {"marker": "sess-e",
             "result": {'master': {'id': 'sess1', 'note': 'n'}}},
            {"marker": "sess-e", "result": {'master': {}}},
            {"marker": "upload-file", "result": {}},
        ])

        uploader = BlazeMeterUploader()
        uploader.parameters['project'] = 'Proj name'
        uploader.settings['token'] = '123'
        uploader.settings['browser-open'] = 'none'
        uploader.engine = EngineEmul()
        uploader.client = emul
        uploader.prepare()
        uploader.startup()
        uploader.engine.stopping_reason = ValueError('wrong value')
        uploader.aggregated_second(random_datapoint(10))
        # inject error records into the last datapoint's cumulative KPI set
        uploader.kpi_buffer[-1][DataPoint.CUMULATIVE][''][KPISet.ERRORS] = [
            {'msg': 'Forbidden', 'cnt': 7373, 'type': KPISet.ERRTYPE_ASSERT,
             'urls': [], KPISet.RESP_CODES: '403'},
            {'msg': 'Allowed', 'cnt': 7373, 'type': KPISet.ERRTYPE_ERROR,
             'urls': [], KPISet.RESP_CODES: '403'},
        ]
        uploader.post_process()
        self.assertEqual(0, len(emul.results))
Example #30
0
    def test_check(self):
        """Full uploader lifecycle against a scripted mock: prepare/startup,
        periodic pushes (tolerating expected monitoring/metric IOErrors),
        forced interrupt on ENDED session status, then post_process artifact
        and log upload. Passes only if every queued response is consumed."""
        client = BlazeMeterClientEmul(logging.getLogger(''))
        client.timeout = 1
        # queued responses are consumed strictly in order
        client.results.append({"marker": "ping", 'result': {}})
        client.results.append({"marker": "projects", 'result': []})

        client.results.append({"marker": "project-create", 'result': {
            "id": time.time(),
            "name": "boo",
            "userId": time.time(),
            "description": None,
            "created": time.time(),
            "updated": time.time(),
            "organizationId": None
        }})
        client.results.append({"marker": "tests", 'result': {}})
        client.results.append({"marker": "test-create", 'result': {'id': 'unittest1'}})
        client.results.append({
            "marker": "sess-start",
            "result": {
                'session': {'id': 'sess1', 'userId': 1},
                'master': {'id': 'master1', 'userId': 1},
                'signature': ''}})
        client.results.append({"marker": "first push", 'result': {'session': {}}})
        # transient I/O failures must be tolerated by monitoring/metric pushes
        client.results.append(IOError("monitoring push expected fail"))
        client.results.append({"marker": "mon push", "result": True})
        client.results.append(IOError("custom metric push expected fail"))
        client.results.append({"marker": "custom metrics push", "result": True})
        # 'ENDED' status should make the subsequent check() raise KeyboardInterrupt
        client.results.append({"marker": "second push",
                               'result': {'session': {"statusCode": 140, 'status': 'ENDED'}}})
        client.results.append({"marker": "post-proc push", 'result': {'session': {}}})
        client.results.append({"marker": "post process monitoring push", "result": True})
        client.results.append({"marker": "post process custom metrics push", "result": True})
        client.results.append({"marker": "artifacts push", 'result': True})
        client.results.append({"marker": "logs push", 'result': True})
        client.results.append({"marker": "terminate", 'result': {'session': {}}})

        obj = BlazeMeterUploader()
        obj.parameters['project'] = 'Proj name'
        obj.settings['token'] = '123'
        obj.settings['browser-open'] = 'none'
        obj.settings['send-custom-metrics'] = True
        obj.settings['send-custom-tables'] = True
        obj.engine = EngineEmul()
        # FIX: the original concatenated artifacts_dir + basename with no path
        # separator, so the copy landed *beside* the artifacts dir under a
        # fused name; os.path.join places it inside the dir as intended
        shutil.copy(__file__,
                    os.path.join(obj.engine.artifacts_dir, os.path.basename(__file__)))
        obj.client = client
        obj.prepare()
        obj.startup()
        for x in range(0, 31):
            obj.aggregated_second(random_datapoint(x))
        mon = [{"ts": 1, "source": "local", "cpu": 1, "mem": 2, "bytes-recv": 100, "other": 0},
               {"ts": 1, "source": "chrome", "memory": 32, "cpu": 23}]
        obj.monitoring_data(mon)
        obj.check()
        for x in range(32, 65):
            obj.aggregated_second(random_datapoint(x))
        self.assertRaises(KeyboardInterrupt, obj.check)
        obj.aggregated_second(random_datapoint(10))
        obj.shutdown()
        log_file = obj.engine.create_artifact('log', '.tmp')
        obj.engine.log.parent.handlers.append(logging.FileHandler(log_file))
        obj.post_process()
        self.assertEqual(0, len(client.results))
    def test_check(self):
        """Uploader lifecycle with the TAURUS_INDEX_ALL shellexec env flag set
        before post_process.

        NOTE(review): this method shares its name with an earlier test_check
        in the same class; in a real unittest class the later definition
        shadows the earlier one so only this variant runs — confirm whether
        the duplicate name is intentional."""
        client = BlazeMeterClientEmul(logging.getLogger(''))
        client.timeout = 1
        # queued responses are consumed strictly in order
        client.results.append({"marker": "ping", 'result': {}})
        client.results.append({"marker": "projects", 'result': []})

        client.results.append({"marker": "project-create", 'result': {
            "id": time.time(),
            "name": "boo",
            "userId": time.time(),
            "description": None,
            "created": time.time(),
            "updated": time.time(),
            "organizationId": None
        }})
        client.results.append({"marker": "tests", 'result': {}})
        client.results.append({"marker": "test-create", 'result': {'id': 'unittest1'}})
        client.results.append(
            {"marker": "sess-start",
             "result": {
                 'session': {'id': 'sess1', 'userId': 1},
                 'master': {'id': 'master1', 'userId': 1},
                 'signature': ''}})
        client.results.append({"marker": "first push", 'result': {'session': {}}})
        # transient I/O failures must be tolerated by monitoring/metric pushes
        client.results.append(IOError("monitoring push expected fail"))
        client.results.append({"marker": "mon push", "result": True})
        client.results.append(IOError("custom metric push expected fail"))
        client.results.append({"marker": "custom metrics push", "result": True})
        # 'ENDED' status should make the subsequent check() raise KeyboardInterrupt
        client.results.append({"marker": "second push", 'result': {'session': {"statusCode": 140, 'status': 'ENDED'}}})
        client.results.append({"marker": "post-proc push", 'result': {'session': {}}})
        client.results.append({"marker": "post process monitoring push", "result": True})
        client.results.append({"marker": "post process custom metrics push", "result": True})
        client.results.append({"marker": "artifacts push", 'result': True})
        client.results.append({"marker": "logs push", 'result': True})
        client.results.append({"marker": "terminate", 'result': {'session': {}}})

        obj = BlazeMeterUploader()
        obj.parameters['project'] = 'Proj name'
        obj.settings['token'] = '123'
        obj.settings['browser-open'] = 'none'
        obj.settings['send-custom-metrics'] = True
        obj.settings['send-custom-tables'] = True
        obj.engine = EngineEmul()
        # FIX: the original concatenated artifacts_dir + basename with no path
        # separator, so the copy landed *beside* the artifacts dir under a
        # fused name; os.path.join places it inside the dir as intended
        shutil.copy(__file__, os.path.join(obj.engine.artifacts_dir, os.path.basename(__file__)))
        obj.client = client
        obj.prepare()
        obj.startup()
        for x in range(0, 31):
            obj.aggregated_second(random_datapoint(x))
        mon = [{"ts": 1, "source": "local", "cpu": 1, "mem": 2, "bytes-recv": 100, "other": 0},
               {"ts": 1, "source": "chrome", "memory": 32, "cpu": 23}]
        obj.monitoring_data(mon)
        obj.check()
        for x in range(32, 65):
            obj.aggregated_second(random_datapoint(x))
        self.assertRaises(KeyboardInterrupt, obj.check)
        obj.aggregated_second(random_datapoint(10))
        obj.shutdown()
        log_file = obj.engine.create_artifact('log', '.tmp')
        obj.engine.log.parent.handlers.append(logging.FileHandler(log_file))
        obj.engine.config.get('modules').get('shellexec').get('env')['TAURUS_INDEX_ALL'] = 1
        obj.post_process()
        self.assertEqual(0, len(client.results))