Example #1
    def test_widget_cloud_collection(self):
        obj = CloudProvisioning()
        obj.client = BlazeMeterClientEmul(logging.getLogger(''))
        obj.test = CloudCollectionTest(obj.client, None, None, None, None,
                                       logging.getLogger(''))
        obj.client.results.append({
            "result": {
                "sessions": [{
                    "id": "session-id",
                    "locationId": "loc-name",
                    "readyStatus": {
                        "servers": ["server" for _ in range(10)]
                    },
                }]
            }
        })
        obj.client.results.append({
            "result": {
                "sessions": [{
                    "id": "session-id",
                    "name": "loc-name/scenario",
                    "configuration": {}
                }]
            }
        })
        obj.test.get_master_status()
        widget = obj.get_widget()
        widget.update()

        self.assertEqual("None #None\n scenario:\n  Agents in loc-name: 10\n",
                         widget.text.get_text()[0])
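
Every example in this collection drives a client emulator whose `results` list is pre-loaded with canned API responses, one per request the code under test is expected to make. A minimal sketch of that replay pattern is shown below; `FakeAPIClient` and its `_request` method are illustrative names, not the real `BlazeMeterClientEmul` helper from the bzt test suite, and the FIFO behaviour is an assumption based on how the fixtures are queued and later asserted empty.

import logging


class FakeAPIClient(object):
    """Illustrative stand-in: replays canned responses in FIFO order."""

    def __init__(self, log):
        self.log = log
        self.results = []  # canned responses, appended by the test in call order

    def _request(self, url, data=None):
        # each emulated request consumes exactly one queued response, which is
        # why several tests below finish with assertEqual(client.results, [])
        self.log.debug("emulated request: %s", url)
        return self.results.pop(0)


client = FakeAPIClient(logging.getLogger(''))
client.results.append({"result": []})
assert client._request("tests") == {"result": []}
assert client.results == []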
Example #2
    def test_widget(self):
        obj = CloudProvisioning()
        obj.client = BlazeMeterClientEmul(logging.getLogger(''))
        obj.client.results.append({"result": []})
        obj.client.results.append({
            "result": {
                "sessions": [{
                    "name": "executor/scenario/location",
                    "configuration": {}
                }]
            }
        })

        obj.client.results.append({
            "result": {
                "sessions": [{
                    "name": "executor/scenario/location",
                    "configuration": {
                        "location": "loc-name",
                        "serversCount": "10"
                    }
                }]
            }
        })

        widget = obj.get_widget()
        widget.update()
        widget.update()
        widget.update()
        widget.update()

        self.assertEqual(
            "None #None\n executor scenario:\n  Agents in loc-name: 10\n",
            widget.text.get_text()[0])
Example #3
    def test_reuse_project_id(self):
        obj = CloudProvisioning()
        obj.engine = EngineEmul()
        obj.engine.config.merge({
            ScenarioExecutor.EXEC: {
                "executor": "mock",
            },
            "modules": {
                "mock": ModuleMock.__module__ + "." + ModuleMock.__name__
            },
            "provisioning": "mock"
        })
        obj.parameters = obj.engine.config['execution']

        obj.settings.merge({"token": "FakeToken",
                            'default-location': "us-west-1",
                            "delete-test-files": False,
                            "project": 1428})
        obj.client = client = BlazeMeterClientEmul(obj.log)
        client.results.append({"result": []})  # collections
        client.results.append({"result": [{"id": 5174715,
                                           "projectId": 1428,
                                           "name": "Taurus Cloud Test",
                                           "configuration": {"type": "taurus"}}]})  # find test
        client.results.append(self.__get_user_info())  # user
        client.results.append({})  # upload files
        obj.prepare()
Example #4
    def test_create_collection(self):
        obj = CloudProvisioning()
        obj.engine = EngineEmul()
        obj.engine.config.merge({
            ScenarioExecutor.EXEC: {
                "executor": "mock",
            },
            "modules": {
                "mock": ModuleMock.__module__ + "." + ModuleMock.__name__
            },
            "provisioning": "mock"
        })
        obj.parameters = obj.engine.config['execution']

        obj.settings.merge({"token": "FakeToken",
                            'default-location': "us-west-1",
                            "delete-test-files": False,
                            "use-deprecated-api": False})
        obj.client = client = BlazeMeterClientEmul(obj.log)
        client.results.append({"result": []})  # find collection
        client.results.append({"result": []})  # find test
        client.results.append(self.__get_user_info())  # user
        client.results.append({})  # upload files
        client.results.append({"result": {"name": "Taurus Collection", "items": []}})  # transform config to collection
        client.results.append({"result": {"id": 42}})  # create collection

        obj.prepare()
        self.assertIsInstance(obj.test, CloudCollectionTest)
Example #5
    def test_widget_cloud_collection(self):
        obj = CloudProvisioning()
        obj.client = BlazeMeterClientEmul(logging.getLogger(''))
        obj.test = CloudCollectionTest({}, {}, obj.client, None, None, None, logging.getLogger(''))
        obj.client.results.append({"result": {"sessions": [
            {
                "id": "session-id",
                "locationId": "loc-name",
                "readyStatus": {
                    "servers": ["server" for _ in range(10)]
                },
            }
        ]}})
        obj.client.results.append({"result": {"sessions": [
            {
                "id": "session-id",
                "name": "loc-name/scenario",
                "configuration": {}
            }
        ]}})
        obj.test.get_master_status()
        widget = obj.get_widget()
        widget.update()

        self.assertEqual("None #None\n scenario:\n  Agents in loc-name: 10\n", widget.text.get_text()[0])
Example #6
    def test_widget(self):
        obj = CloudProvisioning()
        obj.client = BlazeMeterClientEmul(logging.getLogger(''))
        obj.client.results.append({"result": []})
        obj.client.results.append({"result": {"sessions": [
            {
                "name": "executor/scenario/location",
                "configuration": {}
            }
        ]}})

        obj.client.results.append({"result": {"sessions": [
            {
                "name": "executor/scenario/location",
                "configuration": {
                    "location": "loc-name",
                    "serversCount": "10"
                }
            }
        ]}})

        widget = obj.get_widget()
        widget.update()
        widget.update()
        widget.update()
        widget.update()

        self.assertEqual("None #None\n executor scenario:\n  Agents in loc-name: 10\n", widget.text.get_text()[0])
Example #7
    def test_delete_test_files(self):
        obj = CloudProvisioning()
        obj.engine = EngineEmul()
        obj.engine.config.merge({
            ScenarioExecutor.EXEC: {
                "executor": "mock",
            },
            "modules": {
                "mock": ModuleMock.__module__ + "." + ModuleMock.__name__
            },
            "provisioning": "mock"
        })
        obj.parameters = obj.engine.config['execution']

        obj.settings.merge({"token": "FakeToken",
                            "delete-test-files": True,
                            'default-location': "us-west-1",
                            })
        obj.client = client = BlazeMeterClientEmul(obj.log)
        client.results.append(self.__get_user_info())  # user
        client.results.append({"result": [{"id": 5174715,
                                           "name": "Taurus Cloud Test",
                                           "configuration": {"type": "taurus"},}]})  # find test
        client.results.append({"files": [{"hash": "hash1", "name": "file1"},
                                         {"hash": "hash1", "name": "file2"}]})  # test files
        client.results.append({"removed": ["hash1", "hash2"]})  # remove test files
        client.results.append({})  # upload files

        obj.prepare()
        self.assertTrue(client.delete_files_before_test)
Example #8
    def test_cloud_config_cleanup(self):
        obj = CloudProvisioning()
        obj.engine = EngineEmul()
        obj.engine.config.merge({
            ScenarioExecutor.EXEC: {
                "concurrency": {
                    "local": 1,
                    "cloud": 10,
                },
                "locations": {
                    "us-east-1": 1,
                    "us-west": 2
                }
            },
        })
        obj.parameters = obj.engine.config['execution']
        obj.test = CloudTaurusTest(obj.engine.config['execution'], {},
                                   obj.client, None, None, "name",
                                   logging.getLogger(''))
        cloud_config = obj.test.prepare_cloud_config(obj.engine.config)
        execution = cloud_config["execution"][0]
        self.assertNotIn("throughput", execution)
        self.assertNotIn("ramp-up", execution)
        self.assertNotIn("hold-for", execution)
        self.assertNotIn("steps", execution)
Example #9
    def test_collection_defloc_sandbox(self):
        obj = CloudProvisioning()
        obj.engine = EngineEmul()
        obj.engine.config.merge({
            ScenarioExecutor.EXEC: {
                "executor": "mock",
                "concurrency": 5500,
            },
            "modules": {
                "mock": ModuleMock.__module__ + "." + ModuleMock.__name__
            },
            "provisioning": "mock"
        })
        obj.parameters = obj.engine.config['execution']
        obj.engine.aggregator = ConsolidatingAggregator()

        obj.settings["token"] = "FakeToken"
        obj.settings["browser-open"] = False
        obj.settings["use-deprecated-api"] = False
        obj.client = client = BlazeMeterClientEmul(obj.log)
        client.results.append({"result": []})  # find collection
        client.results.append({"result": []})  # find test
        client.results.append(self.__get_user_info())  # user
        client.results.append({})  # upload files
        client.results.append(
            {"result": {
                "name": "Taurus Collection",
                "items": []
            }})  # transform config to collection
        client.results.append({"result": {"id": 42}})  # create collection
        obj.prepare()
        exec_locations = obj.executors[0].execution['locations']
        expected_location = 'harbor-5591335d8588531f5cde3a04'
        self.assertIn(expected_location, exec_locations)
        self.assertEquals(1, exec_locations[expected_location])
Example #10
    def test_no_settings(self):
        obj = CloudProvisioning()
        obj.engine = EngineEmul()
        obj.engine.config.merge({
            ScenarioExecutor.EXEC: {
                "executor": "mock",
            },
            "modules": {
                "mock": ModuleMock.__module__ + "." + ModuleMock.__name__
            },
            "provisioning": "mock"
        })
        obj.parameters = obj.engine.config['execution']

        obj.settings["token"] = "FakeToken"
        obj.settings['default-location'] = "us-west-1"
        obj.client = client = BlazeMeterClientEmul(obj.log)
        client.results.append(self.__get_user_info())  # user
        client.results.append({"result": []})  # tests
        client.results.append({"result": {"id": id(client)}})  # create test
        client.results.append({"files": []})  # test files
        client.results.append({})  # upload files

        obj.prepare()
        self.assertEquals(1, obj.executors[0].execution['locations']['us-west-1'])
Example #11
    def test_aaskip_reporting(self):
        obj = CloudProvisioning()
        obj.engine = EngineEmul()
        obj.engine.config.merge({
            ScenarioExecutor.EXEC: {
                "executor": "mock",
            },
            "modules": {
                "mock": ModuleMock.__module__ + "." + ModuleMock.__name__,
                "blazemeter": ModuleMock.__module__ + "." + ModuleMock.__name__,
                "second_reporter": ModuleMock.__module__ + "." + ModuleMock.__name__,
                "third_reporter": ModuleMock.__module__ + "." + ModuleMock.__name__,
            },
            "provisioning": "mock",
            "reporting": ["blazemeter",
                          {"module": "blazemeter", "option": "value"},
                          "second_reporter",
                          {"module": "third_reporter"}]
        })
        obj.parameters = obj.engine.config['execution']

        obj.settings["token"] = "FakeToken"
        obj.settings['default-location'] = "us-west-1"
        obj.client = client = BlazeMeterClientEmul(obj.log)
        client.results.append(self.__get_user_info())  # user
        client.results.append({"result": []})  # tests
        client.results.append({"result": {"id": id(client)}})  # create test
        client.results.append({"files": []})  # test files
        client.results.append({})  # upload files

        obj.prepare()
        modules = [reporter['module'] for reporter in obj.engine.config['reporting']]
        self.assertEquals(modules, ['second_reporter', 'third_reporter'])
Example #12
    def test_collection_defloc_sandbox(self):
        obj = CloudProvisioning()
        obj.engine = EngineEmul()
        obj.engine.config.merge({
            ScenarioExecutor.EXEC: {
                "executor": "mock",
                "concurrency": 5500,
            },
            "modules": {
                "mock": ModuleMock.__module__ + "." + ModuleMock.__name__
            },
            "provisioning": "mock"
        })
        obj.parameters = obj.engine.config['execution']
        obj.engine.aggregator = ConsolidatingAggregator()

        obj.settings["token"] = "FakeToken"
        obj.settings["browser-open"] = False
        obj.settings["use-deprecated-api"] = False
        obj.client = client = BlazeMeterClientEmul(obj.log)
        client.results.append({"result": []})  # find collection
        client.results.append({"result": []})  # find test
        client.results.append(self.__get_user_info())  # user
        client.results.append({})  # upload files
        client.results.append({"result": {"name": "Taurus Collection", "items": []}})  # transform config to collection
        client.results.append({"result": {"id": 42}})  # create collection
        obj.prepare()
        exec_locations = obj.executors[0].execution['locations']
        expected_location = 'harbor-5591335d8588531f5cde3a04'
        self.assertIn(expected_location, exec_locations)
        self.assertEquals(1, exec_locations[expected_location])
Example #13
    def test_sandbox_default_location(self):
        obj = CloudProvisioning()
        obj.engine = EngineEmul()
        obj.engine.config.merge({
            ScenarioExecutor.EXEC: {
                "executor": "mock",
                "concurrency": 5500,
            },
            "modules": {
                "mock": ModuleMock.__module__ + "." + ModuleMock.__name__
            },
            "provisioning": "mock"
        })
        obj.parameters = obj.engine.config['execution']
        obj.engine.aggregator = ConsolidatingAggregator()

        obj.settings["token"] = "FakeToken"
        obj.settings["browser-open"] = False
        obj.client = client = BlazeMeterClientEmul(obj.log)
        client.results.append({"result": []})  # collections
        client.results.append({"result": []})  # tests
        client.results.append(self.__get_user_info())  # user
        client.results.append({"result": {"id": id(client)}})  # create test
        client.results.append({"files": []})  # create test
        client.results.append({})  # upload files
        obj.prepare()
        exec_locations = obj.executors[0].execution['locations']
        self.assertEquals(1, exec_locations['us-west-1'])
Example #14
    def test_sandbox_default_location(self):
        obj = CloudProvisioning()
        obj.engine = EngineEmul()
        obj.engine.config.merge({
            ScenarioExecutor.EXEC: {
                "executor": "mock",
                "concurrency": 5500,
            },
            "modules": {
                "mock": ModuleMock.__module__ + "." + ModuleMock.__name__
            },
            "provisioning": "mock"
        })
        obj.parameters = obj.engine.config['execution']
        obj.engine.aggregator = ConsolidatingAggregator()

        obj.settings["token"] = "FakeToken"
        obj.settings["browser-open"] = False
        obj.client = client = BlazeMeterClientEmul(obj.log)
        client.results.append({"result": []})  # collections
        client.results.append({"result": []})  # tests
        client.results.append(self.__get_user_info())  # user
        client.results.append({"result": {"id": id(client)}})  # create test
        client.results.append({"files": []})  # create test
        client.results.append({})  # upload files
        obj.prepare()
        exec_locations = obj.executors[0].execution['locations']
        self.assertEquals(1, exec_locations['us-west-1'])
Example #15
    def test_no_settings(self):
        obj = CloudProvisioning()
        obj.engine = EngineEmul()
        obj.engine.config.merge({
            ScenarioExecutor.EXEC: {
                "executor": "mock",
            },
            "modules": {
                "mock": ModuleMock.__module__ + "." + ModuleMock.__name__
            },
            "provisioning": "mock"
        })
        obj.parameters = obj.engine.config['execution']

        obj.settings["token"] = "FakeToken"
        obj.settings['default-location'] = "us-west-1"
        obj.client = client = BlazeMeterClientEmul(obj.log)
        client.results.append(self.__get_user_info())  # user
        client.results.append({"result": []})  # tests
        client.results.append({"result": {"id": id(client)}})  # create test
        client.results.append({"files": []})  # test files
        client.results.append({})  # upload files

        obj.prepare()
        self.assertEquals(1, obj.executors[0].execution['locations']['us-west-1'])
Example #16
    def test_aaskip_reporting(self):
        obj = CloudProvisioning()
        obj.engine = EngineEmul()
        obj.engine.config.merge({
            ScenarioExecutor.EXEC: {
                "executor": "mock",
            },
            "modules": {
                "mock": ModuleMock.__module__ + "." + ModuleMock.__name__,
                "blazemeter": ModuleMock.__module__ + "." + ModuleMock.__name__,
                "second_reporter": ModuleMock.__module__ + "." + ModuleMock.__name__,
                "third_reporter": ModuleMock.__module__ + "." + ModuleMock.__name__,
            },
            "provisioning": "mock",
            "reporting": ["blazemeter",
                          {"module": "blazemeter", "option": "value"},
                          "second_reporter",
                          {"module": "third_reporter"}]
        })
        obj.parameters = obj.engine.config['execution']

        obj.settings["token"] = "FakeToken"
        obj.settings['default-location'] = "us-west-1"
        obj.client = client = BlazeMeterClientEmul(obj.log)
        client.results.append(self.__get_user_info())  # user
        client.results.append({"result": []})  # tests
        client.results.append({"result": {"id": id(client)}})  # create test
        client.results.append({"files": []})  # test files
        client.results.append({})  # upload files

        obj.prepare()
        modules = [reporter['module'] for reporter in obj.engine.config['reporting']]
        self.assertEquals(modules, ['second_reporter', 'third_reporter'])
Example #17
    def test_nonexistent_location(self):
        obj = CloudProvisioning()
        obj.engine = EngineEmul()
        obj.engine.config.merge({
            ScenarioExecutor.EXEC: {
                "executor": "mock",
                "concurrency": 5500,
            },
            "modules": {
                "mock": ModuleMock.__module__ + "." + ModuleMock.__name__
            },
            "locations": {
                "us-not-found": 1,
            },
            "provisioning": "mock"
        })
        obj.parameters = obj.engine.config['execution']
        obj.engine.aggregator = ConsolidatingAggregator()

        obj.settings["token"] = "FakeToken"
        obj.settings["browser-open"] = False
        obj.settings["use-deprecated-api"] = False

        obj.client = client = BlazeMeterClientEmul(obj.log)
        client.results.append({"result": []})  # collections
        client.results.append({"result": []})  # tests
        client.results.append(self.__get_user_info())  # user
        self.assertRaises(TaurusConfigError, obj.prepare)
Example #18
    def test_locations_on_both_levels(self):
        obj = CloudProvisioning()
        obj.engine = EngineEmul()
        obj.engine.config.merge({
            ScenarioExecutor.EXEC: [{
                "executor": "mock",
                "concurrency": 5500,
                "locations": {
                    "eu-west-1": 1,
                }
            }],
            "locations": {
                "ams3": 1,
            },
            "modules": {
                "mock": ModuleMock.__module__ + "." + ModuleMock.__name__
            },
            "provisioning":
            "mock"
        })
        obj.parameters = obj.engine.config['execution'][0]
        obj.engine.aggregator = ConsolidatingAggregator()
        log_recorder = RecordingHandler()
        obj.log.addHandler(log_recorder)

        obj.settings["token"] = "FakeToken"
        obj.settings["browser-open"] = False
        obj.settings["use-deprecated-api"] = False
        obj.client = client = BlazeMeterClientEmul(obj.log)
        client.results.append({"result": []})  # find test
        client.results.append({"result": []})  # find collection
        client.results.append(self.__get_user_info())  # user
        client.results.append({})  # upload files
        client.results.append(
            {"result": {
                "name": "Taurus Collection",
                "items": []
            }})  # transform config to collection
        client.results.append({"result": {"id": 42}})  # create collection
        obj.prepare()

        cloud_config = yaml.load(
            open(os.path.join(obj.engine.artifacts_dir, "cloud.yml")))
        self.assertNotIn("locations", cloud_config)
        for execution in cloud_config["execution"]:
            self.assertIn("locations", execution)
        log_buff = log_recorder.warn_buff.getvalue()
        self.assertIn(
            "Each execution has locations specified, global locations won't have any effect",
            log_buff)
Example #19
    def test_pack_and_send_to_blazemeter(self):
        obj = CloudProvisioning()
        obj.engine = EngineEmul()

        obj.engine.config.merge({
            "execution": {
                "executor": "selenium",
                "concurrency": 5500,
                "locations": {
                    "us-east-1": 1,
                    "us-west": 2
                },
                "scenario": {
                    "script": RESOURCES_DIR + "selenium/junit/java_package"
                }
            },
            "modules": {
                "selenium": "bzt.modules.selenium.SeleniumExecutor",
                "cloud": "bzt.modules.blazemeter.CloudProvisioning",
                "junit": "bzt.modules.java.JUnitTester"
            },
            "provisioning": "cloud"
        })

        obj.parameters = obj.engine.config['execution']
        obj.settings["token"] = "FakeToken"
        mock = BZMock(obj.user)
        mock.mock_get.update({
            'https://a.blazemeter.com/api/v4/web/elfinder/1?cmd=open&target=s1_Lw':
            {
                "files": []
            },
        })
        mock.mock_post.update({
            'https://a.blazemeter.com/api/v4/projects': {
                "result": {
                    "id": 1,
                    'workspaceId': 1
                }
            },
            'https://a.blazemeter.com/api/v4/tests': {
                "result": {
                    "id": 1
                }
            },
            'https://a.blazemeter.com/api/v4/tests/1/files': {}
        })
        mock.mock_patch.update(
            {'https://a.blazemeter.com/api/v4/tests/1': {
                "result": {}
            }})
        obj.prepare()

        unpack_cfgs = obj.engine.config.get(Service.SERV)
        self.assertEqual(len(unpack_cfgs), 1)
        self.assertEqual(unpack_cfgs[0]['module'], Unpacker.UNPACK)
        self.assertEqual(unpack_cfgs[0][Unpacker.FILES], ['java_package.zip'])
        self.assertTrue(
            zipfile.is_zipfile(obj.engine.artifacts_dir + '/java_package.zip'))
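
The newer examples (this one, and Examples #37 and #42 further down) switch from a queued-results emulator to a table-driven BZMock keyed by HTTP verb and full URL. A self-contained sketch of that style is given below; `URLTableMock` is a hypothetical class used only for illustration, not the actual BZMock shipped with bzt, and the list-valued entry behaviour is inferred from Example #42, where one URL maps to a list of consecutive status responses.

class URLTableMock(object):
    """Illustrative stand-in for a table-driven HTTP mock keyed by verb and URL."""

    def __init__(self):
        self.mock_get = {}
        self.mock_post = {}
        self.mock_patch = {}

    def request(self, method, url):
        table = {"GET": self.mock_get,
                 "POST": self.mock_post,
                 "PATCH": self.mock_patch}[method]
        response = table[url]
        if isinstance(response, list):
            # a list value yields consecutive responses on consecutive calls
            return response.pop(0)
        return response


mock = URLTableMock()
mock.mock_get['https://a.blazemeter.com/api/v4/masters/1/status'] = [
    {"result": {"id": 1, "status": "CREATE"}},
    {"result": {"id": 1, "status": "ENDED", "progress": 101}},
]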
Example #20
    def test_results_link_cloud(self):
        obj = JUnitXMLReporter()
        obj.engine = EngineEmul()
        obj.engine.provisioning = CloudProvisioning()
        obj.engine.provisioning.client = BlazeMeterClientEmul(obj.log)
        prov = obj.engine.provisioning
        prov.client.results_url = 'url1'
        prov.settings.merge({'test': 'test1'})
        report_info = obj.get_bza_report_info()
        self.assertEqual(report_info, [('Cloud report link: url1\n', 'test1')])
Example #21
    def test_blazemeter_cloud_report_link(self):
        obj = FinalStatus()
        obj.engine = EngineEmul()
        obj.parameters = BetterDict()
        xml_report = obj.engine.create_artifact("status", ".xml")
        obj.parameters.merge({
            "dump-xml": xml_report,
        })

        prov = CloudProvisioning()
        prov.results_url = "http://report/link"
        obj.engine.provisioning = prov

        obj.aggregated_second(self.__get_datapoint())
        obj.post_process()

        self.assertTrue(os.path.exists(xml_report))
        with open(xml_report) as fds:
            report_content = fds.read()
        self.assertIn('<ReportURL>http://report/link</ReportURL>',
                      report_content)
Example #22
    def test_nonexistent_location(self):
        obj = CloudProvisioning()
        obj.engine = EngineEmul()
        obj.engine.config.merge({
            ScenarioExecutor.EXEC: {
                "executor": "mock",
                "concurrency": 5500,
            },
            "modules": {
                "mock": ModuleMock.__module__ + "." + ModuleMock.__name__
            },
            "locations": {
                "us-not-found": 1,
            },
            "provisioning": "mock"
        })
        obj.parameters = obj.engine.config['execution']
        obj.engine.aggregator = ConsolidatingAggregator()

        obj.settings["token"] = "FakeToken"
        obj.settings["browser-open"] = False
        obj.settings["use-deprecated-api"] = False

        obj.client = client = BlazeMeterClientEmul(obj.log)
        client.results.append({"result": []})  # collections
        client.results.append({"result": []})  # tests
        client.results.append(self.__get_user_info())  # user
        self.assertRaises(ValueError, obj.prepare)
Example #23
    def test_create_collection(self):
        obj = CloudProvisioning()
        obj.engine = EngineEmul()
        obj.engine.config.merge({
            ScenarioExecutor.EXEC: {
                "executor": "mock",
            },
            "modules": {
                "mock": ModuleMock.__module__ + "." + ModuleMock.__name__
            },
            "provisioning": "mock"
        })
        obj.parameters = obj.engine.config['execution']

        obj.settings.merge({
            "token": "FakeToken",
            'default-location': "us-west-1",
            "delete-test-files": False,
            "use-deprecated-api": False
        })
        obj.client = client = BlazeMeterClientEmul(obj.log)
        client.results.append({"result": []})  # find collection
        client.results.append({"result": []})  # find test
        client.results.append(self.__get_user_info())  # user
        client.results.append({})  # upload files
        client.results.append(
            {"result": {
                "name": "Taurus Collection",
                "items": []
            }})  # transform config to collection
        client.results.append({"result": {"id": 42}})  # create collection

        obj.prepare()
        self.assertIsInstance(obj.test, CloudCollectionTest)
Example #24
    def test_reuse_project_id(self):
        obj = CloudProvisioning()
        obj.engine = EngineEmul()
        obj.engine.config.merge({
            ScenarioExecutor.EXEC: {
                "executor": "mock",
            },
            "modules": {
                "mock": ModuleMock.__module__ + "." + ModuleMock.__name__
            },
            "provisioning": "mock"
        })
        obj.parameters = obj.engine.config['execution']

        obj.settings.merge({
            "token": "FakeToken",
            'default-location': "us-west-1",
            "delete-test-files": False,
            "project": 1428
        })
        obj.client = client = BlazeMeterClientEmul(obj.log)
        client.results.append({"result": []})  # collections
        client.results.append({
            "result": [{
                "id": 5174715,
                "projectId": 1428,
                "name": "Taurus Cloud Test",
                "configuration": {
                    "type": "taurus"
                }
            }]
        })  # find test
        client.results.append(self.__get_user_info())  # user
        client.results.append({})  # upload files
        obj.prepare()
Example #25
    def test_delete_test_files(self):
        obj = CloudProvisioning()
        obj.engine = EngineEmul()
        obj.engine.config.merge({
            ScenarioExecutor.EXEC: {
                "executor": "mock",
            },
            "modules": {
                "mock": ModuleMock.__module__ + "." + ModuleMock.__name__
            },
            "provisioning": "mock"
        })
        obj.parameters = obj.engine.config['execution']

        obj.settings.merge({"token": "FakeToken",
                            "delete-test-files": True,
                            'default-location': "us-west-1",
                            })
        obj.client = client = BlazeMeterClientEmul(obj.log)
        client.results.append(self.__get_user_info())  # user
        client.results.append({"result": [{"id": 5174715,
                                           "name": "Taurus Cloud Test",
                                           "configuration": {"type": "taurus"},}]})  # find test
        client.results.append({"files": [{"hash": "hash1", "name": "file1"},
                                         {"hash": "hash1", "name": "file2"}]})  # test files
        client.results.append({"removed": ["hash1", "hash2"]})  # remove test files
        client.results.append({})  # upload files

        obj.prepare()
        self.assertTrue(client.delete_files_before_test)
Example #26
    def test_cloud_config_cleanup(self):
        obj = CloudProvisioning()
        obj.engine = EngineEmul()
        obj.engine.config.merge({
            ScenarioExecutor.EXEC: {
                "concurrency": {
                    "local": 1,
                    "cloud": 10,
                },
                "locations": {
                    "us-east-1": 1,
                    "us-west": 2
                }
            },
        })
        obj.parameters = obj.engine.config['execution']
        cloud_config = obj.get_config_for_cloud()
        execution = cloud_config["execution"][0]
        self.assertNotIn("throughput", execution)
        self.assertNotIn("ramp-up", execution)
        self.assertNotIn("hold-for", execution)
        self.assertNotIn("steps", execution)
Example #27
    def test_toplevel_locations(self):
        obj = CloudProvisioning()
        obj.engine = EngineEmul()
        obj.engine.config.merge({
            ScenarioExecutor.EXEC: {
                "executor": "mock",
                "concurrency": 5500,
            },
            "modules": {
                "mock": ModuleMock.__module__ + "." + ModuleMock.__name__
            },
            "locations": {
                "us-east-1": 1,
                "us-west": 2
            },
            "locations-weighted": True,
            "provisioning": "mock"
        })
        obj.parameters = obj.engine.config['execution']
        obj.engine.aggregator = ConsolidatingAggregator()

        obj.settings["token"] = "FakeToken"
        obj.settings["browser-open"] = False
        obj.settings["use-deprecated-api"] = False
        obj.client = client = BlazeMeterClientEmul(obj.log)
        client.results.append({"result": []})  # collections
        client.results.append({"result": []})  # tests
        client.results.append(self.__get_user_info())  # user
        client.results.append({})  # upload files
        client.results.append(
            {"result": {
                "name": "Taurus Collection",
                "items": []
            }})  # transform config to collection
        client.results.append({"result": {"id": 42}})  # create collection
        obj.prepare()

        conf = yaml.load(
            open(os.path.join(obj.engine.artifacts_dir, "cloud.yml")))
        self.assertIn('locations', conf)
        self.assertIn('locations-weighted', conf)
        self.assertEqual(conf['locations']['us-east-1'], 1)
        self.assertEqual(conf['locations']['us-west'], 2)
        self.assertNotIn('locations', conf['execution'][0])
Example #28
    def test_blazemeter_cloud_report_link(self):
        obj = FinalStatus()
        obj.engine = EngineEmul()
        xml_report = obj.engine.create_artifact("status", ".xml")
        obj.parameters = BetterDict.from_dict({
            "dump-xml": xml_report,
        })

        prov = CloudProvisioning()
        prov.results_url = "http://report/link"
        obj.engine.provisioning = prov

        obj.startup()
        obj.shutdown()

        obj.aggregated_second(self.__get_datapoint())
        obj.post_process()

        self.assertTrue(os.path.exists(xml_report))
        with open(xml_report) as fds:
            report_content = fds.read()
        self.assertIn('<ReportURL>http://report/link</ReportURL>', report_content)
Example #29
    def test_cloud_config_cleanup(self):
        obj = CloudProvisioning()
        obj.engine = EngineEmul()
        obj.engine.config.merge({
            ScenarioExecutor.EXEC: {
                "concurrency": {
                    "local": 1,
                    "cloud": 10,
                },
                "locations": {
                    "us-east-1": 1,
                    "us-west": 2
                }
            },
        })
        obj.parameters = obj.engine.config['execution']
        cloud_config = obj.get_config_for_cloud()
        execution = cloud_config["execution"][0]
        self.assertNotIn("throughput", execution)
        self.assertNotIn("ramp-up", execution)
        self.assertNotIn("hold-for", execution)
        self.assertNotIn("steps", execution)
Example #30
    def test_settings_from_blazemeter_mod(self):
        obj = CloudProvisioning()
        obj.engine = EngineEmul()
        obj.engine.config.merge({
            ScenarioExecutor.EXEC: {
                "executor": "mock",
                "concurrency": 5500,
                "locations": {
                    "us-east-1": 1,
                    "us-west": 1,
                }
            },
            "modules": {
                "mock": ModuleMock.__module__ + "." + ModuleMock.__name__,
                "blazemeter": {
                    "class": ModuleMock.__module__ + "." + ModuleMock.__name__,
                    "token": "bmtoken",
                    "detach": True,
                    "browser-open": None,
                    "check-interval": 10.0,
                }
            },
            "provisioning": "mock"
        })
        obj.parameters = obj.engine.config['execution']
        obj.engine.aggregator = ConsolidatingAggregator()

        # these should override 'blazemeter' settings
        obj.settings["check-interval"] = 20.0
        obj.settings["browser-open"] = "both"

        obj.client = client = BlazeMeterClientEmul(obj.log)
        client.results.append({"result": []})  # collection
        client.results.append({"result": []})  # tests
        client.results.append(self.__get_user_info())  # user
        client.results.append({"result": {"id": id(client)}})  # create test
        client.results.append({"files": []})  # create test
        client.results.append({})  # upload files

        obj.prepare()

        self.assertEqual(obj.detach, True)
        self.assertEqual(obj.browser_open, "both")
        self.assertEqual(obj.client.token, "bmtoken")
        self.assertEqual(obj.check_interval, 20.0)

        self.assertEqual(client.results, [])
Example #31
    def test_cloud_config_cleanup(self):
        obj = CloudProvisioning()
        obj.engine = EngineEmul()
        obj.engine.config.merge({
            ScenarioExecutor.EXEC: {
                "concurrency": {
                    "local": 1,
                    "cloud": 10,
                },
                "locations": {
                    "us-east-1": 1,
                    "us-west": 2
                }
            },
        })
        obj.parameters = obj.engine.config['execution']
        obj.test = CloudTaurusTest(obj.engine.config['execution'], {}, obj.client, None, None, "name",
                                   logging.getLogger(''))
        cloud_config = obj.test.prepare_cloud_config(obj.engine.config)
        execution = cloud_config["execution"][0]
        self.assertNotIn("throughput", execution)
        self.assertNotIn("ramp-up", execution)
        self.assertNotIn("hold-for", execution)
        self.assertNotIn("steps", execution)
Example #32
    def test_locations_on_both_levels(self):
        obj = CloudProvisioning()
        obj.engine = EngineEmul()
        obj.engine.config.merge({
            ScenarioExecutor.EXEC: [{
                "executor": "mock",
                "concurrency": 5500,
                "locations": {
                    "eu-west-1": 1,
                }
            }],
            "locations": {
                "ams3": 1,
            },
            "modules": {
                "mock": ModuleMock.__module__ + "." + ModuleMock.__name__
            },
            "provisioning": "mock"
        })
        obj.parameters = obj.engine.config['execution'][0]
        obj.engine.aggregator = ConsolidatingAggregator()
        log_recorder = RecordingHandler()
        obj.log.addHandler(log_recorder)

        obj.settings["token"] = "FakeToken"
        obj.settings["browser-open"] = False
        obj.settings["use-deprecated-api"] = False
        obj.client = client = BlazeMeterClientEmul(obj.log)
        client.results.append({"result": []})  # find test
        client.results.append({"result": []})  # find collection
        client.results.append(self.__get_user_info())  # user
        client.results.append({})  # upload files
        client.results.append({"result": {"name": "Taurus Collection", "items": []}})  # transform config to collection
        client.results.append({"result": {"id": 42}})  # create collection
        obj.prepare()

        cloud_config = yaml.load(open(os.path.join(obj.engine.artifacts_dir, "cloud.yml")))
        self.assertNotIn("locations", cloud_config)
        for execution in cloud_config["execution"]:
            self.assertIn("locations", execution)
        log_buff = log_recorder.warn_buff.getvalue()
        self.assertIn("Each execution has locations specified, global locations won't have any effect", log_buff)
Example #33
    def test_dump_locations_new_style(self):
        obj = CloudProvisioning()
        obj.engine = EngineEmul()
        log_recorder = RecordingHandler()
        obj.log.addHandler(log_recorder)

        obj.settings["dump-locations"] = True
        obj.settings["token"] = "FakeToken"
        obj.settings["use-deprecated-api"] = False
        obj.client = BlazeMeterClientEmul(obj.log)
        obj.client.results.append(self.__get_user_info())
        self.assertRaises(ManualShutdown, obj.prepare)

        warnings = log_recorder.warn_buff.getvalue()
        self.assertIn(
            "Dumping available locations instead of running the test",
            warnings)
        info = log_recorder.info_buff.getvalue()
        self.assertIn("Location: DFW	Dallas (Rackspace)", info)
        self.assertIn("Location: us-west-2	US West (Oregon)", info)
        self.assertIn("Location: harbor-5591335d8588531f5cde3a04	Sandbox",
                      info)

        obj.post_process()
Example #34
    def test_cloud_config_cleanup(self):
        obj = CloudProvisioning()
        obj.engine = EngineEmul()
        obj.engine.config.merge({
            ScenarioExecutor.EXEC: {
                "executor": "mock",
                "concurrency": 5500,
                "locations": {
                    "us-east-1": 1,
                    "us-west": 2
                }
            },
            "modules": {
                "mock": ModuleMock.__module__ + "." + ModuleMock.__name__
            },
            "provisioning": "mock"
        })
        obj.parameters = obj.engine.config['execution']
        obj.engine.aggregator = ConsolidatingAggregator()

        obj.settings["token"] = "FakeToken"
        obj.client = client = BlazeMeterClientEmul(obj.log)
        client.results.append(self.__get_user_info())  # user
        client.results.append({"result": []})  # tests
        client.results.append({"result": {"id": id(client)}})  # create test
        client.results.append({"files": []})  # create test
        client.results.append({})  # upload files
        client.results.append({"result": {"id": id(obj)}})  # start
        client.results.append({"result": {"id": id(obj)}})  # get master
        client.results.append({"result": []})  # get master sessions
        client.results.append({})  # terminate
        obj.prepare()

        cloud_config = obj.get_config_for_cloud()
        execution = cloud_config["execution"][0]
        self.assertNotIn("throughput", execution)
        self.assertNotIn("ramp-up", execution)
        self.assertNotIn("hold-for", execution)
        self.assertNotIn("steps", execution)
Example #35
    def test_pack_and_send_to_blazemeter(self):
        obj = CloudProvisioning()
        obj.engine = EngineEmul()

        obj.engine.config.merge({
            "execution": {
                "executor": "selenium",
                "concurrency": 5500,
                "locations": {
                    "us-east-1": 1,
                    "us-west": 2
                },
                "scenario": {
                    "script": __dir__() + "/../selenium/java_package"
                }
            },
            "modules": {
                "selenium": "bzt.modules.selenium.SeleniumExecutor",
                "cloud": "bzt.modules.blazemeter.CloudProvisioning"
            },
            "provisioning": "cloud"
        })

        obj.parameters = obj.engine.config['execution']
        obj.settings["token"] = "FakeToken"
        obj.client = client = BlazeMeterClientEmul(obj.log)
        client.results.append(self.__get_user_info())  # user
        client.results.append({"result": []})  # tests
        client.results.append({"result": {"id": id(client)}})  # create test
        client.results.append({"files": []})  # create test
        client.results.append({})  # upload files
        client.results.append({"result": {"id": id(obj)}})  # start
        client.results.append({"result": {"id": id(obj)}})  # get master
        client.results.append({"result": []})  # get master sessions
        client.results.append({})  # terminate

        obj.prepare()

        unpack_cfgs = obj.engine.config.get(Service.SERV)
        self.assertEqual(len(unpack_cfgs), 1)
        self.assertEqual(unpack_cfgs[0]['module'], Unpacker.UNPACK)
        self.assertEqual(unpack_cfgs[0][Unpacker.FILES], ['java_package.zip'])
        self.assertTrue(
            zipfile.is_zipfile(obj.engine.artifacts_dir + '/java_package.zip'))
Example #36
    def test_toplevel_locations(self):
        obj = CloudProvisioning()
        obj.engine = EngineEmul()
        obj.engine.config.merge({
            ScenarioExecutor.EXEC: {
                "executor": "mock",
                "concurrency": 5500,
            },
            "modules": {
                "mock": ModuleMock.__module__ + "." + ModuleMock.__name__
            },
            "locations": {
                "us-east-1": 1,
                "us-west": 2
            },
            "locations-weighted": True,
            "provisioning": "mock"
        })
        obj.parameters = obj.engine.config['execution']
        obj.engine.aggregator = ConsolidatingAggregator()

        obj.settings["token"] = "FakeToken"
        obj.settings["browser-open"] = False
        obj.settings["use-deprecated-api"] = False
        obj.client = client = BlazeMeterClientEmul(obj.log)
        client.results.append({"result": []})  # collections
        client.results.append({"result": []})  # tests
        client.results.append(self.__get_user_info())  # user
        client.results.append({})  # upload files
        client.results.append({"result": {"name": "Taurus Collection", "items": []}})  # transform config to collection
        client.results.append({"result": {"id": 42}})  # create collection
        obj.prepare()

        conf = yaml.load(open(os.path.join(obj.engine.artifacts_dir, "cloud.yml")))
        self.assertIn('locations', conf)
        self.assertIn('locations-weighted', conf)
        self.assertEqual(conf['locations']['us-east-1'], 1)
        self.assertEqual(conf['locations']['us-west'], 2)
        self.assertNotIn('locations', conf['execution'][0])
Example #37
    def test_pack_and_send_to_blazemeter(self):
        obj = CloudProvisioning()
        obj.engine = EngineEmul()

        obj.engine.config.merge({
            "execution": {
                "executor": "selenium",
                "concurrency": 5500,
                "locations": {
                    "us-east-1": 1,
                    "us-west": 2},
                "scenario": {
                    "script": RESOURCES_DIR + "selenium/junit/java_package"}},
            "modules": {
                "selenium": "bzt.modules.selenium.SeleniumExecutor",
                "cloud": "bzt.modules.blazemeter.CloudProvisioning",
                "junit": "bzt.modules.java.JUnitTester"},
            "provisioning": "cloud"
        })
        obj.engine.unify_config()

        obj.parameters = obj.engine.config['execution'][0]
        obj.settings["token"] = "FakeToken"
        mock = BZMock(obj.user)
        mock.mock_get.update({
            'https://a.blazemeter.com/api/v4/web/elfinder/1?cmd=open&target=s1_Lw': {"files": []},
            'https://a.blazemeter.com/api/v4/multi-tests?projectId=1&name=Taurus+Cloud+Test': {"result": []},
            'https://a.blazemeter.com/api/v4/tests?projectId=1&name=Taurus+Cloud+Test': {
                "result": [{"id": 1, 'name': 'Taurus Cloud Test', "configuration": {"type": "taurus"}}]
            },
        })
        mock.mock_post.update({
            'https://a.blazemeter.com/api/v4/projects': {"result": {"id": 1, 'workspaceId': 1}},
            'https://a.blazemeter.com/api/v4/multi-tests': {"result": {}},
            'https://a.blazemeter.com/api/v4/tests?projectId=1&name=Taurus+Cloud+Test': {
                "result": {"id": 1, "configuration": {"type": "taurus"}}
            },
            'https://a.blazemeter.com/api/v4/tests/1/files': {}
        })
        mock.mock_patch.update({'https://a.blazemeter.com/api/v4/tests/1': {"result": {}}})
        obj.prepare()

        unpack_cfgs = obj.engine.config.get(Service.SERV)
        self.assertEqual(len(unpack_cfgs), 1)
        self.assertEqual(unpack_cfgs[0]['module'], Unpacker.UNPACK)
        self.assertEqual(unpack_cfgs[0][Unpacker.FILES], ['java_package.zip'])
        self.assertTrue(zipfile.is_zipfile(obj.engine.artifacts_dir + '/java_package.zip'))
Example #38
    def test_pack_and_send_to_blazemeter(self):
        obj = CloudProvisioning()
        obj.engine = EngineEmul()

        obj.engine.config.merge({
            "execution": {
                "executor": "selenium",
                "concurrency": 5500,
                "locations": {
                    "us-east-1": 1,
                    "us-west": 2},
                "scenario": {
                    "script": __dir__() + "/../selenium/java_package"}},
            "modules": {
                "selenium": "bzt.modules.selenium.SeleniumExecutor",
                "cloud": "bzt.modules.blazemeter.CloudProvisioning"},
            "provisioning": "cloud"
        })

        obj.parameters = obj.engine.config['execution']
        obj.settings["token"] = "FakeToken"
        obj.client = client = BlazeMeterClientEmul(obj.log)
        client.results.append({"result": []})  # collections
        client.results.append({"result": []})  # tests
        client.results.append(self.__get_user_info())  # user
        client.results.append({"result": {"id": id(client)}})  # create test
        client.results.append({"files": []})  # create test
        client.results.append({})  # upload files
        client.results.append({"result": {"id": id(obj)}})  # start
        client.results.append({"result": {"id": id(obj)}})  # get master
        client.results.append({"result": []})  # get master sessions
        client.results.append({})  # terminate

        obj.prepare()

        unpack_cfgs = obj.engine.config.get(Service.SERV)
        self.assertEqual(len(unpack_cfgs), 1)
        self.assertEqual(unpack_cfgs[0]['module'], Unpacker.UNPACK)
        self.assertEqual(unpack_cfgs[0][Unpacker.FILES], ['java_package.zip'])
        self.assertTrue(zipfile.is_zipfile(obj.engine.artifacts_dir + '/java_package.zip'))
Example #39
    def test_terminate_only(self):
        "test is terminated only when it was started and didn't finished"
        obj = CloudProvisioning()
        obj.engine = EngineEmul()
        obj.engine.config.merge({
            ScenarioExecutor.EXEC: {
                "executor": "mock",
                "concurrency": 5500,
                "locations": {
                    "us-east-1": 1,
                    "us-west": 1,
                }
            },
            "modules": {
                "mock": ModuleMock.__module__ + "." + ModuleMock.__name__
            },
            "provisioning": "mock"
        })
        obj.parameters = obj.engine.config['execution']
        obj.engine.aggregator = ConsolidatingAggregator()

        obj.settings["token"] = "FakeToken"
        obj.settings["browser-open"] = False
        obj.settings["check-interval"] = "0ms"  # do not skip checks
        obj.settings["use-deprecated-api"] = False
        obj.client = client = BlazeMeterClientEmul(obj.log)
        client.results.append({"result": []})  # find collection
        client.results.append({"result": []})  # find test
        client.results.append(self.__get_user_info())  # user
        client.results.append({})  # upload files
        client.results.append({"result": {"name": "Taurus Collection", "items": []}})  # transform config to collection
        client.results.append({"result": {"id": 42}})  # create collection
        client.results.append({"result": {"id": id(obj)}})  # start
        client.results.append({"result": {"id": id(obj), "sessions": [{"id": "s1", "status": "JMETER_CONSOLE_INIT"},
                                                                      {"id": "s2", "status": "JMETER_CONSOLE_INIT"}]}})
        client.results.append({"result": []})  # sessions
        client.results.append({"result": {}})  # force start
        client.results.append({"result": {"progress": 120, "status": "ENDED"}})  # status should trigger shutdown
        client.results.append({"result": []})  # sessions

        obj.prepare()
        obj.startup()
        obj.check()  # this one should trigger force start
        self.assertTrue(obj.check())
        obj.shutdown()
        obj.post_process()
        self.assertEqual(client.results, [])
Example #40
    def test_simple(self):
        obj = CloudProvisioning()
        obj.engine = EngineEmul()
        obj.engine.config.merge({
            "execution": {
                "executor": "mock",
                "concurrency": 5500,
                "locations": {
                    "us-east-1": 1,
                    "us-west": 2
                }
            },
            "modules": {
                "mock": ModuleMock.__module__ + "." + ModuleMock.__name__
            },
            "provisioning": "mock"
        })
        obj.parameters = obj.engine.config['execution']
        obj.engine.aggregator = ConsolidatingAggregator()

        obj.settings["token"] = "FakeToken"
        obj.client = client = BlazeMeterClientEmul(obj.log)
        client.results.append(self.__get_user_info())  # user
        client.results.append({"result": []})  # tests
        client.results.append({"result": {"id": id(client)}})  # create test
        client.results.append({})  # upload files
        client.results.append({"result": {"id": id(obj)}})  # start
        client.results.append({"result": {"id": id(obj)}})  # get master
        client.results.append({})  # terminate

        obj.prepare()
        widget = obj.get_widget()
        self.assertEquals(2, obj.executors[0].execution['locations']['us-east-1'])
        self.assertEquals(4, obj.executors[0].execution['locations']['us-west'])

        obj.startup()
        obj.check()
        widget.render((200,), False)
        txt = widget.text.get_text()[0]
        logging.info("Text: '%s'", txt)
        self.assertIn("us-east-1: 2", txt)
        self.assertIn("us-west: 4", txt)
        obj.shutdown()
        obj.post_process()
Example #41
    def test_check_interval(self):
        obj = CloudProvisioning()
        obj.engine = EngineEmul()
        obj.engine.config.merge({
            ScenarioExecutor.EXEC: {
                "executor": "mock",
                "concurrency": 5500,
                "locations": {
                    "us-east-1": 1,
                    "us-west": 1,
                }
            },
            "modules": {
                "mock": ModuleMock.__module__ + "." + ModuleMock.__name__
            },
            "provisioning": "mock"
        })
        obj.parameters = obj.engine.config['execution']
        obj.engine.aggregator = ConsolidatingAggregator()

        obj.settings["token"] = "FakeToken"
        obj.settings["browser-open"] = False
        obj.settings["check-interval"] = "1s"
        obj.client = client = BlazeMeterClientEmul(obj.log)
        client.results.append({"result": []})  # collection
        client.results.append({"result": []})  # tests
        client.results.append(self.__get_user_info())  # user
        client.results.append({"result": {"id": id(client)}})  # create test
        client.results.append({"files": []})  # create test
        client.results.append({})  # upload files
        client.results.append({"result": {"id": id(obj)}})  # start test
        client.results.append({"result": {"id": id(obj)}})  # status
        client.results.append({"result": []})  # sessions
        client.results.append({"result": {"id": id(obj)}})  # status
        client.results.append({"result": []})  # sessions

        obj.prepare()
        obj.startup()
        obj.check()  # this one should work
        obj.check()  # this one should be skipped
        time.sleep(1)
        obj.check()  # this one should work
        obj.check()  # this one should skip

        self.assertEqual(client.results, [])
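
test_check_interval above only works because check() is throttled by the `check-interval` setting: calls that arrive before the interval has elapsed are skipped and consume no queued responses. The sketch below illustrates that throttling under the assumption that the provisioning module keeps a timestamp of its last real poll (Example #42 resets `_last_check_time` directly, which is where that attribute name is borrowed from); the class and method bodies are otherwise illustrative, not the actual CloudProvisioning implementation.

import time


class IntervalThrottledCheck(object):
    """Illustrative: skip status polls that arrive before the interval elapses."""

    def __init__(self, check_interval=1.0):
        self.check_interval = check_interval  # seconds, e.g. parsed from "1s"
        self._last_check_time = 0.0

    def check(self, poll):
        now = time.time()
        if now - self._last_check_time < self.check_interval:
            return False  # too early: skip the poll, leave queued responses untouched
        self._last_check_time = now
        return poll()  # e.g. fetch master status and session list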
Example #42
    def test_case1(self):
        mock = BZMock()

        mock.mock_get.update({
            'https://a.blazemeter.com/api/v4/multi-tests?projectId=1&name=Taurus+Cloud+Test': {"result": []},
            'https://a.blazemeter.com/api/v4/tests?projectId=1&name=Taurus+Cloud+Test': {"result": []},
            'https://a.blazemeter.com/api/v4/masters/1/multi-tests': {"result": []},
            'https://a.blazemeter.com/api/v4/masters/1/sessions': {"result": {"sessions": []}},
            'https://a.blazemeter.com/api/v4/masters/1/full': {"result": {"sessions": []}},
            'https://a.blazemeter.com/api/v4/masters/1': {"result": {"note": "message"}},
            'https://a.blazemeter.com/api/v4/masters/1/status': [
                {"result": {"id": 1, "status": "CREATE"}},
                {"result": {"id": 1, "status": "ENDED", "progress": 101}}
            ],
        })

        mock.mock_post = {
            'https://a.blazemeter.com/api/v4/projects': {"result": {"id": 1, "workspaceId": 1}},
            'https://a.blazemeter.com/api/v4/tests': {"result": {"id": 1, "configuration": {"type": "taurus"}}},
            'https://a.blazemeter.com/api/v4/tests/1/files': {"result": None},
            'https://a.blazemeter.com/api/v4/tests/1/start': {"result": {"id": 1}},
            'https://a.blazemeter.com/api/v4/masters/1/stop': {"result": None},
            'https://a.blazemeter.com/api/v4/masters/1/public-token': {"result": {"publicToken": "token"}},
        }

        mock.mock_patch = {
            'https://a.blazemeter.com/api/v4/tests/1': {"result": {}}
        }

        prov = CloudProvisioning()
        prov.browser_open = None
        prov.public_report = True
        prov.user.token = "test"
        prov.engine = EngineEmul()
        prov.engine.aggregator = ConsolidatingAggregator()

        prov.engine.config.merge({
            ScenarioExecutor.EXEC: [{
                "executor": "mock",
                "locations": {
                    "aws": 1},
                "files": ModuleMock().get_resource_files()}]})

        mock.apply(prov.user)

        prov.prepare()
        prov.startup()
        prov.check()
        prov._last_check_time = 0
        prov.check()
        prov.shutdown()
        prov.post_process()
Example #43
0
    def test_simple(self):
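        """Full provisioning lifecycle; the per-location weights from the config must reach the executor."""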
        obj = CloudProvisioning()
        obj.engine = EngineEmul()
        obj.engine.config.merge({
            ScenarioExecutor.EXEC: {
                "executor": "mock",
                "concurrency": 5500,
                "locations": {
                    "us-east-1": 1,
                    "us-west": 2
                }
            },
            "modules": {
                "mock": ModuleMock.__module__ + "." + ModuleMock.__name__
            },
            "provisioning": "mock"
        })
        obj.parameters = obj.engine.config['execution']
        obj.engine.aggregator = ConsolidatingAggregator()

        obj.settings["token"] = "FakeToken"
        obj.settings["browser-open"] = False
        obj.client = client = BlazeMeterClientEmul(obj.log)
        client.results.append(self.__get_user_info())  # user
        client.results.append({"result": []})  # tests
        client.results.append({"result": {"id": id(client)}})  # create test
        client.results.append({"files": []})  # create test
        client.results.append({})  # upload files
        client.results.append({"result": {"id": id(obj)}})  # start
        client.results.append({"result": {"id": id(obj)}})  # get master
        client.results.append({"result": []})  # get master sessions
        client.results.append({})  # terminate

        obj.prepare()
        self.assertEqual(1, obj.executors[0].execution['locations']['us-east-1'])
        self.assertEqual(2, obj.executors[0].execution['locations']['us-west'])

        obj.startup()
        obj.check()
        obj.shutdown()
        obj.post_process()
Example #44
0
    def test_simple(self):
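        """Full provisioning lifecycle without the browser-open setting; location weights must still reach the executor."""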
        obj = CloudProvisioning()
        obj.engine = EngineEmul()
        obj.engine.config.merge({
            ScenarioExecutor.EXEC: {
                "executor": "mock",
                "concurrency": 5500,
                "locations": {
                    "us-east-1": 1,
                    "us-west": 2
                }
            },
            "modules": {
                "mock": ModuleMock.__module__ + "." + ModuleMock.__name__
            },
            "provisioning": "mock"
        })
        obj.parameters = obj.engine.config['execution']
        obj.engine.aggregator = ConsolidatingAggregator()

        obj.settings["token"] = "FakeToken"
        obj.client = client = BlazeMeterClientEmul(obj.log)
        client.results.append(self.__get_user_info())  # user
        client.results.append({"result": []})  # tests
        client.results.append({"result": {"id": id(client)}})  # create test
        client.results.append({})  # upload files
        client.results.append({"result": {"id": id(obj)}})  # start
        client.results.append({"result": {"id": id(obj)}})  # get master
        client.results.append({"result": []})  # get master sessions
        client.results.append({})  # terminate

        obj.prepare()
        self.assertEqual(1, obj.executors[0].execution['locations']['us-east-1'])
        self.assertEqual(2, obj.executors[0].execution['locations']['us-west'])

        obj.startup()
        obj.check()
        obj.shutdown()
        obj.post_process()
Example #45
0
    def test_case1(self):
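        """Drive the provisioning lifecycle against BZMock with per-endpoint mocked API v4 responses."""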
        mock = BZMock()

        mock.mock_get.update({
            'https://a.blazemeter.com/api/v4/multi-tests?projectId=1&name=Taurus+Cloud+Test':
            {
                "result": []
            },
            'https://a.blazemeter.com/api/v4/tests?projectId=1&name=Taurus+Cloud+Test':
            {
                "result": []
            },
            'https://a.blazemeter.com/api/v4/masters/1/multi-tests': {
                "result": []
            },
            'https://a.blazemeter.com/api/v4/masters/1/sessions': {
                "result": {
                    "sessions": []
                }
            },
            'https://a.blazemeter.com/api/v4/masters/1/full': {
                "result": {
                    "sessions": []
                }
            },
            'https://a.blazemeter.com/api/v4/masters/1': {
                "result": {
                    "note": "message"
                }
            },
            'https://a.blazemeter.com/api/v4/masters/1/status': [{
                "result": {
                    "id": 1,
                    "status": "CREATE"
                }
            }, {
                "result": {
                    "id": 1,
                    "status": "ENDED",
                    "progress": 101
                }
            }],
        })

        mock.mock_post = {
            'https://a.blazemeter.com/api/v4/projects': {
                "result": {
                    "id": 1,
                    "workspaceId": 1
                }
            },
            'https://a.blazemeter.com/api/v4/tests': {
                "result": {
                    "id": 1,
                    "configuration": {
                        "type": "taurus"
                    }
                }
            },
            'https://a.blazemeter.com/api/v4/tests/1/files': {
                "result": None
            },
            'https://a.blazemeter.com/api/v4/tests/1/start': {
                "result": {
                    "id": 1
                }
            },
            'https://a.blazemeter.com/api/v4/masters/1/stop': {
                "result": None
            },
            'https://a.blazemeter.com/api/v4/masters/1/public-token': {
                "result": {
                    "publicToken": "token"
                }
            },
        }

        mock.mock_patch = {
            'https://a.blazemeter.com/api/v4/tests/1': {
                "result": {}
            }
        }

        prov = CloudProvisioning()
        prov.browser_open = None
        prov.public_report = True
        prov.user.token = "test"
        prov.engine = EngineEmul()
        prov.engine.aggregator = ConsolidatingAggregator()
        # prov.engine.config.merge({"modules": {"blazemeter": {"browser-open": False}}})
        prov.engine.config[ScenarioExecutor.EXEC] = [{
            "executor": "mock",
            "locations": {"aws": 1},
            "files": ModuleMock().get_resource_files(),
        }]
        mock.apply(prov.user)

        prov.prepare()
        prov.startup()
        prov.check()
        prov._last_check_time = 0
        prov.check()
        prov.shutdown()
        prov.post_process()
Example #46
0
    def test_detach(self):
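        """In detach mode the cloud test is only started: all queued responses are consumed by startup() and later phases make no API calls."""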
        obj = CloudProvisioning()
        obj.engine = EngineEmul()
        obj.engine.config.merge({
            ScenarioExecutor.EXEC: {
                "executor": "mock",
                "concurrency": 55,
                "locations": {
                    "us-east-1": 1,
                    "us-west": 2
                }
            },
            "modules": {
                "mock": ModuleMock.__module__ + "." + ModuleMock.__name__
            },
            "provisioning": "mock"
        })
        obj.parameters = obj.engine.config['execution']

        obj.settings["token"] = "FakeToken"
        obj.settings["detach"] = True
        obj.settings["browser-open"] = False
        obj.client = client = BlazeMeterClientEmul(obj.log)
        client.results.append({"result": []})  # collections
        client.results.append({"result": []})  # tests
        client.results.append(self.__get_user_info())  # user
        client.results.append({"result": {"id": id(client)}})  # create test
        client.results.append({"files": []})  # create test
        client.results.append({})  # upload files
        client.results.append({"result": {"id": id(obj)}})  # start

        obj.prepare()
        self.assertEqual(1, len(client.results))
        obj.startup()
        self.assertEqual([], client.results)
        obj.check()
        obj.shutdown()
        obj.post_process()
Example #47
0
    def test_terminate_only(self):
        "test is terminated only when it was started and didn't finished"
        obj = CloudProvisioning()
        obj.engine = EngineEmul()
        obj.engine.config.merge({
            ScenarioExecutor.EXEC: {
                "executor": "mock",
                "concurrency": 5500,
                "locations": {
                    "us-east-1": 1,
                    "us-west": 1,
                }
            },
            "modules": {
                "mock": ModuleMock.__module__ + "." + ModuleMock.__name__
            },
            "provisioning": "mock"
        })
        obj.parameters = obj.engine.config['execution']
        obj.engine.aggregator = ConsolidatingAggregator()

        obj.settings["token"] = "FakeToken"
        obj.settings["browser-open"] = False
        obj.settings["check-interval"] = "0ms"  # do not skip checks
        obj.settings["use-deprecated-api"] = False
        obj.client = client = BlazeMeterClientEmul(obj.log)
        client.results.append({"result": []})  # find collection
        client.results.append({"result": []})  # find test
        client.results.append(self.__get_user_info())  # user
        client.results.append({})  # upload files
        # transform config to collection
        client.results.append({"result": {"name": "Taurus Collection", "items": []}})
        client.results.append({"result": {"id": 42}})  # create collection
        client.results.append({"result": {"id": id(obj)}})  # start
        client.results.append({"result": {
            "id": id(obj),
            "sessions": [
                {"id": "s1", "status": "JMETER_CONSOLE_INIT"},
                {"id": "s2", "status": "JMETER_CONSOLE_INIT"},
            ],
        }})
        client.results.append({"result": []})  # sessions
        client.results.append({"result": {}})  # force start
        client.results.append({"result": {
            "progress": 120,
            "status": "ENDED"
        }})  # status should trigger shutdown
        client.results.append({"result": []})  # sessions

        obj.prepare()
        obj.startup()
        obj.check()  # this one should trigger force start
        self.assertTrue(obj.check())
        obj.shutdown()
        obj.post_process()
        self.assertEqual(client.results, [])
Example #49
0
    def test_xml_format_passfail(self):
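        """Pass/fail criteria are rendered into JUnit XML: triggered criteria produce error elements and the BlazeMeter report link is written to system-out."""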
        obj = JUnitXMLReporter()
        obj.engine = EngineEmul()
        obj.parameters = BetterDict()
        obj.engine.provisioning = CloudProvisioning()
        obj.engine.provisioning.results_url = "http://test/report/123"

        pass_fail1 = PassFailStatus()

        fc1_triggered = DataCriterion(
            {
                'stop': True,
                'label': 'Sample 1 Triggered',
                'fail': True,
                'timeframe': -1,
                'threshold': '150ms',
                'condition': '<',
                'subject': 'avg-rt'
            }, pass_fail1)

        fc1_not_triggered = DataCriterion(
            {
                'stop': True,
                'label': 'Sample 1 Not Triggered',
                'fail': True,
                'timeframe': -1,
                'threshold': '300ms',
                'condition': '>',
                'subject': 'avg-rt'
            }, pass_fail1)

        pass_fail2 = PassFailStatus()

        fc2_triggered = DataCriterion(
            {
                'stop': True,
                'label': 'Sample 2 Triggered',
                'fail': True,
                'timeframe': -1,
                'threshold': '150ms',
                'condition': '<=',
                'subject': 'avg-rt'
            }, pass_fail2)

        fc2_not_triggered = DataCriterion(
            {
                'stop': True,
                'label': 'Sample 2 Not Triggered',
                'fail': True,
                'timeframe': -1,
                'threshold': '300ms',
                'condition': '=',
                'subject': 'avg-rt'
            }, pass_fail2)

        pass_fail1.criteria.append(fc1_triggered)
        pass_fail1.criteria.append(fc1_not_triggered)
        pass_fail2.criteria.append(fc2_triggered)
        pass_fail2.criteria.append(fc2_not_triggered)

        fc1_triggered.is_triggered = True
        fc2_triggered.is_triggered = True

        obj.engine.reporters.append(pass_fail1)
        obj.engine.reporters.append(pass_fail2)
        obj.engine.reporters.append(BlazeMeterUploader())

        path_from_config = tempfile.mktemp(suffix='.xml',
                                           prefix='junit-xml_passfail',
                                           dir=obj.engine.artifacts_dir)

        obj.parameters.merge({
            "filename": path_from_config,
            "data-source": "pass-fail"
        })
        obj.prepare()
        obj.last_second = DataPoint(0)
        obj.post_process()

        with open(obj.report_file_path, 'rb') as fds:
            f_contents = fds.read()

        logging.info("File: %s", f_contents)
        xml_tree = etree.fromstring(f_contents)
        self.assertEqual('testsuites', xml_tree.tag)
        suite = xml_tree[0]
        self.assertEqual('testsuite', suite.tag)
        test_cases = list(suite)
        self.assertEqual(4, len(test_cases))
        self.assertEqual('testcase', test_cases[0].tag)
        self.assertEqual('error', test_cases[0][1].tag)
        self.assertEqual('error', test_cases[2][1].tag)

        sys_out = test_cases[0][0]
        self.assertEqual('system-out', sys_out.tag)
        self.assertIn('BlazeMeter report link: http://test/report/123',
                      sys_out.text)