def test_delete_test_files(self):
    """The 'delete-test-files' setting must make the client remove existing test files on prepare()."""
    obj = CloudProvisioning()
    obj.engine = EngineEmul()
    mock_mod = ModuleMock.__module__ + "." + ModuleMock.__name__
    obj.engine.config.merge({
        ScenarioExecutor.EXEC: {
            "executor": "mock",
        },
        "modules": {
            "mock": mock_mod,
        },
        "provisioning": "mock",
    })
    obj.parameters = obj.engine.config['execution']
    obj.settings.merge({
        "token": "FakeToken",
        "delete-test-files": True,
        'default-location': "us-west-1",
    })
    obj.client = client = BlazeMeterClientEmul(obj.log)
    # queued fake API responses, consumed in order
    client.results.append(self.__get_user_info())  # user
    client.results.append({"result": [{"id": 5174715,
                                       "name": "Taurus Cloud Test",
                                       "configuration": {"type": "taurus"}}]})  # find test
    client.results.append({"files": [{"hash": "hash1", "name": "file1"},
                                     {"hash": "hash1", "name": "file2"}]})  # test files
    client.results.append({"removed": ["hash1", "hash2"]})  # remove test files
    client.results.append({})  # upload files
    obj.prepare()
    self.assertTrue(client.delete_files_before_test)
def test_no_settings(self):
    """With no per-execution locations, prepare() should fall back to 'default-location'.

    Fix: `assertEquals` is a deprecated alias of `assertEqual` (removed in Python 3.12);
    use the canonical name.
    """
    obj = CloudProvisioning()
    obj.engine = EngineEmul()
    obj.engine.config.merge({
        ScenarioExecutor.EXEC: {
            "executor": "mock",
        },
        "modules": {
            "mock": ModuleMock.__module__ + "." + ModuleMock.__name__
        },
        "provisioning": "mock"
    })
    obj.parameters = obj.engine.config['execution']
    obj.settings["token"] = "FakeToken"
    obj.settings['default-location'] = "us-west-1"
    obj.client = client = BlazeMeterClientEmul(obj.log)
    # queued fake API responses, consumed in order
    client.results.append(self.__get_user_info())  # user
    client.results.append({"result": []})  # tests
    client.results.append({"result": {"id": id(client)}})  # create test
    client.results.append({"files": []})  # test files
    client.results.append({})  # upload files
    obj.prepare()
    self.assertEqual(1, obj.executors[0].execution['locations']['us-west-1'])
def test_nonexistent_location(self):
    """A location unknown to the account must make prepare() raise ValueError."""
    obj = CloudProvisioning()
    obj.engine = EngineEmul()
    mock_mod = ModuleMock.__module__ + "." + ModuleMock.__name__
    obj.engine.config.merge({
        ScenarioExecutor.EXEC: {
            "executor": "mock",
            "concurrency": 5500,
        },
        "modules": {
            "mock": mock_mod,
        },
        "locations": {
            "us-not-found": 1,
        },
        "provisioning": "mock",
    })
    obj.parameters = obj.engine.config['execution']
    obj.engine.aggregator = ConsolidatingAggregator()
    obj.settings["token"] = "FakeToken"
    obj.settings["browser-open"] = False
    obj.settings["use-deprecated-api"] = False
    obj.client = client = BlazeMeterClientEmul(obj.log)
    # queued fake API responses, consumed in order
    client.results.append({"result": []})  # collections
    client.results.append({"result": []})  # tests
    client.results.append(self.__get_user_info())  # user
    self.assertRaises(ValueError, obj.prepare)
def test_create_collection(self):
    """With the deprecated API disabled, prepare() should create a CloudCollectionTest."""
    obj = CloudProvisioning()
    obj.engine = EngineEmul()
    obj.engine.config.merge({
        ScenarioExecutor.EXEC: {
            "executor": "mock",
        },
        "modules": {
            "mock": ModuleMock.__module__ + "." + ModuleMock.__name__
        },
        "provisioning": "mock"
    })
    obj.parameters = obj.engine.config['execution']
    obj.settings.merge({
        "token": "FakeToken",
        'default-location': "us-west-1",
        "delete-test-files": False,
        "use-deprecated-api": False,
    })
    obj.client = client = BlazeMeterClientEmul(obj.log)
    # queued fake API responses, consumed in order
    client.results.append({"result": []})  # find collection
    client.results.append({"result": []})  # find test
    client.results.append(self.__get_user_info())  # user
    client.results.append({})  # upload files
    client.results.append({"result": {"name": "Taurus Collection",
                                      "items": []}})  # transform config to collection
    client.results.append({"result": {"id": 42}})  # create collection
    obj.prepare()
    self.assertIsInstance(obj.test, CloudCollectionTest)
def test_create_collection(self):
    """With the deprecated API disabled, prepare() should create a CloudCollectionTest.

    NOTE(review): this file defines test_create_collection more than once; only the
    last definition is collected by unittest — confirm which revision is intended.
    """
    obj = CloudProvisioning()
    obj.engine = EngineEmul()
    mock_mod = ModuleMock.__module__ + "." + ModuleMock.__name__
    obj.engine.config.merge({
        ScenarioExecutor.EXEC: {
            "executor": "mock",
        },
        "modules": {
            "mock": mock_mod,
        },
        "provisioning": "mock",
    })
    obj.parameters = obj.engine.config['execution']
    obj.settings.merge({"token": "FakeToken",
                        'default-location': "us-west-1",
                        "delete-test-files": False,
                        "use-deprecated-api": False})
    obj.client = client = BlazeMeterClientEmul(obj.log)
    # queued fake API responses, consumed in order
    client.results.append({"result": []})  # find collection
    client.results.append({"result": []})  # find test
    client.results.append(self.__get_user_info())  # user
    client.results.append({})  # upload files
    client.results.append({"result": {"name": "Taurus Collection", "items": []}})  # transform config to collection
    client.results.append({"result": {"id": 42}})  # create collection
    obj.prepare()
    self.assertIsInstance(obj.test, CloudCollectionTest)
def test_reuse_project_id(self):
    """When a 'project' id is configured, prepare() should reuse the matching existing test."""
    obj = CloudProvisioning()
    obj.engine = EngineEmul()
    obj.engine.config.merge({
        ScenarioExecutor.EXEC: {
            "executor": "mock",
        },
        "modules": {
            "mock": ModuleMock.__module__ + "." + ModuleMock.__name__
        },
        "provisioning": "mock"
    })
    obj.parameters = obj.engine.config['execution']
    obj.settings.merge({"token": "FakeToken",
                        'default-location': "us-west-1",
                        "delete-test-files": False,
                        "project": 1428})
    obj.client = client = BlazeMeterClientEmul(obj.log)
    # queued fake API responses, consumed in order
    client.results.append({"result": []})  # collections
    client.results.append({"result": [{"id": 5174715,
                                       "projectId": 1428,
                                       "name": "Taurus Cloud Test",
                                       "configuration": {"type": "taurus"}}]})  # find test
    client.results.append(self.__get_user_info())  # user
    client.results.append({})  # upload files
    obj.prepare()
def test_collection_defloc_sandbox(self):
    """With no locations configured, the collection should get the sandbox/default harbor location.

    Fix: `assertEquals` is a deprecated alias of `assertEqual` (removed in Python 3.12);
    use the canonical name.
    """
    obj = CloudProvisioning()
    obj.engine = EngineEmul()
    obj.engine.config.merge({
        ScenarioExecutor.EXEC: {
            "executor": "mock",
            "concurrency": 5500,
        },
        "modules": {
            "mock": ModuleMock.__module__ + "." + ModuleMock.__name__
        },
        "provisioning": "mock"
    })
    obj.parameters = obj.engine.config['execution']
    obj.engine.aggregator = ConsolidatingAggregator()
    obj.settings["token"] = "FakeToken"
    obj.settings["browser-open"] = False
    obj.settings["use-deprecated-api"] = False
    obj.client = client = BlazeMeterClientEmul(obj.log)
    # queued fake API responses, consumed in order
    client.results.append({"result": []})  # find collection
    client.results.append({"result": []})  # find test
    client.results.append(self.__get_user_info())  # user
    client.results.append({})  # upload files
    client.results.append({"result": {"name": "Taurus Collection",
                                      "items": []}})  # transform config to collection
    client.results.append({"result": {"id": 42}})  # create collection
    obj.prepare()
    exec_locations = obj.executors[0].execution['locations']
    expected_location = 'harbor-5591335d8588531f5cde3a04'
    self.assertIn(expected_location, exec_locations)
    self.assertEqual(1, exec_locations[expected_location])
def test_aaskip_reporting(self):
    """Cloud provisioning should strip 'blazemeter' reporters from the reporting list.

    Fix: `assertEquals` is a deprecated alias of `assertEqual` (removed in Python 3.12);
    use the canonical name.
    """
    obj = CloudProvisioning()
    obj.engine = EngineEmul()
    obj.engine.config.merge({
        ScenarioExecutor.EXEC: {
            "executor": "mock",
        },
        "modules": {
            "mock": ModuleMock.__module__ + "." + ModuleMock.__name__,
            "blazemeter": ModuleMock.__module__ + "." + ModuleMock.__name__,
            "second_reporter": ModuleMock.__module__ + "." + ModuleMock.__name__,
            "third_reporter": ModuleMock.__module__ + "." + ModuleMock.__name__,
        },
        "provisioning": "mock",
        "reporting": ["blazemeter",
                      {"module": "blazemeter", "option": "value"},
                      "second_reporter",
                      {"module": "third_reporter"}]
    })
    obj.parameters = obj.engine.config['execution']
    obj.settings["token"] = "FakeToken"
    obj.settings['default-location'] = "us-west-1"
    obj.client = client = BlazeMeterClientEmul(obj.log)
    # queued fake API responses, consumed in order
    client.results.append(self.__get_user_info())  # user
    client.results.append({"result": []})  # tests
    client.results.append({"result": {"id": id(client)}})  # create test
    client.results.append({"files": []})  # test files
    client.results.append({})  # upload files
    obj.prepare()
    modules = [reporter['module'] for reporter in obj.engine.config['reporting']]
    self.assertEqual(modules, ['second_reporter', 'third_reporter'])
def test_reuse_project_id(self):
    """When a 'project' id is configured, prepare() should reuse the matching existing test.

    NOTE(review): this file defines test_reuse_project_id more than once; only the
    last definition is collected by unittest — confirm which revision is intended.
    """
    obj = CloudProvisioning()
    obj.engine = EngineEmul()
    mock_mod = ModuleMock.__module__ + "." + ModuleMock.__name__
    obj.engine.config.merge({
        ScenarioExecutor.EXEC: {
            "executor": "mock",
        },
        "modules": {
            "mock": mock_mod,
        },
        "provisioning": "mock",
    })
    obj.parameters = obj.engine.config['execution']
    obj.settings.merge({
        "token": "FakeToken",
        'default-location': "us-west-1",
        "delete-test-files": False,
        "project": 1428,
    })
    obj.client = client = BlazeMeterClientEmul(obj.log)
    # queued fake API responses, consumed in order
    client.results.append({"result": []})  # collections
    client.results.append({
        "result": [{
            "id": 5174715,
            "projectId": 1428,
            "name": "Taurus Cloud Test",
            "configuration": {"type": "taurus"},
        }]
    })  # find test
    client.results.append(self.__get_user_info())  # user
    client.results.append({})  # upload files
    obj.prepare()
def test_sandbox_default_location(self):
    """Without explicit locations, the sandbox default 'us-west-1' should be applied.

    Fix: `assertEquals` is a deprecated alias of `assertEqual` (removed in Python 3.12);
    use the canonical name.
    """
    obj = CloudProvisioning()
    obj.engine = EngineEmul()
    obj.engine.config.merge({
        ScenarioExecutor.EXEC: {
            "executor": "mock",
            "concurrency": 5500,
        },
        "modules": {
            "mock": ModuleMock.__module__ + "." + ModuleMock.__name__
        },
        "provisioning": "mock"
    })
    obj.parameters = obj.engine.config['execution']
    obj.engine.aggregator = ConsolidatingAggregator()
    obj.settings["token"] = "FakeToken"
    obj.settings["browser-open"] = False
    obj.client = client = BlazeMeterClientEmul(obj.log)
    # queued fake API responses, consumed in order
    client.results.append({"result": []})  # collections
    client.results.append({"result": []})  # tests
    client.results.append(self.__get_user_info())  # user
    client.results.append({"result": {"id": id(client)}})  # create test
    client.results.append({"files": []})  # create test
    client.results.append({})  # upload files
    obj.prepare()
    exec_locations = obj.executors[0].execution['locations']
    self.assertEqual(1, exec_locations['us-west-1'])
def test_cloud_config_cleanup(self):
    """prepare_cloud_config() must strip load-profile keys from the execution sent to the cloud."""
    obj = CloudProvisioning()
    obj.engine = EngineEmul()
    obj.engine.config.merge({
        ScenarioExecutor.EXEC: {
            "concurrency": {
                "local": 1,
                "cloud": 10,
            },
            "locations": {
                "us-east-1": 1,
                "us-west": 2,
            },
        },
    })
    obj.parameters = obj.engine.config['execution']
    obj.test = CloudTaurusTest(obj.engine.config['execution'], {}, obj.client,
                               None, None, "name", logging.getLogger(''))
    cloud_config = obj.test.prepare_cloud_config(obj.engine.config)
    execution = cloud_config["execution"][0]
    for forbidden_key in ("throughput", "ramp-up", "hold-for", "steps"):
        self.assertNotIn(forbidden_key, execution)
def test_collection_defloc_sandbox(self):
    """With no locations configured, the collection should get the sandbox/default harbor location.

    NOTE(review): duplicate of another test_collection_defloc_sandbox in this file;
    only the last definition is collected by unittest.
    Fix: `assertEquals` is a deprecated alias of `assertEqual` (removed in Python 3.12).
    """
    obj = CloudProvisioning()
    obj.engine = EngineEmul()
    obj.engine.config.merge({
        ScenarioExecutor.EXEC: {
            "executor": "mock",
            "concurrency": 5500,
        },
        "modules": {
            "mock": ModuleMock.__module__ + "." + ModuleMock.__name__
        },
        "provisioning": "mock"
    })
    obj.parameters = obj.engine.config['execution']
    obj.engine.aggregator = ConsolidatingAggregator()
    obj.settings["token"] = "FakeToken"
    obj.settings["browser-open"] = False
    obj.settings["use-deprecated-api"] = False
    obj.client = client = BlazeMeterClientEmul(obj.log)
    # queued fake API responses, consumed in order
    client.results.append({"result": []})  # find collection
    client.results.append({"result": []})  # find test
    client.results.append(self.__get_user_info())  # user
    client.results.append({})  # upload files
    client.results.append({"result": {"name": "Taurus Collection",
                                      "items": []}})  # transform config to collection
    client.results.append({"result": {"id": 42}})  # create collection
    obj.prepare()
    exec_locations = obj.executors[0].execution['locations']
    expected_location = 'harbor-5591335d8588531f5cde3a04'
    self.assertIn(expected_location, exec_locations)
    self.assertEqual(1, exec_locations[expected_location])
def test_nonexistent_location(self):
    """A location unknown to the account must make prepare() raise TaurusConfigError.

    NOTE(review): a sibling definition of this test expects ValueError instead —
    only the last definition is collected; confirm which revision is intended.
    """
    obj = CloudProvisioning()
    obj.engine = EngineEmul()
    obj.engine.config.merge({
        ScenarioExecutor.EXEC: {
            "executor": "mock",
            "concurrency": 5500,
        },
        "modules": {
            "mock": ModuleMock.__module__ + "." + ModuleMock.__name__,
        },
        "locations": {
            "us-not-found": 1,
        },
        "provisioning": "mock",
    })
    obj.parameters = obj.engine.config['execution']
    obj.engine.aggregator = ConsolidatingAggregator()
    obj.settings["token"] = "FakeToken"
    obj.settings["browser-open"] = False
    obj.settings["use-deprecated-api"] = False
    obj.client = client = BlazeMeterClientEmul(obj.log)
    # queued fake API responses, consumed in order
    client.results.append({"result": []})  # collections
    client.results.append({"result": []})  # tests
    client.results.append(self.__get_user_info())  # user
    self.assertRaises(TaurusConfigError, obj.prepare)
def test_pack_and_send_to_blazemeter(self):
    """A java_package scenario should be zipped and registered for unpacking in the cloud."""
    obj = CloudProvisioning()
    obj.engine = EngineEmul()
    obj.engine.config.merge({
        "execution": {
            "executor": "selenium",
            "concurrency": 5500,
            "locations": {
                "us-east-1": 1,
                "us-west": 2,
            },
            "scenario": {
                "script": RESOURCES_DIR + "selenium/junit/java_package",
            },
        },
        "modules": {
            "selenium": "bzt.modules.selenium.SeleniumExecutor",
            "cloud": "bzt.modules.blazemeter.CloudProvisioning",
            "junit": "bzt.modules.java.JUnitTester",
        },
        "provisioning": "cloud",
    })
    obj.parameters = obj.engine.config['execution']
    obj.settings["token"] = "FakeToken"
    mock = BZMock(obj.user)
    mock.mock_get.update({
        'https://a.blazemeter.com/api/v4/web/elfinder/1?cmd=open&target=s1_Lw': {"files": []},
    })
    mock.mock_post.update({
        'https://a.blazemeter.com/api/v4/projects': {"result": {"id": 1, 'workspaceId': 1}},
        'https://a.blazemeter.com/api/v4/tests': {"result": {"id": 1}},
        'https://a.blazemeter.com/api/v4/tests/1/files': {},
    })
    mock.mock_patch.update({'https://a.blazemeter.com/api/v4/tests/1': {"result": {}}})
    obj.prepare()
    unpack_cfgs = obj.engine.config.get(Service.SERV)
    self.assertEqual(len(unpack_cfgs), 1)
    self.assertEqual(unpack_cfgs[0]['module'], Unpacker.UNPACK)
    self.assertEqual(unpack_cfgs[0][Unpacker.FILES], ['java_package.zip'])
    self.assertTrue(zipfile.is_zipfile(obj.engine.artifacts_dir + '/java_package.zip'))
def test_full_collection(self):
    """Full lifecycle of a cloud collection: prepare, startup, check, shutdown, post_process.

    Fix: `assertEquals` is a deprecated alias of `assertEqual` (removed in Python 3.12);
    use the canonical name.
    """
    obj = CloudProvisioning()
    obj.engine = EngineEmul()
    obj.engine.config.merge({
        ScenarioExecutor.EXEC: {
            "executor": "mock",
            "concurrency": 5500,
            "locations": {
                "us-east-1": 1,
                "us-west": 2
            }
        },
        "modules": {
            "mock": ModuleMock.__module__ + "." + ModuleMock.__name__
        },
        "provisioning": "mock"
    })
    obj.parameters = obj.engine.config['execution']
    obj.engine.aggregator = ConsolidatingAggregator()
    obj.settings["token"] = "FakeToken"
    obj.settings["browser-open"] = False
    obj.settings["use-deprecated-api"] = False
    obj.client = client = BlazeMeterClientEmul(obj.log)
    # queued fake API responses, consumed in order
    client.results.append({"result": []})  # collections
    client.results.append({"result": []})  # tests
    client.results.append(self.__get_user_info())  # user
    client.results.append({"files": []})  # upload files
    client.results.append({"result": {"name": "Taurus Collection",
                                      "items": []}})  # transform config to collection
    client.results.append({"result": {"id": id(client)}})  # create collection
    client.results.append({"result": {"id": id(obj)}})  # start
    client.results.append({"result": {"id": id(obj), "sessions": []}})  # get master
    client.results.append({"result": []})  # get master sessions
    client.results.append({})  # terminate
    obj.prepare()
    self.assertEqual(1, obj.executors[0].execution['locations']['us-east-1'])
    self.assertEqual(2, obj.executors[0].execution['locations']['us-west'])
    obj.startup()
    obj.check()
    obj.shutdown()
    obj.post_process()
def test_collection_simultaneous_start(self):
    """A stuck session should trigger a force-start of the collection on the second check()."""
    obj = CloudProvisioning()
    obj.engine = EngineEmul()
    mock_mod = ModuleMock.__module__ + "." + ModuleMock.__name__
    obj.engine.config.merge({
        ScenarioExecutor.EXEC: {
            "executor": "mock",
            "concurrency": 5500,
            "locations": {
                "us-east-1": 1,
                "us-west": 1,
            },
        },
        "modules": {
            "mock": mock_mod,
        },
        "provisioning": "mock",
    })
    obj.parameters = obj.engine.config['execution']
    obj.engine.aggregator = ConsolidatingAggregator()
    obj.settings["token"] = "FakeToken"
    obj.settings["browser-open"] = False
    obj.settings["check-interval"] = "0ms"  # do not skip checks
    obj.settings["use-deprecated-api"] = False
    obj.client = client = BlazeMeterClientEmul(obj.log)
    # queued fake API responses, consumed in order
    client.results.append({"result": []})  # find collection
    client.results.append({"result": []})  # find test
    client.results.append(self.__get_user_info())  # user
    client.results.append({})  # upload files
    client.results.append({"result": {"name": "Taurus Collection", "items": []}})  # transform config to collection
    client.results.append({"result": {"id": 42}})  # create collection
    client.results.append({"result": {"id": id(obj)}})  # start
    client.results.append({"result": {"id": id(obj),
                                      "sessions": [{"id": "s1", "status": "JMETER_CONSOLE_INIT"},
                                                   {"id": "s2", "status": "INIT_SCRIPT"}]}})  # status
    client.results.append({"result": []})  # sessions
    client.results.append({"result": {"id": id(obj),
                                      "sessions": [{"id": "s1", "status": "JMETER_CONSOLE_INIT"},
                                                   {"id": "s2", "status": "JMETER_CONSOLE_INIT"}]}})
    client.results.append({"result": []})  # sessions
    client.results.append({"result": {}})  # force start
    client.results.append({"result": {"id": id(obj)}})  # master status
    client.results.append({"result": []})  # sessions
    client.results.append({})  # graceful shutdown
    client.results.append({"result": {"status": "ENDED"}})  # master status
    obj.prepare()
    obj.startup()
    obj.check()
    obj.check()  # this one should trigger force start
    obj.check()
    obj.shutdown()
    obj.post_process()
    self.assertEqual(client.results, [])
def test_locations_on_both_levels(self):
    """Per-execution locations should win over global ones, with a warning logged.

    Fix: `yaml.load(open(...))` leaked the file handle; read the file inside a
    `with` block instead. The yaml.load call itself is kept unchanged —
    NOTE(review): yaml.load without an explicit Loader is unsafe on untrusted
    input; here it reads a locally generated artifact.
    """
    obj = CloudProvisioning()
    obj.engine = EngineEmul()
    obj.engine.config.merge({
        ScenarioExecutor.EXEC: [{
            "executor": "mock",
            "concurrency": 5500,
            "locations": {
                "eu-west-1": 1,
            }
        }],
        "locations": {
            "ams3": 1,
        },
        "modules": {
            "mock": ModuleMock.__module__ + "." + ModuleMock.__name__
        },
        "provisioning": "mock"
    })
    obj.parameters = obj.engine.config['execution'][0]
    obj.engine.aggregator = ConsolidatingAggregator()
    log_recorder = RecordingHandler()
    obj.log.addHandler(log_recorder)
    obj.settings["token"] = "FakeToken"
    obj.settings["browser-open"] = False
    obj.settings["use-deprecated-api"] = False
    obj.client = client = BlazeMeterClientEmul(obj.log)
    # queued fake API responses, consumed in order
    client.results.append({"result": []})  # find test
    client.results.append({"result": []})  # find collection
    client.results.append(self.__get_user_info())  # user
    client.results.append({})  # upload files
    client.results.append({"result": {"name": "Taurus Collection",
                                      "items": []}})  # transform config to collection
    client.results.append({"result": {"id": 42}})  # create collection
    obj.prepare()
    with open(os.path.join(obj.engine.artifacts_dir, "cloud.yml")) as fds:
        cloud_config = yaml.load(fds)
    self.assertNotIn("locations", cloud_config)
    for execution in cloud_config["execution"]:
        self.assertIn("locations", execution)
    log_buff = log_recorder.warn_buff.getvalue()
    self.assertIn(
        "Each execution has locations specified, global locations won't have any effect",
        log_buff)
def test_pack_and_send_to_blazemeter(self):
    """A java_package scenario should be zipped and registered for unpacking in the cloud.

    NOTE(review): this file defines test_pack_and_send_to_blazemeter more than once;
    only the last definition is collected by unittest.
    """
    obj = CloudProvisioning()
    obj.engine = EngineEmul()
    obj.engine.config.merge({
        "execution": {
            "executor": "selenium",
            "concurrency": 5500,
            "locations": {
                "us-east-1": 1,
                "us-west": 2},
            "scenario": {
                "script": RESOURCES_DIR + "selenium/junit/java_package"}},
        "modules": {
            "selenium": "bzt.modules.selenium.SeleniumExecutor",
            "cloud": "bzt.modules.blazemeter.CloudProvisioning",
            "junit": "bzt.modules.java.JUnitTester"},
        "provisioning": "cloud"
    })
    obj.engine.unify_config()
    obj.parameters = obj.engine.config['execution'][0]
    obj.settings["token"] = "FakeToken"
    mock = BZMock(obj.user)
    mock.mock_get.update({
        'https://a.blazemeter.com/api/v4/web/elfinder/1?cmd=open&target=s1_Lw': {"files": []},
        'https://a.blazemeter.com/api/v4/multi-tests?projectId=1&name=Taurus+Cloud+Test': {"result": []},
        'https://a.blazemeter.com/api/v4/tests?projectId=1&name=Taurus+Cloud+Test': {
            "result": [{"id": 1, 'name': 'Taurus Cloud Test', "configuration": {"type": "taurus"}}]
        },
    })
    mock.mock_post.update({
        'https://a.blazemeter.com/api/v4/projects': {"result": {"id": 1, 'workspaceId': 1}},
        'https://a.blazemeter.com/api/v4/multi-tests': {"result": {}},
        'https://a.blazemeter.com/api/v4/tests?projectId=1&name=Taurus+Cloud+Test': {
            "result": {"id": 1, "configuration": {"type": "taurus"}}
        },
        'https://a.blazemeter.com/api/v4/tests/1/files': {}
    })
    mock.mock_patch.update({'https://a.blazemeter.com/api/v4/tests/1': {"result": {}}})
    obj.prepare()
    unpack_cfgs = obj.engine.config.get(Service.SERV)
    self.assertEqual(len(unpack_cfgs), 1)
    self.assertEqual(unpack_cfgs[0]['module'], Unpacker.UNPACK)
    self.assertEqual(unpack_cfgs[0][Unpacker.FILES], ['java_package.zip'])
    self.assertTrue(zipfile.is_zipfile(obj.engine.artifacts_dir + '/java_package.zip'))
def test_settings_from_blazemeter_mod(self):
    """Provisioning settings must override those inherited from the 'blazemeter' module config."""
    obj = CloudProvisioning()
    obj.engine = EngineEmul()
    mock_mod = ModuleMock.__module__ + "." + ModuleMock.__name__
    obj.engine.config.merge({
        ScenarioExecutor.EXEC: {
            "executor": "mock",
            "concurrency": 5500,
            "locations": {
                "us-east-1": 1,
                "us-west": 1,
            },
        },
        "modules": {
            "mock": mock_mod,
            "blazemeter": {
                "class": mock_mod,
                "token": "bmtoken",
                "detach": True,
                "browser-open": None,
                "check-interval": 10.0,
            },
        },
        "provisioning": "mock",
    })
    obj.parameters = obj.engine.config['execution']
    obj.engine.aggregator = ConsolidatingAggregator()
    # these should override 'blazemeter' settings
    obj.settings["check-interval"] = 20.0
    obj.settings["browser-open"] = "both"
    obj.client = client = BlazeMeterClientEmul(obj.log)
    # queued fake API responses, consumed in order
    client.results.append({"result": []})  # collection
    client.results.append({"result": []})  # tests
    client.results.append(self.__get_user_info())  # user
    client.results.append({"result": {"id": id(client)}})  # create test
    client.results.append({"files": []})  # create test
    client.results.append({})  # upload files
    obj.prepare()
    self.assertEqual(obj.detach, True)
    self.assertEqual(obj.browser_open, "both")
    self.assertEqual(obj.client.token, "bmtoken")
    self.assertEqual(obj.check_interval, 20.0)
    self.assertEqual(client.results, [])
def test_simple(self):
    """Basic cloud run: locations doubled per user weighting, widget shows per-location counts.

    Fix: `assertEquals` is a deprecated alias of `assertEqual` (removed in Python 3.12);
    use the canonical name.
    """
    obj = CloudProvisioning()
    obj.engine = EngineEmul()
    obj.engine.config.merge({
        "execution": {
            "executor": "mock",
            "concurrency": 5500,
            "locations": {
                "us-east-1": 1,
                "us-west": 2
            }
        },
        "modules": {
            "mock": ModuleMock.__module__ + "." + ModuleMock.__name__
        },
        "provisioning": "mock"
    })
    obj.parameters = obj.engine.config['execution']
    obj.engine.aggregator = ConsolidatingAggregator()
    obj.settings["token"] = "FakeToken"
    obj.client = client = BlazeMeterClientEmul(obj.log)
    # queued fake API responses, consumed in order
    client.results.append(self.__get_user_info())  # user
    client.results.append({"result": []})  # tests
    client.results.append({"result": {"id": id(client)}})  # create test
    client.results.append({})  # upload files
    client.results.append({"result": {"id": id(obj)}})  # start
    client.results.append({"result": {"id": id(obj)}})  # get master
    client.results.append({})  # terminate
    obj.prepare()
    widget = obj.get_widget()
    self.assertEqual(2, obj.executors[0].execution['locations']['us-east-1'])
    self.assertEqual(4, obj.executors[0].execution['locations']['us-west'])
    obj.startup()
    obj.check()
    widget.render((200,), False)
    txt = widget.text.get_text()[0]
    logging.info("Text: '%s'", txt)
    self.assertIn("us-east-1: 2", txt)
    self.assertIn("us-west: 4", txt)
    obj.shutdown()
    obj.post_process()
def test_check_interval(self):
    """check() calls inside the configured interval must be skipped (no API traffic)."""
    obj = CloudProvisioning()
    obj.engine = EngineEmul()
    obj.engine.config.merge({
        ScenarioExecutor.EXEC: {
            "executor": "mock",
            "concurrency": 5500,
            "locations": {
                "us-east-1": 1,
                "us-west": 1,
            },
        },
        "modules": {
            "mock": ModuleMock.__module__ + "." + ModuleMock.__name__,
        },
        "provisioning": "mock",
    })
    obj.parameters = obj.engine.config['execution']
    obj.engine.aggregator = ConsolidatingAggregator()
    obj.settings["token"] = "FakeToken"
    obj.settings["browser-open"] = False
    obj.settings["check-interval"] = "1s"
    obj.client = client = BlazeMeterClientEmul(obj.log)
    # queued fake API responses, consumed in order
    client.results.append({"result": []})  # collection
    client.results.append({"result": []})  # tests
    client.results.append(self.__get_user_info())  # user
    client.results.append({"result": {"id": id(client)}})  # create test
    client.results.append({"files": []})  # create test
    client.results.append({})  # upload files
    client.results.append({"result": {"id": id(obj)}})  # start test
    client.results.append({"result": {"id": id(obj)}})  # status
    client.results.append({"result": []})  # sessions
    client.results.append({"result": {"id": id(obj)}})  # status
    client.results.append({"result": []})  # sessions
    obj.prepare()
    obj.startup()
    obj.check()  # this one should work
    obj.check()  # this one should be skipped
    time.sleep(1)
    obj.check()  # this one should work
    obj.check()  # this one should skip
    self.assertEqual(client.results, [])
def test_simple(self):
    """Basic cloud run: locations doubled per user weighting, widget shows per-location counts.

    NOTE(review): duplicate of another test_simple in this file; only the last
    definition is collected by unittest.
    Fix: `assertEquals` is a deprecated alias of `assertEqual` (removed in Python 3.12).
    """
    obj = CloudProvisioning()
    obj.engine = EngineEmul()
    obj.engine.config.merge({
        "execution": {
            "executor": "mock",
            "concurrency": 5500,
            "locations": {
                "us-east-1": 1,
                "us-west": 2
            }
        },
        "modules": {
            "mock": ModuleMock.__module__ + "." + ModuleMock.__name__
        },
        "provisioning": "mock"
    })
    obj.parameters = obj.engine.config['execution']
    obj.engine.aggregator = ConsolidatingAggregator()
    obj.settings["token"] = "FakeToken"
    obj.client = client = BlazeMeterClientEmul(obj.log)
    # queued fake API responses, consumed in order
    client.results.append(self.__get_user_info())  # user
    client.results.append({"result": []})  # tests
    client.results.append({"result": {"id": id(client)}})  # create test
    client.results.append({})  # upload files
    client.results.append({"result": {"id": id(obj)}})  # start
    client.results.append({"result": {"id": id(obj)}})  # get master
    client.results.append({})  # terminate
    obj.prepare()
    widget = obj.get_widget()
    self.assertEqual(2, obj.executors[0].execution['locations']['us-east-1'])
    self.assertEqual(4, obj.executors[0].execution['locations']['us-west'])
    obj.startup()
    obj.check()
    widget.render((200,), False)
    txt = widget.text.get_text()[0]
    logging.info("Text: '%s'", txt)
    self.assertIn("us-east-1: 2", txt)
    self.assertIn("us-west: 4", txt)
    obj.shutdown()
    obj.post_process()
def test_pack_and_send_to_blazemeter(self):
    """A java_package scenario should be zipped and registered for unpacking in the cloud."""
    obj = CloudProvisioning()
    obj.engine = EngineEmul()
    obj.engine.config.merge({
        "execution": {
            "executor": "selenium",
            "concurrency": 5500,
            "locations": {
                "us-east-1": 1,
                "us-west": 2,
            },
            "scenario": {
                "script": __dir__() + "/../selenium/java_package",
            },
        },
        "modules": {
            "selenium": "bzt.modules.selenium.SeleniumExecutor",
            "cloud": "bzt.modules.blazemeter.CloudProvisioning",
        },
        "provisioning": "cloud",
    })
    obj.parameters = obj.engine.config['execution']
    obj.settings["token"] = "FakeToken"
    obj.client = client = BlazeMeterClientEmul(obj.log)
    # queued fake API responses, consumed in order
    client.results.append(self.__get_user_info())  # user
    client.results.append({"result": []})  # tests
    client.results.append({"result": {"id": id(client)}})  # create test
    client.results.append({"files": []})  # create test
    client.results.append({})  # upload files
    client.results.append({"result": {"id": id(obj)}})  # start
    client.results.append({"result": {"id": id(obj)}})  # get master
    client.results.append({"result": []})  # get master sessions
    client.results.append({})  # terminate
    obj.prepare()
    unpack_cfgs = obj.engine.config.get(Service.SERV)
    self.assertEqual(len(unpack_cfgs), 1)
    self.assertEqual(unpack_cfgs[0]['module'], Unpacker.UNPACK)
    self.assertEqual(unpack_cfgs[0][Unpacker.FILES], ['java_package.zip'])
    self.assertTrue(zipfile.is_zipfile(obj.engine.artifacts_dir + '/java_package.zip'))
def test_toplevel_locations(self):
    """Top-level 'locations' plus 'locations-weighted' must survive into the cloud config.

    Fix: `yaml.load(open(...))` leaked the file handle; read the file inside a
    `with` block instead. The yaml.load call itself is kept unchanged —
    NOTE(review): yaml.load without an explicit Loader is unsafe on untrusted
    input; here it reads a locally generated artifact.
    """
    obj = CloudProvisioning()
    obj.engine = EngineEmul()
    obj.engine.config.merge({
        ScenarioExecutor.EXEC: {
            "executor": "mock",
            "concurrency": 5500,
        },
        "modules": {
            "mock": ModuleMock.__module__ + "." + ModuleMock.__name__
        },
        "locations": {
            "us-east-1": 1,
            "us-west": 2
        },
        "locations-weighted": True,
        "provisioning": "mock"
    })
    obj.parameters = obj.engine.config['execution']
    obj.engine.aggregator = ConsolidatingAggregator()
    obj.settings["token"] = "FakeToken"
    obj.settings["browser-open"] = False
    obj.settings["use-deprecated-api"] = False
    obj.client = client = BlazeMeterClientEmul(obj.log)
    # queued fake API responses, consumed in order
    client.results.append({"result": []})  # collections
    client.results.append({"result": []})  # tests
    client.results.append(self.__get_user_info())  # user
    client.results.append({})  # upload files
    client.results.append({"result": {"name": "Taurus Collection",
                                      "items": []}})  # transform config to collection
    client.results.append({"result": {"id": 42}})  # create collection
    obj.prepare()
    with open(os.path.join(obj.engine.artifacts_dir, "cloud.yml")) as fds:
        conf = yaml.load(fds)
    self.assertIn('locations', conf)
    self.assertIn('locations-weighted', conf)
    self.assertEqual(conf['locations']['us-east-1'], 1)
    self.assertEqual(conf['locations']['us-west'], 2)
    self.assertNotIn('locations', conf['execution'][0])
def test_full_collection(self):
    """Full lifecycle of a cloud collection: prepare, startup, check, shutdown, post_process.

    NOTE(review): duplicate of another test_full_collection in this file; only the
    last definition is collected by unittest.
    Fix: `assertEquals` is a deprecated alias of `assertEqual` (removed in Python 3.12).
    """
    obj = CloudProvisioning()
    obj.engine = EngineEmul()
    obj.engine.config.merge({
        ScenarioExecutor.EXEC: {
            "executor": "mock",
            "concurrency": 5500,
            "locations": {
                "us-east-1": 1,
                "us-west": 2
            }
        },
        "modules": {
            "mock": ModuleMock.__module__ + "." + ModuleMock.__name__
        },
        "provisioning": "mock"
    })
    obj.parameters = obj.engine.config['execution']
    obj.engine.aggregator = ConsolidatingAggregator()
    obj.settings["token"] = "FakeToken"
    obj.settings["browser-open"] = False
    obj.settings["use-deprecated-api"] = False
    obj.client = client = BlazeMeterClientEmul(obj.log)
    # queued fake API responses, consumed in order
    client.results.append({"result": []})  # collections
    client.results.append({"result": []})  # tests
    client.results.append(self.__get_user_info())  # user
    client.results.append({"files": []})  # upload files
    client.results.append({"result": {"name": "Taurus Collection",
                                      "items": []}})  # transform config to collection
    client.results.append({"result": {"id": id(client)}})  # create collection
    client.results.append({"result": {"id": id(obj)}})  # start
    client.results.append({"result": {"id": id(obj), "sessions": []}})  # get master
    client.results.append({"result": []})  # get master sessions
    client.results.append({})  # terminate
    obj.prepare()
    self.assertEqual(1, obj.executors[0].execution['locations']['us-east-1'])
    self.assertEqual(2, obj.executors[0].execution['locations']['us-west'])
    obj.startup()
    obj.check()
    obj.shutdown()
    obj.post_process()
def test_locations_on_both_levels(self):
    """Per-execution locations should win over global ones, with a warning logged.

    NOTE(review): duplicate of another test_locations_on_both_levels in this file;
    only the last definition is collected by unittest.
    Fix: `yaml.load(open(...))` leaked the file handle; read the file inside a
    `with` block. The yaml.load call itself is kept unchanged — NOTE(review):
    yaml.load without an explicit Loader is unsafe on untrusted input.
    """
    obj = CloudProvisioning()
    obj.engine = EngineEmul()
    obj.engine.config.merge({
        ScenarioExecutor.EXEC: [{
            "executor": "mock",
            "concurrency": 5500,
            "locations": {
                "eu-west-1": 1,
            }
        }],
        "locations": {
            "ams3": 1,
        },
        "modules": {
            "mock": ModuleMock.__module__ + "." + ModuleMock.__name__
        },
        "provisioning": "mock"
    })
    obj.parameters = obj.engine.config['execution'][0]
    obj.engine.aggregator = ConsolidatingAggregator()
    log_recorder = RecordingHandler()
    obj.log.addHandler(log_recorder)
    obj.settings["token"] = "FakeToken"
    obj.settings["browser-open"] = False
    obj.settings["use-deprecated-api"] = False
    obj.client = client = BlazeMeterClientEmul(obj.log)
    # queued fake API responses, consumed in order
    client.results.append({"result": []})  # find test
    client.results.append({"result": []})  # find collection
    client.results.append(self.__get_user_info())  # user
    client.results.append({})  # upload files
    client.results.append({"result": {"name": "Taurus Collection", "items": []}})  # transform config to collection
    client.results.append({"result": {"id": 42}})  # create collection
    obj.prepare()
    with open(os.path.join(obj.engine.artifacts_dir, "cloud.yml")) as fds:
        cloud_config = yaml.load(fds)
    self.assertNotIn("locations", cloud_config)
    for execution in cloud_config["execution"]:
        self.assertIn("locations", execution)
    log_buff = log_recorder.warn_buff.getvalue()
    self.assertIn("Each execution has locations specified, global locations won't have any effect", log_buff)
def test_pack_and_send_to_blazemeter(self):
    """A java_package scenario should be zipped and registered for unpacking in the cloud.

    NOTE(review): this file defines test_pack_and_send_to_blazemeter more than once;
    only the last definition is collected by unittest.
    """
    obj = CloudProvisioning()
    obj.engine = EngineEmul()
    obj.engine.config.merge({
        "execution": {
            "executor": "selenium",
            "concurrency": 5500,
            "locations": {
                "us-east-1": 1,
                "us-west": 2},
            "scenario": {
                "script": __dir__() + "/../selenium/java_package"}},
        "modules": {
            "selenium": "bzt.modules.selenium.SeleniumExecutor",
            "cloud": "bzt.modules.blazemeter.CloudProvisioning"},
        "provisioning": "cloud"
    })
    obj.parameters = obj.engine.config['execution']
    obj.settings["token"] = "FakeToken"
    obj.client = client = BlazeMeterClientEmul(obj.log)
    # queued fake API responses, consumed in order
    client.results.append({"result": []})  # collections
    client.results.append({"result": []})  # tests
    client.results.append(self.__get_user_info())  # user
    client.results.append({"result": {"id": id(client)}})  # create test
    client.results.append({"files": []})  # create test
    client.results.append({})  # upload files
    client.results.append({"result": {"id": id(obj)}})  # start
    client.results.append({"result": {"id": id(obj)}})  # get master
    client.results.append({"result": []})  # get master sessions
    client.results.append({})  # terminate
    obj.prepare()
    unpack_cfgs = obj.engine.config.get(Service.SERV)
    self.assertEqual(len(unpack_cfgs), 1)
    self.assertEqual(unpack_cfgs[0]['module'], Unpacker.UNPACK)
    self.assertEqual(unpack_cfgs[0][Unpacker.FILES], ['java_package.zip'])
    self.assertTrue(zipfile.is_zipfile(obj.engine.artifacts_dir + '/java_package.zip'))
def test_cloud_config_cleanup_simple(self):
    """get_config_for_cloud() strips client-side load-profile keys even
    without a full prepare() cycle.

    NOTE(review): this method was originally named test_cloud_config_cleanup,
    the same as the following test; the later definition shadowed this one,
    so it silently never ran. Renamed so both tests are executed.
    """
    obj = CloudProvisioning()
    obj.engine = EngineEmul()
    obj.engine.config.merge({
        ScenarioExecutor.EXEC: {
            "concurrency": {
                "local": 1,
                "cloud": 10,
            },
            "locations": {
                "us-east-1": 1,
                "us-west": 2
            }
        },
    })
    obj.parameters = obj.engine.config['execution']

    cloud_config = obj.get_config_for_cloud()
    execution = cloud_config["execution"][0]
    # local-only load-profile keys must not leak into the cloud config
    self.assertNotIn("throughput", execution)
    self.assertNotIn("ramp-up", execution)
    self.assertNotIn("hold-for", execution)
    self.assertNotIn("steps", execution)
def test_cloud_config_cleanup(self):
    """Client-side load-profile keys must not survive into the generated
    cloud config after a full prepare() cycle."""
    prov = CloudProvisioning()
    prov.engine = EngineEmul()
    prov.engine.config.merge({
        ScenarioExecutor.EXEC: {
            "executor": "mock",
            "concurrency": 5500,
            "locations": {
                "us-east-1": 1,
                "us-west": 2
            }
        },
        "modules": {
            "mock": ModuleMock.__module__ + "." + ModuleMock.__name__
        },
        "provisioning": "mock"
    })
    prov.parameters = prov.engine.config['execution']
    prov.engine.aggregator = ConsolidatingAggregator()
    prov.settings["token"] = "FakeToken"

    prov.client = emul = BlazeMeterClientEmul(prov.log)
    # queue the emulated API responses in the exact order they are consumed
    emul.results.extend([
        self.__get_user_info(),         # user
        {"result": []},                 # tests
        {"result": {"id": id(emul)}},   # create test
        {"files": []},                  # create test
        {},                             # upload files
        {"result": {"id": id(prov)}},   # start
        {"result": {"id": id(prov)}},   # get master
        {"result": []},                 # get master sessions
        {},                             # terminate
    ])

    prov.prepare()
    first_execution = prov.get_config_for_cloud()["execution"][0]
    for banned_key in ("throughput", "ramp-up", "hold-for", "steps"):
        self.assertNotIn(banned_key, first_execution)
def test_toplevel_locations(self):
    """Top-level 'locations' and 'locations-weighted' stay global in the
    generated cloud config and are removed from individual executions."""
    obj = CloudProvisioning()
    obj.engine = EngineEmul()
    obj.engine.config.merge({
        ScenarioExecutor.EXEC: {
            "executor": "mock",
            "concurrency": 5500,
        },
        "modules": {
            "mock": ModuleMock.__module__ + "." + ModuleMock.__name__
        },
        "locations": {
            "us-east-1": 1,
            "us-west": 2
        },
        "locations-weighted": True,
        "provisioning": "mock"
    })
    obj.parameters = obj.engine.config['execution']
    obj.engine.aggregator = ConsolidatingAggregator()
    obj.settings["token"] = "FakeToken"
    obj.settings["browser-open"] = False
    obj.settings["use-deprecated-api"] = False
    obj.client = client = BlazeMeterClientEmul(obj.log)
    # emulated API responses, consumed in this exact order
    client.results.append({"result": []})  # collections
    client.results.append({"result": []})  # tests
    client.results.append(self.__get_user_info())  # user
    client.results.append({})  # upload files
    client.results.append({"result": {"name": "Taurus Collection", "items": []}})  # transform config to collection
    client.results.append({"result": {"id": 42}})  # create collection
    obj.prepare()

    # use a context manager so the artifact file handle is closed
    # deterministically instead of being leaked
    with open(os.path.join(obj.engine.artifacts_dir, "cloud.yml")) as fds:
        conf = yaml.load(fds)

    self.assertIn('locations', conf)
    self.assertIn('locations-weighted', conf)
    self.assertEqual(conf['locations']['us-east-1'], 1)
    self.assertEqual(conf['locations']['us-west'], 2)
    self.assertNotIn('locations', conf['execution'][0])
def test_detach(self):
    """With 'detach' enabled, startup() launches the cloud test and the
    provisioning module stops tracking it: check/shutdown/post_process
    make no further API calls."""
    prov = CloudProvisioning()
    prov.engine = EngineEmul()
    prov.engine.config.merge({
        ScenarioExecutor.EXEC: {
            "executor": "mock",
            "concurrency": 55,
            "locations": {
                "us-east-1": 1,
                "us-west": 2
            }
        },
        "modules": {
            "mock": ModuleMock.__module__ + "." + ModuleMock.__name__
        },
        "provisioning": "mock"
    })
    prov.parameters = prov.engine.config['execution']
    prov.settings["token"] = "FakeToken"
    prov.settings["detach"] = True
    prov.settings["browser-open"] = False

    prov.client = emul = BlazeMeterClientEmul(prov.log)
    # queue the emulated API responses in the exact order they are consumed
    emul.results.extend([
        {"result": []},                 # collections
        {"result": []},                 # tests
        self.__get_user_info(),         # user
        {"result": {"id": id(emul)}},   # create test
        {"files": []},                  # create test
        {},                             # upload files
        {"result": {"id": id(prov)}},   # start
    ])

    prov.prepare()
    self.assertEqual(1, len(emul.results))  # only the "start" response remains
    prov.startup()
    self.assertEqual([], emul.results)  # startup consumed it
    # detached: none of these may hit the API (no responses are queued)
    prov.check()
    prov.shutdown()
    prov.post_process()
def test_terminate_only(self):
    """Test is terminated only when it was started and didn't finish."""
    obj = CloudProvisioning()
    obj.engine = EngineEmul()
    obj.engine.config.merge({
        ScenarioExecutor.EXEC: {
            "executor": "mock",
            "concurrency": 5500,
            "locations": {
                "us-east-1": 1,
                "us-west": 1,
            }
        },
        "modules": {
            "mock": ModuleMock.__module__ + "." + ModuleMock.__name__
        },
        "provisioning": "mock"
    })
    obj.parameters = obj.engine.config['execution']
    obj.engine.aggregator = ConsolidatingAggregator()
    obj.settings["token"] = "FakeToken"
    obj.settings["browser-open"] = False
    obj.settings["check-interval"] = "0ms"  # do not skip checks
    obj.settings["use-deprecated-api"] = False
    obj.client = client = BlazeMeterClientEmul(obj.log)
    # emulated API responses, consumed in this exact order
    client.results.append({"result": []})  # find collection
    client.results.append({"result": []})  # find test
    client.results.append(self.__get_user_info())  # user
    client.results.append({})  # upload files
    client.results.append(
        {"result": {
            "name": "Taurus Collection",
            "items": []
        }})  # transform config to collection
    client.results.append({"result": {"id": 42}})  # create collection
    client.results.append({"result": {"id": id(obj)}})  # start
    # master status: both sessions still initializing, triggers force start
    client.results.append({
        "result": {
            "id": id(obj),
            "sessions": [{
                "id": "s1",
                "status": "JMETER_CONSOLE_INIT"
            }, {
                "id": "s2",
                "status": "JMETER_CONSOLE_INIT"
            }]
        }
    })
    client.results.append({"result": []})  # sessions
    client.results.append({"result": {}})  # force start
    client.results.append({"result": {
        "progress": 120,
        "status": "ENDED"
    }})  # status should trigger shutdown
    client.results.append({"result": []})  # sessions
    obj.prepare()
    obj.startup()
    obj.check()  # this one should trigger force start
    self.assertTrue(obj.check())  # ENDED status: check() reports completion
    obj.shutdown()
    obj.post_process()
    # every queued emulated response must have been consumed
    self.assertEqual(client.results, [])