def test_datapoint_to_json(self):
    obj = ConsolidatingAggregator()
    obj.track_percentiles = [0.0, 50.0, 95.0, 99.0, 100.0]
    obj.prepare()
    obj.add_underling(self.get_success_reader())
    for point in obj.datapoints():
        obj.log.info(to_json(point))
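For orientation: the examples below all follow the same lifecycle: prepare() the aggregator, attach results readers via add_underling(), pull check()/datapoints() while running, and post_process() at the end. A minimal standalone sketch of that flow, assuming bzt is installed (the reader argument stands in for test-harness helpers like get_success_reader() or MockReader, which are not part of bzt's public API):

from bzt.modules.aggregator import ConsolidatingAggregator
from bzt.utils import to_json

def drain(reader):
    # reader: any ResultsReader-like underling, e.g. the tests' MockReader
    obj = ConsolidatingAggregator()
    obj.track_percentiles = [0.0, 50.0, 95.0, 99.0, 100.0]
    obj.prepare()
    obj.add_underling(reader)
    obj.check()  # pull fresh samples from the underlings
    for point in obj.datapoints(final_pass=True):  # drain, including the last second
        obj.log.info(to_json(point))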
Example #2
    def test_sandbox_default_location(self):
        obj = CloudProvisioning()
        obj.engine = EngineEmul()
        obj.engine.config.merge({
            ScenarioExecutor.EXEC: {
                "executor": "mock",
                "concurrency": 5500,
            },
            "modules": {
                "mock": ModuleMock.__module__ + "." + ModuleMock.__name__
            },
            "provisioning": "mock"
        })
        obj.parameters = obj.engine.config['execution']
        obj.engine.aggregator = ConsolidatingAggregator()

        obj.settings["token"] = "FakeToken"
        obj.settings["browser-open"] = False
        obj.client = client = BlazeMeterClientEmul(obj.log)
        client.results.append({"result": []})  # collections
        client.results.append({"result": []})  # tests
        client.results.append(self.__get_user_info())  # user
        client.results.append({"result": {"id": id(client)}})  # create test
        client.results.append({"files": []})  # create test
        client.results.append({})  # upload files
        obj.prepare()
        exec_locations = obj.executors[0].execution['locations']
        self.assertEqual(1, exec_locations['us-west-1'])
Example #3
    def test_collection_defloc_sandbox(self):
        obj = CloudProvisioning()
        obj.engine = EngineEmul()
        obj.engine.config.merge({
            ScenarioExecutor.EXEC: {
                "executor": "mock",
                "concurrency": 5500,
            },
            "modules": {
                "mock": ModuleMock.__module__ + "." + ModuleMock.__name__
            },
            "provisioning": "mock"
        })
        obj.parameters = obj.engine.config['execution']
        obj.engine.aggregator = ConsolidatingAggregator()

        obj.settings["token"] = "FakeToken"
        obj.settings["browser-open"] = False
        obj.settings["use-deprecated-api"] = False
        obj.client = client = BlazeMeterClientEmul(obj.log)
        client.results.append({"result": []})  # find collection
        client.results.append({"result": []})  # find test
        client.results.append(self.__get_user_info())  # user
        client.results.append({})  # upload files
        client.results.append(
            {"result": {
                "name": "Taurus Collection",
                "items": []
            }})  # transform config to collection
        client.results.append({"result": {"id": 42}})  # create collection
        obj.prepare()
        exec_locations = obj.executors[0].execution['locations']
        expected_location = 'harbor-5591335d8588531f5cde3a04'
        self.assertIn(expected_location, exec_locations)
        self.assertEqual(1, exec_locations[expected_location])
Example #4
    def test_negative_response_time_scaling_crash(self):
        obj = ConsolidatingAggregator()
        obj.track_percentiles = [0.0, 50.0, 95.0, 99.0, 100.0]
        obj.prepare()

        self.sniff_log(obj.log)

        mock = MockReader()
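        # Assumed tuple layout, following bzt's ResultsReader convention:
        # (timestamp, label, concurrency, resp_time, conn_time, latency,
        #  resp_code, error, transaction_name, byte_count). The negated r()
        # values inject negative response times to provoke the warning.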
        mock.data.append((1, "first", 1, -r(), r(), r(), 200, 'FAILx3', '', 0))
        mock.data.append((2, "first", 1, -r(), r(), r(), 200, 'FAILx1', '', 0))
        mock.data.append((5, "first", 1, -r(), r(), r(), 200, None, '', 0))
        mock.data.append(
            (7, "second", 1, -r(), r(), r(), 200, 'FAILx3', '', 0))
        mock.data.append((3, "first", 1, -r(), r(), r(), 200, 'FAILx3', '', 0))
        mock.data.append(
            (6, "second", 1, -r(), r(), r(), 200, 'unique FAIL', '', 0))

        obj.add_underling(mock)

        obj.check()
        for point in obj.datapoints():
            obj.log.info(to_json(point))

        self.assertIn("Negative response time reported",
                      self.log_recorder.warn_buff.getvalue())
Example #5
    def test_nonexistent_location(self):
        obj = CloudProvisioning()
        obj.engine = EngineEmul()
        obj.engine.config.merge({
            ScenarioExecutor.EXEC: {
                "executor": "mock",
                "concurrency": 5500,
            },
            "modules": {
                "mock": ModuleMock.__module__ + "." + ModuleMock.__name__
            },
            "locations": {
                "us-not-found": 1,
            },
            "provisioning": "mock"
        })
        obj.parameters = obj.engine.config['execution']
        obj.engine.aggregator = ConsolidatingAggregator()

        obj.settings["token"] = "FakeToken"
        obj.settings["browser-open"] = False
        obj.settings["use-deprecated-api"] = False

        obj.client = client = BlazeMeterClientEmul(obj.log)
        client.results.append({"result": []})  # collections
        client.results.append({"result": []})  # tests
        client.results.append(self.__get_user_info())  # user
        self.assertRaises(TaurusConfigError, obj.prepare)
Example #6
    def test_simple(self):
        self.obj.engine.aggregator = ConsolidatingAggregator()
        self.obj.execution.merge({
            "scenario": {
                "script": RESOURCES_DIR + "BlazeDemo.java",
                "properties": {
                    "scenprop": 3
                }
            },
            "properties": {
                "execprop": 2
            }
        })
        self.obj.settings.merge({
            "properties": {
                "settprop": 1
            },
            "junit-version": 5
        })
        self.obj._compile_scripts = lambda: None
        self.obj.prepare()
        self.obj.engine.aggregator.prepare()
        self.obj.engine.start_subprocess = lambda **kwargs: None
        self.obj.startup()
        self.obj.post_process()
        self.obj.engine.aggregator.post_process()

        orig_prop_file = RESOURCES_DIR + "selenium/junit/runner.properties"
        start1 = (self.obj.engine.artifacts_dir + os.path.sep).replace(
            '\\', '/')
        start2 = "ARTIFACTS+"
        self.assertFilesEqual(orig_prop_file,
                              self.obj.props_file,
                              replace_str=start1,
                              replace_with=start2)
Example #7
 def setUp(self):
     super(TestSiegeExecutor, self).setUp()
     self.obj = SiegeExecutor()
     self.obj.engine = EngineEmul()
     self.obj.env = self.obj.engine.env
     self.obj.engine.aggregator = ConsolidatingAggregator()
     self.obj.settings.merge({"path": TOOL_PATH})
Example #8
 def test_broken(self):
     obj = NewmanExecutor()
     obj.engine = EngineEmul()
     obj.env = obj.engine.env
     obj.engine.aggregator = ConsolidatingAggregator()
     obj.engine.config.merge({
         "scenarios": {
             "newman": {
                 "script": RESOURCES_DIR + 'functional/postman.json',
                 "globals": {
                     "a": 123
                 },
             }
         }
     })
     obj.execution.merge({"scenario": "newman"})
     obj.engine.aggregator.prepare()
     obj.prepare()
     obj.startup()
     obj.engine.aggregator.startup()
     while not obj.check():
         obj.engine.aggregator.check()
         time.sleep(obj.engine.check_interval)
     obj.shutdown()
     obj.engine.aggregator.shutdown()
     obj.post_process()
     obj.engine.aggregator.post_process()
     self.assertTrue(obj.has_results())
     with open(obj.report_file) as fds:
         samples = [json.loads(line) for line in fds.readlines()]
     self.assertEqual(1, len(samples))
     sample = samples[0]
     self.assertEqual(sample["status"], "FAILED")
     self.assertEqual(sample["error_msg"], "expect response be 200")
Example #9
 def test_flow(self):
     obj = NewmanExecutor()
     obj.engine = EngineEmul()
     obj.env = obj.engine.env
     obj.engine.aggregator = ConsolidatingAggregator()
     obj.engine.config.merge({
         "scenarios": {
             "newman": {
                 "script": RESOURCES_DIR + 'functional/postman.json',
                 "globals": {
                     "a": 123
                 },
             }
         }
     })
     obj.execution.merge({"scenario": "newman"})
     obj.engine.aggregator.prepare()
     obj.prepare()
     obj.startup()
     obj.engine.aggregator.startup()
     while not obj.check():
         obj.engine.aggregator.check()
         time.sleep(obj.engine.check_interval)
     obj.shutdown()
     obj.engine.aggregator.shutdown()
     obj.post_process()
     obj.engine.aggregator.post_process()
     self.assertTrue(obj.has_results())
Example #10
    def test_simple(self):
        self.obj.engine.aggregator = ConsolidatingAggregator()
        self.obj.execution.merge({
            "scenario": {"script": RESOURCES_DIR + "BlazeDemo.java", "properties": {"scenprop": 3}},
            "properties": {"execprop": 2}
        })
        self.obj.settings.merge({"properties": {"settprop": 1}, "junit-version": 5})
        self.obj.prepare()
        self.obj.engine.aggregator.prepare()
        self.obj.startup()
        while not self.obj.check():
            time.sleep(self.obj.engine.check_interval)
        self.obj.shutdown()
        self.obj.post_process()
        self.obj.engine.aggregator.post_process()

        orig_prop_file = RESOURCES_DIR + "selenium/junit/runner.properties"
        start1 = (self.obj.engine.artifacts_dir + os.path.sep).replace('\\', '/')
        start2 = "ARTIFACTS+"
        self.assertFilesEqual(orig_prop_file, self.obj.props_file, replace_str=start1, replace_with=start2)

        self.assertTrue(self.obj.has_results())

        cumulative = self.obj.engine.aggregator.cumulative
        self.assertEqual("java.lang.RuntimeException: 123", cumulative[''][KPISet.ERRORS][0]['msg'])
        self.assertEqual(1, cumulative[''][KPISet.SUCCESSES])
Example #11
 def test_fail_on_zero_results(self):
     obj = JMeterExecutor()
     obj.engine = EngineEmul()
     obj.engine.aggregator = ConsolidatingAggregator()
     obj.execution = BetterDict()
     obj.execution.merge({"scenario": {"script": "tests/jmx/dummy.jmx"}})
     obj.prepare()
     self.assertRaises(RuntimeWarning, obj.post_process)
Example #12
 def test_worker_aggregation(self):
     self.configure({"execution": {
         "scenario": {"script": RESOURCES_DIR + "locust/simple.py"}}})
     self.obj.prepare()
     self.obj.reader = WorkersReader(RESOURCES_DIR + "locust/locust-workers.ldjson", 2, ROOT_LOGGER)
     self.obj.engine.aggregator = ConsolidatingAggregator()
     self.obj.engine.aggregator.engine = EngineEmul()
     self.obj.engine.aggregator.add_underling(self.obj.reader)
     self.assertEqual(107, len(list(self.obj.engine.aggregator.datapoints(final_pass=True))))
     self.obj.post_process()
Example #13
        def test_simple(self):
            obj = PBenchExecutor()
            obj.engine = EngineEmul()
            obj.engine.aggregator = ConsolidatingAggregator()
            obj.engine.aggregator.add_listener(DataPointLogger())
            obj.engine.config.merge({"provisioning": "test"})

            if os.path.exists("/home/undera/Sources/phantom"):  # FIXME: not good, get rid of it
                obj.settings.merge({
                    "path": "/home/undera/Sources/phantom/bin/phantom",
                    "modules-path": "/home/undera/Sources/phantom/lib/phantom"})
            else:
                obj.settings.merge({
                    "path": os.path.join(os.path.dirname(__file__), "..", "resources", "pbench", "phantom.sh")})

            obj.execution.merge({
                "log-responses": "proto_error",
                # "iterations": 5000000,
                "concurrency": 10,
                "throughput": 1000,
                "ramp-up": "1m",
                # "steps": 5,
                "hold-for": "15",
                "scenario": {
                    "timeout": 1,
                    "default-address": "http://localhost:33",
                    "headers": {
                        "Connection": "close"
                    },
                    "requests": [
                        # "/",
                        {
                            "url": "/api",
                            "method": "POST",
                            "headers": {
                                "Content-Length": 0
                            },
                            "body": {
                                "param": "value"}}]}})
            obj.engine.aggregator.prepare()
            obj.prepare()

            obj.engine.aggregator.startup()
            obj.startup()

            while not obj.check():
                logging.debug("Running...")
                obj.engine.aggregator.check()
                time.sleep(1)

            obj.shutdown()
            obj.engine.aggregator.shutdown()

            obj.post_process()
            obj.engine.aggregator.post_process()
Example #14
    def test_full_collection(self):
        obj = CloudProvisioning()
        obj.engine = EngineEmul()
        obj.engine.config.merge({
            ScenarioExecutor.EXEC: {
                "executor": "mock",
                "concurrency": 5500,
                "locations": {
                    "us-east-1": 1,
                    "us-west": 2
                }
            },
            "modules": {
                "mock": ModuleMock.__module__ + "." + ModuleMock.__name__
            },
            "provisioning": "mock"
        })
        obj.parameters = obj.engine.config['execution']
        obj.engine.aggregator = ConsolidatingAggregator()

        obj.settings["token"] = "FakeToken"
        obj.settings["browser-open"] = False
        obj.settings["use-deprecated-api"] = False
        obj.client = client = BlazeMeterClientEmul(obj.log)
        client.results.append({"result": []})  # collections
        client.results.append({"result": []})  # tests
        client.results.append(self.__get_user_info())  # user
        client.results.append({"files": []})  # upload files
        client.results.append(
            {"result": {
                "name": "Taurus Collection",
                "items": []
            }})  # transform config to collection
        client.results.append({"result": {
            "id": id(client)
        }})  # create collection
        client.results.append({"result": {"id": id(obj)}})  # start
        client.results.append({"result": {
            "id": id(obj),
            "sessions": []
        }})  # get master
        client.results.append({"result": []})  # get master sessions
        client.results.append({})  # terminate

        obj.prepare()
        self.assertEqual(1,
                         obj.executors[0].execution['locations']['us-east-1'])
        self.assertEqual(2,
                         obj.executors[0].execution['locations']['us-west'])

        obj.startup()
        obj.check()
        obj.shutdown()
        obj.post_process()
Example #15
    def configure(self, config):
        self.obj.engine.config.merge(config)
        execution = self.obj.engine.config['execution']
        if isinstance(execution, list):
            self.obj.execution = execution[0]
        else:
            self.obj.execution = execution

        self.results_listener = MockReader()
        self.obj.engine.aggregator = ConsolidatingAggregator()
        self.obj.engine.aggregator.add_listener(self.results_listener)
Example #16
    def test_simple(self):
        self.configure({
            "provisioning": "test",
            EXEC: {
                "log-responses": "proto_error",
                # "iterations": 5000000,
                "concurrency": 10,
                "throughput": 1000,
                "ramp-up": "1m",
                # "steps": 5,
                "hold-for": "15",
                "scenario": {
                    "timeout":
                    1,
                    "default-address":
                    "http://localhost:33",
                    "headers": {
                        "Connection": "close"
                    },
                    "requests": [{
                        "url": "/api",
                        "method": "POST",
                        "headers": {
                            "Content-Length": 0
                        },
                        "body": {
                            "param": "value"
                        }
                    }]
                }
            }
        })

        self.obj.engine.aggregator = ConsolidatingAggregator()
        self.obj.engine.aggregator.engine = self.obj.engine
        self.obj.engine.aggregator.add_listener(DataPointLogger())

        self.obj.engine.aggregator.prepare()
        self.obj.prepare()

        self.obj.engine.aggregator.startup()
        self.obj.startup()

        while not self.obj.check():
            ROOT_LOGGER.debug("Running...")
            self.obj.engine.aggregator.check()
            time.sleep(0.1)

        self.obj.shutdown()
        self.obj.engine.aggregator.shutdown()

        self.obj.post_process()
        self.obj.engine.aggregator.post_process()
Example #17
    def test_locations_on_both_levels(self):
        obj = CloudProvisioning()
        obj.engine = EngineEmul()
        obj.engine.config.merge({
            ScenarioExecutor.EXEC: [{
                "executor": "mock",
                "concurrency": 5500,
                "locations": {
                    "eu-west-1": 1,
                }
            }],
            "locations": {
                "ams3": 1,
            },
            "modules": {
                "mock": ModuleMock.__module__ + "." + ModuleMock.__name__
            },
            "provisioning":
            "mock"
        })
        obj.parameters = obj.engine.config['execution'][0]
        obj.engine.aggregator = ConsolidatingAggregator()
        log_recorder = RecordingHandler()
        obj.log.addHandler(log_recorder)

        obj.settings["token"] = "FakeToken"
        obj.settings["browser-open"] = False
        obj.settings["use-deprecated-api"] = False
        obj.client = client = BlazeMeterClientEmul(obj.log)
        client.results.append({"result": []})  # find test
        client.results.append({"result": []})  # find collection
        client.results.append(self.__get_user_info())  # user
        client.results.append({})  # upload files
        client.results.append(
            {"result": {
                "name": "Taurus Collection",
                "items": []
            }})  # transform config to collection
        client.results.append({"result": {"id": 42}})  # create collection
        obj.prepare()

        with open(os.path.join(obj.engine.artifacts_dir, "cloud.yml")) as fds:
            cloud_config = yaml.full_load(fds)
        self.assertNotIn("locations", cloud_config)
        for execution in cloud_config["execution"]:
            self.assertIn("locations", execution)
        log_buff = log_recorder.warn_buff.getvalue()
        self.assertIn(
            "Each execution has locations specified, global locations won't have any effect",
            log_buff)
Example #18
 def test_set_rtimes_len(self):
     obj = ConsolidatingAggregator()
     obj.settings['rtimes-len'] = 42
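     # 'rtimes-len' presumably caps how many response-time samples a KPISet
     # retains for percentile estimation; the assertions below expect it to
     # apply to cumulative KPISets only, not to per-second 'current' ones.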
     obj.prepare()
     reader = self.get_fail_reader()
     obj.add_underling(reader)
     listener = MockListener()
     obj.add_listener(listener)
     obj.check()
     for dp in listener.results:
         for kpiset in dp['cumulative'].values():
             self.assertEqual(42, kpiset.rtimes_len)
         for kpiset in dp['current'].values():
             self.assertNotEqual(42, kpiset.rtimes_len)
Example #19
    def test_case1(self):
        mock = BZMock()
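        # BZMock maps endpoint URLs to canned responses; where a URL maps to
        # a list (the .../status entry below), responses appear to be consumed
        # one per request, driving the CREATE -> ENDED status progression.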

        mock.mock_get.update({
            'https://a.blazemeter.com/api/v4/masters/1/sessions': {"result": {"sessions": []}},
            'https://a.blazemeter.com/api/v4/masters/1/full': {"result": {"sessions": []}},
            'https://a.blazemeter.com/api/v4/masters/1': {"result": {"note": "message"}},
            'https://a.blazemeter.com/api/v4/masters/1/status': [
                {"result": {"id": 1, "status": "CREATE"}},
                {"result": {"id": 1, "status": "ENDED", "progress": 101}}
            ],
        })

        mock.mock_post = {
            'https://a.blazemeter.com/api/v4/projects': {"result": {"id": 1, "workspaceId": 1}},
            'https://a.blazemeter.com/api/v4/tests': {"result": {"id": 1}},
            'https://a.blazemeter.com/api/v4/tests/1/files': {"result": None},
            'https://a.blazemeter.com/api/v4/tests/1/start': {"result": {"id": 1}},
            'https://a.blazemeter.com/api/v4/masters/1/stop': {"result": None},
            'https://a.blazemeter.com/api/v4/masters/1/public-token': {"result": {"publicToken": "token"}},
        }

        mock.mock_patch = {
            'https://a.blazemeter.com/api/v4/tests/1': {"result": {}}
        }

        prov = CloudProvisioning()
        prov.browser_open = None
        prov.public_report = True
        prov.user.token = "test"
        prov.engine = EngineEmul()
        prov.engine.aggregator = ConsolidatingAggregator()
        # prov.engine.config.merge({"modules": {"blazemeter": {"browser-open": False}}})
        prov.engine.config[ScenarioExecutor.EXEC] = [{
            "executor": "mock",
            "locations": {
                "aws": 1
            },
            "files": ModuleMock().get_resource_files()
        }]
        mock.apply(prov.user)

        prov.prepare()
        prov.startup()
        prov.check()
        prov._last_check_time = 0
        prov.check()
        prov.shutdown()
        prov.post_process()
Example #20
 def test_simple(self):
     self.obj.engine.aggregator = ConsolidatingAggregator()
     self.obj.execution.merge({"scenario": {"script": RESOURCES_DIR + "BlazeDemo.java"}})
     self.obj.prepare()
     self.obj.engine.aggregator.prepare()
     self.obj.startup()
     while not self.obj.check():
         self.obj.engine.aggregator.check()
         time.sleep(1)
     self.obj.shutdown()
     self.obj.engine.aggregator.shutdown()
     self.obj.post_process()
     self.obj.engine.aggregator.post_process()
     self.assertTrue(self.obj.has_results())
     self.assertEqual(1, self.obj.engine.aggregator.cumulative[''][KPISet.SUCCESSES])
Example #21
 def test_load_mode(self):
     self.obj.engine.aggregator = ConsolidatingAggregator()
     self.obj.execution.merge({
         "iterations": 10,
         "scenario": {"script": RESOURCES_DIR + "selenium/invalid/SimpleTest.java"},
     })
     self.obj.settings.merge({"junit-version": 5})
     self.obj._compile_scripts = lambda: None
     self.obj.prepare()
     self.obj.engine.aggregator.prepare()
     self.obj.engine.start_subprocess = lambda **kwargs: None
     self.obj.startup()
     self.obj.post_process()
     self.obj.engine.aggregator.post_process()
     self.assertTrue(self.obj.report_file.endswith(".csv"))
     self.assertIsInstance(self.obj.reader, JTLReader)
Example #22
    def test_settings_from_blazemeter_mod(self):
        obj = CloudProvisioning()
        obj.engine = EngineEmul()
        obj.engine.config.merge({
            ScenarioExecutor.EXEC: {
                "executor": "mock",
                "concurrency": 5500,
                "locations": {
                    "us-east-1": 1,
                    "us-west": 1,
                }
            },
            "modules": {
                "mock": ModuleMock.__module__ + "." + ModuleMock.__name__,
                "blazemeter": {
                    "class": ModuleMock.__module__ + "." + ModuleMock.__name__,
                    "token": "bmtoken",
                    "detach": True,
                    "browser-open": None,
                    "check-interval": 10.0,
                }
            },
            "provisioning": "mock"
        })
        obj.parameters = obj.engine.config['execution']
        obj.engine.aggregator = ConsolidatingAggregator()

        # these should override 'blazemeter' settings
        obj.settings["check-interval"] = 20.0
        obj.settings["browser-open"] = "both"

        obj.client = client = BlazeMeterClientEmul(obj.log)
        client.results.append({"result": []})  # collection
        client.results.append({"result": []})  # tests
        client.results.append(self.__get_user_info())  # user
        client.results.append({"result": {"id": id(client)}})  # create test
        client.results.append({"files": []})  # create test
        client.results.append({})  # upload files

        obj.prepare()

        self.assertEqual(obj.detach, True)
        self.assertEqual(obj.browser_open, "both")
        self.assertEqual(obj.client.token, "bmtoken")
        self.assertEqual(obj.check_interval, 20.0)

        self.assertEqual(client.results, [])
Example #23
    def test_simple(self):
        obj = CloudProvisioning()
        obj.engine = EngineEmul()
        obj.engine.config.merge({
            "execution": {
                "executor": "mock",
                "concurrency": 5500,
                "locations": {
                    "us-east-1": 1,
                    "us-west": 2
                }
            },
            "modules": {
                "mock": ModuleMock.__module__ + "." + ModuleMock.__name__
            },
            "provisioning": "mock"
        })
        obj.parameters = obj.engine.config['execution']
        obj.engine.aggregator = ConsolidatingAggregator()

        obj.settings["token"] = "FakeToken"
        obj.client = client = BlazeMeterClientEmul(obj.log)
        client.results.append(self.__get_user_info())  # user
        client.results.append({"result": []})  # tests
        client.results.append({"result": {"id": id(client)}})  # create test
        client.results.append({})  # upload files
        client.results.append({"result": {"id": id(obj)}})  # start
        client.results.append({"result": {"id": id(obj)}})  # get master
        client.results.append({})  # terminate

        obj.prepare()
        widget = obj.get_widget()
        self.assertEqual(2,
                         obj.executors[0].execution['locations']['us-east-1'])
        self.assertEqual(4,
                         obj.executors[0].execution['locations']['us-west'])

        obj.startup()
        obj.check()
        widget.render((200, ), False)
        txt = widget.text.get_text()[0]
        logging.info("Text: '%s'", txt)
        self.assertIn("us-east-1: 2", txt)
        self.assertIn("us-west: 4", txt)
        obj.shutdown()
        obj.post_process()
Example #24
    def test_distributed_jtl(self):
        obj = JMeterJTLLoaderExecutor()
        obj.engine = EngineEmul()
        obj.engine.aggregator = ConsolidatingAggregator()
        self.maxc = 0
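        # clb records the peak per-second CONCURRENCY across all emitted
        # datapoints; the final assertion checks that consolidated maximum.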

        def clb(x):
            self.maxc = max(self.maxc, x[DataPoint.CURRENT][''][KPISet.CONCURRENCY])

        obj.engine.aggregator.add_listener(ResultChecker(clb))
        obj.execution = BetterDict()
        obj.execution.merge({"kpi-jtl": __dir__() + "/../data/distributed.jtl"})
        obj.prepare()
        obj.reader.is_distributed = True

        obj.engine.aggregator.post_process()
        self.assertEqual(23, self.maxc)
Example #25
    def test_check_interval(self):
        obj = CloudProvisioning()
        obj.engine = EngineEmul()
        obj.engine.config.merge({
            ScenarioExecutor.EXEC: {
                "executor": "mock",
                "concurrency": 5500,
                "locations": {
                    "us-east-1": 1,
                    "us-west": 1,
                }
            },
            "modules": {
                "mock": ModuleMock.__module__ + "." + ModuleMock.__name__
            },
            "provisioning": "mock"
        })
        obj.parameters = obj.engine.config['execution']
        obj.engine.aggregator = ConsolidatingAggregator()

        obj.settings["token"] = "FakeToken"
        obj.settings["browser-open"] = False
        obj.settings["check-interval"] = "1s"
        obj.client = client = BlazeMeterClientEmul(obj.log)
        client.results.append({"result": []})  # collection
        client.results.append({"result": []})  # tests
        client.results.append(self.__get_user_info())  # user
        client.results.append({"result": {"id": id(client)}})  # create test
        client.results.append({"files": []})  # create test
        client.results.append({})  # upload files
        client.results.append({"result": {"id": id(obj)}})  # start test
        client.results.append({"result": {"id": id(obj)}})  # status
        client.results.append({"result": []})  # sessions
        client.results.append({"result": {"id": id(obj)}})  # status
        client.results.append({"result": []})  # sessions

        obj.prepare()
        obj.startup()
        obj.check()  # this one should work
        obj.check()  # this one should be skipped
        time.sleep(1)
        obj.check()  # this one should work
        obj.check()  # this one should skip

        self.assertEqual(client.results, [])
Example #26
 def test_load_mode(self):
     self.obj.engine.aggregator = ConsolidatingAggregator()
     self.obj.execution.merge({
         "iterations": 10,
         "scenario": {"script": RESOURCES_DIR + "selenium/invalid/SimpleTest.java"},
     })
     self.obj.settings.merge({"junit-version": 5})
     self.obj.prepare()
     self.obj.engine.aggregator.prepare()
     self.obj.startup()
     while not self.obj.check():
         time.sleep(self.obj.engine.check_interval)
     self.obj.shutdown()
     self.obj.post_process()
     self.obj.engine.aggregator.post_process()
     self.assertTrue(self.obj.has_results())
     self.assertTrue(self.obj.report_file.endswith(".csv"))
     self.assertIsInstance(self.obj.reader, JTLReader)
Example #27
 def test_errors_cumulative(self):
     aggregator = ConsolidatingAggregator()
     aggregator.track_percentiles = [50]
     aggregator.prepare()
     reader = self.get_fail_reader()
     aggregator.add_underling(reader)
     aggregator.shutdown()
     aggregator.post_process()
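     # Error entries must be distinct objects per label: duplicate ids would
     # mean an error KPISet was merged by reference instead of being copied.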
     cum_dict = aggregator.underlings[0].cumulative
     first_err_ids = [id(err) for err in cum_dict['first']['errors']]
     second_err_ids = [id(err) for err in cum_dict['second']['errors']]
     total_err_ids = [id(err) for err in cum_dict['']['errors']]
     all_ids = first_err_ids + second_err_ids + total_err_ids
     self.assertEqual(len(all_ids), len(set(all_ids)))
     for label in cum_dict:
         data = cum_dict[label]
         total_errors_count = sum(err['cnt'] for err in data['errors'])
         self.assertEqual(data['fail'], total_errors_count)
Example #28
    def test_toplevel_locations(self):
        obj = CloudProvisioning()
        obj.engine = EngineEmul()
        obj.engine.config.merge({
            ScenarioExecutor.EXEC: {
                "executor": "mock",
                "concurrency": 5500,
            },
            "modules": {
                "mock": ModuleMock.__module__ + "." + ModuleMock.__name__
            },
            "locations": {
                "us-east-1": 1,
                "us-west": 2
            },
            "locations-weighted": True,
            "provisioning": "mock"
        })
        obj.parameters = obj.engine.config['execution']
        obj.engine.aggregator = ConsolidatingAggregator()

        obj.settings["token"] = "FakeToken"
        obj.settings["browser-open"] = False
        obj.settings["use-deprecated-api"] = False
        obj.client = client = BlazeMeterClientEmul(obj.log)
        client.results.append({"result": []})  # collections
        client.results.append({"result": []})  # tests
        client.results.append(self.__get_user_info())  # user
        client.results.append({})  # upload files
        client.results.append(
            {"result": {
                "name": "Taurus Collection",
                "items": []
            }})  # transform config to collection
        client.results.append({"result": {"id": 42}})  # create collection
        obj.prepare()

        with open(os.path.join(obj.engine.artifacts_dir, "cloud.yml")) as fds:
            conf = yaml.full_load(fds)
        self.assertIn('locations', conf)
        self.assertIn('locations-weighted', conf)
        self.assertEqual(conf['locations']['us-east-1'], 1)
        self.assertEqual(conf['locations']['us-west'], 2)
        self.assertNotIn('locations', conf['execution'][0])
Example #29
    def test_two_executions(self):
        # check consolidator
        obj = ConsolidatingAggregator()
        obj.prepare()
        obj.track_percentiles = [0, 50, 100]
        underling1 = self.get_reader()
        underling2 = self.get_reader()
        obj.add_underling(underling1)
        obj.add_underling(underling2)

        cnt = 0
        for _ in range(1, 10):
            for point in obj.datapoints():
                overall = point[DataPoint.CURRENT]['']
                self.assertEqual(2, overall[KPISet.CONCURRENCY])
                self.assertGreater(overall[KPISet.PERCENTILES]["100.0"], 0)
                self.assertGreater(overall[KPISet.AVG_RESP_TIME], 0)
                cnt += 1

        self.assertEqual(2, cnt)
Example #30
    def test_cloud_config_cleanup(self):
        obj = CloudProvisioning()
        obj.engine = EngineEmul()
        obj.engine.config.merge({
            ScenarioExecutor.EXEC: {
                "executor": "mock",
                "concurrency": 5500,
                "locations": {
                    "us-east-1": 1,
                    "us-west": 2
                }
            },
            "modules": {
                "mock": ModuleMock.__module__ + "." + ModuleMock.__name__
            },
            "provisioning": "mock"
        })
        obj.parameters = obj.engine.config['execution']
        obj.engine.aggregator = ConsolidatingAggregator()

        obj.settings["token"] = "FakeToken"
        obj.client = client = BlazeMeterClientEmul(obj.log)
        client.results.append(self.__get_user_info())  # user
        client.results.append({"result": []})  # tests
        client.results.append({"result": {"id": id(client)}})  # create test
        client.results.append({"files": []})  # create test
        client.results.append({})  # upload files
        client.results.append({"result": {"id": id(obj)}})  # start
        client.results.append({"result": {"id": id(obj)}})  # get master
        client.results.append({"result": []})  # get master sessions
        client.results.append({})  # terminate
        obj.prepare()

        cloud_config = obj.get_config_for_cloud()
        execution = cloud_config["execution"][0]
        self.assertNotIn("throughput", execution)
        self.assertNotIn("ramp-up", execution)
        self.assertNotIn("hold-for", execution)
        self.assertNotIn("steps", execution)