def test_fail_on_zero_results(self):
    """Selenium executor with no results: provisioning post_process must raise ToolError."""
    # Use a context manager so the file handle is closed deterministically
    # (the original leaked the handle returned by open()).
    # NOTE(review): yaml.load without an explicit Loader is deprecated in
    # PyYAML >= 5.1; the fixture is a trusted test resource, so behavior is kept.
    with open(RESOURCES_DIR + "yaml/selenium_executor_requests.yml") as fds:
        self.configure(yaml.load(fds.read()))
    self.obj.prepare()
    self.obj.engine.prepared = [self.obj]
    self.obj.engine.started = [self.obj]
    prov = Local()
    prov.engine = self.obj.engine
    prov.executors = [self.obj]
    self.obj.engine.provisioning = prov
    self.assertRaises(ToolError, self.obj.engine.provisioning.post_process)
def test_fail_on_zero_results(self):
    """Selenium executor with no results: post_process must raise RuntimeWarning."""
    # Context manager closes the YAML fixture handle (originally leaked by open()).
    with open(__dir__() + "/../yaml/selenium_executor_requests.yml") as fds:
        self.configure(yaml.load(fds.read()))
    self.obj.prepare()
    self.obj.engine.prepared = [self.obj]
    self.obj.engine.started = [self.obj]
    prov = Local()
    prov.engine = self.obj.engine
    prov.executors = [self.obj]
    self.obj.engine.provisioning = prov
    self.assertRaises(RuntimeWarning, self.obj.engine.provisioning.post_process)
def test_start_shift(self):
    """_get_start_shift must translate scheduled-time strings into second offsets.

    Unparseable or empty strings yield a zero shift.
    """
    prov = Local()
    today = datetime.date.today()
    yesterday = today - datetime.timedelta(days=1)
    tomorrow = today + datetime.timedelta(days=1)
    started_at = datetime.time(12, 30, 5)
    scheduled_at = datetime.time(13, 31, 7)
    prov.start_time = time.mktime(datetime.datetime.combine(today, started_at).timetuple())

    # (schedule string, expected shift in seconds)
    cases = [
        (datetime.datetime.combine(tomorrow, scheduled_at).strftime('%Y-%m-%d %H:%M:%S'), 90062.0),
        (datetime.datetime.combine(yesterday, scheduled_at).strftime('%Y-%m-%d %H:%M'), 3655.0),
        (datetime.datetime.combine(today, scheduled_at).strftime('%H:%M:%S'), 3662.0),
        (datetime.datetime.combine(today, scheduled_at).strftime('%H:%M'), 3655.0),
        ('', 0),
        ('lorem ipsum', 0),
    ]
    for date_str, expected in cases:
        self.assertEqual(prov._get_start_shift(date_str), expected)
def test_fail_on_zero_results(self):
    """Gatling run with buffered reader output: post_process must not raise."""
    script = RESOURCES_DIR + "gatling/bs/BasicSimulation.scala"
    self.obj.execution.merge({"scenario": {"script": script}})
    self.obj.prepare()
    engine = self.obj.engine
    engine.prepared = [self.obj]
    engine.started = [self.obj]
    provisioning = Local()
    provisioning.engine = engine
    provisioning.executors = [self.obj]
    engine.provisioning = provisioning
    self.obj.reader.buffer = ['some info']
    engine.provisioning.post_process()
def test_fail_on_zero_results(self):
    """Gatling run with buffered reader output: post_process must not raise."""
    executor = self.getGatling()
    script = __dir__() + "/../resources/gatling/bs/BasicSimulation.scala"
    executor.execution.merge({"scenario": {"script": script}})
    executor.prepare()
    engine = executor.engine
    engine.prepared = [executor]
    engine.started = [executor]
    provisioning = Local()
    provisioning.engine = engine
    provisioning.executors = [executor]
    engine.provisioning = provisioning
    executor.reader.buffer = ['some info']
    engine.provisioning.post_process()
def test_with_results(self):
    """Grinder run with buffered reader output: post_process must not raise."""
    executor = self.obj
    executor.execution.merge({
        "concurrency": {"local": 2},
        "scenario": {"script": RESOURCES_DIR + "grinder/helloworld.py"},
    })
    executor.prepare()
    engine = executor.engine
    engine.prepared = [executor]
    engine.started = [executor]
    provisioning = Local()
    provisioning.engine = engine
    provisioning.executors = [executor]
    engine.provisioning = provisioning
    executor.reader.buffer = ['some info']
    engine.provisioning.post_process()
def test_disabled(self):
    """Reporter with disable=True must never start its console screen."""
    obj = ConsoleStatusReporter()
    obj.engine = EngineEmul()
    obj.engine.provisioning = Local()
    obj.engine.config[Provisioning.PROV] = ''
    jmeter = self.get_jmeter()
    jmeter.engine = obj.engine
    jmeter.start_time = time.time()
    jmeter.execution[ScenarioExecutor.HOLD_FOR] = 10
    obj.engine.provisioning.executors = [jmeter]
    obj.settings["disable"] = True
    obj.settings['dummy_cols'] = 160
    obj.settings['dummy_rows'] = 40
    obj.prepare()
    obj.startup()
    # NOTE(review): loop extent reconstructed from whitespace-mangled source —
    # confirm whether check() belongs inside the loop.
    for _ in range(10):
        obj.aggregated_second(self.__get_datapoint(0))
        obj.check()
    self.assertFalse(obj.screen.started)
    obj.check()
    obj.shutdown()
    obj.post_process()
def test_disabled_0(self):
    """A falsy (0) "disable" setting must leave the reporter enabled."""
    obj = ConsoleStatusReporter()
    obj.engine = EngineEmul()
    obj.engine.provisioning = Local()
    obj.engine.config[Provisioning.PROV] = ''
    obj.settings["disable"] = 0
    obj.prepare()
    # assertEquals is a deprecated alias of assertEqual.
    self.assertEqual(False, obj.disabled)
def test_with_results(self):
    """Grinder run with buffered reader output: post_process must not raise."""
    executor = GrinderExecutor()
    executor.engine = EngineEmul()
    executor.settings.merge({'path': __dir__() + "/../resources/grinder/fake_grinder.jar"})
    scenario = {"script": __dir__() + "/../resources/grinder/helloworld.py"}
    executor.execution.merge({"concurrency": {"local": 2}, "scenario": scenario})
    executor.prepare()
    engine = executor.engine
    engine.prepared = [executor]
    engine.started = [executor]
    provisioning = Local()
    provisioning.engine = engine
    provisioning.executors = [executor]
    engine.provisioning = provisioning
    executor.reader.buffer = ['some info']
    engine.provisioning.post_process()
def test_with_results(self):
    """Grinder run with buffered reader output: post_process must not raise."""
    executor = GrinderExecutor()
    executor.engine = EngineEmul()
    executor.settings.merge({'path': __dir__() + "/../grinder/fake_grinder.jar"})
    scenario = {"script": __dir__() + "/../grinder/helloworld.py"}
    executor.execution.merge({"concurrency": {"local": 2}, "scenario": scenario})
    executor.prepare()
    engine = executor.engine
    engine.prepared = [executor]
    engine.started = [executor]
    provisioning = Local()
    provisioning.engine = engine
    provisioning.executors = [executor]
    engine.provisioning = provisioning
    executor.reader.buffer = ['some info']
    engine.provisioning.post_process()
def test_fail_on_zero_results(self):
    """Locust executor with no results: post_process must raise ToolError."""
    config = {"execution": {
        "concurrency": 1,
        "iterations": 10,
        "hold-for": 30,
        "scenario": {
            "default-address": "http://blazedemo.com",
            "script": RESOURCES_DIR + "locust/simple.py",
        },
    }}
    self.configure(config)
    self.obj.prepare()
    engine = self.obj.engine
    engine.prepared = [self.obj]
    engine.started = [self.obj]
    provisioning = Local()
    provisioning.engine = engine
    provisioning.executors = [self.obj]
    engine.provisioning = provisioning
    self.assertRaises(ToolError, engine.provisioning.post_process)
def test_fail_on_zero_results(self):
    """Locust executor with no results: post_process must raise ToolError."""
    self.obj.execution.merge({
        "concurrency": 1,
        "iterations": 10,
        "hold-for": 30,
        "scenario": {
            "default-address": "http://blazedemo.com",
            "script": RESOURCES_DIR + "locust/simple.py",
        },
    })
    self.obj.prepare()
    engine = self.obj.engine
    engine.prepared = [self.obj]
    engine.started = [self.obj]
    provisioning = Local()
    provisioning.engine = engine
    provisioning.executors = [self.obj]
    engine.provisioning = provisioning
    self.assertRaises(ToolError, engine.provisioning.post_process)
def test_fail_on_zero_results(self):
    """Grinder executor with no results: post_process must raise ToolError."""
    self.obj.execution.merge({
        "concurrency": {"local": 2},
        "scenario": {"script": RESOURCES_DIR + "grinder/helloworld.py"},
    })
    self.obj.prepare()
    engine = self.obj.engine
    engine.prepared = [self.obj]
    engine.started = [self.obj]
    engine.provisioning = Local()
    engine.provisioning.engine = engine
    engine.provisioning.executors = [self.obj]
    self.assertRaises(ToolError, engine.provisioning.post_process)
def test_fail_on_zero_results(self):
    """Locust executor with no results: post_process must raise ToolError."""
    if six.PY3:
        logging.warning("No locust available for python 3")
        # NOTE(review): this only logs — the test still proceeds under PY3;
        # confirm whether an early return/skip was intended here.
    self.obj.execution.merge({
        "concurrency": 1,
        "iterations": 10,
        "hold-for": 30,
        "scenario": {
            "default-address": "http://blazedemo.com",
            "script": __dir__() + "/../resources/locust/simple.py",
        },
    })
    self.obj.prepare()
    engine = self.obj.engine
    engine.prepared = [self.obj]
    engine.started = [self.obj]
    provisioning = Local()
    provisioning.engine = engine
    provisioning.executors = [self.obj]
    engine.provisioning = provisioning
    self.assertRaises(ToolError, engine.provisioning.post_process)
def test_fail_on_zero_results(self):
    """Locust executor with no results: post_process must raise RuntimeWarning."""
    if six.PY3:
        logging.warning("No locust available for python 3")
        # NOTE(review): this only logs — the test still proceeds under PY3;
        # confirm whether an early return/skip was intended here.
    self.obj.execution.merge({
        "concurrency": 1,
        "iterations": 10,
        "hold-for": 30,
        "scenario": {
            "default-address": "http://blazedemo.com",
            "script": __dir__() + "/../locust/simple.py",
        },
    })
    self.obj.prepare()
    engine = self.obj.engine
    engine.prepared = [self.obj]
    engine.started = [self.obj]
    provisioning = Local()
    provisioning.engine = engine
    provisioning.executors = [self.obj]
    engine.provisioning = provisioning
    self.assertRaises(RuntimeWarning, engine.provisioning.post_process)
def test_results_link_blazemeter(self):
    """Report info must carry the BlazeMeter results URL and the test name."""
    reporter = JUnitXMLReporter()
    reporter.engine = EngineEmul()
    reporter.engine.provisioning = Local()
    reporter.engine.reporters.append(BlazeMeterUploader())
    reporter.engine.provisioning.client = BlazeMeterClientEmul(reporter.log)
    uploader = reporter.engine.reporters[0]
    uploader.client.results_url = 'url2'
    uploader.parameters.merge({'test': 'test2'})
    expected = [('BlazeMeter report link: url2\n', 'test2')]
    self.assertEqual(reporter.get_bza_report_info(), expected)
def test_fail_on_zero_results(self):
    """Grinder executor with no results: post_process must raise ToolError."""
    executor = GrinderExecutor()
    executor.engine = EngineEmul()
    executor.settings.merge({'path': __dir__() + "/../resources/grinder/fake_grinder.jar"})
    scenario = {"script": __dir__() + "/../resources/grinder/helloworld.py"}
    executor.execution.merge({"concurrency": {"local": 2}, "scenario": scenario})
    executor.prepare()
    engine = executor.engine
    engine.prepared = [executor]
    engine.started = [executor]
    engine.provisioning = Local()
    engine.provisioning.engine = engine
    engine.provisioning.executors = [executor]
    self.assertRaises(ToolError, engine.provisioning.post_process)
def test_1(self):
    """Happy-path console run: screen must start and no 'Failed' warning logged."""
    obj = ConsoleStatusReporter()
    handler = RecordingHandler()
    obj.log.addHandler(handler)
    obj.engine = EngineEmul()
    obj.engine.provisioning = Local()
    obj.engine.provisioning.start_time = time.time()
    obj.engine.config[Provisioning.PROV] = ''
    jmeter = self.get_jmeter()
    jmeter.engine = obj.engine
    jmeter.execution[ScenarioExecutor.HOLD_FOR] = 10
    jmeter.execution.merge({'hold-for': 0, 'ramp-up': 0})
    jmeter.delay = 10
    jmeter.prepare()
    widget = jmeter.get_widget()
    widget.update()
    jmeter.startup()
    widget.update()
    obj.engine.provisioning.executors = [jmeter]
    # The original assigned "disable" = False twice; one assignment suffices.
    obj.settings["disable"] = False
    obj.settings['dummy_cols'] = 160
    obj.settings['dummy_rows'] = 40
    obj.prepare()
    obj.startup()
    obj.check()
    obj.temp_stream.write("test1\n")
    obj.temp_stream.flush()
    obj.temp_stream.write("test1\n")
    obj.temp_stream.flush()
    obj.check()
    # NOTE(review): loop extent reconstructed from whitespace-mangled source.
    for n in range(0, 10):
        point = self.__get_datapoint(n)
        obj.aggregated_second(point)
        obj.temp_stream.write("test %s\n" % n)
        obj.temp_stream.flush()
        obj.check()
    self.assertTrue(obj.screen.started)
    point = self.__get_datapoint(11)
    point[DataPoint.CURRENT][''][KPISet.RESP_CODES][''] = 1
    obj.aggregated_second(point)
    obj.check()
    obj.shutdown()
    obj.post_process()
    obj.log.removeHandler(handler)
    self.assertNotIn('Failed', handler.warn_buff.getvalue())
def test_1(self):
    """Happy-path console run: the console screen must start during checks."""
    obj = ConsoleStatusReporter()
    obj.engine = EngineEmul()
    obj.engine.provisioning = Local()
    obj.engine.provisioning.start_time = time.time()
    obj.engine.config[Provisioning.PROV] = ''
    jmeter = self.get_jmeter()
    jmeter.engine = obj.engine
    jmeter.execution[ScenarioExecutor.HOLD_FOR] = 10
    jmeter.execution.merge({'hold-for': 0, 'ramp-up': 0})
    jmeter.delay = 10
    jmeter.prepare()
    widget = jmeter.get_widget()
    widget.update()
    jmeter.startup()
    widget.update()
    obj.engine.provisioning.executors = [jmeter]
    # The original assigned "disable" = False twice; one assignment suffices.
    obj.settings["disable"] = False
    obj.settings['dummy_cols'] = 160
    obj.settings['dummy_rows'] = 40
    obj.prepare()
    obj.startup()
    obj.check()
    obj.temp_stream.write("test1\n")
    obj.temp_stream.flush()
    obj.temp_stream.write("test1\n")
    obj.temp_stream.flush()
    obj.check()
    # NOTE(review): loop extent reconstructed from whitespace-mangled source.
    for n in range(0, 10):
        point = self.__get_datapoint(n)
        obj.aggregated_second(point)
        obj.temp_stream.write("test %s\n" % n)
        obj.temp_stream.flush()
        obj.check()
    self.assertTrue(obj.screen.started)
    obj.check()
    obj.shutdown()
    obj.post_process()
def test_start_sequential_global(self):
    """Sequential mode runs two mock executors one after another (3 check cycles)."""
    prov = Local()
    prov.settings["sequential"] = True
    prov.engine = EngineEmul()
    prov.engine.config[ScenarioExecutor.EXEC] = [{}, {}]
    prov.engine.config.get("settings")["default-executor"] = "mock"
    prov.prepare()
    prov.startup()
    iterations = 0
    while not prov.check():
        iterations += 1
    self.assertEqual(3, iterations)
    prov.shutdown()
    for executor in prov.executors:
        executor.is_has_results = True
    prov.post_process()
def test_exception(self):
    """post_process must raise ToolError carrying diagnostics not embedded in str()."""
    local = Local()
    local.engine = EngineEmul()
    local.engine.config[ScenarioExecutor.EXEC] = [{}]
    local.engine.config.get("settings")["default-executor"] = "mock"
    local.prepare()
    local.startup()
    local.check()
    local.shutdown()
    try:
        local.post_process()
    except ToolError as exc:
        self.assertNotIn('DIAGNOSTICS', str(exc))
        self.assertIsNotNone(exc.diagnostics)
        self.assertEqual(exc.diagnostics, ['DIAGNOSTICS'])
    except BaseException as exc:
        self.fail("Was supposed to fail with ToolError, but crashed with %s" % exc)
    else:
        # Originally the test passed silently when no exception was raised.
        self.fail("Was supposed to fail with ToolError, but nothing was raised")
def test_exception(self):
    """post_process must raise ToolError carrying diagnostics not embedded in str()."""
    local = Local()
    local.engine = EngineEmul()
    local.engine.config.merge({ScenarioExecutor.EXEC: [{}]})
    local.engine.config.get("settings")["default-executor"] = "mock"
    local.engine.unify_config()
    local.prepare()
    local.startup()
    local.check()
    local.shutdown()
    try:
        local.post_process()
    except ToolError as exc:
        self.assertNotIn('DIAGNOSTICS', str(exc))
        self.assertIsNotNone(exc.diagnostics)
        self.assertEqual(exc.diagnostics, ['DIAGNOSTICS'])
    except BaseException as exc:
        self.fail("Was supposed to fail with ToolError, but crashed with %s" % exc)
    else:
        # Originally the test passed silently when no exception was raised.
        self.fail("Was supposed to fail with ToolError, but nothing was raised")
def test_start_sequential_global(self):
    """Sequential mode runs two mock executors one after another (3 check cycles)."""
    prov = Local()
    prov.settings["sequential"] = True
    prov.engine = EngineEmul()
    prov.engine.config.merge({ScenarioExecutor.EXEC: [{}, {}]})
    prov.engine.config.get("settings")["default-executor"] = "mock"
    prov.engine.unify_config()
    prov.prepare()
    prov.startup()
    iterations = 0
    while not prov.check():
        iterations += 1
    self.assertEqual(3, iterations)
    prov.shutdown()
    for executor in prov.executors:
        executor.is_has_results = True
    prov.post_process()
def test_check_sequential_slots(self):
    """Capacity of 2 over five mock executors completes in 5 check cycles."""
    prov = Local()
    prov.settings["capacity"] = 2
    prov.engine = EngineEmul()
    prov.engine.config.merge({EXEC: [{}, {}, {}, {}, {}]})
    prov.engine.config.get("settings")["default-executor"] = "mock"
    prov.engine.unify_config()
    prov.prepare()
    prov.startup()
    iterations = 0
    while not prov.check():
        iterations += 1
    self.assertEqual(5, iterations)
    prov.shutdown()
    for executor in prov.executors:
        executor.is_has_results = True
    prov.post_process()