def test_csv_stats_on_master_from_aggregated_stats(self):
    # Failing test for: https://github.com/locustio/locust/issues/1315
    with mock.patch("locust.rpc.rpc.Server", mocked_rpc()) as server:
        environment = Environment()
        stats_writer = StatsCSVFileWriter(
            environment, PERCENTILES_TO_REPORT, self.STATS_BASE_NAME, full_history=True
        )
        master = environment.create_master_runner(master_bind_host="*", master_bind_port=0)
        greenlet = gevent.spawn(stats_writer)
        gevent.sleep(_TEST_CSV_STATS_INTERVAL_WAIT_SEC)

        server.mocked_send(Message("client_ready", __version__, "fake_client"))

        master.stats.get("/", "GET").log(100, 23455)
        master.stats.get("/", "GET").log(800, 23455)
        master.stats.get("/", "GET").log(700, 23455)

        data = {"user_count": 1}
        environment.events.report_to_master.fire(client_id="fake_client", data=data)
        master.stats.clear_all()

        server.mocked_send(Message("stats", data, "fake_client"))
        s = master.stats.get("/", "GET")
        self.assertEqual(700, s.median_response_time)

        gevent.kill(greenlet)
        stats_writer.close_files()

        self.assertTrue(os.path.exists(self.STATS_FILENAME))
        self.assertTrue(os.path.exists(self.STATS_HISTORY_FILENAME))
        self.assertTrue(os.path.exists(self.STATS_FAILURES_FILENAME))
        self.assertTrue(os.path.exists(self.STATS_EXCEPTIONS_FILENAME))
def _write_csv_files(environment, stats_base_name, full_history=False):
    """Spawn CSV writer and exit loop after first iteration."""
    stats_writer = StatsCSVFileWriter(environment, PERCENTILES_TO_REPORT, stats_base_name, full_history=full_history)
    greenlet = gevent.spawn(stats_writer)
    gevent.sleep(_TEST_CSV_STATS_INTERVAL_WAIT_SEC)
    gevent.kill(greenlet)
    stats_writer.close_files()
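The test helpers above all follow the same lifecycle: build the writer, spawn it as a greenlet, let it run for at least one write interval, then kill it and call close_files(). The sketch below condenses that lifecycle into a standalone snippet. It only reuses calls that appear in these examples (Environment, create_local_runner, log_request, the callable writer instance, close_files); the base name is a placeholder, and the interval override copies what the web UI setUp further down does, so treat it as an illustration rather than canonical locust usage.

import gevent
import locust.stats
from locust.env import Environment
from locust.stats import PERCENTILES_TO_REPORT, StatsCSVFileWriter

CSV_BASE_NAME = "example"  # placeholder prefix: files become example_stats.csv, example_stats_history.csv, ...

# Shorten the write interval, as the setUp examples below do, so one iteration runs quickly.
locust.stats.CSV_STATS_INTERVAL_SEC = 0.1

environment = Environment()
environment.create_local_runner()

# Record a couple of fake samples directly on the runner's stats, as the tests above do.
environment.runner.stats.log_request("GET", "/", 10, 10)
environment.runner.stats.log_request("GET", "/", 20, 10)

# The writer instance is callable, so it can be spawned as a greenlet directly.
stats_writer = StatsCSVFileWriter(environment, PERCENTILES_TO_REPORT, CSV_BASE_NAME, full_history=True)
greenlet = gevent.spawn(stats_writer)
gevent.sleep(0.3)  # let at least one write interval pass

gevent.kill(greenlet)
stats_writer.close_files()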
def test_csv_stats_writer_full_history(self):
    stats_writer = StatsCSVFileWriter(self.environment, PERCENTILES_TO_REPORT, self.STATS_BASE_NAME, full_history=True)
    self.runner.stats.log_request("GET", "/", 10, content_length=666)
    greenlet = gevent.spawn(stats_writer)
    gevent.sleep(_TEST_CSV_STATS_INTERVAL_WAIT_SEC)
    gevent.kill(greenlet)
    stats_writer.close_files()

    self.assertTrue(os.path.exists(self.STATS_FILENAME))
    self.assertTrue(os.path.exists(self.STATS_HISTORY_FILENAME))
    self.assertTrue(os.path.exists(self.STATS_FAILURES_FILENAME))
    self.assertTrue(os.path.exists(self.STATS_EXCEPTIONS_FILENAME))

    with open(self.STATS_HISTORY_FILENAME) as f:
        reader = csv.DictReader(f)
        rows = [r for r in reader]

    self.assertEqual(4, len(rows))
    self.assertEqual("/", rows[0]["Name"])
    self.assertEqual("Aggregated", rows[1]["Name"])
    self.assertEqual("/", rows[2]["Name"])
    self.assertEqual("Aggregated", rows[3]["Name"])
def test_user_count_in_csv_history_stats(self):
    start_time = int(time.time())

    class TestUser(User):
        wait_time = constant(10)

        @task
        def t(self):
            self.environment.runner.stats.log_request("GET", "/", 10, 10)

    environment = Environment(user_classes=[TestUser])
    stats_writer = StatsCSVFileWriter(environment, PERCENTILES_TO_REPORT, self.STATS_BASE_NAME, full_history=True)
    runner = environment.create_local_runner()

    # spawn a user every _TEST_CSV_STATS_INTERVAL_SEC second
    user_count = 15
    spawn_rate = 5
    assert 1 / 5 == _TEST_CSV_STATS_INTERVAL_SEC
    runner_greenlet = gevent.spawn(runner.start, user_count, spawn_rate)
    gevent.sleep(0.1)

    greenlet = gevent.spawn(stats_writer)
    gevent.sleep(user_count / spawn_rate)
    gevent.kill(greenlet)
    stats_writer.close_files()
    runner.stop()
    gevent.kill(runner_greenlet)

    with open(self.STATS_HISTORY_FILENAME) as f:
        reader = csv.DictReader(f)
        rows = [r for r in reader]

    self.assertEqual(2 * user_count, len(rows))
    for i in range(int(user_count / spawn_rate)):
        for _ in range(spawn_rate):
            row = rows.pop(0)
            self.assertEqual("%i" % ((i + 1) * spawn_rate), row["User Count"])
            self.assertEqual("/", row["Name"])
            self.assertEqual("%i" % ((i + 1) * spawn_rate), row["Total Request Count"])
            self.assertGreaterEqual(int(row["Timestamp"]), start_time)
            row = rows.pop(0)
            self.assertEqual("%i" % ((i + 1) * spawn_rate), row["User Count"])
            self.assertEqual("Aggregated", row["Name"])
            self.assertEqual("%i" % ((i + 1) * spawn_rate), row["Total Request Count"])
            self.assertGreaterEqual(int(row["Timestamp"]), start_time)
def test_csv_stats_writer_full_history(self):
    stats_writer = StatsCSVFileWriter(self.environment, PERCENTILES_TO_REPORT, self.STATS_BASE_NAME, full_history=True)
    for i in range(10):
        self.runner.stats.log_request("GET", "/", 100, content_length=666)
    greenlet = gevent.spawn(stats_writer)
    gevent.sleep(10)
    for i in range(10):
        self.runner.stats.log_request("GET", "/", 10, content_length=666)
    gevent.sleep(5)
    gevent.sleep(_TEST_CSV_STATS_INTERVAL_WAIT_SEC)
    gevent.kill(greenlet)
    stats_writer.close_files()

    self.assertTrue(os.path.exists(self.STATS_FILENAME))
    self.assertTrue(os.path.exists(self.STATS_HISTORY_FILENAME))
    self.assertTrue(os.path.exists(self.STATS_FAILURES_FILENAME))
    self.assertTrue(os.path.exists(self.STATS_EXCEPTIONS_FILENAME))

    with open(self.STATS_HISTORY_FILENAME) as f:
        reader = csv.DictReader(f)
        rows = [r for r in reader]

    self.assertGreaterEqual(len(rows), 130)
    self.assertEqual("/", rows[0]["Name"])
    self.assertEqual("Aggregated", rows[1]["Name"])
    self.assertEqual("/", rows[2]["Name"])
    self.assertEqual("Aggregated", rows[3]["Name"])
    self.assertEqual("20", rows[-1]["Total Request Count"])

    saw100 = False
    saw10 = False
    for row in rows:
        if not saw100 and row["95%"] == "100":
            saw100 = True
        elif saw100 and row["95%"] == "10":
            saw10 = True
            break
    self.assertTrue(saw100, "Never saw 95th percentile increase to 100")
    self.assertTrue(saw10, "Never saw 95th percentile decrease to 10")
def setUp(self):
    super().setUp()
    self.remove_files_if_exists()

    parser = get_parser(default_config_files=[])
    self.environment.parsed_options = parser.parse_args(["--csv", self.STATS_BASE_NAME, "--csv-full-history"])
    self.stats = self.environment.stats
    self.stats.CSV_STATS_INTERVAL_SEC = 0.02

    locust.stats.CSV_STATS_INTERVAL_SEC = 0.1
    self.stats_csv_writer = StatsCSVFileWriter(
        self.environment, stats.PERCENTILES_TO_REPORT, self.STATS_BASE_NAME, full_history=True
    )
    self.web_ui = self.environment.create_web_ui("127.0.0.1", 0, stats_csv_writer=self.stats_csv_writer)
    self.web_ui.app.view_functions["request_stats"].clear_cache()
    gevent.sleep(0.01)
    self.web_port = self.web_ui.server.server_port
setup_logging("INFO", None) logger = logging.getLogger() greenlet_exception_handler = greenlet_exception_logger(logger) options = parse_options() from app.OnceUser import WebsiteUser env = Environment(user_classes=[WebsiteUser], host="http://www.google.com", reset_stats=True, parsed_options=options) if os.getenv('OnceUser.py'): pass else: pass #raise ValueError('test was not loaded') stats_csv_writer = StatsCSVFileWriter(env, stats.PERCENTILES_TO_REPORT, './app/reports', options.stats_history_enabled) print(type(stats_csv_writer)) env.create_local_runner() env.runner.start(LOCUST_USER_COUNT, LOCUST_SPAWN_RATE) gevent.spawn(stats_printer(env.stats)) gevent.spawn(stats_csv_writer.stats_writer).link_exception(greenlet_exception_handler) gevent.spawn_later(LOCUST_RUN_TIME, lambda: env.runner.quit()) env.runner.greenlet.join() env.runner.stop() env.runner.quit() ftp = FTP(host="192.168.18.16", user='******', passwd='1234') ftp.set_pasv(False) for filename in listdir(TARGET_APP_DIR):
class TestWebUIFullHistory(LocustTestCase, _HeaderCheckMixin):
    STATS_BASE_NAME = "web_test"
    STATS_FILENAME = "{}_stats.csv".format(STATS_BASE_NAME)
    STATS_HISTORY_FILENAME = "{}_stats_history.csv".format(STATS_BASE_NAME)
    STATS_FAILURES_FILENAME = "{}_failures.csv".format(STATS_BASE_NAME)

    def setUp(self):
        super().setUp()
        self.remove_files_if_exists()

        parser = get_parser(default_config_files=[])
        self.environment.parsed_options = parser.parse_args(["--csv", self.STATS_BASE_NAME, "--csv-full-history"])
        self.stats = self.environment.stats
        self.stats.CSV_STATS_INTERVAL_SEC = 0.02

        locust.stats.CSV_STATS_INTERVAL_SEC = 0.1
        self.stats_csv_writer = StatsCSVFileWriter(
            self.environment, stats.PERCENTILES_TO_REPORT, self.STATS_BASE_NAME, full_history=True
        )
        self.web_ui = self.environment.create_web_ui("127.0.0.1", 0, stats_csv_writer=self.stats_csv_writer)
        self.web_ui.app.view_functions["request_stats"].clear_cache()
        gevent.sleep(0.01)
        self.web_port = self.web_ui.server.server_port

    def tearDown(self):
        super().tearDown()
        self.web_ui.stop()
        self.runner.quit()
        self.remove_files_if_exists()

    def remove_file_if_exists(self, filename):
        if os.path.exists(filename):
            os.remove(filename)

    def remove_files_if_exists(self):
        self.remove_file_if_exists(self.STATS_FILENAME)
        self.remove_file_if_exists(self.STATS_HISTORY_FILENAME)
        self.remove_file_if_exists(self.STATS_FAILURES_FILENAME)

    def test_request_stats_full_history_csv(self):
        self.stats.log_request("GET", "/test", 1.39764125, 2)
        self.stats.log_request("GET", "/test", 999.9764125, 1000)
        self.stats.log_request("GET", "/test2", 120, 5612)

        greenlet = gevent.spawn(self.stats_csv_writer.stats_writer)
        gevent.sleep(0.01)
        self.stats_csv_writer.stats_history_flush()
        gevent.kill(greenlet)

        response = requests.get("http://127.0.0.1:%i/stats/requests_full_history/csv" % self.web_port)
        self.assertEqual(200, response.status_code)
        self._check_csv_headers(response.headers, "requests_full_history")
        self.assertIn("Content-Length", response.headers)

        reader = csv.reader(StringIO(response.text))
        rows = [r for r in reader]

        self.assertEqual(4, len(rows))
        self.assertEqual("Timestamp", rows[0][0])
        self.assertEqual("GET", rows[1][2])
        self.assertEqual("/test", rows[1][3])
        self.assertEqual("/test2", rows[2][3])
        self.assertEqual("", rows[3][2])
        self.assertEqual("Aggregated", rows[3][3])
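Outside of a test client, the same CSV endpoint can be pulled from a running Locust web UI that was started with --csv and --csv-full-history; the endpoint is only served when a full-history writer is configured, which is what the setUp above arranges. A small sketch under those assumptions (the host and port are placeholders, and the column names are the ones asserted in the tests above):

import csv
from io import StringIO

import requests

LOCUST_WEB = "http://127.0.0.1:8089"  # placeholder address of a running web UI

# Fetch the full-history CSV that the test above exercises through the test client.
response = requests.get(f"{LOCUST_WEB}/stats/requests_full_history/csv")
response.raise_for_status()

for row in csv.DictReader(StringIO(response.text)):
    print(row["Timestamp"], row["Name"], row["Total Request Count"])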
def main():
    # find specified locustfile and make sure it exists, using a very simplified
    # command line parser that is only used to parse the -f option
    locustfile = parse_locustfile_option()

    # import the locustfile
    docstring, user_classes, shape_class = load_locustfile(locustfile)

    # parse all command line options
    options = parse_options()

    if options.slave or options.expect_slaves:
        sys.stderr.write("The --slave/--expect-slaves parameters have been renamed --worker/--expect-workers\n")
        sys.exit(1)

    if options.hatch_rate:
        sys.stderr.write("[DEPRECATED] The --hatch-rate parameter has been renamed --spawn-rate\n")
        options.spawn_rate = options.hatch_rate

    # setup logging
    if not options.skip_log_setup:
        if options.loglevel.upper() in ["DEBUG", "INFO", "WARNING", "ERROR", "CRITICAL"]:
            setup_logging(options.loglevel, options.logfile)
        else:
            sys.stderr.write("Invalid --loglevel. Valid values are: DEBUG/INFO/WARNING/ERROR/CRITICAL\n")
            sys.exit(1)

    logger = logging.getLogger(__name__)
    greenlet_exception_handler = greenlet_exception_logger(logger)

    if options.list_commands:
        print("Available Users:")
        for name in user_classes:
            print(" " + name)
        sys.exit(0)

    if not user_classes:
        logger.error("No User class found!")
        sys.exit(1)

    # make sure specified User exists
    if options.user_classes:
        missing = set(options.user_classes) - set(user_classes.keys())
        if missing:
            logger.error("Unknown User(s): %s\n" % (", ".join(missing)))
            sys.exit(1)
        else:
            names = set(options.user_classes) & set(user_classes.keys())
            user_classes = [user_classes[n] for n in names]
    else:
        # list() call is needed to consume the dict_view object in Python 3
        user_classes = list(user_classes.values())

    try:
        import resource

        if resource.getrlimit(resource.RLIMIT_NOFILE)[0] < 10000:
            # Increasing the limit to 10000 within a running process should work on at least MacOS.
            # It does not work on all OS:es, but we should be no worse off for trying.
            resource.setrlimit(resource.RLIMIT_NOFILE, [10000, resource.RLIM_INFINITY])
    except:
        logger.warning(
            "System open file limit setting is not high enough for load testing, and the OS didn't allow locust to increase it by itself. See https://github.com/locustio/locust/wiki/Installation#increasing-maximum-number-of-open-files-limit for more info."
        )

    # create locust Environment
    environment = create_environment(user_classes, options, events=locust.events, shape_class=shape_class)

    if shape_class and (options.num_users or options.spawn_rate or options.step_load):
        logger.error(
            "The specified locustfile contains a shape class but a conflicting argument was specified: users, spawn-rate or step-load"
        )
        sys.exit(1)

    if options.show_task_ratio:
        print("\n Task ratio per User class")
        print("-" * 80)
        print_task_ratio(user_classes)
        print("\n Total task ratio")
        print("-" * 80)
        print_task_ratio(user_classes, total=True)
        sys.exit(0)

    if options.show_task_ratio_json:
        from json import dumps

        task_data = {
            "per_class": get_task_ratio_dict(user_classes),
            "total": get_task_ratio_dict(user_classes, total=True),
        }
        print(dumps(task_data))
        sys.exit(0)

    if options.step_time:
        if not options.step_load:
            logger.error("The --step-time argument can only be used together with --step-load")
            sys.exit(1)
        if options.worker:
            logger.error("--step-time should be specified on the master node, and not on worker nodes")
            sys.exit(1)
        try:
            options.step_time = parse_timespan(options.step_time)
        except ValueError:
            logger.error("Valid --step-time formats are: 20, 20s, 3m, 2h, 1h20m, 3h30m10s, etc.")
            sys.exit(1)

    if options.master:
        runner = environment.create_master_runner(
            master_bind_host=options.master_bind_host,
            master_bind_port=options.master_bind_port,
        )
    elif options.worker:
        try:
            runner = environment.create_worker_runner(options.master_host, options.master_port)
        except socket.error as e:
            logger.error("Failed to connect to the Locust master: %s", e)
            sys.exit(-1)
    else:
        runner = environment.create_local_runner()

    # main_greenlet is pointing to runners.greenlet by default, it will point to the web greenlet later if in web mode
    main_greenlet = runner.greenlet

    if options.run_time:
        if not options.headless:
            logger.error("The --run-time argument can only be used together with --headless")
            sys.exit(1)
        if options.worker:
            logger.error("--run-time should be specified on the master node, and not on worker nodes")
            sys.exit(1)
        try:
            options.run_time = parse_timespan(options.run_time)
        except ValueError:
            logger.error("Valid --run-time formats are: 20, 20s, 3m, 2h, 1h20m, 3h30m10s, etc.")
            sys.exit(1)

    def spawn_run_time_limit_greenlet():
        logger.info("Run time limit set to %s seconds" % options.run_time)

        def timelimit_stop():
            logger.info("Time limit reached. Stopping Locust.")
            runner.quit()

        gevent.spawn_later(options.run_time, timelimit_stop).link_exception(greenlet_exception_handler)

    if options.csv_prefix:
        stats_csv_writer = StatsCSVFileWriter(
            environment, stats.PERCENTILES_TO_REPORT, options.csv_prefix, options.stats_history_enabled
        )
    else:
        stats_csv_writer = StatsCSV(environment, stats.PERCENTILES_TO_REPORT)

    # start Web UI
    if not options.headless and not options.worker:
        # spawn web greenlet
        protocol = "https" if options.tls_cert and options.tls_key else "http"
        try:
            if options.web_host == "*":
                # special check for "*" so that we're consistent with --master-bind-host
                web_host = ""
            else:
                web_host = options.web_host
            if web_host:
                logger.info("Starting web interface at %s://%s:%s" % (protocol, web_host, options.web_port))
            else:
                logger.info(
                    "Starting web interface at %s://0.0.0.0:%s (accepting connections from all network interfaces)"
                    % (protocol, options.web_port)
                )
            web_ui = environment.create_web_ui(
                host=web_host,
                port=options.web_port,
                auth_credentials=options.web_auth,
                tls_cert=options.tls_cert,
                tls_key=options.tls_key,
                stats_csv_writer=stats_csv_writer,
            )
        except AuthCredentialsError:
            logger.error("Credentials supplied with --web-auth should have the format: username:password")
            sys.exit(1)
        else:
            main_greenlet = web_ui.greenlet
    else:
        web_ui = None

    # Fire locust init event which can be used by end-users' code to run setup code that
    # needs access to the Environment, Runner or WebUI
    environment.events.init.fire(environment=environment, runner=runner, web_ui=web_ui)

    if options.headless:
        # headless mode
        if options.master:
            # wait for worker nodes to connect
            while len(runner.clients.ready) < options.expect_workers:
                logging.info(
                    "Waiting for workers to be ready, %s of %s connected",
                    len(runner.clients.ready),
                    options.expect_workers,
                )
                time.sleep(1)
        if not options.worker:
            # apply headless mode defaults
            if options.num_users is None:
                options.num_users = 1
            if options.spawn_rate is None:
                options.spawn_rate = 1
            if options.step_users is None:
                options.step_users = 1

            # start the test
            if options.step_time:
                runner.start_stepload(options.num_users, options.spawn_rate, options.step_users, options.step_time)
            if environment.shape_class:
                environment.runner.start_shape()
            else:
                runner.start(options.num_users, options.spawn_rate)
            if options.run_time:
                spawn_run_time_limit_greenlet()

    stats_printer_greenlet = None
    if not options.only_summary and (options.print_stats or (options.headless and not options.worker)):
        # spawn stats printing greenlet
        stats_printer_greenlet = gevent.spawn(stats_printer(runner.stats))
        stats_printer_greenlet.link_exception(greenlet_exception_handler)

    if options.csv_prefix:
        gevent.spawn(stats_csv_writer.stats_writer).link_exception(greenlet_exception_handler)

    gevent.spawn(stats_history, runner)

    def shutdown():
        """
        Shut down locust by firing quitting event, printing/writing stats and exiting
        """
        logger.info("Running teardowns...")
        environment.events.quitting.fire(environment=environment, reverse=True)

        # determine the process exit code
        if log.unhandled_greenlet_exception:
            code = 2
        elif environment.process_exit_code is not None:
            code = environment.process_exit_code
        elif len(runner.errors) or len(runner.exceptions):
            code = options.exit_code_on_error
        else:
            code = 0

        logger.info("Shutting down (exit code %s), bye." % code)
        if stats_printer_greenlet is not None:
            stats_printer_greenlet.kill(block=False)
        logger.info("Cleaning up runner...")
        if runner is not None:
            runner.quit()

        print_stats(runner.stats, current=False)
        print_percentile_stats(runner.stats)
        print_error_report(runner.stats)

        sys.exit(code)

    # install SIGTERM handler
    def sig_term_handler():
        logger.info("Got SIGTERM signal")
        shutdown()

    gevent.signal_handler(signal.SIGTERM, sig_term_handler)

    try:
        logger.info("Starting Locust %s" % version)
        main_greenlet.join()
        shutdown()
    except KeyboardInterrupt as e:
        shutdown()