def find_plugins(cls):
    """Load all entry points with group name 'stressor.plugins'.

    Scans installed distributions once per process and records every
    entry point in ``cls._entry_point_map``, keyed by entry-point name.
    Duplicate names are logged and skipped.
    """
    # Only scan the installed distributions once.
    if cls.entry_points_searched:
        return
    cls.entry_points_searched = True

    entry_points = cls._entry_point_map
    logger.debug("Search entry points for group '{}'...".format(cls.namespace))
    for entry_point in iter_entry_points(group=cls.namespace, name=None):
        dist_name = "{}".format(entry_point.dist)
        logger.debug(
            "Found plugin {} from entry point `{}`".format(dist_name, entry_point)
        )
        if entry_point.name in entry_points:
            logger.warning(
                "Duplicate entry point name: {}; skipping...".format(entry_point.name)
            )
        else:
            # TODO: should plugins be allowed to override standard task
            # names (e.g. when 'extras=[override]' is passed)?
            entry_points[entry_point.name] = entry_point
    return
def run_in_threads(self, user_list, context):
    """Run one `SessionManager` per user, each in its own daemon thread.

    Creates a session/thread per user, starts them (optionally staggered
    by a random `ramp_up_delay`), and blocks until all have terminated.

    Args:
        user_list: sequence of user objects; one session per user.
        context: run context passed to every `SessionManager`.
    Returns:
        bool: True if no session reported errors.
    """
    self.publish("start_run", run_manager=self)
    self.stop_request.clear()

    # Create one SessionManager and one (not yet started) thread per user.
    thread_list = []
    self.session_list = []
    for i, user in enumerate(user_list, 1):
        name = "t{:02}".format(i)
        sess = SessionManager(self, context, name, user)
        self.session_list.append(sess)
        t = threading.Thread(name=name, target=self._run_one, args=[sess])
        # `Thread.setDaemon()` is deprecated; assign the attribute instead.
        t.daemon = True  # Required to make Ctrl-C work
        thread_list.append(t)

    logger.info("Starting {} session workers...".format(len(thread_list)))
    self.set_stage("running")
    self.stats.report_start(None, None, None)

    ramp_up_delay = self.config_manager.sessions.get("ramp_up_delay")
    start_run = time.monotonic()
    for i, t in enumerate(thread_list):
        # Stagger worker start-up: the first worker starts immediately,
        # every later one sleeps a random delay first.
        # (Fixed off-by-one: `i > 1` also started the second worker
        # without delay and logged a zero-based name that did not match
        # the "t01"-style thread names.)
        if ramp_up_delay and i > 0:
            delay = get_random_number(ramp_up_delay)
            logger.info(
                "Ramp-up delay for t{:02}: {:.2f} seconds...".format(i + 1, delay)
            )
            time.sleep(delay)
        t.start()

    logger.important(
        "All {} sessions running, waiting for them to terminate...".format(
            len(thread_list)
        )
    )
    for t in thread_list:
        t.join()

    self.set_stage("done")
    elap = time.monotonic() - start_run
    # self.stats.add_timing("run", elap)
    self.stats.report_end(None, None, None)
    self.publish("end_run", run_manager=self, elap=elap)
    logger.debug("Results for {}:\n{}".format(self, self.stats.format_result()))
    return not self.has_errors()
def repl(value, context, parent, parent_key):
    """Recursively expand `$(var.name)` macros in a config structure, in-place.

    Dicts and lists are walked recursively; strings are scanned for
    macros matching `VAR_MACRO_REX` and resolved against `context`.
    The expanded value is written back into `parent[parent_key]`.
    Strings under the `/?/script` path are left untouched, because
    their macros must be resolved at run-time.

    Raises:
        RuntimeError: if a referenced name is not found in `context`.

    NOTE(review): `stack`, `VAR_MACRO_REX`, `get_dict_attr`, and `logger`
    come from the enclosing scope — confirm against the surrounding code.
    """
    with stack.enter(str(parent_key or "?")):
        if isinstance(value, dict):
            for key, sub_val in value.items():
                repl(sub_val, context, value, key)
        elif isinstance(value, (list, tuple)):
            for idx, elem in enumerate(value):
                repl(elem, context, value, idx)
        elif isinstance(value, str):
            # Keep the original string for error messages:
            org_value = value
            if "$" in value and str(stack) == "/?/script":
                # Don't replace macros inside RunActivity scripts
                logger.debug("Not replacing macros inside `script`s.")
                return value
            # Repeat until no macros remain (an expansion may itself
            # contain further `$(...)` references):
            while "$" in value:
                found_one = False
                temp_val = value
                for match in VAR_MACRO_REX.finditer(temp_val):
                    found_one = True
                    # print(match, match.groups())
                    macro, var_name = match.group(), match.groups()[0]
                    # Resolve dotted names:
                    # var_value = context[var_name]
                    try:
                        var_value = get_dict_attr(context, var_name)
                        if value.strip() == macro:
                            # Replace macro string with resolved int, float, or str
                            value = var_value
                            break
                        # value contains a macro but also prefix or suffix.
                        # Cast macro-result to string and check for more macros
                        value = value.replace(macro, str(var_value))
                    except (KeyError, TypeError):
                        raise RuntimeError(
                            "Error evaluating {}: '{}': '{}' not found in context (or is None).".format(
                                stack, org_value, var_name
                            )
                        )
                # Stop when nothing matched, or the whole value was
                # replaced by a non-string (nothing left to scan):
                if not found_one or not isinstance(value, str):
                    break
            # Write the expanded result back into the container:
            parent[parent_key] = value
    return value
def _work(name):
    """Worker loop: pull URLs from the shared queue and request them until empty.

    Appends `(ok, name, url, error)` tuples to the shared `results` list
    and stops early when `session.stop_request` is set.

    NOTE(review): `session`, `queue`, `debug`, `bs`, `method`, `r_args`,
    and `results` are closure variables from the enclosing function —
    confirm against the surrounding code.
    """
    # logger.debug("StaticRequests({}) started...".format(name, ))
    while not session.stop_request.is_set():
        try:
            url = queue.get(False)
        except Empty:
            # Queue drained: this worker is done.
            break
        if debug:
            logger.info("StaticRequests({}, {})...".format(name, url))
        # The actual HTTP request:
        # TODO: requests.Session is not guaranteed to be thread-safe!
        try:
            res = bs.request(method, url, **r_args)
            res.raise_for_status()
            results.append((True, name, url, None))
        except Exception as e:
            # Record the failure; the caller decides whether to raise.
            results.append((False, name, url, "{}".format(e)))
        queue.task_done()
    logger.debug("StaticRequests({}) stopped.".format(name))
    return
def register_plugins(cls, arg_parser):
    """Call `register_fn` on all loaded entry points.

    Imports the stock plugin modules (so their subclasses of
    `ActivityBase` / `MacroBase` are discoverable), loads every
    'stressor.plugins' entry point, calls its register function, and
    finally rebuilds the activity and macro plugin maps from all known
    subclasses.

    Args:
        arg_parser: passed through to each plugin's register function.
    """
    if cls.plugins_registered:
        return
    cls.plugins_registered = True
    # Import stock class definitions, so we can scan the subclasses.
    # This will add standard plugins to ActivityBase.__subclasses__()
    # and MacroBase.__subclasses__():
    import stressor.plugins.common  # noqa F401
    import stressor.plugins.http_activities  # noqa F401
    import stressor.plugins.script_activities  # noqa F401

    # Load entry points from all installed modules that have the
    # 'stressor.plugins' namespace:
    cls.find_plugins()

    # Call `register_fn` on all loaded entry points:
    ep_map = cls._entry_point_map
    for name, ep in cls._entry_point_map.items():
        logger.info("Load plugins {}...".format(ep.dist))
        try:
            register_fn = ep.load()
            if not callable(register_fn):
                raise RuntimeError("Entry point {} is not a function".format(ep))
            ep_map[ep.name] = register_fn
        except Exception:
            logger.exception("Failed to load {}".format(ep))
            # Fixed: without this `continue`, a failed load fell through
            # and re-used `register_fn` from the *previous* iteration
            # (or raised NameError on the first one).
            continue

        # Snapshot known subclasses so we can detect what the plugin adds:
        prev_activities = list(ActivityBase.__subclasses__())
        prev_macros = list(MacroBase.__subclasses__())
        logger.debug("Register plugins {}...".format(ep.dist))
        try:
            # The plugin must declare new classes derived from
            # ActivityBase and/or MacroBase
            register_fn(
                activity_base=ActivityBase,
                macro_base=MacroBase,
                arg_parser=arg_parser,
            )
        except Exception:
            logger.exception("Could not register {}".format(name))
            continue

        # Log every class the plugin added; warn if it added none:
        found_one = False
        for activity_cls in ActivityBase.__subclasses__():
            if activity_cls in prev_activities:
                continue
            found_one = True
            logger.info("Register {}.{}".format(ep.dist, activity_cls))
        for macro_cls in MacroBase.__subclasses__():
            if macro_cls in prev_macros:
                continue
            found_one = True
            logger.info("Register {}.{}".format(ep.dist, macro_cls))
        if not found_one:
            logger.warning(
                "Plugin {} did not register activites nor macros".format(ep.dist)
            )

    # Build plugin maps from currently known subclasses
    cls._register_subclasses(ActivityBase, cls.activity_plugin_map)
    logger.debug("Registered activity plugins:\n{}".format(cls.activity_plugin_map))
    cls._register_subclasses(MacroBase, cls.macro_plugin_map)
    logger.debug("Registered macro plugins:\n{}".format(cls.macro_plugin_map))
    return
def _compile(self, value, parent=None, parent_key=None, stack=None):
    """Apply load-time conversions after a config file was read.

    - Replace activity definitions with instances of :class:`ActivityBase`
    - Resolve load-time macros (partly by replacing them with activities)

    **Note:** Some macros, especially `$(CONTEXT.VAR)` are *not* resolved
    here, because this needs to be done at run-time.

    Args:
        value: current config node (dict, list, or scalar).
        parent: container holding `value` (None for the top-level call).
        parent_key: key or index of `value` inside `parent`.
        stack: current config path (None for the top-level call).
    """
    pm = PluginManager
    assert pm.activity_plugin_map
    stats = self.stats_manager
    if stack is None:
        # Top-level call; `value` is the YAML config dict
        self.stack = PathStack("config")
        stack = self.stack
        # Register sequence names
        for seq_name in value.get("sequences", {}).keys():
            # Create an initial statistics dict for sequence_stats[SEQ_NAME]:
            stats.register_sequence(seq_name)

    # Use the activity name as path segment when compiling an activity node:
    if parent_key == "activity":
        path_info = parent.get(parent_key)
    else:
        path_info = parent_key

    with stack.enter(path_info, ignore=parent is None):
        # logger.debug("compile {}".format(stack))

        # Resolve `$name()` macros, which may replace themselves, e.g.
        #   - "GetRequest" -> `GetRequestActivity()`
        #   - "$load()" -> list or dict that needs to be compiled as well
        if isinstance(value, str) and "$" in value:
            has_match = False
            for macro_cls in pm.macro_plugin_map.values():
                try:
                    macro = macro_cls()
                    handled, res = macro.match_apply(self, parent, parent_key)
                    if handled:
                        has_match = True
                        logger.debug("Eval {}: {} => {}".format(stack, value, res))
                        # Re-init `value` in case the macro replaced it
                        value = parent[parent_key]
                        break
                except Exception as e:
                    msg = "Could not evaluate macro {!r}".format(value)
                    self.report_error(msg, exc=e)
                    # raise ConfigurationError(
                    #     "Could not evaluate {!r} at {}: {}".format(value, stack, e)
                    # ) from e
            if not has_match and GENERIC_MACRO_REX.match(value):
                msg = "Entry looks like a macro, but has no handler: '{}'".format(
                    value
                )
                self.report_error(msg, level="warning")

        # Resolve lists and dicts recursively:
        if isinstance(value, dict):
            # Macros may change the dictionary size, so iterate over a copy
            for key, sub_val in tuple(value.items()):
                self._compile(sub_val, value, key, stack)
            return
        elif isinstance(value, (list, tuple)):
            # Macros may change the list size, so iterate over a copy
            for idx, elem in enumerate(tuple(value)):
                self._compile(elem, value, idx, stack)
            return

        # Either 'activity' was already an activity name, or a preceding macro
        # set it:
        if isinstance(value, str) and value in pm.activity_plugin_map:
            # Replace the activity definition with an instance of the class.
            # Allow activities to do compile-time checking and processing
            activity_cls = pm.activity_plugin_map[value]
            try:
                # print(parent)
                activity_inst = activity_cls(self, **parent)
                parent[parent_key] = activity_inst
                if stats:
                    stats.register_activity(activity_inst)
            except ActivityCompileError as e:
                # Don't pass exc to suppress stack trace
                self.report_error("{}".format(e))
            except Exception as e:
                msg = "Could not evaluate activity {!r}".format(value)
                self.report_error(msg, exc=e)
                # logger.error("{} {}: {}".format(stack, value, e))
    return
def execute(self, session, **expanded_args):
    """Run the pre-compiled script (`self.script`) via `exec`.

    The script runs against a *copy* of the session context, extended
    with a `session` helper object.  A script variable named `result`
    becomes this method's return value; variables listed in
    `self.export` are copied back into `session.context`.

    Raises:
        ScriptActivityError: if the script raises, or defines new globals.
    """
    # NOTE(review): `exec` runs arbitrary configured script code by
    # design — config files must be trusted input.
    global_vars = {
        # "foo": 41,
        # "__builtins__": {},
    }
    # Work on a copy, so the script cannot mutate the context directly:
    # local_vars = session.context
    local_vars = session.context.copy()
    assert "result" not in local_vars
    assert "session" not in local_vars
    local_vars["session"] = session.make_helper()

    # Snapshot key sets so new names introduced by the script can be detected:
    # prev_local_keys = set(locals())
    prev_global_keys = set(globals())
    prev_context_keys = set(local_vars.keys())

    try:
        exec(self.script, global_vars, local_vars)
    except ConnectionError as e:
        # TODO: more requests-exceptions?
        msg = "Script failed: {!r}: {}".format(e, e)
        raise ScriptActivityError(msg)
    except Exception as e:
        msg = "Script failed: {!r}: {}".format(e, e)
        if session.verbose >= 4:
            # Verbose mode: log the full traceback and chain the cause
            logger.exception(msg)
            raise ScriptActivityError(msg) from e
        raise ScriptActivityError(msg)
    finally:
        # Remove the helper again, even if the script failed:
        local_vars.pop("session")

    result = local_vars.pop("result", None)

    # Copy exported variables the script created back into the context:
    context_keys = set(local_vars.keys())
    new_keys = context_keys.difference(prev_context_keys)
    if new_keys:
        if self.export is None:
            logger.info(
                "Skript activity has no `export` defined. Ignoring new variables: '{}'".format(
                    "', '".join(new_keys)
                )
            )
        else:
            for k in self.export:
                v = local_vars.get(k)
                # NOTE(review): `assert` is stripped under `python -O`;
                # an explicit type check would be more robust here.
                assert type(v) in (int, float, str, list, dict)
                session.context[k] = v
                logger.debug("Set context.{} = {!r}".format(k, v))
            # store_keys = new_keys.intersection(self.export)
            # TODO: this cannot happen?

    # Reject scripts that leaked names into this module's globals:
    new_globals = set(globals().keys()).difference(prev_global_keys)
    if new_globals:
        logger.warning("Script-defined globals: {}".format(new_globals))
        raise ScriptActivityError("Script introduced globals")

    # new_context = context_keys.difference(prev_context_keys)
    # logger.info("Script-defined context-keys: {}".format(new_context))
    # new_locals = set(locals().keys()).difference(prev_local_keys)
    # if new_locals:
    #     logger.info("Script-defined locals: {}".format(new_locals))
    # logger.info("Script locals:\n{}".format(pformat(local_vars)))

    if expanded_args.get("debug") or session.verbose >= 5:
        logger.info(
            "{} {}\n Context after execute:\n {}\n return value: {!r}".format(
                session.context_stack,
                self,
                pformat(session.context, indent=4),
                result,
            )
        )
    elif session.verbose >= 3 and result is not None:
        logger.info(
            "{} returnd: {!r}".format(
                session.context_stack,
                shorten_string(result, 200) if isinstance(result, str) else result,
            )
        )
    return result
def execute(self, session, **expanded_args):
    """Fetch a list of static resources concurrently using a small thread pool.

    Resolves every entry of `url_list` against the session's `base_url`,
    then issues GET requests from `thread_count` worker threads that
    share one queue.  Raises `ActivityError` if any request failed.

    Args:
        session: the current session manager (provides context, browser
            session, and stop_request).
        **expanded_args: activity options; `url_list` (required),
            `thread_count`, `debug`, plus any `REQUEST_ARGS`.
    """
    url_list = expanded_args.pop("url_list")
    base_url = session.get_context("base_url")
    expanded_args.setdefault("timeout", session.get_context("request_timeout"))
    debug = expanded_args.get("debug")
    thread_count = int(expanded_args.get("thread_count", 1))
    method = "GET"
    # Only pass through options that `requests` understands:
    r_args = {k: v for k, v in expanded_args.items() if k in self.REQUEST_ARGS}
    verify_ssl = session.sessions.get("verify_ssl", True)
    r_args.setdefault("verify", verify_ssl)
    basic_auth = session.sessions.get("basic_auth", False)
    if basic_auth:
        r_args.setdefault("auth", session.user.auth)
    headers = r_args.setdefault("headers", {})
    headers.setdefault(
        "User-Agent",
        "session/{} Stressor/{}".format(session.session_id, __version__),
    )
    # TODO: requests.Session is not guaranteed to be thread-safe!
    bs = session.browser_session

    # Queue-up all pending request
    queue = Queue()
    for url in url_list:
        url = resolve_url(base_url, url)
        queue.put(url)

    results = []

    def _work(name):
        # Worker loop: pull URLs from the shared queue until it is empty
        # (or a stop was requested) and record per-URL results.
        # logger.debug("StaticRequests({}) started...".format(name, ))
        while not session.stop_request.is_set():
            try:
                url = queue.get(False)
            except Empty:
                break
            if debug:
                logger.info("StaticRequests({}, {})...".format(name, url))
            # The actual HTTP request:
            # TODO: requests.Session is not guaranteed to be thread-safe!
            try:
                res = bs.request(method, url, **r_args)
                res.raise_for_status()
                results.append((True, name, url, None))
            except Exception as e:
                results.append((False, name, url, "{}".format(e)))
            queue.task_done()
        logger.debug("StaticRequests({}) stopped.".format(name))
        return

    logger.debug(
        "Starting {} StaticRequestsActivity workers...".format(thread_count)
    )
    thread_list = []
    for i in range(thread_count):
        name = "{}.{:02}".format(session.session_id, i + 1)
        t = threading.Thread(name=name, target=_work, args=[name])
        # Fixed: `Thread.setDaemon()` is deprecated; assign the attribute.
        t.daemon = True  # Required to make Ctrl-C work
        thread_list.append(t)
    for t in thread_list:
        t.start()
    logger.debug("All StaticRequestsActivity workers running...")
    # Wait until every queued URL has been processed, then for the workers:
    queue.join()
    for t in thread_list:
        t.join()

    errors = ["{}".format(error) for ok, name, url, error in results if not ok]
    if errors:
        raise ActivityError(
            "{} reqests failed:\n{}".format(len(errors), format(errors))
        )
    # logger.error(pformat(errors))
    # NOTE(review): at this point `errors` is always empty, so this
    # always returns False — confirm intent.
    return bool(errors)