def _load_tasks(self, stage, container):
    if not isinstance(self.parameters.get(stage, []), list):
        self.parameters[stage] = [self.parameters[stage]]

    for index, stage_task in enumerate(self.parameters.get(stage, [])):
        stage_task = ensure_is_dict(self.parameters[stage], index, "command")
        task_config = self.parameters[stage][index]

        default_cwd = self.settings.get("default-cwd", None)
        cwd = self.engine.find_file(task_config.get("cwd", default_cwd))
        if cwd is None:
            working_dir = self.engine.default_cwd
        elif cwd == 'artifacts-dir':
            working_dir = self.engine.artifacts_dir
        else:
            working_dir = cwd

        # make copy of env for every task
        env = Environment(self.log, self.env.get())
        env.set(task_config.get('env'))
        env.add_path({"PYTHONPATH": working_dir})

        task = Task(task_config, self.log, working_dir, env)
        container.append(task)
        self.log.debug("Added %s task: %s", stage, stage_task)
def test_nesting(self):
    v1 = 'val_param_name'
    v2 = 'path_param_name'
    v3 = 'const_val'

    os.environ[v1] = 'v1.1'
    os.environ[v2] = 'v1.2'
    os.environ[v3] = 'v1.3'

    e1 = Environment()
    e1.set({v1: 'local_val1.1'})
    e1.add_path({v2: 'param_val1.1'}, finish=True)

    e2 = Environment(parent=e1)
    e1.add_path({v2: 'param_val1.3'}, finish=True)

    os.environ[v1] = 'v2.1'
    os.environ[v2] = 'v2.2'
    os.environ[v3] = 'v2.3'

    e1.set({v1: 'local_val1.2'})
    e2.add_path({v2: 'param_val1.2'}, finish=True)

    self.assertEqual(e1.get(v1), 'local_val1.2')
    self.assertEqual(e2.get(v1), 'local_val1.1')

    self.assertEqual(e1.get(v2), os.pathsep.join(('v2.2', 'param_val1.1', 'param_val1.3')))
    self.assertEqual(e2.get(v2), os.pathsep.join(('v2.2', 'param_val1.1', 'param_val1.2')))

    self.assertEqual(e1.get(v3), 'v2.3')
    self.assertEqual(e2.get(v3), 'v2.3')
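# A minimal sketch (not from the source) restating what test_nesting above verifies,
# assuming the same imports as the test module (os and this codebase's Environment):
# a child Environment snapshots the parent's local values at creation time, while
# variables that live only in os.environ are resolved at get() time.
os.environ['ONLY_IN_OS'] = 'old'

parent = Environment()
parent.set({'LOCAL': 'before-fork'})

child = Environment(parent=parent)
parent.set({'LOCAL': 'after-fork'})   # later parent change, not copied into the child
os.environ['ONLY_IN_OS'] = 'new'      # OS-level change, seen by both on get()

assert parent.get('LOCAL') == 'after-fork'
assert child.get('LOCAL') == 'before-fork'
assert child.get('ONLY_IN_OS') == 'new'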
def start_subprocess(self, args, cwd, stdout, stderr, stdin, shell, env):
    if cwd is None:
        cwd = self.default_cwd

    env = Environment(self.log, env.get())
    env.set(self.shared_env.get())

    return shell_exec(args, cwd=cwd, stdout=stdout, stderr=stderr, stdin=stdin, shell=shell, env=env.get())
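# Hypothetical call sketch for start_subprocess above (the names `engine` and `task_env`
# are illustrative, not from the source): the passed env is copied, then shared_env is
# applied on top, so shared values win and the caller's Environment stays unchanged.
task_env = Environment(engine.log, {"FOO": "from-task"})
engine.shared_env.set({"FOO": "from-engine"})

process = engine.start_subprocess(args=["echo", "hello"], cwd=None,
                                  stdout=PIPE, stderr=PIPE, stdin=None,
                                  shell=False, env=task_env)
# The child process sees FOO="from-engine"; task_env itself still maps FOO to "from-task".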
def _load_tasks(self, stage, container):
    if not isinstance(self.parameters.get(stage, []), list):
        self.parameters[stage] = [self.parameters[stage]]

    for index, stage_task in enumerate(self.parameters.get(stage, [])):
        stage_task = ensure_is_dict(self.parameters[stage], index, "command")
        task_config = self.parameters[stage][index]

        default_cwd = self.settings.get("default-cwd", None)
        cwd = self.engine.find_file(task_config.get("cwd", default_cwd))
        if cwd is None:
            working_dir = self.engine.default_cwd
        elif cwd == 'artifacts-dir':
            working_dir = self.engine.artifacts_dir
        else:
            working_dir = cwd

        # make copy of env for every task
        env = Environment(self.log, self.env)
        env.set(task_config.get('env'))
        env.add_path({"PYTHONPATH": working_dir})

        task = Task(task_config, self.log, working_dir, env)

        f_name = 'shellexec_%s_%s' % (stage, index)
        if task.out:
            task.out = open(task.out, 'at')
        else:
            if task.is_background:
                out = self.engine.create_artifact(f_name, '.out')
                self.log.debug('STDOUT of background task "%s" redirected to "%s"' % (task, out))
                task.out = open(out, 'at')
            else:
                task.out = PIPE

        if task.err:
            task.err = open(task.err, 'at')
        else:
            if task.is_background:
                err = self.engine.create_artifact(f_name, '.err')
                self.log.debug('STDERR of background task "%s" redirected to "%s"' % (task, err))
                task.err = open(err, 'at')
            else:
                task.err = PIPE

        container.append(task)
        self.log.debug("Added %s task: %s", stage, stage_task)
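# Illustrative task configs for the stdout/stderr branches in _load_tasks above.
# Only "command", "cwd" and "env" appear in that snippet; treating "background",
# "out" and "err" as the config keys behind Task.is_background/out/err is an
# assumption about the Task class, not something shown here.
tasks = [
    "echo quick foreground task",                            # plain string -> {"command": ...}, stdout/stderr piped
    {"command": "tail -f server.log", "background": True},   # background task -> redirected to artifact files
    {"command": "make report", "out": "report.out"},          # explicit file -> opened in append mode
]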
class Engine(object): """ Core entity of the technology, used to coordinate whole process :type reporters: list[Reporter] :type services: list[Service] :type log: logging.Logger :type aggregator: bzt.modules.aggregator.ConsolidatingAggregator :type stopping_reason: BaseException """ ARTIFACTS_DIR = "%Y-%m-%d_%H-%M-%S.%f" def __init__(self, parent_logger): """ :type parent_logger: logging.Logger """ self.file_search_paths = [] self.services = [] self.__artifacts = [] self.reporters = [] self.artifacts_dir = None self.log = parent_logger.getChild(self.__class__.__name__) self.env = Environment(self.log, dict(os.environ)) self.shared_env = Environment(self.log) self.config = Configuration() self.config.log = self.log.getChild(Configuration.__name__) self.modules = {} # available modules self.provisioning = Provisioning() self.aggregator = Aggregator(is_functional=False) self.interrupted = False self.check_interval = 1 self.stopping_reason = None self.engine_loop_utilization = 0 self.prepared = [] self.started = [] self.default_cwd = None self.logging_level_down = lambda: None self.logging_level_up = lambda: None def configure(self, user_configs, read_config_files=True): """ Load configuration files :type user_configs: list[str] :type read_config_files: bool """ self.log.info("Configuring...") if read_config_files: self._load_base_configs() merged_config = self._load_user_configs(user_configs) all_includes = [] while "included-configs" in self.config: includes = self.config.pop("included-configs") included_configs = [self.find_file(conf) for conf in includes if conf not in all_includes + user_configs] all_includes += includes self.config.load(included_configs) self.config['included-configs'] = all_includes self.config.merge({"version": bzt.VERSION}) self._set_up_proxy() if self.config.get(SETTINGS).get("check-updates", True): install_id = self.config.get("install-id", self._generate_id()) def wrapper(): return self._check_updates(install_id) thread = threading.Thread(target=wrapper) # intentionally non-daemon thread thread.start() return merged_config def _generate_id(self): if os.getenv("JENKINS_HOME"): prefix = "jenkins" elif os.getenv("TRAVIS"): prefix = "travis" elif any([key.startswith("bamboo") for key in os.environ.keys()]): prefix = "bamboo" elif os.getenv("TEAMCITY_VERSION"): prefix = "teamcity" elif os.getenv("DOCKER_HOST"): prefix = "docker" elif os.getenv("AWS_"): prefix = "amazon" elif os.getenv("GOOGLE_APPLICATION_CREDENTIALS") or os.getenv("CLOUDSDK_CONFIG"): prefix = "google_cloud" elif os.getenv("WEBJOBS_NAME"): prefix = "azure" elif is_linux(): prefix = 'linux' elif is_windows(): prefix = 'windows' else: prefix = 'macos' return "%s-%x" % (prefix, uuid.getnode()) def prepare(self): """ Prepare engine for work, will call preparing of Provisioning and add downstream EngineModule instances """ self.log.info("Preparing...") interval = self.config.get(SETTINGS).get("check-interval", self.check_interval) self.check_interval = dehumanize_time(interval) try: self.__prepare_aggregator() self.__prepare_services() self.__prepare_provisioning() self.__prepare_reporters() self.config.dump() except BaseException as exc: self.stopping_reason = exc raise def _startup(self): modules = self.services + [self.aggregator] + self.reporters + [self.provisioning] # order matters for module in modules: self.log.debug("Startup %s", module) self.started.append(module) module.startup() self.config.dump() def start_subprocess(self, args, cwd, stdout, stderr, stdin, shell, env): if cwd is None: cwd = 
self.default_cwd env = Environment(self.log, env.get()) env.set(self.shared_env.get()) return shell_exec(args, cwd=cwd, stdout=stdout, stderr=stderr, stdin=stdin, shell=shell, env=env.get()) def run(self): """ Run the job. Calls `startup`, does periodic `check`, calls `shutdown` in any case """ self.log.info("Starting...") exc_info = None try: self._startup() self.logging_level_down() self._wait() except BaseException as exc: self.log.debug("%s:\n%s", exc, traceback.format_exc()) self.stopping_reason = exc exc_info = sys.exc_info() finally: self.log.warning("Please wait for graceful shutdown...") try: self.logging_level_up() self._shutdown() except BaseException as exc: self.log.debug("%s:\n%s", exc, traceback.format_exc()) if not self.stopping_reason: self.stopping_reason = exc if not exc_info: exc_info = sys.exc_info() if exc_info: reraise(exc_info) def _check_modules_list(self): stop = False modules = [self.provisioning, self.aggregator] + self.services + self.reporters # order matters for module in modules: if module in self.started: self.log.debug("Checking %s", module) finished = bool(module.check()) if finished: self.log.debug("%s finished", module) stop = finished return stop def _wait(self): """ Wait modules for finish :return: """ prev = time.time() while not self._check_modules_list(): now = time.time() diff = now - prev delay = self.check_interval - diff self.engine_loop_utilization = diff / self.check_interval self.log.debug("Iteration took %.3f sec, sleeping for %.3f sec...", diff, delay) if delay > 0: time.sleep(delay) prev = time.time() if self.interrupted: raise ManualShutdown() self.config.dump() def _shutdown(self): """ Shutdown modules :return: """ self.log.info("Shutting down...") self.log.debug("Current stop reason: %s", self.stopping_reason) exc_info = None modules = [self.provisioning, self.aggregator] + self.reporters + self.services # order matters for module in modules: try: if module in self.started: module.shutdown() except BaseException as exc: self.log.debug("%s:\n%s", exc, traceback.format_exc()) if not exc_info: exc_info = sys.exc_info() self.config.dump() if exc_info: reraise(exc_info) def post_process(self): """ Do post-run analysis and processing for the results. 
""" self.log.info("Post-processing...") # :type exception: BaseException exc_info = None modules = [self.provisioning, self.aggregator] + self.reporters + self.services # order matters # services are last because of shellexec which is "final-final" action for module in modules: if module in self.prepared: try: module.post_process() except BaseException as exc: if isinstance(exc, KeyboardInterrupt): self.log.debug("post_process: %s", exc) else: self.log.debug("post_process: %s\n%s", exc, traceback.format_exc()) if not self.stopping_reason: self.stopping_reason = exc if not exc_info: exc_info = sys.exc_info() self.config.dump() if exc_info: reraise(exc_info) def create_artifact(self, prefix, suffix): """ Create new artifact in artifacts dir with given prefix and suffix :type prefix: str :type suffix: str :return: Path to created file :rtype: str :raise TaurusInternalException: if no artifacts dir set """ if not self.artifacts_dir: raise TaurusInternalException("Cannot create artifact: no artifacts_dir set up") filename = get_uniq_name(self.artifacts_dir, prefix, suffix, self.__artifacts) self.__artifacts.append(filename) self.log.debug("New artifact filename: %s", filename) return filename def existing_artifact(self, filename, move=False, target_filename=None): """ Add existing artifact, it will be collected into artifact_dir. If move=True, the original file will be deleted :type filename: str :type move: bool :type target_filename: str """ self.log.debug("Add existing artifact (move=%s): %s", move, filename) if self.artifacts_dir is None: self.log.warning("Artifacts dir has not been set, will not copy %s", filename) return new_filename = os.path.basename(filename) if target_filename is None else target_filename new_name = os.path.join(self.artifacts_dir, new_filename) self.__artifacts.append(new_name) if get_full_path(filename) == get_full_path(new_name): self.log.debug("No need to copy %s", filename) return if not os.path.exists(filename): self.log.warning("Artifact file not exists: %s", filename) return if move: self.log.debug("Moving %s to %s", filename, new_name) shutil.move(filename, new_name) else: self.log.debug("Copying %s to %s", filename, new_name) shutil.copy(filename, new_name) def create_artifacts_dir(self, existing_artifacts=(), merged_config=None): """ Create directory for artifacts, directory name based on datetime.now() """ if not self.artifacts_dir: artifacts_dir = self.config.get(SETTINGS, force_set=True).get("artifacts-dir", self.ARTIFACTS_DIR) self.artifacts_dir = datetime.datetime.now().strftime(artifacts_dir) self.artifacts_dir = get_full_path(self.artifacts_dir) self.log.info("Artifacts dir: %s", self.artifacts_dir) self.env.set({TAURUS_ARTIFACTS_DIR: self.artifacts_dir}) os.environ[TAURUS_ARTIFACTS_DIR] = self.artifacts_dir if not os.path.isdir(self.artifacts_dir): os.makedirs(self.artifacts_dir) # dump current effective configuration dump = self.create_artifact("effective", "") # TODO: not good since this file not exists self.config.set_dump_file(dump) self.config.dump() # dump merged configuration if merged_config: merged_config.dump(self.create_artifact("merged", ".yml"), Configuration.YAML) merged_config.dump(self.create_artifact("merged", ".json"), Configuration.JSON) for artifact in existing_artifacts: self.existing_artifact(artifact) def is_functional_mode(self): return self.aggregator is not None and self.aggregator.is_functional def __load_module(self, alias): """ Load module class by alias :param alias: str :return: class """ if alias in self.modules: 
return self.modules[alias] mod_conf = self.config.get('modules') if alias not in mod_conf: msg = "Module '%s' not found in list of available aliases %s" % (alias, sorted(mod_conf.keys())) raise TaurusConfigError(msg) settings = ensure_is_dict(mod_conf, alias, "class") acopy = copy.deepcopy(settings) BetterDict.traverse(acopy, Configuration.masq_sensitive) self.log.debug("Module config: %s %s", alias, acopy) err = TaurusConfigError("Class name for alias '%s' is not found in module settings: %s" % (alias, settings)) clsname = settings.get('class', err) self.modules[alias] = load_class(clsname) if not issubclass(self.modules[alias], EngineModule): raise TaurusInternalException("Module class does not inherit from EngineModule: %s" % clsname) return self.modules[alias] def instantiate_module(self, alias): """ Create new instance for module using its alias from module settings section of config. Thus, to instantiate module it should be mentioned in settings. :type alias: str :rtype: EngineModule """ classobj = self.__load_module(alias) instance = classobj() assert isinstance(instance, EngineModule) instance.log = self.log.getChild(alias) instance.engine = self settings = self.config.get("modules") instance.settings = settings.get(alias) return instance def find_file(self, filename): """ Try to find file or dir in search_path if it was specified. Helps finding files in non-CLI environments or relative to config path :param filename: file basename to find :type filename: str """ if not filename: return filename filename = os.path.expanduser(filename) if os.path.exists(filename): return filename elif filename.lower().startswith("http://") or filename.lower().startswith("https://"): parsed_url = parse.urlparse(filename) downloader = ExceptionalDownloader() self.log.info("Downloading %s", filename) tmp_f_name, http_msg = downloader.get(filename) cd_header = http_msg.get('Content-Disposition', '') dest = cd_header.split('filename=')[-1] if cd_header and 'filename=' in cd_header else '' if not dest: dest = os.path.basename(parsed_url.path) fname, ext = os.path.splitext(dest) if dest else (parsed_url.hostname.replace(".", "_"), '.file') dest = self.create_artifact(fname, ext) self.log.debug("Moving %s to %s", tmp_f_name, dest) shutil.move(tmp_f_name, dest) return dest elif self.file_search_paths: for dirname in self.file_search_paths: location = os.path.join(dirname, filename) if os.path.exists(location): self.log.warning("Guessed location from search paths for %s: %s", filename, location) return location self.log.warning("Could not find location at path: %s", filename) return filename def _load_base_configs(self): base_configs = [os.path.join(get_full_path(__file__, step_up=1), 'resources', 'base-config.yml')] machine_dir = get_configs_dir() # can't refactor machine_dir out - see setup.py if os.path.isdir(machine_dir): self.log.debug("Reading extension configs from: %s", machine_dir) for cfile in sorted(os.listdir(machine_dir)): fname = os.path.join(machine_dir, cfile) if os.path.isfile(fname): base_configs.append(fname) else: self.log.debug("No machine configs dir: %s", machine_dir) self.log.debug("Base configs list: %s", base_configs) self.config.load(base_configs) def _load_user_configs(self, user_configs): """ :type user_configs: list[str] :rtype: Configuration """ # "tab-replacement-spaces" is not documented 'cause it loads only from base configs # so it's sort of half-working last resort self.config.tab_replacement_spaces = self.config.get(SETTINGS).get("tab-replacement-spaces", 4) 
self.log.debug("User configs list: %s", user_configs) self.config.load(user_configs) user_config = Configuration() user_config.log = self.log.getChild(Configuration.__name__) user_config.tab_replacement_spaces = self.config.tab_replacement_spaces user_config.warn_on_tab_replacement = False user_config.load(user_configs, self.__config_loaded) return user_config def __config_loaded(self, config): self.file_search_paths.append(get_full_path(config, step_up=1)) def __prepare_provisioning(self): """ Instantiate provisioning class """ err = TaurusConfigError("Please check global config availability or configure provisioning settings") cls = self.config.get(Provisioning.PROV, err) self.provisioning = self.instantiate_module(cls) self.prepared.append(self.provisioning) self.provisioning.prepare() def __prepare_reporters(self): """ Instantiate reporters, then prepare them in case they would like to interact """ reporting = self.config.get(Reporter.REP, []) for index, reporter in enumerate(reporting): reporter = ensure_is_dict(reporting, index, "module") msg = "reporter 'module' field isn't recognized: %s" cls = reporter.get('module', TaurusConfigError(msg % reporter)) instance = self.instantiate_module(cls) instance.parameters = reporter if self.__singletone_exists(instance, self.reporters): continue assert isinstance(instance, Reporter) self.reporters.append(instance) # prepare reporters for module in self.reporters: self.prepared.append(module) module.prepare() def __prepare_services(self): """ Instantiate service modules, then prepare them """ srv_config = self.config.get(Service.SERV, []) services = [] for index, config in enumerate(srv_config): config = ensure_is_dict(srv_config, index, "module") cls = config.get('module', '') instance = self.instantiate_module(cls) instance.parameters = config if self.__singletone_exists(instance, services): continue assert isinstance(instance, Service) services.append(instance) for service in services[:]: if not service.should_run(): services.remove(service) self.services.extend(services) for module in self.services: self.prepared.append(module) module.prepare() def __singletone_exists(self, instance, mods_list): """ :type instance: EngineModule :type mods_list: list[EngineModule] :rtype: bool """ if not isinstance(instance, Singletone): return False for mod in mods_list: if mod.parameters.get("module") == instance.parameters.get("module"): msg = "Module '%s' can be only used once, will merge all new instances into single" self.log.warning(msg % mod.parameters.get("module")) mod.parameters.merge(instance.parameters) return True def __prepare_aggregator(self): """ Instantiate aggregators :return: """ cls = self.config.get(SETTINGS).get("aggregator", "") if not cls: self.log.warning("Proceeding without aggregator, no results analysis") else: self.aggregator = self.instantiate_module(cls) self.prepared.append(self.aggregator) self.aggregator.prepare() def _set_up_proxy(self): proxy_settings = self.config.get("settings").get("proxy") if proxy_settings and proxy_settings.get("address"): proxy_url = parse.urlsplit(proxy_settings.get("address")) self.log.debug("Using proxy settings: %s", proxy_url) username = proxy_settings.get("username") pwd = proxy_settings.get("password") scheme = proxy_url.scheme if proxy_url.scheme else 'http' if username and pwd: proxy_uri = "%s://%s:%s@%s" % (scheme, username, pwd, proxy_url.netloc) else: proxy_uri = "%s://%s" % (scheme, proxy_url.netloc) proxy_handler = ProxyHandler({"https": proxy_uri, "http": proxy_uri}) opener = 
build_opener(proxy_handler) install_opener(opener) def _check_updates(self, install_id): try: params = (bzt.VERSION, install_id) req = "http://gettaurus.org/updates/?version=%s&installID=%s" % params self.log.debug("Requesting updates info: %s", req) response = urlopen(req, timeout=10) resp = response.read() if not isinstance(resp, str): resp = resp.decode() self.log.debug("Taurus updates info: %s", resp) data = json.loads(resp) mine = LooseVersion(bzt.VERSION) latest = LooseVersion(data['latest']) if mine < latest or data['needsUpgrade']: msg = "There is newer version of Taurus %s available, consider upgrading. " \ "What's new: http://gettaurus.org/docs/Changelog/" self.log.warning(msg, latest) else: self.log.debug("Installation is up-to-date") except BaseException: self.log.debug("Failed to check for updates: %s", traceback.format_exc()) self.log.warning("Failed to check for updates") def eval_env(self): """ Should be done after `configure` """ envs = self.config.get(SETTINGS, force_set=True).get("env", force_set=True) envs[TAURUS_ARTIFACTS_DIR] = self.artifacts_dir for varname in envs: if envs[varname]: envs[varname] = os.path.expandvars(envs[varname]) for varname in envs: self.env.set({varname: envs[varname]}) if envs[varname] is None: if varname in os.environ: os.environ.pop(varname) else: os.environ[varname] = str(envs[varname]) def custom_expandvars(value): parts = re.split(r'(\$\{.*?\})', value) value = '' for item in parts: if item and item.startswith("${") and item.endswith("}"): key = item[2:-1] if key in envs: item = envs[key] if item is not None: value += text_type(item) return value def apply_env(value, key, container): if isinstance(value, string_types): container[key] = custom_expandvars(value) BetterDict.traverse(self.config, apply_env)
class ShellExecutor(Service):
    """
    :type env: Environment
    """

    def __init__(self):
        super(ShellExecutor, self).__init__()
        self.prepare_tasks = []
        self.startup_tasks = []
        self.check_tasks = []
        self.shutdown_tasks = []
        self.postprocess_tasks = []
        self.env = Environment(self.log)

    def _load_tasks(self, stage, container):
        if not isinstance(self.parameters.get(stage, []), list):
            self.parameters[stage] = [self.parameters[stage]]

        for index, stage_task in enumerate(self.parameters.get(stage, [])):
            stage_task = ensure_is_dict(self.parameters[stage], index, "command")
            task_config = self.parameters[stage][index]

            default_cwd = self.settings.get("default-cwd", None)
            cwd = self.engine.find_file(task_config.get("cwd", default_cwd))
            if cwd is None:
                working_dir = self.engine.default_cwd
            elif cwd == 'artifacts-dir':
                working_dir = self.engine.artifacts_dir
            else:
                working_dir = cwd

            # make copy of env for every task
            env = Environment(self.log, self.env)
            env.set(task_config.get('env'))
            env.add_path({"PYTHONPATH": working_dir})

            task = Task(task_config, self.log, working_dir, env)

            f_name = 'shellexec_%s_%s' % (stage, index)
            if task.out:
                task.out = open(task.out, 'at')
            else:
                if task.is_background:
                    out = self.engine.create_artifact(f_name, '.out')
                    self.log.debug('STDOUT of background task "%s" redirected to "%s"' % (task, out))
                    task.out = open(out, 'at')
                else:
                    task.out = PIPE

            if task.err:
                task.err = open(task.err, 'at')
            else:
                if task.is_background:
                    err = self.engine.create_artifact(f_name, '.err')
                    self.log.debug('STDERR of background task "%s" redirected to "%s"' % (task, err))
                    task.err = open(err, 'at')
                else:
                    task.err = PIPE

            container.append(task)
            self.log.debug("Added %s task: %s", stage, stage_task)

    def prepare(self):
        """
        Configure Tasks
        :return:
        """
        self.env.set(self.settings.get('env'))

        self._load_tasks('prepare', self.prepare_tasks)
        self._load_tasks('startup', self.startup_tasks)
        self._load_tasks('check', self.check_tasks)
        self._load_tasks('shutdown', self.shutdown_tasks)
        self._load_tasks('post-process', self.postprocess_tasks)

        for task in self.prepare_tasks:
            task.start()

    def startup(self):
        for task in self.startup_tasks:
            task.start()

    def check(self):
        for task in self.check_tasks:
            task.start()

        for task in self.prepare_tasks + self.startup_tasks + self.check_tasks:
            task.check()

        return super(ShellExecutor, self).check()

    def shutdown(self):
        for task in self.shutdown_tasks:
            task.start()

        for task in self.check_tasks + self.startup_tasks:
            task.shutdown()

    def post_process(self):
        for task in self.shutdown_tasks + self.check_tasks + self.startup_tasks + self.prepare_tasks:
            task.shutdown()

        for task in self.postprocess_tasks:
            task.start()
            task.shutdown()
class Engine(object): """ Core entity of the technology, used to coordinate whole process :type reporters: list[Reporter] :type services: list[Service] :type log: logging.Logger :type aggregator: bzt.modules.aggregator.ConsolidatingAggregator :type stopping_reason: BaseException """ ARTIFACTS_DIR = "%Y-%m-%d_%H-%M-%S.%f" def __init__(self, parent_logger): """ :type parent_logger: logging.Logger """ self.file_search_paths = [] self.services = [] self.__artifacts = [] self.reporters = [] self.artifacts_dir = None self.log = parent_logger.getChild(self.__class__.__name__) self.env = Environment(self.log, dict(os.environ)) self.shared_env = Environment(self.log) self.config = Configuration() self.config.log = self.log.getChild(Configuration.__name__) self.modules = {} # available modules self.provisioning = Provisioning() self.aggregator = Aggregator(is_functional=False) self.interrupted = False self.check_interval = 1 self.stopping_reason = None self.engine_loop_utilization = 0 self.prepared = [] self.started = [] self.default_cwd = None self.logging_level_down = lambda: None self.logging_level_up = lambda: None self._http_client = None def configure(self, user_configs, read_config_files=True): """ Load configuration files :type user_configs: list[str] :type read_config_files: bool """ self.log.info("Configuring...") if read_config_files: self._load_base_configs() merged_config = self._load_user_configs(user_configs) all_includes = [] while "included-configs" in self.config: includes = self.config.pop("included-configs") included_configs = [self.find_file(conf) for conf in includes if conf not in all_includes + user_configs] all_includes += includes self.config.load(included_configs) self.config['included-configs'] = all_includes self.config.merge({"version": bzt.VERSION}) self.get_http_client() if self.config.get(SETTINGS).get("check-updates", True): install_id = self.config.get("install-id", self._generate_id()) def wrapper(): return self._check_updates(install_id) thread = threading.Thread(target=wrapper) # intentionally non-daemon thread thread.start() return merged_config def unify_config(self): executions = self.config.get(ScenarioExecutor.EXEC, []) if isinstance(executions, dict): executions = [executions] self.config[ScenarioExecutor.EXEC] = executions settings = self.config.get(SETTINGS) default_executor = settings.get("default-executor", None) for execution in executions: executor = execution.get("executor", default_executor, force_set=True) if not executor: msg = "Cannot determine executor type and no default executor in %s" raise TaurusConfigError(msg % execution) reporting = self.config.get(Reporter.REP, []) for index in range(len(reporting)): ensure_is_dict(reporting, index, "module") services = self.config.get(Service.SERV, []) for index in range(len(services)): ensure_is_dict(services, index, "module") modules = self.config.get("modules") for module in modules: ensure_is_dict(modules, module, "class") @staticmethod def _generate_id(): if os.getenv("JENKINS_HOME"): prefix = "jenkins" elif os.getenv("TRAVIS"): prefix = "travis" elif any([key.startswith("bamboo") for key in os.environ.keys()]): prefix = "bamboo" elif os.getenv("TEAMCITY_VERSION"): prefix = "teamcity" elif os.getenv("DOCKER_HOST"): prefix = "docker" elif os.getenv("AWS_"): prefix = "amazon" elif os.getenv("GOOGLE_APPLICATION_CREDENTIALS") or os.getenv("CLOUDSDK_CONFIG"): prefix = "google_cloud" elif os.getenv("WEBJOBS_NAME"): prefix = "azure" elif is_linux(): prefix = 'linux' elif is_windows(): prefix = 'windows' 
else: prefix = 'macos' return "%s-%x" % (prefix, uuid.getnode()) def prepare(self): """ Prepare engine for work, will call preparing of Provisioning and add downstream EngineModule instances """ self.log.info("Preparing...") self.unify_config() interval = self.config.get(SETTINGS).get("check-interval", self.check_interval) self.check_interval = dehumanize_time(interval) try: self.__prepare_aggregator() self.__prepare_services() self.__prepare_provisioning() self.__prepare_reporters() self.config.dump() except BaseException as exc: self.stopping_reason = exc raise def _startup(self): modules = self.services + [self.aggregator] + self.reporters + [self.provisioning] # order matters for module in modules: self.log.debug("Startup %s", module) self.started.append(module) module.startup() self.config.dump() def start_subprocess(self, args, cwd, stdout, stderr, stdin, shell, env): if cwd is None: cwd = self.default_cwd env = Environment(self.log, env.get()) env.set(self.shared_env.get()) return shell_exec(args, cwd=cwd, stdout=stdout, stderr=stderr, stdin=stdin, shell=shell, env=env.get()) def run(self): """ Run the job. Calls `startup`, does periodic `check`, calls `shutdown` in any case """ self.log.info("Starting...") exc_info = exc_value = None try: self._startup() self.logging_level_down() self._wait() except BaseException as exc: self.log.debug("%s:\n%s", exc, traceback.format_exc()) self.stopping_reason = exc exc_info = sys.exc_info() finally: self.log.warning("Please wait for graceful shutdown...") try: self.logging_level_up() self._shutdown() except BaseException as exc: self.log.debug("%s:\n%s", exc, traceback.format_exc()) if not self.stopping_reason: self.stopping_reason = exc if not exc_info: exc_info = sys.exc_info() if not exc_value: exc_value = exc if exc_info: reraise(exc_info, exc_value) def _check_modules_list(self): stop = False modules = [self.provisioning, self.aggregator] + self.services + self.reporters # order matters for module in modules: if module in self.started: self.log.debug("Checking %s", module) finished = bool(module.check()) if finished: self.log.debug("%s finished", module) stop = finished return stop def _wait(self): """ Wait modules for finish :return: """ prev = time.time() while not self._check_modules_list(): now = time.time() diff = now - prev delay = self.check_interval - diff self.engine_loop_utilization = diff / self.check_interval self.log.debug("Iteration took %.3f sec, sleeping for %.3f sec...", diff, delay) if delay > 0: time.sleep(delay) prev = time.time() if self.interrupted: raise ManualShutdown() self.config.dump() def _shutdown(self): """ Shutdown modules :return: """ self.log.info("Shutting down...") self.log.debug("Current stop reason: %s", self.stopping_reason) exc_info = exc_value = None modules = [self.provisioning, self.aggregator] + self.reporters + self.services # order matters for module in modules: try: if module in self.started: module.shutdown() except BaseException as exc: self.log.debug("%s:\n%s", exc, traceback.format_exc()) if not exc_info: exc_info = sys.exc_info() if not exc_value: exc_value = exc self.config.dump() if exc_info: reraise(exc_info, exc_value) def post_process(self): """ Do post-run analysis and processing for the results. 
""" self.log.info("Post-processing...") # :type exception: BaseException exc_info = exc_value = None modules = [self.provisioning, self.aggregator] + self.reporters + self.services # order matters # services are last because of shellexec which is "final-final" action for module in modules: if module in self.prepared: try: module.post_process() except BaseException as exc: if isinstance(exc, KeyboardInterrupt): self.log.debug("post_process: %s", exc) else: self.log.debug("post_process: %s\n%s", exc, traceback.format_exc()) if not self.stopping_reason: self.stopping_reason = exc if not exc_info: exc_info = sys.exc_info() if not exc_value: exc_value = exc self.config.dump() if exc_info: reraise(exc_info, exc_value) def create_artifact(self, prefix, suffix): """ Create new artifact in artifacts dir with given prefix and suffix :type prefix: str :type suffix: str :return: Path to created file :rtype: str :raise TaurusInternalException: if no artifacts dir set """ if not self.artifacts_dir: raise TaurusInternalException("Cannot create artifact: no artifacts_dir set up") filename = get_uniq_name(self.artifacts_dir, prefix, suffix, self.__artifacts) self.__artifacts.append(filename) self.log.debug("New artifact filename: %s", filename) return filename def existing_artifact(self, filename, move=False, target_filename=None): """ Add existing artifact, it will be collected into artifact_dir. If move=True, the original file will be deleted :type filename: str :type move: bool :type target_filename: str """ self.log.debug("Add existing artifact (move=%s): %s", move, filename) if self.artifacts_dir is None: self.log.warning("Artifacts dir has not been set, will not copy %s", filename) return new_filename = os.path.basename(filename) if target_filename is None else target_filename new_name = os.path.join(self.artifacts_dir, new_filename) self.__artifacts.append(new_name) if get_full_path(filename) == get_full_path(new_name): self.log.debug("No need to copy %s", filename) return if not os.path.exists(filename): self.log.warning("Artifact file not exists: %s", filename) return if move: self.log.debug("Moving %s to %s", filename, new_name) shutil.move(filename, new_name) else: self.log.debug("Copying %s to %s", filename, new_name) shutil.copy(filename, new_name) def create_artifacts_dir(self, existing_artifacts=(), merged_config=None): """ Create directory for artifacts, directory name based on datetime.now() """ if not self.artifacts_dir: artifacts_dir = self.config.get(SETTINGS, force_set=True).get("artifacts-dir", self.ARTIFACTS_DIR) self.artifacts_dir = datetime.datetime.now().strftime(artifacts_dir) self.artifacts_dir = get_full_path(self.artifacts_dir) self.log.info("Artifacts dir: %s", self.artifacts_dir) self.env.set({TAURUS_ARTIFACTS_DIR: self.artifacts_dir}) if not os.path.isdir(self.artifacts_dir): os.makedirs(self.artifacts_dir) # dump current effective configuration dump = self.create_artifact("effective", "") # TODO: not good since this file not exists self.config.set_dump_file(dump) self.config.dump() # dump merged configuration if merged_config: merged_config.dump(self.create_artifact("merged", ".yml"), Configuration.YAML) merged_config.dump(self.create_artifact("merged", ".json"), Configuration.JSON) for artifact in existing_artifacts: self.existing_artifact(artifact) def is_functional_mode(self): return self.aggregator is not None and self.aggregator.is_functional def __load_module(self, alias): """ Load module class by alias :param alias: str :return: class """ if alias in self.modules: 
return self.modules[alias] mod_conf = self.config.get('modules') if alias not in mod_conf: msg = "Module '%s' not found in list of available aliases %s" % (alias, sorted(mod_conf.keys())) raise TaurusConfigError(msg) settings = ensure_is_dict(mod_conf, alias, "class") acopy = copy.deepcopy(settings) BetterDict.traverse(acopy, Configuration.masq_sensitive) self.log.debug("Module config: %s %s", alias, acopy) err = TaurusConfigError("Class name for alias '%s' is not found in module settings: %s" % (alias, settings)) clsname = settings.get('class', err) self.modules[alias] = load_class(clsname) if not issubclass(self.modules[alias], EngineModule): raise TaurusInternalException("Module class does not inherit from EngineModule: %s" % clsname) return self.modules[alias] def instantiate_module(self, alias): """ Create new instance for module using its alias from module settings section of config. Thus, to instantiate module it should be mentioned in settings. :type alias: str :rtype: EngineModule """ classobj = self.__load_module(alias) instance = classobj() assert isinstance(instance, EngineModule) instance.log = self.log.getChild(alias) instance.engine = self settings = self.config.get("modules") instance.settings = settings.get(alias) return instance def find_file(self, filename): """ Try to find file or dir in search_path if it was specified. Helps finding files in non-CLI environments or relative to config path Return path is full and mustn't treat with abspath/etc. :param filename: file basename to find :type filename: str """ if not filename: return filename if filename.lower().startswith("http://") or filename.lower().startswith("https://"): parsed_url = parse.urlparse(filename) downloader = ExceptionalDownloader(self.get_http_client()) self.log.info("Downloading %s", filename) tmp_f_name, headers = downloader.get(filename) cd_header = headers.get('Content-Disposition', '') dest = cd_header.split('filename=')[-1] if cd_header and 'filename=' in cd_header else '' if dest.startswith('"') and dest.endswith('"') or dest.startswith("'") and dest.endswith("'"): dest = dest[1:-1] elif not dest: dest = os.path.basename(parsed_url.path) fname, ext = os.path.splitext(dest) if dest else (parsed_url.hostname.replace(".", "_"), '.file') dest = self.create_artifact(fname, ext) self.log.debug("Moving %s to %s", tmp_f_name, dest) shutil.move(tmp_f_name, dest) return dest else: filename = os.path.expanduser(filename) # expanding of '~' is required for check of existence # check filename 'as is' and all combinations of file_search_path/filename for dirname in [""] + self.file_search_paths: location = os.path.join(dirname, filename) if os.path.exists(location): if dirname: self.log.warning("Guessed location from search paths for %s: %s", filename, location) return get_full_path(location) self.log.warning("Could not find location at path: %s", filename) return filename def _load_base_configs(self): base_configs = [os.path.join(RESOURCES_DIR, 'base-config.yml')] machine_dir = get_configs_dir() # can't refactor machine_dir out - see setup.py if os.path.isdir(machine_dir): self.log.debug("Reading extension configs from: %s", machine_dir) for cfile in sorted(os.listdir(machine_dir)): fname = os.path.join(machine_dir, cfile) if os.path.isfile(fname): base_configs.append(fname) else: self.log.debug("No machine configs dir: %s", machine_dir) self.log.debug("Base configs list: %s", base_configs) self.config.load(base_configs) def _load_user_configs(self, user_configs): """ :type user_configs: list[str] :rtype: 
Configuration """ # "tab-replacement-spaces" is not documented 'cause it loads only from base configs # so it's sort of half-working last resort self.config.tab_replacement_spaces = self.config.get(SETTINGS).get("tab-replacement-spaces", 4) self.log.debug("User configs list: %s", user_configs) self.config.load(user_configs) user_config = Configuration() user_config.log = self.log.getChild(Configuration.__name__) user_config.tab_replacement_spaces = self.config.tab_replacement_spaces user_config.warn_on_tab_replacement = False user_config.load(user_configs, self.__config_loaded) return user_config def __config_loaded(self, config): self.file_search_paths.append(get_full_path(config, step_up=1)) def __prepare_provisioning(self): """ Instantiate provisioning class """ err = TaurusConfigError("Please check global config availability or configure provisioning settings") cls = self.config.get(Provisioning.PROV, err) self.provisioning = self.instantiate_module(cls) self.prepared.append(self.provisioning) self.provisioning.prepare() def __prepare_reporters(self): """ Instantiate reporters, then prepare them in case they would like to interact """ reporting = self.config.get(Reporter.REP, []) for index, reporter in enumerate(reporting): msg = "reporter 'module' field isn't recognized: %s" cls = reporter.get('module', TaurusConfigError(msg % reporter)) instance = self.instantiate_module(cls) instance.parameters = reporter if self.__singletone_exists(instance, self.reporters): continue assert isinstance(instance, Reporter) self.reporters.append(instance) for reporter in self.reporters[:]: if not reporter.should_run(): self.reporters.remove(reporter) # prepare reporters for module in self.reporters: self.prepared.append(module) module.prepare() def __prepare_services(self): """ Instantiate service modules, then prepare them """ srv_config = self.config.get(Service.SERV, []) services = [] for index, config in enumerate(srv_config): cls = config.get('module', '') instance = self.instantiate_module(cls) instance.parameters = config if self.__singletone_exists(instance, services): continue assert isinstance(instance, Service) services.append(instance) for service in services[:]: if not service.should_run(): services.remove(service) self.services.extend(services) for module in self.services: self.prepared.append(module) module.prepare() def __singletone_exists(self, instance, mods_list): """ :type instance: EngineModule :type mods_list: list[EngineModule] :rtype: bool """ if not isinstance(instance, Singletone): return False for mod in mods_list: if mod.parameters.get("module") == instance.parameters.get("module"): msg = "Module '%s' can be only used once, will merge all new instances into single" self.log.warning(msg % mod.parameters.get("module")) mod.parameters.merge(instance.parameters) return True def __prepare_aggregator(self): """ Instantiate aggregators :return: """ cls = self.config.get(SETTINGS).get("aggregator", "") if not cls: self.log.warning("Proceeding without aggregator, no results analysis") else: self.aggregator = self.instantiate_module(cls) self.prepared.append(self.aggregator) self.aggregator.prepare() def get_http_client(self): if self._http_client is None: self._http_client = HTTPClient() self._http_client.add_proxy_settings(self.config.get("settings").get("proxy")) return self._http_client def _check_updates(self, install_id): try: params = (bzt.VERSION, install_id) addr = "http://gettaurus.org/updates/?version=%s&installID=%s" % params self.log.debug("Requesting updates info: %s", 
addr) client = self.get_http_client() response = client.request('GET', addr, timeout=10) data = response.json() self.log.debug("Taurus updates info: %s", data) mine = LooseVersion(bzt.VERSION) latest = LooseVersion(data['latest']) if mine < latest or data['needsUpgrade']: msg = "There is newer version of Taurus %s available, consider upgrading. " \ "What's new: http://gettaurus.org/docs/Changelog/" self.log.warning(msg, latest) else: self.log.debug("Installation is up-to-date") except BaseException: self.log.debug("Failed to check for updates: %s", traceback.format_exc()) self.log.warning("Failed to check for updates") def eval_env(self): """ Should be done after `configure` """ envs = self.config.get(SETTINGS, force_set=True).get("env", force_set=True) envs[TAURUS_ARTIFACTS_DIR] = self.artifacts_dir for varname in envs: if envs[varname]: envs[varname] = str(envs[varname]) envs[varname] = os.path.expandvars(envs[varname]) for varname in envs: self.env.set({varname: envs[varname]}) if envs[varname] is None: if varname in os.environ: os.environ.pop(varname) else: os.environ[varname] = str(envs[varname]) def custom_expandvars(value): parts = re.split(r'(\$\{.*?\})', value) value = '' for item in parts: if item and item.startswith("${") and item.endswith("}"): key = item[2:-1] if key in envs: item = envs[key] if item is not None: value += text_type(item) return value def apply_env(value, key, container): if isinstance(value, string_types): container[key] = custom_expandvars(value) BetterDict.traverse(self.config, apply_env)
class ShellExecutor(Service):
    """
    :type env: Environment
    :type prepare_tasks: list[Task]
    :type startup_tasks: list[Task]
    :type check_tasks: list[Task]
    :type shutdown_tasks: list[Task]
    :type postprocess_tasks: list[Task]
    """

    def __init__(self):
        super(ShellExecutor, self).__init__()
        self.prepare_tasks = []
        self.startup_tasks = []
        self.check_tasks = []
        self.shutdown_tasks = []
        self.postprocess_tasks = []
        self.env = Environment(self.log)

    def _load_tasks(self, stage, container):
        if not isinstance(self.parameters.get(stage, []), list):
            self.parameters[stage] = [self.parameters[stage]]

        for index, stage_task in enumerate(self.parameters.get(stage, [])):
            stage_task = ensure_is_dict(self.parameters[stage], index, "command")
            task_config = self.parameters[stage][index]

            default_cwd = self.settings.get("default-cwd", None)
            cwd = self.engine.find_file(task_config.get("cwd", default_cwd))
            if cwd is None:
                working_dir = self.engine.default_cwd
            elif cwd == 'artifacts-dir':
                working_dir = self.engine.artifacts_dir
            else:
                working_dir = cwd

            # make copy of env for every task
            env = Environment(self.log, self.env)
            env.set(task_config.get('env'))
            env.add_path({"PYTHONPATH": working_dir})

            task = Task(task_config, self.log, working_dir, env)

            f_name = 'shellexec_%s_%s' % (stage, index)
            if task.out:
                task.out = open(task.out, 'at')
            else:
                if task.is_background:
                    out = self.engine.create_artifact(f_name, '.out')
                    self.log.debug('STDOUT of background task "%s" redirected to "%s"' % (task, out))
                    task.out = open(out, 'at')
                else:
                    task.out = PIPE

            if task.err:
                task.err = open(task.err, 'at')
            else:
                if task.is_background:
                    err = self.engine.create_artifact(f_name, '.err')
                    self.log.debug('STDERR of background task "%s" redirected to "%s"' % (task, err))
                    task.err = open(err, 'at')
                else:
                    task.err = PIPE

            container.append(task)
            self.log.debug("Added %s task: %s", stage, stage_task)

    def _filter_prov_context(self):
        def filter_conf(conf):
            if not any(key in conf for key in prov_configs.values()):  # if no specific provisioning is configured
                conf = BetterDict.from_dict({prov_configs['local']: conf})  # all configuration is local
            return conf.get(current_prov)

        current_prov = self.engine.config.get("provisioning")
        modules = self.engine.config.get("modules")

        prov_classes = {'local': Local, 'cloud': CloudProvisioning}
        prov_configs = {}
        for prov in prov_classes:
            class_name = prov_classes[prov].__module__ + '.' + prov_classes[prov].__name__
            for module in modules:
                if modules[module] == class_name or (
                        isinstance(modules[module], dict) and modules[module].get("class") == class_name):
                    prov_configs[prov] = module
                    break

        self.settings = filter_conf(self.settings)
        self.parameters = filter_conf(self.parameters)

    def prepare(self):
        """
        Configure Tasks
        :return:
        """
        self._filter_prov_context()

        self.env.set(self.settings.get('env'))

        self._load_tasks('prepare', self.prepare_tasks)
        self._load_tasks('startup', self.startup_tasks)
        self._load_tasks('check', self.check_tasks)
        self._load_tasks('shutdown', self.shutdown_tasks)
        self._load_tasks('post-process', self.postprocess_tasks)

        for task in self.prepare_tasks:
            task.start()

    def startup(self):
        for task in self.startup_tasks:
            task.start()

    def check(self):
        for task in self.check_tasks:
            task.start()

        for task in self.prepare_tasks + self.startup_tasks + self.check_tasks:
            task.check()

        return super(ShellExecutor, self).check()

    def shutdown(self):
        for task in self.shutdown_tasks:
            self._set_stop_reason_vars(task)
            task.start()

        for task in self.check_tasks + self.startup_tasks:
            task.shutdown()

    def post_process(self):
        for task in self.shutdown_tasks + self.check_tasks + self.startup_tasks + self.prepare_tasks:
            task.shutdown()

        for task in self.postprocess_tasks:
            self._set_stop_reason_vars(task)
            task.start()
            task.shutdown()

    def _set_stop_reason_vars(self, task):
        if isinstance(self.engine.stopping_reason, RCProvider):
            rc = self.engine.stopping_reason.get_rc()
            task.env.set({"TAURUS_STOPPING_REASON": str(self.engine.stopping_reason)})
        elif self.engine.stopping_reason:
            rc = 1
        else:
            rc = 0

        task.env.set({
            "TAURUS_EXIT_CODE": rc,
            "TAURUS_STOPPING_REASON": str(self.engine.stopping_reason),
        })
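# Illustrative consumer (a hypothetical hook script, not from the source) of the
# variables exported by _set_stop_reason_vars above for shutdown/post-process tasks.
import os

exit_code = int(os.environ.get("TAURUS_EXIT_CODE", "0"))
reason = os.environ.get("TAURUS_STOPPING_REASON", "")
if exit_code != 0:
    print("Taurus run stopped abnormally (%s): %s" % (exit_code, reason))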
class ScenarioExecutor(EngineModule): """ :type provisioning: engine.Provisioning :type execution: BetterDict """ RAMP_UP = "ramp-up" HOLD_FOR = "hold-for" CONCURR = "concurrency" THRPT = "throughput" EXEC = "execution" STEPS = "steps" LOAD_FMT = namedtuple("LoadSpec", "concurrency throughput ramp_up hold iterations duration steps") def __init__(self): super(ScenarioExecutor, self).__init__() self.env = Environment(log=self.log) self.provisioning = None self.execution = BetterDict() # FIXME: why have this field if we have `parameters` from base class? self.__scenario = None self.label = None self.widget = None self.reader = None self.stdout = None self.stderr = None self.delay = None self.start_time = None self.preprocess_args = lambda x: None def _get_tool(self, tool, **kwargs): instance = tool(env=self.env, log=self.log, http_client=self.engine.get_http_client(), **kwargs) assert isinstance(instance, RequiredTool) return instance def has_results(self): if self.reader and self.reader.buffer: return True else: return False def get_script_path(self, required=False, scenario=None): """ :type required: bool :type scenario: Scenario """ if scenario is None: scenario = self.get_scenario() if required: exc = TaurusConfigError("You must provide script for %s" % self) script = scenario.get(Scenario.SCRIPT, exc) else: script = scenario.get(Scenario.SCRIPT) if script: script = self.engine.find_file(script) scenario[Scenario.SCRIPT] = script return script def get_scenario(self, name=None, cache_scenario=True): """ Returns scenario dict, extract if scenario is inlined :return: DictOfDicts """ if name is None and self.__scenario is not None: return self.__scenario scenarios = self.engine.config.get("scenarios", force_set=True) if name is None: # get current scenario exc = TaurusConfigError("Scenario is not found in execution: %s" % self.execution) label = self.execution.get('scenario', exc) is_script = isinstance(label, string_types) and label not in scenarios and \ os.path.exists(self.engine.find_file(label)) if isinstance(label, list): msg = "Invalid content of scenario, list type instead of dict or string: %s" raise TaurusConfigError(msg % label) if isinstance(label, dict) or is_script: self.log.debug("Extract %s into scenarios" % label) if isinstance(label, string_types): scenario = BetterDict.from_dict({Scenario.SCRIPT: label}) else: scenario = label path = self.get_script_path(scenario=Scenario(self.engine, scenario)) if path: label = os.path.basename(path) if not path or label in scenarios: hash_str = str(hashlib.md5(to_json(scenario).encode()).hexdigest()) label = 'autogenerated_' + hash_str[-10:] scenarios[label] = scenario self.execution['scenario'] = label self.label = label else: # get scenario by name label = name exc = TaurusConfigError("Scenario '%s' not found in scenarios: %s" % (label, scenarios.keys())) scenario = scenarios.get(label, exc) scenario_obj = Scenario(self.engine, scenario) if name is None and cache_scenario: self.__scenario = scenario_obj return scenario_obj def get_raw_load(self): prov_type = self.engine.config.get(Provisioning.PROV) for param in (ScenarioExecutor.THRPT, ScenarioExecutor.CONCURR): ensure_is_dict(self.execution, param, prov_type) throughput = self.execution.get(ScenarioExecutor.THRPT).get(prov_type, None) concurrency = self.execution.get(ScenarioExecutor.CONCURR).get(prov_type, None) iterations = self.execution.get("iterations", None) steps = self.execution.get(ScenarioExecutor.STEPS, None) hold = self.execution.get(ScenarioExecutor.HOLD_FOR, None) 
ramp_up = self.execution.get(ScenarioExecutor.RAMP_UP, None) return self.LOAD_FMT(concurrency=concurrency, ramp_up=ramp_up, throughput=throughput, hold=hold, iterations=iterations, duration=None, steps=steps) def get_load(self): """ Helper method to read load specification """ def eval_int(value): try: return int(value) except (ValueError, TypeError): return value def eval_float(value): try: return int(value) except (ValueError, TypeError): return value raw_load = self.get_raw_load() iterations = eval_int(raw_load.iterations) ramp_up = raw_load.ramp_up throughput = eval_float(raw_load.throughput or 0) concurrency = eval_int(raw_load.concurrency or 0) steps = eval_int(raw_load.steps) hold = dehumanize_time(raw_load.hold or 0) if ramp_up is None: duration = hold else: ramp_up = dehumanize_time(raw_load.ramp_up) duration = hold + ramp_up if not iterations: if duration: iterations = 0 # infinite else: iterations = 1 msg = '' if not isinstance(concurrency, numeric_types + (type(None),)): msg += "Invalid concurrency value[%s]: %s " % (type(concurrency).__name__, concurrency) if not isinstance(throughput, numeric_types + (type(None),)): msg += "Invalid throughput value[%s]: %s " % (type(throughput).__name__, throughput) if not isinstance(steps, numeric_types + (type(None),)): msg += "Invalid throughput value[%s]: %s " % (type(steps).__name__, steps) if not isinstance(iterations, numeric_types + (type(None),)): msg += "Invalid throughput value[%s]: %s " % (type(iterations).__name__, iterations) if msg: raise TaurusConfigError(msg) return self.LOAD_FMT(concurrency=concurrency, ramp_up=ramp_up, throughput=throughput, hold=hold, iterations=iterations, duration=duration, steps=steps) def get_resource_files(self): files_list = [] if isinstance(self, FileLister): files_list.extend(self.resource_files()) files_list.extend(self.execution.get("files", [])) return files_list def __repr__(self): return "%s/%s" % (self.execution.get("executor", None), self.label if self.label else id(self)) def prepare(self): super(ScenarioExecutor, self).prepare() self.env.set(self.execution.get("env")) def _execute(self, args, **kwargs): self.preprocess_args(args) # for compatibility with other executors kwargs["stdout"] = kwargs.get("stdout", self.stdout) or PIPE kwargs["stderr"] = kwargs.get("stderr", self.stderr) or PIPE kwargs["cwd"] = kwargs.get("cwd", None) kwargs["env"] = self.env self.start_time = time.time() try: process = self.engine.start_subprocess(args=args, **kwargs) except OSError as exc: raise ToolError("Failed to start %s: %s (%s)" % (self.__class__.__name__, exc, args)) return process def post_process(self): if self.stdout: self.stdout.close() if self.stderr: self.stderr.close() super(ScenarioExecutor, self).post_process()
class ShellExecutor(Service):
    """
    :type env: Environment
    """

    def __init__(self):
        super(ShellExecutor, self).__init__()
        self.prepare_tasks = []
        self.startup_tasks = []
        self.check_tasks = []
        self.shutdown_tasks = []
        self.postprocess_tasks = []
        self.env = None

    def _load_tasks(self, stage, container):
        if not isinstance(self.parameters.get(stage, []), list):
            self.parameters[stage] = [self.parameters[stage]]

        for index, stage_task in enumerate(self.parameters.get(stage, [])):
            stage_task = ensure_is_dict(self.parameters[stage], index, "command")
            task_config = self.parameters[stage][index]

            default_cwd = self.settings.get("default-cwd", None)
            cwd = self.engine.find_file(task_config.get("cwd", default_cwd))
            if cwd is None:
                working_dir = self.engine.default_cwd
            elif cwd == 'artifacts-dir':
                working_dir = self.engine.artifacts_dir
            else:
                working_dir = cwd

            # make copy of env for every task
            env = Environment(self.log, self.env.get())
            env.set(task_config.get('env'))
            env.add_path({"PYTHONPATH": working_dir})

            task = Task(task_config, self.log, working_dir, env)
            container.append(task)
            self.log.debug("Added %s task: %s", stage, stage_task)

    def prepare(self):
        """
        Configure Tasks
        :return:
        """
        self.env = Environment(self.log, self.engine.env.get())
        self.env.set(self.settings.get('env'))

        self._load_tasks('prepare', self.prepare_tasks)
        self._load_tasks('startup', self.startup_tasks)
        self._load_tasks('check', self.check_tasks)
        self._load_tasks('shutdown', self.shutdown_tasks)
        self._load_tasks('post-process', self.postprocess_tasks)

        for task in self.prepare_tasks:
            task.start()

    def startup(self):
        for task in self.startup_tasks:
            task.start()

    def check(self):
        for task in self.check_tasks:
            task.start()

        for task in self.prepare_tasks + self.startup_tasks + self.check_tasks:
            task.check()

        return super(ShellExecutor, self).check()

    def shutdown(self):
        for task in self.shutdown_tasks:
            task.start()

        for task in self.check_tasks + self.startup_tasks:
            task.shutdown()

    def post_process(self):
        for task in self.shutdown_tasks + self.check_tasks + self.startup_tasks + self.prepare_tasks:
            task.shutdown()

        for task in self.postprocess_tasks:
            task.start()
            task.shutdown()
class ScenarioExecutor(EngineModule):
    """
    :type provisioning: engine.Provisioning
    :type execution: BetterDict
    """

    EXEC = EXEC  # backward compatibility
    RAMP_UP = "ramp-up"
    HOLD_FOR = "hold-for"
    CONCURR = "concurrency"
    THRPT = "throughput"
    STEPS = "steps"
    LOAD_FMT = namedtuple("LoadSpec", "concurrency throughput ramp_up hold iterations duration steps")

    def __init__(self):
        super(ScenarioExecutor, self).__init__()
        self.env = Environment(log=self.log)
        self.provisioning = None
        self.execution = BetterDict()  # FIXME: why have this field if we have `parameters` from base class?
        self._cached_scenario = None
        self.label = None
        self.widget = None
        self.reader = None
        self.stdout = None
        self.stderr = None
        self.delay = None
        self.start_time = None
        self.preprocess_args = lambda x: None

    def _get_tool(self, tool, **kwargs):
        instance = tool(env=self.env, log=self.log, http_client=self.engine.get_http_client(), **kwargs)
        assert isinstance(instance, RequiredTool)
        return instance

    def has_results(self):
        if self.reader and self.reader.buffer:
            return True
        else:
            return False

    def get_script_path(self, required=False, scenario=None):
        """
        :type required: bool
        :type scenario: Scenario
        """
        if scenario is None:
            scenario = self.get_scenario()

        if required:
            exc = TaurusConfigError("You must provide script for %s" % self)
            script = scenario.get(Scenario.SCRIPT, exc)
        else:
            script = scenario.get(Scenario.SCRIPT)

        if script:
            script = self.engine.find_file(script)
            scenario[Scenario.SCRIPT] = script

        return script

    def get_scenario(self, name=None):
        """
        Returns scenario dict, extract if scenario is inlined

        :return: DictOfDicts
        """
        if name is None and self._cached_scenario is not None:
            return self._cached_scenario

        scenarios = self.engine.config.get("scenarios", force_set=True)
        label = self._get_scenario_label(name, scenarios)
        exc = TaurusConfigError("Scenario '%s' not found in scenarios: %s" % (label, scenarios.keys()))
        scenario_dict = scenarios.get(label, exc)
        scenario_obj = Scenario(self.engine, scenario_dict)

        if self.engine.provisioning.extend_configs:
            script = self.get_script_path(required=False, scenario=scenario_dict)
            if script and script.lower().endswith('xml'):
                script_content = ''
                try:
                    with codecs.open(script, encoding="UTF-8") as fds:
                        script_content = fds.read()
                except UnicodeDecodeError:
                    pass

                if "con:soapui-project" in script_content:
                    scenario_obj = self._convert_soap_scenario(scenario_obj, script)

        if name is None:
            self._cached_scenario = scenario_obj

        return scenario_obj

    def _convert_soap_scenario(self, scenario_obj, script):
        self.log.info("SoapUI project detected")
        new_scenario_name, scenario_dict = self._extract_scenario_from_soapui(scenario_obj, script)
        self.engine.config["scenarios"].merge({new_scenario_name: scenario_dict})

        prev_scenario_name = self.execution["scenario"]
        self.execution["scenario"] = new_scenario_name
        for execution in self.engine.config.get(EXEC):
            if execution.get(SCENARIO) == prev_scenario_name:
                execution[SCENARIO] = new_scenario_name

        return Scenario(self.engine, scenario_dict)

    def _get_scenario_label(self, name, scenarios):
        if name is None:  # get current scenario
            exc = TaurusConfigError("Scenario is not found in execution: %s" % self.execution)
            label = self.execution.get('scenario', exc)

            is_script = isinstance(label, string_types) and label not in scenarios and \
                os.path.exists(self.engine.find_file(label))
            if isinstance(label, list):
                msg = "Invalid content of scenario, list type instead of dict or string: %s"
                raise TaurusConfigError(msg % label)
            if isinstance(label, dict) or is_script:
                self.log.debug("Extract %s into scenarios" % label)
                if isinstance(label, string_types):
                    scenario = BetterDict.from_dict({Scenario.SCRIPT: label})
                else:
                    scenario = label

                path = self.get_script_path(scenario=Scenario(self.engine, scenario))
                if path:
                    label = os.path.basename(path)
                if not path or label in scenarios:
                    hash_str = str(hashlib.md5(to_json(scenario).encode()).hexdigest())
                    label = 'autogenerated_' + hash_str[-10:]

                scenarios[label] = scenario
                self.execution['scenario'] = label

            self.label = label
        else:  # get scenario by name
            label = name

        return label

    def _extract_scenario_from_soapui(self, base_scenario, script_path):
        test_case = base_scenario.get("test-case", None)
        converter = SoapUIScriptConverter(self.log)
        conv_config = converter.convert_script(script_path)
        conv_scenarios = conv_config["scenarios"]
        scenario_name, conv_scenario = converter.find_soapui_test_case(test_case, conv_scenarios)

        new_name = scenario_name
        counter = 1
        while new_name in self.engine.config["scenarios"]:
            new_name = scenario_name + ("-%s" % counter)
            counter += 1

        if new_name != scenario_name:
            self.log.info("Scenario name '%s' is already taken, renaming to '%s'", scenario_name, new_name)
            scenario_name = new_name

        merged_scenario = BetterDict.from_dict(conv_scenario)
        merged_scenario.merge(base_scenario.data)
        for field in [Scenario.SCRIPT, "test-case"]:
            if field in merged_scenario:
                merged_scenario.pop(field)

        return scenario_name, merged_scenario

    def get_raw_load(self):
        prov_type = self.engine.config.get(Provisioning.PROV)

        for param in (ScenarioExecutor.THRPT, ScenarioExecutor.CONCURR):
            ensure_is_dict(self.execution, param, prov_type)

        throughput = self.execution.get(ScenarioExecutor.THRPT).get(prov_type, None)
        concurrency = self.execution.get(ScenarioExecutor.CONCURR).get(prov_type, None)

        iterations = self.execution.get("iterations", None)
        steps = self.execution.get(ScenarioExecutor.STEPS, None)

        hold = self.execution.get(ScenarioExecutor.HOLD_FOR, None)
        ramp_up = self.execution.get(ScenarioExecutor.RAMP_UP, None)

        return self.LOAD_FMT(concurrency=concurrency, ramp_up=ramp_up, throughput=throughput, hold=hold,
                             iterations=iterations, duration=None, steps=steps)

    def get_load(self):
        """
        Helper method to read load specification
        """

        def eval_int(value):
            try:
                return int(value)
            except (ValueError, TypeError):
                return value

        def eval_float(value):
            try:
                return float(value)  # throughput may be fractional
            except (ValueError, TypeError):
                return value

        raw_load = self.get_raw_load()

        iterations = eval_int(raw_load.iterations)
        ramp_up = raw_load.ramp_up

        throughput = eval_float(raw_load.throughput or 0)
        concurrency = eval_int(raw_load.concurrency or 0)

        steps = eval_int(raw_load.steps)
        hold = dehumanize_time(raw_load.hold or 0)

        if ramp_up is None:
            duration = hold
        else:
            ramp_up = dehumanize_time(raw_load.ramp_up)
            duration = hold + ramp_up

        if not iterations:
            if duration:
                iterations = 0  # infinite
            else:
                iterations = 1

        msg = ''
        if not isinstance(concurrency, numeric_types + (type(None),)):
            msg += "Invalid concurrency value[%s]: %s " % (type(concurrency).__name__, concurrency)

        if not isinstance(throughput, numeric_types + (type(None),)):
            msg += "Invalid throughput value[%s]: %s " % (type(throughput).__name__, throughput)

        if not isinstance(steps, numeric_types + (type(None),)):
            msg += "Invalid steps value[%s]: %s " % (type(steps).__name__, steps)

        if not isinstance(iterations, numeric_types + (type(None),)):
            msg += "Invalid iterations value[%s]: %s " % (type(iterations).__name__, iterations)

        if msg:
            raise TaurusConfigError(msg)

        return self.LOAD_FMT(concurrency=concurrency, ramp_up=ramp_up, throughput=throughput, hold=hold,
                             iterations=iterations, duration=duration, steps=steps)

    def get_resource_files(self):
        files_list = []
        if isinstance(self, FileLister):
            files_list.extend(self.resource_files())
        files_list.extend(self.execution.get("files", []))
        return files_list

    def __repr__(self):
        return "%s/%s" % (self.execution.get("executor", None), self.label if self.label else id(self))

    def prepare(self):
        super(ScenarioExecutor, self).prepare()
        self.env.set(self.execution.get("env"))

    def _execute(self, args, **kwargs):
        self.preprocess_args(args)

        # for compatibility with other executors
        kwargs["stdout"] = kwargs.get("stdout", self.stdout) or PIPE
        kwargs["stderr"] = kwargs.get("stderr", self.stderr) or PIPE

        kwargs["cwd"] = kwargs.get("cwd", None)
        kwargs["env"] = self.env

        self.start_time = time.time()

        try:
            process = self.engine.start_subprocess(args=args, **kwargs)
        except OSError as exc:
            raise ToolError("Failed to start %s: %s (%s)" % (self.__class__.__name__, exc, args))
        return process

    def post_process(self):
        if self.stdout:
            self.stdout.close()
        if self.stderr:
            self.stderr.close()
        super(ScenarioExecutor, self).post_process()
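# Illustrative sketch (not part of the module): how the duration and iteration
# defaults in get_load() combine. `parse_time` is a hypothetical, simplified
# stand-in for bzt's dehumanize_time, and the LoadSpec namedtuple mirrors the
# LOAD_FMT fields used above.
from collections import namedtuple

LoadSpec = namedtuple("LoadSpec", "concurrency throughput ramp_up hold iterations duration steps")

_UNITS = {"s": 1, "m": 60, "h": 3600}


def parse_time(value):
    """Hypothetical mini-parser: '30s' / '2m' / '1h' or a bare number of seconds."""
    text = str(value or 0)
    if text[-1] in _UNITS:
        return float(text[:-1]) * _UNITS[text[-1]]
    return float(text)


def build_load(concurrency=0, ramp_up=None, hold=0, iterations=None):
    hold = parse_time(hold)
    if ramp_up is None:
        duration = hold
    else:
        ramp_up = parse_time(ramp_up)
        duration = hold + ramp_up
    if not iterations:
        iterations = 0 if duration else 1  # 0 means "infinite", as in get_load()
    return LoadSpec(concurrency=concurrency, throughput=0, ramp_up=ramp_up, hold=hold,
                    iterations=iterations, duration=duration, steps=None)


if __name__ == "__main__":
    print(build_load(concurrency=10, ramp_up="1m", hold="2m"))  # duration=180.0, iterations=0
    print(build_load(concurrency=1))                            # no duration -> single iteration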
class ShellExecutor(Service):
    """
    :type env: Environment
    :type prepare_tasks: list[Task]
    :type startup_tasks: list[Task]
    :type check_tasks: list[Task]
    :type shutdown_tasks: list[Task]
    :type postprocess_tasks: list[Task]
    """

    def __init__(self):
        super(ShellExecutor, self).__init__()
        self.prepare_tasks = []
        self.startup_tasks = []
        self.check_tasks = []
        self.shutdown_tasks = []
        self.postprocess_tasks = []
        self.env = Environment(self.log)

    def _load_tasks(self, stage, container):
        if not isinstance(self.parameters.get(stage, []), list):
            self.parameters[stage] = [self.parameters[stage]]

        for index, stage_task in enumerate(self.parameters.get(stage, [])):
            stage_task = ensure_is_dict(self.parameters[stage], index, "command")
            task_config = self.parameters[stage][index]
            default_cwd = self.settings.get("default-cwd", None)
            cwd = self.engine.find_file(task_config.get("cwd", default_cwd))
            if cwd is None:
                working_dir = self.engine.default_cwd
            elif cwd == 'artifacts-dir':
                working_dir = self.engine.artifacts_dir
            else:
                working_dir = cwd

            # make copy of env for every task
            env = Environment(self.log, self.env)
            env.set(task_config.get('env'))
            env.add_path({"PYTHONPATH": working_dir})

            task = Task(task_config, self.log, working_dir, env)

            f_name = 'shellexec_%s_%s' % (stage, index)
            if task.out:
                task.out = open(task.out, 'at')
            else:
                if task.is_background:
                    out = self.engine.create_artifact(f_name, '.out')
                    self.log.debug('STDOUT of background task "%s" redirected to "%s"' % (task, out))
                    task.out = open(out, 'at')
                else:
                    task.out = PIPE

            if task.err:
                task.err = open(task.err, 'at')
            else:
                if task.is_background:
                    err = self.engine.create_artifact(f_name, '.err')
                    self.log.debug('STDERR of background task "%s" redirected to "%s"' % (task, err))
                    task.err = open(err, 'at')
                else:
                    task.err = PIPE

            container.append(task)
            self.log.debug("Added %s task: %s", stage, stage_task)

    def prepare(self):
        """
        Configure Tasks
        :return:
        """
        self.env.set(self.settings.get('env'))

        self._load_tasks('prepare', self.prepare_tasks)
        self._load_tasks('startup', self.startup_tasks)
        self._load_tasks('check', self.check_tasks)
        self._load_tasks('shutdown', self.shutdown_tasks)
        self._load_tasks('post-process', self.postprocess_tasks)

        for task in self.prepare_tasks:
            task.start()

    def startup(self):
        for task in self.startup_tasks:
            task.start()

    def check(self):
        for task in self.check_tasks:
            task.start()

        for task in self.prepare_tasks + self.startup_tasks + self.check_tasks:
            task.check()

        return super(ShellExecutor, self).check()

    def shutdown(self):
        for task in self.shutdown_tasks:
            self._set_stop_reason_vars(task)
            task.start()

        for task in self.check_tasks + self.startup_tasks:
            task.shutdown()

    def post_process(self):
        for task in self.shutdown_tasks + self.check_tasks + self.startup_tasks + self.prepare_tasks:
            task.shutdown()

        for task in self.postprocess_tasks:
            self._set_stop_reason_vars(task)
            task.start()
            task.shutdown()

    def _set_stop_reason_vars(self, task):
        if isinstance(self.engine.stopping_reason, RCProvider):
            rc = self.engine.stopping_reason.get_rc()
        elif self.engine.stopping_reason:
            rc = 1
        else:
            rc = 0

        task.env.set({
            "TAURUS_EXIT_CODE": rc,
            "TAURUS_STOPPING_REASON": str(self.engine.stopping_reason)})
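# Illustrative sketch (not part of the module): the exit-code mapping applied by
# _set_stop_reason_vars() before shutdown and post-process tasks run.
# `StopReason` is a hypothetical stand-in for an engine stopping_reason that
# implements the RCProvider protocol (get_rc()); the real classes live in bzt,
# and the duck-typed hasattr() check here is a simplification of isinstance().


class StopReason(object):
    """Hypothetical RCProvider-like reason carrying its own return code."""

    def __init__(self, message, rc):
        self.message = message
        self.rc = rc

    def get_rc(self):
        return self.rc

    def __str__(self):
        return self.message


def stop_reason_env(stopping_reason):
    """Mirror of the mapping: RCProvider -> its rc, other truthy reason -> 1, no reason -> 0."""
    if hasattr(stopping_reason, "get_rc"):
        rc = stopping_reason.get_rc()
    elif stopping_reason:
        rc = 1
    else:
        rc = 0
    return {"TAURUS_EXIT_CODE": str(rc), "TAURUS_STOPPING_REASON": str(stopping_reason)}


if __name__ == "__main__":
    assert stop_reason_env(None)["TAURUS_EXIT_CODE"] == "0"
    assert stop_reason_env(RuntimeError("oops"))["TAURUS_EXIT_CODE"] == "1"
    assert stop_reason_env(StopReason("criteria triggered", 3))["TAURUS_EXIT_CODE"] == "3"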