def existing_artifact(self, filename, move=False, target_filename=None):
    """
    Add existing artifact, it will be collected into artifact_dir.
    If move=True, the original file will be deleted

    :type filename: str
    :type move: bool
    :type target_filename: str
    """
    self.log.debug("Add existing artifact (move=%s): %s", move, filename)
    if self.artifacts_dir is None:
        self.log.warning("Artifacts dir has not been set, will not copy %s", filename)
        return

    new_filename = os.path.basename(filename) if target_filename is None else target_filename
    new_name = os.path.join(self.artifacts_dir, new_filename)
    # record the destination even if copy/move is skipped below
    self.__artifacts.append(new_name)

    # source already is the destination — nothing to do
    if get_full_path(filename) == get_full_path(new_name):
        self.log.debug("No need to copy %s", filename)
        return

    if not os.path.exists(filename):
        self.log.warning("Artifact file not exists: %s", filename)
        return

    if move:
        self.log.debug("Moving %s to %s", filename, new_name)
        shutil.move(filename, new_name)
    else:
        self.log.debug("Copying %s to %s", filename, new_name)
        shutil.copy(filename, new_name)
def __init__(self, executor, base_logger):
    """
    Hold PBench/phantom tool state extracted from the owning executor.

    :param executor: ScenarioExecutor
    :type base_logger: logging.Logger
    """
    super(PBenchTool, self).__init__()
    self.log = base_logger.getChild(self.__class__.__name__)
    self.executor = executor
    self.engine = executor.engine
    self.settings = executor.settings
    self.execution = executor.execution
    # phantom binary location and its loadable modules directory
    self.path = get_full_path(self.settings.get('path', 'phantom'))
    self.modules_path = get_full_path(self.settings.get("modules-path", "/usr/lib/phantom"))
    # artifact files created later during prepare/startup
    self.kpi_file = None
    self.stats_file = None
    self.config_file = None
    self.payload_file = None
    self.schedule_file = None
    self.process = None
    self.use_ssl = False
    self.hostname = 'localhost'
    self.port = 80
    self._target = {"scheme": None, "netloc": None}
    self.stdout_file = None
    self.stderr_file = None
def __build_launcher(self):
    """
    Create a patched copy of the Gatling launcher script so that
    Taurus-provided jars are appended to COMPILATION_CLASSPATH.

    :return: path to the modified launcher artifact
    :raises ValueError: when the launcher has no recognizable classpath line
    """
    modified_launcher = self.engine.create_artifact('gatling-launcher', EXE_SUFFIX)
    origin_launcher = get_full_path(self.settings['path'])
    origin_dir = get_full_path(origin_launcher, step_up=2)
    with open(origin_launcher) as origin:
        origin_lines = origin.readlines()
    modified_lines = []
    mod_success = False
    for line in origin_lines:
        if is_windows() and line.startswith('set COMPILATION_CLASSPATH=""'):
            mod_success = True
            continue  # drop the Windows line that resets the classpath
        if not is_windows() and line.startswith('COMPILATION_CLASSPATH='):
            mod_success = True
            # keep the assignment but append our classpath variable to it
            line = line.rstrip() + '":${COMPILATION_CLASSPATH}"\n'
        modified_lines.append(line)
    if not mod_success:
        raise ValueError("Can't modify gatling launcher for jar usage, ability isn't supported")
    if is_windows():
        first_line = 'set "GATLING_HOME=%s"\n' % origin_dir
    else:
        first_line = 'GATLING_HOME="%s"\n' % origin_dir
    # pin GATLING_HOME right after the shebang/first line
    modified_lines.insert(1, first_line)
    with open(modified_launcher, 'w') as modified:
        modified.writelines(modified_lines)
    if not is_windows():
        os.chmod(modified_launcher, 0o755)  # launcher must be executable
    return modified_launcher
def __init__(self, junit_config, executor):
    """
    Configure JUnit tester jar locations and the base classpath.

    :type junit_config: BetterDict
    :type executor: SeleniumExecutor
    """
    super(JUnitTester, self).__init__(junit_config, executor)
    self.props_file = junit_config['props-file']

    def path_setting(key, val):
        # resolve a settings entry to an absolute path
        # (named def instead of assigned lambda, per PEP 8 / E731)
        return get_full_path(self.settings.get(key, val))

    self.working_dir = self.settings.get("working-dir")
    self.junit_path = path_setting("path", "~/.bzt/selenium-taurus/tools/junit/junit.jar")
    self.hamcrest_path = path_setting("hamcrest-core", "~/.bzt/selenium-taurus/tools/junit/hamcrest-core.jar")
    self.json_jar_path = path_setting("json-jar", "~/.bzt/selenium-taurus/tools/junit/json.jar")
    self.selenium_server_jar_path = path_setting("selenium-server", "~/.bzt/selenium-taurus/selenium-server.jar")
    # listener jar ships with Taurus resources, one level above this module
    self.junit_listener_path = os.path.join(get_full_path(__file__, step_up=1), os.pardir, "resources",
                                            "taurus-junit-1.0.jar")
    self.target_java = str(junit_config.get("compile-target-java", "1.7"))

    self.base_class_path = [self.selenium_server_jar_path, self.junit_path, self.junit_listener_path,
                            self.hamcrest_path, self.json_jar_path]
    self.base_class_path.extend(self.scenario.get("additional-classpath", []))
    # normalize every entry through the engine's file search
    self.base_class_path = [os.path.abspath(executor.engine.find_file(x)) for x in self.base_class_path]
def assertPathsEqual(self, p1, p2):
    """
    Assert two paths (or lists of paths) are equal after normalization.

    Scalars are promoted to one-element lists. Lists must have the same
    length — previously a longer p1 raised IndexError and a longer p2 was
    silently ignored; now the mismatch fails explicitly.
    """
    if not isinstance(p1, list):
        p1 = [p1]
    if not isinstance(p2, list):
        p2 = [p2]

    self.assertEqual(len(p1), len(p2))
    for left, right in zip(p1, p2):
        self.assertEqual(get_full_path(left), get_full_path(right))
def install(self):
    """
    Download and unpack the Gatling bundle, then verify it actually runs.

    :raises ToolError: when the tool still fails to run after installation
    """
    dest = get_full_path(self.tool_path, step_up=2)
    self.log.info("Will install %s into %s", self.tool_name, dest)
    gatling_dist = self._download(use_link=True)
    self.log.info("Unzipping %s", gatling_dist)
    unzip(gatling_dist, dest, 'gatling-charts-highcharts-bundle-' + self.version)
    os.remove(gatling_dist)  # drop the downloaded archive once unpacked
    os.chmod(get_full_path(self.tool_path), 0o755)  # launcher must be executable
    self.log.info("Installed Gatling successfully")
    if not self.check_if_installed():
        raise ToolError("Unable to run %s after installation!" % self.tool_name)
def _collect_script_files(self, extensions):
    """
    Gather script files whose extension is in `extensions`.

    If self.script is a directory it is walked recursively;
    otherwise the single file itself is checked.

    :return: list of full paths
    """
    collected = []

    if not os.path.isdir(self.script):
        if os.path.splitext(self.script)[1].lower() in extensions:
            collected.append(get_full_path(self.script))
        return collected

    for folder, _, names in os.walk(self.script):
        for name in names:
            if os.path.splitext(name)[1].lower() in extensions:
                collected.append(get_full_path(os.path.join(folder, name)))

    return collected
def _prepare_chrome_loader(self):
    """
    Populate artifacts/chrome-loader with chromedriver.exe (found on PATH)
    and chrome.exe (our chrome-loader resource) for proxy recording.

    :raises TaurusInternalException: when the loader resource can't be copied
    """
    loader_dir = join(self.engine.artifacts_dir, 'chrome-loader')
    os.mkdir(loader_dir)

    # find chromedriver.exe and copy it into artifacts/chrome-loader
    for _dir in os.getenv('PATH').split(os.pathsep):
        path = join(_dir, 'chromedriver.exe')
        if isfile(path):
            # NOTE(review): assumes Windows — os.getenv('WINDIR') would be None
            # elsewhere and startswith(None) would raise; confirm call sites
            if path.lower().startswith(os.getenv('WINDIR')):
                msg = 'Wrong chromedriver location: %s, look at ' % path
                msg += 'http://gettaurus.org/docs/Proxy2JMX/#Microsoft-Windows for help'
                self.log.warning(msg)
            shutil.copy2(path, loader_dir)
            break
    else:
        self.log.warning('cromedriver.exe not found in directories described in PATH')
        return

    # copy chrome-loader.exe from resources into artifacts/chrome-loader/chrome.exe
    old_file = join(get_full_path(__file__, step_up=2), 'resources', 'chrome-loader.exe')
    new_file = join(loader_dir, 'chrome.exe')
    try:
        shutil.copy2(old_file, new_file)
    except IOError as exc:
        raise TaurusInternalException("Can't copy loader: %s" % exc)
def startup(self):
    """
    Build the molotov command line from the load profile and launch it.

    Total --duration passed to molotov is ramp-up + hold.
    """
    load = self.get_load()

    cmdline = [self.molotov.tool_path]

    if load.concurrency is not None:
        cmdline += ['--workers', str(load.concurrency)]

    if 'processes' in self.execution:
        cmdline += ['--processes', str(self.execution['processes'])]

    # TODO: autosizing as `concurrency: auto`?

    duration = 0
    if load.ramp_up:
        # BUG FIX: ramp-up was computed from load.hold instead of load.ramp_up
        ramp_up = int(ceil(dehumanize_time(load.ramp_up)))
        duration += ramp_up
        cmdline += ['--ramp-up', str(ramp_up)]
    if load.hold:
        hold = int(ceil(dehumanize_time(load.hold)))
        duration += hold
    cmdline += ['--duration', str(duration)]

    cmdline += ['--use-extension=bzt.resources.molotov_ext']

    cmdline += [self.get_script_path(required=True)]

    self.env.set({"MOLOTOV_TAURUS_REPORT": self.report_file_name})
    self.env.add_path({"PYTHONPATH": get_full_path(__file__, step_up=3)})

    self.process = self._execute(cmdline)
def __init__(self, config=None, **kwargs):
    """
    Configure Gatling tool location and download link from settings.
    """
    settings = config or {}
    version = settings.get("version", self.VERSION)

    default_path = self.LOCAL_PATH.format(version=version, suffix=EXE_SUFFIX)
    tool_path = get_full_path(settings.get("path", default_path))
    link_template = settings.get("download-link", self.DOWNLOAD_LINK)

    super(Gatling, self).__init__(
        tool_path=tool_path,
        download_link=link_template.format(version=version),
        version=version,
        **kwargs)
def __gen_datasources(self, scenario):
    """
    Build JMX CSV DataSet elements for the scenario's "data-sources".

    :raises TaurusConfigError: when data-sources is not a list or a file is missing
    :return: list of etree elements (each config followed by a hashTree)
    """
    sources = scenario.get("data-sources")
    if not sources:
        return []
    if not isinstance(sources, list):
        raise TaurusConfigError("data-sources '%s' is not a list" % sources)
    elements = []
    for idx, source in enumerate(sources):
        source = ensure_is_dict(sources, idx, "path")
        source_path = source["path"]

        delimiter = source.get("delimiter")

        if has_variable_pattern(source_path):
            # file can't be resolved at config time — fall back to defaults
            msg = "Path to CSV contains JMeter variable/function, can't check for file existence: %s"
            self.log.warning(msg, source_path)
            if not delimiter:
                delimiter = ','
                self.log.warning("Can't detect CSV dialect, default delimiter will be '%s'", delimiter)
        else:
            modified_path = self.executor.engine.find_file(source_path)
            if not os.path.isfile(modified_path):
                raise TaurusConfigError("data-sources path not found: %s" % modified_path)
            if not delimiter:
                delimiter = self.__guess_delimiter(modified_path)
            source_path = get_full_path(modified_path)

        config = JMX._get_csv_config(source_path, delimiter, source.get("quoted", False),
                                     source.get("loop", True), source.get("variable-names", ""))
        elements.append(config)
        elements.append(etree.Element("hashTree"))
    return elements
def test_files_find_file(self):
    """Script jar and 'files' jars resolved via file_search_paths must reach both classpaths."""
    curdir = get_full_path(os.curdir)
    try:
        os.chdir(__dir__() + "/../")  # make the relative search path meaningful
        self.obj.engine.file_search_paths.append(RESOURCES_DIR + "gatling/")
        self.obj.engine.config.merge({
            "execution": {
                "scenario": {
                    "script": "simulations.jar",
                    "simulation": "tests.gatling.BasicSimulation"
                },
                "files": ["deps.jar"]
            }
        })
        self.obj.execution.merge(self.obj.engine.config["execution"])
        self.obj.prepare()
        try:
            self.obj.startup()
            while not self.obj.check():
                time.sleep(self.obj.engine.check_interval)
        finally:
            self.obj.shutdown()
        # both jars must appear in both classpath env vars
        for jar in ("simulations.jar", "deps.jar"):
            for var in ("JAVA_CLASSPATH", "COMPILATION_CLASSPATH"):
                self.assertIn(jar, self.obj.env.get(var))
    finally:
        os.chdir(curdir)  # always restore the original working directory
def test_external_jar_right_launcher(self):
    """Modified launcher must extend COMPILATION_CLASSPATH rather than reset it."""
    self.obj.execution.merge({
        'files': [
            'tests/resources/grinder/fake_grinder.jar',
            'tests/resources/selenium/junit/jar'],
        'scenario': {
            "script": RESOURCES_DIR + "gatling/BasicSimulation.scala",
            "simulation": "mytest.BasicSimulation"}})
    self.obj.prepare()
    self.obj.startup()
    self.obj.shutdown()

    modified_launcher = self.obj.launcher
    with open(modified_launcher) as modified:
        modified_lines = modified.readlines()

    # extra jars must be visible on both classpath env vars
    for jar in ('fake_grinder.jar', 'another_dummy.jar'):
        for var in ("JAVA_CLASSPATH", "COMPILATION_CLASSPATH"):
            self.assertIn(jar, self.obj.env.get(var))

    for line in modified_lines:
        self.assertFalse(line.startswith('set COMPILATION_CLASSPATH=""'))
        self.assertTrue(not line.startswith('COMPILATION_CLASSPATH=') or
                        line.endswith('":${COMPILATION_CLASSPATH}"\n'))

    with open(self.obj.stdout_file.name) as stdout:
        out_lines = stdout.readlines()

    out_lines = [out_line.rstrip() for out_line in out_lines]
    self.assertEqual(out_lines[-4], get_full_path(self.obj.settings['path'], step_up=2))  # $GATLING_HOME
    self.assertIn('fake_grinder.jar', out_lines[-3])  # $COMPILATION_CLASSPATH
    self.assertIn('another_dummy.jar', out_lines[-3])  # $COMPILATION_CLASSPATH
    self.assertEqual(out_lines[-2], 'TRUE')  # $NO_PAUSE
def _check_windows(self):
    """Simulate the Windows chrome/chromedriver layout and verify executor env wiring."""
    art_dir = self.obj.engine.artifacts_dir
    os.environ['LOCALAPPDATA'] = art_dir

    # fake Chromium install and chromedriver dir inside artifacts
    os.mkdir(join(art_dir, 'Chromium'))
    os.mkdir(join(art_dir, 'Chromium', 'Application'))
    os.mkdir(join(art_dir, 'chromedriver'))

    res_dir = join(get_full_path(__file__, step_up=3), 'bzt', 'resources')
    src = join(res_dir, 'chrome-loader.c')  # any file works as exe stand-in

    dst_chrome = join(art_dir, 'Chromium', 'Application', 'chrome.exe')
    dst_chromedriver = join(art_dir, 'chromedriver', 'chromedriver.exe')

    shutil.copy2(src, dst_chrome)
    shutil.copy2(src, dst_chromedriver)

    required_env = {
        'PATH_TO_CHROME': dst_chrome,
        'ADDITIONAL_CHROME_PARAMS': '--proxy-server="http://host1:port1"',
        'CHROME_LOADER_LOG': join(self.obj.engine.artifacts_dir, 'chrome-loader.log')}

    # make the fake chromedriver discoverable first on PATH
    os.environ['PATH'] = join(art_dir, 'chromedriver') + os.pathsep + os.getenv('PATH')

    self.obj.startup()

    loader_dir = set(os.listdir(join(art_dir, 'chrome-loader')))
    self.assertEqual(loader_dir, {'chrome.exe', 'chromedriver.exe'})

    # compare env case-insensitively, as Windows env vars are
    required_env = {str(key.upper()): str(required_env[key]) for key in required_env}
    real_env = self.obj.engine.provisioning.executors[0].env.get()
    real_env = {str(key.upper()): str(real_env[key]) for key in real_env}

    self.assertTrue(real_env["PATH"].startswith(join(self.obj.engine.artifacts_dir, "chrome-loader")))
    for key in required_env:
        self.assertIn(key, real_env)
        self.assertEqual(required_env[key], real_env[key])
def __init__(self, config, scenario, engine):
    """
    HTTP request with upload-files support: validates parameter names,
    normalizes file paths and fills in mime types.
    """
    super(HierarchicHTTPRequest, self).__init__(config, scenario, engine)
    self.upload_files = self.config.get("upload-files", [])

    method = self.config.get("method")
    if method == "PUT" and len(self.upload_files) > 1:
        self.upload_files = self.upload_files[:1]  # PUT carries a single body

    for file_dict in self.upload_files:
        param = file_dict.get("param", None)

        if method == "PUT":
            file_dict["param"] = ""
        if method == "POST" and not param:
            raise TaurusConfigError("Items from upload-files must specify parameter name")

        path_exc = TaurusConfigError("Items from upload-files must specify path to file")
        path = str(file_dict.get("path", path_exc))
        if not has_variable_pattern(path):  # exclude variables
            path = get_full_path(self.engine.find_file(path))  # prepare full path for jmx
        else:
            msg = "Path '%s' contains variable and can't be expanded. Don't use relative paths in 'upload-files'!"
            self.log.warning(msg % path)

        file_dict["path"] = path

        # guess mime from path; only set when user didn't provide one
        mime = mimetypes.guess_type(file_dict["path"])[0] or "application/octet-stream"
        file_dict.get("mime-type", mime, force_set=True)
    self.content_encoding = self.config.get('content-encoding', None)
def create_artifacts_dir(self, existing_artifacts=(), merged_config=None):
    """
    Create directory for artifacts, directory name based on datetime.now()

    :param existing_artifacts: iterable of files to collect into the new dir
    :param merged_config: optional Configuration to dump as merged.yml/.json
    """
    if not self.artifacts_dir:
        artifacts_dir = self.config.get(SETTINGS, force_set=True).get("artifacts-dir", self.ARTIFACTS_DIR)
        # dir name template may contain strftime placeholders
        self.artifacts_dir = datetime.datetime.now().strftime(artifacts_dir)
        self.artifacts_dir = get_full_path(self.artifacts_dir)

    self.log.info("Artifacts dir: %s", self.artifacts_dir)
    # expose the location to child processes
    os.environ[TAURUS_ARTIFACTS_DIR] = self.artifacts_dir

    if not os.path.isdir(self.artifacts_dir):
        os.makedirs(self.artifacts_dir)

    # dump current effective configuration
    dump = self.create_artifact("effective", "")  # TODO: not good since this file not exists
    self.config.set_dump_file(dump)
    self.config.dump()

    # dump merged configuration
    if merged_config:
        merged_config.dump(self.create_artifact("merged", ".yml"), Configuration.YAML)
        merged_config.dump(self.create_artifact("merged", ".json"), Configuration.JSON)
    for artifact in existing_artifacts:
        self.existing_artifact(artifact)
def startup(self):
    """
    Build the locust command line from the load profile and launch it
    through the bundled wrapper script.
    """
    self.start_time = time.time()
    load = self.get_load()
    concurrency = load.concurrency or 1
    if load.ramp_up:
        hatch = concurrency / float(load.ramp_up)  # clients hatched per second
    else:
        hatch = concurrency

    wrapper = os.path.join(get_full_path(__file__, step_up=2), "resources", "locustio-taurus-wrapper.py")

    # wrapper needs artifacts dir and cwd importable
    self.env.add_path({"PYTHONPATH": self.engine.artifacts_dir})
    self.env.add_path({"PYTHONPATH": os.getcwd()})
    self.env.set({"LOCUST_DURATION": dehumanize_time(load.duration)})

    self.log_file = self.engine.create_artifact("locust", ".log")
    args = [sys.executable, wrapper, '-f', self.script]
    args += ['--logfile=%s' % self.log_file]
    args += ["--no-web", "--only-summary", ]
    args += ["--clients=%d" % concurrency, "--hatch-rate=%f" % hatch]
    if load.iterations:
        args.append("--num-request=%d" % load.iterations)

    if self.is_master:
        args.extend(["--master", '--expect-slaves=%s' % self.expected_slaves])

    host = self.get_scenario().get("default-address")
    if host:
        args.append('--host=%s' % host)

    self.__out = open(self.engine.create_artifact("locust", ".out"), 'w')

    self.process = self.execute(args, stderr=STDOUT, stdout=self.__out)
def __init__(self, rspec_config, executor):
    """
    Set up the Mocha tester and locate the bundled taurus plugin script.
    """
    super(MochaTester, self).__init__(rspec_config, executor)
    resources_dir = os.path.join(get_full_path(__file__, step_up=1), os.pardir, "resources")
    self.plugin_path = os.path.join(resources_dir, "mocha-taurus-plugin.js")
    self.node_tool = None
def test_install_Grinder(self):
    """Grinder auto-install from a local zip 'mirror' must produce grinder.jar."""
    path = os.path.abspath(BUILD_DIR + "grinder-taurus/lib/grinder.jar")
    # start from a clean install root
    shutil.rmtree(get_full_path(path, step_up=2), ignore_errors=True)

    # save class-level settings to restore afterwards
    grinder_link = GrinderExecutor.DOWNLOAD_LINK
    grinder_version = GrinderExecutor.VERSION
    mirrors_source = GrinderExecutor.MIRRORS_SOURCE
    try:
        GrinderExecutor.DOWNLOAD_LINK = "file:///" + RESOURCES_DIR + \
                                        "grinder/grinder-{version}_{version}-binary.zip"
        GrinderExecutor.VERSION = "3.11"
        GrinderExecutor.MIRRORS_SOURCE = "file:///" + RESOURCES_DIR + "jmeter/unicode_file"

        self.assertFalse(os.path.exists(path))

        self.obj.settings.merge({"path": path})
        self.obj.settings.merge({
            "properties-file": RESOURCES_DIR + "grinder/grinder.base.properties",
            "properties": {"sample_prop": "some_val"}})
        self.obj.execution.merge({"scenario": {
            "script": RESOURCES_DIR + "grinder/helloworld.py",
            "properties-file": RESOURCES_DIR + "grinder/grinder.properties",
            "properties": {"grinder.useConsole": "false"}}})
        self.obj.prepare()  # triggers installation

        self.assertTrue(os.path.exists(path))
    finally:
        GrinderExecutor.DOWNLOAD_LINK = grinder_link
        GrinderExecutor.VERSION = grinder_version
        GrinderExecutor.MIRRORS_SOURCE = mirrors_source
def startup(self):
    """
    Build the newman command line and launch it in the collection's directory.
    """
    script_dir = get_full_path(self.script, step_up=1)
    script_file = os.path.basename(self.script)
    cmdline = self.get_launch_cmdline(
        "run", script_file,
        "--reporters", "taurus",
        "--reporter-taurus-filename", self.report_file,
        "--suppress-exit-code",  # non-zero exit on failed asserts would look like a crash
        "--insecure",
    )

    scenario = self.get_scenario()
    timeout = scenario.get('timeout', None)
    if timeout is not None:
        # newman takes milliseconds
        cmdline += ["--timeout-request", str(int(dehumanize_time(timeout) * 1000))]

    think = scenario.get_think_time()
    if think is not None:
        cmdline += ["--delay-request", str(int(dehumanize_time(think) * 1000))]

    cmdline += self._dump_vars("globals")
    cmdline += self._dump_vars("environment")

    load = self.get_load()
    if load.iterations:
        cmdline += ['--iteration-count', str(load.iterations)]

    # TODO: allow running several collections like directory, see https://github.com/postmanlabs/newman/issues/871
    # TODO: support hold-for, probably by having own runner
    # if load.hold:
    #     cmdline += ['--hold-for', str(load.hold)]

    self.process = self._execute(cmdline, cwd=script_dir)
def test_external_jar_right_launcher(self):
    """Legacy variant: extra jars must reach jar_list and launcher must keep classpath line."""
    obj = self.getGatling()
    obj.execution.merge({
        'files': [
            'tests/grinder/fake_grinder.jar',
            'tests/selenium/jar'],
        'scenario': {
            "script": __dir__() + "/../gatling/BasicSimulation.scala",
            "simulation": "mytest.BasicSimulation"}})
    obj.prepare()
    obj.startup()
    obj.shutdown()

    jar_files = obj.jar_list
    modified_launcher = obj.launcher
    with open(modified_launcher) as modified:
        modified_lines = modified.readlines()

    self.assertIn('fake_grinder.jar', jar_files)
    self.assertIn('another_dummy.jar', jar_files)

    for line in modified_lines:
        self.assertFalse(line.startswith('set COMPILATION_CLASSPATH=""'))
        self.assertTrue(not line.startswith('COMPILATION_CLASSPATH=') or
                        line.endswith('":${COMPILATION_CLASSPATH}"\n'))

    with open(obj.engine.artifacts_dir + '/gatling-stdout.log') as stdout:
        out_lines = stdout.readlines()

    out_lines = [out_line.rstrip() for out_line in out_lines]
    self.assertEqual(out_lines[-3], get_full_path(obj.settings['path'], step_up=2))  # $GATLING_HOME
    self.assertIn('fake_grinder.jar', out_lines[-2])  # $COMPILATION_CLASSPATH
    self.assertIn('another_dummy.jar', out_lines[-2])  # $COMPILATION_CLASSPATH
    self.assertEqual(out_lines[-1], 'TRUE')  # $NO_PAUSE
def __init__(self, config=None, **kwargs):
    """
    Wrap the locally available phantom binary as a non-installable tool.
    """
    settings = config or {}
    # don't extend system-wide default
    configured_path = settings.get("path")
    super(PBench, self).__init__(
        tool_path=get_full_path(configured_path, default="phantom"),
        installable=False,
        **kwargs)
def prepare(self):
    """
    Set up output artifacts, script and result reader (master or slave mode).
    """
    super(LocustIOExecutor, self).prepare()
    self.stdout = open(self.engine.create_artifact("locust", ".out"), 'w')
    self.stderr = open(self.engine.create_artifact("locust", ".err"), 'w')

    self.install_required_tools()
    self.scenario = self.get_scenario()
    self.__setup_script()
    self.engine.existing_artifact(self.script)

    # path to taurus dir. It's necessary for bzt usage inside tools/helpers
    self.env.add_path({"PYTHONPATH": get_full_path(__file__, step_up=3)})

    self.is_master = self.execution.get("master", self.is_master)
    if self.is_master:
        # master mode: aggregate results reported by slaves
        count_error = TaurusConfigError("Slaves count required when starting in master mode")
        self.expected_slaves = int(self.execution.get("slaves", count_error))
        slaves_ldjson = self.engine.create_artifact("locust-slaves", ".ldjson")
        self.reader = SlavesReader(slaves_ldjson, self.expected_slaves, self.log)
        self.env.set({"SLAVES_LDJSON": slaves_ldjson})
    else:
        # standalone mode: read the JTL written by the wrapper
        kpi_jtl = self.engine.create_artifact("kpi", ".jtl")
        self.reader = JTLReader(kpi_jtl, self.log)
        self.env.set({"JTL": kpi_jtl})

    if isinstance(self.engine.aggregator, ConsolidatingAggregator):
        self.engine.aggregator.add_underling(self.reader)
def prepare(self):
    """
    Resolve script, variables (file path or inline dict) and tags.

    :raises TaurusConfigError: on missing script or malformed variables/tags
    """
    super(RobotExecutor, self).prepare()
    self.install_required_tools()
    self.script = self.get_script_path()
    if not self.script:
        raise TaurusConfigError("'script' should be present for robot executor")

    self.reporting_setup(suffix=".ldjson")

    scenario = self.get_scenario()
    variables = scenario.get("variables")
    if variables:
        if isinstance(variables, (string_types, text_type)):
            # a string is treated as a path to an existing variables file
            self.variables_file = get_full_path(variables)
        elif isinstance(variables, dict):
            # inline dict is serialized into a YAML artifact
            self.variables_file = self.engine.create_artifact("robot-vars", ".yaml")
            with open(self.variables_file, 'wb') as fds:
                yml = yaml.safe_dump(variables,
                                     default_flow_style=False, explicit_start=True, canonical=False,
                                     allow_unicode=True, encoding='utf-8', width=float("inf"))
                fds.write(yml)
        else:
            raise TaurusConfigError("`variables` is neither file nor dict")
    tags = scenario.get("tags", None)
    if tags:
        if isinstance(tags, (string_types, text_type)):
            self.tags = tags
        else:
            raise TaurusConfigError("`tags` is not a string or text")
def prepare(self):
    """
    Resolve the test script and set up tools and reporting.

    :raises TaurusConfigError: when no script was configured
    """
    super(NUnitExecutor, self).prepare()
    script = self.get_script_path()
    if not script:
        # BUG FIX: validate BEFORE expanding — get_full_path("") yields the
        # cwd (truthy), so the old post-expansion check could never fire
        raise TaurusConfigError("Script not passed to runner %s" % self)
    self.script = get_full_path(script)
    self.install_required_tools()
    self.reporting_setup(suffix=".ldjson")
def setUp(self):
    """Create emulated proxy recorder/engine and stage a fake chrome-loader.exe resource."""
    super(TestProxy2JMX, self).setUp()
    self.obj = Proxy2JMXEmul()
    self.obj.engine = EngineEmul()
    res_dir = join(get_full_path(__file__, step_up=3), 'bzt', 'resources')
    src = join(res_dir, 'chrome-loader.c')
    # the .c source doubles as the .exe stand-in for tests
    dst_loader = join(res_dir, 'chrome-loader.exe')
    shutil.copy2(src, dst_loader)
def __dump_csv(self, filename): self.log.info("Dumping final status as CSV: %s", filename) # FIXME: what if there's no last_sec with open(get_full_path(filename), 'wt') as fhd: writer = csv.DictWriter(fhd, self.__get_csv_dict('', self.last_sec[DataPoint.CUMULATIVE]['']).keys()) writer.writeheader() for label, kpiset in iteritems(self.last_sec[DataPoint.CUMULATIVE]): writer.writerow(self.__get_csv_dict(label, kpiset))
def detected_testng_xml(self):
    """
    Return the path to testng.xml located next to the script, when
    autodetection is enabled and the file exists; otherwise None.
    """
    script_path = self.get_script_path()
    if not (script_path and self.settings.get("autodetect-xml", True)):
        return None
    candidate = join(get_full_path(script_path, step_up=1), 'testng.xml')
    return candidate if os.path.exists(candidate) else None
def __init__(self, jmeter_version=JMeterExecutor.JMETER_VER, has_ctg=None, reaction=None):
    """
    Mock JMeter tool for tests; `reaction` scripts check/install behavior.

    :param has_ctg: pretend concurrency thread group plugin presence
    :param reaction: list of scripted responses consumed by mocked calls
    """
    jmeter_path = "~/.bzt/jmeter-taurus/{version}/"
    jmeter_path = get_full_path(jmeter_path)
    super(MockJMeter, self).__init__(tool_path=jmeter_path, parent_logger=logging.getLogger(''),
                                     jmeter_version=jmeter_version, jmeter_download_link=None,
                                     plugins=[], proxy={})
    self.has_ctg = has_ctg
    self.reaction = reaction if reaction else []
def prepare(self):
    """
    Resolve script and tools dir, install node tools, set up reporting.

    :raises TaurusConfigError: when no script was configured
    """
    super(MochaTester, self).prepare()
    self.script = self.get_script_path()
    if not self.script:
        raise TaurusConfigError("Script not passed to runner %s" % self)

    self.tools_dir = get_full_path(self.settings.get("tools-dir", self.tools_dir))
    self.install_required_tools()
    self.reporting_setup(suffix='.ldjson')
def __init__(self):
    """Engine emulator: mock modules, unique artifacts dir, fast check interval."""
    super(EngineEmul, self).__init__(ROOT_LOGGER)
    directory = get_full_path(TEST_DIR)
    # artifacts dir name template expanded through strftime
    prefix = datetime.datetime.now().strftime(self.ARTIFACTS_DIR)
    self.config.merge({
        "provisioning": "local",
        "modules": {
            "mock": ModuleMock.__module__ + "." + ModuleMock.__name__,
            "local": ModuleMock.__module__ + "." + ModuleMock.__name__},
        "settings": {
            "check-updates": False,
            "artifacts-dir": get_uniq_name(directory=directory, prefix=prefix)}})
    self.check_interval = 0.1  # keep test loops fast
    self.create_artifacts_dir()
    self.prepare_exc = None
    self.was_finalize = False
def startup(self):
    """
    Launch the WDIO runner in the directory containing the config script.
    """
    workdir = get_full_path(self.script, step_up=1)
    config_name = os.path.basename(self.script)

    cmdline = self.get_launch_cmdline(
        "--report-file", self.report_file,
        "--wdio-config", config_name,
    )

    load = self.get_load()
    if load.iterations:
        cmdline.extend(['--iterations', str(load.iterations)])
    if load.hold:
        cmdline.extend(['--hold-for', str(load.hold)])

    self.process = self._execute(cmdline, cwd=workdir)
def __init__(self, config=None, **kwargs):
    """
    Resolve chromedriver tool path and platform-specific download link.
    """
    settings = config or {}
    version = str(settings.get('version', self.VERSION))
    base_dir = get_full_path(SeleniumExecutor.SELENIUM_TOOLS_DIR)
    filename = 'chromedriver.exe' if is_windows() else 'chromedriver'
    # versioned layout: <tools>/chromedriver/<version>/chromedriver[.exe]
    tool_path = os.path.join(base_dir, 'chromedriver', version, filename)

    link = settings.get('download-link', self.DOWNLOAD_LINK)
    if is_windows():
        arch = 'win32'  # no 64-bit windows builds, :(
    elif is_mac():
        arch = 'mac64'
    else:
        arch = 'linux32' if platform_bitness() == 32 else 'linux64'
    link = link.format(version=version, arch=arch)

    super(ChromeDriver, self).__init__(tool_path=tool_path, version=version, download_link=link, **kwargs)
def _get_simulation_props(self):
    """
    Build gatling.core.* properties describing where simulations live
    and which simulation class (or run description) to use.
    """
    props = {}

    if not os.path.isfile(self.script):
        props['gatling.core.directory.simulations'] = self.script
    elif self.script.endswith('.jar'):
        # pre-compiled simulations travel on the classpath instead
        self.env.add_path({"JAVA_CLASSPATH": self.script})
        self.env.add_path({"COMPILATION_CLASSPATH": self.script})
    else:
        props['gatling.core.directory.simulations'] = get_full_path(self.script, step_up=1)

    simulation = self.get_scenario().get("simulation")
    if simulation:
        props['gatling.core.simulationClass'] = simulation
    else:
        props['gatling.core.runDescription'] = "Taurus_Test"
    return props
def test_passes_artifacts_dir_with_envs(self):
    """TAURUS_ARTIFACTS_DIR (with env expansion in its template) must reach child processes."""
    cmdline = "echo %TAURUS_ARTIFACTS_DIR%" if is_windows() else "echo $TAURUS_ARTIFACTS_DIR"
    engine = EngineEmul({
        "settings": {
            "env": {"BZT_ARTIFACTS_DIR_ENV_TEST": "custom_dir_from_env"},
            "artifacts-dir": get_uniq_name(directory=get_full_path(TEST_DIR),
                                           prefix="${BZT_ARTIFACTS_DIR_ENV_TEST}/%Y-%m-%d_%H-%M-%S.%f")
        }})
    engine.eval_env()  # expands ${BZT_ARTIFACTS_DIR_ENV_TEST} in the template
    engine.prepare()
    executor = self.obj
    executor.engine = engine
    process = executor._execute(cmdline, shell=True)
    stdout, _ = communicate(process)
    self.assertEqual(engine.artifacts_dir, stdout.strip())
    # clean up env var leaked by eval_env
    if "BZT_ARTIFACTS_DIR_ENV_TEST" in os.environ:
        os.environ.pop("BZT_ARTIFACTS_DIR_ENV_TEST")
def test_external_jar_wrong_launcher(self):
    """A launcher lacking any COMPILATION_CLASSPATH line must make prepare() fail."""
    modified_launcher = self.obj.engine.create_artifact('wrong-gatling', EXE_SUFFIX)
    origin_launcher = get_full_path(self.obj.settings['path'])
    # copy launcher while stripping all classpath lines
    with open(origin_launcher) as orig_file:
        with open(modified_launcher, 'w') as mod_file:
            for line in orig_file.readlines():
                if 'COMPILATION_CLASSPATH' not in line:
                    mod_file.writelines([line])
    os.chmod(modified_launcher, 0o755)

    self.obj.settings.merge({"path": modified_launcher})
    self.obj.execution.merge({
        'files': [
            'tests/resources/grinder/fake_grinder.jar',
            'tests/resources/selenium/junit/jar'],
        'scenario': 'tests/resources/gatling/bs'})
    self.assertRaises(ToolError, self.obj.prepare)
def __init__(self):
    """Engine emulator (legacy variant): unique artifacts dir plus mock module."""
    super(EngineEmul, self).__init__(logging.getLogger(''))
    directory = get_full_path(TEST_DIR)
    # artifacts dir name template expanded through strftime
    prefix = datetime.datetime.now().strftime(self.ARTIFACTS_DIR)
    self.config.get('settings')['artifacts-dir'] = get_uniq_name(
        directory=directory, prefix=prefix)
    self.config.get('settings')['check-updates'] = False
    self.create_artifacts_dir()
    self.config.merge({"provisioning": "local"})
    self.config.merge({
        "modules": {
            "mock": ModuleMock.__module__ + "." + ModuleMock.__name__
        }
    })
    self.prepare_exc = None
    self.was_finalize = False
def install_required_tools(self):
    """Ensure Tcl, JVM and the Grinder jar are present, installing when missing."""
    grinder_path = self.settings.get(
        "path", "~/.bzt/grinder-taurus/lib/grinder.jar")
    grinder_path = get_full_path(grinder_path)
    self.settings["path"] = grinder_path  # persist the resolved path back into settings
    download_link = self.settings.get("download-link", "")

    required_tools = [
        TclLibrary(self.log),
        JavaVM(self.log),
        Grinder(grinder_path, self.log, GrinderExecutor.VERSION, download_link=download_link)
    ]

    for tool in required_tools:
        if not tool.check_if_installed():
            tool.install()
def _load_base_configs(self):
    """Load the bundled base-config.yml plus any machine-wide extension configs."""
    base_configs = [
        os.path.join(get_full_path(__file__, step_up=1), 'resources', 'base-config.yml')
    ]
    machine_dir = get_configs_dir()  # can't refactor machine_dir out - see setup.py
    if os.path.isdir(machine_dir):
        self.log.debug("Reading extension configs from: %s", machine_dir)
        # sorted() gives deterministic override order
        for cfile in sorted(os.listdir(machine_dir)):
            fname = os.path.join(machine_dir, cfile)
            if os.path.isfile(fname):
                base_configs.append(fname)
    else:
        self.log.debug("No machine configs dir: %s", machine_dir)
    self.log.debug("Base configs list: %s", base_configs)
    self.config.load(base_configs)
def install_required_tools(self):
    """Ensure Tcl, JVM, Taurus java helper and the Grinder jar are installed."""
    grinder_path = self.settings.get(
        "path", "~/.bzt/grinder-taurus/lib/grinder.jar")
    grinder_path = get_full_path(grinder_path)
    self.settings["path"] = grinder_path  # persist the resolved path back into settings
    download_link = self.settings.get("download-link", "")
    http_client = self.engine.get_http_client()
    grinder = Grinder(grinder_path, self.log, GrinderExecutor.VERSION, download_link, http_client)

    required_tools = [
        TclLibrary(self.log),
        JavaVM(self.log),
        TaurusJavaHelper(http_client),
        grinder
    ]
    for tool in required_tools:
        if not tool.check_if_installed():
            tool.install()
def _get_testng_xml(self):
    """
    Return a testng.xml path: explicit scenario value first, then
    autodetection next to the script; None when neither applies.
    """
    if 'testng-xml' in self.scenario:
        # a falsy value deliberately switches autodetection off
        return self.scenario.get('testng-xml') or None

    script_path = self.get_script_path()
    if not script_path:
        return None

    candidate = os.path.join(get_full_path(script_path, step_up=1), 'testng.xml')
    if not os.path.exists(candidate):
        return None

    self.log.info("Detected testng.xml file at %s", candidate)
    self.scenario['testng-xml'] = candidate  # cache the detection result
    return candidate
def _check_windows(self):
    """Windows chrome-loader check, authenticated-proxy variant of the env wiring test."""
    art_dir = self.obj.engine.artifacts_dir
    os.environ['LOCALAPPDATA'] = art_dir

    # fake Chromium install and chromedriver dir inside artifacts
    os.mkdir(join(art_dir, 'Chromium'))
    os.mkdir(join(art_dir, 'Chromium', 'Application'))
    os.mkdir(join(art_dir, 'chromedriver'))

    res_dir = join(get_full_path(__file__, step_up=3), 'bzt', 'resources')
    src = join(res_dir, 'chrome-loader.c')  # any file works as exe stand-in

    dst_chrome = join(art_dir, 'Chromium', 'Application', 'chrome.exe')
    dst_chromedriver = join(art_dir, 'chromedriver', 'chromedriver.exe')

    shutil.copy2(src, dst_chrome)
    shutil.copy2(src, dst_chromedriver)

    required_env = {
        'PATH_TO_CHROME': dst_chrome,
        'ADDITIONAL_CHROME_PARAMS': '--proxy-server="user1:123@host1:port1"',
        'CHROME_LOADER_LOG': join(self.obj.engine.artifacts_dir, 'chrome-loader.log')
    }

    # make the fake chromedriver discoverable first on PATH
    os.environ['PATH'] = join(
        art_dir, 'chromedriver') + os.pathsep + os.getenv('PATH')
    self.obj.startup()

    loader_dir = set(os.listdir(join(art_dir, 'chrome-loader')))
    self.assertEqual(loader_dir, {'chrome.exe', 'chromedriver.exe'})

    # compare env case-insensitively, as Windows env vars are
    required_env = {
        str(key.upper()): str(required_env[key])
        for key in required_env
    }
    real_env = self.obj.engine.provisioning.executors[0].env.get()
    real_env = {str(key.upper()): str(real_env[key]) for key in real_env}

    self.assertTrue(real_env["PATH"].startswith(
        join(self.obj.engine.artifacts_dir, "chrome-loader")))
    for key in required_env:
        self.assertIn(key, real_env)
        self.assertEqual(required_env[key], real_env[key])
def prepare(self):
    """
    Prepare a Grinder run: resolve/generate the script, write the
    properties artifact, wire the result reader and build the classpath.

    :raises TaurusConfigError: when neither script nor requests are present
    """
    self.install_required_tools()
    scenario = self.get_scenario()
    self.exec_id = self.label
    self.script = self.get_script_path()
    if not self.script:
        if "requests" in scenario:
            # generate a script out of declarative requests
            self.script = self.__scenario_from_requests()
        else:
            msg = "There must be a script file or requests for its generation "
            msg += "to run Grinder tool (%s)" % self.execution.get(
                'scenario')
            raise TaurusConfigError(msg)

    self.properties_file = self.engine.create_artifact(
        "grinder", ".properties")

    with open(self.properties_file, 'w') as fds:
        self.__write_base_props(fds)
        self.__write_scenario_props(fds, scenario)
        self.__write_bzt_props(fds)

    self.kpi_file = os.path.join(self.engine.artifacts_dir,
                                 self.exec_id + "-kpi.log")

    self.reader = DataLogReader(self.kpi_file, self.log)
    self.reader.report_by_url = self.settings.get("report-by-url", False)
    if isinstance(self.engine.aggregator, ConsolidatingAggregator):
        self.engine.aggregator.add_underling(self.reader)

    # add logback configurations used by worker processes (logback-worker.xml)
    res_dir = os.path.join(get_full_path(__file__, step_up=2), 'resources')
    self.env.add_path({"CLASSPATH": res_dir}, finish=True)
    self.env.add_path(
        {
            "CLASSPATH":
            TaurusJavaHelper(self.engine.get_http_client()).tool_path
        },
        finish=True)
    self.env.add_path({"CLASSPATH": self.settings.get("path", None)},
                      finish=True)

    self.cmd_line = ["java", "net.grinder.Grinder", self.properties_file]
def prepare(self):
    """
    Prepare a Gatling run: collect extra .jar files from 'files', build (or
    reuse) the launcher, resolve/generate the simulation script and set up
    the results reader.
    """
    self.install_required_tools()
    scenario = self.get_scenario()

    jar_files = []
    files = self.execution.get('files', [])
    # accept both individual .jar files and directories containing them
    for candidate in files:
        candidate = get_full_path(self.engine.find_file(candidate))
        if os.path.isfile(candidate) and candidate.lower().endswith('.jar'):
            jar_files.append(candidate)
        elif os.path.isdir(candidate):
            for element in os.listdir(candidate):
                element = os.path.join(candidate, element)
                if os.path.isfile(element) and element.lower().endswith('.jar'):
                    jar_files.append(element)

    self.log.debug("JAR files list for Gatling: %s", jar_files)

    if jar_files:
        separator = os.pathsep
        # leading separator so the list can be appended verbatim to a classpath
        self.jar_list = separator + separator.join(jar_files)

    # on Windows, or whenever extra jars must go on the classpath,
    # the stock launcher has to be patched; otherwise use it as-is
    if is_windows() or jar_files:
        self.log.debug("Building Gatling launcher")
        self.launcher = self.__build_launcher()
    else:
        self.log.debug("Will not build Gatling launcher")
        self.launcher = self.settings["path"]

    self.script = self.get_script_path()
    if not self.script:
        if "requests" in scenario:
            self.get_scenario()['simulation'], self.script = self.__generate_script()
            self.__copy_data_sources()
        else:
            msg = "There must be a script file or requests for its generation "
            msg += "to run Gatling tool (%s)" % self.execution.get('scenario')
            raise TaurusConfigError(msg)

    self.dir_prefix = self.settings.get('dir_prefix', None)
    if self.dir_prefix is None:
        # unique per-executor prefix for result directories
        self.dir_prefix = 'gatling-%s' % id(self)

    self.reader = DataLogReader(self.engine.artifacts_dir, self.log, self.dir_prefix)
    if isinstance(self.engine.aggregator, ConsolidatingAggregator):
        self.engine.aggregator.add_underling(self.reader)
def startup(self):
    """Assemble the apiritif.loadgen command line from load settings and launch it."""
    executable = self.settings.get("interpreter", sys.executable)

    # expose the Taurus source tree to the child process via PYTHONPATH
    taurus_dir = get_full_path(__file__, step_up=3)
    existing = os.getenv("PYTHONPATH")
    self.env["PYTHONPATH"] = os.pathsep.join((existing, taurus_dir)) if existing else taurus_dir

    # ldjson carries functional-mode samples, csv carries load-mode KPIs
    report_type = ".ldjson" if self.engine.is_functional_mode() else ".csv"
    report_tpl = self.engine.create_artifact("apiritif-", "") + "%s" + report_type

    cmdline = [executable, "-m", "apiritif.loadgen", '--result-file-template', report_tpl]

    load = self.get_load()
    for flag, value in (('--concurrency', load.concurrency),
                        ('--iterations', load.iterations),
                        ('--hold-for', load.hold),
                        ('--ramp-up', load.ramp_up),
                        ('--steps', load.steps)):
        if value:
            cmdline += [flag, str(value)]

    if self.__is_verbose():
        cmdline += ['--verbose']

    cmdline += [self.script]
    self.start_time = time.time()
    self._start_subprocess(cmdline)
    self._tailer = FileReader(filename=self.stdout_file, parent_logger=self.log)
def startup(self):
    """Compose the locust command line (master or standalone mode) and spawn the process."""
    self.start_time = time.time()
    load = self.get_load()

    concurrency = load.concurrency or 1
    if self.is_master:
        # every slave takes an equal share of the total concurrency
        concurrency = math.ceil(concurrency / float(self.expected_slaves))

    # hatch rate spreads the ramp-up over the configured duration
    hatch = concurrency / float(load.ramp_up) if load.ramp_up else concurrency

    wrapper = os.path.join(get_full_path(__file__, step_up=2), "resources", "locustio-taurus-wrapper.py")

    self.env.add_path({"PYTHONPATH": self.engine.artifacts_dir})
    self.env.add_path({"PYTHONPATH": os.getcwd()})
    self.env.set({"LOCUST_DURATION": dehumanize_time(load.duration)})

    self.log_file = self.engine.create_artifact("locust", ".log")
    args = [sys.executable, wrapper, '-f', self.script]
    args += ['--logfile=%s' % self.log_file]
    args += ["--no-web", "--only-summary"]
    args += ["--clients=%d" % concurrency, "--hatch-rate=%f" % hatch]

    if load.iterations:
        num_requests = load.iterations * concurrency
        args.append("--num-request=%d" % num_requests)
        self.env.set({"LOCUST_NUMREQUESTS": num_requests})

    if self.is_master:
        args.extend(["--master", '--expect-slaves=%s' % self.expected_slaves])

    host = self.get_scenario().get("default-address")
    if host:
        args.append('--host=%s' % host)

    self.__out = open(self.engine.create_artifact("locust", ".out"), 'w')
    self.process = self.execute(args, stderr=STDOUT, stdout=self.__out)
def __init__(self, tool_path="", log=None, **kwargs):
    """
    Webdriver tool wrapper: resolves a default driver binary location and
    tries to set up webdriver_manager for automatic driver downloads.

    :param tool_path: explicit path to the driver binary; empty means use the default location
    :param log: logger used to report manager setup failures
    """
    self.webdriver_manager = None
    self.log = log
    self.dest = None
    os.environ['WDM_LOG_LEVEL'] = '0'  # silence webdriver_manager console output
    base_dir = get_full_path(SeleniumExecutor.SELENIUM_TOOLS_DIR)
    filename = self.DRIVER_NAME
    filename += '.exe' if is_windows() else ""
    if not tool_path:
        # fixed: was os.path.join(base_dir, f'drivers/{self.DRIVER_NAME}', filename) -
        # embedding '/' inside a join() argument yields mixed separators on Windows
        tool_path = os.path.join(base_dir, 'drivers', self.DRIVER_NAME, filename)
    try:
        self.webdriver_manager = self.MANAGER(path=base_dir, print_first_line=False, log_level=0)
    except (ValueError, ConnectionError, ProxyError, SSLError) as err:
        # the manager is optional: fall back to the static tool_path on network/config errors
        self.webdriver_manager = None
        self.log.warning(err)
    super().__init__(tool_path=tool_path, **kwargs)
def prepare(self):
    """Locate the test script (generating one from 'requests' if absent) and configure reporting."""
    super(ApiritifNoseExecutor, self).prepare()

    if 'executor' in self.execution and self.execution['executor'] == 'nose':
        self.log.warning("'nose' keyword is deprecated and will be removed soon. Please use 'apiritif' instead.")

    self.script = self.get_script_path()
    if not self.script:
        if "requests" not in self.get_scenario():
            raise TaurusConfigError("Nothing to test, no requests were provided in scenario")
        self.script = self.__tests_from_requests()

    # TODO: required tools?

    # make the Taurus package importable for bzt usage inside tools/helpers
    self.env.add_path({"PYTHONPATH": get_full_path(BZT_DIR, step_up=1)})

    self.reporting_setup()  # no prefix/suffix because we don't fully control report file names
def test_external_jar_wrong_launcher(self):
    """A launcher without any COMPILATION_CLASSPATH line cannot be patched, so prepare() raises ValueError."""
    obj = self.getGatling()

    broken_launcher = obj.engine.create_artifact('wrong-gatling', EXE_SUFFIX)
    origin_launcher = get_full_path(obj.settings['path'])

    # strip every classpath line from a copy of the original launcher
    with open(origin_launcher) as orig_file:
        kept_lines = [line for line in orig_file.readlines() if 'COMPILATION_CLASSPATH' not in line]
    with open(broken_launcher, 'w') as mod_file:
        mod_file.writelines(kept_lines)

    obj.settings.merge({"path": broken_launcher})
    obj.execution.merge({
        'files': ['tests/grinder/fake_grinder.jar', 'tests/selenium/jar'],
        'scenario': 'tests/gatling/bs'})
    self.assertRaises(ValueError, obj.prepare)
def test_external_jar_right_launcher(self):
    """
    Gatling prepare/startup with extra jars: jar_list must include both jars
    and the patched launcher must export them via COMPILATION_CLASSPATH.
    """
    obj = self.getGatling()
    obj.execution.merge({
        'files': [
            'tests/resources/grinder/fake_grinder.jar',
            'tests/resources/selenium/junit/jar'],
        'scenario': {
            "script": __dir__() + "/../resources/gatling/BasicSimulation.scala",
            "simulation": "mytest.BasicSimulation"}})
    obj.prepare()
    obj.startup()
    obj.shutdown()

    jar_files = obj.jar_list
    modified_launcher = obj.launcher
    with open(modified_launcher) as modified:
        modified_lines = modified.readlines()

    # both the explicit jar and the one discovered inside the directory must be picked up
    self.assertIn('fake_grinder.jar', jar_files)
    self.assertIn('another_dummy.jar', jar_files)

    # every classpath line must have been rewritten by __build_launcher
    for line in modified_lines:
        self.assertFalse(line.startswith('set COMPILATION_CLASSPATH=""'))
        self.assertTrue(not line.startswith('COMPILATION_CLASSPATH=') or
                        line.endswith('":${COMPILATION_CLASSPATH}"\n'))

    with open(obj.stdout_file.name) as stdout:
        out_lines = stdout.readlines()

    out_lines = [out_line.rstrip() for out_line in out_lines]
    self.assertEqual(out_lines[-4], get_full_path(obj.settings['path'], step_up=2))  # $GATLING_HOME
    self.assertIn('fake_grinder.jar', out_lines[-3])  # $COMPILATION_CLASSPATH
    self.assertIn('another_dummy.jar', out_lines[-3])  # $COMPILATION_CLASSPATH
    self.assertEqual(out_lines[-2], 'TRUE')  # $NO_PAUSE
def install_required_tools(self):
    """Check/install everything TestNG execution needs: JDK tools, Selenium server and helper jars."""
    super(TestNGTester, self).install_required_tools()
    self.testng_path = self.path_lambda(self.settings.get("path", "~/.bzt/selenium-taurus/tools/testng/testng.jar"))
    # Taurus-side TestNG listener jar shipped with bzt resources
    self.testng_plugin_path = join(get_full_path(__file__, step_up=2), "resources", "taurus-testng-1.0.jar")
    tools = []
    # only check javac if we need to compile; with a JAR as script javac is not needed
    if self.script and any(self._collect_script_files({'.java'})):
        tools.append(JavaC(self.log))
    tools.append(TclLibrary(self.log))
    tools.append(JavaVM(self.log))
    link = SELENIUM_DOWNLOAD_LINK.format(version=SELENIUM_VERSION)
    tools.append(SeleniumServerJar(self.selenium_server_jar_path, link, self.log))
    tools.append(TestNGJar(self.testng_path, TESTNG_DOWNLOAD_LINK))
    tools.append(HamcrestJar(self.hamcrest_path, HAMCREST_DOWNLOAD_LINK))
    tools.append(JsonJar(self.json_jar_path, JSON_JAR_DOWNLOAD_LINK))
    tools.append(TestNGPluginJar(self.testng_plugin_path, ""))
    self._check_tools(tools)
def prepare(self):
    """
    Prepare a Grinder run: resolve the script, write the .properties
    artifact and build the full 'java -classpath ...' command line.

    :raises TaurusConfigError: when neither script nor requests are given
    """
    self.install_required_tools()
    scenario = self.get_scenario()
    self.exec_id = self.label

    self.script = self.get_script_path()
    if self.script:
        self.script = os.path.abspath(self.engine.find_file(self.script))
    elif "requests" in scenario:
        self.script = self.__scenario_from_requests()
    else:
        raise TaurusConfigError(
            "There must be a script file or requests for its generation "
            "to run Grinder tool (%s)" % self.execution.get('scenario'))

    self.properties_file = self.engine.create_artifact("grinder", ".properties")
    with open(self.properties_file, 'w') as props:
        self.__write_base_props(props)
        self.__write_scenario_props(props, scenario)
        self.__write_bzt_props(props)

    self.kpi_file = os.path.join(self.engine.artifacts_dir, self.exec_id + "-kpi.log")
    self.reader = DataLogReader(self.kpi_file, self.log)
    self.reader.report_by_url = self.settings.get("report-by-url", False)
    if isinstance(self.engine.aggregator, ConsolidatingAggregator):
        self.engine.aggregator.add_underling(self.reader)

    # logback configuration used by worker processes (logback-worker.xml) + logger jar
    res_dir = os.path.join(get_full_path(__file__, step_up=2), 'resources')
    entries = [res_dir, os.path.join(res_dir, "grinder-logger-1.0.jar")]
    path = self.settings.get("path", None)
    if path:
        entries.append(path)
    classpath = os.path.pathsep.join(entries)

    self.cmd_line = ["java", "-classpath", classpath]
    self.cmd_line += ["net.grinder.Grinder", self.properties_file]
def startup(self):
    """Spawn the runner subprocess with report/iteration/duration/variable options."""
    interpreter = self.settings.get("interpreter", sys.executable)

    # let the child process import the bundled Taurus package
    self.env.add_path({"PYTHONPATH": get_full_path(__file__, step_up=3)})

    load = self.get_load()
    args = [interpreter, self.runner_path, '--report-file', self.report_file]
    if load.iterations:
        args.extend(['--iterations', str(load.iterations)])
    if load.hold:
        args.extend(['--duration', str(load.hold)])
    if self.variables_file is not None:
        args.extend(['--variablefile', self.variables_file])
    args.append(self.script)

    self._start_subprocess(args)
def __init__(self, jmeter_version=JMeterExecutor.JMETER_VER, has_ctg=None, reaction=None, http_client=None):
    """
    Test double for the JMeter tool wrapper.

    :param jmeter_version: version string forwarded to the real wrapper
    :param has_ctg: flag reported by the mock (semantics defined by callers)
    :param reaction: scripted responses for the mock; empty list when omitted
    :param http_client: HTTP client stub; a MockHTTPClient is created when omitted
    """
    # NOTE(review): the '{version}' placeholder is left unformatted here -
    # presumably the base JMeter class substitutes it later; confirm.
    jmeter_path = "~/.bzt/jmeter-taurus/{version}/"
    jmeter_path = get_full_path(jmeter_path)
    if http_client is None:
        http_client = MockHTTPClient()
    super(MockJMeter, self).__init__(tool_path=jmeter_path, parent_logger=logging.getLogger(''),
                                     jmeter_version=jmeter_version, jmeter_download_link=None,
                                     plugins=[], http_client=http_client)
    self.has_ctg = has_ctg
    self.reaction = reaction if reaction else []
def __configure(self, configs):
    """
    Merge the personal rc file, the given configs and CLI overrides into the
    engine configuration, then create the artifacts directory.

    :param configs: list of config file paths given on the command line
    """
    self.log.info("Starting with configs: %s", configs)

    if self.options.no_system_configs is None:
        self.options.no_system_configs = False

    bzt_rc = os.path.expanduser(os.path.join('~', ".bzt-rc"))
    if os.path.exists(bzt_rc):
        # fixed: use lazy %-style log args instead of eager formatting,
        # consistent with every other log call in this method
        self.log.debug("Using personal config: %s", bzt_rc)
    else:
        self.log.debug("Adding personal config: %s", bzt_rc)
        self.log.info("No personal config found, creating one at %s", bzt_rc)
        shutil.copy(os.path.join(get_full_path(__file__, step_up=1), 'resources', 'base-bzt-rc.yml'), bzt_rc)

    merged_config = self.engine.configure([bzt_rc] + configs, not self.options.no_system_configs)

    # apply aliases
    for alias in self.options.aliases:
        cli_aliases = self.engine.config.get('cli-aliases')
        keys = sorted(cli_aliases.keys())
        err = TaurusConfigError("'%s' not found in aliases. Available aliases are: %s" % (alias, ", ".join(keys)))
        self.engine.config.merge(cli_aliases.get(alias, err))

    if self.options.option:
        overrider = ConfigOverrider(self.log)
        overrider.apply_overrides(self.options.option, self.engine.config)

    settings = self.engine.config.get(SETTINGS)
    settings.get('verbose', bool(self.options.verbose))  # respect value from config
    if self.options.verbose:  # force verbosity if cmdline asked for it
        settings['verbose'] = True
    if settings.get('verbose'):
        CLI.console_handler.setLevel(logging.DEBUG)

    self.engine.create_artifacts_dir(configs, merged_config)
    self.engine.default_cwd = os.getcwd()
def test_jsr223(self):
    """jsr223.jmx must convert into five jsr223 entries with the expected language/script/parameter fields."""
    obj = self._get_jmx2yaml("/resources/jmeter/jmx/jsr223.jmx")
    obj.process()
    # fixed: original used yaml.load(open(...).read()), which leaked the file
    # handle and relied on the deprecated/unsafe default Loader
    with open(obj.dst_file) as fds:
        yml = yaml.full_load(fds)
    scenarios = yml.get("scenarios")
    scenario = scenarios["Thread Group"]
    requests = scenario["requests"]
    self.assertEqual(len(requests), 1)
    request = requests[0]
    self.assertIn("jsr223", request)
    jsrs = request["jsr223"]
    self.assertIsInstance(jsrs, list)
    self.assertEqual(len(jsrs), 5)
    # inline beanshell script
    self.assertEqual(jsrs[0]["language"], "beanshell")
    self.assertEqual(jsrs[0]["script-text"], "scripty")
    self.assertEqual(jsrs[0]["parameters"], "parames")
    self.assertNotIn('script-file', jsrs[0])
    # inline javascript, non-ASCII text must survive conversion
    self.assertEqual(jsrs[1]["language"], "javascript")
    self.assertEqual(jsrs[1]["script-text"], u'console.log("ПРИВЕТ");\nline("2");')
    self.assertEqual(jsrs[1]["parameters"], "a b c")
    self.assertNotIn('script-file', jsrs[1])
    # file-based javascript
    self.assertEqual(jsrs[2]["language"], "javascript")
    self.assertEqual(jsrs[2]["script-file"], "script.js")
    self.assertEqual(jsrs[2]["parameters"], None)
    self.assertNotIn('script-text', jsrs[2])
    # pre-processor entries keep execute=before
    self.assertEqual(jsrs[3]["language"], "beanshell")
    self.assertEqual(jsrs[3]["execute"], "before")
    self.assertEqual(jsrs[3]["parameters"], None)
    self.assertEqual(jsrs[3]['script-text'], 'console.log("beanshell aka jsr223");')
    self.assertNotIn('script-file', jsrs[3])
    self.assertEqual(jsrs[4]["language"], "java")
    self.assertEqual(jsrs[4]["execute"], "before")
    self.assertEqual(jsrs[4]["parameters"], None)
    self.assertIn('BlazeDemo.java', jsrs[4]['script-file'])
    self.assertNotIn('script-text', jsrs[4])
    # external script files must be copied next to the generated yaml
    self.assertTrue(os.path.exists(os.path.join(get_full_path(obj.dst_file, step_up=1), 'script.js')))
def startup(self):
    """Build the newman command line from scenario/load settings and run it from the script's directory."""
    work_dir = get_full_path(self.script, step_up=1)
    collection = os.path.basename(self.script)

    cmdline = self.get_launch_cmdline(
        "run", collection,
        "--reporters", "taurus",
        "--reporter-taurus-filename", self.report_file,
        "--suppress-exit-code",
        "--insecure")

    scenario = self.get_scenario()

    timeout = scenario.get('timeout', None)
    if timeout is not None:
        cmdline += ["--timeout-request", str(int(dehumanize_time(timeout) * 1000))]

    think = scenario.get_think_time()
    if think is not None:
        cmdline += ["--delay-request", str(int(dehumanize_time(think) * 1000))]

    cmdline += self._dump_vars("globals")
    cmdline += self._dump_vars("environment")

    load = self.get_load()
    if load.iterations:
        cmdline += ['--iteration-count', str(load.iterations)]

    # TODO: allow running several collections like directory, see https://github.com/postmanlabs/newman/issues/871
    # TODO: support hold-for, probably by having own runner
    # if load.hold:
    #     cmdline += ['--hold-for', str(load.hold)]

    self.process = self.execute(cmdline, cwd=work_dir)
def install_required_tools(self):
    """Check/install everything JUnit execution needs: JDK tools, Selenium server and helper jars."""
    super(JUnitTester, self).install_required_tools()
    self.junit_path = self.path_lambda(self.settings.get("path", "~/.bzt/selenium-taurus/tools/junit/junit.jar"))
    # Taurus-side JUnit listener jar shipped with bzt resources
    self.junit_listener_path = join(get_full_path(__file__, step_up=2), "resources", "taurus-junit-1.0.jar")
    tools = []
    # only check javac if we need to compile. if we have JAR as script - we don't need javac
    if self.script and any(self._collect_script_files({'.java'})):
        tools.append(JavaC(self.log))
    tools.append(TclLibrary(self.log))
    tools.append(JavaVM(self.log))
    link = SELENIUM_DOWNLOAD_LINK.format(version=SELENIUM_VERSION)
    tools.append(SeleniumServerJar(self.selenium_server_jar_path, link, self.log))
    tools.append(JUnitJar(self.junit_path, self.log, JUNIT_VERSION))
    tools.append(HamcrestJar(self.hamcrest_path, HAMCREST_DOWNLOAD_LINK))
    tools.append(JsonJar(self.json_jar_path, JSON_JAR_DOWNLOAD_LINK))
    tools.append(JUnitListenerJar(self.junit_listener_path, ""))
    self._check_tools(tools)
def test_external_jar_wrong_launcher(self):
    """A launcher stripped of COMPILER_CLASSPATH lines must make prepare() fail with ToolError."""
    broken = self.obj.engine.create_artifact('wrong-gatling', EXE_SUFFIX)
    origin = get_full_path(self.obj.settings['path'])

    # copy the launcher, dropping every compiler-classpath line
    with open(origin) as source, open(broken, 'w') as target:
        for line in source.readlines():
            if 'COMPILER_CLASSPATH' not in line:
                target.write(line)
    os.chmod(broken, 0o755)

    self.obj.settings.merge({"path": broken})
    self.obj.execution.merge({
        "scenario": {
            "script": "tests/resources/gatling/bs",
            "additional-classpath": ["tests/resources/gatling/fake.jar"]}})
    self.assertRaises(ToolError, self.obj.prepare)
def __dump_xml(self, filename):
    """
    Serialize final cumulative stats (plus optional BZA report link) into an XML file.

    :type filename: str
    """
    self.log.info("Dumping final status as XML: %s", filename)
    root = etree.Element("FinalStatus")
    report_info = get_bza_report_info(self.engine, self.log)
    if report_info:
        # only the first report link is included in the dump
        link, _ = report_info[0]
        report_element = etree.Element("ReportURL")
        report_element.text = link
        root.append(report_element)
    if self.last_sec:
        # one summary element per label from the cumulative datapoint
        for label, kpiset in iteritems(self.last_sec[DataPoint.CUMULATIVE]):
            root.append(self.__get_xml_summary(label, kpiset))
    with open(get_full_path(filename), 'wb') as fhd:
        tree = etree.ElementTree(root)
        tree.write(fhd, pretty_print=True, encoding="UTF-8", xml_declaration=True)