def run_pylint(base_path, check_files=None, cleanup_test_dir=False):
    pylint_args = os.environ.get("PYLINT_ARGS", "")
    if pylint_args:
        pylint_args += " "
    pylint_cfg = repo_path() + "/pylintrc"

    check_files = get_pylint_files(base_path, "*")
    if not check_files:
        print("Nothing to do...")
        return 0  # nothing to do

    os.putenv("TEST_PATH", repo_path() + "/tests")
    cmd = "pylint --rcfile=\"%s\" %s%s" % (pylint_cfg, pylint_args, " ".join(check_files))
    print("Running pylint with: %s" % cmd)
    p = subprocess.Popen(cmd, shell=True, cwd=base_path)
    exit_code = p.wait()
    print("Finished with exit code: %d" % exit_code)

    if exit_code == 0 and cleanup_test_dir:
        # Don't remove directory when specified via WORKDIR env
        if not os.environ.get("WORKDIR"):
            print("Removing build path...")
            shutil.rmtree(base_path)

    return exit_code

def cfg_setup_fixture(request, web, site):  # noqa: F811 # pylint: disable=redefined-outer-name
    hostname = "test-prediction"

    # Enforce use of the pre-created RRD file from the git. The restart of the core
    # is needed to make it renew its internal RRD file cache
    site.makedirs("var/check_mk/rrd/test-prediction")
    with open(site.path("var/check_mk/rrd/test-prediction/CPU_load.rrd"), "wb") as f:
        f.write(
            open("%s/tests-py3/integration/cmk/base/test-files/CPU_load.rrd" % repo_path(),
                 "rb").read())

    site.write_file(
        "var/check_mk/rrd/test-prediction/CPU_load.info",
        open("%s/tests-py3/integration/cmk/base/test-files/CPU_load.info" % repo_path()).read())

    site.restart_core()

    create_linux_test_host(request, web, site, "test-prediction")

    site.write_file(
        "etc/check_mk/conf.d/linux_test_host_%s_cpu_load.mk" % hostname, """
globals().setdefault('custom_checks', [])

custom_checks = [
    ({'service_description': u'CPU load', 'has_perfdata': True}, [], ALL_HOSTS, {}),
] + custom_checks
""")

    web.activate_changes()
    yield

    # Cleanup
    site.delete_file("etc/check_mk/conf.d/linux_test_host_%s_cpu_load.mk" % hostname)
    site.delete_dir("var/check_mk/rrd")

def run_pylint(base_path, check_files=None, cleanup_test_dir=False):
    pylint_args = os.environ.get("PYLINT_ARGS", "")
    if pylint_args:
        pylint_args += " "
    pylint_output = os.environ.get("PYLINT_OUTPUT")
    pylint_cfg = repo_path() + "/pylintrc"

    check_files = get_pylint_files(base_path, "*")
    if not check_files:
        print("Nothing to do...")
        return 0  # nothing to do

    os.putenv("TEST_PATH", repo_path() + "/tests")
    cmd = 'pylint --rcfile="%s" %s%s' % (pylint_cfg, pylint_args, " ".join(check_files))
    print("Running pylint with: %s" % cmd)
    p = subprocess.Popen(cmd, stdout=subprocess.PIPE, shell=True, cwd=base_path)
    stdout = p.communicate()[0]

    if stdout.strip():
        if pylint_output:
            open(pylint_output, "a").write(stdout)
        else:
            print(stdout)

    exit_code = p.returncode
    print("Finished with exit code: %d" % exit_code)

    if exit_code == 0 and cleanup_test_dir:
        # Don't remove directory when specified via WORKDIR env
        if not os.environ.get("WORKDIR"):
            print("Removing build path...")
            shutil.rmtree(base_path)

    return exit_code

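# Hedged usage sketch for the run_pylint() variant above. The environment variable names come
# from the function body, but the concrete values and the base path are made-up examples, not
# taken from the source:
#
#   os.environ["PYLINT_ARGS"] = "--disable=missing-docstring"   # extra pylint options
#   os.environ["PYLINT_OUTPUT"] = "/tmp/pylint-report.txt"      # append report to a file
#   exit_code = run_pylint("/tmp/pylint-workdir", cleanup_test_dir=True)
#   assert exit_code == 0
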
def test_pylint_checks(): base_path = pylint_cmk.get_test_dir() f = file(base_path + "/cmk-checks.py", "w") # add the modules for path in pylint_cmk.ordered_module_files(): pylint_cmk.add_file(f, path) # Now add the checks for path in pylint_cmk.check_files(repo_path() + "/checks"): pylint_cmk.add_file(f, path) # Also add inventory plugins for path in pylint_cmk.check_files(repo_path() + "/inventory"): pylint_cmk.add_file(f, path) # Also add bakery plugins for path in pylint_cmk.check_files(os.path.realpath(repo_path() + "/../cmc/agents/bakery")): pylint_cmk.add_file(f, path) f.close() exit_code = pylint_cmk.run_pylint(base_path, cleanup_test_dir=True) assert exit_code == 0, "PyLint found an error in checks, inventory " \ "or agent bakery plugins"
def test_pylint_checks(): base_path = pylint_cmk.get_test_dir() f = file(base_path + "/cmk-checks.py", "w") # add the modules for path in pylint_cmk.ordered_module_files(): pylint_cmk.add_file(f, path) # Now add the checks for path in pylint_cmk.check_files(repo_path() + "/checks"): pylint_cmk.add_file(f, path) # Also add inventory plugins for path in pylint_cmk.check_files(repo_path() + "/inventory"): pylint_cmk.add_file(f, path) # Also add bakery plugins for path in pylint_cmk.check_files(cmc_path() + "/agents/bakery"): pylint_cmk.add_file(f, path) f.close() exit_code = pylint_cmk.run_pylint(base_path, cleanup_test_dir=True) assert exit_code == 0, "PyLint found an error in checks, inventory " \ "or agent bakery plugins"
def test_cfg(web, site):
    print("Applying default config")
    web.add_host("modes-test-host", attributes={
        "ipaddress": "127.0.0.1",
    })
    web.add_host("modes-test-host2", attributes={
        "ipaddress": "127.0.0.1",
        "tag_criticality": "test",
    })
    web.add_host("modes-test-host3", attributes={
        "ipaddress": "127.0.0.1",
        "tag_criticality": "test",
    })
    web.add_host("modes-test-host4", attributes={
        "ipaddress": "127.0.0.1",
        "tag_criticality": "offline",
    })

    site.write_file(
        "etc/check_mk/conf.d/modes-test-host.mk",
        "datasource_programs.append(('cat ~/var/check_mk/agent_output/<HOST>', [], ALL_HOSTS))\n")

    site.makedirs("var/check_mk/agent_output/")
    site.write_file(
        "var/check_mk/agent_output/modes-test-host",
        open("%s/tests/integration/cmk_base/test-files/linux-agent-output" % repo_path()).read())
    site.write_file(
        "var/check_mk/agent_output/modes-test-host2",
        open("%s/tests/integration/cmk_base/test-files/linux-agent-output" % repo_path()).read())
    site.write_file(
        "var/check_mk/agent_output/modes-test-host3",
        open("%s/tests/integration/cmk_base/test-files/linux-agent-output" % repo_path()).read())

    web.discover_services("modes-test-host")
    web.discover_services("modes-test-host2")
    web.discover_services("modes-test-host3")

    web.activate_changes()

    yield None

    #
    # Cleanup code
    #
    print("Cleaning up test config")

    site.delete_dir("var/check_mk/agent_output")
    site.delete_file("etc/check_mk/conf.d/modes-test-host.mk")

    web.delete_host("modes-test-host")
    web.delete_host("modes-test-host2")
    web.delete_host("modes-test-host3")
    web.delete_host("modes-test-host4")

def module_files():
    modules = []
    for base_path in [
            repo_path() + "/modules",
            os.path.realpath(repo_path() + "/../cmc/modules"),
    ]:
        modules += [base_path + "/" + f for f in os.listdir(base_path) if not f.startswith(".")]
    return sorted(modules)

def _get_files_to_check(pylint_test_dir):
    p = subprocess.Popen(
        ["%s/scripts/find-python-files" % repo_path(),
         str(sys.version_info[0])],
        stdout=subprocess.PIPE,
        encoding="utf-8",
        shell=False,
        close_fds=True,
    )

    stdout = p.communicate()[0]

    files = []
    for fname in stdout.splitlines():
        # Thin out these excludes some day...
        rel_path = fname[len(repo_path()) + 1:]

        # Can currently not be checked alone. Are compiled together below
        if rel_path.startswith("checks/") or \
           rel_path.startswith("inventory/") or \
           rel_path.startswith("agents/bakery/") or \
           rel_path.startswith("enterprise/agents/bakery/"):
            continue

        # TODO: We should also test them...
        if rel_path == "werk" \
           or rel_path.startswith("tests/") \
           or rel_path.startswith("scripts/") \
           or rel_path.startswith("agents/wnx/integration/"):
            continue

        # TODO: disable random, not that important stuff
        if rel_path.startswith("agents/windows/it/") \
           or rel_path.startswith("agents/windows/msibuild/") \
           or rel_path.startswith("doc/") \
           or rel_path.startswith("livestatus/api/python/example") \
           or rel_path.startswith("livestatus/api/python/make_"):
            continue

        files.append(fname)

    # Add the compiled files for things that are no modules yet
    open(pylint_test_dir + "/__init__.py", "w")

    _compile_check_and_inventory_plugins(pylint_test_dir)

    if is_enterprise_repo():
        _compile_bakery_plugins(pylint_test_dir)

    # Not checking compiled check, inventory, bakery plugins with Python 3
    if sys.version_info[0] == 2:
        files += [
            pylint_test_dir,
        ]

    return files

def _compile_check_and_inventory_plugins(pylint_test_dir):
    for idx, f_name in enumerate(pylint_cmk.check_files(repo_path() + "/checks")):
        with stand_alone_template(pylint_test_dir + "/cmk_checks_%s.py" % idx) as file_handle:
            pylint_cmk.add_file(file_handle, f_name)

    with stand_alone_template(pylint_test_dir + "/cmk_checks.py") as file_handle:
        pylint_cmk.add_file(file_handle, repo_path() + "/cmk/base/inventory_plugins.py")
        for path in pylint_cmk.check_files(repo_path() + "/inventory"):
            pylint_cmk.add_file(file_handle, path)

def _compile_bakery_plugins(pylint_test_dir):
    with open(pylint_test_dir + "/cmk_bakery_plugins.py", "w") as f:
        # This pylint warning is incompatible with our "concatenation technology".
        f.write("# pylint: disable=reimported,wrong-import-order,wrong-import-position\n")

        pylint_cmk.add_file(
            f,
            os.path.realpath(
                os.path.join(repo_path(), "enterprise/cmk/base/cee/agent_bakery_plugins.py")))

        # Also add bakery plugins
        for path in pylint_cmk.check_files(os.path.join(repo_path(), "enterprise/agents/bakery")):
            pylint_cmk.add_file(f, path)

def test_check_tests_symlinks():
    # TODO: After complete Python 3 migration we can remove this
    pattern = 'test_*.py'
    py2_check_tests = set(
        p.name
        for p in Path(testlib.repo_path()).joinpath(Path('tests/unit/checks')).glob(pattern))
    py3_check_tests = set(
        p.name
        for p in Path(testlib.repo_path()).joinpath(Path('tests-py3/unit/checks')).glob(pattern))
    assert py2_check_tests.issubset(
        py3_check_tests
    ), "Forgot to implement/symlink related Python 3 check test: %s" % ", ".join(
        py2_check_tests - py3_check_tests)

def test_py2_check_tests():
    check_tests = Path(testlib.repo_path()).joinpath(Path('tests/unit/checks'))
    generic_check_tests = Path(testlib.repo_path()).joinpath(
        Path('tests/unit/checks/generictests/datasets'))

    if check_tests.exists():
        py2_check_tests = set(p.name for p in check_tests.glob('test_*.py'))
        assert py2_check_tests == set(), \
            "Found deprecated Python 2 check tests: %s" % ", ".join(py2_check_tests)

    if generic_check_tests.exists():
        py2_generic_check_tests = set(p.name for p in generic_check_tests.glob('*.py'))
        assert py2_generic_check_tests == set(), \
            "Found deprecated Python 2 generic check tests: %s" % ", ".join(py2_generic_check_tests)

def local_test_hosts(web, site):
    site.makedirs("var/check_mk/agent_output/")

    web.add_hosts([
        ("test-host", "", {
            "ipaddress": "127.0.0.1",
        }),
        ("test-host2", "xy/zzz", {
            "ipaddress": "127.0.0.1",
        }),
    ])

    site.write_file(
        "etc/check_mk/conf.d/local-test-hosts.mk",
        "datasource_programs.append(('cat ~/var/check_mk/agent_output/<HOST>', [], ['test-host', 'test-host2']))\n"
    )

    for hostname in ["test-host", "test-host2"]:
        site.write_file(
            "var/check_mk/agent_output/%s" % hostname,
            open("%s/tests/integration/cmk/base/test-files/linux-agent-output" %
                 repo_path()).read())

    yield

    for hostname in ["test-host", "test-host2"]:
        web.delete_host(hostname)
        site.delete_file("var/check_mk/agent_output/%s" % hostname)
    site.delete_file("etc/check_mk/conf.d/local-test-hosts.mk")

def ordered_module_files():
    ordered_modules = [
        "modules/check_mk_base.py",
        "modules/check_mk.py",
        "modules/config.py",
        "modules/discovery.py",
        "modules/snmp.py",
        "modules/notify.py",
        "modules/events.py",
        "modules/nagios.py",
        "modules/automation.py",
        "modules/inventory.py",
        "../cmc/modules/real_time_checks.py",
        "../cmc/modules/alert_handling.py",
        "../cmc/modules/keepalive.py",
        "../cmc/modules/cmc.py",
        "../cmc/modules/inline_snmp.py",
        "../cmc/modules/agent_bakery.py",
        "../cmc/modules/cap.py",
        "../cmc/modules/rrd.py",
    ]

    modules = [os.path.realpath(repo_path() + "/" + p) for p in ordered_modules]

    # Add modules which are not specified above
    for path in module_files():
        if path not in modules:
            modules.append(path)

    return modules

def test_paths_in_omd_and_opt_root(monkeypatch):
    omd_root = '/omd/sites/dingeling'
    with monkeypatch.context() as m:
        m.setitem(os.environ, 'OMD_ROOT', omd_root)
        test_paths = import_module("%s/cmk/utils/paths.py" % repo_path())
    _check_paths(omd_root, test_paths)

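# The import_module() helper used in the test above is not defined in this section. Below is a
# minimal sketch of such a "load a module from an arbitrary file path" helper built on the
# standard library; the helper name and its exact behaviour in the test suite are assumptions.
import importlib.util
import os


def import_module(pathname):
    """Load and return the Python module stored at pathname (sketch, not the real helper)."""
    name = os.path.splitext(os.path.basename(pathname))[0]
    spec = importlib.util.spec_from_file_location(name, pathname)
    module = importlib.util.module_from_spec(spec)
    spec.loader.exec_module(module)
    return module
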
def test_py2_inv_plugins_tests():
    inv_plugin_tests = Path(testlib.repo_path()).joinpath(Path('tests/unit/inventory'))
    py2_inv_plugin_tests = set(p.name for p in inv_plugin_tests.glob('test_*.py'))
    if inv_plugin_tests.exists():
        assert py2_inv_plugin_tests == set(), \
            "Found deprecated Python 2 inventory plugin tests: %s" % ", ".join(py2_inv_plugin_tests)

def ordered_module_files():
    ordered_modules = [
        "modules/check_mk_base.py",
        "modules/check_mk.py",
        "modules/config.py",
        "modules/discovery.py",
        "modules/snmp.py",
        "modules/notify.py",
        "modules/events.py",
        "modules/nagios.py",
        "modules/automation.py",
        "modules/inventory.py",
        "../cmc/modules/real_time_checks.py",
        "../cmc/modules/alert_handling.py",
        "../cmc/modules/keepalive.py",
        "../cmc/modules/cmc.py",
        "../cmc/modules/inline_snmp.py",
        "../cmc/modules/agent_bakery.py",
        "../cmc/modules/cap.py",
        "../cmc/modules/rrd.py",
    ]

    modules = [os.path.realpath(repo_path() + "/" + p) for p in ordered_modules]

    # Add modules which are not specified above
    for path in module_files():
        if path not in modules:
            modules.append(path)

    return modules

def test_calculate_data_for_prediction(cfg_setup, utcdate, timezone, params):
    period_info = prediction.prediction_periods[params['period']]
    with on_time(utcdate, timezone):
        now = int(time.time())
        groupby = period_info["groupby"]
        assert callable(groupby)
        timegroup = groupby(now)[0]

        time_windows = prediction.time_slices(now, int(params["horizon"] * 86400), period_info,
                                              timegroup)

    hostname, service_description, dsname = 'test-prediction', "CPU load", 'load15'
    rrd_datacolumn = cmk.utils.prediction.rrd_datacolum(hostname, service_description, dsname,
                                                        "MAX")
    data_for_pred = prediction.calculate_data_for_prediction(time_windows, rrd_datacolumn)

    path = "%s/tests-py3/integration/cmk/base/test-files/%s/%s" % (repo_path(), timezone,
                                                                   timegroup)
    reference = cmk.utils.prediction.retrieve_data_for_prediction(path, timegroup)

    data_points = data_for_pred.pop('points')
    assert reference is not None
    ref_points = reference.pop('points')
    for cal, ref in zip(data_points, ref_points):
        assert cal == pytest.approx(ref, rel=1e-12, abs=1e-12)

    assert data_for_pred == reference

def test_cfg(monkeypatch):
    test_hosts = ["ds-test-host1", "ds-test-host2", "ds-test-node1", "ds-test-node2"]

    ts = Scenario()

    for h in test_hosts:
        ts.add_host(h)

    ts.set_option("ipaddresses", dict((h, "127.0.0.1") for h in test_hosts))

    ts.add_cluster("ds-test-cluster1", nodes=["ds-test-node1", "ds-test-node2"])

    ts.set_ruleset("datasource_programs", [
        ('cat %s/<HOST>' % cmk.utils.paths.tcp_cache_dir, [], test_hosts, {}),
    ])

    # Read as bytes so the explicit utf-8 decode works the same on Python 2 and 3
    with open("%s/tests/integration/cmk/base/test-files/linux-agent-output" % repo_path(),
              "rb") as f:
        linux_agent_output = f.read().decode("utf-8")

    for h in test_hosts:
        cache_path = Path(cmk.utils.paths.tcp_cache_dir, h)
        cache_path.parent.mkdir(parents=True, exist_ok=True)  # pylint: disable=no-member

        with cache_path.open("w", encoding="utf-8") as f:
            f.write(linux_agent_output)

    return ts.apply(monkeypatch)

def test_calculate_data_for_prediction(cfg_setup, utcdate, timezone, params):
    period_info = prediction._PREDICTION_PERIODS[params['period']]
    with on_time(utcdate, timezone):
        now = int(time.time())
        assert callable(period_info.groupby)
        timegroup = period_info.groupby(now)[0]

        time_windows = prediction._time_slices(now, int(params["horizon"] * 86400), period_info,
                                               timegroup)

    hostname, service_description, dsname = 'test-prediction', "CPU load", 'load15'
    rrd_datacolumn = cmk.utils.prediction.rrd_datacolum(hostname, service_description, dsname,
                                                        "MAX")
    data_for_pred = prediction._calculate_data_for_prediction(time_windows, rrd_datacolumn)

    expected_reference = _load_expected_result(
        "%s/tests/integration/cmk/base/test-files/%s/%s" % (repo_path(), timezone, timegroup))

    assert isinstance(expected_reference, dict)
    assert sorted(data_for_pred) == sorted(expected_reference)
    for key in data_for_pred:
        if key == "points":
            for cal, ref in zip(data_for_pred['points'], expected_reference['points']):
                assert cal == pytest.approx(ref, rel=1e-12, abs=1e-12)
        else:
            # TypedDict key must be a string literal
            assert data_for_pred[key] == expected_reference[key]  # type: ignore[misc]

def _site_id():
    site_id = os.environ.get("OMD_SITE")
    if site_id is None:
        site_id = open(testlib.repo_path() + "/.site").read().strip()
        os.putenv("OMD_SITE", site_id)
    return site_id

def graph_test_config(web, site):
    # No graph yet...
    with pytest.raises(APIError) as exc_info:
        web.get_regular_graph("test-host-get-graph", "Check_MK", 0, expect_error=True)
    assert "Cannot calculate graph recipes" in "%s" % exc_info

    try:
        # Now add the host
        web.add_host("test-host-get-graph", attributes={
            "ipaddress": "127.0.0.1",
        })

        site.write_file(
            "etc/check_mk/conf.d/test-host-get-graph.mk",
            "datasource_programs.append(('cat ~/var/check_mk/agent_output/<HOST>', [], ['test-host-get-graph']))\n"
        )

        site.makedirs("var/check_mk/agent_output/")
        site.write_file(
            "var/check_mk/agent_output/test-host-get-graph",
            open("%s/tests/integration/cmk/base/test-files/linux-agent-output" %
                 repo_path()).read())

        web.discover_services("test-host-get-graph")
        web.activate_changes()
        site.schedule_check("test-host-get-graph", "Check_MK", 0)

        # Wait for RRD file creation. Isn't this a bug that the graph is not instantly available?
        rrd_path = site.path("var/check_mk/rrd/test-host-get-graph/Check_MK.rrd")
        for attempt in xrange(50):
            time.sleep(0.1)
            proc = subprocess.Popen(
                [site.path("bin/unixcat"), site.path("tmp/run/rrdcached.sock")],
                stdin=subprocess.PIPE,
                stdout=subprocess.PIPE,
                stderr=subprocess.PIPE)
            out, err = proc.communicate("FLUSH %s\n" % rrd_path)
            if os.path.exists(rrd_path):
                break
            sys.stdout.write("waiting for %s (attempt %d)%s%s\n" % (
                rrd_path,
                attempt + 1,  #
                ", stdout: %s" % out if out else "",
                ", stderr: %s" % err if err else ""))
        else:
            assert False, "RRD file %s missing" % rrd_path

        yield
    finally:
        web.delete_host("test-host-get-graph")
        site.delete_file("etc/check_mk/conf.d/test-host-get-graph.mk")
        web.activate_changes()

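# Note on the wait loop in graph_test_config() above: sending a "FLUSH <rrd_file>" command to
# the rrdcached socket (here via the site's unixcat helper) asks rrdcached to write any cached
# updates for that RRD to disk, so the os.path.exists() check does not race against the
# daemon's write-back cache. The site path below is only an illustrative example:
#
#   FLUSH /omd/sites/mysite/var/check_mk/rrd/test-host-get-graph/Check_MK.rrd
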
def run_pylint(base_path, check_files):
    args = os.environ.get("PYLINT_ARGS", "")
    if args:
        pylint_args = args.split(" ")
    else:
        pylint_args = []

    pylint_cfg = repo_path() + "/.pylintrc"

    cmd = [
        "python",
        "-m",
        "pylint",
        "--rcfile",
        pylint_cfg,
        "--jobs=%d" % num_jobs_to_use(),
    ] + pylint_args + check_files

    print("Running pylint in '%s' with: %s" % (base_path, subprocess.list2cmdline(cmd)))
    p = subprocess.Popen(cmd, shell=False, cwd=base_path)
    exit_code = p.wait()
    print("Finished with exit code: %d" % exit_code)

    return exit_code

def _git_repos():
    # This ensures that we can also work with git-worktrees. For this, the original git repository
    # needs to be mapped into the container as well.
    repo_path = testlib.repo_path()
    git_entry = os.path.join(repo_path, '.git')
    repos = {
        # To get access to the test scripts and for updating the version from
        # the current git checkout. Will also be used for updating the image with
        # the current git state
        repo_path: {
            "bind": "/git-lowerdir",
            "mode": "ro",
        },
    }
    if os.path.isfile(git_entry):  # if not, it's a directory
        with open(git_entry, "r") as f:
            real_path = f.read()
            real_path = real_path[8:]  # skip "gitdir: "
            real_path = real_path.split("/.git")[0]  # cut off .git/...

        repos[real_path] = {
            "bind": real_path,
            "mode": "ro",
        }
    return repos

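# Demonstration of the ".git" worktree-file parsing done in _git_repos() above. The example
# content is made up; only the "gitdir: " prefix is real git behaviour for worktree checkouts.
worktree_git_file = "gitdir: /home/user/checkmk/.git/worktrees/my-feature-branch\n"
real_path = worktree_git_file[8:]  # skip "gitdir: "
real_path = real_path.split("/.git")[0]  # cut off ".git/worktrees/..."
assert real_path == "/home/user/checkmk"
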
def _compile_check_and_inventory_plugins(pylint_test_dir):
    with open(pylint_test_dir + "/cmk_checks.py", "w") as f:

        # Fake data structures where checks register (See cmk/base/checks.py)
        f.write("""
# -*- encoding: utf-8 -*-
check_info = {}
check_includes = {}
precompile_params = {}
check_default_levels = {}
factory_settings = {}
check_config_variables = []
snmp_info = {}
snmp_scan_functions = {}
active_check_info = {}
special_agent_info = {}

inv_info = {}  # Inventory plugins
inv_export = {}  # Inventory export hooks

def inv_tree_list(path):
    return inv_tree(path, [])

def inv_tree(path, default_value=None):
    if default_value is not None:
        node = default_value
    else:
        node = {}
    return node
""")

        # add the modules
        # These pylint warnings are incompatible with our "concatenation technology".
        f.write(
            "# pylint: disable=reimported,ungrouped-imports,wrong-import-order,wrong-import-position,redefined-outer-name\n"
        )
        pylint_cmk.add_file(f, repo_path() + "/cmk/base/check_api.py")
        pylint_cmk.add_file(f, repo_path() + "/cmk/base/inventory_plugins.py")

        # Now add the checks
        for path in pylint_cmk.check_files(repo_path() + "/checks"):
            pylint_cmk.add_file(f, path)

        # Now add the inventory plugins
        for path in pylint_cmk.check_files(repo_path() + "/inventory"):
            pylint_cmk.add_file(f, path)

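# pylint_cmk.add_file() is the "concatenation technology" referred to in the comments above.
# Its real implementation is not shown in this section; the sketch below only illustrates the
# assumed idea: append each plugin file behind a marker comment so that pylint findings can be
# traced back to the original file.
def add_file(f, path):
    """Append the content of path to the already opened compile target f (assumption)."""
    f.write("\n")
    f.write("#\n# ORIG-FILE: " + path + "\n#\n\n")
    with open(path) as source:
        f.write(source.read())
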
def test_dump_agent_test(execute):
    for opt in ["--dump-agent", "-d"]:
        p = execute(["cmk", opt, "modes-test-host"])
        assert p.returncode == 0
        assert p.stderr == ""
        assert p.stdout == open("%s/tests/integration/cmk_base/test-files/linux-agent-output" %
                                repo_path()).read()

def test_pylint(pylint_test_dir, capsys):
    with capsys.disabled():
        print("\n")
        retcode = subprocess.call("python -m pylint --version".split(), shell=False)
        print()
    assert not retcode

    exit_code = pylint_cmk.run_pylint(repo_path(), _get_files_to_check(pylint_test_dir))
    assert exit_code == 0, "PyLint found an error"

def graph_test_config(web, site):
    # No graph yet...
    with pytest.raises(APIError) as e:
        web.get_regular_graph("test-host-get-graph", "Check_MK", 0, expect_error=True)
    assert "Cannot calculate graph recipes" in "%s" % e

    try:
        # Now add the host
        web.add_host("test-host-get-graph", attributes={
            "ipaddress": "127.0.0.1",
        })

        site.write_file(
            "etc/check_mk/conf.d/test-host-get-graph.mk",
            "datasource_programs.append(('cat ~/var/check_mk/agent_output/<HOST>', [], ['test-host-get-graph']))\n"
        )

        site.makedirs("var/check_mk/agent_output/")
        site.write_file(
            "var/check_mk/agent_output/test-host-get-graph",
            open("%s/tests/integration/cmk_base/test-files/linux-agent-output" %
                 repo_path()).read())

        web.discover_services("test-host-get-graph")
        web.activate_changes()
        site.schedule_check("test-host-get-graph", "Check_MK", 0)

        # Wait for RRD file creation
        # Isn't this a bug that the graph is not instantly available?
        timeout = 10
        print("Checking for graph...")
        while timeout and not site.file_exists(
                "var/check_mk/rrd/test-host-get-graph/Check_MK.rrd"):
            try:
                web.get_regular_graph("test-host-get-graph", "Check_MK", 0, expect_error=True)
            except Exception:
                pass
            timeout -= 1
            time.sleep(1)
            print("Checking for graph...")

        assert site.file_exists("var/check_mk/rrd/test-host-get-graph/Check_MK.rrd"), \
            "RRD %s is still missing" % "var/check_mk/rrd/test-host-get-graph/Check_MK.rrd"

        yield
    finally:
        web.delete_host("test-host-get-graph")
        site.delete_file("etc/check_mk/conf.d/test-host-get-graph.mk")
        web.activate_changes()

def test_dump_agent_test(test_cfg, site):
    for opt in ["--dump-agent", "-d"]:
        p = site.execute(["cmk", opt, "modes-test-host"],
                         stdout=subprocess.PIPE,
                         stderr=subprocess.PIPE)
        stdout, stderr = p.communicate()
        assert p.returncode == 0
        assert stderr == ""
        assert stdout == open("%s/tests/integration/cmk_base/test-files/linux-agent-output" %
                              repo_path()).read()

def test_inventory_plugin_header():
    for inventory_pluginfile in Path(testlib.repo_path()).joinpath(Path('inventory')).iterdir():
        if inventory_pluginfile.name.startswith("."):  # .f12
            continue
        with inventory_pluginfile.open() as f:
            shebang = f.readline().strip()
            encoding_header = f.readline().strip()
            assert shebang == "#!/usr/bin/env python3", \
                "Inventory plugin '%s' has wrong shebang '%s'" % (inventory_pluginfile.name,
                                                                  shebang)
            assert encoding_header == "# -*- coding: utf-8 -*-", \
                "Inventory plugin '%s' has wrong encoding header '%s'" % (
                    inventory_pluginfile.name, encoding_header)

def test_check_plugin_header(plugin_path: str):
    for plugin in Path(testlib.repo_path(), plugin_path).iterdir():
        if plugin.name.startswith("."):  # .f12
            continue
        with plugin.open() as handle:
            shebang = handle.readline().strip()
            encoding_header = handle.readline().strip()
        assert shebang == "#!/usr/bin/env python3", (
            f"Plugin '{plugin.name}' has wrong shebang '{shebang}'")
        assert encoding_header == "# -*- coding: utf-8 -*-", (
            f"Plugin '{plugin.name}' has wrong encoding header '{encoding_header}'")

def pytest_collection_modifyitems(items):
    """Mark collected test types based on their location"""
    for item in items:
        type_marker = item.get_closest_marker("type")
        if type_marker and type_marker.args:
            continue  # Do not modify manually set marks

        file_path = Path("%s" % item.reportinfo()[0])
        repo_rel_path = file_path.relative_to(testlib.repo_path())
        ty = repo_rel_path.parts[1]
        if ty not in test_types:
            raise Exception("Test in %s not TYPE marked: %r (%r)" % (repo_rel_path, item, ty))

        item.add_marker(pytest.mark.type.with_args(ty))

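# The hook above skips tests that already carry a "type" marker with an argument, so a test
# module can pin its own type explicitly. The marker value "unit" is only an illustrative
# assumption about what the test_types list contains:
#
#   import pytest
#   pytestmark = pytest.mark.type("unit")
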
def run_pylint(base_path, check_files=None):  #, cleanup_test_dir=False):
    args = os.environ.get("PYLINT_ARGS", "")
    if args:
        pylint_args = args.split(" ")
    else:
        pylint_args = []

    pylint_cfg = repo_path() + "/.pylintrc"

    if not check_files:
        check_files = get_pylint_files(base_path, "*")
        if not check_files:
            print("Nothing to do...")
            return 0  # nothing to do

    cmd = [
        "python",
        "-m",
        "pylint",
        "--rcfile",
        pylint_cfg,
        "--jobs=%d" % num_jobs_to_use(),
    ] + pylint_args + check_files

    os.putenv("TEST_PATH", repo_path() + "/tests")
    print("Running pylint in '%s' with: %s" % (base_path, subprocess.list2cmdline(cmd)))
    p = subprocess.Popen(cmd, shell=False, cwd=base_path)
    exit_code = p.wait()
    print("Finished with exit code: %d" % exit_code)

    #if exit_code == 0 and cleanup_test_dir:
    #    # Don't remove directory when specified via WORKDIR env
    #    if not os.environ.get("WORKDIR"):
    #        print("Removing build path...")
    #        shutil.rmtree(base_path)

    return exit_code

def test_pylint_misc():
    search_paths = [
        "lib",
        "bin",
        "notifications",
        "agents/plugins",
        "doc/treasures/active_checks",
        "../cmc/agents/plugins",
        "../cmc/bin",
        "../cmc/misc",
    ]

    checked, worst = 0, 0
    for rel_path in search_paths:
        path = os.path.realpath(repo_path() + "/" + rel_path)
        worst = max(worst, pylint_cmk.run_pylint(path))
        checked += 1

    assert checked > 0, "Did not find a file to check!"
    assert worst == 0, "At least one issue found"

def site_id():
    site_id = os.environ.get("SITE")
    if site_id is None:
        site_id = open(testlib.repo_path() + "/.site").read().strip()
    return site_id
