def test_perfdocs_generator_save_perfdocs_fail(
    logger, shutil, structured_logger, perfdocs_sample
):
    top_dir = perfdocs_sample["top_dir"]
    setup_sample_logger(logger, structured_logger, top_dir)

    templates_dir = os.path.join(top_dir, "tools", "lint", "perfdocs", "templates")
    os.makedirs(templates_dir)

    from perfdocs.generator import Generator
    from perfdocs.verifier import Verifier

    verifier = Verifier(top_dir)
    verifier.validate_tree()

    generator = Generator(verifier, generate=True, workspace=top_dir)
    with temp_file("index.rst", tempdir=templates_dir, content="{test_documentation}"):
        perfdocs_tmpdir = generator._create_perfdocs()
        # Force the copy to fail so the critical log path is exercised
        shutil.copytree = mock.Mock(side_effect=Exception())
        generator._save_perfdocs(perfdocs_tmpdir)

    expected = "There was an error while saving the documentation: "
    args, _ = logger.critical.call_args
    assert logger.critical.call_count == 1
    assert args[0] == expected


def test_perfdocs_generator_save_perfdocs_pass(
    logger, structured_logger, perfdocs_sample
):
    top_dir = perfdocs_sample["top_dir"]
    setup_sample_logger(logger, structured_logger, top_dir)

    templates_dir = os.path.join(top_dir, "tools", "lint", "perfdocs", "templates")
    os.makedirs(templates_dir)

    from perfdocs.generator import Generator
    from perfdocs.verifier import Verifier

    verifier = Verifier(top_dir)
    verifier.validate_tree()

    generator = Generator(verifier, generate=True, workspace=top_dir)
    assert not os.path.isdir(generator.perfdocs_path)

    with temp_file("index.rst", tempdir=templates_dir, content="{test_documentation}"):
        perfdocs_tmpdir = generator._create_perfdocs()
        generator._save_perfdocs(perfdocs_tmpdir)

    expected = ["index.rst", "mozperftest.rst"]
    files = sorted(os.listdir(generator.perfdocs_path))
    assert files == expected


def test_perfdocs_generator_created_perfdocs(logger, structured_logger, perfdocs_sample):
    top_dir = perfdocs_sample["top_dir"]
    setup_sample_logger(logger, structured_logger, top_dir)

    templates_dir = os.path.join(top_dir, "tools", "lint", "perfdocs", "templates")
    os.makedirs(templates_dir)

    from perfdocs.generator import Generator
    from perfdocs.verifier import Verifier

    verifier = Verifier(top_dir)
    verifier.validate_tree()

    generator = Generator(verifier, generate=True, workspace=top_dir)
    with temp_file("index.rst", tempdir=templates_dir, content="{test_documentation}"):
        perfdocs_tmpdir = generator._create_perfdocs()

    expected_files = ["index.rst", "mozperftest.rst"]
    files = sorted(os.listdir(perfdocs_tmpdir))
    assert files == expected_files

    with open(os.path.join(perfdocs_tmpdir, expected_files[0])) as f:
        filedata = f.read()
    assert filedata == " * :doc:`mozperftest`"


def test_perfdocs_framework_gatherers_urls(logger, structured_logger, perfdocs_sample):
    top_dir = perfdocs_sample["top_dir"]
    setup_sample_logger(logger, structured_logger, top_dir)

    from perfdocs.gatherer import frameworks
    from perfdocs.generator import Generator
    from perfdocs.utils import read_yaml
    from perfdocs.verifier import Verifier

    # This test is only for the raptor framework
    gatherer = frameworks["raptor"]
    with open(perfdocs_sample["config"], "w") as f:
        f.write(DYNAMIC_SAMPLE_CONFIG.format("raptor"))

    fg = gatherer(perfdocs_sample["config_2"], top_dir)
    fg.get_suite_list = mock.Mock()
    fg.get_suite_list.return_value = {
        "suite": [perfdocs_sample["example1_manifest"]],
        "another_suite": [perfdocs_sample["example2_manifest"]],
    }

    v = Verifier(top_dir)
    gn = Generator(v, generate=True, workspace=top_dir)

    # Check that when a test is present under multiple suites, the URLs
    # are generated correctly for the test under every suite
    for suite, suitetests in fg.get_test_list().items():
        url = fg._urls.get(suite)
        assert url is not None
        assert url[0]["test_name"] == "Example"
        assert url[0]["url"] == "Example_url"

    perfdocs_tree = gn._perfdocs_tree[0]
    yaml_content = read_yaml(os.path.join(perfdocs_tree["path"], perfdocs_tree["yml"]))
    suites = yaml_content["suites"]

    # Check that the sections for each suite are generated correctly
    for suite_name, suite_details in suites.items():
        gn._verifier._gatherer = mock.Mock(framework_gatherers={"raptor": gatherer})
        section = gn._verifier._gatherer.framework_gatherers[
            "raptor"
        ].build_suite_section(fg, suite_name, suites.get(suite_name)["description"])
        assert suite_name.capitalize() == section[0]
        assert suite_name in section[2]

        tests = suites.get(suite_name).get("tests", {})
        for test_name in tests.keys():
            desc = gn._verifier._gatherer.framework_gatherers[
                "raptor"
            ].build_test_description(fg, test_name, tests[test_name], suite_name)
            assert suite_name in desc[0]
            assert test_name in desc[0]


def test_perfdocs_generator_create_temp_dir(logger, structured_logger, perfdocs_sample):
    top_dir = perfdocs_sample["top_dir"]
    setup_sample_logger(logger, structured_logger, top_dir)

    from perfdocs.generator import Generator
    from perfdocs.verifier import Verifier

    verifier = Verifier(top_dir)
    verifier.validate_tree()

    generator = Generator(verifier, generate=True, workspace=top_dir)
    tmpdir = generator._create_temp_dir()

    assert os.path.isdir(tmpdir)


def test_perfdocs_generator_needed_regeneration(
    logger, structured_logger, perfdocs_sample
):
    top_dir = perfdocs_sample["top_dir"]
    setup_sample_logger(logger, structured_logger, top_dir)

    from perfdocs.generator import Generator
    from perfdocs.verifier import Verifier

    verifier = Verifier(top_dir)
    verifier.validate_tree()

    # With generate=False and no existing docs, generate_perfdocs should
    # only warn that the docs are missing
    generator = Generator(verifier, generate=False, workspace=top_dir)
    generator.generate_perfdocs()

    expected = "PerfDocs need to be regenerated."
    args, _ = logger.warning.call_args
    assert logger.warning.call_count == 1
    assert args[0] == expected


def test_perfdocs_generator_build_perfdocs(logger, structured_logger, perfdocs_sample):
    top_dir = perfdocs_sample["top_dir"]
    setup_sample_logger(logger, structured_logger, top_dir)

    from perfdocs.generator import Generator
    from perfdocs.verifier import Verifier

    verifier = Verifier(top_dir)
    verifier.validate_tree()

    generator = Generator(verifier, generate=True, workspace=top_dir)
    frameworks_info = generator.build_perfdocs_from_tree()

    expected = ["dynamic", "static"]
    for framework in sorted(frameworks_info.keys()):
        for i, framework_info in enumerate(frameworks_info[framework]):
            assert framework_info == expected[i]


def test_perfdocs_generator_generate_perfdocs_pass(
    logger, structured_logger, perfdocs_sample
):
    top_dir = perfdocs_sample["top_dir"]
    setup_sample_logger(logger, structured_logger, top_dir)

    templates_dir = os.path.join(top_dir, "tools", "lint", "perfdocs", "templates")
    os.makedirs(templates_dir)

    from perfdocs.generator import Generator
    from perfdocs.verifier import Verifier

    verifier = Verifier(top_dir)
    verifier.validate_tree()

    generator = Generator(verifier, generate=True, workspace=top_dir)
    with temp_file("index.rst", tempdir=templates_dir, content="{test_documentation}"):
        generator.generate_perfdocs()

    assert logger.warning.call_count == 0


def test_perfdocs_generator_create_temp_dir_fail(
    logger, os, structured_logger, perfdocs_sample
):
    top_dir = perfdocs_sample["top_dir"]
    setup_sample_logger(logger, structured_logger, top_dir)

    from perfdocs.generator import Generator
    from perfdocs.verifier import Verifier

    verifier = Verifier(top_dir)
    verifier.validate_tree()

    generator = Generator(verifier, generate=True, workspace=top_dir)

    # Force directory creation to fail so the critical log path is exercised
    os.mkdir = mock.Mock(side_effect=OSError())
    os.path.isdir.return_value = False
    tmpdir = generator._create_temp_dir()

    expected = "Error creating temp file: "
    args, _ = logger.critical.call_args
    assert not tmpdir
    assert logger.critical.call_count == 1
    assert args[0] == expected


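# The `os` and `shutil` parameters in the failure tests above are pytest
# fixtures (presumably provided by this suite's conftest) that stand in for
# the modules used inside perfdocs.generator, so that calls such as
# shutil.copytree and os.mkdir can be made to raise. A minimal sketch of
# such a fixture, assuming mock.patch and that perfdocs.generator imports
# shutil directly -- the patch target and fixture body are assumptions, and
# the sketch is kept commented out so it does not shadow the real fixtures:
#
#     @pytest.fixture
#     def shutil():
#         with mock.patch("perfdocs.generator.shutil") as patched_shutil:
#             yield patched_shutil

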
def test_perfdocs_generator_needed_update(logger, structured_logger, perfdocs_sample):
    top_dir = perfdocs_sample["top_dir"]
    setup_sample_logger(logger, structured_logger, top_dir)

    templates_dir = os.path.join(top_dir, "tools", "lint", "perfdocs", "templates")
    os.makedirs(templates_dir)

    from perfdocs.generator import Generator
    from perfdocs.verifier import Verifier

    # Initialize the perfdocs
    verifier = Verifier(top_dir)
    verifier.validate_tree()

    generator = Generator(verifier, generate=True, workspace=top_dir)
    with temp_file("index.rst", tempdir=templates_dir, content="{test_documentation}"):
        generator.generate_perfdocs()

        # Remove the generated files to simulate outdated docs,
        # then run again without generating
        generator._generate = False
        for f in os.listdir(generator.perfdocs_path):
            os.remove(os.path.join(generator.perfdocs_path, f))
        generator.generate_perfdocs()

    # The expected string must match the generator's warning verbatim
    # (including the unbalanced backtick)
    expected = (
        "PerfDocs are outdated, run ./mach lint -l perfdocs --fix` to update them."
    )
    args, _ = logger.warning.call_args
    assert logger.warning.call_count == 1
    assert args[0] == expected


def run_perfdocs(config, logger=None, paths=None, generate=True):
    """Build up performance testing documentation dynamically by combining
    text data from YAML files that reside in `perfdocs` folders across the
    `testing` directory. Each directory is expected to have an `index.rst`
    file along with `config.yml` YAMLs defining what needs to be added to
    the documentation.

    The YAML must also define the name of the "framework" that should be
    used in the main index.rst for the performance testing documentation.

    The testing documentation list will be ordered alphabetically once
    it's produced (to avoid unwanted shifts because of unordered dicts
    and path searching).

    Note that the suite name headings will be given the H4 (---) style, so
    it is suggested that you use the H3 (===) style as the heading for your
    test section. H5 will be used for individual tests within each suite.

    Usage for verification: ./mach lint -l perfdocs
    Usage for generation: ./mach lint -l perfdocs --fix

    For validation, see the Verifier class for a description of how
    it works.

    The run will fail if validate_tree logs any warning or problem,
    i.e. if PerfDocLogger.FAILED is set.

    :param dict config: The configuration given by mozlint.
    :param StructuredLogger logger: The StructuredLogger instance to be used
        to output the linting warnings/errors.
    :param list paths: The paths that are being tested. Used to filter out
        errors from files outside of these paths.
    :param bool generate: If true, the docs will be (re)generated.
    """
    from perfdocs.logger import PerfDocLogger

    top_dir = os.environ.get("WORKSPACE", None)
    if not top_dir:
        floc = os.path.abspath(__file__)
        top_dir = floc.split("tools")[0]
    # Escape backslashes (Windows paths) because top_dir is used as a
    # regex pattern below
    top_dir = top_dir.replace("\\", "\\\\")

    PerfDocLogger.LOGGER = logger
    PerfDocLogger.TOP_DIR = top_dir

    # Convert all the paths to relative ones
    rel_paths = [re.sub(top_dir, "", path) for path in paths]
    PerfDocLogger.PATHS = rel_paths

    target_dir = [os.path.join(top_dir, i) for i in rel_paths]
    for path in target_dir:
        if not os.path.exists(path):
            raise Exception("Cannot locate directory at %s" % path)

    # Late import: PerfDocLogger.LOGGER must be set before these modules,
    # which instantiate PerfDocLogger, are imported
    from perfdocs.generator import Generator
    from perfdocs.verifier import Verifier

    # Run the verifier first
    verifier = Verifier(top_dir)
    verifier.validate_tree()

    if not PerfDocLogger.FAILED:
        # Even if the tree is valid, we need to check if the documentation
        # needs to be regenerated, and if it does, we throw a linting error.
        # `generate` dictates whether or not the documentation is generated.
        generator = Generator(verifier, generate=generate, workspace=top_dir)
        generator.generate_perfdocs()
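

# For illustration only: a minimal sketch of the structure that a
# `config.yml` described in the docstring above would deserialize into
# (via perfdocs.utils.read_yaml). Only the `suites`, `description`, and
# `tests` keys are relied on by the tests in this file; the `name` key
# and all values shown here are hypothetical.
EXAMPLE_CONFIG = {
    "name": "example-framework",  # hypothetical framework name
    "suites": {
        "suite": {
            "description": "Description of the suite.",
            "tests": {
                "Example": "Description of the Example test.",
            },
        },
    },
}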