def _update_src(self, build_config):
    """Retrieve and/or check the existence of the files needed for the
    build. This can include pulling from URLs.

    :param dict build_config: The build configuration dictionary.
    :returns: The path to the source (or None if there is no source).
    """

    src_loc = build_config.get('source_location')
    if src_loc is None:
        return None

    # For URLs, check if the file needs to be updated, and try to do so.
    if self._isurl(src_loc):
        missing_libs = wget.missing_libs()
        if missing_libs:
            raise TestRunError(
                "The dependencies needed for remote source retrieval "
                "({}) are not available on this system. Please provide "
                "your test source locally."
                .format(', '.join(missing_libs)))

        dwn_name = build_config.get('source_download_name')
        src_dest = self._download_path(src_loc, dwn_name)

        wget.update(self._pav_cfg, src_loc, src_dest)

        return src_dest

    src_path = self._find_file(Path(src_loc), 'test_src')
    if src_path is None:
        raise TestRunError(
            "Could not find and update src location '{}'".format(src_loc))

    if src_path.is_dir():
        # For directories, update the directory's mtime to match the
        # latest mtime in the entire directory.
        self._date_dir(src_path)
        return src_path

    elif src_path.is_file():
        # For static files, we'll end up just hashing the whole thing.
        return src_path

    else:
        raise TestRunError("Source location '{}' points to something "
                           "unusable.".format(src_path))
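
# For reference, a minimal sketch of the build config keys this older version of
# _update_src consumes. Only the key names come from the code above; the URL and
# download name values are illustrative assumptions, not taken from the test data.
_example_legacy_build_config = {
    # Either a local path (looked up under a 'test_src' directory) or a URL.
    'source_location': 'https://example.com/archives/src.tar.gz',
    # Optional name to save a downloaded source under.
    'source_download_name': 'src.tar.gz',
}
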
class BuilderTests(PavTestCase):

    def setUp(self) -> None:
        plugins.initialize_plugins(self.pav_cfg)

    def tearDown(self) -> None:
        plugins._reset_plugins()

    def test_setup_build_dir(self):
        """Make sure we can correctly handle all of the various archive
        formats."""

        base_config = {
            'name': 'test',
            'scheduler': 'raw',
            'build': {
                'modules': ['gcc'],
            }
        }

        # Check that decompression and setup works for all accepted types.
        archives = [
            'src.tar.gz',
            'src.xz',
            # A bz2 archive
            'src.extensions_dont_matter',
            'src.zip',
            # These archives don't have a containing directory.
            'no_encaps.tgz',
            'no_encaps.zip',
            'softlink.zip',
            'foo/bar/deep.zip',
            '../outside.zip',
        ]

        test_archives = self.TEST_DATA_ROOT / 'pav_config_dir' / 'test_src'
        original_tree = test_archives / 'src'

        for archive in archives:
            config = copy.deepcopy(base_config)
            config['build']['source_path'] = archive
            config['build']['specificity'] = archive

            test = self._quick_test(config, build=False, finalize=False)

            test.builder._setup_build_dir(test.builder.path)

            # Make sure the extracted archive is identical to the original
            # (Though the containing directory will have a different name)
            try:
                self._cmp_tree(test.builder.path, original_tree)
            except AssertionError as err:
                raise AssertionError("Error extracting {}".format(archive),
                                     *err.args)

        # Check directory copying
        config = copy.deepcopy(base_config)
        config['build']['source_path'] = 'src'
        test = self._quick_test(config, build=False, finalize=False)

        if test.builder.path.exists():
            shutil.rmtree(str(test.builder.path))

        test.builder._setup_build_dir(test.builder.path)
        self._cmp_tree(test.builder.path, original_tree)

        # Test single compressed files.
        files = [
            'binfile.gz',
            'binfile.bz2',
            'binfile.xz',
        ]

        for file in files:
            config = copy.deepcopy(base_config)
            config['build']['source_path'] = file
            test = self._quick_test(config, build=False, finalize=False)

            if test.builder.path.exists():
                shutil.rmtree(str(test.builder.path))

            test.builder._setup_build_dir(test.builder.path)
            self._cmp_files(test.builder.path / 'binfile',
                            original_tree / 'binfile')

        # Make sure extra files are getting copied over.
        config = copy.deepcopy(base_config)
        config['build']['source_path'] = 'src.tar.gz'
        config['build']['extra_files'] = [
            'src.tar.gz',
            'src.xz',
            '../outside.zip',
            'foo/bar/deep.zip',
        ]
        test = self._quick_test(config, build=False, finalize=False)

        if test.builder.path.exists():
            shutil.rmtree(str(test.builder.path))

        test.builder._setup_build_dir(test.builder.path)

        for file in config['build']['extra_files']:
            file = pathlib.Path(file)
            self._cmp_files(test_archives / file,
                            test.builder.path / file.name)

    def test_create_file(self):
        """Check that build time file creation is working correctly."""

        files_to_create = {
            'file1': ['line_0', 'line_1'],
            'wild/file2': ['line_0', 'line_1'],       # wild dir exists
            'wild/dir2/file3': ['line_0', 'line_1'],  # dir2 does not exist
            'real.txt': ['line1', 'line4'],           # file exists
        }
        config = self._quick_test_cfg()
        config['build']['source_path'] = 'file_tests.tgz'
        config['build']['create_files'] = files_to_create
        test = self._quick_test(config)

        for file, lines in files_to_create.items():
            file_path = test.path / 'build' / file
            self.assertTrue(file_path.exists())

            # Stage file contents for comparison.
            original = io.StringIO()
            for line in lines:
                original.write("{}\n".format(line))
            created_file = open(str(file_path), 'r', encoding='utf-8')

            # Compare contents.
            self.assertEqual(original.getvalue(), created_file.read())

            original.close()
            created_file.close()

    def test_create_file_errors(self):
        """Check build time file creation expected errors."""

        # Ensure a file can't be written outside the build context.
        files_to_fail = ['../file', '../../file', 'wild/../../file']
        for file in files_to_fail:
            file_arg = {file: []}
            config = self._quick_test_cfg()
            config['build']['source_path'] = 'file_tests.tgz'
            config['build']['create_files'] = file_arg
            with self.assertRaises(RuntimeError) as context:
                self._quick_test(config)
            self.assertTrue('outside build context'
                            in str(context.exception))

        # Ensure a file can't overwrite existing directories.
        files_to_fail = ['wild', 'rec']
        for file in files_to_fail:
            file_arg = {file: []}
            config = self._quick_test_cfg()
            config['build']['source_path'] = 'file_tests.tgz'
            config['build']['create_files'] = file_arg
            test = self._quick_test(config, build=False, finalize=False)
            self.assertFalse(test.build())

    def test_copy_build(self):
        """Check that builds are copied correctly."""

        config = self._quick_test_cfg()
        # The copy_test source file contains several files to copy
        # for real and several to symlink.
        config['build']['source_path'] = 'file_tests.tgz'
        config['build']['copy_files'] = [
            'real.*',
            'wild/real_?i*[0-9].dat',
            'rec/**/real*',
        ]

        test = self._quick_test(config)

        # Make sure the following exist and are regular files.
        real_files = [
            'real.txt',
            'wild/real_wild1.dat',
            'wild/real_wild2.dat',
            'rec/real_r1.txt',
            'rec/rec2/real_r2.txt',
        ]

        for real in real_files:
            real = test.path / 'build' / real

            self.assertTrue(real.exists(),
                            msg="Missing {}".format(real))
            self.assertTrue(real.is_file(),
                            msg="{} is not a regular file.".format(real))

            # Make sure the copied files are writable.
            mode = real.stat().st_mode
            self.assertTrue(mode & stat.S_IWGRP)
            self.assertTrue(mode & stat.S_IWUSR)

        # Make sure the following exist, but are symlinks.
        sym_files = [
            'pav_build_log',
            '.built_by',
            'sym.txt',
            'wild/sym.dat',
            'rec/sym_r1.txt',
            'rec/rec2/sym_r2.txt',
        ]

        for sym in sym_files:
            sym = test.path / 'build' / sym
            self.assertTrue(sym.exists(), msg="Missing {}".format(sym))
            self.assertTrue(sym.is_symlink(),
                            msg="{} is not a symlink".format(sym))

    @unittest.skipIf(wget.missing_libs(),
                     "The wget module is missing required libs.")
    def test_src_urls(self):
        config_dir = self.TEST_DATA_ROOT / 'pav_config_dir'

        config = {
            'name': 'test',
            'scheduler': 'raw',
            'suite_path': (config_dir / 'tests' / 'fake_test.yaml').as_posix(),
            'build': {
                'modules': ['gcc'],
                'source_url': self.TEST_URL,
                'source_path': 'README.md',
                'source_download': 'missing',
            }
        }

        expected_path = config_dir / 'test_src' / 'README.md'
        if expected_path.exists():
            expected_path.unlink()
        self.assertFalse(expected_path.exists())

        test = self._quick_test(config, build=False, finalize=False)
        test.builder._setup_build_dir(test.builder.path)
        self.assertEqual(self.TEST_URL_HASH,
                         self.get_hash(test.builder.path / 'README.md'))
        self.assertTrue(expected_path.exists())

        # Make sure the build isn't updated even when the local copy differs.
        with expected_path.open('a') as readme_file:
            readme_file.write("<extra>")
        orig_time = expected_path.stat().st_mtime
        self._quick_test(config, build=False, finalize=False)
        self.assertEqual(orig_time, expected_path.stat().st_mtime)

        # Here it should be updated. We're playing a weird trick here though,
        # by pointing to a completely different url.
        config = copy.deepcopy(config)
        config['build']['source_url'] = self.TEST_URL2
        config['build']['source_download'] = 'latest'
        self._quick_test(config, build=False, finalize=False)
        self.assertGreater(expected_path.stat().st_mtime, orig_time)

        config = copy.deepcopy(config)
        config['build']['source_download'] = 'never'
        config['build']['source_url'] = 'http://nowhere-that-exists.com'
        self._quick_test(config, build=False, finalize=False)
        # This should succeed, because the file exists and we're not
        # going to download it.

    def test_build(self):
        """Make sure building works."""

        config1 = {
            'name': 'build_test',
            'scheduler': 'raw',
            'build': {
                'timeout': '12',
                'cmds': ['echo "Hello World [\x1esched.num_nodes\x1e]"'],
                'source_path': 'binfile.gz',
            },
        }

        test = self._quick_test(config1, build=False, finalize=False)

        # Test a basic build, with a gzip file and an actual build script.
        self.assertTrue(test.build(), msg="Build failed")

        # Make sure the build path and build origin contain softlinks to the
        # same files.
        self._cmp_tree(test.builder.path, test.build_path)
        self._is_softlink_dir(test.build_path)

        # We're going to time out this build on purpose, to test the code
        # that waits for builds to complete.
        config = {
            'name': 'build_test',
            'scheduler': 'raw',
            'build': {
                'timeout': '1',
                'cmds': ['sleep 10'],
                'source_path': 'binfile.gz',
            },
        }

        test = self._quick_test(config, 'build_test', build=False,
                                finalize=False)

        # This build should fail.
        self.assertFalse(test.build(),
                         "Build succeeded when it should have timed out.")
        current_note = test.status.current().note
        self.assertTrue(current_note.startswith("Build timed out"))

        # Test general build failure.
        config = {
            'name': 'build_test',
            'scheduler': 'raw',
            'build': {
                'timeout': '12',
                'cmds': ['exit 0'],
                'source_path': 'binfile.gz',
            },
        }

        # Check that building, and then re-using, a build directory works.
        test = self._quick_test(config, 'build_test', build=False,
                                finalize=False)

        # Remove the build tree to ensure we do the build fresh.
        if test.builder.path.is_dir():
            shutil.rmtree(str(test.builder.path))
        self.assertTrue(test.build())

        test2 = self._quick_test(config, 'build_test', build=False,
                                 finalize=False)
        self.assertTrue(test2.build())
        self.assertEqual(test.builder.path, test2.builder.path)

        config3 = copy.deepcopy(config)
        config3['build']['cmds'] = ['exit 1']
        # This should fail because the build exits non-zero.
        test3 = self._quick_test(config3, 'build_test', build=False,
                                 finalize=False)
        self.assertFalse(test3.build(),
                         "Build succeeded when it should have failed.")
        current_note = test3.status.current().note
        self.assertTrue(
            current_note.startswith("Build returned a non-zero result."))

    def test_builder_cancel(self):
        """Check build canceling through the cancel threading event."""

        cancel_event = threading.Event()

        config = {
            'name': 'build_test',
            'scheduler': 'raw',
            'build': {
                'timeout': '11',
                'cmds': ['sleep 5'],
            },
        }

        test = self._quick_test(config, 'build_test', build=False,
                                finalize=False)

        # Run the build in a separate thread so we can cancel it mid-build.
        thread = threading.Thread(
            target=test.build,
            args=(cancel_event,))
        thread.start()

        # Wait for the test to actually start building.
        timeout = 5 + time.time()
        states = [status.state for status in test.status.history()]
        while STATES.BUILDING not in states:
            if time.time() > timeout:
                self.fail("Test {} did not start building within 5 seconds."
                          .format(test.id))
            time.sleep(.5)
            states = [status.state for status in test.status.history()]

        time.sleep(.2)

        cancel_event.set()

        # Thread.join never raises on timeout; check whether the build thread
        # actually finished.
        thread.join(timeout=1)
        if thread.is_alive():
            self.fail(
                "Build did not respond quickly enough to being canceled.")

        self.assertEqual(test.status.current().state, STATES.ABORTED)
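
# The BUILDING-state polling loop above (and its twin in the older copy of this
# test below) could be factored into a small helper. This is only a hedged
# sketch of such a helper, not something Pavilion provides; the name
# wait_for_state and the default timeouts are assumptions.
def wait_for_state(test, state, timeout=5.0, poll=0.5):
    """Return True once `state` appears in the test's status history, or
    False if `timeout` seconds pass first."""
    deadline = time.time() + timeout
    while time.time() < deadline:
        if state in [status.state for status in test.status.history()]:
            return True
        time.sleep(poll)
    return False
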
from pathlib import Path
import json
import logging
import tempfile
import unittest

from pavilion import wget
from pavilion.unittest import PavTestCase

PAV_DIR = Path(__file__).resolve().parents[2]

WGET_MISSING_LIBS = wget.missing_libs()


class TestWGet(PavTestCase):

    GET_TARGET = "https://github.com/lanl/Pavilion/raw/master/README.md"
    TARGET_HASH = '275fa3c8aeb10d145754388446be1f24bb16fb00'

    _logger = logging.getLogger(__file__)

    @unittest.skipIf(WGET_MISSING_LIBS,
                     "Missing wget libs: {}".format(WGET_MISSING_LIBS))
    def test_get(self):

        # Try to get a configuration from the testing pavilion.yaml file.
        info = wget.head(self.pav_cfg, self.GET_TARGET)

        # Make sure we can pull basic info using an HTTP HEAD. The Etag can
        # change pretty easily; and the content-encoding may muck with the
        # length, so we can't really verify these.
class DocTests(PavTestCase):

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)

        self.docs_built = False
        self.docs_build_out = None
        self.docs_build_ret = None

        self.bad_links = None
        self.external_links = None

    def setUp(self):
        """Build the docs only once."""

        if not self.docs_built:
            out, ret = self.build_docs()
            self.docs_built = True
            self.docs_build_out = out
            self.docs_build_ret = ret

    def build_docs(self):
        """Perform a clean build of the test documentation."""

        subprocess.call(
            ['make', 'clean'],
            cwd=(self.PAV_ROOT_DIR / 'docs').as_posix(),
            stdout=subprocess.DEVNULL,
            stderr=subprocess.DEVNULL)

        cmd = ['make', 'html']

        proc = subprocess.Popen(
            cmd,
            cwd=(self.PAV_ROOT_DIR / 'docs').as_posix(),
            stdout=subprocess.PIPE,
            stderr=subprocess.STDOUT)

        out, _ = proc.communicate(timeout=20)
        out = out.decode('utf8')
        result = proc.poll()

        return out, result

    def check_links(self):
        """Get the list of bad links and the list of external links. Each is
        returned as a list of tuples of (origin_file, link). This assumes the
        docs have been built.

        :returns: bad_links, external_links
        """

        if self.bad_links is not None:
            return self.bad_links, self.external_links

        web_root = self.PAV_ROOT_DIR / 'docs' / '_build'

        # These will be non-locals in the scope of the html parser.
        seen_hrefs = set()
        seen_targets = set()
        external_links = set()

        class HREFParser(HTMLParser):
            """Parse the hrefs and anchor targets from a given html file."""

            def __init__(self, root, file_path):
                self.root = root
                self.path = file_path.relative_to(root)
                self.dir = file_path.parent

                seen_targets.add((self.path, ''))

                super().__init__()

            def handle_starttag(self, tag, attrs):
                """We want to record all the hrefs in the document. We also
                record every potential internal target."""

                nonlocal seen_hrefs
                nonlocal seen_targets
                nonlocal external_links

                if tag == 'a':
                    hrefs = [value for key, value in attrs if key == 'href']
                    if len(hrefs) > 1:
                        raise ValueError(
                            "'A' tag with more than one href: {}"
                            .format(attrs))
                    href_f = hrefs[0]

                    if href_f.startswith('#'):
                        anchor_f = href_f[1:]
                        seen_hrefs.add((self.path, (self.path, anchor_f)))
                    elif '://' in href_f:
                        external_links.add((self.path, href_f))
                    else:
                        if '#' in href_f:
                            # Split on the first '#' only; anything after it
                            # is the anchor.
                            file_loc, anchor_f = href_f.split('#', 1)
                        else:
                            file_loc, anchor_f = href_f, ''
                        file_loc = pathlib.Path(file_loc)

                        try:
                            file_loc = (self.dir / file_loc).resolve()
                            file_loc = file_loc.relative_to(self.root)
                        except FileNotFoundError:
                            pass

                        seen_hrefs.add((self.path, (file_loc, anchor_f)))

                id_ = [v for k, v in attrs if k == 'id']
                if id_:
                    seen_targets.add((self.path, id_[0]))

        for path in flat_walk(web_root):
            if path.is_dir():
                continue

            parser = HREFParser(web_root, path)

            if path.suffix == '.html':
                with path.open() as file:
                    parser.feed(file.read())

        bad_links = []
        for origin, ref in seen_hrefs:
            href, anchor = ref
            if ref not in seen_targets:
                if not (anchor or
                        href.suffix == '.html' or
                        not (web_root / href).exists()):
                    # Skip links to non-html files that don't have an anchor
                    # and that exist.
                    continue

                if anchor:
                    href = '{}#{}'.format(href, anchor)

                bad_links.append((origin, href))

        # Save our results so we only have to do this once.
        self.bad_links = bad_links
        self.external_links = external_links

        return bad_links, external_links

    @unittest.skipIf(not has_sphinx(), "Could not find Sphinx.")
    def test_doc_build(self):
        """Build the documentation and check for warnings/errors."""

        self.assertEqual(self.docs_build_ret, 0,
                         msg="Error building docs:\n{}"
                             .format(self.docs_build_out))

        warnings = []
        for line in self.docs_build_out.split('\n'):
            if 'WARNING' in line:
                warnings.append(line)

        self.assertTrue(
            len(warnings) == 0,
            msg='{} warnings in documentation build:\n{}\n\n{}'
                .format(len(warnings), '\n'.join(warnings),
                        self.docs_build_out))

    @unittest.skipIf(not has_sphinx(), "Could not find Sphinx.")
    def test_doc_links(self):
        """Verify the links in all the documentation. This shouldn't run as
        its own test, but as a subtest of the doc build test, so we don't
        have to build the docs twice."""

        bad_links, _ = self.check_links()

        link_desc = '\n'.join(['{} -> {}'.format(orig, href)
                               for orig, href in bad_links])

        self.assertTrue(bad_links == [],
                        msg="\nFound the following bad links:\n" + link_desc)

    @unittest.skipIf(not has_sphinx() or wget.missing_libs(),
                     "Could not find Sphinx (or maybe wget libs).")
    def test_doc_ext_links(self):
        """Check all the external doc links."""

        _, ext_links = self.check_links()

        origins_by_href = defaultdict(lambda: [])

        for origin, href in ext_links:
            origins_by_href[href].append(origin)

        # Check the external links too.
        for href in origins_by_href.keys():
            try:
                wget.head(self.pav_cfg, href)
            except wget.WGetError:
                self.fail("Could not fetch HEAD for doc external href '{}'"
                          .format(href))
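
# The skip decorators above rely on a has_sphinx() helper that isn't shown in
# this excerpt. A minimal sketch of what such a check might look like, assuming
# it only needs to know whether a sphinx-build executable is on the PATH (the
# real helper may well differ):
def has_sphinx():
    """Return True if a sphinx-build executable is available."""
    import shutil
    return shutil.which('sphinx-build') is not None
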
class PavTestTests(PavTestCase):

    def test_obj(self):
        """Test pavtest object initialization."""

        # Initializing with a mostly blank config
        config = {
            # The only required param.
            'name': 'blank_test',
            'scheduler': 'raw',
        }

        # Making sure this doesn't throw errors from missing params.
        PavTest(self.pav_cfg, config, {})

        config = {
            'subtest': 'st',
            'name': 'test',
            'scheduler': 'raw',
            'build': {
                'modules': ['gcc'],
                'cmds': ['echo "Hello World"'],
            },
            'run': {
                'modules': ['gcc', 'openmpi'],
                'cmds': ['echo "Running dis stuff"'],
                'env': {'BLARG': 'foo'},
            }
        }

        # Make sure we can create a test from a fairly populated config.
        t = PavTest(self.pav_cfg, config, {})

        # Make sure we can recreate the object from id.
        t2 = PavTest.load(self.pav_cfg, t.id)

        # Make sure the objects are identical.
        # This tests the following functions:
        #  - from_id
        #  - save_config, load_config
        #  - get_test_path
        #  - write_tmpl
        for key in set(t.__dict__.keys()).union(t2.__dict__.keys()):
            self.assertEqual(t.__dict__[key], t2.__dict__[key])

    def test_setup_build_dir(self):
        """Make sure we can correctly handle all of the various archive
        formats."""

        base_config = {
            'name': 'test',
            'scheduler': 'raw',
            'build': {
                'modules': ['gcc'],
            }
        }

        # Check that decompression and setup works for all accepted types.
        archives = [
            'src.tar.gz',
            'src.xz',
            # A bz2 archive
            'src.extensions_dont_matter',
            'src.zip',
            # These archives don't have a containing directory.
            'no_encaps.tgz',
            'no_encaps.zip',
        ]

        test_archives = self.TEST_DATA_ROOT / 'pav_config_dir' / 'test_src'
        original_tree = test_archives / 'src'

        for archive in archives:
            config = copy.deepcopy(base_config)
            config['build']['source_location'] = archive

            test = PavTest(self.pav_cfg, config, {})

            if test.build_origin.exists():
                shutil.rmtree(str(test.build_origin))

            test._setup_build_dir(test.build_origin)

            # Make sure the extracted archive is identical to the original
            # (Though the containing directory will have a different name)
            try:
                self._cmp_tree(test.build_origin, original_tree)
            except AssertionError as err:
                raise AssertionError("Error extracting {}".format(archive),
                                     *err.args)

        # Check directory copying
        config = copy.deepcopy(base_config)
        config['build']['source_location'] = 'src'
        test = PavTest(self.pav_cfg, config, {})
        if test.build_origin.exists():
            shutil.rmtree(str(test.build_origin))
        test._setup_build_dir(test.build_origin)
        self._cmp_tree(test.build_origin, original_tree)

        # Test single compressed files.
        files = [
            'binfile.gz',
            'binfile.bz2',
            'binfile.xz',
        ]

        for file in files:
            config = copy.deepcopy(base_config)
            config['build']['source_location'] = file
            test = PavTest(self.pav_cfg, config, {})
            if test.build_origin.exists():
                shutil.rmtree(str(test.build_origin))
            test._setup_build_dir(test.build_origin)
            self._cmp_files(test.build_origin / 'binfile',
                            original_tree / 'binfile')

        # Make sure extra files are getting copied over.
        config = copy.deepcopy(base_config)
        config['build']['source_location'] = 'src.tar.gz'
        config['build']['extra_files'] = [
            'src.tar.gz',
            'src.xz',
        ]
        test = PavTest(self.pav_cfg, config, {})
        if test.build_origin.exists():
            shutil.rmtree(str(test.build_origin))
        test._setup_build_dir(test.build_origin)
        for file in config['build']['extra_files']:
            self._cmp_files(test_archives / file,
                            test.build_origin / file)

    README_HASH = '275fa3c8aeb10d145754388446be1f24bb16fb00'

    @unittest.skipIf(wget.missing_libs(),
                     "The wget module is missing required libs.")
    def test_src_urls(self):

        base_config = {
            'name': 'test',
            'scheduler': 'raw',
            'build': {
                'modules': ['gcc'],
            }
        }

        config = copy.deepcopy(base_config)
        config['build']['source_location'] = self.TEST_URL

        # Remove existing downloads, and replace the directory.
        downloads_path = self.pav_cfg.working_dir / 'downloads'
        shutil.rmtree(str(downloads_path))
        downloads_path.mkdir()

        test = PavTest(self.pav_cfg, config, {})
        if test.build_origin.exists():
            shutil.rmtree(str(test.build_origin))

        test._setup_build_dir(test.build_origin)
        self.assertEqual(self.README_HASH,
                         self.get_hash(test.build_origin / 'README.md'))

    def test_resolve_template(self):
        tmpl_path = self.TEST_DATA_ROOT / 'resolve_template_good.tmpl'

        var_man = variables.VariableSetManager()
        var_man.add_var_set('sched', {
            'num_nodes': '3',
            'partition': 'test'
        })
        var_man.add_var_set('sys', {
            'hostname': 'test.host.com',
            'complicated': {
                'a': 'yes',
                'b': 'no'
            }
        })

        script_path = Path(tempfile.mktemp())
        PavTest.resolve_template(tmpl_path, script_path, var_man)
        good_path = self.TEST_DATA_ROOT / 'resolve_template_good.sh'

        with script_path.open() as gen_script, \
                good_path.open() as ver_script:
            self.assertEqual(gen_script.read(), ver_script.read())
        script_path.unlink()

        for bad_tmpl in (
                'resolve_template_keyerror.tmpl',
                'resolve_template_bad_key.tmpl'):

            script_path = Path(tempfile.mktemp())
            tmpl_path = self.TEST_DATA_ROOT / bad_tmpl
            with self.assertRaises(
                    KeyError,
                    msg="Error not raised on bad file '{}'".format(bad_tmpl)):
                PavTest.resolve_template(tmpl_path, script_path, var_man)

            if script_path.exists():
                script_path.unlink()

        script_path = Path(tempfile.mktemp())
        tmpl_path = self.TEST_DATA_ROOT / 'resolve_template_extra_escape.tmpl'
        with self.assertRaises(
                PavTestError,
                msg="Error not raised on bad file '{}'"
                    .format(tmpl_path.name)):
            PavTest.resolve_template(tmpl_path, script_path, var_man)

        if script_path.exists():
            script_path.unlink()

    def test_build(self):
        """Make sure building works."""

        config1 = {
            'name': 'build_test',
            'scheduler': 'raw',
            'build': {
                'cmds': ['echo "Hello World [\x1esched.num_nodes\x1e]"'],
                'source_location': 'binfile.gz',
            },
        }

        test = PavTest(self.pav_cfg, config1, {})

        # Test a basic build, with a gzip file and an actual build script.
        self.assertTrue(test.build(), msg="Build failed")

        # Make sure the build path and build origin contain softlinks to the
        # same files.
        self._cmp_tree(test.build_origin, test.build_path)
        self._is_softlink_dir(test.build_path)

        # We're going to time out this build on purpose, to test the code
        # that waits for builds to complete.
        config = {
            'name': 'build_test',
            'scheduler': 'raw',
            'build': {
                'cmds': ['sleep 10'],
                'source_location': 'binfile.gz',
            },
        }

        test = PavTest(self.pav_cfg, config, {})
        test.BUILD_SILENT_TIMEOUT = 1

        # This build should fail.
        self.assertFalse(test.build(),
                         "Build succeeded when it should have timed out.")
        current_note = test.status.current().note
        self.assertTrue(current_note.startswith("Build timed out"))

        # Test general build failure.
        config = {
            'name': 'build_test',
            'scheduler': 'raw',
            'build': {
                'cmds': ['exit 0'],
                'source_location': 'binfile.gz',
            },
        }

        # Check that building, and then re-using, a build directory works.
        test = PavTest(self.pav_cfg, config, {})

        # Remove the build tree to ensure we do the build fresh.
        if test.build_origin.is_dir():
            shutil.rmtree(str(test.build_origin))
        self.assertTrue(test.build())

        test2 = PavTest(self.pav_cfg, config, {})
        self.assertTrue(test2.build())
        self.assertEqual(test.build_origin, test2.build_origin)

        config3 = copy.deepcopy(config)
        config3['build']['cmds'] = ['exit 1']
        # This should fail because the build exits non-zero.
        test3 = PavTest(self.pav_cfg, config3, {})
        self.assertFalse(test3.build(),
                         "Build succeeded when it should have failed.")
        current_note = test3.status.current().note
        self.assertTrue(
            current_note.startswith("Build returned a non-zero result."))

    def test_run(self):
        config1 = {
            'name': 'run_test',
            'scheduler': 'raw',
            'run': {
                'env': {
                    'foo': 'bar',
                },
                # 'cmds': ['echo "I ran, punks"'],
            },
        }

        test = PavTest(self.pav_cfg, config1, {})
        self.assertTrue(test.build())

        self.assertTrue(test.run({}, {}), msg="Test failed to run.")

        config2 = config1.copy()
        config2['run']['modules'] = ['asdlfkjae', 'adjwerloijeflkasd']

        test = PavTest(self.pav_cfg, config2, {})
        self.assertTrue(test.build())

        self.assertEqual(
            test.run({}, {}),
            STATES.RUN_FAILED,
            msg="Test should have failed because a module couldn't be "
                "loaded. {}".format(test.path))
        # TODO: Make sure this is the exact reason for the failure
        #   (doesn't work currently).

        # Make sure the test fails properly on a timeout.
        config3 = {
            'name': 'sleep_test',
            'scheduler': 'raw',
            'run': {
                'cmds': ['sleep 10'],
            },
        }

        test = PavTest(self.pav_cfg, config3, {})
        self.assertTrue(test.build())
        test.RUN_SILENT_TIMEOUT = 1
        self.assertEqual(
            test.run({}, {}),
            STATES.RUN_TIMEOUT,
            msg="Test should have failed due to timeout. {}"
                .format(test.path))

    def test_suites(self):
        """Test suite creation and regeneration."""

        config1 = {
            'name': 'run_test',
            'scheduler': 'raw',
            'run': {
                'env': {
                    'foo': 'bar',
                },
                # 'cmds': ['echo "I ran, punks"'],
            },
        }

        tests = []
        for i in range(3):
            tests.append(PavTest(self.pav_cfg, config1, {}))

        # Make sure this doesn't explode.
        suite = TestSeries(self.pav_cfg, tests)

        # Make sure we got all the tests.
        self.assertEqual(len(suite.tests), 3)
        test_paths = [Path(suite.path, p)
                      for p in os.listdir(str(suite.path))]
        # And that the test paths are unique.
        self.assertEqual(len(set(test_paths)),
                         len([p.resolve() for p in test_paths]))

        self._is_softlink_dir(suite.path)

        suite2 = TestSeries.from_id(self.pav_cfg, suite._id)
        self.assertEqual(sorted(suite.tests.keys()),
                         sorted(suite2.tests.keys()))
        self.assertEqual(sorted([t.id for t in suite.tests.values()]),
                         sorted([t.id for t in suite2.tests.values()]))
        self.assertEqual(suite.path, suite2.path)
        self.assertEqual(suite.id, suite2.id)
def _update_src(self):
    """Retrieve and/or check the existence of the files needed for the
    build. This can include pulling from URLs.

    :returns: The path to the source (or None if there is no source).
    """

    src_path = self._config.get('source_path')
    if src_path is None:
        # There is no source to do anything with.
        return None

    try:
        src_path = Path(src_path)
    except ValueError as err:
        raise TestBuilderError(
            "The source path must be a valid unix path, either relative "
            "or absolute, got '{}':\n{}".format(src_path, err.args[0]))

    found_src_path = self._find_file(src_path, 'test_src')

    src_url = self._config.get('source_url')
    src_download = self._config.get('source_download')

    if (src_url is not None
            and ((src_download == 'missing' and found_src_path is None)
                 or src_download == 'latest')):

        # Make sure we have the library support to perform a download.
        missing_libs = wget.missing_libs()
        if missing_libs:
            raise TestBuilderError(
                "The dependencies needed for remote source retrieval "
                "({}) are not available on this system. Please provide "
                "your test source locally."
                .format(', '.join(missing_libs)))

        if not src_path.is_absolute():
            dwn_dest = self.test.suite_path.parents[1]/'test_src'/src_path
        else:
            dwn_dest = src_path

        # Make sure the download destination's parent directory exists.
        if not dwn_dest.parent.exists():
            try:
                dwn_dest.parent.mkdir(parents=True)
            except OSError as err:
                raise TestBuilderError(
                    "Could not create parent directory to place "
                    "downloaded source:\n{}".format(err.args[0]))

        self.tracker.update("Updating source at '{}'.".format(found_src_path),
                            STATES.BUILDING)

        try:
            wget.update(self._pav_cfg, src_url, dwn_dest)
        except wget.WGetError as err:
            raise TestBuilderError(
                "Could not retrieve source from the given url '{}':\n{}"
                .format(src_url, err.args[0]))

        return dwn_dest

    if found_src_path is None:
        raise TestBuilderError(
            "Could not find source '{}'".format(src_path.as_posix()))

    if found_src_path.is_dir():
        # For directories, update the directory's mtime to match the
        # latest mtime in the entire directory.
        self._date_dir(found_src_path)
        return found_src_path

    elif found_src_path.is_file():
        # For static files, we'll end up just hashing the whole thing.
        return found_src_path

    else:
        raise TestBuilderError(
            "Source location '{}' points to something unusable."
            .format(found_src_path))
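
# For reference, a hedged sketch of the newer source-related build config keys
# this version consumes, mirroring the configs used in test_src_urls above. The
# URL value is illustrative; per the tests, 'source_download' takes 'never',
# 'missing', or 'latest', and 'source_path' appears to be looked up under a
# 'test_src' directory when it is relative.
_example_build_config = {
    'source_path': 'README.md',                      # local source path
    'source_url': 'https://example.com/README.md',   # optional remote source
    'source_download': 'missing',                    # download only if local copy is absent
}
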
class BuilderTests(PavTestCase):

    def test_setup_build_dir(self):
        """Make sure we can correctly handle all of the various archive
        formats."""

        base_config = {
            'name': 'test',
            'scheduler': 'raw',
            'build': {
                'modules': ['gcc'],
            }
        }

        # Check that decompression and setup works for all accepted types.
        archives = [
            'src.tar.gz',
            'src.xz',
            # A bz2 archive
            'src.extensions_dont_matter',
            'src.zip',
            # These archives don't have a containing directory.
            'no_encaps.tgz',
            'no_encaps.zip',
            'softlink.zip',
        ]

        test_archives = self.TEST_DATA_ROOT / 'pav_config_dir' / 'test_src'
        original_tree = test_archives / 'src'

        for archive in archives:
            config = copy.deepcopy(base_config)
            config['build']['source_location'] = archive
            config['build']['specificity'] = archive

            test = TestRun(self.pav_cfg, config)
            tmp_path = test.builder.path.with_suffix('.test')

            test.builder._setup_build_dir(test.builder.path)

            # Make sure the extracted archive is identical to the original
            # (Though the containing directory will have a different name)
            try:
                self._cmp_tree(test.builder.path, original_tree)
            except AssertionError as err:
                raise AssertionError("Error extracting {}".format(archive),
                                     *err.args)

        # Check directory copying
        config = copy.deepcopy(base_config)
        config['build']['source_location'] = 'src'
        test = TestRun(self.pav_cfg, config)
        if test.builder.path.exists():
            shutil.rmtree(str(test.builder.path))
        test.builder._setup_build_dir(test.builder.path)
        self._cmp_tree(test.builder.path, original_tree)

        # Test single compressed files.
        files = [
            'binfile.gz',
            'binfile.bz2',
            'binfile.xz',
        ]

        for file in files:
            config = copy.deepcopy(base_config)
            config['build']['source_location'] = file
            test = TestRun(self.pav_cfg, config)
            if test.builder.path.exists():
                shutil.rmtree(str(test.builder.path))
            test.builder._setup_build_dir(test.builder.path)
            self._cmp_files(test.builder.path / 'binfile',
                            original_tree / 'binfile')

        # Make sure extra files are getting copied over.
        config = copy.deepcopy(base_config)
        config['build']['source_location'] = 'src.tar.gz'
        config['build']['extra_files'] = [
            'src.tar.gz',
            'src.xz',
        ]
        test = TestRun(self.pav_cfg, config)
        if test.builder.path.exists():
            shutil.rmtree(str(test.builder.path))
        test.builder._setup_build_dir(test.builder.path)
        for file in config['build']['extra_files']:
            self._cmp_files(test_archives / file,
                            test.builder.path / file)

    README_HASH = '275fa3c8aeb10d145754388446be1f24bb16fb00'

    @unittest.skipIf(wget.missing_libs(),
                     "The wget module is missing required libs.")
    def test_src_urls(self):
        base_config = {
            'name': 'test',
            'scheduler': 'raw',
            'build': {
                'modules': ['gcc'],
            }
        }

        config = copy.deepcopy(base_config)
        config['build']['source_location'] = self.TEST_URL

        # Remove existing downloads, and replace the directory.
        downloads_path = self.pav_cfg.working_dir / 'downloads'
        shutil.rmtree(str(downloads_path))
        downloads_path.mkdir()

        test = TestRun(self.pav_cfg, config)
        if test.builder.path.exists():
            shutil.rmtree(str(test.builder.path))

        test.builder._setup_build_dir(test.builder.path)
        self.assertEqual(self.README_HASH,
                         self.get_hash(test.builder.path / 'README.md'))

    def test_build(self):
        """Make sure building works."""

        config1 = {
            'name': 'build_test',
            'scheduler': 'raw',
            'build': {
                'timeout': '12',
                'cmds': ['echo "Hello World [\x1esched.num_nodes\x1e]"'],
                'source_location': 'binfile.gz',
            },
        }

        test = TestRun(self.pav_cfg, config1)

        # Test a basic build, with a gzip file and an actual build script.
        self.assertTrue(test.build(), msg="Build failed")

        # Make sure the build path and build origin contain softlinks to the
        # same files.
        self._cmp_tree(test.builder.path, test.build_path)
        self._is_softlink_dir(test.build_path)

        # We're going to time out this build on purpose, to test the code
        # that waits for builds to complete.
        config = {
            'name': 'build_test',
            'scheduler': 'raw',
            'build': {
                'timeout': '1',
                'cmds': ['sleep 10'],
                'source_location': 'binfile.gz',
            },
        }

        test = TestRun(self.pav_cfg, config)

        # This build should fail.
        self.assertFalse(test.build(),
                         "Build succeeded when it should have timed out.")
        current_note = test.status.current().note
        self.assertTrue(current_note.startswith("Build timed out"))

        # Test general build failure.
        config = {
            'name': 'build_test',
            'scheduler': 'raw',
            'build': {
                'timeout': '12',
                'cmds': ['exit 0'],
                'source_location': 'binfile.gz',
            },
        }

        # Check that building, and then re-using, a build directory works.
        test = TestRun(self.pav_cfg, config)

        # Remove the build tree to ensure we do the build fresh.
        if test.builder.path.is_dir():
            shutil.rmtree(str(test.builder.path))
        self.assertTrue(test.build())

        test2 = TestRun(self.pav_cfg, config)
        self.assertTrue(test2.build())
        self.assertEqual(test.builder.path, test2.builder.path)

        config3 = copy.deepcopy(config)
        config3['build']['cmds'] = ['exit 1']
        # This should fail because the build exits non-zero.
        test3 = TestRun(self.pav_cfg, config3)
        self.assertFalse(test3.build(),
                         "Build succeeded when it should have failed.")
        current_note = test3.status.current().note
        self.assertTrue(
            current_note.startswith("Build returned a non-zero result."))

    def test_builder_cancel(self):
        """Check build canceling through the cancel threading event."""

        cancel_event = threading.Event()

        config = {
            'name': 'build_test',
            'scheduler': 'raw',
            'build': {
                'timeout': '11',
                'cmds': ['sleep 5'],
            },
        }

        test = TestRun(self.pav_cfg, config)

        # Run the build in a separate thread so we can cancel it mid-build.
        thread = threading.Thread(
            target=test.build,
            args=(cancel_event,))
        thread.start()

        # Wait for the test to actually start building.
        timeout = 5 + time.time()
        states = [status.state for status in test.status.history()]
        while STATES.BUILDING not in states:
            if time.time() > timeout:
                self.fail("Test {} did not start building within 5 seconds."
                          .format(test.id))
            time.sleep(.5)
            states = [status.state for status in test.status.history()]

        time.sleep(.2)

        cancel_event.set()

        # Thread.join never raises on timeout; check whether the build thread
        # actually finished.
        thread.join(timeout=1)
        if thread.is_alive():
            self.fail(
                "Build did not respond quickly enough to being canceled.")

        self.assertEqual(test.status.current().state, STATES.ABORTED)