def check_runs(self, cmd, run_succeeds=True):
    """Perform a run and make sure the resulting files have correct
    permissions.

    :param cmd: The command to run (as for subprocess.Popen), under an
        environment pointing PAV_CONFIG_DIR at this test's config dir.
    :param bool run_succeeds: Whether the command is expected to exit
        with status zero. The test fails when the actual outcome
        contradicts this expectation.
    """

    env = os.environ.copy()
    env['PAV_CONFIG_DIR'] = self.config_dir.as_posix()

    # Use a context manager so the stdout pipe is always closed, even
    # when the assertion below fails partway through. (The original
    # code leaked the pipe.)
    with sp.Popen(cmd, env=env, stdout=sp.PIPE, stderr=sp.STDOUT) as proc:
        # '(failed) == run_succeeds' is true exactly when the outcome
        # contradicts the expectation (failed-but-expected-success, or
        # succeeded-but-expected-failure).
        if (proc.wait(3) != 0) == run_succeeds:
            out = proc.stdout.read()
            out = out.decode()
            self.fail("Error running command.\n{}".format(out))

    self.wait_tests(self.working_dir)

    for file in utils.flat_walk(self.working_dir):
        fstat = file.stat()

        # Make sure all files have the right group.
        grp_name = grp.getgrgid(fstat.st_gid).gr_name
        self.assertEqual(
            fstat.st_gid, self.alt_group.gr_gid,
            msg="File {} had the incorrect group. Expected {}, got {}"
                .format(file, self.alt_group.gr_name, grp_name))

        # Make sure all files are properly masked.
        masked_mode = oct(fstat.st_mode & ~self.umask)
        self.assertEqual(
            masked_mode, oct(fstat.st_mode),
            msg="Bad permissions for file {}. Expected {}, got {}"
                .format(file, masked_mode, oct(fstat.st_mode))
        )
def check_perms(self, path, group, umask):
    """Check that every file under ``path`` has the expected group and
    obeys the given umask.

    :param path: Root directory (Path) to walk.
    :param group: A grp struct for the expected group.
    :param int umask: The umask the files should have been created under.
    """

    # The original walked the tree twice in nested loops, re-running
    # every check once per file (O(n^2)); a single walk suffices.
    for file in utils.flat_walk(path):
        fstat = file.stat()

        # Make sure all files have the right group.
        grp_name = group.gr_name
        assigned_group = grp.getgrgid(fstat.st_gid).gr_name
        self.assertEqual(
            fstat.st_gid, group.gr_gid,
            msg="File {} had the incorrect group. Expected {}, got {}"
                .format(file, grp_name, assigned_group))

        # Make sure all files are properly masked.
        masked_mode = oct(fstat.st_mode & ~umask)
        self.assertEqual(
            masked_mode, oct(fstat.st_mode),
            msg="Bad permissions for file {}. Expected {}, got {}"
                .format(file, masked_mode, oct(fstat.st_mode))
        )
def _date_dir(base_path):
    """Update the mtime of the given directory or path to the latest
    mtime contained within it.

    :param Path base_path: The root of the path to evaluate.
    """

    root_stat = base_path.stat()

    # Newest mtime of the root itself or anything beneath it.
    newest = max(
        [root_stat.st_mtime]
        + [entry.stat().st_mtime for entry in utils.flat_walk(base_path)])

    # Only touch the root when something inside is actually newer.
    if newest != root_stat.st_mtime:
        os.utime(base_path.as_posix(), (root_stat.st_atime, newest))
def _date_dir(base_path):
    """Update the mtime of the given directory or path to the latest
    mtime contained within it.

    :param str base_path: The root of the path to evaluate.
    """

    root_stat = os.stat(base_path)

    # Track the newest mtime seen, starting with the root's own.
    newest = root_stat.st_mtime
    for sub_path in utils.flat_walk(base_path):
        sub_mtime = os.stat(sub_path).st_mtime
        if sub_mtime > newest:
            newest = sub_mtime

    # Only touch the root when something inside is actually newer.
    if newest != root_stat.st_mtime:
        os.utime(base_path, (root_stat.st_atime, newest))
def build(self, cancel_event=None):
    """Build the test using its builder object and symlink copy it to
    it's final location. The build tracker will have the latest
    information on any encountered errors.

    :param threading.Event cancel_event: Event to tell builds when to die.
    :returns: True if build successful
    """

    if self.build_origin_path.exists():
        # Fixed: the two string fragments were concatenated without a
        # space ("second time.This should never happen...").
        raise RuntimeError(
            "Whatever called build() is calling it for a second time. "
            "This should never happen for a given test run ({s.id})."
            .format(s=self))

    if cancel_event is None:
        cancel_event = threading.Event()

    if self.builder.build(cancel_event=cancel_event):
        # Create the build origin path, to make tracking a test's build
        # a bit easier.
        with PermissionsManager(self.build_origin_path, self.group,
                                self.umask):
            self.build_origin_path.symlink_to(self.builder.path)

        with PermissionsManager(self.build_path, self.group, self.umask):
            if not self.builder.copy_build(self.build_path):
                cancel_event.set()
        build_result = True
    else:
        # The build failed; move the failed build into place.
        with PermissionsManager(self.build_path, self.group, self.umask):
            self.builder.fail_path.rename(self.build_path)
            # Restore owner-write on everything so the failed build can
            # be inspected and cleaned up.
            for file in utils.flat_walk(self.build_path):
                file.chmod(file.stat().st_mode | 0o200)
        build_result = False

    # NOTE(review): unlike the symlinks above, this one is created
    # outside a PermissionsManager — confirm that's intentional.
    self.build_log.symlink_to(self.build_path / 'pav_build_log')

    return build_result
def check_permissions(self, path: Path, group: grp.struct_group,
                      umask: int, exclude: List[Path] = None):
    """Walk ``path`` and make sure every file has the expected group
    and the mode implied by the given umask.

    :param path: Root directory to check.
    :param group: The group every file should belong to.
    :param umask: The umask the files should have been created under.
    :param exclude: Directories whose contents should be skipped.
    """

    # Copy so we never mutate the caller's list.
    exclude = [] if exclude is None else list(exclude)

    # Directories keep their write bits here — the umask's write bits
    # are not applied to them.
    dir_umask = umask & ~0o222

    for file in utils.flat_walk(path):
        # Skip anything beneath an excluded directory.
        if any(parent in exclude for parent in file.parents):
            continue

        fstat = file.stat()

        # Make sure all files have the right group. Fixed: the failure
        # message reported self.alt_group rather than the 'group'
        # parameter actually being checked against.
        grp_name = grp.getgrgid(fstat.st_gid).gr_name
        self.assertEqual(
            fstat.st_gid, group.gr_gid,
            msg="File {} had the incorrect group. Expected {}, got {}".
            format(file, group.gr_name, grp_name))

        mode = fstat.st_mode

        if file.is_symlink():
            # Check the link itself, not its target.
            mode = file.lstat().st_mode
            self.assertEqual(
                mode, 0o120777,
                msg="Expected symlink {} to have permissions {} but "
                    "got {}".format(file, stat.filemode(0o120777),
                                    stat.filemode(mode)))
        elif (file.name.startswith('binfile')
              or file.name in ('kickoff.sh', 'build.sh', 'run.sh',
                               'run.tmpl')):
            expected = (~umask) & 0o100775
            # Binfiles should have owner/group execute.
            self.assertEqual(
                mode, expected,
                msg="Expected {} to have perms {}, but had {}".format(
                    file, stat.filemode(expected), stat.filemode(mode)))
        elif file.is_file():
            expected = (~umask) & 0o100664
            self.assertEqual(
                oct(mode), oct(expected),
                msg="Expected regular file {} to have permissions {} "
                    "but got {}".format(file, stat.filemode(expected),
                                        stat.filemode(mode)))
        elif file.is_dir():
            expected = 0o40775 & (~dir_umask)
            self.assertEqual(
                mode, expected,
                msg="Expected dir {} to have permissions {} but "
                    "got {}".format(file, stat.filemode(expected),
                                    stat.filemode(mode)))
        else:
            self.fail("Found unhandled file {}.".format(file))
def check_links(self):
    """Get the list of bad links, and the list of external links. Each is
    returned as a list of tuples of (origin_file, link). This assumes the
    docs have been built.

    returns: bad_links, external_links
    """

    # Results are cached after the first (expensive) run.
    if self.bad_links is not None:
        return self.bad_links, self.external_links

    web_root = self.PAV_ROOT_DIR/'docs'/'_build'

    # These will be non-locals in the scope of the html parser.
    seen_hrefs = set()
    seen_targets = set()
    external_links = set()

    class HREFParser(HTMLParser):
        """Parse the hrefs and anchor targets from a given html file."""

        def __init__(self, root, file_path):
            self.root = root
            self.path = file_path.relative_to(root)
            self.dir = file_path.parent

            # Every file is itself a valid (anchor-less) link target.
            seen_targets.add((self.path, ''))

            super().__init__()

        def handle_starttag(self, tag, attrs):
            """We want to record all the hrefs in the document. We also
            record every potential internal target."""

            nonlocal seen_hrefs
            nonlocal seen_targets
            nonlocal external_links

            if tag == 'a':
                hrefs = [value for key, value in attrs if key == 'href']
                if len(hrefs) > 1:
                    raise ValueError(
                        "'A' tag with more than one href: {}"
                        .format(attrs))
                href_f = hrefs[0]
                if href_f.startswith('#'):
                    # An in-page anchor link.
                    anchor_f = href_f[1:]
                    seen_hrefs.add((self.path, (self.path, anchor_f)))
                elif '://' in href_f:
                    external_links.add((self.path, href_f))
                else:
                    if '#' in href_f:
                        # Fixed: maxsplit must be 1 — with maxsplit=2 an
                        # href containing two '#' characters produced
                        # three parts and the unpack raised ValueError.
                        file_loc, anchor_f = href_f.split('#', 1)
                    else:
                        file_loc, anchor_f = href_f, ''
                    file_loc = pathlib.Path(file_loc)
                    try:
                        # Normalize the target relative to the doc root.
                        file_loc = (self.dir/file_loc).resolve()
                        file_loc = file_loc.relative_to(self.root)
                    except FileNotFoundError:
                        pass
                    seen_hrefs.add((self.path, (file_loc, anchor_f)))

            id_ = [v for k, v in attrs if k == 'id']
            if id_:
                seen_targets.add((self.path, id_[0]))

    for path in flat_walk(web_root):
        if path.is_dir():
            continue
        # Constructing the parser registers the file as a link target,
        # so do it for every file — but only feed html through it.
        parser = HREFParser(web_root, path)
        if path.suffix == '.html':
            with path.open() as file:
                parser.feed(file.read())

    bad_links = []
    for origin, ref in seen_hrefs:
        href, anchor = ref
        if ref not in seen_targets:
            if not (anchor or href.suffix == '.html'
                    or not (web_root / href).exists()):
                # Skip links to non-html files that don't have an anchor
                # and that exist.
                continue

            if anchor:
                href = '{}#{}'.format(href, anchor)

            bad_links.append((origin, href))

    # Save our results so we only have to do this once.
    self.bad_links = bad_links
    self.external_links = external_links

    return bad_links, external_links