def _split_files_and_dirs(clazz, working_dir, files_and_dirs):
  files = []
  dirs = []
  for f in files_and_dirs:
    f = file_path.normalize(path.join(working_dir, f))
    if path.isfile(f):
      files += [ f ]
    elif path.isdir(f):
      dirs += [ f ]
    else:
      raise ValueError('not a file or directory: %s' % (str(f)))
  files = sorted(algorithm.unique(files))
  dirs = sorted(algorithm.unique(dirs))
  return files, dirs
def installed_packages(clazz):
  'Return a list of installed packages.'
  cmd = 'pkgutil --packages'
  rv = clazz._call_pkgutil(cmd)
  if rv.exit_code != 0:
    raise RuntimeError('Failed to execute: %s' % (cmd))
  return sorted(algorithm.unique(rv.stdout.strip().split('\n')))
def _match(clazz, filenames, patterns, match_func, match_type):
  '''Match a list of files with patterns using match_func and match_type.
  match_func should be of the form match_func(filename, patterns).'''
  assert clazz.match_type_is_valid(match_type)
  filenames = object_util.listify(filenames)
  patterns = object_util.listify(patterns)
  if not patterns:
    if match_type == clazz.ANY:
      return []
    elif match_type == clazz.NONE:
      return filenames
    elif match_type == clazz.ALL:
      return []
  func_map = {
    clazz.ANY: clazz._match_any,
    clazz.NONE: clazz._match_none,
    clazz.ALL: clazz._match_all,
  }
  func = func_map[match_type]
  result = []
  for filename in filenames:
    if func(match_func, path.basename(filename), patterns):
      result.append(filename)
  return sorted(algorithm.unique(result))
def _requires_no_resolve(self, modules):
  modules = object_util.listify(modules)
  check.check_string_seq(modules)
  reqs = []
  for name in modules:
    reqs.extend(self._get_pc_file(name).requires)
  return algorithm.unique(reqs)
def sources(self, env):
  sources = []
  for step in self:
    for key, value in step.values.items():
      if check.is_value_base(value):
        sources.extend(value.sources(env))
  return algorithm.unique(sources)
def glob(clazz, paths, glob_expression):
  'Like glob but handles a single path or a sequence of paths.'
  paths = object_util.listify(paths)
  paths = [ path.join(p, glob_expression) for p in paths ]
  result = []
  for p in paths:
    result.extend(glob.glob(p))
  return sorted(algorithm.unique(result))
def _command_cflags(self, module_names):
  if not self._check_modules_exist(module_names):
    return 1
  cflags = []
  for name in module_names:
    cflags.extend(self.pc.module_cflags(name))
  cflags = algorithm.unique(cflags)
  print(' '.join(cflags))
  return 0
def common_ancestor(clazz, filenames):
  'Return a common ancestor for all the given filenames or None if there is not one.'
  def _path_base(p):
    return file_util.strip_sep(path.normpath(p).split(os.sep)[0])
  ancestors = [ _path_base(f) for f in filenames ]
  common_ancestor = algorithm.unique(ancestors)
  if len(common_ancestor) == 1:
    return common_ancestor[0] or None
  return None
def flags(self, names):
  result = {}
  for name in names:
    if name not in self:
      raise KeyError('not found: "%s"' % (name))
    instruction = self[name]
    self._append_flag_values(result, instruction)
  for k, v in result.items():
    result[k] = ' '.join(algorithm.unique(self._flatten_flags_list(v)))
  return result
def _resolve_files_and_dirs(clazz, working_dir, files_and_dirs):
  result = []
  for f in files_and_dirs:
    f = file_path.normalize(path.join(working_dir, f))
    if path.isfile(f):
      result += [ f ]
    elif path.isdir(f):
      result += clazz._resolve_dir(f)
  result = algorithm.unique(result)
  result = [ path.normpath(r) for r in result ]
  return sorted(result)
def __package_files(clazz, package_name, flags):
  cmd = 'pkgutil --files %s %s' % (package_name, flags)
  rv = clazz._call_pkgutil(cmd)
  if rv.exit_code != 0:
    raise RuntimeError('Failed to execute: %s' % (cmd))
  files = sorted(algorithm.unique(rv.stdout.strip().split('\n')))
  files = string_list_util.remove_if(files, clazz.__CONTENTS_BLACKLIST)
  info = clazz.package_info(package_name)
  package_home = info['volume'] + info['install_location']
  package_home = package_home.replace('//', '/')
  return [ path.join(package_home, f) for f in files ]
def _resolve_mask_to_list(clazz, s):
  assert string_util.is_string(s)
  s = s.lower()
  s = clazz.ALIASES.get(s, s)
  parts = [ part for part in clazz.mask_split(s) if part ]
  result = []
  for part in parts:
    result.extend(clazz._resolve_mask_part(part))
  result = sorted(algorithm.unique(result))
  if not result:
    return [ clazz.NONE ]
  return result
def inspect_file_new(clazz, filename):
  loader = unittest.TestLoader()
  where = path.dirname(filename)
  pattern = path.basename(filename)
  discovery = loader.discover(where, pattern = pattern)
  result = []
  for disc in discovery:
    for suite in disc:
      for test in suite:
        fixture = test.__class__.__name__
        test_functions = loader.getTestCaseNames(test)
        for function in test_functions:
          result.append(unit_test_description(filename, fixture, function))
  return sorted(algorithm.unique(result), key = lambda x: x.function)
def resolve_files(self, files, patterns = None, exclude_patterns = None):
  'Resolve a mixed list of files and directories into a sorted list of files.'
  result = []
  for f in files:
    if not path.exists(f):
      raise RuntimeError('Not found: %s' % (f))
    if path.isfile(f):
      result.append(self.filepath_normalize(f))
    elif path.isdir(f):
      result += file_find.find_fnmatch(f, patterns, relative = False)
  result = sorted(algorithm.unique(result))
  if not exclude_patterns:
    return result
  return file_match.match_fnmatch(result, exclude_patterns, file_match.NONE)
def __command_contents(name, levels, files_only, dirs_only):
  if files_only and dirs_only:
    raise RuntimeError('Only one of --files or --dirs can be given.')
  if files_only:
    files = npm.package_files(name)
  elif dirs_only:
    files = npm.package_dirs(name)
  else:
    files = npm.package_contents(name)
  if levels is not None:
    files = [ __level_path(p, levels) for p in files ]
    files = algorithm.unique(files)
  for f in files:
    print(f)
  return 0
def inspect_file(clazz, filename):
  old = []
  new = []
  try:
    old = clazz.inspect_file_old(filename)
  except Exception as ex:
    print('WARNING: failed to inspect unit test %s: %s' % (path.relpath(filename), str(ex)))
    #return None
  try:
    new = clazz.inspect_file_new(filename)
  except Exception as ex:
    #print('WARNING: failed to inspect unit test %s: %s' % (path.relpath(filename), str(ex)))
    #return None
    pass
  return sorted(algorithm.unique(old + new), key = lambda x: x.function)
def __manifest_for_binaries(clazz, binaries):
  'Return the manifest of files needed to install the given jail binaries.'
  files = set()
  for binary in binaries:
    if not path.isfile(binary):
      raise RuntimeError('not found: %s' % (binary))
    deps = library.dependencies_recursive(binary)
    files.add(binary)
    for p in dir_util.all_parents(binary):
      files.add(p)
    for dep in deps:
      files.add(dep)
      for p in dir_util.all_parents(dep):
        files.add(p)
  string_list_util.remove_if(files, '/')
  return sorted(algorithm.unique(list(files)))
def depth(clazz, patch, target_files):
  'Return the depth of a patch relative to target_files.  depth can then be used for Patch.patch(strip = depth).'
  affected_files = clazz.affected_files(patch)
  print(" patch: ", patch)
  # print "affected_files: ", affected_files
  # print "  target_files: ", target_files
  depths = []
  for target in target_files:
    for affected in affected_files:
      d = clazz.__compute_path(affected, target)
      if d is not None:
        print(" DEPTH: affected=%s; target=%s; depth=%d" % (affected, target, d))
        depths.append(d)
  depths = algorithm.unique(depths)
  if len(depths) != 1:
    raise RuntimeError('Unexpected depths for patch: %s' % (patch))
  return depths[0]
def dependencies(clazz, filename):
  'Return a list of dependencies for filename (executable or shared lib) or None if not applicable.'
  filename = path.abspath(filename)
  types = [ binary_format_macho.FILE_TYPE_EXECUTABLE, binary_format_macho.FILE_TYPE_SHARED_LIB ]
  if not binary_format_macho().file_is_of_type(filename, types):
    return None
  cmd = [ 'otool', '-L', filename ]
  rv = execute.execute(cmd)
  assert rv.stdout.find('is not an object file') == -1
  lines = rv.stdout.split('\n')
  if len(lines) < 2:
    return None
  deps = [ l.partition(' ')[0].strip() for l in lines[1:] ]
  deps = [ l for l in deps if l ]
  if filename in deps:
    deps.remove(filename)
  return sorted(algorithm.unique(deps))
def find_in_list(clazz, filenames, name, version):
  'Find the filenames that match name and version.'
  name_replacements = { 'lib': '' }
  name_prefix = clazz._name_prefix(name)
  if name_prefix:
    name_replacements[name_prefix] = ''
  name = re.escape(string_util.replace(name, name_replacements, word_boundary = False))
  version = re.escape(version)
  version = version.replace('\\.', '.')
  version = version.replace('\\-', '.')
  patterns = [
    r'.*%s.*%s.*' % (name, version),
    r'.*%s.*%s.*' % (name.replace('-', '_'), version),
    r'.*%s.*%s.*' % (name.replace('_', '-'), version),
    r'.*%s.*%s.*' % (name.replace('.', '_'), version),
    r'.*%s.*%s.*' % (name.replace('_', '.'), version),
  ]
  expressions = []
  for pattern in patterns:
    expressions.append(re.compile(pattern))
    expressions.append(re.compile(pattern, re.IGNORECASE))
  result = []
  for filename in filenames:
    base = path.basename(filename)
    for expression in expressions:
      if expression.match(base):
        result.append(filename)
  return sorted(algorithm.unique(result))
def remove_dups(self):
  self._values = algorithm.unique(self._values)
def _normalize_members(clazz, members):
  'Normalize the archive members to be unique and sorted.'
  return sorted(algorithm.unique(members))
def search_replace(clazz, root_dir, replacements, backup = True, test_func = None):
  assert isinstance(replacements, dict)
  text = [ str(x) for x in replacements.keys() ]
  items = clazz.search(root_dir, text, relative = False)
  filenames = algorithm.unique([ item.filename for item in items ])
  return file_replace.replace_many(filenames, replacements, backup = backup, test_func = test_func)
def test_unique(self):
  self.assertEqual( [ 'a', 'b', 'c' ], algorithm.unique([ 'a', 'b', 'c' ]) )
  self.assertEqual( [ 'a', 'b', 'c' ], algorithm.unique([ 'a', 'b', 'c', 'c' ]) )
  self.assertEqual( [ 'c', 'a', 'b' ], algorithm.unique([ 'c', 'a', 'b', 'c' ]) )
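# Note: the test above implies that algorithm.unique() deduplicates while preserving the
# order of first occurrence (it is the callers that sort when sorted output is wanted).
# A minimal sketch of such a function, assuming hashable items -- not the actual
# algorithm.unique implementation:
def unique(values):
  'Return values with duplicates removed, keeping the first occurrence of each item.'
  seen = set()
  result = []
  for value in values:
    if value not in seen:
      seen.add(value)
      result.append(value)
  return result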
def __unduplicate_flags(clazz, flags):
  'Unduplicate flags.'
  v = string_util.split_by_white_space(flags)
  unique_v = algorithm.unique(v)
  return ' '.join(unique_v)
def find_python_compiled_files(clazz, dirs):
  dirs = object_util.listify(dirs)
  result = []
  for d in dirs:
    result.extend(file_find.find_fnmatch(d, [ '*.pyc' ], relative = False))
  return algorithm.unique(result)
def _git_roots(clazz, files):
  files = object_util.listify(files)
  roots = [ git.root(f) for f in files ]
  roots = [ r for r in roots if r ]
  return algorithm.unique(roots)
def _find_config_files(clazz, d):
  return algorithm.unique(sorted(clazz._find_config_files_in_root(d) + clazz._find_config_files_in_env()))
def _normalize_members(clazz, members):
  'Return a sorted and unique list of members.'
  return sorted(algorithm.unique(members))
def _parse_flags(clazz, s):
  flags = string_util.split_by_white_space(s)
  return algorithm.unique([ flag.strip() for flag in flags ])
def _config_names(self):
  result = []
  for desc in self.test_descriptions:
    if desc.file_info.config:
      result.append(desc.file_info.config.data.name)
  return algorithm.unique(result)