def _glob_one_pattern(clazz, paths, pattern):
  'Expand one glob pattern against each path and return the sorted unique matches.'
  expressions = [ path.join(p, pattern) for p in object_util.listify(paths) ]
  matched = []
  for expression in expressions:
    matched.extend(glob.glob(expression))
  return sorted(algorithm.unique(matched))
def resolve_deps_NEW(self, names, system, hardness, include_names):
  'Resolve dependencies.'
  self.log_i('resolve_deps(names=%s, system=%s, hardness=%s, include_names=%s)' % (' '.join(names), system, ' '.join(hardness), include_names))
  check.check_string_seq(names)
  names = object_util.listify(names)
  hardness = self._normalize_hardness(hardness)
  reqs = requirement_list()
  missing = []
  # Collect requirements for every known name; remember the unknown ones.
  for name in names:
    desc = self._descriptor_map.get(name, None)
    if desc is None:
      missing.append(name)
    else:
      reqs.extend(desc.requirements.filter_by(hardness, system))
  if missing:
    # Report the unresolved names instead of raising.
    return self._resolve_result(None, missing)
  self.log_i('resolve_deps() reqs=%s' % (str(reqs.names())))
  dep_map = self.dependency_map(hardness, system)
  resolved = dependency_resolver.resolve_and_order_deps(reqs.names(), self._descriptor_map, dep_map)
  result = package_descriptor_list(resolved)
  self.log_i('resolve_deps() 1 result=%s' % (str(result.names())))
  if include_names:
    result += self.descriptors(names)
    result.remove_dups()
  self.log_i('resolve_deps() 2 result=%s' % (str(result.names())))
  return self._resolve_result(result, None)
def resolve_deps(self, names, system, hardness, include_names):
  'Resolve dependencies.'
  self.log_i('resolve_deps(names=%s, system=%s, hardness=%s, include_names=%s)' % (' '.join(names), system, ' '.join(hardness), include_names))
  check.check_string_seq(names)
  names = object_util.listify(names)
  hardness = self._normalize_hardness(hardness)
  reqs = requirement_list()
  for name in names:
    desc = self._descriptor_map.get(name, None)
    if desc is None:
      # Unknown package names are a hard error here.
      raise KeyError('Not found in packages: %s' % (name))
    reqs.extend(desc.requirements.filter_by(hardness, system))
  self.log_i('resolve_deps() reqs=%s' % (str(reqs.names())))
  dep_map = self.dependency_map(hardness, system)
  resolved = dependency_resolver.resolve_and_order_deps(reqs.names(), self._descriptor_map, dep_map)
  result = package_descriptor_list(resolved)
  self.log_i('resolve_deps() 1 result=%s' % (str(result.names())))
  if include_names:
    result += self.descriptors(names)
    result.remove_dups()
  self.log_i('resolve_deps() 2 result=%s' % (str(result.names())))
  return result
def glob(clazz, paths, patterns):
  'Like glob but handles one or more paths and one or more patterns'
  matched = []
  for pattern in object_util.listify(patterns):
    matched.extend(clazz._glob_one_pattern(paths, pattern))
  return sorted(algorithm.unique(matched))
def _call_softwareupdate(clazz, args, verbose):
  '''Run the macOS softwareupdate executable with args.

  Raises softwareupdater_error if the executable is not found or it
  exits with a non-zero code.  Returns the execute result object.
  '''
  check.check_string_seq(args)
  check.check_bool(verbose)
  command_line.check_args_type(args)
  args = object_util.listify(args)
  exe = which.which('softwareupdate')
  if not exe:
    raise softwareupdater_error('softwareupdate not found')
  clazz._log.log_d('_call_softwareupdate: exe={} args={}'.format(exe, args))
  cmd = [ exe ] + args
  env = os_env.clone_current_env()
  rv = execute.execute(cmd,
                       env = env,
                       stderr_to_stdout = True,
                       raise_error = False,
                       non_blocking = verbose)
  if rv.exit_code != 0:
    # BUG FIX: previously formatted the raw list `cmd` into the message
    # while the flattened `cmd_flat` was computed but never used.
    cmd_flat = ' '.join(cmd)
    msg = 'softwareupdate command failed: {} - {}\n{}'.format(cmd_flat, rv.exit_code, rv.stdout)
    raise softwareupdater_error(msg, status_code = rv.exit_code)
  return rv
def find_git_dirs(clazz, dirs):
  'Return the root dir of every git repository found under any dir in dirs, sorted.'
  # BUG FIX: old docstring claimed only the "first" .git dir was returned;
  # all are.  Also removed the unused local `possible`.
  dirs = object_util.listify(dirs)
  dirs = [ d for d in dirs if path.isdir(d) ]
  found = clazz._find(dirs, '.git', None, None, False)
  return sorted([ file_util.remove_tail(d, '.git') for d in found ])
def upgrade(self, packages):
  'Upgrade a package to the latest version'
  packages = object_util.listify(packages)
  check.check_string_seq(packages)
  error_message = 'Failed to upgrade "{}"'.format(' '.join(packages))
  self._call_install([ '--upgrade' ] + list(packages), error_message = error_message)
def commit(clazz, root_dir, message, filenames):
  'Commit filenames with message and return the short hash of the new commit.'
  filenames = object_util.listify(filenames)
  # git reads the commit message from a temp file via -F.
  tmp_msg = temp_file.make_temp_file(content = message)
  try:
    git_exe.call_git(root_dir, [ 'commit', '-F', tmp_msg ] + filenames)
  finally:
    # Always clean up the temp message file, even if the commit fails.
    file_util.remove(tmp_msg)
  return clazz.last_commit_hash(root_dir, short_hash = True)
def remove_members(clazz, archive, members, debug=False):
  'Remove members from an archive and then recreate it.'
  members = object_util.listify(members)
  # Extract, delete the unwanted members on disk, then re-archive.
  tmp_dir = archiver.extract_all_temp_dir(archive, delete=not debug)
  if debug:
    print('tmp_dir: {}'.format(tmp_dir))
  doomed = [ path.normpath(path.join(tmp_dir, m)) for m in members ]
  file_util.remove(doomed)
  archiver.create(archive, tmp_dir)
def atexit_operations(self, operations):
  'When the process exits, run one or more operations on the repo.'
  # BUG FIX: removed an unused function-local import (bes.system.log.log)
  # and fixed the docstring typo "exists" -> "exits".
  operations = object_util.listify(operations)
  def _do_ops(*args, **kargs):
    # args are (repo, operations) as registered below.
    arg_repo = args[0]
    arg_operations = args[1]
    for op in arg_operations:
      op(arg_repo)
  atexit.register(_do_ops, self, operations)
def glob_paths(clazz, paths):
  'Glob a list of paths if needed'
  expanded = []
  for p in object_util.listify(paths):
    # Only paths containing glob metacharacters are expanded.
    expanded.extend(glob.glob(p) if clazz.has_glob_pattern(p) else [ p ])
  return sorted(algorithm.unique(expanded))
def hardness_matches(self, hardness):
  'Return True if hardness matches.'
  hardness = object_util.listify(hardness)
  if not requirement_hardness.is_valid_seq(hardness):
    raise ValueError('invalid hardness: %s - %s' % (str(hardness), type(hardness)))
  # Fall back to the default hardness when none is set on self.
  mine = self.hardness or requirement_hardness.DEFAULT
  return any(mine == requirement_hardness[h] for h in hardness)
def resolve_dirs(clazz, dirs, options=None):
  'Resolve a directories only.'
  check.check_file_resolver_options(options, allow_none=True)
  clazz._log.log_method_d()
  dirs = object_util.listify(dirs)
  file_check.check_dir_seq(dirs)
  resolved_options = options or file_resolver_options()
  return clazz._do_resolve_files(dirs, resolved_options, file_find.DIR)
def filter(self, texts, negate=False):
  'Return the subset of texts that match, or do not match when negate is True.'
  kept = []
  for candidate in object_util.listify(texts):
    hit = self.match(candidate)
    if negate:
      hit = not hit
    if hit:
      kept.append(candidate)
  return kept
def _match(clazz, filenames, patterns, match_func, match_type, basename=True):
  ''' Match a list of files with patterns using match_func and match_type.
  match_func should be the form match_func(filename, patterns) '''
  # FIX: removed a duplicated `result = []` assignment and a dead
  # commented-out fragment; behavior is unchanged.
  assert clazz.match_type_is_valid(match_type)
  filenames = object_util.listify(filenames)
  patterns = object_util.listify(patterns)
  if not patterns:
    # With no patterns: ANY and ALL match nothing, NONE matches everything.
    if match_type == clazz.ANY:
      return []
    elif match_type == clazz.NONE:
      return filenames
    elif match_type == clazz.ALL:
      return []
  func_map = {
    clazz.ANY: clazz._match_any,
    clazz.NONE: clazz._match_none,
    clazz.ALL: clazz._match_all,
  }
  func = func_map[match_type]
  result = []
  for filename in filenames:
    # Match against the basename only, unless told otherwise.
    if basename:
      filename_for_match = path.basename(filename)
    else:
      filename_for_match = filename
    if func(match_func, filename_for_match, patterns):
      result.append(filename)
  return sorted(algorithm.unique(result))
def match_clause(self, clause):
  'Return True only if every clause matches.'
  if check.is_tuple(clause):
    clauses = list(clause)
  else:
    clauses = object_util.listify(clause)
  check.check_string_seq(clauses)
  return all(self._match_one_clause(c) for c in clauses)
def install_requirements(self, requirements_files):
  'Install packages from a requirements file'
  requirements_files = object_util.listify(requirements_files)
  check.check_string_seq(requirements_files)
  # Validate every file up front so nothing is installed from a bad list.
  for requirements_file in requirements_files:
    if not path.exists(requirements_file):
      raise pip_error(f'Requirements file not found: "{requirements_file}"')
  for requirements_file in requirements_files:
    self._install_one_requirements_file(requirements_file)
def match_first(self, expressions, strip_comments=False, line_number=None):
  'Return the first match of any expression at or after line_number, or None.'
  expressions = object_util.listify(expressions)
  for line in self._lines:
    # Skip lines before the requested starting line number.
    if line_number is not None and line.line_number < line_number:
      continue
    text = line.get_text(strip_comments=strip_comments)
    for expression in expressions:
      found = re.findall(expression, text)
      if found:
        return self._match_result(expression, found, line)
  return None
def match_all(self, expressions, strip_comments=False):
  'Return a text_line_parser of all lines matching any of the expressions.'
  expressions = object_util.listify(expressions)
  result = []
  for line in self._lines:
    text = line.get_text(strip_comments=strip_comments)
    for expression in expressions:
      match_rv = re.match(expression, text)
      self.log_d('match_all: match(\"%s\", \"%s\") => %s' % (expression, text, match_rv))
      # BUG FIX: reuse match_rv instead of calling re.match a second time.
      if match_rv:
        result.append(line)
  return text_line_parser(result)
def match_fnmatch(clazz, filenames, patterns, match_type=None, basename=True):
  'Match filenames against shell style fnmatch patterns.'
  patterns = object_util.listify(patterns)
  effective_match_type = match_type or clazz.ANY
  return clazz._match(filenames, patterns, fnmatch.fnmatch, effective_match_type, basename=basename)
def status(clazz, root, filenames, abspath = False, untracked_files = True):
  'Return the parsed git status for filenames in root.'
  filenames = object_util.listify(filenames)
  if untracked_files:
    untracked_flag = '--untracked-files=normal'
  else:
    untracked_flag = '--untracked-files=no'
  rv = git_exe.call_git(root, [ 'status', '--porcelain', untracked_flag ] + filenames)
  result = git_status_list.parse(rv.stdout)
  if abspath:
    result.become_absolute(root)
  return result
def match_backwards(self, line_number, expressions, strip_comments=False):
  'Search backwards from line_number for the first line matching any expression.'
  expressions = object_util.listify(expressions)
  # Start at the line just before line_number and walk towards the top.
  start_index = self.find_by_line_number(line_number) - 1
  if start_index < 0:
    return None
  for index in reversed(range(start_index + 1)):
    line = self._lines[index]
    text = line.get_text(strip_comments=strip_comments)
    for expression in expressions:
      found = re.findall(expression, text)
      if found:
        return self._match_result(expression, found, line)
  return None
def transform(clazz, archive, operations):
  'Transform an archive with one or more operations.'
  check.check_string(archive)
  operations = object_util.listify(operations)
  check.check_archive_operation_seq(operations)
  # Extract, run each operation over the tree, then re-archive in place.
  tmp_dir = clazz.extract_all_temp_dir(archive)
  for operation in operations:
    if not check.is_archive_operation(operation):
      raise TypeError('Operation should be a subclass of archive_operation_base: {}'.format(operation))
    operation.execute(tmp_dir)
  extension = archive_extension.extension_for_filename(archive)
  tmp_new_archive = clazz.create_temp_file(extension, tmp_dir)
  file_util.rename(tmp_new_archive, archive)
def resolve_deps(clazz, dep_map, names):
  ''' Return a set of resolved dependencies for the given name or names.
  Sorted alphabetically, not in build order. '''
  cycles = clazz.cyclic_deps(dep_map)
  if len(cycles) > 0:
    raise cyclic_dependency_error('Cyclic dependencies found: %s' % (' '.join(cycles)), cycles)
  # NOTE(review): return value was never used by the original either; the
  # call is kept in case build_order_flat validates dep_map — confirm.
  clazz.build_order_flat(dep_map)
  names = object_util.listify(names)
  resolved = set(names)
  for name in names:
    resolved |= clazz._resolve_deps(dep_map, name)
  return sorted(list(resolved))
def member_checksums(clazz, archive, members, debug=False):
  'Return a dict of checksums for the given members in archive.'
  members = object_util.listify(members)
  tmp_dir = archiver.extract_all_temp_dir(archive, delete=not debug)
  if debug:
    print('tmp_dir: {}'.format(tmp_dir))
  result = {}
  for member in members:
    # Callers should not pass duplicate members.
    assert member not in result
    p = path.join(tmp_dir, member)
    if not path.exists(p):
      raise IOError('member not found: {}'.format(member))
    if not path.isfile(p):
      raise IOError('member is not a file: {}'.format(member))
    # FIX: reuse the already computed path instead of re-joining it.
    result[member] = file_util.checksum('sha256', p)
  return result
def match_re(clazz, filenames, expressions, match_type=None, basename=True):
  'Match filenames against regular expressions.'
  # Compile once up front; _match calls the matcher per filename.
  compiled = [ re.compile(expression) for expression in object_util.listify(expressions) ]
  def _match_re(filename, expression):
    return len(expression.findall(filename)) > 0
  return clazz._match(filenames, compiled, _match_re, match_type or clazz.ANY, basename=basename)
def resolve_files(clazz, what, func=None):
  ''' Return a list of absolute filenames for what.  'what' can be one or more of:
  - a file
  - a directory to search for files '''
  check.check_callable(func, allow_none=True)
  if not what:
    return []
  resolved = []
  for item in object_util.listify(what):
    resolved.extend(clazz._resolve_one(item))
  resolved = sorted(set(resolved))
  if func:
    # Optional predicate to filter the resolved filenames.
    resolved = [ f for f in resolved if func(f) ]
  return resolved
def match_function(clazz, filenames, function, match_type=None, basename=True):
  'Match filenames using an arbitrary match function.'
  match_type = match_type or clazz.ANY
  assert clazz.match_type_is_valid(match_type)
  matched = []
  for filename in object_util.listify(filenames):
    candidate = path.basename(filename) if basename else filename
    if clazz._match_function_one(candidate, function, match_type):
      matched.append(filename)
  return sorted(algorithm.unique(matched))
def call_sudo(clazz, args, options=None):
  'Run sudo with args, optionally feeding the password via a temp askpass script.'
  check.check_sudo_cli_options(options, allow_none=True)
  command_line.check_args_type(args)
  args = object_util.listify(args)
  options = options or sudo_cli_options()
  exe = which.which('sudo')
  if not exe:
    raise sudo_error('sudo not found')
  clazz._log.log_d('sudo: exe={} args={} options={}'.format(exe, args, options))
  cmd = [ exe ]
  tmp_askpass = None
  askpass_env = {}
  if options.password:
    # Feed the password non-interactively through SUDO_ASKPASS.
    tmp_askpass = clazz._make_temp_askpass(options.password)
    askpass_env = { 'SUDO_ASKPASS': tmp_askpass }
    cmd.append('--askpass')
  if options.prompt:
    cmd.extend([ '--prompt', '"{}"'.format(options.prompt) ])
  cmd.extend(args)
  env = os_env.clone_current_env(d=askpass_env)
  try:
    rv = execute.execute(cmd,
                         env=env,
                         cwd=options.working_dir,
                         stderr_to_stdout=True,
                         raise_error=False,
                         non_blocking=options.verbose)
    if rv.exit_code != 0:
      if options.error_message:
        msg = options.error_message
      else:
        msg = 'sudo command failed: {}\n{}'.format(' '.join(cmd), rv.stdout)
      raise sudo_error(msg)
    return rv
  finally:
    # Never leave the askpass script (which contains the password) on disk.
    if tmp_askpass:
      file_util.remove(tmp_askpass)
def _find_files(clazz, files, options, file_type):
  'Resolve a mixed list of files and directories into a list of files.'
  files = object_util.listify(files)
  items = []
  # Log the raw inputs before resolving them.
  for i, f in enumerate(files, start=1):
    clazz._log.log_d(f'_find_files: files: {i}: {f}')
  for next_file in files:
    filename_abs = file_path.normalize(next_file)
    if not path.exists(filename_abs):
      raise IOError('File or directory not found: "{}"'.format(filename_abs))
    if path.isfile(filename_abs):
      # A plain file resolves to itself; its root_dir is its parent dir.
      item = clazz._resolved_item(filename_abs, path.dirname(filename_abs))
      items.append(item)
    elif path.isdir(filename_abs):
      # A directory is searched recursively; each entry keeps the
      # original (non-normalized) directory as its root_dir.
      next_entries = clazz._find_files_in_dir(filename_abs, options, 0, file_type)
      for next_entry in next_entries:
        item = clazz._resolved_item(next_entry, next_file)
        items.append(item)
  found_files = [item.filename_abs for item in items]
  for i, f in enumerate(found_files, start=1):
    clazz._log.log_d(f'_find_files: found_files: {i}: {f}')
  if len(found_files) == 1:
    # Exactly one file found: its own root_dir wins.
    root_dir = items[0].root_dir
    clazz._log.log_d(f'_find_files: one file: root_dir={root_dir}')
  else:
    filenames = [item.filename_abs for item in items]
    root_dir = None
    # FIXME: instead of checking for one file, we need to figure out
    # dirs in files and then classify all the found files according
    # to that prefix
    if len(files) == 1:
      # Single input dir whose prefix covers every result: use it directly.
      if filename_list.startswith(filenames, files[0]):
        root_dir = files[0]
    if not root_dir:
      # Otherwise fall back to the deepest common ancestor of all results.
      root_dir = file_path.common_ancestor(filenames)
    # NOTE(review): common_ancestor is assumed to always yield a dir here;
    # the assert will fire if the found files share no ancestor — confirm.
    assert root_dir
    clazz._log.log_d(f'_find_files: many files: root_dir={root_dir}')
  return found_files, root_dir