def status(self, app_name):
    """Determine the run status of a previously forked application.

    Returns a (status, details) tuple where status is STATUS_STARTED or
    STATUS_UNKNOWN and details carries the captured stdout/stderr.
    """
    # Attempt to find the status of a given app by finding where that apps
    # pid file is and loading said pids details (from stderr/stdout) files
    # that should exist as well as by using shell utilities to determine
    # if said pid is still running...
    trace_dir = self.runtime.get_option('trace_dir')
    if not sh.isdir(trace_dir):
        # NOTE(review): details is a plain string here but a dict below --
        # callers apparently must handle both shapes; confirm.
        return (STATUS_UNKNOWN, '')
    fork_fns = self._form_file_names(app_name)
    pid = fork_fns.extract_pid()
    # Best-effort reads: a missing/unreadable capture file is not fatal.
    stderr = ''
    try:
        stderr = sh.load_file(fork_fns.stderr)
    except (IOError, ValueError, TypeError):
        pass
    stdout = ''
    try:
        stdout = sh.load_file(fork_fns.stdout)
    except (IOError, ValueError, TypeError):
        pass
    details = {
        'STDOUT': stdout,
        'STDERR': stderr,
    }
    if pid is not None and sh.is_running(pid):
        return (STATUS_STARTED, details)
    else:
        return (STATUS_UNKNOWN, details)
def _get_source_config(self, config_fn):
    """Return (path, contents) of the source configuration for config_fn."""
    if config_fn == PLUGIN_CONF:
        sub_path = ('etc',)
    elif config_fn == AGENT_CONF:
        # This config is buried deep inside the checkout tree...
        sub_path = ('etc', 'quantum', 'plugins', 'openvswitch')
    else:
        # Unknown name: defer to the base component implementation.
        return comp.PkgInstallComponent._get_source_config(self, config_fn)
    src_fn = sh.joinpths(self.get_option('app_dir'), *(sub_path + (config_fn,)))
    return (src_fn, sh.load_file(src_fn))
def _clean_pip_requires(self):
    """Comment out filtered lines in each existing pip 'requires' file.

    Returns the number of files that were adjusted.
    """
    # Fixup these files if they exist (sometimes they have 'junk' in them)
    req_fns = []
    for fn in self.requires_files:
        if not sh.isfile(fn):
            continue
        req_fns.append(fn)
    if req_fns:
        utils.log_iterable(req_fns, logger=LOG,
                           header="Adjusting %s pip 'requires' files" % (len(req_fns)))
        for fn in req_fns:
            new_lines = []
            for line in sh.load_file(fn).splitlines():
                s_line = line.strip()
                if len(s_line) == 0:
                    # Blank lines are dropped entirely.
                    continue
                elif s_line.startswith("#"):
                    new_lines.append(s_line)
                elif not self._filter_pip_requires_line(fn, s_line):
                    # Filter rejected this requirement; keep it but disabled.
                    new_lines.append(("# %s" % (s_line)))
                else:
                    new_lines.append(s_line)
            contents = "# Cleaned on %s\n\n%s\n" % (utils.iso8601(),
                                                    "\n".join(new_lines))
            sh.write_file_and_backup(fn, contents)
    return len(req_fns)
def source_config(self, config_fn):
    """Return (path, contents) of the source config for config_fn."""
    # The logging config ships under a sample name in the checkout.
    real_fn = 'logging.cnf.sample' if config_fn == LOGGING_CONF else config_fn
    fn = sh.joinpths(self.get_option('app_dir'), 'etc', real_fn)
    return (fn, sh.load_file(fn))
def _get_source_config(self, config_fn):
    """Return (path, contents) of the nova source config for config_fn."""
    # Some configs live under source/sample names in the checkout.
    renames = {
        PASTE_CONF: PASTE_SOURCE_FN,
        LOGGING_CONF: LOGGING_SOURCE_FN,
    }
    config_fn = renames.get(config_fn, config_fn)
    fn = sh.joinpths(self.get_option("app_dir"), "etc", "nova", config_fn)
    return (fn, sh.load_file(fn))
def _get_source_config(self, config_fn):
    """Return (path, contents) of the melange source config for config_fn."""
    if config_fn != ROOT_CONF:
        # Anything else is handled by the base python component.
        return comp.PythonInstallComponent._get_source_config(self, config_fn)
    # FIXME, maybe we shouldn't be sucking this from the checkout??
    fn = sh.joinpths(self.get_option('app_dir'), 'etc', 'melange', config_fn)
    return (fn, sh.load_file(fn))
def _clean_pip_requires(self):
    """Comment out filtered lines in each existing pip 'requires' file.

    Returns the number of files that were adjusted.
    """
    # Fixup these files if they exist (sometimes they have 'junk' in them)
    req_fns = []
    for fn in self.requires_files:
        if not sh.isfile(fn):
            continue
        req_fns.append(fn)
    if req_fns:
        utils.log_iterable(req_fns, logger=LOG,
                           header="Adjusting %s pip 'requires' files" % (len(req_fns)))
        for fn in req_fns:
            new_lines = []
            for line in sh.load_file(fn).splitlines():
                s_line = line.strip()
                if len(s_line) == 0:
                    # Blank lines are dropped entirely.
                    continue
                elif s_line.startswith("#"):
                    new_lines.append(s_line)
                elif not self._filter_pip_requires_line(s_line):
                    # Filter rejected this requirement; keep it but disabled.
                    new_lines.append(("# %s" % (s_line)))
                else:
                    new_lines.append(s_line)
            contents = "# Cleaned on %s\n\n%s\n" % (utils.iso8601(),
                                                    "\n".join(new_lines))
            sh.write_file_and_backup(fn, contents)
    return len(req_fns)
def install(self):
    """Install the built dependency packages system-wide via yum.

    Publishes the anvil repo file, erases any previously-installed deps
    package, then installs the deps package plus the rpm equivalents of
    the gathered python requirements.
    """
    super(YumDependencyHandler, self).install()
    repo_filename = sh.joinpths(self.YUM_REPO_DIR, self.REPO_FN)
    # Ensure we copy the local repo file name to the main repo so that
    # yum will find it when installing packages.
    sh.write_file(repo_filename, sh.load_file(self.anvil_repo_filename),
                  tracewriter=self.tracewriter)
    # Erase it if its been previously installed.
    cmdline = []
    if self.helper.is_installed(self.OPENSTACK_DEPS_PACKAGE_NAME):
        cmdline.append(self.OPENSTACK_DEPS_PACKAGE_NAME)
    for p in self.nopackages:
        if self.helper.is_installed(p):
            cmdline.append(p)
    if cmdline:
        cmdline = ["yum", "erase", "-y"] + cmdline
        sh.execute(cmdline, stdout_fh=sys.stdout, stderr_fh=sys.stderr)
    # Flush yum caches so the freshly written repo file takes effect.
    cmdline = ["yum", "clean", "all"]
    sh.execute(cmdline)
    cmdline = ["yum", "install", "-y", self.OPENSTACK_DEPS_PACKAGE_NAME]
    sh.execute(cmdline, stdout_fh=sys.stdout, stderr_fh=sys.stderr)
    # Also install the rpm equivalents of the python requirement names.
    rpm_names = self._convert_names_python2rpm(self.python_names)
    if rpm_names:
        cmdline = ["yum", "install", "-y"] + rpm_names
        sh.execute(cmdline, stdout_fh=sys.stdout, stderr_fh=sys.stderr)
def pre_build():
    """Install build requirements and prebuild any matching source rpms.

    Returns the set of build-requirement names that were NOT matched to a
    downloaded source rpm (i.e. still outstanding).
    """
    build_requirements = self.requirements.get("build-requires")
    if build_requirements:
        utils.log_iterable(build_requirements,
                           header="Installing build requirements",
                           logger=LOG)
        self.helper.transaction(install_pkgs=build_requirements,
                                tracewriter=self.tracewriter)
    build_requirements = ''
    try:
        build_requirements = sh.load_file(self.rpm_build_requires_filename)
    except IOError as e:
        # A missing file just means nothing was recorded; anything else
        # is a real error worth propagating.
        if e.errno != errno.ENOENT:
            raise
    build_requirements = set(pkg_resources.yield_lines(build_requirements))
    for repo_name in self.REPOS:
        repo_dir = sh.joinpths(self.anvil_repo_dir, self.SRC_REPOS[repo_name])
        matched_paths = []
        paths = list_src_rpms(repo_dir)
        envra_details = self.envra_helper.explode(*paths)
        for (path, envra_detail) in zip(paths, envra_details):
            package_name = envra_detail.get('name')
            if package_name in build_requirements:
                matched_paths.append(path)
                build_requirements.discard(package_name)
        if matched_paths:
            # Move matched srpms into a scratch dir and build them there.
            with sh.remove_before(self.prebuild_dir) as prebuild_dir:
                if not sh.isdir(prebuild_dir):
                    sh.mkdirslist(prebuild_dir, tracewriter=self.tracewriter)
                for path in matched_paths:
                    sh.move(path, sh.joinpths(prebuild_dir, sh.basename(path)))
                build(prebuild_dir, repo_name,
                      'Prebuilding %s RPM packages from their SRPMs'
                      ' for repo %s using %s jobs')
    return build_requirements
def source_config(self, config_fn):
    """Return (path, contents) for config_fn, falling back to templates."""
    if not self.config_dir:
        # No config directory configured: use the bundled template.
        return utils.load_template(self.installer.name, config_fn)
    # Logical names may be remapped to on-disk names.
    real_fn = self.source_configs.get(config_fn, config_fn)
    fn = sh.joinpths(self.config_dir, real_fn)
    return (fn, sh.load_file(fn))
def install(self):
    """Download (or reuse a cached copy of), unpack and register the image.

    Returns a (image_name, image_id) tuple.
    """
    url_fn = self._extract_url_fn()
    if not url_fn:
        raise IOError("Can not determine file name from url: %r" % (self.url))
    (cache_path, details_path) = self._cached_paths()
    use_cached = self._validate_cache(cache_path, details_path)
    if use_cached:
        LOG.info("Found valid cached image + metadata at: %s",
                 colorizer.quote(cache_path))
        unpack_info = utils.load_yaml_text(sh.load_file(details_path))
    else:
        sh.mkdir(cache_path)
        if not self._is_url_local():
            (fetched_fn, bytes_down) = down.UrlLibDownloader(
                self.url, sh.joinpths(cache_path, url_fn)).download()
            LOG.debug("For url %s we downloaded %s bytes to %s", self.url,
                      bytes_down, fetched_fn)
        else:
            # Local urls need no download step.
            fetched_fn = self.url
        unpack_info = Unpacker().unpack(url_fn, fetched_fn, cache_path)
        # Persist the unpack metadata so later runs can reuse this cache.
        sh.write_file(details_path, utils.prettify_yaml(unpack_info))
    tgt_image_name = self._generate_img_name(url_fn)
    img_id = self._register(tgt_image_name, unpack_info)
    return (tgt_image_name, img_id)
def replace_forced_requirements(fn, forced_by_key):
    """Rewrite requirement lines in fn using the forced replacements.

    forced_by_key maps requirement keys to forced requirement objects; any
    matching line is swapped out. Returns the number of replaced lines.
    """
    old_lines = sh.load_file(fn).splitlines()
    new_lines = []
    alterations = []
    for line in old_lines:
        try:
            source_req = pip_helper.extract_requirement(line)
        except (ValueError, TypeError):
            # Not a parseable requirement line; keep it untouched.
            pass
        else:
            if source_req:
                validate_requirement(fn, source_req)
                try:
                    replace_req = forced_by_key[source_req.key]
                except KeyError:
                    # No forced version exists for this requirement.
                    pass
                else:
                    replace_req = str(replace_req)
                    source_req = str(source_req)
                    if replace_req != source_req:
                        line = replace_req
                        alterations.append("%s => %s"
                                           % (colorizer.quote(source_req),
                                              colorizer.quote(replace_req)))
        new_lines.append(line)
    if alterations:
        # Only rewrite (with backup) when something actually changed.
        contents = "# Cleaned on %s\n\n%s\n" % (utils.iso8601(),
                                                "\n".join(new_lines))
        sh.write_file_and_backup(fn, contents)
        utils.log_iterable(alterations, logger=LOG,
                           header="Replaced %s requirements in %s"
                                  % (len(alterations), fn),
                           color=None)
    return len(alterations)
def _get_lines(filename):
    """Return stripped, non-empty, non-comment lines from filename."""
    stripped = (raw.strip() for raw in sh.load_file(filename).splitlines())
    return [line for line in stripped if line and not line.startswith("#")]
def _install_node_repo(self):
    """Install the configured node.js yum repository (if any).

    Supports either a '.repo' file (written into /etc/yum.repos.d) or a
    '.rpm' that itself carries the repo definition (directly installed).
    """
    repo_url = self.get_option('nodejs_repo')
    if not repo_url:
        return
    # Download the said url and install it so that we can actually install
    # the node.js requirement which seems to be needed by horizon for css compiling??
    repo_basename = sh.basename(repo_url)
    (_fn, fn_ext) = os.path.splitext(repo_basename)
    fn_ext = fn_ext.lower().strip()
    if fn_ext not in ['.rpm', '.repo']:
        LOG.warn("Unknown node.js repository configuration extension %s (we only support .rpm or .repo)!",
                 colorizer.quote(fn_ext))
        return
    with NamedTemporaryFile(suffix=fn_ext) as temp_fh:
        LOG.info("Downloading node.js repository configuration from %s to %s.",
                 repo_url, temp_fh.name)
        down.UrlLibDownloader(repo_url, temp_fh.name).download()
        temp_fh.flush()
        if fn_ext == ".repo":
            # Just write out the repo file after downloading it...
            repo_file_name = sh.joinpths("/etc/yum.repos.d", repo_basename)
            if not sh.exists(repo_file_name):
                with sh.Rooted(True):
                    sh.write_file(repo_file_name, sh.load_file(temp_fh.name),
                                  tracewriter=self.tracewriter)
                    # 0o644 replaces the py2-only literal 0644 so this file
                    # stays importable under python 3 as well.
                    sh.chmod(repo_file_name, 0o644)
        elif fn_ext == ".rpm":
            # Install it instead from said rpm (which likely is a
            # file that contains said repo location)...
            yum.YumPackager(self.distro).direct_install(temp_fh.name)
def _fetch_epoch_mapping(self):
    """Build the final package-name -> epoch mapping.

    Combines the distro's configured epoch_map with the default epoch for
    every python package being built, discarding configured entries that
    were never part of the downloaded build requirements.
    """
    epoch_map = self.distro.get_dependency_config("epoch_map", quiet=True)
    if not epoch_map:
        epoch_map = {}
    built_epochs = {}
    for name in self.python_names:
        if name in epoch_map:
            # An explicit override wins over the default epoch.
            built_epochs[name] = epoch_map.pop(name)
        else:
            built_epochs[name] = self.OPENSTACK_EPOCH
    # Exclude names from the epoch map that we never downloaded in the
    # first place (since these are not useful and should not be set in
    # the first place).
    try:
        raw_downloaded = sh.load_file(self.build_requires_filename)
        downloaded_reqs = pip_helper.parse_requirements(raw_downloaded)
    except IOError as e:
        # No requirements file means nothing to filter against; other
        # io errors are real failures.
        if e.errno != errno.ENOENT:
            raise
    else:
        downloaded_names = set([req.key for req in downloaded_reqs])
        tmp_epoch_map = {}
        for (name, epoch) in six.iteritems(epoch_map):
            if name.lower() in downloaded_names:
                tmp_epoch_map[name] = epoch
            else:
                LOG.debug(
                    "Discarding %s:%s from the epoch mapping since"
                    " it was not part of the downloaded build"
                    " requirements", name, epoch)
        epoch_map = tmp_epoch_map
    epoch_map.update(built_epochs)
    return epoch_map
def _fetch_epoch_mapping(self):
    """Build the final package-name -> epoch mapping.

    Combines the distro's configured epoch_map with the default epoch for
    every python package being built, discarding configured entries that
    were never part of the downloaded build requirements.
    """
    epoch_map = self.distro.get_dependency_config("epoch_map", quiet=True)
    if not epoch_map:
        epoch_map = {}
    built_epochs = {}
    for name in self.python_names:
        if name in epoch_map:
            # An explicit override wins over the default epoch.
            built_epochs[name] = epoch_map.pop(name)
        else:
            built_epochs[name] = self.OPENSTACK_EPOCH
    # Exclude names from the epoch map that we never downloaded in the
    # first place (since these are not useful and should not be set in
    # the first place).
    try:
        raw_downloaded = sh.load_file(self.build_requires_filename)
        downloaded_reqs = pip_helper.parse_requirements(raw_downloaded)
    except IOError as e:
        # No requirements file means nothing to filter against; other
        # io errors are real failures.
        if e.errno != errno.ENOENT:
            raise
    else:
        downloaded_names = set([req.key for req in downloaded_reqs])
        tmp_epoch_map = {}
        for (name, epoch) in six.iteritems(epoch_map):
            if name.lower() in downloaded_names:
                tmp_epoch_map[name] = epoch
            else:
                LOG.debug("Discarding %s:%s from the epoch mapping since"
                          " it was not part of the downloaded build"
                          " requirements", name, epoch)
        epoch_map = tmp_epoch_map
    epoch_map.update(built_epochs)
    return epoch_map
def source_config(self, config_fn):
    """Return (path, contents) for config_fn under the quantum etc tree."""
    # Plugin and rootwrap configs live one level deeper in the checkout.
    if config_fn.startswith(("plugins", "rootwrap.d")):
        real_fn = "quantum/%s" % config_fn
    else:
        real_fn = config_fn
    fn = sh.joinpths(self.installer.get_option("app_dir"), "etc", real_fn)
    return (fn, sh.load_file(fn))
def _extract_pid(self, filename):
    """Return the integer pid stored in filename, or None if unavailable."""
    if not sh.isfile(filename):
        return None
    try:
        return int(sh.load_file(filename).strip())
    except ValueError:
        # File contents were not a valid integer.
        return None
def _get_source_config(self, config_fn):
    """Return (path, contents) of the source config for config_fn."""
    # Map logical config names onto their in-checkout source names.
    renames = {
        LOGGING_CONF: LOGGING_SOURCE_FN,
        ROOT_CONF: ROOT_SOURCE_FN,
    }
    real_fn = renames.get(config_fn, config_fn)
    fn = sh.joinpths(self.get_option('app_dir'), 'etc', real_fn)
    return (fn, sh.load_file(fn))
def source_config(self, config_fn):
    """Return (path, contents) of the sample source config for config_fn."""
    # The checkout ships these configs under sample names.
    samples = {
        LOGGING_CONF: 'logging.conf.sample',
        ROOT_CONF: "keystone.conf.sample",
    }
    real_fn = samples.get(config_fn, config_fn)
    fn = sh.joinpths(self.get_option('app_dir'), 'etc', real_fn)
    return (fn, sh.load_file(fn))
def source_config(self, config_fn):
    """Return (path, contents) for config_fn under the neutron etc tree."""
    # Plugin and rootwrap configs live one level deeper in the checkout.
    if config_fn.startswith(("plugins", "rootwrap.d")):
        real_fn = "neutron/%s" % config_fn
    else:
        real_fn = config_fn
    fn = sh.joinpths(self.installer.get_option("app_dir"), "etc", real_fn)
    return (fn, sh.load_file(fn))
def source_config(self, config_fn):
    """Return (path, contents) of the nova source config for config_fn."""
    # Translate well-known logical names into checkout sample names.
    remapped = {
        PASTE_CONF: "api-paste.ini",
        LOGGING_CONF: "logging_sample.conf",
        API_CONF: "nova.conf.sample",
    }
    config_fn = remapped.get(config_fn, config_fn)
    fn = sh.joinpths(self.get_option("app_dir"), "etc", "nova", config_fn)
    return (fn, sh.load_file(fn))
def source_config(self, config_fn):
    """Return (path, contents) of the nova source config for config_fn."""
    # Translate well-known logical names into checkout sample names.
    sample_names = {
        PASTE_CONF: 'api-paste.ini',
        LOGGING_CONF: 'logging_sample.conf',
        API_CONF: 'nova.conf.sample',
    }
    real_fn = sample_names.get(config_fn, config_fn)
    fn = sh.joinpths(self.get_option('app_dir'), 'etc', "nova", real_fn)
    return (fn, sh.load_file(fn))
def _replace_deployment_paths(self, root_dir, replacer):
    """Run replacer over every file under root_dir.

    Returns a (files_replaced, total_replacements) tuple.
    """
    files_replaced = 0
    total_replacements = 0
    for path in sh.listdir(root_dir, recursive=True, files_only=True):
        new_contents, replacements = replacer(sh.load_file(path))
        if not replacements:
            # Nothing changed in this file; leave it untouched.
            continue
        sh.write_file(path, new_contents)
        files_replaced += 1
        total_replacements += replacements
    return (files_replaced, total_replacements)
def _extract_pip_requires(self, fn):
    """Parse fn as pip requirements and pair each with its package match."""
    if not sh.isfile(fn):
        return []
    LOG.debug("Resolving dependencies from %s.", colorizer.quote(fn))
    matchings = []
    for req in pip_helper.parse_requirements(sh.load_file(fn)):
        pkg_info, from_pip = self._match_pip_requires(req)
        matchings.append({"requirement": req,
                          "package": pkg_info,
                          "from_pip": from_pip,
                          "needed_by": fn})
    return matchings
def status(self, app_name):
    """Return (status, combined stdout+stderr) for a forked application."""
    trace_dir = self.runtime.get_option('trace_dir')
    if not sh.isdir(trace_dir):
        # Without a trace directory there is nothing to inspect.
        return (STATUS_UNKNOWN, '')
    (pid_file, stderr_fn, stdout_fn) = self._form_file_names(FORK_TEMPL % (app_name))
    pid = self._extract_pid(pid_file)
    # Best-effort reads: a missing capture file just yields empty output.
    stderr = ''
    try:
        stderr = sh.load_file(stderr_fn)
    except IOError:
        pass
    stdout = ''
    try:
        stdout = sh.load_file(stdout_fn)
    except IOError:
        pass
    if pid and sh.is_running(pid):
        return (STATUS_STARTED, (stdout + stderr).strip())
    else:
        return (STATUS_UNKNOWN, (stdout + stderr).strip())
def extract_pid(self):
    """Return the integer pid recorded in the pid file (or None).

    The pid file is expected to contain a single integer; anything else
    (or no pid file at all) yields None.
    """
    if not self.pid:
        return None
    try:
        return int(sh.load_file(self.pid).strip())
    except (ValueError, TypeError):
        # Pid file contents were not convertible to an integer.
        return None
def _configure_db_confs(self):
    """Disable skip-grant-tables and listen on all interfaces in my.cnf."""
    LOG.info("Fixing up %s mysql configs.", colorizer.quote(self.distro.name))

    def _adjust(line):
        # Return the replacement line(s) for a single config line.
        if line.startswith('skip-grant-tables'):
            return ['#' + line]
        if line.startswith('bind-address'):
            # Disable the distro default and bind to all interfaces.
            return ['#' + line, 'bind-address = 0.0.0.0']
        return [line]

    new_lines = []
    for line in sh.load_file(DBInstaller.MYSQL_CONF).splitlines():
        new_lines.extend(_adjust(line))
    sh.write_file_and_backup(DBInstaller.MYSQL_CONF,
                             utils.joinlinesep(*new_lines))
def _parse_mailmap(self):
    """Parse the .mailmap file into an alias -> canonical email mapping."""
    mapping = {}
    mailmap_fn = sh.joinpths(self.wkdir, '.mailmap')
    for line in sh.load_file(mailmap_fn).splitlines():
        line = line.strip()
        if not line.startswith('#') and ' ' in line:
            try:
                # Expect exactly two '<...>' tokens: canonical then alias;
                # unpacking fails (and the line is skipped) otherwise.
                (canonical_email, alias) = [x for x in line.split(' ')
                                            if x.startswith('<')]
                mapping[alias] = canonical_email
            except (TypeError, ValueError, IndexError):
                # Malformed lines are silently ignored.
                pass
    return mapping
def pre_install(self):
    """Run the base pre-install, then optionally disable git pip requires.

    When 'eliminate_pip_gits' is set, any line in tools/pip-requires that
    references a git:// url is commented out (anvil manages those itself).
    """
    comp.PythonInstallComponent.pre_install(self)
    if self.cfg.getboolean('glance', 'eliminate_pip_gits'):
        fn = sh.joinpths(self.get_option('app_dir'), 'tools', 'pip-requires')
        if sh.isfile(fn):
            new_lines = []
            for line in sh.load_file(fn).splitlines():
                # Idiomatic containment test instead of find() != -1.
                if "git://" in line:
                    new_lines.append("# %s" % (line))
                else:
                    new_lines.append(line)
            sh.write_file(fn, "\n".join(new_lines))
def _configure_db_confs(self):
    """Comment out skip-grant-tables in /etc/my.cnf (requires root)."""
    LOG.info("Fixing up %s mysql configs.", colorizer.quote(self.distro.name))
    adjusted = ['#' + line if line.startswith('skip-grant-tables') else line
                for line in sh.load_file('/etc/my.cnf').splitlines()]
    with sh.Rooted(True):
        sh.write_file('/etc/my.cnf', utils.joinlinesep(*adjusted))
def list_phases(self):
    """Return the set of phase names recorded in the phase trace file."""
    phases = set()
    if not sh.isfile(self.fn):
        return phases
    for line_num, raw_line in enumerate(sh.load_file(self.fn).splitlines()):
        entry = raw_line.strip()
        if not entry:
            # Blank lines carry no phase information.
            continue
        data = json.loads(entry)
        if not isinstance(data, dict):
            raise TypeError("Unknown phase entry in %s on line %s" % (self.fn, line_num + 1))
        if 'name' in data:
            phases.add(data['name'])
    return phases
def _configure_db_confs(self):
    """Disable skip-grant-tables and open the bind address (as root)."""
    LOG.info("Fixing up %s mysql configs.", colorizer.quote(self.distro.name))
    fixed = []
    for line in sh.load_file(DBInstaller.MYSQL_CONF).splitlines():
        if line.startswith('skip-grant-tables'):
            fixed.append('#' + line)
        elif line.startswith('bind-address'):
            # Replace the distro default with a wildcard bind address.
            fixed.extend(['#' + line, 'bind-address = 0.0.0.0'])
        else:
            fixed.append(line)
    with sh.Rooted(True):
        sh.write_file_and_backup(DBInstaller.MYSQL_CONF,
                                 utils.joinlinesep(*fixed))
def apply_patches(patch_files, working_dir):
    """Apply the expanded patch files inside working_dir."""
    apply_files = expand_patches(patch_files)
    if not len(apply_files):
        return
    if not sh.isdir(working_dir):
        LOG.warn("Can only apply %s patches 'inside' a directory and not '%s'",
                 len(apply_files), working_dir)
        return
    with utils.chdir(working_dir):
        for p in apply_files:
            LOG.debug("Applying patch %s in directory %s", p, working_dir)
            patch_contents = sh.load_file(p)
            if len(patch_contents):
                # NOTE(review): PATCH_CMD is star-unpacked into positional
                # arguments here, while other call sites pass the command
                # list directly -- confirm which form sh.execute expects.
                sh.execute(*PATCH_CMD, process_input=patch_contents)
def apply_patches(patch_files, working_dir):
    """Apply each expanded patch file inside working_dir."""
    apply_files = expand_patches(patch_files)
    if not apply_files:
        return
    if not sh.isdir(working_dir):
        LOG.warn("Can only apply %s patches 'inside' a directory and not '%s'",
                 len(apply_files), working_dir)
        return
    with utils.chdir(working_dir):
        for patch_fn in apply_files:
            LOG.debug("Applying patch %s in directory %s", patch_fn, working_dir)
            contents = sh.load_file(patch_fn)
            if contents:
                # Empty patch files are skipped silently.
                sh.execute(PATCH_CMD, process_input=contents)
def _parse(self):
    """Parse the trace file into a list of entry tuples.

    Raises NoTraceException when the trace file does not exist.
    """
    fn = self.trace_fn
    if not sh.isfile(fn):
        raise excp.NoTraceException("No trace found at filename %s" % (fn))
    entries = []
    for line in sh.load_file(fn).splitlines():
        parsed = self._split_line(line)
        if parsed is not None:
            entries.append(tuple(parsed))
    return entries
def _extract_pip_requires(self, fn):
    """Parse the pip requirements in fn and match each to a package."""
    if not sh.isfile(fn):
        return []
    LOG.debug("Resolving dependencies from %s.", colorizer.quote(fn))
    results = []
    for requirement in pip_helper.parse_requirements(sh.load_file(fn)):
        pkg_info, from_pip = self._match_pip_requires(requirement)
        results.append({
            'requirement': requirement,
            'package': pkg_info,
            'from_pip': from_pip,
            'needed_by': fn,
        })
    return results
def _config_fix_httpd(self):
    """Rewrite the apache User/Group/Listen directives (requires root)."""
    LOG.info("Fixing up: %s", colorizer.quote(HorizonInstaller.HTTPD_CONF))
    (user, group) = self._get_apache_user_group()
    new_lines = []
    for line in sh.load_file(HorizonInstaller.HTTPD_CONF).splitlines():
        # Directives in the configuration files are case-insensitive,
        # but arguments to directives are often case sensitive...
        # NOTE(harlowja): we aren't handling multi-line fixups...
        # Raw strings keep "\s" a valid regex escape on python 3.
        if re.match(r"^\s*User\s+(.*)$", line, re.I):
            line = "User %s" % (user)
        if re.match(r"^\s*Group\s+(.*)$", line, re.I):
            line = "Group %s" % (group)
        if re.match(r"^\s*Listen\s+(.*)$", line, re.I):
            line = "Listen 0.0.0.0:80"
        new_lines.append(line)
    with sh.Rooted(True):
        sh.write_file_and_backup(HorizonInstaller.HTTPD_CONF,
                                 utils.joinlinesep(*new_lines))
def _get_rpm_names(self, from_deps=True, from_instances=True):
    """Collect the sorted rpm names (with version specs) to install.

    from_instances pulls names from component instances (and their built
    source rpms); from_deps pulls names from the gathered requirements
    file produced by earlier build stages.
    """
    desired_rpms = []
    py_reqs = set()
    if from_instances:
        inst_packages = list(self.requirements["requires"])
        for inst in self.instances:
            inst_packages.extend(inst.package_names())
            if sh.isdir(inst.get_option("app_dir")):
                try:
                    py_req = inst.egg_info['req']
                except AttributeError:
                    # No egg info available for this instance; skip it.
                    pass
                else:
                    rpm_name, _ = self._find_template_and_rpm_name(
                        inst, inst.get_option('build_name', default_value=inst.name))
                    if rpm_name is not None:
                        desired_rpms.append((rpm_name, py_req))
                    else:
                        # No direct rpm found; resolve via py2rpm below.
                        py_reqs.add(py_req)
        for rpm_name in inst_packages:
            desired_rpms.append((rpm_name, None))
    if from_deps:
        # This file should have all the requirements (including test ones)
        # that we need to install (and which should have been built as rpms
        # in the previous build stages).
        requires = sh.load_file(
            self.gathered_requires_filename).splitlines()
        for line in [line.strip() for line in requires if line.strip()]:
            py_reqs.add(pip_helper.extract_requirement(line))
    rpm_names = self.py2rpm_helper.names_to_rpm_names(
        [req.key for req in py_reqs])
    desired_rpms.extend((rpm_names[req.key], req) for req in py_reqs)

    def _format_name(rpm_name, py_req):
        # Render as "<rpm-name><spec>,<spec>..."; specs omitted when the
        # requirement is unknown (None).
        full_name = str(rpm_name).strip()
        if py_req is not None:
            full_name += ','.join(''.join(x) for x in py_req.specs)
        return full_name

    return sorted(
        _format_name(rpm_name, py_req)
        for rpm_name, py_req in desired_rpms)
def _validate_cache(self, cache_path, details_path):
    """Return True when the cached image and its metadata look usable."""
    if not all(sh.exists(p) for p in (cache_path, details_path)):
        return False
    try:
        unpack_info = utils.load_yaml_text(sh.load_file(details_path))
        check_files = [unpack_info['file_name']]
        for component in ('kernel', 'ramdisk'):
            if component in unpack_info:
                check_files.append(unpack_info[component]['file_name'])
    except Exception:
        # Any parse or keying failure means the cache can not be trusted.
        return False
    return all(sh.isfile(p) for p in check_files)
def source_config(self, config_fn):
    """Return (path, contents) for config_fn.

    Entries in self.source_configs may remap a logical name, optionally
    as a (real_name, allow_missing) pair. Falls back to a bundled
    template when no config_dir is set.
    """
    if self.config_dir:
        allow_missing = False
        if config_fn in self.source_configs:
            config_data = self.source_configs.get(config_fn)
            if isinstance(config_data, (tuple, list)):
                # Pair form: (real filename, allow-missing flag).
                config_fn, allow_missing = config_data
            else:
                config_fn = config_data
        fn = sh.joinpths(self.config_dir, config_fn)
        try:
            return (fn, sh.load_file(fn))
        except IOError as e:
            if e.errno == errno.ENOENT and allow_missing:
                # Tolerated missing file: behave as if it were empty.
                return (fn, '')
            else:
                raise
    return utils.load_template(self.installer.name, config_fn)
def _extract_pip_requires(self, fn):
    """Parse requirements in fn and pair each with its pip match."""
    if not sh.isfile(fn):
        return []
    LOG.debug("Resolving dependencies from %s.", colorizer.quote(fn))
    pips_needed = []
    for raw in sh.load_file(fn).splitlines():
        entry = raw.strip()
        # Blank lines and comments are not requirements.
        if entry and not entry.startswith("#"):
            pips_needed.append(Requirement.parse(entry))
    if not pips_needed:
        return []
    return [[requirement, self._match_pip_requires(requirement.project_name)]
            for requirement in pips_needed]
def apply_patches(patch_files, working_dir):
    """Apply plain and git patches inside working_dir, each file once."""
    if not sh.isdir(working_dir):
        LOG.warn("Can only apply patches 'inside' a directory and not '%s'",
                 working_dir)
        return
    already_applied = set()
    # Plain '.patch' files use the patch tool; '.git_patch' files use git.
    for patch_ext, patch_cmd in [('.patch', PATCH_CMD),
                                 ('.git_patch', GIT_PATCH_CMD)]:
        apply_files = expand_patches(patch_files, patch_ext=patch_ext)
        apply_files = [p for p in apply_files if p not in already_applied]
        if not apply_files:
            continue
        with utils.chdir(working_dir):
            for p in apply_files:
                LOG.debug("Applying patch %s using command %s in directory %s",
                          p, patch_cmd, working_dir)
                patch_contents = sh.load_file(p)
                if len(patch_contents):
                    # Empty patch files are skipped silently.
                    sh.execute(patch_cmd, process_input=patch_contents)
                already_applied.add(p)
def _clean_pip_requires(self):
    """Filter and rewrite (with backup) each existing pip 'requires' file.

    Junk entries would otherwise be installed by setup.py instead of by
    anvil itself (and editables might pull in unwanted external sources).
    Returns the number of files adjusted.
    """
    req_fns = []
    for fn in self.requires_files:
        if sh.isfile(fn):
            req_fns.append(fn)
    if req_fns:
        utils.log_iterable(req_fns, logger=LOG,
                           header="Adjusting %s pip 'requires' files" % (len(req_fns)))
        for fn in req_fns:
            filtered = self._filter_pip_requires(fn, sh.load_file(fn).splitlines())
            contents = "# Cleaned on %s\n\n%s\n" % (utils.iso8601(),
                                                    "\n".join(filtered))
            sh.write_file_and_backup(fn, contents)
    return len(req_fns)
def _get_details(self, program, status):
    """Return the program's log contents when started, else None."""
    if status != STATUS_STARTED:
        return None
    daemon_program = self.daemon_name(program)
    # TODO(harlowja): we can likely figure this out in a different manner,
    # but for now try a bunch of likely paths and select the first path that
    # exists and is readable as the location of the log file of the program.
    candidates = (
        "%s.log" % (daemon_program),
        "%s.log" % (program),
        "%s-%s.log" % (self.name, program),
        "%s-%s.log" % (self.name, daemon_program),
    )
    for base in candidates:
        path = sh.joinpths('/var/log/', self.name, base)
        if sh.isfile(path):
            try:
                return sh.load_file(path)
            except (OSError, IOError):
                # Unreadable candidate; keep trying the rest.
                pass
    return None
def _clean_pip_requires(self, requires_files):
    """Force known-incompatible requirements to their pinned versions.

    Rewrites each requires file so any line whose requirement key appears
    in self.forced_packages uses the forced requirement instead.
    """
    # Fixup incompatible dependencies
    if not (requires_files and self.forced_packages):
        return
    utils.log_iterable(sorted(requires_files), logger=LOG,
                       header="Adjusting %s pip 'requires' files"
                              % (len(requires_files)))
    forced_by_key = dict((pkg.key, pkg) for pkg in self.forced_packages)
    for fn in requires_files:
        old_lines = sh.load_file(fn).splitlines()
        new_lines = []
        for line in old_lines:
            try:
                req = pip_helper.extract_requirement(line)
                new_lines.append(str(forced_by_key[req.key]))
            except Exception:
                # we don't force the package or it has a bad format
                new_lines.append(line)
        contents = "# Cleaned on %s\n\n%s\n" % (utils.iso8601(),
                                                "\n".join(new_lines))
        sh.write_file_and_backup(fn, contents)
def replace_forced_requirements(fn, forced_by_key):
    """Rewrite requirement lines in fn using the forced replacements.

    forced_by_key maps requirement keys to forced requirement objects; any
    matching line is swapped out. Returns the number of replaced lines.
    """
    old_lines = sh.load_file(fn).splitlines()
    new_lines = []
    alterations = []
    for line in old_lines:
        try:
            source_req = pip_helper.extract_requirement(line)
        except (ValueError, TypeError):
            # Not a parseable requirement line; keep it untouched.
            pass
        else:
            if source_req:
                validate_requirement(fn, source_req)
                try:
                    replace_req = forced_by_key[source_req.key]
                except KeyError:
                    # No forced version exists for this requirement.
                    pass
                else:
                    replace_req = str(replace_req)
                    source_req = str(source_req)
                    if replace_req != source_req:
                        line = replace_req
                        alterations.append(
                            "%s => %s"
                            % (colorizer.quote(source_req),
                               colorizer.quote(replace_req)))
        new_lines.append(line)
    if alterations:
        # Only rewrite (with backup) when something actually changed.
        contents = "# Cleaned on %s\n\n%s\n" % (utils.iso8601(),
                                                "\n".join(new_lines))
        sh.write_file_and_backup(fn, contents)
        utils.log_iterable(alterations, logger=LOG,
                           header="Replaced %s requirements in %s"
                                  % (len(alterations), fn),
                           color=None)
    return len(alterations)
def load_yaml(path):
    """Read the file at path and parse its contents as yaml."""
    contents = sh.load_file(path)
    return load_yaml_text(contents)