def _filter_download_requires(self):
    yum_map = self._get_known_yum_packages()
    pip_origins = {}
    for line in self.pips_to_install:
        req = pip_helper.extract_requirement(line)
        pip_origins[req.key] = line
    pips_to_download = []
    req_to_install = [pip_helper.extract_requirement(line)
                      for line in self.pips_to_install]
    requested_names = [req.key for req in req_to_install]
    rpm_to_install = self._convert_names_python2rpm(requested_names)
    satisfied_list = []
    for (req, rpm_name) in zip(req_to_install, rpm_to_install):
        (version, repo) = self._find_yum_match(yum_map, req, rpm_name)
        if not repo:
            # We need the source requirement in case it's a url.
            pips_to_download.append(pip_origins[req.key])
        else:
            satisfied_list.append((req, rpm_name, version, repo))
    if satisfied_list:
        # Organize by repo
        repos = collections.defaultdict(list)
        for (req, rpm_name, version, repo) in satisfied_list:
            repos[repo].append("%s as %s-%s" % (req, rpm_name, version))
        for r in sorted(repos.keys()):
            header = ("%s Python packages are already available "
                      "as RPMs from repository %s")
            header = header % (len(repos[r]), colorizer.quote(r))
            utils.log_iterable(sorted(repos[r]), logger=LOG, header=header)
    return pips_to_download

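# Illustrative only: a minimal standalone sketch (not anvil's real helper) of
# the kind of matching _find_yum_match is assumed to perform above, where
# yum_map is taken to map rpm package names to the (version, repo) pairs yum
# knows about. The 'version in req' containment test is how pkg_resources
# requirements are checked elsewhere in this code (see validate_requirement
# inside _scan_pip_requires below).
def _find_yum_match_sketch(yum_map, req, rpm_name):
    for (version, repo) in yum_map.get(rpm_name, []):
        if version in req:
            return (version, repo)
    return (None, None)
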
def _find_pieces(self, files, files_location):
    """Match files against the patterns in KERNEL_CHECKS,
    RAMDISK_CHECKS, and ROOT_CHECKS to determine which files
    contain which image parts.
    """
    kernel_fn = None
    ramdisk_fn = None
    img_fn = None
    utils.log_iterable(files, logger=LOG,
                       header="Looking at %s files from %s to find the"
                              " kernel/ramdisk/root images" %
                              (len(files), colorizer.quote(files_location)))
    for fn in files:
        if self._pat_checker(fn, KERNEL_CHECKS):
            kernel_fn = fn
            LOG.debug("Found kernel: %r", fn)
        elif self._pat_checker(fn, RAMDISK_CHECKS):
            ramdisk_fn = fn
            LOG.debug("Found ram disk: %r", fn)
        elif self._pat_checker(fn, ROOT_CHECKS):
            img_fn = fn
            LOG.debug("Found root image: %r", fn)
        else:
            LOG.debug("Unknown member %r - skipping", fn)
    return (img_fn, ramdisk_fn, kernel_fn)

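# Illustrative only: KERNEL_CHECKS, RAMDISK_CHECKS and ROOT_CHECKS are assumed
# to be lists of compiled regular expressions and _pat_checker a simple
# any-match test over them; the example patterns here are hypothetical.
import re

KERNEL_CHECKS_EXAMPLE = [re.compile(r'(.*)vmlinuz(.*)', re.I),
                         re.compile(r'(.*)-kernel$', re.I)]

def _pat_checker_sketch(fn, patterns):
    return any(pat.match(fn) for pat in patterns)
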
def _build_dependencies(self):
    package_files = self.download_dependencies()

    def filter_files(package_files):
        for p in package_files:
            banned = False
            for k in self.BANNED_PACKAGES:
                if k in p.lower():
                    banned = True
            if banned:
                continue
            yield p

    package_files = list(filter_files(package_files))
    if not package_files:
        LOG.info("No RPM packages of OpenStack dependencies to build")
        return
    package_base_names = [sh.basename(f) for f in package_files]
    utils.log_iterable(sorted(package_base_names), logger=LOG,
                       header=("Building %s dependency RPM"
                               " packages") % (len(package_files)))
    with utils.progress_bar(name='Building',
                            max_am=len(package_files)) as p_bar:
        for (i, filename) in enumerate(sorted(package_files)):
            cmdline = self.py2rpm_start_cmdline() + ["--", filename]
            build_filename = "py2rpm-%s.out" % sh.basename(filename)
            out_filename = sh.joinpths(self.log_dir, build_filename)
            sh.execute_save_output(cmdline, out_filename=out_filename,
                                   quiet=True)
            p_bar.update(i + 1)

def _unconfigure_links(self):
    sym_files = self.tracereader.symlinks_made()
    if sym_files:
        utils.log_iterable(sym_files, logger=LOG,
                           header="Removing %s symlink files" %
                                  (len(sym_files)))
        for fn in sym_files:
            sh.unlink(fn, run_as_root=True)

def build(repo_dir, repo_name, header_tpl):
    repo_files = list_src_rpms(repo_dir)
    if not repo_files:
        return
    utils.log_iterable(repo_files,
                       header=header_tpl % (len(repo_files),
                                            self.SRC_REPOS[repo_name],
                                            self.jobs),
                       logger=LOG)
    rpmbuild_flags = "--rebuild"
    if self.opts.get("usr_only", False):
        rpmbuild_flags += " --define 'usr_only 1'"
    with sh.remove_before(self.rpmbuild_dir):
        self._create_rpmbuild_subdirs()
        try:
            self.py2rpm_helper.build_all_binaries(repo_name, repo_dir,
                                                  rpmbuild_flags,
                                                  self.tracewriter,
                                                  self.jobs)
        finally:
            # If we made any rpms (even if a failure happened), make
            # sure that we move them to the right target repo.
            if move_rpms(repo_name) > 0:
                self._create_repo(repo_name)

def pre_build():
    build_requirements = self.requirements.get("build-requires")
    if build_requirements:
        utils.log_iterable(build_requirements,
                           header="Installing build requirements",
                           logger=LOG)
        self.helper.transaction(install_pkgs=build_requirements,
                                tracewriter=self.tracewriter)
    build_requirements = ''
    try:
        build_requirements = sh.load_file(self.rpm_build_requires_filename)
    except IOError as e:
        if e.errno != errno.ENOENT:
            raise
    build_requirements = set(pkg_resources.yield_lines(build_requirements))
    for repo_name in self.REPOS:
        repo_dir = sh.joinpths(self.anvil_repo_dir,
                               self.SRC_REPOS[repo_name])
        matched_paths = []
        paths = list_src_rpms(repo_dir)
        envra_details = self.envra_helper.explode(*paths)
        for (path, envra_detail) in zip(paths, envra_details):
            package_name = envra_detail.get('name')
            if package_name in build_requirements:
                matched_paths.append(path)
                build_requirements.discard(package_name)
        if matched_paths:
            with sh.remove_before(self.prebuild_dir) as prebuild_dir:
                if not sh.isdir(prebuild_dir):
                    sh.mkdirslist(prebuild_dir,
                                  tracewriter=self.tracewriter)
                for path in matched_paths:
                    sh.move(path,
                            sh.joinpths(prebuild_dir, sh.basename(path)))
                build(prebuild_dir, repo_name,
                      'Prebuilding %s RPM packages from their SRPMs'
                      ' for repo %s using %s jobs')
    return build_requirements

def install_start(instance):
    subsystems = set(instance.subsystems)
    if subsystems:
        utils.log_iterable(sorted(subsystems), logger=LOG,
                           header='Installing %s using subsystems' %
                                  colorizer.quote(instance.name))
    else:
        LOG.info("Installing %s.", colorizer.quote(instance.name))

def stop(self):
    # Anything to stop in the first place??
    what_was_started = []
    try:
        what_was_started = self.tracereader.apps_started()
    except excp.NoTraceException:
        pass
    if not what_was_started:
        return 0
    # Get the investigators/runners which can be used
    # to actually do the stopping and attempt to perform said stop.
    applications_stopped = []
    for (name, handler) in self._locate_investigators(what_was_started):
        handler.stop(name)
        applications_stopped.append(name)
    if applications_stopped:
        utils.log_iterable(applications_stopped,
                           header="Stopped %s programs started under %s component" %
                                  (len(applications_stopped), self.name),
                           logger=LOG)
    # Only if we stopped the amount which was supposedly started can
    # we actually remove the trace where those applications have been
    # marked as started in (ie the connection back to how they were started).
    if len(applications_stopped) < len(what_was_started):
        diff = len(what_was_started) - len(applications_stopped)
        LOG.warn(("%s fewer applications were stopped than were started,"
                  " please check out %s to stop these programs manually."),
                 diff,
                 colorizer.quote(self.tracereader.filename(),
                                 quote_color='yellow'))
    else:
        sh.unlink(self.tracereader.filename())
    return len(applications_stopped)

def build_all_srpms(self, package_files, tracewriter, jobs):
    (_fn, content) = utils.load_template(sh.joinpths("packaging", "makefiles"),
                                         "source.mk")
    scripts_dir = sh.abspth(sh.joinpths(settings.TEMPLATE_DIR,
                                        "packaging", "scripts"))
    cmdline = self._start_cmdline(escape_values=True)[1:] + [
        "--scripts-dir", scripts_dir,
        "--source-only",
        "--rpm-base", self._rpmbuild_dir,
    ]
    executable = " ".join(self._start_cmdline()[0:1])
    params = {
        "DOWNLOADS_DIR": self._download_dir,
        "LOGS_DIR": self._log_dir,
        "PY2RPM": executable,
        "PY2RPM_FLAGS": " ".join(cmdline),
    }
    marks_dir = sh.joinpths(self._deps_dir, "marks-deps")
    if not sh.isdir(marks_dir):
        sh.mkdirslist(marks_dir, tracewriter=tracewriter)
    makefile_path = sh.joinpths(self._deps_dir, "deps.mk")
    sh.write_file(makefile_path, utils.expand_template(content, params),
                  tracewriter=tracewriter)
    utils.log_iterable(package_files,
                       header="Building %s SRPM packages using %s jobs" %
                              (len(package_files), jobs),
                       logger=LOG)
    self._execute_make(makefile_path, marks_dir, jobs)

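# Illustrative only: utils.expand_template is assumed here to perform simple
# named substitution of the DOWNLOADS_DIR/LOGS_DIR/PY2RPM/PY2RPM_FLAGS
# placeholders into the loaded makefile content, roughly the way Python's
# string.Template does; anvil's actual template engine may differ.
from string import Template

def expand_template_sketch(content, params):
    return Template(content).safe_substitute(params)
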
def _configure_symlinks(self):
    links = self.symlinks
    if not links:
        return 0
    # Sort the link sources and then reverse them so that deeper paths
    # (ie /etc/blah/blah) are linked before their parents (ie /etc/blah),
    # although the order might not matter.
    link_srcs = sorted(links.keys())
    link_srcs.reverse()
    link_nice = []
    for source in link_srcs:
        links_to_be = links[source]
        for link in links_to_be:
            link_nice.append("%s => %s" % (link, source))
    utils.log_iterable(link_nice, logger=LOG,
                       header="Creating %s sym-links" % (len(link_nice)))
    links_made = 0
    for source in link_srcs:
        links_to_be = links[source]
        for link in links_to_be:
            try:
                LOG.debug("Symlinking %s to %s.", link, source)
                sh.symlink(source, link, tracewriter=self.tracewriter)
                links_made += 1
            except (IOError, OSError) as e:
                LOG.warn("Symlinking %s to %s failed: %s",
                         colorizer.quote(link), colorizer.quote(source), e)
    return links_made

def _uninstall_touched_files(self):
    files_touched = self.tracereader.files_touched()
    if files_touched:
        utils.log_iterable(files_touched, logger=LOG,
                           header="Removing %s miscellaneous files" %
                                  (len(files_touched)))
        for fn in files_touched:
            sh.unlink(fn, run_as_root=True)

def _uninstall_pips(self):
    pips = self.tracereader.pips_installed()
    if pips:
        pip_names = [p["name"] for p in pips]
        utils.log_iterable(pip_names, logger=LOG,
                           header="Potentially removing %s python packages" %
                                  (len(pip_names)))
        which_removed = []
        with utils.progress_bar("Uninstalling", len(pips),
                                reverse=True) as p_bar:
            for (i, p) in enumerate(pips):
                try:
                    uninstaller = make_packager(p, pip.Packager,
                                                distro=self.distro,
                                                remove_default=self.purge_packages)
                    if uninstaller.remove(p):
                        which_removed.append(p["name"])
                except excp.ProcessExecutionError as e:
                    # NOTE(harlowja): pip seems to die if a pkg isn't there
                    # even in quiet mode.
                    combined = str(e.stderr) + str(e.stdout)
                    if not re.search(r"not\s+installed", combined, re.I):
                        raise
                p_bar.update(i + 1)
        utils.log_iterable(which_removed, logger=LOG,
                           header="Actually removed %s python packages" %
                                  (len(which_removed)))

def _uninstall_dirs(self):
    dirs_made = self.tracereader.dirs_made()
    dirs_alive = filter(sh.isdir, dirs_made)
    if dirs_alive:
        utils.log_iterable(dirs_alive, logger=LOG,
                           header="Removing %s created directories" %
                                  (len(dirs_alive)))
        for dir_name in dirs_alive:
            sh.deldir(dir_name, run_as_root=True)

def install(self, urls):
    am_installed = 0
    try:
        gclient_v1 = importer.import_module('glanceclient.v1.client')
        gexceptions = importer.import_module('glanceclient.common.exceptions')
        kclient_v2 = importer.import_module('keystoneclient.v2_0.client')
        kexceptions = importer.import_module('keystoneclient.exceptions')
    except RuntimeError as e:
        LOG.exception("Failed at importing required client modules: %s", e)
        return am_installed
    if urls:
        try:
            params = self.params['glance']
            client = gclient_v1.Client(endpoint=params['endpoints']['public']['uri'],
                                       token=self._get_token(kclient_v2))
        except (RuntimeError, gexceptions.ClientException,
                kexceptions.ClientException) as e:
            LOG.exception('Failed fetching needed clients for image calls due to: %s', e)
            return am_installed
        utils.log_iterable(urls, logger=LOG,
                           header="Attempting to download+extract+upload %s images" %
                                  len(urls))
        for url in urls:
            try:
                (name, img_id) = Image(client, url).install()
                LOG.info("Installed image named %s with image id %s.",
                         colorizer.quote(name), colorizer.quote(img_id))
                am_installed += 1
            except (IOError, tarfile.TarError,
                    gexceptions.ClientException,
                    kexceptions.ClientException) as e:
                LOG.exception('Installing %r failed due to: %s', url, e)
    return am_installed

def _stop_dnsmasq(self):
    # Shutdown dnsmasq processes, which are typically used by nova-network
    # to provide dhcp leases; since nova currently doesn't seem to shut
    # them down itself (why not?) we have to do it for it...
    #
    # TODO(harlowja) file a bug to get that fixed...
    to_kill = []
    for proc in psutil.process_iter():
        if proc.name.find("dnsmasq") == -1:
            continue
        cwd = proc.getcwd()
        cmdline = " ".join(proc.cmdline)
        to_try = False
        for t in [cwd, cmdline]:
            if t.lower().find("nova") != -1:
                to_try = True
        if to_try:
            to_kill.append(proc.pid)
    if to_kill:
        utils.log_iterable(to_kill,
                           header="Killing leftover nova dnsmasq processes"
                                  " with process ids",
                           logger=nconf.LOG)
        for pid in to_kill:
            sh.kill(pid)

def _clean_pip_requires(self):
    # Fixup these files if they exist (sometimes they have 'junk' in them).
    req_fns = []
    for fn in self.requires_files:
        if not sh.isfile(fn):
            continue
        req_fns.append(fn)
    if req_fns:
        utils.log_iterable(req_fns, logger=LOG,
                           header="Adjusting %s pip 'requires' files" %
                                  (len(req_fns)))
        for fn in req_fns:
            new_lines = []
            for line in sh.load_file(fn).splitlines():
                s_line = line.strip()
                if len(s_line) == 0:
                    continue
                elif s_line.startswith("#"):
                    new_lines.append(s_line)
                elif not self._filter_pip_requires_line(s_line):
                    new_lines.append("# %s" % (s_line))
                else:
                    new_lines.append(s_line)
            contents = "# Cleaned on %s\n\n%s\n" % (utils.iso8601(),
                                                    "\n".join(new_lines))
            sh.write_file_and_backup(fn, contents)
    return len(req_fns)

def _unconfigure_files(self):
    cfg_files = self.tracereader.files_configured()
    if cfg_files:
        utils.log_iterable(cfg_files, logger=LOG,
                           header="Removing %s configuration files" %
                                  (len(cfg_files)))
        for fn in cfg_files:
            sh.unlink(fn, run_as_root=True)

def run(self, persona):
    instances = self._construct_instances(persona)
    component_order = self._order_components(persona.wanted_components)
    LOG.info("Processing components for action %s.",
             colorizer.quote(self.name))
    utils.log_iterable(component_order,
                       header="Activating in the following order",
                       logger=LOG)
    self._on_start(persona, component_order, instances)
    self._run(persona, component_order, instances)
    self._on_finish(persona, component_order, instances)

def _uninstall_files(self):
    files_touched = self.tracereader.files_touched()
    files_alive = filter(sh.isfile, files_touched)
    if files_alive:
        utils.log_iterable(files_alive, logger=LOG,
                           header="Removing %s miscellaneous files" %
                                  (len(files_alive)))
        for fn in files_alive:
            sh.unlink(fn)

def _install_build_requirements():
    build_requires = self.requirements["build-requires"]
    if build_requires:
        utils.log_iterable(sorted(build_requires),
                           header=("Installing %s build requirements" %
                                   len(build_requires)),
                           logger=LOG)
        cmdline = ["yum", "install", "-y"] + list(build_requires)
        sh.execute(cmdline)

def download_dependencies(self):
    """Download dependencies from `$deps_dir/download-requires`."""
    # NOTE(aababilov): do not drop download_dir - it can be reused
    sh.mkdirslist(self.download_dir, tracewriter=self.tracewriter)
    download_requires_filename = sh.joinpths(self.deps_dir,
                                             "download-requires")
    raw_pips_to_download = self.filter_download_requires()
    sh.write_file(download_requires_filename,
                  "\n".join(str(req) for req in raw_pips_to_download))
    if not raw_pips_to_download:
        return ([], [])
    downloaded_flag_file = sh.joinpths(self.deps_dir, "pip-downloaded")
    # NOTE(aababilov): user could have changed persona, so,
    # check that all requirements are downloaded
    if sh.isfile(downloaded_flag_file) and self._requirements_satisfied(
            raw_pips_to_download, self.download_dir):
        LOG.info("All python dependencies have already been downloaded")
    else:
        pip_dir = sh.joinpths(self.deps_dir, "pip")
        pip_download_dir = sh.joinpths(pip_dir, "download")
        pip_build_dir = sh.joinpths(pip_dir, "build")
        # NOTE(aababilov): do not clean the cache, it is always useful
        pip_cache_dir = sh.joinpths(self.deps_dir, "pip-cache")
        pip_failures = []
        for attempt in xrange(self.MAX_PIP_DOWNLOAD_ATTEMPTS):
            # NOTE(aababilov): pip has issues with already downloaded files
            sh.deldir(pip_dir)
            sh.mkdir(pip_download_dir, recurse=True)
            header = "Downloading %s python dependencies (attempt %s)"
            header = header % (len(raw_pips_to_download), attempt)
            utils.log_iterable(sorted(raw_pips_to_download),
                               logger=LOG, header=header)
            failed = False
            try:
                self._try_download_dependencies(attempt,
                                                raw_pips_to_download,
                                                pip_download_dir,
                                                pip_cache_dir,
                                                pip_build_dir)
                pip_failures = []
            except exc.ProcessExecutionError as e:
                LOG.exception("Failed downloading python dependencies")
                pip_failures.append(e)
                failed = True
            if not failed:
                break
        for filename in sh.listdir(pip_download_dir, files_only=True):
            sh.move(filename, self.download_dir, force=True)
        sh.deldir(pip_dir)
        if pip_failures:
            raise pip_failures[-1]
        with open(downloaded_flag_file, "w"):
            pass
    pips_downloaded = [pip_helper.extract_requirement(p)
                       for p in raw_pips_to_download]
    self._examine_download_dir(pips_downloaded, self.download_dir)
    what_downloaded = sh.listdir(self.download_dir, files_only=True)
    return (pips_downloaded, what_downloaded)

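# Illustrative only: a minimal sketch (not anvil's real helper) of the sort of
# check _requirements_satisfied is assumed to perform above -- every requested
# requirement must already have a downloaded archive present; the real check
# presumably also verifies that the archived version satisfies the requirement.
def _requirements_satisfied_sketch(pips_to_download, download_dir):
    downloaded_keys = set()
    for filename in sh.listdir(download_dir, files_only=True):
        details = pip_helper.get_archive_details(filename)
        downloaded_keys.add(details['req'].key)
    return all(pip_helper.extract_requirement(line).key in downloaded_keys
               for line in pips_to_download)
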
def _gather_pips_to_install(self, requires_files, extra_pips=None): """Analyze requires_files and extra_pips. Updates `self.forced_packages` and `self.pips_to_install`. Writes requirements to `self.gathered_requires_filename`. """ extra_pips = extra_pips or [] cmdline = [ self.multipip_executable, "--skip-requirements-regex", "python.*client", "--pip", self.pip_executable ] cmdline = cmdline + extra_pips + ["-r"] + requires_files cmdline.extend(["--ignore-package"]) cmdline.extend(OPENSTACK_PACKAGES) cmdline.extend(SKIP_PACKAGE_NAMES) cmdline.extend(self.python_names) stdout, stderr = sh.execute(cmdline, check_exit_code=False) self.pips_to_install = list(utils.splitlines_not_empty(stdout)) sh.write_file(self.gathered_requires_filename, "\n".join(self.pips_to_install)) utils.log_iterable(sorted(self.pips_to_install), logger=LOG, header="Full known python dependency list") incompatibles = collections.defaultdict(list) if stderr: current_name = '' for line in stderr.strip().splitlines(): if line.endswith(": incompatible requirements"): current_name = line.split(":", 1)[0].lower().strip() if current_name not in incompatibles: incompatibles[current_name] = [] else: incompatibles[current_name].append(line) for (name, lines) in incompatibles.items(): if not name: continue LOG.warn("Incompatible requirements found for %s", colorizer.quote(name, quote_color='red')) for line in lines: LOG.warn(line) if not self.pips_to_install: LOG.error("No dependencies for OpenStack found." "Something went wrong. Please check:") LOG.error("'%s'" % "' '".join(cmdline)) raise exc.DependencyException("No dependencies for OpenStack found") # Translate those that we altered requirements for into a set of forced # requirements file (and associated list). self.forced_packages = [] for req in [pip_helper.extract_requirement(line) for line in self.pips_to_install]: if req.key in incompatibles: self.forced_packages.append(req) sh.write_file(self.forced_requires_filename, "\n".join([str(req) for req in self.forced_packages]))
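# Illustrative only: the stderr parsing above assumes multipip reports
# conflicts in blocks whose first line ends with ': incompatible requirements',
# roughly like the following (package names and versions are hypothetical):
#
#   oslo.config: incompatible requirements
#   Requirement oslo.config>=1.2.0 conflicts with oslo.config<1.2
#
# The first line names the package; the following lines describe the conflict
# and are collected verbatim under that package's key.
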
def build_binary(self): def is_src_rpm(path): if not path: return False if not sh.isfile(path): return False if not path.lower().endswith('.src.rpm'): return False return True def list_src_rpms(path): path_files = [] if sh.isdir(path): path_files = sh.listdir(path, filter_func=is_src_rpm) return sorted(path_files) build_requirements = self.requirements.get("build-requires") if build_requirements: utils.log_iterable(build_requirements, header="Installing build requirements", logger=LOG) self.helper.transaction(install_pkgs=build_requirements, tracewriter=self.tracewriter) for repo_name in self.REPOS: src_repo_dir = sh.joinpths(self.anvil_repo_dir, self.SRC_REPOS[repo_name]) src_repo_files = list_src_rpms(src_repo_dir) if not src_repo_files: continue utils.log_iterable(src_repo_files, header=('Building %s RPM packages from their' ' SRPMs for repo %s using %s jobs') % (len(src_repo_files), self.SRC_REPOS[repo_name], self._jobs), logger=LOG) makefile_path = sh.joinpths(self.deps_dir, "binary-%s.mk" % repo_name) marks_dir = sh.joinpths(self.deps_dir, "marks-binary") if not sh.isdir(marks_dir): sh.mkdirslist(marks_dir, tracewriter=self.tracewriter) rpmbuild_flags = "--rebuild" if self.opts.get("usr_only", False): rpmbuild_flags += " --define 'usr_only 1'" params = { "SRC_REPO_DIR": src_repo_dir, "RPMBUILD_FLAGS": rpmbuild_flags, "LOGS_DIR": self.log_dir, 'RPMTOP_DIR': self.rpmbuild_dir, } (_fn, content) = utils.load_template(sh.joinpths("packaging", "makefiles"), "binary.mk") sh.write_file(makefile_path, utils.expand_template(content, params), tracewriter=self.tracewriter) with sh.remove_before_after(self.rpmbuild_dir): self._create_rpmbuild_subdirs() self._execute_make(makefile_path, marks_dir) repo_dir = sh.joinpths(self.anvil_repo_dir, repo_name) for d in sh.listdir(self.rpmbuild_dir, dirs_only=True): self._move_rpm_files(sh.joinpths(d, "RPMS"), repo_dir) self._move_rpm_files(sh.joinpths(self.rpmbuild_dir, "RPMS"), repo_dir) self._create_repo(repo_name)
def _establish_passwords(self):
    pw_read = []
    for fn in self.password_files:
        if sh.isfile(fn):
            self.passwords.cache.update(utils.load_yaml(fn))
            pw_read.append(fn)
    if pw_read:
        utils.log_iterable(pw_read,
                           header="Updated passwords to be used from %s files" %
                                  len(pw_read),
                           logger=LOG)

def run(self, persona):
    groups = self._construct_instances(persona)
    LOG.info("Processing components for action %s.",
             colorizer.quote(self.name))
    for group in persona.matched_components:
        utils.log_iterable(group,
                           header="Activating group %s in the following order" %
                                  colorizer.quote(group.id),
                           logger=LOG)
    self._on_start(persona, groups)
    self._run(persona, groups)
    self._on_finish(persona, groups)

def _setup_logs(self, clear=False):
    log_fns = [self.access_log, self.error_log]
    utils.log_iterable(log_fns, logger=LOG,
                       header="Adjusting %s log files" % (len(log_fns)))
    for fn in log_fns:
        if clear:
            sh.unlink(fn, True)
        sh.touch_file(fn, die_if_there=False,
                      tracewriter=self.tracewriter)
        sh.chmod(fn, 0666)
    return len(log_fns)

def _log_pieces_found(self, src_type, root_fn, ramdisk_fn, kernel_fn):
    pieces = []
    if root_fn:
        pieces.append("%s (root image)" % (colorizer.quote(root_fn)))
    if ramdisk_fn:
        pieces.append("%s (ramdisk image)" % (colorizer.quote(ramdisk_fn)))
    if kernel_fn:
        pieces.append("%s (kernel image)" % (colorizer.quote(kernel_fn)))
    if pieces:
        utils.log_iterable(pieces, logger=LOG,
                           header="Found %s images from a %s" %
                                  (len(pieces), src_type))

def _install_pips(self):
    pips = self._get_pips()
    if pips:
        pip_names = [p['name'] for p in pips]
        utils.log_iterable(pip_names, logger=LOG,
                           header="Setting up %s python packages" %
                                  (len(pip_names)))
        with utils.progress_bar('Installing', len(pips)) as p_bar:
            for (i, p) in enumerate(pips):
                self.tracewriter.pip_installed(p)
                self.pip_factory.get_packager_for(p).install(p)
                p_bar.update(i + 1)

def _install_pips(self):
    pips = self.pips
    if pips:
        pip_names = [p["name"] for p in pips]
        utils.log_iterable(pip_names, logger=LOG,
                           header="Setting up %s python packages" %
                                  (len(pip_names)))
        with utils.progress_bar("Installing", len(pips)) as p_bar:
            for (i, p) in enumerate(pips):
                installer = make_packager(p, pip.Packager,
                                          distro=self.distro)
                installer.install(p)
                # Note that we did it so that we can remove it...
                self.tracewriter.pip_installed(filter_package(p))
                p_bar.update(i + 1)

def _configure_files(self):
    config_fns = self.config_files
    if config_fns:
        utils.log_iterable(config_fns, logger=LOG,
                           header="Configuring %s files" %
                                  (len(config_fns)))
        for fn in config_fns:
            tgt_fn = self.target_config(fn)
            self.tracewriter.dirs_made(*sh.mkdirslist(sh.dirname(tgt_fn)))
            (source_fn, contents) = self.source_config(fn)
            LOG.debug("Configuring file %s ---> %s.", source_fn, tgt_fn)
            contents = self._config_param_replace(fn, contents,
                                                  self.config_params(fn))
            contents = self._config_adjust(contents, fn)
            self.tracewriter.cfg_file_written(
                sh.write_file(tgt_fn, contents))
    return len(config_fns)

def install(self):
    LOG.debug('Preparing to install packages for: %r', self.name)
    pkgs = self.packages
    if pkgs:
        pkg_names = [p['name'] for p in pkgs]
        utils.log_iterable(pkg_names, logger=LOG,
                           header="Setting up %s distribution packages" %
                                  (len(pkg_names)))
        with utils.progress_bar('Installing', len(pkgs)) as p_bar:
            for (i, p) in enumerate(pkgs):
                installer = make_packager(p,
                                          self.distro.package_manager_class,
                                          distro=self.distro)
                installer.install(p)
                # Mark that this happened so that we can uninstall it
                self.tracewriter.package_installed(filter_package(p))
                p_bar.update(i + 1)

def install(self, urls):
    am_installed = 0
    try:
        # Done at a function level since this module may be used
        # before these libraries actually exist.
        gclient_v1 = importer.import_module('glanceclient.v1.client')
        gexceptions = importer.import_module('glanceclient.common.exceptions')
        kclient_v2 = importer.import_module('keystoneclient.v2_0.client')
        kexceptions = importer.import_module('keystoneclient.exceptions')
    except RuntimeError as e:
        LOG.exception("Failed at importing required client modules: %s", e)
        return am_installed
    if urls:
        try:
            # Ensure all services are up
            for params in (self._glance_params, self._keystone_params):
                utils.wait_for_url(params['endpoints']['public']['uri'])
            g_params = self._glance_params
            client = gclient_v1.Client(endpoint=g_params['endpoints']['public']['uri'],
                                       token=self._get_token(kclient_v2))
        except (RuntimeError, gexceptions.ClientException,
                kexceptions.ClientException, IOError) as e:
            LOG.exception('Failed fetching needed clients for image calls due to: %s', e)
            return am_installed
        utils.log_iterable(urls, logger=LOG,
                           header="Attempting to download+extract+upload %s images" %
                                  len(urls))
        for url in urls:
            try:
                img_handle = Image(client, url,
                                   is_public=self._is_public,
                                   cache_dir=self._cache_dir)
                (name, img_id) = img_handle.install()
                LOG.info("Installed image %s with id %s.",
                         colorizer.quote(name), colorizer.quote(img_id))
                am_installed += 1
            except exc.DuplicateException as e:
                LOG.warning(e)
            except (IOError, tarfile.TarError,
                    gexceptions.ClientException,
                    kexceptions.ClientException) as e:
                LOG.exception('Installing %r failed due to: %s', url, e)
    return am_installed

def _write_exports(self, component_order, instances, path):
    entries = []
    contents = StringIO()
    contents.write("# Exports for action %s\n\n" % (self.name))
    for c in component_order:
        exports = instances[c].env_exports
        if exports:
            contents.write("# Exports for %s\n" % (c))
            for (k, v) in exports.items():
                export_entry = "export %s=%s" % (
                    k, sh.shellquote(str(v).strip()))
                entries.append(export_entry)
                contents.write("%s\n" % (export_entry))
            contents.write("\n")
    if entries:
        sh.write_file(path, contents.getvalue())
        utils.log_iterable(entries,
                           header="Wrote to %s %s exports" %
                                  (path, len(entries)),
                           logger=LOG)

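# Illustrative only: the exports file written above would look roughly like
# the following (the component name and values here are hypothetical):
#
#   # Exports for action install
#
#   # Exports for keystone
#   export OS_AUTH_URL='http://127.0.0.1:5000/v2.0'
#   export OS_USERNAME='admin'
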
def _clean_pip_requires(self):
    # Fixup these files if they exist; sometimes they have 'junk' in them
    # that anvil will install instead of pip or setup.py, and we don't want
    # the setup.py file to attempt to install those dependencies, since it
    # typically picks locations that are not what we desire, or, if the
    # file contains editables, it may even pick external source directories
    # (which is what anvil itself is setting up as well)...
    req_fns = [f for f in self.requires_files if sh.isfile(f)]
    if req_fns:
        utils.log_iterable(req_fns, logger=LOG,
                           header="Adjusting %s pip 'requires' files" %
                                  (len(req_fns)))
        for fn in req_fns:
            old_lines = sh.load_file(fn).splitlines()
            new_lines = self._filter_pip_requires(fn, old_lines)
            contents = "# Cleaned on %s\n\n%s\n" % (utils.iso8601(),
                                                    "\n".join(new_lines))
            sh.write_file_and_backup(fn, contents)
    return len(req_fns)

def _build_openstack(self):
    if not self.package_dirs:
        LOG.warn("No RPM packages of OpenStack installs to build")
        return
    component_names = [self._get_component_name(d)
                       for d in self.package_dirs]
    utils.log_iterable(sorted(component_names), logger=LOG,
                       header=("Building %s OpenStack RPM"
                               " packages") % (len(self.package_dirs)))
    with utils.progress_bar(name='Building',
                            max_am=len(self.package_dirs)) as p_bar:
        for (i, pkg_dir) in enumerate(sorted(self.package_dirs)):
            component_name = self._get_component_name(pkg_dir)
            cmdline = self.py2rpm_start_cmdline() + ["--", pkg_dir]
            out_filename = sh.joinpths(self.log_dir,
                                       "py2rpm.%s.out" % (component_name))
            sh.execute_save_output(cmdline, out_filename=out_filename,
                                   quiet=True)
            p_bar.update(i + 1)

def _clean_pip_requires(self, requires_files):
    # Fixup incompatible dependencies
    if not (requires_files and self.forced_packages):
        return
    utils.log_iterable(sorted(requires_files), logger=LOG,
                       header="Adjusting %s pip 'requires' files" %
                              (len(requires_files)))
    forced_by_key = dict((pkg.key, pkg) for pkg in self.forced_packages)
    for fn in requires_files:
        old_lines = sh.load_file(fn).splitlines()
        new_lines = []
        for line in old_lines:
            try:
                req = pip_helper.extract_requirement(line)
                new_lines.append(str(forced_by_key[req.key]))
            except Exception:
                # We don't force the package or it has a bad format.
                new_lines.append(line)
        contents = "# Cleaned on %s\n\n%s\n" % (utils.iso8601(),
                                                "\n".join(new_lines))
        sh.write_file_and_backup(fn, contents)

def download(self):
    """Download sources needed to build the component, if any."""
    target_dir = self.get_option('app_dir')
    download_cfg = utils.load_yaml(self._origins_fn).get(self.name, {})
    if not target_dir or not download_cfg:
        return []
    uri = download_cfg.pop('repo', None)
    if not uri:
        raise ValueError(("Could not find repo uri for %r component from"
                          " the %r config file.") %
                         (self.name, self._origins_fn))
    uris = [uri]
    utils.log_iterable(uris, logger=LOG,
                       header="Downloading from %s uris" % (len(uris)))
    sh.mkdirslist(target_dir, tracewriter=self.tracewriter)
    # This is used to delete what is downloaded (done before
    # fetching to ensure it's cleaned up even on download failures).
    self.tracewriter.download_happened(target_dir, uri)
    down.GitDownloader(uri, target_dir, **download_cfg).download()
    return uris

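# Illustrative only: the origins yaml consumed above is assumed to map each
# component name to its download settings; 'repo' is popped off and any
# remaining keys (the 'branch' shown here is hypothetical) are passed through
# to GitDownloader as keyword arguments:
#
#   nova:
#     repo: git://github.com/openstack/nova.git
#     branch: stable/grizzly
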
def _install_python_setups(self):
    py_dirs = self.python_directories
    if py_dirs:
        real_dirs = {}
        for (name, wkdir) in py_dirs.items():
            real_dirs[name] = wkdir
            if not real_dirs[name]:
                real_dirs[name] = self.get_option('app_dir')
        utils.log_iterable(real_dirs.values(), logger=LOG,
                           header="Setting up %s python directories" %
                                  (len(real_dirs)))
        setup_cmd = self.distro.get_command('python', 'setup')
        for (name, working_dir) in real_dirs.items():
            self.tracewriter.dirs_made(*sh.mkdirslist(working_dir))
            self.tracewriter.py_installed(name, working_dir)
            root_fn = sh.joinpths(self.get_option('trace_dir'),
                                  "%s.python.setup" % (name))
            sh.execute(*setup_cmd, cwd=working_dir, run_as_root=True,
                       stderr_fn='%s.stderr' % (root_fn),
                       stdout_fn='%s.stdout' % (root_fn),
                       trace_writer=self.tracewriter)

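# Illustrative only (an assumption, not confirmed by this excerpt): the
# distro-provided setup_cmd above is taken to resolve to something like
# ['python', 'setup.py', 'develop'] from the distro configuration; sh.execute
# unpacks it and runs it in each python directory, capturing stdout/stderr
# into files next to the trace files.
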
def _build_dependencies(self):
    (pips_downloaded, package_files) = self.download_dependencies()

    # Analyze what was downloaded and eject things that were downloaded
    # by pip as a dependency of a download but which we do not want to
    # build or can satisfy by other means.
    no_pips = [pkg_resources.Requirement.parse(name).key
               for name in self.python_names]
    yum_map = self._get_known_yum_packages()
    pips_keys = set([p.key for p in pips_downloaded])

    def _filter_package_files(package_files):
        package_reqs = []
        package_keys = []
        for filename in package_files:
            package_details = pip_helper.get_archive_details(filename)
            package_reqs.append(package_details['req'])
            package_keys.append(package_details['req'].key)
        package_rpm_names = self._convert_names_python2rpm(package_keys)
        filtered_files = []
        for (filename, req, rpm_name) in zip(package_files, package_reqs,
                                             package_rpm_names):
            if req.key in no_pips:
                LOG.info(("Dependency %s was downloaded additionally "
                          "but it is disallowed."), colorizer.quote(req))
                continue
            if req.key in pips_keys:
                filtered_files.append(filename)
                continue
            # See if pip tried to download it but we already can satisfy
            # it via yum and avoid building it in the first place...
            (_version, repo) = self._find_yum_match(yum_map, req, rpm_name)
            if not repo:
                filtered_files.append(filename)
            else:
                LOG.info(("Dependency %s was downloaded additionally "
                          "but it can be satisfied by %s from repository "
                          "%s instead."), colorizer.quote(req),
                         colorizer.quote(rpm_name), colorizer.quote(repo))
        return filtered_files

    LOG.info("Filtering %s downloaded files.", len(package_files))
    filtered_package_files = _filter_package_files(package_files)
    if not filtered_package_files:
        LOG.info("No SRPM package dependencies to build.")
        return
    for filename in package_files:
        if filename not in filtered_package_files:
            sh.unlink(filename)
    package_files = sorted(filtered_package_files)

    # Now build them into SRPM rpm files.
    (_fn, content) = utils.load_template(
        sh.joinpths("packaging", "makefiles"), "source.mk")
    scripts_dir = sh.abspth(
        sh.joinpths(settings.TEMPLATE_DIR, "packaging", "scripts"))
    py2rpm_options = self._py2rpm_start_cmdline()[1:] + [
        "--scripts-dir", scripts_dir,
        "--source-only",
        "--rpm-base", self.rpmbuild_dir,
    ]
    params = {
        "DOWNLOADS_DIR": self.download_dir,
        "LOGS_DIR": self.log_dir,
        "PY2RPM": self.py2rpm_executable,
        "PY2RPM_FLAGS": " ".join(py2rpm_options),
    }
    marks_dir = sh.joinpths(self.deps_dir, "marks-deps")
    if not sh.isdir(marks_dir):
        sh.mkdirslist(marks_dir, tracewriter=self.tracewriter)
    makefile_path = sh.joinpths(self.deps_dir, "deps.mk")
    sh.write_file(makefile_path, utils.expand_template(content, params),
                  tracewriter=self.tracewriter)
    utils.log_iterable(package_files,
                       header="Building %s SRPM packages using %s jobs" %
                              (len(package_files), self._jobs),
                       logger=LOG)
    self._execute_make(makefile_path, marks_dir)

def _post_start(self, app_name):
    fork_fns = self._form_file_names(app_name)
    utils.log_iterable(fork_fns.as_list(),
                       header="Forked %s with details in the following files" %
                              (app_name),
                       logger=LOG)

def _scan_pip_requires(self, requires_files):
    own_eggs = self._python_eggs(False)

    def replace_forced_requirements(fn, forced_by_key):
        old_lines = sh.load_file(fn).splitlines()
        new_lines = []
        alterations = []
        for line in old_lines:
            try:
                source_req = pip_helper.extract_requirement(line)
            except (ValueError, TypeError):
                pass
            else:
                if source_req:
                    validate_requirement(fn, source_req)
                    try:
                        replace_req = forced_by_key[source_req.key]
                    except KeyError:
                        pass
                    else:
                        replace_req = str(replace_req)
                        source_req = str(source_req)
                        if replace_req != source_req:
                            line = replace_req
                            alterations.append(
                                "%s => %s" %
                                (colorizer.quote(source_req),
                                 colorizer.quote(replace_req)))
            new_lines.append(line)
        if alterations:
            contents = "# Cleaned on %s\n\n%s\n" % (utils.iso8601(),
                                                    "\n".join(new_lines))
            sh.write_file_and_backup(fn, contents)
            utils.log_iterable(alterations, logger=LOG,
                               header="Replaced %s requirements in %s" %
                                      (len(alterations), fn),
                               color=None)
        return len(alterations)

    def on_replace_done(fn, time_taken):
        LOG.debug("Replacing potential forced requirements in %s"
                  " took %s seconds", colorizer.quote(fn), time_taken)

    def validate_requirement(filename, source_req):
        install_egg = None
        for egg_info in own_eggs:
            if egg_info['name'] == source_req.key:
                install_egg = egg_info
                break
        if not install_egg:
            return
        # Ensure what we are about to install/create will actually work
        # with the desired version. If it is not compatible then we should
        # abort and someone should update the tag/branch in the origin
        # file (or fix it via some other mechanism).
        if install_egg['version'] not in source_req:
            msg = ("Can not satisfy '%s' with '%s', version"
                   " conflict found in %s")
            raise exc.DependencyException(msg % (source_req,
                                                 install_egg['req'],
                                                 filename))

    if not requires_files:
        return
    requires_files = sorted(requires_files)
    utils.log_iterable(requires_files, logger=LOG,
                       header="Scanning %s pip 'requires' files" %
                              (len(requires_files)))
    forced_by_key = {}
    for pkg in self.forced_pips:
        forced_by_key[pkg.key] = pkg
    mutations = 0
    for fn in requires_files:
        LOG.debug("Replacing any potential forced requirements in %s",
                  colorizer.quote(fn))
        mutations += utils.time_it(functools.partial(on_replace_done, fn),
                                   replace_forced_requirements,
                                   fn, forced_by_key)
    # NOTE(imelnikov): after updating requirement lists we should re-fetch
    # data from them again, so we drop pip helper caches here.
    if mutations > 0:
        pip_helper.drop_caches()

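# Illustrative only: with self.forced_pips containing a pin such as
# 'requests==1.2.3' (a hypothetical example), a 'requests>=1.0' line in a
# scanned requires file would be rewritten in place to 'requests==1.2.3' and
# logged as "requests>=1.0 => requests==1.2.3".
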
def _all_rpm_names(self):
    # This file should have all the requirements (including test ones)
    # that we need to install (and which should have been built as rpms
    # in the previous build stages).
    gathered_requires = sh.load_file(
        self.gathered_requires_filename).splitlines()
    gathered_requires = [line.strip() for line in gathered_requires
                         if line.strip()]
    req_names = []
    reqs = []
    for line in gathered_requires:
        req = pip_helper.extract_requirement(line)
        if req.key in req_names:
            continue
        req_names.append(req.key)
        reqs.append(req)
    rpm_names = self._convert_names_python2rpm(req_names)

    # Ensure we select the right versions that are required and not a
    # version that doesn't match the requirements.
    desired_rpms = []
    desired_rpm_names = set()
    desired_rpms_formatted = []

    def format_name(rpm_name, py_req):
        full_name = str(rpm_name).strip()
        if py_req is not None:
            full_name += ",%s" % (py_req)
        return full_name

    def capture_rpm(rpm_name, py_req):
        if rpm_name in desired_rpm_names or not rpm_name:
            return
        desired_rpms_formatted.append(format_name(rpm_name, py_req))
        desired_rpms.append((rpm_name, py_req))
        desired_rpm_names.add(rpm_name)

    for (rpm_name, req) in zip(rpm_names, reqs):
        capture_rpm(rpm_name, req)
    for inst in self.instances:
        if sh.isdir(inst.get_option("app_dir")):
            req = None
            rpm_name = None
            try:
                (rpm_name, _tpl) = self._get_template_and_rpm_name(inst)
                req = inst.egg_info['req']
            except AttributeError:
                pass
            capture_rpm(rpm_name, req)
        for rpm_name in inst.package_names():
            capture_rpm(rpm_name, None)
    for rpm_name in self.requirements["requires"]:
        capture_rpm(rpm_name, None)

    cmd = [self.yumfind_executable, '-j']
    desired_rpms_formatted = sorted(desired_rpms_formatted)
    for p in desired_rpms_formatted:
        cmd.extend(['-p', p])
    header = ("Validating %s required packages are still available" %
              (len(desired_rpms)))
    utils.log_iterable(desired_rpms_formatted, header=header, logger=LOG)

    rpms_located = []
    rpm_names_located = set()
    for matched in sh.execute(cmd)[0].splitlines():
        matched = matched.strip()
        if matched:
            pkg = json.loads(matched)
            if isinstance(pkg, dict):
                rpm_names_located.add(pkg['name'])
                rpms_located.append(pkg)

    rpm_names_missing = desired_rpm_names - rpm_names_located
    if rpm_names_missing:
        # Include the python version required information (if applicable).
        missing_formatted = []
        for n in sorted(rpm_names_missing):
            source_found = False
            for (n2, py_req) in desired_rpms:
                if n2 == n:
                    missing_formatted.append(format_name(n2, py_req))
                    source_found = True
                    break
            if not source_found:
                missing_formatted.append(format_name(n, None))
        msg = "Could not find available rpm packages: %s"
        msg = msg % (", ".join(missing_formatted))
        raise excp.DependencyException(msg)

    LOG.info("All %s required packages are still available!",
             len(desired_rpms))
    desired_rpms = []
    for pkg in rpms_located:
        LOG.debug("Found %s", pkg)
        desired_rpms.append("%s,%s" % (pkg['name'], pkg['version']))
    return list(sorted(desired_rpms))

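# Illustrative only: each non-empty line of the yumfind -j output parsed above
# is assumed to be a JSON object along the lines of
# {"name": "python-requests", "version": "1.1.0", ...} (a hypothetical
# example); only the 'name' and 'version' fields are consumed here.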