def _unconfigure_links(self):
    """Remove every symlink recorded by the trace reader (as root)."""
    symlinks = self.tracereader.symlinks_made()
    if not symlinks:
        return
    utils.log_iterable(symlinks, logger=LOG,
                       header="Removing %s symlink files" % (len(symlinks)))
    for link_path in symlinks:
        sh.unlink(link_path, run_as_root=True)
def download_dependencies(download_dir, pips_to_download, output_filename):
    """Download the given pip requirements into ``download_dir`` via pip.

    Clears any previously downloaded files (pip mishandles partial leftovers),
    recreates a scratch ``.build`` directory, then runs ``pip install
    --download`` saving the command output to ``output_filename``.
    """
    if not pips_to_download:
        return
    # NOTE(aababilov): pip has issues with already downloaded files
    if sh.isdir(download_dir):
        for filename in sh.listdir(download_dir, files_only=True):
            sh.unlink(filename)
    else:
        sh.mkdir(download_dir)
    # Clean out any previous paths that we don't want around.
    build_path = sh.joinpths(download_dir, ".build")
    if sh.isdir(build_path):
        sh.deldir(build_path)
    sh.mkdir(build_path)
    cmdline = [
        PIP_EXECUTABLE, '-v',
        'install', '-I', '-U',
        '--download', download_dir,
        '--build', build_path,
        # Don't download wheels since we lack the ability to create
        # rpms from them (until future when we will have it, if ever)...
        "--no-use-wheel",
    ]
    # Each requirement may expand into several command-line segments;
    # drop any empty segments produced by the split.
    for p in pips_to_download:
        for p_seg in _split(p):
            if p_seg:
                cmdline.append(p_seg)
    sh.execute_save_output(cmdline, output_filename)
def _uninstall_files(self):
    """Delete (as root) the miscellaneous files recorded as touched."""
    touched = self.tracereader.files_touched()
    if not touched:
        return
    utils.log_iterable(touched, logger=LOG,
                       header="Removing %s miscellaneous files" % (len(touched)))
    for path in touched:
        sh.unlink(path, run_as_root=True)
def stop(self):
    """Stop the applications recorded as started for this component.

    Returns the number of applications stopped. The trace file is only
    removed when everything that was started got stopped; otherwise a
    warning points the user at the trace file for manual cleanup.
    """
    # Anything to stop in the first place??
    what_was_started = []
    try:
        what_was_started = self.tracereader.apps_started()
    except excp.NoTraceException:
        pass
    if not what_was_started:
        return 0
    # Get the investigators/runners which can be used
    # to actually do the stopping and attempt to perform said stop.
    applications_stopped = []
    for (name, handler) in self._locate_investigators(what_was_started):
        handler.stop(name)
        applications_stopped.append(name)
    if applications_stopped:
        utils.log_iterable(applications_stopped,
                           header="Stopped %s programs started under %s component"
                                  % (len(applications_stopped), self.name),
                           logger=LOG)
    # Only if we stopped the amount which was supposedly started can
    # we actually remove the trace where those applications have been
    # marked as started in (ie the connection back to how they were started)
    if len(applications_stopped) < len(what_was_started):
        diff = len(what_was_started) - len(applications_stopped)
        # Fixed grammar in the user-facing warning ("program" -> "programs").
        LOG.warn(("%s less applications were stopped than were started, please check out %s"
                  " to stop these programs manually."), diff,
                 colorizer.quote(self.tracereader.filename(), quote_color='yellow'))
    else:
        sh.unlink(self.tracereader.filename())
    return len(applications_stopped)
def _unconfigure_files(self):
    """Delete (as root) the configuration files recorded by the tracer."""
    configured = self.tracereader.files_configured()
    if not configured:
        return
    utils.log_iterable(configured, logger=LOG,
                       header="Removing %s configuration files" % (len(configured)))
    for path in configured:
        sh.unlink(path, run_as_root=True)
def _uninstall_touched_files(self):
    """Delete (as root) the miscellaneous files recorded as touched."""
    touched = self.tracereader.files_touched()
    if not touched:
        return
    utils.log_iterable(touched, logger=LOG,
                       header="Removing %s miscellaneous files" % (len(touched)))
    for path in touched:
        sh.unlink(path, run_as_root=True)
def _remove(self, pkg):
    """Remove the package, then unlink any symlinks it declared."""
    yum.YumPackager._remove(self, pkg)
    # `or {}`/`or []` also covers the case of an explicit None value.
    options = pkg.get('packager_options') or {}
    for entry in (options.get('links') or []):
        target = entry['target']
        if sh.islink(target):
            sh.unlink(target)
def download_dependencies(download_dir, pips_to_download, output_filename):
    """Download the given pip requirements into ``download_dir`` via pip.

    Clears stale downloads, recreates the scratch ``.build`` directory,
    keeps a persistent ``.cache`` directory across runs, then runs
    ``pip install --download`` saving the output to ``output_filename``.
    """
    if not pips_to_download:
        return
    # NOTE(aababilov): pip has issues with already downloaded files
    if sh.isdir(download_dir):
        for filename in sh.listdir(download_dir, files_only=True):
            sh.unlink(filename)
    else:
        sh.mkdir(download_dir)
    # Clean out any previous paths that we don't want around.
    build_path = sh.joinpths(download_dir, ".build")
    if sh.isdir(build_path):
        sh.deldir(build_path)
    sh.mkdir(build_path)
    # Ensure certain directories exist that we want to exist (but we don't
    # want to delete them run after run).
    cache_path = sh.joinpths(download_dir, ".cache")
    if not sh.isdir(cache_path):
        sh.mkdir(cache_path)
    cmdline = [
        PIP_EXECUTABLE, '-v',
        'install', '-I', '-U',
        '--download', download_dir,
        '--build', build_path,
        '--download-cache', cache_path,
    ]
    # Don't download wheels...
    #
    # See: https://github.com/pypa/pip/issues/1439
    if dist_version.StrictVersion(PIP_VERSION) >= dist_version.StrictVersion('1.5'):
        # --no-use-wheel only exists (and is needed) from pip 1.5 onwards.
        cmdline.append("--no-use-wheel")
    cmdline.extend([str(p) for p in pips_to_download])
    sh.execute_save_output(cmdline, output_filename)
def _uninstall_files(self):
    """Remove the touched files that still exist on disk."""
    remaining = [fn for fn in self.tracereader.files_touched()
                 if sh.isfile(fn)]
    if not remaining:
        return
    utils.log_iterable(remaining, logger=LOG,
                       header="Removing %s miscellaneous files" % (len(remaining)))
    for path in remaining:
        sh.unlink(path)
def destroy(self):
    """Uninstall, then purge every traced file/directory and the trace itself."""
    self.uninstall()
    # Clear out any files touched.
    if self.tracereader.exists():
        for touched_file in self.tracereader.files_touched():
            sh.unlink(touched_file)
        for made_dir in self.tracereader.dirs_made():
            sh.deldir(made_dir)
        sh.unlink(self.tracereader.filename())
def _setup_logs(self, clear=False): log_fns = [self.access_log, self.error_log] utils.log_iterable(log_fns, logger=LOG, header="Adjusting %s log files" % (len(log_fns))) for fn in log_fns: if clear: sh.unlink(fn, True) sh.touch_file(fn, die_if_there=False, tracewriter=self.tracewriter) sh.chmod(fn, 0666) return len(log_fns)
def _build_dependencies(self): (pips_downloaded, package_files) = self.download_dependencies() # Analyze what was downloaded and eject things that were downloaded # by pip as a dependency of a download but which we do not want to # build or can satisfy by other means no_pips = [pkg_resources.Requirement.parse(name).key for name in self.python_names] yum_map = self._get_known_yum_packages() pips_keys = set([p.key for p in pips_downloaded]) def _filter_package_files(package_files): package_reqs = [] package_keys = [] for filename in package_files: package_details = pip_helper.get_archive_details(filename) package_reqs.append(package_details['req']) package_keys.append(package_details['req'].key) package_rpm_names = self.py2rpm_helper.convert_names_to_rpm(package_keys) filtered_files = [] for (filename, req, rpm_name) in zip(package_files, package_reqs, package_rpm_names): if req.key in no_pips: LOG.info(("Dependency %s was downloaded additionally " "but it is disallowed."), colorizer.quote(req)) continue if req.key in pips_keys: filtered_files.append(filename) continue # See if pip tried to download it but we already can satisfy # it via yum and avoid building it in the first place... (_version, repo) = self._find_yum_match(yum_map, req, rpm_name) if not repo: filtered_files.append(filename) else: LOG.info(("Dependency %s was downloaded additionally " "but it can be satisfied by %s from repository " "%s instead."), colorizer.quote(req), colorizer.quote(rpm_name), colorizer.quote(repo)) return filtered_files LOG.info("Filtering %s downloaded files.", len(package_files)) filtered_package_files = _filter_package_files(package_files) if not filtered_package_files: LOG.info("No SRPM package dependencies to build.") return for filename in package_files: if filename not in filtered_package_files: sh.unlink(filename) package_files = sorted(filtered_package_files) # Now build them into SRPM rpm files. 
self.py2rpm_helper.build_all_srpms(package_files=package_files, tracewriter=self.tracewriter, jobs=self._jobs)
def _remove(self, pkg):
    """Remove the package and, on success, unlink any symlinks it declared.

    Returns the underlying yum packager's response so callers can tell
    whether the removal actually happened.
    """
    response = yum.YumPackager._remove(self, pkg)
    if response:
        # Use `or {}`/`or []` rather than .get() defaults so an explicit
        # None stored under these keys is tolerated too (matching the
        # other yum packager _remove implementation).
        options = pkg.get('packager_options') or {}
        links = options.get('links') or []
        for entry in links:
            # Removed an unused `source` local; only the target matters here.
            tgt = entry['target']
            if sh.islink(tgt):
                sh.unlink(tgt)
    return response
def stop(self, app_name):
    """Emit the component stop event (at most once) and drop the trace file."""
    trace_fn = tr.trace_fn(self.runtime.get_option('trace_dir'),
                           UPSTART_TEMPL % (app_name))
    # Emit the stop event, keeping track so we only do one per component name.
    component_event = self.name + STOP_EVENT_SUFFIX
    if component_event in self.events:
        LOG.debug("Already emitted event: %r" % (component_event))
    else:
        LOG.debug("About to emit event: %r" % (component_event))
        emit_cmd = EMIT_BASE_CMD + [component_event]
        sh.execute(*emit_cmd, run_as_root=True)
        self.events.add(component_event)
    sh.unlink(trace_fn)
def _setup_logs(self, clear): log_fns = [self.access_log, self.error_log] utils.log_iterable(log_fns, logger=LOG, header="Adjusting %s log files" % (len(log_fns))) for fn in log_fns: with sh.Rooted(True): if clear: sh.unlink(fn, True) sh.mkdirslist(sh.dirname(fn)) sh.touch_file(fn, die_if_there=False) sh.chmod(fn, 0666) self.tracewriter.file_touched(fn) return len(log_fns)
def download_dependencies(self): """Download dependencies from `$deps_dir/download-requires`.""" # NOTE(aababilov): do not drop download_dir - it can be reused sh.mkdirslist(self.download_dir, tracewriter=self.tracewriter) pips_to_download = self._filter_download_requires() sh.write_file(self.download_requires_filename, "\n".join([str(req) for req in pips_to_download])) if not pips_to_download: return ([], []) # NOTE(aababilov): user could have changed persona, so, # check that all requirements are downloaded if (sh.isfile(self.downloaded_flag_file) and self._requirements_satisfied(pips_to_download, self.download_dir)): LOG.info("All python dependencies have been already downloaded") else: pip_failures = [] for attempt in xrange(self.MAX_PIP_DOWNLOAD_ATTEMPTS): # NOTE(aababilov): pip has issues with already downloaded files for filename in sh.listdir(self.download_dir, files_only=True): sh.unlink(filename) header = "Downloading %s python dependencies (attempt %s)" header = header % (len(pips_to_download), attempt + 1) utils.log_iterable(sorted(pips_to_download), logger=LOG, header=header) failed = False try: self._try_download_dependencies(attempt + 1, pips_to_download, self.download_dir) pip_failures = [] except exc.ProcessExecutionError as e: LOG.exception("Failed downloading python dependencies") pip_failures.append(e) failed = True if not failed: break if pip_failures: raise pip_failures[-1] # NOTE(harlowja): Mark that we completed downloading successfully sh.touch_file(self.downloaded_flag_file, die_if_there=False, quiet=True, tracewriter=self.tracewriter) pips_downloaded = [ pip_helper.extract_requirement(p) for p in pips_to_download ] self._examine_download_dir(pips_downloaded, self.download_dir) return (pips_downloaded, sh.listdir(self.download_dir, files_only=True))
def stop(self, app_name):
    """Stop a forked application by pid and clean up its runtime files.

    Raises StopException if no trace directory exists, no valid pid can
    be read, or the process could not be killed.
    """
    trace_dir = self.runtime.get_option('trace_dir')
    if not sh.isdir(trace_dir):
        msg = "No trace directory found from which to stop: %s" % (app_name)
        raise excp.StopException(msg)
    # Killing and file removal both require root.
    with sh.Rooted(True):
        fn_name = FORK_TEMPL % (app_name)
        (pid_file, stderr_fn, stdout_fn) = self._form_file_names(fn_name)
        pid = self._extract_pid(pid_file)
        if not pid:
            msg = "Could not extract a valid pid from %s" % (pid_file)
            raise excp.StopException(msg)
        (killed, attempts) = sh.kill(pid)
        # Trash the files if it worked
        if killed:
            LOG.debug("Killed pid %s after %s attempts." % (pid, attempts))
            LOG.debug("Removing pid file %s" % (pid_file))
            sh.unlink(pid_file)
            LOG.debug("Removing stderr file %r" % (stderr_fn))
            sh.unlink(stderr_fn)
            LOG.debug("Removing stdout file %r" % (stdout_fn))
            sh.unlink(stdout_fn)
            trace_fn = tr.trace_filename(trace_dir, fn_name)
            if sh.isfile(trace_fn):
                LOG.debug("Removing %r trace file %r" % (app_name, trace_fn))
                sh.unlink(trace_fn)
        else:
            msg = "Could not stop %r after %s attempts" % (app_name, attempts)
            raise excp.StopException(msg)
def stop(self, app_name):
    """Stop a forked application by pid and clean up its runtime files.

    Raises StopException if no trace directory exists, no valid pid can
    be read, or the process could not be killed.
    """
    trace_dir = self.runtime.get_option('trace_dir')
    if not sh.isdir(trace_dir):
        msg = "No trace directory found from which to stop: %s" % (app_name)
        raise excp.StopException(msg)
    # Killing and file removal both require root.
    with sh.Rooted(True):
        fn_name = FORK_TEMPL % (app_name)
        (pid_file, stderr_fn, stdout_fn) = self._form_file_names(fn_name)
        pid = self._extract_pid(pid_file)
        if not pid:
            msg = "Could not extract a valid pid from %s" % (pid_file)
            raise excp.StopException(msg)
        (killed, attempts) = sh.kill(pid)
        # Trash the files if it worked
        if killed:
            LOG.debug("Killed pid %s after %s attempts." % (pid, attempts))
            LOG.debug("Removing pid file %s" % (pid_file))
            sh.unlink(pid_file)
            LOG.debug("Removing stderr file %r" % (stderr_fn))
            sh.unlink(stderr_fn)
            LOG.debug("Removing stdout file %r" % (stdout_fn))
            sh.unlink(stdout_fn)
            trace_fn = tr.trace_fn(trace_dir, fn_name)
            if sh.isfile(trace_fn):
                LOG.debug("Removing %r trace file %r" % (app_name, trace_fn))
                sh.unlink(trace_fn)
        else:
            msg = "Could not stop %r after %s attempts" % (app_name, attempts)
            raise excp.StopException(msg)
def stop(self):
    """Stop all started applications; returns how many were stopped."""
    apps_started = self.tracereader.apps_started()
    if not apps_started:
        # Anything to stop??
        return 0
    self.pre_stop(apps_started)
    killed_am = 0
    for (app_name, handler) in self._locate_investigators(apps_started):
        handler.stop(app_name)
        handler.unconfigure()
        killed_am += 1
    self.post_stop(apps_started)
    # Only drop the trace file once everything started has been stopped.
    if len(apps_started) == killed_am:
        sh.unlink(self.tracereader.filename())
    return killed_am
def stop(self):
    """Stop all traced applications; returns the number stopped."""
    killed_am = 0
    # Initialize to an empty list (not 0) so the later len() comparison
    # always operates on a sequence, matching what apps_started() returns.
    apps_started = []
    try:
        apps_started = self.tracereader.apps_started()
    except excp.NoTraceException:
        pass
    if not apps_started:
        return killed_am
    to_kill = self._locate_investigators(apps_started)
    for (app_name, handler) in to_kill:
        handler.stop(app_name)
        killed_am += 1
    # Only remove the trace file when everything started was stopped.
    if len(apps_started) == killed_am:
        sh.unlink(self.tracereader.filename())
    return killed_am
def _write_python_tarball(self, instance, pkg_dir, ensure_exists=None):
    """Build a gzipped sdist tarball of ``instance`` into the rpm sources dir.

    Runs ``setup.py sdist`` (logging output to the instance's sdist log),
    then optionally appends any ``ensure_exists`` paths that setup.py
    failed to include before gzipping and removing the raw tar.
    """
    def prefix_exists(text, in_what):
        # True when any entry in in_what starts with the given prefix.
        for t in in_what:
            if t.startswith(text):
                return True
        return False

    pkg_name = instance.egg_info['name']
    version = instance.egg_info['version']
    base_name = "%s-%s" % (pkg_name, version)
    cmdline = [
        sys.executable, "setup.py", "sdist",
        "--formats=tar",
        "--dist-dir", self.rpm_sources_dir,
    ]
    out_filename = sh.joinpths(self.log_dir, "sdist-%s.log" % (instance.name))
    sh.execute_save_output(cmdline, cwd=pkg_dir, out_filename=out_filename)
    archive_name = sh.joinpths(self.rpm_sources_dir, "%s.tar" % (base_name))
    if ensure_exists:
        with contextlib.closing(tarfile.open(archive_name, 'r')) as tfh:
            tar_entries = [t.path for t in tfh.getmembers()]
        missing_paths = {}
        for path in ensure_exists:
            tar_path = sh.joinpths(base_name, path)
            source_path = sh.joinpths(pkg_dir, path)
            # Only add paths that are absent from the tar but exist on disk.
            if not prefix_exists(tar_path, tar_entries) and sh.exists(source_path):
                missing_paths[tar_path] = source_path
        if missing_paths:
            utils.log_iterable(
                sorted(missing_paths.keys()), logger=LOG,
                header='%s paths were not archived and will now be' % (len(missing_paths)))
            with contextlib.closing(tarfile.open(archive_name, 'a')) as tfh:
                for (tar_path, source_path) in missing_paths.items():
                    tfh.add(source_path, tar_path)
    sh.gzip(archive_name)
    sh.unlink(archive_name)
def uninstall(self):
    """Remove traced files/dirs, then yum-remove the installed python rpms."""
    super(YumDependencyHandler, self).uninstall()
    if self.tracereader.exists():
        for touched_file in self.tracereader.files_touched():
            sh.unlink(touched_file)
        for made_dir in self.tracereader.dirs_made():
            sh.deldir(made_dir)
        sh.unlink(self.tracereader.filename())
        self.tracereader = None
    # Only ask yum to remove packages that are actually installed.
    rpm_names = [name
                 for name in self._convert_names_python2rpm(self.python_names)
                 if self.helper.is_installed(name)]
    if rpm_names:
        cmdline = ["yum", "remove", "--remove-leaves", "-y"] + rpm_names
        sh.execute(cmdline, stdout_fh=sys.stdout, stderr_fh=sys.stderr)
def _write_python_tarball(self, instance, pkg_dir, ensure_exists=None):
    """Build a gzipped sdist tarball of ``instance`` into the rpm sources dir.

    Runs ``setup.py sdist`` with PBR_VERSION pinned to the egg version
    (logging output to the instance's sdist log), then optionally appends
    any ``ensure_exists`` paths that setup.py failed to include before
    gzipping and removing the raw tar.
    """
    def prefix_exists(text, in_what):
        # True when any entry in in_what starts with the given prefix.
        for t in in_what:
            if t.startswith(text):
                return True
        return False

    pkg_name = instance.egg_info['name']
    version = instance.egg_info['version']
    base_name = "%s-%s" % (pkg_name, version)
    cmdline = [
        sys.executable, "setup.py", "sdist",
        "--formats=tar",
        "--dist-dir", self.rpm_sources_dir,
    ]
    env_overrides = {
        # Pin the version pbr reports so the sdist name is deterministic.
        'PBR_VERSION': version,
    }
    out_filename = sh.joinpths(self.log_dir, "sdist-%s.log" % (instance.name))
    sh.execute_save_output(cmdline, out_filename,
                           cwd=pkg_dir, env_overrides=env_overrides)
    archive_name = sh.joinpths(self.rpm_sources_dir, "%s.tar" % (base_name))
    if ensure_exists:
        with contextlib.closing(tarfile.open(archive_name, 'r')) as tfh:
            tar_entries = [t.path for t in tfh.getmembers()]
        missing_paths = {}
        for path in ensure_exists:
            tar_path = sh.joinpths(base_name, path)
            source_path = sh.joinpths(pkg_dir, path)
            # Only add paths that are absent from the tar but exist on disk.
            if not prefix_exists(tar_path, tar_entries) and sh.exists(source_path):
                missing_paths[tar_path] = source_path
        if missing_paths:
            utils.log_iterable(sorted(missing_paths.keys()), logger=LOG,
                               header='%s paths were not archived and will now be' % (len(missing_paths)))
            with contextlib.closing(tarfile.open(archive_name, 'a')) as tfh:
                for (tar_path, source_path) in missing_paths.items():
                    tfh.add(source_path, tar_path)
    sh.gzip(archive_name)
    sh.unlink(archive_name)
def stop(self, app_name):
    """Stop a forked application and remove its pid/stdout/stderr files.

    Raises StopException if no trace directory exists, the pid file holds
    no valid pid, or the process could not be killed.
    """
    # The location of the pid file should be in the attached
    # runtimes trace directory, so see if we can find said file
    # and then attempt to kill the pid that exists in that file
    # which if successful will signal to the rest of this code
    # that we can go through and cleanup the other remnants of said
    # pid such as the stderr/stdout files that were being written to...
    trace_dir = self.runtime.get_option('trace_dir')
    if not sh.isdir(trace_dir):
        msg = "No trace directory found from which to stop: %r" % (
            app_name)
        raise excp.StopException(msg)
    fork_fns = self._form_file_names(app_name)
    # A missing pid file (ENOENT) means the app is already gone: skip the
    # kill but still clean up any leftover files below.
    skip_kill = True
    pid = None
    try:
        pid = fork_fns.extract_pid()
        skip_kill = False
    except IOError as e:
        if e.errno == errno.ENOENT:
            pass
        else:
            skip_kill = False
    if not skip_kill and pid is None:
        msg = "Could not extract a valid pid from %r" % (fork_fns.pid)
        raise excp.StopException(msg)
    # Bother trying to kill said process?
    if not skip_kill:
        (killed, attempts) = sh.kill(pid)
    else:
        (killed, attempts) = (True, 0)
    # Trash the files if it worked
    if killed:
        if not skip_kill:
            LOG.debug("Killed pid '%s' after %s attempts.", pid, attempts)
        for leftover_fn in fork_fns.as_list():
            if sh.exists(leftover_fn):
                LOG.debug("Removing forking related file %r", (leftover_fn))
                sh.unlink(leftover_fn)
    else:
        msg = "Could not stop %r after %s attempts" % (app_name, attempts)
        raise excp.StopException(msg)
def stop(self, app_name):
    """Stop a forked application (as root) and remove its runtime files.

    Raises StopException if no trace directory exists, the pid file holds
    no valid pid, or the process could not be killed.
    """
    # The location of the pid file should be in the attached
    # runtimes trace directory, so see if we can find said file
    # and then attempt to kill the pid that exists in that file
    # which if successful will signal to the rest of this code
    # that we can go through and cleanup the other remnants of said
    # pid such as the stderr/stdout files that were being written to...
    trace_dir = self.runtime.get_option('trace_dir')
    if not sh.isdir(trace_dir):
        msg = "No trace directory found from which to stop: %r" % (app_name)
        raise excp.StopException(msg)
    with sh.Rooted(True):
        fork_fns = self._form_file_names(app_name)
        # A missing pid file (ENOENT) means the app is already gone: skip
        # the kill but still clean up any leftover files below.
        skip_kill = True
        pid = None
        try:
            pid = fork_fns.extract_pid()
            skip_kill = False
        except IOError as e:
            if e.errno == errno.ENOENT:
                pass
            else:
                skip_kill = False
        if not skip_kill and pid is None:
            msg = "Could not extract a valid pid from %r" % (fork_fns.pid)
            raise excp.StopException(msg)
        # Bother trying to kill said process?
        if not skip_kill:
            (killed, attempts) = sh.kill(pid)
        else:
            (killed, attempts) = (True, 0)
        # Trash the files if it worked
        if killed:
            if not skip_kill:
                LOG.debug("Killed pid '%s' after %s attempts.", pid, attempts)
            for leftover_fn in fork_fns.as_list():
                if sh.exists(leftover_fn):
                    LOG.debug("Removing forking related file %r", (leftover_fn))
                    sh.unlink(leftover_fn)
        else:
            msg = "Could not stop %r after %s attempts" % (app_name, attempts)
            raise excp.StopException(msg)
def download_dependencies(self): """Download dependencies from `$deps_dir/download-requires`.""" # NOTE(aababilov): do not drop download_dir - it can be reused sh.mkdirslist(self.download_dir, tracewriter=self.tracewriter) pips_to_download = self._filter_download_requires() sh.write_file(self.download_requires_filename, "\n".join([str(req) for req in pips_to_download])) if not pips_to_download: return ([], []) # NOTE(aababilov): user could have changed persona, so, # check that all requirements are downloaded if (sh.isfile(self.downloaded_flag_file) and self._requirements_satisfied(pips_to_download, self.download_dir)): LOG.info("All python dependencies have been already downloaded") else: pip_failures = [] for attempt in xrange(self.MAX_PIP_DOWNLOAD_ATTEMPTS): # NOTE(aababilov): pip has issues with already downloaded files for filename in sh.listdir(self.download_dir, files_only=True): sh.unlink(filename) header = "Downloading %s python dependencies (attempt %s)" header = header % (len(pips_to_download), attempt + 1) utils.log_iterable(sorted(pips_to_download), logger=LOG, header=header) failed = False try: self._try_download_dependencies(attempt + 1, pips_to_download, self.download_dir) pip_failures = [] except exc.ProcessExecutionError as e: LOG.exception("Failed downloading python dependencies") pip_failures.append(e) failed = True if not failed: break if pip_failures: raise pip_failures[-1] # NOTE(harlowja): Mark that we completed downloading successfully sh.touch_file(self.downloaded_flag_file, die_if_there=False, quiet=True, tracewriter=self.tracewriter) pips_downloaded = [pip_helper.extract_requirement(p) for p in pips_to_download] self._examine_download_dir(pips_downloaded, self.download_dir) return (pips_downloaded, sh.listdir(self.download_dir, files_only=True))
def _write_git_tarball(self, pkg_dir, spec_filename):
    """Archive git HEAD of pkg_dir into a gzipped tar named from the spec."""
    query_cmd = [
        "rpm", "-q", "--specfile", spec_filename,
        "--qf", "%{NAME}-%{VERSION}\n"
    ]
    tar_base = sh.execute(query_cmd, cwd=pkg_dir)[0].splitlines()[0].strip()
    # git 1.7.1 from RHEL doesn't understand --format=tar.gz
    output_filename = sh.joinpths(self.rpm_sources_dir, "%s.tar" % tar_base)
    archive_cmd = [
        "git", "archive", "--format=tar",
        "--prefix=%s/" % tar_base,
        "--output=%s" % output_filename,
        "HEAD",
    ]
    sh.execute(archive_cmd, cwd=pkg_dir)
    sh.gzip(output_filename)
    sh.unlink(output_filename)
def _write_git_tarball(self, instance, pkg_dir, spec_filename):
    """Archive git HEAD of pkg_dir into a gzipped tar named from the spec."""
    query_cmd = [
        "rpm", "-q", "--specfile", spec_filename,
        "--qf", "%{NAME}-%{VERSION}\n"
    ]
    tar_base = sh.execute(query_cmd, cwd=pkg_dir)[0].splitlines()[0].strip()
    # NOTE(harlowja): git 1.7.1 from RHEL doesn't understand --format=tar.gz
    output_filename = sh.joinpths(self.rpm_sources_dir, "%s.tar" % tar_base)
    archive_cmd = [
        "git", "archive", "--format=tar",
        "--prefix=%s/" % tar_base,
        "--output=%s" % output_filename,
        "HEAD",
    ]
    out_filename = sh.joinpths(self.log_dir, "git-tar-%s.log" % instance.name)
    sh.execute_save_output(archive_cmd, cwd=pkg_dir,
                           out_filename=out_filename)
    sh.gzip(output_filename)
    sh.unlink(output_filename)
def stop(self):
    """Stop the applications recorded as started for this component.

    Returns the number of applications stopped. The trace file is only
    removed when everything that was started got stopped; otherwise a
    warning points the user at the trace file for manual cleanup.
    """
    # Anything to stop in the first place??
    what_was_started = []
    try:
        what_was_started = self.tracereader.apps_started()
    except excp.NoTraceException:
        pass
    if not what_was_started:
        return 0
    # Get the investigators/runners which can be used
    # to actually do the stopping and attempt to perform said stop.
    applications_stopped = []
    for (name, handler) in self._locate_investigators(what_was_started):
        handler.stop(name)
        applications_stopped.append(name)
    if applications_stopped:
        utils.log_iterable(
            applications_stopped,
            header="Stopped %s programs started under %s component" %
                   (len(applications_stopped), self.name),
            logger=LOG)
    # Only if we stopped the amount which was supposedly started can
    # we actually remove the trace where those applications have been
    # marked as started in (ie the connection back to how they were started)
    if len(applications_stopped) < len(what_was_started):
        diff = len(what_was_started) - len(applications_stopped)
        # Fixed grammar in the user-facing warning ("program" -> "programs").
        LOG.warn((
            "%s less applications were stopped than were started, please check out %s"
            " to stop these programs manually."), diff,
            colorizer.quote(self.tracereader.filename(), quote_color='yellow'))
    else:
        sh.unlink(self.tracereader.filename())
    return len(applications_stopped)
def _on_completion(self, phase_name, results):
    """Drop the marker files of the stages opposite to the completed phase."""
    (base_name, to_destroy) = self._get_opposite_stages(phase_name)
    for stage_name in to_destroy:
        marker_fn = self._get_phase_filename(stage_name, base_name)
        if sh.isfile(marker_fn):
            sh.unlink(marker_fn)
def uninstall(self):
    """Undo the install: packages, touched files, directories, then trace."""
    self._uninstall_pkgs()
    self._uninstall_touched_files()
    self._uninstall_dirs()
    # Finally drop the trace file itself, now that everything it recorded
    # has been removed.
    LOG.debug("Deleting install trace file %r", self.tracereader.filename())
    sh.unlink(self.tracereader.filename())
def at_exit_cleaner():
    # Registered as an exit hook: removes the dummy file left behind.
    sh.unlink(DUMMY_FILE)
def unconfigure(self):
    """Remove the generated init file, if it exists."""
    if sh.isfile(self.init_fn):
        sh.unlink(self.init_fn)
def _delete_templates(self):
    """Delete the generated swift server configuration files."""
    for server_type in ("object", "container", "account"):
        conf_path = sh.joinpths(self.get_option("cfg_dir"),
                                "%s-server.conf" % server_type)
        sh.unlink(conf_path)
def _build_dependencies(self):
    """Download python dependencies, filter them, and build SRPMs.

    Ejects downloads that are disallowed (python components or explicitly
    ignored pips) or satisfiable from a yum repository; records the
    yum-satisfied requirements and the build-requires list to files,
    deletes the rejected archives, then hands the remainder to the
    py2rpm helper.
    """
    (pips_downloaded, package_files) = self.download_dependencies()
    # Analyze what was downloaded and eject things that were downloaded
    # by pip as a dependency of a download but which we do not want to
    # build or can satisfy by other means
    no_pips = [
        pkg_resources.Requirement.parse(name).key
        for name in self.python_names
    ]
    no_pips.extend(self.ignore_pips)
    yum_map = self._get_known_yum_packages()
    pips_keys = set([p.key for p in pips_downloaded])
    # Pair each downloaded archive with its parsed requirement.
    package_reqs = []
    for filename in package_files:
        package_details = pip_helper.get_archive_details(filename)
        package_reqs.append((filename, package_details['req']))

    def _filter_package_files():
        yum_provided = []
        req_names = [req.key for (filename, req) in package_reqs]
        package_rpm_names = self.py2rpm_helper.names_to_rpm_names(
            req_names)
        filtered_files = []
        for filename, req in package_reqs:
            rpm_name = package_rpm_names[req.key]
            if req.key in no_pips:
                LOG.info(("Dependency %s was downloaded additionally "
                          "but it is disallowed."), colorizer.quote(req))
                continue
            if req.key in pips_keys:
                # Explicitly requested: always keep it.
                filtered_files.append(filename)
                continue
            # See if pip tried to download it but we already can satisfy
            # it via yum and avoid building it in the first place...
            rpm_info = self._find_yum_match(yum_map, req, rpm_name)
            if not rpm_info:
                filtered_files.append(filename)
            else:
                yum_provided.append((req, rpm_info))
                LOG.info(("Dependency %s was downloaded additionally "
                          "but it can be satisfied by %s from repository "
                          "%s instead."), colorizer.quote(req),
                         colorizer.quote(rpm_name),
                         colorizer.quote(rpm_info['repo']))
        return (filtered_files, yum_provided)

    LOG.info("Filtering %s downloaded files.", len(package_files))
    filtered_package_files, yum_provided = _filter_package_files()
    if yum_provided:
        # Record (as json lines) which requirements yum will satisfy.
        yum_buff = six.StringIO()
        for (req, rpm_info) in yum_provided:
            dep_info = {
                'requirement': str(req),
                'rpm': rpm_info,
            }
            yum_buff.write(json.dumps(dep_info))
            yum_buff.write("\n")
        sh.append_file(self.yum_satisfies_filename, yum_buff.getvalue())
    if not filtered_package_files:
        LOG.info("No SRPM package dependencies to build.")
        return
    # Remove the archives that were filtered out.
    for filename in package_files:
        if filename not in filtered_package_files:
            sh.unlink(filename)
    build_requires = six.StringIO()
    for (filename, req) in package_reqs:
        if filename in filtered_package_files:
            build_requires.write("%s # %s\n" % (req, sh.basename(filename)))
    sh.write_file(self.build_requires_filename, build_requires.getvalue())
    # Now build them into SRPM rpm files.
    package_files = sorted(filtered_package_files)
    self.py2rpm_helper.build_all_srpms(package_files=package_files,
                                       tracewriter=self.tracewriter,
                                       jobs=self.jobs)
def _build_dependencies(self):
    """Download python dependencies, filter them, and build SRPMs via make.

    Ejects downloads that are disallowed (python components) or that can
    be satisfied from a yum repository, deletes the rejected archives,
    then drives the py2rpm makefile to build the remainder into SRPMs.
    """
    (pips_downloaded, package_files) = self.download_dependencies()
    # Analyze what was downloaded and eject things that were downloaded
    # by pip as a dependency of a download but which we do not want to
    # build or can satisfy by other means
    no_pips = [
        pkg_resources.Requirement.parse(name).key
        for name in self.python_names
    ]
    yum_map = self._get_known_yum_packages()
    pips_keys = set([p.key for p in pips_downloaded])

    def _filter_package_files(package_files):
        # Collect the parsed requirement (and its key) of each archive.
        package_reqs = []
        package_keys = []
        for filename in package_files:
            package_details = pip_helper.get_archive_details(filename)
            package_reqs.append(package_details['req'])
            package_keys.append(package_details['req'].key)
        package_rpm_names = self._convert_names_python2rpm(package_keys)
        filtered_files = []
        for (filename, req, rpm_name) in zip(package_files, package_reqs,
                                             package_rpm_names):
            if req.key in no_pips:
                LOG.info(("Dependency %s was downloaded additionally "
                          "but it is disallowed."), colorizer.quote(req))
                continue
            if req.key in pips_keys:
                # Explicitly requested: always keep it.
                filtered_files.append(filename)
                continue
            # See if pip tried to download it but we already can satisfy
            # it via yum and avoid building it in the first place...
            (_version, repo) = self._find_yum_match(yum_map, req, rpm_name)
            if not repo:
                filtered_files.append(filename)
            else:
                LOG.info(("Dependency %s was downloaded additionally "
                          "but it can be satisfied by %s from repository "
                          "%s instead."), colorizer.quote(req),
                         colorizer.quote(rpm_name), colorizer.quote(repo))
        return filtered_files

    LOG.info("Filtering %s downloaded files.", len(package_files))
    filtered_package_files = _filter_package_files(package_files)
    if not filtered_package_files:
        LOG.info("No SRPM package dependencies to build.")
        return
    # Remove the archives that were filtered out.
    for filename in package_files:
        if filename not in filtered_package_files:
            sh.unlink(filename)
    package_files = sorted(filtered_package_files)
    # Now build them into SRPM rpm files.
    (_fn, content) = utils.load_template(sh.joinpths("packaging", "makefiles"),
                                         "source.mk")
    scripts_dir = sh.abspth(
        sh.joinpths(settings.TEMPLATE_DIR, "packaging", "scripts"))
    py2rpm_options = self._py2rpm_start_cmdline()[1:] + [
        "--scripts-dir", scripts_dir,
        "--source-only",
        "--rpm-base", self.rpmbuild_dir,
    ]
    # Parameters substituted into the generated deps makefile.
    params = {
        "DOWNLOADS_DIR": self.download_dir,
        "LOGS_DIR": self.log_dir,
        "PY2RPM": self.py2rpm_executable,
        "PY2RPM_FLAGS": " ".join(py2rpm_options),
    }
    marks_dir = sh.joinpths(self.deps_dir, "marks-deps")
    if not sh.isdir(marks_dir):
        sh.mkdirslist(marks_dir, tracewriter=self.tracewriter)
    makefile_path = sh.joinpths(self.deps_dir, "deps.mk")
    sh.write_file(makefile_path, utils.expand_template(content, params),
                  tracewriter=self.tracewriter)
    utils.log_iterable(package_files,
                       header="Building %s SRPM packages using %s jobs" %
                              (len(package_files), self._jobs),
                       logger=LOG)
    self._execute_make(makefile_path, marks_dir)
def _build_dependencies(self):
    """Download python dependencies, filter them, and build SRPMs.

    Ejects downloads that are disallowed (python components or explicitly
    ignored pips) or satisfiable from a yum repository; records the
    yum-satisfied requirements and the build-requires list to files,
    deletes the rejected archives, then hands the remainder to the
    py2rpm helper.
    """
    (pips_downloaded, package_files) = self.download_dependencies()
    # Analyze what was downloaded and eject things that were downloaded
    # by pip as a dependency of a download but which we do not want to
    # build or can satisfy by other means
    no_pips = [pkg_resources.Requirement.parse(name).key
               for name in self.python_names]
    no_pips.extend(self.ignore_pips)
    yum_map = self._get_known_yum_packages()
    pips_keys = set([p.key for p in pips_downloaded])
    # Pair each downloaded archive with its parsed requirement.
    package_reqs = []
    for filename in package_files:
        package_details = pip_helper.get_archive_details(filename)
        package_reqs.append((filename, package_details['req']))

    def _filter_package_files():
        yum_provided = []
        req_names = [req.key for (filename, req) in package_reqs]
        package_rpm_names = self.py2rpm_helper.names_to_rpm_names(req_names)
        filtered_files = []
        for filename, req in package_reqs:
            rpm_name = package_rpm_names[req.key]
            if req.key in no_pips:
                LOG.info(("Dependency %s was downloaded additionally "
                          "but it is disallowed."), colorizer.quote(req))
                continue
            if req.key in pips_keys:
                # Explicitly requested: always keep it.
                filtered_files.append(filename)
                continue
            # See if pip tried to download it but we already can satisfy
            # it via yum and avoid building it in the first place...
            rpm_info = self._find_yum_match(yum_map, req, rpm_name)
            if not rpm_info:
                filtered_files.append(filename)
            else:
                yum_provided.append((req, rpm_info))
                LOG.info(("Dependency %s was downloaded additionally "
                          "but it can be satisfied by %s from repository "
                          "%s instead."), colorizer.quote(req),
                         colorizer.quote(rpm_name),
                         colorizer.quote(rpm_info['repo']))
        return (filtered_files, yum_provided)

    LOG.info("Filtering %s downloaded files.", len(package_files))
    filtered_package_files, yum_provided = _filter_package_files()
    if yum_provided:
        # Record (as json lines) which requirements yum will satisfy.
        yum_buff = six.StringIO()
        for (req, rpm_info) in yum_provided:
            dep_info = {
                'requirement': str(req),
                'rpm': rpm_info,
            }
            yum_buff.write(json.dumps(dep_info))
            yum_buff.write("\n")
        sh.append_file(self.yum_satisfies_filename, yum_buff.getvalue())
    if not filtered_package_files:
        LOG.info("No SRPM package dependencies to build.")
        return
    # Remove the archives that were filtered out.
    for filename in package_files:
        if filename not in filtered_package_files:
            sh.unlink(filename)
    build_requires = six.StringIO()
    for (filename, req) in package_reqs:
        if filename in filtered_package_files:
            build_requires.write("%s # %s\n" % (req, sh.basename(filename)))
    sh.write_file(self.build_requires_filename, build_requires.getvalue())
    # Now build them into SRPM rpm files.
    package_files = sorted(filtered_package_files)
    self.py2rpm_helper.build_all_srpms(package_files=package_files,
                                       tracewriter=self.tracewriter,
                                       jobs=self.jobs)
def _build_dependencies(self):
    """Download python dependencies, filter them, and build SRPMs via make.

    Ejects downloads that are disallowed (python components) or that can
    be satisfied from a yum repository, deletes the rejected archives,
    then drives the py2rpm makefile to build the remainder into SRPMs.
    """
    (pips_downloaded, package_files) = self.download_dependencies()
    # Analyze what was downloaded and eject things that were downloaded
    # by pip as a dependency of a download but which we do not want to
    # build or can satisfy by other means
    no_pips = [pkg_resources.Requirement.parse(name).key
               for name in self.python_names]
    yum_map = self._get_known_yum_packages()
    pips_keys = set([p.key for p in pips_downloaded])

    def _filter_package_files(package_files):
        # Collect the parsed requirement (and its key) of each archive.
        package_reqs = []
        package_keys = []
        for filename in package_files:
            package_details = pip_helper.get_archive_details(filename)
            package_reqs.append(package_details['req'])
            package_keys.append(package_details['req'].key)
        package_rpm_names = self._convert_names_python2rpm(package_keys)
        filtered_files = []
        for (filename, req, rpm_name) in zip(package_files, package_reqs,
                                             package_rpm_names):
            if req.key in no_pips:
                LOG.info(("Dependency %s was downloaded additionally "
                          "but it is disallowed."), colorizer.quote(req))
                continue
            if req.key in pips_keys:
                # Explicitly requested: always keep it.
                filtered_files.append(filename)
                continue
            # See if pip tried to download it but we already can satisfy
            # it via yum and avoid building it in the first place...
            (_version, repo) = self._find_yum_match(yum_map, req, rpm_name)
            if not repo:
                filtered_files.append(filename)
            else:
                LOG.info(("Dependency %s was downloaded additionally "
                          "but it can be satisfied by %s from repository "
                          "%s instead."), colorizer.quote(req),
                         colorizer.quote(rpm_name), colorizer.quote(repo))
        return filtered_files

    LOG.info("Filtering %s downloaded files.", len(package_files))
    filtered_package_files = _filter_package_files(package_files)
    if not filtered_package_files:
        LOG.info("No SRPM package dependencies to build.")
        return
    # Remove the archives that were filtered out.
    for filename in package_files:
        if filename not in filtered_package_files:
            sh.unlink(filename)
    package_files = sorted(filtered_package_files)
    # Now build them into SRPM rpm files.
    (_fn, content) = utils.load_template(sh.joinpths("packaging", "makefiles"),
                                         "source.mk")
    scripts_dir = sh.abspth(sh.joinpths(settings.TEMPLATE_DIR,
                                        "packaging", "scripts"))
    py2rpm_options = self._py2rpm_start_cmdline()[1:] + [
        "--scripts-dir", scripts_dir,
        "--source-only",
        "--rpm-base", self.rpmbuild_dir,
    ]
    # Parameters substituted into the generated deps makefile.
    params = {
        "DOWNLOADS_DIR": self.download_dir,
        "LOGS_DIR": self.log_dir,
        "PY2RPM": self.py2rpm_executable,
        "PY2RPM_FLAGS": " ".join(py2rpm_options),
    }
    marks_dir = sh.joinpths(self.deps_dir, "marks-deps")
    if not sh.isdir(marks_dir):
        sh.mkdirslist(marks_dir, tracewriter=self.tracewriter)
    makefile_path = sh.joinpths(self.deps_dir, "deps.mk")
    sh.write_file(makefile_path, utils.expand_template(content, params),
                  tracewriter=self.tracewriter)
    utils.log_iterable(package_files,
                       header="Building %s SRPM packages using %s jobs" %
                              (len(package_files), self._jobs),
                       logger=LOG)
    self._execute_make(makefile_path, marks_dir)
def stop(self, app_name):
    """Locate the screen session for the app, stop it, and drop its trace."""
    trace_fn = tr.trace_fn(self.runtime.get_option('trace_dir'),
                           SCREEN_TEMPL % (app_name))
    session_id = self._find_session(app_name, trace_fn)
    self._do_stop(app_name, session_id)
    sh.unlink(trace_fn)