def _config_adjust_api_reg(self, contents, fn):
    # Rewrite a glance api/registry configuration file and return the new
    # contents as a string; ``fn`` selects registry vs. api specific tweaks.
    gparams = ghelper.get_shared_params(**self.options)
    with io.BytesIO(contents) as stream:
        config = cfg.create_parser(cfg.RewritableConfigParser, self)
        config.readfp(stream)
        config.set('DEFAULT', 'debug', self.get_bool_option('verbose'))
        config.set('DEFAULT', 'verbose', self.get_bool_option('verbose'))
        if fn in [REG_CONF]:
            # The registry daemon binds on its own endpoint port.
            config.set('DEFAULT', 'bind_port', gparams['endpoints']['registry']['port'])
        else:
            config.set('DEFAULT', 'bind_port', gparams['endpoints']['public']['port'])
        # Point the service at the shared database (utf8 enabled).
        config.set('DEFAULT', 'sql_connection',
                   dbhelper.fetch_dbdsn(dbname=DB_NAME,
                                        utf8=True,
                                        dbtype=self.get_option('db', 'type'),
                                        **utils.merge_dicts(self.get_option('db'),
                                                            dbhelper.get_shared_passwords(self))))
        # Let logging go to the default location instead of a fixed file.
        config.remove_option('DEFAULT', 'log_file')
        config.set('paste_deploy', 'flavor', self.get_option('paste_flavor'))
        for (k, v) in self._fetch_keystone_params().items():
            config.set('keystone_authtoken', k, v)
        if fn in [API_CONF]:
            # Api only: use a (recreated-empty) filesystem image store.
            config.set('DEFAULT', 'default_store', 'file')
            img_store_dir = sh.joinpths(self.get_option('component_dir'), 'images')
            config.set('DEFAULT', 'filesystem_store_datadir', img_store_dir)
            LOG.debug("Ensuring file system store directory %r exists and is empty." % (img_store_dir))
            if sh.isdir(img_store_dir):
                sh.deldir(img_store_dir)
            sh.mkdirslist(img_store_dir, tracewriter=self.tracewriter, adjust_suids=True)
        return config.stringify(fn)
def download_dependencies(download_dir, pips_to_download, output_filename):
    """Fetch the given pip requirements into ``download_dir``.

    The pip command output is captured into ``output_filename``. Does
    nothing when there is nothing to download.
    """
    if not pips_to_download:
        return
    # Pip misbehaves when target files already exist, so start clean.
    if not sh.isdir(download_dir):
        sh.mkdir(download_dir)
    else:
        for existing in sh.listdir(download_dir, files_only=True):
            sh.unlink(existing)
    # Recreate the scratch build area for every run.
    scratch_build_dir = sh.joinpths(download_dir, ".build")
    if sh.isdir(scratch_build_dir):
        sh.deldir(scratch_build_dir)
    sh.mkdir(scratch_build_dir)
    # The download cache is intentionally kept between runs.
    download_cache_dir = sh.joinpths(download_dir, ".cache")
    if not sh.isdir(download_cache_dir):
        sh.mkdir(download_cache_dir)
    cmdline = [
        PIP_EXECUTABLE, '-v',
        'install', '-I', '-U',
        '--download', download_dir,
        '--build', scratch_build_dir,
        '--download-cache', download_cache_dir,
    ]
    # Don't download wheels (only supported/needed on pip >= 1.5)...
    # See: https://github.com/pypa/pip/issues/1439
    if dist_version.StrictVersion(PIP_VERSION) >= dist_version.StrictVersion('1.5'):
        cmdline.append("--no-use-wheel")
    cmdline.extend(str(requirement) for requirement in pips_to_download)
    sh.execute_save_output(cmdline, output_filename)
def download_dependencies(download_dir, pips_to_download, output_filename):
    """Download the given pip requirements into ``download_dir``.

    Command output is saved into ``output_filename``; no-op when no
    requirements are provided.
    """
    if not pips_to_download:
        return
    # Pip has issues with files that were already downloaded, so remove
    # any leftovers before starting.
    if sh.isdir(download_dir):
        for existing in sh.listdir(download_dir, files_only=True):
            sh.unlink(existing)
    else:
        sh.mkdir(download_dir)
    # Always begin with an empty scratch build area.
    scratch_build_dir = sh.joinpths(download_dir, ".build")
    if sh.isdir(scratch_build_dir):
        sh.deldir(scratch_build_dir)
    sh.mkdir(scratch_build_dir)
    cmdline = [
        PIP_EXECUTABLE, '-v',
        'install', '-I', '-U',
        '--download', download_dir,
        '--build', scratch_build_dir,
        # Don't download wheels since we lack the ability to create
        # rpms from them (until future when we will have it, if ever)...
        "--no-use-wheel",
    ]
    for requirement in pips_to_download:
        cmdline.extend(segment for segment in _split(requirement) if segment)
    sh.execute_save_output(cmdline, output_filename)
def _uninstall_dirs(self):
    """Delete (as root) any still-existing directories this trace created."""
    surviving = [d for d in self.tracereader.dirs_made() if sh.isdir(d)]
    if surviving:
        utils.log_iterable(surviving, logger=LOG,
                           header="Removing %s created directories" % (len(surviving)))
        for surviving_dir in surviving:
            sh.deldir(surviving_dir, run_as_root=True)
def _config_adjust_api(self, contents, fn):
    # Adjust the glance api configuration contents and return the
    # stringified version of the rewritten configuration.
    params = ghelper.get_shared_params(**self.options)
    with io.BytesIO(contents) as stream:
        config = cfg.create_parser(cfg.RewritableConfigParser, self)
        config.readfp(stream)
        img_store_dir = sh.joinpths(self.get_option('component_dir'), 'images')
        config.set('DEFAULT', 'debug', self.get_bool_option('verbose', ))
        config.set('DEFAULT', 'verbose', self.get_bool_option('verbose'))
        # Use a local filesystem backed image store.
        config.set('DEFAULT', 'default_store', 'file')
        config.set('DEFAULT', 'filesystem_store_datadir', img_store_dir)
        config.set('DEFAULT', 'bind_port', params['endpoints']['public']['port'])
        # Point at the shared database (utf8 enabled).
        config.set(
            'DEFAULT', 'sql_connection',
            dbhelper.fetch_dbdsn(dbname=DB_NAME,
                                 utf8=True,
                                 dbtype=self.get_option('db', 'type'),
                                 **utils.merge_dicts(
                                     self.get_option('db'),
                                     dbhelper.get_shared_passwords(self))))
        config.remove_option('DEFAULT', 'log_file')
        config.set('paste_deploy', 'flavor', self.get_option('paste_flavor'))
        LOG.debug(
            "Ensuring file system store directory %r exists and is empty." % (img_store_dir))
        # Recreate the image store empty, recording the directories made.
        sh.deldir(img_store_dir)
        self.tracewriter.dirs_made(*sh.mkdirslist(img_store_dir))
        return config.stringify(fn)
def _uninstall_dirs(self):
    """Remove any previously created directories that still exist."""
    still_there = [d for d in self.tracereader.dirs_made() if sh.isdir(d)]
    if not still_there:
        return
    utils.log_iterable(still_there, logger=LOG,
                       header="Removing %s created directories" % (len(still_there)))
    for dir_name in still_there:
        sh.deldir(dir_name)
def post_bootstrap(self):
    """Relocate rpms produced during bootstrap into the deps repository."""
    bootstrap_rpmbuild = sh.joinpths('.bootstrap', 'rpmbuild')
    if not sh.isdir(bootstrap_rpmbuild):
        return
    LOG.info("Moving RPMS build on bootstrap to deps repo")
    self._move_srpms("anvil-deps", bootstrap_rpmbuild)
    self._move_rpm_files(bootstrap_rpmbuild,
                         sh.joinpths(self.anvil_repo_dir, 'anvil-deps'))
    sh.deldir(bootstrap_rpmbuild)
def download_dependencies(self):
    """Download dependencies from `$deps_dir/download-requires`. """
    # NOTE(aababilov): do not drop download_dir - it can be reused
    sh.mkdirslist(self.download_dir, tracewriter=self.tracewriter)
    download_requires_filename = sh.joinpths(self.deps_dir,
                                             "download-requires")
    raw_pips_to_download = self.filter_download_requires()
    # Persist what we intend to download for later inspection.
    sh.write_file(download_requires_filename,
                  "\n".join(str(req) for req in raw_pips_to_download))
    if not raw_pips_to_download:
        return ([], [])
    downloaded_flag_file = sh.joinpths(self.deps_dir, "pip-downloaded")
    # NOTE(aababilov): user could have changed persona, so,
    # check that all requirements are downloaded
    if sh.isfile(downloaded_flag_file) and self._requirements_satisfied(
            raw_pips_to_download, self.download_dir):
        LOG.info("All python dependencies have been already downloaded")
    else:
        pip_dir = sh.joinpths(self.deps_dir, "pip")
        pip_download_dir = sh.joinpths(pip_dir, "download")
        pip_build_dir = sh.joinpths(pip_dir, "build")
        # NOTE(aababilov): do not clean the cache, it is always useful
        pip_cache_dir = sh.joinpths(self.deps_dir, "pip-cache")
        pip_failures = []
        # Retry the download a bounded number of times; only the last
        # failure is re-raised if every attempt fails.
        for attempt in xrange(self.MAX_PIP_DOWNLOAD_ATTEMPTS):
            # NOTE(aababilov): pip has issues with already downloaded files
            sh.deldir(pip_dir)
            sh.mkdir(pip_download_dir, recurse=True)
            header = "Downloading %s python dependencies (attempt %s)"
            header = header % (len(raw_pips_to_download), attempt)
            utils.log_iterable(sorted(raw_pips_to_download),
                               logger=LOG,
                               header=header)
            failed = False
            try:
                self._try_download_dependencies(attempt,
                                                raw_pips_to_download,
                                                pip_download_dir,
                                                pip_cache_dir,
                                                pip_build_dir)
                pip_failures = []
            except exc.ProcessExecutionError as e:
                LOG.exception("Failed downloading python dependencies")
                pip_failures.append(e)
                failed = True
            if not failed:
                break
        # Move whatever was fetched into the shared download dir.
        for filename in sh.listdir(pip_download_dir, files_only=True):
            sh.move(filename, self.download_dir, force=True)
        sh.deldir(pip_dir)
        if pip_failures:
            raise pip_failures[-1]
        # Touch a marker so later runs can skip re-downloading.
        with open(downloaded_flag_file, "w"):
            pass
    pips_downloaded = [pip_helper.extract_requirement(p)
                       for p in raw_pips_to_download]
    self._examine_download_dir(pips_downloaded, self.download_dir)
    what_downloaded = sh.listdir(self.download_dir, files_only=True)
    return (pips_downloaded, what_downloaded)
def tempdir(**kwargs):
    # Yield a freshly made temporary directory, always removing it after.
    # This seems like it was only added in python 3.2 so make it since
    # its useful...
    # See: http://bugs.python.org/file12970/tempdir.patch
    scratch_dir = tempfile.mkdtemp(**kwargs)
    try:
        yield scratch_dir
    finally:
        sh.deldir(scratch_dir)
def post_bootstrap(self):
    """Move rpms created while bootstrapping into the anvil-deps repo."""
    rpmbuild_dir = sh.joinpths('.bootstrap', 'rpmbuild')
    if sh.isdir(rpmbuild_dir):
        LOG.info("Moving RPMS build on bootstrap to deps repo")
        self._move_srpms("anvil-deps", rpmbuild_dir)
        deps_repo = sh.joinpths(self.anvil_repo_dir, 'anvil-deps')
        self._move_rpm_files(rpmbuild_dir, deps_repo)
        sh.deldir(rpmbuild_dir)
def destroy(self):
    """Uninstall, then scrub every file/directory recorded in the trace."""
    self.uninstall()
    # Clear out any files touched.
    if self.tracereader.exists():
        for touched_file in self.tracereader.files_touched():
            sh.unlink(touched_file)
        for made_dir in self.tracereader.dirs_made():
            sh.deldir(made_dir)
        # Finally drop the trace file itself.
        sh.unlink(self.tracereader.filename())
def build_paths(self):
    """Lazily create (and cache) the rpmbuild directory layout."""
    if self._build_paths is None:
        paths = {}
        for dir_name in RPM_DIR_NAMES:
            target = sh.joinpths(self.package_dir, dir_name.upper())
            paths[dir_name] = target
            # Always start each build with a clean directory.
            if sh.isdir(target):
                sh.deldir(target, True)
            sh.mkdirslist(target, tracewriter=self.tracewriter)
        self._build_paths = paths
    # Return copy (not the same instance)
    return copy.deepcopy(self._build_paths)
def build_paths(self):
    """Create (once) and return a copy of the rpm build directory mapping."""
    if self._build_paths is None:
        created = {}
        for dir_name in ['sources', 'specs', 'srpms', 'rpms', 'build']:
            target = sh.joinpths(self.package_dir, dir_name.upper())
            created[dir_name] = target
            # Start from a clean slate for every build.
            if sh.isdir(target):
                sh.deldir(target, True)
            self.tracewriter.dirs_made(*sh.mkdirslist(target))
        self._build_paths = created
    # Hand back a shallow copy so callers can't mutate our cache.
    return dict(self._build_paths)
def _config_adjust_api(self, config):
    """Apply api-only glance settings on top of the shared registry ones."""
    self._config_adjust_api_reg(config)
    gparams = ghelper.get_shared_params(**self.installer.options)
    config.add('bind_port', gparams['endpoints']['public']['port'])
    config.add('default_store', 'file')
    img_store_dir = sh.joinpths(self.installer.get_option('component_dir'),
                                'images')
    config.add('filesystem_store_datadir', img_store_dir)
    LOG.debug("Ensuring file system store directory %r exists and is empty." % (img_store_dir))
    # Recreate the image store empty, tracking the creation.
    if sh.isdir(img_store_dir):
        sh.deldir(img_store_dir)
    sh.mkdirslist(img_store_dir, tracewriter=self.installer.tracewriter)
def _config_adjust_api(self, config):
    """Tweak the glance api config (shared registry tweaks applied first)."""
    self._config_adjust_api_reg(config)
    shared = ghelper.get_shared_params(**self.installer.options)
    config.add('bind_port', shared['endpoints']['public']['port'])
    config.add('default_store', 'file')
    store_dir = sh.joinpths(self.installer.get_option('component_dir'),
                            'images')
    config.add('filesystem_store_datadir', store_dir)
    LOG.debug(
        "Ensuring file system store directory %r exists and is empty." % (store_dir))
    # Wipe and recreate the store directory, recording the creation.
    if sh.isdir(store_dir):
        sh.deldir(store_dir)
    sh.mkdirslist(store_dir, tracewriter=self.installer.tracewriter)
def uninstall(self):
    """Remove traced files/dirs then yum-erase the matching python rpms."""
    super(YumDependencyHandler, self).uninstall()
    if self.tracereader.exists():
        for touched in self.tracereader.files_touched():
            sh.unlink(touched)
        for made in self.tracereader.dirs_made():
            sh.deldir(made)
        sh.unlink(self.tracereader.filename())
        self.tracereader = None
    # Only ask yum to remove rpms that are actually installed.
    installed_rpms = [name
                      for name in self._convert_names_python2rpm(self.python_names)
                      if self.helper.is_installed(name)]
    if installed_rpms:
        cmdline = ["yum", "remove", "--remove-leaves", "-y"] + installed_rpms
        sh.execute(cmdline, stdout_fh=sys.stdout, stderr_fh=sys.stderr)
def download_dependencies(self, clear_cache=False):
    """Download dependencies from `$deps_dir/download-requires`.

    :param clear_cache: clear `$deps_dir/cache` dir (pip can work
        incorrectly when it has a cache)
    """
    # Start with a fresh download directory each time.
    sh.deldir(self.download_dir)
    sh.mkdir(self.download_dir, recurse=True)
    download_requires_filename = sh.joinpths(self.deps_dir,
                                             "download-requires")
    raw_pips_to_download = self.filter_download_requires()
    # Parse the raw requirement strings, skipping blank entries.
    pips_to_download = [
        pkg_resources.Requirement.parse(str(p.strip()))
        for p in raw_pips_to_download if p.strip()
    ]
    sh.write_file(download_requires_filename,
                  "\n".join(str(req) for req in pips_to_download))
    if not pips_to_download:
        return []
    pip_dir = sh.joinpths(self.deps_dir, "pip")
    pip_download_dir = sh.joinpths(pip_dir, "download")
    pip_build_dir = sh.joinpths(pip_dir, "build")
    pip_cache_dir = sh.joinpths(pip_dir, "cache")
    if clear_cache:
        sh.deldir(pip_cache_dir)
    pip_failures = []
    how_many = len(pips_to_download)
    # Retry a bounded number of times; re-raise the last failure if
    # every attempt fails.
    for attempt in xrange(self.MAX_PIP_DOWNLOAD_ATTEMPTS):
        # NOTE(aababilov): pip has issues with already downloaded files
        sh.deldir(pip_download_dir)
        sh.mkdir(pip_download_dir, recurse=True)
        sh.deldir(pip_build_dir)
        utils.log_iterable(sorted(raw_pips_to_download),
                           logger=LOG,
                           header=("Downloading %s python dependencies "
                                   "(attempt %s)" % (how_many, attempt)))
        failed = False
        try:
            self._try_download_dependencies(attempt, pips_to_download,
                                            pip_download_dir,
                                            pip_cache_dir, pip_build_dir)
            pip_failures = []
        except exc.ProcessExecutionError as e:
            LOG.exception("Failed downloading python dependencies")
            pip_failures.append(e)
            failed = True
        if not failed:
            break
    if pip_failures:
        raise pip_failures[-1]
    # Move the fetched files into the shared download directory.
    for filename in sh.listdir(pip_download_dir, files_only=True):
        sh.move(filename, self.download_dir)
    return sh.listdir(self.download_dir, files_only=True)
def _uninstall_dirs(self):
    """Remove directories created during install, honoring ``keep_old``.

    When the ``keep_old`` option is set, any recorded download location
    (and its parent directories) is excluded from removal so cached
    downloads survive an uninstall.
    """
    dirs_made = self.tracereader.dirs_made()
    if dirs_made:
        dirs_made = [sh.abspth(d) for d in dirs_made]
        if self.get_option('keep_old', False):
            download_places = [path_location[0]
                               for path_location in self.tracereader.download_locations()]
            if download_places:
                # Fixed typo: "there children" -> "their children".
                utils.log_iterable(download_places, logger=LOG,
                                   header="Keeping %s download directories (and their children directories)" % (len(download_places)))
                for download_place in download_places:
                    dirs_made = sh.remove_parents(download_place, dirs_made)
        if dirs_made:
            utils.log_iterable(dirs_made, logger=LOG,
                               header="Removing %s created directories" % (len(dirs_made)))
            for dir_name in dirs_made:
                if sh.isdir(dir_name):
                    sh.deldir(dir_name, run_as_root=True)
                else:
                    LOG.warn("No directory found at %s - skipping",
                             colorizer.quote(dir_name, quote_color='red'))
def _config_adjust_api(self, contents, fn):
    # Legacy-style api config adjustment: forces debug/verbose on, wires
    # the filesystem image store and returns the new file contents.
    params = ghelper.get_shared_params(self.cfg)
    with io.BytesIO(contents) as stream:
        config = cfg.RewritableConfigParser()
        config.readfp(stream)
        img_store_dir = sh.joinpths(self.get_option('component_dir'), 'images')
        config.set('DEFAULT', 'debug', True)
        config.set('DEFAULT', 'verbose', True)
        config.set('DEFAULT', 'default_store', 'file')
        config.set('DEFAULT', 'filesystem_store_datadir', img_store_dir)
        config.set('DEFAULT', 'bind_port', params['endpoints']['public']['port'])
        config.set('DEFAULT', 'sql_connection',
                   dbhelper.fetch_dbdsn(self.cfg, DB_NAME, utf8=True))
        config.remove_option('DEFAULT', 'log_file')
        config.set('paste_deploy', 'flavor', 'keystone')
        LOG.info("Ensuring file system store directory %r exists and is empty." % (img_store_dir))
        # Recreate the image store directory, recording what we made.
        sh.deldir(img_store_dir)
        self.tracewriter.dirs_made(*sh.mkdirslist(img_store_dir))
        return config.stringify(fn)
def download_dependencies(self, clear_cache=False):
    """Download dependencies from `$deps_dir/download-requires`.

    :param clear_cache: clear `$deps_dir/cache` dir (pip can work
        incorrectly when it has a cache)
    """
    # Begin with an empty download directory.
    sh.deldir(self.download_dir)
    sh.mkdir(self.download_dir, recurse=True)
    download_requires_filename = sh.joinpths(self.deps_dir,
                                             "download-requires")
    raw_pips_to_download = self.filter_download_requires()
    # Parse the raw requirement strings, skipping blank ones.
    pips_to_download = [pkg_resources.Requirement.parse(str(p.strip()))
                        for p in raw_pips_to_download if p.strip()]
    sh.write_file(download_requires_filename,
                  "\n".join(str(req) for req in pips_to_download))
    if not pips_to_download:
        return []
    pip_dir = sh.joinpths(self.deps_dir, "pip")
    pip_download_dir = sh.joinpths(pip_dir, "download")
    pip_build_dir = sh.joinpths(pip_dir, "build")
    pip_cache_dir = sh.joinpths(pip_dir, "cache")
    if clear_cache:
        sh.deldir(pip_cache_dir)
    pip_failures = []
    how_many = len(pips_to_download)
    # Retry a bounded number of times; the last failure is re-raised if
    # all attempts fail.
    for attempt in xrange(self.MAX_PIP_DOWNLOAD_ATTEMPTS):
        # NOTE(aababilov): pip has issues with already downloaded files
        sh.deldir(pip_download_dir)
        sh.mkdir(pip_download_dir, recurse=True)
        sh.deldir(pip_build_dir)
        utils.log_iterable(sorted(raw_pips_to_download),
                           logger=LOG,
                           header=("Downloading %s python dependencies "
                                   "(attempt %s)" % (how_many, attempt)))
        failed = False
        try:
            self._try_download_dependencies(attempt, pips_to_download,
                                            pip_download_dir,
                                            pip_cache_dir, pip_build_dir)
            pip_failures = []
        except exc.ProcessExecutionError as e:
            LOG.exception("Failed downloading python dependencies")
            pip_failures.append(e)
            failed = True
        if not failed:
            break
    if pip_failures:
        raise pip_failures[-1]
    # Move the downloaded files into the shared download directory.
    for filename in sh.listdir(pip_download_dir, files_only=True):
        sh.move(filename, self.download_dir)
    return sh.listdir(self.download_dir, files_only=True)
def _config_adjust_api(self, contents, fn):
    # Adjust the glance api config file contents and return the
    # stringified version of the rewritten configuration.
    params = ghelper.get_shared_params(**self.options)
    with io.BytesIO(contents) as stream:
        config = cfg.create_parser(cfg.RewritableConfigParser, self)
        config.readfp(stream)
        img_store_dir = sh.joinpths(self.get_option('component_dir'), 'images')
        config.set('DEFAULT', 'debug', self.get_bool_option('verbose',))
        config.set('DEFAULT', 'verbose', self.get_bool_option('verbose'))
        # Use a local filesystem backed image store.
        config.set('DEFAULT', 'default_store', 'file')
        config.set('DEFAULT', 'filesystem_store_datadir', img_store_dir)
        config.set('DEFAULT', 'bind_port', params['endpoints']['public']['port'])
        # Point at the shared database (utf8 enabled).
        config.set('DEFAULT', 'sql_connection',
                   dbhelper.fetch_dbdsn(dbname=DB_NAME,
                                        utf8=True,
                                        dbtype=self.get_option('db', 'type'),
                                        **utils.merge_dicts(self.get_option('db'),
                                                            dbhelper.get_shared_passwords(self))))
        config.remove_option('DEFAULT', 'log_file')
        config.set('paste_deploy', 'flavor', self.get_option('paste_flavor'))
        LOG.debug("Ensuring file system store directory %r exists and is empty." % (img_store_dir))
        # Recreate the image store empty, recording the directories made.
        sh.deldir(img_store_dir)
        self.tracewriter.dirs_made(*sh.mkdirslist(img_store_dir))
        return config.stringify(fn)
def _write_all_deps_package(self):
    # Build a single meta rpm that "Requires:" every collected dependency
    # package and carries their install/uninstall scriptlets.
    spec_filename = sh.joinpths(
        self.rpmbuild_dir,
        "SPECS",
        "%s.spec" % self.OPENSTACK_DEPS_PACKAGE_NAME)
    # Clean out previous dirs.
    for dirname in (self.rpmbuild_dir,
                    self.deps_repo_dir,
                    self.deps_src_repo_dir):
        sh.deldir(dirname)
        sh.mkdirslist(dirname, tracewriter=self.tracewriter)

    def get_version_release():
        # Version is date based; release is seconds since the epoch so
        # multiple builds on the same day still order correctly.
        right_now = datetime.now()
        components = [
            str(right_now.year),
            str(right_now.month),
            str(right_now.day),
        ]
        return (".".join(components), right_now.strftime("%s"))

    (version, release) = get_version_release()
    spec_content = """Name: %s
Version: %s
Release: %s
License: Apache 2.0
Summary: OpenStack dependencies
BuildArch: noarch

""" % (self.OPENSTACK_DEPS_PACKAGE_NAME, version, release)
    # Collect package definitions from all instances that provide them.
    packages = {}
    for inst in self.instances:
        try:
            for pack in inst.packages:
                packages[pack["name"]] = pack
        except AttributeError:
            pass
    scripts = {}
    # Maps our script names onto the rpm spec scriptlet section names.
    script_map = {
        "pre-install": "%pre",
        "post-install": "%post",
        "pre-uninstall": "%preun",
        "post-uninstall": "%postun",
    }
    for pack_name in sorted(packages.iterkeys()):
        pack = packages[pack_name]
        # Emit a "Requires:" line (with optional version constraint).
        cont = [spec_content, "Requires: ", pack["name"]]
        version = pack.get("version")
        if version:
            cont.append(" ")
            cont.append(version)
        cont.append("\n")
        spec_content = "".join(cont)
        # Accumulate this package's scriptlet commands per section.
        for script_name in script_map.iterkeys():
            try:
                script_list = pack[script_name]
            except (KeyError, ValueError):
                continue
            script_body = scripts.get(script_name, "")
            script_body = "%s\n# %s\n" % (script_body, pack_name)
            for script in script_list:
                try:
                    line = " ".join(
                        sh.shellquote(word) for word in script["cmd"])
                except (KeyError, ValueError):
                    continue
                if script.get("ignore_failure"):
                    ignore = " 2>/dev/null || true"
                else:
                    ignore = ""
                script_body = "".join((script_body, line, ignore, "\n"))
            scripts[script_name] = script_body
    spec_content += "\n%description\n\n"
    # Append the accumulated scriptlet sections (if any were produced).
    for script_name in sorted(script_map.iterkeys()):
        try:
            script_body = scripts[script_name]
        except KeyError:
            pass
        else:
            spec_content = "%s\n%s\n%s\n" % (
                spec_content, script_map[script_name], script_body)
    spec_content += "\n%files\n"
    sh.write_file(spec_filename, spec_content,
                  tracewriter=self.tracewriter)
    cmdline = [
        "rpmbuild", "-ba",
        "--define", "_topdir %s" % self.rpmbuild_dir,
        spec_filename,
    ]
    LOG.info("Building %s RPM" % self.OPENSTACK_DEPS_PACKAGE_NAME)
    sh.execute(cmdline)
def _write_all_deps_package(self):
    # Generate and build a meta rpm that depends on every collected
    # dependency package, embedding their pre/post scriptlets.
    spec_filename = sh.joinpths(
        self.rpmbuild_dir,
        "SPECS",
        "%s.spec" % self.OPENSTACK_DEPS_PACKAGE_NAME)
    # Clean out previous dirs.
    for dirname in (self.rpmbuild_dir,
                    self.deps_repo_dir,
                    self.deps_src_repo_dir):
        sh.deldir(dirname)
        sh.mkdirslist(dirname, tracewriter=self.tracewriter)

    def get_version_release():
        # Date-based version plus an epoch-seconds release so that
        # same-day rebuilds remain ordered.
        right_now = datetime.now()
        components = [
            str(right_now.year),
            str(right_now.month),
            str(right_now.day),
        ]
        return (".".join(components), right_now.strftime("%s"))

    (version, release) = get_version_release()
    spec_content = """Name: %s
Version: %s
Release: %s
License: Apache 2.0
Summary: OpenStack dependencies
BuildArch: noarch

""" % (self.OPENSTACK_DEPS_PACKAGE_NAME, version, release)
    # Gather package definitions from instances that declare them.
    packages = {}
    for inst in self.instances:
        try:
            for pack in inst.packages:
                packages[pack["name"]] = pack
        except AttributeError:
            pass
    scripts = {}
    # Our script names -> rpm spec scriptlet section names.
    script_map = {
        "pre-install": "%pre",
        "post-install": "%post",
        "pre-uninstall": "%preun",
        "post-uninstall": "%postun",
    }
    for pack_name in sorted(packages.iterkeys()):
        pack = packages[pack_name]
        # Add a "Requires:" entry, with a version constraint if given.
        cont = [spec_content, "Requires: ", pack["name"]]
        version = pack.get("version")
        if version:
            cont.append(" ")
            cont.append(version)
        cont.append("\n")
        spec_content = "".join(cont)
        # Collect this package's scriptlet commands per section.
        for script_name in script_map.iterkeys():
            try:
                script_list = pack[script_name]
            except (KeyError, ValueError):
                continue
            script_body = scripts.get(script_name, "")
            script_body = "%s\n# %s\n" % (script_body, pack_name)
            for script in script_list:
                try:
                    line = " ".join(
                        sh.shellquote(word) for word in script["cmd"])
                except (KeyError, ValueError):
                    continue
                if script.get("ignore_failure"):
                    ignore = " 2>/dev/null || true"
                else:
                    ignore = ""
                script_body = "".join((
                    script_body, line, ignore, "\n"))
            scripts[script_name] = script_body
    spec_content += "\n%description\n\n"
    # Write out each accumulated scriptlet section that exists.
    for script_name in sorted(script_map.iterkeys()):
        try:
            script_body = scripts[script_name]
        except KeyError:
            pass
        else:
            spec_content = "%s\n%s\n%s\n" % (
                spec_content, script_map[script_name], script_body)
    spec_content += "\n%files\n"
    sh.write_file(spec_filename, spec_content,
                  tracewriter=self.tracewriter)
    cmdline = [
        "rpmbuild", "-ba",
        "--define", "_topdir %s" % self.rpmbuild_dir,
        spec_filename,
    ]
    LOG.info("Building %s RPM" % self.OPENSTACK_DEPS_PACKAGE_NAME)
    sh.execute(cmdline)