def restart_after():
    require_restart = [service for service in SERVICES
                       if path.exists(path.join(INIT, '%s.conf' % service))]
    for service in require_restart:
        log_execute_assert_success(['stop', service], allow_to_fail=True)
    yield
    for service in require_restart:
        log_execute_assert_success(['start', service])
def remove_index(config, index_name, async_rpc=False):
    from infi.app_repo.indexers import get_indexers
    from infi.app_repo.utils import log_execute_assert_success
    from infi.app_repo.service import get_client
    assert index_name in config.indexes
    # drop the index from the persisted configuration
    config.indexes = [name for name in config.indexes if name != index_name]
    config.to_disk()
    # wipe the on-disk trees of every indexer that served this index
    for indexer in get_indexers(config, index_name):
        log_execute_assert_success(["rm", "-rf", indexer.base_directory])
    # tell the running service to pick up the updated configuration
    get_client(config).reload_configuration_from_disk(async_rpc=async_rpc)
def restart_after():
    require_restart = [service for service in SERVICES
                       if path.exists(path.join(INIT, '%s.conf' % service))]
    for service in require_restart:
        log_execute_assert_success(['stop', service], allow_to_fail=True)
    yield
    for service in require_restart:
        log_execute_assert_success(['start', service])
def install():  # pragma: no cover
    with restart_after():
        for service, commandline_arguments in SERVICES.items():
            _install_service(service, commandline_arguments)
            log_execute_assert_success(['systemctl', 'enable', '%s.service' % service])
            log_execute_assert_success(['systemctl', 'start', '%s.service' % service])
def restart_after():
    require_restart = [service for service in SERVICES
                       if path.exists(path.join(SYSTEMD, '%s.service' % service))]
    for service in require_restart:
        log_execute_assert_success(['systemctl', 'stop', '%s.service' % service], allow_to_fail=True)
    yield
    for service in require_restart:
        log_execute_assert_success(['systemctl', 'start', '%s.service' % service])
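Both restart_after() variants are generators with a bare yield that install() consumes via `with restart_after():`, so in the full module they are presumably wrapped with contextlib.contextmanager. A minimal sketch of the module-level scaffolding these snippets assume; the SERVICES entries and the directory values are illustrative assumptions, not taken from the real configuration:

from contextlib import contextmanager  # restart_after() would be decorated with @contextmanager
from os import path
from infi.app_repo.utils import log_execute_assert_success

INIT = '/etc/init'                 # upstart job directory (assumed value)
SYSTEMD = '/etc/systemd/system'    # systemd unit directory (assumed value)

# service name -> command-line arguments handed to the installed job (illustrative entries)
SERVICES = {
    'app-repo-webserver': ['webserver'],
    'app-repo-ftp': ['ftp-server'],
}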
def rebuild_index(self):
    packages = []
    # prune empty directories left behind by removed packages
    log_execute_assert_success(['find', self.base_directory, '-type', 'd',
                                '-empty', '-print', '-delete'])
    for package in self._iter_packages():
        releases = []
        # newest release first, using version-aware ordering
        for release in sorted(self._iter_releases(package), reverse=True,
                              key=lambda release: parse_version(release['version'])):
            release['distributions'] = list(self._iter_distributions(package, release))
            if not release['distributions']:
                continue
            releases.append(release)
        write_file(path.join(package['abspath'], 'releases.json'),
                   encode(releases, indent=4, large_object=True))
        latest_release = self._get_latest_release(releases)
        latest_release_txt = path.join(package['abspath'], 'latest_release.txt')
        if latest_release:
            package['latest_version'] = latest_release['version']
            package['latest_version_release_date'] = latest_release['release_date']
            package['installation_instructions'] = self._get_installation_instructions(package, latest_release)
            packages.append(package)
            write_file(latest_release_txt, latest_release['version'])
        elif path.exists(latest_release_txt):
            # no installable release remains, so drop the stale marker file
            remove(latest_release_txt)
    sorted_packages = sorted(packages, key=lambda package: package['product_name'])
    write_file(path.join(self.base_directory, 'packages.json'),
               encode(sorted_packages, indent=4, large_object=True))
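rebuild_index() relies on a few helpers that are not shown here. A minimal, self-contained sketch of what they could look like, inferred only from the call sites above; the real infi.app_repo implementations may differ:

import json
from pkg_resources import parse_version  # assumed origin of the version-aware sort key


def write_file(filepath, contents):
    # stand-in: overwrite the index file with the given text contents
    with open(filepath, 'w') as fd:
        fd.write(contents)


def encode(obj, indent=4, large_object=False):
    # stand-in JSON encoder; large_object is accepted only to match the call sites
    return json.dumps(obj, indent=indent)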
def _get_from_http(self, config, uri):
    log_execute_assert_success(["curl", "http://127.0.0.1:{}/{}".format(config.webserver.port, uri)])
def _delete_repo_metadata(self, dirpath):
    repodata = path.join(dirpath, 'repodata')
    log_execute_assert_success(['rm', '-rf', repodata])
def dpkg_scanpackages(cmdline_arguments):
    return log_execute_assert_success(['dpkg-scanpackages'] + cmdline_arguments).get_stdout()
def gpg(cmdline_arguments):
    return log_execute_assert_success(['gpg'] + cmdline_arguments).get_stdout()
def sign_repomd(dirpath):
    repomd = path.join(dirpath, "repodata", "repomd.xml")
    if path.exists("%s.asc" % repomd):
        remove("%s.asc" % repomd)
    log_execute_assert_success(["gpg", "-a", "--detach-sign", repomd])
def apt_ftparchive(cmdline_arguments):
    return log_execute_assert_success(['apt-ftparchive'] + cmdline_arguments).get_stdout()
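The gpg() wrapper above is the kind of helper that would be used to re-sign a Debian Release file after apt-ftparchive regenerates it. A hedged illustration of that step; the function name and file layout are assumptions, not the project's own flow:

def sign_release_file(release_path):
    # write a detached, ASCII-armored signature next to the Release file
    if path.exists('%s.gpg' % release_path):
        remove('%s.gpg' % release_path)
    gpg(['-abs', '-o', '%s.gpg' % release_path, release_path])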
def _extract_update(self, dirpath, filepath):
    log_execute_assert_success(["unzip", "-qq", "-o", filepath, "-d", dirpath])
def install():  # pragma: no cover
    with restart_after():
        for service, commandline_arguments in SERVICES.items():
            _install_upstart_job(service, commandline_arguments)
        log_execute_assert_success(['initctl', 'reload-configuration'])
def createrepo_update(dirpath):
    log_execute_assert_success(CREATEREPO_ARGUMENTS + ["--update", "--skip-stat", dirpath])
def _get_from_http(self, config, uri):
    log_execute_assert_success(["curl", "http://127.0.0.1:{}/{}".format(config.webserver.port, uri)])
def createrepo_update(dirpath, cachedir=None):
    arguments = CREATEREPO_ARGUMENTS + ['--update', '--skip-stat', dirpath]
    if cachedir:
        arguments += ['--cachedir', cachedir]
    log_execute_assert_success(arguments)
def createrepo(dirpath, cachedir=None):
    arguments = CREATEREPO_ARGUMENTS + [dirpath]
    if cachedir:
        arguments += ['--cachedir', cachedir]
    log_execute_assert_success(arguments)
def _delete_repo_metadata(self, dirpath):
    repodata = path.join(dirpath, "repodata")
    log_execute_assert_success(["rm", "-rf", repodata])
def createrepo(dirpath):
    log_execute_assert_success(CREATEREPO_ARGUMENTS + [dirpath])
def sign_repomd(dirpath):
    repomd = path.join(dirpath, 'repodata', 'repomd.xml')
    if path.exists('%s.asc' % repomd):
        remove('%s.asc' % repomd)
    log_execute_assert_success(['gpg', '-a', '--detach-sign', repomd])
def createrepo_update(dirpath):
    log_execute_assert_success(CREATEREPO_ARGUMENTS + ['--update', '--skip-stat', dirpath])
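For a yum repository, the createrepo helpers and sign_repomd() compose naturally: regenerate repodata/ and then re-sign the resulting repomd.xml. A hedged illustration of that sequence; the wrapper name is hypothetical:

def refresh_yum_repo(dirpath):
    createrepo_update(dirpath)  # rebuild repodata/ in place, reusing existing metadata
    sign_repomd(dirpath)        # re-create the detached signature for repodata/repomd.xml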
def _extract_update(self, dirpath, filepath):
    log_execute_assert_success(["unzip", "-qq", "-o", filepath, "-d", dirpath])