def generate_release_file_for_specific_distribution_and_version(
        self, distribution, codename, force=True):
    """Regenerate and re-sign the apt ``Release``/``InRelease`` files for one
    distribution/codename.

    When a ``Release`` file already exists and *force* is falsy, this is a
    no-op.
    """
    dirpath = path.join(self.base_directory, distribution, 'dists', codename)
    in_release = path.join(dirpath, 'InRelease')
    release = path.join(dirpath, 'Release')
    release_gpg = release + '.gpg'
    if path.exists(release) and not force:
        return
    # write release file
    contents = apt_ftparchive(['release', dirpath])
    with fopen(release, 'w') as fd:
        available_archs = sorted(KNOWN_DISTRIBUTIONS[distribution][codename])
        fd.write(RELEASE_FILE_HEADER.format(
            codename, " ".join(available_archs), contents))
    # delete old release signature files
    for stale_signature in (in_release, release_gpg):
        if path.exists(stale_signature):
            remove(stale_signature)
    # sign release file; trusty doesn't support SHA256 for InRelease
    digest_algo = 'SHA1' if codename == "trusty" else 'SHA256'
    gpg(['--clearsign', '--digest-algo', digest_algo,
         '-o', in_release, release])
    gpg(['-abs', '-o', release_gpg, release])
def _rpm():
    """Rebuild rpm artifact symlinks from the yum package index."""
    # Clear out everything previously staged under the rpm directory.
    for stale in glob(path.join(config.packages_directory, 'rpm', '*')):
        remove(stale)
    pattern = path.join(config.packages_directory,
                        config.webserver.default_index, 'yum', 'linux-*')
    for src in glob(pattern):
        # Package index entries are named linux-<distro>-<version>-<arch>.
        _linux, distro, version, arch = path.basename(src).split('-')
        dst = path.join(config.artifacts_directory, 'rpm',
                        distro, version, arch)
        ensure_directory_exists(path.dirname(dst))
        _override_symlink(src, dst)
def hard_link_and_override(src, dst):
    """Hard-link *src* to *dst*, replacing whatever is already there.

    :param src: existing file to link from
    :param dst: link target; may be a non-existent path, an existing file
                (which is replaced), or a directory (the link is created
                inside it under src's basename, replacing any existing file)

    Fix: previously, when *dst* was a directory that already contained a
    file named after *src*, ``link`` raised ``FileExistsError`` instead of
    overriding — inconsistent with the function's name and the plain-file
    branch.
    """
    if path.isdir(dst):
        # Retarget inside the directory, then fall through to the
        # ordinary override-or-create logic below.
        dst = path.join(dst, path.basename(src))
    if path.isfile(dst):
        remove(dst)
    link(src, dst)
def _try_except_finally_on_filepath(self, func, index, filepath, *args, **kwargs): # TODO rejection needs a test try: func(self.config, index, filepath, *args, **kwargs) except: logger.exception("processing source {} failed, moving it to {}".format(filepath, self.config.rejected_directory)) try: hard_link_or_raise_exception(filepath, path.join(self.config.rejected_directory, index)) except: pass finally: remove(filepath)
def _rpm():
    """Recreate the rpm artifact symlink tree from the yum index."""
    rpm_dir = path.join(config.packages_directory, 'rpm')
    # Start from a clean slate.
    for entry in glob(path.join(rpm_dir, '*')):
        remove(entry)
    yum_glob = path.join(config.packages_directory,
                         config.webserver.default_index, 'yum', 'linux-*')
    for src in glob(yum_glob):
        # Entries follow the linux-<distro>-<version>-<arch> convention.
        _prefix, distro, version, arch = path.basename(src).split('-')
        dst = path.join(config.artifacts_directory, 'rpm',
                        distro, version, arch)
        ensure_directory_exists(path.dirname(dst))
        _override_symlink(src, dst)
def _try_except_finally_on_filepath( self, func, index, filepath, *args, **kwargs): # TODO rejection needs a test try: func(self.config, index, filepath, *args, **kwargs) except: logger.exception( "processing source {} failed, moving it to {}".format( filepath, self.config.rejected_directory)) try: hard_link_or_raise_exception( filepath, path.join(self.config.rejected_directory, index)) except: pass finally: remove(filepath)
def sign_rpm_package(filepath):
    """GPG-sign an rpm package in place.

    ``rpm --addsign`` rewrites its input file, so the signing is done on a
    scratch copy whose signed bytes are then copied back over the original;
    the original path keeps its inode and is never left half-written.

    :param filepath: path to the .rpm file to sign

    Fix: removed the dead local ``command`` (built but never executed) and
    renamed the inner parameter so it no longer shadows the outer *filepath*.
    """
    def _rpm_addsign_rewrites_the_file(target):
        from os import environ
        logger.info("Signing {!r}".format(target))
        env = environ.copy()
        env['HOME'] = env.get('HOME', "/root")
        env['GNUPGHOME'] = path.join(env.get('HOME', "/root"), ".gnupg")
        # NOTE(review): target is interpolated into a shell command string
        # (shell=True is needed for the echo pipe / setsid); callers must
        # not pass untrusted paths — shell-injection risk.
        log_execute_assert_success(
            'echo | setsid rpm --addsign {}'.format(target),
            env=env, shell=True)

    temp_filepath = filepath + '.signed'
    copyfile(filepath, temp_filepath)
    try:
        _rpm_addsign_rewrites_the_file(temp_filepath)
        with open(temp_filepath, 'rb') as src:
            with open(filepath, 'wb') as dst:
                copyfileobj(src, dst)
    finally:
        # The scratch copy is always cleaned up, even on signing failure.
        remove(temp_filepath)
def rebuild_index(self):
    """Rebuild per-package ``releases.json``/``latest_release.txt`` files and
    the top-level ``packages.json`` summary under ``self.base_directory``.
    """
    packages = []
    for package in self._iter_packages():
        releases = []
        # Newest first, ordered by the parsed version string.
        for release in sorted(self._iter_releases(package), reverse=True, key=lambda release: parse_version(release['version'])):
            release['distributions'] = list(self._iter_distributions(package, release))
            # Releases with no distributions are excluded from the index.
            if not release['distributions']:
                continue
            releases.append(release)
        write_file(path.join(package['abspath'], 'releases.json'), encode(releases, indent=4, large_object=True))
        latest_release = self._get_latest_release(releases)
        latest_release_txt = path.join(package['abspath'], 'latest_release.txt')
        if latest_release:
            package['latest_version'] = latest_release['version']
            package['installation_instructions'] = self._get_installation_instructions(package, latest_release)
            # Only packages with a usable latest release appear in packages.json.
            packages.append(package)
            write_file(latest_release_txt, latest_release['version'])
        elif path.exists(latest_release_txt):
            # No usable release any more: drop the stale marker file.
            remove(latest_release_txt)
    sorted_packages = sorted(packages, key=lambda package: package['product_name'])
    write_file(path.join(self.base_directory, 'packages.json'), encode(sorted_packages, indent=4, large_object=True))
def rebuild_index(self):
    """Rebuild per-package ``releases.json``/``latest_release.txt`` files and
    the top-level ``packages.json`` summary under ``self.base_directory``,
    pruning empty directories first.
    """
    packages = []
    # Remove empty directories left behind by deleted artifacts before scanning.
    log_execute_assert_success([
        'find', self.base_directory, '-type', 'd', '-empty', '-print',
        '-delete'
    ])
    for package in self._iter_packages():
        releases = []
        # Newest first, ordered by the parsed version string.
        for release in sorted(
                self._iter_releases(package),
                reverse=True,
                key=lambda release: parse_version(release['version'])):
            release['distributions'] = list(
                self._iter_distributions(package, release))
            # Releases with no distributions are excluded from the index.
            if not release['distributions']:
                continue
            releases.append(release)
        write_file(path.join(package['abspath'], 'releases.json'),
                   encode(releases, indent=4, large_object=True))
        latest_release = self._get_latest_release(releases)
        latest_release_txt = path.join(package['abspath'],
                                       'latest_release.txt')
        if latest_release:
            package['latest_version'] = latest_release['version']
            package['latest_version_release_date'] = latest_release[
                'release_date']
            package[
                'installation_instructions'] = self._get_installation_instructions(
                    package, latest_release)
            # Only packages with a usable latest release appear in packages.json.
            packages.append(package)
            write_file(latest_release_txt, latest_release['version'])
        elif path.exists(latest_release_txt):
            # No usable release any more: drop the stale marker file.
            remove(latest_release_txt)
    sorted_packages = sorted(packages,
                             key=lambda package: package['product_name'])
    write_file(path.join(self.base_directory, 'packages.json'),
               encode(sorted_packages, indent=4, large_object=True))
def generate_release_file_for_specific_distribution_and_version(self, distribution, codename, force=True):
    """Write and re-sign the apt ``Release``/``InRelease`` pair for the
    given distribution and codename; skipped when ``Release`` already
    exists and *force* is falsy.
    """
    dirpath = path.join(self.base_directory, distribution, 'dists', codename)
    in_release = path.join(dirpath, 'InRelease')
    release = path.join(dirpath, 'Release')
    release_gpg = release + '.gpg'
    if path.exists(release) and not force:
        return
    # write release file
    contents = apt_ftparchive(['release', dirpath])
    with fopen(release, 'w') as fd:
        archs = " ".join(sorted(KNOWN_DISTRIBUTIONS[distribution][codename]))
        fd.write(RELEASE_FILE_HEADER.format(codename, archs, contents))
    # delete old release signature files
    for old_signature in (in_release, release_gpg):
        if path.exists(old_signature):
            remove(old_signature)
    # sign release file
    if codename == "trusty":
        # trusty doesn't support SHA256 for InRelease
        digest = 'SHA1'
    else:
        digest = 'SHA256'
    gpg(['--clearsign', '--digest-algo', digest, '-o', in_release, release])
    gpg(['-abs', '-o', release_gpg, release])
def delete_artifact(self, filepath):
    """Delete *filepath*; silently a no-op when the file is absent."""
    if not path.exists(filepath):
        return
    remove(filepath)
def _override_symlink(src, dst): if path.exists(dst): assert path.islink(dst) remove(dst) symlink(src, dst)
def _override_updates_symlink(self, src, dst): if path.exists(dst): assert path.islink(dst) remove(dst) symlink(src, dst)
def sign_repomd(dirpath):
    """Detach-sign ``repodata/repomd.xml`` under *dirpath*, replacing any
    previous ``.asc`` signature."""
    repomd = path.join(dirpath, 'repodata', 'repomd.xml')
    signature = '%s.asc' % repomd
    if path.exists(signature):
        remove(signature)
    log_execute_assert_success(['gpg', '-a', '--detach-sign', repomd])
def delete(self):
    """Remove the backing file if it still exists; otherwise do nothing."""
    if not path.exists(self.filepath):
        return
    remove(self.filepath)
def sign_repomd(dirpath):
    """Create a fresh detached GPG signature for repodata/repomd.xml."""
    repomd = path.join(dirpath, "repodata", "repomd.xml")
    asc = "%s.asc" % repomd
    # gpg refuses to overwrite an existing signature; remove it first.
    if path.exists(asc):
        remove(asc)
    log_execute_assert_success(["gpg", "-a", "--detach-sign", repomd])
def delete_old_release_signature_files():
    # Drop stale signature artifacts before re-signing. `in_release` and
    # `release` are free variables — presumably closed over from an
    # enclosing release-generation scope (TODO confirm against caller).
    for stale in (in_release, '%s.gpg' % release):
        if path.exists(stale):
            remove(stale)