def _rpm():
    for item in glob(path.join(config.packages_directory, 'rpm', '*')):
        remove(item)
    for src in glob(path.join(config.packages_directory,
                              config.webserver.default_index, 'yum', 'linux-*')):
        linux, distro, version, arch = path.basename(src).split('-')
        dst = path.join(config.artifacts_directory, 'rpm', distro, version, arch)
        ensure_directory_exists(path.dirname(dst))
        _override_symlink(src, dst)
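# _override_symlink is not shown in this snippet. A minimal sketch of such a
# helper, assuming it replaces any existing link at dst (hypothetical
# implementation, not the project's own):
from os import remove, symlink
from os.path import lexists

def _override_symlink(src, dst):
    if lexists(dst):  # drop a stale symlink or file before relinking
        remove(dst)
    symlink(src, dst)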
def iter_files(self):
    for platform, architectures in KNOWN_PLATFORMS.items():
        for arch in architectures:
            dirpath = path.join(self.base_directory, '%s-%s' % (platform, arch))
            for filepath in glob(path.join(dirpath, '*.rpm')):
                yield filepath
def rebuild_index(self):
    for package_dir in glob(path.join(self.base_directory, '*')):
        if not path.isdir(package_dir) or package_dir.endswith('updates'):
            continue
        latest_zip = self._get_latest_update_file_in_directory(package_dir)
        if latest_zip:
            self._extract_update(package_dir, latest_zip)
def iter_files(self):
    ensure_directory_exists(self.base_directory)
    for distribution_name, distribution_dict in KNOWN_DISTRIBUTIONS.items():
        for version, architectures in distribution_dict.items():
            for arch in architectures:
                dirpath = self.deduce_dirname(distribution_name, version, arch)
                for filepath in glob(path.join(dirpath, '*.deb')):
                    yield filepath
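# deduce_dirname is not shown here. Assuming a flat "<name>-<version>-<arch>"
# layout analogous to the RPM variants above, a plausible sketch would be
# (hypothetical directory scheme):
from os import path

def deduce_dirname(self, distribution_name, version, arch):
    # e.g. <base_directory>/ubuntu-trusty-amd64
    return path.join(self.base_directory,
                     '%s-%s-%s' % (distribution_name, version, arch))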
def _iter_packages(self):
    for package_dirpath in glob(path.join(self.base_directory, 'packages', '*')):
        yield dict(
            abspath=package_dirpath,
            hidden=self._is_hidden(package_dirpath),
            product_name=self._deduce_produce_name(package_dirpath),
            name=path.basename(package_dirpath),
            release_notes_url=self._deduce_release_notes_url(package_dirpath),
            releases_uri=self._normalize_url(path.join(package_dirpath, 'releases.json')),
        )
def _iter_distributions(self, package, release):
    for distribution_dirpath in glob(path.join(release['abspath'], 'distributions', '*')):
        for arch_dirpath in glob(path.join(distribution_dirpath, 'architectures', '*')):
            for extension_dirpath in glob(path.join(arch_dirpath, 'extensions', '*')):
                try:
                    # each extension directory is expected to hold exactly one file
                    [filepath] = list(glob(path.join(extension_dirpath, '*')))
                except ValueError:
                    logger.warning("expected exactly one file under {}".format(extension_dirpath))
                    continue
                distribution = dict(
                    platform=path.basename(distribution_dirpath),
                    hidden=(self._is_hidden(distribution_dirpath) or
                            self._is_hidden(arch_dirpath) or
                            self._is_hidden(extension_dirpath)),
                    architecture=path.basename(arch_dirpath),
                    extension=path.basename(extension_dirpath),
                    filepath=self._normalize_url(filepath),
                )
                yield distribution
def _iter_releases(self, package):
    from os import stat
    from time import ctime
    for version_dirpath in glob(path.join(package['abspath'], 'releases', '*')):
        mod_time = stat(version_dirpath).st_mtime
        release = dict(
            version=path.basename(version_dirpath),
            hidden=self._is_hidden(version_dirpath),
            abspath=version_dirpath,
            last_modified=ctime(mod_time) if mod_time else '',
        )
        yield release
def initialise(self):
    ensure_directory_exists(self.base_directory)
    for platform, architectures in KNOWN_PLATFORMS.items():
        for arch in architectures:
            dirpath = path.join(self.base_directory, "%s-%s" % (platform, arch))
            ensure_directory_exists(path.join(dirpath, "repodata"))
            gpg_key = path.join(self.config.packages_directory, "gpg.key")
            hard_link_and_override(gpg_key, path.join(dirpath, "repodata", "repomd.xml.key"))
    for dirpath in glob(path.join(self.base_directory, "*")):
        if not self._is_repodata_exists(dirpath):
            createrepo(dirpath)
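# createrepo and hard_link_and_override are helpers not shown above. Assuming
# createrepo shells out to the standard `createrepo` tool and the link helper
# replaces an existing target, minimal sketches could look like this
# (hypothetical implementations):
import subprocess
from os import link, remove
from os.path import exists

def createrepo(dirpath):
    # generate yum repository metadata under <dirpath>/repodata
    subprocess.check_call(['createrepo', dirpath])

def hard_link_and_override(src, dst):
    if exists(dst):  # replace any previous link/file at dst
        remove(dst)
    link(src, dst)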
def _get_latest_update_file_in_directory(self, dirpath):
    from pkg_resources import parse_version
    latest_update_file, latest_version = None, parse_version('0')
    update_files = [filepath for filepath in glob(path.join(dirpath, '*.zip'))
                    if ARCH in filepath]
    for filepath in update_files:
        package_name, package_version, platform_string, architecture, extension = \
            parse_filepath(filepath)
        package_version = parse_version(package_version)
        if package_version > latest_version:
            latest_version = package_version
            latest_update_file = filepath
    return latest_update_file
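# parse_filepath is assumed to split a name such as
# "myapp-1.2.3-linux-x86_64.zip" into a 5-tuple; a rough sketch under that
# assumption (hypothetical; the real parser is not shown):
from os import path

def parse_filepath(filepath):
    stem, extension = path.basename(filepath).rsplit('.', 1)
    package_name, package_version, platform_string, architecture = stem.split('-')
    return package_name, package_version, platform_string, architecture, extension

# Note: pkg_resources.parse_version is deprecated in recent setuptools;
# packaging.version.parse is the modern equivalent.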
def initialise(self):
    from os import path
    ensure_directory_exists(self.base_directory)
    self.cachedir = path.join(self.base_directory, 'cachedir')
    ensure_directory_exists(self.cachedir)
    for platform, architectures in KNOWN_PLATFORMS.items():
        for arch in architectures:
            dirpath = path.join(self.base_directory, '%s-%s' % (platform, arch))
            ensure_directory_exists(path.join(dirpath, 'repodata'))
            gpg_key = path.join(self.config.packages_directory, 'gpg.key')
            hard_link_and_override(gpg_key, path.join(dirpath, 'repodata', 'repomd.xml.key'))
    for dirpath in glob(path.join(self.base_directory, '*')):
        if not self._is_repodata_exists(dirpath):
            createrepo(dirpath)
def _iter_releases(self, package):
    from os import stat
    from datetime import date, datetime
    for version_dirpath in glob(path.join(package['abspath'], 'releases', '*')):
        mod_time = stat(version_dirpath).st_mtime
        release_date = self._read_release_date_from_file(version_dirpath) or mod_time
        release = dict(
            version=path.basename(version_dirpath),
            hidden=self._is_hidden(version_dirpath),
            abspath=version_dirpath,
            last_modified=datetime.fromtimestamp(mod_time).isoformat() if mod_time else '',
            last_modified_timestamp=int(mod_time) if mod_time else None,
            release_date=date.fromtimestamp(release_date).isoformat() if release_date else '',
        )
        yield release
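# _read_release_date_from_file is assumed to read an optional per-release
# timestamp stored next to the artifacts, falling back to None so the caller
# can use the directory mtime. A minimal sketch under that assumption
# (hypothetical file name and format):
from os import path

def _read_release_date_from_file(self, version_dirpath):
    release_date_file = path.join(version_dirpath, 'release_date')  # assumed name
    try:
        with open(release_date_file) as fd:
            return float(fd.read().strip())  # epoch seconds, to match st_mtime
    except (IOError, OSError, ValueError):
        return None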
def rebuild_index(self):
    for dirpath in glob(path.join(self.base_directory, '*')):
        self._delete_repo_metadata(dirpath)
        self._update_index(dirpath)
def process_incoming(self, index):
    assert index in self.config.indexes
    for filepath in glob(path.join(self.config.incoming_directory, index, '*')):
        self._try_except_finally_on_filepath(process_filepath_by_name, index, filepath)
def iter_files(self):
    for filepath in glob(path.join(self.base_directory, '*', '*.zip')):
        yield filepath
def iter_files(self):
    return glob(path.join(self.base_directory, '*', '*.tar.gz'))