def delete_packages(config, should_delete, index, index_type, dry_run, quiet):
    """Delete artifacts from *index* whose basename matches *should_delete*.

    :param config: app-repo configuration object (provides ``base_directory``)
    :param should_delete: predicate called with each artifact's basename
    :param index: index name to query for artifacts
    :param index_type: index type forwarded to the client
    :param dry_run: if True, only log what would be deleted
    :param quiet: if True, delete without interactive confirmation
    """
    from infi.logging.wrappers import script_logging_context
    from infi.gevent_utils.os import path
    from infi.app_repo.service import get_client
    client = get_client(config)
    show_warning = False
    with script_logging_context(syslog=False, logfile=False, stderr=True):
        artifacts = client.get_artifacts(index, index_type)
        files_to_remove = [filepath for filepath in artifacts
                           if should_delete(path.basename(filepath))]
        for filepath in files_to_remove:
            filepath_relative = path.relpath(filepath, config.base_directory)
            if dry_run:
                logger.info("[dry-run] deleting {}".format(filepath_relative))
                continue
            if not quiet:
                # idiom fix: 'X not in Y' instead of 'not X in Y'
                if raw_input('delete {} [y/N]? '.format(filepath_relative)).lower() not in ('y', 'yes'):
                    continue
            logger.info("deleting {} ".format(filepath_relative))
            show_warning = True
            client.delete_artifact(filepath)
    if show_warning:
        logger.warn("do not forget to rebuild the index(es) after deleting all the packages that you wanted to delete")
def should_delete(filepath):
    """Return True for releases of the package older than the latest version."""
    name = path.basename(filepath)
    if name.startswith(package['name']):
        latest_prefix = '{}-{}-'.format(package['name'], latest_version)
        return not name.startswith(latest_prefix) and not_recent(filepath)
    return False
def _deduce_produce_name(self, dirpath):
    """Return the product name for *dirpath*.

    Prefers the contents of a 'product_name' file inside the directory;
    falls back to title-casing the dash-separated directory basename.
    """
    try:
        with fopen(path.join(dirpath, 'product_name')) as fd:
            return fd.read().strip()
    except Exception:
        # was a bare 'except:' — narrowed so SystemExit/KeyboardInterrupt propagate
        return ' '.join(word.capitalize() for word in path.basename(dirpath).split('-')).strip()
def hard_link_and_override(src, dst):
    """Hard-link *src* to *dst*, replacing an existing regular file.

    When *dst* is a directory, the link is created inside it under the
    basename of *src*.
    """
    if path.isdir(dst):
        link(src, path.join(dst, path.basename(src)))
        return
    if path.isfile(dst):
        remove(dst)
    if not path.exists(dst):
        link(src, dst)
def _rpm():
    """Rebuild the flat rpm package tree from the default yum index."""
    # wipe existing entries before re-linking
    for stale in glob(path.join(config.packages_directory, 'rpm', '*')):
        remove(stale)
    sources = path.join(config.packages_directory, config.webserver.default_index, 'yum', 'linux-*')
    for src in glob(sources):
        # source directories are named linux-<distro>-<version>-<arch>
        _linux, distro, version, arch = path.basename(src).split('-')
        dst = path.join(config.artifacts_directory, 'rpm', distro, version, arch)
        ensure_directory_exists(path.dirname(dst))
        _override_symlink(src, dst)
def _iter_packages(self):
    """Yield a metadata dict for every package directory under base_directory/packages."""
    packages_glob = path.join(self.base_directory, 'packages', '*')
    for package_dirpath in glob(packages_glob):
        yield dict(
            abspath=package_dirpath,
            hidden=self._is_hidden(package_dirpath),
            product_name=self._deduce_produce_name(package_dirpath),
            name=path.basename(package_dirpath),
            release_notes_url=self._deduce_release_notes_url(package_dirpath),
            releases_uri=self._normalize_url(path.join(package_dirpath, 'releases.json')),
        )
def _iter_distributions(self, package, release):
    """Yield one distribution dict per (platform, architecture, extension) leaf of *release*."""
    distributions_glob = path.join(release['abspath'], 'distributions', '*')
    for platform_dir in glob(distributions_glob):
        for arch_dir in glob(path.join(platform_dir, 'architectures', '*')):
            for ext_dir in glob(path.join(arch_dir, 'extensions', '*')):
                candidates = list(glob(path.join(ext_dir, '*')))
                if len(candidates) != 1:
                    # skip malformed leaves instead of failing the whole walk
                    logger.warn("expected only one file under {}, but it is not the case".format(ext_dir))
                    continue
                [filepath] = candidates
                hidden = (self._is_hidden(platform_dir) or
                          self._is_hidden(arch_dir) or
                          self._is_hidden(ext_dir))
                yield dict(platform=path.basename(platform_dir),
                           hidden=hidden,
                           architecture=path.basename(arch_dir),
                           extension=path.basename(ext_dir),
                           filepath=self._normalize_url(filepath))
def _iter_releases(self, package):
    """Yield a dict per release directory under the package's 'releases' folder."""
    from os import stat
    from time import ctime
    for release_dir in glob(path.join(package['abspath'], 'releases', '*')):
        modified = stat(release_dir).st_mtime
        yield dict(version=path.basename(release_dir),
                   hidden=self._is_hidden(release_dir),
                   abspath=release_dir,
                   last_modified=ctime(modified) if modified else '')
def _rpm():
    """Recreate the rpm artifact symlink tree from the default yum index."""
    rpm_dir = path.join(config.packages_directory, 'rpm')
    for entry in glob(path.join(rpm_dir, '*')):
        remove(entry)
    yum_sources = path.join(config.packages_directory,
                            config.webserver.default_index, 'yum', 'linux-*')
    for src in glob(yum_sources):
        # directory names follow linux-<distro>-<version>-<arch>
        parts = path.basename(src).split('-')
        distro, version, arch = parts[1], parts[2], parts[3]
        dst = path.join(config.artifacts_directory, 'rpm', distro, version, arch)
        ensure_directory_exists(path.dirname(dst))
        _override_symlink(src, dst)
def parse_filepath(filepath):
    """:returns: 5-tuple (package_name, package_version, platform_string, architecture, extension)"""
    filename = path.basename(filepath)
    parsed = match(FILEPATH, filename)
    if parsed is None:
        logger.error("failed to parse {}".format(filename))
        raise FilenameParsingFailed(filepath)
    groups = parsed.groupdict()
    # some extensions imply their platform string regardless of what was parsed
    platform = PLATFORM_STRING.get(groups['extension'], groups['platform_string'])
    return translate_filepath((groups['package_name'],
                               groups['package_version'],
                               platform,
                               groups['architecture'],
                               groups['extension']))
def _upload_file(address, port, username, password, index, filepath):
    """Upload *filepath* to the app-repo FTP server into directory *index*.

    :param address: FTP server host
    :param port: FTP server port
    :param username: login user
    :param password: login password
    :param index: remote directory (index name) to upload into
    :param filepath: local path of the file to upload
    """
    from ftplib import FTP
    from infi.gevent_utils.os import path, fopen
    from infi.app_repo.ftpserver import make_ftplib_gevent_friendly
    make_ftplib_gevent_friendly()
    ftp = FTP()
    ftp.connect(address, port)
    ftp.login(username, password)
    try:
        ftp.cwd(index)
        with fopen(filepath) as fd:
            ftp.storbinary("STOR %s" % path.basename(filepath), fd)
    finally:
        # the original leaked the control connection; close it even on failure
        ftp.close()
def hard_link_or_raise_exception(src, dst):
    """Hard-link *src* to *dst* and return the created path.

    Raises FileAlreadyExists when the destination file (or, for a
    directory *dst*, the file inside it named after *src*) already exists.
    """
    if path.isfile(dst):
        raise FileAlreadyExists(dst)
    if path.isdir(dst):
        target = path.join(dst, path.basename(src))
        if path.exists(target):
            raise FileAlreadyExists(target)
        link(src, target)
        return target
    if not path.exists(dst):
        link(src, dst)
        return dst
def upload_file(config, index, filepath):
    """Upload *filepath* to the local app-repo FTP server into directory *index*.

    :param config: app-repo configuration (provides ftpserver credentials/port)
    :param index: remote directory (index name) to upload into
    :param filepath: local path of the file to upload
    """
    from ftplib import FTP
    from infi.gevent_utils.os import path, fopen
    from infi.app_repo.ftpserver import make_ftplib_gevent_friendly
    from infi.gevent_utils.deferred import create_threadpool_executed_func
    make_ftplib_gevent_friendly()
    ftp = FTP()
    ftp.connect('127.0.0.1', config.ftpserver.port)
    ftp.login(config.ftpserver.username, config.ftpserver.password)
    try:
        ftp.cwd(index)
        with fopen(filepath) as fd:
            ftp.storbinary("STOR %s" % path.basename(filepath), fd)
    finally:
        # the original leaked the control connection; close it even on failure
        ftp.close()
def _iter_releases(self, package):
    """Yield a metadata dict per release directory of *package*.

    Each dict carries the version (directory basename), hidden flag,
    absolute path, ISO-formatted modification time/timestamp and the
    release date (read from file, falling back to the mtime).
    """
    from os import stat
    from datetime import date, datetime  # removed unused 'ctime' import
    releases_glob = path.join(package['abspath'], 'releases', '*')
    for release_dir in glob(releases_glob):
        modified = stat(release_dir).st_mtime
        released = self._read_release_date_from_file(release_dir) or modified
        yield dict(
            version=path.basename(release_dir),
            hidden=self._is_hidden(release_dir),
            abspath=release_dir,
            last_modified=datetime.fromtimestamp(modified).isoformat() if modified else '',
            last_modified_timestamp=int(modified) if modified else None,
            release_date=date.fromtimestamp(released).isoformat() if released else '',
        )
def delete_packages(config, should_delete, index, index_type, dry_run, quiet):
    """Delete artifacts from *index* whose basename matches *should_delete*.

    :param config: app-repo configuration object (provides ``base_directory``)
    :param should_delete: predicate called with each artifact's basename
    :param index: index name to query for artifacts
    :param index_type: index type forwarded to the client
    :param dry_run: if True, only log what would be deleted
    :param quiet: if True, delete without interactive confirmation
    """
    from infi.logging.wrappers import script_logging_context
    from infi.gevent_utils.os import path
    from infi.app_repo.service import get_client
    client = get_client(config)
    show_warning = False
    with script_logging_context(syslog=False, logfile=False, stderr=True):
        artifacts = client.get_artifacts(index, index_type)
        files_to_remove = [filepath for filepath in artifacts
                           if should_delete(path.basename(filepath))]
        for filepath in files_to_remove:
            filepath_relative = path.relpath(filepath, config.base_directory)
            if dry_run:
                logger.info("[dry-run] deleting {}".format(filepath_relative))
                continue
            if not quiet:
                # idiom fix: 'X not in Y' instead of 'not X in Y'
                if raw_input('delete {} [y/N]? '.format(filepath_relative)).lower() not in ('y', 'yes'):
                    continue
            logger.info("deleting {} ".format(filepath_relative))
            show_warning = True
            client.delete_artifact(filepath)
    if show_warning:
        logger.warn("do not forget to rebuild the index(es) after deleting all the packages that you wanted to delete")
def test_basename(self):
    """path.basename must compute its result without yielding to gevent."""
    validator = self.switch_validator
    validator.assert_called(0)
    result = path.basename("/a/b/c/a.text")
    self.assertEqual("a.text", result)
    validator.assert_called(0)
def are_you_interested_in_file(self, filepath, platform, arch):
    """Accept python source tarballs: basename starts with 'python-' and path ends with '.tar.gz'."""
    name = path.basename(filepath)
    return name.startswith("python-") and filepath.endswith(".tar.gz")
def _deduce_produce_name(self, dirpath):
    """Return the product name for *dirpath*.

    Prefers the contents of a 'product_name' file inside the directory;
    falls back to title-casing the dash-separated directory basename.
    """
    try:
        with fopen(path.join(dirpath, 'product_name')) as fd:
            return fd.read().strip()
    except Exception:
        # was a bare 'except:' — narrowed so SystemExit/KeyboardInterrupt propagate
        return ' '.join(word.capitalize() for word in path.basename(dirpath).split('-')).strip()
def build_regex_predicate(pattern):
    """Return a predicate that matches a file's basename against *pattern*.

    :param pattern: regular expression string, matched against the start
        of the basename (``re.match`` semantics)
    :returns: callable(filepath) -> match object or None (truthy on match)
    """
    import re
    from infi.gevent_utils.os import path
    # compile once here instead of re-compiling on every predicate call
    compiled = re.compile(pattern)
    return lambda filepath: compiled.match(path.basename(filepath))
def are_you_interested_in_file(self, filepath, platform, arch):
    """True for python source tarballs (basename 'python-*', suffix '.tar.gz')."""
    is_python_package = path.basename(filepath).startswith("python-")
    is_tarball = filepath.endswith(".tar.gz")
    return is_python_package and is_tarball