def __init__(self, working_dir, build_system, vcs=None, ensure_latest=True,
             verbose=False):
    """Create a BuildProcess.

    Args:
        working_dir (str): Directory containing the package to build.
        build_system (`BuildSystem`): Build system used to build the package.
        vcs (`ReleaseVCS`): Version control system to use for the release
            process; when None the package can only be built, not released.
        ensure_latest (bool): When True, refuse the release process if a
            newer versioned package has already been released.
        verbose (bool): Enable verbose output.
    """
    self.working_dir = working_dir
    self.build_system = build_system
    self.vcs = vcs
    self.ensure_latest = ensure_latest
    self.verbose = verbose

    # A VCS rooted at a different path would release the wrong source tree,
    # so refuse outright.
    if vcs and vcs.path != working_dir:
        raise BuildProcessError(
            "Build process was instantiated with a mismatched VCS instance")

    self.debug_print = config.debug_printer("package_release")
    self.package = get_developer_package(working_dir)

    names = self.package.config.release_hooks or []
    self.hooks = create_release_hooks(names, working_dir)
    self.build_path = os.path.join(self.working_dir,
                                   self.package.config.build_directory)
def __init__(self, package_requests, package_paths, package_filter=None,
             timestamp=0, callback=None, building=False, verbosity=False,
             buf=None, package_load_callback=None, caching=True):
    """Create a Resolver.

    Args:
        package_requests: List of Requirement objects representing the
            request.
        package_paths: List of paths to search for pkgs.
        package_filter (`PackageFilterList`): Package filter.
        timestamp (int): When nonzero, a rule excluding packages released
            after this time is merged into the package filter.
        callback: See `Solver`.
        building: True if we're resolving for a build.
        verbosity: Verbosity setting (stored for later use).
        buf: Output buffer (stored for later use).
        package_load_callback: If not None, this callable will be called
            prior to each package being loaded. It is passed a single
            `Package` object.
        caching: If True, cache(s) may be used to speed the resolve. If
            False, caches will not be used.
    """
    self.package_requests = package_requests
    self.package_paths = package_paths
    self.timestamp = timestamp
    self.callback = callback
    self.package_load_callback = package_load_callback
    self.building = building
    self.verbosity = verbosity
    self.caching = caching
    self.buf = buf

    # Hash of the filter *before* the timestamp rule below is merged in;
    # this feeds into the memcached key.
    self.package_filter_hash = package_filter.hash if package_filter else ''

    # Combine timestamp and package filter into a single filter.
    if not self.timestamp:
        self.package_filter = package_filter
    else:
        combined = package_filter.copy() if package_filter \
            else PackageFilterList()
        combined.add_exclusion(TimestampRule.after(self.timestamp))
        self.package_filter = combined

    self.status_ = ResolverStatus.pending
    self.resolved_packages_ = None
    self.failure_description = None
    self.graph_ = None
    self.from_cache = False
    self.memcached_servers = \
        config.memcached_uri if config.resolve_caching else None
    self.solve_time = 0.0   # time spent solving
    self.load_time = 0.0    # time spent loading package resources
    self._print = config.debug_printer("resolve_memcache")
def __init__(self, working_dir, build_system, package=None, vcs=None,
             ensure_latest=True, skip_repo_errors=False,
             ignore_existing_tag=False, verbose=False):
    """Create a BuildProcess.

    Args:
        working_dir (str): Directory containing the package to build.
        build_system (`BuildSystem`): Build system used to build the package.
        package (`DeveloperPackage`): Package to build; when None it is
            loaded from `working_dir`.
        vcs (`ReleaseVCS`): Version control system to use for the release
            process. If None, the package will only be built, not released.
        ensure_latest (bool): If True, refuse the release process when a
            newer versioned package has already been released.
        skip_repo_errors (bool): If True, proceed with the release even when
            errors occur. BE CAREFUL using this option, it is here in case a
            package needs to be released urgently even though there is some
            problem with reading or writing the repository.
        ignore_existing_tag (bool): Perform the release even if the
            repository is already tagged at the current version. If the
            config setting plugins.release_vcs.check_tag is False, this has
            no effect.
        verbose (bool): Enable verbose output.
    """
    self.working_dir = working_dir
    self.build_system = build_system
    self.vcs = vcs
    self.ensure_latest = ensure_latest
    self.skip_repo_errors = skip_repo_errors
    self.ignore_existing_tag = ignore_existing_tag
    self.verbose = verbose

    # Releasing from a VCS rooted elsewhere would operate on the wrong
    # source tree - refuse outright.
    if vcs and vcs.pkg_root != working_dir:
        raise BuildProcessError(
            "Build process was instantiated with a mismatched VCS instance")

    self.debug_print = config.debug_printer("package_release")
    self.package = package or get_developer_package(working_dir)

    names = self.package.config.release_hooks or []
    self.hooks = create_release_hooks(names, working_dir)
    self.build_path = os.path.join(self.working_dir,
                                   self.package.config.build_directory)
def __init__(self, working_dir, build_system, package=None, vcs=None,
             ensure_latest=True, skip_repo_errors=False,
             ignore_existing_tag=False, verbose=False):
    """Create a BuildProcess.

    Args:
        working_dir (str): Directory containing the package to build.
        build_system (`BuildSystem`): Build system used to build the package.
        package (`DeveloperPackage`): Package to build; falls back to loading
            the developer package found in `working_dir`.
        vcs (`ReleaseVCS`): Version control system to use for the release
            process. If None, the package will only be built, not released.
        ensure_latest (bool): If True, do not allow the release process to
            occur if a newer versioned package is already released.
        skip_repo_errors (bool): If True, proceed with the release even when
            errors occur. BE CAREFUL using this option, it is here in case a
            package needs to be released urgently even though there is some
            problem with reading or writing the repository.
        ignore_existing_tag (bool): Perform the release even if the
            repository is already tagged at the current version. If the
            config setting plugins.release_vcs.check_tag is False, this has
            no effect.
        verbose (bool): Enable verbose output.
    """
    self.verbose = verbose
    self.working_dir = working_dir
    self.build_system = build_system
    self.vcs = vcs
    self.ensure_latest = ensure_latest
    self.skip_repo_errors = skip_repo_errors
    self.ignore_existing_tag = ignore_existing_tag

    # Sanity check: the VCS instance must be rooted at the directory we are
    # building from, otherwise the release would use the wrong repository.
    if vcs and vcs.pkg_root != working_dir:
        msg = "Build process was instantiated with a mismatched VCS instance"
        raise BuildProcessError(msg)

    self.debug_print = config.debug_printer("package_release")
    self.package = package or get_developer_package(working_dir)
    self.hooks = create_release_hooks(
        self.package.config.release_hooks or [], working_dir)
    self.build_path = os.path.join(
        self.working_dir, self.package.config.build_directory)
def __init__(self, context, package_requests, package_paths, package_filter=None,
             package_orderers=None, timestamp=0, callback=None, building=False,
             verbosity=False, buf=None, package_load_callback=None,
             caching=True, suppress_passive=False, print_stats=False):
    """Create a Resolver.

    Args:
        context (`ResolvedContext`): Context this resolver belongs to.
        package_requests: List of Requirement objects representing the
            request.
        package_paths: List of paths to search for pkgs.
        package_filter (`PackageFilterList`): Package filter.
        package_orderers (list of `PackageOrder`): Custom package ordering.
        timestamp (int): When nonzero, a rule excluding packages released
            after this time is merged into the package filter.
        callback: See `Solver`.
        building: True if we're resolving for a build.
        verbosity: Verbosity setting (stored for later use).
        buf: Output buffer (stored for later use).
        package_load_callback: If not None, this callable will be called
            prior to each package being loaded. It is passed a single
            `Package` object.
        caching: If True, cache(s) may be used to speed the resolve. If
            False, caches will not be used.
        suppress_passive (bool): Stored for later use.
        print_stats (bool): If true, print advanced solver stats at the end.
    """
    self.context = context
    self.package_requests = package_requests
    self.package_paths = package_paths
    self.timestamp = timestamp
    self.callback = callback
    self.package_orderers = package_orderers
    self.package_load_callback = package_load_callback
    self.building = building
    self.verbosity = verbosity
    self.caching = caching
    self.buf = buf
    self.suppress_passive = suppress_passive
    self.print_stats = print_stats

    # Hash of the custom orderers - part of the memcached key.
    if not package_orderers:
        self.package_orderers_hash = ''
    else:
        combined = ''.join(o.sha1 for o in package_orderers)
        self.package_orderers_hash = sha1(combined.encode("utf8")).hexdigest()

    # Hash of the filter *before* the timestamp rule below is merged in -
    # also part of the memcached key.
    self.package_filter_hash = package_filter.sha1 if package_filter else ''

    # Combine timestamp and package filter into a single filter.
    if not self.timestamp:
        self.package_filter = package_filter
    else:
        combined_filter = package_filter.copy() if package_filter \
            else PackageFilterList()
        combined_filter.add_exclusion(TimestampRule.after(self.timestamp))
        self.package_filter = combined_filter

    self.status_ = ResolverStatus.pending
    self.resolved_packages_ = None
    self.resolved_ephemerals_ = None
    self.failure_description = None
    self.graph_ = None
    self.from_cache = False
    self.memcached_servers = \
        config.memcached_uri if config.resolve_caching else None
    self.solve_time = 0.0   # time spent solving
    self.load_time = 0.0    # time spent loading package resources
    self._print = config.debug_printer("resolve_memcache")
from rez.utils.scope import ScopeContext
from rez.utils.sourcecode import SourceCode, early, late, include
from rez.utils.filesystem import TempDirs
from rez.utils.data_utils import ModifyList
from rez.exceptions import ResourceError, InvalidPackageError
from rez.utils.memcached import memcached
from rez.utils.system import add_sys_paths
from rez.utils import py23
from rez.config import config
from rez.vendor.atomicwrites import atomic_write
from rez.vendor.enum import Enum
from rez.vendor.six.six.moves import StringIO
from rez.vendor import yaml


# Manager for temp dirs used by this module; the "rez_write_" prefix suggests
# they stage file writes - TODO confirm against the writer functions.
tmpdir_manager = TempDirs(config.tmpdir, prefix="rez_write_")

debug_print = config.debug_printer("file_loads")

# Module-level cache of loaded files. NOTE(review): key/value scheme is not
# visible in this chunk - verify against the loader functions before relying
# on it.
file_cache = {}


class FileFormat(Enum):
    """File formats recognised by this module.

    Each member's value is a 1-tuple holding the file extension.
    """
    py = ("py", )
    yaml = ("yaml", )
    txt = ("txt", )

    # Explicit member ordering - presumably required by the vendored
    # (enum34-style) Enum under Python 2, where class dict order is lost.
    __order__ = "py,yaml,txt"

    def __init__(self, extension):
        # The single element of the value tuple is unpacked into the member's
        # extension attribute (e.g. FileFormat.py.extension == "py").
        self.extension = extension


# NOTE(review): the function this decorator applies to continues beyond this
# chunk and is not visible here.
@contextmanager
class Client(object):
    """Wrapper for memcache.Client instance.

    Adds the features:
    - unlimited key length;
    - hard/soft flushing;
    - ability to cache None.
    """

    class _Miss(object):
        # Falsy sentinel distinguishing "cache miss" from a cached None value.
        # __nonzero__ is the Python 2 truth-protocol hook.
        def __nonzero__(self):
            return False

    # Singleton miss sentinel returned by `get` on cache miss.
    miss = _Miss()

    logger = config.debug_printer("memcache")

    def __init__(self, servers, debug=False):
        """Create a memcached client.

        Args:
            servers (str or list of str): Server URI(s), eg '127.0.0.1:11211'.
            debug (bool): If True, quasi human readable keys are used. This
                helps debugging - run 'memcached -vv' in the foreground to see
                the keys being get/set/stored.
        """
        # Normalize a single URI string to a one-element list.
        self.servers = [servers] if isinstance(servers, basestring) else servers
        self.key_hasher = self._debug_key_hash if debug else self._key_hash
        # Native client is created lazily - see the `client` property.
        self._client = None
        self.debug = debug
        # Soft-flush tag; mixed into every key so changing it invalidates all
        # entries for this process only (see `flush` and `_qualified_key`).
        self.current = ''

    def __nonzero__(self):
        # Truthy only when at least one server URI was configured.
        return bool(self.servers)

    @property
    def client(self):
        """Get the native memcache client.

        Returns:
            `memcache.Client` instance.
        """
        if self._client is None:
            self._client = Client_(self.servers)
        return self._client

    def test_servers(self):
        """Test that memcached servers are servicing requests.

        Returns:
            set: URIs of servers that are responding.
        """
        responders = set()
        for server in self.servers:
            # One throwaway client per server, so each URI is tested alone.
            client = Client_([server])
            key = uuid4().hex
            client.set(key, 1)
            if client.get(key) == 1:
                responders.add(server)
        return responders

    def set(self, key, val, time=0, min_compress_len=0):
        """See memcache.Client."""
        if not self.servers:
            return  # no servers configured - silently a no-op

        key = self._qualified_key(key)
        hashed_key = self.key_hasher(key)
        # Store (key, val) rather than val alone, so `get` can detect hash
        # collisions by comparing the stored key with the requested one.
        val = (key, val)

        self.client.set(key=hashed_key,
                        val=val,
                        time=time,
                        min_compress_len=min_compress_len)
        self.logger("SET: %s", key)

    def get(self, key):
        """See memcache.Client.

        Returns:
            object: A value if cached, else `self.miss`. Note that this differs
            from `memcache.Client`, which returns None on cache miss, and thus
            cannot cache the value None itself.
        """
        if not self.servers:
            return self.miss

        key = self._qualified_key(key)
        hashed_key = self.key_hasher(key)
        entry = self.client.get(hashed_key)

        # Entries are stored as (key, value); a mismatched key means the
        # hashed key collided with another entry, which counts as a miss.
        if isinstance(entry, tuple) and len(entry) == 2:
            key_, result = entry
            if key_ == key:
                self.logger("HIT: %s", key)
                return result

        self.logger("MISS: %s", key)
        return self.miss

    def delete(self, key):
        """See memcache.Client."""
        if self.servers:
            key = self._qualified_key(key)
            hashed_key = self.key_hasher(key)
            self.client.delete(hashed_key)

    def flush(self, hard=False):
        """Drop existing entries from the cache.

        Args:
            hard (bool): If True, all current entries are flushed from the
                server(s), which affects all users. If False, only the local
                process is affected.
        """
        if not self.servers:
            return
        if hard:
            self.client.flush_all()
            self.reset_stats()
        else:
            # Soft flush: change the key namespace tag so previously-set
            # entries can no longer be addressed by this client.
            from uuid import uuid4
            tag = uuid4().hex
            if self.debug:
                tag = "flushed" + tag
            self.current = tag

    def get_stats(self):
        """Get server statistics.

        Returns:
            A list of tuples (server_identifier, stats_dictionary).
        """
        return self._get_stats()

    def reset_stats(self):
        """Reset the server stats."""
        self._get_stats("reset")

    def disconnect(self):
        """Disconnect from server(s). Behaviour is undefined after this call."""
        if self.servers and self._client:
            self._client.disconnect_all()
        #print "Disconnected memcached client %s" % str(self)

    def _qualified_key(self, key):
        # Prefix with interface version and the soft-flush tag; bumping either
        # effectively invalidates all prior entries.
        return "%s:%s:%s" % (cache_interface_version, self.current, key)

    def _get_stats(self, stat_args=None):
        return self.client.get_stats(stat_args=stat_args)

    @classmethod
    def _key_hash(cls, key):
        # Fixed-length key: memcached limits key length, rez does not.
        return md5(key).hexdigest()

    @classmethod
    def _debug_key_hash(cls, key):
        # Quasi human-readable form of _key_hash, truncated to the server's
        # max key length and sanitized to alphanumerics/underscores.
        import re
        h = cls._key_hash(key)[:16]
        value = "%s:%s" % (h, key)
        value = value[:SERVER_MAX_KEY_LENGTH]
        value = re.sub("[^0-9a-zA-Z]+", '_', value)
        return value
from rez.package_serialise import dump_package_data
from rez.exceptions import PackageMetadataError, ResourceError, RezSystemError, \
    ConfigurationError, PackageRepositoryError
from rez.utils.formatting import is_valid_package_name
from rez.utils.resources import cached_property
from rez.utils.logging_ import print_warning
from rez.utils.memcached import memcached, pool_memcached_connections
from rez.utils.filesystem import make_path_writable
from rez.serialise import load_from_file, FileFormat
from rez.config import config
from rez.backport.lru_cache import lru_cache
from rez.vendor.schema.schema import Schema, Optional, And, Use, Or
from rez.vendor.version.version import Version, VersionRange


debug_print = config.debug_printer("resources")


# ------------------------------------------------------------------------------
# format version
#
# 1:
# Initial format.
# 2:
# Late binding functions added.
# ------------------------------------------------------------------------------
format_version = 2


def check_format_version(filename, data):
    # NOTE(review): this definition continues beyond the visible chunk - the
    # body below is incomplete and reproduced verbatim.
    format_version_ = data.pop("format_version", None)
    ReleaseHookCancellingError, RezError, ReleaseError, BuildError, \
    ReleaseVCSError
from rez.utils.logging_ import print_warning
from rez.utils.colorize import heading, Printer
from rez.resolved_context import ResolvedContext
from rez.release_hook import create_release_hooks
from rez.resolver import ResolverStatus
from rez.config import config
from rez.vendor.enum import Enum
from contextlib import contextmanager
from pipes import quote
import getpass
import os.path
import sys


debug_print = config.debug_printer("package_release")


def get_build_process_types():
    """Returns the available build process implementations."""
    # Deferred import - likely avoids a circular import at module load time;
    # TODO confirm.
    from rez.plugin_managers import plugin_manager
    return plugin_manager.get_plugins('build_process')


# NOTE(review): the preceding import continuation and the signature below are
# both cut off at this chunk's edges and are reproduced verbatim.
def create_build_process(process_type, working_dir, build_system, package=None,
                         vcs=None, ensure_latest=True, skip_repo_errors=False,
from rez.exceptions import BuildProcessError, BuildContextResolveError, \
    ReleaseHookCancellingError, RezError, ReleaseError, BuildError, \
    ReleaseVCSError
from rez.utils.logging_ import print_warning
from rez.resolved_context import ResolvedContext
from rez.release_hook import create_release_hooks
from rez.resolver import ResolverStatus
from rez.config import config
from rez.vendor.enum import Enum
from contextlib import contextmanager
from pipes import quote
import getpass
import os.path


debug_print = config.debug_printer("package_release")


def get_build_process_types():
    """Returns the available build process implementations."""
    # Deferred import - likely avoids a circular import at module load time;
    # TODO confirm.
    from rez.plugin_managers import plugin_manager
    return plugin_manager.get_plugins('build_process')


def create_build_process(process_type, working_dir, build_system, package=None,
                         vcs=None, ensure_latest=True, skip_repo_errors=False,
                         ignore_existing_tag=False, verbose=False, quiet=False):
    """Create a `BuildProcess` instance."""
    from rez.plugin_managers import plugin_manager
    process_types = get_build_process_types()
    # NOTE(review): this definition continues beyond the visible chunk - the
    # branch below is incomplete and reproduced verbatim.
    if process_type not in process_types:
from rez.package_resources_ import package_rex_keys
from rez.utils.scope import ScopeContext
from rez.utils.sourcecode import SourceCode, early, late, include
from rez.utils.filesystem import TempDirs
from rez.utils.data_utils import ModifyList
from rez.exceptions import ResourceError, InvalidPackageError
from rez.utils.memcached import memcached
from rez.utils.system import add_sys_paths
from rez.config import config
from rez.vendor.atomicwrites import atomic_write
from rez.vendor.enum import Enum
from rez.vendor import yaml


# Manager for temp dirs used by this module; the "rez_write_" prefix suggests
# they stage file writes - TODO confirm against the writer functions.
tmpdir_manager = TempDirs(config.tmpdir, prefix="rez_write_")

debug_print = config.debug_printer("file_loads")

# Module-level cache of loaded files. NOTE(review): key/value scheme is not
# visible in this chunk - verify against the loader functions before relying
# on it.
file_cache = {}


class FileFormat(Enum):
    """File formats recognised by this module.

    Each member's value is a 1-tuple holding the file extension.
    """
    py = ("py",)
    yaml = ("yaml",)
    txt = ("txt",)

    # Explicit member ordering - presumably required by the vendored
    # (enum34-style) Enum under Python 2, where class dict order is lost.
    __order__ = "py,yaml,txt"

    def __init__(self, extension):
        # The single element of the value tuple is unpacked into the member's
        # extension attribute (e.g. FileFormat.py.extension == "py").
        self.extension = extension


# NOTE(review): the function this decorator applies to continues beyond this
# chunk and is not visible here.
@contextmanager