def schema(cls):
    from rez.utils.platform_ import platform_

    # Accept either a positive int, or one of the literal strings
    # "physical_cores" / "logical_cores", which get substituted with the
    # matching core count. The counts are baked in at the time the config
    # is read - that should be fine.
    alternatives = [
        And(int, lambda n: n > 0),
        And("physical_cores", Use(lambda _: platform_.physical_cores)),
        And("logical_cores", Use(lambda _: platform_.logical_cores)),
    ]
    return Or(*alternatives)
class ForceOrBool(Bool):
    """Boolean setting that additionally accepts the literal string "force"."""

    FORCE_STR = "force"

    # "force" must be matched before Bool.schema, otherwise Bool.schema
    # would coerce the string "force" to True
    schema = Or(FORCE_STR, Bool.schema)

    all_words = Bool.all_words | frozenset([FORCE_STR])

    def _parse_env_var(self, value):
        # pass "force" through untouched; anything else is parsed as a bool
        return value if value == self.FORCE_STR \
            else super(ForceOrBool, self)._parse_env_var(value)
class EmailReleaseHook(ReleaseHook):
    """Release hook that emails a summary of a release to a set of recipients.

    Recipients may be configured directly (a single email address, or a list
    of addresses), or indirectly via a yaml rules file whose rules filter on
    package attributes.
    """

    schema_dict = {
        "subject": basestring,
        "body": basestring,
        "smtp_host": basestring,
        "smtp_port": int,
        "sender": basestring,
        "recipients": Or(basestring, [basestring])}

    @classmethod
    def name(cls):
        return "emailer"

    def __init__(self, source_path):
        super(EmailReleaseHook, self).__init__(source_path)

    def post_release(self, user, install_path, variants, release_message=None,
                     changelog=None, previous_version=None, **kwargs):
        """Format and send the release email.

        Args:
            user (str): User performing the release.
            install_path (str): Path the package was released to.
            variants (list): Variants that were released; nothing is sent if
                this is empty.
        """
        if not variants:
            return  # nothing was released

        # construct email body
        release_dict = dict(path=install_path,
                            previous_version=previous_version or "None.",
                            message=release_message or "No release message.",
                            changelog=changelog or "No changelog.")

        paths_str = '\n'.join(x.root for x in variants)
        variants_dict = dict(count=len(variants),
                             paths=paths_str)

        formatter = scoped_formatter(release=release_dict,
                                     variants=variants_dict,
                                     system=system,
                                     package=self.package)

        body = formatter.format(self.settings.body)
        body = body.strip()
        body = body.replace("\n\n\n", "\n\n")

        # construct subject line, send email
        subject = formatter.format(self.settings.subject)
        self.send_email(subject, body)

    def send_email(self, subject, body):
        """Send `body` to the resolved recipients via the configured SMTP host."""
        if not self.settings.recipients:
            return  # nothing to do, sending email to nobody

        if not self.settings.smtp_host:
            print_warning("did not send release email: "
                          "SMTP host is not specified")
            return

        recipients = self.get_recipients()
        if not recipients:
            return

        print("Sending release email to:")
        print('\n'.join("- %s" % x for x in recipients))

        msg = MIMEText(body)
        msg["Subject"] = subject
        msg["From"] = self.settings.sender
        msg["To"] = ','.join(recipients)

        try:
            s = smtplib.SMTP(self.settings.smtp_host, self.settings.smtp_port)
            s.sendmail(from_addr=self.settings.sender,
                       to_addrs=recipients,
                       msg=msg.as_string())
            print('Email(s) sent.')
        except Exception as e:
            # deliberately best-effort: a failed email must not fail the release
            print_error("release email delivery failed: %s" % str(e))

    def get_recipients(self):
        """Resolve the 'recipients' setting into a list of email addresses.

        The setting may be a list of addresses, a path to a yaml rules file,
        or a single address.

        Returns:
            list of str: Email addresses; empty on error.
        """
        value = self.settings.recipients

        if isinstance(value, list):
            return value

        if os.path.exists(value):
            filepath = value
            try:
                return self.load_recipients(filepath)
            except Exception as e:
                print_error("failed to load recipients config: %s. Emails "
                            "not sent" % str(e))
        elif '@' in value:
            return [value]  # assume it's an email address
        else:
            print_error("email recipient file does not exist: %s. Emails not "
                        "sent" % value)

        return []

    def load_recipients(self, filepath):
        """Load recipient addresses from a yaml rules file.

        Each rule may contain 'filters' (package attribute values that must
        all match) and 'recipients' (addresses added when the rule matches).

        Returns:
            list of str: Sorted, de-duplicated email addresses.
        """
        def test(value, type_):
            if not isinstance(value, type_):
                # fix: format args must be a tuple - previously written as
                # `% type_, value`, which raised "not enough arguments for
                # format string" instead of the intended message
                raise TypeError("Expected %s, not %s" % (type_, value))
            return value

        conf = load_yaml(filepath)
        recipients = set()

        for rule in test(conf.get("rules", []), list):
            filters = rule.get("filters")
            match = True

            if filters:
                for attr, test_value in test(filters, dict).items():
                    # sentinel distinguishes "attr missing" from "attr is None"
                    missing = object()
                    value = getattr(self.package, attr, missing)

                    if value is missing:
                        match = False
                    elif test_value is None:
                        match = True  # filter present with no value: any value matches
                    elif isinstance(test_value, list):
                        match = (value in test_value)
                    else:
                        match = (value == test_value)

                    if not match:
                        break

            if match:
                rule_recipients = rule.get("recipients")
                recipients.update(test(rule_recipients, list))

        return sorted(recipients)
dict_to_attributes_code, as_block_string from rez.utils.schema import Required from rez.utils.yaml import dump_yaml from pprint import pformat # preferred order of keys in a package definition file package_key_order = [ 'name', 'nice_name', 'version', 'description', 'authors', 'tools', 'tools_info', 'excluded_tools', 'has_plugins', 'plugin_for', 'requires', 'build_requires', 'private_build_requires', 'variants', 'commands', 'pre_commands', 'post_commands', 'help', 'config', 'uuid', 'timestamp', 'release_message', 'changelog', 'vcs', 'revision', 'previous_version', 'previous_revision', 'has_plugins', 'plugin_for', 'plugin_launch_commands' ] version_schema = Or(basestring, And(Version, Use(str))) package_request_schema = Or(basestring, And(PackageRequest, Use(str))) source_code_schema = Or(SourceCode, And(basestring, Use(SourceCode))) # package serialisation schema package_serialise_schema = Schema({ Required("name"): basestring, Optional("nice_name"): basestring, Optional("version"): version_schema, Optional("description"): basestring,
"preprocess", ) # package attributes that are rex-based functions package_rex_keys = ( "pre_commands", "commands", "post_commands" ) #------------------------------------------------------------------------------ # utility schemas #------------------------------------------------------------------------------ help_schema = Or(basestring, # single help entry [[basestring]]) # multiple help entries _is_late = And(SourceCode, lambda x: hasattr(x, "_late")) def late_bound(schema): return Or(SourceCode, schema) # used when 'requires' is late bound late_requires_schema = Schema([ Or(PackageRequest, And(basestring, Use(PackageRequest))) ]) #------------------------------------------------------------------------------ # schema dicts #------------------------------------------------------------------------------
class OptionalStrOrFunction(Setting):
    """Setting that accepts None, a string, or a callable."""

    schema = Or(None, basestring, callable)

    def _parse_env_var(self, value):
        # An environment-variable override can only ever supply the string
        # form, e.g. 'mymodule.preprocess_func' - hand it back untouched.
        return value
def schema(cls):
    from rez.resolved_context import RezToolsVisibility

    # the valid values are exactly the enum member names
    names = (member.name for member in RezToolsVisibility)
    return Or(*names)
def schema(cls):
    from rez.resolved_context import SuiteVisibility

    # the valid values are exactly the enum member names
    names = (member.name for member in SuiteVisibility)
    return Or(*names)
class OptionalStr(Str):
    """String setting that may also be left unset (None)."""

    schema = Or(None, str_type)
"package_filter": OptionalDictOrDictList, "new_session_popen_args": OptionalDict, "context_tracking_amqp": OptionalDict, "context_tracking_extra_fields": OptionalDict, # GUI settings "use_pyside": Bool, "use_pyqt": Bool, "gui_threads": Bool }) # settings common to each plugin type _plugin_config_dict = { "release_vcs": { "tag_name": str_type, "releasable_branches": Or(None, [str_type]), "check_tag": bool } } # ----------------------------------------------------------------------------- # Config # ----------------------------------------------------------------------------- class Config(six.with_metaclass(LazyAttributeMeta, object)): """Rez configuration settings. You should call the `create_config` function, rather than constructing a `Config` object directly.
'commands',
'pre_commands',
'post_commands',
'help',
'config',
'uuid',
'timestamp',
'release_message',
'changelog',
'vcs',
'revision',
'previous_version',
'previous_revision']

# a version may be given as a string, or a Version object (coerced to str)
version_schema = Or(basestring, And(Version, Use(str)))

package_request_schema = Or(basestring, And(PackageRequest, Use(str)))

source_code_schema = Or(SourceCode, And(basestring, Use(SourceCode)))


# package serialisation schema
package_serialise_schema = Schema({
    Required("name"): basestring,
    Optional("version"): version_schema,
    Optional("description"): basestring,
    Optional("authors"): [basestring],
    Optional("tools"): [basestring],
class CommandReleaseHook(ReleaseHook):
    """Release hook that runs configured shell commands before a build, and
    before/after a release.

    Each command is a dict matching `commands_schema`; 'args' may be given
    either as a single string (split on whitespace) or as a list.
    """

    commands_schema = Schema({
        "command": basestring,
        Optional("args"): Or(And(basestring,
                                 Use(lambda x: x.strip().split())),
                             [basestring]),
        Optional("user"): basestring,
        Optional("env"): dict})

    schema_dict = {
        "print_commands": bool,
        "print_output": bool,
        "print_error": bool,
        "cancel_on_error": bool,
        "stop_on_error": bool,
        "pre_build_commands": [commands_schema],
        "pre_release_commands": [commands_schema],
        "post_release_commands": [commands_schema]}

    @classmethod
    def name(cls):
        return "command"

    def __init__(self, source_path):
        super(CommandReleaseHook, self).__init__(source_path)

    def execute_command(self, cmd_name, cmd_arguments, user, errors, env=None):
        """Run a single command, recording any failure message into `errors`.

        Args:
            cmd_name (str): Command name or path; resolved with `which` when
                not an existing file path.
            cmd_arguments (list): Arguments passed to the command.
            user (str): User to run as; 'root' runs under sudo, other users
                are not implemented yet.
            errors (list): Accumulator that failure messages are appended to.
            env (dict): Optional environment for the command.

        Returns:
            bool: True if the command ran successfully.
        """
        def _err(msg):
            errors.append(msg)
            if self.settings.print_error:
                print >> sys.stderr, msg

        kwargs = {}
        if env:
            kwargs["_env"] = env

        def _execute(cmd, arguments):
            try:
                result = cmd(*(arguments or []), **kwargs)
                if self.settings.print_output:
                    print result.stdout.strip()
            except ErrorReturnCode as e:
                # `e` shows the command that was run
                msg = "command failed:\n%s" % str(e)
                _err(msg)
                return False
            return True

        # resolve the command to a full path if it isn't one already
        if not os.path.isfile(cmd_name):
            cmd_full_path = which(cmd_name)
        else:
            cmd_full_path = cmd_name
        if not cmd_full_path:
            msg = "%s: command not found" % cmd_name
            _err(msg)
            return False

        run_cmd = Command(cmd_full_path)
        if user == 'root':
            with sudo:
                return _execute(run_cmd, cmd_arguments)
        elif user and user != getpass.getuser():
            raise NotImplementedError  # TODO
        else:
            return _execute(run_cmd, cmd_arguments)

    def pre_build(self, user, install_path, **kwargs):
        # run pre_build_commands; cancel the build on failure if configured
        errors = []
        self._execute_commands(self.settings.pre_build_commands,
                               install_path=install_path,
                               package=self.package,
                               errors=errors)

        if errors and self.settings.cancel_on_error:
            raise ReleaseHookCancellingError(
                "The following pre-build commands failed:\n%s"
                % '\n\n'.join(errors))

    def pre_release(self, user, install_path, **kwargs):
        # run pre_release_commands; cancel the release on failure if configured
        errors = []
        self._execute_commands(self.settings.pre_release_commands,
                               install_path=install_path,
                               package=self.package,
                               errors=errors)

        if errors and self.settings.cancel_on_error:
            raise ReleaseHookCancellingError(
                "The following pre-release commands failed:\n%s"
                % '\n\n'.join(errors))

    def post_release(self, user, install_path, variants, **kwargs):
        # note that the package we use here is the *installed* package, not the
        # developer package (self.package). Otherwise, attributes such as
        # 'root' will be None
        errors = []
        if variants:
            package = variants[0].parent
        else:
            package = self.package

        self._execute_commands(self.settings.post_release_commands,
                               install_path=install_path,
                               package=package,
                               errors=errors)
        if errors:
            # post-release failures are only reported, never fatal
            print_debug("The following post-release commands failed:\n"
                        + '\n\n'.join(errors))

    def _execute_commands(self, commands, install_path, package, errors=None):
        """Format then run each command dict in `commands`.

        Stops at the first failing command if 'stop_on_error' is enabled.
        """
        release_dict = dict(path=install_path)
        formatter = scoped_formatter(system=system,
                                     release=release_dict,
                                     package=package)

        for conf in commands:
            program = conf["command"]

            # overlay any per-command env on top of the current environment
            env_ = None
            env = conf.get("env")
            if env:
                env_ = os.environ.copy()
                env_.update(env)

            # expand formatting fields, then env-vars, in the args
            args = conf.get("args", [])
            args = [formatter.format(x) for x in args]
            args = [expandvars(x, environ=env_) for x in args]

            if self.settings.print_commands or config.debug("package_release"):
                from subprocess import list2cmdline
                toks = [program] + args

                msgs = []
                msgs.append("running command: %s" % list2cmdline(toks))
                if env:
                    for key, value in env.iteritems():
                        msgs.append(" with: %s=%s" % (key, value))

                if self.settings.print_commands:
                    print '\n'.join(msgs)
                else:
                    for msg in msgs:
                        print_debug(msg)

            if not self.execute_command(cmd_name=program,
                                        cmd_arguments=args,
                                        user=conf.get("user"),
                                        errors=errors,
                                        env=env_):
                if self.settings.stop_on_error:
                    return
from rez.utils.data_utils import AttrDictWrapper
from rez.utils.logging_ import print_warning
from rez.package_resources_ import help_schema, _commands_schema, \
    _function_schema, late_bound
from rez.package_repository import create_memory_package_repository
from rez.packages_ import Package
from rez.package_py_utils import expand_requirement
from rez.vendor.schema.schema import Schema, Optional, Or, Use, And
from rez.vendor.version.version import Version
from contextlib import contextmanager
import os


# this schema will automatically harden request strings like 'python-*'; see
# the 'expand_requires' function for more info.
#
package_request_schema = Or(And(basestring, Use(expand_requirement)),
                            And(PackageRequest, Use(str)))

# a test is either a command (string or list of strings), or a dict with a
# 'command' plus optional 'requires'
tests_schema = Schema({
    Optional(basestring): Or(
        Or(basestring, [basestring]),
        {
            "command": Or(basestring, [basestring]),
            Optional("requires"): [package_request_schema]
        })
})


package_schema = Schema({
    Required("name"): basestring,
    Optional("base"): basestring,
class CMakeBuildSystem(BuildSystem): """The CMake build system. The 'cmake' executable is run within the build environment. Rez supplies a library of cmake macros in the 'cmake_files' directory; these are added to cmake's searchpath and are available to use in your own CMakeLists.txt file. The following CMake variables are available: - REZ_BUILD_TYPE: One of 'local', 'central'. Describes whether an install is going to the local packages path, or the release packages path. - REZ_BUILD_INSTALL: One of 0 or 1. If 1, an installation is taking place; if 0, just a build is occurring. """ build_systems = {'eclipse': "Eclipse CDT4 - Unix Makefiles", 'codeblocks': "CodeBlocks - Unix Makefiles", 'make': "Unix Makefiles", 'nmake': "NMake Makefiles", 'xcode': "Xcode"} build_targets = ["Debug", "Release", "RelWithDebInfo"] schema_dict = { "build_target": Or(*build_targets), "build_system": Or(*build_systems.keys()), "cmake_args": [basestring], "cmake_binary": Or(None, basestring), "make_binary": Or(None, basestring)} @classmethod def name(cls): return "cmake" @classmethod def child_build_system(cls): return "make" @classmethod def is_valid_root(cls, path): return os.path.isfile(os.path.join(path, "CMakeLists.txt")) @classmethod def bind_cli(cls, parser): settings = config.plugins.build_system.cmake parser.add_argument("--bt", "--build-target", dest="build_target", type=str, choices=cls.build_targets, default=settings.build_target, help="set the build target (default: %(default)s).") parser.add_argument("--bs", "--build-system", dest="build_system", type=str, choices=cls.build_systems.keys(), default=settings.build_system, help="set the cmake build system (default: %(default)s).") def __init__(self, working_dir, opts=None, write_build_scripts=False, verbose=False, build_args=[], child_build_args=[]): super(CMakeBuildSystem, self).__init__( working_dir, opts=opts, write_build_scripts=write_build_scripts, verbose=verbose, build_args=build_args, child_build_args=child_build_args) 
self.settings = self.package.config.plugins.build_system.cmake self.build_target = (opts and opts.build_target) or \ self.settings.build_target self.cmake_build_system = (opts and opts.build_system) or \ self.settings.build_system if self.cmake_build_system == 'xcode' and platform_.name != 'osx': raise RezCMakeError("Generation of Xcode project only available " "on the OSX platform") def build(self, context, variant, build_path, install_path, install=False, build_type=BuildType.local): def _pr(s): if self.verbose: print s # find cmake binary if self.settings.cmake_binary: exe = self.settings.cmake_binary else: exe = context.which("cmake", fallback=True) if not exe: raise RezCMakeError("could not find cmake binary") found_exe = which(exe) if not found_exe: raise RezCMakeError("cmake binary does not exist: %s" % exe) sh = create_shell() # assemble cmake command cmd = [found_exe, "-d", self.working_dir] cmd += (self.settings.cmake_args or []) cmd += (self.build_args or []) cmd.append("-DCMAKE_INSTALL_PREFIX=%s" % install_path) cmd.append("-DCMAKE_MODULE_PATH=%s" % sh.get_key_token("CMAKE_MODULE_PATH")) cmd.append("-DCMAKE_BUILD_TYPE=%s" % self.build_target) cmd.append("-DREZ_BUILD_TYPE=%s" % build_type.name) cmd.append("-DREZ_BUILD_INSTALL=%d" % (1 if install else 0)) cmd.extend(["-G", self.build_systems[self.cmake_build_system]]) if config.rez_1_cmake_variables and \ not config.disable_rez_1_compatibility and \ build_type == BuildType.central: cmd.append("-DCENTRAL=1") # execute cmake within the build env _pr("Executing: %s" % ' '.join(cmd)) if not os.path.abspath(build_path): build_path = os.path.join(self.working_dir, build_path) build_path = os.path.realpath(build_path) callback = functools.partial(self._add_build_actions, context=context, package=self.package, variant=variant, build_type=build_type) # run the build command and capture/print stderr at the same time retcode, _, _ = context.execute_shell(command=cmd, block=True, cwd=build_path, 
actions_callback=callback) ret = {} if retcode: ret["success"] = False return ret if self.write_build_scripts: # write out the script that places the user in a build env, where # they can run make directly themselves. build_env_script = os.path.join(build_path, "build-env") create_forwarding_script(build_env_script, module=("build_system", "cmake"), func_name="_FWD__spawn_build_shell", working_dir=self.working_dir, build_dir=build_path, variant_index=variant.index) ret["success"] = True ret["build_env_script"] = build_env_script return ret # assemble make command if self.settings.make_binary: cmd = [self.settings.make_binary] else: cmd = ["make"] cmd += (self.child_build_args or []) if not any(x.startswith("-j") for x in (self.child_build_args or [])): n = variant.config.build_thread_count or cpu_count() cmd.append("-j%d" % n) # execute make within the build env _pr("\nExecuting: %s" % ' '.join(cmd)) retcode, _, _ = context.execute_shell(command=cmd, block=True, cwd=build_path, actions_callback=callback) if not retcode and install and "install" not in cmd: cmd.append("install") # execute make install within the build env _pr("\nExecuting: %s" % ' '.join(cmd)) retcode, _, _ = context.execute_shell(command=cmd, block=True, cwd=build_path, actions_callback=callback) ret["success"] = (not retcode) return ret @staticmethod def _add_build_actions(executor, context, package, variant, build_type): settings = package.config.plugins.build_system.cmake cmake_path = os.path.join(os.path.dirname(__file__), "cmake_files") template_path = os.path.join(os.path.dirname(__file__), "template_files") executor.env.CMAKE_MODULE_PATH.append(cmake_path) executor.env.REZ_BUILD_DOXYFILE = os.path.join(template_path, 'Doxyfile') executor.env.REZ_BUILD_VARIANT_INDEX = variant.index or 0 executor.env.REZ_BUILD_THREAD_COUNT = package.config.build_thread_count or cpu_count() # build always occurs on a filesystem package, thus 'filepath' attribute # exists. 
This is not the case for packages in general. executor.env.REZ_BUILD_PROJECT_FILE = package.filepath executor.env.REZ_BUILD_PROJECT_VERSION = str(package.version) executor.env.REZ_BUILD_PROJECT_NAME = package.name executor.env.REZ_BUILD_PROJECT_DESCRIPTION = \ (package.description or '').strip() executor.env.REZ_BUILD_REQUIRES_UNVERSIONED = \ ' '.join(x.name for x in context.requested_packages(True)) executor.env.REZ_BUILD_INSTALL_PYC = '1' if settings.install_pyc else '0' if config.rez_1_environment_variables and \ not config.disable_rez_1_compatibility and \ build_type == BuildType.central: executor.env.REZ_IN_REZ_RELEASE = 1
_function_schema, late_bound
from rez.package_repository import create_memory_package_repository
from rez.packages_ import Package
from rez.package_py_utils import expand_requirement
from rez.vendor.schema.schema import Schema, Optional, Or, Use, And
from rez.vendor.six import six
from rez.vendor.version.version import Version
from contextlib import contextmanager
import os

# six.string_types[0] is str on py3, basestring on py2
basestring = six.string_types[0]


# this schema will automatically harden request strings like 'python-*'; see
# the 'expand_requires' function for more info.
#
package_request_schema = Or(And(basestring, Use(expand_requirement)),
                            And(PackageRequest, Use(str)))

# a test is either a command (string or list of strings), or a dict form with
# extra fields ('requires', 'run_on', 'on_variants')
tests_schema = Schema({
    Optional(basestring): Or(
        Or(basestring, [basestring]),
        extensible_schema_dict({
            "command": Or(basestring, [basestring]),
            Optional("requires"): [package_request_schema],
            Optional("run_on"): Or(basestring, [basestring]),
            Optional("on_variants"): Or(bool, {
                "type": "requires",
                "value": [package_request_schema]
class OptionalDict(Dict):
    """Dict setting that treats None as an empty dict."""

    # None is normalised to {} so consumers always receive a dict
    schema = Or(
        And(None, Use(lambda _: {})),
        dict,
    )
class OptionalDictOrDictList(Setting):
    """Setting holding a list of dicts.

    None is normalised to an empty list, and a single dict is wrapped into a
    one-element list, so consumers always receive a list of dicts.
    """

    schema = Or(
        And(None, Use(lambda _: [])),
        And(dict, Use(lambda d: [d])),
        [dict],
    )
class OptionalStrList(StrList):
    """String-list setting that treats None as an empty list."""

    schema = Or(
        And(None, Use(lambda _: [])),
        [str_type],
    )
def schema(cls):
    from rez.solver import VariantSelectMode

    # the valid values are exactly the enum member names
    names = (member.name for member in VariantSelectMode)
    return Or(*names)
class FileSystemPackageRepository(PackageRepository): """A filesystem-based package repository. Packages are stored on disk, in either 'package.yaml' or 'package.py' files. These files are stored into an organised directory structure like so: /LOCATION/pkgA/1.0.0/package.py /1.0.1/package.py /pkgB/2.1/package.py /2.2/package.py Another supported storage format is to store all package versions within a single package family in one file, like so: /LOCATION/pkgC.yaml /LOCATION/pkgD.py These 'combined' package files allow for differences between package versions via a 'package_overrides' section: name: pkgC versions: - '1.0' - '1.1' - '1.2' version_overrides: '1.0': requires: - python-2.5 '1.1+': requires: - python-2.6 """ schema_dict = {"file_lock_timeout": int, "file_lock_dir": Or(None, str), "package_filenames": [basestring]} building_prefix = ".building" package_file_mode = (stat.S_IRUSR | stat.S_IRGRP | stat.S_IROTH) @classmethod def name(cls): return "filesystem" def __init__(self, location, resource_pool): """Create a filesystem package repository. Args: location (str): Path containing the package repository. 
""" super(FileSystemPackageRepository, self).__init__(location, resource_pool) global _settings _settings = config.plugins.package_repository.filesystem self.register_resource(FileSystemPackageFamilyResource) self.register_resource(FileSystemPackageResource) self.register_resource(FileSystemVariantResource) self.register_resource(FileSystemCombinedPackageFamilyResource) self.register_resource(FileSystemCombinedPackageResource) self.register_resource(FileSystemCombinedVariantResource) self.get_families = lru_cache(maxsize=None)(self._get_families) self.get_family = lru_cache(maxsize=None)(self._get_family) self.get_packages = lru_cache(maxsize=None)(self._get_packages) self.get_variants = lru_cache(maxsize=None)(self._get_variants) self.get_file = lru_cache(maxsize=None)(self._get_file) def _uid(self): t = ["filesystem", self.location] if os.path.exists(self.location): st = os.stat(self.location) t.append(st.st_ino) return tuple(t) def get_package_family(self, name): return self.get_family(name) @pool_memcached_connections def iter_package_families(self): for family in self.get_families(): yield family @pool_memcached_connections def iter_packages(self, package_family_resource): for package in self.get_packages(package_family_resource): yield package def iter_variants(self, package_resource): for variant in self.get_variants(package_resource): yield variant def get_parent_package_family(self, package_resource): return package_resource.parent def get_parent_package(self, variant_resource): return variant_resource.parent def get_variant_state_handle(self, variant_resource): package_resource = variant_resource.parent return package_resource.state_handle def get_last_release_time(self, package_family_resource): return package_family_resource.get_last_release_time() @cached_property def file_lock_dir(self): dirname = _settings.file_lock_dir if not dirname: return None # sanity check if os.path.isabs(dirname) or os.path.basename(dirname) != dirname: raise 
ConfigurationError( "filesystem package repository setting 'file_lock_dir' must be " "a single relative directory such as '.lock'") # fall back to location path if lock dir doesn't exist. path = os.path.join(self.location, dirname) if not os.path.exists(path): return None return dirname def pre_variant_install(self, variant_resource): if not variant_resource.version: return # create 'building' tagfile, this makes sure that a resolve doesn't # pick up this package if it doesn't yet have a package.py created. path = self.location family_path = os.path.join(path, variant_resource.name) if not os.path.isdir(family_path): os.makedirs(family_path) filename = self.building_prefix + str(variant_resource.version) filepath = os.path.join(family_path, filename) with open(filepath, 'w'): # create empty file pass def install_variant(self, variant_resource, dry_run=False, overrides=None): overrides = overrides or {} # Name and version overrides are a special case - they change the # destination variant to be created/replaced. 
# variant_name = variant_resource.name variant_version = variant_resource.version if "name" in overrides: variant_name = overrides["name"] if variant_name is self.remove: raise PackageRepositoryError( "Cannot remove package attribute 'name'") if "version" in overrides: ver = overrides["version"] if ver is self.remove: raise PackageRepositoryError( "Cannot remove package attribute 'version'") if isinstance(ver, basestring): ver = Version(ver) overrides = overrides.copy() overrides["version"] = ver variant_version = ver # cannot install over one's self, just return existing variant if variant_resource._repository is self and \ variant_name == variant_resource.name and \ variant_version == variant_resource.version: return variant_resource # check repo exists on disk path = self.location if not os.path.exists(path): raise PackageRepositoryError( "Package repository path does not exist: %s" % path) # install the variant def _create_variant(): return self._create_variant( variant_resource, dry_run=dry_run, overrides=overrides ) if dry_run: variant = _create_variant() else: with self._lock_package(variant_name, variant_version): variant = _create_variant() return variant @contextmanager def _lock_package(self, package_name, package_version=None): from rez.vendor.lockfile import LockFile path = self.location if self.file_lock_dir: path = os.path.join(path, self.file_lock_dir) if not os.path.exists(path): raise PackageRepositoryError( "Lockfile directory %s does not exist - please create and try " "again" % path) filename = ".lock.%s" % package_name if package_version: filename += "-%s" % str(package_version) lock_file = os.path.join(path, filename) lock = LockFile(lock_file) try: lock.acquire(timeout=_settings.file_lock_timeout) yield finally: if lock.is_locked(): lock.release() def clear_caches(self): super(FileSystemPackageRepository, self).clear_caches() self.get_families.cache_clear() self.get_family.cache_clear() self.get_packages.cache_clear() 
self.get_variants.cache_clear() self.get_file.cache_clear() self._get_family_dirs.forget() self._get_version_dirs.forget() # unfortunately we need to clear file cache across the board clear_file_caches() def get_package_payload_path(self, package_name, package_version=None): path = os.path.join(self.location, package_name) if package_version: path = os.path.join(path, str(package_version)) return path # -- internal def _get_family_dirs__key(self): if os.path.isdir(self.location): st = os.stat(self.location) return str(("listdir", self.location, st.st_ino, st.st_mtime)) else: return str(("listdir", self.location)) @memcached(servers=config.memcached_uri if config.cache_listdir else None, min_compress_len=config.memcached_listdir_min_compress_len, key=_get_family_dirs__key, debug=config.debug_memcache) def _get_family_dirs(self): dirs = [] if not os.path.isdir(self.location): return dirs for name in os.listdir(self.location): path = os.path.join(self.location, name) if os.path.isdir(path): if is_valid_package_name(name) and name != self.file_lock_dir: dirs.append((name, None)) else: name_, ext_ = os.path.splitext(name) if ext_ in (".py", ".yaml") and is_valid_package_name(name_): dirs.append((name_, ext_[1:])) return dirs def _get_version_dirs__key(self, root): st = os.stat(root) return str(("listdir", root, st.st_ino, st.st_mtime)) @memcached(servers=config.memcached_uri if config.cache_listdir else None, min_compress_len=config.memcached_listdir_min_compress_len, key=_get_version_dirs__key, debug=config.debug_memcache) def _get_version_dirs(self, root): # simpler case if this test is on # if _settings.check_package_definition_files: dirs = [] for name in os.listdir(root): if name.startswith('.'): continue path = os.path.join(root, name) if os.path.isdir(path): if not self._is_valid_package_directory(path): continue dirs.append(name) return dirs # with test off, we have to check for 'building' dirs, these have to be # tested regardless. 
Failed releases may cause 'building files' to be # left behind, so we need to clear these out also # dirs = set() building_dirs = set() # find dirs and dirs marked as 'building' for name in os.listdir(root): if name.startswith('.'): if not name.startswith(self.building_prefix): continue ver_str = name[len(self.building_prefix):] building_dirs.add(ver_str) path = os.path.join(root, name) if os.path.isdir(path): dirs.add(name) # check 'building' dirs for validity for name in building_dirs: if name not in dirs: continue path = os.path.join(root, name) if not self._is_valid_package_directory(path): # package probably still being built dirs.remove(name) return list(dirs) # True if `path` contains package.py or similar def _is_valid_package_directory(self, path): return bool(self._get_file(path, "package")[0]) def _get_families(self): families = [] for name, ext in self._get_family_dirs(): if ext is None: # is a directory family = self.get_resource( FileSystemPackageFamilyResource.key, location=self.location, name=name) else: family = self.get_resource( FileSystemCombinedPackageFamilyResource.key, location=self.location, name=name, ext=ext) families.append(family) return families def _get_family(self, name): is_valid_package_name(name, raise_error=True) if os.path.isdir(os.path.join(self.location, name)): family = self.get_resource( FileSystemPackageFamilyResource.key, location=self.location, name=name) return family else: filepath, format_ = self.get_file(self.location, package_filename=name) if filepath: family = self.get_resource( FileSystemCombinedPackageFamilyResource.key, location=self.location, name=name, ext=format_.extension) return family return None def _get_packages(self, package_family_resource): return [x for x in package_family_resource.iter_packages()] def _get_variants(self, package_resource): return [x for x in package_resource.iter_variants()] def _get_file(self, path, package_filename=None): if package_filename: package_filenames = [package_filename] 
else: package_filenames = _settings.package_filenames for name in package_filenames: for format_ in (FileFormat.py, FileFormat.yaml): filename = "%s.%s" % (name, format_.extension) filepath = os.path.join(path, filename) if os.path.isfile(filepath): return filepath, format_ return None, None def _create_family(self, name): path = os.path.join(self.location, name) if not os.path.exists(path): os.makedirs(path) self.clear_caches() return self.get_package_family(name) def _create_variant(self, variant, dry_run=False, overrides=None): # special case overrides variant_name = overrides.get("name") or variant.name variant_version = overrides.get("version") or variant.version overrides = (overrides or {}).copy() overrides.pop("name", None) overrides.pop("version", None) # find or create the package family family = self.get_package_family(variant_name) if not family: family = self._create_family(variant_name) if isinstance(family, FileSystemCombinedPackageFamilyResource): raise NotImplementedError( "Cannot install variant into combined-style package file %r." % family.filepath) # find the package if it already exists existing_package = None for package in self.iter_packages(family): if package.version == variant_version: # during a build, the family/version dirs get created ahead of # time, which causes a 'missing package definition file' error. # This is fine, we can just ignore it and write the new file. 
try: package.validate_data() except PackageDefinitionFileMissing: break uuids = set([variant.uuid, package.uuid]) if len(uuids) > 1 and None not in uuids: raise ResourceError( "Cannot install variant %r into package %r - the " "packages are not the same (UUID mismatch)" % (variant, package)) existing_package = package if variant.index is None: if package.variants: raise ResourceError( "Attempting to install a package without variants " "(%r) into an existing package with variants (%r)" % (variant, package)) elif not package.variants: raise ResourceError( "Attempting to install a variant (%r) into an existing " "package without variants (%r)" % (variant, package)) existing_package_data = None release_data = {} # Need to treat 'config' as special case. In validated data, this is # converted to a Config object. We need it as the raw dict that you'd # see in a package.py. # def _get_package_data(pkg): data = pkg.validated_data() if hasattr(pkg, "_data"): raw_data = pkg._data else: raw_data = pkg.resource._data raw_config_data = raw_data.get('config') data.pop("config", None) if raw_config_data: data["config"] = raw_config_data return data def _remove_build_keys(obj): for key in package_build_only_keys: obj.pop(key, None) new_package_data = _get_package_data(variant.parent) new_package_data.pop("variants", None) new_package_data["name"] = variant_name if variant_version: new_package_data["version"] = variant_version package_changed = False _remove_build_keys(new_package_data) if existing_package: debug_print( "Found existing package for installation of variant %s: %s", variant.uri, existing_package.uri ) existing_package_data = _get_package_data(existing_package) _remove_build_keys(existing_package_data) # detect case where new variant introduces package changes outside of variant data_1 = existing_package_data.copy() data_2 = new_package_data.copy() for key in package_release_keys: data_2.pop(key, None) value = data_1.pop(key, None) if value is not None: 
release_data[key] = value for key in ("format_version", "base", "variants"): data_1.pop(key, None) data_2.pop(key, None) package_changed = (data_1 != data_2) if debug_print: if package_changed: from rez.utils.data_utils import get_dict_diff_str debug_print("Variant %s package data differs from package %s", variant.uri, existing_package.uri) txt = get_dict_diff_str(data_1, data_2, "Changes:") debug_print(txt) else: debug_print("Variant %s package data matches package %s", variant.uri, existing_package.uri) # check for existing installed variant existing_installed_variant = None installed_variant_index = None if existing_package: if variant.index is None: existing_installed_variant = \ self.iter_variants(existing_package).next() else: variant_requires = variant.variant_requires for variant_ in self.iter_variants(existing_package): variant_requires_ = existing_package.variants[variant_.index] if variant_requires_ == variant_requires: installed_variant_index = variant_.index existing_installed_variant = variant_ if existing_installed_variant: debug_print( "Variant %s already has installed equivalent: %s", variant.uri, existing_installed_variant.uri ) if dry_run: if not package_changed: return existing_installed_variant else: return None # construct package data for new installed package definition if existing_package: _, file_ = os.path.split(existing_package.filepath) package_filename, package_extension = os.path.splitext(file_) package_extension = package_extension[1:] package_format = FileFormat[package_extension] if package_changed: # graft together new package data, with existing package variants, # and other data that needs to stay unchanged (eg timestamp) package_data = new_package_data if variant.index is not None: package_data["variants"] = existing_package_data.get("variants", []) else: package_data = existing_package_data else: package_data = new_package_data package_filename = _settings.package_filenames[0] package_extension = "py" package_format = 
FileFormat.py # merge existing release data (if any) into the package. Note that when # this data becomes variant-specific, this step will no longer be needed package_data.update(release_data) # merge the new variant into the package if installed_variant_index is None and variant.index is not None: variant_requires = variant.variant_requires if not package_data.get("variants"): package_data["variants"] = [] package_data["variants"].append(variant_requires) installed_variant_index = len(package_data["variants"]) - 1 # a little data massaging is needed package_data.pop("base", None) # create version dir if it doesn't already exist family_path = os.path.join(self.location, variant_name) if variant_version: pkg_base_path = os.path.join(family_path, str(variant_version)) else: pkg_base_path = family_path if not os.path.exists(pkg_base_path): os.makedirs(pkg_base_path) # Apply overrides. # # If we're installing into an existing package, then existing attributes # in that package take precedence over `overrides`. If we're installing # to a new package, then `overrides` takes precedence always. # # This is done so that variants added to an existing package don't change # attributes such as 'timestamp' or release-related fields like 'revision'. 
# for key, value in overrides.iteritems(): if existing_package: if key not in package_data: package_data[key] = value else: if value is self.remove: package_data.pop(key, None) else: package_data[key] = value # timestamp defaults to now if not specified if not package_data.get("timestamp"): package_data["timestamp"] = int(time.time()) # format version is always set package_data["format_version"] = format_version # write out new package definition file package_file = ".".join([package_filename, package_extension]) filepath = os.path.join(pkg_base_path, package_file) with make_path_writable(pkg_base_path): with open_file_for_write(filepath, mode=self.package_file_mode) as f: dump_package_data(package_data, buf=f, format_=package_format) # delete the tmp 'building' file. if variant_version: filename = self.building_prefix + str(variant_version) filepath = os.path.join(family_path, filename) if os.path.exists(filepath): try: os.remove(filepath) except: pass # delete other stale building files; previous failed releases may have # left some around try: self._delete_stale_build_tagfiles(family_path) except: pass # touch the family dir, this keeps memcached resolves updated properly os.utime(family_path, None) # load new variant new_variant = None self.clear_caches() family = self.get_package_family(variant_name) if family: for package in self.iter_packages(family): if package.version == variant_version: for variant_ in self.iter_variants(package): if variant_.index == installed_variant_index: new_variant = variant_ break elif new_variant: break if not new_variant: raise RezSystemError("Internal failure - expected installed variant") return new_variant def _delete_stale_build_tagfiles(self, family_path): now = time.time() for name in os.listdir(family_path): if not name.startswith(self.building_prefix): continue tagfilepath = os.path.join(family_path, name) ver_str = name[len(self.building_prefix):] pkg_path = os.path.join(family_path, ver_str) if os.path.exists(pkg_path): 
# build tagfile not needed if package is valid if self._is_valid_package_directory(pkg_path): os.remove(tagfilepath) continue else: # remove tagfile if pkg is gone. Delete only tagfiles over a certain # age, otherwise might delete a tagfile another process has created # just before it created the package directory. st = os.stat(tagfilepath) age = now - st.st_mtime if age > 3600: os.remove(tagfilepath)
def schema(cls): from rez.utils.execution import ExecutableScriptMode return Or(*(x.name for x in ExecutableScriptMode))
    "package_orderers": OptionalDictOrDictList,
    "new_session_popen_args": OptionalDict,
    "context_tracking_amqp": OptionalDict,
    "context_tracking_extra_fields": OptionalDict,

    # GUI settings
    "use_pyside": Bool,
    "use_pyqt": Bool,
    "gui_threads": Bool
})


# settings common to each plugin type
# (keys here apply to every plugin of the given type; 'release_vcs' plugins
# share tag naming and branch-release restrictions)
_plugin_config_dict = {
    "release_vcs": {
        "tag_name": basestring,
        "releasable_branches": Or(None, [basestring]),
        "check_tag": bool
    }
}


# -----------------------------------------------------------------------------
# Config
# -----------------------------------------------------------------------------

class Config(six.with_metaclass(LazyAttributeMeta, object)):
    """Rez configuration settings.

    You should call the `create_config` function, rather than constructing a
    `Config` object directly.
def schema(cls): from rez.developer_package import PreprocessMode return Or(*(x.name for x in PreprocessMode))
class OptionalStr(Str):
    """A string setting that may also be left unset (None)."""
    schema = Or(None, basestring)
from rez.utils.schema import Required, schema_keys
from rez.utils.formatting import PackageRequest
from rez.utils.data_utils import AttrDictWrapper
from rez.package_resources_ import help_schema, _commands_schema
from rez.package_repository import create_memory_package_repository
from rez.packages_ import Package
from rez.vendor.schema.schema import Schema, Optional, Or, Use, And
from rez.vendor.version.version import Version
from contextlib import contextmanager
import os


# a package request may be given as a string, or as a PackageRequest object
# (which is normalised to its string form)
package_request_schema = Or(basestring, And(PackageRequest, Use(str)))

# schema for a package definition; only 'name' is mandatory
package_schema = Schema({
    Required("name"): basestring,
    Optional("base"): basestring,
    # versions are normalised to their string form
    Optional("version"): Or(basestring, And(Version, Use(str))),
    Optional('description'): basestring,
    Optional('authors'): [basestring],
    Optional('requires'): [package_request_schema],
    Optional('build_requires'): [package_request_schema],
    Optional('private_build_requires'): [package_request_schema],
    # each variant is itself a list of requests
    Optional('variants'): [[package_request_schema]],
    Optional('uuid'): basestring,
    Optional('config'): dict,
class OptionalStrList(StrList):
    """A string-list setting where an unset value (None) becomes an empty list."""
    # None must come first so it is coerced to [] before list validation runs
    schema = Or(And(None, Use(lambda x: [])), [basestring])
def late_bound(schema):
    """Wrap `schema` so that a late-binding SourceCode value is also accepted."""
    wrapped = Or(SourceCode, schema)
    return wrapped
class OptionalBool(Bool):
    """A boolean setting that may also be left unset (None)."""
    # need None first, or Bool.schema will coerce None to False
    schema = Or(None, Bool.schema)
from rez.exceptions import PackageMetadataError
from rez.package_resources_ import help_schema, _commands_schema, \
    _function_schema, late_bound
from rez.package_repository import create_memory_package_repository
from rez.packages_ import Package
from rez.package_py_utils import expand_requirement
from rez.vendor.schema.schema import Schema, Optional, Or, Use, And
from rez.vendor.version.version import Version
from contextlib import contextmanager
import os


# this schema will automatically harden request strings like 'python-*'; see
# the 'expand_requires' function for more info.
#
package_request_schema = Or(And(basestring, Use(expand_requirement)),
                            And(PackageRequest, Use(str)))

# schema for the 'tests' package attribute: maps a test name to either a
# command (string or argv list), or a dict with a command plus extra requires
tests_schema = Schema({
    Optional(basestring): Or(
        Or(basestring, [basestring]),
        {
            "command": Or(basestring, [basestring]),
            Optional("requires"): [package_request_schema]
        }
    )
})


package_schema = Schema({
    Optional("requires_rez_version"): And(basestring, Use(Version)),
from rez.utils.formatting import PackageRequest, indent, \
    dict_to_attributes_code, as_block_string
from rez.utils.schema import Required
from rez.utils.yaml import dump_yaml
from pprint import pformat


# preferred order of keys in a package definition file
package_key_order = [
    'name',
    'version',
    'description',
    'authors',
    'tools',
    'has_plugins',
    'plugin_for',
    'requires',
    'build_requires',
    'private_build_requires',
    'variants',
    'commands',
    'pre_commands',
    'post_commands',
    'help',
    'config',
    'uuid',
    'timestamp',
    'release_message',
    'changelog',
    'vcs',
    'revision',
    'previous_version',
    'previous_revision'
]


# versions are normalised to their string form
version_schema = Or(basestring, And(Version, Use(str)))
# requests are normalised to their string form
package_request_schema = Or(basestring, And(PackageRequest, Use(str)))
# source code may be given as SourceCode or as a raw string to be wrapped
source_code_schema = Or(SourceCode, And(basestring, Use(SourceCode)))

# schema for the 'tests' package attribute: maps a test name to either a
# command (string or argv list), or a dict with a command plus extra requires
tests_schema = Schema({
    Optional(basestring): Or(
        Or(basestring, [basestring]),
        {
            "command": Or(basestring, [basestring]),
            Optional("requires"): [package_request_schema]
        })
})


# package serialisation schema