def find_pip(pip_version=None, python_version=None): """Find a pip exe using the given python version. Returns: 2-tuple: str: pip executable; `ResolvedContext`: Context containing pip, or None if we fell back to system pip. """ pip_exe = "pip" try: context = create_context(pip_version, python_version) except BuildError as e: # fall back on system pip. Not ideal but at least it's something from rez.backport.shutilwhich import which pip_exe = which("pip") if pip_exe: print_warning( "pip rez package could not be found; system 'pip' command (%s) " "will be used instead." % pip_exe) context = None else: raise e return pip_exe, context
def _check_found(py_exe, version_text, log_invalid=True): """Check the Python and pip version text found. Args: py_exe (str or None): Python executable path found, if any. version_text (str or None): Pip version found, if any. log_invalid (bool): Whether to log messages if found invalid. Returns: bool: Python is OK and pip version fits against ``PIP_SPECIFIER``. """ is_valid = True message = "Needs pip%s, but found '%s' for Python '%s'" if version_text is None or not py_exe: is_valid = False if log_invalid: print_debug(message, PIP_SPECIFIER, version_text, py_exe) elif PackagingVersion(version_text) not in PIP_SPECIFIER: is_valid = False if log_invalid: print_warning(message, PIP_SPECIFIER, version_text, py_exe) return is_valid
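# A minimal sketch of the check _check_found performs, assuming PIP_SPECIFIER
# is a packaging SpecifierSet (the ">=19" bound below is illustrative, not
# taken from this file).
from packaging.specifiers import SpecifierSet
from packaging.version import Version as PackagingVersion

PIP_SPECIFIER = SpecifierSet(">=19")  # assumed/illustrative bound

for version_text in ("18.1", "20.0.2"):
    ok = PackagingVersion(version_text) in PIP_SPECIFIER
    print("pip %s satisfies '%s': %s" % (version_text, PIP_SPECIFIER, ok))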
def send_email(self, subject, body): if not self.settings.recipients: return # nothing to do, sending email to nobody if not self.settings.smtp_host: print_warning("did not send release email: " "SMTP host is not specified") return recipients = self.get_recipients() if not recipients: return print("Sending release email to:") print('\n'.join("- %s" % x for x in recipients)) msg = MIMEText(body) msg["Subject"] = subject msg["From"] = self.settings.sender msg["To"] = str(',').join(recipients) try: s = smtplib.SMTP(self.settings.smtp_host, self.settings.smtp_port) s.sendmail(from_addr=self.settings.sender, to_addrs=recipients, msg=msg.as_string()) print('Email(s) sent.') except Exception as e: print_error("release email delivery failed: %s" % str(e))
def find_python(python_version=None): """Find a python executable using the given python version. Returns: 2-tuple: str: python executable; `ResolvedContext`: Context containing python, or None if we fell back to system python. """ python_exe = "python" try: context = create_context(python_version) except BuildError as e: # fall back on system python. Not ideal but at least it's something from rez.backport.shutilwhich import which python_exe = which("python") if python_exe: print_warning( "python rez package could not be found; system 'python' " "command (%s) will be used instead." % python_exe) context = None else: raise e return python_exe, context
def load_plugins(self): import pkgutil from rez.backport.importlib import import_module type_module_name = 'rezplugins.' + self.type_name package = import_module(type_module_name) # on import, the `__path__` variable of the imported package is extended # to include existing directories on the plugin search path (via # extend_path, above). this means that `walk_packages` will walk over all # modules on the search path at the same level (.e.g in a # 'rezplugins/type_name' sub-directory). paths = [package.__path__] if isinstance(package.__path__, basestring) \ else package.__path__ for path in paths: for loader, modname, ispkg in pkgutil.walk_packages( [path], package.__name__ + '.'): if loader is not None: plugin_name = modname.split('.')[-1] if plugin_name.startswith('_'): continue if config.debug("plugins"): print_debug("loading %s plugin at %s: %s..." % (self.type_name, path, modname)) try: # load_module will force reload the module if it's # already loaded, so check for that module = sys.modules.get(modname) if module is None: module = loader.find_module(modname).load_module(modname) if hasattr(module, 'register_plugin') and \ hasattr(module.register_plugin, '__call__'): plugin_class = module.register_plugin() if plugin_class != None: self.register_plugin(plugin_name, plugin_class, module) else: if config.debug("plugins"): print_warning( "'register_plugin' function at %s: %s did not return a class." % (path, modname)) else: if config.debug("plugins"): print_warning( "no 'register_plugin' function at %s: %s" % (path, modname)) # delete from sys.modules? except Exception as e: nameish = modname.split('.')[-1] self.failed_plugins[nameish] = str(e) if config.debug("plugins"): import traceback from StringIO import StringIO out = StringIO() traceback.print_exc(file=out) print_debug(out.getvalue()) # load config data, _ = _load_config_from_filepaths([os.path.join(path, "rezconfig")]) deep_update(self.config_data, data)
def _copy_variants(context, bundle_dir, force=False, skip_non_relocatable=False, verbose=False): relocated_package_names = [] repo_path = os.path.join(bundle_dir, "packages") for variant in context.resolved_packages: package = variant.parent if skip_non_relocatable and not package.is_relocatable: if verbose: print_warning("Skipped bundling of non-relocatable package %s", package.qualified_name) continue copy_package(package=package, dest_repository=repo_path, variants=[variant.index], force=force, keep_timestamp=True, verbose=verbose) relocated_package_names.append(package.name) return relocated_package_names
def send_email(self, subject, body): if not self.settings.recipients: return # nothing to do, sending email to nobody if not self.settings.smtp_host: print_warning("did not send release email: " "SMTP host is not specified") return recipients = self.get_recipients() if not recipients: return print "Sending release email to:" print '\n'.join("- %s" % x for x in recipients) msg = MIMEText(body) msg["Subject"] = subject msg["From"] = self.settings.sender msg["To"] = str(',').join(recipients) try: s = smtplib.SMTP(self.settings.smtp_host, self.settings.smtp_port) s.sendmail(from_addr=self.settings.sender, to_addrs=recipients, msg=msg.as_string()) print 'Email(s) sent.' except Exception, e: print_error("release email delivery failed: %s" % str(e))
def repo_operation(self): exc_type = (ReleaseVCSError if self.skip_repo_errors else RezUncatchableError) try: yield except exc_type as e: print_warning("THE FOLLOWING ERROR WAS SKIPPED:\n%s" % str(e))
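# repo_operation is a generator-based guard; below is a stand-alone sketch of
# the usage pattern, with a stub exception class and contextlib wiring assumed
# for illustration (they are not part of this file).
from contextlib import contextmanager


class ReleaseVCSError(Exception):
    """Stand-in for the real VCS error type."""


@contextmanager
def repo_operation(skip_repo_errors=True):
    # catch and report VCS errors instead of aborting, when skipping is enabled
    exc_type = ReleaseVCSError if skip_repo_errors else ()
    try:
        yield
    except exc_type as e:
        print("THE FOLLOWING ERROR WAS SKIPPED:\n%s" % e)


with repo_operation():
    raise ReleaseVCSError("tag already exists")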
def is_mongodb_reachable(client): try: client.server_info() except pymongo_err.ServerSelectionTimeoutError as e: print_warning(e) return False else: return True
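# A hedged usage sketch for is_mongodb_reachable; host, port and timeout are
# illustrative values, and pymongo is assumed to be available.
from pymongo import MongoClient

client = MongoClient("localhost", 27017, serverSelectionTimeoutMS=2000)
if not is_mongodb_reachable(client):
    print("mongodb is not reachable; skipping result storage")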
def create_release_hooks(names, source_path): hooks = [] for name in names: try: hook = create_release_hook(name, source_path) hooks.append(hook) except: print_warning("Release hook '%s' is not available." % name) return hooks
def find_pip(pip_version=None, python_version=None): """Find a pip exe using the given python version. Returns: 2-tuple: str: pip executable; `ResolvedContext`: Context containing pip, or None if we fell back to system pip. """ pip_exe = "pip" try: context = create_context(pip_version, python_version) except BuildError: # fall back on system pip. Not ideal but at least it's something from rez.backport.shutilwhich import which pip_exe = which("pip") if pip_exe: print_warning( "pip rez package could not be found; system 'pip' command (%s) " "will be used instead." % pip_exe) context = None else: raise # check pip version, must be >=19 to support PEP517 try: pattern = r"pip\s(?P<ver>\d+\.*\d*\.*\d*)" if "Windows" in platform.system(): # https://github.com/nerdvegas/rez/pull/659 ver_str = subprocess.check_output(pip_exe + " -V", shell=True, universal_newlines=True) else: ver_str = subprocess.check_output([pip_exe, '-V'], universal_newlines=True) match = re.search(pattern, ver_str) ver = match.group('ver') pip_major = ver.split('.')[0] if int(pip_major) < 19: raise VersionError( "pip >= 19 is required! Please update your pip.") except VersionError: raise except: # silently skip if pip version detection failed, pip itself will show # a reasonable error message at the least. pass return pip_exe, context
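# A small stand-alone illustration of the pip version parse used above; the
# sample '-V' output string is made up, but follows the usual pip format.
import re

pattern = r"pip\s(?P<ver>\d+\.*\d*\.*\d*)"
ver_str = "pip 19.2.3 from /usr/lib/python3/site-packages/pip (python 3.7)"
match = re.search(pattern, ver_str)
if match:
    ver = match.group("ver")
    print(ver)                           # 19.2.3
    print(int(ver.split(".")[0]) >= 19)  # True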
def create_release_hooks(names, source_path): hooks = [] for name in names: try: hook = create_release_hook(name, source_path) hooks.append(hook) except Exception: import traceback print_warning("Release hook '%s' is not available." % name) print_debug(traceback.format_exc()) return hooks
def create_release_tag(self, tag_name, message=None): data = self._read_stub() if "tags" not in data: data["tags"] = {} elif tag_name in data["tags"]: print_warning("Skipped tag creation, tag '%s' already exists" % tag_name) return print("Creating tag '%s'..." % tag_name) data["tags"][tag_name] = message self._write_stub(data)
def _release_variant(self, variant, release_message=None, **kwargs): release_path = self.package.config.release_packages_path # test if variant has already been released variant_ = variant.install(release_path, dry_run=True) if variant_ is not None: print_warning( "Skipping %s: destination variant already exists (%r)", self._n_of_m(variant), variant_.uri) return None def cancel_variant_install(): pkg_repo = package_repository_manager.get_repository(release_path) pkg_repo.on_variant_install_cancelled(variant.resource) if variant.index is not None: self._print_header("Releasing variant %s..." % self._n_of_m(variant)) # build and install variant try: build_result = self._build_variant_base( build_type=BuildType.central, variant=variant, install_path=release_path, clean=True, install=True) except BuildError: # indicate to repo that the variant install is cancelled cancel_variant_install() raise # run any tests that are configured to run pre-install try: self._run_tests( variant, run_on=["pre_release"], package_install_path=build_result["package_install_path"]) except PackageTestError: # delete the installed variant payload self._rmtree(build_result["variant_install_path"]) # indicate to repo that the variant install is cancelled cancel_variant_install() raise # add release info to variant, and install it into package repository release_data = self.get_release_data() release_data["release_message"] = release_message variant_ = variant.install(release_path, overrides=release_data) return variant_
def find_pip(pip_version=None, python_version=None): """Find pip. Pip is searched in the following order: 1. Search for rezified python matching python version request; 2. If found, test if pip is present; 3. If pip is present, use it; 4. If not present, search for rezified pip (this is for backwards compatibility); 5. If rezified pip is found, use it; 6. If not, fall back to rez's python installation. Args: pip_version (str or `Version`): Version of pip to use, or latest if None. python_version (str or `Version`): Python version to use, or latest if None. Returns: 2-tuple: - str: Python executable. - `ResolvedContext`: Context containing pip, or None if we fell back to system pip. """ py_exe = None context = None py_exe, pip_version, context = find_pip_from_context( python_version, pip_version=pip_version ) if not py_exe: py_exe, pip_version, context = find_pip_from_context( python_version, pip_version=pip_version or "latest" ) if not py_exe: import pip pip_version = pip.__version__ py_exe = sys.executable print_warning( "Found no pip in python and pip package; " "falling back to pip installed in rez own virtualenv (version %s)", pip_version ) pip_major = pip_version.split('.')[0] if int(pip_major) < 19: raise RezSystemError("pip >= 19 is required! Please update your pip.") return py_exe, context
def check_format_version(filename, data): format_version_ = data.pop("format_version", None) if format_version_ is not None: try: format_version_ = int(format_version_) except: return if format_version_ > format_version: print_warning( "Loading from %s may fail: newer format version (%d) than current " "format version (%d)" % (filename, format_version_, format_version))
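# Illustration of the guard above: it only warns when the stored value exceeds
# the module-level 'format_version' this code understands. The filename and
# numbers are illustrative.
format_version = 2  # assumed current format version

check_format_version("cache.json", {"format_version": "3"})
# -> warning: newer format version (3) than current format version (2)
check_format_version("cache.json", {"format_version": "1"})
# -> no warning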
def test_print(self): """Test valid msg and nargs combinations for print_*.""" for msg in ("Hello", "Hello %s", "Hello %s %s"): logging_.print_debug(msg) logging_.print_info(msg) logging_.print_warning(msg) logging_.print_error(msg) logging_.print_critical(msg) for nargs in ([], ["foo"], ["foo", "bar"]): logging_.print_debug(msg, *nargs) logging_.print_info(msg, *nargs) logging_.print_warning(msg, *nargs) logging_.print_error(msg, *nargs) logging_.print_critical(msg, *nargs)
def find_pip(pip_version=None, python_version=None): """Find pip. Pip is searched in the following order: 1. Search for rezified python matching python version request; 2. If found, test if pip is present; 3. If pip is present, use it; 4. If not present, search for rezified pip (this is for backwards compatibility); 5. If rezified pip is found, use it; 6. If not, fall back to rez's python installation. Args: pip_version (str or `Version`): Version of pip to use, or latest if None. python_version (str or `Version`): Python version to use, or latest if None. Returns: 2-tuple: - str: Python executable. - `ResolvedContext`: Context containing pip, or None if we fell back to system pip. """ py_exe = None context = None found_pip_version = None valid_found = False for version in [pip_version, "latest"]: try: py_exe, found_pip_version, context = find_pip_from_context( python_version, pip_version=version ) valid_found = _check_found(py_exe, found_pip_version) if valid_found: break except BuildError as error: print_warning(str(error)) if not valid_found: import pip found_pip_version = pip.__version__ py_exe = sys.executable print_warning("Found no pip in any python and/or pip rez packages!") print_warning("Falling back to pip installed in rez own virtualenv:") logging_arguments = ( ("pip", found_pip_version, pip.__file__), ("python", ".".join(map(str, sys.version_info[:3])), py_exe), ) for warn_args in logging_arguments: print_warning("%10s: %s (%s)", *warn_args) if not _check_found(py_exe, found_pip_version, log_invalid=False): message = "pip{specifier} is required! Please update your pip." raise RezSystemError(message.format(specifier=PIP_SPECIFIER)) return py_exe, context
def _physical_cores_base(self): if self.logical_cores == 1: # if we only have one core, we only have one core... no need to # bother with platform-specific stuff... # we do this check for all because on some platforms, the output # of various commands (dmesg, lscpu, /proc/cpuinfo) can be # very different if there's only one cpu, and don't want to have # to deal with that case return 1 cores = self._physical_cores() if cores is None: from rez.utils.logging_ import print_warning print_warning("Could not determine number of physical cores - " "falling back on logical cores value") cores = self.logical_cores return cores
def _convert_to_rex(self, commands): if isinstance(commands, list): from rez.utils.backcompat import convert_old_commands msg = "package %r is using old-style commands." % self.uri if config.disable_rez_1_compatibility or config.error_old_commands: raise SchemaError(None, msg) elif config.warn("old_commands"): print_warning(msg) commands = convert_old_commands(commands) if isinstance(commands, basestring): return SourceCode(source=commands) elif callable(commands): return SourceCode(func=commands) else: return commands
def _convert_to_rex(self, commands): if isinstance(commands, list): from rez.utils.backcompat import convert_old_commands msg = "package %r is using old-style commands." % self.uri if config.disable_rez_1_compatibility or config.error_old_commands: raise SchemaError(None, msg) elif config.warn("old_commands"): print_warning(msg) commands = convert_old_commands(commands) if isinstance(commands, basestring): return SourceCode(commands) elif callable(commands): return SourceCode.from_function(commands) else: return commands
def add_variant(pkgcache, uri, opts): from rez.packages import get_variant_from_uri from rez.utils.logging_ import print_info, print_warning from rez.package_cache import PackageCache print_info("Adding variant %r to package cache at %s:", uri, pkgcache.path) variant = get_variant_from_uri(uri) if variant is None: print("No such variant: %s" % uri, file=sys.stderr) sys.exit(1) destpath, status = pkgcache.add_variant(variant, force=opts.force) if status == PackageCache.VARIANT_FOUND: print_info("Already exists: %s", destpath) elif status == PackageCache.VARIANT_COPYING: print_warning("Another process is currently copying to: %s", destpath) else: print_info("Successfully cached to: %s", destpath)
def remove_variant(pkgcache, uri, opts): from rez.packages import get_variant_from_uri from rez.utils.logging_ import print_info, print_warning, print_error from rez.package_cache import PackageCache print_info("Removing variant %r from package cache at %s:", uri, pkgcache.path) variant = get_variant_from_uri(uri) if variant is None: print("No such variant: %s" % uri, file=sys.stderr) sys.exit(1) status = pkgcache.remove_variant(variant) if status == PackageCache.VARIANT_NOT_FOUND: print_error("No such variant found in cache") elif status == PackageCache.VARIANT_COPYING: print_warning("Another process is currently caching this variant") else: print_info("Variant successfully removed")
def packaging_req_to_rez_req(packaging_req): """Convert packaging requirement object to equivalent rez requirement. Note that environment markers are ignored. Args: packaging_req (`packaging.requirements.Requirement`): Packaging requirement. Returns: `Requirement`: Equivalent rez requirement object. """ if packaging_req.extras: print_warning("Ignoring extras requested on %r - " "this is not yet supported" % str(packaging_req)) rez_req_str = pip_to_rez_package_name(packaging_req.name) if packaging_req.specifier: range_ = pip_specifier_to_rez_requirement(packaging_req.specifier) rez_req_str += '-' + str(range_) return Requirement(rez_req_str)
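# Illustration of the packaging objects this converter consumes; the package
# name and pins are made up. Requirement and its .name/.extras/.specifier
# attributes are part of the 'packaging' library.
from packaging.requirements import Requirement as PackagingRequirement

req = PackagingRequirement("requests[security]>=2.20,<3")
print(req.name)       # requests
print(req.extras)     # {'security'}  (would trigger the warning above)
print(req.specifier)  # the combined SpecifierSet, e.g. <3,>=2.20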
def bind_cli(cls, parser, group): """ Uses a 'parse_build_args.py' file to add options, if found. """ try: with open("./parse_build_args.py") as f: source = f.read() except Exception as e: return # detect what extra args have been added before_args = set(x.dest for x in parser._actions) try: exec(source, {"parser": group}) except Exception as e: print_warning("Error in ./parse_build_args.py: %s" % str(e)) after_args = set(x.dest for x in parser._actions) extra_args = after_args - before_args # store extra args onto parser so we can get to it in self.build() setattr(parser, "_rezbuild_extra_args", list(extra_args))
def bind_cli(cls, parser): """ Uses a 'parse_build_args.py' file to add options, if found. """ try: with open("./parse_build_args.py") as f: source = f.read() except Exception as e: return # detect what extra args have been added before_args = set(x.dest for x in parser._actions) try: exec source in {"parser": parser} except Exception as e: print_warning("Error in ./parse_build_args.py: %s" % str(e)) after_args = set(x.dest for x in parser._actions) extra_args = after_args - before_args # store extra args onto parser so we can get to it in self.build() setattr(parser, "_rezbuild_extra_args", list(extra_args))
def bind(name, path, import_name=None, version_range=None, version=None, requires=None, pure_python=None, tools=None, extra_module_names=[], extra_attrs={}): import_name = import_name or name if version is None: version = get_version_in_python( name, ["import %s" % import_name, "print %s.__version__" % import_name]) check_version(version, version_range) py_major_minor = '.'.join(str(x) for x in sys.version_info[:2]) py_req = "python-%s" % py_major_minor found_tools = {} if pure_python is None: raise NotImplementedError # detect elif pure_python: variant = [py_req] else: variant = system.variant + [py_req] for tool in (tools or []): try: src = find_exe(tool) found_tools[tool] = src log("found tool '%s': %s" % (tool, src)) except RezBindError as e: print_warning(str(e)) def make_root(variant, root): pypath = make_dirs(root, "python") copy_module(import_name, pypath) if found_tools: binpath = make_dirs(root, "bin") for tool, src in sorted(found_tools.items()): dest = os.path.join(binpath, tool) shutil.copy2(src, dest) for name_ in extra_module_names: copy_module(name_, pypath) with make_package(name, path, make_root=make_root) as pkg: pkg.version = version pkg.variants = [variant] if requires: pkg.requires = requires if found_tools: pkg.tools = list(found_tools) pkg.commands = commands_with_bin else: pkg.commands = commands for key, value in extra_attrs.iteritems(): pkg[key] = value return pkg.installed_variants
def _rmtree(self, path): try: forceful_rmtree(path) except Exception as e: print_warning("Failed to delete %s - %s", path, e)
def _get_distribution_files_mapping(distribution, targetdir): """Get remapping of pip installation to rez package installation. Args: distribution (`distlib.database.InstalledDistribution`): The installed distribution targetdir (str): Where distribution was installed to (via pip --target) Returns: Dict of (str, str): * key: Path of pip installed file, relative to `targetdir`; * value: Relative path to install into rez package. """ def get_mapping(rel_src): topdir = rel_src.split(os.sep)[0] # Special case - dist-info files. These are all in a '<pkgname>-<version>.dist-info' # dir. We keep this dir and place it in the root dir of the rez package. # if topdir.endswith(".dist-info"): return (rel_src, rel_src) # Remapping of other installed files according to manifest if topdir == os.pardir: for remap in config.pip_install_remaps: path = remap['record_path'] if re.search(path, rel_src): pip_subpath = re.sub(path, remap['pip_install'], rel_src) rez_subpath = re.sub(path, remap['rez_install'], rel_src) return (pip_subpath, rez_subpath) tokenised_path = rel_src.replace(os.pardir, '{pardir}') tokenised_path = tokenised_path.replace(os.sep, '{sep}') dist_record = '{dist.name}-{dist.version}.dist-info{os.sep}RECORD' dist_record = dist_record.format(dist=distribution, os=os) try_this_message = r""" Unknown source file in {0}! '{1}' To resolve, try: 1. Manually install the pip package using 'pip install --target' to a temporary location. 2. See where '{1}' actually got installed to by pip, RELATIVE to --target location 3. Create a new rule to 'pip_install_remaps' configuration like: {{ "record_path": r"{2}", "pip_install": r"<RELATIVE path pip installed to in 2.>", "rez_install": r"<DESTINATION sub-path in rez package>", }} 4. Try rez-pip install again. If path remapping is not enough, consider submitting a new issue via https://github.com/nerdvegas/rez/issues/new """.format(dist_record, rel_src, tokenised_path) print_error(dedent(try_this_message).lstrip()) raise IOError( 89, # errno.EDESTADDRREQ : Destination address required "Don't know what to do with relative path in {0}, see " "above error message for".format(dist_record), rel_src, ) # At this point the file should be <pkg-name>/..., so we put # into 'python' subdir in rez package. # rel_dest = os.path.join("python", rel_src) return (rel_src, rel_dest) # iterate over pip installed files result = {} for installed_file in distribution.list_installed_files(): rel_src_orig = os.path.normpath(installed_file[0]) rel_src, rel_dest = get_mapping(rel_src_orig) src_filepath = os.path.join(targetdir, rel_src) if not os.path.exists(src_filepath): print_warning("Skipping non-existent source file: %s (%s)", src_filepath, rel_src_orig) continue result[rel_src] = rel_dest return result
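# A hedged example of the kind of 'pip_install_remaps' rule the error message
# above asks for; the patterns and destinations are illustrative only, not a
# known-good mapping.
pip_install_remaps = [
    {
        # regex matched (via re.search) against the RECORD path, relative to
        # the pip --target directory
        "record_path": r"\.\.[/\\]bin[/\\](.*)",
        # where pip actually installed the file, relative to --target
        "pip_install": r"bin/\1",
        # destination sub-path inside the rez package
        "rez_install": r"bin/\1",
    },
]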
def _copy_variant_payload(src_variant, dest_pkg_repo, shallow=False, follow_symlinks=False, overrides=None, verbose=False): # Get payload path of source variant. For some types (eg from a "memory" # type repo) there may not be a root. # variant_root = getattr(src_variant, "root", None) if not variant_root: raise PackageCopyError( "Cannot copy source variant %s - it is a type of variant that " "does not have a root.", src_variant.uri) if not os.path.isdir(variant_root): raise PackageCopyError( "Cannot copy source variant %s - its root does not appear to " "be present on disk (%s).", src_variant.uri, variant_root) dest_variant_name = overrides.get("name") or src_variant.name dest_variant_version = overrides.get("version") or src_variant.version # determine variant installation path dest_pkg_payload_path = dest_pkg_repo.get_package_payload_path( package_name=dest_variant_name, package_version=dest_variant_version) is_varianted = (src_variant.index is not None) src_variant_subpath = None if is_varianted: src_variant_subpath = src_variant._non_shortlinked_subpath variant_install_path = os.path.join(dest_pkg_payload_path, src_variant_subpath) else: variant_install_path = dest_pkg_payload_path # get ready for copy/symlinking copy_func = partial(replacing_copy, follow_symlinks=follow_symlinks) if shallow: maybe_symlink = replacing_symlink else: maybe_symlink = copy_func # possibly make install path temporarily writable last_dir = get_existing_path( variant_install_path, topmost_path=os.path.dirname(dest_pkg_payload_path)) if last_dir: ctxt = make_path_writable(last_dir) else: ctxt = with_noop() # copy the variant payload with ctxt: safe_makedirs(variant_install_path) # determine files not to copy skip_files = [] if is_varianted and not src_variant.parent.hashed_variants: # Detect overlapped variants. This is the case where one variant subpath # might be A, and another is A/B. We must ensure that A/B is not created # as a symlink during shallow install of variant A - that would then # cause A/B payload to be installed back into original package, possibly # corrupting it. # # Here we detect this case, and create a list of dirs not to copy/link, # because they are in fact a subpath dir for another variant. # # Note that for hashed variants, we don't do this check because overlapped # variants are not possible. # skip_files.extend(_get_overlapped_variant_dirs(src_variant)) else: # just skip package definition file for name in config.plugins.package_repository.filesystem.package_filenames: for fmt in (FileFormat.py, FileFormat.yaml): filename = name + '.' 
+ fmt.extension skip_files.append(filename) # copy/link all topmost files within the variant root for name in os.listdir(variant_root): if name in skip_files: filepath = os.path.join(variant_root, name) if verbose: if is_varianted: msg = ("Did not copy %s - this is part of an " "overlapping variant's root path.") else: msg = "Did not copy package definition file %s" print_info(msg, filepath) continue src_path = os.path.join(variant_root, name) dest_path = os.path.join(variant_install_path, name) if os.path.islink(src_path): copy_func(src_path, dest_path) else: maybe_symlink(src_path, dest_path) # copy permissions of source variant dirs onto dest src_package = src_variant.parent src_pkg_repo = src_package.repository src_pkg_payload_path = src_pkg_repo.get_package_payload_path( package_name=src_package.name, package_version=src_package.version) shutil.copystat(src_pkg_payload_path, dest_pkg_payload_path) subpath = src_variant_subpath while subpath: src_path = os.path.join(src_pkg_payload_path, subpath) dest_path = os.path.join(dest_pkg_payload_path, subpath) shutil.copystat(src_path, dest_path) subpath = os.path.dirname(subpath) # create the variant shortlink if src_variant.parent.hashed_variants: try: # base _v dir base_shortlinks_path = os.path.join( dest_pkg_payload_path, src_package.config.variant_shortlinks_dirname) safe_makedirs(base_shortlinks_path) # shortlink rel_variant_path = os.path.relpath(variant_install_path, base_shortlinks_path) create_unique_base26_symlink(base_shortlinks_path, rel_variant_path) except Exception as e: # Treat any error as warning - lack of shortlink is not # a breaking issue, it just means the variant root path # will be long. # print_warning("Error creating variant shortlink for %s: %s: %s", variant_install_path, e.__class__.__name__, e)
def _build_variant_base(self, variant, build_type, install_path=None, clean=False, install=False, **kwargs): # create build/install paths install_path = install_path or self.package.config.local_packages_path package_install_path = self.get_package_install_path(install_path) variant_build_path = self.build_path if variant.index is None: variant_install_path = package_install_path else: subpath = variant._non_shortlinked_subpath variant_build_path = os.path.join(variant_build_path, subpath) variant_install_path = os.path.join(package_install_path, subpath) # create directories (build, install) if clean and os.path.exists(variant_build_path): shutil.rmtree(variant_build_path) safe_makedirs(variant_build_path) # find last dir of installation path that exists, and possibly make it # writable during variant installation # last_dir = get_existing_path(variant_install_path, topmost_path=install_path) if last_dir: ctxt = make_path_writable(last_dir) else: ctxt = with_noop() with ctxt: if install: # inform package repo that a variant is about to be built/installed pkg_repo = package_repository_manager.get_repository(install_path) pkg_repo.pre_variant_install(variant.resource) if not os.path.exists(variant_install_path): safe_makedirs(variant_install_path) # if hashed variants are enabled, create the variant shortlink if variant.parent.hashed_variants: try: # create the dir containing all shortlinks base_shortlinks_path = os.path.join( package_install_path, variant.parent.config.variant_shortlinks_dirname ) safe_makedirs(base_shortlinks_path) # create the shortlink rel_variant_path = os.path.relpath( variant_install_path, base_shortlinks_path) create_unique_base26_symlink( base_shortlinks_path, rel_variant_path) except Exception as e: # Treat any error as warning - lack of shortlink is not # a breaking issue, it just means the variant root path # will be long. # print_warning( "Error creating variant shortlink for %s: %s: %s", variant_install_path, e.__class__.__name__, e ) # Re-evaluate the variant, so that variables such as 'building' and # 'build_variant_index' are set, and any early-bound package attribs # are re-evaluated wrt these vars. This is done so that attribs such as # 'requires' can change depending on whether a build is occurring or not. # # Note that this re-evaluated variant is ONLY used here, for the purposes # of creating the build context. The variant that is actually installed # is the one evaluated where 'building' is False. # re_evaluated_package = variant.parent.get_reevaluated({ "building": True, "build_variant_index": variant.index or 0, "build_variant_requires": variant.variant_requires }) re_evaluated_variant = re_evaluated_package.get_variant(variant.index) # create build environment context, rxt_filepath = self.create_build_context( variant=re_evaluated_variant, build_type=build_type, build_path=variant_build_path) # run build system build_system_name = self.build_system.name() self._print("\nInvoking %s build system...", build_system_name) build_result = self.build_system.build( context=context, variant=variant, build_path=variant_build_path, install_path=variant_install_path, install=install, build_type=build_type) if not build_result.get("success"): raise BuildError("The %s build system failed." % build_system_name) if install: # Install the 'variant.json' file, which identifies which variant # this is. This is important for hashed variants, where it is not # obvious which variant is in which root path. The file is there # for debugging purposes only. 
# if variant.index is not None: data = { "index": variant.index, "data": variant.parent.data["variants"][variant.index] } filepath = os.path.join(variant_install_path, "variant.json") with open(filepath, 'w') as f: json.dump(data, f, indent=2) # install some files for debugging purposes (incl build.rxt) extra_files = build_result.get("extra_files", []) if rxt_filepath: extra_files = extra_files + [rxt_filepath] for file_ in extra_files: copy_or_replace(file_, variant_install_path) # Install include modules. Note that this doesn't need to be done # multiple times, but for subsequent variants it has no effect. # self._install_include_modules(install_path) return build_result
def pip_install_package(source_name, pip_version=None, python_version=None, mode=InstallMode.min_deps, release=False, prefix=None, extra_args=None): """Install a pip-compatible python package as a rez package. Args: source_name (str): Name of package or archive/url containing the pip package source. This is the same as the arg you would pass to the 'pip install' command. pip_version (str or `Version`): Version of pip to use to perform the install, uses latest if None. python_version (str or `Version`): Python version to use to perform the install, and subsequently have the resulting rez package depend on. mode (`InstallMode`): Installation mode, determines how dependencies are managed. release (bool): If True, install as a released package; otherwise, it will be installed as a local package. extra_args (List[str]): Additional options to the pip install command. Returns: 2-tuple: List of `Variant`: Installed variants; List of `Variant`: Skipped variants (already installed). """ installed_variants = [] skipped_variants = [] py_exe, context = find_pip(pip_version, python_version) print_info("Installing %r with pip taken from %r", source_name, py_exe) # TODO: should check if packages_path is writable before continuing with pip # if prefix is not None: packages_path = prefix else: packages_path = (config.release_packages_path if release else config.local_packages_path) targetpath = mkdtemp(suffix="-rez", prefix="pip-") if context and config.debug("package_release"): buf = StringIO() print("\n\npackage download environment:", file=buf) context.print_info(buf) _log(buf.getvalue()) # Build pip commandline cmd = [py_exe, "-m", "pip", "install"] _extra_args = extra_args or config.pip_extra_args or [] if "--no-use-pep517" not in _extra_args: cmd.append("--use-pep517") if not _option_present(_extra_args, "-t", "--target"): cmd.append("--target=%s" % targetpath) if mode == InstallMode.no_deps and "--no-deps" not in _extra_args: cmd.append("--no-deps") cmd.extend(_extra_args) cmd.append(source_name) # run pip # # Note: https://github.com/pypa/pip/pull/3934. If/when this PR is merged, # it will allow explicit control of where to put bin files. 
# _cmd(context=context, command=cmd) # determine version of python in use if context is None: # since we had to use system pip, we have to assume system python version py_ver_str = '.'.join(map(str, sys.version_info)) py_ver = Version(py_ver_str) else: python_variant = context.get_resolved_package("python") py_ver = python_variant.version # Collect resulting python packages using distlib distribution_path = DistributionPath([targetpath]) distributions = list(distribution_path.get_distributions()) dist_names = [x.name for x in distributions] def log_append_pkg_variants(pkg_maker): template = '{action} [{package.qualified_name}] {package.uri}{suffix}' actions_variants = [ ( print_info, 'Installed', installed_variants, pkg_maker.installed_variants or [], ), ( print_debug, 'Skipped', skipped_variants, pkg_maker.skipped_variants or [], ), ] for print_, action, variants, pkg_variants in actions_variants: for variant in pkg_variants: variants.append(variant) package = variant.parent suffix = (' (%s)' % variant.subpath) if variant.subpath else '' print_(template.format(**locals())) # get list of package and dependencies for distribution in distributions: # convert pip requirements into rez requirements rez_requires = get_rez_requirements(installed_dist=distribution, python_version=py_ver, name_casings=dist_names) # log the pip -> rez requirements translation, for debugging _log("Pip to rez requirements translation information for " + distribution.name_and_version + ":\n" + pformat({ "pip": { "run_requires": map(str, distribution.run_requires) }, "rez": rez_requires })) # determine where pip files need to be copied into rez package src_dst_lut = _get_distribution_files_mapping(distribution, targetpath) # build tools list tools = [] for relpath in src_dst_lut.values(): dir_, filename = os.path.split(relpath) if dir_ == "bin": tools.append(filename) # Sanity warning to see if any files will be copied if not src_dst_lut: message = 'No source files exist for {}!' if not _verbose: message += '\nTry again with rez-pip --verbose ...' print_warning(message.format(distribution.name_and_version)) def make_root(variant, path): """Using distlib to iterate over all installed files of the current distribution to copy files to the target directory of the rez package variant """ for rel_src, rel_dest in src_dst_lut.items(): src = os.path.join(targetpath, rel_src) dest = os.path.join(path, rel_dest) if not os.path.exists(os.path.dirname(dest)): os.makedirs(os.path.dirname(dest)) shutil.copyfile(src, dest) if _is_exe(src): shutil.copystat(src, dest) # create the rez package name = pip_to_rez_package_name(distribution.name) version = pip_to_rez_version(distribution.version) requires = rez_requires["requires"] variant_requires = rez_requires["variant_requires"] metadata = rez_requires["metadata"] with make_package(name, packages_path, make_root=make_root) as pkg: # basics (version etc) pkg.version = version if distribution.metadata.summary: pkg.description = distribution.metadata.summary # requirements and variants if requires: pkg.requires = requires if variant_requires: pkg.variants = [variant_requires] # commands commands = [] commands.append("env.PYTHONPATH.append('{root}/python')") if tools: pkg.tools = tools commands.append("env.PATH.append('{root}/bin')") pkg.commands = '\n'.join(commands) # Make the package use hashed variants. This is required because we # can't control what ends up in its variants, and that can easily # include problematic chars (>, +, ! etc). 
# TODO: https://github.com/nerdvegas/rez/issues/672 # pkg.hashed_variants = True # add some custom attributes to retain pip-related info pkg.pip_name = distribution.name_and_version pkg.from_pip = True pkg.is_pure_python = metadata["is_pure_python"] distribution_metadata = distribution.metadata.todict() help_ = [] if "home_page" in distribution_metadata: help_.append(["Home Page", distribution_metadata["home_page"]]) if "download_url" in distribution_metadata: help_.append( ["Source Code", distribution_metadata["download_url"]]) if help_: pkg.help = help_ if "author" in distribution_metadata: author = distribution_metadata["author"] if "author_email" in distribution_metadata: author += ' ' + distribution_metadata["author_email"] pkg.authors = [author] log_append_pkg_variants(pkg) # cleanup shutil.rmtree(targetpath) # print summary # if installed_variants: print_info("%d packages were installed.", len(installed_variants)) else: print_warning("NO packages were installed.") if skipped_variants: print_warning( "%d packages were already installed.", len(skipped_variants), ) return installed_variants, skipped_variants
def make_package(name, path, make_base=None, make_root=None, skip_existing=True, warn_on_skip=True): """Make and install a package. Example: >>> def make_root(variant, path): >>> os.symlink("/foo_payload/misc/python27", "ext") >>> >>> with make_package('foo', '/packages', make_root=make_root) as pkg: >>> pkg.version = '1.0.0' >>> pkg.description = 'does foo things' >>> pkg.requires = ['python-2.7'] Args: name (str): Package name. path (str): Package repository path to install package into. make_base (callable): Function that is used to create the package payload, if applicable. make_root (callable): Function that is used to create the package variant payloads, if applicable. skip_existing (bool): If True, detect if a variant already exists, and skip with a warning message if so. warn_on_skip (bool): If True, print warning when a variant is skipped. Yields: `PackageMaker` object. Note: Both `make_base` and `make_root` are called once per variant install, and have the signature (variant, path). Note: The 'installed_variants' attribute on the `PackageMaker` instance will be appended with variant(s) created by this function, if any. """ maker = PackageMaker(name) yield maker # post-with-block: # package = maker.get_package() cwd = os.getcwd() src_variants = [] # skip those variants that already exist if skip_existing: for variant in package.iter_variants(): variant_ = variant.install(path, dry_run=True) if variant_ is None: src_variants.append(variant) else: maker.skipped_variants.append(variant_) if warn_on_skip: print_warning("Skipping installation: Package variant already " "exists: %s" % variant_.uri) else: src_variants = package.iter_variants() with retain_cwd(): # install the package variant(s) into the filesystem package repo at `path` for variant in src_variants: variant_ = variant.install(path) base = variant_.base if make_base and base: if not os.path.exists(base): os.makedirs(base) os.chdir(base) make_base(variant_, base) root = variant_.root if make_root and root: if not os.path.exists(root): os.makedirs(root) os.chdir(root) make_root(variant_, root) maker.installed_variants.append(variant_)
def repo_operation(self): exc_type = ReleaseVCSError if self.skip_repo_errors else None try: yield except exc_type as e: print_warning("THE FOLLOWING ERROR WAS SKIPPED:\n%s" % str(e))
def _copy_variant_payload(src_variant, dest_pkg_repo, shallow=False, follow_symlinks=False, overrides=None, verbose=False): # Get payload path of source variant. For some types (eg from a "memory" # type repo) there may not be a root. # variant_root = getattr(src_variant, "root", None) if not variant_root: raise PackageCopyError( "Cannot copy source variant %s - it is a type of variant that " "does not have a root.", src_variant.uri ) if not os.path.isdir(variant_root): raise PackageCopyError( "Cannot copy source variant %s - its root does not appear to " "be present on disk (%s).", src_variant.uri, variant_root ) dest_variant_name = overrides.get("name") or src_variant.name dest_variant_version = overrides.get("version") or src_variant.version # determine variant installation path dest_pkg_payload_path = dest_pkg_repo.get_package_payload_path( package_name=dest_variant_name, package_version=dest_variant_version ) is_varianted = (src_variant.index is not None) src_variant_subpath = None if is_varianted: src_variant_subpath = src_variant._non_shortlinked_subpath variant_install_path = os.path.join( dest_pkg_payload_path, src_variant_subpath) else: variant_install_path = dest_pkg_payload_path # get ready for copy/symlinking copy_func = partial(replacing_copy, follow_symlinks=follow_symlinks) if shallow: maybe_symlink = replacing_symlink else: maybe_symlink = copy_func # possibly make install path temporarily writable last_dir = get_existing_path( variant_install_path, topmost_path=os.path.dirname(dest_pkg_payload_path)) if last_dir: ctxt = make_path_writable(last_dir) else: ctxt = with_noop() # copy the variant payload with ctxt: safe_makedirs(variant_install_path) # determine files not to copy skip_files = [] if is_varianted and not src_variant.parent.hashed_variants: # Detect overlapped variants. This is the case where one variant subpath # might be A, and another is A/B. We must ensure that A/B is not created # as a symlink during shallow install of variant A - that would then # cause A/B payload to be installed back into original package, possibly # corrupting it. # # Here we detect this case, and create a list of dirs not to copy/link, # because they are in fact a subpath dir for another variant. # # Note that for hashed variants, we don't do this check because overlapped # variants are not possible. # skip_files.extend(_get_overlapped_variant_dirs(src_variant)) else: # just skip package definition file for name in config.plugins.package_repository.filesystem.package_filenames: for fmt in (FileFormat.py, FileFormat.yaml): filename = name + '.' 
+ fmt.extension skip_files.append(filename) # copy/link all topmost files within the variant root for name in os.listdir(variant_root): if name in skip_files: filepath = os.path.join(variant_root, name) if verbose: if is_varianted: msg = ("Did not copy %s - this is part of an " "overlapping variant's root path.") else: msg = "Did not copy package definition file %s" print_info(msg, filepath) continue src_path = os.path.join(variant_root, name) dest_path = os.path.join(variant_install_path, name) if os.path.islink(src_path): copy_func(src_path, dest_path) else: maybe_symlink(src_path, dest_path) # copy permissions of source variant dirs onto dest src_package = src_variant.parent src_pkg_repo = src_package.repository src_pkg_payload_path = src_pkg_repo.get_package_payload_path( package_name=src_package.name, package_version=src_package.version ) shutil.copystat(src_pkg_payload_path, dest_pkg_payload_path) subpath = src_variant_subpath while subpath: src_path = os.path.join(src_pkg_payload_path, subpath) dest_path = os.path.join(dest_pkg_payload_path, subpath) shutil.copystat(src_path, dest_path) subpath = os.path.dirname(subpath) # create the variant shortlink if src_variant.parent.hashed_variants: try: # base _v dir base_shortlinks_path = os.path.join( dest_pkg_payload_path, src_package.config.variant_shortlinks_dirname ) safe_makedirs(base_shortlinks_path) # shortlink rel_variant_path = os.path.relpath( variant_install_path, base_shortlinks_path) create_unique_base26_symlink( base_shortlinks_path, rel_variant_path) except Exception as e: # Treat any error as warning - lack of shortlink is not # a breaking issue, it just means the variant root path # will be long. # print_warning( "Error creating variant shortlink for %s: %s: %s", variant_install_path, e.__class__.__name__, e )
def _overruled_option(cls, option, overruling_option, val): if val and config.warn("shell_startup"): print_warning("%s ignored by %s shell - overruled by %s option" % (option, cls.name(), overruling_option))
def spawn_shell(self, context_file, tmpdir, rcfile=None, norc=False, stdin=False, command=None, env=None, quiet=False, pre_command=None, add_rez=True, package_commands_sourced_first=None, **Popen_args): d = self.get_startup_sequence(rcfile, norc, bool(stdin), command) envvar = d["envvar"] files = d["files"] bind_files = d["bind_files"] do_rcfile = d["do_rcfile"] shell_command = None if package_commands_sourced_first is None: package_commands_sourced_first = config.package_commands_sourced_first def _record_shell(ex, files, bind_rez=True, print_msg=False): if bind_rez and package_commands_sourced_first: ex.source(context_file) for file_ in files: if os.path.exists(os.path.expanduser(file_)): ex.source(file_) if bind_rez and not package_commands_sourced_first: ex.source(context_file) if envvar: ex.unsetenv(envvar) if add_rez and bind_rez: ex.interpreter._bind_interactive_rez() if print_msg and add_rez and not quiet: ex.info('') ex.info('You are now in a rez-configured environment.') ex.info('') if system.is_production_rez_install: ex.command('rezolve context') def _write_shell(ex, filename): code = ex.get_output() target_file = os.path.join(tmpdir, filename) with open(target_file, 'w') as f: f.write(code) return target_file def _create_ex(): return RexExecutor(interpreter=self.new_shell(), parent_environ={}, add_default_namespaces=False) executor = _create_ex() if self.settings.prompt: newprompt = '${REZ_ENV_PROMPT}%s' % self.settings.prompt executor.interpreter._saferefenv('REZ_ENV_PROMPT') executor.env.REZ_ENV_PROMPT = newprompt if d["command"] is not None: _record_shell(executor, files=files) shell_command = d["command"] else: if d["stdin"]: assert(self.stdin_arg) shell_command = "%s %s" % (self.executable, self.stdin_arg) quiet = True elif do_rcfile: assert(self.rcfile_arg) shell_command = "%s %s" % (self.executable, self.rcfile_arg) else: shell_command = self.executable if do_rcfile: # hijack rcfile to insert our own script ex = _create_ex() _record_shell(ex, files=files, print_msg=(not quiet)) filename = "rcfile.%s" % self.file_extension() filepath = _write_shell(ex, filename) shell_command += " %s" % filepath elif envvar: # hijack env-var to insert our own script ex = _create_ex() _record_shell(ex, files=files, print_msg=(not quiet)) filename = "%s.%s" % (envvar, self.file_extension()) filepath = _write_shell(ex, filename) executor.setenv(envvar, filepath) else: # hijack $HOME to insert our own script files = [x for x in files if x not in bind_files] + list(bind_files) if files: for file in files: if file in bind_files: bind_rez = True files_ = [file] if d["source_bind_files"] else [] else: bind_rez = False files_ = [file] ex = _create_ex() ex.setenv('HOME', os.environ.get('HOME', '')) _record_shell(ex, files=files_, bind_rez=bind_rez, print_msg=bind_rez) _write_shell(ex, os.path.basename(file)) executor.setenv("HOME", tmpdir) # keep history if self.histfile and self.histvar: histfile = os.path.expanduser(self.histfile) if os.path.exists(histfile): executor.setenv(self.histvar, histfile) else: if config.warn("shell_startup"): print_warning( "WARNING: Could not configure environment from " "within the target shell (%s); this has been done " "in the parent process instead." 
% self.name()) executor.source(context_file) if shell_command: # an empty string means 'run no command and exit' executor.command(shell_command) executor.command("exit %s" % self.last_command_status) code = executor.get_output() target_file = os.path.join(tmpdir, "rez-shell.%s" % self.file_extension()) with open(target_file, 'w') as f: f.write(code) if d["stdin"] and stdin and (stdin is not True): Popen_args["stdin"] = stdin cmd = [] if pre_command: if isinstance(pre_command, basestring): cmd = pre_command.strip().split() else: cmd = pre_command cmd.extend([self.executable, target_file]) try: p = subprocess.Popen(cmd, env=env, **Popen_args) except Exception as e: cmd_str = ' '.join(map(pipes.quote, cmd)) raise RezSystemError("Error running command:\n%s\n%s" % (cmd_str, str(e))) return p
def _build_variant_base(self, variant, build_type, install_path=None, clean=False, install=False, **kwargs): # create build/install paths install_path = install_path or self.package.config.local_packages_path package_install_path = self.get_package_install_path(install_path) variant_build_path = self.build_path if variant.index is None: variant_install_path = package_install_path else: subpath = variant._non_shortlinked_subpath variant_build_path = os.path.join(variant_build_path, subpath) variant_install_path = os.path.join(package_install_path, subpath) # create directories (build, install) if clean and os.path.exists(variant_build_path): self._rmtree(variant_build_path) safe_makedirs(variant_build_path) # find last dir of installation path that exists, and possibly make it # writable during variant installation # last_dir = get_existing_path(variant_install_path, topmost_path=install_path) if last_dir: ctxt = make_path_writable(last_dir) else: ctxt = with_noop() with ctxt: if install: # inform package repo that a variant is about to be built/installed pkg_repo = package_repository_manager.get_repository( install_path) pkg_repo.pre_variant_install(variant.resource) if not os.path.exists(variant_install_path): safe_makedirs(variant_install_path) # if hashed variants are enabled, create the variant shortlink if variant.parent.hashed_variants: try: # create the dir containing all shortlinks base_shortlinks_path = os.path.join( package_install_path, variant.parent.config.variant_shortlinks_dirname) safe_makedirs(base_shortlinks_path) # create the shortlink rel_variant_path = os.path.relpath( variant_install_path, base_shortlinks_path) create_unique_base26_symlink(base_shortlinks_path, rel_variant_path) except Exception as e: # Treat any error as warning - lack of shortlink is not # a breaking issue, it just means the variant root path # will be long. # print_warning( "Error creating variant shortlink for %s: %s: %s", variant_install_path, e.__class__.__name__, e) # Re-evaluate the variant, so that variables such as 'building' and # 'build_variant_index' are set, and any early-bound package attribs # are re-evaluated wrt these vars. This is done so that attribs such as # 'requires' can change depending on whether a build is occurring or not. # # Note that this re-evaluated variant is ONLY used here, for the purposes # of creating the build context. The variant that is actually installed # is the one evaluated where 'building' is False. # re_evaluated_package = variant.parent.get_reevaluated({ "building": True, "build_variant_index": variant.index or 0, "build_variant_requires": variant.variant_requires }) re_evaluated_variant = re_evaluated_package.get_variant( variant.index) # create build environment (also creates build.rxt file) context, rxt_filepath = self.create_build_context( variant=re_evaluated_variant, build_type=build_type, build_path=variant_build_path) # list of extra files (build.rxt etc) that are installed if an # installation is taking place # extra_install_files = [rxt_filepath] # create variant.json file. This identifies which variant this is. # This is important for hashed variants, where it is not obvious # which variant is in which root path. The file is there for # debugging purposes only. 
# if variant.index is not None: data = { "index": variant.index, "data": variant.parent.data["variants"][variant.index] } filepath = os.path.join(variant_build_path, "variant.json") extra_install_files.append(filepath) with open(filepath, 'w') as f: json.dump(data, f, indent=2) # run build system build_system_name = self.build_system.name() self._print("\nInvoking %s build system...", build_system_name) build_result = self.build_system.build( context=context, variant=variant, build_path=variant_build_path, install_path=variant_install_path, install=install, build_type=build_type) if not build_result.get("success"): # delete the possibly partially installed variant payload if install: self._rmtree(variant_install_path) raise BuildError("The %s build system failed." % build_system_name) if install: # add some installation details to build result build_result.update({ "package_install_path": package_install_path, "variant_install_path": variant_install_path }) # the build system can also specify extra files that need to # be installed filepaths = build_result.get("extra_files") if filepaths: extra_install_files.extend(filepaths) # install extra files for file_ in extra_install_files: copy_or_replace(file_, variant_install_path) # Install include modules. Note that this doesn't need to be done # multiple times, but for subsequent variants it has no effect. # self._install_include_modules(install_path) return build_result
def pip_to_rez_version(dist_version, allow_legacy=True): """Convert a distribution version to a rez compatible version. The python version schema specification isn't 100% compatible with rez. 1: version epochs (they make no sense to rez, so they'd just get stripped of the leading N!); 2: python versions are case insensitive, so they should probably be lowercased when converted to a rez version; 3: local versions are also not compatible with rez. The canonical public version identifiers MUST comply with the following scheme: [N!]N(.N)*[{a|b|rc}N][.postN][.devN] Epoch segment: N! - skip Release segment: N(.N)* - as is Pre-release segment: {a|b|c|rc|alpha|beta|pre|preview}N - always lowercase Post-release segment: .{post|rev|r}N - always lowercase Development release segment: .devN - always lowercase Local version identifiers MUST comply with the following scheme: <public version identifier>[+<local version label>] - use - instead of + Args: dist_version (str): The distribution version to be converted. allow_legacy (bool): Flag to allow/disallow PEP440 incompatibility. Returns: str: Rez-compatible equivalent version string. Raises: InvalidVersion: When legacy mode is not allowed and a PEP440 incompatible version is detected. .. _PEP 440 (all possible matches): https://www.python.org/dev/peps/pep-0440/#appendix-b-parsing-version-strings-with-regular-expressions .. _Core utilities for Python packages: https://packaging.pypa.io/en/latest/version/ """ pkg_version = parse(dist_version) if isinstance(pkg_version, LegacyVersion): if allow_legacy: print_warning( "Invalid PEP440 version detected: %s. Falling back to legacy mode.", pkg_version) # this will always be the entire version string return pkg_version.base_version.lower() else: raise InvalidVersion( "Version: {} is not compatible with PEP440.".format( dist_version)) rez_version = "" if pkg_version.release: # the components of the release segment excluding epoch or any # prerelease/development/postrelease suffixes rez_version += '.'.join(str(i) for i in pkg_version.release) if pkg_version.is_prerelease and pkg_version.pre: # additional check is necessary because dev releases are also considered prereleases # pair of the prerelease phase (the string "a", "b", or "rc") and the prerelease number # the following conversions (-->) take place: # a --> a, alpha --> a, b --> b, beta --> b, c --> c, rc --> rc, pre --> rc, preview --> rc phase, number = pkg_version.pre rez_version += phase + str(number) if pkg_version.is_postrelease: # this attribute will be the postrelease number (an integer) # the following conversions (-->) take place: # post --> post, rev --> post, r --> post rez_version += ".post" + str(pkg_version.post) if pkg_version.is_devrelease: # this attribute will be the development release number (an integer) rez_version += ".dev" + str(pkg_version.dev) if pkg_version.local: # representation of the local version portion, if any # the following conversions (-->) take place: # 1.0[+ubuntu-1] --> 1.0[-ubuntu.1] rez_version += "-" + pkg_version.local return rez_version
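# Example conversions produced by pip_to_rez_version, following the rules in
# the code above (the input versions are illustrative PEP 440 strings).
for v in ("1.0.0", "2.0b1", "1.2.post1", "1.0.dev4", "1.0+ubuntu-1"):
    print("%-12s -> %s" % (v, pip_to_rez_version(v)))
# 1.0.0        -> 1.0.0
# 2.0b1        -> 2.0b1
# 1.2.post1    -> 1.2.post1
# 1.0.dev4     -> 1.0.dev4
# 1.0+ubuntu-1 -> 1.0-ubuntu.1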
def _unsupported_option(cls, option, val): if val and config.warn("shell_startup"): print_warning("%s ignored, not supported by %s shell" % (option, cls.name()))