Example No. 1
def _copy_package_include_modules(src_package, dest_pkg_repo, overrides=None):
    src_include_modules_path = \
        os.path.join(src_package.base, IncludeModuleManager.include_modules_subpath)

    if not os.path.exists(src_include_modules_path):
        return

    dest_package_name = overrides.get("name") or src_package.name
    dest_package_version = overrides.get("version") or src_package.version

    pkg_install_path = dest_pkg_repo.get_package_payload_path(
        package_name=dest_package_name, package_version=dest_package_version)

    dest_include_modules_path = \
        os.path.join(pkg_install_path, IncludeModuleManager.include_modules_subpath)

    last_dir = get_existing_path(
        dest_include_modules_path,
        topmost_path=os.path.dirname(pkg_install_path))

    if last_dir:
        ctxt = make_path_writable(last_dir)
    else:
        ctxt = with_noop()

    with ctxt:
        safe_makedirs(dest_include_modules_path)
        additive_copytree(src_include_modules_path, dest_include_modules_path)
Example No. 2
def _copy_package_include_modules(src_package, dest_pkg_repo, overrides=None):
    src_include_modules_path = \
        os.path.join(src_package.base, IncludeModuleManager.include_modules_subpath)

    if not os.path.exists(src_include_modules_path):
        return

    dest_package_name = overrides.get("name") or src_package.name
    dest_package_version = overrides.get("version") or src_package.version

    pkg_install_path = dest_pkg_repo.get_package_payload_path(
        package_name=dest_package_name,
        package_version=dest_package_version
    )

    dest_include_modules_path = \
        os.path.join(pkg_install_path, IncludeModuleManager.include_modules_subpath)

    last_dir = get_existing_path(dest_include_modules_path,
                                 topmost_path=os.path.dirname(pkg_install_path))

    if last_dir:
        ctxt = make_path_writable(last_dir)
    else:
        ctxt = with_noop()

    with ctxt:
        safe_makedirs(dest_include_modules_path)
        additive_copytree(src_include_modules_path, dest_include_modules_path)
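
Both copies of _copy_package_include_modules above lean on the same pattern: find the deepest directory of the destination path that already exists, temporarily make it writable, then create and populate the destination inside that context. Below is a minimal, self-contained sketch of that pattern using only the standard library; make_writable and deepest_existing_dir are simplified stand-ins for rez's make_path_writable, with_noop and get_existing_path, not their actual implementations.

import contextlib
import os
import shutil
import stat


@contextlib.contextmanager
def make_writable(path):
    # Simplified stand-in: add user-write permission, restore the old mode on exit.
    mode = os.stat(path).st_mode
    os.chmod(path, mode | stat.S_IWUSR)
    try:
        yield
    finally:
        os.chmod(path, mode)


def deepest_existing_dir(path, topmost_path):
    # Walk upwards until an existing directory is found, stopping at `topmost_path`.
    while path and path != topmost_path:
        if os.path.isdir(path):
            return path
        parent = os.path.dirname(path)
        if parent == path:  # reached the filesystem root
            break
        path = parent
    return None


def copy_into_possibly_readonly_tree(src_dir, dest_dir, topmost_path):
    last_dir = deepest_existing_dir(dest_dir, topmost_path)
    ctxt = make_writable(last_dir) if last_dir else contextlib.nullcontext()

    with ctxt:
        os.makedirs(dest_dir, exist_ok=True)
        # dirs_exist_ok requires Python 3.8+; rez uses its own additive_copytree instead.
        shutil.copytree(src_dir, dest_dir, dirs_exist_ok=True)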
Example No. 3
def _remove_deprecated_packages(package, namespaces, deprecate):
    """Remove Rez package requirements from a Rez package, if needed.

    If the Python imports defined in `deprecate` are no longer present
    in `namespaces`, then `package` no longer depends on the packages
    listed in `deprecate`, and those requirements can safely be removed.

    Args:
        package (:class:`rez.packages_.DeveloperPackage`): Some Rez package
            whose requirements may change as a result
            of this function being run.
        namespaces (iter[str]): The Python dot-separated namespaces that a Rez package uses.
            In short, these are all of the import statements that a
            Rez package has inside of it and can be thought of as its
            "dependencies". The function uses this list to figure out
            whether the namespaces in `deprecate` are actually still being imported.
        deprecate (iter[tuple[:class:`rez.vendor.version.requirement.Requirement`, tuple[str]]]):
            Each Rez package that is (assumed to already be) a dependency of `package`
            and the Python import namespaces that the package takes up.

            If any of those namespaces is still present in `namespaces`,
            then `package` still depends on that Rez package and it
            cannot be safely removed (it is still a dependency). If none
            are present, the requirement is removed.

    """
    packages_to_remove = set()

    for package_, package_namespaces in deprecate:
        if _is_package_needed(package_namespaces, namespaces):
            continue

        packages_to_remove.add(package_.name)

    if not packages_to_remove:
        # Nothing to do so exit early.
        return

    with open(package.filepath, "r") as handler:
        code = handler.read()

    new_code = api.remove_from_attribute("requires", list(packages_to_remove),
                                         code)

    with filesystem.make_path_writable(
            os.path.dirname(os.path.dirname(package.filepath))):
        with serialise.open_file_for_write(package.filepath) as handler:
            handler.write(new_code)
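
The check that drives the loop above, _is_package_needed, is not shown on this page. Conceptually it only has to answer one question: is any namespace that the requirement provides still imported somewhere? A hypothetical sketch of that check, assuming prefix matching on dotted namespaces:

def _is_package_needed(package_namespaces, used_namespaces):
    # A namespace counts as "used" if an import matches it exactly or lives
    # underneath it (e.g. "schema.Schema" is covered by a package providing "schema").
    for provided in package_namespaces:
        for used in used_namespaces:
            if used == provided or used.startswith(provided + "."):
                return True
    return False


# A package providing "schema" is still needed by code importing "schema.Schema" ...
assert _is_package_needed(("schema",), ["schema.Schema", "os.path"])
# ... but a package providing "six" is not needed if nothing imports it anymore.
assert not _is_package_needed(("six",), ["os.path"])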
Example No. 4
def _bump(package, increment, new_dependencies):
    rez_bump_api.bump(package, **{increment: 1})

    with open(package.filepath, "r") as handler:
        code = handler.read()

    new_code = api.add_to_attribute("requires", new_dependencies, code)

    with filesystem.make_path_writable(
            os.path.dirname(os.path.dirname(package.filepath))):
        with serialise.open_file_for_write(package.filepath) as handler:
            handler.write(new_code)

    root = finder.get_package_root(package)

    return finder.get_nearest_rez_package(root)
Example No. 5
File: elf.py  Project: wwfxuk/rez
def patch_rpaths(elfpath, rpaths):
    """Replace an elf's rpath header with those provided.
    """

    # this is a hack to get around https://github.com/nerdvegas/rez/issues/1074
    # I actually hit a case where patchelf was installed as a rez suite tool,
    # causing '$ORIGIN' to be expanded early (to empty string).
    # TODO remove this hack when bug is fixed
    #
    env = os.environ.copy()
    env["ORIGIN"] = "$ORIGIN"

    with make_path_writable(elfpath):
        if rpaths:
            _run("patchelf", "--set-rpath", ':'.join(rpaths), elfpath, env=env)
        else:
            _run("patchelf", "--remove-rpath")
Example No. 6
    def _run_command_on_package(package):
        """Run the user-provided command on the given Rez package.

        Args:
            package (:class:`rez.developer_package.DeveloperPackage`):
                The Rez package, which is expected to be defined by a
                package.yaml file that will be converted to package.py.

        Raises:
            :class:`.InvalidPackage`:
                If `package` is not a package.yaml file.

        Returns:
            str: An error message if one occurred, otherwise an empty string.

        """
        if not package.filepath.endswith(".yaml"):
            raise exceptions.InvalidPackage(
                package,
                finder.get_package_root(package),
                'Package "{package}" is not a YAML file.'.format(
                    package=package),
            )

        buffer_ = io.StringIO()
        package.print_info(format_=serialise.FileFormat.py, buf=buffer_)
        code = buffer_.getvalue()

        path = os.path.join(finder.get_package_root(package), "package.py")
        _LOGGER.info('Now creating "%s".', path)

        # Writing to DeveloperPackage objects is currently bugged.
        # So instead, we disable caching during write.
        #
        # Reference: https://github.com/nerdvegas/rez/issues/857
        #
        with filesystem.make_path_writable(
                os.path.dirname(os.path.dirname(path))):
            with serialise.open_file_for_write(path) as handler:
                handler.write(code)

        _LOGGER.info('Now removing the old "%s".', package.filepath)

        os.remove(package.filepath)

        return ""
Example No. 7
def _write_package_to_disk(package, version):
    """Update a Rez package on-disk with its new contents.

    Args:
        package (str): The absolute path of the Rez package definition
            file (package.py) on-disk to write out.
        version (str): The new semantic version that will be written to disk.

    """
    with open(package, "r") as handler:
        code = handler.read()

    graph = parso.parse(code)

    try:
        assignment = parso_utility.find_assignment_nodes("version", graph)[-1]
    except IndexError:
        assignment = None

    prefix = " "

    if assignment:
        prefix = assignment.children[0].prefix.strip("\n") or prefix

    node = tree.String('"{version}"'.format(version=version), (0, 0),
                       prefix=prefix)
    graph = convention.insert_or_append(node, graph, assignment, "version")

    # Writing to DeveloperPackage objects is currently bugged.
    # So instead, we disable caching during write.
    #
    # Reference: https://github.com/nerdvegas/rez/issues/857
    #
    with filesystem.make_path_writable(
            os.path.dirname(os.path.dirname(package))):
        with serialise.open_file_for_write(package) as handler:
            handler.write(graph.get_code())
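
The reason parso is used here rather than ast is that it builds a lossless tree: whitespace, comments and quoting all survive a round trip, so only the version assignment changes when the file is rewritten. The parso_utility, convention and tree helpers above belong to this project rather than to parso itself; a tiny self-contained illustration of the round-trip property parso provides:

import parso

code = 'name = "my_package"\n\nversion = "1.0.0"  # keep this comment\n'
module = parso.parse(code)

# The tree reproduces the source exactly - blank lines and comments included.
assert module.get_code() == code

# Top-level assignments appear as expr_stmt nodes; their first child is the target name.
for node in module.children:
    if node.type == "simple_stmt" and node.children[0].type == "expr_stmt":
        print(node.children[0].children[0].value)  # prints "name", then "version"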
Example No. 8
def _copy_variant_payload(src_variant,
                          dest_pkg_repo,
                          shallow=False,
                          follow_symlinks=False,
                          overrides=None,
                          verbose=False):
    # Get payload path of source variant. For some types (eg from a "memory"
    # type repo) there may not be a root.
    #
    variant_root = getattr(src_variant, "root", None)
    if not variant_root:
        raise PackageCopyError(
            "Cannot copy source variant %s - it is a type of variant that "
            "does not have a root.", src_variant.uri)

    if not os.path.isdir(variant_root):
        raise PackageCopyError(
            "Cannot copy source variant %s - its root does not appear to "
            "be present on disk (%s).", src_variant.uri, variant_root)

    dest_variant_name = overrides.get("name") or src_variant.name
    dest_variant_version = overrides.get("version") or src_variant.version

    # determine variant installation path
    dest_pkg_payload_path = dest_pkg_repo.get_package_payload_path(
        package_name=dest_variant_name, package_version=dest_variant_version)

    if src_variant.subpath:
        variant_install_path = os.path.join(dest_pkg_payload_path,
                                            src_variant.subpath)
    else:
        variant_install_path = dest_pkg_payload_path

    # get ready for copy/symlinking
    copy_func = partial(replacing_copy, follow_symlinks=follow_symlinks)

    if shallow:
        maybe_symlink = replacing_symlink
    else:
        maybe_symlink = copy_func

    # possibly make install path temporarily writable
    last_dir = get_existing_path(
        variant_install_path,
        topmost_path=os.path.dirname(dest_pkg_payload_path))

    if last_dir:
        ctxt = make_path_writable(last_dir)
    else:
        ctxt = with_noop()

    # copy the variant payload
    with ctxt:
        safe_makedirs(variant_install_path)

        # determine files not to copy
        skip_files = []

        if src_variant.subpath:
            # Detect overlapped variants. This is the case where one variant subpath
            # might be A, and another is A/B. We must ensure that A/B is not created
            # as a symlink during shallow install of variant A - that would then
            # cause A/B payload to be installed back into original package, possibly
            # corrupting it.
            #
            # Here we detect this case, and create a list of dirs not to copy/link,
            # because they are in fact a subpath dir for another variant.
            #
            skip_files.extend(_get_overlapped_variant_dirs(src_variant))
        else:
            # just skip package definition file
            for name in config.plugins.package_repository.filesystem.package_filenames:
                for fmt in (FileFormat.py, FileFormat.yaml):
                    filename = name + '.' + fmt.extension
                    skip_files.append(filename)

        # copy/link all topmost files within the variant root
        for name in os.listdir(variant_root):
            if name in skip_files:
                filepath = os.path.join(variant_root, name)

                if verbose:
                    if src_variant.subpath:
                        msg = ("Did not copy %s - this is part of an "
                               "overlapping variant's root path.")
                    else:
                        msg = "Did not copy package definition file %s"

                    print_info(msg, filepath)

                continue

            src_path = os.path.join(variant_root, name)
            dest_path = os.path.join(variant_install_path, name)

            if os.path.islink(src_path):
                copy_func(src_path, dest_path)
            else:
                maybe_symlink(src_path, dest_path)

    # copy permissions of source variant dirs onto dest
    src_package = src_variant.parent
    src_pkg_repo = src_package.repository

    src_pkg_payload_path = src_pkg_repo.get_package_payload_path(
        package_name=src_package.name, package_version=src_package.version)

    shutil.copystat(src_pkg_payload_path, dest_pkg_payload_path)

    subpath = src_variant.subpath
    while subpath:
        src_path = os.path.join(src_pkg_payload_path, subpath)
        dest_path = os.path.join(dest_pkg_payload_path, subpath)
        shutil.copystat(src_path, dest_path)
        subpath = os.path.dirname(subpath)
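
_get_overlapped_variant_dirs, referenced in the comment about overlapped variants, is not shown on this page. The situation it guards against is one variant subpath (say python-2.7) being a prefix of another (python-2.7/maya-2019); the directory belonging to the longer variant must not be copied or symlinked while installing the shorter one. A hedged sketch of the kind of check involved - the real helper derives the other subpaths from the source package's variants rather than taking them as an argument:

def overlapped_variant_dirs(this_subpath, other_subpaths):
    # Return the immediate child dirs of `this_subpath` that begin another variant's subpath.
    dirs = set()
    prefix = this_subpath.rstrip("/") + "/"

    for other in other_subpaths:
        if other != this_subpath and other.startswith(prefix):
            # First path component below this variant's root, e.g. "maya-2019".
            dirs.add(other[len(prefix):].split("/")[0])

    return sorted(dirs)


# Variant "python-2.7" must skip "maya-2019", which belongs to the
# overlapping variant "python-2.7/maya-2019".
print(overlapped_variant_dirs("python-2.7", ["python-2.7/maya-2019", "python-3.7"]))  # ['maya-2019']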
Example No. 9
    def _build_variant_base(self,
                            variant,
                            build_type,
                            install_path=None,
                            clean=False,
                            install=False,
                            **kwargs):
        # create build/install paths
        install_path = install_path or self.package.config.local_packages_path
        package_install_path = self.get_package_install_path(install_path)
        variant_build_path = self.build_path

        if variant.index is None:
            variant_install_path = package_install_path
        else:
            subpath = variant._non_shortlinked_subpath
            variant_build_path = os.path.join(variant_build_path, subpath)
            variant_install_path = os.path.join(package_install_path, subpath)

        # create directories (build, install)
        if clean and os.path.exists(variant_build_path):
            self._rmtree(variant_build_path)

        safe_makedirs(variant_build_path)

        # find last dir of installation path that exists, and possibly make it
        # writable during variant installation
        #
        last_dir = get_existing_path(variant_install_path,
                                     topmost_path=install_path)
        if last_dir:
            ctxt = make_path_writable(last_dir)
        else:
            ctxt = with_noop()

        with ctxt:
            if install:
                # inform package repo that a variant is about to be built/installed
                pkg_repo = package_repository_manager.get_repository(
                    install_path)
                pkg_repo.pre_variant_install(variant.resource)

                if not os.path.exists(variant_install_path):
                    safe_makedirs(variant_install_path)

                # if hashed variants are enabled, create the variant shortlink
                if variant.parent.hashed_variants:
                    try:
                        # create the dir containing all shortlinks
                        base_shortlinks_path = os.path.join(
                            package_install_path,
                            variant.parent.config.variant_shortlinks_dirname)

                        safe_makedirs(base_shortlinks_path)

                        # create the shortlink
                        rel_variant_path = os.path.relpath(
                            variant_install_path, base_shortlinks_path)
                        create_unique_base26_symlink(base_shortlinks_path,
                                                     rel_variant_path)

                    except Exception as e:
                        # Treat any error as warning - lack of shortlink is not
                        # a breaking issue, it just means the variant root path
                        # will be long.
                        #
                        print_warning(
                            "Error creating variant shortlink for %s: %s: %s",
                            variant_install_path, e.__class__.__name__, e)

            # Re-evaluate the variant, so that variables such as 'building' and
            # 'build_variant_index' are set, and any early-bound package attribs
            # are re-evaluated wrt these vars. This is done so that attribs such as
            # 'requires' can change depending on whether a build is occurring or not.
            #
            # Note that this re-evaluated variant is ONLY used here, for the purposes
            # of creating the build context. The variant that is actually installed
            # is the one evaluated where 'building' is False.
            #
            re_evaluated_package = variant.parent.get_reevaluated({
                "building": True,
                "build_variant_index": variant.index or 0,
                "build_variant_requires": variant.variant_requires
            })
            re_evaluated_variant = re_evaluated_package.get_variant(
                variant.index)

            # create build environment (also creates build.rxt file)
            context, rxt_filepath = self.create_build_context(
                variant=re_evaluated_variant,
                build_type=build_type,
                build_path=variant_build_path)

            # list of extra files (build.rxt etc) that are installed if an
            # installation is taking place
            #
            extra_install_files = [rxt_filepath]

            # create variant.json file. This identifies which variant this is.
            # This is important for hashed variants, where it is not obvious
            # which variant is in which root path. The file is there for
            # debugging purposes only.
            #
            if variant.index is not None:
                data = {
                    "index": variant.index,
                    "data": variant.parent.data["variants"][variant.index]
                }

                filepath = os.path.join(variant_build_path, "variant.json")
                extra_install_files.append(filepath)

                with open(filepath, 'w') as f:
                    json.dump(data, f, indent=2)

            # run build system
            build_system_name = self.build_system.name()
            self._print("\nInvoking %s build system...", build_system_name)

            build_result = self.build_system.build(
                context=context,
                variant=variant,
                build_path=variant_build_path,
                install_path=variant_install_path,
                install=install,
                build_type=build_type)

            if not build_result.get("success"):
                # delete the possibly partially installed variant payload
                if install:
                    self._rmtree(variant_install_path)

                raise BuildError("The %s build system failed." %
                                 build_system_name)

            if install:
                # add some installation details to build result
                build_result.update({
                    "package_install_path": package_install_path,
                    "variant_install_path": variant_install_path
                })

                # the build system can also specify extra files that need to
                # be installed
                filepaths = build_result.get("extra_files")
                if filepaths:
                    extra_install_files.extend(filepaths)

                # install extra files
                for file_ in extra_install_files:
                    copy_or_replace(file_, variant_install_path)

                # Install include modules. Note that this doesn't need to be done
                # multiple times, but for subsequent variants it has no effect.
                #
                self._install_include_modules(install_path)

            return build_result
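
create_unique_base26_symlink, used for the variant shortlinks above, is a rez filesystem utility that is not reproduced on this page. The idea is simply to pick the next unused name over the alphabet a-z (then aa, ab, ...) inside the shortlinks directory and point a symlink at the long hashed variant path. A rough sketch under that assumption:

import itertools
import os
import string


def next_base26_name(existing_names):
    # Yield "a" ... "z", "aa", "ab", ... and return the first name not already taken.
    for length in itertools.count(1):
        for letters in itertools.product(string.ascii_lowercase, repeat=length):
            name = "".join(letters)
            if name not in existing_names:
                return name


def create_base26_symlink(shortlinks_dir, rel_target):
    # Create a short symlink (e.g. "_v/a") pointing at a long hashed variant dir.
    name = next_base26_name(set(os.listdir(shortlinks_dir)))
    os.symlink(rel_target, os.path.join(shortlinks_dir, name))
    return name


print(next_base26_name({"a", "b"}))  # -> "c"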
Example No. 10
    def _build_variant_base(self, variant, build_type, install_path=None,
                            clean=False, install=False, **kwargs):
        # create build/install paths
        install_path = install_path or self.package.config.local_packages_path
        package_install_path = self.get_package_install_path(install_path)
        variant_build_path = self.build_path

        if variant.index is None:
            variant_install_path = package_install_path
        else:
            subpath = variant._non_shortlinked_subpath
            variant_build_path = os.path.join(variant_build_path, subpath)
            variant_install_path = os.path.join(package_install_path, subpath)

        # create directories (build, install)
        if clean and os.path.exists(variant_build_path):
            shutil.rmtree(variant_build_path)

        safe_makedirs(variant_build_path)

        # find last dir of installation path that exists, and possibly make it
        # writable during variant installation
        #
        last_dir = get_existing_path(variant_install_path,
                                     topmost_path=install_path)
        if last_dir:
            ctxt = make_path_writable(last_dir)
        else:
            ctxt = with_noop()

        with ctxt:
            if install:
                # inform package repo that a variant is about to be built/installed
                pkg_repo = package_repository_manager.get_repository(install_path)
                pkg_repo.pre_variant_install(variant.resource)

                if not os.path.exists(variant_install_path):
                    safe_makedirs(variant_install_path)

                # if hashed variants are enabled, create the variant shortlink
                if variant.parent.hashed_variants:
                    try:
                        # create the dir containing all shortlinks
                        base_shortlinks_path = os.path.join(
                            package_install_path,
                            variant.parent.config.variant_shortlinks_dirname
                        )

                        safe_makedirs(base_shortlinks_path)

                        # create the shortlink
                        rel_variant_path = os.path.relpath(
                            variant_install_path, base_shortlinks_path)
                        create_unique_base26_symlink(
                            base_shortlinks_path, rel_variant_path)

                    except Exception as e:
                        # Treat any error as warning - lack of shortlink is not
                        # a breaking issue, it just means the variant root path
                        # will be long.
                        #
                        print_warning(
                            "Error creating variant shortlink for %s: %s: %s",
                            variant_install_path, e.__class__.__name__, e
                        )

            # Re-evaluate the variant, so that variables such as 'building' and
            # 'build_variant_index' are set, and any early-bound package attribs
            # are re-evaluated wrt these vars. This is done so that attribs such as
            # 'requires' can change depending on whether a build is occurring or not.
            #
            # Note that this re-evaluated variant is ONLY used here, for the purposes
            # of creating the build context. The variant that is actually installed
            # is the one evaluated where 'building' is False.
            #
            re_evaluated_package = variant.parent.get_reevaluated({
                "building": True,
                "build_variant_index": variant.index or 0,
                "build_variant_requires": variant.variant_requires
            })
            re_evaluated_variant = re_evaluated_package.get_variant(variant.index)

            # create build environment
            context, rxt_filepath = self.create_build_context(
                variant=re_evaluated_variant,
                build_type=build_type,
                build_path=variant_build_path)

            # run build system
            build_system_name = self.build_system.name()
            self._print("\nInvoking %s build system...", build_system_name)

            build_result = self.build_system.build(
                context=context,
                variant=variant,
                build_path=variant_build_path,
                install_path=variant_install_path,
                install=install,
                build_type=build_type)

            if not build_result.get("success"):
                raise BuildError("The %s build system failed." % build_system_name)

            if install:
                # Install the 'variant.json' file, which identifies which variant
                # this is. This is important for hashed variants, where it is not
                # obvious which variant is in which root path. The file is there
                # for debugging purposes only.
                #
                if variant.index is not None:
                    data = {
                        "index": variant.index,
                        "data": variant.parent.data["variants"][variant.index]
                    }

                    filepath = os.path.join(variant_install_path, "variant.json")
                    with open(filepath, 'w') as f:
                        json.dump(data, f, indent=2)

                # install some files for debugging purposes (incl build.rxt)
                extra_files = build_result.get("extra_files", [])
                if rxt_filepath:
                    extra_files = extra_files + [rxt_filepath]

                for file_ in extra_files:
                    copy_or_replace(file_, variant_install_path)

                # Install include modules. Note that this doesn't need to be done
                # multiple times, but for subsequent variants it has no effect.
                #
                self._install_include_modules(install_path)

            return build_result
Example No. 11
    def _create_variant(self, variant, dry_run=False, overrides=None):
        # special case overrides
        variant_name = overrides.get("name") or variant.name
        variant_version = overrides.get("version") or variant.version

        overrides = (overrides or {}).copy()
        overrides.pop("name", None)
        overrides.pop("version", None)

        # find or create the package family
        family = self.get_package_family(variant_name)
        if not family:
            family = self._create_family(variant_name)

        if isinstance(family, FileSystemCombinedPackageFamilyResource):
            raise NotImplementedError(
                "Cannot install variant into combined-style package file %r."
                % family.filepath)

        # find the package if it already exists
        existing_package = None

        for package in self.iter_packages(family):
            if package.version == variant_version:
                # during a build, the family/version dirs get created ahead of
                # time, which causes a 'missing package definition file' error.
                # This is fine, we can just ignore it and write the new file.
                try:
                    package.validate_data()
                except PackageDefinitionFileMissing:
                    break

                uuids = set([variant.uuid, package.uuid])
                if len(uuids) > 1 and None not in uuids:
                    raise ResourceError(
                        "Cannot install variant %r into package %r - the "
                        "packages are not the same (UUID mismatch)"
                        % (variant, package))

                existing_package = package

                if variant.index is None:
                    if package.variants:
                        raise ResourceError(
                            "Attempting to install a package without variants "
                            "(%r) into an existing package with variants (%r)"
                            % (variant, package))
                elif not package.variants:
                    raise ResourceError(
                        "Attempting to install a variant (%r) into an existing "
                        "package without variants (%r)" % (variant, package))

        existing_package_data = None
        release_data = {}

        # Need to treat 'config' as special case. In validated data, this is
        # converted to a Config object. We need it as the raw dict that you'd
        # see in a package.py.
        #
        def _get_package_data(pkg):
            data = pkg.validated_data()
            if hasattr(pkg, "_data"):
                raw_data = pkg._data
            else:
                raw_data = pkg.resource._data

            raw_config_data = raw_data.get('config')
            data.pop("config", None)

            if raw_config_data:
                data["config"] = raw_config_data

            return data

        def _remove_build_keys(obj):
            for key in package_build_only_keys:
                obj.pop(key, None)

        new_package_data = _get_package_data(variant.parent)
        new_package_data.pop("variants", None)
        new_package_data["name"] = variant_name
        if variant_version:
            new_package_data["version"] = variant_version
        package_changed = False

        _remove_build_keys(new_package_data)

        if existing_package:
            debug_print(
                "Found existing package for installation of variant %s: %s",
                variant.uri, existing_package.uri
            )

            existing_package_data = _get_package_data(existing_package)
            _remove_build_keys(existing_package_data)

            # detect case where new variant introduces package changes outside of variant
            data_1 = existing_package_data.copy()
            data_2 = new_package_data.copy()

            for key in package_release_keys:
                data_2.pop(key, None)
                value = data_1.pop(key, None)
                if value is not None:
                    release_data[key] = value

            for key in ("format_version", "base", "variants"):
                data_1.pop(key, None)
                data_2.pop(key, None)

            package_changed = (data_1 != data_2)

            if debug_print:
                if package_changed:
                    from rez.utils.data_utils import get_dict_diff_str

                    debug_print("Variant %s package data differs from package %s",
                                variant.uri, existing_package.uri)

                    txt = get_dict_diff_str(data_1, data_2, "Changes:")
                    debug_print(txt)
                else:
                    debug_print("Variant %s package data matches package %s",
                                variant.uri, existing_package.uri)

        # check for existing installed variant
        existing_installed_variant = None
        installed_variant_index = None

        if existing_package:
            if variant.index is None:
                existing_installed_variant = \
                    self.iter_variants(existing_package).next()
            else:
                variant_requires = variant.variant_requires

                for variant_ in self.iter_variants(existing_package):
                    variant_requires_ = existing_package.variants[variant_.index]
                    if variant_requires_ == variant_requires:
                        installed_variant_index = variant_.index
                        existing_installed_variant = variant_

        if existing_installed_variant:
            debug_print(
                "Variant %s already has installed equivalent: %s",
                variant.uri, existing_installed_variant.uri
            )

        if dry_run:
            if not package_changed:
                return existing_installed_variant
            else:
                return None

        # construct package data for new installed package definition
        if existing_package:
            _, file_ = os.path.split(existing_package.filepath)
            package_filename, package_extension = os.path.splitext(file_)
            package_extension = package_extension[1:]
            package_format = FileFormat[package_extension]

            if package_changed:
                # graft together new package data, with existing package variants,
                # and other data that needs to stay unchanged (eg timestamp)
                package_data = new_package_data

                if variant.index is not None:
                    package_data["variants"] = existing_package_data.get("variants", [])
            else:
                package_data = existing_package_data
        else:
            package_data = new_package_data
            package_filename = _settings.package_filenames[0]
            package_extension = "py"
            package_format = FileFormat.py

        # merge existing release data (if any) into the package. Note that when
        # this data becomes variant-specific, this step will no longer be needed
        package_data.update(release_data)

        # merge the new variant into the package
        if installed_variant_index is None and variant.index is not None:
            variant_requires = variant.variant_requires
            if not package_data.get("variants"):
                package_data["variants"] = []
            package_data["variants"].append(variant_requires)
            installed_variant_index = len(package_data["variants"]) - 1

        # a little data massaging is needed
        package_data.pop("base", None)

        # create version dir if it doesn't already exist
        family_path = os.path.join(self.location, variant_name)
        if variant_version:
            pkg_base_path = os.path.join(family_path, str(variant_version))
        else:
            pkg_base_path = family_path
        if not os.path.exists(pkg_base_path):
            os.makedirs(pkg_base_path)

        # Apply overrides.
        #
        # If we're installing into an existing package, then existing attributes
        # in that package take precedence over `overrides`. If we're installing
        # to a new package, then `overrides` takes precedence always.
        #
        # This is done so that variants added to an existing package don't change
        # attributes such as 'timestamp' or release-related fields like 'revision'.
        #
        for key, value in overrides.iteritems():
            if existing_package:
                if key not in package_data:
                    package_data[key] = value
            else:
                if value is self.remove:
                    package_data.pop(key, None)
                else:
                    package_data[key] = value

        # timestamp defaults to now if not specified
        if not package_data.get("timestamp"):
            package_data["timestamp"] = int(time.time())

        # format version is always set
        package_data["format_version"] = format_version

        # write out new package definition file
        package_file = ".".join([package_filename, package_extension])
        filepath = os.path.join(pkg_base_path, package_file)

        with make_path_writable(pkg_base_path):
            with open_file_for_write(filepath, mode=self.package_file_mode) as f:
                dump_package_data(package_data, buf=f, format_=package_format)

        # delete the tmp 'building' file.
        if variant_version:
            filename = self.building_prefix + str(variant_version)
            filepath = os.path.join(family_path, filename)
            if os.path.exists(filepath):
                try:
                    os.remove(filepath)
                except:
                    pass

        # delete other stale building files; previous failed releases may have
        # left some around
        try:
            self._delete_stale_build_tagfiles(family_path)
        except:
            pass

        # touch the family dir, this keeps memcached resolves updated properly
        os.utime(family_path, None)

        # load new variant
        new_variant = None
        self.clear_caches()
        family = self.get_package_family(variant_name)

        if family:
            for package in self.iter_packages(family):
                if package.version == variant_version:
                    for variant_ in self.iter_variants(package):
                        if variant_.index == installed_variant_index:
                            new_variant = variant_
                            break
                elif new_variant:
                    break

        if not new_variant:
            raise RezSystemError("Internal failure - expected installed variant")
        return new_variant
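
The package_changed detection in the middle of _create_variant boils down to comparing the existing and incoming package definitions once the release-only and variant-specific keys have been stripped. A small illustration of that idea using plain dicts; the key lists here are assumptions standing in for rez's package_release_keys and friends:

def package_data_differs(existing_data, new_data,
                         release_keys=("timestamp", "revision", "changelog"),
                         ignore_keys=("format_version", "base", "variants")):
    # True if the two definitions differ outside of release/variant data.
    data_1 = dict(existing_data)
    data_2 = dict(new_data)

    for key in release_keys + ignore_keys:
        data_1.pop(key, None)
        data_2.pop(key, None)

    return data_1 != data_2


old = {"name": "foo", "version": "1.0.0", "timestamp": 1000, "variants": [["python-2.7"]]}
new = {"name": "foo", "version": "1.0.0", "requires": ["bar"]}

print(package_data_differs(old, new))  # True - the new definition adds a requirement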
Example No. 12
def _copy_variant_payload(src_variant, dest_pkg_repo, shallow=False,
                          follow_symlinks=False, overrides=None, verbose=False):
        # Get payload path of source variant. For some types (eg from a "memory"
        # type repo) there may not be a root.
        #
        variant_root = getattr(src_variant, "root", None)
        if not variant_root:
            raise PackageCopyError(
                "Cannot copy source variant %s - it is a type of variant that "
                "does not have a root.", src_variant.uri
            )

        if not os.path.isdir(variant_root):
            raise PackageCopyError(
                "Cannot copy source variant %s - its root does not appear to "
                "be present on disk (%s).", src_variant.uri, variant_root
            )

        dest_variant_name = overrides.get("name") or src_variant.name
        dest_variant_version = overrides.get("version") or src_variant.version

        # determine variant installation path
        dest_pkg_payload_path = dest_pkg_repo.get_package_payload_path(
            package_name=dest_variant_name,
            package_version=dest_variant_version
        )

        if src_variant.subpath:
            variant_install_path = os.path.join(dest_pkg_payload_path,
                                                src_variant.subpath)
        else:
            variant_install_path = dest_pkg_payload_path

        # get ready for copy/symlinking
        copy_func = partial(replacing_copy,
                            follow_symlinks=follow_symlinks)

        if shallow:
            maybe_symlink = replacing_symlink
        else:
            maybe_symlink = copy_func

        # possibly make install path temporarily writable
        last_dir = get_existing_path(
            variant_install_path,
            topmost_path=os.path.dirname(dest_pkg_payload_path))

        if last_dir:
            ctxt = make_path_writable(last_dir)
        else:
            ctxt = with_noop()

        # copy the variant payload
        with ctxt:
            safe_makedirs(variant_install_path)

            # determine files not to copy
            skip_files = []

            if src_variant.subpath:
                # Detect overlapped variants. This is the case where one variant subpath
                # might be A, and another is A/B. We must ensure that A/B is not created
                # as a symlink during shallow install of variant A - that would then
                # cause A/B payload to be installed back into original package, possibly
                # corrupting it.
                #
                # Here we detect this case, and create a list of dirs not to copy/link,
                # because they are in fact a subpath dir for another variant.
                #
                skip_files.extend(_get_overlapped_variant_dirs(src_variant))
            else:
                # just skip package definition file
                for name in config.plugins.package_repository.filesystem.package_filenames:
                    for fmt in (FileFormat.py, FileFormat.yaml):
                        filename = name + '.' + fmt.extension
                        skip_files.append(filename)

            # copy/link all topmost files within the variant root
            for name in os.listdir(variant_root):
                if name in skip_files:
                    filepath = os.path.join(variant_root, name)

                    if verbose:
                        if src_variant.subpath:
                            msg = ("Did not copy %s - this is part of an "
                                   "overlapping variant's root path.")
                        else:
                            msg = "Did not copy package definition file %s"

                        print_info(msg, filepath)

                    continue

                src_path = os.path.join(variant_root, name)
                dest_path = os.path.join(variant_install_path, name)

                if os.path.islink(src_path):
                    copy_func(src_path, dest_path)
                else:
                    maybe_symlink(src_path, dest_path)

        # copy permissions of source variant dirs onto dest
        src_package = src_variant.parent
        src_pkg_repo = src_package.repository

        src_pkg_payload_path = src_pkg_repo.get_package_payload_path(
            package_name=src_package.name,
            package_version=src_package.version
        )

        shutil.copystat(src_pkg_payload_path, dest_pkg_payload_path)

        subpath = src_variant.subpath
        while subpath:
            src_path = os.path.join(src_pkg_payload_path, subpath)
            dest_path = os.path.join(dest_pkg_payload_path, subpath)
            shutil.copystat(src_path, dest_path)
            subpath = os.path.dirname(subpath)
Example No. 13
def _add_new_requirement_packages(package,
                                  namespaces,
                                  requirements,
                                  force=False):
    """Add new Rez package requirements to a Rez package, if needed.

    If no import statements were changed then this function does
    nothing. After all, if the imports of a package have not changed,
    there's no reason its Rez package requirements should be any different.

    Args:
        package (:class:`rez.packages_.DeveloperPackage`):
            Some Rez package whose requirements may change as a result
            of this function being run.
        namespaces (iter[str]): The Python dot-separated namespaces that a Rez package uses.
            In short, these are all of the import statements that a
            Rez package has inside of it and can be thought of as its
            "dependencies". The function uses this list to figure out
            whether the namespaces in `requirements` are actually being imported.
        requirements (iter[tuple[:class:`rez.vendor.version.requirement.Requirement`, tuple[str]]]):
            Each Rez package that might get added to `package` and a
            series of Python namespaces that the Rez package defines.
            If there's any overlap between the package's namespaces and
            the full `namespaces` then that means that `package` depends
            on the Rez package and so it is added as a dependency to
            `package`.
        force (bool, optional):
            If True, re-add every requirement that exists in both
            `package` and `requirements`. If False, only add a
            requirement when its namespaces appear in `namespaces`.
            Default is False.

    Returns:
        bool: If `package` was changed as a result of this function.

    """
    packages_to_add = set()

    if force:
        existing = set(requirement.name
                       for requirement in package.requires or [])
        packages_to_add = set(
            str(package_) for package_, _ in requirements
            if package_.name in existing)
    else:
        for package_, package_namespaces in requirements:
            if not _is_package_needed(tuple(package_namespaces), namespaces):
                continue

            packages_to_add.add(str(package_))

    if not packages_to_add:
        # Nothing to do so exit early.
        return False

    with open(package.filepath, "r") as handler:
        code = handler.read()

    new_code = api.add_to_attribute("requires", list(packages_to_add), code)

    with filesystem.make_path_writable(
            os.path.dirname(os.path.dirname(package.filepath))):
        with serialise.open_file_for_write(package.filepath) as handler:
            handler.write(new_code)

    return True
Example No. 14
    def _build_variant_base(self, variant, build_type, install_path=None,
                            clean=False, install=False, **kwargs):
        # create build/install paths
        install_path = install_path or self.package.config.local_packages_path
        variant_install_path = self.get_package_install_path(install_path)
        variant_build_path = self.build_path

        if variant.subpath:
            variant_build_path = os.path.join(variant_build_path, variant.subpath)
            variant_install_path = os.path.join(variant_install_path, variant.subpath)

        # create directories (build, install)
        if clean and os.path.exists(variant_build_path):
            shutil.rmtree(variant_build_path)

        safe_makedirs(variant_build_path)

        # find last dir of installation path that exists, and possibly make it
        # writable during variant installation
        #
        last_dir = get_existing_path(variant_install_path,
                                     topmost_path=install_path)
        if last_dir:
            ctxt = make_path_writable(last_dir)
        else:
            ctxt = with_noop()

        with ctxt:
            if install:
                # inform package repo that a variant is about to be built/installed
                pkg_repo = package_repository_manager.get_repository(install_path)
                pkg_repo.pre_variant_install(variant.resource)

                if not os.path.exists(variant_install_path):
                    safe_makedirs(variant_install_path)

            # Re-evaluate the variant, so that variables such as 'building' and
            # 'build_variant_index' are set, and any early-bound package attribs
            # are re-evaluated wrt these vars. This is done so that attribs such as
            # 'requires' can change depending on whether a build is occurring or not.
            #
            # Note that this re-evaluated variant is ONLY used here, for the purposes
            # of creating the build context. The variant that is actually installed
            # is the one evaluated where 'building' is False.
            #
            re_evaluated_package = variant.parent.get_reevaluated({
                "building": True,
                "build_variant_index": variant.index or 0,
                "build_variant_requires": variant.variant_requires
            })
            re_evaluated_variant = re_evaluated_package.get_variant(variant.index)

            # create build environment
            context, rxt_filepath = self.create_build_context(
                variant=re_evaluated_variant,
                build_type=build_type,
                build_path=variant_build_path)

            # run build system
            build_system_name = self.build_system.name()
            self._print("\nInvoking %s build system...", build_system_name)

            build_result = self.build_system.build(
                context=context,
                variant=variant,
                build_path=variant_build_path,
                install_path=variant_install_path,
                install=install,
                build_type=build_type)

            if not build_result.get("success"):
                raise BuildError("The %s build system failed." % build_system_name)

            if install:
                # install some files for debugging purposes
                extra_files = build_result.get("extra_files", [])
                if rxt_filepath:
                    extra_files = extra_files + [rxt_filepath]

                for file_ in extra_files:
                    copy_or_replace(file_, variant_install_path)

                # Install include modules. Note that this doesn't need to be done
                # multiple times, but for subsequent variants it has no effect.
                #
                self._install_include_modules(install_path)

            return build_result
Example No. 15
def _copy_variant_payload(src_variant, dest_pkg_repo, shallow=False,
                          follow_symlinks=False, overrides=None, verbose=False):
    # Get payload path of source variant. For some types (eg from a "memory"
    # type repo) there may not be a root.
    #
    variant_root = getattr(src_variant, "root", None)

    if not variant_root:
        raise PackageCopyError(
            "Cannot copy source variant %s - it is a type of variant that "
            "does not have a root." % src_variant.uri
        )

    if not os.path.isdir(variant_root):
        raise PackageCopyError(
            "Cannot copy source variant %s - its root does not appear to "
            "be present on disk (%s)." % src_variant.uri, variant_root
        )

    dest_variant_name = overrides.get("name") or src_variant.name
    dest_variant_version = overrides.get("version") or src_variant.version

    # determine variant installation path
    dest_pkg_payload_path = dest_pkg_repo.get_package_payload_path(
        package_name=dest_variant_name,
        package_version=dest_variant_version
    )

    is_varianted = (src_variant.index is not None)
    src_variant_subpath = None

    if is_varianted:
        src_variant_subpath = src_variant._non_shortlinked_subpath

        variant_install_path = os.path.join(
            dest_pkg_payload_path, src_variant_subpath)
    else:
        variant_install_path = dest_pkg_payload_path

    # get ready for copy/symlinking
    copy_func = partial(replacing_copy,
                        follow_symlinks=follow_symlinks)

    if shallow:
        maybe_symlink = replacing_symlink
    else:
        maybe_symlink = copy_func

    # possibly make install path temporarily writable
    last_dir = get_existing_path(
        variant_install_path,
        topmost_path=os.path.dirname(dest_pkg_payload_path))

    if last_dir and config.make_package_temporarily_writable:
        ctxt = make_path_writable(last_dir)
    else:
        ctxt = with_noop()

    # copy the variant payload
    with ctxt:
        safe_makedirs(variant_install_path)

        # determine files not to copy
        skip_files = []

        if is_varianted and not src_variant.parent.hashed_variants:
            # Detect overlapped variants. This is the case where one variant subpath
            # might be A, and another is A/B. We must ensure that A/B is not created
            # as a symlink during shallow install of variant A - that would then
            # cause A/B payload to be installed back into original package, possibly
            # corrupting it.
            #
            # Here we detect this case, and create a list of dirs not to copy/link,
            # because they are in fact a subpath dir for another variant.
            #
            # Note that for hashed variants, we don't do this check because overlapped
            # variants are not possible.
            #
            skip_files.extend(_get_overlapped_variant_dirs(src_variant))
        else:
            # just skip package definition file
            for name in config.plugins.package_repository.filesystem.package_filenames:
                for fmt in (FileFormat.py, FileFormat.yaml):
                    filename = name + '.' + fmt.extension
                    skip_files.append(filename)

        # copy/link all topmost files within the variant root
        for name in os.listdir(variant_root):
            if name in skip_files:
                filepath = os.path.join(variant_root, name)

                if verbose and is_varianted:
                    print_info(
                        "Did not copy %s - this is part of an overlapping "
                        "variant's root path.", filepath
                    )

                continue

            src_path = os.path.join(variant_root, name)
            dest_path = os.path.join(variant_install_path, name)

            if os.path.islink(src_path):
                copy_func(src_path, dest_path)
            else:
                maybe_symlink(src_path, dest_path)

    # copy permissions of source variant dirs onto dest
    src_package = src_variant.parent
    src_pkg_repo = src_package.repository

    src_pkg_payload_path = src_pkg_repo.get_package_payload_path(
        package_name=src_package.name,
        package_version=src_package.version
    )

    shutil.copystat(src_pkg_payload_path, dest_pkg_payload_path)

    subpath = src_variant_subpath

    while subpath:
        src_path = os.path.join(src_pkg_payload_path, subpath)
        dest_path = os.path.join(dest_pkg_payload_path, subpath)
        shutil.copystat(src_path, dest_path)
        subpath = os.path.dirname(subpath)

    # create the variant shortlink
    if src_variant.parent.hashed_variants:
        try:
            # base _v dir
            base_shortlinks_path = os.path.join(
                dest_pkg_payload_path,
                src_package.config.variant_shortlinks_dirname
            )

            safe_makedirs(base_shortlinks_path)

            # shortlink
            rel_variant_path = os.path.relpath(
                variant_install_path, base_shortlinks_path)
            create_unique_base26_symlink(
                base_shortlinks_path, rel_variant_path)

        except Exception as e:
            # Treat any error as warning - lack of shortlink is not
            # a breaking issue, it just means the variant root path
            # will be long.
            #
            print_warning(
                "Error creating variant shortlink for %s: %s: %s",
                variant_install_path, e.__class__.__name__, e
            )
Example No. 16
    def _create_variant(self, variant, dry_run=False, overrides=None):
        # special case overrides
        variant_name = overrides.get("name") or variant.name
        variant_version = overrides.get("version") or variant.version

        overrides = (overrides or {}).copy()
        overrides.pop("name", None)
        overrides.pop("version", None)

        # find or create the package family
        family = self.get_package_family(variant_name)
        if not family:
            family = self._create_family(variant_name)

        if isinstance(family, FileSystemCombinedPackageFamilyResource):
            raise NotImplementedError(
                "Cannot install variant into combined-style package file %r."
                % family.filepath)

        # find the package if it already exists
        existing_package = None

        for package in self.iter_packages(family):
            if package.version == variant_version:
                # during a build, the family/version dirs get created ahead of
                # time, which causes a 'missing package definition file' error.
                # This is fine, we can just ignore it and write the new file.
                try:
                    package.validate_data()
                except PackageDefinitionFileMissing:
                    break

                uuids = set([variant.uuid, package.uuid])
                if len(uuids) > 1 and None not in uuids:
                    raise ResourceError(
                        "Cannot install variant %r into package %r - the "
                        "packages are not the same (UUID mismatch)"
                        % (variant, package))

                existing_package = package

                if variant.index is None:
                    if package.variants:
                        raise ResourceError(
                            "Attempting to install a package without variants "
                            "(%r) into an existing package with variants (%r)"
                            % (variant, package))
                elif not package.variants:
                    raise ResourceError(
                        "Attempting to install a variant (%r) into an existing "
                        "package without variants (%r)" % (variant, package))

        existing_package_data = None
        release_data = {}

        # Need to treat 'config' as special case. In validated data, this is
        # converted to a Config object. We need it as the raw dict that you'd
        # see in a package.py.
        #
        def _get_package_data(pkg):
            data = pkg.validated_data()
            if hasattr(pkg, "_data"):
                raw_data = pkg._data
            else:
                raw_data = pkg.resource._data

            raw_config_data = raw_data.get('config')
            data.pop("config", None)

            if raw_config_data:
                data["config"] = raw_config_data

            return data

        def _remove_build_keys(obj):
            for key in package_build_only_keys:
                obj.pop(key, None)

        new_package_data = _get_package_data(variant.parent)
        new_package_data.pop("variants", None)
        new_package_data["name"] = variant_name
        if variant_version:
            new_package_data["version"] = variant_version
        package_changed = False

        _remove_build_keys(new_package_data)

        if existing_package:
            debug_print(
                "Found existing package for installation of variant %s: %s",
                variant.uri, existing_package.uri
            )

            existing_package_data = _get_package_data(existing_package)
            _remove_build_keys(existing_package_data)

            # detect case where new variant introduces package changes outside of variant
            data_1 = existing_package_data.copy()
            data_2 = new_package_data.copy()

            for key in package_release_keys:
                data_2.pop(key, None)
                value = data_1.pop(key, None)
                if value is not None:
                    release_data[key] = value

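            # these keys are expected to differ (or only exist) in the
            # installed package, so exclude them from the comparison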
            for key in ("format_version", "base", "variants"):
                data_1.pop(key, None)
                data_2.pop(key, None)

            package_changed = (data_1 != data_2)

            if debug_print:
                if package_changed:
                    from rez.utils.data_utils import get_dict_diff_str

                    debug_print("Variant %s package data differs from package %s",
                                variant.uri, existing_package.uri)

                    txt = get_dict_diff_str(data_1, data_2, "Changes:")
                    debug_print(txt)
                else:
                    debug_print("Variant %s package data matches package %s",
                                variant.uri, existing_package.uri)

        # check for existing installed variant
        existing_installed_variant = None
        installed_variant_index = None

        if existing_package:
            if variant.index is None:
                existing_installed_variant = \
                    next(self.iter_variants(existing_package))
            else:
                variant_requires = variant.variant_requires

                for variant_ in self.iter_variants(existing_package):
                    variant_requires_ = existing_package.variants[variant_.index]
                    if variant_requires_ == variant_requires:
                        installed_variant_index = variant_.index
                        existing_installed_variant = variant_

        if existing_installed_variant:
            debug_print(
                "Variant %s already has installed equivalent: %s",
                variant.uri, existing_installed_variant.uri
            )

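        # dry run: when the package data would be unchanged, report the
        # already-installed equivalent variant (if any); otherwise return
        # None, since installing would modify the package definition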
        if dry_run:
            if not package_changed:
                return existing_installed_variant
            else:
                return None

        # construct package data for new installed package definition
        if existing_package:
            _, file_ = os.path.split(existing_package.filepath)
            package_filename, package_extension = os.path.splitext(file_)
            package_extension = package_extension[1:]
            package_format = FileFormat[package_extension]

            if package_changed:
                # graft together new package data, with existing package variants,
                # and other data that needs to stay unchanged (eg timestamp)
                package_data = new_package_data

                if variant.index is not None:
                    package_data["variants"] = existing_package_data.get("variants", [])
            else:
                package_data = existing_package_data
        else:
            package_data = new_package_data
            package_filename = _settings.package_filenames[0]
            package_extension = "py"
            package_format = FileFormat.py

        # merge existing release data (if any) into the package. Note that when
        # this data becomes variant-specific, this step will no longer be needed
        package_data.update(release_data)

        # merge the new variant into the package
        if installed_variant_index is None and variant.index is not None:
            variant_requires = variant.variant_requires
            if not package_data.get("variants"):
                package_data["variants"] = []
            package_data["variants"].append(variant_requires)
            installed_variant_index = len(package_data["variants"]) - 1

        # 'base' is derived from the install location when the package is
        # loaded, so it must not be written into the package definition
        package_data.pop("base", None)

        # create version dir if it doesn't already exist
        family_path = os.path.join(self.location, variant_name)
        if variant_version:
            pkg_base_path = os.path.join(family_path, str(variant_version))
        else:
            pkg_base_path = family_path
        if not os.path.exists(pkg_base_path):
            os.makedirs(pkg_base_path)

        # Apply overrides.
        #
        # If we're installing into an existing package, then existing attributes
        # in that package take precedence over `overrides`. If we're installing
        # to a new package, then `overrides` takes precedence always.
        #
        # This is done so that variants added to an existing package don't change
        # attributes such as 'timestamp' or release-related fields like 'revision'.
        #
        for key, value in overrides.items():
            if existing_package:
                if key not in package_data:
                    package_data[key] = value
            else:
                if value is self.remove:
                    package_data.pop(key, None)
                else:
                    package_data[key] = value

        # timestamp defaults to now if not specified
        if not package_data.get("timestamp"):
            package_data["timestamp"] = int(time.time())

        # format version is always set
        package_data["format_version"] = format_version

        # write out new package definition file
        package_file = ".".join([package_filename, package_extension])
        filepath = os.path.join(pkg_base_path, package_file)

        with make_path_writable(pkg_base_path):
            with open_file_for_write(filepath, mode=self.package_file_mode) as f:
                dump_package_data(package_data, buf=f, format_=package_format)

        # delete the tmp 'building' file.
        if variant_version:
            filename = self.building_prefix + str(variant_version)
            filepath = os.path.join(family_path, filename)
            if os.path.exists(filepath):
                try:
                    os.remove(filepath)
                except OSError:
                    pass

        # delete other stale building files; previous failed releases may have
        # left some around
        try:
            self._delete_stale_build_tagfiles(family_path)
        except Exception:
            pass

        # touch the family dir, this keeps memcached resolves updated properly
        os.utime(family_path, None)

        # load new variant
        new_variant = None
        self.clear_caches()
        family = self.get_package_family(variant_name)

        if family:
            for package in self.iter_packages(family):
                if package.version == variant_version:
                    for variant_ in self.iter_variants(package):
                        if variant_.index == installed_variant_index:
                            new_variant = variant_
                            break
                elif new_variant:
                    break

        if not new_variant:
            raise RezSystemError("Internal failure - expected installed variant")
        return new_variant
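
The override-handling rule in the loop above (existing package attributes win
when adding a variant to an existing package; overrides win for a brand-new
package, with a sentinel value meaning "remove this key") can be summarized
with the small standalone sketch below. The names `REMOVE` and
`merge_overrides` are illustrative stand-ins for the repository's `self.remove`
sentinel and the inline loop; this is not code from rez itself.

# Minimal sketch of the overrides precedence described above (assumed names).
REMOVE = object()  # stands in for the repository's `self.remove` sentinel


def merge_overrides(package_data, overrides, existing_package):
    for key, value in overrides.items():
        if existing_package:
            # attributes already present (eg 'timestamp', 'revision') win, so
            # adding a variant does not rewrite release-related fields
            package_data.setdefault(key, value)
        elif value is REMOVE:
            package_data.pop(key, None)
        else:
            package_data[key] = value
    return package_data

For example, merge_overrides({"timestamp": 1}, {"timestamp": 2}, True) keeps
the timestamp at 1, while merge_overrides({}, {"timestamp": 2}, False) sets it
to 2.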